From 63214e81e13fc6da5e9bc9acfafe9969ec12e560 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 16 May 2024 00:00:45 +0200 Subject: [PATCH 01/42] Introduce initial version of TEE request processor --- Cargo.lock | 4 + core/lib/object_store/src/raw.rs | 2 + core/lib/prover_interface/Cargo.toml | 2 + core/lib/prover_interface/src/api.rs | 67 +++- core/lib/prover_interface/src/outputs.rs | 27 +- core/node/proof_data_handler/Cargo.toml | 3 + core/node/proof_data_handler/src/lib.rs | 29 +- .../src/request_processor.rs | 1 + .../src/tee_request_processor.rs | 295 ++++++++++++++++++ 9 files changed, 423 insertions(+), 7 deletions(-) create mode 100644 core/node/proof_data_handler/src/tee_request_processor.rs diff --git a/Cargo.lock b/Cargo.lock index 0bb1fd0fced5..fd6bdb52a3a3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9054,11 +9054,14 @@ dependencies = [ "axum", "tokio", "tracing", + "vm_utils", "zksync_config", "zksync_dal", "zksync_object_store", "zksync_prover_interface", + "zksync_state", "zksync_types", + "zksync_utils", ] [[package]] @@ -9122,6 +9125,7 @@ dependencies = [ "bincode", "chrono", "circuit_sequencer_api 0.1.50", + "multivm", "serde", "serde_with", "strum", diff --git a/core/lib/object_store/src/raw.rs b/core/lib/object_store/src/raw.rs index 8b99f9769900..66cda57a0ab1 100644 --- a/core/lib/object_store/src/raw.rs +++ b/core/lib/object_store/src/raw.rs @@ -16,6 +16,7 @@ pub enum Bucket { NodeAggregationWitnessJobsFri, SchedulerWitnessJobsFri, ProofsFri, + ProofsTee, StorageSnapshot, TeeVerifierInput, } @@ -33,6 +34,7 @@ impl Bucket { Self::NodeAggregationWitnessJobsFri => "node_aggregation_witness_jobs_fri", Self::SchedulerWitnessJobsFri => "scheduler_witness_jobs_fri", Self::ProofsFri => "proofs_fri", + Self::ProofsTee => "proofs_tee", Self::StorageSnapshot => "storage_logs_snapshots", Self::TeeVerifierInput => "tee_verifier_inputs", } diff --git a/core/lib/prover_interface/Cargo.toml b/core/lib/prover_interface/Cargo.toml index 
216eec8b9857..e2368f731c89 100644 --- a/core/lib/prover_interface/Cargo.toml +++ b/core/lib/prover_interface/Cargo.toml @@ -10,8 +10,10 @@ keywords.workspace = true categories.workspace = true [dependencies] +multivm.workspace = true zksync_types.workspace = true zksync_object_store.workspace = true +# zksync_tee_verifier.workspace = true # We can use the newest api to send proofs to L1. circuit_sequencer_api_1_5_0.workspace = true diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index 0353c6f39241..46737c07f4b1 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -1,14 +1,19 @@ //! Prover and server subsystems communicate via the API. //! This module defines the types used in the API. +use multivm::interface::{L1BatchEnv, SystemEnv}; use serde::{Deserialize, Serialize}; use zksync_types::{ basic_fri_types::Eip4844Blobs, + block::L2BlockExecutionData, protocol_version::{L1VerifierConfig, ProtocolSemanticVersion}, - L1BatchNumber, + L1BatchNumber, H256, }; -use crate::{inputs::PrepareBasicCircuitsJob, outputs::L1BatchProofForL1}; +use crate::{ + inputs::PrepareBasicCircuitsJob, + outputs::{L1BatchProofForL1, L1BatchTeeProofForL1}, +}; #[derive(Debug, Serialize, Deserialize)] pub struct ProofGenerationData { @@ -19,18 +24,72 @@ pub struct ProofGenerationData { pub eip_4844_blobs: Eip4844Blobs, } +/// ****************************************************************** +/// Ugly copy-paste section +/// +/// This line would do, but there are circular dependencies: +/// type TeeProofGenerationData = zksync_tee_verifier::TeeVerifierInput; + +pub type TeeProofGenerationData = TeeVerifierInput; + +/// Version 1 of the data used as input for the TEE verifier. 
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct V1TeeVerifierInput { + prepare_basic_circuits_job: PrepareBasicCircuitsJob, + l2_blocks_execution_data: Vec, + l1_batch_env: L1BatchEnv, + system_env: SystemEnv, + used_contracts: Vec<(H256, Vec)>, +} + +/// Data used as input for the TEE verifier. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[non_exhaustive] +#[allow(clippy::large_enum_variant)] +pub enum TeeVerifierInput { + /// `V0` suppresses warning about irrefutable `let...else` pattern + V0, + V1(V1TeeVerifierInput), +} + +impl TeeVerifierInput { + pub fn new( + prepare_basic_circuits_job: PrepareBasicCircuitsJob, + l2_blocks_execution_data: Vec, + l1_batch_env: L1BatchEnv, + system_env: SystemEnv, + used_contracts: Vec<(H256, Vec)>, + ) -> Self { + TeeVerifierInput::V1(V1TeeVerifierInput { + prepare_basic_circuits_job, + l2_blocks_execution_data, + l1_batch_env, + system_env, + used_contracts, + }) + } +} +/// ****************************************************************** + #[derive(Debug, Serialize, Deserialize)] pub struct ProofGenerationDataRequest {} +pub type TeeProofGenerationDataRequest = ProofGenerationDataRequest; + #[derive(Debug, Serialize, Deserialize)] -pub enum ProofGenerationDataResponse { - Success(Option>), +pub enum GenericProofGenerationDataResponse { + Success(Option>), Error(String), } +pub type ProofGenerationDataResponse = GenericProofGenerationDataResponse; +pub type TeeProofGenerationDataResponse = + GenericProofGenerationDataResponse; + #[derive(Debug, Serialize, Deserialize)] pub enum SubmitProofRequest { Proof(Box), + TeeProof(Box), // The proof generation was skipped due to sampling SkippedProofGeneration, } diff --git a/core/lib/prover_interface/src/outputs.rs b/core/lib/prover_interface/src/outputs.rs index 1ef9bb4bad29..832244ef5cbc 100644 --- a/core/lib/prover_interface/src/outputs.rs +++ b/core/lib/prover_interface/src/outputs.rs @@ -5,8 +5,7 @@ use serde::{Deserialize, Serialize}; 
use zksync_object_store::{serialize_using_bincode, Bucket, StoredObject}; use zksync_types::{protocol_version::ProtocolSemanticVersion, L1BatchNumber}; -/// The only type of proof utilized by the core subsystem: a "final" proof that can be sent -/// to the L1 contract. +/// A "final" proof that can be sent to the L1 contract. #[derive(Clone, Serialize, Deserialize)] pub struct L1BatchProofForL1 { pub aggregation_result_coords: [[u8; 32]; 4], @@ -14,6 +13,11 @@ pub struct L1BatchProofForL1 { pub protocol_version: ProtocolSemanticVersion, } +#[derive(Clone, Serialize, Deserialize)] +pub struct L1BatchTeeProofForL1 { + // TBD Remote Attestation Report / Signature should be here +} + impl fmt::Debug for L1BatchProofForL1 { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter @@ -23,6 +27,14 @@ impl fmt::Debug for L1BatchProofForL1 { } } +impl fmt::Debug for L1BatchTeeProofForL1 { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter + .debug_struct("L1BatchTeeProofForL1") + .finish_non_exhaustive() + } +} + impl StoredObject for L1BatchProofForL1 { const BUCKET: Bucket = Bucket::ProofsFri; type Key<'a> = (L1BatchNumber, ProtocolSemanticVersion); @@ -35,3 +47,14 @@ impl StoredObject for L1BatchProofForL1 { serialize_using_bincode!(); } + +impl StoredObject for L1BatchTeeProofForL1 { + const BUCKET: Bucket = Bucket::ProofsTee; + type Key<'a> = L1BatchNumber; + + fn encode_key(key: Self::Key<'_>) -> String { + format!("l1_batch_tee_proof_{key}.bin") + } + + serialize_using_bincode!(); +} diff --git a/core/node/proof_data_handler/Cargo.toml b/core/node/proof_data_handler/Cargo.toml index 2e7141ea4d6b..0d55bf2dbb09 100644 --- a/core/node/proof_data_handler/Cargo.toml +++ b/core/node/proof_data_handler/Cargo.toml @@ -10,11 +10,14 @@ keywords.workspace = true categories.workspace = true [dependencies] +vm_utils.workspace = true zksync_config.workspace = true zksync_dal.workspace = true zksync_object_store.workspace = true 
zksync_prover_interface.workspace = true +zksync_state.workspace = true zksync_types.workspace = true +zksync_utils.workspace = true tracing.workspace = true anyhow.workspace = true diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index 4bd082b00ddd..cecbb05116a4 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -6,10 +6,14 @@ use tokio::sync::watch; use zksync_config::configs::ProofDataHandlerConfig; use zksync_dal::{ConnectionPool, Core}; use zksync_object_store::ObjectStore; -use zksync_prover_interface::api::{ProofGenerationDataRequest, SubmitProofRequest}; +use zksync_prover_interface::api::{ + ProofGenerationDataRequest, SubmitProofRequest, TeeProofGenerationDataRequest, +}; use zksync_types::commitment::L1BatchCommitmentMode; use crate::request_processor::RequestProcessor; +mod tee_request_processor; +use crate::tee_request_processor::TeeRequestProcessor; mod request_processor; @@ -22,6 +26,9 @@ pub async fn run_server( ) -> anyhow::Result<()> { let bind_address = SocketAddr::from(([0, 0, 0, 0], config.http_port)); tracing::debug!("Starting proof data handler server on {bind_address}"); + let get_tee_proof_gen_processor = + TeeRequestProcessor::new(blob_store.clone(), pool.clone(), config.clone()); + let submit_tee_proof_processor = get_tee_proof_gen_processor.clone(); let get_proof_gen_processor = RequestProcessor::new(blob_store, pool, config, commitment_mode); let submit_proof_processor = get_proof_gen_processor.clone(); let app = Router::new() @@ -46,6 +53,26 @@ pub async fn run_server( .await }, ), + ) + .route( + "/tee_proof_generation_data", + post( + move |payload: Json| async move { + get_tee_proof_gen_processor + .get_proof_generation_data(payload) + .await + }, + ), + ) + .route( + "/submit_tee_proof/:l1_batch_number", + post( + move |l1_batch_number: Path, payload: Json| async move { + submit_tee_proof_processor + .submit_proof(l1_batch_number, 
payload) + .await + }, + ), ); axum::Server::bind(&bind_address) diff --git a/core/node/proof_data_handler/src/request_processor.rs b/core/node/proof_data_handler/src/request_processor.rs index 582cb78f70c7..a0d793c885c5 100644 --- a/core/node/proof_data_handler/src/request_processor.rs +++ b/core/node/proof_data_handler/src/request_processor.rs @@ -252,6 +252,7 @@ impl RequestProcessor { .await .map_err(RequestProcessorError::Sqlx)?; } + SubmitProofRequest::TeeProof(_proof) => { /* TBD */ } SubmitProofRequest::SkippedProofGeneration => { self.pool .connection() diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs new file mode 100644 index 000000000000..3d6fb47142ee --- /dev/null +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -0,0 +1,295 @@ +use std::sync::Arc; + +use anyhow::Context; +use axum::{ + extract::Path, + http::StatusCode, + response::{IntoResponse, Response}, + Json, +}; +use tokio::runtime::Handle; +use vm_utils::storage::L1BatchParamsProvider; +use zksync_config::configs::ProofDataHandlerConfig; +use zksync_dal::{ConnectionPool, Core, CoreDal, SqlxError}; +use zksync_object_store::{ObjectStore, ObjectStoreError}; +use zksync_prover_interface::api::{ + SubmitProofRequest, SubmitProofResponse, TeeProofGenerationData, TeeProofGenerationDataRequest, + TeeProofGenerationDataResponse, +}; +use zksync_state::{PostgresStorage, ReadStorage}; +use zksync_types::{ + block::L1BatchHeader, commitment::serialize_commitments, web3::keccak256, L1BatchNumber, + L2BlockNumber, L2ChainId, H256, +}; +use zksync_utils::u256_to_h256; + +#[derive(Clone)] +pub(crate) struct TeeRequestProcessor { + blob_store: Arc, + pool: ConnectionPool, + config: ProofDataHandlerConfig, +} + +pub(crate) enum TeeRequestProcessorError { + ObjectStore(ObjectStoreError), + Sqlx(SqlxError), +} + +impl IntoResponse for TeeRequestProcessorError { + fn into_response(self) -> Response { + let 
(status_code, message) = match self { + TeeRequestProcessorError::ObjectStore(err) => { + tracing::error!("GCS error: {:?}", err); + ( + StatusCode::BAD_GATEWAY, + "Failed fetching/saving from GCS".to_owned(), + ) + } + TeeRequestProcessorError::Sqlx(err) => { + tracing::error!("Sqlx error: {:?}", err); + match err { + SqlxError::RowNotFound => { + (StatusCode::NOT_FOUND, "Non existing L1 batch".to_owned()) + } + _ => ( + StatusCode::BAD_GATEWAY, + "Failed fetching/saving from db".to_owned(), + ), + } + } + }; + (status_code, message).into_response() + } +} + +impl TeeRequestProcessor { + pub(crate) fn new( + blob_store: Arc, + pool: ConnectionPool, + config: ProofDataHandlerConfig, + ) -> Self { + Self { + blob_store, + pool, + config, + } + } + + pub(crate) async fn get_proof_generation_data( + &self, + request: Json, + ) -> Result, TeeRequestProcessorError> { + tracing::info!("Received request for proof generation data: {:?}", request); + + let mut connection = self.pool.connection().await.unwrap(); + + let l1_batch_number_result = connection + .proof_generation_dal() + .get_next_block_to_be_proven(self.config.proof_generation_timeout()) + .await; + + let l1_batch_number = match l1_batch_number_result { + Some(number) => number, + None => return Ok(Json(TeeProofGenerationDataResponse::Success(None))), + }; + + let blob = self + .blob_store + .get(l1_batch_number) + .await + .map_err(TeeRequestProcessorError::ObjectStore)?; + + let l1_batch_header = connection + .blocks_dal() + .get_l1_batch_header(l1_batch_number) + .await + .unwrap() + .expect(&format!("Missing header for {}", l1_batch_number)); + + let l2_blocks_execution_data = connection + .transactions_dal() + .get_l2_blocks_to_execute_for_l1_batch(l1_batch_number) + .await + .unwrap(); + + let last_batch_miniblock_number = l2_blocks_execution_data.first().unwrap().number - 1; + + let l1_batch_params_provider = L1BatchParamsProvider::new(&mut connection).await.unwrap(); + + let first_miniblock_in_batch = 
l1_batch_params_provider + .load_first_l2_block_in_batch(&mut connection, l1_batch_number) + .await + .unwrap() + .unwrap(); + + let validation_computational_gas_limit = u32::MAX; + + let (system_env, l1_batch_env) = l1_batch_params_provider + .load_l1_batch_params( + &mut connection, + &first_miniblock_in_batch, + validation_computational_gas_limit, + L2ChainId::default(), // TODO: pass correct chain id + // l2_chain_id, + ) + .await + .unwrap(); + + let rt_handle = Handle::current(); + + let pool = self.pool.clone(); + + // `PostgresStorage` needs a blocking context + let used_contracts = rt_handle + .spawn_blocking(move || { + Self::get_used_contracts(last_batch_miniblock_number, l1_batch_header, pool) + }) + .await + .unwrap() + .unwrap(); + + let proof_gen_data = TeeProofGenerationData::new( + blob, + l2_blocks_execution_data, + l1_batch_env, + system_env, + used_contracts, + ); + + Ok(Json(TeeProofGenerationDataResponse::Success(Some( + Box::new(proof_gen_data), + )))) + } + + fn get_used_contracts( + last_batch_miniblock_number: L2BlockNumber, + l1_batch_header: L1BatchHeader, + connection_pool: ConnectionPool, + ) -> anyhow::Result)>> { + let rt_handle = Handle::current(); + + let connection = rt_handle + .block_on(connection_pool.connection()) + .context("failed to get connection for TeeVerifierInputProducer")?; + + let mut pg_storage = + PostgresStorage::new(rt_handle, connection, last_batch_miniblock_number, true); + + Ok(l1_batch_header + .used_contract_hashes + .into_iter() + .filter_map(|hash| { + pg_storage + .load_factory_dep(u256_to_h256(hash)) + .map(|bytes| (u256_to_h256(hash), bytes)) + }) + .collect()) + } + + pub(crate) async fn submit_proof( + &self, + Path(l1_batch_number): Path, + Json(payload): Json, + ) -> Result, TeeRequestProcessorError> { + tracing::info!("Received proof for block number: {:?}", l1_batch_number); + let l1_batch_number = L1BatchNumber(l1_batch_number); + match payload { + SubmitProofRequest::Proof(proof) => { + let 
blob_url = self + .blob_store + .put(l1_batch_number, &*proof) + .await + .map_err(TeeRequestProcessorError::ObjectStore)?; + + let system_logs_hash_from_prover = + H256::from_slice(&proof.aggregation_result_coords[0]); + let state_diff_hash_from_prover = + H256::from_slice(&proof.aggregation_result_coords[1]); + let bootloader_heap_initial_content_from_prover = + H256::from_slice(&proof.aggregation_result_coords[2]); + let events_queue_state_from_prover = + H256::from_slice(&proof.aggregation_result_coords[3]); + + let mut storage = self.pool.connection().await.unwrap(); + + let l1_batch = storage + .blocks_dal() + .get_l1_batch_metadata(l1_batch_number) + .await + .unwrap() + .expect("Proved block without metadata"); + + let is_pre_boojum = l1_batch + .header + .protocol_version + .map(|v| v.is_pre_boojum()) + .unwrap_or(true); + if !is_pre_boojum { + let events_queue_state = l1_batch + .metadata + .events_queue_commitment + .expect("No events_queue_commitment"); + let bootloader_heap_initial_content = l1_batch + .metadata + .bootloader_initial_content_commitment + .expect("No bootloader_initial_content_commitment"); + + if events_queue_state != events_queue_state_from_prover + || bootloader_heap_initial_content + != bootloader_heap_initial_content_from_prover + { + let server_values = format!("events_queue_state = {events_queue_state}, bootloader_heap_initial_content = {bootloader_heap_initial_content}"); + let prover_values = format!("events_queue_state = {events_queue_state_from_prover}, bootloader_heap_initial_content = {bootloader_heap_initial_content_from_prover}"); + panic!( + "Auxilary output doesn't match, server values: {} prover values: {}", + server_values, prover_values + ); + } + } + + let system_logs = serialize_commitments(&l1_batch.header.system_logs); + let system_logs_hash = H256(keccak256(&system_logs)); + + if !is_pre_boojum { + let state_diff_hash = l1_batch + .header + .system_logs + .into_iter() + .find(|elem| elem.0.key == 
H256::from_low_u64_be(2)) + .expect("No state diff hash key") + .0 + .value; + + if state_diff_hash != state_diff_hash_from_prover + || system_logs_hash != system_logs_hash_from_prover + { + let server_values = format!("system_logs_hash = {system_logs_hash}, state_diff_hash = {state_diff_hash}"); + let prover_values = format!("system_logs_hash = {system_logs_hash_from_prover}, state_diff_hash = {state_diff_hash_from_prover}"); + panic!( + "Auxilary output doesn't match, server values: {} prover values: {}", + server_values, prover_values + ); + } + } + storage + .proof_generation_dal() + .save_proof_artifacts_metadata(l1_batch_number, &blob_url) + .await + .map_err(TeeRequestProcessorError::Sqlx)?; + } + SubmitProofRequest::TeeProof(_proof) => { /* TBD */ } + SubmitProofRequest::SkippedProofGeneration => { + self.pool + .connection() + .await + .unwrap() + .proof_generation_dal() + .mark_proof_generation_job_as_skipped(l1_batch_number) + .await + .map_err(TeeRequestProcessorError::Sqlx)?; + } + } + + Ok(Json(SubmitProofResponse::Success)) + } +} From 20dee9c751f1de98aba2c404dd5d717a10e8e83b Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 16 May 2024 23:58:07 +0200 Subject: [PATCH 02/42] WIP: simplify --- Cargo.lock | 3 +- core/lib/prover_interface/Cargo.toml | 4 +- core/lib/prover_interface/src/api.rs | 53 +------- core/node/proof_data_handler/Cargo.toml | 2 +- core/node/proof_data_handler/src/lib.rs | 7 +- .../src/tee_request_processor.rs | 113 ++---------------- 6 files changed, 20 insertions(+), 162 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fd6bdb52a3a3..d64bd9c82c13 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9059,7 +9059,7 @@ dependencies = [ "zksync_dal", "zksync_object_store", "zksync_prover_interface", - "zksync_state", + "zksync_tee_verifier", "zksync_types", "zksync_utils", ] @@ -9125,7 +9125,6 @@ dependencies = [ "bincode", "chrono", "circuit_sequencer_api 0.1.50", - "multivm", "serde", "serde_with", "strum", diff --git 
a/core/lib/prover_interface/Cargo.toml b/core/lib/prover_interface/Cargo.toml index e2368f731c89..d1e46bacd425 100644 --- a/core/lib/prover_interface/Cargo.toml +++ b/core/lib/prover_interface/Cargo.toml @@ -10,10 +10,8 @@ keywords.workspace = true categories.workspace = true [dependencies] -multivm.workspace = true -zksync_types.workspace = true zksync_object_store.workspace = true -# zksync_tee_verifier.workspace = true +zksync_types.workspace = true # We can use the newest api to send proofs to L1. circuit_sequencer_api_1_5_0.workspace = true diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index 46737c07f4b1..6e08872fe2fa 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -1,13 +1,11 @@ //! Prover and server subsystems communicate via the API. //! This module defines the types used in the API. -use multivm::interface::{L1BatchEnv, SystemEnv}; use serde::{Deserialize, Serialize}; use zksync_types::{ basic_fri_types::Eip4844Blobs, - block::L2BlockExecutionData, protocol_version::{L1VerifierConfig, ProtocolSemanticVersion}, - L1BatchNumber, H256, + L1BatchNumber, }; use crate::{ @@ -24,53 +22,6 @@ pub struct ProofGenerationData { pub eip_4844_blobs: Eip4844Blobs, } -/// ****************************************************************** -/// Ugly copy-paste section -/// -/// This line would do, but there are circular dependencies: -/// type TeeProofGenerationData = zksync_tee_verifier::TeeVerifierInput; - -pub type TeeProofGenerationData = TeeVerifierInput; - -/// Version 1 of the data used as input for the TEE verifier. -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -pub struct V1TeeVerifierInput { - prepare_basic_circuits_job: PrepareBasicCircuitsJob, - l2_blocks_execution_data: Vec, - l1_batch_env: L1BatchEnv, - system_env: SystemEnv, - used_contracts: Vec<(H256, Vec)>, -} - -/// Data used as input for the TEE verifier. 
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -#[non_exhaustive] -#[allow(clippy::large_enum_variant)] -pub enum TeeVerifierInput { - /// `V0` suppresses warning about irrefutable `let...else` pattern - V0, - V1(V1TeeVerifierInput), -} - -impl TeeVerifierInput { - pub fn new( - prepare_basic_circuits_job: PrepareBasicCircuitsJob, - l2_blocks_execution_data: Vec, - l1_batch_env: L1BatchEnv, - system_env: SystemEnv, - used_contracts: Vec<(H256, Vec)>, - ) -> Self { - TeeVerifierInput::V1(V1TeeVerifierInput { - prepare_basic_circuits_job, - l2_blocks_execution_data, - l1_batch_env, - system_env, - used_contracts, - }) - } -} -/// ****************************************************************** - #[derive(Debug, Serialize, Deserialize)] pub struct ProofGenerationDataRequest {} @@ -83,8 +34,6 @@ pub enum GenericProofGenerationDataResponse { } pub type ProofGenerationDataResponse = GenericProofGenerationDataResponse; -pub type TeeProofGenerationDataResponse = - GenericProofGenerationDataResponse; #[derive(Debug, Serialize, Deserialize)] pub enum SubmitProofRequest { diff --git a/core/node/proof_data_handler/Cargo.toml b/core/node/proof_data_handler/Cargo.toml index 0d55bf2dbb09..b8a56fc3b71a 100644 --- a/core/node/proof_data_handler/Cargo.toml +++ b/core/node/proof_data_handler/Cargo.toml @@ -15,7 +15,7 @@ zksync_config.workspace = true zksync_dal.workspace = true zksync_object_store.workspace = true zksync_prover_interface.workspace = true -zksync_state.workspace = true +zksync_tee_verifier.workspace = true zksync_types.workspace = true zksync_utils.workspace = true diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index cecbb05116a4..c9a669cec808 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -20,16 +20,17 @@ mod request_processor; pub async fn run_server( config: ProofDataHandlerConfig, blob_store: Arc, - pool: ConnectionPool, + connection_pool: 
ConnectionPool, commitment_mode: L1BatchCommitmentMode, mut stop_receiver: watch::Receiver, ) -> anyhow::Result<()> { let bind_address = SocketAddr::from(([0, 0, 0, 0], config.http_port)); tracing::debug!("Starting proof data handler server on {bind_address}"); let get_tee_proof_gen_processor = - TeeRequestProcessor::new(blob_store.clone(), pool.clone(), config.clone()); + TeeRequestProcessor::new(blob_store.clone(), connection_pool.clone(), config.clone()); let submit_tee_proof_processor = get_tee_proof_gen_processor.clone(); - let get_proof_gen_processor = RequestProcessor::new(blob_store, pool, config, commitment_mode); + let get_proof_gen_processor = + RequestProcessor::new(blob_store, connection_pool, config, commitment_mode); let submit_proof_processor = get_proof_gen_processor.clone(); let app = Router::new() .route( diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index 3d6fb47142ee..734b27c5740f 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -1,27 +1,22 @@ use std::sync::Arc; -use anyhow::Context; use axum::{ extract::Path, http::StatusCode, response::{IntoResponse, Response}, Json, }; -use tokio::runtime::Handle; -use vm_utils::storage::L1BatchParamsProvider; use zksync_config::configs::ProofDataHandlerConfig; use zksync_dal::{ConnectionPool, Core, CoreDal, SqlxError}; use zksync_object_store::{ObjectStore, ObjectStoreError}; use zksync_prover_interface::api::{ - SubmitProofRequest, SubmitProofResponse, TeeProofGenerationData, TeeProofGenerationDataRequest, - TeeProofGenerationDataResponse, + GenericProofGenerationDataResponse, SubmitProofRequest, SubmitProofResponse, + TeeProofGenerationDataRequest, }; -use zksync_state::{PostgresStorage, ReadStorage}; -use zksync_types::{ - block::L1BatchHeader, commitment::serialize_commitments, web3::keccak256, L1BatchNumber, - L2BlockNumber, 
L2ChainId, H256, -}; -use zksync_utils::u256_to_h256; +use zksync_tee_verifier::TeeVerifierInput; +use zksync_types::{commitment::serialize_commitments, web3::keccak256, L1BatchNumber, H256}; + +pub type TeeProofGenerationDataResponse = GenericProofGenerationDataResponse; #[derive(Clone)] pub(crate) struct TeeRequestProcessor { @@ -81,111 +76,27 @@ impl TeeRequestProcessor { ) -> Result, TeeRequestProcessorError> { tracing::info!("Received request for proof generation data: {:?}", request); - let mut connection = self.pool.connection().await.unwrap(); - - let l1_batch_number_result = connection - .proof_generation_dal() - .get_next_block_to_be_proven(self.config.proof_generation_timeout()) - .await; + // TODO: Replace this line with an appropriate method to get the next batch number to be proven. + // It's likely that a new SQL column needs to be added to the `proof_generation_details` table. + // Take self.config.proof_generation_timeout() into account when selecting the next batch to be proven! 
+ let l1_batch_number_result = Option::from(L1BatchNumber::from(1)); let l1_batch_number = match l1_batch_number_result { Some(number) => number, None => return Ok(Json(TeeProofGenerationDataResponse::Success(None))), }; - let blob = self + let tee_verifier_input: TeeVerifierInput = self .blob_store .get(l1_batch_number) .await .map_err(TeeRequestProcessorError::ObjectStore)?; - let l1_batch_header = connection - .blocks_dal() - .get_l1_batch_header(l1_batch_number) - .await - .unwrap() - .expect(&format!("Missing header for {}", l1_batch_number)); - - let l2_blocks_execution_data = connection - .transactions_dal() - .get_l2_blocks_to_execute_for_l1_batch(l1_batch_number) - .await - .unwrap(); - - let last_batch_miniblock_number = l2_blocks_execution_data.first().unwrap().number - 1; - - let l1_batch_params_provider = L1BatchParamsProvider::new(&mut connection).await.unwrap(); - - let first_miniblock_in_batch = l1_batch_params_provider - .load_first_l2_block_in_batch(&mut connection, l1_batch_number) - .await - .unwrap() - .unwrap(); - - let validation_computational_gas_limit = u32::MAX; - - let (system_env, l1_batch_env) = l1_batch_params_provider - .load_l1_batch_params( - &mut connection, - &first_miniblock_in_batch, - validation_computational_gas_limit, - L2ChainId::default(), // TODO: pass correct chain id - // l2_chain_id, - ) - .await - .unwrap(); - - let rt_handle = Handle::current(); - - let pool = self.pool.clone(); - - // `PostgresStorage` needs a blocking context - let used_contracts = rt_handle - .spawn_blocking(move || { - Self::get_used_contracts(last_batch_miniblock_number, l1_batch_header, pool) - }) - .await - .unwrap() - .unwrap(); - - let proof_gen_data = TeeProofGenerationData::new( - blob, - l2_blocks_execution_data, - l1_batch_env, - system_env, - used_contracts, - ); - Ok(Json(TeeProofGenerationDataResponse::Success(Some( - Box::new(proof_gen_data), + Box::new(tee_verifier_input), )))) } - fn get_used_contracts( - 
last_batch_miniblock_number: L2BlockNumber, - l1_batch_header: L1BatchHeader, - connection_pool: ConnectionPool, - ) -> anyhow::Result)>> { - let rt_handle = Handle::current(); - - let connection = rt_handle - .block_on(connection_pool.connection()) - .context("failed to get connection for TeeVerifierInputProducer")?; - - let mut pg_storage = - PostgresStorage::new(rt_handle, connection, last_batch_miniblock_number, true); - - Ok(l1_batch_header - .used_contract_hashes - .into_iter() - .filter_map(|hash| { - pg_storage - .load_factory_dep(u256_to_h256(hash)) - .map(|bytes| (u256_to_h256(hash), bytes)) - }) - .collect()) - } - pub(crate) async fn submit_proof( &self, Path(l1_batch_number): Path, From 7ad45630eb4196d57524a19909ef4ac7b3e39998 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Fri, 17 May 2024 18:52:56 +0200 Subject: [PATCH 03/42] Refine the TEE proof submission endpoint --- core/lib/prover_interface/src/api.rs | 8 +- core/lib/prover_interface/src/outputs.rs | 3 +- core/node/proof_data_handler/src/errors.rs | 38 +++++ core/node/proof_data_handler/src/lib.rs | 10 +- .../src/request_processor.rs | 45 +---- .../src/tee_request_processor.rs | 159 +++--------------- 6 files changed, 77 insertions(+), 186 deletions(-) create mode 100644 core/node/proof_data_handler/src/errors.rs diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index 6e08872fe2fa..689910bbc0ef 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -36,13 +36,15 @@ pub enum GenericProofGenerationDataResponse { pub type ProofGenerationDataResponse = GenericProofGenerationDataResponse; #[derive(Debug, Serialize, Deserialize)] -pub enum SubmitProofRequest { - Proof(Box), - TeeProof(Box), +pub enum GenericSubmitProofRequest { + Proof(Box), // The proof generation was skipped due to sampling SkippedProofGeneration, } +pub type SubmitProofRequest = GenericSubmitProofRequest; +pub type SubmitTeeProofRequest = 
GenericSubmitProofRequest; + #[derive(Debug, Serialize, Deserialize)] pub enum SubmitProofResponse { Success, diff --git a/core/lib/prover_interface/src/outputs.rs b/core/lib/prover_interface/src/outputs.rs index 832244ef5cbc..e9a6b922b7d1 100644 --- a/core/lib/prover_interface/src/outputs.rs +++ b/core/lib/prover_interface/src/outputs.rs @@ -15,7 +15,8 @@ pub struct L1BatchProofForL1 { #[derive(Clone, Serialize, Deserialize)] pub struct L1BatchTeeProofForL1 { - // TBD Remote Attestation Report / Signature should be here + // TODO revisit what else is needed here + signature: Vec, } impl fmt::Debug for L1BatchProofForL1 { diff --git a/core/node/proof_data_handler/src/errors.rs b/core/node/proof_data_handler/src/errors.rs new file mode 100644 index 000000000000..91405692f03a --- /dev/null +++ b/core/node/proof_data_handler/src/errors.rs @@ -0,0 +1,38 @@ +use axum::{ + http::StatusCode, + response::{IntoResponse, Response}, +}; +use zksync_dal::SqlxError; +use zksync_object_store::ObjectStoreError; + +pub(crate) enum RequestProcessorError { + ObjectStore(ObjectStoreError), + Sqlx(SqlxError), +} + +impl IntoResponse for RequestProcessorError { + fn into_response(self) -> Response { + let (status_code, message) = match self { + RequestProcessorError::ObjectStore(err) => { + tracing::error!("GCS error: {:?}", err); + ( + StatusCode::BAD_GATEWAY, + "Failed fetching/saving from GCS".to_owned(), + ) + } + RequestProcessorError::Sqlx(err) => { + tracing::error!("Sqlx error: {:?}", err); + match err { + SqlxError::RowNotFound => { + (StatusCode::NOT_FOUND, "Non existing L1 batch".to_owned()) + } + _ => ( + StatusCode::BAD_GATEWAY, + "Failed fetching/saving from db".to_owned(), + ), + } + } + }; + (status_code, message).into_response() + } +} diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index c9a669cec808..4e92c76c9147 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -7,15 
+7,17 @@ use zksync_config::configs::ProofDataHandlerConfig; use zksync_dal::{ConnectionPool, Core}; use zksync_object_store::ObjectStore; use zksync_prover_interface::api::{ - ProofGenerationDataRequest, SubmitProofRequest, TeeProofGenerationDataRequest, + ProofGenerationDataRequest, SubmitProofRequest, SubmitTeeProofRequest, + TeeProofGenerationDataRequest, }; use zksync_types::commitment::L1BatchCommitmentMode; use crate::request_processor::RequestProcessor; -mod tee_request_processor; use crate::tee_request_processor::TeeRequestProcessor; +mod errors; mod request_processor; +mod tee_request_processor; pub async fn run_server( config: ProofDataHandlerConfig, @@ -66,9 +68,9 @@ pub async fn run_server( ), ) .route( - "/submit_tee_proof/:l1_batch_number", + "/submit_tee_proof/:l1_batch_number", // add TEE type as a parameter post( - move |l1_batch_number: Path, payload: Json| async move { + move |l1_batch_number: Path, payload: Json| async move { submit_tee_proof_processor .submit_proof(l1_batch_number, payload) .await diff --git a/core/node/proof_data_handler/src/request_processor.rs b/core/node/proof_data_handler/src/request_processor.rs index a0d793c885c5..28e399353d5b 100644 --- a/core/node/proof_data_handler/src/request_processor.rs +++ b/core/node/proof_data_handler/src/request_processor.rs @@ -1,14 +1,10 @@ use std::sync::Arc; -use axum::{ - extract::Path, - http::StatusCode, - response::{IntoResponse, Response}, - Json, -}; +use crate::errors::RequestProcessorError; +use axum::{extract::Path, Json}; use zksync_config::configs::ProofDataHandlerConfig; -use zksync_dal::{ConnectionPool, Core, CoreDal, SqlxError}; -use zksync_object_store::{ObjectStore, ObjectStoreError}; +use zksync_dal::{ConnectionPool, Core, CoreDal}; +use zksync_object_store::ObjectStore; use zksync_prover_interface::api::{ ProofGenerationData, ProofGenerationDataRequest, ProofGenerationDataResponse, SubmitProofRequest, SubmitProofResponse, @@ -28,38 +24,6 @@ pub(crate) struct 
RequestProcessor { commitment_mode: L1BatchCommitmentMode, } -pub(crate) enum RequestProcessorError { - ObjectStore(ObjectStoreError), - Sqlx(SqlxError), -} - -impl IntoResponse for RequestProcessorError { - fn into_response(self) -> Response { - let (status_code, message) = match self { - RequestProcessorError::ObjectStore(err) => { - tracing::error!("GCS error: {:?}", err); - ( - StatusCode::BAD_GATEWAY, - "Failed fetching/saving from GCS".to_owned(), - ) - } - RequestProcessorError::Sqlx(err) => { - tracing::error!("Sqlx error: {:?}", err); - match err { - SqlxError::RowNotFound => { - (StatusCode::NOT_FOUND, "Non existing L1 batch".to_owned()) - } - _ => ( - StatusCode::BAD_GATEWAY, - "Failed fetching/saving from db".to_owned(), - ), - } - } - }; - (status_code, message).into_response() - } -} - impl RequestProcessor { pub(crate) fn new( blob_store: Arc, @@ -252,7 +216,6 @@ impl RequestProcessor { .await .map_err(RequestProcessorError::Sqlx)?; } - SubmitProofRequest::TeeProof(_proof) => { /* TBD */ } SubmitProofRequest::SkippedProofGeneration => { self.pool .connection() diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index 734b27c5740f..dee1d710dc98 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -1,20 +1,16 @@ use std::sync::Arc; -use axum::{ - extract::Path, - http::StatusCode, - response::{IntoResponse, Response}, - Json, -}; +use crate::errors::RequestProcessorError; +use axum::{extract::Path, Json}; use zksync_config::configs::ProofDataHandlerConfig; -use zksync_dal::{ConnectionPool, Core, CoreDal, SqlxError}; -use zksync_object_store::{ObjectStore, ObjectStoreError}; +use zksync_dal::{ConnectionPool, Core, CoreDal}; +use zksync_object_store::ObjectStore; use zksync_prover_interface::api::{ - GenericProofGenerationDataResponse, SubmitProofRequest, SubmitProofResponse, + 
GenericProofGenerationDataResponse, SubmitProofResponse, SubmitTeeProofRequest, TeeProofGenerationDataRequest, }; use zksync_tee_verifier::TeeVerifierInput; -use zksync_types::{commitment::serialize_commitments, web3::keccak256, L1BatchNumber, H256}; +use zksync_types::L1BatchNumber; pub type TeeProofGenerationDataResponse = GenericProofGenerationDataResponse; @@ -25,38 +21,6 @@ pub(crate) struct TeeRequestProcessor { config: ProofDataHandlerConfig, } -pub(crate) enum TeeRequestProcessorError { - ObjectStore(ObjectStoreError), - Sqlx(SqlxError), -} - -impl IntoResponse for TeeRequestProcessorError { - fn into_response(self) -> Response { - let (status_code, message) = match self { - TeeRequestProcessorError::ObjectStore(err) => { - tracing::error!("GCS error: {:?}", err); - ( - StatusCode::BAD_GATEWAY, - "Failed fetching/saving from GCS".to_owned(), - ) - } - TeeRequestProcessorError::Sqlx(err) => { - tracing::error!("Sqlx error: {:?}", err); - match err { - SqlxError::RowNotFound => { - (StatusCode::NOT_FOUND, "Non existing L1 batch".to_owned()) - } - _ => ( - StatusCode::BAD_GATEWAY, - "Failed fetching/saving from db".to_owned(), - ), - } - } - }; - (status_code, message).into_response() - } -} - impl TeeRequestProcessor { pub(crate) fn new( blob_store: Arc, @@ -73,7 +37,7 @@ impl TeeRequestProcessor { pub(crate) async fn get_proof_generation_data( &self, request: Json, - ) -> Result, TeeRequestProcessorError> { + ) -> Result, RequestProcessorError> { tracing::info!("Received request for proof generation data: {:?}", request); // TODO: Replace this line with an appropriate method to get the next batch number to be proven. 
@@ -90,7 +54,7 @@ impl TeeRequestProcessor { .blob_store .get(l1_batch_number) .await - .map_err(TeeRequestProcessorError::ObjectStore)?; + .map_err(RequestProcessorError::ObjectStore)?; Ok(Json(TeeProofGenerationDataResponse::Success(Some( Box::new(tee_verifier_input), @@ -100,104 +64,25 @@ impl TeeRequestProcessor { pub(crate) async fn submit_proof( &self, Path(l1_batch_number): Path, - Json(payload): Json, - ) -> Result, TeeRequestProcessorError> { + Json(payload): Json, + ) -> Result, RequestProcessorError> { tracing::info!("Received proof for block number: {:?}", l1_batch_number); - let l1_batch_number = L1BatchNumber(l1_batch_number); - match payload { - SubmitProofRequest::Proof(proof) => { - let blob_url = self - .blob_store - .put(l1_batch_number, &*proof) - .await - .map_err(TeeRequestProcessorError::ObjectStore)?; - let system_logs_hash_from_prover = - H256::from_slice(&proof.aggregation_result_coords[0]); - let state_diff_hash_from_prover = - H256::from_slice(&proof.aggregation_result_coords[1]); - let bootloader_heap_initial_content_from_prover = - H256::from_slice(&proof.aggregation_result_coords[2]); - let events_queue_state_from_prover = - H256::from_slice(&proof.aggregation_result_coords[3]); - - let mut storage = self.pool.connection().await.unwrap(); - - let l1_batch = storage - .blocks_dal() - .get_l1_batch_metadata(l1_batch_number) - .await - .unwrap() - .expect("Proved block without metadata"); - - let is_pre_boojum = l1_batch - .header - .protocol_version - .map(|v| v.is_pre_boojum()) - .unwrap_or(true); - if !is_pre_boojum { - let events_queue_state = l1_batch - .metadata - .events_queue_commitment - .expect("No events_queue_commitment"); - let bootloader_heap_initial_content = l1_batch - .metadata - .bootloader_initial_content_commitment - .expect("No bootloader_initial_content_commitment"); - - if events_queue_state != events_queue_state_from_prover - || bootloader_heap_initial_content - != bootloader_heap_initial_content_from_prover - { - 
let server_values = format!("events_queue_state = {events_queue_state}, bootloader_heap_initial_content = {bootloader_heap_initial_content}"); - let prover_values = format!("events_queue_state = {events_queue_state_from_prover}, bootloader_heap_initial_content = {bootloader_heap_initial_content_from_prover}"); - panic!( - "Auxilary output doesn't match, server values: {} prover values: {}", - server_values, prover_values - ); - } - } - - let system_logs = serialize_commitments(&l1_batch.header.system_logs); - let system_logs_hash = H256(keccak256(&system_logs)); - - if !is_pre_boojum { - let state_diff_hash = l1_batch - .header - .system_logs - .into_iter() - .find(|elem| elem.0.key == H256::from_low_u64_be(2)) - .expect("No state diff hash key") - .0 - .value; + let l1_batch_number = L1BatchNumber(l1_batch_number); + let mut connection = self.pool.connection().await.unwrap(); + let mut dal = connection.proof_generation_dal(); - if state_diff_hash != state_diff_hash_from_prover - || system_logs_hash != system_logs_hash_from_prover - { - let server_values = format!("system_logs_hash = {system_logs_hash}, state_diff_hash = {state_diff_hash}"); - let prover_values = format!("system_logs_hash = {system_logs_hash_from_prover}, state_diff_hash = {state_diff_hash_from_prover}"); - panic!( - "Auxilary output doesn't match, server values: {} prover values: {}", - server_values, prover_values - ); - } - } - storage - .proof_generation_dal() - .save_proof_artifacts_metadata(l1_batch_number, &blob_url) - .await - .map_err(TeeRequestProcessorError::Sqlx)?; + // TODO: Replace the lines below with code that saves the proof generation result back to the database. 
+ match payload { + SubmitTeeProofRequest::Proof(_proof) => { + // dal.save_proof_artifacts_metadata(l1_batch_number, &blob_url) + // .await + // .map_err(RequestProcessorError::Sqlx)?; } - SubmitProofRequest::TeeProof(_proof) => { /* TBD */ } - SubmitProofRequest::SkippedProofGeneration => { - self.pool - .connection() - .await - .unwrap() - .proof_generation_dal() - .mark_proof_generation_job_as_skipped(l1_batch_number) + SubmitTeeProofRequest::SkippedProofGeneration => { + dal.mark_proof_generation_job_as_skipped(l1_batch_number) .await - .map_err(TeeRequestProcessorError::Sqlx)?; + .map_err(RequestProcessorError::Sqlx)?; } } From 90bef147a6cd88ae85851d6b1aac68717b49b48b Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 20 May 2024 18:27:27 +0200 Subject: [PATCH 04/42] Added the initial unit test for the prover interface --- Cargo.lock | 4 + core/lib/prover_interface/Cargo.toml | 1 + core/lib/prover_interface/src/api.rs | 13 +++- core/lib/prover_interface/src/outputs.rs | 4 +- core/node/proof_data_handler/Cargo.toml | 5 ++ core/node/proof_data_handler/src/lib.rs | 96 ++++++++++++++++++++---- 6 files changed, 105 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d64bd9c82c13..d1a156bf77eb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9052,7 +9052,10 @@ version = "0.1.0" dependencies = [ "anyhow", "axum", + "mime", + "serde_json", "tokio", + "tower", "tracing", "vm_utils", "zksync_config", @@ -9126,6 +9129,7 @@ dependencies = [ "chrono", "circuit_sequencer_api 0.1.50", "serde", + "serde_json", "serde_with", "strum", "tokio", diff --git a/core/lib/prover_interface/Cargo.toml b/core/lib/prover_interface/Cargo.toml index d1e46bacd425..869338a8830d 100644 --- a/core/lib/prover_interface/Cargo.toml +++ b/core/lib/prover_interface/Cargo.toml @@ -24,3 +24,4 @@ chrono = { workspace = true, features = ["serde"] } [dev-dependencies] tokio = { workspace = true, features = ["full"] } bincode.workspace = true +serde_json.workspace = true diff --git 
a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index 689910bbc0ef..b7bb4890dc3e 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -35,7 +35,7 @@ pub enum GenericProofGenerationDataResponse { pub type ProofGenerationDataResponse = GenericProofGenerationDataResponse; -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, PartialEq, Serialize, Deserialize)] pub enum GenericSubmitProofRequest { Proof(Box), // The proof generation was skipped due to sampling @@ -50,3 +50,14 @@ pub enum SubmitProofResponse { Success, Error(String), } + +#[test] +fn test_tee_proof_request_serialization() { + let tee_proof = SubmitTeeProofRequest::Proof(Box::new(L1BatchTeeProofForL1 { + signature: vec![0, 1, 2, 3, 4], + })); + let encoded = serde_json::to_string(&tee_proof).unwrap(); + assert_eq!(r#"{"Proof":{"signature":[0,1,2,3,4]}}"#, encoded); + let decoded = serde_json::from_str(&encoded).unwrap(); + assert_eq!(tee_proof, decoded); +} diff --git a/core/lib/prover_interface/src/outputs.rs b/core/lib/prover_interface/src/outputs.rs index e9a6b922b7d1..f934ffddded8 100644 --- a/core/lib/prover_interface/src/outputs.rs +++ b/core/lib/prover_interface/src/outputs.rs @@ -13,10 +13,10 @@ pub struct L1BatchProofForL1 { pub protocol_version: ProtocolSemanticVersion, } -#[derive(Clone, Serialize, Deserialize)] +#[derive(Clone, PartialEq, Serialize, Deserialize)] pub struct L1BatchTeeProofForL1 { // TODO revisit what else is needed here - signature: Vec, + pub signature: Vec, } impl fmt::Debug for L1BatchProofForL1 { diff --git a/core/node/proof_data_handler/Cargo.toml b/core/node/proof_data_handler/Cargo.toml index b8a56fc3b71a..5c7657369019 100644 --- a/core/node/proof_data_handler/Cargo.toml +++ b/core/node/proof_data_handler/Cargo.toml @@ -23,3 +23,8 @@ tracing.workspace = true anyhow.workspace = true axum.workspace = true tokio.workspace = true + +[dev-dependencies] +serde_json.workspace = true +tower.workspace 
= true +mime = "0.3.16" diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index 4e92c76c9147..24cfa983c8df 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -28,13 +28,36 @@ pub async fn run_server( ) -> anyhow::Result<()> { let bind_address = SocketAddr::from(([0, 0, 0, 0], config.http_port)); tracing::debug!("Starting proof data handler server on {bind_address}"); + let app = create_proof_processing_router(blob_store, connection_pool, config, commitment_mode); + + axum::Server::bind(&bind_address) + .serve(app.into_make_service()) + .with_graceful_shutdown(async move { + if stop_receiver.changed().await.is_err() { + tracing::warn!("Stop signal sender for proof data handler server was dropped without sending a signal"); + } + tracing::info!("Stop signal received, proof data handler server is shutting down"); + }) + .await + .context("Proof data handler server failed")?; + tracing::info!("Proof data handler server shut down"); + Ok(()) +} + +fn create_proof_processing_router( + blob_store: Arc, + connection_pool: ConnectionPool, + config: ProofDataHandlerConfig, + commitment_mode: L1BatchCommitmentMode, +) -> Router { let get_tee_proof_gen_processor = TeeRequestProcessor::new(blob_store.clone(), connection_pool.clone(), config.clone()); let submit_tee_proof_processor = get_tee_proof_gen_processor.clone(); let get_proof_gen_processor = RequestProcessor::new(blob_store, connection_pool, config, commitment_mode); let submit_proof_processor = get_proof_gen_processor.clone(); - let app = Router::new() + + Router::new() .route( "/proof_generation_data", post( @@ -68,7 +91,7 @@ pub async fn run_server( ), ) .route( - "/submit_tee_proof/:l1_batch_number", // add TEE type as a parameter + "/submit_tee_proof/:l1_batch_number", // add TEE type as a parameter (and pubkey?) 
post( move |l1_batch_number: Path, payload: Json| async move { submit_tee_proof_processor @@ -76,18 +99,61 @@ pub async fn run_server( .await }, ), - ); + ) +} - axum::Server::bind(&bind_address) - .serve(app.into_make_service()) - .with_graceful_shutdown(async move { - if stop_receiver.changed().await.is_err() { - tracing::warn!("Stop signal sender for proof data handler server was dropped without sending a signal"); - } - tracing::info!("Stop signal received, proof data handler server is shutting down"); - }) - .await - .context("Proof data handler server failed")?; - tracing::info!("Proof data handler server shut down"); - Ok(()) +#[cfg(test)] +mod tests { + use crate::{create_proof_processing_router, ConnectionPool}; + use axum::{ + body::Body, + http::{self, Method, Request, StatusCode}, + }; + use serde_json::json; + use tower::ServiceExt; + use zksync_config::configs::ProofDataHandlerConfig; + use zksync_object_store::ObjectStoreFactory; + use zksync_types::commitment::L1BatchCommitmentMode; + + // TODO https://github.com/tokio-rs/axum/blob/main/examples/testing/src/main.rs#L58 + + #[tokio::test] + async fn invalid_json_syntax() { + let blob_store = ObjectStoreFactory::mock().create_store().await; + let connection_pool = ConnectionPool::test_pool().await; + let app = create_proof_processing_router( + blob_store, + connection_pool, + ProofDataHandlerConfig { + http_port: 1337, + proof_generation_timeout_in_secs: 10, + }, + L1BatchCommitmentMode::Rollup, + ); + let data = json!({}); + print!( + "{:?}", + Request::builder() + .method(Method::POST) + .uri("/tee_proof_generation_data") + .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) + .body(Body::from(serde_json::to_vec(&data).unwrap(),)) + .unwrap() + ); + let response = app + .oneshot( + Request::builder() + .method(Method::POST) + .uri("/tee_proof_generation_data") + .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) + 
.body(Body::from(serde_json::to_vec(&data).unwrap())) + .unwrap(), + ) + .await + .unwrap(); + println!("{:?}", response); + println!("{:?}", response); + println!("{:?}", response.status()); + assert_eq!(response.status(), StatusCode::OK); + } } From 377a0bad4911d4d46531da07211b08a54e401846 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Tue, 21 May 2024 14:36:31 +0200 Subject: [PATCH 05/42] Fix the unit test for the /tee_proof_generation_data endpoint --- Cargo.lock | 87 ++++++++++++++++++------- checks-config/era.dic | 4 ++ core/node/proof_data_handler/Cargo.toml | 7 +- core/node/proof_data_handler/src/lib.rs | 76 ++++++++++++++++----- 4 files changed, 131 insertions(+), 43 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d1a156bf77eb..ae4bad1bd02d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -362,8 +362,8 @@ dependencies = [ "bitflags 1.3.2", "bytes", "futures-util", - "http", - "http-body", + "http 0.2.9", + "http-body 0.4.5", "hyper", "itoa", "matchit", @@ -392,8 +392,8 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http", - "http-body", + "http 0.2.9", + "http-body 0.4.5", "mime", "rustversion", "tower-layer", @@ -2396,7 +2396,7 @@ dependencies = [ "futures-core", "futures-sink", "gloo-utils", - "http", + "http 0.2.9", "js-sys", "pin-project", "serde", @@ -2555,7 +2555,7 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http", + "http 0.2.9", "indexmap 2.1.0", "slab", "tokio", @@ -2724,6 +2724,17 @@ dependencies = [ "itoa", ] +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + [[package]] name = "http-body" version = "0.4.5" @@ -2731,7 +2742,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ 
"bytes", - "http", + "http 0.2.9", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" +dependencies = [ + "bytes", + "futures-core", + "http 1.1.0", + "http-body 1.0.0", "pin-project-lite", ] @@ -2764,8 +2798,8 @@ dependencies = [ "futures-core", "futures-util", "h2", - "http", - "http-body", + "http 0.2.9", + "http-body 0.4.5", "httparse", "httpdate", "itoa", @@ -2784,7 +2818,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" dependencies = [ "futures-util", - "http", + "http 0.2.9", "hyper", "log", "rustls 0.21.11", @@ -3052,7 +3086,7 @@ dependencies = [ "futures-channel", "futures-util", "gloo-net", - "http", + "http 0.2.9", "jsonrpsee-core", "pin-project", "rustls-native-certs 0.7.0", @@ -3134,7 +3168,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5cc7c6d1a2c58f6135810284a390d9f823d0f508db74cd914d8237802de80f98" dependencies = [ "futures-util", - "http", + "http 0.2.9", "hyper", "jsonrpsee-core", "jsonrpsee-types", @@ -3181,7 +3215,7 @@ version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "073c077471e89c4b511fa88b3df9a0f0abdf4a0a2e6683dd2ab36893af87bb2d" dependencies = [ - "http", + "http 0.2.9", "jsonrpsee-client-transport", "jsonrpsee-core", "jsonrpsee-types", @@ -4068,7 +4102,7 @@ checksum = "c7594ec0e11d8e33faf03530a4c49af7064ebba81c1480e01be67d90b356508b" dependencies = [ "async-trait", "bytes", - "http", + "http 0.2.9", 
"opentelemetry_api", "reqwest", ] @@ -4081,7 +4115,7 @@ checksum = "7e5e5a5c4135864099f3faafbe939eb4d7f9b80ebf68a8448da961b32a7c1275" dependencies = [ "async-trait", "futures-core", - "http", + "http 0.2.9", "opentelemetry-http", "opentelemetry-proto", "opentelemetry-semantic-conventions", @@ -5093,8 +5127,8 @@ dependencies = [ "futures-core", "futures-util", "h2", - "http", - "http-body", + "http 0.2.9", + "http-body 0.4.5", "hyper", "hyper-tls", "ipnet", @@ -6049,7 +6083,7 @@ dependencies = [ "base64 0.13.1", "bytes", "futures 0.3.28", - "http", + "http 0.2.9", "httparse", "log", "rand 0.8.5", @@ -6843,8 +6877,8 @@ dependencies = [ "futures-core", "futures-util", "h2", - "http", - "http-body", + "http 0.2.9", + "http-body 0.4.5", "hyper", "hyper-timeout", "percent-encoding", @@ -6891,8 +6925,8 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "http", - "http-body", + "http 0.2.9", + "http-body 0.4.5", "http-range-header", "httpdate", "iri-string", @@ -8792,7 +8826,7 @@ dependencies = [ "futures 0.3.28", "governor", "hex", - "http", + "http 0.2.9", "itertools 0.10.5", "lru", "multivm", @@ -9033,7 +9067,7 @@ dependencies = [ "flate2", "google-cloud-auth", "google-cloud-storage", - "http", + "http 0.2.9", "prost 0.12.1", "rand 0.8.5", "serde_json", @@ -9052,13 +9086,18 @@ version = "0.1.0" dependencies = [ "anyhow", "axum", + "http-body-util", + "hyper", "mime", + "multivm", "serde_json", "tokio", "tower", "tracing", "vm_utils", + "zksync_basic_types", "zksync_config", + "zksync_contracts", "zksync_dal", "zksync_object_store", "zksync_prover_interface", diff --git a/checks-config/era.dic b/checks-config/era.dic index 3741e158dfae..d22d57e091d1 100644 --- a/checks-config/era.dic +++ b/checks-config/era.dic @@ -973,3 +973,7 @@ uncached untrimmed UNNEST semver +TeeRequestProcessor +l1_batch_number +RequestProcessorError +map_err diff --git a/core/node/proof_data_handler/Cargo.toml b/core/node/proof_data_handler/Cargo.toml index 5c7657369019..3aff91caa944 
100644 --- a/core/node/proof_data_handler/Cargo.toml +++ b/core/node/proof_data_handler/Cargo.toml @@ -25,6 +25,11 @@ axum.workspace = true tokio.workspace = true [dev-dependencies] +http-body-util = "0.1.0" +hyper = { version = "0.14", features = ["full"] } +mime = "0.3.16" +multivm.workspace = true serde_json.workspace = true tower.workspace = true -mime = "0.3.16" +zksync_basic_types.workspace = true +zksync_contracts.workspace = true diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index 24cfa983c8df..4c6f8d185066 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -109,17 +109,61 @@ mod tests { body::Body, http::{self, Method, Request, StatusCode}, }; + use multivm::interface::{L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode}; use serde_json::json; use tower::ServiceExt; + use zksync_basic_types::U256; use zksync_config::configs::ProofDataHandlerConfig; + use zksync_contracts::{BaseSystemContracts, SystemContractCode}; use zksync_object_store::ObjectStoreFactory; - use zksync_types::commitment::L1BatchCommitmentMode; - - // TODO https://github.com/tokio-rs/axum/blob/main/examples/testing/src/main.rs#L58 + use zksync_prover_interface::inputs::PrepareBasicCircuitsJob; + use zksync_tee_verifier::TeeVerifierInput; + use zksync_types::{commitment::L1BatchCommitmentMode, L1BatchNumber, H256}; #[tokio::test] - async fn invalid_json_syntax() { + async fn request_tee_proof_generation_data() { + let batch_number = L1BatchNumber::from(1); + let tvi = TeeVerifierInput::new( + PrepareBasicCircuitsJob::new(0), + vec![], + L1BatchEnv { + previous_batch_hash: Some(H256([1; 32])), + number: batch_number, + timestamp: 0, + fee_input: Default::default(), + fee_account: Default::default(), + enforced_base_fee: None, + first_l2_block: L2BlockEnv { + number: 0, + timestamp: 0, + prev_block_hash: H256([1; 32]), + max_virtual_blocks_to_create: 0, + }, + }, + SystemEnv { + 
zk_porter_available: false, + version: Default::default(), + base_system_smart_contracts: BaseSystemContracts { + bootloader: SystemContractCode { + code: vec![U256([1; 4])], + hash: H256([1; 32]), + }, + default_aa: SystemContractCode { + code: vec![U256([1; 4])], + hash: H256([1; 32]), + }, + }, + bootloader_gas_limit: 0, + execution_mode: TxExecutionMode::VerifyExecute, + default_validation_computational_gas_limit: 0, + chain_id: Default::default(), + }, + vec![(H256([1; 32]), vec![0, 1, 2, 3, 4])], + ); let blob_store = ObjectStoreFactory::mock().create_store().await; + blob_store.put(batch_number, &tvi).await.unwrap(); + // TODO mock relevant SQL table once the logic is implemented in the TeeRequestProcessor::get_proof_generation_data + // TODO useful examples: https://github.com/tokio-rs/axum/blob/main/examples/testing/src/main.rs#L58 let connection_pool = ConnectionPool::test_pool().await; let app = create_proof_processing_router( blob_store, @@ -130,30 +174,26 @@ mod tests { }, L1BatchCommitmentMode::Rollup, ); - let data = json!({}); - print!( - "{:?}", - Request::builder() - .method(Method::POST) - .uri("/tee_proof_generation_data") - .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) - .body(Body::from(serde_json::to_vec(&data).unwrap(),)) - .unwrap() - ); + let req_body = Body::from(serde_json::to_vec(&json!({})).unwrap()); let response = app .oneshot( Request::builder() .method(Method::POST) .uri("/tee_proof_generation_data") .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) - .body(Body::from(serde_json::to_vec(&data).unwrap())) + .body(req_body) .unwrap(), ) .await .unwrap(); - println!("{:?}", response); - println!("{:?}", response); - println!("{:?}", response.status()); assert_eq!(response.status(), StatusCode::OK); + let body = hyper::body::to_bytes(response.into_body()).await.unwrap(); + let json: serde_json::Value = serde_json::from_slice(&body).unwrap(); + let json = json + .get("Success") + 
.expect("Unexpected response format") + .clone(); + let deserialized: TeeVerifierInput = serde_json::from_value(json).unwrap(); + assert_eq!(tvi, deserialized); } } From 9412d7e8471ee0a3558c1d15ecfdf3b826ae14e4 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Wed, 22 May 2024 17:09:53 +0200 Subject: [PATCH 06/42] Move tests to a separate file --- core/lib/prover_interface/src/api.rs | 5 + core/node/proof_data_handler/Cargo.toml | 3 +- core/node/proof_data_handler/src/lib.rs | 110 ++---------------- .../src/tee_request_processor.rs | 4 +- core/node/proof_data_handler/src/tests.rs | 92 +++++++++++++++ 5 files changed, 111 insertions(+), 103 deletions(-) create mode 100644 core/node/proof_data_handler/src/tests.rs diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index b7bb4890dc3e..768de4060927 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -51,6 +51,11 @@ pub enum SubmitProofResponse { Error(String), } +#[derive(Debug, PartialEq, Serialize, Deserialize)] +pub enum TeeType { + Sgx, +} + #[test] fn test_tee_proof_request_serialization() { let tee_proof = SubmitTeeProofRequest::Proof(Box::new(L1BatchTeeProofForL1 { diff --git a/core/node/proof_data_handler/Cargo.toml b/core/node/proof_data_handler/Cargo.toml index 3aff91caa944..008560e51efd 100644 --- a/core/node/proof_data_handler/Cargo.toml +++ b/core/node/proof_data_handler/Cargo.toml @@ -18,11 +18,10 @@ zksync_prover_interface.workspace = true zksync_tee_verifier.workspace = true zksync_types.workspace = true zksync_utils.workspace = true - -tracing.workspace = true anyhow.workspace = true axum.workspace = true tokio.workspace = true +tracing.workspace = true [dev-dependencies] http-body-util = "0.1.0" diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index 4c6f8d185066..2ec30942ffbd 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ 
-8,13 +8,16 @@ use zksync_dal::{ConnectionPool, Core}; use zksync_object_store::ObjectStore; use zksync_prover_interface::api::{ ProofGenerationDataRequest, SubmitProofRequest, SubmitTeeProofRequest, - TeeProofGenerationDataRequest, + TeeProofGenerationDataRequest, TeeType, }; use zksync_types::commitment::L1BatchCommitmentMode; use crate::request_processor::RequestProcessor; use crate::tee_request_processor::TeeRequestProcessor; +#[cfg(test)] +mod tests; + mod errors; mod request_processor; mod tee_request_processor; @@ -91,109 +94,16 @@ fn create_proof_processing_router( ), ) .route( - "/submit_tee_proof/:l1_batch_number", // add TEE type as a parameter (and pubkey?) + "/submit_tee_proof/:l1_batch_number/:tee_type?", post( - move |l1_batch_number: Path, payload: Json| async move { + move |l1_batch_number: Path, + tee_type: Option>, + payload: Json| async move { + let tee_type = tee_type.unwrap_or(Path(TeeType::Sgx)); submit_tee_proof_processor - .submit_proof(l1_batch_number, payload) + .submit_proof(tee_type, l1_batch_number, payload) .await }, ), ) } - -#[cfg(test)] -mod tests { - use crate::{create_proof_processing_router, ConnectionPool}; - use axum::{ - body::Body, - http::{self, Method, Request, StatusCode}, - }; - use multivm::interface::{L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode}; - use serde_json::json; - use tower::ServiceExt; - use zksync_basic_types::U256; - use zksync_config::configs::ProofDataHandlerConfig; - use zksync_contracts::{BaseSystemContracts, SystemContractCode}; - use zksync_object_store::ObjectStoreFactory; - use zksync_prover_interface::inputs::PrepareBasicCircuitsJob; - use zksync_tee_verifier::TeeVerifierInput; - use zksync_types::{commitment::L1BatchCommitmentMode, L1BatchNumber, H256}; - - #[tokio::test] - async fn request_tee_proof_generation_data() { - let batch_number = L1BatchNumber::from(1); - let tvi = TeeVerifierInput::new( - PrepareBasicCircuitsJob::new(0), - vec![], - L1BatchEnv { - previous_batch_hash: 
Some(H256([1; 32])), - number: batch_number, - timestamp: 0, - fee_input: Default::default(), - fee_account: Default::default(), - enforced_base_fee: None, - first_l2_block: L2BlockEnv { - number: 0, - timestamp: 0, - prev_block_hash: H256([1; 32]), - max_virtual_blocks_to_create: 0, - }, - }, - SystemEnv { - zk_porter_available: false, - version: Default::default(), - base_system_smart_contracts: BaseSystemContracts { - bootloader: SystemContractCode { - code: vec![U256([1; 4])], - hash: H256([1; 32]), - }, - default_aa: SystemContractCode { - code: vec![U256([1; 4])], - hash: H256([1; 32]), - }, - }, - bootloader_gas_limit: 0, - execution_mode: TxExecutionMode::VerifyExecute, - default_validation_computational_gas_limit: 0, - chain_id: Default::default(), - }, - vec![(H256([1; 32]), vec![0, 1, 2, 3, 4])], - ); - let blob_store = ObjectStoreFactory::mock().create_store().await; - blob_store.put(batch_number, &tvi).await.unwrap(); - // TODO mock relevant SQL table once the logic is implemented in the TeeRequestProcessor::get_proof_generation_data - // TODO useful examples: https://github.com/tokio-rs/axum/blob/main/examples/testing/src/main.rs#L58 - let connection_pool = ConnectionPool::test_pool().await; - let app = create_proof_processing_router( - blob_store, - connection_pool, - ProofDataHandlerConfig { - http_port: 1337, - proof_generation_timeout_in_secs: 10, - }, - L1BatchCommitmentMode::Rollup, - ); - let req_body = Body::from(serde_json::to_vec(&json!({})).unwrap()); - let response = app - .oneshot( - Request::builder() - .method(Method::POST) - .uri("/tee_proof_generation_data") - .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) - .body(req_body) - .unwrap(), - ) - .await - .unwrap(); - assert_eq!(response.status(), StatusCode::OK); - let body = hyper::body::to_bytes(response.into_body()).await.unwrap(); - let json: serde_json::Value = serde_json::from_slice(&body).unwrap(); - let json = json - .get("Success") - 
.expect("Unexpected response format") - .clone(); - let deserialized: TeeVerifierInput = serde_json::from_value(json).unwrap(); - assert_eq!(tvi, deserialized); - } -} diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index dee1d710dc98..43d333282527 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -7,7 +7,7 @@ use zksync_dal::{ConnectionPool, Core, CoreDal}; use zksync_object_store::ObjectStore; use zksync_prover_interface::api::{ GenericProofGenerationDataResponse, SubmitProofResponse, SubmitTeeProofRequest, - TeeProofGenerationDataRequest, + TeeProofGenerationDataRequest, TeeType, }; use zksync_tee_verifier::TeeVerifierInput; use zksync_types::L1BatchNumber; @@ -63,10 +63,12 @@ impl TeeRequestProcessor { pub(crate) async fn submit_proof( &self, + Path(tee_type): Path, Path(l1_batch_number): Path, Json(payload): Json, ) -> Result, RequestProcessorError> { tracing::info!("Received proof for block number: {:?}", l1_batch_number); + assert_eq!(tee_type, TeeType::Sgx, "Expected TEE type to be SGX"); let l1_batch_number = L1BatchNumber(l1_batch_number); let mut connection = self.pool.connection().await.unwrap(); diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs new file mode 100644 index 000000000000..990972a39cea --- /dev/null +++ b/core/node/proof_data_handler/src/tests.rs @@ -0,0 +1,92 @@ +use crate::{create_proof_processing_router, ConnectionPool}; +use axum::{ + body::Body, + http::{self, Method, Request, StatusCode}, +}; +use multivm::interface::{L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode}; +use serde_json::json; +use tower::ServiceExt; +use zksync_basic_types::U256; +use zksync_config::configs::ProofDataHandlerConfig; +use zksync_contracts::{BaseSystemContracts, SystemContractCode}; +use zksync_object_store::ObjectStoreFactory; +use 
zksync_prover_interface::inputs::PrepareBasicCircuitsJob; +use zksync_tee_verifier::TeeVerifierInput; +use zksync_types::{commitment::L1BatchCommitmentMode, L1BatchNumber, H256}; + +#[tokio::test] +async fn request_tee_proof_generation_data() { + let batch_number = L1BatchNumber::from(1); + let tvi = TeeVerifierInput::new( + PrepareBasicCircuitsJob::new(0), + vec![], + L1BatchEnv { + previous_batch_hash: Some(H256([1; 32])), + number: batch_number, + timestamp: 0, + fee_input: Default::default(), + fee_account: Default::default(), + enforced_base_fee: None, + first_l2_block: L2BlockEnv { + number: 0, + timestamp: 0, + prev_block_hash: H256([1; 32]), + max_virtual_blocks_to_create: 0, + }, + }, + SystemEnv { + zk_porter_available: false, + version: Default::default(), + base_system_smart_contracts: BaseSystemContracts { + bootloader: SystemContractCode { + code: vec![U256([1; 4])], + hash: H256([1; 32]), + }, + default_aa: SystemContractCode { + code: vec![U256([1; 4])], + hash: H256([1; 32]), + }, + }, + bootloader_gas_limit: 0, + execution_mode: TxExecutionMode::VerifyExecute, + default_validation_computational_gas_limit: 0, + chain_id: Default::default(), + }, + vec![(H256([1; 32]), vec![0, 1, 2, 3, 4])], + ); + let blob_store = ObjectStoreFactory::mock().create_store().await; + blob_store.put(batch_number, &tvi).await.unwrap(); + // TODO mock relevant SQL table once the logic is implemented in the TeeRequestProcessor::get_proof_generation_data + // TODO useful examples: https://github.com/tokio-rs/axum/blob/main/examples/testing/src/main.rs#L58 + let connection_pool = ConnectionPool::test_pool().await; + let app = create_proof_processing_router( + blob_store, + connection_pool, + ProofDataHandlerConfig { + http_port: 1337, + proof_generation_timeout_in_secs: 10, + }, + L1BatchCommitmentMode::Rollup, + ); + let req_body = Body::from(serde_json::to_vec(&json!({})).unwrap()); + let response = app + .oneshot( + Request::builder() + .method(Method::POST) + 
.uri("/tee_proof_generation_data") + .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) + .body(req_body) + .unwrap(), + ) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + let body = hyper::body::to_bytes(response.into_body()).await.unwrap(); + let json: serde_json::Value = serde_json::from_slice(&body).unwrap(); + let json = json + .get("Success") + .expect("Unexpected response format") + .clone(); + let deserialized: TeeVerifierInput = serde_json::from_value(json).unwrap(); + assert_eq!(tvi, deserialized); +} From 75cee28cafdffbe24a5ab4bbfba816a9c9d350d3 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Wed, 22 May 2024 17:38:22 +0200 Subject: [PATCH 07/42] Add a unit test for the /submit_tee_proof endpoint --- core/lib/prover_interface/src/api.rs | 5 --- core/node/proof_data_handler/src/lib.rs | 11 +++---- .../src/tee_request_processor.rs | 10 +++--- core/node/proof_data_handler/src/tests.rs | 31 +++++++++++++++++++ 4 files changed, 41 insertions(+), 16 deletions(-) diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index 768de4060927..b7bb4890dc3e 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -51,11 +51,6 @@ pub enum SubmitProofResponse { Error(String), } -#[derive(Debug, PartialEq, Serialize, Deserialize)] -pub enum TeeType { - Sgx, -} - #[test] fn test_tee_proof_request_serialization() { let tee_proof = SubmitTeeProofRequest::Proof(Box::new(L1BatchTeeProofForL1 { diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index 2ec30942ffbd..b841a48500eb 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -8,7 +8,7 @@ use zksync_dal::{ConnectionPool, Core}; use zksync_object_store::ObjectStore; use zksync_prover_interface::api::{ ProofGenerationDataRequest, SubmitProofRequest, SubmitTeeProofRequest, - TeeProofGenerationDataRequest, TeeType, + 
TeeProofGenerationDataRequest, }; use zksync_types::commitment::L1BatchCommitmentMode; @@ -94,14 +94,11 @@ fn create_proof_processing_router( ), ) .route( - "/submit_tee_proof/:l1_batch_number/:tee_type?", + "/submit_tee_proof/:l1_batch_number", post( - move |l1_batch_number: Path, - tee_type: Option>, - payload: Json| async move { - let tee_type = tee_type.unwrap_or(Path(TeeType::Sgx)); + move |l1_batch_number: Path, payload: Json| async move { submit_tee_proof_processor - .submit_proof(tee_type, l1_batch_number, payload) + .submit_proof(l1_batch_number, payload) .await }, ), diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index 43d333282527..8fe1e9cb4fd1 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -7,7 +7,7 @@ use zksync_dal::{ConnectionPool, Core, CoreDal}; use zksync_object_store::ObjectStore; use zksync_prover_interface::api::{ GenericProofGenerationDataResponse, SubmitProofResponse, SubmitTeeProofRequest, - TeeProofGenerationDataRequest, TeeType, + TeeProofGenerationDataRequest, }; use zksync_tee_verifier::TeeVerifierInput; use zksync_types::L1BatchNumber; @@ -63,12 +63,10 @@ impl TeeRequestProcessor { pub(crate) async fn submit_proof( &self, - Path(tee_type): Path, Path(l1_batch_number): Path, Json(payload): Json, ) -> Result, RequestProcessorError> { tracing::info!("Received proof for block number: {:?}", l1_batch_number); - assert_eq!(tee_type, TeeType::Sgx, "Expected TEE type to be SGX"); let l1_batch_number = L1BatchNumber(l1_batch_number); let mut connection = self.pool.connection().await.unwrap(); @@ -76,7 +74,11 @@ impl TeeRequestProcessor { // TODO: Replace the lines below with code that saves the proof generation result back to the database. 
match payload { - SubmitTeeProofRequest::Proof(_proof) => { + SubmitTeeProofRequest::Proof(proof) => { + println!( + "Received proof {:?} for block number: {:?}", + proof, l1_batch_number + ); // dal.save_proof_artifacts_metadata(l1_batch_number, &blob_url) // .await // .map_err(RequestProcessorError::Sqlx)?; diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 990972a39cea..d869607a1c84 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -90,3 +90,34 @@ async fn request_tee_proof_generation_data() { let deserialized: TeeVerifierInput = serde_json::from_value(json).unwrap(); assert_eq!(tvi, deserialized); } + +#[tokio::test] +async fn submit_tee_proof() { + let blob_store = ObjectStoreFactory::mock().create_store().await; + let connection_pool = ConnectionPool::test_pool().await; + let app = create_proof_processing_router( + blob_store, + connection_pool, + ProofDataHandlerConfig { + http_port: 1337, + proof_generation_timeout_in_secs: 10, + }, + L1BatchCommitmentMode::Rollup, + ); + + let request = r#"{ "Proof": { "signature": [ 0, 1, 2, 3, 4 ] } }"#; + let request: serde_json::Value = serde_json::from_str(request).unwrap(); + let req_body = Body::from(serde_json::to_vec(&request).unwrap()); + let response = app + .oneshot( + Request::builder() + .method(Method::POST) + .uri("/submit_tee_proof/123") + .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) + .body(req_body) + .unwrap(), + ) + .await + .unwrap(); + assert_eq!(response.status(), StatusCode::OK); +} From d952f8a8e74992c87cad129e319a6e1158ae6fa1 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 23 May 2024 11:29:44 +0200 Subject: [PATCH 08/42] Create a new SQL table to track TEE proof generation details --- ...ee_proof_generation_details_table.down.sql | 3 + ..._tee_proof_generation_details_table.up.sql | 15 ++ core/lib/dal/src/lib.rs | 9 +- 
core/lib/dal/src/tee_proof_generation_dal.rs | 187 ++++++++++++++++++ 4 files changed, 213 insertions(+), 1 deletion(-) create mode 100644 core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql create mode 100644 core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql create mode 100644 core/lib/dal/src/tee_proof_generation_dal.rs diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql new file mode 100644 index 000000000000..2291f75592d5 --- /dev/null +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql @@ -0,0 +1,3 @@ +DROP TABLE IF EXISTS tee_proof_generation_details; + +DROP INDEX IF EXISTS idx_tee_proof_generation_details_status_prover_taken_at; diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql new file mode 100644 index 000000000000..1c3ea6750492 --- /dev/null +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql @@ -0,0 +1,15 @@ +CREATE TABLE IF NOT EXISTS tee_proof_generation_details +( + l1_batch_number BIGINT PRIMARY KEY REFERENCES l1_batches (number) ON DELETE CASCADE, + status TEXT NOT NULL, + proof_gen_data_blob_url TEXT NOT NULL, + proof_blob_url TEXT, + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL, + prover_taken_at TIMESTAMP +); + + +CREATE INDEX IF NOT EXISTS idx_tee_proof_generation_details_status_prover_taken_at + ON tee_proof_generation_details (prover_taken_at) + WHERE status = 'picked_by_prover'; diff --git a/core/lib/dal/src/lib.rs b/core/lib/dal/src/lib.rs index 8b048a035121..45d1f94b486d 100644 --- a/core/lib/dal/src/lib.rs +++ b/core/lib/dal/src/lib.rs @@ -20,7 +20,7 @@ use crate::{ 
snapshot_recovery_dal::SnapshotRecoveryDal, snapshots_creator_dal::SnapshotsCreatorDal, snapshots_dal::SnapshotsDal, storage_logs_dal::StorageLogsDal, storage_logs_dedup_dal::StorageLogsDedupDal, storage_web3_dal::StorageWeb3Dal, - sync_dal::SyncDal, system_dal::SystemDal, + sync_dal::SyncDal, system_dal::SystemDal, tee_proof_generation_dal::TeeProofGenerationDal, tee_verifier_input_producer_dal::TeeVerifierInputProducerDal, tokens_dal::TokensDal, tokens_web3_dal::TokensWeb3Dal, transactions_dal::TransactionsDal, transactions_web3_dal::TransactionsWeb3Dal, vm_runner_dal::VmRunnerDal, @@ -50,6 +50,7 @@ pub mod storage_logs_dedup_dal; pub mod storage_web3_dal; pub mod sync_dal; pub mod system_dal; +pub mod tee_proof_generation_dal; pub mod tee_verifier_input_producer_dal; pub mod tokens_dal; pub mod tokens_web3_dal; @@ -111,6 +112,8 @@ where fn proof_generation_dal(&mut self) -> ProofGenerationDal<'_, 'a>; + fn tee_proof_generation_dal(&mut self) -> TeeProofGenerationDal<'_, 'a>; + fn system_dal(&mut self) -> SystemDal<'_, 'a>; fn snapshots_dal(&mut self) -> SnapshotsDal<'_, 'a>; @@ -213,6 +216,10 @@ impl<'a> CoreDal<'a> for Connection<'a, Core> { ProofGenerationDal { storage: self } } + fn tee_proof_generation_dal(&mut self) -> TeeProofGenerationDal<'_, 'a> { + TeeProofGenerationDal { storage: self } + } + fn system_dal(&mut self) -> SystemDal<'_, 'a> { SystemDal { storage: self } } diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs new file mode 100644 index 000000000000..bcac25451218 --- /dev/null +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -0,0 +1,187 @@ +use std::time::Duration; + +use strum::{Display, EnumString}; +use zksync_db_connection::{connection::Connection, utils::pg_interval_from_duration}; +use zksync_types::L1BatchNumber; + +use crate::{Core, SqlxError}; + +#[derive(Debug)] +pub struct TeeProofGenerationDal<'a, 'c> { + pub(crate) storage: &'a mut Connection<'c, Core>, +} + 
+#[derive(Debug, EnumString, Display)] +enum ProofGenerationJobStatus { + #[strum(serialize = "ready_to_be_proven")] + ReadyToBeProven, + #[strum(serialize = "picked_by_prover")] + PickedByProver, + #[strum(serialize = "generated")] + Generated, + #[strum(serialize = "skipped")] + Skipped, +} + +impl TeeProofGenerationDal<'_, '_> { + pub async fn get_next_block_to_be_proven( + &mut self, + processing_timeout: Duration, + ) -> Option { + let processing_timeout = pg_interval_from_duration(processing_timeout); + let result: Option = sqlx::query!( + r#" + UPDATE tee_proof_generation_details + SET + status = 'picked_by_prover', + updated_at = NOW(), + prover_taken_at = NOW() + WHERE + l1_batch_number = ( + SELECT + l1_batch_number + FROM + tee_proof_generation_details + WHERE + status = 'ready_to_be_proven' + OR ( + status = 'picked_by_prover' + AND prover_taken_at < NOW() - $1::INTERVAL + ) + ORDER BY + l1_batch_number ASC + LIMIT + 1 + FOR UPDATE + SKIP LOCKED + ) + RETURNING + tee_proof_generation_details.l1_batch_number + "#, + &processing_timeout, + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| L1BatchNumber(row.l1_batch_number as u32)); + + result + } + + pub async fn save_proof_artifacts_metadata( + &mut self, + block_number: L1BatchNumber, + proof_blob_url: &str, + ) -> Result<(), SqlxError> { + sqlx::query!( + r#" + UPDATE tee_proof_generation_details + SET + status = 'generated', + proof_blob_url = $1, + updated_at = NOW() + WHERE + l1_batch_number = $2 + "#, + proof_blob_url, + i64::from(block_number.0) + ) + .execute(self.storage.conn()) + .await? 
+ .rows_affected() + .eq(&1) + .then_some(()) + .ok_or(sqlx::Error::RowNotFound) + } + + pub async fn insert_tee_proof_generation_details( + &mut self, + block_number: L1BatchNumber, + proof_gen_data_blob_url: &str, + ) { + sqlx::query!( + r#" + INSERT INTO + tee_proof_generation_details (l1_batch_number, status, proof_gen_data_blob_url, created_at, updated_at) + VALUES + ($1, 'ready_to_be_proven', $2, NOW(), NOW()) + ON CONFLICT (l1_batch_number) DO NOTHING + "#, + i64::from(block_number.0), + proof_gen_data_blob_url, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + } + + pub async fn mark_proof_generation_job_as_skipped( + &mut self, + block_number: L1BatchNumber, + ) -> Result<(), SqlxError> { + sqlx::query!( + r#" + UPDATE tee_proof_generation_details + SET + status = $1, + updated_at = NOW() + WHERE + l1_batch_number = $2 + "#, + ProofGenerationJobStatus::Skipped.to_string(), + i64::from(block_number.0) + ) + .execute(self.storage.conn()) + .await? + .rows_affected() + .eq(&1) + .then_some(()) + .ok_or(sqlx::Error::RowNotFound) + } + + pub async fn get_oldest_unpicked_batch(&mut self) -> Option { + let result: Option = sqlx::query!( + r#" + SELECT + l1_batch_number + FROM + tee_proof_generation_details + WHERE + status = 'ready_to_be_proven' + ORDER BY + l1_batch_number ASC + LIMIT + 1 + "#, + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| L1BatchNumber(row.l1_batch_number as u32)); + + result + } + + pub async fn get_oldest_not_generated_batch(&mut self) -> Option { + let result: Option = sqlx::query!( + r#" + SELECT + l1_batch_number + FROM + tee_proof_generation_details + WHERE + status NOT IN ('generated', 'skipped') + ORDER BY + l1_batch_number ASC + LIMIT + 1 + "#, + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| L1BatchNumber(row.l1_batch_number as u32)); + + result + } +} From e5edaa315859c45c8525daf4788c017856535d65 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 23 May 2024 
14:02:04 +0200 Subject: [PATCH 09/42] Add .sqlx `query-*.json` files The files were generated with the `zk init` (`cargo sqlx prepare` under the hood) --- ...f380f16ea3e7c709c8dd7b4fced29f4717e1c.json | 20 +++++++++++++++++ ...7c0f351163b364eeef70aa9098f305276e851.json | 20 +++++++++++++++++ ...270e25815ca2ab720a59567da3b3b5bcedd63.json | 15 +++++++++++++ ...795c7ab5a0e74b74f37ff25466ec6cd318859.json | 22 +++++++++++++++++++ ...2aca5fa987f0a3454da89447ab39373620a5e.json | 15 +++++++++++++ ...461510433c36dadfbff0a18515935e629fc0d.json | 15 +++++++++++++ 6 files changed, 107 insertions(+) create mode 100644 core/lib/dal/.sqlx/query-3a8f244d40e1c1d75d2c6d76e02f380f16ea3e7c709c8dd7b4fced29f4717e1c.json create mode 100644 core/lib/dal/.sqlx/query-61c931ae8867bd9a5ebe3a0ba407c0f351163b364eeef70aa9098f305276e851.json create mode 100644 core/lib/dal/.sqlx/query-6b7f66422078e9880b002da3175270e25815ca2ab720a59567da3b3b5bcedd63.json create mode 100644 core/lib/dal/.sqlx/query-aec1c25a0b3547e316a96a65066795c7ab5a0e74b74f37ff25466ec6cd318859.json create mode 100644 core/lib/dal/.sqlx/query-bd2383c3ba1822515b1ea1ee84d2aca5fa987f0a3454da89447ab39373620a5e.json create mode 100644 core/lib/dal/.sqlx/query-ca5b7f51bcb6a0b665424fc6979461510433c36dadfbff0a18515935e629fc0d.json diff --git a/core/lib/dal/.sqlx/query-3a8f244d40e1c1d75d2c6d76e02f380f16ea3e7c709c8dd7b4fced29f4717e1c.json b/core/lib/dal/.sqlx/query-3a8f244d40e1c1d75d2c6d76e02f380f16ea3e7c709c8dd7b4fced29f4717e1c.json new file mode 100644 index 000000000000..d4ac20fd3aab --- /dev/null +++ b/core/lib/dal/.sqlx/query-3a8f244d40e1c1d75d2c6d76e02f380f16ea3e7c709c8dd7b4fced29f4717e1c.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l1_batch_number\n FROM\n tee_proof_generation_details\n WHERE\n status = 'ready_to_be_proven'\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + 
"Left": [] + }, + "nullable": [ + false + ] + }, + "hash": "3a8f244d40e1c1d75d2c6d76e02f380f16ea3e7c709c8dd7b4fced29f4717e1c" +} diff --git a/core/lib/dal/.sqlx/query-61c931ae8867bd9a5ebe3a0ba407c0f351163b364eeef70aa9098f305276e851.json b/core/lib/dal/.sqlx/query-61c931ae8867bd9a5ebe3a0ba407c0f351163b364eeef70aa9098f305276e851.json new file mode 100644 index 000000000000..ada2796e8208 --- /dev/null +++ b/core/lib/dal/.sqlx/query-61c931ae8867bd9a5ebe3a0ba407c0f351163b364eeef70aa9098f305276e851.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l1_batch_number\n FROM\n tee_proof_generation_details\n WHERE\n status NOT IN ('generated', 'skipped')\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false + ] + }, + "hash": "61c931ae8867bd9a5ebe3a0ba407c0f351163b364eeef70aa9098f305276e851" +} diff --git a/core/lib/dal/.sqlx/query-6b7f66422078e9880b002da3175270e25815ca2ab720a59567da3b3b5bcedd63.json b/core/lib/dal/.sqlx/query-6b7f66422078e9880b002da3175270e25815ca2ab720a59567da3b3b5bcedd63.json new file mode 100644 index 000000000000..b7b84c323b2e --- /dev/null +++ b/core/lib/dal/.sqlx/query-6b7f66422078e9880b002da3175270e25815ca2ab720a59567da3b3b5bcedd63.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE tee_proof_generation_details\n SET\n status = $1,\n updated_at = NOW()\n WHERE\n l1_batch_number = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "6b7f66422078e9880b002da3175270e25815ca2ab720a59567da3b3b5bcedd63" +} diff --git a/core/lib/dal/.sqlx/query-aec1c25a0b3547e316a96a65066795c7ab5a0e74b74f37ff25466ec6cd318859.json b/core/lib/dal/.sqlx/query-aec1c25a0b3547e316a96a65066795c7ab5a0e74b74f37ff25466ec6cd318859.json new file mode 100644 index 000000000000..169e58c7e073 --- 
/dev/null +++ b/core/lib/dal/.sqlx/query-aec1c25a0b3547e316a96a65066795c7ab5a0e74b74f37ff25466ec6cd318859.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'picked_by_prover',\n updated_at = NOW(),\n prover_taken_at = NOW()\n WHERE\n l1_batch_number = (\n SELECT\n l1_batch_number\n FROM\n tee_proof_generation_details\n WHERE\n status = 'ready_to_be_proven'\n OR (\n status = 'picked_by_prover'\n AND prover_taken_at < NOW() - $1::INTERVAL\n )\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proof_generation_details.l1_batch_number\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Interval" + ] + }, + "nullable": [ + false + ] + }, + "hash": "aec1c25a0b3547e316a96a65066795c7ab5a0e74b74f37ff25466ec6cd318859" +} diff --git a/core/lib/dal/.sqlx/query-bd2383c3ba1822515b1ea1ee84d2aca5fa987f0a3454da89447ab39373620a5e.json b/core/lib/dal/.sqlx/query-bd2383c3ba1822515b1ea1ee84d2aca5fa987f0a3454da89447ab39373620a5e.json new file mode 100644 index 000000000000..dd62137dfc12 --- /dev/null +++ b/core/lib/dal/.sqlx/query-bd2383c3ba1822515b1ea1ee84d2aca5fa987f0a3454da89447ab39373620a5e.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'generated',\n proof_blob_url = $1,\n updated_at = NOW()\n WHERE\n l1_batch_number = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "bd2383c3ba1822515b1ea1ee84d2aca5fa987f0a3454da89447ab39373620a5e" +} diff --git a/core/lib/dal/.sqlx/query-ca5b7f51bcb6a0b665424fc6979461510433c36dadfbff0a18515935e629fc0d.json b/core/lib/dal/.sqlx/query-ca5b7f51bcb6a0b665424fc6979461510433c36dadfbff0a18515935e629fc0d.json new file mode 100644 index 000000000000..a1deabc1f58a --- /dev/null +++ 
b/core/lib/dal/.sqlx/query-ca5b7f51bcb6a0b665424fc6979461510433c36dadfbff0a18515935e629fc0d.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n tee_proof_generation_details (l1_batch_number, status, proof_gen_data_blob_url, created_at, updated_at)\n VALUES\n ($1, 'ready_to_be_proven', $2, NOW(), NOW())\n ON CONFLICT (l1_batch_number) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Text" + ] + }, + "nullable": [] + }, + "hash": "ca5b7f51bcb6a0b665424fc6979461510433c36dadfbff0a18515935e629fc0d" +} From b468c304245802033ddc3b44cae25dc4f1c43993 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 23 May 2024 15:26:21 +0200 Subject: [PATCH 10/42] Add one more serialization test to cover both SubmitProofRequest and SubmitTeeProofRequest --- core/lib/prover_interface/src/api.rs | 11 --- .../tests/job_serialization.rs | 95 ++++++++++++++++++- 2 files changed, 93 insertions(+), 13 deletions(-) diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index b7bb4890dc3e..bc930e9ff83c 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -50,14 +50,3 @@ pub enum SubmitProofResponse { Success, Error(String), } - -#[test] -fn test_tee_proof_request_serialization() { - let tee_proof = SubmitTeeProofRequest::Proof(Box::new(L1BatchTeeProofForL1 { - signature: vec![0, 1, 2, 3, 4], - })); - let encoded = serde_json::to_string(&tee_proof).unwrap(); - assert_eq!(r#"{"Proof":{"signature":[0,1,2,3,4]}}"#, encoded); - let decoded = serde_json::from_str(&encoded).unwrap(); - assert_eq!(tee_proof, decoded); -} diff --git a/core/lib/prover_interface/tests/job_serialization.rs b/core/lib/prover_interface/tests/job_serialization.rs index ffa6d18ef451..527735e3832f 100644 --- a/core/lib/prover_interface/tests/job_serialization.rs +++ b/core/lib/prover_interface/tests/job_serialization.rs @@ -1,10 +1,12 @@ //! 
Integration tests for object store serialization of job objects. +use circuit_sequencer_api_1_5_0::proof::FinalProof; use tokio::fs; use zksync_object_store::{Bucket, MockObjectStore}; use zksync_prover_interface::{ + api::{SubmitProofRequest, SubmitTeeProofRequest}, inputs::{PrepareBasicCircuitsJob, StorageLogMetadata}, - outputs::L1BatchProofForL1, + outputs::{L1BatchProofForL1, L1BatchTeeProofForL1}, }; use zksync_types::L1BatchNumber; @@ -66,7 +68,7 @@ async fn prepare_basic_circuits_job_compatibility() { assert_job_integrity(job_tuple.1, job_tuple.0); } -/// Simple test to check if we can succesfully parse the proof. +/// Simple test to check if we can successfully parse the proof. #[tokio::test] async fn test_final_proof_deserialization() { let proof = fs::read("./tests/l1_batch_proof_1_0_24_0.bin") @@ -76,3 +78,92 @@ async fn test_final_proof_deserialization() { let results: L1BatchProofForL1 = bincode::deserialize(&proof).unwrap(); assert_eq!(results.aggregation_result_coords[0][0], 0); } + +#[test] +fn test_proof_request_serialization() { + let proof = SubmitProofRequest::Proof(Box::new(L1BatchProofForL1 { + aggregation_result_coords: [[0; 32]; 4], + scheduler_proof: FinalProof::empty(), + })); + let encoded_obj = serde_json::to_string(&proof).unwrap(); + let encoded_json = r#"{ + "Proof": { + "aggregation_result_coords": [ + [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ], + [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ], + [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ], + [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ] + ], + "scheduler_proof": { + "n": 0, + "inputs": [], + "state_polys_commitments": [], + "witness_polys_commitments": [], + "copy_permutation_grand_product_commitment": { + "x": [ 0, 0, 0, 0 ], + "y": [ 1, 0, 0, 0 ], + 
"infinity": true + }, + "lookup_s_poly_commitment": null, + "lookup_grand_product_commitment": null, + "quotient_poly_parts_commitments": [], + "state_polys_openings_at_z": [], + "state_polys_openings_at_dilations": [], + "witness_polys_openings_at_z": [], + "witness_polys_openings_at_dilations": [], + "gate_setup_openings_at_z": [], + "gate_selectors_openings_at_z": [], + "copy_permutation_polys_openings_at_z": [], + "copy_permutation_grand_product_opening_at_z_omega": [ 0, 0, 0, 0 ], + "lookup_s_poly_opening_at_z_omega": null, + "lookup_grand_product_opening_at_z_omega": null, + "lookup_t_poly_opening_at_z": null, + "lookup_t_poly_opening_at_z_omega": null, + "lookup_selector_poly_opening_at_z": null, + "lookup_table_type_poly_opening_at_z": null, + "quotient_poly_opening_at_z": [ 0, 0, 0, 0 ], + "linearization_poly_opening_at_z": [ 0, 0, 0, 0 ], + "opening_proof_at_z": { + "x": [ 0, 0, 0, 0 ], + "y": [ 1, 0, 0, 0 ], + "infinity": true + }, + "opening_proof_at_z_omega": { + "x": [ 0, 0, 0, 0 ], + "y": [ 1, 0, 0, 0 ], + "infinity": true + } + } + } + }"#; + let decoded_obj: SubmitProofRequest = serde_json::from_str(&encoded_obj).unwrap(); + let decoded_json: SubmitProofRequest = serde_json::from_str(encoded_json).unwrap(); + match (decoded_obj, decoded_json) { + (SubmitProofRequest::Proof(decoded_obj), SubmitProofRequest::Proof(decoded_json)) => { + assert_eq!( + decoded_obj.aggregation_result_coords, + decoded_json.aggregation_result_coords + ); + } + _ => panic!("Either decoded_obj or decoded_json is not SubmitProofRequest::Proof"), + } +} + +#[test] +fn test_tee_proof_request_serialization() { + let tee_proof = SubmitTeeProofRequest::Proof(Box::new(L1BatchTeeProofForL1 { + signature: vec![0, 1, 2, 3, 4], + })); + let encoded = serde_json::to_string(&tee_proof).unwrap(); + assert_eq!(r#"{"Proof":{"signature":[0,1,2,3,4]}}"#, encoded); + let decoded = serde_json::from_str(&encoded).unwrap(); + assert_eq!(tee_proof, decoded); +} From 
c140233e1091f0f19942dbd511f073954c7be688 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 23 May 2024 17:18:36 +0200 Subject: [PATCH 11/42] Utilize mocked SQL table in unit tests for the /tee_proof_generation_data endpoint --- .../src/tee_request_processor.rs | 12 +++++++---- core/node/proof_data_handler/src/tests.rs | 21 +++++++++++++++---- 2 files changed, 25 insertions(+), 8 deletions(-) diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index 8fe1e9cb4fd1..5e57da344138 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -40,10 +40,14 @@ impl TeeRequestProcessor { ) -> Result, RequestProcessorError> { tracing::info!("Received request for proof generation data: {:?}", request); - // TODO: Replace this line with an appropriate method to get the next batch number to be proven. - // It's likely that a new SQL column needs to be added to the `proof_generation_details` table. - // Take self.config.proof_generation_timeout() into account when selecting the next batch to be proven! 
- let l1_batch_number_result = Option::from(L1BatchNumber::from(1)); + let l1_batch_number_result = self + .pool + .connection() + .await + .unwrap() + .tee_proof_generation_dal() + .get_next_block_to_be_proven(self.config.proof_generation_timeout()) + .await; let l1_batch_number = match l1_batch_number_result { Some(number) => number, diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index d869607a1c84..7094a6430609 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -1,4 +1,4 @@ -use crate::{create_proof_processing_router, ConnectionPool}; +use crate::create_proof_processing_router; use axum::{ body::Body, http::{self, Method, Request, StatusCode}, @@ -9,6 +9,7 @@ use tower::ServiceExt; use zksync_basic_types::U256; use zksync_config::configs::ProofDataHandlerConfig; use zksync_contracts::{BaseSystemContracts, SystemContractCode}; +use zksync_dal::{ConnectionPool, CoreDal}; use zksync_object_store::ObjectStoreFactory; use zksync_prover_interface::inputs::PrepareBasicCircuitsJob; use zksync_tee_verifier::TeeVerifierInput; @@ -54,14 +55,26 @@ async fn request_tee_proof_generation_data() { }, vec![(H256([1; 32]), vec![0, 1, 2, 3, 4])], ); + // mock object store let blob_store = ObjectStoreFactory::mock().create_store().await; blob_store.put(batch_number, &tvi).await.unwrap(); - // TODO mock relevant SQL table once the logic is implemented in the TeeRequestProcessor::get_proof_generation_data - // TODO useful examples: https://github.com/tokio-rs/axum/blob/main/examples/testing/src/main.rs#L58 + // mock SQL table let connection_pool = ConnectionPool::test_pool().await; + let mut storage = connection_pool.connection().await.unwrap(); + let mut proof_dal = storage.tee_proof_generation_dal(); + let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await; + assert!(oldest_batch_number.is_none()); + // TODO let mut transaction = 
storage.start_transaction().await.unwrap(); + // TODO if storage doesn't work, use transaction instead + proof_dal + .insert_tee_proof_generation_details(batch_number, "blob_url") + .await; + // TODO transaction.commit().await.unwrap(); + let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await.unwrap(); + assert_eq!(oldest_batch_number, batch_number); let app = create_proof_processing_router( blob_store, - connection_pool, + connection_pool.clone(), ProofDataHandlerConfig { http_port: 1337, proof_generation_timeout_in_secs: 10, From 9ea9cfe870a1006a468694302838512cdbf8cd19 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Fri, 24 May 2024 13:05:56 +0200 Subject: [PATCH 12/42] Revise database schema and update dependent code in tests and state machine handling accordingly --- ...f380f16ea3e7c709c8dd7b4fced29f4717e1c.json | 20 ---- ...5dc5adb190d65377c7994588d7a74bdae2319.json | 14 +++ ...7c0f351163b364eeef70aa9098f305276e851.json | 20 ---- ...8e032ac0c59cb9644a0aad4ed88f8ee544b93.json | 18 ++++ ...795c7ab5a0e74b74f37ff25466ec6cd318859.json | 22 ---- ...2aca5fa987f0a3454da89447ab39373620a5e.json | 15 --- ...461510433c36dadfbff0a18515935e629fc0d.json | 15 --- ...444499078a045a579528ed059d0dd32e5b8cb.json | 20 ++++ ...47d0c1bac5edc397f3e49cbe565fee92c7fbb.json | 22 ++++ ..._tee_proof_generation_details_table.up.sql | 8 +- core/lib/dal/src/tee_proof_generation_dal.rs | 101 +++++++++--------- core/node/metadata_calculator/src/updater.rs | 4 + core/node/proof_data_handler/src/tests.rs | 23 ++-- .../tee_verifier_input_producer/src/lib.rs | 1 + 14 files changed, 149 insertions(+), 154 deletions(-) delete mode 100644 core/lib/dal/.sqlx/query-3a8f244d40e1c1d75d2c6d76e02f380f16ea3e7c709c8dd7b4fced29f4717e1c.json create mode 100644 core/lib/dal/.sqlx/query-4c723fbbef8ca6d7b56e757b09e5dc5adb190d65377c7994588d7a74bdae2319.json delete mode 100644 core/lib/dal/.sqlx/query-61c931ae8867bd9a5ebe3a0ba407c0f351163b364eeef70aa9098f305276e851.json create mode 100644 
core/lib/dal/.sqlx/query-a314aadb34096e35924432d43ee8e032ac0c59cb9644a0aad4ed88f8ee544b93.json delete mode 100644 core/lib/dal/.sqlx/query-aec1c25a0b3547e316a96a65066795c7ab5a0e74b74f37ff25466ec6cd318859.json delete mode 100644 core/lib/dal/.sqlx/query-bd2383c3ba1822515b1ea1ee84d2aca5fa987f0a3454da89447ab39373620a5e.json delete mode 100644 core/lib/dal/.sqlx/query-ca5b7f51bcb6a0b665424fc6979461510433c36dadfbff0a18515935e629fc0d.json create mode 100644 core/lib/dal/.sqlx/query-dd4308f54f5fe1dea7c6848e121444499078a045a579528ed059d0dd32e5b8cb.json create mode 100644 core/lib/dal/.sqlx/query-f31b6325ad3fb6af3d73223f74c47d0c1bac5edc397f3e49cbe565fee92c7fbb.json diff --git a/core/lib/dal/.sqlx/query-3a8f244d40e1c1d75d2c6d76e02f380f16ea3e7c709c8dd7b4fced29f4717e1c.json b/core/lib/dal/.sqlx/query-3a8f244d40e1c1d75d2c6d76e02f380f16ea3e7c709c8dd7b4fced29f4717e1c.json deleted file mode 100644 index d4ac20fd3aab..000000000000 --- a/core/lib/dal/.sqlx/query-3a8f244d40e1c1d75d2c6d76e02f380f16ea3e7c709c8dd7b4fced29f4717e1c.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n l1_batch_number\n FROM\n tee_proof_generation_details\n WHERE\n status = 'ready_to_be_proven'\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false - ] - }, - "hash": "3a8f244d40e1c1d75d2c6d76e02f380f16ea3e7c709c8dd7b4fced29f4717e1c" -} diff --git a/core/lib/dal/.sqlx/query-4c723fbbef8ca6d7b56e757b09e5dc5adb190d65377c7994588d7a74bdae2319.json b/core/lib/dal/.sqlx/query-4c723fbbef8ca6d7b56e757b09e5dc5adb190d65377c7994588d7a74bdae2319.json new file mode 100644 index 000000000000..7fcc5d90ca56 --- /dev/null +++ b/core/lib/dal/.sqlx/query-4c723fbbef8ca6d7b56e757b09e5dc5adb190d65377c7994588d7a74bdae2319.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n tee_proof_generation_details 
(l1_batch_number, status, created_at, updated_at)\n VALUES\n ($1, 'ready_to_be_proven', NOW(), NOW())\n ON CONFLICT (l1_batch_number) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "4c723fbbef8ca6d7b56e757b09e5dc5adb190d65377c7994588d7a74bdae2319" +} diff --git a/core/lib/dal/.sqlx/query-61c931ae8867bd9a5ebe3a0ba407c0f351163b364eeef70aa9098f305276e851.json b/core/lib/dal/.sqlx/query-61c931ae8867bd9a5ebe3a0ba407c0f351163b364eeef70aa9098f305276e851.json deleted file mode 100644 index ada2796e8208..000000000000 --- a/core/lib/dal/.sqlx/query-61c931ae8867bd9a5ebe3a0ba407c0f351163b364eeef70aa9098f305276e851.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n l1_batch_number\n FROM\n tee_proof_generation_details\n WHERE\n status NOT IN ('generated', 'skipped')\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false - ] - }, - "hash": "61c931ae8867bd9a5ebe3a0ba407c0f351163b364eeef70aa9098f305276e851" -} diff --git a/core/lib/dal/.sqlx/query-a314aadb34096e35924432d43ee8e032ac0c59cb9644a0aad4ed88f8ee544b93.json b/core/lib/dal/.sqlx/query-a314aadb34096e35924432d43ee8e032ac0c59cb9644a0aad4ed88f8ee544b93.json new file mode 100644 index 000000000000..2247e3ce3b6f --- /dev/null +++ b/core/lib/dal/.sqlx/query-a314aadb34096e35924432d43ee8e032ac0c59cb9644a0aad4ed88f8ee544b93.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'generated',\n signature = $1,\n pubkey = $2,\n attestation = $3,\n tee_type = $4,\n updated_at = NOW()\n WHERE\n l1_batch_number = $5\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text", + "Text", + "Text", + "Int8" + ] + }, + "nullable": [] + }, + "hash": 
"a314aadb34096e35924432d43ee8e032ac0c59cb9644a0aad4ed88f8ee544b93" +} diff --git a/core/lib/dal/.sqlx/query-aec1c25a0b3547e316a96a65066795c7ab5a0e74b74f37ff25466ec6cd318859.json b/core/lib/dal/.sqlx/query-aec1c25a0b3547e316a96a65066795c7ab5a0e74b74f37ff25466ec6cd318859.json deleted file mode 100644 index 169e58c7e073..000000000000 --- a/core/lib/dal/.sqlx/query-aec1c25a0b3547e316a96a65066795c7ab5a0e74b74f37ff25466ec6cd318859.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'picked_by_prover',\n updated_at = NOW(),\n prover_taken_at = NOW()\n WHERE\n l1_batch_number = (\n SELECT\n l1_batch_number\n FROM\n tee_proof_generation_details\n WHERE\n status = 'ready_to_be_proven'\n OR (\n status = 'picked_by_prover'\n AND prover_taken_at < NOW() - $1::INTERVAL\n )\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proof_generation_details.l1_batch_number\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Interval" - ] - }, - "nullable": [ - false - ] - }, - "hash": "aec1c25a0b3547e316a96a65066795c7ab5a0e74b74f37ff25466ec6cd318859" -} diff --git a/core/lib/dal/.sqlx/query-bd2383c3ba1822515b1ea1ee84d2aca5fa987f0a3454da89447ab39373620a5e.json b/core/lib/dal/.sqlx/query-bd2383c3ba1822515b1ea1ee84d2aca5fa987f0a3454da89447ab39373620a5e.json deleted file mode 100644 index dd62137dfc12..000000000000 --- a/core/lib/dal/.sqlx/query-bd2383c3ba1822515b1ea1ee84d2aca5fa987f0a3454da89447ab39373620a5e.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'generated',\n proof_blob_url = $1,\n updated_at = NOW()\n WHERE\n l1_batch_number = $2\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Int8" - ] - }, - "nullable": [] - }, - "hash": 
"bd2383c3ba1822515b1ea1ee84d2aca5fa987f0a3454da89447ab39373620a5e" -} diff --git a/core/lib/dal/.sqlx/query-ca5b7f51bcb6a0b665424fc6979461510433c36dadfbff0a18515935e629fc0d.json b/core/lib/dal/.sqlx/query-ca5b7f51bcb6a0b665424fc6979461510433c36dadfbff0a18515935e629fc0d.json deleted file mode 100644 index a1deabc1f58a..000000000000 --- a/core/lib/dal/.sqlx/query-ca5b7f51bcb6a0b665424fc6979461510433c36dadfbff0a18515935e629fc0d.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n tee_proof_generation_details (l1_batch_number, status, proof_gen_data_blob_url, created_at, updated_at)\n VALUES\n ($1, 'ready_to_be_proven', $2, NOW(), NOW())\n ON CONFLICT (l1_batch_number) DO NOTHING\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Text" - ] - }, - "nullable": [] - }, - "hash": "ca5b7f51bcb6a0b665424fc6979461510433c36dadfbff0a18515935e629fc0d" -} diff --git a/core/lib/dal/.sqlx/query-dd4308f54f5fe1dea7c6848e121444499078a045a579528ed059d0dd32e5b8cb.json b/core/lib/dal/.sqlx/query-dd4308f54f5fe1dea7c6848e121444499078a045a579528ed059d0dd32e5b8cb.json new file mode 100644 index 000000000000..b08b159c9156 --- /dev/null +++ b/core/lib/dal/.sqlx/query-dd4308f54f5fe1dea7c6848e121444499078a045a579528ed059d0dd32e5b8cb.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN\n tee_verifier_input_producer_jobs AS inputs\n ON\n proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND proofs.status = 'ready_to_be_proven'\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false + ] + }, + "hash": "dd4308f54f5fe1dea7c6848e121444499078a045a579528ed059d0dd32e5b8cb" +} diff --git 
a/core/lib/dal/.sqlx/query-f31b6325ad3fb6af3d73223f74c47d0c1bac5edc397f3e49cbe565fee92c7fbb.json b/core/lib/dal/.sqlx/query-f31b6325ad3fb6af3d73223f74c47d0c1bac5edc397f3e49cbe565fee92c7fbb.json new file mode 100644 index 000000000000..23e2d177f35c --- /dev/null +++ b/core/lib/dal/.sqlx/query-f31b6325ad3fb6af3d73223f74c47d0c1bac5edc397f3e49cbe565fee92c7fbb.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'picked_by_prover',\n updated_at = NOW(),\n prover_taken_at = NOW()\n WHERE\n l1_batch_number = (\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN\n tee_verifier_input_producer_jobs AS inputs\n ON\n proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND (\n proofs.status = 'ready_to_be_proven'\n OR (\n proofs.status = 'picked_by_prover'\n AND proofs.prover_taken_at < NOW() - $1::INTERVAL\n )\n )\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proof_generation_details.l1_batch_number\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Interval" + ] + }, + "nullable": [ + false + ] + }, + "hash": "f31b6325ad3fb6af3d73223f74c47d0c1bac5edc397f3e49cbe565fee92c7fbb" +} diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql index 1c3ea6750492..b05c675f8418 100644 --- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql @@ -1,9 +1,11 @@ CREATE TABLE IF NOT EXISTS tee_proof_generation_details ( - l1_batch_number BIGINT PRIMARY KEY REFERENCES l1_batches (number) ON DELETE CASCADE, + l1_batch_number BIGINT PRIMARY KEY REFERENCES 
tee_verifier_input_producer_jobs (l1_batch_number) ON DELETE CASCADE, status TEXT NOT NULL, - proof_gen_data_blob_url TEXT NOT NULL, - proof_blob_url TEXT, + signature TEXT, + pubkey TEXT, + attestation TEXT, + tee_type TEXT, created_at TIMESTAMP NOT NULL, updated_at TIMESTAMP NOT NULL, prover_taken_at TIMESTAMP diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index bcac25451218..931e84258ba6 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -12,7 +12,7 @@ pub struct TeeProofGenerationDal<'a, 'c> { } #[derive(Debug, EnumString, Display)] -enum ProofGenerationJobStatus { +enum TeeProofGenerationJobStatus { #[strum(serialize = "ready_to_be_proven")] ReadyToBeProven, #[strum(serialize = "picked_by_prover")] @@ -23,6 +23,12 @@ enum ProofGenerationJobStatus { Skipped, } +#[derive(Debug, EnumString, Display)] +pub enum TeeType { + #[strum(serialize = "sgx")] + Sgx, +} + impl TeeProofGenerationDal<'_, '_> { pub async fn get_next_block_to_be_proven( &mut self, @@ -39,14 +45,21 @@ impl TeeProofGenerationDal<'_, '_> { WHERE l1_batch_number = ( SELECT - l1_batch_number + proofs.l1_batch_number FROM - tee_proof_generation_details + tee_proof_generation_details AS proofs + JOIN + tee_verifier_input_producer_jobs AS inputs + ON + proofs.l1_batch_number = inputs.l1_batch_number WHERE - status = 'ready_to_be_proven' - OR ( - status = 'picked_by_prover' - AND prover_taken_at < NOW() - $1::INTERVAL + inputs.status = 'Successful' + AND ( + proofs.status = 'ready_to_be_proven' + OR ( + proofs.status = 'picked_by_prover' + AND proofs.prover_taken_at < NOW() - $1::INTERVAL + ) ) ORDER BY l1_batch_number ASC @@ -71,19 +84,28 @@ impl TeeProofGenerationDal<'_, '_> { pub async fn save_proof_artifacts_metadata( &mut self, block_number: L1BatchNumber, - proof_blob_url: &str, + signature: &str, + pubkey: &str, + attestation: &str, + tee_type: TeeType, ) -> Result<(), SqlxError> 
{ sqlx::query!( r#" UPDATE tee_proof_generation_details SET status = 'generated', - proof_blob_url = $1, + signature = $1, + pubkey = $2, + attestation = $3, + tee_type = $4, updated_at = NOW() WHERE - l1_batch_number = $2 + l1_batch_number = $5 "#, - proof_blob_url, + signature, + pubkey, + attestation, + tee_type.to_string(), i64::from(block_number.0) ) .execute(self.storage.conn()) @@ -94,21 +116,16 @@ impl TeeProofGenerationDal<'_, '_> { .ok_or(sqlx::Error::RowNotFound) } - pub async fn insert_tee_proof_generation_details( - &mut self, - block_number: L1BatchNumber, - proof_gen_data_blob_url: &str, - ) { + pub async fn insert_tee_proof_generation_details(&mut self, block_number: L1BatchNumber) { sqlx::query!( r#" - INSERT INTO - tee_proof_generation_details (l1_batch_number, status, proof_gen_data_blob_url, created_at, updated_at) - VALUES - ($1, 'ready_to_be_proven', $2, NOW(), NOW()) - ON CONFLICT (l1_batch_number) DO NOTHING - "#, + INSERT INTO + tee_proof_generation_details (l1_batch_number, status, created_at, updated_at) + VALUES + ($1, 'ready_to_be_proven', NOW(), NOW()) + ON CONFLICT (l1_batch_number) DO NOTHING + "#, i64::from(block_number.0), - proof_gen_data_blob_url, ) .execute(self.storage.conn()) .await @@ -128,7 +145,7 @@ impl TeeProofGenerationDal<'_, '_> { WHERE l1_batch_number = $2 "#, - ProofGenerationJobStatus::Skipped.to_string(), + TeeProofGenerationJobStatus::Skipped.to_string(), i64::from(block_number.0) ) .execute(self.storage.conn()) @@ -143,36 +160,18 @@ impl TeeProofGenerationDal<'_, '_> { let result: Option = sqlx::query!( r#" SELECT - l1_batch_number - FROM - tee_proof_generation_details - WHERE - status = 'ready_to_be_proven' - ORDER BY - l1_batch_number ASC - LIMIT - 1 - "#, - ) - .fetch_optional(self.storage.conn()) - .await - .unwrap() - .map(|row| L1BatchNumber(row.l1_batch_number as u32)); - - result - } - - pub async fn get_oldest_not_generated_batch(&mut self) -> Option { - let result: Option = sqlx::query!( - r#" - SELECT 
- l1_batch_number + proofs.l1_batch_number FROM - tee_proof_generation_details + tee_proof_generation_details AS proofs + JOIN + tee_verifier_input_producer_jobs AS inputs + ON + proofs.l1_batch_number = inputs.l1_batch_number WHERE - status NOT IN ('generated', 'skipped') + inputs.status = 'Successful' + AND proofs.status = 'ready_to_be_proven' ORDER BY - l1_batch_number ASC + proofs.l1_batch_number ASC LIMIT 1 "#, diff --git a/core/node/metadata_calculator/src/updater.rs b/core/node/metadata_calculator/src/updater.rs index cca6fce6d4cd..6f5ee3b2787d 100644 --- a/core/node/metadata_calculator/src/updater.rs +++ b/core/node/metadata_calculator/src/updater.rs @@ -151,6 +151,10 @@ impl TreeUpdater { .proof_generation_dal() .insert_proof_generation_details(l1_batch_number, object_key) .await; + storage + .tee_proof_generation_dal() + .insert_tee_proof_generation_details(l1_batch_number) + .await; } save_postgres_latency.observe(); tracing::info!("Updated metadata for L1 batch #{l1_batch_number} in Postgres"); diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 7094a6430609..3ff1bda20d1e 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -55,23 +55,30 @@ async fn request_tee_proof_generation_data() { }, vec![(H256([1; 32]), vec![0, 1, 2, 3, 4])], ); - // mock object store + // populate mocked object store with a single batch blob let blob_store = ObjectStoreFactory::mock().create_store().await; blob_store.put(batch_number, &tvi).await.unwrap(); - // mock SQL table + // get connection to the SQL db let connection_pool = ConnectionPool::test_pool().await; - let mut storage = connection_pool.connection().await.unwrap(); - let mut proof_dal = storage.tee_proof_generation_dal(); + let mut db_conn1 = connection_pool.connection().await.unwrap(); + let mut proof_dal = db_conn1.tee_proof_generation_dal(); + let mut db_conn2 = connection_pool.connection().await.unwrap(); + 
let mut input_producer_dal = db_conn2.tee_verifier_input_producer_dal(); + // there should not be any batches awaiting proof in the db yet let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await; assert!(oldest_batch_number.is_none()); - // TODO let mut transaction = storage.start_transaction().await.unwrap(); - // TODO if storage doesn't work, use transaction instead + // mock SQL table with relevant batch information + input_producer_dal + .create_tee_verifier_input_producer_job(batch_number) + .await + .expect("Failed to create tee_verifier_input_producer_job job"); proof_dal - .insert_tee_proof_generation_details(batch_number, "blob_url") + .insert_tee_proof_generation_details(batch_number) .await; - // TODO transaction.commit().await.unwrap(); + // now, there should be one batch in the database awaiting proof let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await.unwrap(); assert_eq!(oldest_batch_number, batch_number); + // test the /tee_proof_generation_data endpoint; it should return batch data let app = create_proof_processing_router( blob_store, connection_pool.clone(), diff --git a/core/node/tee_verifier_input_producer/src/lib.rs b/core/node/tee_verifier_input_producer/src/lib.rs index 47ae9cd87c3f..752b696e7e36 100644 --- a/core/node/tee_verifier_input_producer/src/lib.rs +++ b/core/node/tee_verifier_input_producer/src/lib.rs @@ -259,6 +259,7 @@ impl JobProcessor for TeeVerifierInputProducer { .mark_job_as_successful(job_id, started_at, &object_path) .await .context("failed to mark job as successful for TeeVerifierInputProducer")?; + // TODO set 'ready_to_be_proven' using transaction.tee_verifier_input_producer_dal()? naah... 
transaction .commit() .await From 71a263cd49f309fda09f49f3cd510c6e6ac5d4f5 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 27 May 2024 12:25:45 +0200 Subject: [PATCH 13/42] Fix request_tee_proof_generation_data unit test by marking job as successful prior to running assertions --- core/node/proof_data_handler/src/tests.rs | 48 +++++++++++++++++++---- 1 file changed, 40 insertions(+), 8 deletions(-) diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 3ff1bda20d1e..5bd6eecacbc9 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -1,3 +1,5 @@ +use std::time::Instant; + use crate::create_proof_processing_router; use axum::{ body::Body, @@ -15,8 +17,16 @@ use zksync_prover_interface::inputs::PrepareBasicCircuitsJob; use zksync_tee_verifier::TeeVerifierInput; use zksync_types::{commitment::L1BatchCommitmentMode, L1BatchNumber, H256}; +// Test the /tee_proof_generation_data endpoint by: +// 1. Mocking an object store with a single batch blob containing TEE verifier input +// 2. Populating the SQL db with relevant information about the status of the TEE verifier input and +// TEE proof generation +// 3. 
Sending a request to the /tee_proof_generation_data endpoint and asserting that the response +// matches the file from the object store #[tokio::test] async fn request_tee_proof_generation_data() { + // prepare sample TEE verifier input + let batch_number = L1BatchNumber::from(1); let tvi = TeeVerifierInput::new( PrepareBasicCircuitsJob::new(0), @@ -55,30 +65,52 @@ async fn request_tee_proof_generation_data() { }, vec![(H256([1; 32]), vec![0, 1, 2, 3, 4])], ); + // populate mocked object store with a single batch blob + let blob_store = ObjectStoreFactory::mock().create_store().await; - blob_store.put(batch_number, &tvi).await.unwrap(); + let object_path = blob_store.put(batch_number, &tvi).await.unwrap(); + // get connection to the SQL db + let connection_pool = ConnectionPool::test_pool().await; - let mut db_conn1 = connection_pool.connection().await.unwrap(); - let mut proof_dal = db_conn1.tee_proof_generation_dal(); - let mut db_conn2 = connection_pool.connection().await.unwrap(); - let mut input_producer_dal = db_conn2.tee_verifier_input_producer_dal(); + let mut proof_db_conn = connection_pool.connection().await.unwrap(); + let mut proof_dal = proof_db_conn.tee_proof_generation_dal(); + let mut input_db_conn = connection_pool.connection().await.unwrap(); + let mut input_producer_dal = input_db_conn.tee_verifier_input_producer_dal(); + // there should not be any batches awaiting proof in the db yet + let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await; assert!(oldest_batch_number.is_none()); - // mock SQL table with relevant batch information + + // mock SQL table with relevant information about the status of the TEE verifier input + input_producer_dal .create_tee_verifier_input_producer_job(batch_number) .await - .expect("Failed to create tee_verifier_input_producer_job job"); + .expect("Failed to create tee_verifier_input_producer_job"); + + // pretend that the TEE verifier input file was fetched successfully + + input_producer_dal + 
.mark_job_as_successful(batch_number, Instant::now(), &object_path) + .await + .expect("Failed to mark tee_verifier_input_producer_job job as successful"); + + // mock SQL table with relevant information about the status of TEE proof generation + proof_dal .insert_tee_proof_generation_details(batch_number) .await; + // now, there should be one batch in the database awaiting proof + let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await.unwrap(); assert_eq!(oldest_batch_number, batch_number); - // test the /tee_proof_generation_data endpoint; it should return batch data + + // test the /tee_proof_generation_data endpoint; it should return the batch from the object store + let app = create_proof_processing_router( blob_store, connection_pool.clone(), From aca88100d51984527fba2fa2515157eda2f22ec4 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 27 May 2024 16:59:33 +0200 Subject: [PATCH 14/42] Refine /submit_tee_proof endpoint unit test --- checks-config/era.dic | 2 + ...8e032ac0c59cb9644a0aad4ed88f8ee544b93.json | 6 +-- ..._tee_proof_generation_details_table.up.sql | 6 +-- core/lib/dal/src/tee_proof_generation_dal.rs | 6 +-- core/lib/prover_interface/src/outputs.rs | 3 +- .../tests/job_serialization.rs | 17 +++++-- .../src/tee_request_processor.rs | 17 ++++--- core/node/proof_data_handler/src/tests.rs | 47 +++++++++++++++++-- .../tee_verifier_input_producer/src/lib.rs | 1 - 9 files changed, 78 insertions(+), 27 deletions(-) diff --git a/checks-config/era.dic b/checks-config/era.dic index d22d57e091d1..baf2b512c24c 100644 --- a/checks-config/era.dic +++ b/checks-config/era.dic @@ -977,3 +977,5 @@ TeeRequestProcessor l1_batch_number RequestProcessorError map_err +tee_proof_generation_data +submit_tee_proof diff --git a/core/lib/dal/.sqlx/query-a314aadb34096e35924432d43ee8e032ac0c59cb9644a0aad4ed88f8ee544b93.json b/core/lib/dal/.sqlx/query-a314aadb34096e35924432d43ee8e032ac0c59cb9644a0aad4ed88f8ee544b93.json index 2247e3ce3b6f..b42dae8f1a94 100644 
--- a/core/lib/dal/.sqlx/query-a314aadb34096e35924432d43ee8e032ac0c59cb9644a0aad4ed88f8ee544b93.json +++ b/core/lib/dal/.sqlx/query-a314aadb34096e35924432d43ee8e032ac0c59cb9644a0aad4ed88f8ee544b93.json @@ -5,9 +5,9 @@ "columns": [], "parameters": { "Left": [ - "Text", - "Text", - "Text", + "Bytea", + "Bytea", + "Bytea", "Text", "Int8" ] diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql index b05c675f8418..e4f136c88e65 100644 --- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql @@ -2,9 +2,9 @@ CREATE TABLE IF NOT EXISTS tee_proof_generation_details ( l1_batch_number BIGINT PRIMARY KEY REFERENCES tee_verifier_input_producer_jobs (l1_batch_number) ON DELETE CASCADE, status TEXT NOT NULL, - signature TEXT, - pubkey TEXT, - attestation TEXT, + signature BYTEA, + pubkey BYTEA, + attestation BYTEA, tee_type TEXT, created_at TIMESTAMP NOT NULL, updated_at TIMESTAMP NOT NULL, diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 931e84258ba6..7beb0fe0e559 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -84,9 +84,9 @@ impl TeeProofGenerationDal<'_, '_> { pub async fn save_proof_artifacts_metadata( &mut self, block_number: L1BatchNumber, - signature: &str, - pubkey: &str, - attestation: &str, + signature: &[u8], + pubkey: &[u8], + attestation: &[u8], tee_type: TeeType, ) -> Result<(), SqlxError> { sqlx::query!( diff --git a/core/lib/prover_interface/src/outputs.rs b/core/lib/prover_interface/src/outputs.rs index f934ffddded8..f11f9b466900 100644 --- a/core/lib/prover_interface/src/outputs.rs +++ b/core/lib/prover_interface/src/outputs.rs @@ -15,8 +15,9 @@ pub struct L1BatchProofForL1 { 
#[derive(Clone, PartialEq, Serialize, Deserialize)] pub struct L1BatchTeeProofForL1 { - // TODO revisit what else is needed here pub signature: Vec, + pub pubkey: Vec, + pub attestation: Vec, } impl fmt::Debug for L1BatchProofForL1 { diff --git a/core/lib/prover_interface/tests/job_serialization.rs b/core/lib/prover_interface/tests/job_serialization.rs index 527735e3832f..46f92def0b56 100644 --- a/core/lib/prover_interface/tests/job_serialization.rs +++ b/core/lib/prover_interface/tests/job_serialization.rs @@ -159,11 +159,18 @@ fn test_proof_request_serialization() { #[test] fn test_tee_proof_request_serialization() { - let tee_proof = SubmitTeeProofRequest::Proof(Box::new(L1BatchTeeProofForL1 { + let tee_proof_str = r#"{ + "Proof": { + "signature": [ 0, 1, 2, 3, 4 ], + "pubkey": [ 5, 6, 7, 8, 9 ], + "attestation": [ 10, 11, 12, 13, 14 ] + } + }"#; + let tee_proof_result = serde_json::from_str::(tee_proof_str).unwrap(); + let tee_proof_expected = SubmitTeeProofRequest::Proof(Box::new(L1BatchTeeProofForL1 { signature: vec![0, 1, 2, 3, 4], + pubkey: vec![5, 6, 7, 8, 9], + attestation: vec![10, 11, 12, 13, 14], })); - let encoded = serde_json::to_string(&tee_proof).unwrap(); - assert_eq!(r#"{"Proof":{"signature":[0,1,2,3,4]}}"#, encoded); - let decoded = serde_json::from_str(&encoded).unwrap(); - assert_eq!(tee_proof, decoded); + assert_eq!(tee_proof_result, tee_proof_expected); } diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index 5e57da344138..754779e3c2a9 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use crate::errors::RequestProcessorError; use axum::{extract::Path, Json}; use zksync_config::configs::ProofDataHandlerConfig; -use zksync_dal::{ConnectionPool, Core, CoreDal}; +use zksync_dal::{tee_proof_generation_dal::TeeType, ConnectionPool, Core, CoreDal}; 
use zksync_object_store::ObjectStore; use zksync_prover_interface::api::{ GenericProofGenerationDataResponse, SubmitProofResponse, SubmitTeeProofRequest, @@ -74,18 +74,23 @@ impl TeeRequestProcessor { let l1_batch_number = L1BatchNumber(l1_batch_number); let mut connection = self.pool.connection().await.unwrap(); - let mut dal = connection.proof_generation_dal(); + let mut dal = connection.tee_proof_generation_dal(); - // TODO: Replace the lines below with code that saves the proof generation result back to the database. match payload { SubmitTeeProofRequest::Proof(proof) => { println!( "Received proof {:?} for block number: {:?}", proof, l1_batch_number ); - // dal.save_proof_artifacts_metadata(l1_batch_number, &blob_url) - // .await - // .map_err(RequestProcessorError::Sqlx)?; + dal.save_proof_artifacts_metadata( + l1_batch_number, + &proof.signature, + &proof.signature, + &proof.attestation, + TeeType::Sgx, + ) + .await + .map_err(RequestProcessorError::Sqlx)?; } SubmitTeeProofRequest::SkippedProofGeneration => { dal.mark_proof_generation_job_as_skipped(l1_batch_number) diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 5bd6eecacbc9..385798302433 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -13,7 +13,7 @@ use zksync_config::configs::ProofDataHandlerConfig; use zksync_contracts::{BaseSystemContracts, SystemContractCode}; use zksync_dal::{ConnectionPool, CoreDal}; use zksync_object_store::ObjectStoreFactory; -use zksync_prover_interface::inputs::PrepareBasicCircuitsJob; +use zksync_prover_interface::{api::SubmitTeeProofRequest, inputs::PrepareBasicCircuitsJob}; use zksync_tee_verifier::TeeVerifierInput; use zksync_types::{commitment::L1BatchCommitmentMode, L1BatchNumber, H256}; @@ -143,10 +143,12 @@ async fn request_tee_proof_generation_data() { assert_eq!(tvi, deserialized); } +// Test the /submit_tee_proof endpoint #[tokio::test] async fn 
submit_tee_proof() { let blob_store = ObjectStoreFactory::mock().create_store().await; let connection_pool = ConnectionPool::test_pool().await; + let connection_clone = connection_pool.clone(); let app = create_proof_processing_router( blob_store, connection_pool, @@ -157,14 +159,49 @@ async fn submit_tee_proof() { L1BatchCommitmentMode::Rollup, ); - let request = r#"{ "Proof": { "signature": [ 0, 1, 2, 3, 4 ] } }"#; - let request: serde_json::Value = serde_json::from_str(request).unwrap(); - let req_body = Body::from(serde_json::to_vec(&request).unwrap()); + let tee_proof_request_str = r#"{ + "Proof": { + "signature": [ 0, 1, 2, 3, 4 ], + "pubkey": [ 5, 6, 7, 8, 9 ], + "attestation": [ 10, 11, 12, 13, 14 ] + } + }"#; + let batch_number = L1BatchNumber::from(1); + let mut proof_db_conn = connection_clone.connection().await.unwrap(); + let mut proof_dal = proof_db_conn.tee_proof_generation_dal(); + let mut input_db_conn = connection_clone.connection().await.unwrap(); + let mut input_producer_dal = input_db_conn.tee_verifier_input_producer_dal(); + + // mock SQL table with relevant information about the status of the TEE verifier input + + input_producer_dal + .create_tee_verifier_input_producer_job(batch_number) + .await + .expect("Failed to create tee_verifier_input_producer_job"); + + // pretend that the TEE verifier input file was fetched successfully + + let object_path = "mocked_object_path"; + input_producer_dal + .mark_job_as_successful(batch_number, Instant::now(), object_path) + .await + .expect("Failed to mark tee_verifier_input_producer_job job as successful"); + + // mock SQL table with relevant information about the status of TEE proof generation + + proof_dal + .insert_tee_proof_generation_details(batch_number) + .await; + + let tee_proof_request = + serde_json::from_str::(tee_proof_request_str).unwrap(); + let req_body = Body::from(serde_json::to_vec(&tee_proof_request).unwrap()); + let uri = format!("/submit_tee_proof/{}", batch_number.0); let 
response = app .oneshot( Request::builder() .method(Method::POST) - .uri("/submit_tee_proof/123") + .uri(uri) .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) .body(req_body) .unwrap(), diff --git a/core/node/tee_verifier_input_producer/src/lib.rs b/core/node/tee_verifier_input_producer/src/lib.rs index 752b696e7e36..47ae9cd87c3f 100644 --- a/core/node/tee_verifier_input_producer/src/lib.rs +++ b/core/node/tee_verifier_input_producer/src/lib.rs @@ -259,7 +259,6 @@ impl JobProcessor for TeeVerifierInputProducer { .mark_job_as_successful(job_id, started_at, &object_path) .await .context("failed to mark job as successful for TeeVerifierInputProducer")?; - // TODO set 'ready_to_be_proven' using transaction.tee_verifier_input_producer_dal()? naah... transaction .commit() .await From d763c9cf5c8ff12ecbba2aea2d02408fe1e35bc6 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 27 May 2024 23:41:17 +0200 Subject: [PATCH 15/42] Address CI code formatting complaints --- core/node/proof_data_handler/src/lib.rs | 5 ++--- core/node/proof_data_handler/src/request_processor.rs | 3 ++- core/node/proof_data_handler/src/tee_request_processor.rs | 3 ++- core/node/proof_data_handler/src/tests.rs | 3 ++- 4 files changed, 8 insertions(+), 6 deletions(-) diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index b841a48500eb..5de1f2a93c38 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -2,6 +2,8 @@ use std::{net::SocketAddr, sync::Arc}; use anyhow::Context as _; use axum::{extract::Path, routing::post, Json, Router}; +use request_processor::RequestProcessor; +use tee_request_processor::TeeRequestProcessor; use tokio::sync::watch; use zksync_config::configs::ProofDataHandlerConfig; use zksync_dal::{ConnectionPool, Core}; @@ -12,9 +14,6 @@ use zksync_prover_interface::api::{ }; use zksync_types::commitment::L1BatchCommitmentMode; -use 
crate::request_processor::RequestProcessor; -use crate::tee_request_processor::TeeRequestProcessor; - #[cfg(test)] mod tests; diff --git a/core/node/proof_data_handler/src/request_processor.rs b/core/node/proof_data_handler/src/request_processor.rs index 28e399353d5b..2aec391ba342 100644 --- a/core/node/proof_data_handler/src/request_processor.rs +++ b/core/node/proof_data_handler/src/request_processor.rs @@ -1,6 +1,5 @@ use std::sync::Arc; -use crate::errors::RequestProcessorError; use axum::{extract::Path, Json}; use zksync_config::configs::ProofDataHandlerConfig; use zksync_dal::{ConnectionPool, Core, CoreDal}; @@ -16,6 +15,8 @@ use zksync_types::{ L1BatchNumber, H256, }; +use crate::errors::RequestProcessorError; + #[derive(Clone)] pub(crate) struct RequestProcessor { blob_store: Arc, diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index 754779e3c2a9..2c5632127c20 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -1,6 +1,5 @@ use std::sync::Arc; -use crate::errors::RequestProcessorError; use axum::{extract::Path, Json}; use zksync_config::configs::ProofDataHandlerConfig; use zksync_dal::{tee_proof_generation_dal::TeeType, ConnectionPool, Core, CoreDal}; @@ -12,6 +11,8 @@ use zksync_prover_interface::api::{ use zksync_tee_verifier::TeeVerifierInput; use zksync_types::L1BatchNumber; +use crate::errors::RequestProcessorError; + pub type TeeProofGenerationDataResponse = GenericProofGenerationDataResponse; #[derive(Clone)] diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 385798302433..ea969036d0e4 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -1,6 +1,5 @@ use std::time::Instant; -use crate::create_proof_processing_router; use axum::{ body::Body, http::{self, Method, Request, 
StatusCode}, @@ -17,6 +16,8 @@ use zksync_prover_interface::{api::SubmitTeeProofRequest, inputs::PrepareBasicCi use zksync_tee_verifier::TeeVerifierInput; use zksync_types::{commitment::L1BatchCommitmentMode, L1BatchNumber, H256}; +use crate::create_proof_processing_router; + // Test the /tee_proof_generation_data endpoint by: // 1. Mocking an object store with a single batch blob containing TEE verifier input // 2. Populating the SQL db with relevant information about the status of the TEE verifier input and From ac83fa5657a8849bdc7f19fdc1013bd10d923abe Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Tue, 28 May 2024 08:23:41 +0200 Subject: [PATCH 16/42] Reformat SQL queries using `zk fmt` --- ...5dc5adb190d65377c7994588d7a74bdae2319.json | 14 ------------ ...a2fca14965083b0589c3b3efad02e37d55f0c.json | 20 +++++++++++++++++ ...b608d21dc70397b64ce500881a8b55953c59c.json | 14 ++++++++++++ ...444499078a045a579528ed059d0dd32e5b8cb.json | 20 ----------------- ...148b0f1c7e512dd43434062341eb263fe434f.json | 22 +++++++++++++++++++ ...47d0c1bac5edc397f3e49cbe565fee92c7fbb.json | 22 ------------------- core/lib/dal/src/tee_proof_generation_dal.rs | 22 +++++++------------ 7 files changed, 64 insertions(+), 70 deletions(-) delete mode 100644 core/lib/dal/.sqlx/query-4c723fbbef8ca6d7b56e757b09e5dc5adb190d65377c7994588d7a74bdae2319.json create mode 100644 core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json create mode 100644 core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json delete mode 100644 core/lib/dal/.sqlx/query-dd4308f54f5fe1dea7c6848e121444499078a045a579528ed059d0dd32e5b8cb.json create mode 100644 core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json delete mode 100644 core/lib/dal/.sqlx/query-f31b6325ad3fb6af3d73223f74c47d0c1bac5edc397f3e49cbe565fee92c7fbb.json diff --git 
a/core/lib/dal/.sqlx/query-4c723fbbef8ca6d7b56e757b09e5dc5adb190d65377c7994588d7a74bdae2319.json b/core/lib/dal/.sqlx/query-4c723fbbef8ca6d7b56e757b09e5dc5adb190d65377c7994588d7a74bdae2319.json deleted file mode 100644 index 7fcc5d90ca56..000000000000 --- a/core/lib/dal/.sqlx/query-4c723fbbef8ca6d7b56e757b09e5dc5adb190d65377c7994588d7a74bdae2319.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n tee_proof_generation_details (l1_batch_number, status, created_at, updated_at)\n VALUES\n ($1, 'ready_to_be_proven', NOW(), NOW())\n ON CONFLICT (l1_batch_number) DO NOTHING\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [] - }, - "hash": "4c723fbbef8ca6d7b56e757b09e5dc5adb190d65377c7994588d7a74bdae2319" -} diff --git a/core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json b/core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json new file mode 100644 index 000000000000..f0603488f1e8 --- /dev/null +++ b/core/lib/dal/.sqlx/query-640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND proofs.status = 'ready_to_be_proven'\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false + ] + }, + "hash": "640d37aa1d6dc722b1651c74b7ea2fca14965083b0589c3b3efad02e37d55f0c" +} diff --git a/core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json 
b/core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json new file mode 100644 index 000000000000..994bfcfbb5a2 --- /dev/null +++ b/core/lib/dal/.sqlx/query-9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n tee_proof_generation_details (l1_batch_number, status, created_at, updated_at)\n VALUES\n ($1, 'ready_to_be_proven', NOW(), NOW())\n ON CONFLICT (l1_batch_number) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "9533a672ae82db344ae1070ae11b608d21dc70397b64ce500881a8b55953c59c" +} diff --git a/core/lib/dal/.sqlx/query-dd4308f54f5fe1dea7c6848e121444499078a045a579528ed059d0dd32e5b8cb.json b/core/lib/dal/.sqlx/query-dd4308f54f5fe1dea7c6848e121444499078a045a579528ed059d0dd32e5b8cb.json deleted file mode 100644 index b08b159c9156..000000000000 --- a/core/lib/dal/.sqlx/query-dd4308f54f5fe1dea7c6848e121444499078a045a579528ed059d0dd32e5b8cb.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN\n tee_verifier_input_producer_jobs AS inputs\n ON\n proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND proofs.status = 'ready_to_be_proven'\n ORDER BY\n proofs.l1_batch_number ASC\n LIMIT\n 1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false - ] - }, - "hash": "dd4308f54f5fe1dea7c6848e121444499078a045a579528ed059d0dd32e5b8cb" -} diff --git a/core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json b/core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json new file mode 100644 index 000000000000..4236e72fccad --- 
/dev/null +++ b/core/lib/dal/.sqlx/query-e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'picked_by_prover',\n updated_at = NOW(),\n prover_taken_at = NOW()\n WHERE\n l1_batch_number = (\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND (\n proofs.status = 'ready_to_be_proven'\n OR (\n proofs.status = 'picked_by_prover'\n AND proofs.prover_taken_at < NOW() - $1::INTERVAL\n )\n )\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proof_generation_details.l1_batch_number\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Interval" + ] + }, + "nullable": [ + false + ] + }, + "hash": "e2ff392b3aa7a22fc39d150d08b148b0f1c7e512dd43434062341eb263fe434f" +} diff --git a/core/lib/dal/.sqlx/query-f31b6325ad3fb6af3d73223f74c47d0c1bac5edc397f3e49cbe565fee92c7fbb.json b/core/lib/dal/.sqlx/query-f31b6325ad3fb6af3d73223f74c47d0c1bac5edc397f3e49cbe565fee92c7fbb.json deleted file mode 100644 index 23e2d177f35c..000000000000 --- a/core/lib/dal/.sqlx/query-f31b6325ad3fb6af3d73223f74c47d0c1bac5edc397f3e49cbe565fee92c7fbb.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'picked_by_prover',\n updated_at = NOW(),\n prover_taken_at = NOW()\n WHERE\n l1_batch_number = (\n SELECT\n proofs.l1_batch_number\n FROM\n tee_proof_generation_details AS proofs\n JOIN\n tee_verifier_input_producer_jobs AS inputs\n ON\n proofs.l1_batch_number = inputs.l1_batch_number\n WHERE\n inputs.status = 'Successful'\n AND (\n proofs.status = 'ready_to_be_proven'\n OR (\n 
proofs.status = 'picked_by_prover'\n AND proofs.prover_taken_at < NOW() - $1::INTERVAL\n )\n )\n ORDER BY\n l1_batch_number ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n tee_proof_generation_details.l1_batch_number\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Interval" - ] - }, - "nullable": [ - false - ] - }, - "hash": "f31b6325ad3fb6af3d73223f74c47d0c1bac5edc397f3e49cbe565fee92c7fbb" -} diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 7beb0fe0e559..200017c25013 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -48,10 +48,7 @@ impl TeeProofGenerationDal<'_, '_> { proofs.l1_batch_number FROM tee_proof_generation_details AS proofs - JOIN - tee_verifier_input_producer_jobs AS inputs - ON - proofs.l1_batch_number = inputs.l1_batch_number + JOIN tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number WHERE inputs.status = 'Successful' AND ( @@ -119,12 +116,12 @@ impl TeeProofGenerationDal<'_, '_> { pub async fn insert_tee_proof_generation_details(&mut self, block_number: L1BatchNumber) { sqlx::query!( r#" - INSERT INTO - tee_proof_generation_details (l1_batch_number, status, created_at, updated_at) - VALUES - ($1, 'ready_to_be_proven', NOW(), NOW()) - ON CONFLICT (l1_batch_number) DO NOTHING - "#, + INSERT INTO + tee_proof_generation_details (l1_batch_number, status, created_at, updated_at) + VALUES + ($1, 'ready_to_be_proven', NOW(), NOW()) + ON CONFLICT (l1_batch_number) DO NOTHING + "#, i64::from(block_number.0), ) .execute(self.storage.conn()) @@ -163,10 +160,7 @@ impl TeeProofGenerationDal<'_, '_> { proofs.l1_batch_number FROM tee_proof_generation_details AS proofs - JOIN - tee_verifier_input_producer_jobs AS inputs - ON - proofs.l1_batch_number = inputs.l1_batch_number + JOIN 
tee_verifier_input_producer_jobs AS inputs ON proofs.l1_batch_number = inputs.l1_batch_number WHERE inputs.status = 'Successful' AND proofs.status = 'ready_to_be_proven' From f985d627553940a5ab7f23b89849f3cfb1707e6d Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Tue, 28 May 2024 13:45:20 +0200 Subject: [PATCH 17/42] Simplify unit tests --- checks-config/era.dic | 1 + core/node/proof_data_handler/src/tests.rs | 143 +++++++++++----------- 2 files changed, 73 insertions(+), 71 deletions(-) diff --git a/checks-config/era.dic b/checks-config/era.dic index baf2b512c24c..fe4210b41dc7 100644 --- a/checks-config/era.dic +++ b/checks-config/era.dic @@ -979,3 +979,4 @@ RequestProcessorError map_err tee_proof_generation_data submit_tee_proof +ready_to_be_proven diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index ea969036d0e4..108147cf759c 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -26,7 +26,7 @@ use crate::create_proof_processing_router; // matches the file from the object store #[tokio::test] async fn request_tee_proof_generation_data() { - // prepare sample TEE verifier input + // prepare a sample mocked TEE verifier input let batch_number = L1BatchNumber::from(1); let tvi = TeeVerifierInput::new( @@ -72,49 +72,16 @@ async fn request_tee_proof_generation_data() { let blob_store = ObjectStoreFactory::mock().create_store().await; let object_path = blob_store.put(batch_number, &tvi).await.unwrap(); - // get connection to the SQL db + // get connection to the SQL db and mock the status of the TEE proof generation - let connection_pool = ConnectionPool::test_pool().await; - let mut proof_db_conn = connection_pool.connection().await.unwrap(); - let mut proof_dal = proof_db_conn.tee_proof_generation_dal(); - let mut input_db_conn = connection_pool.connection().await.unwrap(); - let mut input_producer_dal = input_db_conn.tee_verifier_input_producer_dal(); - - // 
there should not be any batches awaiting proof in the db yet - - let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await; - assert!(oldest_batch_number.is_none()); - - // mock SQL table with relevant information about the status of the TEE verifier input - - input_producer_dal - .create_tee_verifier_input_producer_job(batch_number) - .await - .expect("Failed to create tee_verifier_input_producer_job"); - - // pretend that the TEE verifier input file was fetched successfully - - input_producer_dal - .mark_job_as_successful(batch_number, Instant::now(), &object_path) - .await - .expect("Failed to mark tee_verifier_input_producer_job job as successful"); - - // mock SQL table with relevant information about the status of TEE proof generation - - proof_dal - .insert_tee_proof_generation_details(batch_number) - .await; - - // now, there should be one batch in the database awaiting proof - - let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await.unwrap(); - assert_eq!(oldest_batch_number, batch_number); + let db_conn_pool = ConnectionPool::test_pool().await; + mock_tee_batch_status(db_conn_pool.clone(), batch_number, &object_path).await; // test the /tee_proof_generation_data endpoint; it should return the batch from the object store let app = create_proof_processing_router( blob_store, - connection_pool.clone(), + db_conn_pool, ProofDataHandlerConfig { http_port: 1337, proof_generation_timeout_in_secs: 10, @@ -133,7 +100,9 @@ async fn request_tee_proof_generation_data() { ) .await .unwrap(); + assert_eq!(response.status(), StatusCode::OK); + let body = hyper::body::to_bytes(response.into_body()).await.unwrap(); let json: serde_json::Value = serde_json::from_slice(&body).unwrap(); let json = json @@ -141,38 +110,83 @@ async fn request_tee_proof_generation_data() { .expect("Unexpected response format") .clone(); let deserialized: TeeVerifierInput = serde_json::from_value(json).unwrap(); + assert_eq!(tvi, deserialized); } -// Test the 
/submit_tee_proof endpoint +// Test /submit_tee_proof endpoint using a mocked TEE proof and verify response and db state #[tokio::test] async fn submit_tee_proof() { let blob_store = ObjectStoreFactory::mock().create_store().await; - let connection_pool = ConnectionPool::test_pool().await; - let connection_clone = connection_pool.clone(); + let db_conn_pool = ConnectionPool::test_pool().await; + let object_path = "mocked_object_path"; + let batch_number = L1BatchNumber::from(1); + + mock_tee_batch_status(db_conn_pool.clone(), batch_number, object_path).await; + + // send a request to the /submit_tee_proof endpoint, using a mocked TEE proof + + let tee_proof_request_str = r#"{ + "Proof": { + "signature": [ 0, 1, 2, 3, 4 ], + "pubkey": [ 5, 6, 7, 8, 9 ], + "attestation": [ 10, 11, 12, 13, 14 ] + } + }"#; + let tee_proof_request = + serde_json::from_str::(tee_proof_request_str).unwrap(); + let req_body = Body::from(serde_json::to_vec(&tee_proof_request).unwrap()); + let uri = format!("/submit_tee_proof/{}", batch_number.0); let app = create_proof_processing_router( blob_store, - connection_pool, + db_conn_pool.clone(), ProofDataHandlerConfig { http_port: 1337, proof_generation_timeout_in_secs: 10, }, L1BatchCommitmentMode::Rollup, ); + let response = app + .oneshot( + Request::builder() + .method(Method::POST) + .uri(uri) + .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) + .body(req_body) + .unwrap(), + ) + .await + .unwrap(); - let tee_proof_request_str = r#"{ - "Proof": { - "signature": [ 0, 1, 2, 3, 4 ], - "pubkey": [ 5, 6, 7, 8, 9 ], - "attestation": [ 10, 11, 12, 13, 14 ] - } - }"#; - let batch_number = L1BatchNumber::from(1); - let mut proof_db_conn = connection_clone.connection().await.unwrap(); + assert_eq!(response.status(), StatusCode::OK); + + // there should not be any batches awaiting proof in the db anymore + + let mut proof_db_conn = db_conn_pool.connection().await.unwrap(); + let oldest_batch_number = proof_db_conn + 
.tee_proof_generation_dal() + .get_oldest_unpicked_batch() + .await; + + assert!(oldest_batch_number.is_none()); +} + +// Mock SQL db with information about the status of the TEE proof generation +async fn mock_tee_batch_status( + db_conn_pool: ConnectionPool, + batch_number: L1BatchNumber, + object_path: &str, +) { + let mut proof_db_conn = db_conn_pool.connection().await.unwrap(); let mut proof_dal = proof_db_conn.tee_proof_generation_dal(); - let mut input_db_conn = connection_clone.connection().await.unwrap(); + let mut input_db_conn = db_conn_pool.connection().await.unwrap(); let mut input_producer_dal = input_db_conn.tee_verifier_input_producer_dal(); + // there should not be any batches awaiting proof in the db yet + + let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await; + assert!(oldest_batch_number.is_none()); + // mock SQL table with relevant information about the status of the TEE verifier input input_producer_dal @@ -180,34 +194,21 @@ async fn submit_tee_proof() { .await .expect("Failed to create tee_verifier_input_producer_job"); - // pretend that the TEE verifier input file was fetched successfully + // pretend that the TEE verifier input blob file was fetched successfully - let object_path = "mocked_object_path"; input_producer_dal .mark_job_as_successful(batch_number, Instant::now(), object_path) .await .expect("Failed to mark tee_verifier_input_producer_job job as successful"); - // mock SQL table with relevant information about the status of TEE proof generation + // mock SQL table with relevant information about the status of TEE proof generation ('ready_to_be_proven') proof_dal .insert_tee_proof_generation_details(batch_number) .await; - let tee_proof_request = - serde_json::from_str::(tee_proof_request_str).unwrap(); - let req_body = Body::from(serde_json::to_vec(&tee_proof_request).unwrap()); - let uri = format!("/submit_tee_proof/{}", batch_number.0); - let response = app - .oneshot( - Request::builder() - 
.method(Method::POST) - .uri(uri) - .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) - .body(req_body) - .unwrap(), - ) - .await - .unwrap(); - assert_eq!(response.status(), StatusCode::OK); + // now, there should be one batch in the db awaiting proof + + let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await.unwrap(); + assert_eq!(oldest_batch_number, batch_number); } From c0a8d3446a49ac9f65a99a87ca450c1b377e4417 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Fri, 31 May 2024 17:14:35 +0200 Subject: [PATCH 18/42] Rename /tee_proof_generation_data endpoint to /tee_proof_inputs As suggested in Thomas' PR review. --- checks-config/era.dic | 2 +- core/node/proof_data_handler/src/lib.rs | 2 +- core/node/proof_data_handler/src/tests.rs | 10 +++++----- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/checks-config/era.dic b/checks-config/era.dic index fe4210b41dc7..03cbd6b91d6f 100644 --- a/checks-config/era.dic +++ b/checks-config/era.dic @@ -977,6 +977,6 @@ TeeRequestProcessor l1_batch_number RequestProcessorError map_err -tee_proof_generation_data +tee_proof_inputs submit_tee_proof ready_to_be_proven diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index 5de1f2a93c38..7ce3fdcd6e32 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -83,7 +83,7 @@ fn create_proof_processing_router( ), ) .route( - "/tee_proof_generation_data", + "/tee_proof_inputs", post( move |payload: Json| async move { get_tee_proof_gen_processor diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 108147cf759c..af213f199836 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -18,14 +18,14 @@ use zksync_types::{commitment::L1BatchCommitmentMode, L1BatchNumber, H256}; use crate::create_proof_processing_router; -// Test the /tee_proof_generation_data 
endpoint by: +// Test the /tee_proof_inputs endpoint by: // 1. Mocking an object store with a single batch blob containing TEE verifier input // 2. Populating the SQL db with relevant information about the status of the TEE verifier input and // TEE proof generation -// 3. Sending a request to the /tee_proof_generation_data endpoint and asserting that the response +// 3. Sending a request to the /tee_proof_inputs endpoint and asserting that the response // matches the file from the object store #[tokio::test] -async fn request_tee_proof_generation_data() { +async fn request_tee_proof_inputs() { // prepare a sample mocked TEE verifier input let batch_number = L1BatchNumber::from(1); @@ -77,7 +77,7 @@ async fn request_tee_proof_generation_data() { let db_conn_pool = ConnectionPool::test_pool().await; mock_tee_batch_status(db_conn_pool.clone(), batch_number, &object_path).await; - // test the /tee_proof_generation_data endpoint; it should return the batch from the object store + // test the /tee_proof_inputs endpoint; it should return the batch from the object store let app = create_proof_processing_router( blob_store, @@ -93,7 +93,7 @@ async fn request_tee_proof_generation_data() { .oneshot( Request::builder() .method(Method::POST) - .uri("/tee_proof_generation_data") + .uri("/tee_proof_inputs") .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) .body(req_body) .unwrap(), From 8b83be4fedcd3510d780336873ea8cb4e616c859 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Fri, 31 May 2024 17:22:17 +0200 Subject: [PATCH 19/42] Rename `attestation` to `proof` As suggested in Thomas' PR review. 
--- ...9a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json} | 4 ++-- ...0523085604_add_tee_proof_generation_details_table.up.sql | 2 +- core/lib/dal/src/tee_proof_generation_dal.rs | 6 +++--- core/lib/prover_interface/src/outputs.rs | 2 +- core/lib/prover_interface/tests/job_serialization.rs | 4 ++-- core/node/proof_data_handler/src/tee_request_processor.rs | 4 ++-- core/node/proof_data_handler/src/tests.rs | 2 +- 7 files changed, 12 insertions(+), 12 deletions(-) rename core/lib/dal/.sqlx/{query-a314aadb34096e35924432d43ee8e032ac0c59cb9644a0aad4ed88f8ee544b93.json => query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json} (63%) diff --git a/core/lib/dal/.sqlx/query-a314aadb34096e35924432d43ee8e032ac0c59cb9644a0aad4ed88f8ee544b93.json b/core/lib/dal/.sqlx/query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json similarity index 63% rename from core/lib/dal/.sqlx/query-a314aadb34096e35924432d43ee8e032ac0c59cb9644a0aad4ed88f8ee544b93.json rename to core/lib/dal/.sqlx/query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json index b42dae8f1a94..8e210aade885 100644 --- a/core/lib/dal/.sqlx/query-a314aadb34096e35924432d43ee8e032ac0c59cb9644a0aad4ed88f8ee544b93.json +++ b/core/lib/dal/.sqlx/query-727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'generated',\n signature = $1,\n pubkey = $2,\n attestation = $3,\n tee_type = $4,\n updated_at = NOW()\n WHERE\n l1_batch_number = $5\n ", + "query": "\n UPDATE tee_proof_generation_details\n SET\n status = 'generated',\n signature = $1,\n pubkey = $2,\n proof = $3,\n tee_type = $4,\n updated_at = NOW()\n WHERE\n l1_batch_number = $5\n ", "describe": { "columns": [], "parameters": { @@ -14,5 +14,5 @@ }, "nullable": [] }, - "hash": "a314aadb34096e35924432d43ee8e032ac0c59cb9644a0aad4ed88f8ee544b93" + "hash": 
"727d4dc6a8fdb39a6c54d4395124f8d103f12e51252c46a210a007e5e600d711" } diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql index e4f136c88e65..50fe4a1430be 100644 --- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql @@ -4,7 +4,7 @@ CREATE TABLE IF NOT EXISTS tee_proof_generation_details status TEXT NOT NULL, signature BYTEA, pubkey BYTEA, - attestation BYTEA, + proof BYTEA, tee_type TEXT, created_at TIMESTAMP NOT NULL, updated_at TIMESTAMP NOT NULL, diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 200017c25013..d7ee57046cf5 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -83,7 +83,7 @@ impl TeeProofGenerationDal<'_, '_> { block_number: L1BatchNumber, signature: &[u8], pubkey: &[u8], - attestation: &[u8], + proof: &[u8], tee_type: TeeType, ) -> Result<(), SqlxError> { sqlx::query!( @@ -93,7 +93,7 @@ impl TeeProofGenerationDal<'_, '_> { status = 'generated', signature = $1, pubkey = $2, - attestation = $3, + proof = $3, tee_type = $4, updated_at = NOW() WHERE @@ -101,7 +101,7 @@ impl TeeProofGenerationDal<'_, '_> { "#, signature, pubkey, - attestation, + proof, tee_type.to_string(), i64::from(block_number.0) ) diff --git a/core/lib/prover_interface/src/outputs.rs b/core/lib/prover_interface/src/outputs.rs index f11f9b466900..053cb02a1878 100644 --- a/core/lib/prover_interface/src/outputs.rs +++ b/core/lib/prover_interface/src/outputs.rs @@ -17,7 +17,7 @@ pub struct L1BatchProofForL1 { pub struct L1BatchTeeProofForL1 { pub signature: Vec, pub pubkey: Vec, - pub attestation: Vec, + pub proof: Vec, } impl fmt::Debug for L1BatchProofForL1 { diff --git 
a/core/lib/prover_interface/tests/job_serialization.rs b/core/lib/prover_interface/tests/job_serialization.rs index 46f92def0b56..60d8299b88a7 100644 --- a/core/lib/prover_interface/tests/job_serialization.rs +++ b/core/lib/prover_interface/tests/job_serialization.rs @@ -163,14 +163,14 @@ fn test_tee_proof_request_serialization() { "Proof": { "signature": [ 0, 1, 2, 3, 4 ], "pubkey": [ 5, 6, 7, 8, 9 ], - "attestation": [ 10, 11, 12, 13, 14 ] + "proof": [ 10, 11, 12, 13, 14 ] } }"#; let tee_proof_result = serde_json::from_str::(tee_proof_str).unwrap(); let tee_proof_expected = SubmitTeeProofRequest::Proof(Box::new(L1BatchTeeProofForL1 { signature: vec![0, 1, 2, 3, 4], pubkey: vec![5, 6, 7, 8, 9], - attestation: vec![10, 11, 12, 13, 14], + proof: vec![10, 11, 12, 13, 14], })); assert_eq!(tee_proof_result, tee_proof_expected); } diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index 2c5632127c20..1f88618e6610 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -86,8 +86,8 @@ impl TeeRequestProcessor { dal.save_proof_artifacts_metadata( l1_batch_number, &proof.signature, - &proof.signature, - &proof.attestation, + &proof.pubkey, + &proof.proof, TeeType::Sgx, ) .await diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index af213f199836..f8835995fa79 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -130,7 +130,7 @@ async fn submit_tee_proof() { "Proof": { "signature": [ 0, 1, 2, 3, 4 ], "pubkey": [ 5, 6, 7, 8, 9 ], - "attestation": [ 10, 11, 12, 13, 14 ] + "proof": [ 10, 11, 12, 13, 14 ] } }"#; let tee_proof_request = From dc24fb5ddea13938a47f390aca81d7cf62971e1c Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Fri, 31 May 2024 17:58:15 +0200 Subject: [PATCH 20/42] Fix test_proof_request_serialization 
unit test --- core/lib/prover_interface/tests/job_serialization.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/core/lib/prover_interface/tests/job_serialization.rs b/core/lib/prover_interface/tests/job_serialization.rs index 60d8299b88a7..c4330554b416 100644 --- a/core/lib/prover_interface/tests/job_serialization.rs +++ b/core/lib/prover_interface/tests/job_serialization.rs @@ -8,7 +8,7 @@ use zksync_prover_interface::{ inputs::{PrepareBasicCircuitsJob, StorageLogMetadata}, outputs::{L1BatchProofForL1, L1BatchTeeProofForL1}, }; -use zksync_types::L1BatchNumber; +use zksync_types::{protocol_version::ProtocolSemanticVersion, L1BatchNumber, ProtocolVersionId}; /// Tests compatibility of the `PrepareBasicCircuitsJob` serialization to the previously used /// one. @@ -84,6 +84,10 @@ fn test_proof_request_serialization() { let proof = SubmitProofRequest::Proof(Box::new(L1BatchProofForL1 { aggregation_result_coords: [[0; 32]; 4], scheduler_proof: FinalProof::empty(), + protocol_version: ProtocolSemanticVersion { + minor: ProtocolVersionId::Version25, + patch: 10.into(), + }, })); let encoded_obj = serde_json::to_string(&proof).unwrap(); let encoded_json = r#"{ @@ -141,10 +145,12 @@ fn test_proof_request_serialization() { "y": [ 1, 0, 0, 0 ], "infinity": true } - } + }, + "protocol_version": "0.25.10" } }"#; let decoded_obj: SubmitProofRequest = serde_json::from_str(&encoded_obj).unwrap(); + println!("{}", encoded_obj); let decoded_json: SubmitProofRequest = serde_json::from_str(encoded_json).unwrap(); match (decoded_obj, decoded_json) { (SubmitProofRequest::Proof(decoded_obj), SubmitProofRequest::Proof(decoded_json)) => { From 143abaff73127346f524cb4c3fd3ca0f195352ca Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 3 Jun 2024 21:10:01 +0200 Subject: [PATCH 21/42] Add tee_attestations SQL table --- ...16618914d6dedb39a9a40d36484741e8b01f4.json | 15 +++++ ...ee_proof_generation_details_table.down.sql | 1 + 
..._tee_proof_generation_details_table.up.sql | 9 ++- core/lib/dal/src/tee_proof_generation_dal.rs | 24 ++++++++ core/lib/prover_interface/src/api.rs | 31 ++++++++--- core/node/proof_data_handler/src/lib.rs | 15 ++++- .../src/tee_request_processor.rs | 19 ++++++- core/node/proof_data_handler/src/tests.rs | 55 +++++++++++++++---- 8 files changed, 143 insertions(+), 26 deletions(-) create mode 100644 core/lib/dal/.sqlx/query-37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4.json diff --git a/core/lib/dal/.sqlx/query-37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4.json b/core/lib/dal/.sqlx/query-37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4.json new file mode 100644 index 000000000000..a39a1bdb07b8 --- /dev/null +++ b/core/lib/dal/.sqlx/query-37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n tee_attestations (pubkey, attestation)\n VALUES\n ($1, $2)\n ON CONFLICT (pubkey) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Bytea", + "Bytea" + ] + }, + "nullable": [] + }, + "hash": "37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4" +} diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql index 2291f75592d5..5b4f9958a8ea 100644 --- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.down.sql @@ -1,3 +1,4 @@ +DROP TABLE IF EXISTS tee_attestations; DROP TABLE IF EXISTS tee_proof_generation_details; DROP INDEX IF EXISTS idx_tee_proof_generation_details_status_prover_taken_at; diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql 
b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql index 50fe4a1430be..3a249c44346c 100644 --- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql @@ -1,9 +1,15 @@ +CREATE TABLE IF NOT EXISTS tee_attestations +( + pubkey BYTEA PRIMARY KEY, + attestation BYTEA +); + CREATE TABLE IF NOT EXISTS tee_proof_generation_details ( l1_batch_number BIGINT PRIMARY KEY REFERENCES tee_verifier_input_producer_jobs (l1_batch_number) ON DELETE CASCADE, status TEXT NOT NULL, signature BYTEA, - pubkey BYTEA, + pubkey BYTEA REFERENCES tee_attestations (pubkey) ON DELETE SET NULL, proof BYTEA, tee_type TEXT, created_at TIMESTAMP NOT NULL, @@ -11,7 +17,6 @@ CREATE TABLE IF NOT EXISTS tee_proof_generation_details prover_taken_at TIMESTAMP ); - CREATE INDEX IF NOT EXISTS idx_tee_proof_generation_details_status_prover_taken_at ON tee_proof_generation_details (prover_taken_at) WHERE status = 'picked_by_prover'; diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index d7ee57046cf5..028a34ff668c 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -177,4 +177,28 @@ impl TeeProofGenerationDal<'_, '_> { result } + + pub async fn save_attestation( + &mut self, + pubkey: &[u8], + attestation: &[u8], + ) -> Result<(), SqlxError> { + sqlx::query!( + r#" + INSERT INTO + tee_attestations (pubkey, attestation) + VALUES + ($1, $2) + ON CONFLICT (pubkey) DO NOTHING + "#, + pubkey, + attestation + ) + .execute(self.storage.conn()) + .await? 
+ .rows_affected() + .eq(&1) + .then_some(()) + .ok_or(sqlx::Error::RowNotFound) + } } diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index bc930e9ff83c..f66b59c099c7 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -13,6 +13,8 @@ use crate::{ outputs::{L1BatchProofForL1, L1BatchTeeProofForL1}, }; +// Structs for holding data returned in HTTP responses + #[derive(Debug, Serialize, Deserialize)] pub struct ProofGenerationData { pub l1_batch_number: L1BatchNumber, @@ -22,11 +24,6 @@ pub struct ProofGenerationData { pub eip_4844_blobs: Eip4844Blobs, } -#[derive(Debug, Serialize, Deserialize)] -pub struct ProofGenerationDataRequest {} - -pub type TeeProofGenerationDataRequest = ProofGenerationDataRequest; - #[derive(Debug, Serialize, Deserialize)] pub enum GenericProofGenerationDataResponse { Success(Option>), @@ -35,6 +32,22 @@ pub enum GenericProofGenerationDataResponse { pub type ProofGenerationDataResponse = GenericProofGenerationDataResponse; +#[derive(Debug, Serialize, Deserialize)] +pub enum SimpleResponse { + Success, + Error(String), +} + +pub type SubmitProofResponse = SimpleResponse; +pub type RegisterTeeAttestationResponse = SimpleResponse; + +// Structs to hold data necessary for making HTTP requests + +#[derive(Debug, Serialize, Deserialize)] +pub struct ProofGenerationDataRequest {} + +pub type TeeProofGenerationDataRequest = ProofGenerationDataRequest; + #[derive(Debug, PartialEq, Serialize, Deserialize)] pub enum GenericSubmitProofRequest { Proof(Box), @@ -45,8 +58,8 @@ pub enum GenericSubmitProofRequest { pub type SubmitProofRequest = GenericSubmitProofRequest; pub type SubmitTeeProofRequest = GenericSubmitProofRequest; -#[derive(Debug, Serialize, Deserialize)] -pub enum SubmitProofResponse { - Success, - Error(String), +#[derive(Debug, PartialEq, Serialize, Deserialize)] +pub struct RegisterTeeAttestationRequest { + pub attestation: Vec, + pub pubkey: Vec, } diff 
--git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index 7ce3fdcd6e32..0f31979607a6 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -9,8 +9,8 @@ use zksync_config::configs::ProofDataHandlerConfig; use zksync_dal::{ConnectionPool, Core}; use zksync_object_store::ObjectStore; use zksync_prover_interface::api::{ - ProofGenerationDataRequest, SubmitProofRequest, SubmitTeeProofRequest, - TeeProofGenerationDataRequest, + ProofGenerationDataRequest, RegisterTeeAttestationRequest, SubmitProofRequest, + SubmitTeeProofRequest, TeeProofGenerationDataRequest, }; use zksync_types::commitment::L1BatchCommitmentMode; @@ -55,6 +55,7 @@ fn create_proof_processing_router( let get_tee_proof_gen_processor = TeeRequestProcessor::new(blob_store.clone(), connection_pool.clone(), config.clone()); let submit_tee_proof_processor = get_tee_proof_gen_processor.clone(); + let register_tee_attestation_processor = get_tee_proof_gen_processor.clone(); let get_proof_gen_processor = RequestProcessor::new(blob_store, connection_pool, config, commitment_mode); let submit_proof_processor = get_proof_gen_processor.clone(); @@ -102,4 +103,14 @@ fn create_proof_processing_router( }, ), ) + .route( + "/register_tee_attestation", + post( + move |payload: Json| async move { + register_tee_attestation_processor + .register_tee_attestation(payload) + .await + }, + ), + ) } diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index 1f88618e6610..f870c793973e 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -5,7 +5,8 @@ use zksync_config::configs::ProofDataHandlerConfig; use zksync_dal::{tee_proof_generation_dal::TeeType, ConnectionPool, Core, CoreDal}; use zksync_object_store::ObjectStore; use zksync_prover_interface::api::{ - 
GenericProofGenerationDataResponse, SubmitProofResponse, SubmitTeeProofRequest, + GenericProofGenerationDataResponse, RegisterTeeAttestationRequest, + RegisterTeeAttestationResponse, SubmitProofResponse, SubmitTeeProofRequest, TeeProofGenerationDataRequest, }; use zksync_tee_verifier::TeeVerifierInput; @@ -102,4 +103,20 @@ impl TeeRequestProcessor { Ok(Json(SubmitProofResponse::Success)) } + + pub(crate) async fn register_tee_attestation( + &self, + Json(payload): Json, + ) -> Result, RequestProcessorError> { + tracing::info!("Received attestation: {:?}", payload); + + let mut connection = self.pool.connection().await.unwrap(); + let mut dal = connection.tee_proof_generation_dal(); + + dal.save_attestation(&payload.pubkey, &payload.attestation) + .await + .map_err(RequestProcessorError::Sqlx)?; + + Ok(Json(RegisterTeeAttestationResponse::Success)) + } } diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index f8835995fa79..c7c080da6819 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -3,6 +3,8 @@ use std::time::Instant; use axum::{ body::Body, http::{self, Method, Request, StatusCode}, + response::Response, + Router, }; use multivm::interface::{L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode}; use serde_json::json; @@ -135,7 +137,6 @@ async fn submit_tee_proof() { }"#; let tee_proof_request = serde_json::from_str::(tee_proof_request_str).unwrap(); - let req_body = Body::from(serde_json::to_vec(&tee_proof_request).unwrap()); let uri = format!("/submit_tee_proof/{}", batch_number.0); let app = create_proof_processing_router( blob_store, @@ -146,18 +147,29 @@ async fn submit_tee_proof() { }, L1BatchCommitmentMode::Rollup, ); - let response = app - .oneshot( - Request::builder() - .method(Method::POST) - .uri(uri) - .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) - .body(req_body) - .unwrap(), - ) - .await - .unwrap(); + // should fail as we 
haven't saved the attestation for the pubkey yet + + let response = send_submit_tee_proof_request(&app, &uri, &tee_proof_request).await; + assert_eq!(response.status(), StatusCode::BAD_GATEWAY); + + // save the attestation for the pubkey + + if let SubmitTeeProofRequest::Proof(ref proof) = tee_proof_request { + let attestation = [15, 16, 17, 18, 19]; + let mut proof_dal = db_conn_pool.connection().await.unwrap(); + proof_dal + .tee_proof_generation_dal() + .save_attestation(&proof.pubkey, &attestation) + .await + .expect("Failed to save attestation"); + } else { + panic!("Expected Proof, got {:?}", tee_proof_request); + } + + // send the same request again; now it should succeed + + let response = send_submit_tee_proof_request(&app, &uri, &tee_proof_request).await; assert_eq!(response.status(), StatusCode::OK); // there should not be any batches awaiting proof in the db anymore @@ -212,3 +224,22 @@ async fn mock_tee_batch_status( let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await.unwrap(); assert_eq!(oldest_batch_number, batch_number); } + +async fn send_submit_tee_proof_request( + app: &Router, + uri: &str, + tee_proof_request: &SubmitTeeProofRequest, +) -> Response { + let req_body = Body::from(serde_json::to_vec(tee_proof_request).unwrap()); + app.clone() + .oneshot( + Request::builder() + .method(Method::POST) + .uri(uri) + .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) + .body(req_body) + .unwrap(), + ) + .await + .unwrap() +} From a445762dd6b358103fa8b8061ddfa772d707032f Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Tue, 4 Jun 2024 13:08:09 +0200 Subject: [PATCH 22/42] Add valid_until column to the attestations table --- Cargo.lock | 1 + ...6fa16618914d6dedb39a9a40d36484741e8b01f4.json | 15 --------------- ...66db1899ba8ec8a3ec7c16542c909cee3e9c7393.json | 16 ++++++++++++++++ ...add_tee_proof_generation_details_table.up.sql | 3 ++- core/lib/dal/src/tee_proof_generation_dal.rs | 9 ++++++--- 
core/lib/prover_interface/src/api.rs | 2 ++ core/node/proof_data_handler/Cargo.toml | 1 + .../src/tee_request_processor.rs | 2 +- core/node/proof_data_handler/src/tests.rs | 8 +++++--- 9 files changed, 34 insertions(+), 23 deletions(-) delete mode 100644 core/lib/dal/.sqlx/query-37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4.json create mode 100644 core/lib/dal/.sqlx/query-711bca136767169b46e7e58966db1899ba8ec8a3ec7c16542c909cee3e9c7393.json diff --git a/Cargo.lock b/Cargo.lock index ae4bad1bd02d..81cd73375bfc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9086,6 +9086,7 @@ version = "0.1.0" dependencies = [ "anyhow", "axum", + "chrono", "http-body-util", "hyper", "mime", diff --git a/core/lib/dal/.sqlx/query-37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4.json b/core/lib/dal/.sqlx/query-37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4.json deleted file mode 100644 index a39a1bdb07b8..000000000000 --- a/core/lib/dal/.sqlx/query-37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n tee_attestations (pubkey, attestation)\n VALUES\n ($1, $2)\n ON CONFLICT (pubkey) DO NOTHING\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Bytea", - "Bytea" - ] - }, - "nullable": [] - }, - "hash": "37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4" -} diff --git a/core/lib/dal/.sqlx/query-711bca136767169b46e7e58966db1899ba8ec8a3ec7c16542c909cee3e9c7393.json b/core/lib/dal/.sqlx/query-711bca136767169b46e7e58966db1899ba8ec8a3ec7c16542c909cee3e9c7393.json new file mode 100644 index 000000000000..a6dbff262f0e --- /dev/null +++ b/core/lib/dal/.sqlx/query-711bca136767169b46e7e58966db1899ba8ec8a3ec7c16542c909cee3e9c7393.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n tee_attestations (pubkey, attestation, valid_until)\n VALUES\n ($1, $2, $3)\n ON CONFLICT (pubkey) DO 
NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Bytea", + "Bytea", + "Timestamp" + ] + }, + "nullable": [] + }, + "hash": "711bca136767169b46e7e58966db1899ba8ec8a3ec7c16542c909cee3e9c7393" +} diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql index 3a249c44346c..5449ac2d1012 100644 --- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql @@ -1,7 +1,8 @@ CREATE TABLE IF NOT EXISTS tee_attestations ( pubkey BYTEA PRIMARY KEY, - attestation BYTEA + attestation BYTEA, + valid_until TIMESTAMP NOT NULL ); CREATE TABLE IF NOT EXISTS tee_proof_generation_details diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 028a34ff668c..d2f36d2d9a30 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -1,5 +1,6 @@ use std::time::Duration; +use chrono::{DateTime, Utc}; use strum::{Display, EnumString}; use zksync_db_connection::{connection::Connection, utils::pg_interval_from_duration}; use zksync_types::L1BatchNumber; @@ -182,17 +183,19 @@ impl TeeProofGenerationDal<'_, '_> { &mut self, pubkey: &[u8], attestation: &[u8], + valid_until: &DateTime, ) -> Result<(), SqlxError> { sqlx::query!( r#" INSERT INTO - tee_attestations (pubkey, attestation) + tee_attestations (pubkey, attestation, valid_until) VALUES - ($1, $2) + ($1, $2, $3) ON CONFLICT (pubkey) DO NOTHING "#, pubkey, - attestation + attestation, + valid_until.naive_utc() ) .execute(self.storage.conn()) .await? 
diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index f66b59c099c7..266ba9d4302b 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -1,6 +1,7 @@ //! Prover and server subsystems communicate via the API. //! This module defines the types used in the API. +use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use zksync_types::{ basic_fri_types::Eip4844Blobs, @@ -62,4 +63,5 @@ pub type SubmitTeeProofRequest = GenericSubmitProofRequest pub struct RegisterTeeAttestationRequest { pub attestation: Vec, pub pubkey: Vec, + pub valid_until: DateTime, } diff --git a/core/node/proof_data_handler/Cargo.toml b/core/node/proof_data_handler/Cargo.toml index 008560e51efd..0bd44c4ad165 100644 --- a/core/node/proof_data_handler/Cargo.toml +++ b/core/node/proof_data_handler/Cargo.toml @@ -27,6 +27,7 @@ tracing.workspace = true http-body-util = "0.1.0" hyper = { version = "0.14", features = ["full"] } mime = "0.3.16" +chrono.workspace = true multivm.workspace = true serde_json.workspace = true tower.workspace = true diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index f870c793973e..139e46db5afb 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -113,7 +113,7 @@ impl TeeRequestProcessor { let mut connection = self.pool.connection().await.unwrap(); let mut dal = connection.tee_proof_generation_dal(); - dal.save_attestation(&payload.pubkey, &payload.attestation) + dal.save_attestation(&payload.pubkey, &payload.attestation, &payload.valid_until) .await .map_err(RequestProcessorError::Sqlx)?; diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index c7c080da6819..0c1a760041ed 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ 
-6,6 +6,7 @@ use axum::{ response::Response, Router, }; +use chrono::{Duration, Utc}; use multivm::interface::{L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode}; use serde_json::json; use tower::ServiceExt; @@ -148,7 +149,7 @@ async fn submit_tee_proof() { L1BatchCommitmentMode::Rollup, ); - // should fail as we haven't saved the attestation for the pubkey yet + // this should fail because we haven't saved the attestation for the pubkey yet let response = send_submit_tee_proof_request(&app, &uri, &tee_proof_request).await; assert_eq!(response.status(), StatusCode::BAD_GATEWAY); @@ -156,18 +157,19 @@ async fn submit_tee_proof() { // save the attestation for the pubkey if let SubmitTeeProofRequest::Proof(ref proof) = tee_proof_request { + let valid_until = Utc::now() + Duration::days(42); let attestation = [15, 16, 17, 18, 19]; let mut proof_dal = db_conn_pool.connection().await.unwrap(); proof_dal .tee_proof_generation_dal() - .save_attestation(&proof.pubkey, &attestation) + .save_attestation(&proof.pubkey, &attestation, &valid_until) .await .expect("Failed to save attestation"); } else { panic!("Expected Proof, got {:?}", tee_proof_request); } - // send the same request again; now it should succeed + // resend the same request; this time, it should be successful. 
let response = send_submit_tee_proof_request(&app, &uri, &tee_proof_request).await; assert_eq!(response.status(), StatusCode::OK); From 0bfe0c1961042f9c4d63005d8426368a495e9356 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Wed, 5 Jun 2024 12:14:53 +0200 Subject: [PATCH 23/42] Get rid of mime dependency --- Cargo.lock | 1 - core/node/proof_data_handler/Cargo.toml | 1 - core/node/proof_data_handler/src/tests.rs | 4 ++-- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 81cd73375bfc..39260f386d87 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9089,7 +9089,6 @@ dependencies = [ "chrono", "http-body-util", "hyper", - "mime", "multivm", "serde_json", "tokio", diff --git a/core/node/proof_data_handler/Cargo.toml b/core/node/proof_data_handler/Cargo.toml index 0bd44c4ad165..d2649e7a8a50 100644 --- a/core/node/proof_data_handler/Cargo.toml +++ b/core/node/proof_data_handler/Cargo.toml @@ -26,7 +26,6 @@ tracing.workspace = true [dev-dependencies] http-body-util = "0.1.0" hyper = { version = "0.14", features = ["full"] } -mime = "0.3.16" chrono.workspace = true multivm.workspace = true serde_json.workspace = true diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 0c1a760041ed..f9daa1fae5be 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -97,7 +97,7 @@ async fn request_tee_proof_inputs() { Request::builder() .method(Method::POST) .uri("/tee_proof_inputs") - .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) + .header(http::header::CONTENT_TYPE, "application/json") .body(req_body) .unwrap(), ) @@ -238,7 +238,7 @@ async fn send_submit_tee_proof_request( Request::builder() .method(Method::POST) .uri(uri) - .header(http::header::CONTENT_TYPE, mime::APPLICATION_JSON.as_ref()) + .header(http::header::CONTENT_TYPE, "application/json") .body(req_body) .unwrap(), ) From d07b649ef23a2b2cc6740f1a90b77d5ac5cbda84 Mon 
Sep 17 00:00:00 2001 From: Patrick Beza Date: Wed, 5 Jun 2024 13:35:34 +0200 Subject: [PATCH 24/42] Remove println! from the production code --- .../proof_data_handler/src/tee_request_processor.rs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index 139e46db5afb..d998eaae5371 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -72,17 +72,16 @@ impl TeeRequestProcessor { Path(l1_batch_number): Path, Json(payload): Json, ) -> Result, RequestProcessorError> { - tracing::info!("Received proof for block number: {:?}", l1_batch_number); - let l1_batch_number = L1BatchNumber(l1_batch_number); let mut connection = self.pool.connection().await.unwrap(); let mut dal = connection.tee_proof_generation_dal(); match payload { SubmitTeeProofRequest::Proof(proof) => { - println!( + tracing::info!( "Received proof {:?} for block number: {:?}", - proof, l1_batch_number + proof, + l1_batch_number ); dal.save_proof_artifacts_metadata( l1_batch_number, @@ -95,6 +94,10 @@ impl TeeRequestProcessor { .map_err(RequestProcessorError::Sqlx)?; } SubmitTeeProofRequest::SkippedProofGeneration => { + tracing::info!( + "Received request to skip proof generation for block number: {:?}", + l1_batch_number + ); dal.mark_proof_generation_job_as_skipped(l1_batch_number) .await .map_err(RequestProcessorError::Sqlx)?; From b893618596ce538cab969566e8acc19293979ce7 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Wed, 5 Jun 2024 13:38:04 +0200 Subject: [PATCH 25/42] Remove println! 
from tests --- core/lib/prover_interface/tests/job_serialization.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/core/lib/prover_interface/tests/job_serialization.rs b/core/lib/prover_interface/tests/job_serialization.rs index c4330554b416..ec278dddbe44 100644 --- a/core/lib/prover_interface/tests/job_serialization.rs +++ b/core/lib/prover_interface/tests/job_serialization.rs @@ -150,7 +150,6 @@ fn test_proof_request_serialization() { } }"#; let decoded_obj: SubmitProofRequest = serde_json::from_str(&encoded_obj).unwrap(); - println!("{}", encoded_obj); let decoded_json: SubmitProofRequest = serde_json::from_str(encoded_json).unwrap(); match (decoded_obj, decoded_json) { (SubmitProofRequest::Proof(decoded_obj), SubmitProofRequest::Proof(decoded_json)) => { From 0d21e6e9c99fd899f7ceb603301f623d16897f43 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Wed, 5 Jun 2024 17:58:24 +0200 Subject: [PATCH 26/42] Propagate errors instead of using unwrap() --- .../src/tee_request_processor.rs | 21 +++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index d998eaae5371..df12893aaedc 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use axum::{extract::Path, Json}; use zksync_config::configs::ProofDataHandlerConfig; -use zksync_dal::{tee_proof_generation_dal::TeeType, ConnectionPool, Core, CoreDal}; +use zksync_dal::{tee_proof_generation_dal::TeeType, ConnectionPool, Core, CoreDal, SqlxError}; use zksync_object_store::ObjectStore; use zksync_prover_interface::api::{ GenericProofGenerationDataResponse, RegisterTeeAttestationRequest, @@ -42,15 +42,16 @@ impl TeeRequestProcessor { ) -> Result, RequestProcessorError> { tracing::info!("Received request for proof generation data: {:?}", request); - let 
l1_batch_number_result = self + let mut connection = self .pool .connection() .await - .unwrap() + .map_err(|_| RequestProcessorError::Sqlx(SqlxError::PoolClosed))?; + + let l1_batch_number_result = connection .tee_proof_generation_dal() .get_next_block_to_be_proven(self.config.proof_generation_timeout()) .await; - let l1_batch_number = match l1_batch_number_result { Some(number) => number, None => return Ok(Json(TeeProofGenerationDataResponse::Success(None))), @@ -73,7 +74,11 @@ impl TeeRequestProcessor { Json(payload): Json, ) -> Result, RequestProcessorError> { let l1_batch_number = L1BatchNumber(l1_batch_number); - let mut connection = self.pool.connection().await.unwrap(); + let mut connection = self + .pool + .connection() + .await + .map_err(|_| RequestProcessorError::Sqlx(SqlxError::PoolClosed))?; let mut dal = connection.tee_proof_generation_dal(); match payload { @@ -113,7 +118,11 @@ impl TeeRequestProcessor { ) -> Result, RequestProcessorError> { tracing::info!("Received attestation: {:?}", payload); - let mut connection = self.pool.connection().await.unwrap(); + let mut connection = self + .pool + .connection() + .await + .map_err(|_| RequestProcessorError::Sqlx(SqlxError::PoolClosed))?; let mut dal = connection.tee_proof_generation_dal(); dal.save_attestation(&payload.pubkey, &payload.attestation, &payload.valid_until) From 1277ae1a671260f129507d62d7f8664d0328f535 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Wed, 5 Jun 2024 23:19:59 +0200 Subject: [PATCH 27/42] Replace ObjectSoreFactory with MockObjectStore after rebase --- core/node/proof_data_handler/src/tests.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index f9daa1fae5be..9622bca351cc 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -14,7 +14,7 @@ use zksync_basic_types::U256; use 
zksync_config::configs::ProofDataHandlerConfig; use zksync_contracts::{BaseSystemContracts, SystemContractCode}; use zksync_dal::{ConnectionPool, CoreDal}; -use zksync_object_store::ObjectStoreFactory; +use zksync_object_store::MockObjectStore; use zksync_prover_interface::{api::SubmitTeeProofRequest, inputs::PrepareBasicCircuitsJob}; use zksync_tee_verifier::TeeVerifierInput; use zksync_types::{commitment::L1BatchCommitmentMode, L1BatchNumber, H256}; @@ -72,7 +72,7 @@ async fn request_tee_proof_inputs() { // populate mocked object store with a single batch blob - let blob_store = ObjectStoreFactory::mock().create_store().await; + let blob_store = MockObjectStore::arc(); let object_path = blob_store.put(batch_number, &tvi).await.unwrap(); // get connection to the SQL db and mock the status of the TEE proof generation @@ -120,7 +120,7 @@ async fn request_tee_proof_inputs() { // Test /submit_tee_proof endpoint using a mocked TEE proof and verify response and db state #[tokio::test] async fn submit_tee_proof() { - let blob_store = ObjectStoreFactory::mock().create_store().await; + let blob_store = MockObjectStore::arc(); let db_conn_pool = ConnectionPool::test_pool().await; let object_path = "mocked_object_path"; let batch_number = L1BatchNumber::from(1); From 3e8013c67c65be97a956db2a35e20a6b6079c247 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Wed, 5 Jun 2024 23:55:34 +0200 Subject: [PATCH 28/42] Remove unused dependency --- Cargo.lock | 83 +++++++------------------ core/node/proof_data_handler/Cargo.toml | 1 - 2 files changed, 24 insertions(+), 60 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 39260f386d87..62a1e81b3fcc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -362,8 +362,8 @@ dependencies = [ "bitflags 1.3.2", "bytes", "futures-util", - "http 0.2.9", - "http-body 0.4.5", + "http", + "http-body", "hyper", "itoa", "matchit", @@ -392,8 +392,8 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 0.2.9", - "http-body 0.4.5", + "http", + 
"http-body", "mime", "rustversion", "tower-layer", @@ -2396,7 +2396,7 @@ dependencies = [ "futures-core", "futures-sink", "gloo-utils", - "http 0.2.9", + "http", "js-sys", "pin-project", "serde", @@ -2555,7 +2555,7 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http 0.2.9", + "http", "indexmap 2.1.0", "slab", "tokio", @@ -2724,17 +2724,6 @@ dependencies = [ "itoa", ] -[[package]] -name = "http" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - [[package]] name = "http-body" version = "0.4.5" @@ -2742,30 +2731,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ "bytes", - "http 0.2.9", - "pin-project-lite", -] - -[[package]] -name = "http-body" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" -dependencies = [ - "bytes", - "http 1.1.0", -] - -[[package]] -name = "http-body-util" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" -dependencies = [ - "bytes", - "futures-core", - "http 1.1.0", - "http-body 1.0.0", + "http", "pin-project-lite", ] @@ -2798,8 +2764,8 @@ dependencies = [ "futures-core", "futures-util", "h2", - "http 0.2.9", - "http-body 0.4.5", + "http", + "http-body", "httparse", "httpdate", "itoa", @@ -2818,7 +2784,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" dependencies = [ "futures-util", - "http 0.2.9", + "http", "hyper", "log", "rustls 0.21.11", @@ -3086,7 +3052,7 @@ dependencies = [ 
"futures-channel", "futures-util", "gloo-net", - "http 0.2.9", + "http", "jsonrpsee-core", "pin-project", "rustls-native-certs 0.7.0", @@ -3168,7 +3134,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5cc7c6d1a2c58f6135810284a390d9f823d0f508db74cd914d8237802de80f98" dependencies = [ "futures-util", - "http 0.2.9", + "http", "hyper", "jsonrpsee-core", "jsonrpsee-types", @@ -3215,7 +3181,7 @@ version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "073c077471e89c4b511fa88b3df9a0f0abdf4a0a2e6683dd2ab36893af87bb2d" dependencies = [ - "http 0.2.9", + "http", "jsonrpsee-client-transport", "jsonrpsee-core", "jsonrpsee-types", @@ -4102,7 +4068,7 @@ checksum = "c7594ec0e11d8e33faf03530a4c49af7064ebba81c1480e01be67d90b356508b" dependencies = [ "async-trait", "bytes", - "http 0.2.9", + "http", "opentelemetry_api", "reqwest", ] @@ -4115,7 +4081,7 @@ checksum = "7e5e5a5c4135864099f3faafbe939eb4d7f9b80ebf68a8448da961b32a7c1275" dependencies = [ "async-trait", "futures-core", - "http 0.2.9", + "http", "opentelemetry-http", "opentelemetry-proto", "opentelemetry-semantic-conventions", @@ -5127,8 +5093,8 @@ dependencies = [ "futures-core", "futures-util", "h2", - "http 0.2.9", - "http-body 0.4.5", + "http", + "http-body", "hyper", "hyper-tls", "ipnet", @@ -6083,7 +6049,7 @@ dependencies = [ "base64 0.13.1", "bytes", "futures 0.3.28", - "http 0.2.9", + "http", "httparse", "log", "rand 0.8.5", @@ -6877,8 +6843,8 @@ dependencies = [ "futures-core", "futures-util", "h2", - "http 0.2.9", - "http-body 0.4.5", + "http", + "http-body", "hyper", "hyper-timeout", "percent-encoding", @@ -6925,8 +6891,8 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "http 0.2.9", - "http-body 0.4.5", + "http", + "http-body", "http-range-header", "httpdate", "iri-string", @@ -8826,7 +8792,7 @@ dependencies = [ "futures 0.3.28", "governor", "hex", - "http 0.2.9", + "http", "itertools 0.10.5", "lru", "multivm", 
@@ -9067,7 +9033,7 @@ dependencies = [ "flate2", "google-cloud-auth", "google-cloud-storage", - "http 0.2.9", + "http", "prost 0.12.1", "rand 0.8.5", "serde_json", @@ -9087,7 +9053,6 @@ dependencies = [ "anyhow", "axum", "chrono", - "http-body-util", "hyper", "multivm", "serde_json", diff --git a/core/node/proof_data_handler/Cargo.toml b/core/node/proof_data_handler/Cargo.toml index d2649e7a8a50..50419721ea1f 100644 --- a/core/node/proof_data_handler/Cargo.toml +++ b/core/node/proof_data_handler/Cargo.toml @@ -24,7 +24,6 @@ tokio.workspace = true tracing.workspace = true [dev-dependencies] -http-body-util = "0.1.0" hyper = { version = "0.14", features = ["full"] } chrono.workspace = true multivm.workspace = true From 3a5aef6c9a202607d2be610291899c75485eecb4 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 6 Jun 2024 11:08:03 +0200 Subject: [PATCH 29/42] Use workspace dependencies only --- Cargo.lock | 145 ++++++++++++++++------ Cargo.toml | 1 + core/node/proof_data_handler/Cargo.toml | 2 +- core/node/proof_data_handler/src/tests.rs | 2 +- 4 files changed, 109 insertions(+), 41 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 62a1e81b3fcc..0211245c6700 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -324,6 +324,12 @@ dependencies = [ "num-traits", ] +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + [[package]] name = "atomic-write-file" version = "0.1.2" @@ -362,9 +368,9 @@ dependencies = [ "bitflags 1.3.2", "bytes", "futures-util", - "http", - "http-body", - "hyper", + "http 0.2.9", + "http-body 0.4.5", + "hyper 0.14.27", "itoa", "matchit", "memchr", @@ -392,8 +398,8 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http", - "http-body", + "http 0.2.9", + "http-body 0.4.5", "mime", "rustversion", "tower-layer", @@ -2396,7 +2402,7 @@ dependencies = [ "futures-core", "futures-sink", 
"gloo-utils", - "http", + "http 0.2.9", "js-sys", "pin-project", "serde", @@ -2555,7 +2561,26 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http", + "http 0.2.9", + "indexmap 2.1.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa82e28a107a8cc405f0839610bdc9b15f1e25ec7d696aa5cf173edbcb1486ab" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.1.0", "indexmap 2.1.0", "slab", "tokio", @@ -2724,6 +2749,17 @@ dependencies = [ "itoa", ] +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + [[package]] name = "http-body" version = "0.4.5" @@ -2731,10 +2767,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ "bytes", - "http", + "http 0.2.9", "pin-project-lite", ] +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http 1.1.0", +] + [[package]] name = "http-range-header" version = "0.3.1" @@ -2763,9 +2809,9 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2", - "http", - "http-body", + "h2 0.3.26", + "http 0.2.9", + "http-body 0.4.5", "httparse", "httpdate", "itoa", @@ -2777,6 +2823,27 @@ dependencies = [ "want", ] +[[package]] +name = "hyper" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" +dependencies = [ + 
"bytes", + "futures-channel", + "futures-util", + "h2 0.4.5", + "http 1.1.0", + "http-body 1.0.0", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + [[package]] name = "hyper-rustls" version = "0.24.1" @@ -2784,8 +2851,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" dependencies = [ "futures-util", - "http", - "hyper", + "http 0.2.9", + "hyper 0.14.27", "log", "rustls 0.21.11", "rustls-native-certs 0.6.3", @@ -2799,7 +2866,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" dependencies = [ - "hyper", + "hyper 0.14.27", "pin-project-lite", "tokio", "tokio-io-timeout", @@ -2812,7 +2879,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper", + "hyper 0.14.27", "native-tls", "tokio", "tokio-native-tls", @@ -3052,7 +3119,7 @@ dependencies = [ "futures-channel", "futures-util", "gloo-net", - "http", + "http 0.2.9", "jsonrpsee-core", "pin-project", "rustls-native-certs 0.7.0", @@ -3079,7 +3146,7 @@ dependencies = [ "beef", "futures-timer", "futures-util", - "hyper", + "hyper 0.14.27", "jsonrpsee-types", "parking_lot", "pin-project", @@ -3101,7 +3168,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b7de9f3219d95985eb77fd03194d7c1b56c19bce1abfcc9d07462574b15572" dependencies = [ "async-trait", - "hyper", + "hyper 0.14.27", "hyper-rustls", "jsonrpsee-core", "jsonrpsee-types", @@ -3134,8 +3201,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5cc7c6d1a2c58f6135810284a390d9f823d0f508db74cd914d8237802de80f98" dependencies = [ "futures-util", - "http", - "hyper", + "http 0.2.9", + "hyper 
0.14.27", "jsonrpsee-core", "jsonrpsee-types", "pin-project", @@ -3181,7 +3248,7 @@ version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "073c077471e89c4b511fa88b3df9a0f0abdf4a0a2e6683dd2ab36893af87bb2d" dependencies = [ - "http", + "http 0.2.9", "jsonrpsee-client-transport", "jsonrpsee-core", "jsonrpsee-types", @@ -3575,7 +3642,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a4964177ddfdab1e3a2b37aec7cf320e14169abb0ed73999f558136409178d5" dependencies = [ "base64 0.21.5", - "hyper", + "hyper 0.14.27", "indexmap 1.9.3", "ipnet", "metrics", @@ -4068,7 +4135,7 @@ checksum = "c7594ec0e11d8e33faf03530a4c49af7064ebba81c1480e01be67d90b356508b" dependencies = [ "async-trait", "bytes", - "http", + "http 0.2.9", "opentelemetry_api", "reqwest", ] @@ -4081,7 +4148,7 @@ checksum = "7e5e5a5c4135864099f3faafbe939eb4d7f9b80ebf68a8448da961b32a7c1275" dependencies = [ "async-trait", "futures-core", - "http", + "http 0.2.9", "opentelemetry-http", "opentelemetry-proto", "opentelemetry-semantic-conventions", @@ -5092,10 +5159,10 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "h2", - "http", - "http-body", - "hyper", + "h2 0.3.26", + "http 0.2.9", + "http-body 0.4.5", + "hyper 0.14.27", "hyper-tls", "ipnet", "js-sys", @@ -6049,7 +6116,7 @@ dependencies = [ "base64 0.13.1", "bytes", "futures 0.3.28", - "http", + "http 0.2.9", "httparse", "log", "rand 0.8.5", @@ -6842,10 +6909,10 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "h2", - "http", - "http-body", - "hyper", + "h2 0.3.26", + "http 0.2.9", + "http-body 0.4.5", + "hyper 0.14.27", "hyper-timeout", "percent-encoding", "pin-project", @@ -6891,8 +6958,8 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "http", - "http-body", + "http 0.2.9", + "http-body 0.4.5", "http-range-header", "httpdate", "iri-string", @@ -7258,7 +7325,7 @@ name = "vise-exporter" version = "0.1.0" source = 
"git+https://github.com/matter-labs/vise.git?rev=a5bb80c9ce7168663114ee30e794d6dc32159ee4#a5bb80c9ce7168663114ee30e794d6dc32159ee4" dependencies = [ - "hyper", + "hyper 0.14.27", "metrics-exporter-prometheus", "once_cell", "tokio", @@ -8792,7 +8859,7 @@ dependencies = [ "futures 0.3.28", "governor", "hex", - "http", + "http 0.2.9", "itertools 0.10.5", "lru", "multivm", @@ -9033,7 +9100,7 @@ dependencies = [ "flate2", "google-cloud-auth", "google-cloud-storage", - "http", + "http 0.2.9", "prost 0.12.1", "rand 0.8.5", "serde_json", @@ -9053,7 +9120,7 @@ dependencies = [ "anyhow", "axum", "chrono", - "hyper", + "hyper 1.3.1", "multivm", "serde_json", "tokio", diff --git a/Cargo.toml b/Cargo.toml index 77af41c63721..a858a06374a6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -116,6 +116,7 @@ google-cloud-storage = "0.15.0" governor = "0.4.2" hex = "0.4" http = "0.2.9" +hyper = "1.3.1" iai = "0.1" insta = "1.29.0" itertools = "0.10" diff --git a/core/node/proof_data_handler/Cargo.toml b/core/node/proof_data_handler/Cargo.toml index 50419721ea1f..f7b3db745181 100644 --- a/core/node/proof_data_handler/Cargo.toml +++ b/core/node/proof_data_handler/Cargo.toml @@ -24,7 +24,7 @@ tokio.workspace = true tracing.workspace = true [dev-dependencies] -hyper = { version = "0.14", features = ["full"] } +hyper = { workspace = true, features = ["full"] } chrono.workspace = true multivm.workspace = true serde_json.workspace = true diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 9622bca351cc..eaeadc17f9c1 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -106,7 +106,7 @@ async fn request_tee_proof_inputs() { assert_eq!(response.status(), StatusCode::OK); - let body = hyper::body::to_bytes(response.into_body()).await.unwrap(); + let body = response.into_body().collect().await.unwrap().to_bytes(); let json: serde_json::Value = serde_json::from_slice(&body).unwrap(); let 
json = json .get("Success") From d2dbd95d2f3c3ea1828b462ae08e54d322c84bd5 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Thu, 6 Jun 2024 17:35:08 +0200 Subject: [PATCH 30/42] Don't use deprecated hyper::body::to_bytes --- Cargo.lock | 167 ++++++---------------- Cargo.toml | 2 +- core/node/proof_data_handler/Cargo.toml | 2 +- core/node/proof_data_handler/src/tests.rs | 1 + 4 files changed, 48 insertions(+), 124 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0211245c6700..282287488ec9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -324,12 +324,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "atomic-waker" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - [[package]] name = "atomic-write-file" version = "0.1.2" @@ -368,9 +362,9 @@ dependencies = [ "bitflags 1.3.2", "bytes", "futures-util", - "http 0.2.9", - "http-body 0.4.5", - "hyper 0.14.27", + "http", + "http-body", + "hyper", "itoa", "matchit", "memchr", @@ -398,8 +392,8 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 0.2.9", - "http-body 0.4.5", + "http", + "http-body", "mime", "rustversion", "tower-layer", @@ -2402,7 +2396,7 @@ dependencies = [ "futures-core", "futures-sink", "gloo-utils", - "http 0.2.9", + "http", "js-sys", "pin-project", "serde", @@ -2561,26 +2555,7 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http 0.2.9", - "indexmap 2.1.0", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "h2" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa82e28a107a8cc405f0839610bdc9b15f1e25ec7d696aa5cf173edbcb1486ab" -dependencies = [ - "atomic-waker", - "bytes", - "fnv", - "futures-core", - "futures-sink", - "http 1.1.0", + "http", "indexmap 2.1.0", "slab", "tokio", @@ -2749,38 +2724,17 @@ dependencies = [ "itoa", ] -[[package]] -name = "http" 
-version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - [[package]] name = "http-body" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http 0.2.9", + "http", "pin-project-lite", ] -[[package]] -name = "http-body" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" -dependencies = [ - "bytes", - "http 1.1.0", -] - [[package]] name = "http-range-header" version = "0.3.1" @@ -2801,49 +2755,28 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "0.14.27" +version = "0.14.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +checksum = "f361cde2f109281a220d4307746cdfd5ee3f410da58a70377762396775634b33" dependencies = [ "bytes", "futures-channel", "futures-core", "futures-util", - "h2 0.3.26", - "http 0.2.9", - "http-body 0.4.5", + "h2", + "http", + "http-body", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.10", + "socket2", "tokio", "tower-service", "tracing", "want", ] -[[package]] -name = "hyper" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" -dependencies = [ - "bytes", - "futures-channel", - "futures-util", - "h2 0.4.5", - "http 1.1.0", - "http-body 1.0.0", - "httparse", - "httpdate", - "itoa", - "pin-project-lite", - 
"smallvec", - "tokio", - "want", -] - [[package]] name = "hyper-rustls" version = "0.24.1" @@ -2851,8 +2784,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" dependencies = [ "futures-util", - "http 0.2.9", - "hyper 0.14.27", + "http", + "hyper", "log", "rustls 0.21.11", "rustls-native-certs 0.6.3", @@ -2866,7 +2799,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" dependencies = [ - "hyper 0.14.27", + "hyper", "pin-project-lite", "tokio", "tokio-io-timeout", @@ -2879,7 +2812,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper 0.14.27", + "hyper", "native-tls", "tokio", "tokio-native-tls", @@ -3119,7 +3052,7 @@ dependencies = [ "futures-channel", "futures-util", "gloo-net", - "http 0.2.9", + "http", "jsonrpsee-core", "pin-project", "rustls-native-certs 0.7.0", @@ -3146,7 +3079,7 @@ dependencies = [ "beef", "futures-timer", "futures-util", - "hyper 0.14.27", + "hyper", "jsonrpsee-types", "parking_lot", "pin-project", @@ -3168,7 +3101,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b7de9f3219d95985eb77fd03194d7c1b56c19bce1abfcc9d07462574b15572" dependencies = [ "async-trait", - "hyper 0.14.27", + "hyper", "hyper-rustls", "jsonrpsee-core", "jsonrpsee-types", @@ -3201,8 +3134,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5cc7c6d1a2c58f6135810284a390d9f823d0f508db74cd914d8237802de80f98" dependencies = [ "futures-util", - "http 0.2.9", - "hyper 0.14.27", + "http", + "hyper", "jsonrpsee-core", "jsonrpsee-types", "pin-project", @@ -3248,7 +3181,7 @@ version = "0.21.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "073c077471e89c4b511fa88b3df9a0f0abdf4a0a2e6683dd2ab36893af87bb2d" dependencies = [ - "http 0.2.9", + "http", "jsonrpsee-client-transport", "jsonrpsee-core", "jsonrpsee-types", @@ -3642,7 +3575,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a4964177ddfdab1e3a2b37aec7cf320e14169abb0ed73999f558136409178d5" dependencies = [ "base64 0.21.5", - "hyper 0.14.27", + "hyper", "indexmap 1.9.3", "ipnet", "metrics", @@ -4135,7 +4068,7 @@ checksum = "c7594ec0e11d8e33faf03530a4c49af7064ebba81c1480e01be67d90b356508b" dependencies = [ "async-trait", "bytes", - "http 0.2.9", + "http", "opentelemetry_api", "reqwest", ] @@ -4148,7 +4081,7 @@ checksum = "7e5e5a5c4135864099f3faafbe939eb4d7f9b80ebf68a8448da961b32a7c1275" dependencies = [ "async-trait", "futures-core", - "http 0.2.9", + "http", "opentelemetry-http", "opentelemetry-proto", "opentelemetry-semantic-conventions", @@ -5159,10 +5092,10 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "h2 0.3.26", - "http 0.2.9", - "http-body 0.4.5", - "hyper 0.14.27", + "h2", + "http", + "http-body", + "hyper", "hyper-tls", "ipnet", "js-sys", @@ -6087,16 +6020,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "socket2" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "socket2" version = "0.5.5" @@ -6116,7 +6039,7 @@ dependencies = [ "base64 0.13.1", "bytes", "futures 0.3.28", - "http 0.2.9", + "http", "httparse", "log", "rand 0.8.5", @@ -6786,7 +6709,7 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.5", + "socket2", "tokio-macros", "windows-sys 0.48.0", ] @@ -6909,10 +6832,10 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "h2 0.3.26", - "http 0.2.9", - 
"http-body 0.4.5", - "hyper 0.14.27", + "h2", + "http", + "http-body", + "hyper", "hyper-timeout", "percent-encoding", "pin-project", @@ -6958,8 +6881,8 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "http 0.2.9", - "http-body 0.4.5", + "http", + "http-body", "http-range-header", "httpdate", "iri-string", @@ -7325,7 +7248,7 @@ name = "vise-exporter" version = "0.1.0" source = "git+https://github.com/matter-labs/vise.git?rev=a5bb80c9ce7168663114ee30e794d6dc32159ee4#a5bb80c9ce7168663114ee30e794d6dc32159ee4" dependencies = [ - "hyper 0.14.27", + "hyper", "metrics-exporter-prometheus", "once_cell", "tokio", @@ -8859,7 +8782,7 @@ dependencies = [ "futures 0.3.28", "governor", "hex", - "http 0.2.9", + "http", "itertools 0.10.5", "lru", "multivm", @@ -9100,7 +9023,7 @@ dependencies = [ "flate2", "google-cloud-auth", "google-cloud-storage", - "http 0.2.9", + "http", "prost 0.12.1", "rand 0.8.5", "serde_json", @@ -9120,7 +9043,7 @@ dependencies = [ "anyhow", "axum", "chrono", - "hyper 1.3.1", + "hyper", "multivm", "serde_json", "tokio", diff --git a/Cargo.toml b/Cargo.toml index a858a06374a6..de664288e150 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -116,7 +116,7 @@ google-cloud-storage = "0.15.0" governor = "0.4.2" hex = "0.4" http = "0.2.9" -hyper = "1.3.1" +hyper = "0.14.29" iai = "0.1" insta = "1.29.0" itertools = "0.10" diff --git a/core/node/proof_data_handler/Cargo.toml b/core/node/proof_data_handler/Cargo.toml index f7b3db745181..7a3efe3afeb6 100644 --- a/core/node/proof_data_handler/Cargo.toml +++ b/core/node/proof_data_handler/Cargo.toml @@ -24,7 +24,7 @@ tokio.workspace = true tracing.workspace = true [dev-dependencies] -hyper = { workspace = true, features = ["full"] } +hyper.workspace = true chrono.workspace = true multivm.workspace = true serde_json.workspace = true diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index eaeadc17f9c1..dda8d194210c 100644 --- 
a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -7,6 +7,7 @@ use axum::{ Router, }; use chrono::{Duration, Utc}; +use hyper::body::HttpBody; use multivm::interface::{L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode}; use serde_json::json; use tower::ServiceExt; From 5bbba0c44c26c50b962a773965d1d77362342157 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Fri, 7 Jun 2024 00:07:57 +0200 Subject: [PATCH 31/42] Remove valid_until column --- ...6fa16618914d6dedb39a9a40d36484741e8b01f4.json | 15 +++++++++++++++ ...66db1899ba8ec8a3ec7c16542c909cee3e9c7393.json | 16 ---------------- ...add_tee_proof_generation_details_table.up.sql | 3 +-- core/lib/dal/src/tee_proof_generation_dal.rs | 9 +++------ core/lib/prover_interface/src/api.rs | 2 -- .../src/tee_request_processor.rs | 2 +- core/node/proof_data_handler/src/tests.rs | 4 +--- prover/Cargo.lock | 3 +++ 8 files changed, 24 insertions(+), 30 deletions(-) create mode 100644 core/lib/dal/.sqlx/query-37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4.json delete mode 100644 core/lib/dal/.sqlx/query-711bca136767169b46e7e58966db1899ba8ec8a3ec7c16542c909cee3e9c7393.json diff --git a/core/lib/dal/.sqlx/query-37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4.json b/core/lib/dal/.sqlx/query-37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4.json new file mode 100644 index 000000000000..a39a1bdb07b8 --- /dev/null +++ b/core/lib/dal/.sqlx/query-37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n tee_attestations (pubkey, attestation)\n VALUES\n ($1, $2)\n ON CONFLICT (pubkey) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Bytea", + "Bytea" + ] + }, + "nullable": [] + }, + "hash": "37890022be6b5e893cf051266fa16618914d6dedb39a9a40d36484741e8b01f4" +} diff --git 
a/core/lib/dal/.sqlx/query-711bca136767169b46e7e58966db1899ba8ec8a3ec7c16542c909cee3e9c7393.json b/core/lib/dal/.sqlx/query-711bca136767169b46e7e58966db1899ba8ec8a3ec7c16542c909cee3e9c7393.json deleted file mode 100644 index a6dbff262f0e..000000000000 --- a/core/lib/dal/.sqlx/query-711bca136767169b46e7e58966db1899ba8ec8a3ec7c16542c909cee3e9c7393.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n tee_attestations (pubkey, attestation, valid_until)\n VALUES\n ($1, $2, $3)\n ON CONFLICT (pubkey) DO NOTHING\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Bytea", - "Bytea", - "Timestamp" - ] - }, - "nullable": [] - }, - "hash": "711bca136767169b46e7e58966db1899ba8ec8a3ec7c16542c909cee3e9c7393" -} diff --git a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql index 5449ac2d1012..3a249c44346c 100644 --- a/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql +++ b/core/lib/dal/migrations/20240523085604_add_tee_proof_generation_details_table.up.sql @@ -1,8 +1,7 @@ CREATE TABLE IF NOT EXISTS tee_attestations ( pubkey BYTEA PRIMARY KEY, - attestation BYTEA, - valid_until TIMESTAMP NOT NULL + attestation BYTEA ); CREATE TABLE IF NOT EXISTS tee_proof_generation_details diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index d2f36d2d9a30..028a34ff668c 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -1,6 +1,5 @@ use std::time::Duration; -use chrono::{DateTime, Utc}; use strum::{Display, EnumString}; use zksync_db_connection::{connection::Connection, utils::pg_interval_from_duration}; use zksync_types::L1BatchNumber; @@ -183,19 +182,17 @@ impl TeeProofGenerationDal<'_, '_> { &mut self, pubkey: &[u8], attestation: &[u8], - valid_until: &DateTime, ) 
-> Result<(), SqlxError> { sqlx::query!( r#" INSERT INTO - tee_attestations (pubkey, attestation, valid_until) + tee_attestations (pubkey, attestation) VALUES - ($1, $2, $3) + ($1, $2) ON CONFLICT (pubkey) DO NOTHING "#, pubkey, - attestation, - valid_until.naive_utc() + attestation ) .execute(self.storage.conn()) .await? diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index 266ba9d4302b..f66b59c099c7 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -1,7 +1,6 @@ //! Prover and server subsystems communicate via the API. //! This module defines the types used in the API. -use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use zksync_types::{ basic_fri_types::Eip4844Blobs, @@ -63,5 +62,4 @@ pub type SubmitTeeProofRequest = GenericSubmitProofRequest pub struct RegisterTeeAttestationRequest { pub attestation: Vec, pub pubkey: Vec, - pub valid_until: DateTime, } diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index df12893aaedc..67e36feb9982 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -125,7 +125,7 @@ impl TeeRequestProcessor { .map_err(|_| RequestProcessorError::Sqlx(SqlxError::PoolClosed))?; let mut dal = connection.tee_proof_generation_dal(); - dal.save_attestation(&payload.pubkey, &payload.attestation, &payload.valid_until) + dal.save_attestation(&payload.pubkey, &payload.attestation) .await .map_err(RequestProcessorError::Sqlx)?; diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index dda8d194210c..5b7dd460aa48 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -6,7 +6,6 @@ use axum::{ response::Response, Router, }; -use chrono::{Duration, Utc}; use hyper::body::HttpBody; use 
multivm::interface::{L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode}; use serde_json::json; @@ -158,12 +157,11 @@ async fn submit_tee_proof() { // save the attestation for the pubkey if let SubmitTeeProofRequest::Proof(ref proof) = tee_proof_request { - let valid_until = Utc::now() + Duration::days(42); let attestation = [15, 16, 17, 18, 19]; let mut proof_dal = db_conn_pool.connection().await.unwrap(); proof_dal .tee_proof_generation_dal() - .save_attestation(&proof.pubkey, &attestation, &valid_until) + .save_attestation(&proof.pubkey, &attestation) .await .expect("Failed to save attestation"); } else { diff --git a/prover/Cargo.lock b/prover/Cargo.lock index d2de12c5682e..62bb4bf1da14 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -9175,11 +9175,14 @@ dependencies = [ "axum", "tokio", "tracing", + "vm_utils", "zksync_config", "zksync_dal", "zksync_object_store", "zksync_prover_interface", + "zksync_tee_verifier", "zksync_types", + "zksync_utils", ] [[package]] From 7b092ac87318bc8b75c309899358d2a5e49ebced Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Fri, 7 Jun 2024 12:50:48 +0200 Subject: [PATCH 32/42] Refactor TEE endpoints and make them optional --- checks-config/era.dic | 4 ++-- .../config/src/configs/proof_data_handler.rs | 1 + core/lib/config/src/testonly.rs | 1 + core/lib/env_config/src/proof_data_handler.rs | 2 ++ .../protobuf_config/src/proof_data_handler.rs | 4 ++++ .../src/proto/config/prover.proto | 1 + core/node/proof_data_handler/src/lib.rs | 21 ++++++++++++------- core/node/proof_data_handler/src/tests.rs | 16 +++++++------- etc/env/file_based/general.yaml | 1 + 9 files changed, 34 insertions(+), 17 deletions(-) diff --git a/checks-config/era.dic b/checks-config/era.dic index 03cbd6b91d6f..78ed9ced30bd 100644 --- a/checks-config/era.dic +++ b/checks-config/era.dic @@ -977,6 +977,6 @@ TeeRequestProcessor l1_batch_number RequestProcessorError map_err -tee_proof_inputs -submit_tee_proof +proof_inputs +submit_proofs 
ready_to_be_proven diff --git a/core/lib/config/src/configs/proof_data_handler.rs b/core/lib/config/src/configs/proof_data_handler.rs index 06d672b40d78..de7f6969b05f 100644 --- a/core/lib/config/src/configs/proof_data_handler.rs +++ b/core/lib/config/src/configs/proof_data_handler.rs @@ -6,6 +6,7 @@ use serde::Deserialize; pub struct ProofDataHandlerConfig { pub http_port: u16, pub proof_generation_timeout_in_secs: u16, + pub tee_support: bool, } impl ProofDataHandlerConfig { diff --git a/core/lib/config/src/testonly.rs b/core/lib/config/src/testonly.rs index 87c3bd2a1294..3feee2a29ec7 100644 --- a/core/lib/config/src/testonly.rs +++ b/core/lib/config/src/testonly.rs @@ -626,6 +626,7 @@ impl Distribution for EncodeDist { configs::ProofDataHandlerConfig { http_port: self.sample(rng), proof_generation_timeout_in_secs: self.sample(rng), + tee_support: self.sample(rng), } } } diff --git a/core/lib/env_config/src/proof_data_handler.rs b/core/lib/env_config/src/proof_data_handler.rs index 53bbeb42ee6a..f69aa1d6dc59 100644 --- a/core/lib/env_config/src/proof_data_handler.rs +++ b/core/lib/env_config/src/proof_data_handler.rs @@ -19,6 +19,7 @@ mod tests { ProofDataHandlerConfig { http_port: 3320, proof_generation_timeout_in_secs: 18000, + tee_support: true, } } @@ -27,6 +28,7 @@ mod tests { let config = r#" PROOF_DATA_HANDLER_PROOF_GENERATION_TIMEOUT_IN_SECS="18000" PROOF_DATA_HANDLER_HTTP_PORT="3320" + PROOF_DATA_HANDLER_TEE_SUPPORT="true" "#; let mut lock = MUTEX.lock(); lock.set_env(config); diff --git a/core/lib/protobuf_config/src/proof_data_handler.rs b/core/lib/protobuf_config/src/proof_data_handler.rs index d231e5b46b7b..4b7bd2fd7c32 100644 --- a/core/lib/protobuf_config/src/proof_data_handler.rs +++ b/core/lib/protobuf_config/src/proof_data_handler.rs @@ -14,6 +14,9 @@ impl ProtoRepr for proto::ProofDataHandler { proof_generation_timeout_in_secs: required(&self.proof_generation_timeout_in_secs) .and_then(|x| Ok((*x).try_into()?)) 
.context("proof_generation_timeout_in_secs")?, + tee_support: required(&self.tee_support) + .copied() + .context("tee_support")?, }) } @@ -21,6 +24,7 @@ impl ProtoRepr for proto::ProofDataHandler { Self { http_port: Some(this.http_port.into()), proof_generation_timeout_in_secs: Some(this.proof_generation_timeout_in_secs.into()), + tee_support: Some(this.tee_support), } } } diff --git a/core/lib/protobuf_config/src/proto/config/prover.proto b/core/lib/protobuf_config/src/proto/config/prover.proto index d5d131fc157b..1eaf8637522a 100644 --- a/core/lib/protobuf_config/src/proto/config/prover.proto +++ b/core/lib/protobuf_config/src/proto/config/prover.proto @@ -99,4 +99,5 @@ message WitnessVectorGenerator { message ProofDataHandler { optional uint32 http_port = 1; // required; u16 optional uint32 proof_generation_timeout_in_secs = 2; // required; s + optional bool tee_support = 3; // required } diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index 0f31979607a6..0a0224b4ce3e 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -57,10 +57,10 @@ fn create_proof_processing_router( let submit_tee_proof_processor = get_tee_proof_gen_processor.clone(); let register_tee_attestation_processor = get_tee_proof_gen_processor.clone(); let get_proof_gen_processor = - RequestProcessor::new(blob_store, connection_pool, config, commitment_mode); + RequestProcessor::new(blob_store, connection_pool, config.clone(), commitment_mode); let submit_proof_processor = get_proof_gen_processor.clone(); - Router::new() + let router = Router::new() .route( "/proof_generation_data", post( @@ -82,9 +82,11 @@ fn create_proof_processing_router( .await }, ), - ) - .route( - "/tee_proof_inputs", + ); + + if config.tee_support { + return router.route( + "/tee/proof_inputs", post( move |payload: Json| async move { get_tee_proof_gen_processor @@ -94,7 +96,7 @@ fn create_proof_processing_router( ), ) .route( - 
"/submit_tee_proof/:l1_batch_number", + "/tee/submit_proofs/:l1_batch_number", post( move |l1_batch_number: Path, payload: Json| async move { submit_tee_proof_processor @@ -104,7 +106,7 @@ fn create_proof_processing_router( ), ) .route( - "/register_tee_attestation", + "/tee/register_attestation", post( move |payload: Json| async move { register_tee_attestation_processor @@ -112,5 +114,8 @@ fn create_proof_processing_router( .await }, ), - ) + ); + } + + router } diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 5b7dd460aa48..98330731e53c 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -21,11 +21,11 @@ use zksync_types::{commitment::L1BatchCommitmentMode, L1BatchNumber, H256}; use crate::create_proof_processing_router; -// Test the /tee_proof_inputs endpoint by: +// Test the /tee/proof_inputs endpoint by: // 1. Mocking an object store with a single batch blob containing TEE verifier input // 2. Populating the SQL db with relevant information about the status of the TEE verifier input and // TEE proof generation -// 3. Sending a request to the /tee_proof_inputs endpoint and asserting that the response +// 3. 
Sending a request to the /tee/proof_inputs endpoint and asserting that the response // matches the file from the object store #[tokio::test] async fn request_tee_proof_inputs() { @@ -80,7 +80,7 @@ async fn request_tee_proof_inputs() { let db_conn_pool = ConnectionPool::test_pool().await; mock_tee_batch_status(db_conn_pool.clone(), batch_number, &object_path).await; - // test the /tee_proof_inputs endpoint; it should return the batch from the object store + // test the /tee/proof_inputs endpoint; it should return the batch from the object store let app = create_proof_processing_router( blob_store, @@ -88,6 +88,7 @@ async fn request_tee_proof_inputs() { ProofDataHandlerConfig { http_port: 1337, proof_generation_timeout_in_secs: 10, + tee_support: true, }, L1BatchCommitmentMode::Rollup, ); @@ -96,7 +97,7 @@ async fn request_tee_proof_inputs() { .oneshot( Request::builder() .method(Method::POST) - .uri("/tee_proof_inputs") + .uri("/tee/proof_inputs") .header(http::header::CONTENT_TYPE, "application/json") .body(req_body) .unwrap(), @@ -117,7 +118,7 @@ async fn request_tee_proof_inputs() { assert_eq!(tvi, deserialized); } -// Test /submit_tee_proof endpoint using a mocked TEE proof and verify response and db state +// Test /tee/submit_proofs endpoint using a mocked TEE proof and verify response and db state #[tokio::test] async fn submit_tee_proof() { let blob_store = MockObjectStore::arc(); @@ -127,7 +128,7 @@ async fn submit_tee_proof() { mock_tee_batch_status(db_conn_pool.clone(), batch_number, object_path).await; - // send a request to the /submit_tee_proof endpoint, using a mocked TEE proof + // send a request to the /tee/submit_proofs endpoint, using a mocked TEE proof let tee_proof_request_str = r#"{ "Proof": { @@ -138,13 +139,14 @@ async fn submit_tee_proof() { }"#; let tee_proof_request = serde_json::from_str::(tee_proof_request_str).unwrap(); - let uri = format!("/submit_tee_proof/{}", batch_number.0); + let uri = format!("/tee/submit_proofs/{}", 
batch_number.0); let app = create_proof_processing_router( blob_store, db_conn_pool.clone(), ProofDataHandlerConfig { http_port: 1337, proof_generation_timeout_in_secs: 10, + tee_support: true, }, L1BatchCommitmentMode::Rollup, ); diff --git a/etc/env/file_based/general.yaml b/etc/env/file_based/general.yaml index de7914bd3e6d..03cba74c97c8 100644 --- a/etc/env/file_based/general.yaml +++ b/etc/env/file_based/general.yaml @@ -190,6 +190,7 @@ witness_vector_generator: data_handler: http_port: 3320 proof_generation_timeout_in_secs: 18000 + tee_support: true prover_gateway: api_url: http://127.0.0.1:3320 api_poll_duration_secs: 1000 From c753a46fc5201a75fc68904af503a5e2eba50711 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 10 Jun 2024 12:26:54 +0200 Subject: [PATCH 33/42] Add a comment to clarify the role of the L1BatchTeeProofForL1 struct --- core/lib/prover_interface/src/outputs.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/core/lib/prover_interface/src/outputs.rs b/core/lib/prover_interface/src/outputs.rs index 053cb02a1878..a4035a21ec2b 100644 --- a/core/lib/prover_interface/src/outputs.rs +++ b/core/lib/prover_interface/src/outputs.rs @@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize}; use zksync_object_store::{serialize_using_bincode, Bucket, StoredObject}; use zksync_types::{protocol_version::ProtocolSemanticVersion, L1BatchNumber}; -/// A "final" proof that can be sent to the L1 contract. +/// A "final" ZK proof that can be sent to the L1 contract. #[derive(Clone, Serialize, Deserialize)] pub struct L1BatchProofForL1 { pub aggregation_result_coords: [[u8; 32]; 4], @@ -13,10 +13,15 @@ pub struct L1BatchProofForL1 { pub protocol_version: ProtocolSemanticVersion, } +/// A "final" TEE proof that can be sent to the L1 contract. 
#[derive(Clone, PartialEq, Serialize, Deserialize)] pub struct L1BatchTeeProofForL1 { + // signature generated within the TEE enclave, using the privkey corresponding to the pubkey pub signature: Vec, + // pubkey used for signature verification; each key pair is attested by the TEE attestation + // stored in the db pub pubkey: Vec, + // data that was signed pub proof: Vec, } From 76af19ddb13c51a397e934e55be5f936e54cb4e2 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 10 Jun 2024 12:32:52 +0200 Subject: [PATCH 34/42] Remove redundant dependency (vm_utils) --- core/node/proof_data_handler/Cargo.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/core/node/proof_data_handler/Cargo.toml b/core/node/proof_data_handler/Cargo.toml index 7a3efe3afeb6..e5b733280970 100644 --- a/core/node/proof_data_handler/Cargo.toml +++ b/core/node/proof_data_handler/Cargo.toml @@ -10,7 +10,6 @@ keywords.workspace = true categories.workspace = true [dependencies] -vm_utils.workspace = true zksync_config.workspace = true zksync_dal.workspace = true zksync_object_store.workspace = true From 9a22bb33ee97652304bc950a424976b07e7a7d22 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 10 Jun 2024 13:11:06 +0200 Subject: [PATCH 35/42] Remove redundant dependency (zksync_utils) --- Cargo.lock | 2 -- checks-config/era.dic | 1 + core/node/proof_data_handler/Cargo.toml | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 282287488ec9..675da6d97a2d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9049,7 +9049,6 @@ dependencies = [ "tokio", "tower", "tracing", - "vm_utils", "zksync_basic_types", "zksync_config", "zksync_contracts", @@ -9058,7 +9057,6 @@ dependencies = [ "zksync_prover_interface", "zksync_tee_verifier", "zksync_types", - "zksync_utils", ] [[package]] diff --git a/checks-config/era.dic b/checks-config/era.dic index 78ed9ced30bd..a93a467f956e 100644 --- a/checks-config/era.dic +++ b/checks-config/era.dic @@ -980,3 +980,4 @@ map_err 
proof_inputs submit_proofs ready_to_be_proven +privkey diff --git a/core/node/proof_data_handler/Cargo.toml b/core/node/proof_data_handler/Cargo.toml index e5b733280970..301ce0df6a80 100644 --- a/core/node/proof_data_handler/Cargo.toml +++ b/core/node/proof_data_handler/Cargo.toml @@ -16,7 +16,6 @@ zksync_object_store.workspace = true zksync_prover_interface.workspace = true zksync_tee_verifier.workspace = true zksync_types.workspace = true -zksync_utils.workspace = true anyhow.workspace = true axum.workspace = true tokio.workspace = true From 10d0c8a40862acfeea6f11fcd2804c571a872836 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 10 Jun 2024 13:49:53 +0200 Subject: [PATCH 36/42] Don't create TEE objects unless TEE support is enabled --- core/node/proof_data_handler/src/lib.rs | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index 0a0224b4ce3e..5a3cb2d95b6a 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -52,15 +52,14 @@ fn create_proof_processing_router( config: ProofDataHandlerConfig, commitment_mode: L1BatchCommitmentMode, ) -> Router { - let get_tee_proof_gen_processor = - TeeRequestProcessor::new(blob_store.clone(), connection_pool.clone(), config.clone()); - let submit_tee_proof_processor = get_tee_proof_gen_processor.clone(); - let register_tee_attestation_processor = get_tee_proof_gen_processor.clone(); - let get_proof_gen_processor = - RequestProcessor::new(blob_store, connection_pool, config.clone(), commitment_mode); + let get_proof_gen_processor = RequestProcessor::new( + blob_store.clone(), + connection_pool.clone(), + config.clone(), + commitment_mode, + ); let submit_proof_processor = get_proof_gen_processor.clone(); - - let router = Router::new() + let mut router = Router::new() .route( "/proof_generation_data", post( @@ -85,7 +84,12 @@ fn 
create_proof_processing_router( ); if config.tee_support { - return router.route( + let get_tee_proof_gen_processor = + TeeRequestProcessor::new(blob_store, connection_pool, config.clone()); + let submit_tee_proof_processor = get_tee_proof_gen_processor.clone(); + let register_tee_attestation_processor = get_tee_proof_gen_processor.clone(); + + router = router.route( "/tee/proof_inputs", post( move |payload: Json| async move { From 46d0631d736d4fe82bd155098ca573cb8c270e20 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 10 Jun 2024 16:56:47 +0200 Subject: [PATCH 37/42] Update Cargo.lock --- prover/Cargo.lock | 2 -- 1 file changed, 2 deletions(-) diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 62bb4bf1da14..1ae20e823b0a 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -9175,14 +9175,12 @@ dependencies = [ "axum", "tokio", "tracing", - "vm_utils", "zksync_config", "zksync_dal", "zksync_object_store", "zksync_prover_interface", "zksync_tee_verifier", "zksync_types", - "zksync_utils", ] [[package]] From 9de0becbdc57bd958a797e812c690b4c125522be Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Mon, 10 Jun 2024 17:21:34 +0200 Subject: [PATCH 38/42] Create tee_proof_generation_job once the verifier input is produced --- core/lib/dal/src/tee_proof_generation_dal.rs | 2 +- core/node/metadata_calculator/src/updater.rs | 6 +----- core/node/proof_data_handler/src/tests.rs | 2 +- core/node/tee_verifier_input_producer/src/lib.rs | 4 ++++ 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 028a34ff668c..65a7e281d94a 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -113,7 +113,7 @@ impl TeeProofGenerationDal<'_, '_> { .ok_or(sqlx::Error::RowNotFound) } - pub async fn insert_tee_proof_generation_details(&mut self, block_number: L1BatchNumber) { + pub async fn 
insert_tee_proof_generation_job(&mut self, block_number: L1BatchNumber) { sqlx::query!( r#" INSERT INTO diff --git a/core/node/metadata_calculator/src/updater.rs b/core/node/metadata_calculator/src/updater.rs index 6f5ee3b2787d..362e6d0c915d 100644 --- a/core/node/metadata_calculator/src/updater.rs +++ b/core/node/metadata_calculator/src/updater.rs @@ -150,11 +150,7 @@ impl TreeUpdater { storage .proof_generation_dal() .insert_proof_generation_details(l1_batch_number, object_key) - .await; - storage - .tee_proof_generation_dal() - .insert_tee_proof_generation_details(l1_batch_number) - .await; + .await?; } save_postgres_latency.observe(); tracing::info!("Updated metadata for L1 batch #{l1_batch_number} in Postgres"); diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 98330731e53c..8e6dc85bc2ab 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -219,7 +219,7 @@ async fn mock_tee_batch_status( // mock SQL table with relevant information about the status of TEE proof generation ('ready_to_be_proven') proof_dal - .insert_tee_proof_generation_details(batch_number) + .insert_tee_proof_generation_job(batch_number) .await; // now, there should be one batch in the db awaiting proof diff --git a/core/node/tee_verifier_input_producer/src/lib.rs b/core/node/tee_verifier_input_producer/src/lib.rs index 47ae9cd87c3f..aab0e3022e16 100644 --- a/core/node/tee_verifier_input_producer/src/lib.rs +++ b/core/node/tee_verifier_input_producer/src/lib.rs @@ -259,6 +259,10 @@ impl JobProcessor for TeeVerifierInputProducer { .mark_job_as_successful(job_id, started_at, &object_path) .await .context("failed to mark job as successful for TeeVerifierInputProducer")?; + transaction + .tee_proof_generation_dal() + .insert_tee_proof_generation_job(job_id) + .await; transaction .commit() .await From 7d737470f11a1662ab5b13e61c2ad2d2e9550970 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: 
Mon, 10 Jun 2024 18:26:35 +0200 Subject: [PATCH 39/42] Rework error handling as suggested by Igor --- core/lib/dal/src/proof_generation_dal.rs | 117 +++++++++++----- core/lib/dal/src/tee_proof_generation_dal.rs | 129 ++++++++++++------ .../fri_prover_queue_reporter.rs | 4 +- core/node/proof_data_handler/src/errors.rs | 10 +- .../src/request_processor.rs | 7 +- .../src/tee_request_processor.rs | 17 +-- core/node/proof_data_handler/src/tests.rs | 14 +- .../tee_verifier_input_producer/src/lib.rs | 2 +- 8 files changed, 196 insertions(+), 104 deletions(-) diff --git a/core/lib/dal/src/proof_generation_dal.rs b/core/lib/dal/src/proof_generation_dal.rs index 5c1734751458..3f24d4e0831a 100644 --- a/core/lib/dal/src/proof_generation_dal.rs +++ b/core/lib/dal/src/proof_generation_dal.rs @@ -2,10 +2,13 @@ use std::time::Duration; use strum::{Display, EnumString}; -use zksync_db_connection::{connection::Connection, utils::pg_interval_from_duration}; +use zksync_db_connection::instrument::Instrumented; +use zksync_db_connection::{ + connection::Connection, error::DalResult, utils::pg_interval_from_duration, +}; use zksync_types::L1BatchNumber; -use crate::{Core, SqlxError}; +use crate::Core; #[derive(Debug)] pub struct ProofGenerationDal<'a, 'c> { @@ -28,7 +31,7 @@ impl ProofGenerationDal<'_, '_> { pub async fn get_next_block_to_be_proven( &mut self, processing_timeout: Duration, - ) -> Option { + ) -> DalResult> { let processing_timeout = pg_interval_from_duration(processing_timeout); let result: Option = sqlx::query!( r#" @@ -66,15 +69,16 @@ impl ProofGenerationDal<'_, '_> { .unwrap() .map(|row| L1BatchNumber(row.l1_batch_number as u32)); - result + Ok(result) } pub async fn save_proof_artifacts_metadata( &mut self, - block_number: L1BatchNumber, + batch_number: L1BatchNumber, proof_blob_url: &str, - ) -> Result<(), SqlxError> { - sqlx::query!( + ) -> DalResult<()> { + let batch_number = i64::from(batch_number.0); + let query = sqlx::query!( r#" UPDATE 
proof_generation_details SET @@ -85,22 +89,34 @@ impl ProofGenerationDal<'_, '_> { l1_batch_number = $2 "#, proof_blob_url, - i64::from(block_number.0) - ) - .execute(self.storage.conn()) - .await? - .rows_affected() - .eq(&1) - .then_some(()) - .ok_or(sqlx::Error::RowNotFound) + batch_number + ); + let instrumentation = Instrumented::new("save_proof_artifacts_metadata") + .with_arg("proof_blob_url", &proof_blob_url) + .with_arg("l1_batch_number", &batch_number); + let result = instrumentation + .clone() + .with(query) + .execute(self.storage) + .await?; + if result.rows_affected() == 0 { + let err = instrumentation.constraint_error(anyhow::anyhow!( + "Cannot save proof_blob_url for a batch number {} that does not exist", + batch_number + )); + return Err(err); + } + + Ok(()) + } pub async fn insert_proof_generation_details( &mut self, block_number: L1BatchNumber, proof_gen_data_blob_url: &str, - ) { - sqlx::query!( + ) -> DalResult<()> { + let l1_batch_number = i64::from(block_number.0); + let query = sqlx::query!( r#" INSERT INTO proof_generation_details (l1_batch_number, status, proof_gen_data_blob_url, created_at, updated_at) @@ -108,19 +124,35 @@ impl ProofGenerationDal<'_, '_> { ($1, 'ready_to_be_proven', $2, NOW(), NOW()) ON CONFLICT (l1_batch_number) DO NOTHING "#, - i64::from(block_number.0), + l1_batch_number, proof_gen_data_blob_url, - ) - .execute(self.storage.conn()) - .await - .unwrap(); + ); + let instrumentation = Instrumented::new("insert_proof_generation_details") + .with_arg("l1_batch_number", &l1_batch_number) + .with_arg("proof_gen_data_blob_url", &proof_gen_data_blob_url); + let result = instrumentation + .clone() + .with(query) + .execute(self.storage) + .await?; + if result.rows_affected() == 0 { + let err = instrumentation.constraint_error(anyhow::anyhow!( + "Cannot save proof_gen_data_blob_url for batch number {}: proof generation details for this batch already exist", + l1_batch_number + )); + return Err(err); + } + + Ok(()) + } pub async fn mark_proof_generation_job_as_skipped( &mut
self, block_number: L1BatchNumber, - ) -> Result<(), SqlxError> { - sqlx::query!( + ) -> DalResult<()> { + let status = ProofGenerationJobStatus::Skipped.to_string(); + let l1_batch_number = i64::from(block_number.0); + let query = sqlx::query!( r#" UPDATE proof_generation_details SET @@ -129,18 +161,29 @@ impl ProofGenerationDal<'_, '_> { WHERE l1_batch_number = $2 "#, - ProofGenerationJobStatus::Skipped.to_string(), - i64::from(block_number.0) - ) - .execute(self.storage.conn()) - .await? - .rows_affected() - .eq(&1) - .then_some(()) - .ok_or(sqlx::Error::RowNotFound) + status, + l1_batch_number + ); + let instrumentation = Instrumented::new("mark_proof_generation_job_as_skipped") + .with_arg("status", &status) + .with_arg("l1_batch_number", &l1_batch_number); + let result = instrumentation + .clone() + .with(query) + .execute(self.storage) + .await?; + if result.rows_affected() == 0 { + let err = instrumentation.constraint_error(anyhow::anyhow!( + "Cannot mark proof as skipped because batch number {} does not exist", + l1_batch_number + )); + return Err(err); + } + + Ok(()) } - pub async fn get_oldest_unpicked_batch(&mut self) -> Option { + pub async fn get_oldest_unpicked_batch(&mut self) -> DalResult> { let result: Option = sqlx::query!( r#" SELECT @@ -160,10 +203,10 @@ impl ProofGenerationDal<'_, '_> { .unwrap() .map(|row| L1BatchNumber(row.l1_batch_number as u32)); - result + Ok(result) } - pub async fn get_oldest_not_generated_batch(&mut self) -> Option { + pub async fn get_oldest_not_generated_batch(&mut self) -> DalResult> { let result: Option = sqlx::query!( r#" SELECT @@ -183,6 +226,6 @@ impl ProofGenerationDal<'_, '_> { .unwrap() .map(|row| L1BatchNumber(row.l1_batch_number as u32)); - result + Ok(result) } } diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 65a7e281d94a..364b96123d4c 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs 
@@ -1,10 +1,13 @@ use std::time::Duration; use strum::{Display, EnumString}; -use zksync_db_connection::{connection::Connection, utils::pg_interval_from_duration}; +use zksync_db_connection::instrument::{InstrumentExt, Instrumented}; +use zksync_db_connection::{ + connection::Connection, error::DalResult, utils::pg_interval_from_duration, +}; use zksync_types::L1BatchNumber; -use crate::{Core, SqlxError}; +use crate::Core; #[derive(Debug)] pub struct TeeProofGenerationDal<'a, 'c> { @@ -33,7 +36,7 @@ impl TeeProofGenerationDal<'_, '_> { pub async fn get_next_block_to_be_proven( &mut self, processing_timeout: Duration, - ) -> Option { + ) -> DalResult> { let processing_timeout = pg_interval_from_duration(processing_timeout); let result: Option = sqlx::query!( r#" @@ -75,7 +78,7 @@ impl TeeProofGenerationDal<'_, '_> { .unwrap() .map(|row| L1BatchNumber(row.l1_batch_number as u32)); - result + Ok(result) } pub async fn save_proof_artifacts_metadata( @@ -85,8 +88,8 @@ impl TeeProofGenerationDal<'_, '_> { pubkey: &[u8], proof: &[u8], tee_type: TeeType, - ) -> Result<(), SqlxError> { - sqlx::query!( + ) -> DalResult<()> { + let query = sqlx::query!( r#" UPDATE tee_proof_generation_details SET @@ -104,16 +107,32 @@ impl TeeProofGenerationDal<'_, '_> { proof, tee_type.to_string(), i64::from(block_number.0) - ) - .execute(self.storage.conn()) - .await? 
- .rows_affected() - .eq(&1) - .then_some(()) - .ok_or(sqlx::Error::RowNotFound) + ); + let instrumentation = Instrumented::new("save_proof_artifacts_metadata") + .with_arg("signature", &signature) + .with_arg("pubkey", &pubkey) + .with_arg("proof", &proof) + .with_arg("tee_type", &tee_type); + let result = instrumentation + .clone() + .with(query) + .execute(self.storage) + .await?; + if result.rows_affected() == 0 { + let err = instrumentation.constraint_error(anyhow::anyhow!( + "Updating TEE proof for a non-existent batch number is not allowed" + )); + return Err(err); + } + + Ok(()) } - pub async fn insert_tee_proof_generation_job(&mut self, block_number: L1BatchNumber) { + pub async fn insert_tee_proof_generation_job( + &mut self, + block_number: L1BatchNumber, + ) -> DalResult<()> { + let block_number = i64::from(block_number.0); sqlx::query!( r#" INSERT INTO @@ -122,18 +141,24 @@ impl TeeProofGenerationDal<'_, '_> { ($1, 'ready_to_be_proven', NOW(), NOW()) ON CONFLICT (l1_batch_number) DO NOTHING "#, - i64::from(block_number.0), + block_number, ) - .execute(self.storage.conn()) - .await - .unwrap(); + .instrument("create_tee_proof_generation_details") + .with_arg("l1_batch_number", &block_number) + .report_latency() + .execute(self.storage) + .await?; + + Ok(()) } pub async fn mark_proof_generation_job_as_skipped( &mut self, block_number: L1BatchNumber, - ) -> Result<(), SqlxError> { - sqlx::query!( + ) -> DalResult<()> { + let status = TeeProofGenerationJobStatus::Skipped.to_string(); + let l1_batch_number = i64::from(block_number.0); + let query = sqlx::query!( r#" UPDATE tee_proof_generation_details SET @@ -142,18 +167,28 @@ impl TeeProofGenerationDal<'_, '_> { WHERE l1_batch_number = $2 "#, - TeeProofGenerationJobStatus::Skipped.to_string(), - i64::from(block_number.0) - ) - .execute(self.storage.conn()) - .await? 
- .rows_affected() - .eq(&1) - .then_some(()) - .ok_or(sqlx::Error::RowNotFound) + status, + l1_batch_number + ); + let instrumentation = Instrumented::new("mark_proof_generation_job_as_skipped") + .with_arg("status", &status) + .with_arg("l1_batch_number", &l1_batch_number); + let result = instrumentation + .clone() + .with(query) + .execute(self.storage) + .await?; + if result.rows_affected() == 0 { + let err = instrumentation.constraint_error(anyhow::anyhow!( + "Cannot mark TEE proof as skipped for a batch number that does not exist" + )); + return Err(err); + } + + Ok(()) } - pub async fn get_oldest_unpicked_batch(&mut self) -> Option { + pub async fn get_oldest_unpicked_batch(&mut self) -> DalResult> { let result: Option = sqlx::query!( r#" SELECT @@ -175,15 +210,11 @@ impl TeeProofGenerationDal<'_, '_> { .unwrap() .map(|row| L1BatchNumber(row.l1_batch_number as u32)); - result + Ok(result) } - pub async fn save_attestation( - &mut self, - pubkey: &[u8], - attestation: &[u8], - ) -> Result<(), SqlxError> { - sqlx::query!( + pub async fn save_attestation(&mut self, pubkey: &[u8], attestation: &[u8]) -> DalResult<()> { + let query = sqlx::query!( r#" INSERT INTO tee_attestations (pubkey, attestation) @@ -193,12 +224,22 @@ impl TeeProofGenerationDal<'_, '_> { "#, pubkey, attestation - ) - .execute(self.storage.conn()) - .await? 
- .rows_affected() - .eq(&1) - .then_some(()) - .ok_or(sqlx::Error::RowNotFound) + ); + let instrumentation = Instrumented::new("save_attestation") + .with_arg("pubkey", &pubkey) + .with_arg("attestation", &attestation); + let result = instrumentation + .clone() + .with(query) + .execute(self.storage) + .await?; + if result.rows_affected() == 0 { + let err = instrumentation.constraint_error(anyhow::anyhow!( + "Unable to insert TEE attestation for the provided pubkey" + )); + return Err(err); + } + + Ok(()) + } } diff --git a/core/node/house_keeper/src/prover/queue_reporter/fri_prover_queue_reporter.rs b/core/node/house_keeper/src/prover/queue_reporter/fri_prover_queue_reporter.rs index 1ae03c74b45e..04d823252aff 100644 --- a/core/node/house_keeper/src/prover/queue_reporter/fri_prover_queue_reporter.rs +++ b/core/node/house_keeper/src/prover/queue_reporter/fri_prover_queue_reporter.rs @@ -98,7 +98,7 @@ impl PeriodicJob for FriProverQueueReporter { let oldest_unpicked_batch = match db_conn .proof_generation_dal() .get_oldest_unpicked_batch() - .await + .await? { Some(l1_batch_number) => l1_batch_number.0 as u64, // if there is no unpicked batch in database, we use sealed batch number as a result @@ -119,7 +119,7 @@ impl PeriodicJob for FriProverQueueReporter { if let Some(l1_batch_number) = db_conn .proof_generation_dal() .get_oldest_not_generated_batch() - .await + .await?
{ FRI_PROVER_METRICS .oldest_not_generated_batch diff --git a/core/node/proof_data_handler/src/errors.rs b/core/node/proof_data_handler/src/errors.rs index 91405692f03a..f170b3b53e7c 100644 --- a/core/node/proof_data_handler/src/errors.rs +++ b/core/node/proof_data_handler/src/errors.rs @@ -2,12 +2,12 @@ use axum::{ http::StatusCode, response::{IntoResponse, Response}, }; -use zksync_dal::SqlxError; +use zksync_dal::DalError; use zksync_object_store::ObjectStoreError; pub(crate) enum RequestProcessorError { ObjectStore(ObjectStoreError), - Sqlx(SqlxError), + Dal(DalError), } impl IntoResponse for RequestProcessorError { @@ -20,10 +20,10 @@ impl IntoResponse for RequestProcessorError { "Failed fetching/saving from GCS".to_owned(), ) } - RequestProcessorError::Sqlx(err) => { + RequestProcessorError::Dal(err) => { tracing::error!("Sqlx error: {:?}", err); - match err { - SqlxError::RowNotFound => { + match err.inner() { + zksync_dal::SqlxError::RowNotFound => { (StatusCode::NOT_FOUND, "Non existing L1 batch".to_owned()) } _ => ( diff --git a/core/node/proof_data_handler/src/request_processor.rs b/core/node/proof_data_handler/src/request_processor.rs index 2aec391ba342..170b27bb971f 100644 --- a/core/node/proof_data_handler/src/request_processor.rs +++ b/core/node/proof_data_handler/src/request_processor.rs @@ -53,7 +53,8 @@ impl RequestProcessor { .unwrap() .proof_generation_dal() .get_next_block_to_be_proven(self.config.proof_generation_timeout()) - .await; + .await + .map_err(RequestProcessorError::Dal)?; let l1_batch_number = match l1_batch_number_result { Some(number) => number, @@ -215,7 +216,7 @@ impl RequestProcessor { .proof_generation_dal() .save_proof_artifacts_metadata(l1_batch_number, &blob_url) .await - .map_err(RequestProcessorError::Sqlx)?; + .map_err(RequestProcessorError::Dal)?; } SubmitProofRequest::SkippedProofGeneration => { self.pool @@ -225,7 +226,7 @@ impl RequestProcessor { .proof_generation_dal() 
.mark_proof_generation_job_as_skipped(l1_batch_number) .await - .map_err(RequestProcessorError::Sqlx)?; + .map_err(RequestProcessorError::Dal)?; } } diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index 67e36feb9982..f46fc894bfaf 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use axum::{extract::Path, Json}; use zksync_config::configs::ProofDataHandlerConfig; -use zksync_dal::{tee_proof_generation_dal::TeeType, ConnectionPool, Core, CoreDal, SqlxError}; +use zksync_dal::{tee_proof_generation_dal::TeeType, ConnectionPool, Core, CoreDal}; use zksync_object_store::ObjectStore; use zksync_prover_interface::api::{ GenericProofGenerationDataResponse, RegisterTeeAttestationRequest, @@ -46,12 +46,13 @@ impl TeeRequestProcessor { .pool .connection() .await - .map_err(|_| RequestProcessorError::Sqlx(SqlxError::PoolClosed))?; + .map_err(RequestProcessorError::Dal)?; let l1_batch_number_result = connection .tee_proof_generation_dal() .get_next_block_to_be_proven(self.config.proof_generation_timeout()) - .await; + .await + .map_err(RequestProcessorError::Dal)?; let l1_batch_number = match l1_batch_number_result { Some(number) => number, None => return Ok(Json(TeeProofGenerationDataResponse::Success(None))), @@ -78,7 +79,7 @@ impl TeeRequestProcessor { .pool .connection() .await - .map_err(|_| RequestProcessorError::Sqlx(SqlxError::PoolClosed))?; + .map_err(RequestProcessorError::Dal)?; let mut dal = connection.tee_proof_generation_dal(); match payload { @@ -96,7 +97,7 @@ impl TeeRequestProcessor { TeeType::Sgx, ) .await - .map_err(RequestProcessorError::Sqlx)?; + .map_err(RequestProcessorError::Dal)?; } SubmitTeeProofRequest::SkippedProofGeneration => { tracing::info!( @@ -105,7 +106,7 @@ impl TeeRequestProcessor { ); dal.mark_proof_generation_job_as_skipped(l1_batch_number) 
.await - .map_err(RequestProcessorError::Sqlx)?; + .map_err(RequestProcessorError::Dal)?; } } @@ -122,12 +123,12 @@ impl TeeRequestProcessor { .pool .connection() .await - .map_err(|_| RequestProcessorError::Sqlx(SqlxError::PoolClosed))?; + .map_err(RequestProcessorError::Dal)?; let mut dal = connection.tee_proof_generation_dal(); dal.save_attestation(&payload.pubkey, &payload.attestation) .await - .map_err(RequestProcessorError::Sqlx)?; + .map_err(RequestProcessorError::Dal)?; Ok(Json(RegisterTeeAttestationResponse::Success)) } diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 8e6dc85bc2ab..7ee361989a74 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -181,7 +181,8 @@ async fn submit_tee_proof() { let oldest_batch_number = proof_db_conn .tee_proof_generation_dal() .get_oldest_unpicked_batch() - .await; + .await + .unwrap(); assert!(oldest_batch_number.is_none()); } @@ -199,7 +200,7 @@ async fn mock_tee_batch_status( // there should not be any batches awaiting proof in the db yet - let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await; + let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await.unwrap(); assert!(oldest_batch_number.is_none()); // mock SQL table with relevant information about the status of the TEE verifier input @@ -220,11 +221,16 @@ async fn mock_tee_batch_status( proof_dal .insert_tee_proof_generation_job(batch_number) - .await; + .await + .expect("Failed to insert tee_proof_generation_job"); // now, there should be one batch in the db awaiting proof - let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await.unwrap(); + let oldest_batch_number = proof_dal + .get_oldest_unpicked_batch() + .await + .unwrap() + .unwrap(); assert_eq!(oldest_batch_number, batch_number); } diff --git a/core/node/tee_verifier_input_producer/src/lib.rs b/core/node/tee_verifier_input_producer/src/lib.rs index 
aab0e3022e16..efa3c9e00b11 100644 --- a/core/node/tee_verifier_input_producer/src/lib.rs +++ b/core/node/tee_verifier_input_producer/src/lib.rs @@ -262,7 +262,7 @@ impl JobProcessor for TeeVerifierInputProducer { transaction .tee_proof_generation_dal() .insert_tee_proof_generation_job(job_id) - .await; + .await?; transaction .commit() .await From c48b07d84a6c42b526e60b7bbd6451c4d2555c24 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Tue, 11 Jun 2024 09:54:00 +0200 Subject: [PATCH 40/42] `zk fmt` fixes --- core/lib/dal/src/proof_generation_dal.rs | 4 ++-- core/lib/dal/src/tee_proof_generation_dal.rs | 6 ++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/core/lib/dal/src/proof_generation_dal.rs b/core/lib/dal/src/proof_generation_dal.rs index 3f24d4e0831a..040b4246604f 100644 --- a/core/lib/dal/src/proof_generation_dal.rs +++ b/core/lib/dal/src/proof_generation_dal.rs @@ -2,9 +2,9 @@ use std::time::Duration; use strum::{Display, EnumString}; -use zksync_db_connection::instrument::Instrumented; use zksync_db_connection::{ - connection::Connection, error::DalResult, utils::pg_interval_from_duration, + connection::Connection, error::DalResult, instrument::Instrumented, + utils::pg_interval_from_duration, }; use zksync_types::L1BatchNumber; diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 364b96123d4c..f827171a244c 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -1,9 +1,11 @@ use std::time::Duration; use strum::{Display, EnumString}; -use zksync_db_connection::instrument::{InstrumentExt, Instrumented}; use zksync_db_connection::{ - connection::Connection, error::DalResult, utils::pg_interval_from_duration, + connection::Connection, + error::DalResult, + instrument::{InstrumentExt, Instrumented}, + utils::pg_interval_from_duration, }; use zksync_types::L1BatchNumber; From 6c1c44861357bb2e4101bba336c9a3a382ea67f6 Mon Sep 17 
00:00:00 2001 From: Patrick Beza Date: Tue, 11 Jun 2024 12:32:02 +0200 Subject: [PATCH 41/42] Add missing config option to /etc/env/base/proof_data_handler.toml --- etc/env/base/proof_data_handler.toml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/etc/env/base/proof_data_handler.toml b/etc/env/base/proof_data_handler.toml index 3ea1ee03aa61..7a1999a03c31 100644 --- a/etc/env/base/proof_data_handler.toml +++ b/etc/env/base/proof_data_handler.toml @@ -1,3 +1,4 @@ [proof_data_handler] -http_port=3320 -proof_generation_timeout_in_secs=18000 +http_port = 3320 +proof_generation_timeout_in_secs = 18000 +tee_support = true From eb145f4ec07c9404d97321d4e304e3da45e5d886 Mon Sep 17 00:00:00 2001 From: Patrick Beza Date: Tue, 11 Jun 2024 14:45:49 +0200 Subject: [PATCH 42/42] Remove 'skip proof generation' feature from the TEE prover --- core/lib/dal/src/tee_proof_generation_dal.rs | 40 +---------------- core/lib/prover_interface/src/api.rs | 10 ++--- .../tests/job_serialization.rs | 10 ++--- .../src/tee_request_processor.rs | 43 +++++++------------ core/node/proof_data_handler/src/tests.rs | 26 +++++------ 5 files changed, 36 insertions(+), 93 deletions(-) diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index f827171a244c..d5625935fa1b 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -154,42 +154,6 @@ impl TeeProofGenerationDal<'_, '_> { Ok(()) } - pub async fn mark_proof_generation_job_as_skipped( - &mut self, - block_number: L1BatchNumber, - ) -> DalResult<()> { - let status = TeeProofGenerationJobStatus::Skipped.to_string(); - let l1_batch_number = i64::from(block_number.0); - let query = sqlx::query!( - r#" - UPDATE tee_proof_generation_details - SET - status = $1, - updated_at = NOW() - WHERE - l1_batch_number = $2 - "#, - status, - l1_batch_number - ); - let instrumentation = 
Instrumented::new("mark_proof_generation_job_as_skipped") - .with_arg("status", &status) - .with_arg("l1_batch_number", &l1_batch_number); - let result = instrumentation - .clone() - .with(query) - .execute(self.storage) - .await?; - if result.rows_affected() == 0 { - let err = instrumentation.constraint_error(anyhow::anyhow!( - "Cannot mark TEE proof as skipped for a batch number that does not exist" - )); - return Err(err); - } - - Ok(()) - } - pub async fn get_oldest_unpicked_batch(&mut self) -> DalResult> { let result: Option = sqlx::query!( r#" @@ -227,7 +191,7 @@ impl TeeProofGenerationDal<'_, '_> { pubkey, attestation ); - let instrumentation = Instrumented::new("mark_proof_generation_job_as_skipped") + let instrumentation = Instrumented::new("save_attestation") .with_arg("pubkey", &pubkey) .with_arg("attestation", &attestation); let result = instrumentation @@ -237,7 +201,7 @@ impl TeeProofGenerationDal<'_, '_> { .await?; if result.rows_affected() == 0 { let err = instrumentation.constraint_error(anyhow::anyhow!( - "Unable to insert TEE attestation for a non-existent batch number" + "Unable to insert TEE attestation: given pubkey already has an attestation assigned" )); return Err(err); } diff --git a/core/lib/prover_interface/src/api.rs b/core/lib/prover_interface/src/api.rs index f66b59c099c7..fb96c62d38c7 100644 --- a/core/lib/prover_interface/src/api.rs +++ b/core/lib/prover_interface/src/api.rs @@ -48,15 +48,15 @@ pub struct ProofGenerationDataRequest {} pub type TeeProofGenerationDataRequest = ProofGenerationDataRequest; -#[derive(Debug, PartialEq, Serialize, Deserialize)] -pub enum GenericSubmitProofRequest { - Proof(Box), +#[derive(Debug, Serialize, Deserialize)] +pub enum SubmitProofRequest { + Proof(Box), // The proof generation was skipped due to sampling SkippedProofGeneration, } -pub type SubmitProofRequest = GenericSubmitProofRequest; -pub type SubmitTeeProofRequest = GenericSubmitProofRequest; +#[derive(Debug, PartialEq, Serialize, 
Deserialize)] +pub struct SubmitTeeProofRequest(pub Box); #[derive(Debug, PartialEq, Serialize, Deserialize)] pub struct RegisterTeeAttestationRequest { diff --git a/core/lib/prover_interface/tests/job_serialization.rs b/core/lib/prover_interface/tests/job_serialization.rs index ec278dddbe44..60a80f91ed8d 100644 --- a/core/lib/prover_interface/tests/job_serialization.rs +++ b/core/lib/prover_interface/tests/job_serialization.rs @@ -165,14 +165,12 @@ fn test_proof_request_serialization() { #[test] fn test_tee_proof_request_serialization() { let tee_proof_str = r#"{ - "Proof": { - "signature": [ 0, 1, 2, 3, 4 ], - "pubkey": [ 5, 6, 7, 8, 9 ], - "proof": [ 10, 11, 12, 13, 14 ] - } + "signature": [ 0, 1, 2, 3, 4 ], + "pubkey": [ 5, 6, 7, 8, 9 ], + "proof": [ 10, 11, 12, 13, 14 ] }"#; let tee_proof_result = serde_json::from_str::(tee_proof_str).unwrap(); - let tee_proof_expected = SubmitTeeProofRequest::Proof(Box::new(L1BatchTeeProofForL1 { + let tee_proof_expected = SubmitTeeProofRequest(Box::new(L1BatchTeeProofForL1 { signature: vec![0, 1, 2, 3, 4], pubkey: vec![5, 6, 7, 8, 9], proof: vec![10, 11, 12, 13, 14], diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index f46fc894bfaf..957d0ef085f1 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -72,7 +72,7 @@ impl TeeRequestProcessor { pub(crate) async fn submit_proof( &self, Path(l1_batch_number): Path, - Json(payload): Json, + Json(proof): Json, ) -> Result, RequestProcessorError> { let l1_batch_number = L1BatchNumber(l1_batch_number); let mut connection = self @@ -82,33 +82,20 @@ impl TeeRequestProcessor { .map_err(RequestProcessorError::Dal)?; let mut dal = connection.tee_proof_generation_dal(); - match payload { - SubmitTeeProofRequest::Proof(proof) => { - tracing::info!( - "Received proof {:?} for block number: {:?}", - proof, - l1_batch_number - ); 
- dal.save_proof_artifacts_metadata( - l1_batch_number, - &proof.signature, - &proof.pubkey, - &proof.proof, - TeeType::Sgx, - ) - .await - .map_err(RequestProcessorError::Dal)?; - } - SubmitTeeProofRequest::SkippedProofGeneration => { - tracing::info!( - "Received request to skip proof generation for block number: {:?}", - l1_batch_number - ); - dal.mark_proof_generation_job_as_skipped(l1_batch_number) - .await - .map_err(RequestProcessorError::Dal)?; - } - } + tracing::info!( + "Received proof {:?} for block number: {:?}", + proof, + l1_batch_number + ); + dal.save_proof_artifacts_metadata( + l1_batch_number, + &proof.0.signature, + &proof.0.pubkey, + &proof.0.proof, + TeeType::Sgx, + ) + .await + .map_err(RequestProcessorError::Dal)?; Ok(Json(SubmitProofResponse::Success)) } diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 7ee361989a74..7047bd154c9a 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -131,11 +131,9 @@ async fn submit_tee_proof() { // send a request to the /tee/submit_proofs endpoint, using a mocked TEE proof let tee_proof_request_str = r#"{ - "Proof": { - "signature": [ 0, 1, 2, 3, 4 ], - "pubkey": [ 5, 6, 7, 8, 9 ], - "proof": [ 10, 11, 12, 13, 14 ] - } + "signature": [ 0, 1, 2, 3, 4 ], + "pubkey": [ 5, 6, 7, 8, 9 ], + "proof": [ 10, 11, 12, 13, 14 ] }"#; let tee_proof_request = serde_json::from_str::(tee_proof_request_str).unwrap(); @@ -158,17 +156,13 @@ async fn submit_tee_proof() { // save the attestation for the pubkey - if let SubmitTeeProofRequest::Proof(ref proof) = tee_proof_request { - let attestation = [15, 16, 17, 18, 19]; - let mut proof_dal = db_conn_pool.connection().await.unwrap(); - proof_dal - .tee_proof_generation_dal() - .save_attestation(&proof.pubkey, &attestation) - .await - .expect("Failed to save attestation"); - } else { - panic!("Expected Proof, got {:?}", tee_proof_request); - } + let attestation = [15, 16, 
17, 18, 19]; + let mut proof_dal = db_conn_pool.connection().await.unwrap(); + proof_dal + .tee_proof_generation_dal() + .save_attestation(&tee_proof_request.0.pubkey, &attestation) + .await + .expect("Failed to save attestation"); // resend the same request; this time, it should be successful.