From e14f48b5524c296b06f45d062e085702ba19a7a3 Mon Sep 17 00:00:00 2001 From: Lech <88630083+Artemka374@users.noreply.github.com> Date: Wed, 15 May 2024 19:47:29 +0300 Subject: [PATCH 1/6] add check to sqlx --- ...e357c5956c3471a81986851738a5cf6e38a4.json} | 4 +-- ...5fc4397c90dbfdf159e5c53db66576fea783.json} | 4 +-- ...64174f39e6011fdfdc56490397ce90233055.json} | 4 +-- core/lib/dal/src/blocks_dal.rs | 29 ++++++++++++++++--- 4 files changed, 31 insertions(+), 10 deletions(-) rename core/lib/dal/.sqlx/{query-245dc5bb82cc82df38e4440a7746ca08324bc86a72e4ea85c9c7962a6c8c9e30.json => query-49f2a1dec29cdeeab2dca0c8eaace357c5956c3471a81986851738a5cf6e38a4.json} (72%) rename core/lib/dal/.sqlx/{query-012bed5d34240ed28c331c8515c381d82925556a4801f678b8786235d525d784.json => query-85fa7f46499742855851b270ab155fc4397c90dbfdf159e5c53db66576fea783.json} (72%) rename core/lib/dal/.sqlx/{query-dea22358feed1418430505767d03aa4239d3a8be71b47178b4b8fb11fe898b31.json => query-c81438eae5e2482c57c54941780864174f39e6011fdfdc56490397ce90233055.json} (75%) diff --git a/core/lib/dal/.sqlx/query-245dc5bb82cc82df38e4440a7746ca08324bc86a72e4ea85c9c7962a6c8c9e30.json b/core/lib/dal/.sqlx/query-49f2a1dec29cdeeab2dca0c8eaace357c5956c3471a81986851738a5cf6e38a4.json similarity index 72% rename from core/lib/dal/.sqlx/query-245dc5bb82cc82df38e4440a7746ca08324bc86a72e4ea85c9c7962a6c8c9e30.json rename to core/lib/dal/.sqlx/query-49f2a1dec29cdeeab2dca0c8eaace357c5956c3471a81986851738a5cf6e38a4.json index 0b9c4aa59b7a..0f998d6645aa 100644 --- a/core/lib/dal/.sqlx/query-245dc5bb82cc82df38e4440a7746ca08324bc86a72e4ea85c9c7962a6c8c9e30.json +++ b/core/lib/dal/.sqlx/query-49f2a1dec29cdeeab2dca0c8eaace357c5956c3471a81986851738a5cf6e38a4.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE l1_batches\n SET\n eth_prove_tx_id = $1,\n updated_at = NOW()\n WHERE\n number BETWEEN $2 AND $3\n ", + "query": "\n UPDATE l1_batches\n SET\n eth_prove_tx_id = $1,\n updated_at = NOW()\n WHERE\n number BETWEEN $2 AND $3\n AND\n eth_prove_tx_id IS NULL\n ", "describe": { "columns": [], "parameters": { @@ -12,5 +12,5 @@ }, "nullable": [] }, - "hash": "245dc5bb82cc82df38e4440a7746ca08324bc86a72e4ea85c9c7962a6c8c9e30" + "hash": "49f2a1dec29cdeeab2dca0c8eaace357c5956c3471a81986851738a5cf6e38a4" } diff --git a/core/lib/dal/.sqlx/query-012bed5d34240ed28c331c8515c381d82925556a4801f678b8786235d525d784.json b/core/lib/dal/.sqlx/query-85fa7f46499742855851b270ab155fc4397c90dbfdf159e5c53db66576fea783.json similarity index 72% rename from core/lib/dal/.sqlx/query-012bed5d34240ed28c331c8515c381d82925556a4801f678b8786235d525d784.json rename to core/lib/dal/.sqlx/query-85fa7f46499742855851b270ab155fc4397c90dbfdf159e5c53db66576fea783.json index fbeefdfbf956..eb7b7e25af04 100644 --- a/core/lib/dal/.sqlx/query-012bed5d34240ed28c331c8515c381d82925556a4801f678b8786235d525d784.json +++ b/core/lib/dal/.sqlx/query-85fa7f46499742855851b270ab155fc4397c90dbfdf159e5c53db66576fea783.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE l1_batches\n SET\n eth_commit_tx_id = $1,\n updated_at = NOW()\n WHERE\n number BETWEEN $2 AND $3\n ", + "query": "\n UPDATE l1_batches\n SET\n eth_commit_tx_id = $1,\n updated_at = NOW()\n WHERE\n number BETWEEN $2 AND $3\n AND\n eth_commit_tx_id IS NULL\n ", "describe": { "columns": [], "parameters": { @@ -12,5 +12,5 @@ }, "nullable": [] }, - "hash": "012bed5d34240ed28c331c8515c381d82925556a4801f678b8786235d525d784" + "hash": "85fa7f46499742855851b270ab155fc4397c90dbfdf159e5c53db66576fea783" } diff --git 
a/core/lib/dal/.sqlx/query-dea22358feed1418430505767d03aa4239d3a8be71b47178b4b8fb11fe898b31.json b/core/lib/dal/.sqlx/query-c81438eae5e2482c57c54941780864174f39e6011fdfdc56490397ce90233055.json similarity index 75% rename from core/lib/dal/.sqlx/query-dea22358feed1418430505767d03aa4239d3a8be71b47178b4b8fb11fe898b31.json rename to core/lib/dal/.sqlx/query-c81438eae5e2482c57c54941780864174f39e6011fdfdc56490397ce90233055.json index ef070554c2fd..eb09077290e3 100644 --- a/core/lib/dal/.sqlx/query-dea22358feed1418430505767d03aa4239d3a8be71b47178b4b8fb11fe898b31.json +++ b/core/lib/dal/.sqlx/query-c81438eae5e2482c57c54941780864174f39e6011fdfdc56490397ce90233055.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE l1_batches\n SET\n eth_execute_tx_id = $1,\n updated_at = NOW()\n WHERE\n number BETWEEN $2 AND $3\n ", + "query": "\n UPDATE l1_batches\n SET\n eth_execute_tx_id = $1,\n updated_at = NOW()\n WHERE\n number BETWEEN $2 AND $3\n AND eth_execute_tx_id IS NULL\n ", "describe": { "columns": [], "parameters": { @@ -12,5 +12,5 @@ }, "nullable": [] }, - "hash": "dea22358feed1418430505767d03aa4239d3a8be71b47178b4b8fb11fe898b31" + "hash": "c81438eae5e2482c57c54941780864174f39e6011fdfdc56490397ce90233055" } diff --git a/core/lib/dal/src/blocks_dal.rs b/core/lib/dal/src/blocks_dal.rs index 4e39a7bcea31..e942c8349c0c 100644 --- a/core/lib/dal/src/blocks_dal.rs +++ b/core/lib/dal/src/blocks_dal.rs @@ -402,10 +402,10 @@ impl BlocksDal<'_, '_> { number_range: ops::RangeInclusive, eth_tx_id: u32, aggregation_type: AggregatedActionType, - ) -> DalResult<()> { + ) -> anyhow::Result<()> { match aggregation_type { AggregatedActionType::Commit => { - sqlx::query!( + let result = sqlx::query!( r#" UPDATE l1_batches SET @@ -413,6 +413,7 @@ impl BlocksDal<'_, '_> { updated_at = NOW() WHERE number BETWEEN $2 AND $3 + AND eth_commit_tx_id IS NULL "#, eth_tx_id as i32, i64::from(number_range.start().0), @@ -423,9 +424,15 @@ impl BlocksDal<'_, '_> { .with_arg("eth_tx_id", ð_tx_id) .execute(self.storage) .await?; + + if result.rows_affected() == 0 { + return Err(anyhow::anyhow!( + "Update eth_commit_tx_id that is is not null is not allowed" + )); + } } AggregatedActionType::PublishProofOnchain => { - sqlx::query!( + let result = sqlx::query!( r#" UPDATE l1_batches SET @@ -433,6 +440,7 @@ impl BlocksDal<'_, '_> { updated_at = NOW() WHERE number BETWEEN $2 AND $3 + AND eth_prove_tx_id IS NULL "#, eth_tx_id as i32, i64::from(number_range.start().0), @@ -443,9 +451,15 @@ impl BlocksDal<'_, '_> { .with_arg("eth_tx_id", ð_tx_id) .execute(self.storage) .await?; + + if result.rows_affected() == 0 { + return Err(anyhow::anyhow!( + "Update eth_prove_tx_id that is is not null is not allowed" + )); + } } AggregatedActionType::Execute => { - sqlx::query!( + let result = sqlx::query!( r#" UPDATE l1_batches SET @@ -453,6 +467,7 @@ impl BlocksDal<'_, '_> { updated_at = NOW() WHERE number BETWEEN $2 AND $3 + AND eth_execute_tx_id IS NULL "#, eth_tx_id as i32, i64::from(number_range.start().0), @@ -463,6 +478,12 @@ impl BlocksDal<'_, '_> { .with_arg("eth_tx_id", ð_tx_id) .execute(self.storage) .await?; + + if result.rows_affected() == 0 { + return Err(anyhow::anyhow!( + "Update eth_execute_tx_id that is is not null is not allowed" + )); + } } } Ok(()) From 00dbdff09b09401917f5f1487258df91cd90bd93 Mon Sep 17 00:00:00 2001 From: Lech <88630083+Artemka374@users.noreply.github.com> Date: Wed, 15 May 2024 20:37:03 +0300 Subject: [PATCH 2/6] add test --- ...18daf76a5f283e4298fd12022b0c3db07319.json} | 4 +- 
...779128de288484abea33d338c3304dd66e08.json} | 4 +- core/lib/dal/src/blocks_dal.rs | 105 +++++++++++++++++- 3 files changed, 103 insertions(+), 10 deletions(-) rename core/lib/dal/.sqlx/{query-49f2a1dec29cdeeab2dca0c8eaace357c5956c3471a81986851738a5cf6e38a4.json => query-25ef41cbeb95d10e4051b822769518daf76a5f283e4298fd12022b0c3db07319.json} (72%) rename core/lib/dal/.sqlx/{query-85fa7f46499742855851b270ab155fc4397c90dbfdf159e5c53db66576fea783.json => query-f3c651a3ecd2aefabef802f32c18779128de288484abea33d338c3304dd66e08.json} (72%) diff --git a/core/lib/dal/.sqlx/query-49f2a1dec29cdeeab2dca0c8eaace357c5956c3471a81986851738a5cf6e38a4.json b/core/lib/dal/.sqlx/query-25ef41cbeb95d10e4051b822769518daf76a5f283e4298fd12022b0c3db07319.json similarity index 72% rename from core/lib/dal/.sqlx/query-49f2a1dec29cdeeab2dca0c8eaace357c5956c3471a81986851738a5cf6e38a4.json rename to core/lib/dal/.sqlx/query-25ef41cbeb95d10e4051b822769518daf76a5f283e4298fd12022b0c3db07319.json index 0f998d6645aa..079246791a98 100644 --- a/core/lib/dal/.sqlx/query-49f2a1dec29cdeeab2dca0c8eaace357c5956c3471a81986851738a5cf6e38a4.json +++ b/core/lib/dal/.sqlx/query-25ef41cbeb95d10e4051b822769518daf76a5f283e4298fd12022b0c3db07319.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE l1_batches\n SET\n eth_prove_tx_id = $1,\n updated_at = NOW()\n WHERE\n number BETWEEN $2 AND $3\n AND\n eth_prove_tx_id IS NULL\n ", + "query": "\n UPDATE l1_batches\n SET\n eth_prove_tx_id = $1,\n updated_at = NOW()\n WHERE\n number BETWEEN $2 AND $3\n AND eth_prove_tx_id IS NULL\n ", "describe": { "columns": [], "parameters": { @@ -12,5 +12,5 @@ }, "nullable": [] }, - "hash": "49f2a1dec29cdeeab2dca0c8eaace357c5956c3471a81986851738a5cf6e38a4" + "hash": "25ef41cbeb95d10e4051b822769518daf76a5f283e4298fd12022b0c3db07319" } diff --git a/core/lib/dal/.sqlx/query-85fa7f46499742855851b270ab155fc4397c90dbfdf159e5c53db66576fea783.json b/core/lib/dal/.sqlx/query-f3c651a3ecd2aefabef802f32c18779128de288484abea33d338c3304dd66e08.json similarity index 72% rename from core/lib/dal/.sqlx/query-85fa7f46499742855851b270ab155fc4397c90dbfdf159e5c53db66576fea783.json rename to core/lib/dal/.sqlx/query-f3c651a3ecd2aefabef802f32c18779128de288484abea33d338c3304dd66e08.json index eb7b7e25af04..7d5467b4459c 100644 --- a/core/lib/dal/.sqlx/query-85fa7f46499742855851b270ab155fc4397c90dbfdf159e5c53db66576fea783.json +++ b/core/lib/dal/.sqlx/query-f3c651a3ecd2aefabef802f32c18779128de288484abea33d338c3304dd66e08.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE l1_batches\n SET\n eth_commit_tx_id = $1,\n updated_at = NOW()\n WHERE\n number BETWEEN $2 AND $3\n AND\n eth_commit_tx_id IS NULL\n ", + "query": "\n UPDATE l1_batches\n SET\n eth_commit_tx_id = $1,\n updated_at = NOW()\n WHERE\n number BETWEEN $2 AND $3\n AND eth_commit_tx_id IS NULL\n ", "describe": { "columns": [], "parameters": { @@ -12,5 +12,5 @@ }, "nullable": [] }, - "hash": "85fa7f46499742855851b270ab155fc4397c90dbfdf159e5c53db66576fea783" + "hash": "f3c651a3ecd2aefabef802f32c18779128de288484abea33d338c3304dd66e08" } diff --git a/core/lib/dal/src/blocks_dal.rs b/core/lib/dal/src/blocks_dal.rs index e942c8349c0c..a4065b1ba4c5 100644 --- a/core/lib/dal/src/blocks_dal.rs +++ b/core/lib/dal/src/blocks_dal.rs @@ -2264,15 +2264,14 @@ mod tests { use super::*; use crate::{ConnectionPool, Core, CoreDal}; - #[tokio::test] - async fn loading_l1_batch_header() { - let pool = ConnectionPool::::test_pool().await; - let mut conn = pool.connection().await.unwrap(); - 
conn.protocol_versions_dal() - .save_protocol_version_with_tx(&ProtocolVersion::default()) + async fn save_mock_eth_tx(action_type: AggregatedActionType, conn: &mut Connection<'_, Core>) { + conn.eth_sender_dal() + .save_eth_tx(1, vec![], action_type, Address::default(), 1, None, None) .await .unwrap(); + } + fn mock_l1_batch_header() -> L1BatchHeader { let mut header = L1BatchHeader::new( L1BatchNumber(1), 100, @@ -2295,6 +2294,100 @@ mod tests { header.l2_to_l1_messages.push(vec![22; 22]); header.l2_to_l1_messages.push(vec![33; 33]); + header + } + + #[tokio::test] + async fn set_tx_id_works_correctly() { + let pool = ConnectionPool::::test_pool().await; + let mut conn = pool.connection().await.unwrap(); + + conn.protocol_versions_dal() + .save_protocol_version_with_tx(&ProtocolVersion::default()) + .await + .unwrap(); + + conn.blocks_dal() + .insert_mock_l1_batch(&mock_l1_batch_header()) + .await + .unwrap(); + + save_mock_eth_tx(AggregatedActionType::Commit, &mut conn).await; + save_mock_eth_tx(AggregatedActionType::PublishProofOnchain, &mut conn).await; + save_mock_eth_tx(AggregatedActionType::Execute, &mut conn).await; + + assert!(conn + .blocks_dal() + .set_eth_tx_id( + L1BatchNumber(1)..=L1BatchNumber(1), + 1, + AggregatedActionType::Commit, + ) + .await + .is_ok()); + + assert!(conn + .blocks_dal() + .set_eth_tx_id( + L1BatchNumber(1)..=L1BatchNumber(1), + 2, + AggregatedActionType::Commit, + ) + .await + .is_err()); + + assert!(conn + .blocks_dal() + .set_eth_tx_id( + L1BatchNumber(1)..=L1BatchNumber(1), + 1, + AggregatedActionType::PublishProofOnchain, + ) + .await + .is_ok()); + + assert!(conn + .blocks_dal() + .set_eth_tx_id( + L1BatchNumber(1)..=L1BatchNumber(1), + 2, + AggregatedActionType::PublishProofOnchain, + ) + .await + .is_err()); + + assert!(conn + .blocks_dal() + .set_eth_tx_id( + L1BatchNumber(1)..=L1BatchNumber(1), + 1, + AggregatedActionType::Execute, + ) + .await + .is_ok()); + + assert!(conn + .blocks_dal() + .set_eth_tx_id( + L1BatchNumber(1)..=L1BatchNumber(1), + 2, + AggregatedActionType::Execute, + ) + .await + .is_err()); + } + + #[tokio::test] + async fn loading_l1_batch_header() { + let pool = ConnectionPool::::test_pool().await; + let mut conn = pool.connection().await.unwrap(); + conn.protocol_versions_dal() + .save_protocol_version_with_tx(&ProtocolVersion::default()) + .await + .unwrap(); + + let header = mock_l1_batch_header(); + conn.blocks_dal() .insert_mock_l1_batch(&header) .await From 3da56d78289b91df7f2c3684703fff7555bbb9bc Mon Sep 17 00:00:00 2001 From: Lech <88630083+Artemka374@users.noreply.github.com> Date: Thu, 16 May 2024 11:30:14 +0300 Subject: [PATCH 3/6] use dal error --- core/lib/dal/src/blocks_dal.rs | 41 +++++++++++++++++++++++++--------- 1 file changed, 31 insertions(+), 10 deletions(-) diff --git a/core/lib/dal/src/blocks_dal.rs b/core/lib/dal/src/blocks_dal.rs index a4065b1ba4c5..cd0e54be9ea0 100644 --- a/core/lib/dal/src/blocks_dal.rs +++ b/core/lib/dal/src/blocks_dal.rs @@ -402,7 +402,7 @@ impl BlocksDal<'_, '_> { number_range: ops::RangeInclusive, eth_tx_id: u32, aggregation_type: AggregatedActionType, - ) -> anyhow::Result<()> { + ) -> DalResult<()> { match aggregation_type { AggregatedActionType::Commit => { let result = sqlx::query!( @@ -426,9 +426,16 @@ impl BlocksDal<'_, '_> { .await?; if result.rows_affected() == 0 { - return Err(anyhow::anyhow!( - "Update eth_commit_tx_id that is is not null is not allowed" - )); + let err = Instrumented::new("set_eth_tx_id#commit") + .with_arg("number_range", &number_range) + 
.with_arg("eth_tx_id", ð_tx_id) + .arg_error( + "eth_tx_id", + anyhow::anyhow!( + "Update eth_commit_tx_id that is is not null is not allowed" + ), + ); + return Err(err); } } AggregatedActionType::PublishProofOnchain => { @@ -453,9 +460,16 @@ impl BlocksDal<'_, '_> { .await?; if result.rows_affected() == 0 { - return Err(anyhow::anyhow!( - "Update eth_prove_tx_id that is is not null is not allowed" - )); + let err = Instrumented::new("set_eth_tx_id#prove") + .with_arg("number_range", &number_range) + .with_arg("eth_tx_id", ð_tx_id) + .arg_error( + "eth_tx_id", + anyhow::anyhow!( + "Update eth_prove_tx_id that is is not null is not allowed" + ), + ); + return Err(err); } } AggregatedActionType::Execute => { @@ -480,9 +494,16 @@ impl BlocksDal<'_, '_> { .await?; if result.rows_affected() == 0 { - return Err(anyhow::anyhow!( - "Update eth_execute_tx_id that is is not null is not allowed" - )); + let err = Instrumented::new("set_eth_tx_id#execute") + .with_arg("number_range", &number_range) + .with_arg("eth_tx_id", ð_tx_id) + .arg_error( + "eth_tx_id", + anyhow::anyhow!( + "Update eth_execute_tx_id that is is not null is not allowed" + ), + ); + return Err(err); } } } From c019e11cf91931d4464a811d0a48fe42bb292a25 Mon Sep 17 00:00:00 2001 From: Lech <88630083+Artemka374@users.noreply.github.com> Date: Fri, 17 May 2024 11:44:31 +0300 Subject: [PATCH 4/6] address comments --- core/lib/dal/src/blocks_dal.rs | 91 +++++++++++------------- core/lib/db_connection/src/instrument.rs | 6 +- 2 files changed, 46 insertions(+), 51 deletions(-) diff --git a/core/lib/dal/src/blocks_dal.rs b/core/lib/dal/src/blocks_dal.rs index cd0e54be9ea0..7dedff068b3f 100644 --- a/core/lib/dal/src/blocks_dal.rs +++ b/core/lib/dal/src/blocks_dal.rs @@ -405,7 +405,11 @@ impl BlocksDal<'_, '_> { ) -> DalResult<()> { match aggregation_type { AggregatedActionType::Commit => { - let result = sqlx::query!( + let instrumentation = Instrumented::new("set_eth_tx_id#commit") + .with_arg("number_range", &number_range) + .with_arg("eth_tx_id", ð_tx_id); + + let query = sqlx::query!( r#" UPDATE l1_batches SET @@ -418,28 +422,25 @@ impl BlocksDal<'_, '_> { eth_tx_id as i32, i64::from(number_range.start().0), i64::from(number_range.end().0) - ) - .instrument("set_eth_tx_id#commit") - .with_arg("number_range", &number_range) - .with_arg("eth_tx_id", ð_tx_id) - .execute(self.storage) - .await?; + ); + let result = instrumentation + .clone() + .with(query) + .execute(self.storage) + .await?; if result.rows_affected() == 0 { - let err = Instrumented::new("set_eth_tx_id#commit") - .with_arg("number_range", &number_range) - .with_arg("eth_tx_id", ð_tx_id) - .arg_error( - "eth_tx_id", - anyhow::anyhow!( - "Update eth_commit_tx_id that is is not null is not allowed" - ), - ); + let err = instrumentation.constraint_error(anyhow::anyhow!( + "Update eth_commit_tx_id that is is not null is not allowed" + )); return Err(err); } } AggregatedActionType::PublishProofOnchain => { - let result = sqlx::query!( + let instrumentation = Instrumented::new("set_eth_tx_id#prove") + .with_arg("number_range", &number_range) + .with_arg("eth_tx_id", ð_tx_id); + let query = sqlx::query!( r#" UPDATE l1_batches SET @@ -452,28 +453,27 @@ impl BlocksDal<'_, '_> { eth_tx_id as i32, i64::from(number_range.start().0), i64::from(number_range.end().0) - ) - .instrument("set_eth_tx_id#prove") - .with_arg("number_range", &number_range) - .with_arg("eth_tx_id", ð_tx_id) - .execute(self.storage) - .await?; + ); + + let result = instrumentation + .clone() + .with(query) + 
.execute(self.storage) + .await?; if result.rows_affected() == 0 { - let err = Instrumented::new("set_eth_tx_id#prove") - .with_arg("number_range", &number_range) - .with_arg("eth_tx_id", ð_tx_id) - .arg_error( - "eth_tx_id", - anyhow::anyhow!( - "Update eth_prove_tx_id that is is not null is not allowed" - ), - ); + let err = instrumentation.constraint_error(anyhow::anyhow!( + "Update eth_prove_tx_id that is is not null is not allowed" + )); return Err(err); } } AggregatedActionType::Execute => { - let result = sqlx::query!( + let instrumentation = Instrumented::new("set_eth_tx_id#execute") + .with_arg("number_range", &number_range) + .with_arg("eth_tx_id", ð_tx_id); + + let query = sqlx::query!( r#" UPDATE l1_batches SET @@ -486,23 +486,18 @@ impl BlocksDal<'_, '_> { eth_tx_id as i32, i64::from(number_range.start().0), i64::from(number_range.end().0) - ) - .instrument("set_eth_tx_id#execute") - .with_arg("number_range", &number_range) - .with_arg("eth_tx_id", ð_tx_id) - .execute(self.storage) - .await?; + ); + + let result = instrumentation + .clone() + .with(query) + .execute(self.storage) + .await?; if result.rows_affected() == 0 { - let err = Instrumented::new("set_eth_tx_id#execute") - .with_arg("number_range", &number_range) - .with_arg("eth_tx_id", ð_tx_id) - .arg_error( - "eth_tx_id", - anyhow::anyhow!( - "Update eth_execute_tx_id that is is not null is not allowed" - ), - ); + let err = instrumentation.constraint_error(anyhow::anyhow!( + "Update eth_execute_tx_id that is is not null is not allowed" + )); return Err(err); } } diff --git a/core/lib/db_connection/src/instrument.rs b/core/lib/db_connection/src/instrument.rs index c61fad25b1ed..e0728ce22b85 100644 --- a/core/lib/db_connection/src/instrument.rs +++ b/core/lib/db_connection/src/instrument.rs @@ -31,7 +31,7 @@ use crate::{ type ThreadSafeDebug<'a> = dyn fmt::Debug + Send + Sync + 'a; /// Logged arguments for an SQL query. -#[derive(Debug, Default)] +#[derive(Debug, Clone, Default)] struct QueryArgs<'a> { inner: Vec<(&'static str, &'a ThreadSafeDebug<'a>)>, } @@ -180,7 +180,7 @@ impl ActiveCopy<'_> { } } -#[derive(Debug)] +#[derive(Debug, Clone)] struct InstrumentedData<'a> { name: &'static str, location: &'static Location<'static>, @@ -278,7 +278,7 @@ impl<'a> InstrumentedData<'a> { /// included in the case of a slow query, plus the error info. /// - Slow and erroneous queries are also reported using metrics (`dal.request.slow` and `dal.request.error`, /// respectively). The query name is included as a metric label; args are not included for obvious reasons. 
-#[derive(Debug)] +#[derive(Debug, Clone)] pub struct Instrumented<'a, Q> { query: Q, data: InstrumentedData<'a>, From d88fcb10f1a3dcfa71a8700ea07fe9757792a90c Mon Sep 17 00:00:00 2001 From: Lech <88630083+Artemka374@users.noreply.github.com> Date: Mon, 20 May 2024 13:46:18 +0300 Subject: [PATCH 5/6] fix tests for eth sender --- core/node/eth_sender/src/tests.rs | 160 ++++++++++++++++++++++++++++-- 1 file changed, 150 insertions(+), 10 deletions(-) diff --git a/core/node/eth_sender/src/tests.rs b/core/node/eth_sender/src/tests.rs index fd2a295a04b8..3471617ea23c 100644 --- a/core/node/eth_sender/src/tests.rs +++ b/core/node/eth_sender/src/tests.rs @@ -7,6 +7,7 @@ use zksync_config::{ configs::eth_sender::{ProofSendingMode, PubdataSendingMode, SenderConfig}, ContractsConfig, EthConfig, GasAdjusterConfig, }; +use zksync_contracts::BaseSystemContractsHashes; use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; use zksync_eth_client::{clients::MockEthereum, EthInterface}; use zksync_l1_contract_interface::i_executor::methods::{ExecuteBatches, ProveBatches}; @@ -20,6 +21,7 @@ use zksync_types::{ }, ethabi::Token, helpers::unix_timestamp_ms, + l2_to_l1_log::{L2ToL1Log, UserL2ToL1Log}, pubdata_da::PubdataDA, web3::contract::Error, Address, L1BatchNumber, L1BlockNumber, ProtocolVersion, ProtocolVersionId, H256, @@ -43,11 +45,47 @@ static DUMMY_OPERATION: Lazy = Lazy::new(|| { }) }); +fn get_dummy_operation(number: u32) -> AggregatedOperation { + AggregatedOperation::Execute(ExecuteBatches { + l1_batches: vec![L1BatchWithMetadata { + header: create_l1_batch(number), + metadata: default_l1_batch_metadata(), + raw_published_factory_deps: Vec::new(), + }], + }) +} + const COMMITMENT_MODES: [L1BatchCommitmentMode; 2] = [ L1BatchCommitmentMode::Rollup, L1BatchCommitmentMode::Validium, ]; +fn mock_l1_batch_header(number: u32) -> L1BatchHeader { + let mut header = L1BatchHeader::new( + L1BatchNumber(number), + 100, + BaseSystemContractsHashes { + bootloader: H256::repeat_byte(1), + default_aa: H256::repeat_byte(42), + }, + ProtocolVersionId::latest(), + ); + header.l1_tx_count = 3; + header.l2_tx_count = 5; + header.l2_to_l1_logs.push(UserL2ToL1Log(L2ToL1Log { + shard_id: 0, + is_service: false, + tx_number_in_block: 2, + sender: Address::repeat_byte(2), + key: H256::repeat_byte(3), + value: H256::zero(), + })); + header.l2_to_l1_messages.push(vec![22; 22]); + header.l2_to_l1_messages.push(vec![33; 33]); + + header +} + fn mock_multicall_response() -> Token { Token::Array(vec![ Token::Tuple(vec![Token::Bool(true), Token::Bytes(vec![1u8; 32])]), @@ -220,7 +258,7 @@ async fn confirm_many( ) -> anyhow::Result<()> { let connection_pool = ConnectionPool::::test_pool().await; let mut tester = EthSenderTester::new( - connection_pool, + connection_pool.clone(), vec![10; 100], false, aggregator_operate_4844_mode, @@ -230,12 +268,31 @@ async fn confirm_many( let mut hashes = vec![]; - for _ in 0..5 { + connection_pool + .clone() + .connection() + .await + .unwrap() + .protocol_versions_dal() + .save_protocol_version_with_tx(&ProtocolVersion::default()) + .await + .unwrap(); + + for number in 0..5 { + connection_pool + .clone() + .connection() + .await + .unwrap() + .blocks_dal() + .insert_mock_l1_batch(&mock_l1_batch_header(number + 1)) + .await + .unwrap(); let tx = tester .aggregator .save_eth_tx( &mut tester.conn.connection().await.unwrap(), - &DUMMY_OPERATION, + &get_dummy_operation(number + 1), false, ) .await?; @@ -302,8 +359,9 @@ async fn confirm_many( #[test_casing(2, COMMITMENT_MODES)] 
#[tokio::test] async fn resend_each_block(commitment_mode: L1BatchCommitmentMode) -> anyhow::Result<()> { + let connection_pool = ConnectionPool::::test_pool().await; let mut tester = EthSenderTester::new( - ConnectionPool::::test_pool().await, + connection_pool.clone(), vec![7, 6, 5, 5, 5, 2, 1], false, false, @@ -315,12 +373,32 @@ async fn resend_each_block(commitment_mode: L1BatchCommitmentMode) -> anyhow::Re tester.gateway.advance_block_number(3); tester.gas_adjuster.keep_updated().await?; + connection_pool + .clone() + .connection() + .await + .unwrap() + .protocol_versions_dal() + .save_protocol_version_with_tx(&ProtocolVersion::default()) + .await + .unwrap(); + + connection_pool + .clone() + .connection() + .await + .unwrap() + .blocks_dal() + .insert_mock_l1_batch(&mock_l1_batch_header(1)) + .await + .unwrap(); + let block = L1BlockNumber(tester.gateway.block_number().await?.as_u32()); let tx = tester .aggregator .save_eth_tx( &mut tester.conn.connection().await.unwrap(), - &DUMMY_OPERATION, + &get_dummy_operation(1), false, ) .await?; @@ -412,8 +490,9 @@ async fn resend_each_block(commitment_mode: L1BatchCommitmentMode) -> anyhow::Re #[test_casing(2, COMMITMENT_MODES)] #[tokio::test] async fn dont_resend_already_mined(commitment_mode: L1BatchCommitmentMode) -> anyhow::Result<()> { + let mut connection_pool = ConnectionPool::::test_pool().await; let mut tester = EthSenderTester::new( - ConnectionPool::::test_pool().await, + connection_pool.clone(), vec![100; 100], false, false, @@ -421,6 +500,26 @@ async fn dont_resend_already_mined(commitment_mode: L1BatchCommitmentMode) -> an ) .await; + connection_pool + .clone() + .connection() + .await + .unwrap() + .protocol_versions_dal() + .save_protocol_version_with_tx(&ProtocolVersion::default()) + .await + .unwrap(); + + connection_pool + .clone() + .connection() + .await + .unwrap() + .blocks_dal() + .insert_mock_l1_batch(&mock_l1_batch_header(1)) + .await + .unwrap(); + let tx = tester .aggregator .save_eth_tx( @@ -491,8 +590,9 @@ async fn dont_resend_already_mined(commitment_mode: L1BatchCommitmentMode) -> an #[test_casing(2, COMMITMENT_MODES)] #[tokio::test] async fn three_scenarios(commitment_mode: L1BatchCommitmentMode) -> anyhow::Result<()> { + let connection_pool = ConnectionPool::::test_pool().await; let mut tester = EthSenderTester::new( - ConnectionPool::::test_pool().await, + connection_pool.clone(), vec![100; 100], false, false, @@ -502,12 +602,31 @@ async fn three_scenarios(commitment_mode: L1BatchCommitmentMode) -> anyhow::Resu let mut hashes = vec![]; - for _ in 0..3 { + connection_pool + .clone() + .connection() + .await + .unwrap() + .protocol_versions_dal() + .save_protocol_version_with_tx(&ProtocolVersion::default()) + .await + .unwrap(); + + for number in 0..3 { + connection_pool + .clone() + .connection() + .await + .unwrap() + .blocks_dal() + .insert_mock_l1_batch(&mock_l1_batch_header(number + 1)) + .await + .unwrap(); let tx = tester .aggregator .save_eth_tx( &mut tester.conn.connection().await.unwrap(), - &DUMMY_OPERATION, + &get_dummy_operation(number + 1), false, ) .await @@ -571,8 +690,9 @@ async fn three_scenarios(commitment_mode: L1BatchCommitmentMode) -> anyhow::Resu #[test_casing(2, COMMITMENT_MODES)] #[tokio::test] async fn failed_eth_tx(commitment_mode: L1BatchCommitmentMode) { + let connection_pool = ConnectionPool::::test_pool().await; let mut tester = EthSenderTester::new( - ConnectionPool::::test_pool().await, + connection_pool.clone(), vec![100; 100], false, false, @@ -580,6 +700,26 @@ async fn 
failed_eth_tx(commitment_mode: L1BatchCommitmentMode) { ) .await; + connection_pool + .clone() + .connection() + .await + .unwrap() + .protocol_versions_dal() + .save_protocol_version_with_tx(&ProtocolVersion::default()) + .await + .unwrap(); + + connection_pool + .clone() + .connection() + .await + .unwrap() + .blocks_dal() + .insert_mock_l1_batch(&mock_l1_batch_header(1)) + .await + .unwrap(); + let tx = tester .aggregator .save_eth_tx( From d58af05b4eeba6e128bfcb901b59648bca687bed Mon Sep 17 00:00:00 2001 From: Lech <88630083+Artemka374@users.noreply.github.com> Date: Mon, 20 May 2024 13:53:36 +0300 Subject: [PATCH 6/6] fix lint --- core/node/eth_sender/src/tests.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/node/eth_sender/src/tests.rs b/core/node/eth_sender/src/tests.rs index 3471617ea23c..d624fe939481 100644 --- a/core/node/eth_sender/src/tests.rs +++ b/core/node/eth_sender/src/tests.rs @@ -490,7 +490,7 @@ async fn resend_each_block(commitment_mode: L1BatchCommitmentMode) -> anyhow::Re #[test_casing(2, COMMITMENT_MODES)] #[tokio::test] async fn dont_resend_already_mined(commitment_mode: L1BatchCommitmentMode) -> anyhow::Result<()> { - let mut connection_pool = ConnectionPool::::test_pool().await; + let connection_pool = ConnectionPool::::test_pool().await; let mut tester = EthSenderTester::new( connection_pool.clone(), vec![100; 100],
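
The core pattern this series introduces: every UPDATE that assigns an eth tx id now carries an `AND <column> IS NULL` predicate, and the caller treats `rows_affected() == 0` as an error, so an id that has already been set can never be silently overwritten. Below is a minimal standalone sketch of that pattern, assuming plain sqlx against a Postgres pool rather than the repository's `Instrumented` wrapper and `constraint_error` reporting; the function name and pool handle are illustrative only.

// Sketch only: guard an UPDATE with an `IS NULL` predicate and treat
// `rows_affected() == 0` as a constraint violation, so an already-assigned
// eth_commit_tx_id is never overwritten. Table and column names mirror the
// patch; the surrounding function and pool setup are hypothetical.
use anyhow::Context;
use sqlx::PgPool;

/// Assigns `eth_tx_id` to every batch in `[from, to]` that does not yet have
/// one; errors if no row was updated (the id was already set or the range is
/// empty), mirroring the check added in this series.
pub async fn set_commit_tx_id(
    pool: &PgPool,
    from: i64,
    to: i64,
    eth_tx_id: i32,
) -> anyhow::Result<()> {
    let result = sqlx::query(
        r#"
        UPDATE l1_batches
        SET eth_commit_tx_id = $1, updated_at = NOW()
        WHERE number BETWEEN $2 AND $3
          AND eth_commit_tx_id IS NULL
        "#,
    )
    .bind(eth_tx_id)
    .bind(from)
    .bind(to)
    .execute(pool)
    .await
    .context("failed to update eth_commit_tx_id")?;

    if result.rows_affected() == 0 {
        anyhow::bail!("updating eth_commit_tx_id that is not NULL is not allowed");
    }
    Ok(())
}

In the patch itself the same guard is applied once per aggregation type (`eth_commit_tx_id`, `eth_prove_tx_id`, `eth_execute_tx_id`), the query is routed through a cloned `Instrumented` builder so the failure carries the query name and arguments, and the new test in blocks_dal.rs asserts that a second `set_eth_tx_id` call over the same batch range returns an error.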