
Commit

Merge branch 'main' of github.com:matter-labs/zksync-era into feat-validium-with-da
dimazhornyk committed Jul 2, 2024
2 parents 4072e4b + 65973cc commit 2d63d19
Showing 85 changed files with 1,995 additions and 760 deletions.
1 change: 1 addition & 0 deletions .github/workflows/ci-core-lint-reusable.yml
@@ -35,6 +35,7 @@ jobs:
run: |
ci_run zk fmt --check
ci_run zk lint rust --check
ci_run zk lint toolbox --check
ci_run zk lint js --check
ci_run zk lint ts --check
ci_run zk lint md --check
4 changes: 2 additions & 2 deletions CODEOWNERS
@@ -1,4 +1,4 @@
.github/release-please/** @RomanBrodetski @perekopskiy @Deniallugo @popzxc
**/CHANGELOG.md @RomanBrodetski @perekopskiy @Deniallugo @popzxc
.github/release-please/** @RomanBrodetski @perekopskiy @Deniallugo @popzxc @EmilLuta
**/CHANGELOG.md @RomanBrodetski @perekopskiy @Deniallugo @popzxc @EmilLuta
CODEOWNERS @RomanBrodetski @perekopskiy @Deniallugo @popzxc
.github/workflows/** @matter-labs/devops
45 changes: 45 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

8 changes: 8 additions & 0 deletions Cargo.toml
@@ -53,6 +53,7 @@ members = [
"core/lib/mempool",
"core/lib/merkle_tree",
"core/lib/mini_merkle_tree",
"core/lib/node_framework_derive",
"core/lib/object_store",
"core/lib/prover_interface",
"core/lib/queued_job_processor",
@@ -175,6 +176,12 @@ tracing-opentelemetry = "0.21.0"
url = "2"
web3 = "0.19.0"

# Proc-macro
syn = "2.0"
quote = "1.0"
proc-macro2 = "1.0"
trybuild = "1.0"

# "Internal" dependencies
circuit_sequencer_api_1_3_3 = { package = "circuit_sequencer_api", git = "https://github.com/matter-labs/era-zkevm_test_harness.git", branch = "v1.3.3" }
circuit_sequencer_api_1_4_0 = { package = "circuit_sequencer_api", git = "https://github.com/matter-labs/era-zkevm_test_harness.git", branch = "v1.4.0" }
@@ -244,6 +251,7 @@ zksync_crypto_primitives = { path = "core/lib/crypto_primitives" }

# Framework and components
zksync_node_framework = { path = "core/node/node_framework" }
zksync_node_framework_derive = { path = "core/lib/node_framework_derive" }
zksync_eth_watch = { path = "core/node/eth_watch" }
zksync_shared_metrics = { path = "core/node/shared_metrics" }
zksync_proof_data_handler = { path = "core/node/proof_data_handler" }
2 changes: 1 addition & 1 deletion core/lib/dal/src/lib.rs
@@ -6,7 +6,7 @@
pub use sqlx::{types::BigDecimal, Error as SqlxError};
use zksync_db_connection::connection::DbMarker;
pub use zksync_db_connection::{
connection::Connection,
connection::{Connection, IsolationLevel},
connection_pool::{ConnectionPool, ConnectionPoolBuilder},
error::{DalError, DalResult},
};
1 change: 1 addition & 0 deletions core/lib/db_connection/Cargo.toml
@@ -36,3 +36,4 @@ tracing.workspace = true

[dev-dependencies]
assert_matches.workspace = true
test-casing.workspace = true
92 changes: 86 additions & 6 deletions core/lib/db_connection/src/connection.rs
@@ -215,6 +215,7 @@ impl<'a, DB: DbMarker> Connection<'a, DB> {
Ok(TransactionBuilder {
connection: self,
is_readonly: false,
isolation_level: None,
})
}

@@ -280,11 +281,26 @@ impl<'a, DB: DbMarker> Connection<'a, DB> {
}
}

/// Transaction isolation level.
///
/// See [Postgres docs](https://www.postgresql.org/docs/14/transaction-iso.html) for details on isolation level semantics.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum IsolationLevel {
/// "Read committed" isolation level.
ReadCommitted,
/// "Repeatable read" isolation level (aka "snapshot isolation").
RepeatableRead,
/// Serializable isolation level.
Serializable,
}

/// Builder of transactions allowing to configure transaction characteristics (for now, just its readonly status).
#[derive(Debug)]
pub struct TransactionBuilder<'a, 'c, DB: DbMarker> {
connection: &'a mut Connection<'c, DB>,
is_readonly: bool,
isolation_level: Option<IsolationLevel>,
}

impl<'a, DB: DbMarker> TransactionBuilder<'a, '_, DB> {
@@ -294,12 +310,40 @@ impl<'a, DB: DbMarker> TransactionBuilder<'a, '_, DB> {
self
}

/// Sets the isolation level of this transaction. If this method is not called, the isolation level will be
/// "read committed" (the default Postgres isolation level) for read-write transactions, and "repeatable read"
/// for readonly transactions. Beware that setting high isolation level for read-write transactions may lead
/// to performance degradation and/or isolation-related errors.
pub fn set_isolation(mut self, level: IsolationLevel) -> Self {
self.isolation_level = Some(level);
self
}

/// Builds the transaction with the provided characteristics.
pub async fn build(self) -> DalResult<Connection<'a, DB>> {
let mut transaction = self.connection.start_transaction().await?;

let level = self.isolation_level.unwrap_or(if self.is_readonly {
IsolationLevel::RepeatableRead
} else {
IsolationLevel::ReadCommitted
});
let level = match level {
IsolationLevel::ReadCommitted => "READ COMMITTED",
IsolationLevel::RepeatableRead => "REPEATABLE READ",
IsolationLevel::Serializable => "SERIALIZABLE",
};
let mut set_transaction_args = format!(" ISOLATION LEVEL {level}");

if self.is_readonly {
sqlx::query("SET TRANSACTION READ ONLY")
set_transaction_args += " READ ONLY";
}

if !set_transaction_args.is_empty() {
sqlx::query(&format!("SET TRANSACTION{set_transaction_args}"))
.instrument("set_transaction_characteristics")
.with_arg("isolation_level", &self.isolation_level)
.with_arg("readonly", &self.is_readonly)
.execute(&mut transaction)
.await?;
}
@@ -309,6 +353,8 @@ impl<'a, DB: DbMarker> TransactionBuilder<'a, '_, DB> {

#[cfg(test)]
mod tests {
use test_casing::test_casing;

use super::*;

#[tokio::test]
@@ -344,17 +390,51 @@ mod tests {
}
}

const ISOLATION_LEVELS: [Option<IsolationLevel>; 4] = [
None,
Some(IsolationLevel::ReadCommitted),
Some(IsolationLevel::RepeatableRead),
Some(IsolationLevel::Serializable),
];

#[test_casing(4, ISOLATION_LEVELS)]
#[tokio::test]
async fn creating_readonly_transaction() {
async fn setting_isolation_level_for_transaction(level: Option<IsolationLevel>) {
let pool = ConnectionPool::<InternalMarker>::constrained_test_pool(1).await;
let mut connection = pool.connection().await.unwrap();
let mut readonly_transaction = connection
.transaction_builder()
let mut transaction_builder = connection.transaction_builder().unwrap();
if let Some(level) = level {
transaction_builder = transaction_builder.set_isolation(level);
}

let mut transaction = transaction_builder.build().await.unwrap();
assert!(transaction.in_transaction());

sqlx::query("SELECT COUNT(*) AS \"count?\" FROM miniblocks")
.instrument("test")
.fetch_optional(&mut transaction)
.await
.unwrap()
.set_readonly()
.build()
.expect("no row returned");
// Check that it's possible to execute write statements in the transaction.
sqlx::query("DELETE FROM miniblocks")
.instrument("test")
.execute(&mut transaction)
.await
.unwrap();
}

#[test_casing(4, ISOLATION_LEVELS)]
#[tokio::test]
async fn creating_readonly_transaction(level: Option<IsolationLevel>) {
let pool = ConnectionPool::<InternalMarker>::constrained_test_pool(1).await;
let mut connection = pool.connection().await.unwrap();
let mut transaction_builder = connection.transaction_builder().unwrap().set_readonly();
if let Some(level) = level {
transaction_builder = transaction_builder.set_isolation(level);
}

let mut readonly_transaction = transaction_builder.build().await.unwrap();
assert!(readonly_transaction.in_transaction());

sqlx::query("SELECT COUNT(*) AS \"count?\" FROM miniblocks")
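With these changes, TransactionBuilder::build() issues a single "SET TRANSACTION ISOLATION LEVEL <level> [READ ONLY]" statement, defaulting to "repeatable read" for readonly transactions and "read committed" otherwise, and IsolationLevel is re-exported from zksync_dal (see the core/lib/dal/src/lib.rs change above). A minimal usage sketch under those assumptions (the function name, the Core marker import, and the surrounding scaffolding are illustrative; only the builder methods come from this diff):

use zksync_dal::{ConnectionPool, Core, DalResult, IsolationLevel};

// Illustrative helper: open a read-only, repeatable-read transaction and run
// consistent reads inside it. Only set_readonly(), set_isolation() and build()
// come from this commit; the rest is example scaffolding.
async fn with_snapshot_reads(pool: &ConnectionPool<Core>) -> DalResult<()> {
    let mut connection = pool.connection().await?;
    let mut transaction = connection
        .transaction_builder()?
        .set_readonly()
        .set_isolation(IsolationLevel::RepeatableRead)
        .build()
        .await?;
    // ... run queries via `transaction`; they all observe the same snapshot ...
    drop(transaction); // readonly transaction, so dropping (i.e. rolling back) is harmless
    Ok(())
}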
18 changes: 18 additions & 0 deletions core/lib/node_framework_derive/Cargo.toml
@@ -0,0 +1,18 @@
[package]
name = "zksync_node_framework_derive"
version.workspace = true
edition.workspace = true
authors.workspace = true
homepage.workspace = true
repository.workspace = true
license.workspace = true
keywords.workspace = true
categories.workspace = true

[lib]
proc-macro = true

[dependencies]
syn = { workspace = true, features = ["full"] }
quote.workspace = true
proc-macro2.workspace = true
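
The "proc-macro = true" lib setting compiles this crate as a procedural-macro crate, which is what the syn (with the "full" feature), quote, and proc-macro2 dependencies are for. As orientation only, a hypothetical skeleton of a derive-macro entry point in a crate configured this way looks roughly like the following; it is not the actual contents of the new crate:

use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};

// Hypothetical derive macro skeleton (illustrative, not from this commit).
#[proc_macro_derive(Example)]
pub fn derive_example(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let ident = &input.ident;
    // Emit a trivial inherent impl for the annotated type.
    let expanded = quote! {
        impl #ident {
            pub fn derived_name() -> &'static str {
                stringify!(#ident)
            }
        }
    };
    expanded.into()
}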
44 changes: 44 additions & 0 deletions core/lib/node_framework_derive/src/helpers.rs
@@ -0,0 +1,44 @@
use std::fmt;

use syn::{GenericArgument, PathArguments, Type};

use crate::labels::CtxLabel;

/// Representation of a single structure field.
pub(crate) struct Field {
/// Name of the field.
pub(crate) ident: syn::Ident,
/// Type of the field.
pub(crate) ty: syn::Type,
/// Parsed label.
pub(crate) label: CtxLabel,
}

impl fmt::Debug for Field {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Field")
.field("ident", &self.ident)
.field("label", &self.label)
.finish()
}
}

// Helper function to check if a field is of type Option<T> and extract T
pub(crate) fn extract_option_inner_type(ty: &Type) -> Option<&Type> {
if let Type::Path(type_path) = ty {
// Check if the path is `Option`
if type_path.path.segments.len() == 1 {
let segment = &type_path.path.segments[0];
if segment.ident == "Option" {
if let PathArguments::AngleBracketed(angle_bracketed_args) = &segment.arguments {
if angle_bracketed_args.args.len() == 1 {
if let GenericArgument::Type(inner_type) = &angle_bracketed_args.args[0] {
return Some(inner_type);
}
}
}
}
}
}
None
}
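
extract_option_inner_type pattern-matches a syn::Type and returns the inner T only for a bare, single-segment Option<T> path; qualified paths such as std::option::Option<T> and non-Option types fall through to None. A hypothetical unit test (not part of this commit) illustrating that behavior:

#[cfg(test)]
mod tests {
    use quote::ToTokens;

    use super::extract_option_inner_type;

    #[test]
    fn extracting_inner_type_of_option() {
        let ty: syn::Type = syn::parse_quote!(Option<u64>);
        let inner = extract_option_inner_type(&ty).expect("`Option` should be detected");
        assert_eq!(inner.to_token_stream().to_string(), "u64");

        let ty: syn::Type = syn::parse_quote!(Vec<u64>);
        assert!(extract_option_inner_type(&ty).is_none());
    }
}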
(Remaining changed files not rendered.)
