Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: protobuf-generated json configs for the main node (BFT-371) #458

Merged
merged 27 commits into from
Jan 26, 2024
Merged
Changes from 1 commit
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
5cb1162
defined some protos
pompon0 Nov 8, 2023
bb53fc7
added some conversions
pompon0 Nov 9, 2023
1ce3125
added missing files
pompon0 Nov 10, 2023
d5d5213
nudge prover
pompon0 Nov 13, 2023
72f7cef
moved proto config encoding to a separate crate
pompon0 Nov 13, 2023
9fe41df
Merge remote-tracking branch 'origin/main' into gprusak-protobuf-configs
pompon0 Nov 13, 2023
27f62e2
added tests
pompon0 Nov 13, 2023
c80db41
Merge remote-tracking branch 'origin/main' into gprusak-protobuf-configs
pompon0 Jan 4, 2024
c520d23
split conversion code into files
pompon0 Jan 4, 2024
2eb869f
proto definitions
pompon0 Jan 5, 2024
247b914
more protos
pompon0 Jan 5, 2024
4afc72f
chain cfg conv
pompon0 Jan 5, 2024
ee6ee15
contracts conv
pompon0 Jan 5, 2024
6ea90ac
database conv
pompon0 Jan 5, 2024
20e3864
eth conv
pompon0 Jan 5, 2024
8bbc6d2
some fri conv
pompon0 Jan 8, 2024
3b7a0e2
more conv
pompon0 Jan 9, 2024
fef239d
conv done
pompon0 Jan 9, 2024
a20ef5a
cargo fmt
pompon0 Jan 9, 2024
f2bab42
Merge remote-tracking branch 'origin/main' into gprusak-protobuf-configs
pompon0 Jan 9, 2024
0efcc6e
Merge branch 'main' into gprusak-protobuf-configs
pompon0 Jan 11, 2024
8ff2a9b
Merge branch 'main' into gprusak-protobuf-configs
pompon0 Jan 25, 2024
c17ffba
updated configs
pompon0 Jan 25, 2024
915542d
Merge remote-tracking branch 'origin/main' into gprusak-protobuf-configs
pompon0 Jan 26, 2024
2f4b5f1
updated era-consensus commit
pompon0 Jan 26, 2024
e20f337
prover/Cargo.lock
pompon0 Jan 26, 2024
8138698
Cargo.toml
pompon0 Jan 26, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
moved proto config encoding to a separate crate
pompon0 committed Nov 13, 2023

Verified

This commit was signed with the committer’s verified signature.
pompon0 Grzegorz Prusak
commit 72f7cef16b57d7c6189d59ce38e5000b2746a4d5
12 changes: 10 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 1 addition & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -33,6 +33,7 @@ members = [
"core/lib/state",
"core/lib/storage",
"core/lib/types",
"core/lib/protobuf_config",
"core/lib/prover_utils",
"core/lib/utils",
"core/lib/vlog",
@@ -57,6 +58,3 @@ exclude = []
[profile.perf]
inherits = "release"
debug = true

[workspace.dependencies]
zksync_protobuf = { version = "0.1.0", git = "https://github.com/matter-labs/era-consensus.git", rev = "9b300371512a4e443d086ecd19c1877719c15646" }
2 changes: 1 addition & 1 deletion contracts
Submodule contracts updated 248 files
6 changes: 0 additions & 6 deletions core/lib/config/Cargo.toml
Original file line number Diff line number Diff line change
@@ -12,13 +12,7 @@ categories = ["cryptography"]
[dependencies]
zksync_basic_types = { path = "../../lib/basic_types" }
zksync_contracts = { path = "../../lib/contracts" }
zksync_protobuf.workspace = true

anyhow = "1.0"
serde = { version = "1.0", features = ["derive"] }
envy = "0.4"
prost = "0.12.1"

[build-dependencies]
zksync_protobuf.workspace = true

16 changes: 0 additions & 16 deletions core/lib/config/src/configs/alerts.rs
Original file line number Diff line number Diff line change
@@ -1,24 +1,8 @@
use serde::Deserialize;
use zksync_protobuf::ProtoFmt;

#[derive(Debug, Deserialize, Clone, PartialEq)]
pub struct AlertsConfig {
/// List of panics' messages from external crypto code,
/// that are sporadic and needed to be handled separately
pub sporadic_crypto_errors_substrs: Vec<String>,
}

impl ProtoFmt for AlertsConfig {
type Proto = super::proto::Alerts;
fn read(r: &Self::Proto) -> anyhow::Result<Self> {
Ok(Self {
sporadic_crypto_errors_substrs: r.sporadic_crypto_errors_substrs.clone(),
})
}

fn build(&self) -> Self::Proto {
Self::Proto {
sporadic_crypto_errors_substrs: self.sporadic_crypto_errors_substrs.clone(),
}
}
}
221 changes: 1 addition & 220 deletions core/lib/config/src/configs/api.rs
Original file line number Diff line number Diff line change
@@ -1,13 +1,7 @@
pub use crate::configs::PrometheusConfig;
use anyhow::Context as _;
use serde::Deserialize;
use std::{
convert::{TryFrom as _, TryInto as _},
net::SocketAddr,
time::Duration,
};
use std::{net::SocketAddr, time::Duration};
use zksync_basic_types::H256;
use zksync_protobuf::{read_required, required, ProtoFmt};

/// API configuration.
#[derive(Debug, Deserialize, Clone, PartialEq)]
@@ -24,30 +18,6 @@ pub struct ApiConfig {
pub merkle_tree: MerkleTreeApiConfig,
}

impl ProtoFmt for ApiConfig {
type Proto = super::proto::Api;
fn read(r: &Self::Proto) -> anyhow::Result<Self> {
Ok(Self {
web3_json_rpc: read_required(&r.web3_json_rpc).context("web3_json_rpc")?,
contract_verification: read_required(&r.contract_verification)
.context("contract_verification")?,
prometheus: read_required(&r.prometheus).context("prometheus")?,
healthcheck: read_required(&r.healthcheck).context("healthcheck")?,
merkle_tree: read_required(&r.merkle_tree).context("merkle_tree")?,
})
}

fn build(&self) -> Self::Proto {
Self::Proto {
web3_json_rpc: Some(self.web3_json_rpc.build()),
contract_verification: Some(self.contract_verification.build()),
prometheus: Some(self.prometheus.build()),
healthcheck: Some(self.healthcheck.build()),
merkle_tree: Some(self.merkle_tree.build()),
}
}
}

#[derive(Debug, Deserialize, Clone, PartialEq)]
pub struct Web3JsonRpcConfig {
/// Port to which the HTTP RPC server is listening.
@@ -235,143 +205,6 @@ impl Web3JsonRpcConfig {
}
}

impl ProtoFmt for Web3JsonRpcConfig {
type Proto = super::proto::Web3JsonRpc;
fn read(r: &Self::Proto) -> anyhow::Result<Self> {
Ok(Self {
http_port: required(&r.http_port)
.and_then(|p| Ok((*p).try_into()?))
.context("http_port")?,
http_url: required(&r.http_url).context("http_url")?.clone(),
ws_port: required(&r.ws_port)
.and_then(|p| Ok((*p).try_into()?))
.context("ws_port")?,
ws_url: required(&r.ws_url).context("ws_url")?.clone(),
req_entities_limit: r.req_entities_limit,
filters_limit: r.filters_limit,
subscriptions_limit: r.subscriptions_limit,
pubsub_polling_interval: r.pubsub_polling_interval,
threads_per_server: *required(&r.threads_per_server).context("threads_per_server")?,
max_nonce_ahead: *required(&r.max_nonce_ahead).context("max_nonce_ahead")?,
gas_price_scale_factor: *required(&r.gas_price_scale_factor)
.context("gas_price_scale_factor")?,
transactions_per_sec_limit: r.transactions_per_sec_limit,
request_timeout: r.request_timeout,
account_pks: match &r.account_pks {
None => None,
Some(r) => {
let mut keys = vec![];
for (i, k) in r.keys.iter().enumerate() {
keys.push(
<[u8; 32]>::try_from(&k[..])
.with_context(|| format!("keys[{i}]"))?
.into(),
);
}
Some(keys)
}
},
estimate_gas_scale_factor: *required(&r.estimate_gas_scale_factor)
.context("estimate_gas_scale_factor")?,
estimate_gas_acceptable_overestimation: *required(
&r.estimate_gas_acceptable_overestimation,
)
.context("acceptable_overestimation")?,
max_tx_size: required(&r.max_tx_size)
.and_then(|x| Ok((*x).try_into()?))
.context("max_tx_size")?,
vm_execution_cache_misses_limit: r
.vm_execution_cache_misses_limit
.map(|x| x.try_into())
.transpose()
.context("vm_execution_cache_misses_limit")?,
vm_concurrency_limit: r
.vm_concurrency_limit
.map(|x| x.try_into())
.transpose()
.context("vm_concurrency_limit")?,
factory_deps_cache_size_mb: r
.factory_deps_cache_size_mb
.map(|x| x.try_into())
.transpose()
.context("factory_deps_cache_size_mb")?,
initial_writes_cache_size_mb: r
.initial_writes_cache_size_mb
.map(|x| x.try_into())
.transpose()
.context("initial_writes_cache_size_mb")?,
latest_values_cache_size_mb: r
.latest_values_cache_size_mb
.map(|x| x.try_into())
.transpose()
.context("latests_values_cache_size_mb")?,
http_threads: r.http_threads,
ws_threads: r.ws_threads,
fee_history_limit: r.fee_history_limit,
max_batch_request_size: r
.max_batch_request_size
.map(|x| x.try_into())
.transpose()
.context("max_batch_requres_size")?,
max_response_body_size_mb: r
.max_response_body_size_mb
.map(|x| x.try_into())
.transpose()
.context("max_response_body_size_mb")?,
websocket_requests_per_minute_limit: r.websocket_requests_per_minute_limit,
})
}
fn build(&self) -> Self::Proto {
Self::Proto {
http_port: Some(self.http_port.into()),
http_url: Some(self.http_url.clone()),
ws_port: Some(self.ws_port.into()),
ws_url: Some(self.ws_url.clone()),
req_entities_limit: self.req_entities_limit,
filters_limit: self.filters_limit,
subscriptions_limit: self.subscriptions_limit,
pubsub_polling_interval: self.pubsub_polling_interval,
threads_per_server: Some(self.threads_per_server),
max_nonce_ahead: Some(self.max_nonce_ahead),
gas_price_scale_factor: Some(self.gas_price_scale_factor),
transactions_per_sec_limit: self.transactions_per_sec_limit,
request_timeout: self.request_timeout,
account_pks: self
.account_pks
.as_ref()
.map(|keys| super::proto::PrivateKeys {
keys: keys.iter().map(|k| k.as_bytes().into()).collect(),
}),
estimate_gas_scale_factor: Some(self.estimate_gas_scale_factor),
estimate_gas_acceptable_overestimation: Some(
self.estimate_gas_acceptable_overestimation,
),
max_tx_size: Some(self.max_tx_size.try_into().unwrap()),
vm_execution_cache_misses_limit: self
.vm_execution_cache_misses_limit
.map(|x| x.try_into().unwrap()),
vm_concurrency_limit: self.vm_concurrency_limit.map(|x| x.try_into().unwrap()),
factory_deps_cache_size_mb: self
.factory_deps_cache_size_mb
.map(|x| x.try_into().unwrap()),
initial_writes_cache_size_mb: self
.initial_writes_cache_size_mb
.map(|x| x.try_into().unwrap()),
latest_values_cache_size_mb: self
.latest_values_cache_size_mb
.map(|x| x.try_into().unwrap()),
http_threads: self.http_threads,
ws_threads: self.ws_threads,
fee_history_limit: self.fee_history_limit,
max_batch_request_size: self.max_batch_request_size.map(|x| x.try_into().unwrap()),
max_response_body_size_mb: self
.max_response_body_size_mb
.map(|x| x.try_into().unwrap()),
websocket_requests_per_minute_limit: self.websocket_requests_per_minute_limit,
}
}
}

#[derive(Debug, Deserialize, Clone, PartialEq)]
pub struct HealthCheckConfig {
/// Port to which the REST server is listening.
@@ -384,22 +217,6 @@ impl HealthCheckConfig {
}
}

impl ProtoFmt for HealthCheckConfig {
type Proto = super::proto::HealthCheck;
fn read(r: &Self::Proto) -> anyhow::Result<Self> {
Ok(Self {
port: required(&r.port)
.and_then(|p| Ok((*p).try_into()?))
.context("port")?,
})
}
fn build(&self) -> Self::Proto {
Self::Proto {
port: Some(self.port.into()),
}
}
}

#[derive(Debug, Deserialize, Clone, PartialEq)]
pub struct ContractVerificationApiConfig {
/// Port to which the REST server is listening.
@@ -416,26 +233,6 @@ impl ContractVerificationApiConfig {
}
}

impl ProtoFmt for ContractVerificationApiConfig {
type Proto = super::proto::ContractVerificationApi;
fn read(r: &Self::Proto) -> anyhow::Result<Self> {
Ok(Self {
port: required(&r.port)
.and_then(|p| Ok((*p).try_into()?))
.context("port")?,
url: required(&r.url).context("url")?.clone(),
threads_per_server: *required(&r.threads_per_server).context("threads_per_server")?,
})
}
fn build(&self) -> Self::Proto {
Self::Proto {
port: Some(self.port.into()),
url: Some(self.url.clone()),
threads_per_server: Some(self.threads_per_server),
}
}
}

/// Configuration for the Merkle tree API.
#[derive(Debug, Clone, PartialEq, Deserialize)]
pub struct MerkleTreeApiConfig {
@@ -449,19 +246,3 @@ impl MerkleTreeApiConfig {
3_072
}
}

impl ProtoFmt for MerkleTreeApiConfig {
type Proto = super::proto::MerkleTreeApi;
fn read(r: &Self::Proto) -> anyhow::Result<Self> {
Ok(Self {
port: required(&r.port)
.and_then(|p| Ok((*p).try_into()?))
.context("port")?,
})
}
fn build(&self) -> Self::Proto {
Self::Proto {
port: Some(self.port.into()),
}
}
}
1 change: 0 additions & 1 deletion core/lib/config/src/configs/mod.rs
Original file line number Diff line number Diff line change
@@ -33,7 +33,6 @@ pub mod fri_witness_vector_generator;
pub mod house_keeper;
pub mod object_store;
pub mod proof_data_handler;
mod proto;
pub mod prover;
pub mod prover_group;
#[cfg(test)]
2 changes: 0 additions & 2 deletions core/lib/config/src/configs/proto/mod.rs

This file was deleted.

27 changes: 1 addition & 26 deletions core/lib/config/src/configs/utils.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
use anyhow::Context as _;
use serde::Deserialize;
use std::{convert::TryInto as _, env, time::Duration};
use zksync_protobuf::{required, ProtoFmt};
use std::{env, time::Duration};

#[derive(Debug, Deserialize, Clone, PartialEq)]
pub struct PrometheusConfig {
@@ -28,26 +26,3 @@ impl PrometheusConfig {
format!("{gateway_url}/metrics/job/{job_id}/namespace/{namespace}/pod/{pod}")
}
}

impl ProtoFmt for PrometheusConfig {
type Proto = super::proto::Prometheus;
fn read(r: &Self::Proto) -> anyhow::Result<Self> {
Ok(Self {
listener_port: required(&r.listener_port)
.and_then(|p| Ok((*p).try_into()?))
.context("listener_port")?,
pushgateway_url: required(&r.pushgateway_url)
.context("pushgateway_url")?
.clone(),
push_interval_ms: r.push_interval_ms,
})
}

fn build(&self) -> Self::Proto {
Self::Proto {
listener_port: Some(self.listener_port.into()),
pushgateway_url: Some(self.pushgateway_url.clone()),
push_interval_ms: self.push_interval_ms,
}
}
}
21 changes: 21 additions & 0 deletions core/lib/protobuf_config/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
[package]
name = "zksync_protobuf_config"
version = "0.1.0"
edition = "2021"
authors = ["The Matter Labs Team <hello@matterlabs.dev>"]
homepage = "https://zksync.io/"
repository = "https://github.com/matter-labs/zksync-era"
license = "MIT OR Apache-2.0"
keywords = ["blockchain", "zksync"]
categories = ["cryptography"]

[dependencies]
zksync_config = { path = "../../lib/config" }
zksync_protobuf = { version = "0.1.0", git = "https://github.com/matter-labs/era-consensus.git", rev = "9b300371512a4e443d086ecd19c1877719c15646" }

anyhow = "1.0"
prost = "0.12.1"

[build-dependencies]
zksync_protobuf = { version = "0.1.0", git = "https://github.com/matter-labs/era-consensus.git", rev = "9b300371512a4e443d086ecd19c1877719c15646" }

Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
//! Generates rust code from protobufs.
fn main() {
zksync_protobuf::build::Config {
input_root: "src/configs/proto".into(),
proto_root: "zksync/config/configs".into(),
input_root: "src/proto".into(),
proto_root: "zksync/config".into(),
dependencies: vec![],
protobuf_crate: "::zksync_protobuf".into(),
}
266 changes: 266 additions & 0 deletions core/lib/protobuf_config/src/lib.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,266 @@
//! Defined protobuf mapping for the config files.
//! It allows to encode the configs using:
//! * protobuf binary format
//! * protobuf text format
//! * protobuf json format
use anyhow::Context as _;
use std::convert::{TryFrom as _, TryInto as _};
use zksync_config::configs::{
api::ContractVerificationApiConfig, api::HealthCheckConfig, api::MerkleTreeApiConfig,
api::Web3JsonRpcConfig, AlertsConfig, ApiConfig, PrometheusConfig,
};
use zksync_protobuf::required;

pub mod proto;

/// Trait reverse to `zksync_protobuf::ProtoFmt` for cases where
/// you would like to specify a custom proto encoding for an externally defined type.
trait ProtoRepr<T>: prost::Message {
    /// Parses the proto message `r` into the config type `T`,
    /// returning an error when required fields are missing or invalid.
    fn read(r: &Self) -> anyhow::Result<T>;
    /// Builds the proto message representing the config value `this`.
    fn build(this: &T) -> Self;
}

/// Reads a config value of type `T` from an optional proto field,
/// failing with "missing field" when the field is absent.
fn read_required_repr<T, P: ProtoRepr<T>>(field: &Option<P>) -> anyhow::Result<T> {
    let msg = field.as_ref().context("missing field")?;
    P::read(msg)
}

impl ProtoRepr<AlertsConfig> for proto::Alerts {
    /// Decodes the alerts config; the substring list is copied as-is.
    fn read(r: &Self) -> anyhow::Result<AlertsConfig> {
        let sporadic_crypto_errors_substrs = r.sporadic_crypto_errors_substrs.clone();
        Ok(AlertsConfig {
            sporadic_crypto_errors_substrs,
        })
    }

    /// Encodes the alerts config back into its proto message.
    fn build(this: &AlertsConfig) -> Self {
        let sporadic_crypto_errors_substrs = this.sporadic_crypto_errors_substrs.clone();
        Self {
            sporadic_crypto_errors_substrs,
        }
    }
}

impl ProtoRepr<PrometheusConfig> for proto::Prometheus {
    /// Decodes the Prometheus config; `listener_port` and `pushgateway_url`
    /// are mandatory, while the push interval stays optional.
    fn read(r: &Self) -> anyhow::Result<PrometheusConfig> {
        let listener_port = required(&r.listener_port)
            .and_then(|p| Ok((*p).try_into()?))
            .context("listener_port")?;
        let pushgateway_url = required(&r.pushgateway_url)
            .context("pushgateway_url")?
            .clone();
        Ok(PrometheusConfig {
            listener_port,
            pushgateway_url,
            push_interval_ms: r.push_interval_ms,
        })
    }

    /// Encodes the Prometheus config back into its proto message.
    fn build(this: &PrometheusConfig) -> Self {
        Self {
            push_interval_ms: this.push_interval_ms,
            pushgateway_url: Some(this.pushgateway_url.clone()),
            listener_port: Some(this.listener_port.into()),
        }
    }
}

impl ProtoRepr<ApiConfig> for proto::Api {
    /// Decodes the top-level API config; every sub-config must be present.
    fn read(r: &Self) -> anyhow::Result<ApiConfig> {
        let web3_json_rpc = read_required_repr(&r.web3_json_rpc).context("web3_json_rpc")?;
        let contract_verification =
            read_required_repr(&r.contract_verification).context("contract_verification")?;
        let prometheus = read_required_repr(&r.prometheus).context("prometheus")?;
        let healthcheck = read_required_repr(&r.healthcheck).context("healthcheck")?;
        let merkle_tree = read_required_repr(&r.merkle_tree).context("merkle_tree")?;
        Ok(ApiConfig {
            web3_json_rpc,
            contract_verification,
            prometheus,
            healthcheck,
            merkle_tree,
        })
    }

    /// Encodes each sub-config back into its proto representation.
    fn build(this: &ApiConfig) -> Self {
        Self {
            merkle_tree: Some(ProtoRepr::build(&this.merkle_tree)),
            healthcheck: Some(ProtoRepr::build(&this.healthcheck)),
            prometheus: Some(ProtoRepr::build(&this.prometheus)),
            contract_verification: Some(ProtoRepr::build(&this.contract_verification)),
            web3_json_rpc: Some(ProtoRepr::build(&this.web3_json_rpc)),
        }
    }
}

impl ProtoRepr<Web3JsonRpcConfig> for proto::Web3JsonRpc {
    /// Reads the Web3 JSON-RPC config from its proto representation.
    /// Required fields produce an error carrying the field name as context
    /// when absent; optional fields pass through unchanged.
    fn read(r: &Self) -> anyhow::Result<Web3JsonRpcConfig> {
        Ok(Web3JsonRpcConfig {
            http_port: required(&r.http_port)
                .and_then(|p| Ok((*p).try_into()?))
                .context("http_port")?,
            http_url: required(&r.http_url).context("http_url")?.clone(),
            ws_port: required(&r.ws_port)
                .and_then(|p| Ok((*p).try_into()?))
                .context("ws_port")?,
            ws_url: required(&r.ws_url).context("ws_url")?.clone(),
            req_entities_limit: r.req_entities_limit,
            filters_limit: r.filters_limit,
            subscriptions_limit: r.subscriptions_limit,
            pubsub_polling_interval: r.pubsub_polling_interval,
            threads_per_server: *required(&r.threads_per_server).context("threads_per_server")?,
            max_nonce_ahead: *required(&r.max_nonce_ahead).context("max_nonce_ahead")?,
            gas_price_scale_factor: *required(&r.gas_price_scale_factor)
                .context("gas_price_scale_factor")?,
            transactions_per_sec_limit: r.transactions_per_sec_limit,
            request_timeout: r.request_timeout,
            // Each account private key must be exactly 32 bytes long.
            account_pks: match &r.account_pks {
                None => None,
                Some(r) => {
                    let mut keys = vec![];
                    for (i, k) in r.keys.iter().enumerate() {
                        keys.push(
                            <[u8; 32]>::try_from(&k[..])
                                .with_context(|| format!("keys[{i}]"))?
                                .into(),
                        );
                    }
                    Some(keys)
                }
            },
            estimate_gas_scale_factor: *required(&r.estimate_gas_scale_factor)
                .context("estimate_gas_scale_factor")?,
            // Fixed: error context now matches the actual field name.
            estimate_gas_acceptable_overestimation: *required(
                &r.estimate_gas_acceptable_overestimation,
            )
            .context("estimate_gas_acceptable_overestimation")?,
            max_tx_size: required(&r.max_tx_size)
                .and_then(|x| Ok((*x).try_into()?))
                .context("max_tx_size")?,
            vm_execution_cache_misses_limit: r
                .vm_execution_cache_misses_limit
                .map(|x| x.try_into())
                .transpose()
                .context("vm_execution_cache_misses_limit")?,
            vm_concurrency_limit: r
                .vm_concurrency_limit
                .map(|x| x.try_into())
                .transpose()
                .context("vm_concurrency_limit")?,
            factory_deps_cache_size_mb: r
                .factory_deps_cache_size_mb
                .map(|x| x.try_into())
                .transpose()
                .context("factory_deps_cache_size_mb")?,
            initial_writes_cache_size_mb: r
                .initial_writes_cache_size_mb
                .map(|x| x.try_into())
                .transpose()
                .context("initial_writes_cache_size_mb")?,
            // Fixed typo: was "latests_values_cache_size_mb".
            latest_values_cache_size_mb: r
                .latest_values_cache_size_mb
                .map(|x| x.try_into())
                .transpose()
                .context("latest_values_cache_size_mb")?,
            http_threads: r.http_threads,
            ws_threads: r.ws_threads,
            fee_history_limit: r.fee_history_limit,
            // Fixed typo: was "max_batch_requres_size".
            max_batch_request_size: r
                .max_batch_request_size
                .map(|x| x.try_into())
                .transpose()
                .context("max_batch_request_size")?,
            max_response_body_size_mb: r
                .max_response_body_size_mb
                .map(|x| x.try_into())
                .transpose()
                .context("max_response_body_size_mb")?,
            websocket_requests_per_minute_limit: r.websocket_requests_per_minute_limit,
        })
    }

    /// Builds the proto message from the config.
    /// NOTE(review): the `try_into().unwrap()` calls panic if a value does not
    /// fit the proto integer width; presumed unreachable for valid configs —
    /// confirm, or propagate the error instead.
    fn build(this: &Web3JsonRpcConfig) -> Self {
        Self {
            http_port: Some(this.http_port.into()),
            http_url: Some(this.http_url.clone()),
            ws_port: Some(this.ws_port.into()),
            ws_url: Some(this.ws_url.clone()),
            req_entities_limit: this.req_entities_limit,
            filters_limit: this.filters_limit,
            subscriptions_limit: this.subscriptions_limit,
            pubsub_polling_interval: this.pubsub_polling_interval,
            threads_per_server: Some(this.threads_per_server),
            max_nonce_ahead: Some(this.max_nonce_ahead),
            gas_price_scale_factor: Some(this.gas_price_scale_factor),
            transactions_per_sec_limit: this.transactions_per_sec_limit,
            request_timeout: this.request_timeout,
            account_pks: this.account_pks.as_ref().map(|keys| proto::PrivateKeys {
                keys: keys.iter().map(|k| k.as_bytes().into()).collect(),
            }),
            estimate_gas_scale_factor: Some(this.estimate_gas_scale_factor),
            estimate_gas_acceptable_overestimation: Some(
                this.estimate_gas_acceptable_overestimation,
            ),
            max_tx_size: Some(this.max_tx_size.try_into().unwrap()),
            vm_execution_cache_misses_limit: this
                .vm_execution_cache_misses_limit
                .map(|x| x.try_into().unwrap()),
            vm_concurrency_limit: this.vm_concurrency_limit.map(|x| x.try_into().unwrap()),
            factory_deps_cache_size_mb: this
                .factory_deps_cache_size_mb
                .map(|x| x.try_into().unwrap()),
            initial_writes_cache_size_mb: this
                .initial_writes_cache_size_mb
                .map(|x| x.try_into().unwrap()),
            latest_values_cache_size_mb: this
                .latest_values_cache_size_mb
                .map(|x| x.try_into().unwrap()),
            http_threads: this.http_threads,
            ws_threads: this.ws_threads,
            fee_history_limit: this.fee_history_limit,
            max_batch_request_size: this.max_batch_request_size.map(|x| x.try_into().unwrap()),
            max_response_body_size_mb: this
                .max_response_body_size_mb
                .map(|x| x.try_into().unwrap()),
            websocket_requests_per_minute_limit: this.websocket_requests_per_minute_limit,
        }
    }
}

impl ProtoRepr<ContractVerificationApiConfig> for proto::ContractVerificationApi {
    /// Decodes the contract-verification API config; all fields are mandatory.
    fn read(r: &Self) -> anyhow::Result<ContractVerificationApiConfig> {
        let port = required(&r.port)
            .and_then(|p| Ok((*p).try_into()?))
            .context("port")?;
        let url = required(&r.url).context("url")?.clone();
        let threads_per_server =
            *required(&r.threads_per_server).context("threads_per_server")?;
        Ok(ContractVerificationApiConfig {
            port,
            url,
            threads_per_server,
        })
    }

    /// Encodes the config back into its proto message.
    fn build(this: &ContractVerificationApiConfig) -> Self {
        Self {
            threads_per_server: Some(this.threads_per_server),
            url: Some(this.url.clone()),
            port: Some(this.port.into()),
        }
    }
}

impl ProtoRepr<HealthCheckConfig> for proto::HealthCheck {
    /// Decodes the healthcheck server config; `port` is mandatory.
    fn read(r: &Self) -> anyhow::Result<HealthCheckConfig> {
        let port = required(&r.port)
            .and_then(|p| Ok((*p).try_into()?))
            .context("port")?;
        Ok(HealthCheckConfig { port })
    }

    /// Encodes the config back into its proto message.
    fn build(this: &HealthCheckConfig) -> Self {
        let port = this.port.into();
        Self { port: Some(port) }
    }
}

impl ProtoRepr<MerkleTreeApiConfig> for proto::MerkleTreeApi {
    /// Decodes the Merkle tree API config; `port` is mandatory.
    fn read(r: &Self) -> anyhow::Result<MerkleTreeApiConfig> {
        let port = required(&r.port)
            .and_then(|p| Ok((*p).try_into()?))
            .context("port")?;
        Ok(MerkleTreeApiConfig { port })
    }

    /// Encodes the config back into its proto message.
    fn build(this: &MerkleTreeApiConfig) -> Self {
        let port = this.port.into();
        Self { port: Some(port) }
    }
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
syntax = "proto3";

package zksync.config.configs;
package zksync.config;

message Alerts {
repeated string sporadic_crypto_errors_substrs = 1;
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
syntax = "proto3";

package zksync.config.configs;
package zksync.config;

import "zksync/config/configs/utils.proto";
import "zksync/config/utils.proto";

message PrivateKeys {
repeated bytes keys = 1;
2 changes: 2 additions & 0 deletions core/lib/protobuf_config/src/proto/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
// Lints are suppressed because this module's body is machine-generated.
#![allow(warnings)]
// Pull in the Rust types emitted by the build script from the .proto schemas.
include!(concat!(env!("OUT_DIR"), "/src/proto/gen.rs"));
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
syntax = "proto3";

package zksync.config.configs;
package zksync.config;

message Prometheus {
optional uint32 listener_port = 1; // required
2 changes: 1 addition & 1 deletion etc/system-contracts
Submodule system-contracts updated 128 files