-
Notifications
You must be signed in to change notification settings - Fork 1.4k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
7 changed files
with
985 additions
and
1 deletion.
There are no files selected for viewing
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,189 @@ | ||
#![allow(missing_docs, unreachable_pub)] | ||
use alloy_primitives::{map::HashMap, B256, U256}; | ||
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; | ||
use itertools::Itertools; | ||
use proptest::{prelude::*, strategy::ValueTree, test_runner::TestRunner}; | ||
use reth_trie::{ | ||
hashed_cursor::{noop::NoopHashedStorageCursor, HashedPostStateStorageCursor}, | ||
node_iter::{TrieElement, TrieNodeIter}, | ||
trie_cursor::{noop::NoopStorageTrieCursor, InMemoryStorageTrieCursor}, | ||
updates::StorageTrieUpdates, | ||
walker::TrieWalker, | ||
HashedStorage, | ||
}; | ||
use reth_trie_common::{HashBuilder, Nibbles}; | ||
use reth_trie_sparse::{SparseStateTrie, SparseTrie}; | ||
|
||
pub fn calculate_root_from_leaves(c: &mut Criterion) { | ||
let mut group = c.benchmark_group("calculate root from leaves"); | ||
group.sample_size(20); | ||
|
||
for size in [1_000, 5_000, 10_000, 100_000] { | ||
let state = generate_test_data(size); | ||
|
||
// hash builder | ||
group.bench_function(BenchmarkId::new("hash builder", size), |b| { | ||
b.iter_with_setup(HashBuilder::default, |mut hb| { | ||
for (key, value) in state.iter().sorted_by_key(|(key, _)| *key) { | ||
hb.add_leaf(Nibbles::unpack(key), &alloy_rlp::encode_fixed_size(value)); | ||
} | ||
hb.root(); | ||
}) | ||
}); | ||
|
||
// sparse trie | ||
group.bench_function(BenchmarkId::new("sparse trie", size), |b| { | ||
b.iter_with_setup( | ||
|| SparseStateTrie::from_state(SparseTrie::revealed_empty()), | ||
|mut sparse| { | ||
for (key, value) in &state { | ||
sparse.update_leaf( | ||
Nibbles::unpack(key), | ||
alloy_rlp::encode_fixed_size(value).to_vec(), | ||
); | ||
} | ||
sparse.root().unwrap(); | ||
}, | ||
) | ||
}); | ||
} | ||
} | ||
|
||
/// Benchmarks incremental root recomputation: a trie seeded with `init_size`
/// leaves receives `num_updates` batches of `update_size` leaf updates, and the
/// root is recomputed after each batch. Compares the `HashBuilder` + trie
/// walker pipeline against the sparse trie.
pub fn calculate_root_from_leaves_repeated(c: &mut Criterion) {
    let mut group = c.benchmark_group("calculate root from leaves repeated");
    group.sample_size(20);

    for init_size in [1_000, 10_000, 100_000] {
        let init_state = generate_test_data(init_size);

        for update_size in [100, 1_000, 5_000, 10_000] {
            for num_updates in [1, 3, 5, 10] {
                let updates =
                    (0..num_updates).map(|_| generate_test_data(update_size)).collect::<Vec<_>>();

                // hash builder
                let benchmark_id = BenchmarkId::new(
                    "hash builder",
                    format!("init size {init_size} | update size {update_size} | num updates {num_updates}"),
                );
                group.bench_function(benchmark_id, |b| {
                    b.iter_with_setup(
                        || {
                            // Setup (not measured): build the initial hashed storage,
                            // pre-convert the update batches, and compute the initial
                            // trie updates via a hash builder run over the init state.
                            let init_storage = HashedStorage::from_iter(false, init_state.clone());
                            let storage_updates = updates
                                .clone()
                                .into_iter()
                                .map(|update| HashedStorage::from_iter(false, update))
                                .collect::<Vec<_>>();

                            let mut hb = HashBuilder::default().with_updates(true);
                            for (key, value) in init_state.iter().sorted_by_key(|(key, _)| *key) {
                                hb.add_leaf(
                                    Nibbles::unpack(key),
                                    &alloy_rlp::encode_fixed_size(value),
                                );
                            }
                            hb.root();

                            let (_, updates) = hb.split();
                            let trie_updates = StorageTrieUpdates::new(updates);
                            (init_storage, storage_updates, trie_updates)
                        },
                        |(init_storage, storage_updates, mut trie_updates)| {
                            // Measured: apply each update batch and recompute the root.
                            let mut storage = init_storage;
                            for update in storage_updates {
                                storage.extend(&update);

                                // Only paths touched by this batch need revisiting.
                                let prefix_set = update.construct_prefix_set().freeze();
                                let storage_sorted = storage.clone().into_sorted();
                                let trie_updates_sorted = trie_updates.clone().into_sorted();

                                // Walk the in-memory overlay; the noop cursors supply
                                // no persisted nodes underneath it.
                                let walker = TrieWalker::new(
                                    InMemoryStorageTrieCursor::new(
                                        B256::ZERO,
                                        NoopStorageTrieCursor::default(),
                                        Some(&trie_updates_sorted),
                                    ),
                                    prefix_set,
                                );
                                let mut node_iter = TrieNodeIter::new(
                                    walker,
                                    HashedPostStateStorageCursor::new(
                                        NoopHashedStorageCursor::default(),
                                        Some(&storage_sorted),
                                    ),
                                );

                                // Re-derive the root from the walked branches/leaves.
                                let mut hb = HashBuilder::default().with_updates(true);
                                while let Some(node) = node_iter.try_next().unwrap() {
                                    match node {
                                        TrieElement::Branch(node) => {
                                            hb.add_branch(
                                                node.key,
                                                node.value,
                                                node.children_are_in_trie,
                                            );
                                        }
                                        TrieElement::Leaf(hashed_slot, value) => {
                                            hb.add_leaf(
                                                Nibbles::unpack(hashed_slot),
                                                alloy_rlp::encode_fixed_size(&value).as_ref(),
                                            );
                                        }
                                    }
                                }
                                hb.root();

                                // Carry the accumulated trie updates into the next batch.
                                trie_updates.finalize(node_iter.walker, hb);
                            }
                        },
                    )
                });

                // sparse trie
                let benchmark_id = BenchmarkId::new(
                    "sparse trie",
                    format!("init size {init_size} | update size {update_size} | num updates {num_updates}"),
                );
                group.bench_function(benchmark_id, |b| {
                    b.iter_with_setup(
                        || {
                            // Setup (not measured): populate the sparse trie with the
                            // initial state and compute its root once.
                            let mut sparse =
                                SparseStateTrie::from_state(SparseTrie::revealed_empty());
                            for (key, value) in &init_state {
                                sparse.update_leaf(
                                    Nibbles::unpack(key),
                                    alloy_rlp::encode_fixed_size(value).to_vec(),
                                );
                            }
                            sparse.root().unwrap();
                            sparse
                        },
                        |mut sparse| {
                            // Measured: apply each batch, recomputing the root after
                            // every batch (mirrors the hash builder benchmark above).
                            for update in &updates {
                                for (key, value) in update {
                                    sparse.update_leaf(
                                        Nibbles::unpack(key),
                                        alloy_rlp::encode_fixed_size(value).to_vec(),
                                    );
                                }
                                sparse.root().unwrap();
                            }
                        },
                    )
                });
            }
        }
    }
}
|
||
fn generate_test_data(size: usize) -> HashMap<B256, U256> { | ||
let mut runner = TestRunner::new(ProptestConfig::default()); | ||
proptest::collection::hash_map(any::<B256>(), any::<U256>(), size) | ||
.new_tree(&mut runner) | ||
.unwrap() | ||
.current() | ||
} | ||
|
||
// Register both root-calculation benchmarks under the `root` group and
// generate the criterion benchmark harness entry point.
criterion_group!(root, calculate_root_from_leaves, calculate_root_from_leaves_repeated);
criterion_main!(root);
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1 +1,7 @@ | ||
//! The implementation of sparse MPT.

// Sparse state trie layered over the core sparse trie (accounts + storages).
mod state;
pub use state::*;

// Core sparse trie implementation.
mod trie;
pub use trie::*;
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,129 @@ | ||
use crate::SparseTrie; | ||
use alloy_primitives::{ | ||
map::{HashMap, HashSet}, | ||
Bytes, B256, | ||
}; | ||
use alloy_rlp::Decodable; | ||
use reth_trie::{Nibbles, TrieNode}; | ||
|
||
/// Sparse state trie representing lazy-loaded Ethereum state trie. | ||
#[derive(Default, Debug)] | ||
pub struct SparseStateTrie { | ||
/// Sparse account trie. | ||
pub(crate) state: SparseTrie, | ||
/// Sparse storage tries. | ||
#[allow(dead_code)] | ||
pub(crate) storages: HashMap<B256, SparseTrie>, | ||
/// Collection of revealed account and storage keys. | ||
#[allow(dead_code)] | ||
pub(crate) revealed: HashMap<B256, HashSet<B256>>, | ||
} | ||
|
||
impl SparseStateTrie { | ||
/// Create state trie from state trie. | ||
pub fn from_state(state: SparseTrie) -> Self { | ||
Self { state, ..Default::default() } | ||
} | ||
|
||
/// Returns `true` if account was already revealed. | ||
pub fn is_account_revealed(&self, account: &B256) -> bool { | ||
self.revealed.contains_key(account) | ||
} | ||
|
||
/// Returns `true` if storage slot for account was already revealed. | ||
pub fn is_storage_slot_revealed(&self, account: &B256, slot: &B256) -> bool { | ||
self.revealed.get(account).map_or(false, |slots| slots.contains(slot)) | ||
} | ||
|
||
/// Reveal unknown trie paths from provided leaf path and its proof. | ||
/// | ||
/// # Panics | ||
/// | ||
/// This method panics on invalid proof if `debug_assertions` are enabled. | ||
/// However, it does not extensively validate the proof. | ||
pub fn reveal_account( | ||
&mut self, | ||
account: B256, | ||
proof: impl IntoIterator<Item = (Nibbles, Bytes)>, | ||
) -> alloy_rlp::Result<()> { | ||
let mut proof = proof.into_iter().peekable(); | ||
|
||
// reveal root and initialize the trie of not already | ||
let Some((path, root)) = proof.next() else { return Ok(()) }; | ||
debug_assert!(path.is_empty(), "first proof node is not root"); | ||
let root_node = TrieNode::decode(&mut &root[..])?; | ||
debug_assert!( | ||
!matches!(root_node, TrieNode::EmptyRoot) || proof.peek().is_none(), | ||
"invalid proof" | ||
); | ||
let trie = self.state.reveal_root(root_node)?; | ||
|
||
// add the remaining proof nodes | ||
for (path, bytes) in proof { | ||
let node = TrieNode::decode(&mut &bytes[..])?; | ||
trie.reveal_node(path, node)?; | ||
} | ||
|
||
// Mark leaf path as revealed. | ||
self.revealed.entry(account).or_default(); | ||
|
||
Ok(()) | ||
} | ||
|
||
/// Returns sparse trie root if the the trie has been revealed. | ||
pub fn root(&mut self) -> Option<B256> { | ||
self.state.root() | ||
} | ||
|
||
/// Update the leaf node | ||
pub fn update_leaf(&mut self, path: Nibbles, value: Vec<u8>) { | ||
self.state.as_revealed_mut().unwrap().update_leaf(path, value); | ||
} | ||
} | ||
|
||
#[cfg(test)]
mod tests {
    use super::*;
    use reth_trie::HashBuilder;
    use reth_trie_common::proof::ProofRetainer;

    #[test]
    fn sparse_trie_reveal_empty() {
        // Produce a proof for the empty trie root via the hash builder.
        let retainer = ProofRetainer::from_iter([Nibbles::default()]);
        let mut hash_builder = HashBuilder::default().with_proof_retainer(retainer);
        hash_builder.root();
        let proofs = hash_builder.take_proof_nodes();
        assert_eq!(proofs.len(), 1);

        // Revealing the empty-root proof turns a blind trie into a revealed
        // empty trie.
        let mut sparse = SparseStateTrie::default();
        assert_eq!(sparse.state, SparseTrie::Blind);
        sparse.reveal_account(Default::default(), proofs.into_inner()).unwrap();
        assert_eq!(sparse.state, SparseTrie::revealed_empty());
    }

    // These tests rely on `debug_assert!` firing inside `reveal_account`,
    // so they only exist in debug builds.
    #[cfg(debug_assertions)]
    mod debug_assertions {
        use super::*;
        use alloy_primitives::Bytes;
        use alloy_rlp::EMPTY_STRING_CODE;

        #[test]
        #[should_panic]
        fn reveal_first_node_not_root() {
            // First proof node has a non-empty path, so it cannot be the root.
            let mut sparse = SparseStateTrie::default();
            let proof = [(Nibbles::from_nibbles(&[0x1]), Bytes::from([EMPTY_STRING_CODE]))];
            sparse.reveal_account(Default::default(), proof).unwrap();
        }

        #[test]
        #[should_panic]
        fn reveal_invalid_proof_with_empty_root() {
            // An empty root must not be followed by further proof nodes.
            let mut sparse = SparseStateTrie::default();
            let proof = [
                (Nibbles::default(), Bytes::from([EMPTY_STRING_CODE])),
                (Nibbles::from_nibbles(&[0x1]), Bytes::new()),
            ];
            sparse.reveal_account(Default::default(), proof).unwrap();
        }
    }
}
Oops, something went wrong.