clippy: rm some type_complexity (#9276)
Co-authored-by: Matthias Seitz <[email protected]>
tcoratger and mattsse authored Jul 3, 2024
1 parent 4f3f506 commit ba37091
Showing 5 changed files with 35 additions and 15 deletions.
19 changes: 13 additions & 6 deletions crates/consensus/beacon/src/engine/mod.rs
@@ -88,6 +88,18 @@ const MAX_INVALID_HEADERS: u32 = 512u32;
/// If the distance exceeds this threshold, the pipeline will be used for sync.
pub const MIN_BLOCKS_FOR_PIPELINE_RUN: u64 = EPOCH_SLOTS;

/// Represents a pending forkchoice update.
///
/// This type encapsulates the necessary components for a pending forkchoice update
/// in the context of a beacon consensus engine.
///
/// It consists of:
/// - The current fork choice state.
/// - Optional payload attributes specific to the engine type.
/// - Sender for the result of a oneshot channel, conveying the outcome of the fork choice update.
type PendingForkchoiceUpdate<PayloadAttributes> =
(ForkchoiceState, Option<PayloadAttributes>, oneshot::Sender<RethResult<OnForkChoiceUpdated>>);

/// The beacon consensus engine is the driver that switches between historical and live sync.
///
/// The beacon consensus engine is itself driven by messages from the Consensus Layer, which are
@@ -189,12 +201,7 @@ where
/// It is recorded if we cannot process the forkchoice update because
/// a hook with database read-write access is active.
/// This is a temporary solution to always process missed FCUs.
#[allow(clippy::type_complexity)]
pending_forkchoice_update: Option<(
ForkchoiceState,
Option<EngineT::PayloadAttributes>,
oneshot::Sender<RethResult<OnForkChoiceUpdated>>,
)>,
pending_forkchoice_update: Option<PendingForkchoiceUpdate<EngineT::PayloadAttributes>>,
/// Tracks the header of invalid payloads that were rejected by the engine because they're
/// invalid.
invalid_headers: InvalidHeaderCache,
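The pattern applied in this file is to name the long tuple with a type alias so the field declaration stays short and `clippy::type_complexity` no longer fires. A minimal, self-contained sketch of that pattern follows; the types are stand-ins rather than reth's definitions, and `std::sync::mpsc` replaces tokio's `oneshot` purely to keep the example dependency-free.

```rust
use std::sync::mpsc;

// Stand-in types; the real ones live in reth's engine and primitives crates.
#[derive(Debug, Default)]
struct ForkchoiceState;
#[derive(Debug)]
struct OnForkChoiceUpdated;
type RethResult<T> = Result<T, String>;

/// Named alias for the pending-update tuple, mirroring the commit's approach.
type PendingForkchoiceUpdate<PayloadAttributes> =
    (ForkchoiceState, Option<PayloadAttributes>, mpsc::Sender<RethResult<OnForkChoiceUpdated>>);

struct Engine<PayloadAttributes> {
    // Without the alias, this field would need #[allow(clippy::type_complexity)].
    pending_forkchoice_update: Option<PendingForkchoiceUpdate<PayloadAttributes>>,
}

fn main() {
    let (tx, _rx) = mpsc::channel();
    let engine = Engine::<()> {
        pending_forkchoice_update: Some((ForkchoiceState, None, tx)),
    };
    assert!(engine.pending_forkchoice_update.is_some());
}
```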
11 changes: 9 additions & 2 deletions crates/etl/src/lib.rs
@@ -164,6 +164,14 @@ where
}
}

/// Type alias for the items stored in the heap of [`EtlIter`].
///
/// Each item in the heap is a tuple containing:
/// - A `Reverse` tuple of a key-value pair (`Vec<u8>, Vec<u8>`), used to maintain the heap in
/// ascending order of keys.
/// - An index (`usize`) representing the source file from which the key-value pair was read.
type HeapItem = (Reverse<(Vec<u8>, Vec<u8>)>, usize);

/// `EtlIter` is an iterator for traversing through sorted key-value pairs in a collection of ETL
/// files. These files are created using the [`Collector`] and contain data where keys are encoded
/// and values are compressed.
@@ -174,8 +182,7 @@ where
#[derive(Debug)]
pub struct EtlIter<'a> {
/// Heap managing the next items to be iterated.
#[allow(clippy::type_complexity)]
heap: BinaryHeap<(Reverse<(Vec<u8>, Vec<u8>)>, usize)>,
heap: BinaryHeap<HeapItem>,
/// Reference to the vector of ETL files being iterated over.
files: &'a mut Vec<EtlFile>,
}
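The `Reverse` wrapper in `HeapItem` is what makes the merge work: `BinaryHeap` is a max-heap, so reversing the comparison makes `pop()` yield the entry with the smallest key first, which is the order a k-way merge over sorted ETL files needs. A standalone sketch of that ordering property (not the reth iterator itself, which also refills the heap from the source file after each pop):

```rust
use std::cmp::Reverse;
use std::collections::BinaryHeap;

/// Same shape as the commit's `HeapItem`: a reversed key-value pair plus the
/// index of the source file it was read from.
type HeapItem = (Reverse<(Vec<u8>, Vec<u8>)>, usize);

fn main() {
    let mut heap: BinaryHeap<HeapItem> = BinaryHeap::new();
    heap.push((Reverse((b"b".to_vec(), b"2".to_vec())), 0));
    heap.push((Reverse((b"a".to_vec(), b"1".to_vec())), 1));

    // `Reverse` flips the max-heap ordering, so the smallest key comes out first.
    let (Reverse((key, _value)), file_idx) = heap.pop().unwrap();
    assert_eq!(key, b"a".to_vec());
    assert_eq!(file_idx, 1);
}
```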
1 change: 0 additions & 1 deletion crates/storage/db/benches/hash_keys.rs
@@ -130,7 +130,6 @@ where

/// Generates two batches. The first is to be inserted into the database before running the
/// benchmark. The second is to be benchmarked with.
#[allow(clippy::type_complexity)]
fn generate_batches<T>(size: usize) -> (Vec<TableRow<T>>, Vec<TableRow<T>>)
where
T: Table,
10 changes: 6 additions & 4 deletions crates/storage/db/src/static_file/cursor.rs
@@ -10,6 +10,9 @@ use std::sync::Arc;
#[derive(Debug, Deref, DerefMut)]
pub struct StaticFileCursor<'a>(NippyJarCursor<'a, SegmentHeader>);

/// Type alias for column results with optional values.
type ColumnResult<T> = ProviderResult<Option<T>>;

impl<'a> StaticFileCursor<'a> {
/// Returns a new [`StaticFileCursor`].
pub fn new(jar: &'a NippyJar<SegmentHeader>, reader: Arc<DataReader>) -> ProviderResult<Self> {
@@ -56,7 +59,7 @@ impl<'a> StaticFileCursor<'a> {
pub fn get_one<M: ColumnSelectorOne>(
&mut self,
key_or_num: KeyOrNumber<'_>,
) -> ProviderResult<Option<M::FIRST>> {
) -> ColumnResult<M::FIRST> {
let row = self.get(key_or_num, M::MASK)?;

match row {
@@ -69,7 +72,7 @@ impl<'a> StaticFileCursor<'a> {
pub fn get_two<M: ColumnSelectorTwo>(
&mut self,
key_or_num: KeyOrNumber<'_>,
) -> ProviderResult<Option<(M::FIRST, M::SECOND)>> {
) -> ColumnResult<(M::FIRST, M::SECOND)> {
let row = self.get(key_or_num, M::MASK)?;

match row {
@@ -79,11 +82,10 @@ impl<'a> StaticFileCursor<'a> {
}

/// Gets three column values from a row.
#[allow(clippy::type_complexity)]
pub fn get_three<M: ColumnSelectorThree>(
&mut self,
key_or_num: KeyOrNumber<'_>,
) -> ProviderResult<Option<(M::FIRST, M::SECOND, M::THIRD)>> {
) -> ColumnResult<(M::FIRST, M::SECOND, M::THIRD)> {
let row = self.get(key_or_num, M::MASK)?;

match row {
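`ColumnResult<T>` is just a named form of `Result<Option<T>, _>`: `Ok(None)` means the row was not found, `Err(_)` means the read itself failed. A minimal sketch under assumed types follows; `ProviderError` is a stand-in for reth's error type, and the getter is a flat function rather than the `ColumnSelector`-generic methods above.

```rust
/// Stand-in error type; reth uses its own `ProviderError`.
#[derive(Debug)]
struct ProviderError(&'static str);

type ProviderResult<T> = Result<T, ProviderError>;
/// Alias for column lookups that may legitimately find no row.
type ColumnResult<T> = ProviderResult<Option<T>>;

/// A getter written against the alias: `Ok(None)` means "row not found",
/// `Err(_)` means the read failed.
fn get_one(rows: &[(u64, &'static str)], key: u64) -> ColumnResult<&'static str> {
    Ok(rows.iter().find(|(k, _)| *k == key).map(|(_, v)| *v))
}

fn main() -> ProviderResult<()> {
    let rows = [(1, "header"), (2, "body")];
    assert_eq!(get_one(&rows, 2)?, Some("body"));
    assert_eq!(get_one(&rows, 9)?, None);
    Ok(())
}
```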
9 changes: 7 additions & 2 deletions examples/stateful-precompile/src/main.rs
@@ -30,6 +30,12 @@ use reth_tracing::{RethTracer, Tracer};
use schnellru::{ByLength, LruMap};
use std::{collections::HashMap, sync::Arc};

/// Type alias for the LRU cache used within the [`PrecompileCache`].
type PrecompileLRUCache = LruMap<(Bytes, u64), PrecompileResult>;

/// Type alias for the thread-safe `Arc<RwLock<_>>` wrapper around [`PrecompileLRUCache`].
type CachedPrecompileResult = Arc<RwLock<PrecompileLRUCache>>;

/// A cache for precompile inputs / outputs.
///
/// This assumes that the precompile is a standard precompile, as in `StandardPrecompileFn`, meaning
@@ -40,8 +46,7 @@ use std::{collections::HashMap, sync::Arc};
#[derive(Debug, Default)]
pub struct PrecompileCache {
/// Caches for each precompile input / output.
#[allow(clippy::type_complexity)]
cache: HashMap<(Address, SpecId), Arc<RwLock<LruMap<(Bytes, u64), PrecompileResult>>>>,
cache: HashMap<(Address, SpecId), CachedPrecompileResult>,
}

/// Custom EVM configuration
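The two aliases describe a two-level cache: an outer map keyed by `(Address, SpecId)`, whose values are shared, lockable inner caches keyed by precompile input. A simplified, std-only sketch of that nesting is below; a plain `HashMap` stands in for `schnellru::LruMap`, and the type names are illustrative rather than reth's.

```rust
use std::collections::HashMap;
use std::sync::{Arc, RwLock};

// Stand-ins for reth/revm types; the real cache keys on (Address, SpecId)
// and stores precompile input -> PrecompileResult.
type Address = [u8; 20];
type SpecId = u8;
type PrecompileResult = Result<Vec<u8>, String>;

/// Inner cache, playing the role of `PrecompileLRUCache` (a plain HashMap here
/// instead of an LRU map, to stay dependency-free).
type PrecompileCacheInner = HashMap<(Vec<u8>, u64), PrecompileResult>;
/// Thread-safe handle to one precompile's cache, mirroring `CachedPrecompileResult`.
type SharedPrecompileCache = Arc<RwLock<PrecompileCacheInner>>;

#[derive(Debug, Default)]
struct PrecompileCache {
    /// One shared cache per (precompile address, spec) pair.
    cache: HashMap<(Address, SpecId), SharedPrecompileCache>,
}

fn main() {
    let mut caches = PrecompileCache::default();
    let entry = caches.cache.entry(([0u8; 20], 1)).or_default().clone();
    entry.write().unwrap().insert((b"input".to_vec(), 100_000), Ok(b"output".to_vec()));
    assert_eq!(caches.cache.len(), 1);
}
```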
