support StarknetProvider #138

Merged: 6 commits, Sep 4, 2024
Changes from all commits
2 changes: 2 additions & 0 deletions .env.example
@@ -3,6 +3,8 @@ RPC_URL_ETHEREUM_SEPOLIA=https://goerli.infura.io/v3/your-infura-api-key
 # this value is optional
 RPC_CHUNK_SIZE_ETHEREUM_SEPOLIA=2000

+RPC_URL_STARKNET_SEPOLIA= # if it's Starknet, make sure to use a Pathfinder node
+
 # Optional
 DRY_RUN_CAIRO_PATH= # path for dry run cairo
 SOUND_RUN_CAIRO_PATH= # path for sound run cairo
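A minimal sketch (hypothetical, not the crate's actual config loader) of how the new variable might be read at startup:

// Hypothetical sketch of reading the new variable; the crate's actual
// ProviderConfig loading may differ.
fn starknet_rpc_url() -> Result<reqwest::Url, Box<dyn std::error::Error>> {
    let raw = std::env::var("RPC_URL_STARKNET_SEPOLIA")?;
    Ok(raw.parse()?)
}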
5 changes: 2 additions & 3 deletions .github/workflows/ci.yml
@@ -28,7 +28,6 @@ jobs:
         run: |
           rustup component add clippy
           rustup component add rustfmt
-      - name: Install cargo-make
-        run: cargo install --debug cargo-make
+      - uses: taiki-e/install-action@just
       - name: Run clippy and formatter checks
-        run: cargo make run-ci-flow
+        run: just run-ci-flow
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

3 changes: 2 additions & 1 deletion Cargo.toml
@@ -1,8 +1,8 @@
 [workspace]
+resolver = "2"
 members = ["cli", "examples/private-input-module", "hdp"]

 [workspace.package]
-resolver = "2"
 version = "0.4.0"
 edition = "2021"
 license-file = "LICENSE"
@@ -37,6 +37,7 @@ rand = "0.8.4"
 regex = "1"
 starknet = "0.10.0"
 starknet-crypto = "0.6.1"
+starknet-types-core = "0.1.0"
 cairo-lang-starknet-classes = "2.7.0"
 cairo-vm = "1.0.0-rc6"
 futures = "0.3.30"
29 changes: 0 additions & 29 deletions Makefile.toml

This file was deleted.

1 change: 1 addition & 0 deletions hdp/Cargo.toml
@@ -24,6 +24,7 @@ serde = { workspace = true }
 serde_with = { workspace = true }
 serde_json = { workspace = true }
 starknet-crypto = { workspace = true }
+starknet-types-core = { workspace = true }
 starknet = { workspace = true }
 thiserror.workspace = true
 alloy-merkle-tree = { workspace = true }
22 changes: 20 additions & 2 deletions hdp/src/provider/error.rs
@@ -1,9 +1,8 @@
+use alloy::primitives::BlockNumber;
 use thiserror::Error;

 use crate::provider::indexer::IndexerError;

-use super::evm::rpc::RpcProviderError;
-
 /// Error type for provider
 #[derive(Error, Debug)]
 pub enum ProviderError {
@@ -34,3 +33,22 @@ pub enum ProviderError {
     #[error("Fetch key error: {0}")]
     FetchKeyError(String),
 }
+
+/// Error from [`RpcProvider`]
+#[derive(Error, Debug)]
+pub enum RpcProviderError {
+    #[error("Failed to send proofs with mpsc")]
+    MpscError(
+        #[from]
+        tokio::sync::mpsc::error::SendError<(
+            BlockNumber,
+            alloy::rpc::types::EIP1186AccountProofResponse,
+        )>,
+    ),
+
+    #[error("Failed to fetch proofs: {0}")]
+    ReqwestError(#[from] reqwest::Error),
+
+    #[error("Failed to parse response: {0}")]
+    SerdeJsonError(#[from] serde_json::Error),
+}
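Because each variant carries a #[from] conversion, callers can bubble these errors up with ?. A minimal sketch (hypothetical helper, not part of this PR) of a plain reqwest fetch followed by JSON parsing:

use crate::provider::error::RpcProviderError;

// Hypothetical helper: reqwest::Error and serde_json::Error convert into
// RpcProviderError automatically through the #[from] attributes above.
async fn fetch_json(url: &str) -> Result<serde_json::Value, RpcProviderError> {
    let body = reqwest::get(url).await?.text().await?; // -> ReqwestError
    let value = serde_json::from_str(&body)?;          // -> SerdeJsonError
    Ok(value)
}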
14 changes: 1 addition & 13 deletions hdp/src/provider/evm/rpc.rs
@@ -15,25 +15,13 @@ use alloy::{
 };
 use futures::future::join_all;
 use reqwest::Url;
-use thiserror::Error;
 use tokio::sync::{
     mpsc::{self, Sender},
     RwLock,
 };
 use tracing::debug;

-/// Error from [`RpcProvider`]
-#[derive(Error, Debug)]
-pub enum RpcProviderError {
-    #[error("Failed to send proofs with mpsc")]
-    MpscError(
-        #[from]
-        tokio::sync::mpsc::error::SendError<(
-            BlockNumber,
-            alloy::rpc::types::EIP1186AccountProofResponse,
-        )>,
-    ),
-}
+use crate::provider::error::RpcProviderError;

 /// RPC provider for fetching data from Ethereum RPC
 /// It is a wrapper around the alloy provider, using eth_getProof for fetching account and storage proofs
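For context on the doc comment above: eth_getProof (EIP-1186) takes an account address, a list of storage keys, and a block number or tag. A hand-rolled sketch of the call, for illustration only; the provider in this PR goes through alloy's typed client instead, and the address here is a placeholder:

use serde_json::json;

// Hand-rolled eth_getProof (EIP-1186) request, for illustration only.
async fn get_proof_raw(rpc_url: &str) -> Result<serde_json::Value, reqwest::Error> {
    let request = json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "eth_getProof",
        "params": [
            "0x0000000000000000000000000000000000000000", // account address (placeholder)
            [],                                           // storage keys to prove (none here)
            "latest"                                      // block number or tag
        ]
    });
    reqwest::Client::new()
        .post(rpc_url)
        .json(&request)
        .send()
        .await?
        .json::<serde_json::Value>()
        .await
}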
4 changes: 3 additions & 1 deletion hdp/src/provider/starknet/mod.rs
@@ -1 +1,3 @@
-pub struct StarknetProvider {}
+pub mod provider;
+pub mod rpc;
+pub mod types;
121 changes: 121 additions & 0 deletions hdp/src/provider/starknet/provider.rs
@@ -0,0 +1,121 @@
use std::{collections::HashMap, time::Instant};

use alloy::primitives::BlockNumber;
use itertools::Itertools;
use starknet_types_core::felt::Felt;
use tracing::info;

use crate::provider::{config::ProviderConfig, error::ProviderError, indexer::Indexer};

use super::{rpc::RpcProvider, types::GetProofOutput};

type AccountProofsResult = Result<HashMap<BlockNumber, GetProofOutput>, ProviderError>;
type StorageProofsResult = Result<HashMap<BlockNumber, GetProofOutput>, ProviderError>;

pub struct StarknetProvider {
    /// Account and storage trie provider
    pub(crate) rpc_provider: RpcProvider,
    /// Header provider
    // TODO: indexer is not supported for Starknet yet
    pub(crate) _header_provider: Indexer,
}

#[cfg(feature = "test_utils")]
impl Default for StarknetProvider {
    fn default() -> Self {
        Self::new(&ProviderConfig::default())
    }
}

impl StarknetProvider {
    pub fn new(config: &ProviderConfig) -> Self {
        let rpc_provider = RpcProvider::new(config.rpc_url.to_owned(), config.max_requests);
        let indexer = Indexer::new(config.chain_id);
        Self {
            rpc_provider,
            _header_provider: indexer,
        }
    }

    /// Fetches the account proofs for the given block range.
    /// The account proofs are fetched from the RPC provider.
    ///
    /// Returns:
    /// - Account proofs mapped by block number
    pub async fn get_range_of_account_proofs(
        &self,
        from_block: BlockNumber,
        to_block: BlockNumber,
        increment: u64,
        address: Felt,
    ) -> AccountProofsResult {
        let start_fetch = Instant::now();

        let target_blocks_batch: Vec<Vec<BlockNumber>> =
            self._chunk_block_range(from_block, to_block, increment);

        let mut fetched_accounts_proofs_with_blocks_map = HashMap::new();
        for target_blocks in target_blocks_batch {
            fetched_accounts_proofs_with_blocks_map.extend(
                self.rpc_provider
                    .get_account_proofs(target_blocks, address)
                    .await?,
            );
        }

        let duration = start_fetch.elapsed();
        info!("time taken (Account Proofs Fetch): {:?}", duration);

        Ok(fetched_accounts_proofs_with_blocks_map)
    }

    /// Fetches the storage proofs for the given block range.
    /// The storage proofs are fetched from the RPC provider.
    ///
    /// Returns:
    /// - Storage proofs mapped by block number
    pub async fn get_range_of_storage_proofs(
        &self,
        from_block: BlockNumber,
        to_block: BlockNumber,
        increment: u64,
        address: Felt,
        storage_slot: Felt,
    ) -> StorageProofsResult {
        let start_fetch = Instant::now();

        let target_blocks_batch: Vec<Vec<BlockNumber>> =
            self._chunk_block_range(from_block, to_block, increment);

        let mut processed_accounts = HashMap::new();
        for target_blocks in target_blocks_batch {
            processed_accounts.extend(
                self.rpc_provider
                    .get_storage_proofs(target_blocks, address, storage_slot)
                    .await?,
            );
        }

        let duration = start_fetch.elapsed();
        info!("time taken (Storage Proofs Fetch): {:?}", duration);

        Ok(processed_accounts)
    }

    /// Chunks the block range into smaller ranges of at most 800 blocks.
    /// This avoids fetching too many blocks at once from the RPC provider.
    /// Meant to be used with data lake definitions, which have sequential block numbers.
    pub(crate) fn _chunk_block_range(
        &self,
        from_block: BlockNumber,
        to_block: BlockNumber,
        increment: u64,
    ) -> Vec<Vec<BlockNumber>> {
        (from_block..=to_block)
            .step_by(increment as usize)
            .chunks(800)
            .into_iter()
            .map(|chunk| chunk.collect())
            .collect()
    }
}
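A minimal usage sketch (hypothetical, not part of the diff): fetching account proofs over a range with the new provider. It assumes ProviderConfig::default() is available, as in the test_utils impl above; the address and block numbers are placeholders.

use starknet_types_core::felt::Felt;

// Hypothetical usage; assumes the configured rpc_url points at a Pathfinder
// node for Starknet Sepolia, per the .env.example comment.
async fn example(config: &ProviderConfig) -> Result<(), ProviderError> {
    let provider = StarknetProvider::new(config);
    // Placeholder account address.
    let address = Felt::from_hex("0x123").expect("valid hex literal");

    // Every block in [100_000, 100_100]; a range with more than 800 target
    // blocks would be split into batches of 800 by _chunk_block_range.
    let proofs = provider
        .get_range_of_account_proofs(100_000, 100_100, 1, address)
        .await?;
    println!("fetched proofs for {} blocks", proofs.len());
    Ok(())
}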