Skip to content

Commit

Permalink
Merge pull request #135 from HerodotusDev/more-chain
Browse files Browse the repository at this point in the history
feat: support multiple chains as origin chain
  • Loading branch information
rkdud007 authored Sep 3, 2024
2 parents 01332fc + 5586e24 commit 16ad7d5
Show file tree
Hide file tree
Showing 8 changed files with 41 additions and 102 deletions.
10 changes: 6 additions & 4 deletions .env.example
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
# Note that CHAIN_ID and RPC_URL are both required for fetch data
CHAIN_ID=ETH_SEPOLIA
RPC_URL=https://goerli.infura.io/v3/your-infura-api-key
# Note that RPC_URL_{CHAIN_ID} is required to fetch data
RPC_URL_ETHEREUM_SEPOLIA=https://sepolia.infura.io/v3/your-infura-api-key
# this value is optional
RPC_CHUNK_SIZE_ETHEREUM_SEPOLIA=2000

# Optional
RPC_CHUNK_SIZE=2000
DRY_RUN_CAIRO_PATH= # path for dry run cairo
SOUND_RUN_CAIRO_PATH= # path for sound run cairo
SAVE_FETCH_KEYS_FILE= # path for dry run output file
13 changes: 4 additions & 9 deletions cli/src/cli.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@ use anyhow::Result;
use clap::Parser;
use hdp::primitives::processed_types::cairo_format::query::ProcessorInput;
use hdp::primitives::request::{SubmitBatchQuery, Task};
use hdp::primitives::ChainId;
use hdp::processor::{self, Processor};
use hdp::{
hdp_run,
Expand Down Expand Up @@ -94,8 +93,6 @@ pub async fn process_entry_run(args: ProcessArgs) -> Result<()> {

pub async fn module_entry_run(args: RunModuleArgs) -> Result<()> {
let config = hdp_run::HdpRunConfig::init(
args.rpc_url,
args.chain_id,
args.dry_run_cairo_file,
args.sound_run_cairo_file,
args.program_input_file,
Expand All @@ -121,8 +118,6 @@ pub async fn module_entry_run(args: RunModuleArgs) -> Result<()> {

pub async fn datalake_entry_run(args: RunDatalakeArgs) -> Result<()> {
let config = hdp_run::HdpRunConfig::init(
args.rpc_url,
args.chain_id,
None,
args.sound_run_cairo_file,
args.program_input_file,
Expand All @@ -133,26 +128,28 @@ pub async fn datalake_entry_run(args: RunDatalakeArgs) -> Result<()> {
);
let parsed_datalake = match args.datalake {
DataLakeCommands::BlockSampled {
chain_id,
block_range_start,
block_range_end,
sampled_property,
increment,
} => DatalakeEnvelope::BlockSampled(BlockSampledDatalake::new(
ChainId::EthereumSepolia,
chain_id,
block_range_start,
block_range_end,
increment,
sampled_property,
)),
DataLakeCommands::TransactionsInBlock {
chain_id,
target_block,
sampled_property,
start_index,
end_index,
increment,
included_types,
} => DatalakeEnvelope::TransactionsInBlock(TransactionsInBlockDatalake::new(
ChainId::EthereumSepolia,
chain_id,
target_block,
sampled_property,
start_index,
Expand All @@ -176,8 +173,6 @@ pub async fn entry_run(args: RunArgs) -> Result<()> {
let parsed: SubmitBatchQuery = serde_json::from_str(&request_context)
.expect("Invalid format of request. Cannot parse it.");
let config = hdp_run::HdpRunConfig::init(
args.rpc_url,
Some(parsed.destination_chain_id),
args.dry_run_cairo_file,
args.sound_run_cairo_file,
args.program_input_file,
Expand Down
7 changes: 0 additions & 7 deletions cli/src/commands/run.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
use crate::commands::Parser;
use starknet::providers::Url;
use std::path::PathBuf;

#[derive(Parser, Debug)]
Expand All @@ -8,12 +7,6 @@ pub struct RunArgs {
#[arg(short, long)]
pub request_file: PathBuf,

/// The RPC URL to fetch the data.
///
/// Can be overwritten by `RPC_URL` environment variable.
#[arg(long)]
pub rpc_url: Option<Url>,

/// dry run contract bootloader program.
/// only used for module task
#[arg(long)]
Expand Down
11 changes: 4 additions & 7 deletions cli/src/commands/run_datalake.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@ use hdp::primitives::{
},
ChainId,
};
use starknet::providers::Url;

#[derive(Parser, Debug)]
pub struct RunDatalakeArgs {
Expand All @@ -24,12 +23,6 @@ pub struct RunDatalakeArgs {
#[command(subcommand)]
pub datalake: DataLakeCommands,

/// The RPC URL to fetch the datalake
pub rpc_url: Option<Url>,

/// The chain id to fetch the datalake
pub chain_id: Option<ChainId>,

/// Path to save program input file after pre-processing.
///
/// This will be input data for cairo program
Expand Down Expand Up @@ -64,6 +57,8 @@ pub enum DataLakeCommands {
#[command(arg_required_else_help = true)]
#[command(short_flag = 's')]
BlockSampled {
/// Chain id
chain_id: ChainId,
/// Block number range start (inclusive)
block_range_start: BlockNumber,
/// Block number range end (inclusive)
Expand All @@ -78,6 +73,8 @@ pub enum DataLakeCommands {
#[command(arg_required_else_help = true)]
#[command(short_flag = 't')]
TransactionsInBlock {
/// Chain id
chain_id: ChainId,
/// Target block number
target_block: BlockNumber,
/// Sampled property
Expand Down
14 changes: 0 additions & 14 deletions cli/src/commands/run_module.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
use clap::{arg, Parser};
use hdp::primitives::ChainId;
use starknet::providers::Url;
use std::path::PathBuf;

#[derive(Parser, Debug)]
Expand Down Expand Up @@ -29,18 +27,6 @@ pub struct RunModuleArgs {
#[arg(long)]
pub save_fetch_keys_file: Option<PathBuf>,

/// The RPC URL to fetch the data.
///
/// Can be overwritten by `RPC_URL` environment variable.
#[arg(long)]
pub rpc_url: Option<Url>,

/// The chain id to fetch the data.
///
/// Can be overwritten by `CHAIN_ID` environment variable
#[arg(long)]
pub chain_id: Option<ChainId>,

/// dry run contract bootloader program.
/// only used for module task
#[arg(long)]
Expand Down
2 changes: 0 additions & 2 deletions cli/src/interactive.rs
Original file line number Diff line number Diff line change
Expand Up @@ -350,8 +350,6 @@ pub async fn run_interactive() -> anyhow::Result<()> {
.prompt()?
.into();
let config = hdp_run::HdpRunConfig::init(
rpc_url,
chain_id,
None,
None,
cairo_input,
Expand Down
31 changes: 0 additions & 31 deletions hdp-config.example.toml

This file was deleted.

55 changes: 27 additions & 28 deletions hdp/src/hdp_run.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,6 @@ impl Default for HdpRunConfig {

impl HdpRunConfig {
pub fn init(
cli_rpc_url: Option<Url>,
cli_chain_id: Option<ChainId>,
cli_dry_run_cairo_file: Option<PathBuf>,
cli_sound_run_cairo_file: Option<PathBuf>,
program_input_file: PathBuf,
Expand All @@ -53,22 +51,33 @@ impl HdpRunConfig {
batch_proof_file: Option<PathBuf>,
cli_cairo_pie_file: Option<PathBuf>,
) -> Self {
let chain_id = cli_chain_id.unwrap_or_else(|| {
env::var("CHAIN_ID")
.expect("CHAIN_ID must be set")
.parse()
.expect("CHAIN_ID must be a number")
});
let rpc_url = cli_rpc_url.unwrap_or_else(|| {
env::var("RPC_URL")
.expect("RPC_URL must be set")
.parse()
.expect("RPC_URL must be a valid URL")
});
let rpc_chunk_size = env::var("RPC_CHUNK_SIZE")
.unwrap_or_else(|_| "40".to_string())
.parse()
.expect("RPC_CHUNK_SIZE must be a number");
let mut provider_config = HashMap::new();

// Iterate through environment variables to find RPC_URL and RPC_CHUNK_SIZE configurations
for (key, value) in env::vars() {
if key.starts_with("RPC_URL_") {
let chain_id: ChainId = key[8..]
.parse()
.expect("Invalid chain ID in RPC_URL env var");
let rpc_url: Url = value.parse().expect("Invalid URL in RPC_URL env var");

let chunk_size_key = format!("RPC_CHUNK_SIZE_{}", chain_id);
let rpc_chunk_size: u64 = env::var(&chunk_size_key)
.unwrap_or_else(|_| "40".to_string())
.parse()
.expect(&format!("{} must be a number", chunk_size_key));

provider_config.insert(
chain_id,
ProviderConfig {
rpc_url,
chain_id,
max_requests: rpc_chunk_size,
},
);
}
}

let save_fetch_keys_file: Option<PathBuf> = cli_save_fetch_keys_file
.or_else(|| env::var("SAVE_FETCH_KEYS_FILE").ok().map(PathBuf::from));
let dry_run_cairo_path: PathBuf = cli_dry_run_cairo_file.unwrap_or_else(|| {
Expand All @@ -84,16 +93,6 @@ impl HdpRunConfig {
.expect("SOUND_RUN_CAIRO_PATH must be a path to a cairo file")
});

let mut provider_config = HashMap::new();
provider_config.insert(
chain_id,
ProviderConfig {
rpc_url,
chain_id,
max_requests: rpc_chunk_size,
},
);

let config = HdpRunConfig {
provider_config,
dry_run_program_path: dry_run_cairo_path,
Expand Down

0 comments on commit 16ad7d5

Please sign in to comment.