separate indexer async v2 db from aptosdb
jillxuu committed Jan 26, 2024
1 parent e772f56 commit 65faadd
Showing 53 changed files with 319 additions and 342 deletions.
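In short, table info lookups used by the API's type conversion now go through an optional TableInfoReader handle backed by the standalone indexer async v2 DB instead of AptosDB's built-in indexer. A rough sketch of the new plumbing follows; only Context::new, as_converter, and the aptos_db_indexer::table_info_reader::TableInfoReader import are taken from the hunks below, and every other identifier is illustrative.

// Sketch only: shows how the optional table info reader is threaded through
// the API after this change. `open_indexer_async_v2_reader`, `node_config`,
// `db`, `mp_sender`, and `resolver` are hypothetical stand-ins.
use std::sync::Arc;
use aptos_db_indexer::table_info_reader::TableInfoReader;

// The node builds an optional reader over the separate indexer async v2 DB
// (None when that indexer is disabled).
let table_info_reader: Option<Arc<dyn TableInfoReader>> = open_indexer_async_v2_reader();

// The API context now stores the reader alongside the DB handle ...
let context = Context::new(chain_id, db.clone(), mp_sender, node_config, table_info_reader);

// ... and every conversion site hands it to the converter instead of relying
// on table info stored inside AptosDB.
let converter = resolver.as_converter(context.db.clone(), context.table_info_reader.clone());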
27 changes: 6 additions & 21 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -661,6 +661,7 @@ strum = "0.24.1"
strum_macros = "0.24.2"
syn = { version = "1.0.92", features = ["derive", "extra-traits"] }
sysinfo = "0.28.4"
tar = "0.4.40"
tempfile = "3.3.0"
termcolor = "1.1.2"
test-case = "3.1.0"
1 change: 1 addition & 0 deletions api/Cargo.toml
@@ -19,6 +19,7 @@ aptos-bcs-utils = { workspace = true }
aptos-build-info = { workspace = true }
aptos-config = { workspace = true }
aptos-crypto = { workspace = true }
aptos-db-indexer = { workspace = true }
aptos-framework = { workspace = true }
aptos-gas-schedule = { workspace = true }
aptos-global-constants = { workspace = true }
1 change: 1 addition & 0 deletions api/openapi-spec-generator/src/fake_context.rs
@@ -16,5 +16,6 @@ pub fn get_fake_context() -> Context {
Arc::new(MockDbReaderWriter),
mempool.ac_client,
NodeConfig::default(),
None, /* table info reader */
)
}
10 changes: 8 additions & 2 deletions api/src/accounts.rs
@@ -351,7 +351,10 @@ impl Account {
.latest_state_view_poem(&self.latest_ledger_info)?;
let converted_resources = state_view
.as_move_resolver()
.as_converter(self.context.db.clone())
.as_converter(
self.context.db.clone(),
self.context.table_info_reader.clone(),
)
.try_into_resources(resources.iter().map(|(k, v)| (k.clone(), v.as_slice())))
.context("Failed to build move resource response from data in DB")
.map_err(|err| {
@@ -545,7 +548,10 @@ impl Account {
})?;

resolver
.as_converter(self.context.db.clone())
.as_converter(
self.context.db.clone(),
self.context.table_info_reader.clone(),
)
.move_struct_fields(resource_type, &bytes)
.context("Failed to convert move structs from storage")
.map_err(|err| {
8 changes: 6 additions & 2 deletions api/src/context.rs
@@ -17,6 +17,7 @@ use aptos_api_types::{
};
use aptos_config::config::{NodeConfig, RoleType};
use aptos_crypto::HashValue;
use aptos_db_indexer::table_info_reader::TableInfoReader;
use aptos_gas_schedule::{AptosGasParameters, FromOnChainGasSchedule};
use aptos_logger::{error, warn};
use aptos_mempool::{MempoolClientRequest, MempoolClientSender, SubmissionStatus};
@@ -66,6 +67,7 @@ pub struct Context {
gas_schedule_cache: Arc<RwLock<GasScheduleCache>>,
gas_estimation_cache: Arc<RwLock<GasEstimationCache>>,
gas_limit_cache: Arc<RwLock<GasLimitCache>>,
pub table_info_reader: Option<Arc<dyn TableInfoReader>>,
}

impl std::fmt::Debug for Context {
@@ -80,6 +82,7 @@ impl Context {
db: Arc<dyn DbReader>,
mp_sender: MempoolClientSender,
node_config: NodeConfig,
table_info_reader: Option<Arc<dyn TableInfoReader>>,
) -> Self {
Self {
chain_id,
@@ -101,6 +104,7 @@
block_executor_onchain_config: OnChainExecutionConfig::default_if_missing()
.block_executor_onchain_config(),
})),
table_info_reader,
}
}

@@ -636,7 +640,7 @@

let state_view = self.latest_state_view_poem(ledger_info)?;
let resolver = state_view.as_move_resolver();
let converter = resolver.as_converter(self.db.clone());
let converter = resolver.as_converter(self.db.clone(), self.table_info_reader.clone());
let txns: Vec<aptos_api_types::Transaction> = data
.into_iter()
.map(|t| {
@@ -667,7 +671,7 @@

let state_view = self.latest_state_view_poem(ledger_info)?;
let resolver = state_view.as_move_resolver();
let converter = resolver.as_converter(self.db.clone());
let converter = resolver.as_converter(self.db.clone(), self.table_info_reader.clone());
let txns: Vec<aptos_api_types::Transaction> = data
.into_iter()
.map(|t| {
5 changes: 4 additions & 1 deletion api/src/events.rs
@@ -186,7 +186,10 @@ impl EventsApi {
.context
.latest_state_view_poem(&latest_ledger_info)?
.as_move_resolver()
.as_converter(self.context.db.clone())
.as_converter(
self.context.db.clone(),
self.context.table_info_reader.clone(),
)
.try_into_versioned_events(&events)
.context("Failed to convert events from storage into response")
.map_err(|err| {
5 changes: 4 additions & 1 deletion api/src/runtime.rs
@@ -10,6 +10,7 @@ use crate::{
};
use anyhow::Context as AnyhowContext;
use aptos_config::config::{ApiConfig, NodeConfig};
use aptos_db_indexer::table_info_reader::TableInfoReader;
use aptos_logger::info;
use aptos_mempool::MempoolClientSender;
use aptos_storage_interface::DbReader;
@@ -34,11 +35,12 @@ pub fn bootstrap(
chain_id: ChainId,
db: Arc<dyn DbReader>,
mp_sender: MempoolClientSender,
table_info_reader: Option<Arc<dyn TableInfoReader>>,
) -> anyhow::Result<Runtime> {
let max_runtime_workers = get_max_runtime_workers(&config.api);
let runtime = aptos_runtimes::spawn_named_runtime("api".into(), Some(max_runtime_workers));

let context = Context::new(chain_id, db, mp_sender, config.clone());
let context = Context::new(chain_id, db, mp_sender, config.clone(), table_info_reader);

attach_poem_to_runtime(runtime.handle(), context.clone(), config, false)
.context("Failed to attach poem to runtime")?;
@@ -321,6 +323,7 @@ mod tests {
ChainId::test(),
context.db.clone(),
context.mempool.ac_client.clone(),
None,
);
assert!(ret.is_ok());

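Correspondingly, callers of the API bootstrap now pass the reader through (or None when the indexer async v2 DB is not in use). A minimal sketch, assuming the leading node-config parameter that sits just above the visible hunk:

// Sketch only: the first argument is assumed from context (`config.api` and
// `config.clone()` are used in the body above); pass None when no table info
// reader is available.
let _api_runtime = bootstrap(
    &node_config,              // assumed parameter, not shown in this hunk
    chain_id,
    db.clone(),
    mp_sender.clone(),
    table_info_reader.clone(), // Option<Arc<dyn TableInfoReader>>
)?;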
10 changes: 8 additions & 2 deletions api/src/state.rs
@@ -315,7 +315,10 @@ impl StateApi {
AcceptType::Json => {
let resource = state_view
.as_move_resolver()
.as_converter(self.context.db.clone())
.as_converter(
self.context.db.clone(),
self.context.table_info_reader.clone(),
)
.try_into_resource(&resource_type, &bytes)
.context("Failed to deserialize resource data retrieved from DB")
.map_err(|err| {
@@ -421,7 +424,10 @@
.state_view(ledger_version.map(|inner| inner.0))?;

let resolver = state_view.as_move_resolver();
let converter = resolver.as_converter(self.context.db.clone());
let converter = resolver.as_converter(
self.context.db.clone(),
self.context.table_info_reader.clone(),
);

// Convert key to lookup version for DB
let vm_key = converter
2 changes: 1 addition & 1 deletion api/src/tests/converter_test.rs
@@ -21,7 +21,7 @@ async fn test_value_conversion() {

let state_view = context.latest_state_view();
let resolver = state_view.as_move_resolver();
let converter = resolver.as_converter(context.db);
let converter = resolver.as_converter(context.db, None);

assert_value_conversion(&converter, "u8", 1i32, VmMoveValue::U8(1));
assert_value_conversion(&converter, "u64", "1", VmMoveValue::U64(1));
24 changes: 18 additions & 6 deletions api/src/transactions.rs
@@ -762,7 +762,10 @@ impl TransactionsApi {
let timestamp =
self.context.get_block_timestamp(ledger_info, txn.version)?;
resolver
.as_converter(self.context.db.clone())
.as_converter(
self.context.db.clone(),
self.context.table_info_reader.clone(),
)
.try_into_onchain_transaction(timestamp, txn)
.context("Failed to convert on chain transaction to Transaction")
.map_err(|err| {
@@ -774,7 +777,10 @@
})?
},
TransactionData::Pending(txn) => resolver
.as_converter(self.context.db.clone())
.as_converter(
self.context.db.clone(),
self.context.table_info_reader.clone(),
)
.try_into_pending_transaction(*txn)
.context("Failed to convert on pending transaction to Transaction")
.map_err(|err| {
@@ -946,7 +952,10 @@
.context
.latest_state_view_poem(ledger_info)?
.as_move_resolver()
.as_converter(self.context.db.clone())
.as_converter(
self.context.db.clone(),
self.context.table_info_reader.clone(),
)
.try_into_signed_transaction_poem(data.0, self.context.chain_id())
.context("Failed to create SignedTransaction from SubmitTransactionRequest")
.map_err(|err| {
@@ -1025,7 +1034,7 @@ impl TransactionsApi {
.map(|(index, txn)| {
self.context
.latest_state_view_poem(ledger_info)?.as_move_resolver()
.as_converter(self.context.db.clone())
.as_converter(self.context.db.clone(), self.context.table_info_reader.clone())
.try_into_signed_transaction_poem(txn, self.context.chain_id())
.context(format!("Failed to create SignedTransaction from SubmitTransactionRequest at position {}", index))
.map_err(|err| {
@@ -1117,7 +1126,7 @@ impl TransactionsApi {

// We provide the pending transaction so that users have the hash associated
let pending_txn = resolver
.as_converter(self.context.db.clone())
.as_converter(self.context.db.clone(), self.context.table_info_reader.clone())
.try_into_pending_transaction_poem(txn)
.context("Failed to build PendingTransaction from mempool response, even though it said the request was accepted")
.map_err(|err| SubmitTransactionError::internal_with_code(
@@ -1322,7 +1331,10 @@ impl TransactionsApi {
let state_view = self.context.latest_state_view_poem(&ledger_info)?;
let resolver = state_view.as_move_resolver();
let raw_txn: RawTransaction = resolver
.as_converter(self.context.db.clone())
.as_converter(
self.context.db.clone(),
self.context.table_info_reader.clone(),
)
.try_into_raw_transaction_poem(request.transaction, self.context.chain_id())
.context("The given transaction is invalid")
.map_err(|err| {
6 changes: 3 additions & 3 deletions api/src/view_function.rs
@@ -97,7 +97,7 @@ fn view_request(
ViewFunctionRequest::Json(data) => {
let resolver = state_view.as_move_resolver();
resolver
.as_converter(context.db.clone())
.as_converter(context.db.clone(), context.table_info_reader.clone())
.convert_view_function(data.0)
.map_err(|err| {
BasicErrorWith404::bad_request_with_code(
@@ -171,7 +171,7 @@ fn view_request(
AcceptType::Json => {
let resolver = state_view.as_move_resolver();
let return_types = resolver
.as_converter(context.db.clone())
.as_converter(context.db.clone(), context.table_info_reader.clone())
.function_return_types(&view_function)
.and_then(|tys| {
tys.into_iter()
@@ -191,7 +191,7 @@
.zip(return_types.into_iter())
.map(|(v, ty)| {
resolver
.as_converter(context.db.clone())
.as_converter(context.db.clone(), context.table_info_reader.clone())
.try_into_move_value(&ty, &v)
})
.collect::<anyhow::Result<Vec<_>>>()
2 changes: 1 addition & 1 deletion api/test-context/src/test_context.rs
@@ -130,7 +130,6 @@ pub fn new_test_context(
false, /* indexer */
BUFFERED_STATE_TARGET_ITEMS,
DEFAULT_MAX_NUM_NODES_PER_LRU_CACHE_SHARD,
false, /* indexer async v2 */
)
.unwrap(),
)
@@ -146,6 +145,7 @@
db.clone(),
mempool.ac_client.clone(),
node_config.clone(),
None, /* table info reader */
);

// Configure the testing depending on which API version we're testing.
2 changes: 2 additions & 0 deletions api/types/Cargo.toml
@@ -16,6 +16,7 @@ rust-version = { workspace = true }
anyhow = { workspace = true }
aptos-config = { workspace = true }
aptos-crypto = { workspace = true }
aptos-db-indexer = { workspace = true }
aptos-framework = { workspace = true }
aptos-logger = { workspace = true }
aptos-openapi = { workspace = true }
@@ -35,3 +36,4 @@ poem-openapi = { workspace = true }
poem-openapi-derive = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
