From 920dd313a4449ad88000517fbb7e3d0194f43597 Mon Sep 17 00:00:00 2001
From: yuunlimm
Date: Mon, 9 Dec 2024 17:32:33 -0800
Subject: [PATCH] [parquet-sdk][token_v2] migrate CurrentTokenRoyaltyV1

---
 .../db/common/models/token_v2_models/mod.rs   |  1 +
 .../token_v2_models/raw_v1_token_royalty.rs   | 97 +++++++++++++++++++
 .../db/parquet/models/token_v2_models/mod.rs  |  1 +
 .../token_v2_models/v1_token_royalty.rs       | 60 ++++++++++++
 .../token_v2_models/v1_token_royalty.rs       | 71 +++-----------
 .../src/processors/token_v2_processor.rs      | 25 +++--
 rust/processor/src/utils/table_flags.rs       |  1 +
 .../src/config/processor_config.rs            | 11 ++-
 .../src/parquet_processors/mod.rs             | 19 +++-
 .../parquet_token_v2_processor.rs             | 18 +++-
 .../parquet_token_v2_extractor.rs             | 38 ++++++--
 .../token_v2_processor/token_v2_extractor.rs  | 21 ++--
 12 files changed, 272 insertions(+), 91 deletions(-)
 create mode 100644 rust/processor/src/db/common/models/token_v2_models/raw_v1_token_royalty.rs
 create mode 100644 rust/processor/src/db/parquet/models/token_v2_models/v1_token_royalty.rs

diff --git a/rust/processor/src/db/common/models/token_v2_models/mod.rs b/rust/processor/src/db/common/models/token_v2_models/mod.rs
index aabf2fddd..e64a62216 100644
--- a/rust/processor/src/db/common/models/token_v2_models/mod.rs
+++ b/rust/processor/src/db/common/models/token_v2_models/mod.rs
@@ -1 +1,2 @@
 pub mod raw_token_claims;
+pub mod raw_v1_token_royalty;
diff --git a/rust/processor/src/db/common/models/token_v2_models/raw_v1_token_royalty.rs b/rust/processor/src/db/common/models/token_v2_models/raw_v1_token_royalty.rs
new file mode 100644
index 000000000..1f793d655
--- /dev/null
+++ b/rust/processor/src/db/common/models/token_v2_models/raw_v1_token_royalty.rs
@@ -0,0 +1,97 @@
+// Copyright © Aptos Foundation
+// SPDX-License-Identifier: Apache-2.0
+
+// This is required because a diesel macro makes clippy sad
+#![allow(clippy::extra_unused_lifetimes)]
+#![allow(clippy::unused_unit)]
+
+use crate::db::postgres::models::token_models::token_utils::TokenWriteSet;
+use aptos_protos::transaction::v1::WriteTableItem;
+use bigdecimal::BigDecimal;
+use serde::{Deserialize, Serialize};
+
+#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
+pub struct RawCurrentTokenRoyaltyV1 {
+    pub token_data_id: String,
+    pub payee_address: String,
+    pub royalty_points_numerator: BigDecimal,
+    pub royalty_points_denominator: BigDecimal,
+    pub last_transaction_version: i64,
+    pub last_transaction_timestamp: chrono::NaiveDateTime,
+}
+
+impl Ord for RawCurrentTokenRoyaltyV1 {
+    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+        self.token_data_id.cmp(&other.token_data_id)
+    }
+}
+impl PartialOrd for RawCurrentTokenRoyaltyV1 {
+    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl RawCurrentTokenRoyaltyV1 {
+    pub fn pk(&self) -> String {
+        self.token_data_id.clone()
+    }
+
+    // Royalty for a v2 token is more complicated and not yet supported. For token v2,
+    // royalty can be on the collection (default) or on the token (override).
+    pub fn get_v1_from_write_table_item(
+        write_table_item: &WriteTableItem,
+        transaction_version: i64,
+        transaction_timestamp: chrono::NaiveDateTime,
+    ) -> anyhow::Result<Option<Self>> {
+        let table_item_data = write_table_item.data.as_ref().unwrap();
+
+        let maybe_token_data = match TokenWriteSet::from_table_item_type(
+            table_item_data.value_type.as_str(),
+            &table_item_data.value,
+            transaction_version,
+        )? {
+            Some(TokenWriteSet::TokenData(inner)) => Some(inner),
+            _ => None,
+        };
+
+        if let Some(token_data) = maybe_token_data {
+            let maybe_token_data_id = match TokenWriteSet::from_table_item_type(
+                table_item_data.key_type.as_str(),
+                &table_item_data.key,
+                transaction_version,
+            )? {
+                Some(TokenWriteSet::TokenDataId(inner)) => Some(inner),
+                _ => None,
+            };
+            if let Some(token_data_id_struct) = maybe_token_data_id {
+                // token data id is the 0x{hash} version of the creator, collection name, and token name
+                let token_data_id = token_data_id_struct.to_id();
+                let payee_address = token_data.royalty.get_payee_address();
+                let royalty_points_numerator = token_data.royalty.royalty_points_numerator.clone();
+                let royalty_points_denominator =
+                    token_data.royalty.royalty_points_denominator.clone();
+
+                return Ok(Some(Self {
+                    token_data_id,
+                    payee_address,
+                    royalty_points_numerator,
+                    royalty_points_denominator,
+                    last_transaction_version: transaction_version,
+                    last_transaction_timestamp: transaction_timestamp,
+                }));
+            } else {
+                tracing::warn!(
+                    transaction_version,
+                    key_type = table_item_data.key_type,
+                    key = table_item_data.key,
+                    "Expecting token_data_id as key for value = token_data"
+                );
+            }
+        }
+        Ok(None)
+    }
+}
+
+pub trait CurrentTokenRoyaltyV1Convertible {
+    fn from_raw(raw_item: RawCurrentTokenRoyaltyV1) -> Self;
+}
diff --git a/rust/processor/src/db/parquet/models/token_v2_models/mod.rs b/rust/processor/src/db/parquet/models/token_v2_models/mod.rs
index 4dd8dd1e0..d42cead4e 100644
--- a/rust/processor/src/db/parquet/models/token_v2_models/mod.rs
+++ b/rust/processor/src/db/parquet/models/token_v2_models/mod.rs
@@ -1 +1,2 @@
 pub mod token_claims;
+pub mod v1_token_royalty;
diff --git a/rust/processor/src/db/parquet/models/token_v2_models/v1_token_royalty.rs b/rust/processor/src/db/parquet/models/token_v2_models/v1_token_royalty.rs
new file mode 100644
index 000000000..5d89bdda7
--- /dev/null
+++ b/rust/processor/src/db/parquet/models/token_v2_models/v1_token_royalty.rs
@@ -0,0 +1,60 @@
+// Copyright © Aptos Foundation
+// SPDX-License-Identifier: Apache-2.0
+
+// This is required because a diesel macro makes clippy sad
+#![allow(clippy::extra_unused_lifetimes)]
+#![allow(clippy::unused_unit)]
+
+use crate::{
+    bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable},
+    db::common::models::token_v2_models::raw_v1_token_royalty::{
+        CurrentTokenRoyaltyV1Convertible, RawCurrentTokenRoyaltyV1,
+    },
+};
+use allocative_derive::Allocative;
+use field_count::FieldCount;
+use parquet_derive::ParquetRecordWriter;
+use serde::{Deserialize, Serialize};
+
+#[derive(
+    Allocative, Clone, Debug, Default, Deserialize, FieldCount, ParquetRecordWriter, Serialize,
+)]
+pub struct CurrentTokenRoyaltyV1 {
+    pub token_data_id: String,
+    pub payee_address: String,
+    pub royalty_points_numerator: String, // String format of BigDecimal
+    pub royalty_points_denominator: String, // String format of BigDecimal
+    pub last_transaction_version: i64,
+    #[allocative(skip)]
+    pub last_transaction_timestamp: chrono::NaiveDateTime,
+}
+
+impl NamedTable for CurrentTokenRoyaltyV1 {
+    const TABLE_NAME: &'static str = "current_token_royalty_v1";
+}
+
+impl HasVersion for CurrentTokenRoyaltyV1 {
+    fn version(&self) -> i64 {
+        self.last_transaction_version
+    }
+}
+
+impl GetTimeStamp for CurrentTokenRoyaltyV1 {
+    fn get_timestamp(&self) -> chrono::NaiveDateTime {
+        self.last_transaction_timestamp
+    }
+}
+
+impl CurrentTokenRoyaltyV1Convertible for CurrentTokenRoyaltyV1 {
+    // TODO: consider returning a Result
+    fn from_raw(raw_item: RawCurrentTokenRoyaltyV1) -> Self {
+        Self {
+            token_data_id: raw_item.token_data_id,
+            payee_address: raw_item.payee_address,
+            royalty_points_numerator: raw_item.royalty_points_numerator.to_string(),
+            royalty_points_denominator: raw_item.royalty_points_denominator.to_string(),
+            last_transaction_version: raw_item.last_transaction_version,
+            last_transaction_timestamp: raw_item.last_transaction_timestamp,
+        }
+    }
+}
diff --git a/rust/processor/src/db/postgres/models/token_v2_models/v1_token_royalty.rs b/rust/processor/src/db/postgres/models/token_v2_models/v1_token_royalty.rs
index e9b13825d..73109d3c9 100644
--- a/rust/processor/src/db/postgres/models/token_v2_models/v1_token_royalty.rs
+++ b/rust/processor/src/db/postgres/models/token_v2_models/v1_token_royalty.rs
@@ -6,10 +6,11 @@
 #![allow(clippy::unused_unit)]
 
 use crate::{
-    db::postgres::models::token_models::token_utils::TokenWriteSet,
+    db::common::models::token_v2_models::raw_v1_token_royalty::{
+        CurrentTokenRoyaltyV1Convertible, RawCurrentTokenRoyaltyV1,
+    },
     schema::current_token_royalty_v1,
 };
-use aptos_protos::transaction::v1::WriteTableItem;
 use bigdecimal::BigDecimal;
 use field_count::FieldCount;
 use serde::{Deserialize, Serialize};
@@ -39,63 +40,15 @@ impl PartialOrd for CurrentTokenRoyaltyV1 {
     }
 }
 
-impl CurrentTokenRoyaltyV1 {
-    pub fn pk(&self) -> String {
-        self.token_data_id.clone()
-    }
-
-    // Royalty for v2 token is more complicated and not supported yet. For token v2, royalty can be on the collection (default) or on
-    // the token (override).
-    pub fn get_v1_from_write_table_item(
-        write_table_item: &WriteTableItem,
-        transaction_version: i64,
-        transaction_timestamp: chrono::NaiveDateTime,
-    ) -> anyhow::Result<Option<Self>> {
-        let table_item_data = write_table_item.data.as_ref().unwrap();
-
-        let maybe_token_data = match TokenWriteSet::from_table_item_type(
-            table_item_data.value_type.as_str(),
-            &table_item_data.value,
-            transaction_version,
-        )? {
-            Some(TokenWriteSet::TokenData(inner)) => Some(inner),
-            _ => None,
-        };
-
-        if let Some(token_data) = maybe_token_data {
-            let maybe_token_data_id = match TokenWriteSet::from_table_item_type(
-                table_item_data.key_type.as_str(),
-                &table_item_data.key,
-                transaction_version,
-            )? {
-                Some(TokenWriteSet::TokenDataId(inner)) => Some(inner),
-                _ => None,
-            };
-            if let Some(token_data_id_struct) = maybe_token_data_id {
-                // token data id is the 0x{hash} version of the creator, collection name, and token name
-                let token_data_id = token_data_id_struct.to_id();
-                let payee_address = token_data.royalty.get_payee_address();
-                let royalty_points_numerator = token_data.royalty.royalty_points_numerator.clone();
-                let royalty_points_denominator =
-                    token_data.royalty.royalty_points_denominator.clone();
-
-                return Ok(Some(Self {
-                    token_data_id,
-                    payee_address,
-                    royalty_points_numerator,
-                    royalty_points_denominator,
-                    last_transaction_version: transaction_version,
-                    last_transaction_timestamp: transaction_timestamp,
-                }));
-            } else {
-                tracing::warn!(
-                    transaction_version,
-                    key_type = table_item_data.key_type,
-                    key = table_item_data.key,
-                    "Expecting token_data_id as key for value = token_data"
-                );
-            }
+impl CurrentTokenRoyaltyV1Convertible for CurrentTokenRoyaltyV1 {
+    fn from_raw(raw_item: RawCurrentTokenRoyaltyV1) -> Self {
+        Self {
+            token_data_id: raw_item.token_data_id,
+            payee_address: raw_item.payee_address,
+            royalty_points_numerator: raw_item.royalty_points_numerator,
+            royalty_points_denominator: raw_item.royalty_points_denominator,
+            last_transaction_version: raw_item.last_transaction_version,
+            last_transaction_timestamp: raw_item.last_transaction_timestamp,
         }
-        Ok(None)
     }
 }
diff --git a/rust/processor/src/processors/token_v2_processor.rs b/rust/processor/src/processors/token_v2_processor.rs
index c658c4795..1635190d4 100644
--- a/rust/processor/src/processors/token_v2_processor.rs
+++ b/rust/processor/src/processors/token_v2_processor.rs
@@ -4,8 +4,11 @@ use super::{DefaultProcessingResult, ProcessorName, ProcessorTrait};
 use crate::{
     db::{
-        common::models::token_v2_models::raw_token_claims::{
-            CurrentTokenPendingClaimConvertible, RawCurrentTokenPendingClaim, TokenV1Claimed,
+        common::models::token_v2_models::{
+            raw_token_claims::{
+                CurrentTokenPendingClaimConvertible, RawCurrentTokenPendingClaim, TokenV1Claimed,
+            },
+            raw_v1_token_royalty::{CurrentTokenRoyaltyV1Convertible, RawCurrentTokenRoyaltyV1},
         },
         postgres::models::{
             fungible_asset_models::v2_fungible_asset_utils::FungibleAssetMetadata,
@@ -617,7 +620,7 @@ impl ProcessorTrait for TokenV2Processor {
             current_deleted_token_ownerships_v2,
             token_activities_v2,
             mut current_token_v2_metadata,
-            current_token_royalties_v1,
+            raw_current_token_royalties_v1,
             raw_current_token_claims,
         ) = parse_v2_token(
             &transactions,
@@ -633,6 +636,12 @@ impl ProcessorTrait for TokenV2Processor {
             .map(CurrentTokenPendingClaim::from_raw)
             .collect();
 
+        let postgres_current_token_royalties_v1: Vec<CurrentTokenRoyaltyV1> =
+            raw_current_token_royalties_v1
+                .into_iter()
+                .map(CurrentTokenRoyaltyV1::from_raw)
+                .collect();
+
         let processing_duration_in_secs = processing_start.elapsed().as_secs_f64();
         let db_insertion_start = std::time::Instant::now();
 
@@ -671,7 +680,7 @@ impl ProcessorTrait for TokenV2Processor {
             ),
             &token_activities_v2,
             &current_token_v2_metadata,
-            &current_token_royalties_v1,
+            &postgres_current_token_royalties_v1,
             &postgres_current_token_claims,
             &self.per_table_chunk_sizes,
         )
@@ -723,7 +732,7 @@ pub async fn parse_v2_token(
     Vec<CurrentTokenOwnershipV2>, // deleted token ownerships
     Vec<TokenActivityV2>,
     Vec<CurrentTokenV2Metadata>,
-    Vec<CurrentTokenRoyaltyV1>,
+    Vec<RawCurrentTokenRoyaltyV1>,
     Vec<RawCurrentTokenPendingClaim>,
 ) {
     // Token V2 and V1 combined
@@ -752,7 +761,7 @@
     // Basically token properties
     let mut current_token_v2_metadata: AHashMap<CurrentTokenV2MetadataPK, CurrentTokenV2Metadata> =
         AHashMap::new();
-    let mut current_token_royalties_v1: AHashMap<String, CurrentTokenRoyaltyV1> =
+    let mut current_token_royalties_v1: AHashMap<String, RawCurrentTokenRoyaltyV1> =
         AHashMap::new();
     // migrating this from v1 token model as we don't have any replacement table for this
     let mut all_current_token_claims: AHashMap<
@@ -967,7 +976,7 @@
                         );
                     }
                     if let Some(current_token_royalty) =
-                        CurrentTokenRoyaltyV1::get_v1_from_write_table_item(
+                        RawCurrentTokenRoyaltyV1::get_v1_from_write_table_item(
                             table_item,
                             txn_version,
                             txn_timestamp,
@@ -1294,7 +1303,7 @@ pub async fn parse_v2_token(
         .collect::<Vec<CurrentTokenV2Metadata>>();
     let mut current_token_royalties_v1 = current_token_royalties_v1
         .into_values()
-        .collect::<Vec<CurrentTokenRoyaltyV1>>();
+        .collect::<Vec<RawCurrentTokenRoyaltyV1>>();
     let mut all_current_token_claims = all_current_token_claims
         .into_values()
         .collect::<Vec<RawCurrentTokenPendingClaim>>();
diff --git a/rust/processor/src/utils/table_flags.rs b/rust/processor/src/utils/table_flags.rs
index 5fc0c0948..af29011e4 100644
--- a/rust/processor/src/utils/table_flags.rs
+++ b/rust/processor/src/utils/table_flags.rs
@@ -52,6 +52,7 @@ bitflags! {
         const COLLECTIONS_V2 = 1 << 57;
         const TOKEN_OWNERSHIPS_V2 = 1 << 58;
         const TOKEN_DATAS_V2 = 1 << 59;
+        const CURRENT_TOKEN_ROYALTY_V1 = 1 << 60;
 
         // User Transactions and Signatures: 61-70
         const USER_TRANSACTIONS = 1 << 61;
diff --git a/rust/sdk-processor/src/config/processor_config.rs b/rust/sdk-processor/src/config/processor_config.rs
index b9e04be39..9f3aefb75 100644
--- a/rust/sdk-processor/src/config/processor_config.rs
+++ b/rust/sdk-processor/src/config/processor_config.rs
@@ -27,7 +27,9 @@ use processor::{
         },
         parquet_v2_fungible_metadata::FungibleAssetMetadataModel,
     },
-    token_v2_models::token_claims::CurrentTokenPendingClaim,
+    token_v2_models::{
+        token_claims::CurrentTokenPendingClaim, v1_token_royalty::CurrentTokenRoyaltyV1,
+    },
     transaction_metadata_model::parquet_write_set_size_info::WriteSetSize,
     user_transaction_models::parquet_user_transactions::UserTransaction,
 },
@@ -166,9 +168,10 @@ impl ProcessorConfig {
             ProcessorName::ParquetAccountTransactionsProcessor => {
                 HashSet::from([AccountTransaction::TABLE_NAME.to_string()])
             },
-            ProcessorName::ParquetTokenV2Processor => {
-                HashSet::from([CurrentTokenPendingClaim::TABLE_NAME.to_string()])
-            },
+            ProcessorName::ParquetTokenV2Processor => HashSet::from([
+                CurrentTokenPendingClaim::TABLE_NAME.to_string(),
+                CurrentTokenRoyaltyV1::TABLE_NAME.to_string(),
+            ]),
             _ => HashSet::new(), // Default case for unsupported processors
         }
     }
diff --git a/rust/sdk-processor/src/parquet_processors/mod.rs b/rust/sdk-processor/src/parquet_processors/mod.rs
index f9ec72902..ff92f6c75 100644
--- a/rust/sdk-processor/src/parquet_processors/mod.rs
+++ b/rust/sdk-processor/src/parquet_processors/mod.rs
@@ -33,7 +33,9 @@ use processor::{
         },
         parquet_v2_fungible_metadata::FungibleAssetMetadataModel,
     },
-    token_v2_models::token_claims::CurrentTokenPendingClaim,
+    token_v2_models::{
+        token_claims::CurrentTokenPendingClaim, v1_token_royalty::CurrentTokenRoyaltyV1,
+    },
     transaction_metadata_model::parquet_write_set_size_info::WriteSetSize,
     user_transaction_models::parquet_user_transactions::UserTransaction,
 },
@@ -109,6 +111,7 @@ pub enum ParquetTypeEnum {
     AccountTransactions,
     // token v2
     CurrentTokenPendingClaims,
+    CurrentTokenRoyaltyV1,
 }
 
 /// Trait for handling various Parquet types.
@@ -191,6 +194,10 @@ impl_parquet_trait!(
     CurrentTokenPendingClaim,
     ParquetTypeEnum::CurrentTokenPendingClaims
 );
+impl_parquet_trait!(
+    CurrentTokenRoyaltyV1,
+    ParquetTypeEnum::CurrentTokenRoyaltyV1
+);
 
 #[derive(Debug, Clone)]
 #[enum_dispatch(ParquetTypeTrait)]
@@ -214,6 +221,7 @@ pub enum ParquetTypeStructs {
     WriteSetSize(Vec<WriteSetSize>),
     AccountTransaction(Vec<AccountTransaction>),
     CurrentTokenPendingClaim(Vec<CurrentTokenPendingClaim>),
+    CurrentTokenRoyaltyV1(Vec<CurrentTokenRoyaltyV1>),
 }
 
 impl ParquetTypeStructs {
@@ -254,6 +262,9 @@ impl ParquetTypeStructs {
             ParquetTypeEnum::CurrentTokenPendingClaims => {
                 ParquetTypeStructs::CurrentTokenPendingClaim(Vec::new())
             },
+            ParquetTypeEnum::CurrentTokenRoyaltyV1 => {
+                ParquetTypeStructs::CurrentTokenRoyaltyV1(Vec::new())
+            },
         }
     }
 
@@ -378,6 +389,12 @@ impl ParquetTypeStructs {
             ) => {
                 handle_append!(self_data, other_data)
             },
+            (
+                ParquetTypeStructs::CurrentTokenRoyaltyV1(self_data),
+                ParquetTypeStructs::CurrentTokenRoyaltyV1(other_data),
+            ) => {
+                handle_append!(self_data, other_data)
+            },
             _ => Err(ProcessorError::ProcessError {
                 message: "Mismatched buffer types in append operation".to_string(),
             }),
diff --git a/rust/sdk-processor/src/parquet_processors/parquet_token_v2_processor.rs b/rust/sdk-processor/src/parquet_processors/parquet_token_v2_processor.rs
index 3d9215565..27be8cf22 100644
--- a/rust/sdk-processor/src/parquet_processors/parquet_token_v2_processor.rs
+++ b/rust/sdk-processor/src/parquet_processors/parquet_token_v2_processor.rs
@@ -30,7 +30,9 @@ use aptos_indexer_processor_sdk::{
 use parquet::schema::types::Type;
 use processor::{
     bq_analytics::generic_parquet_processor::HasParquetSchema,
-    db::parquet::models::token_v2_models::token_claims::CurrentTokenPendingClaim,
+    db::parquet::models::token_v2_models::{
+        token_claims::CurrentTokenPendingClaim, v1_token_royalty::CurrentTokenRoyaltyV1,
+    },
 };
 use std::{collections::HashMap, sync::Arc};
 use tracing::{debug, info};
@@ -121,10 +123,16 @@ impl ProcessorTrait for ParquetTokenV2Processor {
         initialize_gcs_client(parquet_db_config.google_application_credentials.clone()).await;
 
         // TODO: Update this
-        let parquet_type_to_schemas: HashMap<ParquetTypeEnum, Arc<Type>> = [(
-            ParquetTypeEnum::CurrentTokenPendingClaims,
-            CurrentTokenPendingClaim::schema(),
-        )]
+        let parquet_type_to_schemas: HashMap<ParquetTypeEnum, Arc<Type>> = [
+            (
+                ParquetTypeEnum::CurrentTokenPendingClaims,
+                CurrentTokenPendingClaim::schema(),
+            ),
+            (
+                ParquetTypeEnum::CurrentTokenRoyaltyV1,
+                CurrentTokenRoyaltyV1::schema(),
+            ),
+        ]
         .into_iter()
         .collect();
diff --git a/rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs b/rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs
index 588dc4b3c..d88e841d3 100644
--- a/rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs
+++ b/rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs
@@ -11,8 +11,13 @@ use aptos_indexer_processor_sdk::{
 use async_trait::async_trait;
 use processor::{
     db::{
-        common::models::token_v2_models::raw_token_claims::CurrentTokenPendingClaimConvertible,
-        parquet::models::token_v2_models::token_claims::CurrentTokenPendingClaim,
+        common::models::token_v2_models::{
+            raw_token_claims::CurrentTokenPendingClaimConvertible,
+            raw_v1_token_royalty::CurrentTokenRoyaltyV1Convertible,
+        },
+        parquet::models::token_v2_models::{
+            token_claims::CurrentTokenPendingClaim, v1_token_royalty::CurrentTokenRoyaltyV1,
+        },
         postgres::models::token_models::tokens::TableMetadataForToken,
     },
     processors::token_v2_processor::parse_v2_token,
@@ -85,7 +90,7 @@ impl Processable for ParquetTokenV2Extractor {
             _current_deleted_token_ownerships_v2,
             _token_activities_v2,
             _current_token_v2_metadata,
-            _current_token_royalties_v1,
+            raw_current_token_royalties_v1,
             raw_current_token_claims,
         ) = parse_v2_token(
             &transactions.data,
@@ -101,21 +106,38 @@ impl Processable for ParquetTokenV2Extractor {
             .map(CurrentTokenPendingClaim::from_raw)
             .collect();
 
+        let parquet_current_token_royalties_v1: Vec<CurrentTokenRoyaltyV1> =
+            raw_current_token_royalties_v1
+                .into_iter()
+                .map(CurrentTokenRoyaltyV1::from_raw)
+                .collect();
+
         // Print the size of each extracted data type
         debug!("Processed data sizes:");
         debug!(
             " - CurrentTokenPendingClaim: {}",
             parquet_current_token_claims.len()
         );
+        debug!(
+            " - CurrentTokenRoyaltyV1: {}",
+            parquet_current_token_royalties_v1.len()
+        );
 
         let mut map: HashMap<ParquetTypeEnum, ParquetTypeStructs> = HashMap::new();
 
         // Array of tuples for each data type and its corresponding enum variant and flag
-        let data_types = [(
-            TableFlags::CURRENT_TOKEN_PENDING_CLAIMS,
-            ParquetTypeEnum::CurrentTokenPendingClaims,
-            ParquetTypeStructs::CurrentTokenPendingClaim(parquet_current_token_claims),
-        )];
+        let data_types = [
+            (
+                TableFlags::CURRENT_TOKEN_PENDING_CLAIMS,
+                ParquetTypeEnum::CurrentTokenPendingClaims,
+                ParquetTypeStructs::CurrentTokenPendingClaim(parquet_current_token_claims),
+            ),
+            (
+                TableFlags::CURRENT_TOKEN_ROYALTY_V1,
+                ParquetTypeEnum::CurrentTokenRoyaltyV1,
+                ParquetTypeStructs::CurrentTokenRoyaltyV1(parquet_current_token_royalties_v1),
+            ),
+        ];
 
         // Populate the map based on opt-in tables
         add_to_map_if_opted_in_for_backfill(self.opt_in_tables, &mut map, data_types.to_vec());
diff --git a/rust/sdk-processor/src/steps/token_v2_processor/token_v2_extractor.rs b/rust/sdk-processor/src/steps/token_v2_processor/token_v2_extractor.rs
index 4e29e0f5d..23c67fbee 100644
--- a/rust/sdk-processor/src/steps/token_v2_processor/token_v2_extractor.rs
+++ b/rust/sdk-processor/src/steps/token_v2_processor/token_v2_extractor.rs
@@ -8,7 +8,10 @@ use aptos_indexer_processor_sdk::{
 use async_trait::async_trait;
 use processor::{
     db::{
-        common::models::token_v2_models::raw_token_claims::CurrentTokenPendingClaimConvertible,
+        common::models::token_v2_models::{
+            raw_token_claims::CurrentTokenPendingClaimConvertible,
+            raw_v1_token_royalty::CurrentTokenRoyaltyV1Convertible,
+        },
         postgres::models::{
             token_models::{token_claims::CurrentTokenPendingClaim, tokens::TableMetadataForToken},
             token_v2_models::{
@@ -69,9 +72,9 @@ impl Processable for TokenV2Extractor {
     ) -> Result<
         Option<
             TransactionContext<(
-                Vec<CollectionV2>,
-                Vec<TokenDataV2>,
-                Vec<TokenOwnershipV2>,
+                Vec<CollectionV2>,     // TODO: Deprecate this
+                Vec<TokenDataV2>,      // TODO: Deprecate this
+                Vec<TokenOwnershipV2>, // TODO: Deprecate this
                 Vec<CurrentCollectionV2>,
                 Vec<CurrentTokenDataV2>,
                 Vec<CurrentDeletedTokenDataV2>,
@@ -110,7 +113,7 @@ impl Processable for TokenV2Extractor {
             current_deleted_token_ownerships_v2,
             token_activities_v2,
             current_token_v2_metadata,
-            current_token_royalties_v1,
+            raw_current_token_royalties_v1,
             raw_current_token_claims,
         ) = parse_v2_token(
             &transactions.data,
@@ -126,6 +129,12 @@ impl Processable for TokenV2Extractor {
             .map(CurrentTokenPendingClaim::from_raw)
             .collect();
 
+        let postgres_current_token_royalties_v1: Vec<CurrentTokenRoyaltyV1> =
+            raw_current_token_royalties_v1
+                .into_iter()
+                .map(CurrentTokenRoyaltyV1::from_raw)
+                .collect();
+
         Ok(Some(TransactionContext {
             data: (
                 collections_v2,
@@ -138,7 +147,7 @@ impl Processable for TokenV2Extractor {
                 current_deleted_token_ownerships_v2,
                 token_activities_v2,
                 current_token_v2_metadata,
-                current_token_royalties_v1,
+                postgres_current_token_royalties_v1,
                 postgres_current_token_claims,
             ),
             metadata: transactions.metadata,
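---

Note for reviewers: the patch follows the raw-model pattern already used for token claims. parse_v2_token now emits RawCurrentTokenRoyaltyV1 once, and each sink (postgres, parquet) converts it through the CurrentTokenRoyaltyV1Convertible trait. A minimal, self-contained sketch of that flow is below. It assumes only the bigdecimal crate; the struct bodies are trimmed stand-ins for the real models, and convert_all is a hypothetical helper that mirrors the repeated .into_iter().map(T::from_raw).collect() step in the extractors above.

    use bigdecimal::BigDecimal;

    // Trimmed stand-in for the raw model added in raw_v1_token_royalty.rs.
    pub struct RawCurrentTokenRoyaltyV1 {
        pub token_data_id: String,
        pub royalty_points_numerator: BigDecimal,
        pub royalty_points_denominator: BigDecimal,
    }

    // Same shape as the trait introduced by this patch.
    pub trait CurrentTokenRoyaltyV1Convertible {
        fn from_raw(raw_item: RawCurrentTokenRoyaltyV1) -> Self;
    }

    // Trimmed stand-in for the parquet model: BigDecimal is stringified.
    pub struct ParquetCurrentTokenRoyaltyV1 {
        pub token_data_id: String,
        pub royalty_points_numerator: String,
        pub royalty_points_denominator: String,
    }

    impl CurrentTokenRoyaltyV1Convertible for ParquetCurrentTokenRoyaltyV1 {
        fn from_raw(raw_item: RawCurrentTokenRoyaltyV1) -> Self {
            Self {
                token_data_id: raw_item.token_data_id,
                royalty_points_numerator: raw_item.royalty_points_numerator.to_string(),
                royalty_points_denominator: raw_item.royalty_points_denominator.to_string(),
            }
        }
    }

    // Hypothetical helper mirroring the conversion step both extractors repeat.
    fn convert_all<T: CurrentTokenRoyaltyV1Convertible>(
        raw: Vec<RawCurrentTokenRoyaltyV1>,
    ) -> Vec<T> {
        raw.into_iter().map(T::from_raw).collect()
    }

    fn main() {
        let raw = vec![RawCurrentTokenRoyaltyV1 {
            token_data_id: "0xabc".to_string(), // hypothetical token data id
            royalty_points_numerator: BigDecimal::from(5),
            royalty_points_denominator: BigDecimal::from(100),
        }];
        let rows: Vec<ParquetCurrentTokenRoyaltyV1> = convert_all(raw);
        assert_eq!(rows[0].royalty_points_numerator, "5");
    }

The design keeps protobuf parsing in one place (the raw model in db/common) so the postgres and parquet representations can diverge, as they do here over BigDecimal versus String, without duplicating the write-set extraction logic.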
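Opt-in gating for the new table rides on the CURRENT_TOKEN_ROYALTY_V1 bit added to TableFlags. A small sketch of how such a bitflags check behaves; 1 << 60 comes from the patch, while the pending-claims position below is illustrative only (the real value is defined elsewhere in table_flags.rs).

    use bitflags::bitflags;

    bitflags! {
        // Reduced sketch of processor::utils::table_flags::TableFlags.
        // 1 << 60 matches this patch; the claims bit position is made up.
        #[derive(Clone, Copy, Debug, PartialEq, Eq)]
        pub struct TableFlags: u64 {
            const CURRENT_TOKEN_PENDING_CLAIMS = 1 << 1; // illustrative position
            const CURRENT_TOKEN_ROYALTY_V1 = 1 << 60; // added by this patch
        }
    }

    fn main() {
        // A deployment opting in to royalties only, as the extractor's
        // add_to_map_if_opted_in_for_backfill filter would observe it.
        let opt_in_tables = TableFlags::CURRENT_TOKEN_ROYALTY_V1;
        assert!(opt_in_tables.contains(TableFlags::CURRENT_TOKEN_ROYALTY_V1));
        assert!(!opt_in_tables.contains(TableFlags::CURRENT_TOKEN_PENDING_CLAIMS));
    }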