From f150daeb93161fb1ee86ea95aec83d9d0777b570 Mon Sep 17 00:00:00 2001
From: bowenyang007
Date: Tue, 30 Apr 2024 16:12:34 -0700
Subject: [PATCH 1/6] add is_deleted to token datas

---
 .../down.sql                                  |  2 +
 .../2024-04-29-215042_token_datas_burn/up.sql |  3 +
 .../models/token_v2_models/v2_token_datas.rs  | 88 ++++++++++++++++++-
 .../src/processors/token_v2_processor.rs      | 86 +++++++++++++++++-
 rust/processor/src/schema.rs                  |  1 +
 5 files changed, 174 insertions(+), 6 deletions(-)
 create mode 100644 rust/processor/migrations/2024-04-29-215042_token_datas_burn/down.sql
 create mode 100644 rust/processor/migrations/2024-04-29-215042_token_datas_burn/up.sql

diff --git a/rust/processor/migrations/2024-04-29-215042_token_datas_burn/down.sql b/rust/processor/migrations/2024-04-29-215042_token_datas_burn/down.sql
new file mode 100644
index 000000000..cd15c8946
--- /dev/null
+++ b/rust/processor/migrations/2024-04-29-215042_token_datas_burn/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+ALTER TABLE IF EXISTS current_token_datas_v2 DROP COLUMN is_deleted_v2;
\ No newline at end of file
diff --git a/rust/processor/migrations/2024-04-29-215042_token_datas_burn/up.sql b/rust/processor/migrations/2024-04-29-215042_token_datas_burn/up.sql
new file mode 100644
index 000000000..f7fac2f87
--- /dev/null
+++ b/rust/processor/migrations/2024-04-29-215042_token_datas_burn/up.sql
@@ -0,0 +1,3 @@
+-- Your SQL goes here
+ALTER TABLE current_token_datas_v2
+ADD COLUMN IF NOT EXISTS is_deleted_v2 BOOLEAN;
\ No newline at end of file
diff --git a/rust/processor/src/models/token_v2_models/v2_token_datas.rs b/rust/processor/src/models/token_v2_models/v2_token_datas.rs
index 7259dfa0c..6606b6e8e 100644
--- a/rust/processor/src/models/token_v2_models/v2_token_datas.rs
+++ b/rust/processor/src/models/token_v2_models/v2_token_datas.rs
@@ -5,7 +5,7 @@
 #![allow(clippy::extra_unused_lifetimes)]
 #![allow(clippy::unused_unit)]
 
-use super::v2_token_utils::{TokenStandard, TokenV2};
+use super::v2_token_utils::{TokenStandard, TokenV2, TokenV2Burned};
 use crate::{
     models::{
         object_models::v2_object_utils::ObjectAggregatedDataMapping,
@@ -14,8 +14,8 @@ use crate::{
     schema::{current_token_datas_v2, token_datas_v2},
     utils::util::standardize_address,
 };
-use aptos_protos::transaction::v1::{WriteResource, WriteTableItem};
-use bigdecimal::BigDecimal;
+use aptos_protos::transaction::v1::{DeleteResource, WriteResource, WriteTableItem};
+use bigdecimal::{BigDecimal, Zero};
 use diesel::prelude::*;
 use field_count::FieldCount;
 use serde::{Deserialize, Serialize};
@@ -62,6 +62,40 @@ pub struct CurrentTokenDataV2 {
     pub is_fungible_v2: Option<bool>,
     pub last_transaction_version: i64,
     pub last_transaction_timestamp: chrono::NaiveDateTime,
+    pub decimals: i64,
+    pub is_deleted_v2: Option<bool>,
+}
+
+#[derive(Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)]
+#[diesel(primary_key(token_data_id))]
+#[diesel(table_name = current_token_datas_v2)]
+pub struct CurrentDeletedTokenDataV2 {
+    pub token_data_id: String,
+    pub last_transaction_version: i64,
+    pub last_transaction_timestamp: chrono::NaiveDateTime,
+    pub is_deleted_v2: bool,
+}
+
+#[derive(Debug, Deserialize, Identifiable, Queryable, Serialize)]
+#[diesel(primary_key(token_data_id))]
+#[diesel(table_name = current_token_datas_v2)]
+pub struct CurrentTokenDataV2Query {
+    pub token_data_id: String,
+    pub collection_id: String,
+    pub token_name: String,
+    pub maximum: Option<BigDecimal>,
+    pub supply: BigDecimal,
+    pub largest_property_version_v1: Option<BigDecimal>,
+    pub token_uri: String,
+    pub description: String,
+    pub token_properties: serde_json::Value,
+    pub token_standard: String,
+    pub is_fungible_v2: Option<bool>,
+    pub last_transaction_version: i64,
+    pub last_transaction_timestamp: chrono::NaiveDateTime,
+    pub inserted_at: chrono::NaiveDateTime,
+    pub decimals: i64,
+    pub is_deleted_v2: Option<bool>,
     // Deperecated, but still here for backwards compatibility
     pub decimals: Option<i64>,
 }
@@ -139,6 +173,7 @@ impl TokenDataV2 {
                 last_transaction_version: txn_version,
                 last_transaction_timestamp: txn_timestamp,
                 decimals: None,
+                is_deleted_v2: Some(false),
             },
         )))
     } else {
         }
     }
 
+    /// This handles the case where token is burned but objectCore is still there
+    pub async fn get_burned_nft_v2_from_write_resource(
+        write_resource: &WriteResource,
+        txn_version: i64,
+        txn_timestamp: chrono::NaiveDateTime,
+        tokens_burned: &TokenV2Burned,
+    ) -> anyhow::Result<Option<CurrentDeletedTokenDataV2>> {
+        let token_data_id = standardize_address(&write_resource.address.to_string());
+        if tokens_burned
+            .get(&standardize_address(&token_data_id))
+            .is_some()
+        {
+            return Ok(Some(CurrentDeletedTokenDataV2 {
+                token_data_id,
+                last_transaction_version: txn_version,
+                last_transaction_timestamp: txn_timestamp,
+                is_deleted_v2: false,
+            }));
+        } else {
+            Ok(None)
+        }
+    }
+
+    /// This handles the case where token is burned and objectCore is deleted
+    pub async fn get_burned_nft_v2_from_delete_resource(
+        delete_resource: &DeleteResource,
+        txn_version: i64,
+        txn_timestamp: chrono::NaiveDateTime,
+        tokens_burned: &TokenV2Burned,
+    ) -> anyhow::Result<Option<CurrentDeletedTokenDataV2>> {
+        let token_data_id = standardize_address(&delete_resource.address.to_string());
+        if tokens_burned
+            .get(&standardize_address(&token_data_id))
+            .is_some()
+        {
+            return Ok(Some(CurrentDeletedTokenDataV2 {
+                token_data_id,
+                last_transaction_version: txn_version,
+                last_transaction_timestamp: txn_timestamp,
+                is_deleted_v2: false,
+            }));
+        } else {
+            Ok(None)
+        }
+    }
+
     pub fn get_v1_from_write_table_item(
         table_item: &WriteTableItem,
         txn_version: i64,
@@ -213,6 +294,7 @@ impl TokenDataV2 {
                 last_transaction_version: txn_version,
                 last_transaction_timestamp: txn_timestamp,
                 decimals: None,
+                is_deleted_v2: None,
             },
         )));
     } else {
diff --git a/rust/processor/src/processors/token_v2_processor.rs b/rust/processor/src/processors/token_v2_processor.rs
index e806e08c2..cd8b50ad5 100644
--- a/rust/processor/src/processors/token_v2_processor.rs
+++ b/rust/processor/src/processors/token_v2_processor.rs
@@ -12,7 +12,9 @@ use crate::{
         token_v2_models::{
             v2_collections::{CollectionV2, CurrentCollectionV2, CurrentCollectionV2PK},
             v2_token_activities::TokenActivityV2,
-            v2_token_datas::{CurrentTokenDataV2, CurrentTokenDataV2PK, TokenDataV2},
+            v2_token_datas::{
+                CurrentDeletedTokenDataV2, CurrentTokenDataV2, CurrentTokenDataV2PK, TokenDataV2,
+            },
             v2_token_metadata::{CurrentTokenV2Metadata, CurrentTokenV2MetadataPK},
             v2_token_ownerships::{
                 CurrentTokenOwnershipV2, CurrentTokenOwnershipV2PK, NFTOwnershipV2,
@@ -96,6 +98,7 @@ async fn insert_to_db(
     token_ownerships_v2: &[TokenOwnershipV2],
     current_collections_v2: &[CurrentCollectionV2],
     current_token_datas_v2: &[CurrentTokenDataV2],
+    current_deleted_token_datas_v2: &[CurrentDeletedTokenDataV2],
     current_token_ownerships_v2: &[CurrentTokenOwnershipV2],
     current_deleted_token_ownerships_v2: &[CurrentTokenOwnershipV2],
     token_activities_v2: &[TokenActivityV2],
@@ -148,6 +151,15 @@ async fn insert_to_db(
             per_table_chunk_sizes,
         ),
     );
+    let cdtd_v2 = execute_in_chunks(
+        conn.clone(),
+        insert_current_deleted_token_datas_v2_query,
+        current_deleted_token_datas_v2,
+        get_config_table_chunk_size::<CurrentDeletedTokenDataV2>(
+            "current_token_datas_v2",
+            per_table_chunk_sizes,
+        ),
+    );
     let cto_v2 = execute_in_chunks(
         conn.clone(),
         insert_current_token_ownerships_v2_query,
@@ -191,11 +203,12 @@ async fn insert_to_db(
         to_v2_res,
         cc_v2_res,
         ctd_v2_res,
+        cdtd_v2_res,
         cto_v2_res,
         cdto_v2_res,
         ta_v2_res,
         ct_v2_res,
-    ) = tokio::join!(coll_v2, td_v2, to_v2, cc_v2, ctd_v2, cto_v2, cdto_v2, ta_v2, ct_v2,);
+    ) = tokio::join!(coll_v2, td_v2, to_v2, cc_v2, ctd_v2, cdtd_v2, cto_v2, cdto_v2, ta_v2, ct_v2,);
 
     for res in [
         coll_v2_res,
@@ -203,6 +216,7 @@ async fn insert_to_db(
         to_v2_res,
         cc_v2_res,
         ctd_v2_res,
+        cdtd_v2_res,
         cto_v2_res,
         cdto_v2_res,
         ta_v2_res,
@@ -341,6 +355,29 @@ fn insert_current_token_datas_v2_query(
     )
 }
 
+fn insert_current_deleted_token_datas_v2_query(
+    items_to_insert: Vec<CurrentDeletedTokenDataV2>,
+) -> (
+    impl QueryFragment<Pg> + diesel::query_builder::QueryId + Send,
+    Option<&'static str>,
+) {
+    use schema::current_token_datas_v2::dsl::*;
+
+    (
+        diesel::insert_into(schema::current_token_datas_v2::table)
+            .values(items_to_insert)
+            .on_conflict(token_data_id)
+            .do_update()
+            .set((
+                last_transaction_version.eq(excluded(last_transaction_version)),
+                last_transaction_timestamp.eq(excluded(last_transaction_timestamp)),
+                inserted_at.eq(excluded(inserted_at)),
+                is_deleted_v2.eq(excluded(is_deleted_v2)),
+            )),
+        Some(" WHERE current_token_datas_v2.last_transaction_version <= excluded.last_transaction_version "),
+    )
+}
+
 fn insert_current_token_ownerships_v2_query(
     items_to_insert: Vec<CurrentTokenOwnershipV2>,
 ) -> (
@@ -470,6 +507,7 @@ impl ProcessorTrait for TokenV2Processor {
                     token_ownerships_v2,
                     current_collections_v2,
                     current_token_datas_v2,
+                    current_deleted_token_datas_v2,
                     current_token_ownerships_v2,
                     current_deleted_token_ownerships_v2,
                     token_activities_v2,
@@ -496,6 +534,7 @@ impl ProcessorTrait for TokenV2Processor {
             &token_ownerships_v2,
             &current_collections_v2,
             &current_token_datas_v2,
+            &current_deleted_token_datas_v2,
             &current_token_ownerships_v2,
             &current_deleted_token_ownerships_v2,
             &token_activities_v2,
@@ -543,6 +582,7 @@ async fn parse_v2_token(
     Vec<TokenOwnershipV2>,
     Vec<CurrentCollectionV2>,
     Vec<CurrentTokenDataV2>,
+    Vec<CurrentDeletedTokenDataV2>,
     Vec<CurrentTokenOwnershipV2>,
     Vec<CurrentTokenOwnershipV2>, // deleted token ownerships
     Vec<TokenActivityV2>,
@@ -557,6 +597,10 @@ async fn parse_v2_token(
         AHashMap::new();
     let mut current_token_datas_v2: AHashMap<CurrentTokenDataV2PK, CurrentTokenDataV2> =
         AHashMap::new();
+    let mut current_deleted_token_datas_v2: AHashMap<
+        CurrentTokenDataV2PK,
+        CurrentDeletedTokenDataV2,
+    > = AHashMap::new();
     let mut current_token_ownerships_v2: AHashMap<
         CurrentTokenOwnershipV2PK,
         CurrentTokenOwnershipV2,
@@ -893,6 +937,22 @@ async fn parse_v2_token(
                         );
                     }
 
+                    // Add burned NFT handling for token datas (can probably be merged with below)
+                    if let Some(deleted_token_data) =
+                        TokenDataV2::get_burned_nft_v2_from_write_resource(
+                            resource,
+                            txn_version,
+                            txn_timestamp,
+                            &tokens_burned,
+                        )
+                        .await
+                        .unwrap()
+                    {
+                        current_deleted_token_datas_v2.insert(
+                            deleted_token_data.token_data_id.clone(),
+                            deleted_token_data,
+                        );
+                    }
                     // Add burned NFT handling
                     if let Some((nft_ownership, current_nft_ownership)) =
                         TokenOwnershipV2::get_burned_nft_v2_from_write_resource(
                             resource,
                             txn_version,
                             txn_timestamp,
                             &tokens_burned,
@@ -947,7 +1007,22 @@ async fn parse_v2_token(
                     }
                 },
                 Change::DeleteResource(resource) => {
-                    // Add burned NFT handling
+                    // Add burned NFT handling for token datas (can probably be merged with below)
+                    if let Some(deleted_token_data) =
+                        TokenDataV2::get_burned_nft_v2_from_delete_resource(
+                            resource,
+                            txn_version,
+                            txn_timestamp,
+                            &tokens_burned,
+                        )
+                        .await
+                        .unwrap()
+                    {
+                        current_deleted_token_datas_v2.insert(
+                            deleted_token_data.token_data_id.clone(),
+                            deleted_token_data,
+                        );
+                    }
                     if let Some((nft_ownership, current_nft_ownership)) =
                         TokenOwnershipV2::get_burned_nft_v2_from_delete_resource(
                             resource,
@@ -996,6 +1071,9 @@ async fn parse_v2_token(
     let mut current_token_datas_v2 = current_token_datas_v2
         .into_values()
         .collect::<Vec<CurrentTokenDataV2>>();
+    let mut current_deleted_token_datas_v2 = current_deleted_token_datas_v2
+        .into_values()
+        .collect::<Vec<CurrentDeletedTokenDataV2>>();
     let mut current_token_ownerships_v2 = current_token_ownerships_v2
         .into_values()
         .collect::<Vec<CurrentTokenOwnershipV2>>();
@@ -1008,6 +1086,7 @@ async fn parse_v2_token(
 
     // Sort by PK
     current_collections_v2.sort_by(|a, b| a.collection_id.cmp(&b.collection_id));
+    current_deleted_token_datas_v2.sort_by(|a, b| a.token_data_id.cmp(&b.token_data_id));
     current_token_datas_v2.sort_by(|a, b| a.token_data_id.cmp(&b.token_data_id));
     current_token_ownerships_v2.sort_by(|a, b| {
         (
@@ -1047,6 +1126,7 @@ async fn parse_v2_token(
         token_ownerships_v2,
         current_collections_v2,
         current_token_datas_v2,
+        current_deleted_token_datas_v2,
         current_token_ownerships_v2,
         current_deleted_token_ownerships_v2,
         token_activities_v2,
diff --git a/rust/processor/src/schema.rs b/rust/processor/src/schema.rs
index fafe9ff25..b88590eb1 100644
--- a/rust/processor/src/schema.rs
+++ b/rust/processor/src/schema.rs
@@ -538,6 +538,7 @@ diesel::table! {
         last_transaction_timestamp -> Timestamp,
         inserted_at -> Timestamp,
         decimals -> Nullable<Int8>,
+        is_deleted_v2 -> Nullable<Bool>,
     }
 }
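[Reviewer note] The conditional upsert built by insert_current_deleted_token_datas_v2_query corresponds roughly to the SQL below. This is a sketch, not Diesel's verbatim output — parameter syntax and column ordering may differ, and inserted_at is normally filled by the column default:

    INSERT INTO current_token_datas_v2
        (token_data_id, last_transaction_version, last_transaction_timestamp, is_deleted_v2)
    VALUES ($1, $2, $3, $4)
    ON CONFLICT (token_data_id) DO UPDATE SET
        last_transaction_version = EXCLUDED.last_transaction_version,
        last_transaction_timestamp = EXCLUDED.last_transaction_timestamp,
        inserted_at = EXCLUDED.inserted_at,
        is_deleted_v2 = EXCLUDED.is_deleted_v2
    WHERE current_token_datas_v2.last_transaction_version <= EXCLUDED.last_transaction_version;

The WHERE guard on the DO UPDATE keeps a replayed or out-of-order batch from rolling a row back to an older transaction version.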
From c01c6dc997d672be9405be6fc35d95d985fcb881 Mon Sep 17 00:00:00 2001
From: bowenyang007
Date: Fri, 3 May 2024 09:49:26 -0700
Subject: [PATCH 2/6] lint

---
 .../models/token_v2_models/v2_token_datas.rs  |  8 +++----
 .../src/processors/token_v2_processor.rs      | 21 ++++++++++++-------
 2 files changed, 17 insertions(+), 12 deletions(-)

diff --git a/rust/processor/src/models/token_v2_models/v2_token_datas.rs b/rust/processor/src/models/token_v2_models/v2_token_datas.rs
index 6606b6e8e..1302d398f 100644
--- a/rust/processor/src/models/token_v2_models/v2_token_datas.rs
+++ b/rust/processor/src/models/token_v2_models/v2_token_datas.rs
@@ -193,12 +193,12 @@ impl TokenDataV2 {
             .get(&standardize_address(&token_data_id))
             .is_some()
         {
-            return Ok(Some(CurrentDeletedTokenDataV2 {
+            Ok(Some(CurrentDeletedTokenDataV2 {
                 token_data_id,
                 last_transaction_version: txn_version,
                 last_transaction_timestamp: txn_timestamp,
                 is_deleted_v2: false,
-            }));
+            }))
         } else {
             Ok(None)
         }
@@ -216,12 +216,12 @@ impl TokenDataV2 {
             .get(&standardize_address(&token_data_id))
             .is_some()
         {
-            return Ok(Some(CurrentDeletedTokenDataV2 {
+            Ok(Some(CurrentDeletedTokenDataV2 {
                 token_data_id,
                 last_transaction_version: txn_version,
                 last_transaction_timestamp: txn_timestamp,
                 is_deleted_v2: false,
-            }));
+            }))
         } else {
             Ok(None)
         }
diff --git a/rust/processor/src/processors/token_v2_processor.rs b/rust/processor/src/processors/token_v2_processor.rs
index cd8b50ad5..158721345 100644
--- a/rust/processor/src/processors/token_v2_processor.rs
+++ b/rust/processor/src/processors/token_v2_processor.rs
@@ -97,10 +97,14 @@ async fn insert_to_db(
     token_datas_v2: &[TokenDataV2],
     token_ownerships_v2: &[TokenOwnershipV2],
     current_collections_v2: &[CurrentCollectionV2],
-    current_token_datas_v2: &[CurrentTokenDataV2],
-    current_deleted_token_datas_v2: &[CurrentDeletedTokenDataV2],
-    current_token_ownerships_v2: &[CurrentTokenOwnershipV2],
-    current_deleted_token_ownerships_v2: &[CurrentTokenOwnershipV2],
+    (current_token_datas_v2, current_deleted_token_datas_v2): (
+        &[CurrentTokenDataV2],
+        &[CurrentDeletedTokenDataV2],
+    ),
+    (current_token_ownerships_v2, current_deleted_token_ownerships_v2): (
+        &[CurrentTokenOwnershipV2],
+        &[CurrentTokenOwnershipV2],
+    ),
     token_activities_v2: &[TokenActivityV2],
     current_token_v2_metadata: &[CurrentTokenV2Metadata],
     per_table_chunk_sizes: &AHashMap<String, usize>,
@@ -533,10 +537,11 @@ impl ProcessorTrait for TokenV2Processor {
             &token_datas_v2,
             &token_ownerships_v2,
             &current_collections_v2,
-            &current_token_datas_v2,
-            &current_deleted_token_datas_v2,
-            &current_token_ownerships_v2,
-            &current_deleted_token_ownerships_v2,
+            (&current_token_datas_v2, &current_deleted_token_datas_v2),
+            (
+                &current_token_ownerships_v2,
+                &current_deleted_token_ownerships_v2,
+            ),
             &token_activities_v2,
             &current_token_v2_metadata,
             &self.per_table_chunk_sizes,
From 61079339ba7d9d8764a77d3e95f7b8b3b53893e7 Mon Sep 17 00:00:00 2001
From: bowenyang007
Date: Fri, 3 May 2024 09:51:22 -0700
Subject: [PATCH 3/6] comment to avoid confusion

---
 rust/processor/src/processors/token_v2_processor.rs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/rust/processor/src/processors/token_v2_processor.rs b/rust/processor/src/processors/token_v2_processor.rs
index 158721345..f0eb177b3 100644
--- a/rust/processor/src/processors/token_v2_processor.rs
+++ b/rust/processor/src/processors/token_v2_processor.rs
@@ -354,6 +354,8 @@ fn insert_current_token_datas_v2_query(
                 last_transaction_timestamp.eq(excluded(last_transaction_timestamp)),
                 inserted_at.eq(excluded(inserted_at)),
                 decimals.eq(excluded(decimals)),
+                // Intentionally not including is_deleted because it should always be true in this part
+                // and doesn't need to override
             )),
         Some(" WHERE current_token_datas_v2.last_transaction_version <= excluded.last_transaction_version "),
    )
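[Reviewer note] The comment added above is the crux of the soft-delete design: because the live-row upsert's DO UPDATE SET list omits is_deleted_v2, a tombstone written by insert_current_deleted_token_datas_v2_query survives later conflicts on the same primary key. A sketch of the interaction, with column lists abbreviated and values illustrative:

    -- 1. burn handling writes the tombstone
    INSERT INTO current_token_datas_v2 (token_data_id, last_transaction_version, is_deleted_v2)
    VALUES ('0xabc', 100, TRUE)
    ON CONFLICT (token_data_id) DO UPDATE SET
        last_transaction_version = EXCLUDED.last_transaction_version,
        is_deleted_v2 = EXCLUDED.is_deleted_v2;

    -- 2. the regular token-data upsert leaves is_deleted_v2 out of its SET list,
    --    so on conflict the TRUE written in step 1 is left untouched
    INSERT INTO current_token_datas_v2 (token_data_id, last_transaction_version, is_deleted_v2)
    VALUES ('0xabc', 100, FALSE)
    ON CONFLICT (token_data_id) DO UPDATE SET
        last_transaction_version = EXCLUDED.last_transaction_version;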
From b777a9670c1d392477e4ffc85e77161c38e20a9f Mon Sep 17 00:00:00 2001
From: bowenyang007
Date: Mon, 6 May 2024 10:53:54 -0700
Subject: [PATCH 4/6] delete works for historical events too

---
 .../token_v2_models/v2_token_activities.rs    |  2 +-
 .../models/token_v2_models/v2_token_datas.rs  | 56 +++++++++++--------
 .../token_v2_models/v2_token_ownerships.rs    |  4 +-
 .../models/token_v2_models/v2_token_utils.rs  | 26 +++++++--
 .../src/processors/token_v2_processor.rs      | 31 +++++-----
 5 files changed, 72 insertions(+), 47 deletions(-)

diff --git a/rust/processor/src/models/token_v2_models/v2_token_activities.rs b/rust/processor/src/models/token_v2_models/v2_token_activities.rs
index 824bec15a..af66fe17b 100644
--- a/rust/processor/src/models/token_v2_models/v2_token_activities.rs
+++ b/rust/processor/src/models/token_v2_models/v2_token_activities.rs
@@ -158,7 +158,7 @@ impl TokenActivityV2 {
             // the new burn event has owner address now!
             let owner_address = if let V2TokenEvent::Burn(inner) = token_event {
-                Some(inner.get_previous_owner_address())
+                inner.get_previous_owner_address()
             } else {
                 // To handle a case with the old burn events, when a token is minted and burnt in the same transaction
                 None
diff --git a/rust/processor/src/models/token_v2_models/v2_token_datas.rs b/rust/processor/src/models/token_v2_models/v2_token_datas.rs
index 1302d398f..b73e291f3 100644
--- a/rust/processor/src/models/token_v2_models/v2_token_datas.rs
+++ b/rust/processor/src/models/token_v2_models/v2_token_datas.rs
@@ -66,16 +66,6 @@ pub struct CurrentTokenDataV2 {
     pub is_deleted_v2: Option<bool>,
 }
 
-#[derive(Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)]
-#[diesel(primary_key(token_data_id))]
-#[diesel(table_name = current_token_datas_v2)]
-pub struct CurrentDeletedTokenDataV2 {
-    pub token_data_id: String,
-    pub last_transaction_version: i64,
-    pub last_transaction_timestamp: chrono::NaiveDateTime,
-    pub is_deleted_v2: bool,
-}
-
 #[derive(Debug, Deserialize, Identifiable, Queryable, Serialize)]
 #[diesel(primary_key(token_data_id))]
 #[diesel(table_name = current_token_datas_v2)]
@@ -187,17 +177,26 @@ impl TokenDataV2 {
         txn_version: i64,
         txn_timestamp: chrono::NaiveDateTime,
         tokens_burned: &TokenV2Burned,
-    ) -> anyhow::Result<Option<CurrentDeletedTokenDataV2>> {
+    ) -> anyhow::Result<Option<CurrentTokenDataV2>> {
         let token_data_id = standardize_address(&write_resource.address.to_string());
-        if tokens_burned
-            .get(&standardize_address(&token_data_id))
-            .is_some()
-        {
-            Ok(Some(CurrentDeletedTokenDataV2 {
+        // reminder that v1 events won't get to this codepath
+        if let Some(burn_event_v2) = tokens_burned.get(&standardize_address(&token_data_id)) {
+            Ok(Some(CurrentTokenDataV2 {
                 token_data_id,
+                collection_id: burn_event_v2.get_collection_address(),
+                token_name: "".to_string(),
+                maximum: None,
+                supply: BigDecimal::zero(),
+                largest_property_version_v1: None,
+                token_uri: "".to_string(),
+                token_properties: serde_json::Value::Null,
+                description: "".to_string(),
+                token_standard: TokenStandard::V2.to_string(),
+                is_fungible_v2: Some(false),
                 last_transaction_version: txn_version,
                 last_transaction_timestamp: txn_timestamp,
-                is_deleted_v2: false,
+                decimals: 0,
+                is_deleted_v2: Some(true),
             }))
         } else {
             Ok(None)
@@ -210,17 +209,26 @@ impl TokenDataV2 {
         txn_version: i64,
         txn_timestamp: chrono::NaiveDateTime,
         tokens_burned: &TokenV2Burned,
-    ) -> anyhow::Result<Option<CurrentDeletedTokenDataV2>> {
+    ) -> anyhow::Result<Option<CurrentTokenDataV2>> {
         let token_data_id = standardize_address(&delete_resource.address.to_string());
-        if tokens_burned
-            .get(&standardize_address(&token_data_id))
-            .is_some()
-        {
-            Ok(Some(CurrentDeletedTokenDataV2 {
+        // reminder that v1 events won't get to this codepath
+        if let Some(burn_event_v2) = tokens_burned.get(&standardize_address(&token_data_id)) {
+            Ok(Some(CurrentTokenDataV2 {
                 token_data_id,
+                collection_id: burn_event_v2.get_collection_address(),
+                token_name: "".to_string(),
+                maximum: None,
+                supply: BigDecimal::zero(),
+                largest_property_version_v1: None,
+                token_uri: "".to_string(),
+                token_properties: serde_json::Value::Null,
+                description: "".to_string(),
+                token_standard: TokenStandard::V2.to_string(),
+                is_fungible_v2: Some(false),
                 last_transaction_version: txn_version,
                 last_transaction_timestamp: txn_timestamp,
-                is_deleted_v2: false,
+                decimals: 0,
+                is_deleted_v2: Some(true),
             }))
         } else {
             Ok(None)
diff --git a/rust/processor/src/models/token_v2_models/v2_token_ownerships.rs b/rust/processor/src/models/token_v2_models/v2_token_ownerships.rs
index 51dc797f9..4d305558c 100644
--- a/rust/processor/src/models/token_v2_models/v2_token_ownerships.rs
+++ b/rust/processor/src/models/token_v2_models/v2_token_ownerships.rs
@@ -336,8 +336,10 @@ impl TokenOwnershipV2 {
         let token_address = standardize_address(token_address);
         if let Some(burn_event) = tokens_burned.get(&token_address) {
             // 1. Try to lookup token address in burn event mapping
-            let previous_owner = if let Some(burn_event) = burn_event {
-                burn_event.get_previous_owner_address()
+            let previous_owner = if let Some(previous_owner) =
+                burn_event.get_previous_owner_address()
+            {
+                previous_owner
             } else {
                 // 2. If it doesn't exist in burn event mapping, then it must be an old burn event that doesn't contain previous_owner.
                 // Do a lookup to get previous owner. This is necessary because previous owner is part of current token ownerships primary key.
diff --git a/rust/processor/src/models/token_v2_models/v2_token_utils.rs b/rust/processor/src/models/token_v2_models/v2_token_utils.rs
index e39be436a..2b6f25243 100644
--- a/rust/processor/src/models/token_v2_models/v2_token_utils.rs
+++ b/rust/processor/src/models/token_v2_models/v2_token_utils.rs
@@ -30,8 +30,8 @@ pub const TOKEN_V2_ADDR: &str =
 pub const DEFAULT_OWNER_ADDRESS: &str = "unknown";
 
 /// Tracks all token related data in a hashmap for quick access (keyed on address of the object core)
-/// Maps address to burn event (new). The event is None if it's an old burn event.
-pub type TokenV2Burned = AHashMap<CurrentObjectPK, Option<Burn>>;
+/// Maps address to burn event. If it's an old event previous_owner will be empty
+pub type TokenV2Burned = AHashMap<CurrentObjectPK, Burn>;
 pub type TokenV2Minted = AHashSet<CurrentObjectPK>;
 pub type TokenV2MintedPK = (CurrentObjectPK, i64);
@@ -348,13 +348,19 @@ impl BurnEvent {
 #[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct Burn {
     collection: String,
-    #[serde(deserialize_with = "deserialize_from_string")]
-    index: BigDecimal,
     token: String,
     previous_owner: String,
 }
 
 impl Burn {
+    pub fn new(collection: String, token: String, previous_owner: String) -> Self {
+        Burn {
+            collection,
+            token,
+            previous_owner,
+        }
+    }
+
     pub fn from_event(event: &Event, txn_version: i64) -> anyhow::Result<Option<Self>> {
         if let Some(V2TokenEvent::Burn(inner)) =
             V2TokenEvent::from_event(event.type_str.as_str(), &event.data, txn_version).unwrap()
@@ -369,8 +375,16 @@ impl Burn {
         standardize_address(&self.token)
     }
 
-    pub fn get_previous_owner_address(&self) -> String {
-        standardize_address(&self.previous_owner)
+    pub fn get_previous_owner_address(&self) -> Option<String> {
+        if self.previous_owner.is_empty() {
+            None
+        } else {
+            Some(standardize_address(&self.previous_owner))
+        }
+    }
+
+    pub fn get_collection_address(&self) -> String {
+        standardize_address(&self.collection)
     }
 }
diff --git a/rust/processor/src/processors/token_v2_processor.rs b/rust/processor/src/processors/token_v2_processor.rs
index f0eb177b3..005333e13 100644
--- a/rust/processor/src/processors/token_v2_processor.rs
+++ b/rust/processor/src/processors/token_v2_processor.rs
@@ -12,9 +12,7 @@ use crate::{
         token_v2_models::{
             v2_collections::{CollectionV2, CurrentCollectionV2, CurrentCollectionV2PK},
             v2_token_activities::TokenActivityV2,
-            v2_token_datas::{
-                CurrentDeletedTokenDataV2, CurrentTokenDataV2, CurrentTokenDataV2PK, TokenDataV2,
-            },
+            v2_token_datas::{CurrentTokenDataV2, CurrentTokenDataV2PK, TokenDataV2},
             v2_token_metadata::{CurrentTokenV2Metadata, CurrentTokenV2MetadataPK},
             v2_token_ownerships::{
                 CurrentTokenOwnershipV2, CurrentTokenOwnershipV2PK, NFTOwnershipV2,
@@ -99,7 +97,7 @@ async fn insert_to_db(
     current_collections_v2: &[CurrentCollectionV2],
     (current_token_datas_v2, current_deleted_token_datas_v2): (
         &[CurrentTokenDataV2],
-        &[CurrentDeletedTokenDataV2],
+        &[CurrentTokenDataV2],
     ),
     (current_token_ownerships_v2, current_deleted_token_ownerships_v2): (
         &[CurrentTokenOwnershipV2],
@@ -157,7 +155,7 @@ async fn insert_to_db(
         conn.clone(),
         insert_current_deleted_token_datas_v2_query,
         current_deleted_token_datas_v2,
-        get_config_table_chunk_size::<CurrentDeletedTokenDataV2>(
+        get_config_table_chunk_size::<CurrentTokenDataV2>(
             "current_token_datas_v2",
             per_table_chunk_sizes,
         ),
@@ -360,7 +358,7 @@ fn insert_current_token_datas_v2_query(
 }
 
 fn insert_current_deleted_token_datas_v2_query(
-    items_to_insert: Vec<CurrentDeletedTokenDataV2>,
+    items_to_insert: Vec<CurrentTokenDataV2>,
 ) -> (
     impl QueryFragment<Pg> + diesel::query_builder::QueryId + Send,
     Option<&'static str>,
@@ -587,7 +585,7 @@ async fn parse_v2_token(
     Vec<CurrentCollectionV2>,
     Vec<CurrentTokenDataV2>,
-    Vec<CurrentDeletedTokenDataV2>,
+    Vec<CurrentTokenDataV2>,
     Vec<CurrentTokenOwnershipV2>,
     Vec<CurrentTokenOwnershipV2>, // deleted token ownerships
     Vec<TokenActivityV2>,
@@ -604,10 +602,8 @@ async fn parse_v2_token(
         AHashMap::new();
     let mut current_token_datas_v2: AHashMap<CurrentTokenDataV2PK, CurrentTokenDataV2> =
         AHashMap::new();
-    let mut current_deleted_token_datas_v2: AHashMap<
-        CurrentTokenDataV2PK,
-        CurrentDeletedTokenDataV2,
-    > = AHashMap::new();
+    let mut current_deleted_token_datas_v2: AHashMap<CurrentTokenDataV2PK, CurrentTokenDataV2> =
+        AHashMap::new();
     let mut current_token_ownerships_v2: AHashMap<
         CurrentTokenOwnershipV2PK,
         CurrentTokenOwnershipV2,
@@ -726,10 +722,15 @@ async fn parse_v2_token(
         // and burn / transfer events need to come before the next section
         for (index, event) in user_txn.events.iter().enumerate() {
             if let Some(burn_event) = Burn::from_event(event, txn_version).unwrap() {
-                tokens_burned.insert(burn_event.get_token_address(), Some(burn_event));
+                tokens_burned.insert(burn_event.get_token_address(), burn_event);
             }
-            if let Some(burn_event) = BurnEvent::from_event(event, txn_version).unwrap() {
-                tokens_burned.insert(burn_event.get_token_address(), None);
+            if let Some(old_burn_event) = BurnEvent::from_event(event, txn_version).unwrap() {
+                let burn_event = Burn::new(
+                    standardize_address(event.key.as_ref().unwrap().account_address.as_str()),
+                    old_burn_event.get_token_address(),
+                    "".to_string(),
+                );
+                tokens_burned.insert(burn_event.get_token_address(), burn_event);
             }
             if let Some(mint_event) = MintEvent::from_event(event, txn_version).unwrap() {
                 tokens_minted.insert(mint_event.get_token_address());
@@ -1080,7 +1081,7 @@ async fn parse_v2_token(
         .collect::<Vec<CurrentTokenDataV2>>();
     let mut current_deleted_token_datas_v2 = current_deleted_token_datas_v2
         .into_values()
-        .collect::<Vec<CurrentDeletedTokenDataV2>>();
+        .collect::<Vec<CurrentTokenDataV2>>();
     let mut current_token_ownerships_v2 = current_token_ownerships_v2
         .into_values()
         .collect::<Vec<CurrentTokenOwnershipV2>>();
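[Reviewer note] With old-style BurnEvents now normalized into Burn (empty previous_owner, collection taken from the event key's account address), historical burns also produce token-data tombstones. The row upserted for such a burn looks roughly like this — values illustrative; the empty strings, zero decimals, and JSON null mirror the placeholder fields set in get_burned_nft_v2_from_write_resource above:

    INSERT INTO current_token_datas_v2
        (token_data_id, collection_id, token_name, token_uri, token_properties,
         token_standard, is_fungible_v2, last_transaction_version,
         last_transaction_timestamp, decimals, is_deleted_v2)
    VALUES ('0xtoken', '0xcollection', '', '', 'null', 'v2', FALSE,
            123456789, '2024-05-06 00:00:00', 0, TRUE)
    ON CONFLICT (token_data_id) DO UPDATE SET
        last_transaction_version = EXCLUDED.last_transaction_version,
        last_transaction_timestamp = EXCLUDED.last_transaction_timestamp,
        is_deleted_v2 = EXCLUDED.is_deleted_v2
    WHERE current_token_datas_v2.last_transaction_version <= EXCLUDED.last_transaction_version;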
From 42cecdd705593b19391e6cb3b1946a10f0960965 Mon Sep 17 00:00:00 2001
From: bowenyang007
Date: Mon, 6 May 2024 13:20:41 -0700
Subject: [PATCH 5/6] rebase

---
 .../models/token_v2_models/v2_token_datas.rs  | 35 ++++---------------
 1 file changed, 6 insertions(+), 29 deletions(-)

diff --git a/rust/processor/src/models/token_v2_models/v2_token_datas.rs b/rust/processor/src/models/token_v2_models/v2_token_datas.rs
index b73e291f3..0ed3957d6 100644
--- a/rust/processor/src/models/token_v2_models/v2_token_datas.rs
+++ b/rust/processor/src/models/token_v2_models/v2_token_datas.rs
@@ -62,32 +62,9 @@ pub struct CurrentTokenDataV2 {
     pub is_fungible_v2: Option<bool>,
     pub last_transaction_version: i64,
     pub last_transaction_timestamp: chrono::NaiveDateTime,
-    pub decimals: i64,
-    pub is_deleted_v2: Option<bool>,
-}
-
-#[derive(Debug, Deserialize, Identifiable, Queryable, Serialize)]
-#[diesel(primary_key(token_data_id))]
-#[diesel(table_name = current_token_datas_v2)]
-pub struct CurrentTokenDataV2Query {
-    pub token_data_id: String,
-    pub collection_id: String,
-    pub token_name: String,
-    pub maximum: Option<BigDecimal>,
-    pub supply: BigDecimal,
-    pub largest_property_version_v1: Option<BigDecimal>,
-    pub token_uri: String,
-    pub description: String,
-    pub token_properties: serde_json::Value,
-    pub token_standard: String,
-    pub is_fungible_v2: Option<bool>,
-    pub last_transaction_version: i64,
-    pub last_transaction_timestamp: chrono::NaiveDateTime,
-    pub inserted_at: chrono::NaiveDateTime,
-    pub decimals: i64,
-    pub is_deleted_v2: Option<bool>,
-    // Deperecated, but still here for backwards compatibility
+    // Deprecated, but still here for backwards compatibility
     pub decimals: Option<i64>,
+    pub is_deleted_v2: Option<bool>,
 }
 
 impl TokenDataV2 {
@@ -186,7 +163,7 @@ impl TokenDataV2 {
                 collection_id: burn_event_v2.get_collection_address(),
                 token_name: "".to_string(),
                 maximum: None,
-                supply: BigDecimal::zero(),
+                supply: None,
                 largest_property_version_v1: None,
                 token_uri: "".to_string(),
                 token_properties: serde_json::Value::Null,
@@ -195,7 +172,7 @@ impl TokenDataV2 {
                 is_fungible_v2: Some(false),
                 last_transaction_version: txn_version,
                 last_transaction_timestamp: txn_timestamp,
-                decimals: 0,
+                decimals: None,
                 is_deleted_v2: Some(true),
             }))
         } else {
@@ -218,7 +195,7 @@ impl TokenDataV2 {
                 collection_id: burn_event_v2.get_collection_address(),
                 token_name: "".to_string(),
                 maximum: None,
-                supply: BigDecimal::zero(),
+                supply: None,
                 largest_property_version_v1: None,
                 token_uri: "".to_string(),
                 token_properties: serde_json::Value::Null,
@@ -227,7 +204,7 @@ impl TokenDataV2 {
                 is_fungible_v2: Some(false),
                 last_transaction_version: txn_version,
                 last_transaction_timestamp: txn_timestamp,
-                decimals: 0,
+                decimals: None,
                 is_deleted_v2: Some(true),
             }))
         } else {

From 3138773a670ed3ebd665ac8580337014717fecc7 Mon Sep 17 00:00:00 2001
From: bowenyang007
Date: Mon, 6 May 2024 13:21:34 -0700
Subject: [PATCH 6/6] lint

---
 rust/processor/src/models/token_v2_models/v2_token_datas.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/rust/processor/src/models/token_v2_models/v2_token_datas.rs b/rust/processor/src/models/token_v2_models/v2_token_datas.rs
index 0ed3957d6..dcd988912 100644
--- a/rust/processor/src/models/token_v2_models/v2_token_datas.rs
+++ b/rust/processor/src/models/token_v2_models/v2_token_datas.rs
@@ -15,7 +15,7 @@ use crate::{
     utils::util::standardize_address,
 };
 use aptos_protos::transaction::v1::{DeleteResource, WriteResource, WriteTableItem};
-use bigdecimal::{BigDecimal, Zero};
+use bigdecimal::BigDecimal;
 use diesel::prelude::*;
 use field_count::FieldCount;
 use serde::{Deserialize, Serialize};
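[Reviewer note] For consumers of the table: rows written before this migration have is_deleted_v2 = NULL (the column is added as a nullable BOOLEAN and not backfilled), so queries that want to hide burned tokens should treat NULL as not-deleted rather than filtering on = FALSE. A sketch:

    -- keeps live rows (FALSE) and legacy rows (NULL), drops tombstones (TRUE)
    SELECT token_data_id, collection_id, token_name
    FROM current_token_datas_v2
    WHERE is_deleted_v2 IS NOT TRUE;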