Skip to content

Commit

Permalink
delete works for historical events too
Browse files Browse the repository at this point in the history
  • Loading branch information
bowenyang007 committed May 8, 2024
1 parent 6107933 commit b777a96
Show file tree
Hide file tree
Showing 5 changed files with 72 additions and 47 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -158,7 +158,7 @@ impl TokenActivityV2 {

// the new burn event has owner address now!
let owner_address = if let V2TokenEvent::Burn(inner) = token_event {
Some(inner.get_previous_owner_address())
inner.get_previous_owner_address()
} else {
// Handles the old burn events, where a token may be minted and burnt in the same transaction
None
Expand Down
56 changes: 32 additions & 24 deletions rust/processor/src/models/token_v2_models/v2_token_datas.rs
Original file line number Diff line number Diff line change
Expand Up @@ -66,16 +66,6 @@ pub struct CurrentTokenDataV2 {
pub is_deleted_v2: Option<bool>,
}

#[derive(Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)]
#[diesel(primary_key(token_data_id))]
#[diesel(table_name = current_token_datas_v2)]
pub struct CurrentDeletedTokenDataV2 {
pub token_data_id: String,
pub last_transaction_version: i64,
pub last_transaction_timestamp: chrono::NaiveDateTime,
pub is_deleted_v2: bool,
}

#[derive(Debug, Deserialize, Identifiable, Queryable, Serialize)]
#[diesel(primary_key(token_data_id))]
#[diesel(table_name = current_token_datas_v2)]
Expand Down Expand Up @@ -187,17 +177,26 @@ impl TokenDataV2 {
txn_version: i64,
txn_timestamp: chrono::NaiveDateTime,
tokens_burned: &TokenV2Burned,
) -> anyhow::Result<Option<CurrentDeletedTokenDataV2>> {
) -> anyhow::Result<Option<CurrentTokenDataV2>> {
let token_data_id = standardize_address(&write_resource.address.to_string());
if tokens_burned
.get(&standardize_address(&token_data_id))
.is_some()
{
Ok(Some(CurrentDeletedTokenDataV2 {
// reminder that v1 events won't get to this codepath
if let Some(burn_event_v2) = tokens_burned.get(&standardize_address(&token_data_id)) {
Ok(Some(CurrentTokenDataV2 {
token_data_id,
collection_id: burn_event_v2.get_collection_address(),
token_name: "".to_string(),
maximum: None,
supply: BigDecimal::zero(),
largest_property_version_v1: None,
token_uri: "".to_string(),
token_properties: serde_json::Value::Null,
description: "".to_string(),
token_standard: TokenStandard::V2.to_string(),
is_fungible_v2: Some(false),
last_transaction_version: txn_version,
last_transaction_timestamp: txn_timestamp,
is_deleted_v2: false,
decimals: 0,
is_deleted_v2: Some(true),
}))
} else {
Ok(None)
Expand All @@ -210,17 +209,26 @@ impl TokenDataV2 {
txn_version: i64,
txn_timestamp: chrono::NaiveDateTime,
tokens_burned: &TokenV2Burned,
) -> anyhow::Result<Option<CurrentDeletedTokenDataV2>> {
) -> anyhow::Result<Option<CurrentTokenDataV2>> {
let token_data_id = standardize_address(&delete_resource.address.to_string());
if tokens_burned
.get(&standardize_address(&token_data_id))
.is_some()
{
Ok(Some(CurrentDeletedTokenDataV2 {
// reminder that v1 events won't get to this codepath
if let Some(burn_event_v2) = tokens_burned.get(&standardize_address(&token_data_id)) {
Ok(Some(CurrentTokenDataV2 {
token_data_id,
collection_id: burn_event_v2.get_collection_address(),
token_name: "".to_string(),
maximum: None,
supply: BigDecimal::zero(),
largest_property_version_v1: None,
token_uri: "".to_string(),
token_properties: serde_json::Value::Null,
description: "".to_string(),
token_standard: TokenStandard::V2.to_string(),
is_fungible_v2: Some(false),
last_transaction_version: txn_version,
last_transaction_timestamp: txn_timestamp,
is_deleted_v2: false,
decimals: 0,
is_deleted_v2: Some(true),
}))
} else {
Ok(None)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -336,8 +336,10 @@ impl TokenOwnershipV2 {
let token_address = standardize_address(token_address);
if let Some(burn_event) = tokens_burned.get(&token_address) {
// 1. Try to lookup token address in burn event mapping
let previous_owner = if let Some(burn_event) = burn_event {
let previous_owner = if let Some(previous_owner) =
burn_event.get_previous_owner_address()
{
previous_owner
} else {
// 2. If it doesn't exist in burn event mapping, then it must be an old burn event that doesn't contain previous_owner.
// Do a lookup to get previous owner. This is necessary because previous owner is part of current token ownerships primary key.
Expand Down
26 changes: 20 additions & 6 deletions rust/processor/src/models/token_v2_models/v2_token_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,8 @@ pub const TOKEN_V2_ADDR: &str =
pub const DEFAULT_OWNER_ADDRESS: &str = "unknown";

/// Tracks all token related data in a hashmap for quick access (keyed on address of the object core)
/// Maps address to burn event (new). The event is None if it's an old burn event.
pub type TokenV2Burned = AHashMap<CurrentObjectPK, Option<Burn>>;
/// Maps address to burn event. If it's an old event, `previous_owner` will be empty.
pub type TokenV2Burned = AHashMap<CurrentObjectPK, Burn>;
pub type TokenV2Minted = AHashSet<CurrentObjectPK>;
pub type TokenV2MintedPK = (CurrentObjectPK, i64);

Expand Down Expand Up @@ -348,13 +348,19 @@ impl BurnEvent {
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Burn {
collection: String,
#[serde(deserialize_with = "deserialize_from_string")]
index: BigDecimal,
token: String,
previous_owner: String,
}

impl Burn {
pub fn new(collection: String, token: String, previous_owner: String) -> Self {
Burn {
collection,
token,
previous_owner,
}
}

pub fn from_event(event: &Event, txn_version: i64) -> anyhow::Result<Option<Self>> {
if let Some(V2TokenEvent::Burn(inner)) =
V2TokenEvent::from_event(event.type_str.as_str(), &event.data, txn_version).unwrap()
Expand All @@ -369,8 +375,16 @@ impl Burn {
standardize_address(&self.token)
}

pub fn get_previous_owner_address(&self) -> String {
standardize_address(&self.previous_owner)
pub fn get_previous_owner_address(&self) -> Option<String> {
if self.previous_owner.is_empty() {
None
} else {
Some(standardize_address(&self.previous_owner))
}
}

pub fn get_collection_address(&self) -> String {
standardize_address(&self.collection)
}
}

Expand Down
31 changes: 16 additions & 15 deletions rust/processor/src/processors/token_v2_processor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,7 @@ use crate::{
token_v2_models::{
v2_collections::{CollectionV2, CurrentCollectionV2, CurrentCollectionV2PK},
v2_token_activities::TokenActivityV2,
v2_token_datas::{
CurrentDeletedTokenDataV2, CurrentTokenDataV2, CurrentTokenDataV2PK, TokenDataV2,
},
v2_token_datas::{CurrentTokenDataV2, CurrentTokenDataV2PK, TokenDataV2},
v2_token_metadata::{CurrentTokenV2Metadata, CurrentTokenV2MetadataPK},
v2_token_ownerships::{
CurrentTokenOwnershipV2, CurrentTokenOwnershipV2PK, NFTOwnershipV2,
Expand Down Expand Up @@ -99,7 +97,7 @@ async fn insert_to_db(
current_collections_v2: &[CurrentCollectionV2],
(current_token_datas_v2, current_deleted_token_datas_v2): (
&[CurrentTokenDataV2],
&[CurrentDeletedTokenDataV2],
&[CurrentTokenDataV2],
),
(current_token_ownerships_v2, current_deleted_token_ownerships_v2): (
&[CurrentTokenOwnershipV2],
Expand Down Expand Up @@ -159,7 +157,7 @@ async fn insert_to_db(
conn.clone(),
insert_current_deleted_token_datas_v2_query,
current_deleted_token_datas_v2,
get_config_table_chunk_size::<CurrentDeletedTokenDataV2>(
get_config_table_chunk_size::<CurrentTokenDataV2>(
"current_token_datas_v2",
per_table_chunk_sizes,
),
Expand Down Expand Up @@ -362,7 +360,7 @@ fn insert_current_token_datas_v2_query(
}

fn insert_current_deleted_token_datas_v2_query(
items_to_insert: Vec<CurrentDeletedTokenDataV2>,
items_to_insert: Vec<CurrentTokenDataV2>,
) -> (
impl QueryFragment<Pg> + diesel::query_builder::QueryId + Send,
Option<&'static str>,
Expand Down Expand Up @@ -589,7 +587,7 @@ async fn parse_v2_token(
Vec<TokenOwnershipV2>,
Vec<CurrentCollectionV2>,
Vec<CurrentTokenDataV2>,
Vec<CurrentDeletedTokenDataV2>,
Vec<CurrentTokenDataV2>,
Vec<CurrentTokenOwnershipV2>,
Vec<CurrentTokenOwnershipV2>, // deleted token ownerships
Vec<TokenActivityV2>,
Expand All @@ -604,10 +602,8 @@ async fn parse_v2_token(
AHashMap::new();
let mut current_token_datas_v2: AHashMap<CurrentTokenDataV2PK, CurrentTokenDataV2> =
AHashMap::new();
let mut current_deleted_token_datas_v2: AHashMap<
CurrentTokenDataV2PK,
CurrentDeletedTokenDataV2,
> = AHashMap::new();
let mut current_deleted_token_datas_v2: AHashMap<CurrentTokenDataV2PK, CurrentTokenDataV2> =
AHashMap::new();
let mut current_token_ownerships_v2: AHashMap<
CurrentTokenOwnershipV2PK,
CurrentTokenOwnershipV2,
Expand Down Expand Up @@ -726,10 +722,15 @@ async fn parse_v2_token(
// and burn / transfer events need to come before the next section
for (index, event) in user_txn.events.iter().enumerate() {
if let Some(burn_event) = Burn::from_event(event, txn_version).unwrap() {
tokens_burned.insert(burn_event.get_token_address(), Some(burn_event));
tokens_burned.insert(burn_event.get_token_address(), burn_event);
}
if let Some(burn_event) = BurnEvent::from_event(event, txn_version).unwrap() {
tokens_burned.insert(burn_event.get_token_address(), None);
if let Some(old_burn_event) = BurnEvent::from_event(event, txn_version).unwrap() {
let burn_event = Burn::new(
standardize_address(event.key.as_ref().unwrap().account_address.as_str()),
old_burn_event.get_token_address(),
"".to_string(),
);
tokens_burned.insert(burn_event.get_token_address(), burn_event);
}
if let Some(mint_event) = MintEvent::from_event(event, txn_version).unwrap() {
tokens_minted.insert(mint_event.get_token_address());
Expand Down Expand Up @@ -1080,7 +1081,7 @@ async fn parse_v2_token(
.collect::<Vec<CurrentTokenDataV2>>();
let mut current_deleted_token_datas_v2 = current_deleted_token_datas_v2
.into_values()
.collect::<Vec<CurrentDeletedTokenDataV2>>();
.collect::<Vec<CurrentTokenDataV2>>();
let mut current_token_ownerships_v2 = current_token_ownerships_v2
.into_values()
.collect::<Vec<CurrentTokenOwnershipV2>>();
Expand Down

0 comments on commit b777a96

Please sign in to comment.