fix token claims #627

Merged: 2 commits, Dec 6, 2024
6 changes: 3 additions & 3 deletions rust/Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion rust/Cargo.toml
@@ -34,7 +34,7 @@
aptos-indexer-processor-sdk = { git = "https://github.com/aptos-labs/aptos-indexer-processor-sdk.git", rev = "e6867c50a2c30ef16ad6f82e02313b2ba5ce361a" }
aptos-indexer-processor-sdk-server-framework = { git = "https://github.com/aptos-labs/aptos-indexer-processor-sdk.git", rev = "e6867c50a2c30ef16ad6f82e02313b2ba5ce361a" }
aptos-protos = { git = "https://github.com/aptos-labs/aptos-core.git", rev = "5c48aee129b5a141be2792ffa3d9bd0a1a61c9cb" }
aptos-system-utils = { git = "https://github.com/aptos-labs/aptos-core.git", rev = "202bdccff2b2d333a385ae86a4fcf23e89da9f62" }
-aptos-indexer-test-transactions = { git = "https://github.com/aptos-labs/aptos-core.git", rev = "60ee0c686c15480b2cbba224c205e03f479bcdbd" }
+aptos-indexer-test-transactions = { git = "https://github.com/aptos-labs/aptos-core.git", rev = "8bb628129ff48241c650178caf1ff8bf53a44e5e" }
aptos-indexer-testing-framework = { git = "https://github.com/aptos-labs/aptos-indexer-processor-sdk.git", rev = "e6867c50a2c30ef16ad6f82e02313b2ba5ce361a" }
async-trait = "0.1.53"
backtrace = "0.3.58"
@@ -0,0 +1 @@
[]
@@ -0,0 +1 @@
[]
@@ -0,0 +1 @@
[]
@@ -0,0 +1,17 @@
[
{
"token_data_id": "0x2877fd783c3c7957b50941cce1b3b729dae16b5928e0ba0606cd282df2b085c8",
"property_version_v1": "0",
"owner_address": "0xe2ebcbacd81584f97b6ba9a458239ea083428d75158c0d42ef38b6379527e99a",
"storage_id": "0xa5e6f84af35d356027c1513b37248bae3968c20720af6fc2d5c58b7c90ffda55",
"amount": "1",
"table_type_v1": "0x3::token::TokenStore",
"token_properties_mutated_v1": {},
"is_soulbound_v2": null,
"token_standard": "v1",
"is_fungible_v2": null,
"last_transaction_version": 19922017,
"last_transaction_timestamp": "2024-11-28T23:53:41.291148",
"non_transferrable_by_owner": null
}
]
@@ -0,0 +1,18 @@
[
{
"token_data_id_hash": "2877fd783c3c7957b50941cce1b3b729dae16b5928e0ba0606cd282df2b085c8",
"property_version": "0",
"from_address": "0xd77942ad91c35a2d165fdec8f6a28ec48b6bb9f905db2fd0ac8a7e481a9c543e",
"to_address": "0xe2ebcbacd81584f97b6ba9a458239ea083428d75158c0d42ef38b6379527e99a",
"collection_data_id_hash": "84b2198ac10fdbb64bc3474ed0cf184b7bdcd5bc5f098a8858f2e48f379c5fcc",
"creator_address": "0xd77942ad91c35a2d165fdec8f6a28ec48b6bb9f905db2fd0ac8a7e481a9c543e",
"collection_name": "Alice's",
"name": "Alice's first token",
"amount": "0",
"table_handle": "0xc4b3a532f522e5cc021427b4d729938073a6d34949bd397490d4bbf96a4703f1",
"last_transaction_version": 19922017,
"collection_id": "0x84b2198ac10fdbb64bc3474ed0cf184b7bdcd5bc5f098a8858f2e48f379c5fcc",
"last_transaction_timestamp": "2024-11-28T23:53:41.291148",
"token_data_id": "0x2877fd783c3c7957b50941cce1b3b729dae16b5928e0ba0606cd282df2b085c8"
}
]
@@ -0,0 +1 @@
[]
@@ -0,0 +1,36 @@
[
{
"transaction_version": 19922017,
"event_index": 0,
"event_account_address": "0x0000000000000000000000000000000000000000000000000000000000000000",
"token_data_id": "0x2877fd783c3c7957b50941cce1b3b729dae16b5928e0ba0606cd282df2b085c8",
"property_version_v1": "0",
"type_": "0x3::token::TokenDeposit",
"from_address": null,
"to_address": "0xe2ebcbacd81584f97b6ba9a458239ea083428d75158c0d42ef38b6379527e99a",
"token_amount": "1",
"before_value": null,
"after_value": null,
"entry_function_id_str": "0x3::token_transfers::claim_script",
"token_standard": "v1",
"is_fungible_v2": null,
"transaction_timestamp": "2024-11-28T23:53:41.291148"
},
{
"transaction_version": 19922017,
"event_index": 1,
"event_account_address": "0x0000000000000000000000000000000000000000000000000000000000000000",
"token_data_id": "0x2877fd783c3c7957b50941cce1b3b729dae16b5928e0ba0606cd282df2b085c8",
"property_version_v1": "0",
"type_": "0x3::token_transfers::Claim",
"from_address": "0xd77942ad91c35a2d165fdec8f6a28ec48b6bb9f905db2fd0ac8a7e481a9c543e",
"to_address": "0xe2ebcbacd81584f97b6ba9a458239ea083428d75158c0d42ef38b6379527e99a",
"token_amount": "1",
"before_value": null,
"after_value": null,
"entry_function_id_str": "0x3::token_transfers::claim_script",
"token_standard": "v1",
"is_fungible_v2": null,
"transaction_timestamp": "2024-11-28T23:53:41.291148"
}
]
@@ -0,0 +1 @@
[]
@@ -0,0 +1,18 @@
[
{
"transaction_version": 19922017,
"write_set_change_index": 4,
"token_data_id": "0x2877fd783c3c7957b50941cce1b3b729dae16b5928e0ba0606cd282df2b085c8",
"property_version_v1": "0",
"owner_address": "0xe2ebcbacd81584f97b6ba9a458239ea083428d75158c0d42ef38b6379527e99a",
"storage_id": "0xa5e6f84af35d356027c1513b37248bae3968c20720af6fc2d5c58b7c90ffda55",
"amount": "1",
"table_type_v1": "0x3::token::TokenStore",
"token_properties_mutated_v1": {},
"is_soulbound_v2": null,
"token_standard": "v1",
"is_fungible_v2": null,
"transaction_timestamp": "2024-11-28T23:53:41.291148",
"non_transferrable_by_owner": null
}
]
10 changes: 10 additions & 0 deletions rust/integration-tests/src/sdk_tests/token_v2_processor_tests.rs
@@ -58,6 +58,7 @@ mod sdk_token_v2_processor_tests {
},
};
use aptos_indexer_test_transactions::{
+IMPORTED_DEVNET_TXNS_19922017_TOKEN_V1_OFFER_CLAIM,
IMPORTED_DEVNET_TXNS_78753831_TOKEN_V1_MINT_TRANSFER_WITH_V2_EVENTS,
IMPORTED_DEVNET_TXNS_78753832_TOKEN_V2_MINT_TRANSFER_WITH_V2_EVENTS,
IMPORTED_MAINNET_TXNS_1058723093_TOKEN_V1_MINT_WITHDRAW_DEPOSIT_EVENTS,
@@ -314,6 +315,15 @@
.await;
}

+#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
+async fn test_token_v1_offer_claim_no_table_metadata() {
+process_single_transaction(
+IMPORTED_DEVNET_TXNS_19922017_TOKEN_V1_OFFER_CLAIM,
+Some("test_token_v1_offer_claim_no_table_metadata".to_string()),
+)
+.await;
+}

/**
* This test includes processing for the following:
* - Resources
34 changes: 29 additions & 5 deletions rust/processor/src/db/postgres/models/token_models/token_claims.rs
@@ -6,12 +6,20 @@
#![allow(clippy::unused_unit)]

use super::{token_utils::TokenWriteSet, tokens::TableHandleToOwner};
-use crate::{schema::current_token_pending_claims, utils::util::standardize_address};
+use crate::{
+db::postgres::models::token_v2_models::v2_token_activities::TokenActivityHelperV1,
+schema::current_token_pending_claims, utils::util::standardize_address,
+};
+use ahash::AHashMap;
use aptos_protos::transaction::v1::{DeleteTableItem, WriteTableItem};
use bigdecimal::{BigDecimal, Zero};
use field_count::FieldCount;
use serde::{Deserialize, Serialize};

+// Map to keep track of the metadata of token offers that were claimed. The key is the token data id of the offer.
+// Potentially it'd also be useful to keep track of offers that were canceled.
+pub type TokenV1Claimed = AHashMap<String, TokenActivityHelperV1>;

#[derive(
Clone, Debug, Deserialize, Eq, FieldCount, Identifiable, Insertable, PartialEq, Serialize,
)]
@@ -136,6 +144,7 @@
txn_version: i64,
txn_timestamp: chrono::NaiveDateTime,
table_handle_to_owner: &TableHandleToOwner,
+tokens_claimed: &TokenV1Claimed,
) -> anyhow::Result<Option<Self>> {
let table_item_data = table_item.data.as_ref().unwrap();

@@ -149,12 +158,27 @@
};
if let Some(offer) = &maybe_offer {
let table_handle = standardize_address(&table_item.handle.to_string());
+let token_data_id = offer.token_id.token_data_id.to_id();

+// Try to find owner from write resources
+let mut maybe_owner_address = table_handle_to_owner
+.get(&table_handle)
+.map(|table_metadata| table_metadata.get_owner_address());

+// If table handle isn't in TableHandleToOwner, try to find owner from token v1 claim events
+if maybe_owner_address.is_none() {
+if let Some(token_claimed) = tokens_claimed.get(&token_data_id) {
+maybe_owner_address = token_claimed.from_address.clone();
+}
+}

-let table_metadata = table_handle_to_owner.get(&table_handle).unwrap_or_else(|| {
+let owner_address = maybe_owner_address.unwrap_or_else(|| {
panic!(
"Missing table handle metadata for claim. \
-Version: {}, table handle for PendingClaims: {}, all metadata: {:?}",
-txn_version, table_handle, table_handle_to_owner
+Version: {}, table handle for PendingClaims: {}, all metadata: {:?} \
+Missing token data id in token claim event. \
+token_data_id: {}, all token claim events: {:?}",
+txn_version, table_handle, table_handle_to_owner, token_data_id, tokens_claimed

)
});

@@ -171,7 +195,7 @@
return Ok(Some(Self {
token_data_id_hash,
property_version: token_id.property_version,
-from_address: table_metadata.get_owner_address(),
+from_address: owner_address,
to_address: offer.get_to_address(),
collection_data_id_hash,
creator_address: token_data_id_struct.get_creator_address(),
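In short, the token_claims.rs change above makes the pending-claim owner lookup two-step: consult TableHandleToOwner first, and fall back to the from_address captured from a token v1 claim event in the same transaction. A minimal sketch of that resolution logic, using plain HashMaps and hypothetical pared-down types in place of the processor's TableHandleToOwner and TokenV1Claimed aliases:

use std::collections::HashMap;

// Hypothetical stand-ins: the real processor keys AHashMaps by table handle
// (TableHandleToOwner) and by token data id (TokenV1Claimed).
struct TableMetadata {
    owner_address: String,
}

struct ClaimHelper {
    from_address: Option<String>,
}

/// Resolve the address a pending claim was offered from: prefer the
/// TokenStore owner recorded in write resources, fall back to the sender
/// captured from a claim event, and only panic when both are missing.
fn resolve_owner(
    table_handle: &str,
    token_data_id: &str,
    table_handle_to_owner: &HashMap<String, TableMetadata>,
    tokens_claimed: &HashMap<String, ClaimHelper>,
) -> String {
    table_handle_to_owner
        .get(table_handle)
        .map(|m| m.owner_address.clone())
        .or_else(|| {
            tokens_claimed
                .get(token_data_id)
                .and_then(|c| c.from_address.clone())
        })
        .unwrap_or_else(|| {
            panic!("Missing owner for claim: handle {table_handle}, token {token_data_id}")
        })
}

Only when both sources come up empty does the processor keep its old panicking behavior, now with both lookup tables included in the message to aid debugging.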
rust/processor/src/db/postgres/models/token_v2_models/v2_token_activities.rs
@@ -9,7 +9,10 @@ use super::v2_token_utils::{TokenStandard, V2TokenEvent};
use crate::{
db::postgres::models::{
object_models::v2_object_utils::ObjectAggregatedDataMapping,
-token_models::token_utils::{TokenDataIdType, TokenEvent},
+token_models::{
+token_claims::TokenV1Claimed,
+token_utils::{TokenDataIdType, TokenEvent},
+},
},
schema::token_activities_v2,
utils::util::standardize_address,
@@ -41,7 +44,8 @@ pub struct TokenActivityV2 {
}

/// A simplified TokenActivity (excluded common fields) to reduce code duplication
-struct TokenActivityHelperV1 {
+#[derive(Clone, Debug)]
+pub struct TokenActivityHelperV1 {
pub token_data_id_struct: TokenDataIdType,
pub property_version: BigDecimal,
pub from_address: Option<String>,
@@ -200,6 +204,7 @@ impl TokenActivityV2 {
txn_timestamp: chrono::NaiveDateTime,
event_index: i64,
entry_function_id_str: &Option<String>,
+tokens_claimed: &mut TokenV1Claimed,
) -> anyhow::Result<Option<Self>> {
let event_type = event.type_str.clone();
if let Some(token_event) = &TokenEvent::from_event(&event_type, &event.data, txn_version)? {
@@ -290,12 +295,17 @@
to_address: Some(inner.get_to_address()),
token_amount: inner.amount.clone(),
},
-TokenEvent::ClaimTokenEvent(inner) => TokenActivityHelperV1 {
-token_data_id_struct: inner.token_id.token_data_id.clone(),
-property_version: inner.token_id.property_version.clone(),
-from_address: Some(event_account_address.clone()),
-to_address: Some(inner.get_to_address()),
-token_amount: inner.amount.clone(),
+TokenEvent::ClaimTokenEvent(inner) => {
+let token_data_id_struct = inner.token_id.token_data_id.clone();
+let helper = TokenActivityHelperV1 {
+token_data_id_struct: token_data_id_struct.clone(),
+property_version: inner.token_id.property_version.clone(),
+from_address: Some(event_account_address.clone()),
+to_address: Some(inner.get_to_address()),
+token_amount: inner.amount.clone(),
+};
+tokens_claimed.insert(token_data_id_struct.to_id(), helper.clone());
+helper
+},
TokenEvent::Offer(inner) => TokenActivityHelperV1 {
token_data_id_struct: inner.token_id.token_data_id.clone(),
@@ -311,12 +321,17 @@
to_address: Some(inner.get_to_address()),
token_amount: inner.amount.clone(),
},
-TokenEvent::Claim(inner) => TokenActivityHelperV1 {
-token_data_id_struct: inner.token_id.token_data_id.clone(),
-property_version: inner.token_id.property_version.clone(),
-from_address: Some(inner.get_from_address()),
-to_address: Some(inner.get_to_address()),
-token_amount: inner.amount.clone(),
+TokenEvent::Claim(inner) => {
+let token_data_id_struct = inner.token_id.token_data_id.clone();
+let helper = TokenActivityHelperV1 {
+token_data_id_struct: token_data_id_struct.clone(),
+property_version: inner.token_id.property_version.clone(),
+from_address: Some(inner.get_from_address()),
+to_address: Some(inner.get_to_address()),
+token_amount: inner.amount.clone(),
+};
+tokens_claimed.insert(token_data_id_struct.to_id(), helper.clone());
+helper
+},
};
let token_data_id_struct = token_activity_helper.token_data_id_struct;
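The two claim arms above now build their helper, record it in the shared map as a side effect, and then yield it as the match result. A condensed sketch of that pattern, with a hypothetical pared-down event and helper type:

use std::collections::HashMap;

#[derive(Clone)]
struct Helper {
    from_address: Option<String>,
    amount: u64,
}

enum TokenEvent {
    Claim {
        token_data_id: String,
        from: String,
        amount: u64,
    },
}

// Each claim event both yields a row for token_activities_v2 (the returned
// helper) and registers itself in `tokens_claimed` so the later write-set
// pass can recover the sender.
fn handle_claim(event: &TokenEvent, tokens_claimed: &mut HashMap<String, Helper>) -> Helper {
    match event {
        TokenEvent::Claim { token_data_id, from, amount } => {
            let helper = Helper {
                from_address: Some(from.clone()),
                amount: *amount,
            };
            tokens_claimed.insert(token_data_id.clone(), helper.clone());
            helper
        },
    }
}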
8 changes: 7 additions & 1 deletion rust/processor/src/processors/token_v2_processor.rs
@@ -10,7 +10,7 @@ use crate::{
},
resources::{FromWriteResource, V2TokenResource},
token_models::{
-token_claims::CurrentTokenPendingClaim,
+token_claims::{CurrentTokenPendingClaim, TokenV1Claimed},
tokens::{CurrentTokenPendingClaimPK, TableHandleToOwner, TableMetadataForToken},
},
token_v2_models::{
@@ -783,6 +783,9 @@ pub async fn parse_v2_token(
// Get mint events for token v2 by object
let mut tokens_minted: TokenV2Minted = AHashSet::new();

+// Get claim events for token v1, keyed by token data id
+let mut tokens_claimed: TokenV1Claimed = AHashMap::new();

// Loop 1: Need to do a first pass to get all the object addresses and insert them into the helper
for wsc in transaction_info.changes.iter() {
if let Change::WriteResource(wr) = wsc.change.as_ref().unwrap() {
@@ -847,6 +850,7 @@
// Loop 3: Pass through events to get the burn events and token activities v2
// This needs to be here because we need the metadata parsed in loop 2 for token activities
// and burn / transfer events need to come before the next loop
+// Also parses token v1 claim events, which will be used in Loop 4 to build the claims table
for (index, event) in user_txn.events.iter().enumerate() {
if let Some(burn_event) = Burn::from_event(event, txn_version).unwrap() {
tokens_burned.insert(burn_event.get_token_address(), burn_event.clone());
@@ -890,6 +894,7 @@
txn_timestamp,
index as i64,
&entry_function_id_str,
+&mut tokens_claimed,
)
.unwrap()
{
@@ -1053,6 +1058,7 @@
txn_version,
txn_timestamp,
table_handle_to_owner,
+&tokens_claimed,
)
.unwrap()
{
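Taken together, parse_v2_token threads the new map between its passes: Loop 3 fills tokens_claimed while iterating events (a mutable borrow), and Loop 4 reads it while iterating write-set changes (a shared borrow), which is why the event loop must run first. A schematic of that ordering, with hypothetical stub types in place of the protobuf and model types:

use std::collections::HashMap;

// Hypothetical stubs standing in for aptos_protos and model types.
struct Event {
    token_data_id: String,
    from_address: String,
}
struct DeleteTableItem {
    token_data_id: String,
}
struct PendingClaim {
    from_address: String,
}

fn parse_claims(events: &[Event], deletes: &[DeleteTableItem]) -> Vec<PendingClaim> {
    // Loop 3 equivalent: harvest claim metadata from events first.
    let mut tokens_claimed: HashMap<String, String> = HashMap::new();
    for e in events {
        tokens_claimed.insert(e.token_data_id.clone(), e.from_address.clone());
    }
    // Loop 4 equivalent: build claim rows, consulting the map as a fallback
    // when table metadata is unavailable.
    deletes
        .iter()
        .filter_map(|d| {
            tokens_claimed
                .get(&d.token_data_id)
                .map(|from| PendingClaim { from_address: from.clone() })
        })
        .collect()
}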