Skip to content

Commit

Permalink
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[parquet-sdk][token_v2] migrate CurrentTokenV2Metadata
Browse files Browse the repository at this point in the history
yuunlimm committed Dec 10, 2024

Unverified

No user is associated with the committer email.
1 parent 8a7c04b commit 1b8deff
Showing 11 changed files with 242 additions and 80 deletions.
1 change: 1 addition & 0 deletions rust/processor/src/db/common/models/token_v2_models/mod.rs
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
pub mod raw_token_claims;
pub mod raw_v1_token_royalty;
pub mod raw_v2_token_metadata;
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
// Copyright © Aptos Foundation
// SPDX-License-Identifier: Apache-2.0

// This is required because a diesel macro makes clippy sad
#![allow(clippy::extra_unused_lifetimes)]
#![allow(clippy::unused_unit)]

use crate::{
db::postgres::models::{
default_models::move_resources::MoveResource,
object_models::v2_object_utils::ObjectAggregatedDataMapping,
resources::{COIN_ADDR, TOKEN_ADDR, TOKEN_V2_ADDR},
token_models::token_utils::NAME_LENGTH,
},
utils::util::{standardize_address, truncate_str},
};
use anyhow::Context;
use aptos_protos::transaction::v1::WriteResource;
use serde::{Deserialize, Serialize};
use serde_json::Value;

// PK of current_objects, i.e. object_address, resource_type
pub type CurrentTokenV2MetadataPK = (String, String);

#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]

Check warning on line 25 in rust/processor/src/db/common/models/token_v2_models/raw_v2_token_metadata.rs

Codecov / codecov/patch

rust/processor/src/db/common/models/token_v2_models/raw_v2_token_metadata.rs#L25

Added line #L25 was not covered by tests
pub struct RawCurrentTokenV2Metadata {
pub object_address: String,
pub resource_type: String,
pub data: Value,
pub state_key_hash: String,
pub last_transaction_version: i64,
pub last_transaction_timestamp: chrono::NaiveDateTime,
}

impl Ord for RawCurrentTokenV2Metadata {
    /// Total order on the primary key: `object_address` first, then
    /// `resource_type` as the tie-breaker.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        match self.object_address.cmp(&other.object_address) {
            std::cmp::Ordering::Equal => self.resource_type.cmp(&other.resource_type),
            unequal => unequal,
        }
    }
}
impl PartialOrd for RawCurrentTokenV2Metadata {
    /// Delegates to the total ordering defined by `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(Ord::cmp(self, other))
    }
}

impl RawCurrentTokenV2Metadata {
/// Parsing unknown resources with 0x4::token::Token
pub fn from_write_resource(
write_resource: &WriteResource,
txn_version: i64,
object_metadatas: &ObjectAggregatedDataMapping,
txn_timestamp: chrono::NaiveDateTime,
) -> anyhow::Result<Option<Self>> {
let object_address = standardize_address(&write_resource.address.to_string());
if let Some(object_data) = object_metadatas.get(&object_address) {
// checking if token_v2
if object_data.token.is_some() {
let move_tag =
MoveResource::convert_move_struct_tag(write_resource.r#type.as_ref().unwrap());
let resource_type_addr = move_tag.get_address();
if matches!(
resource_type_addr.as_str(),
COIN_ADDR | TOKEN_ADDR | TOKEN_V2_ADDR
) {
return Ok(None);
}

let resource = MoveResource::from_write_resource(write_resource, 0, txn_version, 0);

let state_key_hash = object_data.object.get_state_key_hash();
if state_key_hash != resource.state_key_hash {
return Ok(None);

Check warning on line 74 in rust/processor/src/db/common/models/token_v2_models/raw_v2_token_metadata.rs

Codecov / codecov/patch

rust/processor/src/db/common/models/token_v2_models/raw_v2_token_metadata.rs#L74

Added line #L74 was not covered by tests
}

let resource_type = truncate_str(&resource.type_, NAME_LENGTH);
return Ok(Some(RawCurrentTokenV2Metadata {
object_address,
resource_type,
data: resource
.data
.context("data must be present in write resource")?,
state_key_hash: resource.state_key_hash,
last_transaction_version: txn_version,
last_transaction_timestamp: txn_timestamp,
}));
}
}
Ok(None)
}
}

/// Conversion from the storage-agnostic raw model into a DB-specific model
/// (e.g. the postgres or parquet `CurrentTokenV2Metadata` variants).
pub trait CurrentTokenV2MetadataConvertible {
    fn from_raw(raw_item: RawCurrentTokenV2Metadata) -> Self;
}
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
pub mod token_claims;
pub mod v1_token_royalty;
pub mod v2_token_metadata;
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
// Copyright © Aptos Foundation
// SPDX-License-Identifier: Apache-2.0

// This is required because a diesel macro makes clippy sad
#![allow(clippy::extra_unused_lifetimes)]
#![allow(clippy::unused_unit)]

use crate::{
bq_analytics::generic_parquet_processor::{GetTimeStamp, HasVersion, NamedTable},
db::common::models::token_v2_models::raw_v2_token_metadata::{
CurrentTokenV2MetadataConvertible, RawCurrentTokenV2Metadata,
},
};
use allocative_derive::Allocative;
use field_count::FieldCount;
use parquet_derive::ParquetRecordWriter;
use serde::{Deserialize, Serialize};

#[derive(
Allocative, Clone, Debug, Default, Deserialize, FieldCount, ParquetRecordWriter, Serialize,

Check warning on line 20 in rust/processor/src/db/parquet/models/token_v2_models/v2_token_metadata.rs

Codecov / codecov/patch

rust/processor/src/db/parquet/models/token_v2_models/v2_token_metadata.rs#L20

Added line #L20 was not covered by tests
)]
pub struct CurrentTokenV2Metadata {

Check warning on line 22 in rust/processor/src/db/parquet/models/token_v2_models/v2_token_metadata.rs

Codecov / codecov/patch

rust/processor/src/db/parquet/models/token_v2_models/v2_token_metadata.rs#L22

Added line #L22 was not covered by tests
pub object_address: String,
pub resource_type: String,
pub data: String,
pub state_key_hash: String,
pub last_transaction_version: i64,
#[allocative(skip)]
pub last_transaction_timestamp: chrono::NaiveDateTime,
}
impl NamedTable for CurrentTokenV2Metadata {
    /// Destination table name for this parquet model.
    const TABLE_NAME: &'static str = "current_token_v2_metadata";
}

impl HasVersion for CurrentTokenV2Metadata {
    /// Version of the last transaction that touched this row.
    fn version(&self) -> i64 {
        self.last_transaction_version
    }
}

impl GetTimeStamp for CurrentTokenV2Metadata {
    /// Timestamp of the last transaction that touched this row.
    fn get_timestamp(&self) -> chrono::NaiveDateTime {
        self.last_transaction_timestamp
    }
}

impl CurrentTokenV2MetadataConvertible for CurrentTokenV2Metadata {
    // TODO: consider returning a Result
    /// Converts the raw model into the parquet model, serializing the JSON
    /// payload to a canonical string (stable key ordering across runs).
    fn from_raw(raw_item: RawCurrentTokenV2Metadata) -> Self {
        Self {
            object_address: raw_item.object_address,
            resource_type: raw_item.resource_type,
            // Serializing an in-memory `serde_json::Value` is not expected to
            // fail; panic with context if it ever does (see TODO above).
            data: canonical_json::to_string(&raw_item.data)
                .expect("canonical JSON serialization of token metadata failed"),
            state_key_hash: raw_item.state_key_hash,
            last_transaction_version: raw_item.last_transaction_version,
            last_transaction_timestamp: raw_item.last_transaction_timestamp,
        }
    }
}
Original file line number Diff line number Diff line change
@@ -6,27 +6,19 @@
#![allow(clippy::unused_unit)]

use crate::{
db::postgres::models::{
default_models::move_resources::MoveResource,
object_models::v2_object_utils::ObjectAggregatedDataMapping,
resources::{COIN_ADDR, TOKEN_ADDR, TOKEN_V2_ADDR},
token_models::token_utils::NAME_LENGTH,
db::common::models::token_v2_models::raw_v2_token_metadata::{
CurrentTokenV2MetadataConvertible, RawCurrentTokenV2Metadata,
},
schema::current_token_v2_metadata,
utils::util::{standardize_address, truncate_str},
};
use anyhow::Context;
use aptos_protos::transaction::v1::WriteResource;
use field_count::FieldCount;
use serde::{Deserialize, Serialize};
use serde_json::Value;

// PK of current_objects, i.e. object_address, resource_type
pub type CurrentTokenV2MetadataPK = (String, String);

#[derive(
Clone, Debug, Deserialize, Eq, FieldCount, Identifiable, Insertable, PartialEq, Serialize,
)]
#[derive(Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)]
#[diesel(primary_key(object_address, resource_type))]
#[diesel(table_name = current_token_v2_metadata)]
pub struct CurrentTokenV2Metadata {
@@ -37,59 +29,14 @@ pub struct CurrentTokenV2Metadata {
pub last_transaction_version: i64,
}

impl Ord for CurrentTokenV2Metadata {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.object_address
.cmp(&other.object_address)
.then(self.resource_type.cmp(&other.resource_type))
}
}
impl PartialOrd for CurrentTokenV2Metadata {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}

impl CurrentTokenV2Metadata {
/// Parsing unknown resources with 0x4::token::Token
pub fn from_write_resource(
write_resource: &WriteResource,
txn_version: i64,
object_metadatas: &ObjectAggregatedDataMapping,
) -> anyhow::Result<Option<Self>> {
let object_address = standardize_address(&write_resource.address.to_string());
if let Some(object_data) = object_metadatas.get(&object_address) {
// checking if token_v2
if object_data.token.is_some() {
let move_tag =
MoveResource::convert_move_struct_tag(write_resource.r#type.as_ref().unwrap());
let resource_type_addr = move_tag.get_address();
if matches!(
resource_type_addr.as_str(),
COIN_ADDR | TOKEN_ADDR | TOKEN_V2_ADDR
) {
return Ok(None);
}

let resource = MoveResource::from_write_resource(write_resource, 0, txn_version, 0);

let state_key_hash = object_data.object.get_state_key_hash();
if state_key_hash != resource.state_key_hash {
return Ok(None);
}

let resource_type = truncate_str(&resource.type_, NAME_LENGTH);
return Ok(Some(CurrentTokenV2Metadata {
object_address,
resource_type,
data: resource
.data
.context("data must be present in write resource")?,
state_key_hash: resource.state_key_hash,
last_transaction_version: txn_version,
}));
}
impl CurrentTokenV2MetadataConvertible for CurrentTokenV2Metadata {
fn from_raw(raw_item: RawCurrentTokenV2Metadata) -> Self {
Self {
object_address: raw_item.object_address,
resource_type: raw_item.resource_type,
data: raw_item.data,
state_key_hash: raw_item.state_key_hash,
last_transaction_version: raw_item.last_transaction_version,
}
Ok(None)
}
}
37 changes: 24 additions & 13 deletions rust/processor/src/processors/token_v2_processor.rs
Original file line number Diff line number Diff line change
@@ -7,6 +7,7 @@ use crate::{
common::models::token_v2_models::{
raw_token_claims::{CurrentTokenPendingClaimConvertible, RawCurrentTokenPendingClaim},
raw_v1_token_royalty::{CurrentTokenRoyaltyV1Convertible, RawCurrentTokenRoyaltyV1},
raw_v2_token_metadata::{CurrentTokenV2MetadataConvertible, RawCurrentTokenV2Metadata},
},
postgres::models::{
fungible_asset_models::v2_fungible_asset_utils::FungibleAssetMetadata,
@@ -617,7 +618,7 @@ impl ProcessorTrait for TokenV2Processor {
current_token_ownerships_v2,
current_deleted_token_ownerships_v2,
token_activities_v2,
mut current_token_v2_metadata,
raw_current_token_v2_metadata,

Check warning on line 621 in rust/processor/src/processors/token_v2_processor.rs

Codecov / codecov/patch

rust/processor/src/processors/token_v2_processor.rs#L621

Added line #L621 was not covered by tests
raw_current_token_royalties_v1,
raw_current_token_claims,
) = parse_v2_token(
@@ -640,6 +641,12 @@ impl ProcessorTrait for TokenV2Processor {
.map(CurrentTokenRoyaltyV1::from_raw)
.collect();

let mut postgres_current_token_v2_metadata: Vec<CurrentTokenV2Metadata> =
raw_current_token_v2_metadata
.into_iter()
.map(CurrentTokenV2Metadata::from_raw)
.collect();

Check warning on line 649 in rust/processor/src/processors/token_v2_processor.rs

Codecov / codecov/patch

rust/processor/src/processors/token_v2_processor.rs#L644-L649

Added lines #L644 - L649 were not covered by tests
let processing_duration_in_secs = processing_start.elapsed().as_secs_f64();
let db_insertion_start = std::time::Instant::now();

@@ -659,7 +666,7 @@ impl ProcessorTrait for TokenV2Processor {
.deprecated_tables
.contains(TableFlags::CURRENT_TOKEN_V2_METADATA)
{
current_token_v2_metadata.clear();
postgres_current_token_v2_metadata.clear();

Check warning on line 669 in rust/processor/src/processors/token_v2_processor.rs

Codecov / codecov/patch

rust/processor/src/processors/token_v2_processor.rs#L669

Added line #L669 was not covered by tests
}

let tx_result = insert_to_db(
@@ -677,7 +684,7 @@ impl ProcessorTrait for TokenV2Processor {
&current_deleted_token_ownerships_v2,
),
&token_activities_v2,
&current_token_v2_metadata,
&postgres_current_token_v2_metadata,

Check warning on line 687 in rust/processor/src/processors/token_v2_processor.rs

Codecov / codecov/patch

rust/processor/src/processors/token_v2_processor.rs#L687

Added line #L687 was not covered by tests
&postgres_current_token_royalties_v1,
&postgres_current_token_claims,
&self.per_table_chunk_sizes,
@@ -729,7 +736,7 @@ pub async fn parse_v2_token(
Vec<CurrentTokenOwnershipV2>,
Vec<CurrentTokenOwnershipV2>, // deleted token ownerships
Vec<TokenActivityV2>,
Vec<CurrentTokenV2Metadata>,
Vec<RawCurrentTokenV2Metadata>,
Vec<RawCurrentTokenRoyaltyV1>,
Vec<RawCurrentTokenPendingClaim>,
) {
@@ -757,8 +764,10 @@ pub async fn parse_v2_token(
// we can still get the object core metadata for it
let mut token_v2_metadata_helper: ObjectAggregatedDataMapping = AHashMap::new();
// Basically token properties
let mut current_token_v2_metadata: AHashMap<CurrentTokenV2MetadataPK, CurrentTokenV2Metadata> =
AHashMap::new();
let mut current_token_v2_metadata: AHashMap<
CurrentTokenV2MetadataPK,
RawCurrentTokenV2Metadata,
> = AHashMap::new();
let mut current_token_royalties_v1: AHashMap<CurrentTokenDataV2PK, RawCurrentTokenRoyaltyV1> =
AHashMap::new();
// migrating this from v1 token model as we don't have any replacement table for this
@@ -1200,12 +1209,14 @@ pub async fn parse_v2_token(
}

// Track token properties
if let Some(token_metadata) = CurrentTokenV2Metadata::from_write_resource(
resource,
txn_version,
&token_v2_metadata_helper,
)
.unwrap()
if let Some(token_metadata) =
RawCurrentTokenV2Metadata::from_write_resource(
resource,
txn_version,
&token_v2_metadata_helper,
txn_timestamp,
)
.unwrap()
{
current_token_v2_metadata.insert(
(
@@ -1289,7 +1300,7 @@ pub async fn parse_v2_token(
.collect::<Vec<CurrentTokenOwnershipV2>>();
let mut current_token_v2_metadata = current_token_v2_metadata
.into_values()
.collect::<Vec<CurrentTokenV2Metadata>>();
.collect::<Vec<RawCurrentTokenV2Metadata>>();
let mut current_deleted_token_ownerships_v2 = current_deleted_token_ownerships_v2
.into_values()
.collect::<Vec<CurrentTokenOwnershipV2>>();
2 changes: 2 additions & 0 deletions rust/sdk-processor/src/config/processor_config.rs
Original file line number Diff line number Diff line change
@@ -29,6 +29,7 @@ use processor::{
},
token_v2_models::{
token_claims::CurrentTokenPendingClaim, v1_token_royalty::CurrentTokenRoyaltyV1,
v2_token_metadata::CurrentTokenV2Metadata,
},
transaction_metadata_model::parquet_write_set_size_info::WriteSetSize,
},
@@ -165,6 +166,7 @@ impl ProcessorConfig {
ProcessorName::ParquetTokenV2Processor => HashSet::from([
CurrentTokenPendingClaim::TABLE_NAME.to_string(),
CurrentTokenRoyaltyV1::TABLE_NAME.to_string(),
CurrentTokenV2Metadata::TABLE_NAME.to_string(),
]),
_ => HashSet::new(), // Default case for unsupported processors
}
16 changes: 16 additions & 0 deletions rust/sdk-processor/src/parquet_processors/mod.rs
Original file line number Diff line number Diff line change
@@ -34,6 +34,7 @@
},
token_v2_models::{
token_claims::CurrentTokenPendingClaim, v1_token_royalty::CurrentTokenRoyaltyV1,
v2_token_metadata::CurrentTokenV2Metadata,
},
transaction_metadata_model::parquet_write_set_size_info::WriteSetSize,
},
@@ -101,6 +102,7 @@
// token v2
CurrentTokenPendingClaims,
CurrentTokenRoyaltyV1,
CurrentTokenV2Metadata,
}

/// Trait for handling various Parquet types.
@@ -185,6 +187,10 @@
CurrentTokenRoyaltyV1,
ParquetTypeEnum::CurrentTokenRoyaltyV1
);
impl_parquet_trait!(
CurrentTokenV2Metadata,
ParquetTypeEnum::CurrentTokenV2Metadata
);

#[derive(Debug, Clone)]
#[enum_dispatch(ParquetTypeTrait)]
@@ -207,6 +213,7 @@
AccountTransaction(Vec<AccountTransaction>),
CurrentTokenPendingClaim(Vec<CurrentTokenPendingClaim>),
CurrentTokenRoyaltyV1(Vec<CurrentTokenRoyaltyV1>),
CurrentTokenV2Metadata(Vec<CurrentTokenV2Metadata>),
}

impl ParquetTypeStructs {
@@ -248,6 +255,9 @@
ParquetTypeEnum::CurrentTokenRoyaltyV1 => {
ParquetTypeStructs::CurrentTokenRoyaltyV1(Vec::new())
},
ParquetTypeEnum::CurrentTokenV2Metadata => {
ParquetTypeStructs::CurrentTokenV2Metadata(Vec::new())

Check warning on line 259 in rust/sdk-processor/src/parquet_processors/mod.rs

Codecov / codecov/patch

rust/sdk-processor/src/parquet_processors/mod.rs#L259

Added line #L259 was not covered by tests
},
}
}

@@ -366,6 +376,12 @@
) => {
handle_append!(self_data, other_data)
},
(
ParquetTypeStructs::CurrentTokenV2Metadata(self_data),
ParquetTypeStructs::CurrentTokenV2Metadata(other_data),
) => {
handle_append!(self_data, other_data)

Check warning on line 383 in rust/sdk-processor/src/parquet_processors/mod.rs

Codecov / codecov/patch

rust/sdk-processor/src/parquet_processors/mod.rs#L380-L383

Added lines #L380 - L383 were not covered by tests
},
_ => Err(ProcessorError::ProcessError {
message: "Mismatched buffer types in append operation".to_string(),
}),
Original file line number Diff line number Diff line change
@@ -32,6 +32,7 @@
bq_analytics::generic_parquet_processor::HasParquetSchema,
db::parquet::models::token_v2_models::{
token_claims::CurrentTokenPendingClaim, v1_token_royalty::CurrentTokenRoyaltyV1,
v2_token_metadata::CurrentTokenV2Metadata,
},
};
use std::{collections::HashMap, sync::Arc};
@@ -132,6 +133,10 @@
ParquetTypeEnum::CurrentTokenRoyaltyV1,
CurrentTokenRoyaltyV1::schema(),
),
(
ParquetTypeEnum::CurrentTokenV2Metadata,
CurrentTokenV2Metadata::schema(),
),

Check warning on line 139 in rust/sdk-processor/src/parquet_processors/parquet_token_v2_processor.rs

Codecov / codecov/patch

rust/sdk-processor/src/parquet_processors/parquet_token_v2_processor.rs#L136-L139

Added lines #L136 - L139 were not covered by tests
]
.into_iter()
.collect();
Original file line number Diff line number Diff line change
@@ -14,9 +14,11 @@
common::models::token_v2_models::{
raw_token_claims::CurrentTokenPendingClaimConvertible,
raw_v1_token_royalty::CurrentTokenRoyaltyV1Convertible,
raw_v2_token_metadata::CurrentTokenV2MetadataConvertible,
},
parquet::models::token_v2_models::{
token_claims::CurrentTokenPendingClaim, v1_token_royalty::CurrentTokenRoyaltyV1,
v2_token_metadata::CurrentTokenV2Metadata,
},
postgres::models::token_models::tokens::TableMetadataForToken,
},
@@ -89,7 +91,7 @@
_current_token_ownerships_v2,
_current_deleted_token_ownerships_v2,
_token_activities_v2,
_current_token_v2_metadata,
raw_current_token_v2_metadata,

Check warning on line 94 in rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs

Codecov / codecov/patch

rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs#L94

Added line #L94 was not covered by tests
raw_current_token_royalties_v1,
raw_current_token_claims,
) = parse_v2_token(
@@ -112,6 +114,12 @@
.map(CurrentTokenRoyaltyV1::from_raw)
.collect();

let parquet_current_token_v2_metadata: Vec<CurrentTokenV2Metadata> =
raw_current_token_v2_metadata
.into_iter()
.map(CurrentTokenV2Metadata::from_raw)
.collect();

Check warning on line 122 in rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs

Codecov / codecov/patch

rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs#L117-L122

Added lines #L117 - L122 were not covered by tests
// Print the size of each extracted data type
debug!("Processed data sizes:");
debug!(
@@ -122,6 +130,10 @@
" - CurrentTokenRoyaltyV1: {}",
parquet_current_token_royalties_v1.len()
);
debug!(
" - CurrentTokenV2Metadata: {}",
parquet_current_token_v2_metadata.len()

Check warning on line 135 in rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs

Codecov / codecov/patch

rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs#L133-L135

Added lines #L133 - L135 were not covered by tests
);

let mut map: HashMap<ParquetTypeEnum, ParquetTypeStructs> = HashMap::new();

@@ -137,6 +149,11 @@
ParquetTypeEnum::CurrentTokenRoyaltyV1,
ParquetTypeStructs::CurrentTokenRoyaltyV1(parquet_current_token_royalties_v1),
),
(
TableFlags::CURRENT_TOKEN_V2_METADATA,
ParquetTypeEnum::CurrentTokenV2Metadata,
ParquetTypeStructs::CurrentTokenV2Metadata(parquet_current_token_v2_metadata),
),

Check warning on line 156 in rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs

Codecov / codecov/patch

rust/sdk-processor/src/steps/parquet_token_v2_processor/parquet_token_v2_extractor.rs#L152-L156

Added lines #L152 - L156 were not covered by tests
];

// Populate the map based on opt-in tables
Original file line number Diff line number Diff line change
@@ -11,6 +11,7 @@ use processor::{
common::models::token_v2_models::{
raw_token_claims::CurrentTokenPendingClaimConvertible,
raw_v1_token_royalty::CurrentTokenRoyaltyV1Convertible,
raw_v2_token_metadata::CurrentTokenV2MetadataConvertible,
},
postgres::models::{
token_models::{token_claims::CurrentTokenPendingClaim, tokens::TableMetadataForToken},
@@ -112,7 +113,7 @@ impl Processable for TokenV2Extractor {
current_token_ownerships_v2,
current_deleted_token_ownerships_v2,
token_activities_v2,
current_token_v2_metadata,
raw_current_token_v2_metadata,
raw_current_token_royalties_v1,
raw_current_token_claims,
) = parse_v2_token(
@@ -135,6 +136,12 @@ impl Processable for TokenV2Extractor {
.map(CurrentTokenRoyaltyV1::from_raw)
.collect();

let postgres_current_token_v2_metadata: Vec<CurrentTokenV2Metadata> =
raw_current_token_v2_metadata
.into_iter()
.map(CurrentTokenV2Metadata::from_raw)
.collect();

Ok(Some(TransactionContext {
data: (
collections_v2,
@@ -146,7 +153,7 @@ impl Processable for TokenV2Extractor {
current_token_ownerships_v2,
current_deleted_token_ownerships_v2,
token_activities_v2,
current_token_v2_metadata,
postgres_current_token_v2_metadata,
postgres_current_token_royalties_v1,
postgres_current_token_claims,
),

0 comments on commit 1b8deff

Please sign in to comment.