Merge branch 'main' into satya/fungible-concurrenty-supply
vusirikala authored Apr 23, 2024
2 parents 5271672 + b6a3035 commit b9245ca
Showing 28 changed files with 859 additions and 410 deletions.
1 change: 1 addition & 0 deletions .github/workflows/lint.yaml
@@ -31,6 +31,7 @@ jobs:
- uses: actions/checkout@v3
- name: Install deps and run linter
run: |
sudo apt update && sudo apt install libdw-dev
cargo install cargo-sort
rustup update
rustup toolchain install nightly
3 changes: 3 additions & 0 deletions README.md
@@ -24,3 +24,6 @@ This guide will get you started with creating an Aptos indexer with custom parsi

## [Aptos Indexer GRPC Release Notes](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/indexer-grpc/release_notes.md)


> [!WARNING]
> The TypeScript implementation is known to get stuck when there is a lot of data to process. The issue is with the gRPC client, and we haven't had a chance to optimize it. Please proceed with caution.
33 changes: 26 additions & 7 deletions hasura-api/metadata-json/unified.json
@@ -1,5 +1,5 @@
{
"resource_version": 307,
"resource_version": 316,
"metadata": {
"version": 3,
"sources": [
@@ -295,6 +295,23 @@
"name": "account_transactions",
"schema": "public"
},
"object_relationships": [
{
"name": "user_transaction",
"using": {
"manual_configuration": {
"column_mapping": {
"transaction_version": "version"
},
"insertion_order": null,
"remote_table": {
"name": "user_transactions",
"schema": "public"
}
}
}
}
],
"array_relationships": [
{
"name": "coin_activities",
@@ -879,17 +896,19 @@
"role": "anonymous",
"permission": {
"columns": [
"last_transaction_version",
"domain",
"domain_expiration_timestamp",
"domain_with_suffix",
"expiration_timestamp",
"is_active",
"is_primary",
"domain",
"last_transaction_version",
"owner_address",
"registered_address",
"subdomain",
"subdomain_expiration_policy",
"token_name",
"token_standard",
"domain_with_suffix",
"expiration_timestamp"
"token_standard"
],
"filter": {},
"limit": 100,
@@ -2483,7 +2502,7 @@
"GET"
],
"name": "Latest Processor Status",
"url": "get_lastest_processor_status"
"url": "get_latest_processor_status"
}
],
"api_limits": {
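The new object_relationships entry joins account_transactions.transaction_version to user_transactions.version, so one GraphQL query on account_transactions can pull the matching user transaction inline. Below is a minimal sketch of such a query from Python; the endpoint URL, the account_address filter column, and the sender field are assumptions for illustration (only transaction_version, user_transaction, and version appear in this diff).

```python
# Sketch of querying the new user_transaction object relationship.
# The relationship name and column mapping come from unified.json above;
# the endpoint, account_address, and sender are assumed for illustration.
import requests

QUERY = """
query AccountTxnsWithUserTxn($address: String!) {
  account_transactions(where: {account_address: {_eq: $address}}, limit: 10) {
    transaction_version
    user_transaction {
      version
      sender
    }
  }
}
"""

resp = requests.post(
    "http://localhost:8080/v1/graphql",  # hypothetical local Hasura endpoint
    json={"query": QUERY, "variables": {"address": "0x1"}},
    timeout=10,
)
resp.raise_for_status()
print(resp.json()["data"]["account_transactions"])
```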
22 changes: 20 additions & 2 deletions python/utils/worker.py
@@ -201,8 +201,24 @@ def producer(
extra={
"processor_name": processor_name,
"stream_address": indexer_grpc_data_service_address,
"error": str(e),
"next_version_to_fetch": next_version_to_fetch,
"ending_version": ending_version,
},
)
# Datastream error can happen when we fail to deserialize deeply nested types.
# Skip the batch, log the error, and continue processing.
is_success = True
next_version_to_fetch += 1
response_stream = get_grpc_stream(
indexer_grpc_data_service_address,
indexer_grpc_data_stream_api_key,
indexer_grpc_http2_ping_interval,
indexer_grpc_http2_ping_timeout,
next_version_to_fetch,
ending_version,
processor_name,
)

# Check if we're at the end of the stream
reached_ending_version = (
@@ -369,7 +385,8 @@ async def consumer_impl(
"service_type": PROCESSOR_SERVICE_TYPE,
},
)
os._exit(1)
# Gaps are possible because we skipped versions
# os._exit(1)
last_fetched_version = transactions[-1].version
transaction_batches.append(transactions)

@@ -415,7 +432,8 @@ async def consumer_impl(
"service_type": PROCESSOR_SERVICE_TYPE,
},
)
os._exit(1)
# Gaps are possible because we skip versions
# os._exit(1)
prev_start = result.start_version
prev_end = result.end_version

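Taken together, the worker.py changes convert a fatal stream error into a skip-and-continue policy: the producer bumps next_version_to_fetch past the transaction it failed to deserialize and reopens the gRPC stream just beyond it, while the consumer's gap checks now log instead of calling os._exit(1). Here is a condensed sketch of the resulting control flow under stated assumptions: open_stream stands in for get_grpc_stream, the response objects are simplified stand-ins for the real gRPC types, and producer_loop/check_for_gap are hypothetical names, not functions from this repo.

```python
# Condensed sketch of the skip-and-reconnect behavior added in this commit.
from typing import Callable, Iterator, Optional


def producer_loop(
    open_stream: Callable[[int, int], Iterator],
    next_version_to_fetch: int,
    ending_version: int,
):
    stream = open_stream(next_version_to_fetch, ending_version)
    while next_version_to_fetch <= ending_version:
        try:
            response = next(stream)
        except Exception as exc:
            # Deserialization of deeply nested types can fail; skip the
            # offending version and reopen the stream just past it.
            print(f"[producer] skipping version {next_version_to_fetch}: {exc}")
            next_version_to_fetch += 1
            stream = open_stream(next_version_to_fetch, ending_version)
            continue
        yield response.transactions
        next_version_to_fetch = response.transactions[-1].version + 1


def check_for_gap(prev_end: Optional[int], start_version: int) -> None:
    # Before this commit a gap triggered os._exit(1); gaps are now expected
    # because the producer may skip versions, so they are only logged.
    if prev_end is not None and start_version != prev_end + 1:
        print(f"[consumer] version gap: {prev_end} -> {start_version}")
```

One consequence of this design is that downstream consumers can no longer assume contiguous versions, which is exactly why both gap checks in consumer_impl were downgraded from hard exits to log-only paths.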