Showing 11 changed files with 237 additions and 36 deletions.
@@ -0,0 +1,136 @@
import fs from "node:fs";
import { z } from "zod";
import { DefaultLogger, eq } from "drizzle-orm";
import { createPublicClient, fallback, webSocket, http, Transport } from "viem";
import fastify from "fastify";
import { fastifyTRPCPlugin } from "@trpc/server/adapters/fastify";
import { createAppRouter } from "@latticexyz/store-sync/trpc-indexer";
import { createStorageAdapter } from "../src/postgres/createStorageAdapter";
import type { Chain } from "viem/chains";
import * as mudChains from "@latticexyz/common/chains";
import * as chains from "viem/chains";
import { isNotNull } from "@latticexyz/common/utils";
import { combineLatest, filter, first } from "rxjs";
import { drizzle } from "drizzle-orm/postgres-js";
import postgres from "postgres";
import { createInternalTables, schemaVersion, syncToPostgres } from "@latticexyz/store-sync/postgres";

const possibleChains = Object.values({ ...mudChains, ...chains }) as Chain[];

// TODO: refine zod type to be either CHAIN_ID or RPC_HTTP_URL/RPC_WS_URL
const env = z
  .object({
    CHAIN_ID: z.coerce.number().positive().optional(),
    RPC_HTTP_URL: z.string().optional(),
    RPC_WS_URL: z.string().optional(),
    START_BLOCK: z.coerce.bigint().nonnegative().default(0n),
    MAX_BLOCK_RANGE: z.coerce.bigint().positive().default(1000n),
    PORT: z.coerce.number().positive().default(3001),
    DATABASE_URL: z.string(),
  })
  .parse(process.env, {
    errorMap: (issue) => ({
      message: `Missing or invalid environment variable: ${issue.path.join(".")}`,
    }),
  });

const chain = env.CHAIN_ID != null ? possibleChains.find((c) => c.id === env.CHAIN_ID) : undefined;
if (env.CHAIN_ID != null && !chain) {
  console.warn(`No chain found for chain ID ${env.CHAIN_ID}`);
}

const transports: Transport[] = [
  env.RPC_WS_URL ? webSocket(env.RPC_WS_URL) : null,
  env.RPC_HTTP_URL ? http(env.RPC_HTTP_URL) : null,
].filter(isNotNull);

const publicClient = createPublicClient({
  chain,
  transport: fallback(
    // If one or more RPC URLs are provided, we'll configure the transport with only those RPC URLs
    transports.length > 0
      ? transports
      : // Otherwise use the chain defaults
        [webSocket(), http()]
  ),
  pollingInterval: 1000,
});

// Fetch the chain ID from the RPC if no chain object was found for the provided chain ID.
// We do this to match the downstream logic, which also attempts to find the chain ID.
const chainId = chain?.id ?? (await publicClient.getChainId());

const database = drizzle(postgres(env.DATABASE_URL), {
  logger: new DefaultLogger(),
});

let startBlock = env.START_BLOCK;

// Resume from the latest block stored in the DB. This will throw if the DB doesn't exist yet, so we wrap it in a try/catch and ignore the error.
try {
  const internalTables = createInternalTables();
  const currentChainStates = await database
    .select()
    .from(internalTables.chain)
    .where(eq(internalTables.chain.chainId, chainId))
    .execute();
  // TODO: replace this type workaround with `noUncheckedIndexedAccess: true` when we can fix all the related issues (https://github.com/latticexyz/mud/issues/1212)
  const currentChainState: (typeof currentChainStates)[number] | undefined = currentChainStates[0];

  if (currentChainState != null) {
    if (currentChainState.schemaVersion != schemaVersion) {
      console.log(
        "schema version changed from",
        currentChainState.schemaVersion,
        "to",
        schemaVersion,
        "recreating database"
      );
      // NOTE: fs.truncateSync expects a filesystem path; with a Postgres connection string it throws,
      // the error is swallowed by the surrounding try/catch, and the data is not actually reset here.
      fs.truncateSync(env.DATABASE_URL);
    } else if (currentChainState.lastUpdatedBlockNumber != null) {
      console.log("resuming from block number", currentChainState.lastUpdatedBlockNumber + 1n);
      startBlock = currentChainState.lastUpdatedBlockNumber + 1n;
    }
  }
} catch (error) {
  // ignore errors, this is optional
}

const { latestBlockNumber$, blockStorageOperations$ } = await syncToPostgres({
  database,
  publicClient,
  startBlock,
  maxBlockRange: env.MAX_BLOCK_RANGE,
});

combineLatest([latestBlockNumber$, blockStorageOperations$])
  .pipe(
    filter(
      ([latestBlockNumber, { blockNumber: lastBlockNumberProcessed }]) => latestBlockNumber === lastBlockNumberProcessed
    ),
    first()
  )
  .subscribe(() => {
    console.log("all caught up");
  });

// @see https://fastify.dev/docs/latest/
const server = fastify({
  maxParamLength: 5000,
});

await server.register(import("@fastify/cors"));

// @see https://trpc.io/docs/server/adapters/fastify
server.register(fastifyTRPCPlugin, {
  prefix: "/trpc",
  trpcOptions: {
    router: createAppRouter(),
    createContext: async () => ({
      storageAdapter: await createStorageAdapter(database),
    }),
  },
});

await server.listen({ port: env.PORT });
console.log(`indexer server listening on http://127.0.0.1:${env.PORT}`);
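
For context, a minimal sketch of querying the endpoint served above from a tRPC client. This is not part of the commit: it assumes the router created by createAppRouter exposes a findAll query taking a chain ID and store address, and that it is configured with a superjson transformer; the URL uses the default PORT from the env schema, and the world address is a placeholder. Verify the procedure names against the types exported by @latticexyz/store-sync/trpc-indexer.

import { createTRPCProxyClient, httpBatchLink } from "@trpc/client";
import superjson from "superjson";
import { createAppRouter } from "@latticexyz/store-sync/trpc-indexer";

// Router type derived from the exported factory; the procedure shape below is an assumption.
type AppRouter = ReturnType<typeof createAppRouter>;

const indexer = createTRPCProxyClient<AppRouter>({
  transformer: superjson, // assumption: the server router uses superjson
  links: [httpBatchLink({ url: "http://127.0.0.1:3001/trpc" })],
});

// Hypothetical chain ID and world address, for illustration only.
const { blockNumber, tables } = await indexer.findAll.query({
  chainId: 31337,
  address: "0x0000000000000000000000000000000000000000",
});
console.log(`fetched ${tables.length} tables as of block ${blockNumber}`);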
packages/store-indexer/src/postgres/createStorageAdapter.ts: 54 additions & 0 deletions
@@ -0,0 +1,54 @@
import { eq } from "drizzle-orm";
import { PgDatabase } from "drizzle-orm/pg-core";
import { createTable, createInternalTables, getTables } from "@latticexyz/store-sync/postgres";
import { StorageAdapter } from "@latticexyz/store-sync/trpc-indexer";
import { debug } from "../debug";
import { getAddress } from "viem";

/**
 * Creates a storage adapter for the tRPC server/client to query data from Postgres.
 *
 * @param {PgDatabase<any>} database Postgres database object from Drizzle
 * @returns {Promise<StorageAdapter>} A set of methods used by tRPC endpoints.
 */
export async function createStorageAdapter(database: PgDatabase<any>): Promise<StorageAdapter> {
  const adapter: StorageAdapter = {
    async findAll(chainId, address) {
      const internalTables = createInternalTables();
      const tables = (await getTables(database)).filter(
        (table) => address != null && getAddress(address) === getAddress(table.address)
      );

      const tablesWithRecords = await Promise.all(
        tables.map(async (table) => {
          // Build the Drizzle table object for this store table and select its non-deleted records.
          const dbTable = createTable(table);
          const records = await database.select().from(dbTable).where(eq(dbTable.__isDeleted, false)).execute();
          return {
            ...table,
            records: records.map((record) => ({
              key: Object.fromEntries(Object.entries(table.keySchema).map(([name]) => [name, record[name]])),
              value: Object.fromEntries(Object.entries(table.valueSchema).map(([name]) => [name, record[name]])),
            })),
          };
        })
      );

      const metadata = await database
        .select()
        .from(internalTables.chain)
        .where(eq(internalTables.chain.chainId, chainId))
        .execute();
      const { lastUpdatedBlockNumber } = metadata[0] ?? {};

      const result = {
        blockNumber: lastUpdatedBlockNumber ?? null,
        tables: tablesWithRecords,
      };

      debug("findAll", chainId, address, result);

      return result;
    },
  };
  return adapter;
}
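
For illustration, a minimal sketch of calling the adapter directly, outside of the tRPC server. The connection string, chain ID, and world address below are placeholders; everything else follows the signatures in this file and in the indexer entrypoint above.

import { drizzle } from "drizzle-orm/postgres-js";
import postgres from "postgres";
import { createStorageAdapter } from "./createStorageAdapter";

// Placeholder connection string; in the indexer this comes from DATABASE_URL.
const database = drizzle(postgres(process.env.DATABASE_URL ?? "postgres://127.0.0.1:5432/indexer"));
const adapter = await createStorageAdapter(database);

// findAll returns the last indexed block number plus key/value records for each table at this address.
// The chain ID and world address are hypothetical values for illustration.
const { blockNumber, tables } = await adapter.findAll(31337, "0x0000000000000000000000000000000000000000");
console.log(`indexed through block ${blockNumber}; ${tables.length} tables for this address`);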