From 8a9ff880e2ca082301f39dceffac836e7c266082 Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Thu, 19 Oct 2023 11:32:22 +0100 Subject: [PATCH 01/14] feat(store-indexer,store-sync): filter by table and/or key --- packages/store-indexer/package.json | 1 + .../src/postgres/createQueryAdapter.ts | 20 +++++++++++++++-- .../src/sqlite/createQueryAdapter.ts | 20 +++++++++++++++-- packages/store-sync/src/common.ts | 12 ++++++++-- packages/store-sync/src/createStoreSync.ts | 22 ++++++++++++++++--- .../store-sync/src/trpc-indexer/common.ts | 4 ++-- .../src/trpc-indexer/createAppRouter.ts | 14 +++++++++--- pnpm-lock.yaml | 3 +++ 8 files changed, 82 insertions(+), 14 deletions(-) diff --git a/packages/store-indexer/package.json b/packages/store-indexer/package.json index 30b68cfabd..ae0672b847 100644 --- a/packages/store-indexer/package.json +++ b/packages/store-indexer/package.json @@ -37,6 +37,7 @@ "@fastify/cors": "^8.3.0", "@latticexyz/block-logs-stream": "workspace:*", "@latticexyz/common": "workspace:*", + "@latticexyz/protocol-parser": "workspace:*", "@latticexyz/store": "workspace:*", "@latticexyz/store-sync": "workspace:*", "@trpc/client": "10.34.0", diff --git a/packages/store-indexer/src/postgres/createQueryAdapter.ts b/packages/store-indexer/src/postgres/createQueryAdapter.ts index 02eba0a39e..a678454c15 100644 --- a/packages/store-indexer/src/postgres/createQueryAdapter.ts +++ b/packages/store-indexer/src/postgres/createQueryAdapter.ts @@ -4,6 +4,8 @@ import { buildTable, buildInternalTables, getTables } from "@latticexyz/store-sy import { QueryAdapter } from "@latticexyz/store-sync/trpc-indexer"; import { debug } from "../debug"; import { getAddress } from "viem"; +import { isDefined } from "@latticexyz/common/utils"; +import { decodeDynamicField } from "@latticexyz/protocol-parser"; /** * Creates a query adapter for the tRPC server/client to query data from Postgres. @@ -13,7 +15,10 @@ import { getAddress } from "viem"; */ export async function createQueryAdapter(database: PgDatabase): Promise { const adapter: QueryAdapter = { - async findAll({ chainId, address, tableIds = [] }) { + async findAll({ chainId, address, filters = [] }) { + // If _any_ filter has a table ID, this will filter down all data to just those tables. Which mean we can't yet mix table filters with key-only filters. + // TODO: improve this so we can express this in the query (likely need to build and query the "megatable" rather than each distinct SQL table) + const tableIds = Array.from(new Set(filters.map((filter) => filter.tableId).filter(isDefined))); const tables = (await getTables(database)) .filter((table) => address == null || getAddress(address) === getAddress(table.address)) .filter((table) => !tableIds.length || tableIds.includes(table.tableId)); @@ -22,9 +27,20 @@ export async function createQueryAdapter(database: PgDatabase): Promise { const sqliteTable = buildTable(table); const records = await database.select().from(sqliteTable).where(eq(sqliteTable.__isDeleted, false)).execute(); + const filteredRecords = !filters.length + ? 
records + : records.filter((record) => { + const keyTuple = decodeDynamicField("bytes32[]", record.__key); + return filters.some( + (filter) => + (filter.tableId == null || filter.tableId === table.tableId) && + (filter.key0 == null || filter.key0 === keyTuple[0]) && + (filter.key1 == null || filter.key1 === keyTuple[1]) + ); + }); return { ...table, - records: records.map((record) => ({ + records: filteredRecords.map((record) => ({ key: Object.fromEntries(Object.entries(table.keySchema).map(([name]) => [name, record[name]])), value: Object.fromEntries(Object.entries(table.valueSchema).map(([name]) => [name, record[name]])), })), diff --git a/packages/store-indexer/src/sqlite/createQueryAdapter.ts b/packages/store-indexer/src/sqlite/createQueryAdapter.ts index 23286cf556..a92ed3130d 100644 --- a/packages/store-indexer/src/sqlite/createQueryAdapter.ts +++ b/packages/store-indexer/src/sqlite/createQueryAdapter.ts @@ -4,6 +4,8 @@ import { buildTable, chainState, getTables } from "@latticexyz/store-sync/sqlite import { QueryAdapter } from "@latticexyz/store-sync/trpc-indexer"; import { debug } from "../debug"; import { getAddress } from "viem"; +import { isDefined } from "@latticexyz/common/utils"; +import { decodeDynamicField } from "@latticexyz/protocol-parser"; /** * Creates a storage adapter for the tRPC server/client to query data from SQLite. @@ -13,7 +15,10 @@ import { getAddress } from "viem"; */ export async function createQueryAdapter(database: BaseSQLiteDatabase<"sync", any>): Promise { const adapter: QueryAdapter = { - async findAll({ chainId, address, tableIds = [] }) { + async findAll({ chainId, address, filters = [] }) { + // If _any_ filter has a table ID, this will filter down all data to just those tables. Which mean we can't yet mix table filters with key-only filters. + // TODO: improve this so we can express this in the query (likely need to build and query the "megatable" rather than each distinct SQL table) + const tableIds = Array.from(new Set(filters.map((filter) => filter.tableId).filter(isDefined))); const tables = getTables(database) .filter((table) => address == null || getAddress(address) === getAddress(table.address)) .filter((table) => !tableIds.length || tableIds.includes(table.tableId)); @@ -21,9 +26,20 @@ export async function createQueryAdapter(database: BaseSQLiteDatabase<"sync", an const tablesWithRecords = tables.map((table) => { const sqliteTable = buildTable(table); const records = database.select().from(sqliteTable).where(eq(sqliteTable.__isDeleted, false)).all(); + const filteredRecords = !filters.length + ? 
records + : records.filter((record) => { + const keyTuple = decodeDynamicField("bytes32[]", record.__key); + return filters.some( + (filter) => + (filter.tableId == null || filter.tableId === table.tableId) && + (filter.key0 == null || filter.key0 === keyTuple[0]) && + (filter.key1 == null || filter.key1 === keyTuple[1]) + ); + }); return { ...table, - records: records.map((record) => ({ + records: filteredRecords.map((record) => ({ key: Object.fromEntries(Object.entries(table.keySchema).map(([name]) => [name, record[name]])), value: Object.fromEntries(Object.entries(table.valueSchema).map(([name]) => [name, record[name]])), })), diff --git a/packages/store-sync/src/common.ts b/packages/store-sync/src/common.ts index 4f8a714d3b..e603e5d66e 100644 --- a/packages/store-sync/src/common.ts +++ b/packages/store-sync/src/common.ts @@ -26,6 +26,14 @@ export type TableWithRecords = Table & { records: TableRecord[] }; export type StoreEventsLog = Log; export type BlockLogs = { blockNumber: StoreEventsLog["blockNumber"]; logs: StoreEventsLog[] }; +// TODO: should we add address here? +export type SyncFilter = { + tableId?: Hex; + key0?: Hex; + key1?: Hex; + // TODO: decide how many keys to support +}; + export type SyncOptions = { /** * MUD config @@ -42,9 +50,9 @@ export type SyncOptions = { */ address?: Address; /** - * Optional table IDs to filter indexer state and RPC state. + * Optional filters for indexer and RPC state. Useful to narrow down the data received by the client for large worlds. */ - tableIds?: Hex[]; + filters?: SyncFilter[]; /** * Optional block number to start indexing from. Useful for resuming the indexer from a particular point in time or starting after a particular contract deployment. */ diff --git a/packages/store-sync/src/createStoreSync.ts b/packages/store-sync/src/createStoreSync.ts index 0fec87e5b2..df2cfaa80f 100644 --- a/packages/store-sync/src/createStoreSync.ts +++ b/packages/store-sync/src/createStoreSync.ts @@ -4,6 +4,7 @@ import { StorageAdapter, StorageAdapterBlock, StorageAdapterLog, + SyncFilter, SyncOptions, SyncResult, TableWithRecords, @@ -36,6 +37,8 @@ import { internalTableIds } from "./internalTableIds"; const debug = parentDebug.extend("createStoreSync"); +const defaultFilters: SyncFilter[] = internalTableIds.map((tableId) => ({ tableId })); + type CreateStoreSyncOptions = SyncOptions & { storageAdapter: StorageAdapter; onProgress?: (opts: { @@ -52,13 +55,13 @@ export async function createStoreSync onProgress, publicClient, address, - tableIds = [], + filters: initialFilters = [], startBlock: initialStartBlock = 0n, maxBlockRange, initialState, indexerUrl, }: CreateStoreSyncOptions): Promise { - const includedTableIds = new Set(tableIds.length ? [...internalTableIds, ...tableIds] : []); + const filters = initialFilters.length ? [...initialFilters, ...defaultFilters] : []; const initialState$ = defer( async (): Promise< | { @@ -82,7 +85,7 @@ export async function createStoreSync const indexer = createIndexerClient({ url: indexerUrl }); const chainId = publicClient.chain?.id ?? 
(await publicClient.getChainId()); - const result = await indexer.findAll.query({ chainId, address, tableIds: Array.from(includedTableIds) }); + const result = await indexer.findAll.query({ chainId, address, filters }); onProgress?.({ step: SyncStep.SNAPSHOT, @@ -198,8 +201,21 @@ export async function createStoreSync publicClient, address, events: storeEventsAbi, + // TODO: pass filters in here so we can filter at RPC level maxBlockRange, }), + map(({ toBlock, logs }) => { + if (!filters.length) return { toBlock, logs }; + const filteredLogs = logs.filter((log) => + filters.some( + (filter) => + (filter.tableId == null || filter.tableId === log.args.tableId) && + (filter.key0 == null || filter.key0 === log.args.keyTuple[0]) && + (filter.key1 == null || filter.key1 === log.args.keyTuple[1]) + ) + ); + return { toBlock, logs: filteredLogs }; + }), mergeMap(({ toBlock, logs }) => from(groupLogsByBlockNumber(logs, toBlock))), share() ); diff --git a/packages/store-sync/src/trpc-indexer/common.ts b/packages/store-sync/src/trpc-indexer/common.ts index dc5d7005f4..731c940362 100644 --- a/packages/store-sync/src/trpc-indexer/common.ts +++ b/packages/store-sync/src/trpc-indexer/common.ts @@ -1,8 +1,8 @@ import { Hex } from "viem"; -import { TableWithRecords } from "../common"; +import { SyncFilter, TableWithRecords } from "../common"; export type QueryAdapter = { - findAll: (opts: { chainId: number; address?: Hex; tableIds?: Hex[] }) => Promise<{ + findAll: (opts: { chainId: number; address?: Hex; filters?: SyncFilter[] }) => Promise<{ blockNumber: bigint | null; tables: TableWithRecords[]; }>; diff --git a/packages/store-sync/src/trpc-indexer/createAppRouter.ts b/packages/store-sync/src/trpc-indexer/createAppRouter.ts index 71a4fae085..a549875bed 100644 --- a/packages/store-sync/src/trpc-indexer/createAppRouter.ts +++ b/packages/store-sync/src/trpc-indexer/createAppRouter.ts @@ -16,13 +16,21 @@ export function createAppRouter() { z.object({ chainId: z.number(), address: z.string().refine(isHex).optional(), - tableIds: z.array(z.string().refine(isHex)).optional(), + filters: z + .array( + z.object({ + tableId: z.string().refine(isHex).optional(), + key0: z.string().refine(isHex).optional(), + key1: z.string().refine(isHex).optional(), + }) + ) + .optional(), }) ) .query(async (opts): ReturnType => { const { queryAdapter } = opts.ctx; - const { chainId, address, tableIds } = opts.input; - return queryAdapter.findAll({ chainId, address, tableIds }); + const { chainId, address, filters } = opts.input; + return queryAdapter.findAll({ chainId, address, filters }); }), }); } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8942de00f0..25bc7f7a9d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -827,6 +827,9 @@ importers: '@latticexyz/common': specifier: workspace:* version: link:../common + '@latticexyz/protocol-parser': + specifier: workspace:* + version: link:../protocol-parser '@latticexyz/store': specifier: workspace:* version: link:../store From 2b9ab10b2c611d18db211c8c0f6e66385f2fdc60 Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Thu, 19 Oct 2023 16:21:19 +0100 Subject: [PATCH 02/14] pass through sync options --- packages/store-sync/src/postgres/syncToPostgres.ts | 12 ++---------- packages/store-sync/src/recs/syncToRecs.ts | 14 ++------------ packages/store-sync/src/sqlite/syncToSqlite.ts | 12 ++---------- 3 files changed, 6 insertions(+), 32 deletions(-) diff --git a/packages/store-sync/src/postgres/syncToPostgres.ts b/packages/store-sync/src/postgres/syncToPostgres.ts index 
1042ab2832..10ea4cf2af 100644 --- a/packages/store-sync/src/postgres/syncToPostgres.ts +++ b/packages/store-sync/src/postgres/syncToPostgres.ts @@ -28,23 +28,15 @@ export async function syncToPostgres( config, database, publicClient, - address, - startBlock, - maxBlockRange, - indexerUrl, - initialState, startSync = true, + ...syncOptions }: SyncToPostgresOptions): Promise { const { storageAdapter } = await postgresStorage({ database, publicClient, config }); const storeSync = await createStoreSync({ storageAdapter, config, - address, publicClient, - startBlock, - maxBlockRange, - indexerUrl, - initialState, + ...syncOptions, }); const sub = startSync ? storeSync.storedBlockLogs$.subscribe() : null; diff --git a/packages/store-sync/src/recs/syncToRecs.ts b/packages/store-sync/src/recs/syncToRecs.ts index 4759e5f538..87dc02b737 100644 --- a/packages/store-sync/src/recs/syncToRecs.ts +++ b/packages/store-sync/src/recs/syncToRecs.ts @@ -20,13 +20,8 @@ type SyncToRecsResult = SyncResult & export async function syncToRecs({ world, config, - address, - publicClient, - startBlock, - maxBlockRange, - initialState, - indexerUrl, startSync = true, + ...syncOptions }: SyncToRecsOptions): Promise> { const { storageAdapter, components } = recsStorage({ world, @@ -38,12 +33,7 @@ export async function syncToRecs({ const storeSync = await createStoreSync({ storageAdapter, config, - address, - publicClient, - startBlock, - maxBlockRange, - indexerUrl, - initialState, + ...syncOptions, onProgress: ({ step, percentage, latestBlockNumber, lastBlockNumberProcessed, message }) => { // already live, no need for more progress updates if (getComponentValue(components.SyncProgress, singletonEntity)?.step === SyncStep.LIVE) return; diff --git a/packages/store-sync/src/sqlite/syncToSqlite.ts b/packages/store-sync/src/sqlite/syncToSqlite.ts index 210b9677d0..b7751b0bd0 100644 --- a/packages/store-sync/src/sqlite/syncToSqlite.ts +++ b/packages/store-sync/src/sqlite/syncToSqlite.ts @@ -28,22 +28,14 @@ export async function syncToSqlite({ config, database, publicClient, - address, - startBlock, - maxBlockRange, - indexerUrl, - initialState, startSync = true, + ...syncOptions }: SyncToSqliteOptions): Promise { const storeSync = await createStoreSync({ storageAdapter: await sqliteStorage({ database, publicClient, config }), config, - address, publicClient, - startBlock, - maxBlockRange, - indexerUrl, - initialState, + ...syncOptions, }); const sub = startSync ? 
storeSync.storedBlockLogs$.subscribe() : null; From 89d37c57f229e1417d6cd612ae5334fd6ffa008f Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Tue, 24 Oct 2023 15:38:00 +0100 Subject: [PATCH 03/14] require tableId for now --- packages/store-indexer/src/postgres/createQueryAdapter.ts | 7 +++---- packages/store-indexer/src/sqlite/createQueryAdapter.ts | 7 +++---- packages/store-sync/src/common.ts | 7 ++++--- packages/store-sync/src/createStoreSync.ts | 2 +- packages/store-sync/src/trpc-indexer/createAppRouter.ts | 2 +- 5 files changed, 12 insertions(+), 13 deletions(-) diff --git a/packages/store-indexer/src/postgres/createQueryAdapter.ts b/packages/store-indexer/src/postgres/createQueryAdapter.ts index a678454c15..43eb0b0b9a 100644 --- a/packages/store-indexer/src/postgres/createQueryAdapter.ts +++ b/packages/store-indexer/src/postgres/createQueryAdapter.ts @@ -4,7 +4,6 @@ import { buildTable, buildInternalTables, getTables } from "@latticexyz/store-sy import { QueryAdapter } from "@latticexyz/store-sync/trpc-indexer"; import { debug } from "../debug"; import { getAddress } from "viem"; -import { isDefined } from "@latticexyz/common/utils"; import { decodeDynamicField } from "@latticexyz/protocol-parser"; /** @@ -17,8 +16,8 @@ export async function createQueryAdapter(database: PgDatabase): Promise filter.tableId).filter(isDefined))); + // TODO: improve this so we can express this in the query (need to be able to query data across tables more easily) + const tableIds = Array.from(new Set(filters.map((filter) => filter.tableId))); const tables = (await getTables(database)) .filter((table) => address == null || getAddress(address) === getAddress(table.address)) .filter((table) => !tableIds.length || tableIds.includes(table.tableId)); @@ -33,7 +32,7 @@ export async function createQueryAdapter(database: PgDatabase): Promise - (filter.tableId == null || filter.tableId === table.tableId) && + filter.tableId === table.tableId && (filter.key0 == null || filter.key0 === keyTuple[0]) && (filter.key1 == null || filter.key1 === keyTuple[1]) ); diff --git a/packages/store-indexer/src/sqlite/createQueryAdapter.ts b/packages/store-indexer/src/sqlite/createQueryAdapter.ts index a92ed3130d..aee529e4f1 100644 --- a/packages/store-indexer/src/sqlite/createQueryAdapter.ts +++ b/packages/store-indexer/src/sqlite/createQueryAdapter.ts @@ -4,7 +4,6 @@ import { buildTable, chainState, getTables } from "@latticexyz/store-sync/sqlite import { QueryAdapter } from "@latticexyz/store-sync/trpc-indexer"; import { debug } from "../debug"; import { getAddress } from "viem"; -import { isDefined } from "@latticexyz/common/utils"; import { decodeDynamicField } from "@latticexyz/protocol-parser"; /** @@ -17,8 +16,8 @@ export async function createQueryAdapter(database: BaseSQLiteDatabase<"sync", an const adapter: QueryAdapter = { async findAll({ chainId, address, filters = [] }) { // If _any_ filter has a table ID, this will filter down all data to just those tables. Which mean we can't yet mix table filters with key-only filters. 
- // TODO: improve this so we can express this in the query (likely need to build and query the "megatable" rather than each distinct SQL table) - const tableIds = Array.from(new Set(filters.map((filter) => filter.tableId).filter(isDefined))); + // TODO: improve this so we can express this in the query (need to be able to query data across tables more easily) + const tableIds = Array.from(new Set(filters.map((filter) => filter.tableId))); const tables = getTables(database) .filter((table) => address == null || getAddress(address) === getAddress(table.address)) .filter((table) => !tableIds.length || tableIds.includes(table.tableId)); @@ -32,7 +31,7 @@ export async function createQueryAdapter(database: BaseSQLiteDatabase<"sync", an const keyTuple = decodeDynamicField("bytes32[]", record.__key); return filters.some( (filter) => - (filter.tableId == null || filter.tableId === table.tableId) && + filter.tableId === table.tableId && (filter.key0 == null || filter.key0 === keyTuple[0]) && (filter.key1 == null || filter.key1 === keyTuple[1]) ); diff --git a/packages/store-sync/src/common.ts b/packages/store-sync/src/common.ts index e603e5d66e..ed0b14556e 100644 --- a/packages/store-sync/src/common.ts +++ b/packages/store-sync/src/common.ts @@ -26,12 +26,13 @@ export type TableWithRecords = Table & { records: TableRecord[] }; export type StoreEventsLog = Log; export type BlockLogs = { blockNumber: StoreEventsLog["blockNumber"]; logs: StoreEventsLog[] }; -// TODO: should we add address here? export type SyncFilter = { - tableId?: Hex; + // TODO: make optional to enable filtering just on keys (any table) + // this is blocked on reworking data storage so we can more easily query data across tables + tableId: Hex; + // only two keys for now, to reduce complexity of creating indexes on SQL tables key0?: Hex; key1?: Hex; - // TODO: decide how many keys to support }; export type SyncOptions = { diff --git a/packages/store-sync/src/createStoreSync.ts b/packages/store-sync/src/createStoreSync.ts index df2cfaa80f..efdb01c72f 100644 --- a/packages/store-sync/src/createStoreSync.ts +++ b/packages/store-sync/src/createStoreSync.ts @@ -209,7 +209,7 @@ export async function createStoreSync const filteredLogs = logs.filter((log) => filters.some( (filter) => - (filter.tableId == null || filter.tableId === log.args.tableId) && + filter.tableId === log.args.tableId && (filter.key0 == null || filter.key0 === log.args.keyTuple[0]) && (filter.key1 == null || filter.key1 === log.args.keyTuple[1]) ) diff --git a/packages/store-sync/src/trpc-indexer/createAppRouter.ts b/packages/store-sync/src/trpc-indexer/createAppRouter.ts index a549875bed..4d9272e61d 100644 --- a/packages/store-sync/src/trpc-indexer/createAppRouter.ts +++ b/packages/store-sync/src/trpc-indexer/createAppRouter.ts @@ -19,7 +19,7 @@ export function createAppRouter() { filters: z .array( z.object({ - tableId: z.string().refine(isHex).optional(), + tableId: z.string().refine(isHex), key0: z.string().refine(isHex).optional(), key1: z.string().refine(isHex).optional(), }) From ab0139cf83ce760455043b0baa99893b5c708cc0 Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Tue, 24 Oct 2023 15:43:04 +0100 Subject: [PATCH 04/14] add back in deprecated tableIds option --- packages/store-sync/src/common.ts | 4 ++++ packages/store-sync/src/createStoreSync.ts | 7 ++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/store-sync/src/common.ts b/packages/store-sync/src/common.ts index ed0b14556e..1957720455 100644 --- 
a/packages/store-sync/src/common.ts +++ b/packages/store-sync/src/common.ts @@ -54,6 +54,10 @@ export type SyncOptions = { * Optional filters for indexer and RPC state. Useful to narrow down the data received by the client for large worlds. */ filters?: SyncFilter[]; + /** + * @deprecated Use `filters` option instead. + * */ + tableIds?: Hex[]; /** * Optional block number to start indexing from. Useful for resuming the indexer from a particular point in time or starting after a particular contract deployment. */ diff --git a/packages/store-sync/src/createStoreSync.ts b/packages/store-sync/src/createStoreSync.ts index efdb01c72f..c4add205c4 100644 --- a/packages/store-sync/src/createStoreSync.ts +++ b/packages/store-sync/src/createStoreSync.ts @@ -34,6 +34,7 @@ import { SyncStep } from "./SyncStep"; import { chunk, isDefined } from "@latticexyz/common/utils"; import { encodeKey, encodeValueArgs } from "@latticexyz/protocol-parser"; import { internalTableIds } from "./internalTableIds"; +import { table } from "console"; const debug = parentDebug.extend("createStoreSync"); @@ -56,12 +57,16 @@ export async function createStoreSync publicClient, address, filters: initialFilters = [], + tableIds = [], startBlock: initialStartBlock = 0n, maxBlockRange, initialState, indexerUrl, }: CreateStoreSyncOptions): Promise { - const filters = initialFilters.length ? [...initialFilters, ...defaultFilters] : []; + const filters = + initialFilters.length || tableIds.length + ? [...initialFilters, ...tableIds.map((tableId) => ({ tableId })), ...defaultFilters] + : []; const initialState$ = defer( async (): Promise< | { From e7844a3e68ffd5d76db6a3fe26c60315a4a436db Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Tue, 24 Oct 2023 07:48:54 -0700 Subject: [PATCH 05/14] Create empty-starfishes-lick.md --- .changeset/empty-starfishes-lick.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .changeset/empty-starfishes-lick.md diff --git a/.changeset/empty-starfishes-lick.md b/.changeset/empty-starfishes-lick.md new file mode 100644 index 0000000000..e6830d943d --- /dev/null +++ b/.changeset/empty-starfishes-lick.md @@ -0,0 +1,16 @@ +--- +"@latticexyz/store-indexer": major +--- + +Removed `tableIds` filter option in favor of the more flexible `filters` option that accepts `tableId` and an optional `key0` and/or `key1` to filter data by tables and keys. + +If you were using an indexer client directly, you'll need to update your query: + +```diff + await indexer.findAll.query({ + chainId, + address, +- tableIds: ['0x...'], ++ filters: [{ tableId: '0x...' }], + }); +``` From 02707b5194c5185af80aecceadcdc03031f22e3e Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Tue, 24 Oct 2023 07:54:01 -0700 Subject: [PATCH 06/14] Create sharp-falcons-tie.md --- .changeset/sharp-falcons-tie.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .changeset/sharp-falcons-tie.md diff --git a/.changeset/sharp-falcons-tie.md b/.changeset/sharp-falcons-tie.md new file mode 100644 index 0000000000..ee662e49c7 --- /dev/null +++ b/.changeset/sharp-falcons-tie.md @@ -0,0 +1,16 @@ +--- +"@latticexyz/store-sync": minor +--- + +Added a `filters` option to store sync to allow filtering client data on tables and keys. Previously, it was only possible to filter on `tableIds`, but the new filter option allows for more flexible filtering by key. 
+ +If you are building a large MUD application, you can use positional keys as a way to shard data and make it possible to load only the data needed in the client for a particular section of your app. We're using this already in Sky Strife to load match-specific data into match pages without having to load data for all matches, greatly improving load time and client performance. + +```ts +syncToRecs({ + ... + filters: [{ tableId: '0x...', key0: '0x...' }], +}); +``` + +The `tableIds` option is now deprecated and will be removed in the future, but is kept here for backwards compatibility. From 60aa7f440bc315cf47cdf3494ba5beb1e2dd1e3c Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Tue, 24 Oct 2023 16:28:28 +0100 Subject: [PATCH 07/14] fix type --- packages/store-sync/src/createStoreSync.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/store-sync/src/createStoreSync.ts b/packages/store-sync/src/createStoreSync.ts index c4add205c4..df46433fde 100644 --- a/packages/store-sync/src/createStoreSync.ts +++ b/packages/store-sync/src/createStoreSync.ts @@ -63,7 +63,7 @@ export async function createStoreSync initialState, indexerUrl, }: CreateStoreSyncOptions): Promise { - const filters = + const filters: SyncFilter[] = initialFilters.length || tableIds.length ? [...initialFilters, ...tableIds.map((tableId) => ({ tableId })), ...defaultFilters] : []; From 5cd34b66ec490ef3740a0a81c4da4041e8a05cd1 Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Tue, 24 Oct 2023 17:26:54 +0100 Subject: [PATCH 08/14] add e2e tests for filtered data --- e2e/packages/client-vanilla/src/index.ts | 8 + .../client-vanilla/src/mud/setupNetwork.ts | 19 +- e2e/packages/contracts/mud.config.ts | 10 + e2e/packages/contracts/remappings.txt | 6 +- .../contracts/script/PostDeploy.s.sol | 27 ++ e2e/packages/contracts/src/codegen/index.sol | 1 + .../contracts/src/codegen/tables/Position.sol | 257 ++++++++++++++++++ e2e/packages/contracts/worlds.json | 2 +- ...dComponentValue.ts => callPageFunction.ts} | 20 +- .../sync-test/data/expectClientData.ts | 7 +- e2e/packages/sync-test/indexerSync.test.ts | 33 ++- e2e/packages/sync-test/rpcSync.test.ts | 27 +- e2e/packages/sync-test/tsconfig.json | 1 - 13 files changed, 390 insertions(+), 28 deletions(-) create mode 100644 e2e/packages/contracts/script/PostDeploy.s.sol create mode 100644 e2e/packages/contracts/src/codegen/tables/Position.sol rename e2e/packages/sync-test/data/{readComponentValue.ts => callPageFunction.ts} (61%) diff --git a/e2e/packages/client-vanilla/src/index.ts b/e2e/packages/client-vanilla/src/index.ts index 37fe83d84d..464d986f78 100644 --- a/e2e/packages/client-vanilla/src/index.ts +++ b/e2e/packages/client-vanilla/src/index.ts @@ -1,5 +1,6 @@ import { Component, Entity, getComponentValue } from "@latticexyz/recs"; import { setup } from "./mud/setup"; +import { decodeEntity } from "@latticexyz/store-sync/recs"; const { network: { components, latestBlock$, worldContract, waitForTransaction }, @@ -12,6 +13,13 @@ _window.waitForTransaction = waitForTransaction; _window.getComponentValue = (componentName: keyof typeof components, entity: Entity) => getComponentValue(components[componentName] as Component, entity); +_window.getEntities = (componentName: keyof typeof components) => Array.from(components[componentName].entities()); + +_window.getKeys = (componentName: keyof typeof components) => + Array.from(components[componentName].entities()).map((entity) => + decodeEntity(components[componentName].metadata.keySchema, entity) + ); + // 
Update block number in the UI latestBlock$.subscribe((block) => { const element = document.querySelector("#block"); diff --git a/e2e/packages/client-vanilla/src/mud/setupNetwork.ts b/e2e/packages/client-vanilla/src/mud/setupNetwork.ts index 17c1586374..34e93b7bdc 100644 --- a/e2e/packages/client-vanilla/src/mud/setupNetwork.ts +++ b/e2e/packages/client-vanilla/src/mud/setupNetwork.ts @@ -1,10 +1,10 @@ -import { createPublicClient, http, createWalletClient, Hex, parseEther, ClientConfig } from "viem"; +import { createPublicClient, http, createWalletClient, Hex, parseEther, ClientConfig, stringToHex } from "viem"; import { createFaucetService } from "@latticexyz/services/faucet"; import { encodeEntity, syncToRecs } from "@latticexyz/store-sync/recs"; import { getNetworkConfig } from "./getNetworkConfig"; import { world } from "./world"; import IWorldAbi from "contracts/out/IWorld.sol/IWorld.abi.json"; -import { createBurnerAccount, getContract, transportObserver } from "@latticexyz/common"; +import { createBurnerAccount, getContract, resourceToHex, transportObserver } from "@latticexyz/common"; import mudConfig from "contracts/mud.config"; export type SetupNetworkResult = Awaited>; @@ -40,6 +40,21 @@ export async function setupNetwork() { publicClient, startBlock: BigInt(networkConfig.initialBlockNumber), indexerUrl: networkConfig.indexerUrl ?? undefined, + filters: Object.entries(mudConfig.tables).map(([, table]) => { + const tableId = resourceToHex({ + type: table.offchainOnly ? "offchainTable" : "table", + namespace: mudConfig.namespace, + name: table.name, + }); + if (table.name === mudConfig.tables.Position.name) { + return { + tableId, + key0: stringToHex("map1", { size: 32 }), + }; + } else { + return { tableId }; + } + }), }); // Request drip from faucet diff --git a/e2e/packages/contracts/mud.config.ts b/e2e/packages/contracts/mud.config.ts index c3da4297c5..712396e1dd 100644 --- a/e2e/packages/contracts/mud.config.ts +++ b/e2e/packages/contracts/mud.config.ts @@ -37,5 +37,15 @@ export default mudConfig({ value: "bool", }, }, + Position: { + keySchema: { + zone: "bytes32", + x: "int32", + y: "int32", + }, + valueSchema: { + player: "address", + }, + }, }, }); diff --git a/e2e/packages/contracts/remappings.txt b/e2e/packages/contracts/remappings.txt index 97058b20d7..c4d992480e 100644 --- a/e2e/packages/contracts/remappings.txt +++ b/e2e/packages/contracts/remappings.txt @@ -1,3 +1,3 @@ -ds-test/=./node_modules/ds-test/src/ -forge-std/=./node_modules/forge-std/src/ -@latticexyz/=./node_modules/@latticexyz/ +ds-test/=node_modules/ds-test/src/ +forge-std/=node_modules/forge-std/src/ +@latticexyz/=node_modules/@latticexyz/ diff --git a/e2e/packages/contracts/script/PostDeploy.s.sol b/e2e/packages/contracts/script/PostDeploy.s.sol new file mode 100644 index 0000000000..5e7da474d5 --- /dev/null +++ b/e2e/packages/contracts/script/PostDeploy.s.sol @@ -0,0 +1,27 @@ +// SPDX-License-Identifier: MIT +pragma solidity >=0.8.21; + +import { Script } from "forge-std/Script.sol"; +import { console } from "forge-std/console.sol"; +import { StoreSwitch } from "@latticexyz/store/src/StoreSwitch.sol"; +import { IWorld } from "../src/codegen/world/IWorld.sol"; + +import { Position } from "../src/codegen/index.sol"; + +contract PostDeploy is Script { + function run(address worldAddress) external { + StoreSwitch.setStoreAddress(worldAddress); + + uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY"); + vm.startBroadcast(deployerPrivateKey); + + // Set up a bunch of position data so we can demonstrate 
filtering + Position.set({ zone: "map1", x: 1, y: 1, player: msg.sender }); + Position.set({ zone: "map1", x: 2, y: -2, player: msg.sender }); + Position.set({ zone: "map2", x: 0, y: -99, player: msg.sender }); + Position.set({ zone: "map2", x: 0, y: 99, player: msg.sender }); + Position.set({ zone: "map99", x: 99, y: 99, player: msg.sender }); + + vm.stopBroadcast(); + } +} diff --git a/e2e/packages/contracts/src/codegen/index.sol b/e2e/packages/contracts/src/codegen/index.sol index 9350bf7b8c..acc22835c2 100644 --- a/e2e/packages/contracts/src/codegen/index.sol +++ b/e2e/packages/contracts/src/codegen/index.sol @@ -7,3 +7,4 @@ import { Number, NumberTableId } from "./tables/Number.sol"; import { Vector, VectorData, VectorTableId } from "./tables/Vector.sol"; import { NumberList, NumberListTableId } from "./tables/NumberList.sol"; import { Multi, MultiData, MultiTableId } from "./tables/Multi.sol"; +import { Position, PositionTableId } from "./tables/Position.sol"; diff --git a/e2e/packages/contracts/src/codegen/tables/Position.sol b/e2e/packages/contracts/src/codegen/tables/Position.sol new file mode 100644 index 0000000000..8c501c3d16 --- /dev/null +++ b/e2e/packages/contracts/src/codegen/tables/Position.sol @@ -0,0 +1,257 @@ +// SPDX-License-Identifier: MIT +pragma solidity >=0.8.21; + +/* Autogenerated file. Do not edit manually. */ + +// Import schema type +import { SchemaType } from "@latticexyz/schema-type/src/solidity/SchemaType.sol"; + +// Import store internals +import { IStore } from "@latticexyz/store/src/IStore.sol"; +import { StoreSwitch } from "@latticexyz/store/src/StoreSwitch.sol"; +import { StoreCore } from "@latticexyz/store/src/StoreCore.sol"; +import { Bytes } from "@latticexyz/store/src/Bytes.sol"; +import { Memory } from "@latticexyz/store/src/Memory.sol"; +import { SliceLib } from "@latticexyz/store/src/Slice.sol"; +import { EncodeArray } from "@latticexyz/store/src/tightcoder/EncodeArray.sol"; +import { FieldLayout, FieldLayoutLib } from "@latticexyz/store/src/FieldLayout.sol"; +import { Schema, SchemaLib } from "@latticexyz/store/src/Schema.sol"; +import { PackedCounter, PackedCounterLib } from "@latticexyz/store/src/PackedCounter.sol"; +import { ResourceId } from "@latticexyz/store/src/ResourceId.sol"; +import { RESOURCE_TABLE, RESOURCE_OFFCHAIN_TABLE } from "@latticexyz/store/src/storeResourceTypes.sol"; + +ResourceId constant _tableId = ResourceId.wrap( + bytes32(abi.encodePacked(RESOURCE_TABLE, bytes14(""), bytes16("Position"))) +); +ResourceId constant PositionTableId = _tableId; + +FieldLayout constant _fieldLayout = FieldLayout.wrap( + 0x0014010014000000000000000000000000000000000000000000000000000000 +); + +library Position { + /** + * @notice Get the table values' field layout. + * @return _fieldLayout The field layout for the table. + */ + function getFieldLayout() internal pure returns (FieldLayout) { + return _fieldLayout; + } + + /** + * @notice Get the table's key schema. + * @return _keySchema The key schema for the table. + */ + function getKeySchema() internal pure returns (Schema) { + SchemaType[] memory _keySchema = new SchemaType[](3); + _keySchema[0] = SchemaType.BYTES32; + _keySchema[1] = SchemaType.INT32; + _keySchema[2] = SchemaType.INT32; + + return SchemaLib.encode(_keySchema); + } + + /** + * @notice Get the table's value schema. + * @return _valueSchema The value schema for the table. 
+ */ + function getValueSchema() internal pure returns (Schema) { + SchemaType[] memory _valueSchema = new SchemaType[](1); + _valueSchema[0] = SchemaType.ADDRESS; + + return SchemaLib.encode(_valueSchema); + } + + /** + * @notice Get the table's key field names. + * @return keyNames An array of strings with the names of key fields. + */ + function getKeyNames() internal pure returns (string[] memory keyNames) { + keyNames = new string[](3); + keyNames[0] = "zone"; + keyNames[1] = "x"; + keyNames[2] = "y"; + } + + /** + * @notice Get the table's value field names. + * @return fieldNames An array of strings with the names of value fields. + */ + function getFieldNames() internal pure returns (string[] memory fieldNames) { + fieldNames = new string[](1); + fieldNames[0] = "player"; + } + + /** + * @notice Register the table with its config. + */ + function register() internal { + StoreSwitch.registerTable(_tableId, _fieldLayout, getKeySchema(), getValueSchema(), getKeyNames(), getFieldNames()); + } + + /** + * @notice Register the table with its config. + */ + function _register() internal { + StoreCore.registerTable(_tableId, _fieldLayout, getKeySchema(), getValueSchema(), getKeyNames(), getFieldNames()); + } + + /** + * @notice Get player. + */ + function getPlayer(bytes32 zone, int32 x, int32 y) internal view returns (address player) { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + bytes32 _blob = StoreSwitch.getStaticField(_tableId, _keyTuple, 0, _fieldLayout); + return (address(bytes20(_blob))); + } + + /** + * @notice Get player. + */ + function _getPlayer(bytes32 zone, int32 x, int32 y) internal view returns (address player) { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + bytes32 _blob = StoreCore.getStaticField(_tableId, _keyTuple, 0, _fieldLayout); + return (address(bytes20(_blob))); + } + + /** + * @notice Get player. + */ + function get(bytes32 zone, int32 x, int32 y) internal view returns (address player) { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + bytes32 _blob = StoreSwitch.getStaticField(_tableId, _keyTuple, 0, _fieldLayout); + return (address(bytes20(_blob))); + } + + /** + * @notice Get player. + */ + function _get(bytes32 zone, int32 x, int32 y) internal view returns (address player) { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + bytes32 _blob = StoreCore.getStaticField(_tableId, _keyTuple, 0, _fieldLayout); + return (address(bytes20(_blob))); + } + + /** + * @notice Set player. + */ + function setPlayer(bytes32 zone, int32 x, int32 y, address player) internal { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + StoreSwitch.setStaticField(_tableId, _keyTuple, 0, abi.encodePacked((player)), _fieldLayout); + } + + /** + * @notice Set player. 
+ */ + function _setPlayer(bytes32 zone, int32 x, int32 y, address player) internal { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + StoreCore.setStaticField(_tableId, _keyTuple, 0, abi.encodePacked((player)), _fieldLayout); + } + + /** + * @notice Set player. + */ + function set(bytes32 zone, int32 x, int32 y, address player) internal { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + StoreSwitch.setStaticField(_tableId, _keyTuple, 0, abi.encodePacked((player)), _fieldLayout); + } + + /** + * @notice Set player. + */ + function _set(bytes32 zone, int32 x, int32 y, address player) internal { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + StoreCore.setStaticField(_tableId, _keyTuple, 0, abi.encodePacked((player)), _fieldLayout); + } + + /** + * @notice Delete all data for given keys. + */ + function deleteRecord(bytes32 zone, int32 x, int32 y) internal { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + StoreSwitch.deleteRecord(_tableId, _keyTuple); + } + + /** + * @notice Delete all data for given keys. + */ + function _deleteRecord(bytes32 zone, int32 x, int32 y) internal { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + StoreCore.deleteRecord(_tableId, _keyTuple, _fieldLayout); + } + + /** + * @notice Tightly pack static (fixed length) data using this table's schema. + * @return The static data, encoded into a sequence of bytes. + */ + function encodeStatic(address player) internal pure returns (bytes memory) { + return abi.encodePacked(player); + } + + /** + * @notice Encode all of a record's fields. + * @return The static (fixed length) data, encoded into a sequence of bytes. + * @return The lengths of the dynamic fields (packed into a single bytes32 value). + * @return The dyanmic (variable length) data, encoded into a sequence of bytes. + */ + function encode(address player) internal pure returns (bytes memory, PackedCounter, bytes memory) { + bytes memory _staticData = encodeStatic(player); + + PackedCounter _encodedLengths; + bytes memory _dynamicData; + + return (_staticData, _encodedLengths, _dynamicData); + } + + /** + * @notice Encode keys as a bytes32 array using this table's field layout. 
+ */ + function encodeKeyTuple(bytes32 zone, int32 x, int32 y) internal pure returns (bytes32[] memory) { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + return _keyTuple; + } +} diff --git a/e2e/packages/contracts/worlds.json b/e2e/packages/contracts/worlds.json index 287eacb247..a3a1b09f18 100644 --- a/e2e/packages/contracts/worlds.json +++ b/e2e/packages/contracts/worlds.json @@ -1,5 +1,5 @@ { "31337": { - "address": "0x97e55ad21ee5456964460c5465eac35861d2e797" + "address": "0x6e9474e9c83676b9a71133ff96db43e7aa0a4342" } } \ No newline at end of file diff --git a/e2e/packages/sync-test/data/readComponentValue.ts b/e2e/packages/sync-test/data/callPageFunction.ts similarity index 61% rename from e2e/packages/sync-test/data/readComponentValue.ts rename to e2e/packages/sync-test/data/callPageFunction.ts index fd46d6d41f..ece5b02e01 100644 --- a/e2e/packages/sync-test/data/readComponentValue.ts +++ b/e2e/packages/sync-test/data/callPageFunction.ts @@ -1,5 +1,4 @@ import { Page } from "@playwright/test"; -import { Entity } from "@latticexyz/recs"; import { deserialize, serialize } from "./utils"; /** @@ -7,17 +6,16 @@ import { deserialize, serialize } from "./utils"; * This is necessary because `page.evaluate` can only transmit serialisable data, * so we can't just return the entire client store (which includes functions to read data) */ -export async function readComponentValue( +export async function callPageFunction( page: Page, - componentName: string, - entity: Entity + functionName: string, + args: unknown[] ): Promise | undefined> { - const args = [componentName, entity, serialize.toString(), deserialize.toString()]; - const serializedValue = await page.evaluate(async (_args) => { - const [_componentName, _entity, _serializeString, _deserializeString] = _args; - const _serialize = deserializeFunction(_serializeString); - const _deserialize = deserializeFunction(_deserializeString); - const value = await window["getComponentValue"](_componentName, _entity); + const context = [functionName, args, serialize.toString(), deserialize.toString()] as const; + const serializedValue = await page.evaluate(async ([functionName, args, serializeString, deserializeString]) => { + const _serialize = deserializeFunction(serializeString); + const _deserialize = deserializeFunction(deserializeString); + const value = await (window as any)[functionName](...args); const serializedValue = value ? _serialize(value) : undefined; return serializedValue; @@ -27,7 +25,7 @@ export async function readComponentValue( function deserializeFunction(serializedFunction: string) { return eval(`(() => ${serializedFunction})()`); } - }, args); + }, context); return serializedValue ? 
deserialize(serializedValue) : undefined; } diff --git a/e2e/packages/sync-test/data/expectClientData.ts b/e2e/packages/sync-test/data/expectClientData.ts index 92e81c323f..e2861f15cd 100644 --- a/e2e/packages/sync-test/data/expectClientData.ts +++ b/e2e/packages/sync-test/data/expectClientData.ts @@ -1,8 +1,8 @@ import { Page, expect } from "@playwright/test"; import { Data } from "./types"; import config from "../../contracts/mud.config"; -import { readComponentValue } from "./readComponentValue"; import { encodeEntity } from "@latticexyz/store-sync/recs"; +import { callPageFunction } from "./callPageFunction"; /** * Confirms that the client state equals the given state by reading from the client's data store @@ -10,7 +10,10 @@ import { encodeEntity } from "@latticexyz/store-sync/recs"; export async function expectClientData(page: Page, data: Data) { for (const [table, records] of Object.entries(data)) { for (const record of records) { - const value = await readComponentValue(page, table, encodeEntity(config.tables[table].keySchema, record.key)); + const value = await callPageFunction(page, "getComponentValue", [ + table, + encodeEntity(config.tables[table].keySchema, record.key), + ]); expect(value).toMatchObject(record.value); } } diff --git a/e2e/packages/sync-test/indexerSync.test.ts b/e2e/packages/sync-test/indexerSync.test.ts index 093ff0dff5..c4d119396a 100644 --- a/e2e/packages/sync-test/indexerSync.test.ts +++ b/e2e/packages/sync-test/indexerSync.test.ts @@ -1,6 +1,6 @@ -import { afterEach, beforeEach, describe, test } from "vitest"; +import { afterEach, beforeEach, describe, expect, it } from "vitest"; import type { ViteDevServer } from "vite"; -import { expect, Browser, Page } from "@playwright/test"; +import { Browser, Page } from "@playwright/test"; import { createAsyncErrorHandler } from "./asyncErrors"; import { deployContracts, @@ -24,6 +24,7 @@ import { range } from "@latticexyz/utils"; import path from "node:path"; import { rpcHttpUrl } from "./setup/constants"; import { z } from "zod"; +import { callPageFunction } from "./data/callPageFunction"; const env = z .object({ @@ -57,7 +58,7 @@ describe("Sync from indexer", async () => { asyncErrorHandler.resetErrors(); }); - test("should log error if indexer is offline", async () => { + it("should log error if indexer is offline", async () => { await openClientWithRootAccount(page, { indexerUrl: `http://127.0.0.1:9999/trpc` }); await waitForInitialSync(page); @@ -87,7 +88,7 @@ describe("Sync from indexer", async () => { await indexer.kill(); }); - test("should sync test data", async () => { + it("should sync test data", async () => { await openClientWithRootAccount(page, { indexerUrl: indexer.url }); await waitForInitialSync(page); @@ -107,7 +108,7 @@ describe("Sync from indexer", async () => { asyncErrorHandler.expectNoAsyncErrors(); }); - test("should sync number list modified via system", async () => { + it("should sync number list modified via system", async () => { await openClientWithRootAccount(page, { indexerUrl: indexer.url }); await waitForInitialSync(page); @@ -126,5 +127,27 @@ describe("Sync from indexer", async () => { // Should not have thrown errors asyncErrorHandler.expectNoAsyncErrors(); }); + + it("should only have filtered position data", async () => { + await openClientWithRootAccount(page, { indexerUrl: indexer.url }); + await waitForInitialSync(page); + + const entities = await callPageFunction(page, "getKeys", ["Position"]); + expect(entities).toEqual([ + { + x: 1, + y: 1, + zone: 
"0x6d61703100000000000000000000000000000000000000000000000000000000", + }, + { + x: 2, + y: -2, + zone: "0x6d61703100000000000000000000000000000000000000000000000000000000", + }, + ]); + + // Should not have thrown errors + asyncErrorHandler.expectNoAsyncErrors(); + }); }); }); diff --git a/e2e/packages/sync-test/rpcSync.test.ts b/e2e/packages/sync-test/rpcSync.test.ts index 39e786a319..57a53571e8 100644 --- a/e2e/packages/sync-test/rpcSync.test.ts +++ b/e2e/packages/sync-test/rpcSync.test.ts @@ -1,4 +1,4 @@ -import { afterEach, beforeEach, describe, test } from "vitest"; +import { afterEach, beforeEach, describe, expect, it } from "vitest"; import type { ViteDevServer } from "vite"; import { Browser, Page } from "@playwright/test"; import { createAsyncErrorHandler } from "./asyncErrors"; @@ -16,6 +16,7 @@ import { } from "./data"; import { range } from "@latticexyz/utils"; import { rpcHttpUrl } from "./setup/constants"; +import { callPageFunction } from "./data/callPageFunction"; describe("Sync from RPC", async () => { const asyncErrorHandler = createAsyncErrorHandler(); @@ -40,7 +41,7 @@ describe("Sync from RPC", async () => { await webserver.close(); }); - test("should sync test data", async () => { + it("should sync test data", async () => { await openClientWithRootAccount(page); await waitForInitialSync(page); @@ -60,7 +61,7 @@ describe("Sync from RPC", async () => { asyncErrorHandler.expectNoAsyncErrors(); }); - test("should sync number list modified via system", async () => { + it("should sync number list modified via system", async () => { await openClientWithRootAccount(page); await waitForInitialSync(page); @@ -79,4 +80,24 @@ describe("Sync from RPC", async () => { // Should not have thrown errors asyncErrorHandler.expectNoAsyncErrors(); }); + + it("should only have filtered position data", async () => { + await openClientWithRootAccount(page); + await waitForInitialSync(page); + + expect(await callPageFunction(page, "getKeys", ["Position"])).toMatchInlineSnapshot(` + [ + { + "x": 1, + "y": 1, + "zone": "0x6d61703100000000000000000000000000000000000000000000000000000000", + }, + { + "x": 2, + "y": -2, + "zone": "0x6d61703100000000000000000000000000000000000000000000000000000000", + }, + ] + `); + }); }); diff --git a/e2e/packages/sync-test/tsconfig.json b/e2e/packages/sync-test/tsconfig.json index 34a228b8fc..4da186be7b 100644 --- a/e2e/packages/sync-test/tsconfig.json +++ b/e2e/packages/sync-test/tsconfig.json @@ -1,6 +1,5 @@ { "compilerOptions": { - "types": ["vitest/globals"], "target": "ESNext", "module": "ESNext", "lib": ["ESNext", "DOM"], From 7ef893972376e5b9c35798ba7a25359e6659c5de Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Tue, 24 Oct 2023 17:28:57 +0100 Subject: [PATCH 09/14] fix remappings --- examples/minimal/packages/contracts/remappings.txt | 6 +++--- templates/phaser/packages/contracts/remappings.txt | 6 +++--- templates/react/packages/contracts/remappings.txt | 6 +++--- templates/threejs/packages/contracts/remappings.txt | 6 +++--- templates/vanilla/packages/contracts/remappings.txt | 6 +++--- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/examples/minimal/packages/contracts/remappings.txt b/examples/minimal/packages/contracts/remappings.txt index 97058b20d7..c4d992480e 100644 --- a/examples/minimal/packages/contracts/remappings.txt +++ b/examples/minimal/packages/contracts/remappings.txt @@ -1,3 +1,3 @@ -ds-test/=./node_modules/ds-test/src/ -forge-std/=./node_modules/forge-std/src/ -@latticexyz/=./node_modules/@latticexyz/ 
+ds-test/=node_modules/ds-test/src/ +forge-std/=node_modules/forge-std/src/ +@latticexyz/=node_modules/@latticexyz/ diff --git a/templates/phaser/packages/contracts/remappings.txt b/templates/phaser/packages/contracts/remappings.txt index 97058b20d7..c4d992480e 100644 --- a/templates/phaser/packages/contracts/remappings.txt +++ b/templates/phaser/packages/contracts/remappings.txt @@ -1,3 +1,3 @@ -ds-test/=./node_modules/ds-test/src/ -forge-std/=./node_modules/forge-std/src/ -@latticexyz/=./node_modules/@latticexyz/ +ds-test/=node_modules/ds-test/src/ +forge-std/=node_modules/forge-std/src/ +@latticexyz/=node_modules/@latticexyz/ diff --git a/templates/react/packages/contracts/remappings.txt b/templates/react/packages/contracts/remappings.txt index 97058b20d7..c4d992480e 100644 --- a/templates/react/packages/contracts/remappings.txt +++ b/templates/react/packages/contracts/remappings.txt @@ -1,3 +1,3 @@ -ds-test/=./node_modules/ds-test/src/ -forge-std/=./node_modules/forge-std/src/ -@latticexyz/=./node_modules/@latticexyz/ +ds-test/=node_modules/ds-test/src/ +forge-std/=node_modules/forge-std/src/ +@latticexyz/=node_modules/@latticexyz/ diff --git a/templates/threejs/packages/contracts/remappings.txt b/templates/threejs/packages/contracts/remappings.txt index 97058b20d7..c4d992480e 100644 --- a/templates/threejs/packages/contracts/remappings.txt +++ b/templates/threejs/packages/contracts/remappings.txt @@ -1,3 +1,3 @@ -ds-test/=./node_modules/ds-test/src/ -forge-std/=./node_modules/forge-std/src/ -@latticexyz/=./node_modules/@latticexyz/ +ds-test/=node_modules/ds-test/src/ +forge-std/=node_modules/forge-std/src/ +@latticexyz/=node_modules/@latticexyz/ diff --git a/templates/vanilla/packages/contracts/remappings.txt b/templates/vanilla/packages/contracts/remappings.txt index 97058b20d7..c4d992480e 100644 --- a/templates/vanilla/packages/contracts/remappings.txt +++ b/templates/vanilla/packages/contracts/remappings.txt @@ -1,3 +1,3 @@ -ds-test/=./node_modules/ds-test/src/ -forge-std/=./node_modules/forge-std/src/ -@latticexyz/=./node_modules/@latticexyz/ +ds-test/=node_modules/ds-test/src/ +forge-std/=node_modules/forge-std/src/ +@latticexyz/=node_modules/@latticexyz/ From b16596006281819e3f6ef72bad0c702ead14ea4e Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Tue, 24 Oct 2023 17:39:04 +0100 Subject: [PATCH 10/14] remove index exports --- e2e/packages/sync-test/data/index.ts | 10 ---------- 1 file changed, 10 deletions(-) delete mode 100644 e2e/packages/sync-test/data/index.ts diff --git a/e2e/packages/sync-test/data/index.ts b/e2e/packages/sync-test/data/index.ts deleted file mode 100644 index 045a4d5727..0000000000 --- a/e2e/packages/sync-test/data/index.ts +++ /dev/null @@ -1,10 +0,0 @@ -export * from "./callWorld"; -export * from "./encodeTestData"; -export * from "./expectClientData"; -export * from "./mergeTestData"; -export * from "./numberListSystem"; -export * from "./readComponentValue"; -export * from "./setContractData"; -export * from "./testData"; -export * from "./types"; -export * from "./waitForInitialSync"; From bf91e9526e53c4ddae11ec179fee946b7f55fa78 Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Tue, 24 Oct 2023 17:39:59 +0100 Subject: [PATCH 11/14] revert remappings in favor of #1823 --- e2e/packages/contracts/remappings.txt | 6 +++--- examples/minimal/packages/contracts/remappings.txt | 6 +++--- templates/phaser/packages/contracts/remappings.txt | 6 +++--- templates/react/packages/contracts/remappings.txt | 6 +++--- 
templates/threejs/packages/contracts/remappings.txt | 6 +++--- templates/vanilla/packages/contracts/remappings.txt | 6 +++--- 6 files changed, 18 insertions(+), 18 deletions(-) diff --git a/e2e/packages/contracts/remappings.txt b/e2e/packages/contracts/remappings.txt index c4d992480e..97058b20d7 100644 --- a/e2e/packages/contracts/remappings.txt +++ b/e2e/packages/contracts/remappings.txt @@ -1,3 +1,3 @@ -ds-test/=node_modules/ds-test/src/ -forge-std/=node_modules/forge-std/src/ -@latticexyz/=node_modules/@latticexyz/ +ds-test/=./node_modules/ds-test/src/ +forge-std/=./node_modules/forge-std/src/ +@latticexyz/=./node_modules/@latticexyz/ diff --git a/examples/minimal/packages/contracts/remappings.txt b/examples/minimal/packages/contracts/remappings.txt index c4d992480e..97058b20d7 100644 --- a/examples/minimal/packages/contracts/remappings.txt +++ b/examples/minimal/packages/contracts/remappings.txt @@ -1,3 +1,3 @@ -ds-test/=node_modules/ds-test/src/ -forge-std/=node_modules/forge-std/src/ -@latticexyz/=node_modules/@latticexyz/ +ds-test/=./node_modules/ds-test/src/ +forge-std/=./node_modules/forge-std/src/ +@latticexyz/=./node_modules/@latticexyz/ diff --git a/templates/phaser/packages/contracts/remappings.txt b/templates/phaser/packages/contracts/remappings.txt index c4d992480e..97058b20d7 100644 --- a/templates/phaser/packages/contracts/remappings.txt +++ b/templates/phaser/packages/contracts/remappings.txt @@ -1,3 +1,3 @@ -ds-test/=node_modules/ds-test/src/ -forge-std/=node_modules/forge-std/src/ -@latticexyz/=node_modules/@latticexyz/ +ds-test/=./node_modules/ds-test/src/ +forge-std/=./node_modules/forge-std/src/ +@latticexyz/=./node_modules/@latticexyz/ diff --git a/templates/react/packages/contracts/remappings.txt b/templates/react/packages/contracts/remappings.txt index c4d992480e..97058b20d7 100644 --- a/templates/react/packages/contracts/remappings.txt +++ b/templates/react/packages/contracts/remappings.txt @@ -1,3 +1,3 @@ -ds-test/=node_modules/ds-test/src/ -forge-std/=node_modules/forge-std/src/ -@latticexyz/=node_modules/@latticexyz/ +ds-test/=./node_modules/ds-test/src/ +forge-std/=./node_modules/forge-std/src/ +@latticexyz/=./node_modules/@latticexyz/ diff --git a/templates/threejs/packages/contracts/remappings.txt b/templates/threejs/packages/contracts/remappings.txt index c4d992480e..97058b20d7 100644 --- a/templates/threejs/packages/contracts/remappings.txt +++ b/templates/threejs/packages/contracts/remappings.txt @@ -1,3 +1,3 @@ -ds-test/=node_modules/ds-test/src/ -forge-std/=node_modules/forge-std/src/ -@latticexyz/=node_modules/@latticexyz/ +ds-test/=./node_modules/ds-test/src/ +forge-std/=./node_modules/forge-std/src/ +@latticexyz/=./node_modules/@latticexyz/ diff --git a/templates/vanilla/packages/contracts/remappings.txt b/templates/vanilla/packages/contracts/remappings.txt index c4d992480e..97058b20d7 100644 --- a/templates/vanilla/packages/contracts/remappings.txt +++ b/templates/vanilla/packages/contracts/remappings.txt @@ -1,3 +1,3 @@ -ds-test/=node_modules/ds-test/src/ -forge-std/=node_modules/forge-std/src/ -@latticexyz/=node_modules/@latticexyz/ +ds-test/=./node_modules/ds-test/src/ +forge-std/=./node_modules/forge-std/src/ +@latticexyz/=./node_modules/@latticexyz/ From 1b4764f0cdab52b8fa1d59bb0d01450e822491f7 Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Tue, 24 Oct 2023 17:43:07 +0100 Subject: [PATCH 12/14] fix imports --- e2e/packages/sync-test/indexerSync.test.ts | 17 ++++++----------- e2e/packages/sync-test/rpcSync.test.ts | 17 ++++++----------- 2 
files changed, 12 insertions(+), 22 deletions(-) diff --git a/e2e/packages/sync-test/indexerSync.test.ts b/e2e/packages/sync-test/indexerSync.test.ts index c4d119396a..61bc5a0e5f 100644 --- a/e2e/packages/sync-test/indexerSync.test.ts +++ b/e2e/packages/sync-test/indexerSync.test.ts @@ -9,22 +9,17 @@ import { startIndexer, openClientWithRootAccount, } from "./setup"; -import { - setContractData, - expectClientData, - testData1, - mergeTestData, - testData2, - waitForInitialSync, - push, - pushRange, - pop, -} from "./data"; import { range } from "@latticexyz/utils"; import path from "node:path"; import { rpcHttpUrl } from "./setup/constants"; import { z } from "zod"; import { callPageFunction } from "./data/callPageFunction"; +import { expectClientData } from "./data/expectClientData"; +import { mergeTestData } from "./data/mergeTestData"; +import { push, pushRange, pop } from "./data/numberListSystem"; +import { setContractData } from "./data/setContractData"; +import { testData1, testData2 } from "./data/testData"; +import { waitForInitialSync } from "./data/waitForInitialSync"; const env = z .object({ diff --git a/e2e/packages/sync-test/rpcSync.test.ts b/e2e/packages/sync-test/rpcSync.test.ts index 57a53571e8..b90be12ddc 100644 --- a/e2e/packages/sync-test/rpcSync.test.ts +++ b/e2e/packages/sync-test/rpcSync.test.ts @@ -3,20 +3,15 @@ import type { ViteDevServer } from "vite"; import { Browser, Page } from "@playwright/test"; import { createAsyncErrorHandler } from "./asyncErrors"; import { deployContracts, startViteServer, startBrowserAndPage, openClientWithRootAccount } from "./setup"; -import { - setContractData, - expectClientData, - testData1, - testData2, - mergeTestData, - waitForInitialSync, - push, - pushRange, - pop, -} from "./data"; import { range } from "@latticexyz/utils"; import { rpcHttpUrl } from "./setup/constants"; import { callPageFunction } from "./data/callPageFunction"; +import { expectClientData } from "./data/expectClientData"; +import { mergeTestData } from "./data/mergeTestData"; +import { push, pushRange, pop } from "./data/numberListSystem"; +import { setContractData } from "./data/setContractData"; +import { testData1, testData2 } from "./data/testData"; +import { waitForInitialSync } from "./data/waitForInitialSync"; describe("Sync from RPC", async () => { const asyncErrorHandler = createAsyncErrorHandler(); From 5e0bcea152996a75d6954b911f6940b8dd70e4a0 Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Tue, 24 Oct 2023 11:43:07 -0700 Subject: [PATCH 13/14] Update packages/store-sync/src/createStoreSync.ts Co-authored-by: alvarius --- packages/store-sync/src/createStoreSync.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/store-sync/src/createStoreSync.ts b/packages/store-sync/src/createStoreSync.ts index df46433fde..c8d926d23c 100644 --- a/packages/store-sync/src/createStoreSync.ts +++ b/packages/store-sync/src/createStoreSync.ts @@ -34,7 +34,6 @@ import { SyncStep } from "./SyncStep"; import { chunk, isDefined } from "@latticexyz/common/utils"; import { encodeKey, encodeValueArgs } from "@latticexyz/protocol-parser"; import { internalTableIds } from "./internalTableIds"; -import { table } from "console"; const debug = parentDebug.extend("createStoreSync"); From f7a366a47823b5d069935dae3240e88606fd83a9 Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Wed, 25 Oct 2023 10:19:25 +0100 Subject: [PATCH 14/14] add typedoc to SyncFilter --- packages/store-sync/src/common.ts | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) 
diff --git a/packages/store-sync/src/common.ts b/packages/store-sync/src/common.ts index 1957720455..6caa2f46d7 100644 --- a/packages/store-sync/src/common.ts +++ b/packages/store-sync/src/common.ts @@ -26,12 +26,21 @@ export type TableWithRecords = Table & { records: TableRecord[] }; export type StoreEventsLog = Log; export type BlockLogs = { blockNumber: StoreEventsLog["blockNumber"]; logs: StoreEventsLog[] }; +// only two keys for now, to reduce complexity of creating indexes on SQL tables +// TODO: make tableId optional to enable filtering just on keys (any table) +// this is blocked on reworking data storage so we can more easily query data across tables export type SyncFilter = { - // TODO: make optional to enable filtering just on keys (any table) - // this is blocked on reworking data storage so we can more easily query data across tables + /** + * Filter by the `bytes32` table ID. + */ tableId: Hex; - // only two keys for now, to reduce complexity of creating indexes on SQL tables + /** + * Optionally filter by the `bytes32` value of the key in the first position (index zero of the record's key tuple). + */ key0?: Hex; + /** + * Optionally filter by the `bytes32` value of the key in the second position (index one of the record's key tuple). + */ key1?: Hex; };
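For reference, a minimal usage sketch of the `filters` option introduced in this series, written in the same style as the changeset example above. The table ID, key value, and the surrounding client setup (`world`, `config`, `address`, `publicClient`) are assumptions for illustration, not part of the patch:

```ts
import { stringToHex } from "viem";
import { resourceToHex } from "@latticexyz/common";
import { syncToRecs } from "@latticexyz/store-sync/recs";

// Hypothetical table and key values, encoded the same way as in the e2e client above.
const positionTableId = resourceToHex({ type: "table", namespace: "", name: "Position" });
const zoneKey = stringToHex("map1", { size: 32 });

const { components } = await syncToRecs({
  world, // RECS world, assumed to exist in the client setup
  config, // MUD config, assumed
  address, // world contract address, assumed
  publicClient, // viem public client, assumed
  // Sync only Position records whose first key (zone) is "map1".
  // An optional `key1` would further constrain the second key-tuple position.
  filters: [{ tableId: positionTableId, key0: zoneKey }],
});
```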