From f6d214e3d79f9591fddd3687aa987a57f417256c Mon Sep 17 00:00:00 2001 From: Kevin Ingersoll Date: Wed, 25 Oct 2023 10:19:46 +0100 Subject: [PATCH] feat(store-indexer,store-sync): filter by table and key (#1794) Co-authored-by: alvarius --- .changeset/empty-starfishes-lick.md | 16 ++ .changeset/sharp-falcons-tie.md | 16 ++ e2e/packages/client-vanilla/src/index.ts | 8 + .../client-vanilla/src/mud/setupNetwork.ts | 19 +- e2e/packages/contracts/mud.config.ts | 10 + .../contracts/script/PostDeploy.s.sol | 27 ++ e2e/packages/contracts/src/codegen/index.sol | 1 + .../contracts/src/codegen/tables/Position.sol | 257 ++++++++++++++++++ e2e/packages/contracts/worlds.json | 2 +- ...dComponentValue.ts => callPageFunction.ts} | 20 +- .../sync-test/data/expectClientData.ts | 7 +- e2e/packages/sync-test/data/index.ts | 10 - e2e/packages/sync-test/indexerSync.test.ts | 50 ++-- e2e/packages/sync-test/rpcSync.test.ts | 44 ++- e2e/packages/sync-test/tsconfig.json | 1 - packages/store-indexer/package.json | 1 + .../src/postgres/createQueryAdapter.ts | 19 +- .../src/sqlite/createQueryAdapter.ts | 19 +- packages/store-sync/src/common.ts | 24 +- packages/store-sync/src/createStoreSync.ts | 24 +- .../store-sync/src/postgres/syncToPostgres.ts | 12 +- packages/store-sync/src/recs/syncToRecs.ts | 14 +- .../store-sync/src/sqlite/syncToSqlite.ts | 12 +- .../store-sync/src/trpc-indexer/common.ts | 4 +- .../src/trpc-indexer/createAppRouter.ts | 14 +- pnpm-lock.yaml | 3 + 26 files changed, 533 insertions(+), 101 deletions(-) create mode 100644 .changeset/empty-starfishes-lick.md create mode 100644 .changeset/sharp-falcons-tie.md create mode 100644 e2e/packages/contracts/script/PostDeploy.s.sol create mode 100644 e2e/packages/contracts/src/codegen/tables/Position.sol rename e2e/packages/sync-test/data/{readComponentValue.ts => callPageFunction.ts} (61%) delete mode 100644 e2e/packages/sync-test/data/index.ts diff --git a/.changeset/empty-starfishes-lick.md b/.changeset/empty-starfishes-lick.md new 
file mode 100644 index 0000000000..e6830d943d --- /dev/null +++ b/.changeset/empty-starfishes-lick.md @@ -0,0 +1,16 @@ +--- +"@latticexyz/store-indexer": major +--- + +Removed `tableIds` filter option in favor of the more flexible `filters` option that accepts `tableId` and an optional `key0` and/or `key1` to filter data by tables and keys. + +If you were using an indexer client directly, you'll need to update your query: + +```diff + await indexer.findAll.query({ + chainId, + address, +- tableIds: ['0x...'], ++ filters: [{ tableId: '0x...' }], + }); +``` diff --git a/.changeset/sharp-falcons-tie.md b/.changeset/sharp-falcons-tie.md new file mode 100644 index 0000000000..ee662e49c7 --- /dev/null +++ b/.changeset/sharp-falcons-tie.md @@ -0,0 +1,16 @@ +--- +"@latticexyz/store-sync": minor +--- + +Added a `filters` option to store sync to allow filtering client data on tables and keys. Previously, it was only possible to filter on `tableIds`, but the new filter option allows for more flexible filtering by key. + +If you are building a large MUD application, you can use positional keys as a way to shard data and make it possible to load only the data needed in the client for a particular section of your app. We're using this already in Sky Strife to load match-specific data into match pages without having to load data for all matches, greatly improving load time and client performance. + +```ts +syncToRecs({ + ... + filters: [{ tableId: '0x...', key0: '0x...' }], +}); +``` + +The `tableIds` option is now deprecated and will be removed in the future, but is kept here for backwards compatibility. 
diff --git a/e2e/packages/client-vanilla/src/index.ts b/e2e/packages/client-vanilla/src/index.ts index 37fe83d84d..464d986f78 100644 --- a/e2e/packages/client-vanilla/src/index.ts +++ b/e2e/packages/client-vanilla/src/index.ts @@ -1,5 +1,6 @@ import { Component, Entity, getComponentValue } from "@latticexyz/recs"; import { setup } from "./mud/setup"; +import { decodeEntity } from "@latticexyz/store-sync/recs"; const { network: { components, latestBlock$, worldContract, waitForTransaction }, @@ -12,6 +13,13 @@ _window.waitForTransaction = waitForTransaction; _window.getComponentValue = (componentName: keyof typeof components, entity: Entity) => getComponentValue(components[componentName] as Component, entity); +_window.getEntities = (componentName: keyof typeof components) => Array.from(components[componentName].entities()); + +_window.getKeys = (componentName: keyof typeof components) => + Array.from(components[componentName].entities()).map((entity) => + decodeEntity(components[componentName].metadata.keySchema, entity) + ); + // Update block number in the UI latestBlock$.subscribe((block) => { const element = document.querySelector("#block"); diff --git a/e2e/packages/client-vanilla/src/mud/setupNetwork.ts b/e2e/packages/client-vanilla/src/mud/setupNetwork.ts index 17c1586374..34e93b7bdc 100644 --- a/e2e/packages/client-vanilla/src/mud/setupNetwork.ts +++ b/e2e/packages/client-vanilla/src/mud/setupNetwork.ts @@ -1,10 +1,10 @@ -import { createPublicClient, http, createWalletClient, Hex, parseEther, ClientConfig } from "viem"; +import { createPublicClient, http, createWalletClient, Hex, parseEther, ClientConfig, stringToHex } from "viem"; import { createFaucetService } from "@latticexyz/services/faucet"; import { encodeEntity, syncToRecs } from "@latticexyz/store-sync/recs"; import { getNetworkConfig } from "./getNetworkConfig"; import { world } from "./world"; import IWorldAbi from "contracts/out/IWorld.sol/IWorld.abi.json"; -import { createBurnerAccount, 
getContract, transportObserver } from "@latticexyz/common"; +import { createBurnerAccount, getContract, resourceToHex, transportObserver } from "@latticexyz/common"; import mudConfig from "contracts/mud.config"; export type SetupNetworkResult = Awaited>; @@ -40,6 +40,21 @@ export async function setupNetwork() { publicClient, startBlock: BigInt(networkConfig.initialBlockNumber), indexerUrl: networkConfig.indexerUrl ?? undefined, + filters: Object.entries(mudConfig.tables).map(([, table]) => { + const tableId = resourceToHex({ + type: table.offchainOnly ? "offchainTable" : "table", + namespace: mudConfig.namespace, + name: table.name, + }); + if (table.name === mudConfig.tables.Position.name) { + return { + tableId, + key0: stringToHex("map1", { size: 32 }), + }; + } else { + return { tableId }; + } + }), }); // Request drip from faucet diff --git a/e2e/packages/contracts/mud.config.ts b/e2e/packages/contracts/mud.config.ts index c3da4297c5..712396e1dd 100644 --- a/e2e/packages/contracts/mud.config.ts +++ b/e2e/packages/contracts/mud.config.ts @@ -37,5 +37,15 @@ export default mudConfig({ value: "bool", }, }, + Position: { + keySchema: { + zone: "bytes32", + x: "int32", + y: "int32", + }, + valueSchema: { + player: "address", + }, + }, }, }); diff --git a/e2e/packages/contracts/script/PostDeploy.s.sol b/e2e/packages/contracts/script/PostDeploy.s.sol new file mode 100644 index 0000000000..5e7da474d5 --- /dev/null +++ b/e2e/packages/contracts/script/PostDeploy.s.sol @@ -0,0 +1,27 @@ +// SPDX-License-Identifier: MIT +pragma solidity >=0.8.21; + +import { Script } from "forge-std/Script.sol"; +import { console } from "forge-std/console.sol"; +import { StoreSwitch } from "@latticexyz/store/src/StoreSwitch.sol"; +import { IWorld } from "../src/codegen/world/IWorld.sol"; + +import { Position } from "../src/codegen/index.sol"; + +contract PostDeploy is Script { + function run(address worldAddress) external { + StoreSwitch.setStoreAddress(worldAddress); + + uint256 
deployerPrivateKey = vm.envUint("PRIVATE_KEY"); + vm.startBroadcast(deployerPrivateKey); + + // Set up a bunch of position data so we can demonstrate filtering + Position.set({ zone: "map1", x: 1, y: 1, player: msg.sender }); + Position.set({ zone: "map1", x: 2, y: -2, player: msg.sender }); + Position.set({ zone: "map2", x: 0, y: -99, player: msg.sender }); + Position.set({ zone: "map2", x: 0, y: 99, player: msg.sender }); + Position.set({ zone: "map99", x: 99, y: 99, player: msg.sender }); + + vm.stopBroadcast(); + } +} diff --git a/e2e/packages/contracts/src/codegen/index.sol b/e2e/packages/contracts/src/codegen/index.sol index 9350bf7b8c..acc22835c2 100644 --- a/e2e/packages/contracts/src/codegen/index.sol +++ b/e2e/packages/contracts/src/codegen/index.sol @@ -7,3 +7,4 @@ import { Number, NumberTableId } from "./tables/Number.sol"; import { Vector, VectorData, VectorTableId } from "./tables/Vector.sol"; import { NumberList, NumberListTableId } from "./tables/NumberList.sol"; import { Multi, MultiData, MultiTableId } from "./tables/Multi.sol"; +import { Position, PositionTableId } from "./tables/Position.sol"; diff --git a/e2e/packages/contracts/src/codegen/tables/Position.sol b/e2e/packages/contracts/src/codegen/tables/Position.sol new file mode 100644 index 0000000000..8c501c3d16 --- /dev/null +++ b/e2e/packages/contracts/src/codegen/tables/Position.sol @@ -0,0 +1,257 @@ +// SPDX-License-Identifier: MIT +pragma solidity >=0.8.21; + +/* Autogenerated file. Do not edit manually. 
*/ + +// Import schema type +import { SchemaType } from "@latticexyz/schema-type/src/solidity/SchemaType.sol"; + +// Import store internals +import { IStore } from "@latticexyz/store/src/IStore.sol"; +import { StoreSwitch } from "@latticexyz/store/src/StoreSwitch.sol"; +import { StoreCore } from "@latticexyz/store/src/StoreCore.sol"; +import { Bytes } from "@latticexyz/store/src/Bytes.sol"; +import { Memory } from "@latticexyz/store/src/Memory.sol"; +import { SliceLib } from "@latticexyz/store/src/Slice.sol"; +import { EncodeArray } from "@latticexyz/store/src/tightcoder/EncodeArray.sol"; +import { FieldLayout, FieldLayoutLib } from "@latticexyz/store/src/FieldLayout.sol"; +import { Schema, SchemaLib } from "@latticexyz/store/src/Schema.sol"; +import { PackedCounter, PackedCounterLib } from "@latticexyz/store/src/PackedCounter.sol"; +import { ResourceId } from "@latticexyz/store/src/ResourceId.sol"; +import { RESOURCE_TABLE, RESOURCE_OFFCHAIN_TABLE } from "@latticexyz/store/src/storeResourceTypes.sol"; + +ResourceId constant _tableId = ResourceId.wrap( + bytes32(abi.encodePacked(RESOURCE_TABLE, bytes14(""), bytes16("Position"))) +); +ResourceId constant PositionTableId = _tableId; + +FieldLayout constant _fieldLayout = FieldLayout.wrap( + 0x0014010014000000000000000000000000000000000000000000000000000000 +); + +library Position { + /** + * @notice Get the table values' field layout. + * @return _fieldLayout The field layout for the table. + */ + function getFieldLayout() internal pure returns (FieldLayout) { + return _fieldLayout; + } + + /** + * @notice Get the table's key schema. + * @return _keySchema The key schema for the table. + */ + function getKeySchema() internal pure returns (Schema) { + SchemaType[] memory _keySchema = new SchemaType[](3); + _keySchema[0] = SchemaType.BYTES32; + _keySchema[1] = SchemaType.INT32; + _keySchema[2] = SchemaType.INT32; + + return SchemaLib.encode(_keySchema); + } + + /** + * @notice Get the table's value schema. 
+ * @return _valueSchema The value schema for the table. + */ + function getValueSchema() internal pure returns (Schema) { + SchemaType[] memory _valueSchema = new SchemaType[](1); + _valueSchema[0] = SchemaType.ADDRESS; + + return SchemaLib.encode(_valueSchema); + } + + /** + * @notice Get the table's key field names. + * @return keyNames An array of strings with the names of key fields. + */ + function getKeyNames() internal pure returns (string[] memory keyNames) { + keyNames = new string[](3); + keyNames[0] = "zone"; + keyNames[1] = "x"; + keyNames[2] = "y"; + } + + /** + * @notice Get the table's value field names. + * @return fieldNames An array of strings with the names of value fields. + */ + function getFieldNames() internal pure returns (string[] memory fieldNames) { + fieldNames = new string[](1); + fieldNames[0] = "player"; + } + + /** + * @notice Register the table with its config. + */ + function register() internal { + StoreSwitch.registerTable(_tableId, _fieldLayout, getKeySchema(), getValueSchema(), getKeyNames(), getFieldNames()); + } + + /** + * @notice Register the table with its config. + */ + function _register() internal { + StoreCore.registerTable(_tableId, _fieldLayout, getKeySchema(), getValueSchema(), getKeyNames(), getFieldNames()); + } + + /** + * @notice Get player. + */ + function getPlayer(bytes32 zone, int32 x, int32 y) internal view returns (address player) { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + bytes32 _blob = StoreSwitch.getStaticField(_tableId, _keyTuple, 0, _fieldLayout); + return (address(bytes20(_blob))); + } + + /** + * @notice Get player. 
+ */ + function _getPlayer(bytes32 zone, int32 x, int32 y) internal view returns (address player) { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + bytes32 _blob = StoreCore.getStaticField(_tableId, _keyTuple, 0, _fieldLayout); + return (address(bytes20(_blob))); + } + + /** + * @notice Get player. + */ + function get(bytes32 zone, int32 x, int32 y) internal view returns (address player) { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + bytes32 _blob = StoreSwitch.getStaticField(_tableId, _keyTuple, 0, _fieldLayout); + return (address(bytes20(_blob))); + } + + /** + * @notice Get player. + */ + function _get(bytes32 zone, int32 x, int32 y) internal view returns (address player) { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + bytes32 _blob = StoreCore.getStaticField(_tableId, _keyTuple, 0, _fieldLayout); + return (address(bytes20(_blob))); + } + + /** + * @notice Set player. + */ + function setPlayer(bytes32 zone, int32 x, int32 y, address player) internal { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + StoreSwitch.setStaticField(_tableId, _keyTuple, 0, abi.encodePacked((player)), _fieldLayout); + } + + /** + * @notice Set player. 
+ */ + function _setPlayer(bytes32 zone, int32 x, int32 y, address player) internal { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + StoreCore.setStaticField(_tableId, _keyTuple, 0, abi.encodePacked((player)), _fieldLayout); + } + + /** + * @notice Set player. + */ + function set(bytes32 zone, int32 x, int32 y, address player) internal { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + StoreSwitch.setStaticField(_tableId, _keyTuple, 0, abi.encodePacked((player)), _fieldLayout); + } + + /** + * @notice Set player. + */ + function _set(bytes32 zone, int32 x, int32 y, address player) internal { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + StoreCore.setStaticField(_tableId, _keyTuple, 0, abi.encodePacked((player)), _fieldLayout); + } + + /** + * @notice Delete all data for given keys. + */ + function deleteRecord(bytes32 zone, int32 x, int32 y) internal { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + StoreSwitch.deleteRecord(_tableId, _keyTuple); + } + + /** + * @notice Delete all data for given keys. + */ + function _deleteRecord(bytes32 zone, int32 x, int32 y) internal { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + StoreCore.deleteRecord(_tableId, _keyTuple, _fieldLayout); + } + + /** + * @notice Tightly pack static (fixed length) data using this table's schema. + * @return The static data, encoded into a sequence of bytes. 
+ */ + function encodeStatic(address player) internal pure returns (bytes memory) { + return abi.encodePacked(player); + } + + /** + * @notice Encode all of a record's fields. + * @return The static (fixed length) data, encoded into a sequence of bytes. + * @return The lengths of the dynamic fields (packed into a single bytes32 value). + * @return The dyanmic (variable length) data, encoded into a sequence of bytes. + */ + function encode(address player) internal pure returns (bytes memory, PackedCounter, bytes memory) { + bytes memory _staticData = encodeStatic(player); + + PackedCounter _encodedLengths; + bytes memory _dynamicData; + + return (_staticData, _encodedLengths, _dynamicData); + } + + /** + * @notice Encode keys as a bytes32 array using this table's field layout. + */ + function encodeKeyTuple(bytes32 zone, int32 x, int32 y) internal pure returns (bytes32[] memory) { + bytes32[] memory _keyTuple = new bytes32[](3); + _keyTuple[0] = zone; + _keyTuple[1] = bytes32(uint256(int256(x))); + _keyTuple[2] = bytes32(uint256(int256(y))); + + return _keyTuple; + } +} diff --git a/e2e/packages/contracts/worlds.json b/e2e/packages/contracts/worlds.json index 287eacb247..a3a1b09f18 100644 --- a/e2e/packages/contracts/worlds.json +++ b/e2e/packages/contracts/worlds.json @@ -1,5 +1,5 @@ { "31337": { - "address": "0x97e55ad21ee5456964460c5465eac35861d2e797" + "address": "0x6e9474e9c83676b9a71133ff96db43e7aa0a4342" } } \ No newline at end of file diff --git a/e2e/packages/sync-test/data/readComponentValue.ts b/e2e/packages/sync-test/data/callPageFunction.ts similarity index 61% rename from e2e/packages/sync-test/data/readComponentValue.ts rename to e2e/packages/sync-test/data/callPageFunction.ts index fd46d6d41f..ece5b02e01 100644 --- a/e2e/packages/sync-test/data/readComponentValue.ts +++ b/e2e/packages/sync-test/data/callPageFunction.ts @@ -1,5 +1,4 @@ import { Page } from "@playwright/test"; -import { Entity } from "@latticexyz/recs"; import { deserialize, serialize 
} from "./utils"; /** @@ -7,17 +6,16 @@ import { deserialize, serialize } from "./utils"; * This is necessary because `page.evaluate` can only transmit serialisable data, * so we can't just return the entire client store (which includes functions to read data) */ -export async function readComponentValue( +export async function callPageFunction( page: Page, - componentName: string, - entity: Entity + functionName: string, + args: unknown[] ): Promise | undefined> { - const args = [componentName, entity, serialize.toString(), deserialize.toString()]; - const serializedValue = await page.evaluate(async (_args) => { - const [_componentName, _entity, _serializeString, _deserializeString] = _args; - const _serialize = deserializeFunction(_serializeString); - const _deserialize = deserializeFunction(_deserializeString); - const value = await window["getComponentValue"](_componentName, _entity); + const context = [functionName, args, serialize.toString(), deserialize.toString()] as const; + const serializedValue = await page.evaluate(async ([functionName, args, serializeString, deserializeString]) => { + const _serialize = deserializeFunction(serializeString); + const _deserialize = deserializeFunction(deserializeString); + const value = await (window as any)[functionName](...args); const serializedValue = value ? _serialize(value) : undefined; return serializedValue; @@ -27,7 +25,7 @@ export async function readComponentValue( function deserializeFunction(serializedFunction: string) { return eval(`(() => ${serializedFunction})()`); } - }, args); + }, context); return serializedValue ? 
deserialize(serializedValue) : undefined; } diff --git a/e2e/packages/sync-test/data/expectClientData.ts b/e2e/packages/sync-test/data/expectClientData.ts index 92e81c323f..e2861f15cd 100644 --- a/e2e/packages/sync-test/data/expectClientData.ts +++ b/e2e/packages/sync-test/data/expectClientData.ts @@ -1,8 +1,8 @@ import { Page, expect } from "@playwright/test"; import { Data } from "./types"; import config from "../../contracts/mud.config"; -import { readComponentValue } from "./readComponentValue"; import { encodeEntity } from "@latticexyz/store-sync/recs"; +import { callPageFunction } from "./callPageFunction"; /** * Confirms that the client state equals the given state by reading from the client's data store @@ -10,7 +10,10 @@ import { encodeEntity } from "@latticexyz/store-sync/recs"; export async function expectClientData(page: Page, data: Data) { for (const [table, records] of Object.entries(data)) { for (const record of records) { - const value = await readComponentValue(page, table, encodeEntity(config.tables[table].keySchema, record.key)); + const value = await callPageFunction(page, "getComponentValue", [ + table, + encodeEntity(config.tables[table].keySchema, record.key), + ]); expect(value).toMatchObject(record.value); } } diff --git a/e2e/packages/sync-test/data/index.ts b/e2e/packages/sync-test/data/index.ts deleted file mode 100644 index 045a4d5727..0000000000 --- a/e2e/packages/sync-test/data/index.ts +++ /dev/null @@ -1,10 +0,0 @@ -export * from "./callWorld"; -export * from "./encodeTestData"; -export * from "./expectClientData"; -export * from "./mergeTestData"; -export * from "./numberListSystem"; -export * from "./readComponentValue"; -export * from "./setContractData"; -export * from "./testData"; -export * from "./types"; -export * from "./waitForInitialSync"; diff --git a/e2e/packages/sync-test/indexerSync.test.ts b/e2e/packages/sync-test/indexerSync.test.ts index 093ff0dff5..61bc5a0e5f 100644 --- a/e2e/packages/sync-test/indexerSync.test.ts 
+++ b/e2e/packages/sync-test/indexerSync.test.ts @@ -1,6 +1,6 @@ -import { afterEach, beforeEach, describe, test } from "vitest"; +import { afterEach, beforeEach, describe, expect, it } from "vitest"; import type { ViteDevServer } from "vite"; -import { expect, Browser, Page } from "@playwright/test"; +import { Browser, Page } from "@playwright/test"; import { createAsyncErrorHandler } from "./asyncErrors"; import { deployContracts, @@ -9,21 +9,17 @@ import { startIndexer, openClientWithRootAccount, } from "./setup"; -import { - setContractData, - expectClientData, - testData1, - mergeTestData, - testData2, - waitForInitialSync, - push, - pushRange, - pop, -} from "./data"; import { range } from "@latticexyz/utils"; import path from "node:path"; import { rpcHttpUrl } from "./setup/constants"; import { z } from "zod"; +import { callPageFunction } from "./data/callPageFunction"; +import { expectClientData } from "./data/expectClientData"; +import { mergeTestData } from "./data/mergeTestData"; +import { push, pushRange, pop } from "./data/numberListSystem"; +import { setContractData } from "./data/setContractData"; +import { testData1, testData2 } from "./data/testData"; +import { waitForInitialSync } from "./data/waitForInitialSync"; const env = z .object({ @@ -57,7 +53,7 @@ describe("Sync from indexer", async () => { asyncErrorHandler.resetErrors(); }); - test("should log error if indexer is offline", async () => { + it("should log error if indexer is offline", async () => { await openClientWithRootAccount(page, { indexerUrl: `http://127.0.0.1:9999/trpc` }); await waitForInitialSync(page); @@ -87,7 +83,7 @@ describe("Sync from indexer", async () => { await indexer.kill(); }); - test("should sync test data", async () => { + it("should sync test data", async () => { await openClientWithRootAccount(page, { indexerUrl: indexer.url }); await waitForInitialSync(page); @@ -107,7 +103,7 @@ describe("Sync from indexer", async () => { asyncErrorHandler.expectNoAsyncErrors(); 
}); - test("should sync number list modified via system", async () => { + it("should sync number list modified via system", async () => { await openClientWithRootAccount(page, { indexerUrl: indexer.url }); await waitForInitialSync(page); @@ -126,5 +122,27 @@ describe("Sync from indexer", async () => { // Should not have thrown errors asyncErrorHandler.expectNoAsyncErrors(); }); + + it("should only have filtered position data", async () => { + await openClientWithRootAccount(page, { indexerUrl: indexer.url }); + await waitForInitialSync(page); + + const entities = await callPageFunction(page, "getKeys", ["Position"]); + expect(entities).toEqual([ + { + x: 1, + y: 1, + zone: "0x6d61703100000000000000000000000000000000000000000000000000000000", + }, + { + x: 2, + y: -2, + zone: "0x6d61703100000000000000000000000000000000000000000000000000000000", + }, + ]); + + // Should not have thrown errors + asyncErrorHandler.expectNoAsyncErrors(); + }); }); }); diff --git a/e2e/packages/sync-test/rpcSync.test.ts b/e2e/packages/sync-test/rpcSync.test.ts index 39e786a319..b90be12ddc 100644 --- a/e2e/packages/sync-test/rpcSync.test.ts +++ b/e2e/packages/sync-test/rpcSync.test.ts @@ -1,21 +1,17 @@ -import { afterEach, beforeEach, describe, test } from "vitest"; +import { afterEach, beforeEach, describe, expect, it } from "vitest"; import type { ViteDevServer } from "vite"; import { Browser, Page } from "@playwright/test"; import { createAsyncErrorHandler } from "./asyncErrors"; import { deployContracts, startViteServer, startBrowserAndPage, openClientWithRootAccount } from "./setup"; -import { - setContractData, - expectClientData, - testData1, - testData2, - mergeTestData, - waitForInitialSync, - push, - pushRange, - pop, -} from "./data"; import { range } from "@latticexyz/utils"; import { rpcHttpUrl } from "./setup/constants"; +import { callPageFunction } from "./data/callPageFunction"; +import { expectClientData } from "./data/expectClientData"; +import { mergeTestData } from 
"./data/mergeTestData"; +import { push, pushRange, pop } from "./data/numberListSystem"; +import { setContractData } from "./data/setContractData"; +import { testData1, testData2 } from "./data/testData"; +import { waitForInitialSync } from "./data/waitForInitialSync"; describe("Sync from RPC", async () => { const asyncErrorHandler = createAsyncErrorHandler(); @@ -40,7 +36,7 @@ describe("Sync from RPC", async () => { await webserver.close(); }); - test("should sync test data", async () => { + it("should sync test data", async () => { await openClientWithRootAccount(page); await waitForInitialSync(page); @@ -60,7 +56,7 @@ describe("Sync from RPC", async () => { asyncErrorHandler.expectNoAsyncErrors(); }); - test("should sync number list modified via system", async () => { + it("should sync number list modified via system", async () => { await openClientWithRootAccount(page); await waitForInitialSync(page); @@ -79,4 +75,24 @@ describe("Sync from RPC", async () => { // Should not have thrown errors asyncErrorHandler.expectNoAsyncErrors(); }); + + it("should only have filtered position data", async () => { + await openClientWithRootAccount(page); + await waitForInitialSync(page); + + expect(await callPageFunction(page, "getKeys", ["Position"])).toMatchInlineSnapshot(` + [ + { + "x": 1, + "y": 1, + "zone": "0x6d61703100000000000000000000000000000000000000000000000000000000", + }, + { + "x": 2, + "y": -2, + "zone": "0x6d61703100000000000000000000000000000000000000000000000000000000", + }, + ] + `); + }); }); diff --git a/e2e/packages/sync-test/tsconfig.json b/e2e/packages/sync-test/tsconfig.json index 34a228b8fc..4da186be7b 100644 --- a/e2e/packages/sync-test/tsconfig.json +++ b/e2e/packages/sync-test/tsconfig.json @@ -1,6 +1,5 @@ { "compilerOptions": { - "types": ["vitest/globals"], "target": "ESNext", "module": "ESNext", "lib": ["ESNext", "DOM"], diff --git a/packages/store-indexer/package.json b/packages/store-indexer/package.json index 344091627d..da20550a2b 100644 
--- a/packages/store-indexer/package.json +++ b/packages/store-indexer/package.json @@ -37,6 +37,7 @@ "@fastify/cors": "^8.3.0", "@latticexyz/block-logs-stream": "workspace:*", "@latticexyz/common": "workspace:*", + "@latticexyz/protocol-parser": "workspace:*", "@latticexyz/store": "workspace:*", "@latticexyz/store-sync": "workspace:*", "@trpc/client": "10.34.0", diff --git a/packages/store-indexer/src/postgres/createQueryAdapter.ts b/packages/store-indexer/src/postgres/createQueryAdapter.ts index 02eba0a39e..43eb0b0b9a 100644 --- a/packages/store-indexer/src/postgres/createQueryAdapter.ts +++ b/packages/store-indexer/src/postgres/createQueryAdapter.ts @@ -4,6 +4,7 @@ import { buildTable, buildInternalTables, getTables } from "@latticexyz/store-sy import { QueryAdapter } from "@latticexyz/store-sync/trpc-indexer"; import { debug } from "../debug"; import { getAddress } from "viem"; +import { decodeDynamicField } from "@latticexyz/protocol-parser"; /** * Creates a query adapter for the tRPC server/client to query data from Postgres. @@ -13,7 +14,10 @@ import { getAddress } from "viem"; */ export async function createQueryAdapter(database: PgDatabase): Promise { const adapter: QueryAdapter = { - async findAll({ chainId, address, tableIds = [] }) { + async findAll({ chainId, address, filters = [] }) { + // If _any_ filter has a table ID, this will filter down all data to just those tables. Which mean we can't yet mix table filters with key-only filters. 
+ // TODO: improve this so we can express this in the query (need to be able to query data across tables more easily) + const tableIds = Array.from(new Set(filters.map((filter) => filter.tableId))); const tables = (await getTables(database)) .filter((table) => address == null || getAddress(address) === getAddress(table.address)) .filter((table) => !tableIds.length || tableIds.includes(table.tableId)); @@ -22,9 +26,20 @@ export async function createQueryAdapter(database: PgDatabase): Promise { const sqliteTable = buildTable(table); const records = await database.select().from(sqliteTable).where(eq(sqliteTable.__isDeleted, false)).execute(); + const filteredRecords = !filters.length + ? records + : records.filter((record) => { + const keyTuple = decodeDynamicField("bytes32[]", record.__key); + return filters.some( + (filter) => + filter.tableId === table.tableId && + (filter.key0 == null || filter.key0 === keyTuple[0]) && + (filter.key1 == null || filter.key1 === keyTuple[1]) + ); + }); return { ...table, - records: records.map((record) => ({ + records: filteredRecords.map((record) => ({ key: Object.fromEntries(Object.entries(table.keySchema).map(([name]) => [name, record[name]])), value: Object.fromEntries(Object.entries(table.valueSchema).map(([name]) => [name, record[name]])), })), diff --git a/packages/store-indexer/src/sqlite/createQueryAdapter.ts b/packages/store-indexer/src/sqlite/createQueryAdapter.ts index 23286cf556..aee529e4f1 100644 --- a/packages/store-indexer/src/sqlite/createQueryAdapter.ts +++ b/packages/store-indexer/src/sqlite/createQueryAdapter.ts @@ -4,6 +4,7 @@ import { buildTable, chainState, getTables } from "@latticexyz/store-sync/sqlite import { QueryAdapter } from "@latticexyz/store-sync/trpc-indexer"; import { debug } from "../debug"; import { getAddress } from "viem"; +import { decodeDynamicField } from "@latticexyz/protocol-parser"; /** * Creates a storage adapter for the tRPC server/client to query data from SQLite. 
@@ -13,7 +14,10 @@ import { getAddress } from "viem"; */ export async function createQueryAdapter(database: BaseSQLiteDatabase<"sync", any>): Promise { const adapter: QueryAdapter = { - async findAll({ chainId, address, tableIds = [] }) { + async findAll({ chainId, address, filters = [] }) { + // If _any_ filter has a table ID, this will filter down all data to just those tables. Which means we can't yet mix table filters with key-only filters. + // TODO: improve this so we can express this in the query (need to be able to query data across tables more easily) + const tableIds = Array.from(new Set(filters.map((filter) => filter.tableId))); const tables = getTables(database) .filter((table) => address == null || getAddress(address) === getAddress(table.address)) .filter((table) => !tableIds.length || tableIds.includes(table.tableId)); @@ -21,9 +25,20 @@ const tablesWithRecords = tables.map((table) => { const sqliteTable = buildTable(table); const records = database.select().from(sqliteTable).where(eq(sqliteTable.__isDeleted, false)).all(); + const filteredRecords = !filters.length + ? 
records + : records.filter((record) => { + const keyTuple = decodeDynamicField("bytes32[]", record.__key); + return filters.some( + (filter) => + filter.tableId === table.tableId && + (filter.key0 == null || filter.key0 === keyTuple[0]) && + (filter.key1 == null || filter.key1 === keyTuple[1]) + ); + }); return { ...table, - records: records.map((record) => ({ + records: filteredRecords.map((record) => ({ key: Object.fromEntries(Object.entries(table.keySchema).map(([name]) => [name, record[name]])), value: Object.fromEntries(Object.entries(table.valueSchema).map(([name]) => [name, record[name]])), })), diff --git a/packages/store-sync/src/common.ts b/packages/store-sync/src/common.ts index 4f8a714d3b..6caa2f46d7 100644 --- a/packages/store-sync/src/common.ts +++ b/packages/store-sync/src/common.ts @@ -26,6 +26,24 @@ export type TableWithRecords = Table & { records: TableRecord[] }; export type StoreEventsLog = Log; export type BlockLogs = { blockNumber: StoreEventsLog["blockNumber"]; logs: StoreEventsLog[] }; +// only two keys for now, to reduce complexity of creating indexes on SQL tables +// TODO: make tableId optional to enable filtering just on keys (any table) +// this is blocked on reworking data storage so we can more easily query data across tables +export type SyncFilter = { + /** + * Filter by the `bytes32` table ID. + */ + tableId: Hex; + /** + * Optionally filter by the `bytes32` value of the key in the first position (index zero of the record's key tuple). + */ + key0?: Hex; + /** + * Optionally filter by the `bytes32` value of the key in the second position (index one of the record's key tuple). + */ + key1?: Hex; +}; + export type SyncOptions = { /** * MUD config @@ -42,8 +60,12 @@ export type SyncOptions = { */ address?: Address; /** - * Optional table IDs to filter indexer state and RPC state. + * Optional filters for indexer and RPC state. Useful to narrow down the data received by the client for large worlds. 
*/ + filters?: SyncFilter[]; + /** + * @deprecated Use `filters` option instead. + * */ tableIds?: Hex[]; /** * Optional block number to start indexing from. Useful for resuming the indexer from a particular point in time or starting after a particular contract deployment. diff --git a/packages/store-sync/src/createStoreSync.ts b/packages/store-sync/src/createStoreSync.ts index 0fec87e5b2..c8d926d23c 100644 --- a/packages/store-sync/src/createStoreSync.ts +++ b/packages/store-sync/src/createStoreSync.ts @@ -4,6 +4,7 @@ import { StorageAdapter, StorageAdapterBlock, StorageAdapterLog, + SyncFilter, SyncOptions, SyncResult, TableWithRecords, @@ -36,6 +37,8 @@ import { internalTableIds } from "./internalTableIds"; const debug = parentDebug.extend("createStoreSync"); +const defaultFilters: SyncFilter[] = internalTableIds.map((tableId) => ({ tableId })); + type CreateStoreSyncOptions = SyncOptions & { storageAdapter: StorageAdapter; onProgress?: (opts: { @@ -52,13 +55,17 @@ export async function createStoreSync onProgress, publicClient, address, + filters: initialFilters = [], tableIds = [], startBlock: initialStartBlock = 0n, maxBlockRange, initialState, indexerUrl, }: CreateStoreSyncOptions): Promise { - const includedTableIds = new Set(tableIds.length ? [...internalTableIds, ...tableIds] : []); + const filters: SyncFilter[] = + initialFilters.length || tableIds.length + ? [...initialFilters, ...tableIds.map((tableId) => ({ tableId })), ...defaultFilters] + : []; const initialState$ = defer( async (): Promise< | { @@ -82,7 +89,7 @@ export async function createStoreSync const indexer = createIndexerClient({ url: indexerUrl }); const chainId = publicClient.chain?.id ?? 
(await publicClient.getChainId()); - const result = await indexer.findAll.query({ chainId, address, tableIds: Array.from(includedTableIds) }); + const result = await indexer.findAll.query({ chainId, address, filters }); onProgress?.({ step: SyncStep.SNAPSHOT, @@ -198,8 +205,21 @@ export async function createStoreSync publicClient, address, events: storeEventsAbi, + // TODO: pass filters in here so we can filter at RPC level maxBlockRange, }), + map(({ toBlock, logs }) => { + if (!filters.length) return { toBlock, logs }; + const filteredLogs = logs.filter((log) => + filters.some( + (filter) => + filter.tableId === log.args.tableId && + (filter.key0 == null || filter.key0 === log.args.keyTuple[0]) && + (filter.key1 == null || filter.key1 === log.args.keyTuple[1]) + ) + ); + return { toBlock, logs: filteredLogs }; + }), mergeMap(({ toBlock, logs }) => from(groupLogsByBlockNumber(logs, toBlock))), share() ); diff --git a/packages/store-sync/src/postgres/syncToPostgres.ts b/packages/store-sync/src/postgres/syncToPostgres.ts index 1042ab2832..10ea4cf2af 100644 --- a/packages/store-sync/src/postgres/syncToPostgres.ts +++ b/packages/store-sync/src/postgres/syncToPostgres.ts @@ -28,23 +28,15 @@ export async function syncToPostgres( config, database, publicClient, - address, - startBlock, - maxBlockRange, - indexerUrl, - initialState, startSync = true, + ...syncOptions }: SyncToPostgresOptions): Promise { const { storageAdapter } = await postgresStorage({ database, publicClient, config }); const storeSync = await createStoreSync({ storageAdapter, config, - address, publicClient, - startBlock, - maxBlockRange, - indexerUrl, - initialState, + ...syncOptions, }); const sub = startSync ? 
storeSync.storedBlockLogs$.subscribe() : null; diff --git a/packages/store-sync/src/recs/syncToRecs.ts b/packages/store-sync/src/recs/syncToRecs.ts index 4759e5f538..87dc02b737 100644 --- a/packages/store-sync/src/recs/syncToRecs.ts +++ b/packages/store-sync/src/recs/syncToRecs.ts @@ -20,13 +20,8 @@ type SyncToRecsResult = SyncResult & export async function syncToRecs({ world, config, - address, - publicClient, - startBlock, - maxBlockRange, - initialState, - indexerUrl, startSync = true, + ...syncOptions }: SyncToRecsOptions): Promise> { const { storageAdapter, components } = recsStorage({ world, @@ -38,12 +33,7 @@ export async function syncToRecs({ const storeSync = await createStoreSync({ storageAdapter, config, - address, - publicClient, - startBlock, - maxBlockRange, - indexerUrl, - initialState, + ...syncOptions, onProgress: ({ step, percentage, latestBlockNumber, lastBlockNumberProcessed, message }) => { // already live, no need for more progress updates if (getComponentValue(components.SyncProgress, singletonEntity)?.step === SyncStep.LIVE) return; diff --git a/packages/store-sync/src/sqlite/syncToSqlite.ts b/packages/store-sync/src/sqlite/syncToSqlite.ts index 210b9677d0..b7751b0bd0 100644 --- a/packages/store-sync/src/sqlite/syncToSqlite.ts +++ b/packages/store-sync/src/sqlite/syncToSqlite.ts @@ -28,22 +28,14 @@ export async function syncToSqlite({ config, database, publicClient, - address, - startBlock, - maxBlockRange, - indexerUrl, - initialState, startSync = true, + ...syncOptions }: SyncToSqliteOptions): Promise { const storeSync = await createStoreSync({ storageAdapter: await sqliteStorage({ database, publicClient, config }), config, - address, publicClient, - startBlock, - maxBlockRange, - indexerUrl, - initialState, + ...syncOptions, }); const sub = startSync ? 
storeSync.storedBlockLogs$.subscribe() : null; diff --git a/packages/store-sync/src/trpc-indexer/common.ts b/packages/store-sync/src/trpc-indexer/common.ts index dc5d7005f4..731c940362 100644 --- a/packages/store-sync/src/trpc-indexer/common.ts +++ b/packages/store-sync/src/trpc-indexer/common.ts @@ -1,8 +1,8 @@ import { Hex } from "viem"; -import { TableWithRecords } from "../common"; +import { SyncFilter, TableWithRecords } from "../common"; export type QueryAdapter = { - findAll: (opts: { chainId: number; address?: Hex; tableIds?: Hex[] }) => Promise<{ + findAll: (opts: { chainId: number; address?: Hex; filters?: SyncFilter[] }) => Promise<{ blockNumber: bigint | null; tables: TableWithRecords[]; }>; diff --git a/packages/store-sync/src/trpc-indexer/createAppRouter.ts b/packages/store-sync/src/trpc-indexer/createAppRouter.ts index 71a4fae085..4d9272e61d 100644 --- a/packages/store-sync/src/trpc-indexer/createAppRouter.ts +++ b/packages/store-sync/src/trpc-indexer/createAppRouter.ts @@ -16,13 +16,21 @@ export function createAppRouter() { z.object({ chainId: z.number(), address: z.string().refine(isHex).optional(), - tableIds: z.array(z.string().refine(isHex)).optional(), + filters: z + .array( + z.object({ + tableId: z.string().refine(isHex), + key0: z.string().refine(isHex).optional(), + key1: z.string().refine(isHex).optional(), + }) + ) + .optional(), }) ) .query(async (opts): ReturnType => { const { queryAdapter } = opts.ctx; - const { chainId, address, tableIds } = opts.input; - return queryAdapter.findAll({ chainId, address, tableIds }); + const { chainId, address, filters } = opts.input; + return queryAdapter.findAll({ chainId, address, filters }); }), }); } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 203cc1065b..a73336d757 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -821,6 +821,9 @@ importers: '@latticexyz/common': specifier: workspace:* version: link:../common + '@latticexyz/protocol-parser': + specifier: workspace:* + version: 
link:../protocol-parser '@latticexyz/store': specifier: workspace:* version: link:../store