diff --git a/packages/config/src/common.ts b/packages/config/src/common.ts index 3577a2318d..e37b6a5da7 100644 --- a/packages/config/src/common.ts +++ b/packages/config/src/common.ts @@ -23,10 +23,20 @@ export type Schema = { }; }; +export type KeySchema = { + readonly [fieldName: string]: { + /** the Solidity primitive ABI type */ + readonly type: StaticAbiType; + /** the user defined type or Solidity primitive ABI type */ + readonly internalType: string; + }; +}; + export type Table = { readonly label: string; readonly type: satisfy; readonly namespace: string; + readonly namespaceLabel: string; readonly name: string; readonly tableId: Hex; readonly schema: Schema; diff --git a/packages/config/src/exports/index.ts b/packages/config/src/exports/index.ts index 8805ded492..0ea1270ce4 100644 --- a/packages/config/src/exports/index.ts +++ b/packages/config/src/exports/index.ts @@ -4,4 +4,4 @@ * Be sure we're ready to commit to these being supported and changes made backward compatible! */ -export type { AbiType, StaticAbiType, DynamicAbiType, Schema, Table, Tables } from "../common"; +export type { AbiType, StaticAbiType, DynamicAbiType, Schema, KeySchema, Table, Tables } from "../common"; diff --git a/packages/protocol-parser/src/common.ts b/packages/protocol-parser/src/common.ts index 904a646ef1..fa72916f82 100644 --- a/packages/protocol-parser/src/common.ts +++ b/packages/protocol-parser/src/common.ts @@ -1,3 +1,4 @@ +import { Table } from "@latticexyz/config"; import { DynamicAbiType, SchemaAbiType, @@ -51,3 +52,5 @@ export type ValueArgs = { encodedLengths: Hex; dynamicData: Hex; }; + +export type PartialTable = Pick; diff --git a/packages/protocol-parser/src/decodeDozerField.test.ts b/packages/protocol-parser/src/decodeDozerField.test.ts new file mode 100644 index 0000000000..6fb7cec67c --- /dev/null +++ b/packages/protocol-parser/src/decodeDozerField.test.ts @@ -0,0 +1,9 @@ +import { describe, expect, it } from "vitest"; +import { decodeDozerField } from "./decodeDozerField"; + +describe("decodeDozerField", () => { + it("should decode numbers to the expected value type", () => { + expect(decodeDozerField("uint48", "1")).toBe(1); + expect(decodeDozerField("uint56", "1")).toBe(1n); + }); +}); diff --git a/packages/protocol-parser/src/decodeDozerField.ts b/packages/protocol-parser/src/decodeDozerField.ts new file mode 100644 index 0000000000..0e499ae9bc --- /dev/null +++ b/packages/protocol-parser/src/decodeDozerField.ts @@ -0,0 +1,24 @@ +import { AbiType } from "@latticexyz/config"; +import { + ArrayAbiType, + SchemaAbiTypeToPrimitiveType, + arrayToStaticAbiType, + schemaAbiTypeToDefaultValue, +} from "@latticexyz/schema-type/internal"; + +export function decodeDozerField( + abiType: abiType, + data: string | boolean | string[], +): SchemaAbiTypeToPrimitiveType { + const defaultValueType = typeof schemaAbiTypeToDefaultValue[abiType]; + if (Array.isArray(data)) { + return data.map((element) => decodeDozerField(arrayToStaticAbiType(abiType as ArrayAbiType), element)) as never; + } + if (defaultValueType === "number") { + return Number(data) as never; + } + if (defaultValueType === "bigint") { + return BigInt(data) as never; + } + return data as never; +} diff --git a/packages/protocol-parser/src/decodeDozerRecords.test.ts b/packages/protocol-parser/src/decodeDozerRecords.test.ts new file mode 100644 index 0000000000..91d00b5542 --- /dev/null +++ b/packages/protocol-parser/src/decodeDozerRecords.test.ts @@ -0,0 +1,38 @@ +import { describe, expect, it } from "vitest"; +import { 
decodeDozerRecords } from "./decodeDozerRecords"; + +describe("decodeDozerRecord", () => { + const schema = { + address: { type: "address", internalType: "address" }, + uint256: { type: "uint256", internalType: "uint256" }, + uint32: { type: "uint32", internalType: "uint32" }, + bool: { type: "bool", internalType: "bool" }, + bytes: { type: "bytes", internalType: "bytes" }, + string: { type: "string", internalType: "string" }, + uint32Arr: { type: "uint32[]", internalType: "uint32[]" }, + } as const; + + it("decodes dozer record", () => { + const dozerRecord = [ + "0x0000000000000000000000000000000000000000", + "1234", + "1234", + true, + "0x1234", + "hello world", + ["1234", "5678"], + ]; + const decodedRecord = { + address: "0x0000000000000000000000000000000000000000", + uint256: 1234n, + uint32: 1234, + bool: true, + bytes: "0x1234", + string: "hello world", + uint32Arr: [1234, 5678], + }; + + const decoded = decodeDozerRecords({ schema, records: [dozerRecord] }); + expect(decoded).toStrictEqual([decodedRecord]); + }); +}); diff --git a/packages/protocol-parser/src/decodeDozerRecords.ts b/packages/protocol-parser/src/decodeDozerRecords.ts new file mode 100644 index 0000000000..822f3d354b --- /dev/null +++ b/packages/protocol-parser/src/decodeDozerRecords.ts @@ -0,0 +1,44 @@ +import { Schema } from "@latticexyz/config"; +import { decodeDozerField } from "./decodeDozerField"; +import { getSchemaPrimitives } from "./getSchemaPrimitives"; + +type DozerQueryHeader = string[]; +type DozerQueryRecord = (string | boolean | string[])[]; + +// First item in the result is the header +export type DozerQueryResult = [DozerQueryHeader, ...DozerQueryRecord[]]; + +export type DecodeDozerRecordsArgs = { + schema: Schema; + records: DozerQueryResult; +}; + +/** + * Trim the header row from the query result + */ +function trimHeader(result: DozerQueryResult): DozerQueryRecord[] { + return result.slice(1); +} + +export type DecodeDozerRecordsResult = getSchemaPrimitives[]; + +export function decodeDozerRecords({ + schema, + records, +}: DecodeDozerRecordsArgs): DecodeDozerRecordsResult { + const fieldNames = Object.keys(schema); + if (records.length > 0 && fieldNames.length !== records[0].length) { + throw new Error( + `Mismatch between schema and query result.\nSchema: [${fieldNames.join(", ")}]\nQuery result: [${records[0].join(", ")}]`, + ); + } + + return trimHeader(records).map((record) => + Object.fromEntries( + Object.keys(schema).map((fieldName, index) => [ + fieldName, + decodeDozerField(schema[fieldName].type, record[index]), + ]), + ), + ) as never; +} diff --git a/packages/protocol-parser/src/exports/internal.ts b/packages/protocol-parser/src/exports/internal.ts index 6f2d68af6b..603a0fd985 100644 --- a/packages/protocol-parser/src/exports/internal.ts +++ b/packages/protocol-parser/src/exports/internal.ts @@ -25,6 +25,11 @@ export * from "../schemaToHex"; export * from "../staticDataLength"; export * from "../valueSchemaToFieldLayoutHex"; export * from "../valueSchemaToHex"; +export * from "../decodeDozerField"; +export * from "../decodeDozerRecords"; + +export * from "../getKey"; +export * from "../getValue"; export * from "../getKeySchema"; export * from "../getValueSchema"; diff --git a/packages/protocol-parser/src/getKey.test.ts b/packages/protocol-parser/src/getKey.test.ts new file mode 100644 index 0000000000..46e50255d2 --- /dev/null +++ b/packages/protocol-parser/src/getKey.test.ts @@ -0,0 +1,25 @@ +import { describe, expect, it } from "vitest"; +import { getKey } from "./getKey"; + 
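For orientation, a minimal usage sketch of the Dozer decoding added above (not part of the patch; it assumes the helpers are re-exported from `@latticexyz/protocol-parser/internal`, as wired up in exports/internal.ts below). The first row of a Dozer query result is the header, which decodeDozerRecords strips before decoding each remaining row against the schema:

import { decodeDozerRecords } from "@latticexyz/protocol-parser/internal";

const schema = {
  player: { type: "address", internalType: "address" },
  score: { type: "uint256", internalType: "uint256" },
} as const;

// [header, ...records], as returned by a Dozer SQL endpoint
const result = decodeDozerRecords({
  schema,
  records: [
    ["player", "score"],
    ["0x0000000000000000000000000000000000000001", "42"],
  ],
});
// result: [{ player: "0x0000000000000000000000000000000000000001", score: 42n }]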
+describe("getKey", () => { + it("should return the key fields of the record", () => { + const table = { + schema: { + key1: { type: "uint32", internalType: "uint32" }, + key2: { type: "uint256", internalType: "uint256" }, + value1: { type: "string", internalType: "string" }, + value2: { type: "string", internalType: "string" }, + }, + key: ["key1", "key2"], + } as const; + const record = { key1: 1, key2: 2n, value1: "hello", value2: "world" }; + const key = getKey(table, record); + + expect(key).toMatchInlineSnapshot(` + { + "key1": 1, + "key2": 2n, + } + `); + }); +}); diff --git a/packages/protocol-parser/src/getKey.ts b/packages/protocol-parser/src/getKey.ts new file mode 100644 index 0000000000..b633363810 --- /dev/null +++ b/packages/protocol-parser/src/getKey.ts @@ -0,0 +1,10 @@ +import { getKeySchema } from "./getKeySchema"; +import { getSchemaPrimitives } from "./getSchemaPrimitives"; +import { PartialTable } from "./common"; + +export function getKey( + table: table, + record: getSchemaPrimitives, +): getSchemaPrimitives> { + return Object.fromEntries(table.key.map((fieldName) => [fieldName, record[fieldName]])) as never; +} diff --git a/packages/protocol-parser/src/getKeySchema.ts b/packages/protocol-parser/src/getKeySchema.ts index 9f8bd452fa..4fab4a77bf 100644 --- a/packages/protocol-parser/src/getKeySchema.ts +++ b/packages/protocol-parser/src/getKeySchema.ts @@ -1,11 +1,13 @@ -import { Schema, Table } from "@latticexyz/config"; +import { KeySchema, StaticAbiType, Table } from "@latticexyz/config"; type PartialTable = Pick; export type getKeySchema
<table extends PartialTable = PartialTable> = PartialTable extends table
-  ? Schema
+  ? KeySchema
   : {
-      readonly [fieldName in Extract<keyof table["schema"], table["key"][number]>]: table["schema"][fieldName];
+      readonly [fieldName in Extract<keyof table["schema"], table["key"][number]>]: table["schema"][fieldName] & {
+        type: StaticAbiType;
+      };
     };
 
 export function getKeySchema<table extends PartialTable>(table: table): getKeySchema<table>
{ diff --git a/packages/protocol-parser/src/getValue.test.ts b/packages/protocol-parser/src/getValue.test.ts new file mode 100644 index 0000000000..612270d26b --- /dev/null +++ b/packages/protocol-parser/src/getValue.test.ts @@ -0,0 +1,25 @@ +import { describe, expect, it } from "vitest"; +import { getValue } from "./getValue"; + +describe("getValue", () => { + it("should return the key fields of the record", () => { + const table = { + schema: { + key1: { type: "uint32", internalType: "uint32" }, + key2: { type: "uint256", internalType: "uint256" }, + value1: { type: "string", internalType: "string" }, + value2: { type: "string", internalType: "string" }, + }, + key: ["key1", "key2"], + } as const; + const record = { key1: 1, key2: 2n, value1: "hello", value2: "world" }; + const value = getValue(table, record); + + expect(value).toMatchInlineSnapshot(` + { + "value1": "hello", + "value2": "world", + } + `); + }); +}); diff --git a/packages/protocol-parser/src/getValue.ts b/packages/protocol-parser/src/getValue.ts new file mode 100644 index 0000000000..f4591ad6a1 --- /dev/null +++ b/packages/protocol-parser/src/getValue.ts @@ -0,0 +1,14 @@ +import { PartialTable } from "./common"; +import { getValueSchema } from "./getValueSchema"; +import { getSchemaPrimitives } from "./getSchemaPrimitives"; + +export function getValue
( + table: table, + record: getSchemaPrimitives, +): getSchemaPrimitives> { + return Object.fromEntries( + Object.keys(table.schema) + .filter((fieldName) => !table.key.includes(fieldName)) + .map((fieldName) => [fieldName, record[fieldName]]), + ) as never; +} diff --git a/packages/stash/CHANGELOG.md b/packages/stash/CHANGELOG.md new file mode 100644 index 0000000000..8ae6429759 --- /dev/null +++ b/packages/stash/CHANGELOG.md @@ -0,0 +1 @@ +# @latticexyz/stash diff --git a/packages/stash/README.md b/packages/stash/README.md new file mode 100644 index 0000000000..809d0e6255 --- /dev/null +++ b/packages/stash/README.md @@ -0,0 +1,3 @@ +# Stash + +High performance client store and query engine for MUD diff --git a/packages/stash/package.json b/packages/stash/package.json new file mode 100644 index 0000000000..f10efe9063 --- /dev/null +++ b/packages/stash/package.json @@ -0,0 +1,67 @@ +{ + "name": "@latticexyz/stash", + "version": "2.0.12", + "description": "High performance client store and query engine for MUD", + "repository": { + "type": "git", + "url": "https://github.com/latticexyz/mud.git", + "directory": "packages/stash" + }, + "license": "MIT", + "type": "module", + "exports": { + ".": "./dist/index.js", + "./internal": "./dist/internal.js", + "./recs": "./dist/recs.js" + }, + "typesVersions": { + "*": { + "index": [ + "./dist/index.d.ts" + ], + "internal": [ + "./dist/internal.d.ts" + ], + "recs": [ + "./dist/recs.d.ts" + ] + } + }, + "files": [ + "dist" + ], + "scripts": { + "bench": "tsx src/bench.ts", + "build": "tsup", + "clean": "rimraf dist", + "dev": "tsup --watch", + "test": "vitest typecheck --run --passWithNoTests && vitest --run --passWithNoTests", + "test:ci": "pnpm run test" + }, + "dependencies": { + "@arktype/util": "0.0.40", + "@latticexyz/config": "workspace:*", + "@latticexyz/protocol-parser": "workspace:*", + "@latticexyz/recs": "workspace:*", + "@latticexyz/schema-type": "workspace:*", + "@latticexyz/store": "workspace:*", + "mutative": "^1.0.6", + "react": "^18.2.0", + "rxjs": "7.5.5", + "viem": "2.9.20", + "zustand": "^4.3.7", + "zustand-mutative": "^1.0.1" + }, + "devDependencies": { + "@arktype/attest": "0.7.5", + "@testing-library/react": "^16.0.0", + "@testing-library/react-hooks": "^8.0.1", + "@types/react": "18.2.22", + "react-dom": "^18.2.0", + "tsup": "^6.7.0", + "vitest": "0.34.6" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/packages/stash/src/README.md b/packages/stash/src/README.md new file mode 100644 index 0000000000..3fd7cc2fb5 --- /dev/null +++ b/packages/stash/src/README.md @@ -0,0 +1,24 @@ +# TODOs + +- Set up performance benchmarks for setting records, reading records, running queries, updating records with subscribers +- Replace objects with Maps for performance ? (https://rehmat-sayany.medium.com/using-map-over-objects-in-javascript-a-performance-benchmark-ff2f351851ed) + - could be useful for TableRecords, Keys +- maybe add option to include records in the query result? +- Maybe turn `entityKey` into a tagged string? 
So we could make it the return type of `encodeKey`, + and allow us to use Symbol (to reduce memory overhead) or something else later without breaking change +- add more query fragments - ie GreaterThan, LessThan, Range, etc +- we might be able to enable different key shapes if we add something like a `keySelector` +- getKeySchema expects a full table as type, but only needs schema and key + +Ideas + +- Update streams: + - if each query added a new subscriber to the main state, each state update would trigger _every_ subscriber + - instead we could add a subscriber per table, which can be subscribed to again, so we only have to iterate through all tables once, + and then through all subscribers per table (only those who care about updates to this table) +- Query fragments: + - Instead of pre-defined query types (Has, HasValue, Not, NotValue), could we define fragments in a self-contained way, so + it's easy to add a new type of query fragment without changing the core code? + - The main complexity is the logic to initialize the initial set with the first query fragment, + but it's probably not that critical - we could just run the first fragment on all entities of the first table, + unless an initialSet is provided. diff --git a/packages/stash/src/actions/decodeKey.test.ts b/packages/stash/src/actions/decodeKey.test.ts new file mode 100644 index 0000000000..f8826ee2f5 --- /dev/null +++ b/packages/stash/src/actions/decodeKey.test.ts @@ -0,0 +1,28 @@ +import { describe, it } from "vitest"; +import { createStash } from "../createStash"; +import { defineStore } from "@latticexyz/store/config/v2"; +import { setRecord } from "./setRecord"; +import { encodeKey } from "./encodeKey"; +import { attest } from "@ark/attest"; +import { decodeKey } from "./decodeKey"; + +describe("decodeKey", () => { + it("should decode an encoded table key", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { field1: "string", field2: "uint32", field3: "uint256" }, + key: ["field2", "field3"], + }, + }, + }); + const stash = createStash(config); + const table = config.namespaces.namespace1.tables.table1; + const key = { field2: 1, field3: 2n }; + setRecord({ stash, table, key, record: { field1: "hello" } }); + + const encodedKey = encodeKey({ table, key }); + attest(decodeKey({ stash, table, encodedKey })).equals({ field2: 1, field3: 2n }); + }); +}); diff --git a/packages/stash/src/actions/decodeKey.ts b/packages/stash/src/actions/decodeKey.ts new file mode 100644 index 0000000000..6f6767eb77 --- /dev/null +++ b/packages/stash/src/actions/decodeKey.ts @@ -0,0 +1,22 @@ +import { Table } from "@latticexyz/config"; +import { Key, Stash } from "../common"; + +export type DecodeKeyArgs
<table extends Table = Table> = {
+  stash: Stash;
+  table: table;
+  encodedKey: string;
+};
+
+export type DecodeKeyResult<table extends Table = Table> = Key<table>;
+
+export function decodeKey<table extends Table>({
+  stash,
+  table,
+  encodedKey,
+}: DecodeKeyArgs<table>): DecodeKeyResult<table>
{ + const { namespaceLabel, label, key } = table; + const record = stash.get().records[namespaceLabel][label][encodedKey]; + + // Typecast needed because record values could be arrays, but we know they are not if they are key fields + return Object.fromEntries(Object.entries(record).filter(([field]) => key.includes(field))) as never; +} diff --git a/packages/stash/src/actions/deleteRecord.test.ts b/packages/stash/src/actions/deleteRecord.test.ts new file mode 100644 index 0000000000..921e6ccee2 --- /dev/null +++ b/packages/stash/src/actions/deleteRecord.test.ts @@ -0,0 +1,98 @@ +import { attest } from "@ark/attest"; +import { defineStore } from "@latticexyz/store"; +import { describe, it } from "vitest"; +import { createStash } from "../createStash"; +import { setRecord } from "./setRecord"; +import { deleteRecord } from "./deleteRecord"; + +describe("deleteRecord", () => { + it("should delete a record from the stash", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + + const stash = createStash(config); + + setRecord({ + stash, + table, + key: { field2: 1, field3: 2 }, + record: { field1: "hello" }, + }); + + setRecord({ + stash, + table, + key: { field2: 3, field3: 1 }, + record: { field1: "world" }, + }); + + deleteRecord({ + stash, + table, + key: { field2: 1, field3: 2 }, + }); + + attest(stash.get().records).snap({ + namespace1: { + table1: { + "3|1": { + field1: "world", + field2: 3, + field3: 1, + }, + }, + }, + }); + }); + + it("should throw a type error if an invalid key is provided", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + + const stash = createStash(config); + + attest(() => + deleteRecord({ + stash, + table, + // @ts-expect-error Property 'field3' is missing in type '{ field2: number; }' + key: { field2: 1 }, + }), + ).type.errors(`Property 'field3' is missing in type '{ field2: number; }'`); + + attest(() => + deleteRecord({ + stash, + table, + // @ts-expect-error Type 'string' is not assignable to type 'number' + key: { field2: 1, field3: "invalid" }, + }), + ).type.errors(`Type 'string' is not assignable to type 'number'`); + }); +}); diff --git a/packages/stash/src/actions/deleteRecord.ts b/packages/stash/src/actions/deleteRecord.ts new file mode 100644 index 0000000000..ff8a04e475 --- /dev/null +++ b/packages/stash/src/actions/deleteRecord.ts @@ -0,0 +1,37 @@ +import { Table } from "@latticexyz/config"; +import { Key, Stash } from "../common"; +import { encodeKey } from "./encodeKey"; +import { registerTable } from "./registerTable"; + +export type DeleteRecordArgs
<table extends Table = Table> = {
+  stash: Stash;
+  table: table;
+  key: Key<table>;
+};
+
+export type DeleteRecordResult = void;
+
+export function deleteRecord<table extends Table>({ stash, table, key }: DeleteRecordArgs<table>
): DeleteRecordResult { + const { namespaceLabel, label } = table; + + if (stash.get().config[namespaceLabel] == null) { + registerTable({ stash, table }); + } + + const encodedKey = encodeKey({ table, key }); + const prevRecord = stash.get().records[namespaceLabel][label][encodedKey]; + + // Early return if this record doesn't exist + if (prevRecord == null) return; + + // Delete record + delete stash._.state.records[namespaceLabel][label][encodedKey]; + + // Notify table subscribers + const updates = { [encodedKey]: { prev: prevRecord && { ...prevRecord }, current: undefined } }; + stash._.tableSubscribers[namespaceLabel][label].forEach((subscriber) => subscriber(updates)); + + // Notify stash subscribers + const storeUpdate = { config: {}, records: { [namespaceLabel]: { [label]: updates } } }; + stash._.storeSubscribers.forEach((subscriber) => subscriber(storeUpdate)); +} diff --git a/packages/stash/src/actions/encodeKey.test.ts b/packages/stash/src/actions/encodeKey.test.ts new file mode 100644 index 0000000000..10fd0d77a7 --- /dev/null +++ b/packages/stash/src/actions/encodeKey.test.ts @@ -0,0 +1,46 @@ +import { attest } from "@ark/attest"; +import { describe, it } from "vitest"; +import { encodeKey } from "./encodeKey"; +import { defineTable } from "@latticexyz/store/config/v2"; + +describe("encodeKey", () => { + it("should encode a key to a string", () => { + const table = defineTable({ + label: "test", + schema: { field1: "uint32", field2: "uint256", field3: "string" }, + key: ["field1", "field2"], + }); + attest(encodeKey({ table, key: { field1: 1, field2: 2n } })).snap("1|2"); + }); + + it("should throw a type error if an invalid key is provided", () => { + const table = defineTable({ + label: "test", + schema: { field1: "uint32", field2: "uint256", field3: "string" }, + key: ["field1", "field2"], + }); + + attest(() => + encodeKey({ + table, + // @ts-expect-error Property 'field2' is missing in type '{ field1: number; }' + key: { + field1: 1, + }, + }), + ) + .throws(`Provided key is missing field field2.`) + .type.errors(`Property 'field2' is missing in type '{ field1: number; }'`); + + attest( + encodeKey({ + table, + key: { + field1: 1, + // @ts-expect-error Type 'string' is not assignable to type 'bigint'. + field2: "invalid", + }, + }), + ).type.errors(`Type 'string' is not assignable to type 'bigint'.`); + }); +}); diff --git a/packages/stash/src/actions/encodeKey.ts b/packages/stash/src/actions/encodeKey.ts new file mode 100644 index 0000000000..5336a6493a --- /dev/null +++ b/packages/stash/src/actions/encodeKey.ts @@ -0,0 +1,27 @@ +import { Table } from "@latticexyz/config"; +import { Key } from "../common"; + +export type EncodeKeyArgs
<table extends Table = Table> = {
+  table: table;
+  key: Key<table>;
+};
+
+export type EncodeKeyResult = string;
+
+/**
+ * Encode a key object into a string that can be used as an index in the stash
+ * TODO: Benchmark performance of this function
+ */
+export function encodeKey<table extends Table>({ table, key }: EncodeKeyArgs<table>
): EncodeKeyResult { + const { key: keyOrder } = table; + + return keyOrder + .map((keyName) => { + const keyValue = key[keyName as never]; + if (keyValue == null) { + throw new Error(`Provided key is missing field ${keyName}.`); + } + return keyValue; + }) + .join("|"); +} diff --git a/packages/stash/src/actions/extend.test.ts b/packages/stash/src/actions/extend.test.ts new file mode 100644 index 0000000000..8eae889237 --- /dev/null +++ b/packages/stash/src/actions/extend.test.ts @@ -0,0 +1,15 @@ +import { describe, it } from "vitest"; +import { attest } from "@ark/attest"; +import { createStash } from "../createStash"; +import { extend } from "./extend"; + +describe("extend", () => { + it("should extend the stash API", () => { + const stash = createStash(); + const actions = { additional: (a: T): T => a }; + const extended = extend({ stash: stash, actions }); + + attest(); + attest(Object.keys(extended)).equals([...Object.keys(stash), "additional"]); + }); +}); diff --git a/packages/stash/src/actions/extend.ts b/packages/stash/src/actions/extend.ts new file mode 100644 index 0000000000..01742da7a9 --- /dev/null +++ b/packages/stash/src/actions/extend.ts @@ -0,0 +1,15 @@ +import { Stash } from "../common"; + +export type ExtendArgs = { + stash: stash; + actions: actions; +}; + +export type ExtendResult = stash & actions; + +export function extend({ + stash, + actions, +}: ExtendArgs): ExtendResult { + return { ...stash, ...actions }; +} diff --git a/packages/stash/src/actions/getConfig.test.ts b/packages/stash/src/actions/getConfig.test.ts new file mode 100644 index 0000000000..01475d8734 --- /dev/null +++ b/packages/stash/src/actions/getConfig.test.ts @@ -0,0 +1,42 @@ +import { defineTable } from "@latticexyz/store/config/v2"; +import { describe, it } from "vitest"; +import { createStash } from "../createStash"; +import { attest } from "@ark/attest"; +import { getConfig } from "./getConfig"; +import { registerTable } from "./registerTable"; + +describe("getConfig", () => { + it("should return the config of the given table", () => { + const { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + codegen: _1, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + deploy: _2, + ...rootTable + } = defineTable({ + label: "test", + schema: { field1: "address", field2: "string" }, + key: ["field1"], + }); + + const { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + codegen: _3, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + deploy: _4, + ...namespacedTable + } = defineTable({ + namespace: "namespace", + label: "test", + schema: { field1: "address", field2: "string" }, + key: ["field1"], + }); + + const stash = createStash(); + registerTable({ stash: stash, table: rootTable }); + registerTable({ stash: stash, table: namespacedTable }); + + attest(getConfig({ stash: stash, table: { label: "test" } })).equals(rootTable); + attest(getConfig({ stash: stash, table: { label: "test", namespaceLabel: "namespace" } })).equals(namespacedTable); + }); +}); diff --git a/packages/stash/src/actions/getConfig.ts b/packages/stash/src/actions/getConfig.ts new file mode 100644 index 0000000000..11ce416266 --- /dev/null +++ b/packages/stash/src/actions/getConfig.ts @@ -0,0 +1,14 @@ +import { Table } from "@latticexyz/config"; +import { Stash } from "../common"; + +export type GetConfigArgs = { + stash: Stash; + table: { label: string; namespaceLabel?: string }; +}; + +export type GetConfigResult
<table extends Table = Table> = table;
+
+export function getConfig({ stash, table }: GetConfigArgs): GetConfigResult
{ + const { namespaceLabel, label } = table; + return stash.get().config[namespaceLabel ?? ""][label]; +} diff --git a/packages/stash/src/actions/getKeys.test.ts b/packages/stash/src/actions/getKeys.test.ts new file mode 100644 index 0000000000..7cc061f0cc --- /dev/null +++ b/packages/stash/src/actions/getKeys.test.ts @@ -0,0 +1,34 @@ +import { defineStore } from "@latticexyz/store"; +import { describe, it } from "vitest"; +import { createStash } from "../createStash"; +import { setRecord } from "./setRecord"; +import { attest } from "@ark/attest"; +import { getKeys } from "./getKeys"; + +describe("getKeys", () => { + it("should return the key map of a table", () => { + const config = defineStore({ + tables: { + test: { + schema: { + player: "int32", + match: "int32", + x: "int32", + y: "int32", + }, + key: ["player", "match"], + }, + }, + }); + const table = config.tables.test; + const stash = createStash(config); + + setRecord({ stash, table, key: { player: 1, match: 2 }, record: { x: 3, y: 4 } }); + setRecord({ stash, table, key: { player: 5, match: 6 }, record: { x: 7, y: 8 } }); + + attest<{ [encodedKey: string]: { player: number; match: number } }>(getKeys({ stash, table })).snap({ + "1|2": { player: 1, match: 2 }, + "5|6": { player: 5, match: 6 }, + }); + }); +}); diff --git a/packages/stash/src/actions/getKeys.ts b/packages/stash/src/actions/getKeys.ts new file mode 100644 index 0000000000..eee1f31c5d --- /dev/null +++ b/packages/stash/src/actions/getKeys.ts @@ -0,0 +1,21 @@ +import { Table } from "@latticexyz/config"; +import { Stash, Keys } from "../common"; +import { decodeKey } from "./decodeKey"; + +export type GetKeysArgs
<table extends Table = Table> = {
+  stash: Stash;
+  table: table;
+};
+
+export type GetKeysResult<table extends Table = Table> = Keys<table>;
+
+export function getKeys<table extends Table>({ stash, table }: GetKeysArgs<table>): GetKeysResult<table>
{ + const { namespaceLabel, label } = table; + + return Object.fromEntries( + Object.keys(stash.get().records[namespaceLabel][label]).map((encodedKey) => [ + encodedKey, + decodeKey({ stash, table, encodedKey }), + ]), + ); +} diff --git a/packages/stash/src/actions/getRecord.test.ts b/packages/stash/src/actions/getRecord.test.ts new file mode 100644 index 0000000000..f2871b000d --- /dev/null +++ b/packages/stash/src/actions/getRecord.test.ts @@ -0,0 +1,96 @@ +import { attest } from "@ark/attest"; +import { defineStore } from "@latticexyz/store"; +import { describe, it } from "vitest"; +import { createStash } from "../createStash"; +import { setRecord } from "./setRecord"; +import { getRecord } from "./getRecord"; + +describe("getRecord", () => { + it("should get a record by key from the table", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + + const stash = createStash(config); + + setRecord({ + stash, + table, + key: { field2: 1, field3: 2 }, + record: { field1: "hello" }, + }); + + setRecord({ + stash, + table, + key: { field2: 2, field3: 1 }, + record: { field1: "world" }, + }); + + attest( + getRecord({ + stash, + table, + key: { field2: 1, field3: 2 }, + }), + ).snap({ field1: "hello", field2: 1, field3: 2 }); + + attest<{ field1: string; field2: number; field3: number }>( + getRecord({ + stash, + table, + key: { field2: 2, field3: 1 }, + }), + ).snap({ field1: "world", field2: 2, field3: 1 }); + }); + + it("should throw a type error if the key type doesn't match", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + + const stash = createStash(config); + + attest(() => + getRecord({ + stash, + table, + // @ts-expect-error Property 'field3' is missing in type '{ field2: number; }' + key: { field2: 1 }, + }), + ).type.errors(`Property 'field3' is missing in type '{ field2: number; }'`); + + attest(() => + getRecord({ + stash, + table, + // @ts-expect-error Type 'string' is not assignable to type 'number' + key: { field2: 1, field3: "invalid" }, + }), + ).type.errors(`Type 'string' is not assignable to type 'number'`); + }); +}); diff --git a/packages/stash/src/actions/getRecord.ts b/packages/stash/src/actions/getRecord.ts new file mode 100644 index 0000000000..ebdb141a2c --- /dev/null +++ b/packages/stash/src/actions/getRecord.ts @@ -0,0 +1,16 @@ +import { Table } from "@latticexyz/config"; +import { Key, Stash, TableRecord } from "../common"; +import { encodeKey } from "./encodeKey"; + +export type GetRecordArgs
<table extends Table = Table> = {
+  stash: Stash;
+  table: table;
+  key: Key<table>;
+};
+
+export type GetRecordResult<table extends Table = Table> = TableRecord<table>;
+
+export function getRecord<table extends Table>({ stash, table, key }: GetRecordArgs<table>): GetRecordResult<table>
{ + const { namespaceLabel, label } = table; + return stash.get().records[namespaceLabel][label][encodeKey({ table, key })]; +} diff --git a/packages/stash/src/actions/getRecords.test.ts b/packages/stash/src/actions/getRecords.test.ts new file mode 100644 index 0000000000..dd3bc33c32 --- /dev/null +++ b/packages/stash/src/actions/getRecords.test.ts @@ -0,0 +1,42 @@ +import { defineStore } from "@latticexyz/store"; +import { describe, it } from "vitest"; +import { createStash } from "../createStash"; +import { setRecord } from "./setRecord"; +import { attest } from "@ark/attest"; +import { getRecords } from "./getRecords"; + +describe("getRecords", () => { + it("should get all records from a table", () => { + const config = defineStore({ + tables: { + test: { + schema: { + player: "int32", + match: "int32", + x: "uint256", + y: "uint256", + }, + key: ["player", "match"], + }, + }, + }); + const table = config.tables.test; + const stash = createStash(config); + + setRecord({ stash, table, key: { player: 1, match: 2 }, record: { x: 3n, y: 4n } }); + setRecord({ stash, table, key: { player: 5, match: 6 }, record: { x: 7n, y: 8n } }); + + attest<{ [encodedKey: string]: { player: number; match: number; x: bigint; y: bigint } }>( + getRecords({ stash, table }), + ).equals({ + "1|2": { player: 1, match: 2, x: 3n, y: 4n }, + "5|6": { player: 5, match: 6, x: 7n, y: 8n }, + }); + + attest<{ [encodedKey: string]: { player: number; match: number; x: bigint; y: bigint } }>( + getRecords({ stash, table, keys: [{ player: 1, match: 2 }] }), + ).equals({ + "1|2": { player: 1, match: 2, x: 3n, y: 4n }, + }); + }); +}); diff --git a/packages/stash/src/actions/getRecords.ts b/packages/stash/src/actions/getRecords.ts new file mode 100644 index 0000000000..9470cca33c --- /dev/null +++ b/packages/stash/src/actions/getRecords.ts @@ -0,0 +1,31 @@ +import { Table } from "@latticexyz/config"; +import { Key, Stash, TableRecords } from "../common"; +import { encodeKey } from "./encodeKey"; + +export type GetRecordsArgs
<table extends Table = Table> = {
+  stash: Stash;
+  table: table;
+  keys?: Key<table>[];
+};
+
+export type GetRecordsResult<table extends Table = Table> = TableRecords<table>;
+
+export function getRecords<table extends Table>({
+  stash,
+  table,
+  keys,
+}: GetRecordsArgs<table>): GetRecordsResult<table>
{ + const { namespaceLabel, label } = table; + const records = stash.get().records[namespaceLabel][label]; + + if (!keys) { + return records; + } + + return Object.fromEntries( + keys.map((key) => { + const encodedKey = encodeKey({ table, key }); + return [encodedKey, records[encodedKey]]; + }), + ); +} diff --git a/packages/stash/src/actions/getTable.test.ts b/packages/stash/src/actions/getTable.test.ts new file mode 100644 index 0000000000..4de80db169 --- /dev/null +++ b/packages/stash/src/actions/getTable.test.ts @@ -0,0 +1,342 @@ +import { attest } from "@ark/attest"; +import { defineTable } from "@latticexyz/store/config/v2"; +import { describe, it, expect, vi } from "vitest"; +import { createStash } from "../createStash"; +import { getTable } from "./getTable"; + +describe("getTable", () => { + it("should return a bound table", () => { + const stash = createStash(); + const table = getTable({ + stash: stash, + table: defineTable({ + label: "table1", + namespace: "namespace1", + schema: { field1: "uint32", field2: "address" }, + key: ["field1"], + }), + }); + + attest(stash.get().config).snap({ + namespace1: { + table1: { + label: "table1", + type: "table", + namespace: "namespace1", + namespaceLabel: "namespace1", + name: "table1", + tableId: "0x74626e616d65737061636531000000007461626c653100000000000000000000", + schema: { + field1: { type: "uint32", internalType: "uint32" }, + field2: { type: "address", internalType: "address" }, + }, + key: ["field1"], + }, + }, + }); + + attest(stash.get().records).snap({ namespace1: { table1: {} } }); + expect(table.setRecord).toBeDefined(); + expect(table.getRecord).toBeDefined(); + }); + + describe("decodeKey", () => { + it("should decode an encoded table key", () => { + const stash = createStash(); + const table = stash.getTable({ + table: defineTable({ + label: "test", + schema: { field1: "string", field2: "uint32", field3: "uint256" }, + key: ["field2", "field3"], + }), + }); + + const key = { field2: 1, field3: 2n }; + table.setRecord({ key, record: { field1: "hello" } }); + + const encodedKey = table.encodeKey({ key }); + attest(table.decodeKey({ encodedKey })).equals({ field2: 1, field3: 2n }); + }); + }); + + describe("deleteRecord", () => { + it("should throw a type error if an invalid key is provided", () => { + const stash = createStash(); + const table = stash.getTable({ + table: defineTable({ + label: "test", + schema: { field1: "string", field2: "uint32", field3: "uint256" }, + key: ["field2", "field3"], + }), + }); + + attest(() => + table.deleteRecord({ + // @ts-expect-error Property 'field3' is missing in type '{ field2: number; }' + key: { field2: 1 }, + }), + ).type.errors(`Property 'field3' is missing in type '{ field2: number; }'`); + + attest(() => + table.deleteRecord({ + // @ts-expect-error Type 'string' is not assignable to type 'number' + key: { field2: 1, field3: "invalid" }, + }), + ).type.errors(`Type 'string' is not assignable to type 'bigint'`); + }); + }); + + describe("encodeKey", () => { + it("should throw a type error if an invalid key is provided", () => { + const stash = createStash(); + const table = stash.getTable({ + table: defineTable({ + label: "test", + schema: { field1: "string", field2: "uint32", field3: "uint256" }, + key: ["field2", "field3"], + }), + }); + + attest(() => + table.encodeKey({ + // @ts-expect-error Property 'field3' is missing in type '{ field2: number; }' + key: { + field2: 1, + }, + }), + ) + .throws(`Provided key is missing field field3.`) + .type.errors(`Property 'field3' is 
missing in type '{ field2: number; }'`); + + attest( + table.encodeKey({ + key: { + field2: 1, + // @ts-expect-error Type 'string' is not assignable to type 'bigint'. + field3: "invalid", + }, + }), + ).type.errors(`Type 'string' is not assignable to type 'bigint'.`); + }); + }); + + describe("getConfig", () => { + it("should return the config type of the given table", () => { + const stash = createStash(); + const config = defineTable({ + label: "test", + schema: { field1: "string", field2: "uint32", field3: "uint256" }, + key: ["field2", "field3"], + }); + const table = stash.getTable({ + table: config, + }); + attest>(); + }); + }); + + describe("getKeys", () => { + it("should return the key map of a table", () => { + const stash = createStash(); + const table = stash.getTable({ + table: defineTable({ + label: "test", + schema: { + player: "int32", + match: "int32", + x: "int32", + y: "int32", + }, + key: ["player", "match"], + }), + }); + + table.setRecord({ key: { player: 1, match: 2 }, record: { x: 3, y: 4 } }); + table.setRecord({ key: { player: 5, match: 6 }, record: { x: 7, y: 8 } }); + + attest<{ [encodedKey: string]: { player: number; match: number } }>(table.getKeys()).snap({ + "1|2": { player: 1, match: 2 }, + "5|6": { player: 5, match: 6 }, + }); + }); + }); + + describe("getRecord", () => { + it("should throw a type error if the key type doesn't match", () => { + const stash = createStash(); + const table = stash.getTable({ + table: defineTable({ + label: "test", + schema: { field1: "string", field2: "uint32", field3: "int32" }, + key: ["field2", "field3"], + }), + }); + + attest(() => + table.getRecord({ + // @ts-expect-error Property 'field3' is missing in type '{ field2: number; }' + key: { field2: 1 }, + }), + ).type.errors(`Property 'field3' is missing in type '{ field2: number; }'`); + + attest(() => + table.getRecord({ + // @ts-expect-error Type 'string' is not assignable to type 'number' + key: { field2: 1, field3: "invalid" }, + }), + ).type.errors(`Type 'string' is not assignable to type 'number'`); + }); + }); + + describe("getRecords", () => { + it("should get all records from a table", () => { + const config = defineTable({ + label: "test", + schema: { + player: "int32", + match: "int32", + x: "uint256", + y: "uint256", + }, + key: ["player", "match"], + }); + const stash = createStash(); + const table = stash.getTable({ table: config }); + + table.setRecord({ key: { player: 1, match: 2 }, record: { x: 3n, y: 4n } }); + table.setRecord({ key: { player: 5, match: 6 }, record: { x: 7n, y: 8n } }); + + attest<{ [encodedKey: string]: { player: number; match: number; x: bigint; y: bigint } }>( + table.getRecords(), + ).equals({ + "1|2": { player: 1, match: 2, x: 3n, y: 4n }, + "5|6": { player: 5, match: 6, x: 7n, y: 8n }, + }); + + attest<{ [encodedKey: string]: { player: number; match: number; x: bigint; y: bigint } }>( + table.getRecords({ keys: [{ player: 1, match: 2 }] }), + ).equals({ + "1|2": { player: 1, match: 2, x: 3n, y: 4n }, + }); + }); + }); + + describe("setRecord", () => { + it("should show a type warning if an invalid table, key or record is used", () => { + const config = defineTable({ + label: "test", + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }); + const stash = createStash(); + const table = stash.getTable({ table: config }); + + attest(() => + table.setRecord({ + // @ts-expect-error Property 'field2' is missing in type '{ field3: number; }' + key: { field3: 2 }, + record: { field1: 
"" }, + }), + ) + .throws("Provided key is missing field field2.") + .type.errors(`Property 'field2' is missing in type '{ field3: number; }`); + + attest(() => + table.setRecord({ + // @ts-expect-error Type 'string' is not assignable to type 'number'. + key: { field2: 1, field3: "invalid" }, + record: { field1: "" }, + }), + ).type.errors(`Type 'string' is not assignable to type 'number'.`); + + attest(() => + table.setRecord({ + key: { field2: 1, field3: 2 }, + // @ts-expect-error Type 'number' is not assignable to type 'string'. + record: { field1: 1 }, + }), + ).type.errors(`Type 'number' is not assignable to type 'string'.`); + }); + }); + + describe("setRecords", () => { + it("should show a type warning if an invalid table, key or record is used", () => { + const config = defineTable({ + label: "test", + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }); + const stash = createStash(); + const table = stash.getTable({ table: config }); + + attest(() => + table.setRecords({ + // @ts-expect-error Type '{ field1: string; }' is missing the following properties from type + records: [{ field1: "" }], + }), + ) + .throws("Provided key is missing field field2.") + .type.errors(`Type '{ field1: string; }' is missing the following properties from type`); + + attest(() => + table.setRecords({ + // @ts-expect-error Type 'number' is not assignable to type 'string'. + records: [{ field1: 1, field2: 1, field3: 2 }], + }), + ).type.errors(`Type 'number' is not assignable to type 'string'.`); + }); + }); + + describe("subscribe", () => { + it("should notify subscriber of table change", () => { + const config1 = defineTable({ + label: "table1", + schema: { a: "address", b: "uint256", c: "uint32" }, + key: ["a"], + }); + const config2 = defineTable({ + label: "table2", + schema: { a: "address", b: "uint256", c: "uint32" }, + key: ["a"], + }); + const stash = createStash(); + const table1 = stash.getTable({ table: config1 }); + const table2 = stash.getTable({ table: config2 }); + const subscriber = vi.fn(); + + table1.subscribe({ subscriber }); + + table1.setRecord({ key: { a: "0x00" }, record: { b: 1n, c: 2 } }); + + expect(subscriber).toHaveBeenCalledTimes(1); + expect(subscriber).toHaveBeenNthCalledWith(1, { + "0x00": { + prev: undefined, + current: { a: "0x00", b: 1n, c: 2 }, + }, + }); + + // Expect unrelated updates to not notify subscribers + table2.setRecord({ key: { a: "0x01" }, record: { b: 1n, c: 2 } }); + expect(subscriber).toHaveBeenCalledTimes(1); + + table1.setRecord({ key: { a: "0x00" }, record: { b: 1n, c: 3 } }); + + expect(subscriber).toHaveBeenCalledTimes(2); + expect(subscriber).toHaveBeenNthCalledWith(2, { + "0x00": { + prev: { a: "0x00", b: 1n, c: 2 }, + current: { a: "0x00", b: 1n, c: 3 }, + }, + }); + }); + }); +}); diff --git a/packages/stash/src/actions/getTable.ts b/packages/stash/src/actions/getTable.ts new file mode 100644 index 0000000000..636d6b0552 --- /dev/null +++ b/packages/stash/src/actions/getTable.ts @@ -0,0 +1,68 @@ +import { Table } from "@latticexyz/config"; +import { Stash } from "../common"; +import { DecodeKeyArgs, DecodeKeyResult, decodeKey } from "./decodeKey"; +import { DeleteRecordArgs, DeleteRecordResult, deleteRecord } from "./deleteRecord"; +import { EncodeKeyArgs, EncodeKeyResult, encodeKey } from "./encodeKey"; +import { GetConfigResult, getConfig } from "./getConfig"; +import { GetKeysResult, getKeys } from "./getKeys"; +import { GetRecordArgs, GetRecordResult, getRecord } from "./getRecord"; 
+import { GetRecordsArgs, GetRecordsResult, getRecords } from "./getRecords"; +import { SetRecordArgs, SetRecordResult, setRecord } from "./setRecord"; +import { SetRecordsArgs, SetRecordsResult, setRecords } from "./setRecords"; +import { SubscribeTableArgs, SubscribeTableResult, subscribeTable } from "./subscribeTable"; +import { registerTable } from "./registerTable"; + +export type TableBoundDecodeKeyArgs
<table extends Table = Table> = Omit<DecodeKeyArgs<table>, "stash" | "table">;
+export type TableBoundDeleteRecordArgs<table extends Table = Table> = Omit<DeleteRecordArgs<table>, "stash" | "table">;
+export type TableBoundEncodeKeyArgs<table extends Table = Table> = Omit<EncodeKeyArgs<table>, "stash" | "table">;
+export type TableBoundGetRecordArgs<table extends Table = Table> = Omit<GetRecordArgs<table>, "stash" | "table">;
+export type TableBoundGetRecordsArgs<table extends Table = Table> = Omit<GetRecordsArgs<table>, "stash" | "table">;
+export type TableBoundSetRecordArgs<table extends Table = Table> = Omit<SetRecordArgs<table>, "stash" | "table">;
+export type TableBoundSetRecordsArgs<table extends Table = Table> = Omit<SetRecordsArgs<table>, "stash" | "table">;
+export type TableBoundSubscribeTableArgs<table extends Table = Table> = Omit<
+  SubscribeTableArgs<table>,
+  "stash" | "table"
+>;
+
+export type BoundTable<table extends Table = Table> = {
+  decodeKey: (args: TableBoundDecodeKeyArgs<table>) => DecodeKeyResult<table>;
+  deleteRecord: (args: TableBoundDeleteRecordArgs<table>) => DeleteRecordResult;
+  encodeKey: (args: TableBoundEncodeKeyArgs<table>) => EncodeKeyResult;
+  getConfig: () => GetConfigResult<table>;
+  getKeys: () => GetKeysResult<table>;
+  getRecord: (args: TableBoundGetRecordArgs<table>) => GetRecordResult<table>;
+  getRecords: (args?: TableBoundGetRecordsArgs<table>) => GetRecordsResult<table>;
+  setRecord: (args: TableBoundSetRecordArgs<table>) => SetRecordResult;
+  setRecords: (args: TableBoundSetRecordsArgs<table>) => SetRecordsResult;
+  subscribe: (args: TableBoundSubscribeTableArgs<table>) => SubscribeTableResult;
+};
+
+export type GetTableArgs<table extends Table = Table> = {
+  stash: Stash;
+  table: table;
+};
+
+export type GetTableResult<table extends Table = Table> = BoundTable<table>;
+
+export function getTable<table extends Table>({ stash, table }: GetTableArgs<table>): GetTableResult<table> {
+  const { namespaceLabel, label } = table;
+
+  if (stash.get().config[namespaceLabel]?.[label] == null) {
+    registerTable({ stash, table });
+  }
+
+  return {
+    decodeKey: (args: TableBoundDecodeKeyArgs<table>) => decodeKey({ stash, table, ...args }),
+    deleteRecord: (args: TableBoundDeleteRecordArgs<table>) => deleteRecord({ stash, table, ...args }),
+    encodeKey: (args: TableBoundEncodeKeyArgs<table>) => encodeKey({ table, ...args }),
+    getConfig: () => getConfig({ stash, table }) as table,
+    getKeys: () => getKeys({ stash, table }),
+    getRecord: (args: TableBoundGetRecordArgs<table>) => getRecord({ stash, table, ...args }),
+    getRecords: (args?: TableBoundGetRecordsArgs<table>) => getRecords({ stash, table, ...args }),
+    setRecord: (args: TableBoundSetRecordArgs<table>) => setRecord({ stash, table, ...args }),
+    setRecords: (args: TableBoundSetRecordsArgs<table>
) => setRecords({ stash, table, ...args }), + subscribe: (args: TableBoundSubscribeTableArgs) => subscribeTable({ stash, table, ...args }), + + // TODO: dynamically add setters and getters for individual fields of the table + }; +} diff --git a/packages/stash/src/actions/getTables.test.ts b/packages/stash/src/actions/getTables.test.ts new file mode 100644 index 0000000000..e04f174992 --- /dev/null +++ b/packages/stash/src/actions/getTables.test.ts @@ -0,0 +1,67 @@ +import { defineStore } from "@latticexyz/store"; +import { describe, it } from "vitest"; +import { createStash } from "../createStash"; +import { getTables } from "./getTables"; +import { attest } from "@ark/attest"; + +describe("getTables", () => { + it("should return bound tables for each registered table in the stash", () => { + const config = defineStore({ + namespaces: { + namespace1: { + tables: { + table1: { + schema: { a: "address", b: "uint256", c: "uint32" }, + key: ["a"], + }, + }, + }, + namespace2: { + tables: { + table2: { + schema: { a: "address", b: "uint256", c: "uint32" }, + key: ["a"], + }, + }, + }, + }, + }); + const stash = createStash(config); + const tables = getTables({ stash }); + + attest<"namespace1" | "namespace2", keyof typeof tables>(); + + attest<"table2", keyof typeof tables.namespace2>(); + + attest(tables).snap({ + namespace1: { + table1: { + decodeKey: "Function(decodeKey)", + deleteRecord: "Function(deleteRecord)", + encodeKey: "Function(encodeKey)", + getConfig: "Function(getConfig)", + getKeys: "Function(getKeys)", + getRecord: "Function(getRecord)", + getRecords: "Function(getRecords)", + setRecord: "Function(setRecord)", + setRecords: "Function(setRecords)", + subscribe: "Function(subscribe)", + }, + }, + namespace2: { + table2: { + decodeKey: "Function(decodeKey1)", + deleteRecord: "Function(deleteRecord1)", + encodeKey: "Function(encodeKey1)", + getConfig: "Function(getConfig1)", + getKeys: "Function(getKeys1)", + getRecord: "Function(getRecord1)", + getRecords: "Function(getRecords1)", + setRecord: "Function(setRecord1)", + setRecords: "Function(setRecords1)", + subscribe: "Function(subscribe1)", + }, + }, + }); + }); +}); diff --git a/packages/stash/src/actions/getTables.ts b/packages/stash/src/actions/getTables.ts new file mode 100644 index 0000000000..f9f42b94f7 --- /dev/null +++ b/packages/stash/src/actions/getTables.ts @@ -0,0 +1,32 @@ +import { Stash, StoreConfig, getNamespaces, getTableConfig, getNamespaceTables } from "../common"; +import { BoundTable, getTable } from "./getTable"; + +type MutableBoundTables = { + -readonly [namespace in getNamespaces]: { + -readonly [table in getNamespaceTables]: BoundTable>; + }; +}; + +export type BoundTables = { + [namespace in getNamespaces]: { + [table in getNamespaceTables]: BoundTable>; + }; +}; + +export type GetTablesArgs = { + stash: Stash; +}; + +export type GetTablesResult = BoundTables; + +export function getTables({ stash }: GetTablesArgs): GetTablesResult { + const boundTables: MutableBoundTables = {}; + const config = stash.get().config; + for (const namespace of Object.keys(config)) { + boundTables[namespace] ??= {}; + for (const label of Object.keys(config[namespace])) { + boundTables[namespace][label] = getTable({ stash, table: config[namespace][label] }) as never; + } + } + return boundTables as never; +} diff --git a/packages/stash/src/actions/index.ts b/packages/stash/src/actions/index.ts new file mode 100644 index 0000000000..eb44c0514c --- /dev/null +++ b/packages/stash/src/actions/index.ts @@ -0,0 +1,17 @@ +export * 
from "./decodeKey"; +export * from "./deleteRecord"; +export * from "./encodeKey"; +export * from "./extend"; +export * from "./getConfig"; +export * from "./getKeys"; +export * from "./getRecord"; +export * from "./getRecords"; +export * from "./getTable"; +export * from "./getTables"; +export * from "./registerTable"; +export * from "./runQuery"; +export * from "./setRecord"; +export * from "./setRecords"; +export * from "./subscribeQuery"; +export * from "./subscribeStore"; +export * from "./subscribeTable"; diff --git a/packages/stash/src/actions/registerTable.test.ts b/packages/stash/src/actions/registerTable.test.ts new file mode 100644 index 0000000000..dcb7a2db6e --- /dev/null +++ b/packages/stash/src/actions/registerTable.test.ts @@ -0,0 +1,42 @@ +import { attest } from "@ark/attest"; +import { defineTable } from "@latticexyz/store/config/v2"; +import { describe, it, expect } from "vitest"; +import { createStash } from "../createStash"; +import { registerTable } from "./registerTable"; + +describe("registerTable", () => { + it("should add a new table to the stash and return a bound table", () => { + const stash = createStash(); + const table = registerTable({ + stash: stash, + table: defineTable({ + label: "table1", + namespace: "namespace1", + schema: { field1: "uint32", field2: "address" }, + key: ["field1"], + }), + }); + + attest(stash.get().config).snap({ + namespace1: { + table1: { + label: "table1", + type: "table", + namespace: "namespace1", + namespaceLabel: "namespace1", + name: "table1", + tableId: "0x74626e616d65737061636531000000007461626c653100000000000000000000", + schema: { + field1: { type: "uint32", internalType: "uint32" }, + field2: { type: "address", internalType: "address" }, + }, + key: ["field1"], + }, + }, + }); + + attest(stash.get().records).snap({ namespace1: { table1: {} } }); + expect(table.setRecord).toBeDefined(); + expect(table.getRecord).toBeDefined(); + }); +}); diff --git a/packages/stash/src/actions/registerTable.ts b/packages/stash/src/actions/registerTable.ts new file mode 100644 index 0000000000..b93a198610 --- /dev/null +++ b/packages/stash/src/actions/registerTable.ts @@ -0,0 +1,40 @@ +import { Table } from "@latticexyz/config"; +import { getTable, BoundTable } from "./getTable"; +import { Stash } from "../common"; + +export type RegisterTableArgs
<table extends Table = Table> = {
+  stash: Stash;
+  table: table;
+};
+
+export type RegisterTableResult<table extends Table = Table> = BoundTable<table>;
+
+export function registerTable<table extends Table>({
+  stash,
+  table,
+}: RegisterTableArgs<table>): RegisterTableResult<table>
{ + // Pick only relevant keys from the table config, ignore keys like `codegen`, `deploy` + const { namespace, namespaceLabel, name, label, key, schema, type, tableId } = table; + const tableConfig = { namespace, namespaceLabel, name, label, key, schema, type, tableId }; + + // Set config for table + stash._.state.config[namespaceLabel] ??= {}; + stash._.state.config[namespaceLabel][label] = tableConfig; + + // Init records map for table + stash._.state.records[namespaceLabel] ??= {}; + stash._.state.records[namespaceLabel][label] ??= {}; + + // Init subscribers set for table + stash._.tableSubscribers[namespaceLabel] ??= {}; + stash._.tableSubscribers[namespaceLabel][label] ??= new Set(); + + // Notify stash subscribers + const storeUpdate = { + config: { [namespaceLabel]: { [label]: { prev: undefined, current: tableConfig } } }, + records: {}, + }; + stash._.storeSubscribers.forEach((subscriber) => subscriber(storeUpdate)); + + return getTable({ stash, table }); +} diff --git a/packages/stash/src/actions/runQuery.test.ts b/packages/stash/src/actions/runQuery.test.ts new file mode 100644 index 0000000000..123d7ee795 --- /dev/null +++ b/packages/stash/src/actions/runQuery.test.ts @@ -0,0 +1,158 @@ +import { describe, beforeEach, it } from "vitest"; +import { attest } from "@arktype/attest"; +import { createStash } from "../createStash"; +import { runQuery } from "./runQuery"; +import { defineStore } from "@latticexyz/store"; +import { Stash, StoreRecords, getQueryConfig } from "../common"; +import { setRecord } from "./setRecord"; +import { In, MatchRecord, NotIn, NotMatchRecord } from "../queryFragments"; +import { Hex } from "viem"; + +describe("runQuery", () => { + let stash: Stash; + const config = defineStore({ + namespaces: { + namespace1: { + tables: { + Position: { + schema: { player: "bytes32", x: "int32", y: "int32" }, + key: ["player"], + }, + }, + }, + namespace2: { + tables: { + Inventory: { + schema: { player: "bytes32", item: "bytes32", amount: "uint32" }, + key: ["player", "item"], + }, + Health: { + schema: { player: "bytes32", health: "uint32" }, + key: ["player"], + }, + }, + }, + }, + }); + + const { Position } = config.namespaces.namespace1.tables; + const { Inventory, Health } = config.namespaces.namespace2.tables; + + beforeEach(() => { + stash = createStash(config); + + // Add some mock data + const items = ["0xgold", "0xsilver"] as const; + const num = 5; + for (let i = 0; i < num; i++) { + setRecord({ stash, table: Position, key: { player: `0x${String(i)}` }, record: { x: i, y: num - i } }); + if (i > 2) { + setRecord({ stash, table: Health, key: { player: `0x${String(i)}` }, record: { health: i } }); + } + for (const item of items) { + setRecord({ stash, table: Inventory, key: { player: `0x${String(i)}`, item }, record: { amount: i } }); + } + } + }); + + it("should return all keys in the Position table", () => { + const result = runQuery({ stash, query: [In(Position)] }); + attest(result).snap({ + keys: { + "0x0": { player: "0x0" }, + "0x1": { player: "0x1" }, + "0x2": { player: "0x2" }, + "0x3": { player: "0x3" }, + "0x4": { player: "0x4" }, + }, + }); + }); + + it("should return all keys that are in the Position and Health table", () => { + const result = runQuery({ stash, query: [In(Position), In(Health)] }); + attest(result).snap({ + keys: { + "0x3": { player: "0x3" }, + "0x4": { player: "0x4" }, + }, + }); + }); + + it("should return all keys that have Position.x = 4 and are included in Health", () => { + const result = runQuery({ stash, query: 
[MatchRecord(Position, { x: 4 }), In(Health)] }); + attest(result).snap({ keys: { "0x4": { player: "0x4" } } }); + }); + + it("should return all keys that are in Position but not Health", () => { + const result = runQuery({ stash, query: [In(Position), NotIn(Health)] }); + attest(result).snap({ + keys: { + "0x0": { player: "0x0" }, + "0x1": { player: "0x1" }, + "0x2": { player: "0x2" }, + }, + }); + }); + + it("should return all keys that don't include a gold item in the Inventory table", () => { + const result = runQuery({ stash, query: [NotMatchRecord(Inventory, { item: "0xgold" })] }); + attest(result).snap({ + keys: { + "0x0|0xsilver": { player: "0x0", item: "0xsilver" }, + "0x1|0xsilver": { player: "0x1", item: "0xsilver" }, + "0x2|0xsilver": { player: "0x2", item: "0xsilver" }, + "0x3|0xsilver": { player: "0x3", item: "0xsilver" }, + "0x4|0xsilver": { player: "0x4", item: "0xsilver" }, + }, + }); + }); + + it("should throw an error when tables with different key schemas are mixed", () => { + attest(() => + runQuery({ stash, query: [In(Position), MatchRecord(Inventory, { item: "0xgold", amount: 2 })] }), + ).throws("All tables in a query must share the same key schema"); + }); + + it("should include all matching records from the tables if includeRecords is set", () => { + const result = runQuery({ stash, query: [In(Position), In(Health)], options: { includeRecords: true } }); + attest(result).snap({ + keys: { + "0x3": { player: "0x3" }, + "0x4": { player: "0x4" }, + }, + records: { + namespace1: { + Position: { + "0x3": { player: "0x3", x: 3, y: 2 }, + "0x4": { player: "0x4", x: 4, y: 1 }, + }, + }, + namespace2: { + Health: { + "0x3": { player: "0x3", health: 3 }, + "0x4": { player: "0x4", health: 4 }, + }, + }, + }, + }); + }); + + it("should include `records` only if the `includeRecords` option is provided", () => { + const query = [In(Position)] as const; + const resultWithoutRecords = runQuery({ stash, query }); + attest(); + + const resultWithRecords = runQuery({ stash, query, options: { includeRecords: true } }); + attest>, (typeof resultWithRecords)["records"]>(); + }); + + it("should type the `records` in the result based on tables in the query", () => { + const result = runQuery({ stash, query: [In(Position), In(Health)], options: { includeRecords: true } }); + + attest<"namespace1" | "namespace2", keyof (typeof result)["records"]>(); + attest<"Position", keyof (typeof result)["records"]["namespace1"]>(); + attest<"Health", keyof (typeof result)["records"]["namespace2"]>(); + attest<{ player: Hex; x: number; y: number }, (typeof result)["records"]["namespace1"]["Position"][string]>(); + attest<{ player: Hex; health: number }, (typeof result)["records"]["namespace2"]["Health"][string]>(); + }); +}); diff --git a/packages/stash/src/actions/runQuery.ts b/packages/stash/src/actions/runQuery.ts new file mode 100644 index 0000000000..46cf77039f --- /dev/null +++ b/packages/stash/src/actions/runQuery.ts @@ -0,0 +1,84 @@ +import { getKeySchema } from "@latticexyz/protocol-parser/internal"; +import { + StoreRecords, + Query, + Stash, + MutableStoreRecords, + CommonQueryOptions, + CommonQueryResult, + getQueryConfig, +} from "../common"; +import { getConfig } from "./getConfig"; +import { getRecords } from "./getRecords"; + +export type RunQueryOptions = CommonQueryOptions & { + includeRecords?: boolean; +}; + +// TODO: is it feasible to type the stash records return type based on the query? 
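For orientation, a minimal sketch of how runQuery is typically driven (not part of the patch; the relative import paths mirror those used in runQuery.test.ts above):

import { defineStore } from "@latticexyz/store";
import { createStash } from "../createStash";
import { setRecord } from "./setRecord";
import { runQuery } from "./runQuery";
import { In, MatchRecord } from "../queryFragments";

const config = defineStore({
  tables: {
    Position: { schema: { player: "bytes32", x: "int32", y: "int32" }, key: ["player"] },
  },
});
const Position = config.tables.Position;

const stash = createStash(config);
setRecord({ stash, table: Position, key: { player: "0x01" }, record: { x: 1, y: 2 } });

// Keys only
const { keys } = runQuery({ stash, query: [In(Position)] });

// Keys plus matching records, grouped by namespaceLabel and table label
const withRecords = runQuery({
  stash,
  query: [MatchRecord(Position, { x: 1 })],
  options: { includeRecords: true },
});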
+export type RunQueryArgs = { + stash: Stash; + query: query; + options?: options; +}; + +export type RunQueryResult< + query extends Query = Query, + options extends RunQueryOptions = RunQueryOptions, +> = CommonQueryResult & + (options extends { + includeRecords: true; + } + ? { + records: StoreRecords>; + } + : { records?: never }); + +export function runQuery({ + stash, + query, + options, +}: RunQueryArgs): RunQueryResult { + // Only allow fragments with matching table keys for now + // TODO: we might be able to enable this if we add something like a `keySelector` + const expectedKeySchema = getKeySchema(getConfig({ stash, table: query[0].table })); + for (const fragment of query) { + if ( + Object.values(expectedKeySchema).join("|") !== + Object.values(getKeySchema(getConfig({ stash, table: fragment.table }))).join("|") + ) { + throw new Error( + "All tables in a query must share the same key schema. Found mismatch when comparing tables: " + + fragment.table.label + + " and " + + query[0].table.label, + ); + } + } + + // Initial set of matching keys is either the provided `initialKeys` or all keys of the table of the first fragment + const keys = options?.initialKeys ?? query[0].getInitialKeys(stash); + + for (const fragment of query) { + // TODO: this might be more efficient if we would use a Map() instead of an object + for (const encodedKey of Object.keys(keys)) { + if (!fragment.match(stash, encodedKey)) { + delete keys[encodedKey]; + } + } + } + + // Early return if records are not requested + if (!options?.includeRecords) { + return { keys } as never; + } + + const records: MutableStoreRecords = {}; + for (const { table } of query) { + const { namespaceLabel, label } = table; + records[namespaceLabel] ??= {}; + const tableRecords = getRecords({ stash, table, keys: Object.values(keys) }); + records[namespaceLabel][label] ??= tableRecords; + } + return { keys, records } as never; +} diff --git a/packages/stash/src/actions/setRecord.test.ts b/packages/stash/src/actions/setRecord.test.ts new file mode 100644 index 0000000000..d0b020443b --- /dev/null +++ b/packages/stash/src/actions/setRecord.test.ts @@ -0,0 +1,100 @@ +import { defineStore } from "@latticexyz/store/config/v2"; +import { describe, it } from "vitest"; +import { createStash } from "../createStash"; +import { attest } from "@ark/attest"; +import { setRecord } from "./setRecord"; + +describe("setRecord", () => { + it("should add the record to the table", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + const stash = createStash(config); + + setRecord({ + stash, + table, + key: { field2: 1, field3: 2 }, + record: { field1: "hello" }, + }); + + setRecord({ + stash, + table, + key: { field2: 2, field3: 1 }, + record: { field1: "world" }, + }); + + attest(stash.get().records).snap({ + namespace1: { + table1: { + "1|2": { field1: "hello", field2: 1, field3: 2 }, + "2|1": { field1: "world", field2: 2, field3: 1 }, + }, + }, + }); + }); + + it("should show a type warning if an invalid table, key or record is used", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + const stash = 
createStash(config); + + attest(() => + setRecord({ + stash, + table, + // @ts-expect-error Property 'field2' is missing in type '{ field3: number; }' + key: { field3: 2 }, + record: { field1: "" }, + }), + ) + .throws("Provided key is missing field field2.") + .type.errors(`Property 'field2' is missing in type '{ field3: number; }`); + + attest(() => + setRecord({ + stash, + table, + // @ts-expect-error Type 'string' is not assignable to type 'number'. + key: { field2: 1, field3: "invalid" }, + record: { field1: "" }, + }), + ).type.errors(`Type 'string' is not assignable to type 'number'.`); + + attest(() => + setRecord({ + stash, + table, + key: { field2: 1, field3: 2 }, + // @ts-expect-error Type 'number' is not assignable to type 'string'. + record: { field1: 1 }, + }), + ).type.errors(`Type 'number' is not assignable to type 'string'.`); + }); +}); diff --git a/packages/stash/src/actions/setRecord.ts b/packages/stash/src/actions/setRecord.ts new file mode 100644 index 0000000000..6cbc45c600 --- /dev/null +++ b/packages/stash/src/actions/setRecord.ts @@ -0,0 +1,23 @@ +import { Key, TableRecord, Stash } from "../common"; +import { setRecords } from "./setRecords"; +import { Table } from "@latticexyz/config"; + +export type SetRecordArgs
<table extends Table = Table> = { + stash: Stash; + table: table; + key: Key<table>
; + record: Partial<TableRecord<table>>; +}; + +export type SetRecordResult = void; + +export function setRecord<table extends Table>
({ stash, table, key, record }: SetRecordArgs<table>
): SetRecordResult { + setRecords({ + stash, + table, + records: [ + // Stored record should include key + { ...record, ...key }, + ], + }); +} diff --git a/packages/stash/src/actions/setRecords.test.ts b/packages/stash/src/actions/setRecords.test.ts new file mode 100644 index 0000000000..8b54b87180 --- /dev/null +++ b/packages/stash/src/actions/setRecords.test.ts @@ -0,0 +1,82 @@ +import { defineStore } from "@latticexyz/store"; +import { describe, it } from "vitest"; +import { createStash } from "../createStash"; +import { setRecords } from "./setRecords"; +import { attest } from "@ark/attest"; + +describe("setRecords", () => { + it("should add the records to the table", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + const stash = createStash(config); + setRecords({ + stash, + table, + records: [ + { field1: "hello", field2: 1, field3: 2 }, + { field1: "world", field2: 2, field3: 1 }, + ], + }); + + attest(stash.get().records).snap({ + namespace1: { + table1: { + "1|2": { field1: "hello", field2: 1, field3: 2 }, + "2|1": { field1: "world", field2: 2, field3: 1 }, + }, + }, + }); + }); + + it("should show a type warning if an invalid table, key or record is used", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + const stash = createStash(config); + + attest(() => + setRecords({ + stash, + table, + // @ts-expect-error Type '{ field1: string; }' is missing the following properties from type + records: [{ field1: "" }], + }), + ) + .throws("Provided key is missing field field2.") + .type.errors(`Type '{ field1: string; }' is missing the following properties from type`); + + attest(() => + setRecords({ + stash, + table, + // @ts-expect-error Type 'number' is not assignable to type 'string'. + records: [{ field1: 1, field2: 1, field3: 2 }], + }), + ).type.errors(`Type 'number' is not assignable to type 'string'.`); + }); +}); diff --git a/packages/stash/src/actions/setRecords.ts b/packages/stash/src/actions/setRecords.ts new file mode 100644 index 0000000000..8e8e958d6a --- /dev/null +++ b/packages/stash/src/actions/setRecords.ts @@ -0,0 +1,50 @@ +import { dynamicAbiTypeToDefaultValue, staticAbiTypeToDefaultValue } from "@latticexyz/schema-type/internal"; +import { Stash, TableRecord, TableUpdates } from "../common"; +import { encodeKey } from "./encodeKey"; +import { Table } from "@latticexyz/config"; +import { registerTable } from "./registerTable"; + +export type SetRecordsArgs
<table extends Table = Table> = { + stash: Stash; + table: table; + records: TableRecord<table>
[]; +}; + +export type SetRecordsResult = void; + +export function setRecords<table extends Table>
({ stash, table, records }: SetRecordsArgs<table>
): SetRecordsResult { + const { namespaceLabel, label, schema } = table; + + if (stash.get().config[namespaceLabel]?.[label] == null) { + registerTable({ stash, table }); + } + + // Construct table updates + const updates: TableUpdates = {}; + for (const record of records) { + const encodedKey = encodeKey({ table, key: record as never }); + const prevRecord = stash.get().records[namespaceLabel][label][encodedKey]; + const newRecord = Object.fromEntries( + Object.keys(schema).map((fieldName) => [ + fieldName, + record[fieldName] ?? // Override provided record fields + prevRecord?.[fieldName] ?? // Keep existing non-overridden fields + staticAbiTypeToDefaultValue[schema[fieldName] as never] ?? // Default values for new fields + dynamicAbiTypeToDefaultValue[schema[fieldName] as never], + ]), + ); + updates[encodedKey] = { prev: prevRecord, current: newRecord }; + } + + // Update records + for (const [encodedKey, { current }] of Object.entries(updates)) { + stash._.state.records[namespaceLabel][label][encodedKey] = current as never; + } + + // Notify table subscribers + stash._.tableSubscribers[namespaceLabel][label].forEach((subscriber) => subscriber(updates)); + + // Notify stash subscribers + const storeUpdate = { config: {}, records: { [namespaceLabel]: { [label]: updates } } }; + stash._.storeSubscribers.forEach((subscriber) => subscriber(storeUpdate)); +} diff --git a/packages/stash/src/actions/subscribeQuery.test.ts b/packages/stash/src/actions/subscribeQuery.test.ts new file mode 100644 index 0000000000..80ea8ff12c --- /dev/null +++ b/packages/stash/src/actions/subscribeQuery.test.ts @@ -0,0 +1,167 @@ +import { beforeEach, describe, it, vi, expect } from "vitest"; +import { QueryUpdate, subscribeQuery } from "./subscribeQuery"; +import { attest } from "@arktype/attest"; +import { defineStore } from "@latticexyz/store"; +import { In, MatchRecord } from "../queryFragments"; +import { deleteRecord } from "./deleteRecord"; +import { setRecord } from "./setRecord"; +import { Stash } from "../common"; +import { createStash } from "../createStash"; + +describe("defineQuery", () => { + let stash: Stash; + const config = defineStore({ + namespace: "namespace1", + tables: { + Position: { + schema: { player: "bytes32", x: "int32", y: "int32" }, + key: ["player"], + }, + Health: { + schema: { player: "bytes32", health: "uint32" }, + key: ["player"], + }, + Inventory: { + schema: { player: "bytes32", item: "bytes32", amount: "uint32" }, + key: ["player", "item"], + }, + }, + }); + + const { Position, Health, Inventory } = config.namespaces.namespace1.tables; + + beforeEach(() => { + stash = createStash(config); + + // Add some mock data + const items = ["0xgold", "0xsilver"] as const; + const num = 5; + for (let i = 0; i < num; i++) { + setRecord({ stash, table: Position, key: { player: `0x${String(i)}` }, record: { x: i, y: num - i } }); + if (i > 2) { + setRecord({ stash, table: Health, key: { player: `0x${String(i)}` }, record: { health: i } }); + } + for (const item of items) { + setRecord({ stash, table: Inventory, key: { player: `0x${String(i)}`, item }, record: { amount: i } }); + } + } + }); + + it("should return the matching keys and keep it updated", () => { + const result = subscribeQuery({ stash, query: [In(Position), In(Health)] }); + attest(result.keys).snap({ + "0x3": { player: "0x3" }, + "0x4": { player: "0x4" }, + }); + + setRecord({ stash, table: Health, key: { player: `0x2` }, record: { health: 2 } }); + + attest(result.keys).snap({ + "0x2": { player: "0x2" }, + "0x3": { player: 
"0x3" }, + "0x4": { player: "0x4" }, + }); + }); + + it("should notify subscribers when a matching key is updated", () => { + let lastUpdate: unknown; + const subscriber = vi.fn((update: QueryUpdate) => (lastUpdate = update)); + const result = subscribeQuery({ stash, query: [MatchRecord(Position, { x: 4 }), In(Health)] }); + result.subscribe(subscriber); + + setRecord({ stash, table: Position, key: { player: "0x4" }, record: { y: 2 } }); + + expect(subscriber).toBeCalledTimes(1); + attest(lastUpdate).snap({ + records: { + namespace1: { + Position: { + "0x4": { + prev: { player: "0x4", x: 4, y: 1 }, + current: { player: "0x4", x: 4, y: 2 }, + }, + }, + }, + }, + keys: { "0x4": { player: "0x4" } }, + types: { "0x4": "update" }, + }); + }); + + it("should notify subscribers when a new key matches", () => { + let lastUpdate: unknown; + const subscriber = vi.fn((update: QueryUpdate) => (lastUpdate = update)); + const result = subscribeQuery({ stash, query: [In(Position), In(Health)] }); + result.subscribe(subscriber); + + setRecord({ stash, table: Health, key: { player: `0x2` }, record: { health: 2 } }); + + expect(subscriber).toBeCalledTimes(1); + attest(lastUpdate).snap({ + records: { + namespace1: { + Health: { + "0x2": { + prev: undefined, + current: { player: `0x2`, health: 2 }, + }, + }, + }, + }, + keys: { "0x2": { player: "0x2" } }, + types: { "0x2": "enter" }, + }); + }); + + it("should notify subscribers when a key doesn't match anymore", () => { + let lastUpdate: unknown; + const subscriber = vi.fn((update: QueryUpdate) => (lastUpdate = update)); + const result = subscribeQuery({ stash, query: [In(Position), In(Health)] }); + result.subscribe(subscriber); + + deleteRecord({ stash, table: Position, key: { player: `0x3` } }); + + expect(subscriber).toBeCalledTimes(1); + attest(lastUpdate).snap({ + records: { + namespace1: { + Position: { + "0x3": { + prev: { player: "0x3", x: 3, y: 2 }, + current: undefined, + }, + }, + }, + }, + keys: { "0x3": { player: "0x3" } }, + types: { "0x3": "exit" }, + }); + }); + + it("should notify initial subscribers with initial query result", () => { + let lastUpdate: unknown; + const subscriber = vi.fn((update: QueryUpdate) => (lastUpdate = update)); + subscribeQuery({ stash, query: [In(Position), In(Health)], options: { initialSubscribers: [subscriber] } }); + + expect(subscriber).toBeCalledTimes(1); + attest(lastUpdate).snap({ + keys: { + "0x3": { player: "0x3" }, + "0x4": { player: "0x4" }, + }, + records: { + namespace1: { + Position: { + "0x3": { prev: undefined, current: { player: "0x3", x: 3, y: 2 } }, + "0x4": { prev: undefined, current: { player: "0x4", x: 4, y: 1 } }, + }, + Health: { + "0x3": { prev: undefined, current: { player: "0x3", health: 3 } }, + "0x4": { prev: undefined, current: { player: "0x4", health: 4 } }, + }, + }, + }, + types: { "0x3": "enter", "0x4": "enter" }, + }); + }); +}); diff --git a/packages/stash/src/actions/subscribeQuery.ts b/packages/stash/src/actions/subscribeQuery.ts new file mode 100644 index 0000000000..a3ceb532a7 --- /dev/null +++ b/packages/stash/src/actions/subscribeQuery.ts @@ -0,0 +1,157 @@ +import { Table } from "@latticexyz/config"; +import { + TableUpdates, + Keys, + Unsubscribe, + Query, + Stash, + CommonQueryOptions, + CommonQueryResult, + StoreConfig, + getNamespaces, + getNamespaceTables, + getTableConfig, + getQueryConfig, +} from "../common"; +import { decodeKey } from "./decodeKey"; +import { getTable } from "./getTable"; +import { runQuery } from "./runQuery"; + +export type 
SubscribeQueryOptions = CommonQueryOptions & { + // Skip the initial `runQuery` to initialize the query result. + // Only updates after the query was defined are considered in the result. + skipInitialRun?: boolean; + initialSubscribers?: QuerySubscriber[]; +}; + +// TODO: is it feasible to type the table updates based on the query? +type QueryTableUpdates = { + [namespace in getNamespaces]: { + [table in getNamespaceTables]: TableUpdates>; + }; +}; + +export type QueryUpdate = { + records: QueryTableUpdates; + keys: Keys; + types: { [key: string]: "enter" | "update" | "exit" }; +}; + +type QuerySubscriber = (update: QueryUpdate) => void; + +export type SubscribeQueryArgs = { + stash: Stash; + query: query; + options?: SubscribeQueryOptions>; +}; + +export type SubscribeQueryResult = CommonQueryResult & { + /** + * Subscribe to query updates. + * Returns a function to unsubscribe the provided subscriber. + */ + subscribe: (subscriber: QuerySubscriber>) => Unsubscribe; + /** + * Unsubscribe the query from all table updates. + * Note: this is different from unsubscribing a query subscriber. + */ + unsubscribe: () => void; +}; + +export function subscribeQuery({ + stash, + query, + options, +}: SubscribeQueryArgs): SubscribeQueryResult { + const initialRun = options?.skipInitialRun + ? undefined + : runQuery({ + stash, + query, + options: { + // Pass the initial keys + initialKeys: options?.initialKeys, + // Request initial records if there are initial subscribers + includeRecords: options?.initialSubscribers && options.initialSubscribers.length > 0, + }, + }); + const matching: Keys = initialRun?.keys ?? {}; + const subscribers = new Set(options?.initialSubscribers as QuerySubscriber[]); + + const subscribe = (subscriber: QuerySubscriber>): Unsubscribe => { + subscribers.add(subscriber as QuerySubscriber); + return () => subscribers.delete(subscriber as QuerySubscriber); + }; + + const updateQueryResult = ({ namespaceLabel, label }: Table, tableUpdates: TableUpdates) => { + const update: QueryUpdate = { + records: { [namespaceLabel]: { [label]: tableUpdates } }, + keys: {}, + types: {}, + }; + + for (const key of Object.keys(tableUpdates)) { + if (key in matching) { + update.keys[key] = matching[key]; + // If the key matched before, check if the relevant fragments (accessing this table) still match + const relevantFragments = query.filter((f) => f.table.namespace === namespaceLabel && f.table.label === label); + const match = relevantFragments.every((f) => f.match(stash, key)); + if (match) { + // If all relevant fragments still match, the key still matches the query. + update.types[key] = "update"; + } else { + // If one of the relevant fragments don't match anymore, the key doesn't pass the query anymore. 
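+          // (the key transitions from matching to not matching, reported as an "exit" update below)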
+ delete matching[key]; + update.types[key] = "exit"; + } + } else { + // If this key didn't match the query before, check all fragments + const match = query.every((f) => f.match(stash, key)); + if (match) { + // Since the key schema of query fragments has to match, we can pick any fragment to decode they key + const decodedKey = decodeKey({ stash, table: query[0].table, encodedKey: key }); + matching[key] = decodedKey; + update.keys[key] = decodedKey; + update.types[key] = "enter"; + } + } + } + + // Notify subscribers + subscribers.forEach((subscriber) => subscriber(update)); + }; + + // Subscribe to each table's update stream and stash the unsubscribers + const unsubsribers = query.map((fragment) => + getTable({ stash, table: fragment.table }).subscribe({ + subscriber: (updates) => updateQueryResult(fragment.table, updates), + }), + ); + + const unsubscribe = () => unsubsribers.forEach((unsub) => unsub()); + + // TODO: find a more elegant way to do this + if (subscribers.size > 0 && initialRun?.records) { + // Convert records from the initial run to TableUpdate format + const records: QueryTableUpdates = {}; + for (const namespace of Object.keys(initialRun.records)) { + for (const table of Object.keys(initialRun.records[namespace])) { + records[namespace] ??= {}; + records[namespace][table] = Object.fromEntries( + Object.entries(initialRun.records[namespace][table]).map(([key, record]) => [ + key, + { prev: undefined, current: record }, + ]), + ) as never; + } + } + + // Convert keys to types format + const types = Object.fromEntries(Object.keys(matching).map((key) => [key, "enter" as const])); + + // Notify initial subscribers + subscribers.forEach((subscriber) => subscriber({ keys: matching, records, types })); + } + + return { keys: matching, subscribe, unsubscribe }; +} diff --git a/packages/stash/src/actions/subscribeStore.test.ts b/packages/stash/src/actions/subscribeStore.test.ts new file mode 100644 index 0000000000..11298f7562 --- /dev/null +++ b/packages/stash/src/actions/subscribeStore.test.ts @@ -0,0 +1,86 @@ +import { defineStore } from "@latticexyz/store"; +import { describe, expect, it, vi } from "vitest"; +import { createStash } from "../createStash"; +import { subscribeStore } from "./subscribeStore"; +import { setRecord } from "./setRecord"; + +describe("subscribeStore", () => { + it("should notify subscriber of any stash change", () => { + const config = defineStore({ + namespaces: { + namespace1: { + tables: { + table1: { + schema: { a: "address", b: "uint256", c: "uint32" }, + key: ["a"], + }, + }, + }, + namespace2: { + tables: { + table2: { + schema: { a: "address", b: "uint256", c: "uint32" }, + key: ["a"], + }, + }, + }, + }, + }); + + const stash = createStash(config); + const subscriber = vi.fn(); + + subscribeStore({ stash, subscriber }); + + setRecord({ stash, table: config.tables.namespace1__table1, key: { a: "0x00" }, record: { b: 1n, c: 2 } }); + + expect(subscriber).toHaveBeenCalledTimes(1); + expect(subscriber).toHaveBeenNthCalledWith(1, { + config: {}, + records: { + namespace1: { + table1: { + "0x00": { + prev: undefined, + current: { a: "0x00", b: 1n, c: 2 }, + }, + }, + }, + }, + }); + + setRecord({ stash, table: config.tables.namespace2__table2, key: { a: "0x01" }, record: { b: 1n, c: 2 } }); + + expect(subscriber).toHaveBeenCalledTimes(2); + expect(subscriber).toHaveBeenNthCalledWith(2, { + config: {}, + records: { + namespace2: { + table2: { + "0x01": { + prev: undefined, + current: { a: "0x01", b: 1n, c: 2 }, + }, + }, + }, + }, + }); + + 
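+    // Updating the same key again should report the previous record as `prev` in the next notification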
setRecord({ stash, table: config.tables.namespace2__table2, key: { a: "0x01" }, record: { b: 1n, c: 3 } }); + + expect(subscriber).toHaveBeenCalledTimes(3); + expect(subscriber).toHaveBeenNthCalledWith(3, { + config: {}, + records: { + namespace2: { + table2: { + "0x01": { + prev: { a: "0x01", b: 1n, c: 2 }, + current: { a: "0x01", b: 1n, c: 3 }, + }, + }, + }, + }, + }); + }); +}); diff --git a/packages/stash/src/actions/subscribeStore.ts b/packages/stash/src/actions/subscribeStore.ts new file mode 100644 index 0000000000..f0e6ed119f --- /dev/null +++ b/packages/stash/src/actions/subscribeStore.ts @@ -0,0 +1,16 @@ +import { Stash, StoreConfig, StoreUpdatesSubscriber, Unsubscribe } from "../common"; + +export type SubscribeStoreArgs = { + stash: Stash; + subscriber: StoreUpdatesSubscriber; +}; + +export type SubscribeStoreResult = Unsubscribe; + +export function subscribeStore({ + stash, + subscriber, +}: SubscribeStoreArgs): SubscribeStoreResult { + stash._.storeSubscribers.add(subscriber as StoreUpdatesSubscriber); + return () => stash._.storeSubscribers.delete(subscriber as StoreUpdatesSubscriber); +} diff --git a/packages/stash/src/actions/subscribeTable.test.ts b/packages/stash/src/actions/subscribeTable.test.ts new file mode 100644 index 0000000000..72840b117a --- /dev/null +++ b/packages/stash/src/actions/subscribeTable.test.ts @@ -0,0 +1,61 @@ +import { defineStore } from "@latticexyz/store"; +import { describe, expect, it, vi } from "vitest"; +import { createStash } from "../createStash"; +import { subscribeTable } from "./subscribeTable"; +import { setRecord } from "./setRecord"; + +describe("subscribeTable", () => { + it("should notify subscriber of table change", () => { + const config = defineStore({ + namespaces: { + namespace1: { + tables: { + table1: { + schema: { a: "address", b: "uint256", c: "uint32" }, + key: ["a"], + }, + }, + }, + namespace2: { + tables: { + table2: { + schema: { a: "address", b: "uint256", c: "uint32" }, + key: ["a"], + }, + }, + }, + }, + }); + + const table1 = config.namespaces.namespace1.tables.table1; + const table2 = config.namespaces.namespace2.tables.table2; + const stash = createStash(config); + const subscriber = vi.fn(); + + subscribeTable({ stash, table: table1, subscriber }); + + setRecord({ stash, table: table1, key: { a: "0x00" }, record: { b: 1n, c: 2 } }); + + expect(subscriber).toHaveBeenCalledTimes(1); + expect(subscriber).toHaveBeenNthCalledWith(1, { + "0x00": { + prev: undefined, + current: { a: "0x00", b: 1n, c: 2 }, + }, + }); + + // Expect unrelated updates to not notify subscribers + setRecord({ stash, table: table2, key: { a: "0x01" }, record: { b: 1n, c: 2 } }); + expect(subscriber).toHaveBeenCalledTimes(1); + + setRecord({ stash, table: table1, key: { a: "0x00" }, record: { b: 1n, c: 3 } }); + + expect(subscriber).toHaveBeenCalledTimes(2); + expect(subscriber).toHaveBeenNthCalledWith(2, { + "0x00": { + prev: { a: "0x00", b: 1n, c: 2 }, + current: { a: "0x00", b: 1n, c: 3 }, + }, + }); + }); +}); diff --git a/packages/stash/src/actions/subscribeTable.ts b/packages/stash/src/actions/subscribeTable.ts new file mode 100644 index 0000000000..f129ecea1a --- /dev/null +++ b/packages/stash/src/actions/subscribeTable.ts @@ -0,0 +1,21 @@ +import { Table } from "@latticexyz/config"; +import { Stash, TableUpdatesSubscriber, Unsubscribe } from "../common"; + +export type SubscribeTableArgs
<table extends Table = Table> = { + stash: Stash; + table: table; + subscriber: TableUpdatesSubscriber<table>
; +}; + +export type SubscribeTableResult = Unsubscribe; + +export function subscribeTable<table extends Table>
({ + stash, + table, + subscriber, +}: SubscribeTableArgs<table>
): SubscribeTableResult { + const { namespaceLabel, label } = table; + + stash._.tableSubscribers[namespaceLabel][label].add(subscriber); + return () => stash._.tableSubscribers[namespaceLabel][label].delete(subscriber); +} diff --git a/packages/stash/src/apiEquality.test.ts b/packages/stash/src/apiEquality.test.ts new file mode 100644 index 0000000000..b63df25ba9 --- /dev/null +++ b/packages/stash/src/apiEquality.test.ts @@ -0,0 +1,47 @@ +import { describe, expect, it } from "vitest"; +import { createStash } from "./createStash"; +import { attest } from "@ark/attest"; +import { BoundTable } from "./actions/getTable"; +import { DefaultActions } from "./decorators/default"; +import { defineTable } from "@latticexyz/store/config/v2"; + +describe("stash actions, bound table", () => { + const stash = createStash(); + const Position = stash.registerTable({ + table: defineTable({ + label: "Position", + schema: { player: "address", x: "uint32", y: "uint32" }, + key: ["player"], + }), + }); + + it("should expose the same functionality", () => { + const excludedStoreKeys = [ + "registerTable", + "getTable", + "getTables", + "runQuery", + "subscribeQuery", + "subscribeStore", + "subscribeTable", // renamed to subscribe in table API + "_", + "get", + ] as const; + + const excludedTableKeys = [ + "subscribe", // renamed from subscribeTable in stash API + ] as const; + + attest< + keyof Omit, + keyof Omit + >(); + attest< + keyof Omit, + keyof Omit + >(); + expect(Object.keys(Position).filter((key) => !excludedTableKeys.includes(key as never))).toEqual( + Object.keys(stash).filter((key) => !excludedStoreKeys.includes(key as never)), + ); + }); +}); diff --git a/packages/stash/src/bench.ts b/packages/stash/src/bench.ts new file mode 100644 index 0000000000..9238fe0125 --- /dev/null +++ b/packages/stash/src/bench.ts @@ -0,0 +1,64 @@ +import { bench } from "@ark/attest"; +import { defineStore } from "@latticexyz/store"; +import { createStash } from "./createStash"; +import { In } from "./queryFragments"; + +const config = defineStore({ + tables: { + Position: { + schema: { player: "address", x: "int32", y: "int32" }, + key: ["player"], + }, + }, +}); +const stash = createStash(config); + +bench("createStash", () => { + createStash( + defineStore({ + tables: { + Position: { + schema: { player: "address", x: "int32", y: "int32" }, + key: ["player"], + }, + }, + }), + ); +}).types([1690, "instantiations"]); + +bench("boundTable", () => { + const table = stash.getTable({ table: config.tables.Position }); + table.getRecord({ key: { player: "0x" } }); +}).types([108, "instantiations"]); + +bench("runQuery", () => { + const { Position } = config.tables; + stash.runQuery({ query: [In(Position)] }); +}).types([95, "instantiations"]); + +const filledStore = createStash(config); +const numItems = 10_000; +for (let i = 0; i < numItems; i++) { + filledStore.setRecord({ + table: config.tables.Position, + key: { player: `0x${i}` }, + record: { x: i, y: i }, + }); +} +bench("setRecord", () => { + filledStore.setRecord({ + table: config.tables.Position, + key: { player: `0x0` }, + record: { x: 1, y: 1 }, + }); +}).mark({ mean: [1.2, "us"], median: [1, "us"] }); + +bench("10x setRecord", () => { + for (let i = 0; i < 10; i++) { + filledStore.setRecord({ + table: config.tables.Position, + key: { player: `0x${i}` }, + record: { x: i + 1, y: i + 1 }, + }); + } +}).mark({ mean: [13, "us"], median: [12, "us"] }); diff --git a/packages/stash/src/boundTable.test.ts b/packages/stash/src/boundTable.test.ts new file mode 100644 
index 0000000000..ee70d60d29 --- /dev/null +++ b/packages/stash/src/boundTable.test.ts @@ -0,0 +1,51 @@ +import { describe, beforeEach, it } from "vitest"; +import { attest } from "@arktype/attest"; +import { createStash } from "./createStash"; +import { BoundTable } from "./actions/getTable"; +import { Stash } from "./common"; +import { DefaultActions } from "./decorators/default"; +import { defineTable } from "@latticexyz/store/config/v2"; + +describe("BoundTable", () => { + const tableConfig = defineTable({ + label: "table1", + namespace: "namespace1", + schema: { field1: "uint32", field2: "address" }, + key: ["field1"], + }); + let table: BoundTable; + + let stash: Stash & DefaultActions; + + beforeEach(() => { + stash = createStash(); + table = stash.registerTable({ table: tableConfig }); + }); + + describe("setRecord", () => { + it("should set a record in the table", () => { + table.setRecord({ key: { field1: 1 }, record: { field2: "0x00" } }); + attest(stash.get().records).snap({ namespace1: { table1: { "1": { field1: 1, field2: "0x00" } } } }); + }); + + it("should throw a type error if the key or record type doesn't match", () => { + attest(() => + table.setRecord({ + key: { field1: 1 }, + // @ts-expect-error Type '"world"' is not assignable to type '`0x${string}`' + record: { field2: "world" }, + }), + ).type.errors("Type '\"world\"' is not assignable to type '`0x${string}`'"); + }); + }); + + describe("getRecord", () => { + it("should get a record from the table", () => { + table.setRecord({ key: { field1: 2 }, record: { field2: "0x01" } }); + attest(table.getRecord({ key: { field1: 2 } })).snap({ field1: 2, field2: "0x01" }); + }); + }); + + describe.todo("getRecords"); + describe.todo("getKeys"); +}); diff --git a/packages/stash/src/common.ts b/packages/stash/src/common.ts new file mode 100644 index 0000000000..048d0a822d --- /dev/null +++ b/packages/stash/src/common.ts @@ -0,0 +1,167 @@ +import { QueryFragment } from "./queryFragments"; +import { Table } from "@latticexyz/config"; +import { getKeySchema, getSchemaPrimitives } from "@latticexyz/protocol-parser/internal"; + +export type StoreConfig = { + namespaces: { + [namespaceLabel: string]: { + tables: { + [tableLabel: string]: Table; + }; + }; + }; +}; + +export type getNamespaces = keyof config["namespaces"]; + +export type getNamespaceTables< + config extends StoreConfig, + namespace extends keyof config["namespaces"], +> = keyof config["namespaces"][namespace]["tables"]; + +export type getTableConfig< + config extends StoreConfig, + namespace extends keyof config["namespaces"] | undefined, + table extends keyof config["namespaces"][namespace extends undefined ? "" : namespace]["tables"], +> = Omit; + +/** + * A Key is the unique identifier for a row in the table. + */ +export type Key
<table extends Table = Table> = getSchemaPrimitives<getKeySchema<table>>; + +/** + * A map from encoded key to decoded key + */ +export type Keys
<table extends Table = Table> = { [encodedKey: string]: Key<table>
}; + +export type CommonQueryResult = { + /** + * Readonly map of the currently matching keys (may be mutated internally as the query result updates). + */ + keys: Readonly<Keys>; +}; + +export type CommonQueryOptions = { + initialKeys?: Keys; +}; + +export type Query = readonly [QueryFragment, ...QueryFragment[]]; + +type OmitNeverKeys<T> = { [key in keyof T as T[key] extends never ? never : key]: T[key] }; + +export type getQueryConfig<query extends Query> = { + namespaces: { + [namespaceLabel in query[number]["table"]["namespaceLabel"]]: { + tables: OmitNeverKeys<{ + [label in query[number]["table"]["label"]]: query[number]["table"] & { + namespaceLabel: namespaceLabel; + label: label; + }; + }>; + }; + }; +}; + +export type Unsubscribe = () => void; + +/** + * A TableRecord is one row of the table. It includes both the key and the value. + */ +export type TableRecord
<table extends Table = Table> = getSchemaPrimitives<table["schema"]>; + +export type TableLabel<config extends StoreConfig = StoreConfig, namespace extends getNamespaces<config> = string> = { + label: getNamespaceTables<config, namespace>; + namespace?: namespace; +}; + +export type TableRecords
<table extends Table = Table> = { readonly [key: string]: TableRecord<table>
}; + +export type MutableTableRecords<table extends Table = Table>
= { [key: string]: TableRecord<table>
}; + +export type StoreRecords = { + readonly [namespace in getNamespaces]: { + readonly [table in getNamespaceTables]: TableRecords>; + }; +}; + +export type MutableStoreRecords = { + -readonly [namespace in getNamespaces]: { + -readonly [table in getNamespaceTables]: MutableTableRecords< + getTableConfig + >; + }; +}; + +export type State = { + readonly config: { + readonly [namespace in getNamespaces]: { + readonly [table in getNamespaceTables]: getTableConfig; + }; + }; + readonly records: StoreRecords; +}; + +export type MutableState = { + config: { + -readonly [namespace in getNamespaces]: { + -readonly [table in getNamespaceTables]: getTableConfig; + }; + }; + records: MutableStoreRecords; +}; + +export type TableUpdate
<table extends Table = Table> = { + prev: TableRecord<table>
| undefined; + current: TableRecord<table>
| undefined; +}; + +export type TableUpdates<table extends Table = Table>
= { [key: string]: TableUpdate<table>
}; + +export type TableUpdatesSubscriber<table extends Table = Table>
= (updates: TableUpdates<table>
) => void; + +export type TableSubscribers = { + [namespace: string]: { + [table: string]: Set; + }; +}; + +export type ConfigUpdate = { prev: Table | undefined; current: Table }; + +export type StoreUpdates = { + config: { + [namespace: string]: { + [table: string]: ConfigUpdate; + }; + }; + records: { + [namespace in getNamespaces]: { + [table in getNamespaceTables]: TableUpdates>; + }; + } & { + [namespace: string]: { + [table: string]: TableUpdates; + }; + }; +}; + +export type StoreUpdatesSubscriber = (updates: StoreUpdates) => void; + +export type StoreSubscribers = Set>; + +export type Stash = { + /** + * Get a readonly reference to the current state + */ + get: () => State; + /** + * Internal references for interacting with the state. + * @internal + * @deprecated Do not use this internal reference externally. + */ + _: { + tableSubscribers: TableSubscribers; + storeSubscribers: StoreSubscribers; + state: MutableState; + }; +}; diff --git a/packages/stash/src/createStash.test.ts b/packages/stash/src/createStash.test.ts new file mode 100644 index 0000000000..6486ea44ec --- /dev/null +++ b/packages/stash/src/createStash.test.ts @@ -0,0 +1,413 @@ +import { describe, expect, it, vi } from "vitest"; +import { attest } from "@arktype/attest"; +import { CreateStoreResult, createStash } from "./createStash"; +import { defineStore, defineTable } from "@latticexyz/store/config/v2"; +import { Hex } from "viem"; + +describe("createStash", () => { + it("should initialize the stash", () => { + const tablesConfig = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + }, + key: ["field2"], + }, + }, + }); + const stash = createStash(tablesConfig); + + attest(stash.get().config).snap({ + namespace1: { + table1: { + label: "table1", + type: "table", + namespace: "namespace1", + namespaceLabel: "namespace1", + name: "table1", + tableId: "0x74626e616d65737061636531000000007461626c653100000000000000000000", + schema: { + field1: { type: "string", internalType: "string" }, + field2: { type: "uint32", internalType: "uint32" }, + }, + key: ["field2"], + }, + }, + }); + attest(stash.get().records).snap({ namespace1: { table1: {} } }); + }); + + it("should be typed with the config tables", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + }, + key: ["field2"], + }, + }, + }); + const stash = createStash(config); + stash.setRecord({ + table: config.namespaces.namespace1.tables.table1, + key: { field2: 1 }, + record: { field1: "hello" }, + }); + + attest>(stash); + attest<{ + config: { + namespace1: { + table1: { + label: "table1"; + type: "table"; + namespace: string; + namespaceLabel: "namespace1"; + name: string; + tableId: Hex; + schema: { + field1: { type: "string"; internalType: "string" }; + field2: { type: "uint32"; internalType: "uint32" }; + }; + key: readonly ["field2"]; + }; + }; + }; + records: { + namespace1: { + table1: { + [key: string]: { + field1: string; + field2: number; + }; + }; + }; + }; + }>(stash.get()); + }); + + describe("subscribeTable", () => { + it("should notify listeners on table updates", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + const stash = createStash(config); + + const listener = 
vi.fn(); + + stash.subscribeTable({ + table, + subscriber: listener, + }); + + stash.setRecord({ + table, + key: { field2: 1, field3: 2 }, + record: { field1: "hello" }, + }); + + expect(listener).toHaveBeenNthCalledWith(1, { + "1|2": { + prev: undefined, + current: { field1: "hello", field2: 1, field3: 2 }, + }, + }); + + stash.setRecord({ + table, + key: { field2: 1, field3: 2 }, + record: { field1: "world" }, + }); + + expect(listener).toHaveBeenNthCalledWith(2, { + "1|2": { + prev: { field1: "hello", field2: 1, field3: 2 }, + current: { field1: "world", field2: 1, field3: 2 }, + }, + }); + + stash.deleteRecord({ + table, + key: { field2: 1, field3: 2 }, + }); + + expect(listener).toHaveBeenNthCalledWith(3, { + "1|2": { + prev: { field1: "world", field2: 1, field3: 2 }, + current: undefined, + }, + }); + }); + + it("should not notify listeners after they have been removed", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + const stash = createStash(config); + + const subscriber = vi.fn(); + + const unsubscribe = stash.subscribeTable({ + table, + subscriber, + }); + + stash.setRecord({ + table, + key: { field2: 1, field3: 2 }, + record: { field1: "hello" }, + }); + + expect(subscriber).toHaveBeenNthCalledWith(1, { + "1|2": { + prev: undefined, + current: { field1: "hello", field2: 1, field3: 2 }, + }, + }); + + unsubscribe(); + + stash.setRecord({ + table, + key: { field2: 1, field3: 2 }, + record: { field1: "world" }, + }); + + expect(subscriber).toBeCalledTimes(1); + }); + }); + + describe("subscribeStore", () => { + it("should notify listeners on stash updates", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + const stash = createStash(config); + + const subscriber = vi.fn(); + + stash.subscribeStore({ subscriber }); + + stash.setRecord({ + table, + key: { field2: 1, field3: 2 }, + record: { field1: "hello" }, + }); + + expect(subscriber).toHaveBeenNthCalledWith(1, { + config: {}, + records: { + namespace1: { + table1: { + "1|2": { + prev: undefined, + current: { field1: "hello", field2: 1, field3: 2 }, + }, + }, + }, + }, + }); + + stash.setRecord({ + table, + key: { field2: 1, field3: 2 }, + record: { field1: "world" }, + }); + + expect(subscriber).toHaveBeenNthCalledWith(2, { + config: {}, + records: { + namespace1: { + table1: { + "1|2": { + prev: { field1: "hello", field2: 1, field3: 2 }, + current: { field1: "world", field2: 1, field3: 2 }, + }, + }, + }, + }, + }); + + stash.deleteRecord({ + table, + key: { field2: 1, field3: 2 }, + }); + + expect(subscriber).toHaveBeenNthCalledWith(3, { + config: {}, + records: { + namespace1: { + table1: { + "1|2": { + prev: { field1: "world", field2: 1, field3: 2 }, + current: undefined, + }, + }, + }, + }, + }); + + stash.registerTable({ + table: defineTable({ + namespace: "namespace2", + label: "table2", + schema: { field1: "uint256", value: "uint256" }, + key: ["field1"], + }), + }); + + expect(subscriber).toHaveBeenNthCalledWith(4, { + config: { + namespace2: { + table2: { + current: { + key: ["field1"], + label: "table2", + name: "table2", + namespace: "namespace2", + namespaceLabel: 
"namespace2", + schema: { + field1: { + internalType: "uint256", + type: "uint256", + }, + value: { + internalType: "uint256", + type: "uint256", + }, + }, + tableId: "0x74626e616d65737061636532000000007461626c653200000000000000000000", + type: "table", + }, + prev: undefined, + }, + }, + }, + records: {}, + }); + }); + + it("should not notify listeners after they have been removed", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + const stash = createStash(config); + + const subscriber = vi.fn(); + + const unsubscribe = stash.subscribeStore({ + subscriber, + }); + + stash.setRecord({ + table, + key: { field2: 1, field3: 2 }, + record: { field1: "hello" }, + }); + + expect(subscriber).toHaveBeenNthCalledWith(1, { + config: {}, + records: { + namespace1: { + table1: { + "1|2": { + prev: undefined, + current: { field1: "hello", field2: 1, field3: 2 }, + }, + }, + }, + }, + }); + + unsubscribe(); + + stash.setRecord({ + table, + key: { field2: 1, field3: 2 }, + record: { field1: "world" }, + }); + + expect(subscriber).toBeCalledTimes(1); + }); + }); + + describe("getTables", () => { + it("should return an object of bound tables in the stash", () => { + const stash = createStash(); + stash.registerTable({ + table: defineTable({ + label: "table1", + namespace: "namespace1", + schema: { field1: "uint32", field2: "address" }, + key: ["field1"], + }), + }); + stash.registerTable({ + table: defineTable({ + label: "table2", + namespace: "namespace2", + schema: { field1: "uint32", field2: "address" }, + key: ["field1"], + }), + }); + const tables = stash.getTables(); + + expect(tables.namespace1.table1).toBeDefined(); + expect(tables.namespace2.table2).toBeDefined(); + }); + }); +}); diff --git a/packages/stash/src/createStash.ts b/packages/stash/src/createStash.ts new file mode 100644 index 0000000000..75bd82325d --- /dev/null +++ b/packages/stash/src/createStash.ts @@ -0,0 +1,55 @@ +import { MutableState, Stash, StoreConfig, StoreSubscribers, TableSubscribers } from "./common"; +import { DefaultActions, defaultActions } from "./decorators/default"; +import { extend } from "./actions/extend"; +import { Table } from "@latticexyz/store/config/v2"; + +export type Config = StoreConfig; + +export type CreateStoreResult = Stash & DefaultActions; + +/** + * Initializes a Zustand stash based on the provided table configs. 
+ */ +export function createStash(storeConfig?: config): CreateStoreResult { + const tableSubscribers: TableSubscribers = {}; + const storeSubscribers: StoreSubscribers = new Set(); + + const state: MutableState = { + config: {}, + records: {}, + }; + + // Initialize the stash + if (storeConfig) { + for (const [namespace, { tables }] of Object.entries(storeConfig.namespaces)) { + for (const [table, fullTableConfig] of Object.entries(tables)) { + // Remove unused artifacts from the stash config + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const { deploy, codegen, ...tableConfig } = { ...(fullTableConfig as Table) }; + + // Set config for tables + state.config[namespace] ??= {}; + state.config[namespace][table] = tableConfig; + + // Init records map for tables + state.records[namespace] ??= {}; + state.records[namespace][table] = {}; + + // Init subscribers set for tables + tableSubscribers[namespace] ??= {}; + tableSubscribers[namespace][table] ??= new Set(); + } + } + } + + const stash = { + get: () => state, + _: { + state, + tableSubscribers, + storeSubscribers, + }, + } satisfies Stash; + + return extend({ stash, actions: defaultActions(stash) }) as never; +} diff --git a/packages/stash/src/decorators/default.test.ts b/packages/stash/src/decorators/default.test.ts new file mode 100644 index 0000000000..d9c461a80c --- /dev/null +++ b/packages/stash/src/decorators/default.test.ts @@ -0,0 +1,557 @@ +import { attest } from "@ark/attest"; +import { defineStore } from "@latticexyz/store"; +import { describe, expect, it, vi } from "vitest"; +import { createStash } from "../createStash"; +import { defineTable } from "@latticexyz/store/config/v2"; +import { In } from "../queryFragments"; +import { Hex } from "viem"; +import { runQuery } from "../actions"; +import { StoreRecords, getQueryConfig } from "../common"; + +describe("stash with default actions", () => { + describe("decodeKey", () => { + it("should decode an encoded table key", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { field1: "string", field2: "uint32", field3: "uint256" }, + key: ["field2", "field3"], + }, + }, + }); + const stash = createStash(config); + const table = config.namespaces.namespace1.tables.table1; + const key = { field2: 1, field3: 2n }; + stash.setRecord({ table, key, record: { field1: "hello" } }); + + const encodedKey = stash.encodeKey({ table, key }); + attest(stash.decodeKey({ table, encodedKey })).equals({ field2: 1, field3: 2n }); + }); + }); + + describe("deleteRecord", () => { + it("should throw a type error if an invalid key is provided", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + + const stash = createStash(config); + + attest(() => + stash.deleteRecord({ + table, + // @ts-expect-error Property 'field3' is missing in type '{ field2: number; }' + key: { field2: 1 }, + }), + ).type.errors(`Property 'field3' is missing in type '{ field2: number; }'`); + + attest(() => + stash.deleteRecord({ + table, + // @ts-expect-error Type 'string' is not assignable to type 'number' + key: { field2: 1, field3: "invalid" }, + }), + ).type.errors(`Type 'string' is not assignable to type 'number'`); + }); + }); + + describe("encodeKey", () => { + it("should throw a type error if an invalid key is provided", () => { + const 
config = defineStore({ + tables: { + test: { + schema: { field1: "uint32", field2: "uint256", field3: "string" }, + key: ["field1", "field2"], + }, + }, + }); + + const stash = createStash(config); + const table = config.tables.test; + + attest(() => + stash.encodeKey({ + table, + // @ts-expect-error Property 'field2' is missing in type '{ field1: number; }' + key: { + field1: 1, + }, + }), + ) + .throws(`Provided key is missing field field2.`) + .type.errors(`Property 'field2' is missing in type '{ field1: number; }'`); + + attest( + stash.encodeKey({ + table, + key: { + field1: 1, + // @ts-expect-error Type 'string' is not assignable to type 'bigint'. + field2: "invalid", + }, + }), + ).type.errors(`Type 'string' is not assignable to type 'bigint'.`); + }); + }); + + describe("getConfig", () => { + it("should return the config of the given table", () => { + const { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + codegen: _, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + deploy: __, + ...table + } = defineTable({ + namespace: "namespace", + label: "test", + schema: { field1: "address", field2: "string" }, + key: ["field1"], + }); + + const stash = createStash(); + stash.registerTable({ table }); + + attest(stash.getConfig({ table: { label: "test", namespaceLabel: "namespace" } })).equals(table); + }); + }); + + describe("getKeys", () => { + it("should return the key map of a table", () => { + const config = defineStore({ + tables: { + test: { + schema: { + player: "int32", + match: "int32", + x: "int32", + y: "int32", + }, + key: ["player", "match"], + }, + }, + }); + const table = config.tables.test; + const stash = createStash(config); + + stash.setRecord({ table, key: { player: 1, match: 2 }, record: { x: 3, y: 4 } }); + stash.setRecord({ table, key: { player: 5, match: 6 }, record: { x: 7, y: 8 } }); + + attest<{ [encodedKey: string]: { player: number; match: number } }>(stash.getKeys({ table })).snap({ + "1|2": { player: 1, match: 2 }, + "5|6": { player: 5, match: 6 }, + }); + }); + }); + + describe("getRecord", () => { + it("should get a record by key from the table", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + + const stash = createStash(config); + + stash.setRecord({ + table, + key: { field2: 2, field3: 1 }, + record: { field1: "world" }, + }); + + attest<{ field1: string; field2: number; field3: number }>( + stash.getRecord({ + table, + key: { field2: 2, field3: 1 }, + }), + ).snap({ field1: "world", field2: 2, field3: 1 }); + }); + + it("should throw a type error if the key type doesn't match", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + + const stash = createStash(config); + + attest(() => + stash.getRecord({ + table, + // @ts-expect-error Property 'field3' is missing in type '{ field2: number; }' + key: { field2: 1 }, + }), + ).type.errors(`Property 'field3' is missing in type '{ field2: number; }'`); + + attest(() => + stash.getRecord({ + table, + // @ts-expect-error Type 'string' is not assignable to type 'number' + key: { field2: 1, field3: "invalid" }, + }), + ).type.errors(`Type 
'string' is not assignable to type 'number'`); + }); + }); + + describe("getRecords", () => { + it("should get all records from a table", () => { + const config = defineStore({ + tables: { + test: { + schema: { + player: "int32", + match: "int32", + x: "uint256", + y: "uint256", + }, + key: ["player", "match"], + }, + }, + }); + const table = config.tables.test; + const stash = createStash(config); + + stash.setRecord({ table, key: { player: 1, match: 2 }, record: { x: 3n, y: 4n } }); + stash.setRecord({ table, key: { player: 5, match: 6 }, record: { x: 7n, y: 8n } }); + + attest<{ [encodedKey: string]: { player: number; match: number; x: bigint; y: bigint } }>( + stash.getRecords({ table }), + ).equals({ + "1|2": { player: 1, match: 2, x: 3n, y: 4n }, + "5|6": { player: 5, match: 6, x: 7n, y: 8n }, + }); + + attest<{ [encodedKey: string]: { player: number; match: number; x: bigint; y: bigint } }>( + stash.getRecords({ table, keys: [{ player: 1, match: 2 }] }), + ).equals({ + "1|2": { player: 1, match: 2, x: 3n, y: 4n }, + }); + }); + }); + + describe("getTables", () => { + it("should return bound tables for each registered table in the stash", () => { + const config = defineStore({ + namespaces: { + namespace1: { + tables: { + table1: { + schema: { a: "address", b: "uint256", c: "uint32" }, + key: ["a"], + }, + }, + }, + namespace2: { + tables: { + table2: { + schema: { a: "address", b: "uint256", c: "uint32" }, + key: ["a"], + }, + }, + }, + }, + }); + const stash = createStash(config); + const tables = stash.getTables(); + + attest<"namespace1" | "namespace2", keyof typeof tables>(); + + attest<"table2", keyof typeof tables.namespace2>(); + + attest(tables).snap({ + namespace1: { + table1: { + decodeKey: "Function(decodeKey)", + deleteRecord: "Function(deleteRecord)", + encodeKey: "Function(encodeKey)", + getConfig: "Function(getConfig)", + getKeys: "Function(getKeys)", + getRecord: "Function(getRecord)", + getRecords: "Function(getRecords)", + setRecord: "Function(setRecord)", + setRecords: "Function(setRecords)", + subscribe: "Function(subscribe)", + }, + }, + namespace2: { + table2: { + decodeKey: "Function(decodeKey1)", + deleteRecord: "Function(deleteRecord1)", + encodeKey: "Function(encodeKey1)", + getConfig: "Function(getConfig1)", + getKeys: "Function(getKeys1)", + getRecord: "Function(getRecord1)", + getRecords: "Function(getRecords1)", + setRecord: "Function(setRecord1)", + setRecords: "Function(setRecords1)", + subscribe: "Function(subscribe1)", + }, + }, + }); + }); + }); + + describe("runQuery", () => { + const config = defineStore({ + namespaces: { + namespace1: { + tables: { + Position: { + schema: { player: "bytes32", x: "int32", y: "int32" }, + key: ["player"], + }, + }, + }, + namespace2: { + tables: { + Health: { + schema: { player: "bytes32", health: "uint32" }, + key: ["player"], + }, + }, + }, + }, + }); + const stash = createStash(config); + const { Position } = config.namespaces.namespace1.tables; + const { Health } = config.namespaces.namespace2.tables; + + it("should include `records` only if the `includeRecords` option is provided", () => { + const query = [In(Position)] as const; + const resultWithoutRecords = stash.runQuery({ query }); + attest(); + + const resultWithRecords = stash.runQuery({ query, options: { includeRecords: true } }); + attest>, (typeof resultWithRecords)["records"]>(); + }); + + it("should type the `records` in the result based on tables in the query", () => { + const result = runQuery({ stash, query: [In(Position), In(Health)], options: 
{ includeRecords: true } }); + + attest<"namespace1" | "namespace2", keyof (typeof result)["records"]>(); + attest<"Position", keyof (typeof result)["records"]["namespace1"]>(); + attest<"Health", keyof (typeof result)["records"]["namespace2"]>(); + attest<{ player: Hex; x: number; y: number }, (typeof result)["records"]["namespace1"]["Position"][string]>(); + attest<{ player: Hex; health: number }, (typeof result)["records"]["namespace2"]["Health"][string]>(); + }); + }); + + describe("setRecord", () => { + it("should show a type warning if an invalid table, key or record is used", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + const stash = createStash(config); + + attest(() => + stash.setRecord({ + table, + // @ts-expect-error Property 'field2' is missing in type '{ field3: number; }' + key: { field3: 2 }, + record: { field1: "" }, + }), + ) + .throws("Provided key is missing field field2.") + .type.errors(`Property 'field2' is missing in type '{ field3: number; }`); + + attest(() => + stash.setRecord({ + table, + // @ts-expect-error Type 'string' is not assignable to type 'number'. + key: { field2: 1, field3: "invalid" }, + record: { field1: "" }, + }), + ).type.errors(`Type 'string' is not assignable to type 'number'.`); + + attest(() => + stash.setRecord({ + table, + key: { field2: 1, field3: 2 }, + // @ts-expect-error Type 'number' is not assignable to type 'string'. + record: { field1: 1 }, + }), + ).type.errors(`Type 'number' is not assignable to type 'string'.`); + }); + }); + + describe("setRecords", () => { + it("should show a type warning if an invalid table, key or record is used", () => { + const config = defineStore({ + namespace: "namespace1", + tables: { + table1: { + schema: { + field1: "string", + field2: "uint32", + field3: "int32", + }, + key: ["field2", "field3"], + }, + }, + }); + + const table = config.namespaces.namespace1.tables.table1; + const stash = createStash(config); + + attest(() => + stash.setRecords({ + table, + // @ts-expect-error Type '{ field1: string; }' is missing the following properties from type + records: [{ field1: "" }], + }), + ) + .throws("Provided key is missing field field2.") + .type.errors(`Type '{ field1: string; }' is missing the following properties from type`); + + attest(() => + stash.setRecords({ + table, + // @ts-expect-error Type 'number' is not assignable to type 'string'. 
+ records: [{ field1: 1, field2: 1, field3: 2 }], + }), + ).type.errors(`Type 'number' is not assignable to type 'string'.`); + }); + }); + + describe("subscribeStore", () => { + it("should notify subscriber of any stash change", () => { + const config = defineStore({ + namespaces: { + namespace1: { + tables: { + table1: { + schema: { a: "address", b: "uint256", c: "uint32" }, + key: ["a"], + }, + }, + }, + }, + }); + + const stash = createStash(config); + const subscriber = vi.fn(); + + stash.subscribeStore({ subscriber }); + + stash.setRecord({ table: config.tables.namespace1__table1, key: { a: "0x00" }, record: { b: 1n, c: 2 } }); + + expect(subscriber).toHaveBeenCalledTimes(1); + expect(subscriber).toHaveBeenNthCalledWith(1, { + config: {}, + records: { + namespace1: { + table1: { + "0x00": { + prev: undefined, + current: { a: "0x00", b: 1n, c: 2 }, + }, + }, + }, + }, + }); + }); + }); + + describe("subscribeTable", () => { + it("should notify subscriber of table change", () => { + const config = defineStore({ + namespaces: { + namespace1: { + tables: { + table1: { + schema: { a: "address", b: "uint256", c: "uint32" }, + key: ["a"], + }, + }, + }, + namespace2: { + tables: { + table2: { + schema: { a: "address", b: "uint256", c: "uint32" }, + key: ["a"], + }, + }, + }, + }, + }); + + const table1 = config.namespaces.namespace1.tables.table1; + const table2 = config.namespaces.namespace2.tables.table2; + const stash = createStash(config); + const subscriber = vi.fn(); + + stash.subscribeTable({ table: table1, subscriber }); + + stash.setRecord({ table: table1, key: { a: "0x00" }, record: { b: 1n, c: 2 } }); + + expect(subscriber).toHaveBeenCalledTimes(1); + expect(subscriber).toHaveBeenNthCalledWith(1, { + "0x00": { + prev: undefined, + current: { a: "0x00", b: 1n, c: 2 }, + }, + }); + + // Expect unrelated updates to not notify subscribers + stash.setRecord({ table: table2, key: { a: "0x01" }, record: { b: 1n, c: 2 } }); + expect(subscriber).toHaveBeenCalledTimes(1); + + stash.setRecord({ table: table1, key: { a: "0x00" }, record: { b: 1n, c: 3 } }); + + expect(subscriber).toHaveBeenCalledTimes(2); + expect(subscriber).toHaveBeenNthCalledWith(2, { + "0x00": { + prev: { a: "0x00", b: 1n, c: 2 }, + current: { a: "0x00", b: 1n, c: 3 }, + }, + }); + }); + }); +}); diff --git a/packages/stash/src/decorators/default.ts b/packages/stash/src/decorators/default.ts new file mode 100644 index 0000000000..e7fa7b2245 --- /dev/null +++ b/packages/stash/src/decorators/default.ts @@ -0,0 +1,86 @@ +import { Query, Stash, StoreConfig } from "../common"; +import { DecodeKeyArgs, DecodeKeyResult, decodeKey } from "../actions/decodeKey"; +import { DeleteRecordArgs, DeleteRecordResult, deleteRecord } from "../actions/deleteRecord"; +import { EncodeKeyArgs, EncodeKeyResult, encodeKey } from "../actions/encodeKey"; +import { GetConfigArgs, GetConfigResult, getConfig } from "../actions/getConfig"; +import { GetKeysArgs, GetKeysResult, getKeys } from "../actions/getKeys"; +import { GetRecordArgs, GetRecordResult, getRecord } from "../actions/getRecord"; +import { GetRecordsArgs, GetRecordsResult, getRecords } from "../actions/getRecords"; +import { GetTableArgs, GetTableResult, getTable } from "../actions/getTable"; +import { GetTablesResult, getTables } from "../actions/getTables"; +import { RegisterTableArgs, RegisterTableResult, registerTable } from "../actions/registerTable"; +import { RunQueryArgs, RunQueryOptions, RunQueryResult, runQuery } from "../actions/runQuery"; +import { SetRecordArgs, 
SetRecordResult, setRecord } from "../actions/setRecord"; +import { SetRecordsArgs, SetRecordsResult, setRecords } from "../actions/setRecords"; +import { SubscribeQueryArgs, SubscribeQueryResult, subscribeQuery } from "../actions/subscribeQuery"; +import { SubscribeStoreArgs, SubscribeStoreResult, subscribeStore } from "../actions/subscribeStore"; +import { SubscribeTableArgs, SubscribeTableResult, subscribeTable } from "../actions/subscribeTable"; +import { Table } from "@latticexyz/config"; + +export type StashBoundDecodeKeyArgs
= Omit, "stash">; +export type StashBoundDeleteRecordArgs
= Omit, "stash">; +export type StashBoundEncodeKeyArgs
= EncodeKeyArgs
; +export type StashBoundGetConfigArgs = Omit; +export type StashBoundGetKeysArgs
= Omit, "stash">; +export type StashBoundGetRecordArgs
= Omit, "stash">; +export type StashBoundGetRecordsArgs
= Omit, "stash">; +export type StashBoundGetTableArgs
= Omit, "stash">; +export type StashBoundRegisterTableArgs
= Omit, "stash">; +export type StashBoundRunQueryArgs< + query extends Query = Query, + options extends RunQueryOptions = RunQueryOptions, +> = Omit, "stash">; +export type StashBoundSetRecordArgs
= Omit, "stash">; +export type StashBoundSetRecordsArgs
= Omit, "stash">; +export type StashBoundSubscribeQueryArgs = Omit, "stash">; +export type StashBoundSubscribeStoreArgs = Omit< + SubscribeStoreArgs, + "stash" +>; +export type StashBoundSubscribeTableArgs
= Omit, "stash">; + +export type DefaultActions = { + decodeKey:
(args: StashBoundDecodeKeyArgs
) => DecodeKeyResult
; + deleteRecord:
(args: StashBoundDeleteRecordArgs
) => DeleteRecordResult; + encodeKey:
(args: StashBoundEncodeKeyArgs
) => EncodeKeyResult; + getConfig: (args: StashBoundGetConfigArgs) => GetConfigResult; + getKeys:
(args: StashBoundGetKeysArgs
) => GetKeysResult
; + getRecord:
(args: StashBoundGetRecordArgs
) => GetRecordResult
; + getRecords:
(args: StashBoundGetRecordsArgs
) => GetRecordsResult
; + getTable:
(args: StashBoundGetTableArgs
) => GetTableResult
; + getTables: () => GetTablesResult; + registerTable:
(args: StashBoundRegisterTableArgs
) => RegisterTableResult
; + runQuery: ( + args: StashBoundRunQueryArgs, + ) => RunQueryResult; + setRecord:
(args: StashBoundSetRecordArgs
) => SetRecordResult; + setRecords:
(args: StashBoundSetRecordsArgs
) => SetRecordsResult; + subscribeQuery: (args: StashBoundSubscribeQueryArgs) => SubscribeQueryResult; + subscribeStore: (args: StashBoundSubscribeStoreArgs) => SubscribeStoreResult; + subscribeTable:
(args: StashBoundSubscribeTableArgs
) => SubscribeTableResult; +}; + +export function defaultActions(stash: Stash): DefaultActions { + return { + decodeKey:
(args: StashBoundDecodeKeyArgs
) => decodeKey({ stash, ...args }), + deleteRecord:
(args: StashBoundDeleteRecordArgs
) => deleteRecord({ stash, ...args }), + encodeKey:
(args: StashBoundEncodeKeyArgs
) => encodeKey(args), + getConfig: (args: StashBoundGetConfigArgs) => getConfig({ stash, ...args }), + getKeys:
(args: StashBoundGetKeysArgs
) => getKeys({ stash, ...args }), + getRecord:
(args: StashBoundGetRecordArgs
) => getRecord({ stash, ...args }), + getRecords:
(args: StashBoundGetRecordsArgs
) => getRecords({ stash, ...args }), + getTable:
(args: StashBoundGetTableArgs
) => getTable({ stash, ...args }), + getTables: () => getTables({ stash }), + registerTable:
(args: StashBoundRegisterTableArgs
) => registerTable({ stash, ...args }), + runQuery: (args: StashBoundRunQueryArgs) => + runQuery({ stash, ...args }), + setRecord:
(args: StashBoundSetRecordArgs
) => setRecord({ stash, ...args }), + setRecords:
(args: StashBoundSetRecordsArgs
) => setRecords({ stash, ...args }), + subscribeQuery: (args: StashBoundSubscribeQueryArgs) => + subscribeQuery({ stash, ...args }), + subscribeStore: (args: StashBoundSubscribeStoreArgs) => + subscribeStore({ stash, ...args }), + subscribeTable:
(args: StashBoundSubscribeTableArgs
) => + subscribeTable({ stash, ...args }), + }; +} diff --git a/packages/stash/src/exports/index.ts b/packages/stash/src/exports/index.ts new file mode 100644 index 0000000000..1e843685ef --- /dev/null +++ b/packages/stash/src/exports/index.ts @@ -0,0 +1,6 @@ +/** + * External exports. + * + * Be sure we're ready to commit to these being supported and changes made backward compatible! + */ +export {}; diff --git a/packages/stash/src/exports/internal.ts b/packages/stash/src/exports/internal.ts new file mode 100644 index 0000000000..37b08e10f6 --- /dev/null +++ b/packages/stash/src/exports/internal.ts @@ -0,0 +1,4 @@ +export * from "../createStash"; +export * from "../common"; +export * from "../queryFragments"; +export * from "../actions"; diff --git a/packages/stash/src/exports/recs.ts b/packages/stash/src/exports/recs.ts new file mode 100644 index 0000000000..02ed08b114 --- /dev/null +++ b/packages/stash/src/exports/recs.ts @@ -0,0 +1 @@ +export * from "../recs"; diff --git a/packages/stash/src/perf.test.ts b/packages/stash/src/perf.test.ts new file mode 100644 index 0000000000..7a5f6efbc0 --- /dev/null +++ b/packages/stash/src/perf.test.ts @@ -0,0 +1,432 @@ +import { describe, beforeEach, it } from "vitest"; +import { StoreApi, createStore as createZustandStore } from "zustand/vanilla"; +import { Component, Type, createEntity, createWorld, defineComponent, setComponent } from "@latticexyz/recs"; +import { mutative } from "zustand-mutative"; +import { defineStore } from "@latticexyz/store/config/v2"; +import { CreateStoreResult, createStash } from "./createStash"; + +export function printDuration(description: string, fn: () => unknown) { + const start = performance.now(); + fn(); + const end = performance.now(); + const duration = end - start; + console.log(description, duration); + return duration; +} + +type PositionSchema = { + x: number; + y: number; +}; + +type NameSchema = { + name: string; +}; + +type State = { + namespaces: { + app: { + Position: Record; + Name: Record; + }; + }; +}; + +type Actions = { + setPosition: (entity: string, position: PositionSchema) => void; +}; + +describe.skip("setting records in recs", () => { + let world: ReturnType; + let Position: Component<{ + x: Type.Number; + y: Type.Number; + }>; + + beforeEach(() => { + world = createWorld(); + defineComponent(world, { name: Type.String }); + Position = defineComponent(world, { x: Type.Number, y: Type.Number }); + }); + + it("[recs]: setting 10 records", () => { + printDuration("setting 10 records", () => { + for (let i = 0; i < 10; i++) { + const entity = createEntity(world); + setComponent(Position, entity, { x: i, y: i }); + } + }); + }); + + it("[recs]: setting 100 records", () => { + printDuration("setting 100 records", () => { + for (let i = 0; i < 100; i++) { + const entity = createEntity(world); + setComponent(Position, entity, { x: i, y: i }); + } + }); + }); + + it("[recs]: setting 1,000 records", () => { + printDuration("setting 1,000 records", () => { + for (let i = 0; i < 1_000; i++) { + const entity = createEntity(world); + setComponent(Position, entity, { x: i, y: i }); + } + }); + }); + + it("[recs]: setting 5,000 records", () => { + printDuration("setting 5,000 records", () => { + for (let i = 0; i < 5_000; i++) { + const entity = createEntity(world); + setComponent(Position, entity, { x: i, y: i }); + } + }); + }); + + it("[recs]: setting 10,000 records", () => { + printDuration("setting 10,000 records", () => { + for (let i = 0; i < 10_000; i++) { + const entity = createEntity(world); + 
setComponent(Position, entity, { x: i, y: i }); + } + }); + }); + + it("[recs]: setting 15,000 records", () => { + printDuration("setting 15,000 records", () => { + for (let i = 0; i < 15_000; i++) { + const entity = createEntity(world); + setComponent(Position, entity, { x: i, y: i }); + } + }); + }); + + it("[recs]: setting 20,000 records", () => { + printDuration("setting 20,000 records", () => { + for (let i = 0; i < 20_000; i++) { + const entity = createEntity(world); + setComponent(Position, entity, { x: i, y: i }); + } + }); + }); + + it("[recs]: setting 50,000 records", () => { + printDuration("setting 50,000 records", () => { + for (let i = 0; i < 50_000; i++) { + const entity = createEntity(world); + setComponent(Position, entity, { x: i, y: i }); + } + }); + }); + + it("[recs]: setting 100,000 records", () => { + printDuration("setting 100,000 records", () => { + for (let i = 0; i < 100_000; i++) { + const entity = createEntity(world); + setComponent(Position, entity, { x: i, y: i }); + } + }); + }); + + it("[recs]: setting 1,000,000 records", () => { + printDuration("setting 1,000,000 records", () => { + for (let i = 0; i < 1_000_000; i++) { + const entity = createEntity(world); + setComponent(Position, entity, { x: i, y: i }); + } + }); + }); +}); + +describe.skip("setting records in stash", () => { + let stash: CreateStoreResult; + const config = defineStore({ + tables: { + Position: { + schema: { player: "address", x: "uint32", y: "uint32" }, + key: ["player"], + }, + }, + }); + const Position = config.tables.Position; + + beforeEach(() => { + stash = createStash(config); + }); + + it("[stash]: setting 10 records", () => { + printDuration("setting 10 records", () => { + for (let i = 0; i < 10; i++) { + stash.setRecord({ table: Position, key: { player: `0x${i}` }, record: { x: i, y: i } }); + } + }); + }); + + it("[stash]: setting 100 records", () => { + printDuration("setting 100 records", () => { + for (let i = 0; i < 100; i++) { + stash.setRecord({ table: Position, key: { player: `0x${i}` }, record: { x: i, y: i } }); + } + }); + }); + + it("[stash]: setting 1,000 records", () => { + printDuration("setting 1,000 records", () => { + for (let i = 0; i < 1_000; i++) { + stash.setRecord({ table: Position, key: { player: `0x${i}` }, record: { x: i, y: i } }); + } + }); + }); + + it("[stash]: setting 5,000 records", () => { + printDuration("setting 5,000 records", () => { + for (let i = 0; i < 5_000; i++) { + stash.setRecord({ table: Position, key: { player: `0x${i}` }, record: { x: i, y: i } }); + } + }); + }); + + it("[stash]: setting 10,000 records", () => { + printDuration("setting 10,000 records", () => { + for (let i = 0; i < 10_000; i++) { + stash.setRecord({ table: Position, key: { player: `0x${i}` }, record: { x: i, y: i } }); + } + }); + }); + + it("[stash]: setting 15,000 records", () => { + printDuration("setting 15,000 records", () => { + for (let i = 0; i < 15_000; i++) { + stash.setRecord({ table: Position, key: { player: `0x${i}` }, record: { x: i, y: i } }); + } + }); + }); + + it("[stash]: setting 20,000 records", () => { + printDuration("setting 20,000 records", () => { + for (let i = 0; i < 20_000; i++) { + stash.setRecord({ table: Position, key: { player: `0x${i}` }, record: { x: i, y: i } }); + } + }); + }); + + it("[stash]: setting 50,000 records", () => { + printDuration("setting 50,000 records", () => { + for (let i = 0; i < 50_000; i++) { + stash.setRecord({ table: Position, key: { player: `0x${i}` }, record: { x: i, y: i } }); + } + }); + }); + + 
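    // --- Editor's aside (illustrative sketch, not part of this diff) ---------------------
    // The benchmarks above only measure writes. Reading the data back goes through the same
    // stash-bound actions wired up in decorators/default.ts. This skipped test assumes that
    // `stash.getKeys` and `stash.getRecords` accept a `{ table }` argument, mirroring the
    // standalone `getKeys({ stash, table })` / `getRecords({ stash, table })` calls used in
    // queryFragments.ts; treat it as a usage sketch rather than part of the benchmark suite.
    it.skip("[stash]: reading records back (sketch)", () => {
      stash.setRecord({ table: Position, key: { player: "0x0" }, record: { x: 1, y: 2 } });

      // All keys currently present in the Position table, indexed by their encoded form.
      const keys = stash.getKeys({ table: Position });
      // The full decoded records, indexed the same way.
      const records = stash.getRecords({ table: Position });

      console.log(Object.keys(keys).length, records);
    });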
it("[stash]: setting 100,000 records", () => { + printDuration("setting 100,000 records", () => { + for (let i = 0; i < 100_000; i++) { + stash.setRecord({ table: Position, key: { player: `0x${i}` }, record: { x: i, y: i } }); + } + }); + }); + + it("[stash]: setting 1,000,000 records", () => { + printDuration("setting 1,000,000 records", () => { + for (let i = 0; i < 1_000_000; i++) { + stash.setRecord({ table: Position, key: { player: `0x${i}` }, record: { x: i, y: i } }); + } + }); + }); +}); + +describe.skip("setting records in zustand", () => { + let stash: StoreApi; + + beforeEach(() => { + stash = createZustandStore((set) => ({ + namespaces: { + app: { + Position: { + "0x": { + x: 0, + y: 0, + }, + }, + Name: { + "0x": { + name: "Some Name", + }, + }, + }, + }, + setPosition: (entity: string, position: PositionSchema) => + set((prev) => ({ + namespaces: { + app: { + ...prev.namespaces.app, + Position: { + ...prev.namespaces.app.Position, + [entity]: position, + }, + }, + }, + })), + })); + }); + + it("[zustand]: setting 10 records", () => { + printDuration("setting 10 records", () => { + for (let i = 0; i < 10; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); + + it("[zustand]: setting 100 records", () => { + printDuration("setting 100 records", () => { + for (let i = 0; i < 100; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); + + it("[zustand]: setting 1,000 records", () => { + printDuration("setting 1,000 records", () => { + for (let i = 0; i < 1_000; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); + + it("[zustand]: setting 5,000 records", () => { + printDuration("setting 5,000 records", () => { + for (let i = 0; i < 5_000; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); + + it("[zustand]: setting 10,000 records", () => { + printDuration("setting 10,000 records", () => { + for (let i = 0; i < 10_000; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); + + it("[zustand]: setting 15,000 records", () => { + printDuration("setting 15,000 records", () => { + for (let i = 0; i < 15_000; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); + + it("[zustand]: setting 20,000 records", () => { + printDuration("setting 20,000 records", () => { + for (let i = 0; i < 20_000; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); +}); + +describe.skip("setting records in zustand with mutative", () => { + let stash: StoreApi; + + beforeEach(() => { + stash = createZustandStore()( + mutative((set) => ({ + namespaces: { + app: { + Position: { + "0x": { + x: 0, + y: 0, + }, + }, + Name: { + "0x": { + name: "Some Name", + }, + }, + }, + }, + setPosition: (entity: string, position: PositionSchema) => + set((state) => { + state.namespaces.app.Position[entity] = position; + }), + })), + ); + }); + + it("[zustand mutative]: setting 10 records", () => { + printDuration("setting 10 records", () => { + for (let i = 0; i < 10; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); + + it("[zustand mutative]: setting 100 records", () => { + printDuration("setting 100 records", () => { + for (let i = 0; i < 100; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); + + it("[zustand mutative]: setting 1,000 records", () => { + printDuration("setting 1,000 records", () => { + for (let i = 0; i < 1_000; i++) { + 
stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); + + it("[zustand mutative]: setting 5,000 records", () => { + printDuration("setting 5,000 records", () => { + for (let i = 0; i < 5_000; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); + + it("[zustand mutative]: setting 10,000 records", () => { + printDuration("setting 10,000 records", () => { + for (let i = 0; i < 10_000; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); + + it("[zustand mutative]: setting 15,000 records", () => { + printDuration("setting 15,000 records", () => { + for (let i = 0; i < 15_000; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); + + it("[zustand mutative]: setting 20,000 records", () => { + printDuration("setting 20,000 records", () => { + for (let i = 0; i < 20_000; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); + + it("[zustand mutative]: setting 30,000 records", () => { + printDuration("setting 30,000 records", () => { + for (let i = 0; i < 30_000; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); + + it("[zustand mutative]: setting 40,000 records", () => { + printDuration("setting 40,000 records", () => { + for (let i = 0; i < 40_000; i++) { + stash.getState().setPosition(String(i), { x: i, y: i }); + } + }); + }); +}); diff --git a/packages/stash/src/queryFragment.test.ts b/packages/stash/src/queryFragment.test.ts new file mode 100644 index 0000000000..7970519879 --- /dev/null +++ b/packages/stash/src/queryFragment.test.ts @@ -0,0 +1,19 @@ +import { describe, it } from "vitest"; + +describe("queryFragments", () => { + describe("In", () => { + it.todo("should match all records in the table"); + }); + + describe("NotIn", () => { + it.todo("should match all records not in the table"); + }); + + describe("MatchRecord", () => { + it.todo("should match all records matching the provided partial value"); + }); + + describe("NotMatchRecord", () => { + it.todo("should match all records not matching the provided partial value"); + }); +}); diff --git a/packages/stash/src/queryFragments.ts b/packages/stash/src/queryFragments.ts new file mode 100644 index 0000000000..42b915da36 --- /dev/null +++ b/packages/stash/src/queryFragments.ts @@ -0,0 +1,82 @@ +import { Table } from "@latticexyz/config"; +import { Keys, Stash } from "./common"; +import { TableRecord } from "./common"; +import { recordMatches } from "./recordEquals"; +import { getRecords } from "./actions/getRecords"; +import { getKeys } from "./actions/getKeys"; + +// TODO: add more query fragments - ie GreaterThan, LessThan, Range, etc + +export type QueryFragment
<table extends Table = Table> = { + table: table; + /** + * Checking an individual table row for whether it matches the query fragment + */ + match: (stash: Stash, encodedKey: string) => boolean; + /** + * The keys that should be included in the query result if this is the first fragment in the query. + * This is to avoid having to iterate over each key in the first table if there is a more efficient + * way to get to the initial result. + */ + getInitialKeys: (stash: Stash) => Keys; +}; + +/** + * Matches all records that exist in the table. + * RECS equivalent: Has(Component) + */ +export function In
<table extends Table>(table: table): QueryFragment<table>
{ + const match = (stash: Stash, encodedKey: string) => encodedKey in getRecords({ stash, table }); + const getInitialKeys = (stash: Stash) => getKeys({ stash, table }); + return { table, match, getInitialKeys }; +} + +/** + * Matches all records that don't exist in the table. + * RECS equivalent: Not(Component) + */ +export function NotIn
<table extends Table>(table: table): QueryFragment<table>
{ + const match = (stash: Stash, encodedKey: string) => !(encodedKey in getRecords({ stash, table })); + const getInitialKeys = () => ({}); + return { table, match, getInitialKeys }; +} + +/** + * Matches all records that match the provided partial record. + * This works for both value and key, since both are part of the record. + * RECS equivalent (only for value match): HasValue(Component, value) + */ +export function MatchRecord
<table extends Table>( + table: table, + partialRecord: Partial<TableRecord<table>>, +): QueryFragment<table>
{ + const match = (stash: Stash, encodedKey: string) => { + const record = getRecords({ stash, table })[encodedKey]; + return recordMatches(partialRecord, record); + }; + // TODO: this is a very naive and inefficient implementation for large tables, can be optimized via indexer tables + const getInitialKeys = (stash: Stash) => + Object.fromEntries(Object.entries(getKeys({ stash, table })).filter(([key]) => match(stash, key))); + return { table, match, getInitialKeys }; +} + +/** + * Matches all records that don't match the provided partial record. + * This works for both value and key, since both are part of the record. + * RECS equivalent (only for value match): NotValue(Component, value) + * @param table + * @param partialRecord + */ +export function NotMatchRecord
<table extends Table>( + table: table, + partialRecord: Partial<TableRecord<table>>, +): QueryFragment<table>
{ + const match = (stash: Stash, encodedKey: string) => { + const record = getRecords({ stash, table })[encodedKey]; + return !recordMatches(partialRecord, record); + }; + // TODO: this is a very naive and inefficient implementation for large tables, can be optimized via indexer tables + const getInitialKeys = (stash: Stash) => + Object.fromEntries(Object.entries(getKeys({ stash, table })).filter(([key]) => match(stash, key))); + return { table, match, getInitialKeys }; +} diff --git a/packages/stash/src/react/useSelector.test.ts b/packages/stash/src/react/useSelector.test.ts new file mode 100644 index 0000000000..5a86d262fa --- /dev/null +++ b/packages/stash/src/react/useSelector.test.ts @@ -0,0 +1,55 @@ +import { describe, it, expect } from "vitest"; +import { renderHook, act } from "@testing-library/react-hooks"; +import { useSelector } from "./useSelector"; +import { defineStore } from "@latticexyz/store/config/v2"; +import { createStash } from "../createStash"; + +describe("useCustomHook", () => { + it("checks the re-render behavior of the hook", async () => { + const config = defineStore({ + namespaces: { + game: { + tables: { + Position: { + schema: { player: "address", x: "uint32", y: "uint32" }, + key: ["player"], + }, + }, + }, + }, + }); + const Position = config.namespaces.game.tables.Position; + const stash = createStash(config); + const player = "0x00"; + + const { result } = renderHook(() => useSelector(stash, (state) => state.records["game"]["Position"][player])); + expect(result.current).toBe(undefined); + + act(() => { + stash.setRecord({ table: Position, key: { player }, record: { x: 1, y: 2 } }); + }); + + // Expect update to have triggered rerender + expect(result.all.length).toBe(2); + expect(result.all).toStrictEqual([undefined, { player, x: 1, y: 2 }]); + expect(result.current).toStrictEqual({ player, x: 1, y: 2 }); + + act(() => { + stash.setRecord({ table: Position, key: { player: "0x01" }, record: { x: 1, y: 2 } }); + }); + + // Expect unrelated update to not have triggered rerender + expect(result.all.length).toBe(2); + expect(result.all).toStrictEqual([undefined, { player, x: 1, y: 2 }]); + expect(result.current).toStrictEqual({ player, x: 1, y: 2 }); + + // Expect update to have triggered rerender + act(() => { + stash.setRecord({ table: Position, key: { player }, record: { x: 1, y: 3 } }); + }); + + expect(result.all.length).toBe(3); + expect(result.all).toStrictEqual([undefined, { player, x: 1, y: 2 }, { player, x: 1, y: 3 }]); + expect(result.current).toStrictEqual({ player, x: 1, y: 3 }); + }); +}); diff --git a/packages/stash/src/react/useSelector.ts b/packages/stash/src/react/useSelector.ts new file mode 100644 index 0000000000..cd1bca84fc --- /dev/null +++ b/packages/stash/src/react/useSelector.ts @@ -0,0 +1,28 @@ +import { useEffect, useRef, useState } from "react"; +import { State, Stash, StoreConfig } from "../common"; +import { subscribeStore } from "../actions"; + +export function useSelector( + stash: Stash, + selector: (stash: State) => T, + equals: (a: T, b: T) => boolean = (a, b) => a === b, +): T { + const state = useRef(selector(stash.get())); + const [, forceUpdate] = useState({}); + + useEffect(() => { + const unsubscribe = subscribeStore({ + stash, + subscriber: () => { + const nextState = selector(stash.get()); + if (!equals(state.current, nextState)) { + state.current = nextState; + forceUpdate({}); + } + }, + }); + return unsubscribe; + }, []); + + return state.current; +} diff --git a/packages/stash/src/recordEquals.ts 
b/packages/stash/src/recordEquals.ts new file mode 100644 index 0000000000..f5c52b3622 --- /dev/null +++ b/packages/stash/src/recordEquals.ts @@ -0,0 +1,26 @@ +import { TableRecord } from "./common"; + +/** + * Compare two {@link TableRecord}s. + * `a` can be a partial record, in which case only the keys present in `a` are compared to the corresponding keys in `b`. + * + * @param a Partial {@link TableRecord} to compare to `b` + * @param b ${@link TableRecord} to compare `a` to. + * @returns True if `a` equals `b` in the keys present in a or neither `a` nor `b` are defined, else false. + * + * @example + * ``` + * recordMatches({ x: 1, y: 2 }, { x: 1, y: 3 }) // returns false because value of y doesn't match + * recordMatches({ x: 1 }, { x: 1, y: 3 }) // returns true because x is equal and y is not present in a + * ``` + */ +export function recordMatches(a?: Partial, b?: TableRecord) { + if (!a && !b) return true; + if (!a || !b) return false; + + for (const key of Object.keys(a)) { + if (a[key] !== b[key]) return false; + } + + return true; +} diff --git a/packages/stash/src/recs/index.ts b/packages/stash/src/recs/index.ts new file mode 100644 index 0000000000..ee89e2d780 --- /dev/null +++ b/packages/stash/src/recs/index.ts @@ -0,0 +1,259 @@ +import { In, MatchRecord, NotIn, NotMatchRecord } from "../queryFragments"; +import { Query, Stash, TableRecord } from "../common"; +import { BoundTable, getTable } from "../actions/getTable"; +import { runQuery as runQueryInternal } from "../actions/runQuery"; +import { QueryUpdate, subscribeQuery } from "../actions/subscribeQuery"; +import { getConfig } from "../actions"; +import { Subject } from "rxjs"; + +export type Entity = string; +export const singletonEntity = ""; + +export enum UpdateType { + Enter = "enter", + Update = "update", + Exit = "exit", +} + +export function hasComponent(t: BoundTable, key: string): boolean { + return Boolean(t.getKeys()[key]); +} + +export function setComponent(t: BoundTable, key: string, value: TableRecord) { + t.setRecord({ + key: t.decodeKey({ encodedKey: key }), + record: value, + }); +} + +export function updateComponent(t: BoundTable, key: string, value: TableRecord) { + setComponent(t, key, value); +} + +export function removeComponent(t: BoundTable, key: string) { + t.deleteRecord({ key: t.decodeKey({ encodedKey: key }) }); +} + +export function getComponentValue(t: BoundTable, key: string): TableRecord | undefined { + return t.getRecord({ key: t.decodeKey({ encodedKey: key }) }); +} + +export function getComponentValueStrict(t: BoundTable, key: string): TableRecord { + const value = getComponentValue(t, key); + if (!value) { + throw new Error(`No value found for key ${key} in table ${t.getConfig().key}`); + } + return value; +} + +export function Has(t: BoundTable) { + return In(t.getConfig()); +} +export function Not(t: BoundTable) { + return NotIn(t.getConfig()); +} + +export function HasValue(t: BoundTable, partialRecord: TableRecord) { + return MatchRecord(t.getConfig(), partialRecord); +} + +export function NotValue(t: BoundTable, partialRecord: TableRecord) { + return NotMatchRecord(t.getConfig(), partialRecord); +} + +export function runQuery(stash: Stash, query: Query) { + const result = runQueryInternal({ stash, query }); + + return new Set(Object.keys(result.keys)); +} + +type SystemUpdate = { + entity: string; + value: [TableRecord | undefined, TableRecord | undefined]; + type: UpdateType; + component: T; +}; + +function transformUpdate(s: Stash, update: QueryUpdate, updateFilter?: 
UpdateType): SystemUpdate[] { + const transformedUpdates = [] as SystemUpdate[]; + + const namespaces = Object.entries(update.records); + for (const [namespaceLabel, tableUpdates] of namespaces) { + for (const [tableLabel, updates] of Object.entries(tableUpdates)) { + const table = getTable({ + stash: s, + table: getConfig({ stash: s, table: { namespaceLabel, label: tableLabel } }), + }); + const updateList = Object.entries(updates); + + for (const [key, recordUpdate] of updateList) { + if (!recordUpdate) { + continue; + } + + if (updateFilter && updateFilter !== update.types[key]) { + continue; + } + + transformedUpdates.push({ + component: table, + entity: key, + value: [recordUpdate.current, recordUpdate.prev], + type: update.types[key] as UpdateType, + }); + } + } + } + + return transformedUpdates; +} + +export function componentValueEquals(value1: TableRecord, value2: TableRecord): boolean { + return JSON.stringify(value1) === JSON.stringify(value2); +} + +export function defineSyncSystem( + stash: Stash, + query: Query, + component: (entity: Entity) => BoundTable, + value: (entity: Entity) => TableRecord, + options: { update?: boolean; runOnInit?: boolean } = { update: false, runOnInit: true }, +) { + defineSystemInternal( + stash, + query, + ({ entity, type }) => { + if (type === UpdateType.Enter) setComponent(component(entity), entity, value(entity)); + if (type === UpdateType.Exit) removeComponent(component(entity), entity); + if (options?.update && type === UpdateType.Update) setComponent(component(entity), entity, value(entity)); + }, + options, + ); +} + +export function defineComponentSystem(stash: Stash, table: BoundTable, system: (update: SystemUpdate) => void) { + table.subscribe({ + subscriber: (updates) => { + for (const [key, update] of Object.entries(updates)) { + let updateType = UpdateType.Update; + if (update.prev === undefined) { + updateType = UpdateType.Enter; + } else if (update.current === undefined) { + updateType = UpdateType.Exit; + } + + system({ entity: key, value: [update.current, update.prev], type: updateType, component: table }); + } + }, + }); +} + +export function isComponentUpdate(update: SystemUpdate, table: T): update is SystemUpdate { + return update.component.getConfig().tableId === table.getConfig().tableId; +} + +export function getComponentEntities(table: BoundTable) { + return Object.keys(table.getKeys()); +} + +export function defineExitQuery(stash: Stash, query: Query) { + const update$ = new Subject(); + + subscribeQuery({ + stash, + query, + options: { + skipInitialRun: true, + }, + }).subscribe((update) => { + const updates = transformUpdate(stash, update); + for (const update of updates) { + update$.next(update); + } + }); + + return update$; +} + +function defineSystemInternal( + stash: Stash, + query: Query, + system: (update: SystemUpdate) => void, + options: { runOnInit?: boolean } = { runOnInit: true }, + updateFilter?: UpdateType, +) { + subscribeQuery({ + stash, + query, + options: { + skipInitialRun: !options.runOnInit, + initialSubscribers: [ + (update) => { + const updates = transformUpdate(stash, update, updateFilter); + for (const update of updates) { + system(update); + } + }, + ], + }, + }); +} + +export function defineSystem( + stash: Stash, + query: Query, + system: (update: SystemUpdate) => void, + options: { runOnInit?: boolean } = { runOnInit: true }, +) { + defineSystemInternal(stash, query, system, options); +} + +export function defineEnterSystem( + stash: Stash, + query: Query, + system: (update: SystemUpdate) => 
void, + options: { runOnInit?: boolean } = { runOnInit: true }, +) { + defineSystemInternal(stash, query, system, options, UpdateType.Enter); +} + +export function defineUpdateSystem( + stash: Stash, + query: Query, + system: (update: SystemUpdate) => void, + options: { runOnInit?: boolean } = { runOnInit: true }, +) { + defineSystemInternal(stash, query, system, options, UpdateType.Update); +} + +export function defineExitSystem( + stash: Stash, + query: Query, + system: (update: SystemUpdate) => void, + options: { runOnInit?: boolean } = { runOnInit: true }, +) { + defineSystemInternal(stash, query, system, options, UpdateType.Exit); +} + +export function update$Wrapper(table: BoundTable) { + const update$ = new Subject(); + + table.subscribe({ + subscriber: (updates) => { + for (const [key, update] of Object.entries(updates)) { + const transformedUpdate = { + entity: key, + value: [update.current, update.prev] as [TableRecord | undefined, TableRecord | undefined], + type: UpdateType.Update, + component: table, + }; + update$.next(transformedUpdate); + } + }, + }); + + return { + ...table, + update$, + }; +} diff --git a/packages/stash/src/write.bench.ts b/packages/stash/src/write.bench.ts new file mode 100644 index 0000000000..ceb2b55bc9 --- /dev/null +++ b/packages/stash/src/write.bench.ts @@ -0,0 +1,229 @@ +import { describe, bench } from "vitest"; +import { StoreApi, createStore } from "zustand/vanilla"; +import { Component, Type, createEntity, createWorld, defineComponent, setComponent } from "@latticexyz/recs"; +import { mutative } from "zustand-mutative"; + +export function printDuration(description: string, fn: () => unknown) { + const start = performance.now(); + fn(); + const end = performance.now(); + const duration = end - start; + console.log(description, duration); + return duration; +} + +type PositionSchema = { + x: number; + y: number; +}; + +type NameSchema = { + name: string; +}; + +type State = { + namespaces: { + app: { + Position: Record; + Name: Record; + }; + }; +}; + +type Actions = { + setPositions: (positions: { [entity: string]: PositionSchema }) => void; +}; + +let prefix = 0; +function generatePositions(numRecords: number): Record { + prefix++; + const positions: Record = {}; + for (let i = 0; i < numRecords; i++) { + positions[String(prefix) + "-" + String(i)] = { x: i, y: i }; + } + return positions; +} + +describe.skip.each([ + { initialRecords: 1_000, newRecords: 1 }, + { initialRecords: 1_000, newRecords: 100 }, + { initialRecords: 10_000, newRecords: 1 }, + { initialRecords: 10_000, newRecords: 100 }, + { initialRecords: 100_000, newRecords: 1 }, + { initialRecords: 100_000, newRecords: 100 }, + { initialRecords: 1_000_000, newRecords: 1 }, + { initialRecords: 1_000_000, newRecords: 100 }, + { initialRecords: 1_000_000, newRecords: 1_000 }, + { initialRecords: 1_000_000, newRecords: 10_000 }, + { initialRecords: 1_000_000, newRecords: 100_000 }, +])( + "[zustand]: setting $newRecords records in a stash with $initialRecords records", + ({ initialRecords, newRecords }) => { + let stash: StoreApi; + let positions: Record; + + function setupStore(numRecords: number) { + // Create stash + stash = createStore((set) => ({ + namespaces: { + app: { + Position: { + "0x": { + x: 0, + y: 0, + }, + }, + Name: { + "0x": { + name: "Some Name", + }, + }, + }, + }, + setPositions: (positions: { [entity: string]: PositionSchema }) => + set((prev) => ({ + namespaces: { + app: { + ...prev.namespaces.app, + Position: { + ...prev.namespaces.app.Position, + ...positions, + 
}, + }, + }, + })), + })); + + // Initialize stash with specified number of records + stash.getState().setPositions(generatePositions(numRecords)); + } + + bench( + "bench", + () => { + stash.getState().setPositions(positions); + }, + { + setup: () => { + setupStore(initialRecords); + positions = generatePositions(newRecords); + }, + iterations: 3, + }, + ); + }, +); + +describe.skip.each([ + { initialRecords: 1_000, newRecords: 1 }, + { initialRecords: 1_000, newRecords: 100 }, + { initialRecords: 10_000, newRecords: 1 }, + { initialRecords: 10_000, newRecords: 100 }, + { initialRecords: 100_000, newRecords: 1 }, + { initialRecords: 100_000, newRecords: 100 }, + { initialRecords: 1_000_000, newRecords: 1 }, + { initialRecords: 1_000_000, newRecords: 100 }, + { initialRecords: 1_000_000, newRecords: 1_000 }, + { initialRecords: 1_000_000, newRecords: 10_000 }, + { initialRecords: 1_000_000, newRecords: 100_000 }, +])( + "[zustand-mutative]: setting $newRecords records in a stash with $initialRecords records", + ({ initialRecords, newRecords }) => { + let stash: StoreApi; + let positions: Record; + + function setupStore(numRecords: number) { + // Create stash + stash = createStore()( + mutative((set) => ({ + namespaces: { + app: { + Position: { + "0x": { + x: 0, + y: 0, + }, + }, + Name: { + "0x": { + name: "Some Name", + }, + }, + }, + }, + setPositions: (positions: { [entity: string]: PositionSchema }) => + set((prev) => { + for (const [entity, position] of Object.entries(positions)) { + prev.namespaces.app.Position[entity] = position; + } + }), + })), + ); + + // Initialize stash with specified number of records + stash.getState().setPositions(generatePositions(numRecords)); + } + + bench( + "bench", + () => { + stash.getState().setPositions(positions); + }, + { + setup: () => { + setupStore(initialRecords); + positions = generatePositions(newRecords); + }, + iterations: 3, + }, + ); + }, +); + +describe.skip.each([ + { initialRecords: 1_000, newRecords: 1 }, + { initialRecords: 1_000, newRecords: 100 }, + { initialRecords: 10_000, newRecords: 1 }, + { initialRecords: 10_000, newRecords: 100 }, + { initialRecords: 100_000, newRecords: 1 }, + { initialRecords: 100_000, newRecords: 100 }, + { initialRecords: 1_000_000, newRecords: 1 }, + { initialRecords: 1_000_000, newRecords: 100 }, + { initialRecords: 1_000_000, newRecords: 1_000 }, + { initialRecords: 1_000_000, newRecords: 10_000 }, + // { initialRecords: 1_000_000, newRecords: 100_000 }, +])("[recs]: setting $newRecords records in a stash with $initialRecords records", ({ initialRecords, newRecords }) => { + let world: ReturnType; + let positions: Record; + let Position: Component<{ x: Type.Number; y: Type.Number }>; + + function setupStore(numRecords: number) { + world = createWorld(); + defineComponent(world, { name: Type.String }); + Position = defineComponent(world, { x: Type.Number, y: Type.Number }); + + // Initialize Position component with specified number of records + const initialPositions = generatePositions(numRecords); + for (const position of Object.values(initialPositions)) { + const entity = createEntity(world); + setComponent(Position, entity, position); + } + } + + bench( + "bench", + () => { + for (const position of Object.values(positions)) { + const entity = createEntity(world); + setComponent(Position, entity, position); + } + }, + { + setup: () => { + setupStore(initialRecords); + positions = generatePositions(newRecords); + }, + iterations: 3, + }, + ); +}); diff --git a/packages/stash/tsconfig.json 
b/packages/stash/tsconfig.json new file mode 100644 index 0000000000..039e0b4d16 --- /dev/null +++ b/packages/stash/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": ["src"] +} diff --git a/packages/stash/tsup.config.ts b/packages/stash/tsup.config.ts new file mode 100644 index 0000000000..3fa50a80ee --- /dev/null +++ b/packages/stash/tsup.config.ts @@ -0,0 +1,15 @@ +import { defineConfig } from "tsup"; + +export default defineConfig({ + entry: { + index: "src/exports/index.ts", + internal: "src/exports/internal.ts", + recs: "src/exports/recs.ts", + }, + target: "esnext", + format: ["esm"], + dts: !process.env.TSUP_SKIP_DTS, + sourcemap: true, + clean: true, + minify: true, +}); diff --git a/packages/stash/vitest.config.ts b/packages/stash/vitest.config.ts new file mode 100644 index 0000000000..b6a66f2a98 --- /dev/null +++ b/packages/stash/vitest.config.ts @@ -0,0 +1,7 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + globalSetup: "vitestSetup.ts", + }, +}); diff --git a/packages/stash/vitestSetup.ts b/packages/stash/vitestSetup.ts new file mode 100644 index 0000000000..035460b806 --- /dev/null +++ b/packages/stash/vitestSetup.ts @@ -0,0 +1 @@ +export { setup, teardown } from "@arktype/attest"; diff --git a/packages/store-sync/package.json b/packages/store-sync/package.json index 878aa2306c..5a14e3ae0c 100644 --- a/packages/store-sync/package.json +++ b/packages/store-sync/package.json @@ -12,12 +12,14 @@ "exports": { ".": "./dist/index.js", "./indexer-client": "./dist/indexer-client/index.js", + "./dozer": "./dist/dozer/index.js", "./postgres": "./dist/postgres/index.js", "./postgres-decoded": "./dist/postgres-decoded/index.js", "./recs": "./dist/recs/index.js", "./sqlite": "./dist/sqlite/index.js", "./trpc-indexer": "./dist/trpc-indexer/index.js", - "./zustand": "./dist/zustand/index.js" + "./zustand": "./dist/zustand/index.js", + "./stash": "./dist/stash/index.js" }, "typesVersions": { "*": { @@ -27,6 +29,9 @@ "indexer-client": [ "./dist/indexer-client/index.d.ts" ], + "dozer": [ + "./dist/dozer/index.d.ts" + ], "postgres": [ "./dist/postgres/index.d.ts" ], @@ -44,6 +49,9 @@ ], "zustand": [ "./dist/zustand/index.d.ts" + ], + "stash": [ + "./src/stash/index.ts" ] } }, @@ -70,6 +78,7 @@ "@latticexyz/query": "workspace:*", "@latticexyz/recs": "workspace:*", "@latticexyz/schema-type": "workspace:*", + "@latticexyz/stash": "workspace:*", "@latticexyz/store": "workspace:*", "@latticexyz/world": "workspace:*", "@trpc/client": "10.34.0", diff --git a/packages/store-sync/src/common.ts b/packages/store-sync/src/common.ts index 740935e2c9..721a4491b1 100644 --- a/packages/store-sync/src/common.ts +++ b/packages/store-sync/src/common.ts @@ -25,8 +25,8 @@ export const internalTableIds = Object.values(mudTables).map((table) => table.ta export type ChainId = number; export type WorldId = `${ChainId}:${Address}`; -// TODO: add label once we register it onchain -export type DeployedTable = Omit; +// TODO: add label and namespaceLabel once we register it onchain +export type DeployedTable = Omit; export type TableRecord
= { readonly key: getSchemaPrimitives>; diff --git a/packages/store-sync/src/dozer/common.ts b/packages/store-sync/src/dozer/common.ts new file mode 100644 index 0000000000..d12b65bba5 --- /dev/null +++ b/packages/store-sync/src/dozer/common.ts @@ -0,0 +1,30 @@ +import { Table } from "@latticexyz/config"; +import { Hex } from "viem"; + +export type DozerTableQuery = { + table: Table; + /** + * SQL to filter the records of this table. + * The SQL result is expected to be of the same culumn shape as the table. + * Use the `selectFrom` helper to ensure the expected column shape. + * Note: requires an indexer with SQL API (ie. Dozer). + */ + sql: string; +}; + +export type DozerLogFilter = { + /** + * Filter logs by the table ID. + */ + table: Table; + /** + * Optionally filter by the `bytes32` value of the key in the first position (index zero of the record's key tuple). + */ + key0?: Hex; + /** + * Optionally filter by the `bytes32` value of the key in the second position (index one of the record's key tuple). + */ + key1?: Hex; +}; + +export type DozerSyncFilter = DozerTableQuery | DozerLogFilter; diff --git a/packages/store-sync/src/dozer/fetchInitialBlockLogsDozer.ts b/packages/store-sync/src/dozer/fetchInitialBlockLogsDozer.ts new file mode 100644 index 0000000000..edf6e2e5c0 --- /dev/null +++ b/packages/store-sync/src/dozer/fetchInitialBlockLogsDozer.ts @@ -0,0 +1,84 @@ +import { DozerLogFilter, DozerSyncFilter, DozerTableQuery } from "./common"; +import { Hex } from "viem"; +import { StorageAdapterBlock, SyncFilter } from "../common"; +import { fetchRecordsDozerSql } from "./fetchRecordsDozerSql"; +import { recordToLog } from "../recordToLog"; +import { getSnapshot } from "../getSnapshot"; +import { bigIntMin } from "@latticexyz/common/utils"; + +export type FetchInitialBlockLogsDozerArgs = { + dozerUrl: string; + storeAddress: Hex; + filters?: DozerSyncFilter[]; + startBlock?: bigint; + chainId: number; +}; + +export type FetchInitialBlockLogsDozerResult = { + initialBlockLogs: StorageAdapterBlock; +}; + +export async function fetchInitialBlockLogsDozer({ + dozerUrl, + storeAddress, + filters, + startBlock = 0n, + chainId, +}: FetchInitialBlockLogsDozerArgs): Promise { + const initialBlockLogs: StorageAdapterBlock = { blockNumber: startBlock, logs: [] }; + + // We execute the list of provided SQL queries for hydration. For performance + // reasons the queries are not executed against a fixed block height, but against + // the latest state. We therefore pass the min block number of all query results + // as overall block number. This means some logs will be re-fetched again during + // the hydration process, but after the hydration is complete, the state will be + // correct. Intermediate state updates during hydration might be incorrect (for + // partial updates), so we only notify consumers of state updates after the + // initial hydration is complete. + + const sqlFilters = filters && (filters.filter((filter) => "sql" in filter) as DozerTableQuery[]); + + // TODO: it might be more performant to execute individual sql queries separately than in one network request + // to parallelize on the backend (one request is expected to be execute against the same db state so it can't + // be parallelized). + const dozerTables = + sqlFilters && sqlFilters.length > 0 + ? 
await fetchRecordsDozerSql({ dozerUrl, storeAddress, queries: sqlFilters }) + : undefined; + + if (dozerTables) { + initialBlockLogs.blockNumber = dozerTables.blockHeight; + initialBlockLogs.logs = dozerTables.result.flatMap(({ table, records }) => + records.map((record) => recordToLog({ table, record, address: storeAddress })), + ); + } + + // Fetch the tables without SQL filter from the snapshot logs API for better performance. + const snapshotFilters = + filters && + filters + .filter((filter) => !("sql" in filter)) + .map((filter) => { + const { table, key0, key1 } = filter as DozerLogFilter; + return { tableId: table.tableId, key0, key1 } as SyncFilter; + }); + + const snapshot = + // If no filters are provided, the entire state is fetched + !snapshotFilters || snapshotFilters.length > 0 + ? await getSnapshot({ + chainId, + address: storeAddress, + filters: snapshotFilters, + indexerUrl: dozerUrl, + }) + : undefined; + + // The block number passed in the overall result will be the min of all queries and the snapshot. + if (snapshot) { + initialBlockLogs.blockNumber = bigIntMin(initialBlockLogs.blockNumber, snapshot.blockNumber); + initialBlockLogs.logs = [...initialBlockLogs.logs, ...snapshot.logs]; + } + + return { initialBlockLogs }; +} diff --git a/packages/store-sync/src/dozer/fetchRecordsDozerSql.test.ts b/packages/store-sync/src/dozer/fetchRecordsDozerSql.test.ts new file mode 100644 index 0000000000..3c65420e58 --- /dev/null +++ b/packages/store-sync/src/dozer/fetchRecordsDozerSql.test.ts @@ -0,0 +1,136 @@ +import { describe, expect, it } from "vitest"; +import { fetchRecordsDozerSql } from "./fetchRecordsDozerSql"; +import mudConfig from "@latticexyz/world/mud.config"; +import { selectFrom } from "./selectFrom"; + +describe("fetch dozer sql", () => { + // TODO: set up CI test case for this (requires setting up dozer in CI) + it("should fetch dozer sql", async () => { + const result = await fetchRecordsDozerSql({ + dozerUrl: "https://redstone2.dozer.skystrife.xyz/q", + storeAddress: "0x9d05cc196c87104a7196fcca41280729b505dbbf", + queries: [ + selectFrom({ table: mudConfig.tables.world__Balances, where: '"balance" > 0', limit: 2 }), + selectFrom({ table: mudConfig.tables.world__FunctionSignatures, limit: 10 }), + ], + }); + + expect(result).toMatchInlineSnapshot(` + { + "blockHeight": 4909521n, + "result": [ + { + "records": [ + { + "balance": 308500000000000000n, + "namespaceId": "0x6e73000000000000000000000000000000000000000000000000000000000000", + }, + ], + "table": { + "codegen": { + "dataStruct": false, + "outputDirectory": "tables", + "storeArgument": false, + "tableIdArgument": false, + }, + "deploy": { + "disabled": false, + }, + "key": [ + "namespaceId", + ], + "label": "Balances", + "name": "Balances", + "namespace": "world", + "schema": { + "balance": { + "internalType": "uint256", + "type": "uint256", + }, + "namespaceId": { + "internalType": "ResourceId", + "type": "bytes32", + }, + }, + "tableId": "0x7462776f726c6400000000000000000042616c616e6365730000000000000000", + "type": "table", + }, + }, + { + "records": [ + { + "functionSelector": "0x0560912900000000000000000000000000000000000000000000000000000000", + "functionSignature": "unregisterStoreHook(bytes32,address)", + }, + { + "functionSelector": "0x0ba51f4900000000000000000000000000000000000000000000000000000000", + "functionSignature": "registerTable(bytes32,bytes32,bytes32,bytes32,string[],string[])", + }, + { + "functionSelector": "0x127de47a00000000000000000000000000000000000000000000000000000000", + 
"functionSignature": "createMatch(string,bytes32,bytes32,bytes32)", + }, + { + "functionSelector": "0x17902d6100000000000000000000000000000000000000000000000000000000", + "functionSignature": "createMatchSeasonPass(string,bytes32,bytes32,bytes32,bytes32,uint256,uint256[],bool)", + }, + { + "functionSelector": "0x1b9a91a400000000000000000000000000000000000000000000000000000000", + "functionSignature": "withdrawEth(address,uint256)", + }, + { + "functionSelector": "0x1d2257ba00000000000000000000000000000000000000000000000000000000", + "functionSignature": "registerDelegation(address,bytes32,bytes)", + }, + { + "functionSelector": "0x1fc595cd00000000000000000000000000000000000000000000000000000000", + "functionSignature": "setOfficial(bytes32,bool)", + }, + { + "functionSelector": "0x219adc2e00000000000000000000000000000000000000000000000000000000", + "functionSignature": "renounceOwnership(bytes32)", + }, + { + "functionSelector": "0x220ca1f600000000000000000000000000000000000000000000000000000000", + "functionSignature": "toggleReady(bytes32)", + }, + { + "functionSelector": "0x231bb4cd00000000000000000000000000000000000000000000000000000000", + "functionSignature": "createNewSeasonPass(bytes14,uint256,uint256,uint256,uint256,uint256,uint256,uint256)", + }, + ], + "table": { + "codegen": { + "dataStruct": false, + "outputDirectory": "tables", + "storeArgument": false, + "tableIdArgument": false, + }, + "deploy": { + "disabled": false, + }, + "key": [ + "functionSelector", + ], + "label": "FunctionSignatures", + "name": "FunctionSignatur", + "namespace": "world", + "schema": { + "functionSelector": { + "internalType": "bytes4", + "type": "bytes4", + }, + "functionSignature": { + "internalType": "string", + "type": "string", + }, + }, + "tableId": "0x6f74776f726c6400000000000000000046756e6374696f6e5369676e61747572", + "type": "offchainTable", + }, + }, + ], + } + `); + }); +}); diff --git a/packages/store-sync/src/dozer/fetchRecordsDozerSql.ts b/packages/store-sync/src/dozer/fetchRecordsDozerSql.ts new file mode 100644 index 0000000000..6526d88ca6 --- /dev/null +++ b/packages/store-sync/src/dozer/fetchRecordsDozerSql.ts @@ -0,0 +1,69 @@ +import { DecodeDozerRecordsResult, DozerQueryResult, decodeDozerRecords } from "@latticexyz/protocol-parser/internal"; +import { Hex } from "viem"; +import { DozerTableQuery } from "./common"; +import { Table } from "@latticexyz/config"; + +type DozerResponseSuccess = { + block_height: string; + result: DozerQueryResult[]; +}; + +type DozerResponseFail = { msg: string }; + +type DozerResponse = DozerResponseSuccess | DozerResponseFail; + +type FetchDozerSqlArgs = { + dozerUrl: string; + storeAddress: Hex; + queries: DozerTableQuery[]; +}; + +type FetchDozerSqlResult = + | { + blockHeight: bigint; + result: { + table: Table; + records: DecodeDozerRecordsResult; + }[]; + } + | undefined; + +function isDozerResponseFail(response: DozerResponse): response is DozerResponseFail { + return "msg" in response; +} + +export async function fetchRecordsDozerSql({ + dozerUrl, + queries, + storeAddress, +}: FetchDozerSqlArgs): Promise { + const response: DozerResponse = await ( + await fetch(dozerUrl, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(queries.map((query) => ({ address: storeAddress, query: query.sql }))), + }) + ).json(); + + if (isDozerResponseFail(response)) { + console.warn(`Dozer response: ${response.msg}\n\nTry reproducing via cURL: + curl ${dozerUrl} \\ + --compressed \\ + -H 'Accept-Encoding: gzip' \\ 
+ -H 'Content-Type: application/json' \\ + -d '[${queries.map((query) => `{"address": "${storeAddress}", "query": "${query.sql.replaceAll('"', '\\"')}"}`).join(",")}]'`); + return; + } + + const result: FetchDozerSqlResult = { + blockHeight: BigInt(response.block_height), + result: response.result.map((records, index) => ({ + table: queries[index].table, + records: decodeDozerRecords({ schema: queries[index].table.schema, records }), + })), + }; + + return result; +} diff --git a/packages/store-sync/src/dozer/index.ts b/packages/store-sync/src/dozer/index.ts new file mode 100644 index 0000000000..844a4308ff --- /dev/null +++ b/packages/store-sync/src/dozer/index.ts @@ -0,0 +1,4 @@ +export * from "./common"; +export * from "./fetchRecordsDozerSql"; +export * from "./selectFrom"; +export * from "./fetchInitialBlockLogsDozer"; diff --git a/packages/store-sync/src/dozer/selectFrom.ts b/packages/store-sync/src/dozer/selectFrom.ts new file mode 100644 index 0000000000..81b1d757e7 --- /dev/null +++ b/packages/store-sync/src/dozer/selectFrom.ts @@ -0,0 +1,19 @@ +import { Table } from "@latticexyz/config"; +import { DozerTableQuery } from "./common"; + +// For autocompletion but still allowing all SQL strings +export type Where
= `"${keyof table["schema"] & string}"` | (string & {}); + +export type SelectFromArgs
<table extends Table> = { table: table; where?: Where<table>
; limit?: number }; + +export function selectFrom
<table extends Table>({ table, where, limit }: SelectFromArgs<table>
): DozerTableQuery { + const dozerTableLabel = table.namespace === "" ? table.name : `${table.namespace}__${table.name}`; + return { + table: table, + sql: `select ${Object.keys(table.schema) + .map((key) => `"${key}"`) + .join( + ", ", + )} from ${dozerTableLabel}${where != null ? ` where ${where}` : ""}${limit != null ? ` limit ${limit}` : ""}`, + }; +} diff --git a/packages/store-sync/src/index.ts b/packages/store-sync/src/index.ts index b69f95f5ce..de276bd499 100644 --- a/packages/store-sync/src/index.ts +++ b/packages/store-sync/src/index.ts @@ -6,3 +6,4 @@ export * from "./isTableRegistrationLog"; export * from "./logToTable"; export * from "./tablesWithRecordsToLogs"; export * from "./tableToLog"; +export * from "./recordToLog"; diff --git a/packages/store-sync/src/logToRecord.test.ts b/packages/store-sync/src/logToRecord.test.ts new file mode 100644 index 0000000000..fc2704e749 --- /dev/null +++ b/packages/store-sync/src/logToRecord.test.ts @@ -0,0 +1,43 @@ +/* eslint-disable max-len */ +import { describe, it, expect } from "vitest"; +import { defineTable } from "@latticexyz/store/config/v2"; +import { logToRecord } from "./logToRecord"; + +describe("logToRecord", () => { + it("should convert a Store_SetRecord log into a decoded table record", async () => { + const table = defineTable({ + label: "Test", + schema: { + key1: "uint32", + key2: "uint256", + value1: "address", + value2: "string", + }, + key: ["key1", "key2"], + }); + + const record = { + key1: 1, + key2: 2n, + value1: "0x3Aa5ebB10DC797CAC828524e59A333d0A371443c", + value2: "hello", + } as const; + + const log = { + address: "0x3Aa5ebB10DC797CAC828524e59A333d0A371443c", + args: { + dynamicData: "0x68656c6c6f", + encodedLengths: "0x0000000000000000000000000000000000000000000000000500000000000005", + keyTuple: [ + "0x0000000000000000000000000000000000000000000000000000000000000001", + "0x0000000000000000000000000000000000000000000000000000000000000002", + ], + staticData: "0x3aa5ebb10dc797cac828524e59a333d0a371443c", + tableId: "0x7462000000000000000000000000000054657374000000000000000000000000", + }, + eventName: "Store_SetRecord", + } as const; + + expect(logToRecord({ table, log })).toStrictEqual(record); + }); +}); diff --git a/packages/store-sync/src/logToRecord.ts b/packages/store-sync/src/logToRecord.ts new file mode 100644 index 0000000000..4ec8f08363 --- /dev/null +++ b/packages/store-sync/src/logToRecord.ts @@ -0,0 +1,26 @@ +import { + PartialTable, + SchemaToPrimitives, + decodeKey, + decodeValueArgs, + getKeySchema, + getSchemaTypes, + getValueSchema, +} from "@latticexyz/protocol-parser/internal"; +import { StorageAdapterLog } from "./common"; + +type LogToRecordArgs
<table extends PartialTable> = { + table: table; + log: StorageAdapterLog & { eventName: "Store_SetRecord" }; +}; + +export function logToRecord
<table extends PartialTable>({ + table, + log, +}: LogToRecordArgs<table>
): SchemaToPrimitives> { + const keySchema = getSchemaTypes(getKeySchema(table)); + const valueSchema = getSchemaTypes(getValueSchema(table)); + const key = decodeKey(keySchema, log.args.keyTuple); + const value = decodeValueArgs(valueSchema, log.args); + return { ...key, ...value }; +} diff --git a/packages/store-sync/src/recordToLog.test.ts b/packages/store-sync/src/recordToLog.test.ts new file mode 100644 index 0000000000..3ddf07166d --- /dev/null +++ b/packages/store-sync/src/recordToLog.test.ts @@ -0,0 +1,52 @@ +/* eslint-disable max-len */ +import { describe, it, expect } from "vitest"; +import { recordToLog } from "./recordToLog"; +import { defineTable } from "@latticexyz/store/config/v2"; +import { logToRecord } from "./logToRecord"; + +describe("recordToLog", () => { + it("should convert table record into a Store_SetRecord log", async () => { + const table = defineTable({ + label: "Test", + schema: { + key1: "uint32", + key2: "uint256", + value1: "address", + value2: "string", + }, + key: ["key1", "key2"], + }); + + const record = { + key1: 1, + key2: 2n, + value1: "0x3Aa5ebB10DC797CAC828524e59A333d0A371443c", + value2: "hello", + } as const; + + const log = recordToLog({ + address: "0x3Aa5ebB10DC797CAC828524e59A333d0A371443c", + table, + record, + }); + + expect(log).toMatchInlineSnapshot(` + { + "address": "0x3Aa5ebB10DC797CAC828524e59A333d0A371443c", + "args": { + "dynamicData": "0x68656c6c6f", + "encodedLengths": "0x0000000000000000000000000000000000000000000000000500000000000005", + "keyTuple": [ + "0x0000000000000000000000000000000000000000000000000000000000000001", + "0x0000000000000000000000000000000000000000000000000000000000000002", + ], + "staticData": "0x3aa5ebb10dc797cac828524e59a333d0a371443c", + "tableId": "0x7462000000000000000000000000000054657374000000000000000000000000", + }, + "eventName": "Store_SetRecord", + } + `); + + expect(logToRecord({ table, log })).toStrictEqual(record); + }); +}); diff --git a/packages/store-sync/src/recordToLog.ts b/packages/store-sync/src/recordToLog.ts new file mode 100644 index 0000000000..55f027a6b1 --- /dev/null +++ b/packages/store-sync/src/recordToLog.ts @@ -0,0 +1,39 @@ +import { + SchemaToPrimitives, + encodeKey, + encodeValueArgs, + getKeySchema, + getSchemaTypes, + getValueSchema, + getKey, + getValue, + PartialTable, +} from "@latticexyz/protocol-parser/internal"; +import { StorageAdapterLog } from "./common"; +import { Table } from "@latticexyz/config"; +import { Hex } from "viem"; + +type RecordToLogArgs
<table extends Table> = { + address: Hex; + table: table; + record: SchemaToPrimitives<getSchemaTypes<table["schema"]>>; +}; + +export function recordToLog
<table extends Table>({ + table, + record, + address, +}: RecordToLogArgs<table>
): StorageAdapterLog & { eventName: "Store_SetRecord" } { + const keySchema = getSchemaTypes(getKeySchema(table)); + const valueSchema = getSchemaTypes(getValueSchema(table)); + + return { + eventName: "Store_SetRecord", + address: address, + args: { + tableId: table.tableId, + keyTuple: encodeKey(keySchema, getKey(table, record)), + ...encodeValueArgs(valueSchema, getValue(table, record)), + }, + }; +} diff --git a/packages/store-sync/src/stash/createStorageAdapter.test.ts b/packages/store-sync/src/stash/createStorageAdapter.test.ts new file mode 100644 index 0000000000..e0e925d049 --- /dev/null +++ b/packages/store-sync/src/stash/createStorageAdapter.test.ts @@ -0,0 +1,839 @@ +/* eslint-disable max-len */ +import { describe, expect, it } from "vitest"; +import { createStorageAdapter } from "./createStorageAdapter"; +import mudConfig from "../../../../e2e/packages/contracts/mud.config"; +import worldRpcLogs from "../../../../test-data/world-logs.json"; +import { groupLogsByBlockNumber } from "@latticexyz/block-logs-stream"; +import { StoreEventsLog } from "../common"; +import { RpcLog, formatLog, decodeEventLog, Hex } from "viem"; +import { storeEventsAbi } from "@latticexyz/store"; +import { createStash, getTable } from "@latticexyz/stash/internal"; + +// TODO: make test-data a proper package and export this +const blocks = groupLogsByBlockNumber( + worldRpcLogs.map((log) => { + const { eventName, args } = decodeEventLog({ + abi: storeEventsAbi, + data: log.data as Hex, + topics: log.topics as [Hex, ...Hex[]], + strict: true, + }); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return formatLog(log as any as RpcLog, { args, eventName: eventName as string }) as StoreEventsLog; + }), +); + +describe("createStorageAdapter", () => { + it("sets records from logs", async () => { + const stash = createStash(mudConfig); + const { storageAdapter } = createStorageAdapter({ stash }); + + for (const block of blocks) { + await storageAdapter(block); + } + + const NumberList = getTable({ stash, table: { namespace: "", label: "NumberList" } }); + + expect(NumberList.getKeys()).toMatchInlineSnapshot(` + { + "": {}, + } + `); + + expect(NumberList.getRecords()).toMatchInlineSnapshot(` + { + "": { + "value": [ + 420, + 69, + ], + }, + } + `); + + expect(stash.get().records).toMatchInlineSnapshot(` + { + "": { + "DynamicArray": {}, + "Multi": {}, + "Number": {}, + "NumberList": { + "": { + "value": [ + 420, + 69, + ], + }, + }, + "Position": { + "0x6d61703100000000000000000000000000000000000000000000000000000000|1|1": { + "player": "0x1804c8AB1F12E6bbf3894d4083f33e07309d1f38", + "x": 1, + "y": 1, + "zone": "0x6d61703100000000000000000000000000000000000000000000000000000000", + }, + "0x6d61703100000000000000000000000000000000000000000000000000000000|2|-2": { + "player": "0x1804c8AB1F12E6bbf3894d4083f33e07309d1f38", + "x": 2, + "y": -2, + "zone": "0x6d61703100000000000000000000000000000000000000000000000000000000", + }, + "0x6d61703200000000000000000000000000000000000000000000000000000000|0|-99": { + "player": "0x1804c8AB1F12E6bbf3894d4083f33e07309d1f38", + "x": 0, + "y": -99, + "zone": "0x6d61703200000000000000000000000000000000000000000000000000000000", + }, + "0x6d61703200000000000000000000000000000000000000000000000000000000|0|99": { + "player": "0x1804c8AB1F12E6bbf3894d4083f33e07309d1f38", + "x": 0, + "y": 99, + "zone": "0x6d61703200000000000000000000000000000000000000000000000000000000", + }, + "0x6d61703939000000000000000000000000000000000000000000000000000000|99|99": { + "player": 
"0x1804c8AB1F12E6bbf3894d4083f33e07309d1f38", + "x": 99, + "y": 99, + "zone": "0x6d61703939000000000000000000000000000000000000000000000000000000", + }, + }, + "StaticArray": {}, + "Vector": {}, + }, + "stash": { + "ResourceIds": { + "0x6e73000000000000000000000000000000000000000000000000000000000000": { + "exists": true, + "resourceId": "0x6e73000000000000000000000000000000000000000000000000000000000000", + }, + "0x6e7373746f726500000000000000000000000000000000000000000000000000": { + "exists": true, + "resourceId": "0x6e7373746f726500000000000000000000000000000000000000000000000000", + }, + "0x6e73776f726c6400000000000000000000000000000000000000000000000000": { + "exists": true, + "resourceId": "0x6e73776f726c6400000000000000000000000000000000000000000000000000", + }, + "0x6f74776f726c6400000000000000000046756e6374696f6e5369676e61747572": { + "exists": true, + "resourceId": "0x6f74776f726c6400000000000000000046756e6374696f6e5369676e61747572", + }, + "0x737900000000000000000000000000004163636573734d616e6167656d656e74": { + "exists": true, + "resourceId": "0x737900000000000000000000000000004163636573734d616e6167656d656e74", + }, + "0x7379000000000000000000000000000042616c616e63655472616e7366657200": { + "exists": true, + "resourceId": "0x7379000000000000000000000000000042616c616e63655472616e7366657200", + }, + "0x73790000000000000000000000000000426174636843616c6c00000000000000": { + "exists": true, + "resourceId": "0x73790000000000000000000000000000426174636843616c6c00000000000000", + }, + "0x73790000000000000000000000000000437573746f6d4572726f727353797374": { + "exists": true, + "resourceId": "0x73790000000000000000000000000000437573746f6d4572726f727353797374", + }, + "0x737900000000000000000000000000004e756d6265724c69737453797374656d": { + "exists": true, + "resourceId": "0x737900000000000000000000000000004e756d6265724c69737453797374656d", + }, + "0x737900000000000000000000000000004e756d62657253797374656d00000000": { + "exists": true, + "resourceId": "0x737900000000000000000000000000004e756d62657253797374656d00000000", + }, + "0x73790000000000000000000000000000526567697374726174696f6e00000000": { + "exists": true, + "resourceId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0x73790000000000000000000000000000566563746f7253797374656d00000000": { + "exists": true, + "resourceId": "0x73790000000000000000000000000000566563746f7253797374656d00000000", + }, + "0x746200000000000000000000000000004d756c74690000000000000000000000": { + "exists": true, + "resourceId": "0x746200000000000000000000000000004d756c74690000000000000000000000", + }, + "0x746200000000000000000000000000004e756d62657200000000000000000000": { + "exists": true, + "resourceId": "0x746200000000000000000000000000004e756d62657200000000000000000000", + }, + "0x746200000000000000000000000000004e756d6265724c697374000000000000": { + "exists": true, + "resourceId": "0x746200000000000000000000000000004e756d6265724c697374000000000000", + }, + "0x74620000000000000000000000000000506f736974696f6e0000000000000000": { + "exists": true, + "resourceId": "0x74620000000000000000000000000000506f736974696f6e0000000000000000", + }, + "0x7462000000000000000000000000000053746174696341727261790000000000": { + "exists": true, + "resourceId": "0x7462000000000000000000000000000053746174696341727261790000000000", + }, + "0x74620000000000000000000000000000566563746f7200000000000000000000": { + "exists": true, + "resourceId": "0x74620000000000000000000000000000566563746f7200000000000000000000", + }, + 
"0x746273746f72650000000000000000005265736f757263654964730000000000": { + "exists": true, + "resourceId": "0x746273746f72650000000000000000005265736f757263654964730000000000", + }, + "0x746273746f726500000000000000000053746f7265486f6f6b73000000000000": { + "exists": true, + "resourceId": "0x746273746f726500000000000000000053746f7265486f6f6b73000000000000", + }, + "0x746273746f72650000000000000000005461626c657300000000000000000000": { + "exists": true, + "resourceId": "0x746273746f72650000000000000000005461626c657300000000000000000000", + }, + "0x7462776f726c6400000000000000000042616c616e6365730000000000000000": { + "exists": true, + "resourceId": "0x7462776f726c6400000000000000000042616c616e6365730000000000000000", + }, + "0x7462776f726c6400000000000000000046756e6374696f6e53656c6563746f72": { + "exists": true, + "resourceId": "0x7462776f726c6400000000000000000046756e6374696f6e53656c6563746f72", + }, + "0x7462776f726c64000000000000000000496e69744d6f64756c65416464726573": { + "exists": true, + "resourceId": "0x7462776f726c64000000000000000000496e69744d6f64756c65416464726573", + }, + "0x7462776f726c64000000000000000000496e7374616c6c65644d6f64756c6573": { + "exists": true, + "resourceId": "0x7462776f726c64000000000000000000496e7374616c6c65644d6f64756c6573", + }, + "0x7462776f726c640000000000000000004e616d65737061636544656c65676174": { + "exists": true, + "resourceId": "0x7462776f726c640000000000000000004e616d65737061636544656c65676174", + }, + "0x7462776f726c640000000000000000004e616d6573706163654f776e65720000": { + "exists": true, + "resourceId": "0x7462776f726c640000000000000000004e616d6573706163654f776e65720000", + }, + "0x7462776f726c640000000000000000005265736f757263654163636573730000": { + "exists": true, + "resourceId": "0x7462776f726c640000000000000000005265736f757263654163636573730000", + }, + "0x7462776f726c6400000000000000000053797374656d486f6f6b730000000000": { + "exists": true, + "resourceId": "0x7462776f726c6400000000000000000053797374656d486f6f6b730000000000", + }, + "0x7462776f726c6400000000000000000053797374656d52656769737472790000": { + "exists": true, + "resourceId": "0x7462776f726c6400000000000000000053797374656d52656769737472790000", + }, + "0x7462776f726c6400000000000000000053797374656d73000000000000000000": { + "exists": true, + "resourceId": "0x7462776f726c6400000000000000000053797374656d73000000000000000000", + }, + "0x7462776f726c640000000000000000005573657244656c65676174696f6e436f": { + "exists": true, + "resourceId": "0x7462776f726c640000000000000000005573657244656c65676174696f6e436f", + }, + }, + "StoreHooks": {}, + "Tables": { + "0x6f74776f726c6400000000000000000046756e6374696f6e5369676e61747572": { + "abiEncodedFieldNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001166756e6374696f6e5369676e6174757265000000000000000000000000000000", + "abiEncodedKeyNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001066756e6374696f6e53656c6563746f7200000000000000000000000000000000", + "fieldLayout": "0x0000000100000000000000000000000000000000000000000000000000000000", + "keySchema": "0x0004010043000000000000000000000000000000000000000000000000000000", + 
"tableId": "0x6f74776f726c6400000000000000000046756e6374696f6e5369676e61747572", + "valueSchema": "0x00000001c5000000000000000000000000000000000000000000000000000000", + }, + "0x746200000000000000000000000000004d756c74690000000000000000000000": { + "abiEncodedFieldNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000036e756d0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000576616c7565000000000000000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000c00000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000014000000000000000000000000000000000000000000000000000000000000000016100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000162000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001630000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000016400000000000000000000000000000000000000000000000000000000000000", + "fieldLayout": "0x0021020020010000000000000000000000000000000000000000000000000000", + "keySchema": "0x0034040003601f2e000000000000000000000000000000000000000000000000", + "tableId": "0x746200000000000000000000000000004d756c74690000000000000000000000", + "valueSchema": "0x002102003f600000000000000000000000000000000000000000000000000000", + }, + "0x746200000000000000000000000000004e756d62657200000000000000000000": { + "abiEncodedFieldNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000576616c7565000000000000000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000036b65790000000000000000000000000000000000000000000000000000000000", + "fieldLayout": "0x0004010004000000000000000000000000000000000000000000000000000000", + "keySchema": "0x0004010003000000000000000000000000000000000000000000000000000000", + "tableId": "0x746200000000000000000000000000004e756d62657200000000000000000000", + "valueSchema": "0x0004010003000000000000000000000000000000000000000000000000000000", + }, + "0x746200000000000000000000000000004e756d6265724c697374000000000000": { + "abiEncodedFieldNames": 
"0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000576616c7565000000000000000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000", + "fieldLayout": "0x0000000100000000000000000000000000000000000000000000000000000000", + "keySchema": "0x0000000000000000000000000000000000000000000000000000000000000000", + "tableId": "0x746200000000000000000000000000004e756d6265724c697374000000000000", + "valueSchema": "0x0000000165000000000000000000000000000000000000000000000000000000", + }, + "0x74620000000000000000000000000000506f736974696f6e0000000000000000": { + "abiEncodedFieldNames": "0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000006706c617965720000000000000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000047a6f6e65000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001780000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000017900000000000000000000000000000000000000000000000000000000000000", + "fieldLayout": "0x0014010014000000000000000000000000000000000000000000000000000000", + "keySchema": "0x002803005f232300000000000000000000000000000000000000000000000000", + "tableId": "0x74620000000000000000000000000000506f736974696f6e0000000000000000", + "valueSchema": "0x0014010061000000000000000000000000000000000000000000000000000000", + }, + "0x7462000000000000000000000000000053746174696341727261790000000000": { + "abiEncodedFieldNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000576616c7565000000000000000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000", + "fieldLayout": "0x0000000100000000000000000000000000000000000000000000000000000000", + "keySchema": "0x0000000000000000000000000000000000000000000000000000000000000000", + "tableId": "0x7462000000000000000000000000000053746174696341727261790000000000", + "valueSchema": "0x0000000181000000000000000000000000000000000000000000000000000000", + }, + "0x74620000000000000000000000000000566563746f7200000000000000000000": { + "abiEncodedFieldNames": 
"0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000001780000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000017900000000000000000000000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000036b65790000000000000000000000000000000000000000000000000000000000", + "fieldLayout": "0x0008020004040000000000000000000000000000000000000000000000000000", + "keySchema": "0x0004010003000000000000000000000000000000000000000000000000000000", + "tableId": "0x74620000000000000000000000000000566563746f7200000000000000000000", + "valueSchema": "0x0008020023230000000000000000000000000000000000000000000000000000", + }, + "0x746273746f72650000000000000000005265736f757263654964730000000000": { + "abiEncodedFieldNames": "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000066578697374730000000000000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000a7265736f75726365496400000000000000000000000000000000000000000000", + "fieldLayout": "0x0001010001000000000000000000000000000000000000000000000000000000", + "keySchema": "0x002001005f000000000000000000000000000000000000000000000000000000", + "tableId": "0x746273746f72650000000000000000005265736f757263654964730000000000", + "valueSchema": "0x0001010060000000000000000000000000000000000000000000000000000000", + }, + "0x746273746f726500000000000000000053746f7265486f6f6b73000000000000": { + "abiEncodedFieldNames": "0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000005686f6f6b73000000000000000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000077461626c65496400000000000000000000000000000000000000000000000000", + "fieldLayout": "0x0000000100000000000000000000000000000000000000000000000000000000", + "keySchema": "0x002001005f000000000000000000000000000000000000000000000000000000", + "tableId": "0x746273746f726500000000000000000053746f7265486f6f6b73000000000000", + "valueSchema": "0x00000001b6000000000000000000000000000000000000000000000000000000", + }, + "0x746273746f72650000000000000000005461626c657300000000000000000000": { + 
"abiEncodedFieldNames": "0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000016000000000000000000000000000000000000000000000000000000000000001a0000000000000000000000000000000000000000000000000000000000000000b6669656c644c61796f757400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000096b6579536368656d610000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000b76616c7565536368656d610000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000012616269456e636f6465644b65794e616d657300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000014616269456e636f6465644669656c644e616d6573000000000000000000000000", + "abiEncodedKeyNames": "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000077461626c65496400000000000000000000000000000000000000000000000000", + "fieldLayout": "0x0060030220202000000000000000000000000000000000000000000000000000", + "keySchema": "0x002001005f000000000000000000000000000000000000000000000000000000", + "tableId": "0x746273746f72650000000000000000005461626c657300000000000000000000", + "valueSchema": "0x006003025f5f5fc4c40000000000000000000000000000000000000000000000", + }, + "0x7462776f726c6400000000000000000042616c616e6365730000000000000000": { + "abiEncodedFieldNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000762616c616e636500000000000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000b6e616d6573706163654964000000000000000000000000000000000000000000", + "fieldLayout": "0x0020010020000000000000000000000000000000000000000000000000000000", + "keySchema": "0x002001005f000000000000000000000000000000000000000000000000000000", + "tableId": "0x7462776f726c6400000000000000000042616c616e6365730000000000000000", + "valueSchema": "0x002001001f000000000000000000000000000000000000000000000000000000", + }, + "0x7462776f726c6400000000000000000046756e6374696f6e53656c6563746f72": { + "abiEncodedFieldNames": 
"0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000873797374656d4964000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001673797374656d46756e6374696f6e53656c6563746f7200000000000000000000", + "abiEncodedKeyNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001066756e6374696f6e53656c6563746f7200000000000000000000000000000000", + "fieldLayout": "0x0024020020040000000000000000000000000000000000000000000000000000", + "keySchema": "0x0004010043000000000000000000000000000000000000000000000000000000", + "tableId": "0x7462776f726c6400000000000000000046756e6374696f6e53656c6563746f72", + "valueSchema": "0x002402005f430000000000000000000000000000000000000000000000000000", + }, + "0x7462776f726c64000000000000000000496e69744d6f64756c65416464726573": { + "abiEncodedFieldNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000576616c7565000000000000000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000", + "fieldLayout": "0x0014010014000000000000000000000000000000000000000000000000000000", + "keySchema": "0x0000000000000000000000000000000000000000000000000000000000000000", + "tableId": "0x7462776f726c64000000000000000000496e69744d6f64756c65416464726573", + "valueSchema": "0x0014010061000000000000000000000000000000000000000000000000000000", + }, + "0x7462776f726c64000000000000000000496e7374616c6c65644d6f64756c6573": { + "abiEncodedFieldNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000b6973496e7374616c6c6564000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000d6d6f64756c654164647265737300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000d617267756d656e74734861736800000000000000000000000000000000000000", + "fieldLayout": "0x0001010001000000000000000000000000000000000000000000000000000000", + "keySchema": "0x00340200615f0000000000000000000000000000000000000000000000000000", + "tableId": "0x7462776f726c64000000000000000000496e7374616c6c65644d6f64756c6573", + "valueSchema": "0x0001010060000000000000000000000000000000000000000000000000000000", + }, + "0x7462776f726c640000000000000000004e616d65737061636544656c65676174": { + 
"abiEncodedFieldNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001364656c65676174696f6e436f6e74726f6c496400000000000000000000000000", + "abiEncodedKeyNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000b6e616d6573706163654964000000000000000000000000000000000000000000", + "fieldLayout": "0x0020010020000000000000000000000000000000000000000000000000000000", + "keySchema": "0x002001005f000000000000000000000000000000000000000000000000000000", + "tableId": "0x7462776f726c640000000000000000004e616d65737061636544656c65676174", + "valueSchema": "0x002001005f000000000000000000000000000000000000000000000000000000", + }, + "0x7462776f726c640000000000000000004e616d6573706163654f776e65720000": { + "abiEncodedFieldNames": "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000056f776e6572000000000000000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000b6e616d6573706163654964000000000000000000000000000000000000000000", + "fieldLayout": "0x0014010014000000000000000000000000000000000000000000000000000000", + "keySchema": "0x002001005f000000000000000000000000000000000000000000000000000000", + "tableId": "0x7462776f726c640000000000000000004e616d6573706163654f776e65720000", + "valueSchema": "0x0014010061000000000000000000000000000000000000000000000000000000", + }, + "0x7462776f726c640000000000000000005265736f757263654163636573730000": { + "abiEncodedFieldNames": "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000066163636573730000000000000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000a7265736f75726365496400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000663616c6c65720000000000000000000000000000000000000000000000000000", + "fieldLayout": "0x0001010001000000000000000000000000000000000000000000000000000000", + "keySchema": "0x003402005f610000000000000000000000000000000000000000000000000000", + "tableId": "0x7462776f726c640000000000000000005265736f757263654163636573730000", + "valueSchema": "0x0001010060000000000000000000000000000000000000000000000000000000", + }, + 
"0x7462776f726c6400000000000000000053797374656d486f6f6b730000000000": { + "abiEncodedFieldNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000576616c7565000000000000000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000873797374656d4964000000000000000000000000000000000000000000000000", + "fieldLayout": "0x0000000100000000000000000000000000000000000000000000000000000000", + "keySchema": "0x002001005f000000000000000000000000000000000000000000000000000000", + "tableId": "0x7462776f726c6400000000000000000053797374656d486f6f6b730000000000", + "valueSchema": "0x00000001b6000000000000000000000000000000000000000000000000000000", + }, + "0x7462776f726c6400000000000000000053797374656d52656769737472790000": { + "abiEncodedFieldNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000873797374656d4964000000000000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000673797374656d0000000000000000000000000000000000000000000000000000", + "fieldLayout": "0x0020010020000000000000000000000000000000000000000000000000000000", + "keySchema": "0x0014010061000000000000000000000000000000000000000000000000000000", + "tableId": "0x7462776f726c6400000000000000000053797374656d52656769737472790000", + "valueSchema": "0x002001005f000000000000000000000000000000000000000000000000000000", + }, + "0x7462776f726c6400000000000000000053797374656d73000000000000000000": { + "abiEncodedFieldNames": "0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000673797374656d0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c7075626c69634163636573730000000000000000000000000000000000000000", + "abiEncodedKeyNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000873797374656d4964000000000000000000000000000000000000000000000000", + "fieldLayout": "0x0015020014010000000000000000000000000000000000000000000000000000", + "keySchema": "0x002001005f000000000000000000000000000000000000000000000000000000", + "tableId": "0x7462776f726c6400000000000000000053797374656d73000000000000000000", + "valueSchema": "0x0015020061600000000000000000000000000000000000000000000000000000", 
+ }, + "0x7462776f726c640000000000000000005573657244656c65676174696f6e436f": { + "abiEncodedFieldNames": "0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001364656c65676174696f6e436f6e74726f6c496400000000000000000000000000", + "abiEncodedKeyNames": "0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000964656c656761746f720000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000964656c6567617465650000000000000000000000000000000000000000000000", + "fieldLayout": "0x0020010020000000000000000000000000000000000000000000000000000000", + "keySchema": "0x0028020061610000000000000000000000000000000000000000000000000000", + "tableId": "0x7462776f726c640000000000000000005573657244656c65676174696f6e436f", + "valueSchema": "0x002001005f000000000000000000000000000000000000000000000000000000", + }, + }, + }, + "world": { + "Balances": {}, + "FunctionSelector": { + "0x05609129": { + "functionSelector": "0x05609129", + "systemFunctionSelector": "0x05609129", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0x0ba51f49": { + "functionSelector": "0x0ba51f49", + "systemFunctionSelector": "0x0ba51f49", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0x1d2257ba": { + "functionSelector": "0x1d2257ba", + "systemFunctionSelector": "0x1d2257ba", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0x219adc2e": { + "functionSelector": "0x219adc2e", + "systemFunctionSelector": "0x219adc2e", + "systemId": "0x737900000000000000000000000000004163636573734d616e6167656d656e74", + }, + "0x26d98102": { + "functionSelector": "0x26d98102", + "systemFunctionSelector": "0x26d98102", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0x306d61a5": { + "functionSelector": "0x306d61a5", + "systemFunctionSelector": "0x306d61a5", + "systemId": "0x737900000000000000000000000000004e756d6265724c69737453797374656d", + }, + "0x3350b6a9": { + "functionSelector": "0x3350b6a9", + "systemFunctionSelector": "0x3350b6a9", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0x40554c3a": { + "functionSelector": "0x40554c3a", + "systemFunctionSelector": "0x40554c3a", + "systemId": "0x737900000000000000000000000000004163636573734d616e6167656d656e74", + }, + "0x45afd199": { + "functionSelector": "0x45afd199", + "systemFunctionSelector": "0x45afd199", + "systemId": "0x7379000000000000000000000000000042616c616e63655472616e7366657200", + }, + "0x4ac3618f": { + "functionSelector": "0x4ac3618f", + "systemFunctionSelector": "0x4ac3618f", + "systemId": "0x73790000000000000000000000000000566563746f7253797374656d00000000", + }, + "0x530f4b60": { + "functionSelector": "0x530f4b60", + "systemFunctionSelector": "0x530f4b60", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0x5f644e3c": { + "functionSelector": "0x5f644e3c", + "systemFunctionSelector": "0x5f644e3c", + "systemId": 
"0x73790000000000000000000000000000437573746f6d4572726f727353797374", + }, + "0x742d6118": { + "functionSelector": "0x742d6118", + "systemFunctionSelector": "0x742d6118", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0x8d53b208": { + "functionSelector": "0x8d53b208", + "systemFunctionSelector": "0x8d53b208", + "systemId": "0x737900000000000000000000000000004163636573734d616e6167656d656e74", + }, + "0x8da798da": { + "functionSelector": "0x8da798da", + "systemFunctionSelector": "0x8da798da", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0x8fc8cf7e": { + "functionSelector": "0x8fc8cf7e", + "systemFunctionSelector": "0x8fc8cf7e", + "systemId": "0x73790000000000000000000000000000426174636843616c6c00000000000000", + }, + "0x9dc63213": { + "functionSelector": "0x9dc63213", + "systemFunctionSelector": "0x9dc63213", + "systemId": "0x737900000000000000000000000000004e756d62657253797374656d00000000", + }, + "0xa4ece52c": { + "functionSelector": "0xa4ece52c", + "systemFunctionSelector": "0xa4ece52c", + "systemId": "0x737900000000000000000000000000004e756d6265724c69737453797374656d", + }, + "0xa92813ad": { + "functionSelector": "0xa92813ad", + "systemFunctionSelector": "0xa92813ad", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0xaa66e9c8": { + "functionSelector": "0xaa66e9c8", + "systemFunctionSelector": "0xaa66e9c8", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0xb29e4089": { + "functionSelector": "0xb29e4089", + "systemFunctionSelector": "0xb29e4089", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0xb8a44c7c": { + "functionSelector": "0xb8a44c7c", + "systemFunctionSelector": "0xb8a44c7c", + "systemId": "0x737900000000000000000000000000004e756d6265724c69737453797374656d", + }, + "0xbfdfaff7": { + "functionSelector": "0xbfdfaff7", + "systemFunctionSelector": "0xbfdfaff7", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0xc9c85a60": { + "functionSelector": "0xc9c85a60", + "systemFunctionSelector": "0xc9c85a60", + "systemId": "0x7379000000000000000000000000000042616c616e63655472616e7366657200", + }, + "0xcdc938c5": { + "functionSelector": "0xcdc938c5", + "systemFunctionSelector": "0xcdc938c5", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0xce5e8dd9": { + "functionSelector": "0xce5e8dd9", + "systemFunctionSelector": "0xce5e8dd9", + "systemId": "0x73790000000000000000000000000000426174636843616c6c00000000000000", + }, + "0xd5f8337f": { + "functionSelector": "0xd5f8337f", + "systemFunctionSelector": "0xd5f8337f", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0xef5d6bbb": { + "functionSelector": "0xef5d6bbb", + "systemFunctionSelector": "0xef5d6bbb", + "systemId": "0x737900000000000000000000000000004163636573734d616e6167656d656e74", + }, + "0xf7f0e440": { + "functionSelector": "0xf7f0e440", + "systemFunctionSelector": "0xf7f0e440", + "systemId": "0x737900000000000000000000000000004e756d6265724c69737453797374656d", + }, + }, + "FunctionSignatur": { + "0x05609129": { + "functionSelector": "0x05609129", + "functionSignature": "unregisterStoreHook(bytes32,address)", + }, + "0x0ba51f49": { + "functionSelector": "0x0ba51f49", + "functionSignature": "registerTable(bytes32,bytes32,bytes32,bytes32,string[],string[])", + }, + "0x1d2257ba": { + 
"functionSelector": "0x1d2257ba", + "functionSignature": "registerDelegation(address,bytes32,bytes)", + }, + "0x219adc2e": { + "functionSelector": "0x219adc2e", + "functionSignature": "renounceOwnership(bytes32)", + }, + "0x26d98102": { + "functionSelector": "0x26d98102", + "functionSignature": "registerFunctionSelector(bytes32,string)", + }, + "0x306d61a5": { + "functionSelector": "0x306d61a5", + "functionSignature": "pushRange(uint32,uint32)", + }, + "0x3350b6a9": { + "functionSelector": "0x3350b6a9", + "functionSignature": "registerSystem(bytes32,address,bool)", + }, + "0x40554c3a": { + "functionSelector": "0x40554c3a", + "functionSignature": "grantAccess(bytes32,address)", + }, + "0x45afd199": { + "functionSelector": "0x45afd199", + "functionSignature": "transferBalanceToAddress(bytes32,address,uint256)", + }, + "0x4ac3618f": { + "functionSelector": "0x4ac3618f", + "functionSignature": "setVector(uint32,int32,int32)", + }, + "0x530f4b60": { + "functionSelector": "0x530f4b60", + "functionSignature": "registerStoreHook(bytes32,address,uint8)", + }, + "0x5f644e3c": { + "functionSelector": "0x5f644e3c", + "functionSignature": "stub(uint256)", + }, + "0x742d6118": { + "functionSelector": "0x742d6118", + "functionSignature": "registerRootFunctionSelector(bytes32,string,bytes4)", + }, + "0x8d53b208": { + "functionSelector": "0x8d53b208", + "functionSignature": "revokeAccess(bytes32,address)", + }, + "0x8da798da": { + "functionSelector": "0x8da798da", + "functionSignature": "installModule(address,bytes)", + }, + "0x8fc8cf7e": { + "functionSelector": "0x8fc8cf7e", + "functionSignature": "batchCallFrom((address,bytes32,bytes)[])", + }, + "0x9dc63213": { + "functionSelector": "0x9dc63213", + "functionSignature": "setNumber(uint32,uint32)", + }, + "0xa4ece52c": { + "functionSelector": "0xa4ece52c", + "functionSignature": "pop()", + }, + "0xa92813ad": { + "functionSelector": "0xa92813ad", + "functionSignature": "unregisterSystemHook(bytes32,address)", + }, + "0xaa66e9c8": { + "functionSelector": "0xaa66e9c8", + "functionSignature": "unregisterNamespaceDelegation(bytes32)", + }, + "0xb29e4089": { + "functionSelector": "0xb29e4089", + "functionSignature": "registerNamespace(bytes32)", + }, + "0xb8a44c7c": { + "functionSelector": "0xb8a44c7c", + "functionSignature": "set(uint32[])", + }, + "0xbfdfaff7": { + "functionSelector": "0xbfdfaff7", + "functionSignature": "registerNamespaceDelegation(bytes32,bytes32,bytes)", + }, + "0xc9c85a60": { + "functionSelector": "0xc9c85a60", + "functionSignature": "transferBalanceToNamespace(bytes32,bytes32,uint256)", + }, + "0xcdc938c5": { + "functionSelector": "0xcdc938c5", + "functionSignature": "unregisterDelegation(address)", + }, + "0xce5e8dd9": { + "functionSelector": "0xce5e8dd9", + "functionSignature": "batchCall((bytes32,bytes)[])", + }, + "0xd5f8337f": { + "functionSelector": "0xd5f8337f", + "functionSignature": "registerSystemHook(bytes32,address,uint8)", + }, + "0xef5d6bbb": { + "functionSelector": "0xef5d6bbb", + "functionSignature": "transferOwnership(bytes32,address)", + }, + "0xf7f0e440": { + "functionSelector": "0xf7f0e440", + "functionSignature": "push(uint32)", + }, + }, + "InitModuleAddres": { + "": { + "value": "0xC19519644C381AbE163caB1c4AB66c3c91E508A5", + }, + }, + "InstalledModules": { + "0xC19519644C381AbE163caB1c4AB66c3c91E508A5|0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470": { + "argumentsHash": "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470", + "isInstalled": true, + "moduleAddress": 
"0xC19519644C381AbE163caB1c4AB66c3c91E508A5", + }, + }, + "NamespaceDelegat": {}, + "NamespaceOwner": { + "0x6e73000000000000000000000000000000000000000000000000000000000000": { + "namespaceId": "0x6e73000000000000000000000000000000000000000000000000000000000000", + "owner": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", + }, + "0x6e7373746f726500000000000000000000000000000000000000000000000000": { + "namespaceId": "0x6e7373746f726500000000000000000000000000000000000000000000000000", + "owner": "0x0C70aC0D3dbe7EA7f0eb625C65c1092b8Bb415c8", + }, + "0x6e73776f726c6400000000000000000000000000000000000000000000000000": { + "namespaceId": "0x6e73776f726c6400000000000000000000000000000000000000000000000000", + "owner": "0x0C70aC0D3dbe7EA7f0eb625C65c1092b8Bb415c8", + }, + }, + "ResourceAccess": { + "0x6e73000000000000000000000000000000000000000000000000000000000000|0x176906298819f6778127AD98261f778db597af56": { + "access": true, + "caller": "0x176906298819f6778127AD98261f778db597af56", + "resourceId": "0x6e73000000000000000000000000000000000000000000000000000000000000", + }, + "0x6e73000000000000000000000000000000000000000000000000000000000000|0x17ffDEfF94ed0b80c493A179d4B3b09D6d71f627": { + "access": true, + "caller": "0x17ffDEfF94ed0b80c493A179d4B3b09D6d71f627", + "resourceId": "0x6e73000000000000000000000000000000000000000000000000000000000000", + }, + "0x6e73000000000000000000000000000000000000000000000000000000000000|0x457a739FEbe34229495ac45d6c884E5404B7002b": { + "access": true, + "caller": "0x457a739FEbe34229495ac45d6c884E5404B7002b", + "resourceId": "0x6e73000000000000000000000000000000000000000000000000000000000000", + }, + "0x6e73000000000000000000000000000000000000000000000000000000000000|0x53E5c08d82A377167069Ade46d087Ab753538608": { + "access": true, + "caller": "0x53E5c08d82A377167069Ade46d087Ab753538608", + "resourceId": "0x6e73000000000000000000000000000000000000000000000000000000000000", + }, + "0x6e73000000000000000000000000000000000000000000000000000000000000|0x8E82b6c00aA060d3432129C57B159D8Dd10cFFdc": { + "access": true, + "caller": "0x8E82b6c00aA060d3432129C57B159D8Dd10cFFdc", + "resourceId": "0x6e73000000000000000000000000000000000000000000000000000000000000", + }, + "0x6e73000000000000000000000000000000000000000000000000000000000000|0x92aC32a7704741a284881aeC75dCECa7F1cE117c": { + "access": true, + "caller": "0x92aC32a7704741a284881aeC75dCECa7F1cE117c", + "resourceId": "0x6e73000000000000000000000000000000000000000000000000000000000000", + }, + "0x6e73000000000000000000000000000000000000000000000000000000000000|0xA274B9a7E743cd8dF3c6Fd0aBD47eD55Fc943BC3": { + "access": true, + "caller": "0xA274B9a7E743cd8dF3c6Fd0aBD47eD55Fc943BC3", + "resourceId": "0x6e73000000000000000000000000000000000000000000000000000000000000", + }, + "0x6e73000000000000000000000000000000000000000000000000000000000000|0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266": { + "access": true, + "caller": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", + "resourceId": "0x6e73000000000000000000000000000000000000000000000000000000000000", + }, + "0x6e73000000000000000000000000000000000000000000000000000000000000|0xfc5b03FbAe0bca9422ee706687C00072475A0337": { + "access": true, + "caller": "0xfc5b03FbAe0bca9422ee706687C00072475A0337", + "resourceId": "0x6e73000000000000000000000000000000000000000000000000000000000000", + }, + "0x6e7373746f726500000000000000000000000000000000000000000000000000|0x0C70aC0D3dbe7EA7f0eb625C65c1092b8Bb415c8": { + "access": true, + "caller": "0x0C70aC0D3dbe7EA7f0eb625C65c1092b8Bb415c8", + 
"resourceId": "0x6e7373746f726500000000000000000000000000000000000000000000000000", + }, + "0x6e73776f726c6400000000000000000000000000000000000000000000000000|0x0C70aC0D3dbe7EA7f0eb625C65c1092b8Bb415c8": { + "access": true, + "caller": "0x0C70aC0D3dbe7EA7f0eb625C65c1092b8Bb415c8", + "resourceId": "0x6e73776f726c6400000000000000000000000000000000000000000000000000", + }, + }, + "SystemHooks": {}, + "SystemRegistry": { + "0x176906298819f6778127AD98261f778db597af56": { + "system": "0x176906298819f6778127AD98261f778db597af56", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0x17ffDEfF94ed0b80c493A179d4B3b09D6d71f627": { + "system": "0x17ffDEfF94ed0b80c493A179d4B3b09D6d71f627", + "systemId": "0x737900000000000000000000000000004163636573734d616e6167656d656e74", + }, + "0x457a739FEbe34229495ac45d6c884E5404B7002b": { + "system": "0x457a739FEbe34229495ac45d6c884E5404B7002b", + "systemId": "0x737900000000000000000000000000004e756d6265724c69737453797374656d", + }, + "0x53E5c08d82A377167069Ade46d087Ab753538608": { + "system": "0x53E5c08d82A377167069Ade46d087Ab753538608", + "systemId": "0x73790000000000000000000000000000426174636843616c6c00000000000000", + }, + "0x8E82b6c00aA060d3432129C57B159D8Dd10cFFdc": { + "system": "0x8E82b6c00aA060d3432129C57B159D8Dd10cFFdc", + "systemId": "0x73790000000000000000000000000000566563746f7253797374656d00000000", + }, + "0x92aC32a7704741a284881aeC75dCECa7F1cE117c": { + "system": "0x92aC32a7704741a284881aeC75dCECa7F1cE117c", + "systemId": "0x737900000000000000000000000000004e756d62657253797374656d00000000", + }, + "0xA274B9a7E743cd8dF3c6Fd0aBD47eD55Fc943BC3": { + "system": "0xA274B9a7E743cd8dF3c6Fd0aBD47eD55Fc943BC3", + "systemId": "0x7379000000000000000000000000000042616c616e63655472616e7366657200", + }, + "0xfc5b03FbAe0bca9422ee706687C00072475A0337": { + "system": "0xfc5b03FbAe0bca9422ee706687C00072475A0337", + "systemId": "0x73790000000000000000000000000000437573746f6d4572726f727353797374", + }, + }, + "Systems": { + "0x737900000000000000000000000000004163636573734d616e6167656d656e74": { + "publicAccess": true, + "system": "0x17ffDEfF94ed0b80c493A179d4B3b09D6d71f627", + "systemId": "0x737900000000000000000000000000004163636573734d616e6167656d656e74", + }, + "0x7379000000000000000000000000000042616c616e63655472616e7366657200": { + "publicAccess": true, + "system": "0xA274B9a7E743cd8dF3c6Fd0aBD47eD55Fc943BC3", + "systemId": "0x7379000000000000000000000000000042616c616e63655472616e7366657200", + }, + "0x73790000000000000000000000000000426174636843616c6c00000000000000": { + "publicAccess": true, + "system": "0x53E5c08d82A377167069Ade46d087Ab753538608", + "systemId": "0x73790000000000000000000000000000426174636843616c6c00000000000000", + }, + "0x73790000000000000000000000000000437573746f6d4572726f727353797374": { + "publicAccess": true, + "system": "0xfc5b03FbAe0bca9422ee706687C00072475A0337", + "systemId": "0x73790000000000000000000000000000437573746f6d4572726f727353797374", + }, + "0x737900000000000000000000000000004e756d6265724c69737453797374656d": { + "publicAccess": true, + "system": "0x457a739FEbe34229495ac45d6c884E5404B7002b", + "systemId": "0x737900000000000000000000000000004e756d6265724c69737453797374656d", + }, + "0x737900000000000000000000000000004e756d62657253797374656d00000000": { + "publicAccess": true, + "system": "0x92aC32a7704741a284881aeC75dCECa7F1cE117c", + "systemId": "0x737900000000000000000000000000004e756d62657253797374656d00000000", + }, + 
"0x73790000000000000000000000000000526567697374726174696f6e00000000": { + "publicAccess": true, + "system": "0x176906298819f6778127AD98261f778db597af56", + "systemId": "0x73790000000000000000000000000000526567697374726174696f6e00000000", + }, + "0x73790000000000000000000000000000566563746f7253797374656d00000000": { + "publicAccess": true, + "system": "0x8E82b6c00aA060d3432129C57B159D8Dd10cFFdc", + "systemId": "0x73790000000000000000000000000000566563746f7253797374656d00000000", + }, + }, + "UserDelegationCo": {}, + }, + } + `); + }); +}); diff --git a/packages/store-sync/src/stash/createStorageAdapter.ts b/packages/store-sync/src/stash/createStorageAdapter.ts new file mode 100644 index 0000000000..43a73216f8 --- /dev/null +++ b/packages/store-sync/src/stash/createStorageAdapter.ts @@ -0,0 +1,157 @@ +import { hexToResource, resourceToLabel, spliceHex } from "@latticexyz/common"; +import { + KeySchema, + decodeKey, + decodeValueArgs, + encodeValueArgs, + getKeySchema, + getSchemaTypes, + getValueSchema, +} from "@latticexyz/protocol-parser/internal"; +import { size } from "viem"; +import { isTableRegistrationLog } from "../isTableRegistrationLog"; +import { logToTable } from "../logToTable"; +import { StorageAdapter, StorageAdapterBlock } from "../common"; +import { Stash, getConfig, getTable, registerTable } from "@latticexyz/stash/internal"; +import { debug } from "./debug"; +import { defineTable } from "@latticexyz/store/config/v2"; + +export type CreateStorageAdapterOptions = { + stash: Stash; +}; + +export type CreateStorageAdapterResult = { + storageAdapter: StorageAdapter; +}; + +const emptyValueArgs = { staticData: "0x", encodedLengths: "0x", dynamicData: "0x" } as const; + +export function createStorageAdapter({ stash }: CreateStorageAdapterOptions): CreateStorageAdapterResult { + async function storageAdapter({ logs }: StorageAdapterBlock): Promise { + const newTables = logs.filter(isTableRegistrationLog).map(logToTable); + + for (const newTable of newTables) { + // TODO: switch this to `newTable.label` once available + const existingTable = stash.get().config[newTable.namespace]?.[newTable.name]; + if (existingTable) { + console.warn("table already registered, ignoring", { + newTable, + existingTable, + }); + } else { + // TODO: create util for converting table config + // TODO: add label and namespaceLabel once available + registerTable({ + stash, + table: defineTable({ + ...newTable, + key: newTable.key, + label: newTable.name, + namespaceLabel: newTable.namespace, + schema: Object.fromEntries(Object.entries(newTable.schema).map(([field, { type }]) => [field, type])), + } as never), + }); + } + } + + // Convert logs in (partial) table updates + for (const log of logs) { + // TODO: find table config by table ID + const { namespace, name } = hexToResource(log.args.tableId); + // TODO: use label once available + const tableConfig = stash.get().config[namespace][name]; + const boundTable = getTable({ + stash, + table: getConfig({ stash, table: { namespaceLabel: namespace, label: name } }), + }); + + if (!tableConfig || !boundTable) { + debug(`skipping update for unknown table: ${resourceToLabel({ namespace, name })} at ${log.address}`); + continue; + } + + const valueSchema = getSchemaTypes(getValueSchema(tableConfig)); + const keySchema = getSchemaTypes(getKeySchema(tableConfig)) as KeySchema; + const key = decodeKey(keySchema, log.args.keyTuple); + + if (log.eventName === "Store_SetRecord") { + // TODO: is there a more elegant way to do this (converting table config to expected 
schema)? + const record = decodeValueArgs(valueSchema, log.args); + debug("setting table record", { + namespace: tableConfig.namespace, + name: tableConfig.name, + key, + record, + }); + + boundTable.setRecord({ key, record }); + } else if (log.eventName === "Store_SpliceStaticData") { + // TODO: add tests that this works when no record had been set before + const previousRecord = boundTable.getRecord({ key }); + + // TODO: maybe better to also stash the static data than to re-encode here? + const { + staticData: previousStaticData, + encodedLengths, + dynamicData, + } = previousRecord ? encodeValueArgs(valueSchema, previousRecord) : emptyValueArgs; + + const newStaticData = spliceHex(previousStaticData, log.args.start, size(log.args.data), log.args.data); + const newValue = decodeValueArgs(valueSchema, { + staticData: newStaticData, + encodedLengths, + dynamicData, + }); + + debug("setting record via splice static", { + namespace, + name, + key, + previousStaticData, + newStaticData, + previousRecord, + newValue, + }); + boundTable.setRecord({ key, record: newValue }); + } else if (log.eventName === "Store_SpliceDynamicData") { + // TODO: add tests that this works when no record had been set before + const previousRecord = boundTable.getRecord({ key }); + + // TODO: maybe better to also stash the dynamic data than to re-encode here? + const { staticData, dynamicData: previousDynamicData } = previousRecord + ? encodeValueArgs(valueSchema, previousRecord) + : emptyValueArgs; + + const newDynamicData = spliceHex(previousDynamicData, log.args.start, log.args.deleteCount, log.args.data); + const newValue = decodeValueArgs(valueSchema, { + staticData, + // TODO: handle unchanged encoded lengths (comment taken from recs sync, what does this mean?)
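+ // Note: Store_SpliceDynamicData emits the updated (post-splice) encoded lengths, so log.args.encodedLengths below already reflects the new dynamic field sizes.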
+ encodedLengths: log.args.encodedLengths, + dynamicData: newDynamicData, + }); + + debug("setting record via splice dynamic", { + namespace, + name, + key, + previousDynamicData, + newDynamicData, + previousRecord, + newValue, + }); + + boundTable.setRecord({ key, record: newValue }); + } else if (log.eventName === "Store_DeleteRecord") { + debug("deleting record", { + namespace, + name, + key, + }); + boundTable.deleteRecord({ key }); + } + } + } + + return { storageAdapter }; +} diff --git a/packages/store-sync/src/stash/debug.ts b/packages/store-sync/src/stash/debug.ts new file mode 100644 index 0000000000..fe255ff620 --- /dev/null +++ b/packages/store-sync/src/stash/debug.ts @@ -0,0 +1,10 @@ +import { debug as parentDebug } from "../debug"; + +export const debug = parentDebug.extend("stash"); +export const error = parentDebug.extend("stash"); + +// Pipe debug output to stdout instead of stderr +debug.log = console.debug.bind(console); + +// Pipe error output to stderr +error.log = console.error.bind(console); diff --git a/packages/store-sync/src/stash/index.ts b/packages/store-sync/src/stash/index.ts new file mode 100644 index 0000000000..acfaf6b1ad --- /dev/null +++ b/packages/store-sync/src/stash/index.ts @@ -0,0 +1,2 @@ +export * from "./createStorageAdapter"; +export * from "./syncToStash"; diff --git a/packages/store-sync/src/stash/syncToStash.ts b/packages/store-sync/src/stash/syncToStash.ts new file mode 100644 index 0000000000..4931c4d3bc --- /dev/null +++ b/packages/store-sync/src/stash/syncToStash.ts @@ -0,0 +1,98 @@ +import { Store as StoreConfig } from "@latticexyz/store"; +import { SyncOptions, SyncResult } from "../common"; +import { createStorageAdapter } from "./createStorageAdapter"; +import { createStoreSync } from "../createStoreSync"; +import { SyncStep } from "../SyncStep"; +import { BoundTable, Stash, registerTable } from "@latticexyz/stash/internal"; +import { defineTable } from "@latticexyz/store/config/v2"; + +type SyncToStashOptions = SyncOptions & { + stash: Stash; + config: StoreConfig; + startSync?: boolean; +}; + +const syncProgressConfig = defineTable({ + namespace: "store_sync", + label: "SyncProgress", + schema: { + step: "string", + percentage: "uint32", + latestBlockNumber: "uint256", + lastBlockNumberProcessed: "uint256", + message: "string", + }, + key: [], +}); + +type SyncToStashResult = SyncResult & { + stopSync: () => void; + SyncProgress: BoundTable; +}; + +export async function syncToStash({ + stash, + config, + startSync = true, + ...syncOptions +}: SyncToStashOptions): Promise { + const { storageAdapter } = createStorageAdapter({ + stash, + }); + + // Create SyncProgress table + const SyncProgress = registerTable({ + stash, + table: syncProgressConfig, + }); + + /** + * Two potential approaches: + * + * - Fetch initial state from dozer SQL + * - Encode to logs + * - Fetch logs from min to max from RPC and append them at the end + * - Pass everything as `initialRecords` to `createStoreSync` + * + * OR + * + * - Fetch initial state from dozer SQL, keep in temporary decoded structure + * - Fetch logs from min to max from RPC, decode and apply to temporary structure + * - Write everything in one go to the stash + * - Skip fetching from indexer, continue syncing from RPC + * + * In both approaches we need the local tables to not send updates until the sync is done, + * then send an update with the entire stash. + * + * The first approach seems like a lot of processing overhead (encoding records) but would be + * compatible with all state libs. 
The second approach would be specific to ZustandQuery. + * + * Let's do first approach for now. + */ + + const storeSync = await createStoreSync({ + storageAdapter, + config, + ...syncOptions, + onProgress: ({ step, percentage, latestBlockNumber, lastBlockNumberProcessed, message }) => { + // already live, no need for more progress updates + if (SyncProgress.getRecord({ key: {} })?.step === SyncStep.LIVE) return; + + SyncProgress.setRecord({ + key: {}, + record: { step, percentage, latestBlockNumber, lastBlockNumberProcessed, message }, + }); + }, + }); + + const sub = startSync ? storeSync.storedBlockLogs$.subscribe() : null; + const stopSync = (): void => { + sub?.unsubscribe(); + }; + + return { + ...storeSync, + stopSync, + SyncProgress, + }; +} diff --git a/packages/store-sync/tsup.config.ts b/packages/store-sync/tsup.config.ts index d8f6860bea..d47fcb0bb5 100644 --- a/packages/store-sync/tsup.config.ts +++ b/packages/store-sync/tsup.config.ts @@ -3,6 +3,7 @@ import { defineConfig } from "tsup"; export default defineConfig({ entry: [ "src/index.ts", + "src/dozer/index.ts", "src/sqlite/index.ts", "src/postgres/index.ts", "src/postgres-decoded/index.ts", @@ -10,6 +11,7 @@ export default defineConfig({ "src/trpc-indexer/index.ts", "src/indexer-client/index.ts", "src/zustand/index.ts", + "src/stash/index.ts", ], target: "esnext", format: ["esm"], diff --git a/packages/store/ts/config/v2/input.ts b/packages/store/ts/config/v2/input.ts index 43509ab357..ca103761bf 100644 --- a/packages/store/ts/config/v2/input.ts +++ b/packages/store/ts/config/v2/input.ts @@ -31,6 +31,11 @@ export type TableInput = { * Defaults to the nearest namespace in the config or root namespace if not set. */ readonly namespace?: string; + /** + * Human-readable namespace label. + * Defaults to the nearest namespace in the config or root namespace if not set. + */ + readonly namespaceLabel?: string; /** * Table name used in table's resource ID. * Defaults to the first 16 characters of `label` if not set. @@ -47,7 +52,7 @@ export type TableShorthandInput = SchemaInput | string; export type TablesInput = { // remove label and namespace as these are set contextually // and allow defining a table using shorthand - readonly [label: string]: Omit | TableShorthandInput; + readonly [label: string]: Omit | TableShorthandInput; }; export type CodegenInput = Partial; diff --git a/packages/store/ts/config/v2/namespace.ts b/packages/store/ts/config/v2/namespace.ts index 4d0f22a40d..fd133a2f8d 100644 --- a/packages/store/ts/config/v2/namespace.ts +++ b/packages/store/ts/config/v2/namespace.ts @@ -36,7 +36,7 @@ export type resolveNamespace = input { readonly [label in keyof input["tables"]]: mergeIfUndefined< expandTableShorthand, - { readonly namespace: string } + { readonly namespace: string; readonly namespaceLabel: input["label"] } >; }, scope @@ -49,14 +49,14 @@ export function resolveNamespace { - const label = input.label; - const namespace = input.namespace ?? label.slice(0, 14); + const namespaceLabel = input.label; + const namespace = input.namespace ?? namespaceLabel.slice(0, 14); return { - label, + label: namespaceLabel, namespace, tables: resolveTables( flatMorph(input.tables ?? 
{}, (label, table) => { - return [label, mergeIfUndefined(expandTableShorthand(table, scope), { namespace })]; + return [label, mergeIfUndefined(expandTableShorthand(table, scope), { namespace, namespaceLabel })]; }), scope, ), diff --git a/packages/store/ts/config/v2/store.test.ts b/packages/store/ts/config/v2/store.test.ts index 6943925c8e..6e2275cd1b 100644 --- a/packages/store/ts/config/v2/store.test.ts +++ b/packages/store/ts/config/v2/store.test.ts @@ -28,6 +28,7 @@ describe("defineStore", () => { label: "Example", type: "table", namespace: "", + namespaceLabel: "", name: "Example", tableId: "0x746200000000000000000000000000004578616d706c65000000000000000000", schema: { @@ -47,6 +48,7 @@ describe("defineStore", () => { label: "Example", type: "table", namespace: "", + namespaceLabel: "", name: "Example", tableId: "0x746200000000000000000000000000004578616d706c65000000000000000000", schema: { @@ -80,6 +82,7 @@ describe("defineStore", () => { readonly Example: { readonly label: "Example" readonly type: "table" + readonly namespaceLabel: "" readonly namespace: string readonly name: string readonly tableId: \`0x\${string}\` @@ -113,6 +116,7 @@ describe("defineStore", () => { readonly Example: { readonly label: "Example" readonly type: "table" + readonly namespaceLabel: "" readonly namespace: string readonly name: string readonly tableId: \`0x\${string}\` @@ -180,6 +184,7 @@ describe("defineStore", () => { label: "Example", type: "table", namespace: "", + namespaceLabel: "", name: "Example", tableId: "0x746200000000000000000000000000004578616d706c65000000000000000000", schema: { @@ -199,6 +204,7 @@ describe("defineStore", () => { label: "Example", type: "table", namespace: "", + namespaceLabel: "", name: "Example", tableId: "0x746200000000000000000000000000004578616d706c65000000000000000000", schema: { @@ -232,6 +238,7 @@ describe("defineStore", () => { readonly Example: { readonly label: "Example" readonly type: "table" + readonly namespaceLabel: "root" readonly namespace: string readonly name: string readonly tableId: \`0x\${string}\` @@ -265,6 +272,7 @@ describe("defineStore", () => { readonly root__Example: { readonly label: "Example" readonly type: "table" + readonly namespaceLabel: "root" readonly namespace: string readonly name: string readonly tableId: \`0x\${string}\` diff --git a/packages/store/ts/config/v2/table.test.ts b/packages/store/ts/config/v2/table.test.ts index 69d99eafea..690822c5cf 100644 --- a/packages/store/ts/config/v2/table.test.ts +++ b/packages/store/ts/config/v2/table.test.ts @@ -57,6 +57,7 @@ describe("resolveTable", () => { label: "", type: "table", namespace: "" as string, + namespaceLabel: "", name: "" as string, tableId: resourceToHex({ type: "table", namespace: "", name: "" }), schema: { @@ -82,6 +83,7 @@ describe("resolveTable", () => { label: "", type: "table", namespace: "" as string, + namespaceLabel: "", name: "" as string, tableId: resourceToHex({ type: "table", namespace: "", name: "" }), schema: { @@ -113,6 +115,7 @@ describe("resolveTable", () => { label: "", type: "table", namespace: "" as string, + namespaceLabel: "", name: "" as string, tableId: resourceToHex({ type: "table", namespace: "", name: "" }), schema: { diff --git a/packages/store/ts/config/v2/table.ts b/packages/store/ts/config/v2/table.ts index 6937a671ba..24242defd8 100644 --- a/packages/store/ts/config/v2/table.ts +++ b/packages/store/ts/config/v2/table.ts @@ -140,6 +140,11 @@ export type resolveTable = input extends Tab ? 
{ readonly label: input["label"]; readonly type: undefined extends input["type"] ? typeof TABLE_DEFAULTS.type : input["type"]; + readonly namespaceLabel: undefined extends input["namespaceLabel"] + ? undefined extends input["namespace"] + ? typeof TABLE_DEFAULTS.namespace + : input["namespace"] + : input["namespaceLabel"]; readonly namespace: string; readonly name: string; readonly tableId: Hex; @@ -159,6 +164,7 @@ export function resolveTable { Example: { label: "Example", type: "table", + namespaceLabel: "", namespace: "", name: "Example", tableId: "0x746200000000000000000000000000004578616d706c65000000000000000000", @@ -163,6 +164,7 @@ describe("defineWorld", () => { Example: { label: "Example", type: "table", + namespaceLabel: "", namespace: "", name: "Example", tableId: "0x746200000000000000000000000000004578616d706c65000000000000000000", @@ -205,6 +207,7 @@ describe("defineWorld", () => { readonly Example: { readonly label: "Example" readonly type: "table" + readonly namespaceLabel: "" readonly namespace: string readonly name: string readonly tableId: \`0x\${string}\` @@ -255,6 +258,7 @@ describe("defineWorld", () => { readonly Example: { readonly label: "Example" readonly type: "table" + readonly namespaceLabel: "" readonly namespace: string readonly name: string readonly tableId: \`0x\${string}\` @@ -324,6 +328,7 @@ describe("defineWorld", () => { Example: { label: "Example", type: "table", + namespaceLabel: "root", namespace: "", name: "Example", tableId: "0x746200000000000000000000000000004578616d706c65000000000000000000", @@ -344,6 +349,7 @@ describe("defineWorld", () => { root__Example: { label: "Example", type: "table", + namespaceLabel: "root", namespace: "", name: "Example", tableId: "0x746200000000000000000000000000004578616d706c65000000000000000000", @@ -386,6 +392,7 @@ describe("defineWorld", () => { readonly root__Example: { readonly label: "Example" readonly type: "table" + readonly namespaceLabel: "root" readonly namespace: string readonly name: string readonly tableId: \`0x\${string}\` @@ -436,6 +443,7 @@ describe("defineWorld", () => { readonly Example: { readonly label: "Example" readonly type: "table" + readonly namespaceLabel: "root" readonly namespace: string readonly name: string readonly tableId: \`0x\${string}\` diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c0d064ab13..0002758735 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -43,7 +43,7 @@ importers: version: 7.0.0 glob: specifier: ^10.4.2 - version: 10.4.2 + version: 10.4.5 husky: specifier: '>=6' version: 6.0.0 @@ -82,7 +82,7 @@ importers: version: 7.0.0 glob: specifier: ^10.4.2 - version: 10.4.2 + version: 10.4.5 yargs: specifier: ^17.7.1 version: 17.7.1 @@ -189,7 +189,7 @@ importers: version: 6.3.0 glob: specifier: ^10.4.2 - version: 10.4.2 + version: 10.4.5 openurl: specifier: ^1.1.1 version: 1.1.1 @@ -705,6 +705,67 @@ importers: specifier: ^6.7.0 version: 6.7.0(postcss@8.4.23)(typescript@5.4.2) + packages/stash: + dependencies: + '@arktype/util': + specifier: 0.0.40 + version: 0.0.40 + '@latticexyz/config': + specifier: workspace:* + version: link:../config + '@latticexyz/protocol-parser': + specifier: workspace:* + version: link:../protocol-parser + '@latticexyz/recs': + specifier: workspace:* + version: link:../recs + '@latticexyz/schema-type': + specifier: workspace:* + version: link:../schema-type + '@latticexyz/store': + specifier: workspace:* + version: link:../store + mutative: + specifier: ^1.0.6 + version: 1.0.6 + react: + specifier: ^18.2.0 + version: 18.2.0 + rxjs: + specifier: 
7.5.5 + version: 7.5.5 + viem: + specifier: 2.9.20 + version: 2.9.20(typescript@5.4.2)(zod@3.23.8) + zustand: + specifier: ^4.3.7 + version: 4.3.7(react@18.2.0) + zustand-mutative: + specifier: ^1.0.1 + version: 1.0.1(@types/react@18.2.22)(mutative@1.0.6)(react@18.2.0)(zustand@4.3.7(react@18.2.0)) + devDependencies: + '@arktype/attest': + specifier: 0.7.5 + version: 0.7.5(typescript@5.4.2) + '@testing-library/react': + specifier: ^16.0.0 + version: 16.0.0(@testing-library/dom@10.4.0)(@types/react-dom@18.2.7)(@types/react@18.2.22)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@testing-library/react-hooks': + specifier: ^8.0.1 + version: 8.0.1(@types/react@18.2.22)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@types/react': + specifier: 18.2.22 + version: 18.2.22 + react-dom: + specifier: ^18.2.0 + version: 18.2.0(react@18.2.0) + tsup: + specifier: ^6.7.0 + version: 6.7.0(postcss@8.4.23)(typescript@5.4.2) + vitest: + specifier: 0.34.6 + version: 0.34.6(jsdom@22.1.0) + packages/store: dependencies: '@ark/util': @@ -907,6 +968,9 @@ importers: '@latticexyz/schema-type': specifier: workspace:* version: link:../schema-type + '@latticexyz/stash': + specifier: workspace:* + version: link:../stash '@latticexyz/store': specifier: workspace:* version: link:../store @@ -1065,7 +1129,7 @@ importers: version: https://codeload.github.com/foundry-rs/forge-std/tar.gz/74cfb77e308dd188d2f58864aaf44963ae6b88b1 glob: specifier: ^10.4.2 - version: 10.4.2 + version: 10.4.5 solhint: specifier: ^3.3.7 version: 3.3.7 @@ -1126,7 +1190,7 @@ importers: version: https://codeload.github.com/foundry-rs/forge-std/tar.gz/74cfb77e308dd188d2f58864aaf44963ae6b88b1 glob: specifier: ^10.4.2 - version: 10.4.2 + version: 10.4.5 solhint: specifier: ^3.3.7 version: 3.3.7 @@ -1223,6 +1287,27 @@ packages: '@ark/util@0.2.0': resolution: {integrity: sha512-L38ALnAR1wXxYQN9pn0ir+9ybPKA0O1+hl8PKZOvT9JSbTues4kEB8lgo80DeE0X1Jw+qoHe4lxgSwaHNa3xyg==} + '@arktype/attest@0.7.5': + resolution: {integrity: sha512-ZOF9uqLbvoVO6RHhlByJEBBj5qhWLpaCK/wWa5guD1OQR1/a7JZ9jCrDAcaASt6tYBA3dGhDerXhc7FcDWlRQw==} + hasBin: true + peerDependencies: + typescript: '*' + + '@arktype/fs@0.0.19': + resolution: {integrity: sha512-ZEiSc6DgJANSgMTee9lueumCH/3gIikJWT8wlN8CyVk/IDFKFbJb3/BHj906Aw+APvCc8oyvu+2Hanshkgh4Bg==} + + '@arktype/schema@0.1.2': + resolution: {integrity: sha512-ggvxs5P0toqd/4/XK76URQrtyOYpbYcLhirEZeTso6FxkloPa0lT+whPg7DNQj5qi2OQXLUHBYKMx9DOb13ViQ==} + + '@arktype/util@0.0.38': + resolution: {integrity: sha512-IvYMGnkUASJllRk3mdBVgckomKx2LNsDTrWCxz04EBK1OuU+4fJ/smSjxgZVWfopNXZds9sHNxZgTJOIw7GvJw==} + + '@arktype/util@0.0.40': + resolution: {integrity: sha512-dwC3xZh9Bz6LWSJq71AUoh06zB0qM65N4zS/NNogbumhbO55yot7yqDlv0qeBMNOWXj/gX7l7l58v0EqEaXN2w==} + + '@arktype/util@0.0.41': + resolution: {integrity: sha512-0YURzJ42v+lhlP1t5Dj90YezETRTCdFU0oM4xMVpYsmPx/DHJzr9n7AX1QPAlYWH4wY7hYY3gwai3O+8VntPgw==} + '@aws-crypto/ie11-detection@3.0.0': resolution: {integrity: sha512-341lBBkiY1DfDNKai/wXM3aujNBkXR7tq1URPQDL9wi3AUbI80NR74uF1TXHMm7po1AcnFk8iu2S2IeU/+/A+Q==} @@ -2454,6 +2539,10 @@ packages: '@solidity-parser/parser@0.17.0': resolution: {integrity: sha512-Nko8R0/kUo391jsEHHxrGM07QFdnPGvlmox4rmH0kNiNAashItAilhy4Mv4pK5gQmW5f4sXAF58fwJbmlkGcVw==} + '@testing-library/dom@10.4.0': + resolution: {integrity: sha512-pemlzrSESWbdAloYml3bAJMEfNh1Z7EduzqPKprCH5S341frlpYnUEW0H72dLxa6IsYr+mPno20GiSm+h9dEdQ==} + engines: {node: '>=18'} + '@testing-library/react-hooks@8.0.1': resolution: {integrity: 
sha512-Aqhl2IVmLt8IovEVarNDFuJDVWVvhnr9/GCU6UUnrYXwgDFF9h2L2o2P9KBni1AST5sT6riAyoukFLyjQUgD/g==} engines: {node: '>=12'} @@ -2470,6 +2559,21 @@ packages: react-test-renderer: optional: true + '@testing-library/react@16.0.0': + resolution: {integrity: sha512-guuxUKRWQ+FgNX0h0NS0FIq3Q3uLtWVpBzcLOggmfMoUpgBnzBzvLLd4fbm6yS8ydJd94cIfY4yP9qUQjM2KwQ==} + engines: {node: '>=18'} + peerDependencies: + '@testing-library/dom': ^10.0.0 + '@types/react': ^18.0.0 + '@types/react-dom': ^18.0.0 + react: ^18.0.0 + react-dom: ^18.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + '@tootallnate/once@2.0.0': resolution: {integrity: sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==} engines: {node: '>= 10'} @@ -2485,6 +2589,9 @@ packages: '@types/accepts@1.3.7': resolution: {integrity: sha512-Pay9fq2lM2wXPWbteBsRAGiWH2hig4ZE2asK+mm7kUzlxRTfL961rj89I6zV/E3PcIkDqyuBEcMxFT7rccugeQ==} + '@types/aria-query@5.0.4': + resolution: {integrity: sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==} + '@types/babel__core@7.20.0': resolution: {integrity: sha512-+n8dL/9GWblDO0iU6eZAwEIJVr5DWigtle+Q6HLOrh/pdbXOhOtqzq8VPPE2zvNJzSKY4vH/z3iT3tn0A3ypiQ==} @@ -2727,6 +2834,9 @@ packages: resolution: {integrity: sha512-RnlSOPh14QbopGCApgkSx5UBgGda5MX1cHqp2fsqfiDyCwGL/m1jaeB9fzu7didVS81LQqGZZuxFBcg8YU8EVw==} hasBin: true + '@typescript/vfs@1.5.0': + resolution: {integrity: sha512-AJS307bPgbsZZ9ggCT3wwpg3VbTKMFNHfaY/uF0ahSkYYrPF2dSSKDNIDIQAHm9qJqbLvCsSJH7yN4Vs/CsMMg==} + '@typescript/vfs@1.5.3': resolution: {integrity: sha512-OSZ/o3wwD5VPZVdGGsXWk7sRGRtwrGnqA4zwmb33FTs7Wxmad0QTkQCbaNyqWA8hL09TCwAthdp8yjFA5G1lvw==} peerDependencies: @@ -2942,12 +3052,18 @@ packages: resolution: {integrity: sha512-XLWeRTNBJRzQkbMweLIxdtnvpE7iYUBraPwrIJX57FjL4D1RHLMJRM1AyEP6KZHgvjW7TSnxF8MpGic7YdTGOA==} engines: {node: '>=8', npm: '>=5'} + aria-query@5.3.0: + resolution: {integrity: sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==} + arktype@1.0.29-alpha: resolution: {integrity: sha512-glMLgVhIQRSkR3tymiS+POAcWVJH09sfrgic0jHnyFL8BlhHAJZX2BzdImU9zYr1y9NBqy+U93ZNrRTHXsKRDw==} arktype@2.0.0-beta.3: resolution: {integrity: sha512-a08Q7Mj+5hes+UvV9CdqWoXPc/qdYpCIANcg8Bhs+iYyIdKGxMpj976oevd19uRkx0qtqMwS/O4QeyvHT0/oRQ==} + arktype@2.0.0-dev.11: + resolution: {integrity: sha512-k+WVQoHsHsTyTiVQkO201mxLQxyXHmy3buJW8TXLOkr4X2yOUCp0K1SBscuG9OEJoc8MjpvoIharjPHEkFI7kg==} + array-includes@3.1.6: resolution: {integrity: sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==} engines: {node: '>= 0.4'} @@ -3447,6 +3563,10 @@ packages: resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} engines: {node: '>= 0.8'} + dequal@2.0.3: + resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} + engines: {node: '>=6'} + destroy@1.2.0: resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==} engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} @@ -3489,6 +3609,9 @@ packages: resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} engines: {node: '>=6.0.0'} + dom-accessibility-api@0.5.16: + resolution: {integrity: 
sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==} + domexception@4.0.0: resolution: {integrity: sha512-A2is4PLG+eeSfoTMA95/s4pvAoSo2mKtiM5jlHkAVewmiO8ISFTFKZjH7UAM1Atli/OT/7JHOrJRJiMKUZKYBw==} engines: {node: '>=12'} @@ -4045,9 +4168,6 @@ packages: resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} engines: {node: 6.* || 8.* || >= 10.*} - get-func-name@2.0.0: - resolution: {integrity: sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==} - get-func-name@2.0.2: resolution: {integrity: sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==} @@ -4098,9 +4218,8 @@ packages: resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} engines: {node: '>=10.13.0'} - glob@10.4.2: - resolution: {integrity: sha512-GwMlUF6PkPo3Gk21UxkCohOv0PLcIXVtKyLlpEI28R/cO/4eNOdmLk3CMW1wROV/WR/EsZOWAfBbBOqYvs88/w==} - engines: {node: '>=16 || 14 >=14.18'} + glob@10.4.5: + resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==} hasBin: true glob@7.1.6: @@ -4496,9 +4615,8 @@ packages: resolution: {integrity: sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==} engines: {node: '>=8'} - jackspeak@3.4.0: - resolution: {integrity: sha512-JVYhQnN59LVPFCEcVa2C3CrEKYacvjRfqIQl+h8oi91aLYQVWRYbxjPcv1bUiUy/kLmQaANrYfNMCO3kuEDHfw==} - engines: {node: '>=14'} + jackspeak@3.4.3: + resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==} jake@10.8.5: resolution: {integrity: sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw==} @@ -4858,9 +4976,8 @@ packages: resolution: {integrity: sha512-RaPMZKiMy8/JruncMU5Bt6na1eftNoo++R4Y+N2FrxkDVTrGvcyzFTsaGif4QTeKESheMGegbhw6iUAq+5A8zA==} deprecated: Please upgrade to 2.3.7 which fixes GHSA-4q6p-r6v2-jvc5 - lru-cache@10.3.0: - resolution: {integrity: sha512-CQl19J/g+Hbjbv4Y3mFNNXFEL/5t/KCg8POCuUqd4rMKjGG+j1ybER83hxV58zL+dFI1PTkt3GNFSHRt+d8qEQ==} - engines: {node: 14 || >=16.14} + lru-cache@10.4.3: + resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} lru-cache@4.1.5: resolution: {integrity: sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==} @@ -4879,6 +4996,10 @@ packages: lru-queue@0.1.0: resolution: {integrity: sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ==} + lz-string@1.5.0: + resolution: {integrity: sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==} + hasBin: true + magic-string@0.30.5: resolution: {integrity: sha512-7xlpfBaQaP/T6Vh8MO/EqXSW5En6INHEvEXQiuff7Gku0PWjU3uf6w/j9o7O+SpB5fOAkrI5HeoNgwjEO0pFsA==} engines: {node: '>=12'} @@ -5038,6 +5159,10 @@ packages: ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + mutative@1.0.6: + resolution: {integrity: sha512-6mcjGc09LGNQizP/in3NYbNFWwor2cMozm/W0fSXS+lhQXsrvRmZ2xG6MGW6aft6xBjrM8wT3pORO1eCZujMxw==} + engines: {node: '>=14.0'} + mute-stream@0.0.7: resolution: {integrity: sha512-r65nCZhrbXXb6dXOACihYApHw2Q6pV0M3V0PSxd74N0+D8nzAdEAITq2oAjA1jVnKI+tGvEBUpqiMh0+rW6zDQ==} @@ -6292,9 
+6417,6 @@ packages: tslib@1.14.1: resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} - tslib@2.5.0: - resolution: {integrity: sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==} - tslib@2.6.2: resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} @@ -6742,6 +6864,14 @@ packages: zod@3.23.8: resolution: {integrity: sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==} + zustand-mutative@1.0.1: + resolution: {integrity: sha512-ciTdCgfjOiaCURJ9je/pOboZOBrFCyO34YLQYQD0Z+wOPiBpfFwaBpm/ibTJLEIKElL5xkhvPXes7pl9gEtdpg==} + peerDependencies: + '@types/react': ^18.0 || ^17.0 + mutative: ^1.0.6 + react: ^18.0 || ^17.0 + zustand: ^4.0 + zustand@4.3.7: resolution: {integrity: sha512-dY8ERwB9Nd21ellgkBZFhudER8KVlelZm8388B5nDAXhO/+FZDhYMuRnqDgu5SYyRgz/iaf8RKnbUs/cHfOGlQ==} engines: {node: '>=12.7.0'} @@ -6790,6 +6920,29 @@ snapshots: '@ark/util@0.2.0': {} + '@arktype/attest@0.7.5(typescript@5.4.2)': + dependencies: + '@arktype/fs': 0.0.19 + '@arktype/util': 0.0.41 + '@typescript/analyze-trace': 0.10.1 + '@typescript/vfs': 1.5.0 + arktype: 2.0.0-dev.11 + typescript: 5.4.2 + transitivePeerDependencies: + - supports-color + + '@arktype/fs@0.0.19': {} + + '@arktype/schema@0.1.2': + dependencies: + '@arktype/util': 0.0.38 + + '@arktype/util@0.0.38': {} + + '@arktype/util@0.0.40': {} + + '@arktype/util@0.0.41': {} + '@aws-crypto/ie11-detection@3.0.0': dependencies: tslib: 1.14.1 @@ -8459,6 +8612,26 @@ snapshots: '@solidity-parser/parser@0.17.0': {} + '@testing-library/dom@10.4.0': + dependencies: + '@babel/code-frame': 7.21.4 + '@babel/runtime': 7.21.0 + '@types/aria-query': 5.0.4 + aria-query: 5.3.0 + chalk: 4.1.2 + dom-accessibility-api: 0.5.16 + lz-string: 1.5.0 + pretty-format: 27.5.1 + + '@testing-library/react-hooks@8.0.1(@types/react@18.2.22)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + dependencies: + '@babel/runtime': 7.21.0 + react: 18.2.0 + react-error-boundary: 3.1.4(react@18.2.0) + optionalDependencies: + '@types/react': 18.2.22 + react-dom: 18.2.0(react@18.2.0) + '@testing-library/react-hooks@8.0.1(@types/react@18.2.22)(react-test-renderer@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: '@babel/runtime': 7.21.0 @@ -8468,6 +8641,16 @@ snapshots: '@types/react': 18.2.22 react-test-renderer: 18.2.0(react@18.2.0) + '@testing-library/react@16.0.0(@testing-library/dom@10.4.0)(@types/react-dom@18.2.7)(@types/react@18.2.22)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + dependencies: + '@babel/runtime': 7.21.0 + '@testing-library/dom': 10.4.0 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + optionalDependencies: + '@types/react': 18.2.22 + '@types/react-dom': 18.2.7 + '@tootallnate/once@2.0.0': {} '@trpc/client@10.34.0(@trpc/server@10.34.0)': @@ -8480,6 +8663,8 @@ snapshots: dependencies: '@types/node': 18.15.11 + '@types/aria-query@5.0.4': {} + '@types/babel__core@7.20.0': dependencies: '@babel/parser': 7.21.4 @@ -8790,6 +8975,12 @@ snapshots: treeify: 1.1.0 yargs: 16.2.0 + '@typescript/vfs@1.5.0': + dependencies: + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + '@typescript/vfs@1.5.3(typescript@5.4.2)': dependencies: debug: 4.3.4 @@ -8989,6 +9180,10 @@ snapshots: dependencies: iftype: 4.0.9 + aria-query@5.3.0: + dependencies: + dequal: 2.0.3 + arktype@1.0.29-alpha: {} arktype@2.0.0-beta.3: @@ -8996,6 +9191,11 @@ snapshots: '@ark/schema': 0.3.0 
'@ark/util': 0.2.0 + arktype@2.0.0-dev.11: + dependencies: + '@arktype/schema': 0.1.2 + '@arktype/util': 0.0.38 + array-includes@3.1.6: dependencies: call-bind: 1.0.2 @@ -9567,6 +9767,8 @@ snapshots: depd@2.0.0: {} + dequal@2.0.3: {} + destroy@1.2.0: {} detect-indent@6.1.0: {} @@ -9595,6 +9797,8 @@ snapshots: dependencies: esutils: 2.0.3 + dom-accessibility-api@0.5.16: {} + domexception@4.0.0: dependencies: webidl-conversions: 7.0.0 @@ -10313,8 +10517,6 @@ snapshots: get-caller-file@2.0.5: {} - get-func-name@2.0.0: {} - get-func-name@2.0.2: {} get-intrinsic@1.1.3: @@ -10366,10 +10568,10 @@ snapshots: dependencies: is-glob: 4.0.3 - glob@10.4.2: + glob@10.4.5: dependencies: foreground-child: 3.2.1 - jackspeak: 3.4.0 + jackspeak: 3.4.3 minimatch: 9.0.5 minipass: 7.1.2 package-json-from-dist: 1.0.0 @@ -10777,7 +10979,7 @@ snapshots: html-escaper: 2.0.2 istanbul-lib-report: 3.0.0 - jackspeak@3.4.0: + jackspeak@3.4.3: dependencies: '@isaacs/cliui': 8.0.2 optionalDependencies: @@ -11435,9 +11637,9 @@ snapshots: loupe@2.3.6: dependencies: - get-func-name: 2.0.0 + get-func-name: 2.0.2 - lru-cache@10.3.0: {} + lru-cache@10.4.3: {} lru-cache@4.1.5: dependencies: @@ -11458,6 +11660,8 @@ snapshots: dependencies: es5-ext: 0.10.62 + lz-string@1.5.0: {} + magic-string@0.30.5: dependencies: '@jridgewell/sourcemap-codec': 1.4.15 @@ -11620,6 +11824,8 @@ snapshots: ms@2.1.3: {} + mutative@1.0.6: {} + mute-stream@0.0.7: {} mute-stream@0.0.8: {} @@ -11900,7 +12106,7 @@ snapshots: path-scurry@1.11.1: dependencies: - lru-cache: 10.3.0 + lru-cache: 10.4.3 minipass: 7.1.2 path-to-regexp@6.2.1: {} @@ -12324,7 +12530,7 @@ snapshots: rxjs@7.5.5: dependencies: - tslib: 2.5.0 + tslib: 2.6.2 rxjs@7.8.1: dependencies: @@ -12881,8 +13087,6 @@ snapshots: tslib@1.14.1: {} - tslib@2.5.0: {} - tslib@2.6.2: {} tsscmp@1.0.6: {} @@ -13337,6 +13541,13 @@ snapshots: zod@3.23.8: {} + zustand-mutative@1.0.1(@types/react@18.2.22)(mutative@1.0.6)(react@18.2.0)(zustand@4.3.7(react@18.2.0)): + dependencies: + '@types/react': 18.2.22 + mutative: 1.0.6 + react: 18.2.0 + zustand: 4.3.7(react@18.2.0) + zustand@4.3.7(react@18.2.0): dependencies: use-sync-external-store: 1.2.0(react@18.2.0) diff --git a/templates/react-new/.gitignore b/templates/react-new/.gitignore new file mode 100644 index 0000000000..e94138a8d7 --- /dev/null +++ b/templates/react-new/.gitignore @@ -0,0 +1,3 @@ +.env +node_modules +.attest \ No newline at end of file diff --git a/templates/react-new/client/bench.ts b/templates/react-new/client/bench.ts new file mode 100644 index 0000000000..df11a462ae --- /dev/null +++ b/templates/react-new/client/bench.ts @@ -0,0 +1,23 @@ +import { bench } from "@ark/attest"; +import { DozerSyncFilter, selectFrom } from "@latticexyz/store-sync/dozer"; +import mudConfig from "../mud.config"; +import { createStore } from "@latticexyz/stash/internal"; +import { sync } from "./sync"; + +bench("sync", async () => { + const dozerUrl = "https://redstone2.dozer.skystrife.xyz/q"; + const storeAddress = "0x9d05cc196c87104a7196fcca41280729b505dbbf"; + const yesterday = Date.now() / 1000 - 24 * 60 * 60; + const filters: DozerSyncFilter[] = [ + selectFrom({ + table: mudConfig.tables.MatchSky, + where: `"createdAt" > ${yesterday}`, + }), + ]; + const store = createStore(mudConfig); + + await sync({ dozerUrl, storeAddress, filters, store }); + + console.log("Done syncing"); + console.log("store", Object.values(store.getState().records[""].MatchSky)); +}).types([8201, "instantiations"]); diff --git a/templates/react-new/client/index.ts 
b/templates/react-new/client/index.ts new file mode 100644 index 0000000000..9c2afca369 --- /dev/null +++ b/templates/react-new/client/index.ts @@ -0,0 +1,25 @@ +import { DozerSyncFilter, selectFrom } from "@latticexyz/store-sync/dozer"; +import mudConfig from "../mud.config"; +import { sync } from "./sync"; +import { createStore } from "@latticexyz/stash/internal"; + +async function main() { + const dozerUrl = "https://redstone2.dozer.skystrife.xyz/q"; + const storeAddress = "0x9d05cc196c87104a7196fcca41280729b505dbbf"; + const yesterday = Date.now() / 1000 - 24 * 60 * 60; + const filters: DozerSyncFilter[] = [ + selectFrom({ + table: mudConfig.tables.MatchSky, + where: `"createdAt" > ${yesterday}`, + }), + ]; + const store = createStore(mudConfig); + + console.log("query", filters); + await sync({ dozerUrl, storeAddress, filters, store }); + + console.log("Done syncing"); + console.log("store", Object.values(store.getState().records[""].MatchSky)); +} + +main(); diff --git a/templates/react-new/client/sync.ts b/templates/react-new/client/sync.ts new file mode 100644 index 0000000000..8cb9452f30 --- /dev/null +++ b/templates/react-new/client/sync.ts @@ -0,0 +1,27 @@ +import { StorageAdapterBlock } from "@latticexyz/store-sync"; +import { DozerSyncFilter, fetchInitialBlockLogsDozer } from "@latticexyz/store-sync/dozer"; +import { createStorageAdapter } from "@latticexyz/store-sync/stash"; +import { Store } from "@latticexyz/stash/internal"; +import { Hex } from "viem"; + +type SyncOptions = { + dozerUrl: string; + storeAddress: Hex; + filters: DozerSyncFilter[]; + store: Store; +}; + +export async function sync({ dozerUrl, storeAddress, filters, store }: SyncOptions) { + const initialStorageAdapterBlock: StorageAdapterBlock = { + blockNumber: 0n, + logs: [], + }; + + console.log("Fetching from dozer"); + const { initialBlockLogs } = await fetchInitialBlockLogsDozer({ dozerUrl, storeAddress, filters, chainId: 690 }); + + console.log("initial storage adapter block", initialStorageAdapterBlock); + + const { storageAdapter } = createStorageAdapter({ store }); + storageAdapter(initialBlockLogs); +} diff --git a/templates/react-new/mud.config.ts b/templates/react-new/mud.config.ts new file mode 100644 index 0000000000..ed071f9aad --- /dev/null +++ b/templates/react-new/mud.config.ts @@ -0,0 +1,16 @@ +import { defineWorld } from "@latticexyz/world"; + +const config = defineWorld({ + tables: { + MatchSky: { + key: ["matchEntity"], + schema: { + matchEntity: "bytes32", + createdAt: "uint256", + reward: "uint256", + }, + }, + }, +}); + +export default config; diff --git a/templates/react-new/package.json b/templates/react-new/package.json new file mode 100644 index 0000000000..4327df30c8 --- /dev/null +++ b/templates/react-new/package.json @@ -0,0 +1,25 @@ +{ + "name": "mud-demo-3", + "version": "1.0.0", + "private": true, + "description": "", + "scripts": { + "bench": "tsx client/bench.ts" + }, + "dependencies": { + "@latticexyz/cli": "link:../../packages/cli", + "@latticexyz/common": "link:../../packages/common", + "@latticexyz/config": "link:../../packages/config", + "@latticexyz/protocol-parser": "link:../../packages/protocol-parser", + "@latticexyz/stash": "link:../../packages/stash", + "@latticexyz/store": "link:../../packages/store", + "@latticexyz/store-sync": "link:../../packages/store-sync", + "@latticexyz/world": "link:../../packages/world", + "viem": "^2.18.6" + }, + "devDependencies": { + "@ark/attest": "^0.10.2", + "tsx": "^4.16.3", + "typescript": "^5.5.4" + } +} diff --git 
a/templates/react-new/pnpm-workspace.yaml b/templates/react-new/pnpm-workspace.yaml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/templates/react-new/tsconfig.json b/templates/react-new/tsconfig.json new file mode 100644 index 0000000000..258a47be3f --- /dev/null +++ b/templates/react-new/tsconfig.json @@ -0,0 +1,3 @@ +{ + "extends": "@latticexyz/common/tsconfig.base.json" +}
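For reference, a minimal sketch (not part of this changeset) of how a client might wire up the new stash sync entry point introduced above. It reuses `createStore` from `@latticexyz/stash/internal` as the `react-new` template does (the final package may expose this under a different name), and it assumes the usual store-sync options `publicClient` and `address`, which are not shown in this diff; the RPC URL and world address below are placeholders.

// Hedged usage sketch for syncToStash; option names outside this diff are assumptions.
import { createPublicClient, http, type Hex } from "viem";
import { createStore } from "@latticexyz/stash/internal";
import { syncToStash } from "@latticexyz/store-sync/stash";
import mudConfig from "./mud.config";

// Placeholder world address; replace with the deployed World.
const worldAddress: Hex = "0x0000000000000000000000000000000000000000";

async function main(): Promise<void> {
  // Create a client-side stash from the MUD config, as in templates/react-new/client.
  const stash = createStore(mudConfig);

  // Any viem public client for the target chain; the RPC URL here is an assumption.
  const publicClient = createPublicClient({ transport: http("https://rpc.example.com") });

  const { SyncProgress, stopSync } = await syncToStash({
    stash,
    config: mudConfig,
    publicClient, // assumption: standard store-sync SyncOptions field
    address: worldAddress, // assumption: standard store-sync SyncOptions field
  });

  // SyncProgress is the bound table registered by syncToStash; its `step` reaches SyncStep.LIVE once caught up.
  console.log("sync progress", SyncProgress.getRecord({ key: {} }));

  // Stop streaming block logs when done.
  stopSync();
}

main();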