WIP: stash state lib #2944
base: main
Changes from all commits
common.ts:

```ts
@@ -1,3 +1,4 @@
import { Table } from "@latticexyz/config";
import {
  DynamicAbiType,
  SchemaAbiType,

@@ -51,3 +52,5 @@ export type ValueArgs = {
  encodedLengths: Hex;
  dynamicData: Hex;
};

export type PartialTable = Pick<Table, "schema" | "key">;
```
> Review comment (on `PartialTable`): is there a better name for this? maybe when I was using it as …
decodeDozerField.test.ts:

```ts
@@ -0,0 +1,9 @@
import { describe, expect, it } from "vitest";
import { decodeDozerField } from "./decodeDozerField";

describe("decodeDozerField", () => {
  it("should decode numbers to the expected value type", () => {
    expect(decodeDozerField("uint48", "1")).toBe(1);
    expect(decodeDozerField("uint56", "1")).toBe(1n);
  });
});
```
decodeDozerField.ts:

```ts
@@ -0,0 +1,24 @@
import { AbiType } from "@latticexyz/config";
import {
  ArrayAbiType,
  SchemaAbiTypeToPrimitiveType,
  arrayToStaticAbiType,
  schemaAbiTypeToDefaultValue,
} from "@latticexyz/schema-type/internal";

export function decodeDozerField<abiType extends AbiType>(
  abiType: abiType,
  data: string | boolean | string[],
): SchemaAbiTypeToPrimitiveType<abiType> {
  // Use the runtime type of the ABI type's default value to pick a decoder.
  const defaultValueType = typeof schemaAbiTypeToDefaultValue[abiType];
  if (Array.isArray(data)) {
    // Decode each element of an array type with its static element type.
    return data.map((element) => decodeDozerField(arrayToStaticAbiType(abiType as ArrayAbiType), element)) as never;
  }
  if (defaultValueType === "number") {
    return Number(data) as never;
  }
  if (defaultValueType === "bigint") {
    return BigInt(data) as never;
  }
  return data as never;
}
```

> Review comment (on `decodeDozerField`): we talked about moving this over to store-sync (or near where it's used) since it's not part of the protocol.
decodeDozerRecords.test.ts:

```ts
@@ -0,0 +1,38 @@
import { describe, expect, it } from "vitest";
import { decodeDozerRecords } from "./decodeDozerRecords";

describe("decodeDozerRecords", () => {
  const schema = {
    address: { type: "address", internalType: "address" },
    uint256: { type: "uint256", internalType: "uint256" },
    uint32: { type: "uint32", internalType: "uint32" },
    bool: { type: "bool", internalType: "bool" },
    bytes: { type: "bytes", internalType: "bytes" },
    string: { type: "string", internalType: "string" },
    uint32Arr: { type: "uint32[]", internalType: "uint32[]" },
  } as const;

  it("decodes dozer record", () => {
    // The first row of a dozer query result is the header, which decodeDozerRecords trims.
    const header = Object.keys(schema);
    const dozerRecord = [
      "0x0000000000000000000000000000000000000000",
      "1234",
      "1234",
      true,
      "0x1234",
      "hello world",
      ["1234", "5678"],
    ];
    const decodedRecord = {
      address: "0x0000000000000000000000000000000000000000",
      uint256: 1234n,
      uint32: 1234,
      bool: true,
      bytes: "0x1234",
      string: "hello world",
      uint32Arr: [1234, 5678],
    };

    const decoded = decodeDozerRecords({ schema, records: [header, dozerRecord] });
    expect(decoded).toStrictEqual([decodedRecord]);
  });
});
```
decodeDozerRecords.ts:

```ts
@@ -0,0 +1,44 @@
import { Schema } from "@latticexyz/config";
import { decodeDozerField } from "./decodeDozerField";
import { getSchemaPrimitives } from "./getSchemaPrimitives";

type DozerQueryHeader = string[];
type DozerQueryRecord = (string | boolean | string[])[];

// The first item in the result is the header
export type DozerQueryResult = [DozerQueryHeader, ...DozerQueryRecord[]];

export type DecodeDozerRecordsArgs = {
  schema: Schema;
  records: DozerQueryResult;
};

/**
 * Trim the header row from the query result
 */
function trimHeader(result: DozerQueryResult): DozerQueryRecord[] {
  return result.slice(1);
}

export type DecodeDozerRecordsResult<schema extends Schema = Schema> = getSchemaPrimitives<schema>[];

export function decodeDozerRecords<schema extends Schema>({
  schema,
  records,
}: DecodeDozerRecordsArgs): DecodeDozerRecordsResult<schema> {
  const fieldNames = Object.keys(schema);
  if (records.length > 0 && fieldNames.length !== records[0].length) {
    throw new Error(
      `Mismatch between schema and query result.\nSchema: [${fieldNames.join(", ")}]\nQuery result: [${records[0].join(", ")}]`,
    );
  }

  return trimHeader(records).map((record) =>
    Object.fromEntries(
      Object.keys(schema).map((fieldName, index) => [
        fieldName,
        decodeDozerField(schema[fieldName].type, record[index]),
      ]),
    ),
  ) as never;
}
```
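For clarity, a minimal usage sketch of `decodeDozerRecords` with a hypothetical two-field schema. The first row of `records` is the header, which `trimHeader` strips before the remaining rows are decoded field by field:

```ts
import { decodeDozerRecords } from "./decodeDozerRecords";

// Hypothetical schema and query result, for illustration only.
const schema = {
  player: { type: "address", internalType: "address" },
  score: { type: "uint256", internalType: "uint256" },
} as const;

const decoded = decodeDozerRecords({
  schema,
  records: [
    ["player", "score"], // header row, trimmed by the decoder
    ["0x0000000000000000000000000000000000000001", "42"],
  ],
});
// decoded: [{ player: "0x0000000000000000000000000000000000000001", score: 42n }]
```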
getKey.test.ts:

```ts
@@ -0,0 +1,25 @@
import { describe, expect, it } from "vitest";
import { getKey } from "./getKey";

describe("getKey", () => {
  it("should return the key fields of the record", () => {
    const table = {
      schema: {
        key1: { type: "uint32", internalType: "uint32" },
        key2: { type: "uint256", internalType: "uint256" },
        value1: { type: "string", internalType: "string" },
        value2: { type: "string", internalType: "string" },
      },
      key: ["key1", "key2"],
    } as const;
    const record = { key1: 1, key2: 2n, value1: "hello", value2: "world" };
    const key = getKey(table, record);

    expect(key).toMatchInlineSnapshot(`
      {
        "key1": 1,
        "key2": 2n,
      }
    `);
  });
});
```
getKey.ts:

```ts
@@ -0,0 +1,10 @@
import { getKeySchema } from "./getKeySchema";
import { getSchemaPrimitives } from "./getSchemaPrimitives";
import { PartialTable } from "./common";

export function getKey<table extends PartialTable>(
  table: table,
  record: getSchemaPrimitives<table["schema"]>,
): getSchemaPrimitives<getKeySchema<table>> {
  // Pick only the fields listed in the table's key from the record.
  return Object.fromEntries(table.key.map((fieldName) => [fieldName, record[fieldName]])) as never;
}
```
getValue.test.ts:

```ts
@@ -0,0 +1,25 @@
import { describe, expect, it } from "vitest";
import { getValue } from "./getValue";

describe("getValue", () => {
  it("should return the value fields of the record", () => {
    const table = {
      schema: {
        key1: { type: "uint32", internalType: "uint32" },
        key2: { type: "uint256", internalType: "uint256" },
        value1: { type: "string", internalType: "string" },
        value2: { type: "string", internalType: "string" },
      },
      key: ["key1", "key2"],
    } as const;
    const record = { key1: 1, key2: 2n, value1: "hello", value2: "world" };
    const value = getValue(table, record);

    expect(value).toMatchInlineSnapshot(`
      {
        "value1": "hello",
        "value2": "world",
      }
    `);
  });
});
```
getValue.ts:

```ts
@@ -0,0 +1,14 @@
import { PartialTable } from "./common";
import { getValueSchema } from "./getValueSchema";
import { getSchemaPrimitives } from "./getSchemaPrimitives";

export function getValue<table extends PartialTable>(
  table: table,
  record: getSchemaPrimitives<table["schema"]>,
): getSchemaPrimitives<getValueSchema<table>> {
  // Pick every schema field that is not part of the table's key.
  return Object.fromEntries(
    Object.keys(table.schema)
      .filter((fieldName) => !table.key.includes(fieldName))
      .map((fieldName) => [fieldName, record[fieldName]]),
  ) as never;
}
```
@@ -0,0 +1 @@

# @latticexyz/stash
@@ -0,0 +1,3 @@

# Stash

High performance client store and query engine for MUD
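The tests in this PR (see `decodeKey.test.ts` below) suggest the basic API shape. A minimal usage sketch, assuming the helpers are exposed via the package's `./internal` entry point (the tests import them relatively, so the exact public path is an assumption):

```ts
// Minimal sketch based on the tests in this PR; the import path for
// createStash/setRecord is an assumption, not a confirmed public API.
import { defineStore } from "@latticexyz/store/config/v2";
import { createStash, setRecord } from "@latticexyz/stash/internal";

const config = defineStore({
  namespace: "app",
  tables: {
    Position: {
      schema: { player: "address", x: "uint32", y: "uint32" },
      key: ["player"],
    },
  },
});

// Create an in-memory stash for the config and write a record to a table.
const stash = createStash(config);
const table = config.namespaces.app.tables.Position;
setRecord({
  stash,
  table,
  key: { player: "0x0000000000000000000000000000000000000001" },
  record: { x: 1, y: 2 },
});
```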
package.json:

```json
@@ -0,0 +1,67 @@
{
  "name": "@latticexyz/stash",
  "version": "2.0.12",
  "description": "High performance client store and query engine for MUD",
  "repository": {
    "type": "git",
    "url": "https://github.com/latticexyz/mud.git",
    "directory": "packages/stash"
  },
  "license": "MIT",
  "type": "module",
  "exports": {
    ".": "./dist/index.js",
    "./internal": "./dist/internal.js",
    "./recs": "./dist/recs.js"
  },
  "typesVersions": {
    "*": {
      "index": [
        "./dist/index.d.ts"
      ],
      "internal": [
        "./dist/internal.d.ts"
      ],
      "recs": [
        "./dist/recs.d.ts"
      ]
    }
  },
  "files": [
    "dist"
  ],
  "scripts": {
    "bench": "tsx src/bench.ts",
    "build": "tsup",
    "clean": "rimraf dist",
    "dev": "tsup --watch",
    "test": "vitest typecheck --run --passWithNoTests && vitest --run --passWithNoTests",
    "test:ci": "pnpm run test"
  },
  "dependencies": {
    "@arktype/util": "0.0.40",
    "@latticexyz/config": "workspace:*",
    "@latticexyz/protocol-parser": "workspace:*",
    "@latticexyz/recs": "workspace:*",
    "@latticexyz/schema-type": "workspace:*",
    "@latticexyz/store": "workspace:*",
    "mutative": "^1.0.6",
    "react": "^18.2.0",
    "rxjs": "7.5.5",
    "viem": "2.9.20",
    "zustand": "^4.3.7",
    "zustand-mutative": "^1.0.1"
  },
  "devDependencies": {
    "@arktype/attest": "0.7.5",
    "@testing-library/react": "^16.0.0",
    "@testing-library/react-hooks": "^8.0.1",
    "@types/react": "18.2.22",
    "react-dom": "^18.2.0",
    "tsup": "^6.7.0",
    "vitest": "0.34.6"
  },
  "publishConfig": {
    "access": "public"
  }
}
```
@@ -0,0 +1,24 @@

# TODOs

- Set up performance benchmarks for setting records, reading records, running queries, and updating records with subscribers
- Replace objects with Maps for performance? (https://rehmat-sayany.medium.com/using-map-over-objects-in-javascript-a-performance-benchmark-ff2f351851ed)
  - could be useful for TableRecords and Keys
- Maybe add an option to include records in the query result?
- Maybe turn `entityKey` into a tagged string (see the sketch after this list)? We could make it the return type of `encodeKey`, which would allow us to use Symbol (to reduce memory overhead) or something else later without a breaking change
- Add more query fragments, e.g. GreaterThan, LessThan, Range
- We might be able to enable different key shapes if we add something like a `keySelector`
- `getKeySchema` expects a full table as its type, but only needs `schema` and `key`
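The tagged-string item above could use the usual branded-type pattern in TypeScript. A hypothetical sketch (none of these names exist in the package; `encodeKeySketch` merely stands in for the real `encodeKey`):

```ts
// Hypothetical sketch of the tagged-string idea; not part of the package.
declare const brand: unique symbol;
type EntityKey = string & { readonly [brand]: "EntityKey" };

// encodeKey could return the branded type instead of a plain string, so the
// underlying representation (e.g. a Symbol) can change later without a
// breaking API change.
function encodeKeySketch(keyFields: readonly (string | number | bigint)[]): EntityKey {
  return keyFields.join(":") as EntityKey;
}

const key: EntityKey = encodeKeySketch([1, 2n]); // "1:2" at runtime, branded at compile time
```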
Ideas

- Update streams:
  - If each query added a new subscriber to the main state, each state update would trigger _every_ subscriber.
  - Instead we could add a subscriber per table, which can be subscribed to again, so we only have to iterate through all tables once, and then through the subscribers per table (only those that care about updates to this table). See the sketch below.
- Query fragments:
  - Instead of pre-defined query types (Has, HasValue, Not, NotValue), could we define fragments in a self-contained way, so it's easy to add a new type of query fragment without changing the core code?
  - The main complexity is the logic to initialize the initial set with the first query fragment, but it's probably not that critical - we could just run the first fragment on all entities of the first table, unless an initialSet is provided.
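A hypothetical sketch of the per-table update stream idea, with illustrative names only: subscriptions are keyed by table, so a state update only iterates the subscriber set of the table it actually modifies.

```ts
// Hypothetical sketch of the update-streams idea above; all names are illustrative.
type TableUpdate = { table: string; key: string };
type Subscriber = (update: TableUpdate) => void;

const subscribersByTable = new Map<string, Set<Subscriber>>();

// Register a subscriber for a single table; returns an unsubscribe function.
function subscribeToTable(table: string, subscriber: Subscriber): () => void {
  const subscribers = subscribersByTable.get(table) ?? new Set<Subscriber>();
  subscribers.add(subscriber);
  subscribersByTable.set(table, subscribers);
  return () => {
    subscribers.delete(subscriber);
  };
}

// Notify only the subscribers of the affected table, instead of every
// subscriber of the whole store.
function notifyTableUpdate(update: TableUpdate): void {
  subscribersByTable.get(update.table)?.forEach((subscriber) => subscriber(update));
}
```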
decodeKey.test.ts:

```ts
@@ -0,0 +1,28 @@
import { describe, it } from "vitest";
import { createStash } from "../createStash";
import { defineStore } from "@latticexyz/store/config/v2";
import { setRecord } from "./setRecord";
import { encodeKey } from "./encodeKey";
import { attest } from "@ark/attest";
import { decodeKey } from "./decodeKey";

describe("decodeKey", () => {
  it("should decode an encoded table key", () => {
    const config = defineStore({
      namespace: "namespace1",
      tables: {
        table1: {
          schema: { field1: "string", field2: "uint32", field3: "uint256" },
          key: ["field2", "field3"],
        },
      },
    });
    const stash = createStash(config);
    const table = config.namespaces.namespace1.tables.table1;
    const key = { field2: 1, field3: 2n };
    setRecord({ stash, table, key, record: { field1: "hello" } });

    const encodedKey = encodeKey({ table, key });
    attest<typeof key>(decodeKey({ stash, table, encodedKey })).equals({ field2: 1, field3: 2n });
  });
});
```
> Review comment: if we're not gonna add this to the table config output, this should probably live in protocol-parser for now (I realize we have `KeySchema` there already, but it's outdated and we have a lot of work to migrate old helpers over to new shapes).