feat(store-sync): add store sync package (#1075)
* feat(block-events-stream): add block events stream package

* feat(store-sync): add store sync package

* wip anvil test

* Revert "wip anvil test"

This reverts commit 1952a98.

* accidentally left in a store reference

* Update packages/block-events-stream/src/createBlockEventsStream.ts

Co-authored-by: alvarius <[email protected]>

* make streams closeable

I don't love this design

* clean up

* add log back in

* move comments

* refactor with just streams

* add README with example

* renamed

* rename again and take in a tuple as input

* fix scope

* add TODO

* add tests for grouping logs

* wip rxjs tests

* move fetchLogs to async generator, add tests

* add block range tests

* get rid of old approach

* add note about timers

* use concatMap instead of exhaustMap

* update readme

* feat(schema-type): add isDynamicAbiType (#1096)

* update with new block events stream

* missed a spot

* refine types, move logging to debug

* add test

* wip config types

* refactor as a list of storage operations

* getting closer

* config is already expanded

* fix types

* clean up

* remove log

* pass through log to storage operation

* update test snapshot

* Create eighty-tigers-argue.md

---------

Co-authored-by: alvarius <[email protected]>
Co-authored-by: alvrs <[email protected]>
3 people authored Jul 6, 2023
1 parent 0a230b4 commit 904fd7d
Showing 19 changed files with 567 additions and 6 deletions.
8 changes: 8 additions & 0 deletions .changeset/eighty-tigers-argue.md
@@ -0,0 +1,8 @@
---
"@latticexyz/block-logs-stream": patch
"@latticexyz/protocol-parser": patch
"@latticexyz/store-sync": minor
"@latticexyz/store": patch
---

Add store sync package
8 changes: 6 additions & 2 deletions docs/pages/client-side.mdx
@@ -197,7 +197,9 @@ const multiKey = client.tables.MultiKey.get({ first: "0xDEAD", second: 42 });
```tsx
import { useRow } from "@latticexyz/react";
function ExampleComponent() {
const { network: { storeCache } } = useMUD();
const {
network: { storeCache },
} = useMUD();
const position = useRow(storeCache, { table: "Position", key: { key: "0x01" } });
// -> { namespace: "", table: "Position", key: { key: "0x01" }, value: { x: 1, y: 2 } }
const { value } = position;
@@ -232,7 +234,9 @@ const positionsFiltered = client.tables.Position.scan({
```tsx
import { useRows } from "@latticexyz/react";
function ExampleComponent() {
const { network: { storeCache } } = useMUD();
const {
network: { storeCache },
} = useMUD();
const positions = useRows(storeCache, { table: "Position" });
// -> [{key: "0x00", value: {x: 10, y: 32}, namespace: config["namespace"], table: Position}, ...]
const positionsFiltered = useRows(storeCache, { table: "Position", key: { gt: { key: "0x02" } } });
10 changes: 7 additions & 3 deletions packages/block-logs-stream/src/groupLogsByBlockNumber.ts
@@ -4,6 +4,12 @@ import { bigIntSort } from "./utils";
import { isDefined } from "@latticexyz/common/utils";
import { debug } from "./debug";

export type GroupLogsByBlockNumberResult<TLog extends Log> = {
blockNumber: BlockNumber;
blockHash: Hex;
logs: readonly NonPendingLog<TLog>[];
}[];

/**
* Groups logs by their block number.
*
@@ -17,9 +23,7 @@ import { debug } from "./debug";
* @returns An array of objects where each object represents a distinct block and includes the block number,
* the block hash, and an array of logs for that block.
*/
export function groupLogsByBlockNumber<TLog extends Log>(
logs: readonly TLog[]
): { blockNumber: BlockNumber; blockHash: Hex; logs: readonly NonPendingLog<TLog>[] }[] {
export function groupLogsByBlockNumber<TLog extends Log>(logs: readonly TLog[]): GroupLogsByBlockNumberResult<TLog> {
// Pending logs don't have block numbers, so filter them out.
const nonPendingLogs = logs.filter(isNonPendingLog);
if (logs.length !== nonPendingLogs.length) {
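
For context, a minimal sketch of how the exported `GroupLogsByBlockNumberResult` shape might be consumed downstream; the `logs` input is a hypothetical array of already-fetched store event logs, not part of this diff:

```ts
import { groupLogsByBlockNumber } from "@latticexyz/block-logs-stream";
import type { Log } from "viem";

// Hypothetical logs fetched elsewhere (e.g. via the fetchLogs async generator).
declare const logs: Log[];

// Pending logs (no block number yet) are filtered out; the rest are grouped
// into one entry per block, each carrying blockNumber, blockHash, and its logs.
const blocks = groupLogsByBlockNumber(logs);

for (const block of blocks) {
  console.log(block.blockNumber, block.blockHash, block.logs.length);
}
```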
1 change: 1 addition & 0 deletions packages/block-logs-stream/src/index.ts
@@ -1,6 +1,7 @@
export * from "./blockRangeToLogs";
export * from "./createBlockStream";
export * from "./fetchLogs";
export * from "./getLogs";
export * from "./groupLogsByBlockNumber";
export * from "./isNonPendingBlock";
export * from "./isNonPendingLog";
13 changes: 13 additions & 0 deletions packages/protocol-parser/src/decodeField.ts
@@ -0,0 +1,13 @@
import { Hex } from "viem";
import { SchemaAbiType, SchemaAbiTypeToPrimitiveType, isDynamicAbiType } from "@latticexyz/schema-type";
import { decodeDynamicField } from "./decodeDynamicField";
import { decodeStaticField } from "./decodeStaticField";

export function decodeField<
TAbiType extends SchemaAbiType,
TPrimitiveType extends SchemaAbiTypeToPrimitiveType<TAbiType>
>(abiType: TAbiType, data: Hex): TPrimitiveType {
return (
isDynamicAbiType(abiType) ? decodeDynamicField(abiType, data) : decodeStaticField(abiType, data)
) as TPrimitiveType;
}
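
A quick sketch of the new `decodeField` helper in isolation; the `uint32` case mirrors the `amount` field decoded in the store-sync test later in this diff, while the `string` case assumes dynamic string data is plain UTF-8 bytes in hex:

```ts
import { decodeField } from "@latticexyz/protocol-parser";

// Static ABI type: routed through decodeStaticField.
const amount = decodeField("uint32", "0x00000008");
// -> 8

// Dynamic ABI type: routed through decodeDynamicField.
const label = decodeField("string", "0x496e76656e746f7279");
// -> "Inventory" (assuming string data is decoded as UTF-8 bytes)
```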
5 changes: 5 additions & 0 deletions packages/protocol-parser/src/index.ts
@@ -1,6 +1,7 @@
export * from "./abiTypesToSchema";
export * from "./common";
export * from "./decodeDynamicField";
export * from "./decodeField";
export * from "./decodeKeyTuple";
export * from "./decodeRecord";
export * from "./decodeStaticField";
@@ -9,4 +10,8 @@ export * from "./encodeKeyTuple";
export * from "./encodeRecord";
export * from "./errors";
export * from "./hexToPackedCounter";
export * from "./hexToSchema";
export * from "./hexToTableSchema";
export * from "./schemaIndexToAbiType";
export * from "./schemaToHex";
export * from "./staticDataLength";
9 changes: 9 additions & 0 deletions packages/protocol-parser/src/schemaIndexToAbiType.ts
@@ -0,0 +1,9 @@
import { SchemaAbiType } from "@latticexyz/schema-type";
import { Schema } from "./common";

export function schemaIndexToAbiType(schema: Schema, schemaIndex: number): SchemaAbiType {
if (schemaIndex < schema.staticFields.length) {
return schema.staticFields[schemaIndex];
}
return schema.dynamicFields[schemaIndex - schema.staticFields.length];
}
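
A small sketch of how `schemaIndexToAbiType` resolves a flat field index across the static and dynamic halves of a schema; the schema literal is hypothetical and assumes `Schema` is the two-field shape used in the store-sync test later in this diff:

```ts
import type { Schema } from "@latticexyz/protocol-parser";
import { schemaIndexToAbiType } from "@latticexyz/protocol-parser";

// Hypothetical schema: one static field followed by two dynamic fields.
const valueSchema: Schema = {
  staticFields: ["uint32"],
  dynamicFields: ["string", "bytes"],
};

schemaIndexToAbiType(valueSchema, 0); // -> "uint32" (staticFields[0])
schemaIndexToAbiType(valueSchema, 1); // -> "string" (dynamicFields[1 - 1])
schemaIndexToAbiType(valueSchema, 2); // -> "bytes"  (dynamicFields[2 - 1])
```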
6 changes: 6 additions & 0 deletions packages/store-sync/.eslintrc
@@ -0,0 +1,6 @@
{
"extends": ["../../.eslintrc"],
"rules": {
"@typescript-eslint/explicit-function-return-type": "error"
}
}
1 change: 1 addition & 0 deletions packages/store-sync/.gitignore
@@ -0,0 +1 @@
dist
6 changes: 6 additions & 0 deletions packages/store-sync/.npmignore
@@ -0,0 +1,6 @@
*

!dist/**
!src/**
!package.json
!README.md
45 changes: 45 additions & 0 deletions packages/store-sync/package.json
@@ -0,0 +1,45 @@
{
"name": "@latticexyz/store-sync",
"version": "1.42.0",
"description": "Utilities to sync MUD Store events with a client or cache",
"repository": {
"type": "git",
"url": "https://github.com/latticexyz/mud.git",
"directory": "packages/store-sync"
},
"license": "MIT",
"type": "module",
"exports": {
".": "./dist/index.js"
},
"types": "src/index.ts",
"scripts": {
"build": "pnpm run build:js",
"build:js": "tsup",
"clean": "pnpm run clean:js",
"clean:js": "rimraf dist",
"dev": "tsup --watch",
"lint": "eslint .",
"test": "vitest --run"
},
"dependencies": {
"@latticexyz/block-logs-stream": "workspace:*",
"@latticexyz/common": "workspace:*",
"@latticexyz/protocol-parser": "workspace:*",
"@latticexyz/schema-type": "workspace:*",
"@latticexyz/store": "workspace:*",
"@latticexyz/store-cache": "workspace:*",
"@latticexyz/utils": "workspace:*",
"debug": "^4.3.4",
"viem": "1.1.7"
},
"devDependencies": {
"@types/debug": "^4.1.7",
"tsup": "^6.7.0",
"vitest": "0.31.4"
},
"publishConfig": {
"access": "public"
},
"gitHead": "914a1e0ae4a573d685841ca2ea921435057deb8f"
}
153 changes: 153 additions & 0 deletions packages/store-sync/src/blockEventsToStorage.test.ts
@@ -0,0 +1,153 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { BlockEventsToStorageOptions, blockEventsToStorage } from "./blockEventsToStorage";
import storeConfig from "@latticexyz/store/mud.config";

const mockedCallbacks = {
registerTableSchema: vi.fn<
Parameters<BlockEventsToStorageOptions["registerTableSchema"]>,
ReturnType<BlockEventsToStorageOptions["registerTableSchema"]>
>(),
registerTableMetadata: vi.fn<
Parameters<BlockEventsToStorageOptions["registerTableMetadata"]>,
ReturnType<BlockEventsToStorageOptions["registerTableMetadata"]>
>(),
getTableSchema: vi.fn<
Parameters<BlockEventsToStorageOptions["getTableSchema"]>,
ReturnType<BlockEventsToStorageOptions["getTableSchema"]>
>(),
getTableMetadata: vi.fn<
Parameters<BlockEventsToStorageOptions["getTableMetadata"]>,
ReturnType<BlockEventsToStorageOptions["getTableMetadata"]>
>(),
};

const mockedDecode = blockEventsToStorage<typeof storeConfig>(mockedCallbacks as any as BlockEventsToStorageOptions);

describe("blockEventsToStorage", () => {
beforeEach(() => {
vi.clearAllMocks();
});

it("call setField with data properly decoded", async () => {
mockedCallbacks.getTableSchema.mockImplementation(async ({ namespace, name }) => {
if (namespace === "mudstore" && name === "StoreMetadata") {
return {
namespace: "mudstore",
name: "StoreMetadata",
schema: {
keySchema: {
staticFields: ["bytes32"],
dynamicFields: [],
},
valueSchema: {
staticFields: [],
dynamicFields: ["string", "bytes"],
},
},
};
}

if (namespace === "" && name === "Inventory") {
return {
namespace: "",
name: "Inventory",
schema: {
keySchema: {
staticFields: ["address", "uint32", "uint32"],
dynamicFields: [],
},
valueSchema: {
staticFields: ["uint32"],
dynamicFields: [],
},
},
};
}
});

mockedCallbacks.getTableMetadata.mockImplementation(async ({ namespace, name }) => {
if (namespace === "" && name === "Inventory") {
return {
namespace: "",
name: "Inventory",
keyNames: ["owner", "item", "itemVariant"],
valueNames: ["amount"],
};
}
});

const operations = await mockedDecode({
blockNumber: 5448n,
blockHash: "0x03e962e7402b2ab295b92feac342a132111dd14b0d1fd4d4a0456fdc77981577",
logs: [
{
address: "0x5fbdb2315678afecb367f032d93f642f64180aa3",
topics: ["0xd01f9f1368f831528fc9fe6442366b2b7d957fbfff3bcf7c24d9ab5fe51f8c46"],
data: "0x00000000000000000000000000000000496e76656e746f7279000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000796eb990a3f9c431c69149c7a168b91596d87f600000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000040000000800000000000000000000000000000000000000000000000000000000",
blockHash: "0x03e962e7402b2ab295b92feac342a132111dd14b0d1fd4d4a0456fdc77981577",
blockNumber: 5448n,
transactionHash: "0xa6986924609542dc4c2d81c53799d8eab47109ef34ee1e422de595e19ee9bfa4",
transactionIndex: 0,
logIndex: 0,
removed: false,
args: {
table: "0x00000000000000000000000000000000496e76656e746f727900000000000000",
key: [
"0x000000000000000000000000796eb990a3f9c431c69149c7a168b91596d87f60",
"0x0000000000000000000000000000000000000000000000000000000000000001",
"0x0000000000000000000000000000000000000000000000000000000000000001",
],
schemaIndex: 0,
data: "0x00000008",
},
eventName: "StoreSetField",
},
],
});

expect(operations).toMatchInlineSnapshot(`
{
"blockHash": "0x03e962e7402b2ab295b92feac342a132111dd14b0d1fd4d4a0456fdc77981577",
"blockNumber": 5448n,
"operations": [
{
"keyTuple": {
"item": 1,
"itemVariant": 1,
"owner": "0x796eb990A3F9C431C69149c7a168b91596D87F60",
},
"log": {
"address": "0x5fbdb2315678afecb367f032d93f642f64180aa3",
"args": {
"data": "0x00000008",
"key": [
"0x000000000000000000000000796eb990a3f9c431c69149c7a168b91596d87f60",
"0x0000000000000000000000000000000000000000000000000000000000000001",
"0x0000000000000000000000000000000000000000000000000000000000000001",
],
"schemaIndex": 0,
"table": "0x00000000000000000000000000000000496e76656e746f727900000000000000",
},
"blockHash": "0x03e962e7402b2ab295b92feac342a132111dd14b0d1fd4d4a0456fdc77981577",
"blockNumber": 5448n,
"data": "0x00000000000000000000000000000000496e76656e746f7279000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000003000000000000000000000000796eb990a3f9c431c69149c7a168b91596d87f600000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000040000000800000000000000000000000000000000000000000000000000000000",
"eventName": "StoreSetField",
"logIndex": 0,
"removed": false,
"topics": [
"0xd01f9f1368f831528fc9fe6442366b2b7d957fbfff3bcf7c24d9ab5fe51f8c46",
],
"transactionHash": "0xa6986924609542dc4c2d81c53799d8eab47109ef34ee1e422de595e19ee9bfa4",
"transactionIndex": 0,
},
"name": "Inventory",
"namespace": "",
"type": "SetField",
"value": 8,
"valueName": "amount",
},
],
}
`);
});
});
234 changes: 234 additions & 0 deletions packages/store-sync/src/blockEventsToStorage.ts
@@ -0,0 +1,234 @@
import {
TableSchema,
decodeField,
decodeKeyTuple,
decodeRecord,
hexToTableSchema,
schemaIndexToAbiType,
} from "@latticexyz/protocol-parser";
import { GroupLogsByBlockNumberResult, GetLogsResult } from "@latticexyz/block-logs-stream";
import { StoreEventsAbi, StoreConfig } from "@latticexyz/store";
import { TableId } from "@latticexyz/common";
import { Hex, decodeAbiParameters, parseAbiParameters } from "viem";
import { debug } from "./debug";
// TODO: move these type helpers into store?
import { Key, Value } from "@latticexyz/store-cache";
import { isDefined } from "@latticexyz/common/utils";

// TODO: change table schema/metadata APIs once we get both schema and field names in the same event

// TODO: export these from store or world
export const schemaTableId = new TableId("mudstore", "schema");
export const metadataTableId = new TableId("mudstore", "StoreMetadata");

// I don't love carrying all these types through. Ideally this should be the shape of the thing we want, rather than the specific return type from a function.
export type StoreEventsLog = GetLogsResult<StoreEventsAbi>[number];
export type BlockEvents = GroupLogsByBlockNumberResult<StoreEventsLog>[number];

export type StoredTableSchema = {
namespace: string;
name: string;
schema: TableSchema;
};

export type StoredTableMetadata = {
namespace: string;
name: string;
keyNames: readonly string[];
valueNames: readonly string[];
};

export type BaseStorageOperation = {
log: StoreEventsLog;
namespace: string;
};

export type SetRecordOperation<TConfig extends StoreConfig> = BaseStorageOperation & {
type: "SetRecord";
} & {
[TTable in keyof TConfig["tables"]]: {
name: TTable;
keyTuple: Key<TConfig, TTable>;
record: Value<TConfig, TTable>;
};
}[keyof TConfig["tables"]];

export type SetFieldOperation<TConfig extends StoreConfig> = BaseStorageOperation & {
type: "SetField";
} & {
[TTable in keyof TConfig["tables"]]: {
name: TTable;
keyTuple: Key<TConfig, TTable>;
} & {
[TValue in keyof Value<TConfig, TTable>]: {
// TODO: standardize on calling these "fields" or "values" or maybe "columns"
valueName: TValue;
value: Value<TConfig, TTable>[TValue];
};
}[keyof Value<TConfig, TTable>];
}[keyof TConfig["tables"]];

export type DeleteRecordOperation<TConfig extends StoreConfig> = BaseStorageOperation & {
type: "DeleteRecord";
} & {
[TTable in keyof TConfig["tables"]]: {
name: TTable;
keyTuple: Key<TConfig, TTable>;
};
}[keyof TConfig["tables"]];

export type StorageOperation<TConfig extends StoreConfig> =
| SetFieldOperation<TConfig>
| SetRecordOperation<TConfig>
| DeleteRecordOperation<TConfig>;

export type BlockEventsToStorageOptions = {
registerTableSchema: (data: StoredTableSchema) => Promise<void>;
registerTableMetadata: (data: StoredTableMetadata) => Promise<void>;
getTableSchema: (opts: Pick<StoredTableSchema, "namespace" | "name">) => Promise<StoredTableSchema | undefined>;
getTableMetadata: (opts: Pick<StoredTableMetadata, "namespace" | "name">) => Promise<StoredTableMetadata | undefined>;
};

export function blockEventsToStorage<TConfig extends StoreConfig = StoreConfig>({
registerTableMetadata,
registerTableSchema,
getTableMetadata,
getTableSchema,
}: BlockEventsToStorageOptions): (block: BlockEvents) => Promise<{
blockNumber: BlockEvents["blockNumber"];
blockHash: BlockEvents["blockHash"];
operations: StorageOperation<TConfig>[];
}> {
return async (block) => {
// Find and register all new table schemas
// Store schemas are immutable, so we can parallelize this
await Promise.all(
block.logs.map(async (log) => {
if (log.eventName !== "StoreSetRecord") return;
if (log.args.table !== schemaTableId.toHex()) return;

const [tableForSchema, ...otherKeys] = log.args.key;
if (otherKeys.length) {
debug("registerSchema event is expected to have only one key in key tuple, but got multiple", log);
}

const tableId = TableId.fromHex(tableForSchema);
const schema = hexToTableSchema(log.args.data);

await registerTableSchema({ ...tableId, schema });
})
);

const metadataTableSchema = await getTableSchema(metadataTableId);
if (!metadataTableSchema) {
// TODO: better error
throw new Error("metadata table schema was not registered");
}

// Find and register all new table metadata
// Table metadata is technically mutable, but all of our code assumes it's immutable, so we'll continue that trend
// TODO: rework contracts so schemas+tables are combined and immutable
await Promise.all(
block.logs.map(async (log) => {
if (log.eventName !== "StoreSetRecord") return;
if (log.args.table !== metadataTableId.toHex()) return;

const [tableForSchema, ...otherKeys] = log.args.key;
if (otherKeys.length) {
debug("setMetadata event is expected to have only one key in key tuple, but got multiple", log);
}

const tableId = TableId.fromHex(tableForSchema);
const [tableName, abiEncodedFieldNames] = decodeRecord(metadataTableSchema.schema.valueSchema, log.args.data);
const valueNames = decodeAbiParameters(parseAbiParameters("string[]"), abiEncodedFieldNames as Hex)[0];

// TODO: add key names to table registration when we refactor it
await registerTableMetadata({ ...tableId, keyNames: [], valueNames });
})
);

const tables = Array.from(new Set(block.logs.map((log) => log.args.table))).map((tableHex) =>
TableId.fromHex(tableHex)
);
// TODO: combine these once we refactor table registration
const tableSchemas = Object.fromEntries(
await Promise.all(tables.map(async (table) => [table.toHex(), await getTableSchema(table)]))
) as Record<Hex, StoredTableSchema>;
const tableMetadatas = Object.fromEntries(
await Promise.all(tables.map(async (table) => [table.toHex(), await getTableMetadata(table)]))
) as Record<Hex, StoredTableMetadata>;

const operations = block.logs
.map((log): StorageOperation<TConfig> | undefined => {
const tableId = TableId.fromHex(log.args.table);
const tableSchema = tableSchemas[log.args.table];
const tableMetadata = tableMetadatas[log.args.table];
if (!tableSchema) {
debug("no table schema found for event, skipping", tableId.toString(), log);
return;
}
if (!tableMetadata) {
debug("no table metadata found for event, skipping", tableId.toString(), log);
return;
}

const keyTupleValues = decodeKeyTuple(tableSchema.schema.keySchema, log.args.key);
const keyTuple = Object.fromEntries(
keyTupleValues.map((value, i) => [tableMetadata.keyNames[i] ?? i, value])
) as Key<TConfig, keyof TConfig["tables"]>;

if (log.eventName === "StoreSetRecord" || log.eventName === "StoreEphemeralRecord") {
const values = decodeRecord(tableSchema.schema.valueSchema, log.args.data);
const record = Object.fromEntries(tableMetadata.valueNames.map((name, i) => [name, values[i]])) as Value<
TConfig,
keyof TConfig["tables"]
>;
// TODO: decide if we should handle ephemeral records separately?
// they'll eventually be turned into "events", but unclear if that should translate to client storage operations
return {
log,
type: "SetRecord",
...tableId,
keyTuple,
record,
};
}

if (log.eventName === "StoreSetField") {
const valueName = tableMetadata.valueNames[log.args.schemaIndex] as string &
keyof Value<TConfig, keyof TConfig["tables"]>;
const value = decodeField(
schemaIndexToAbiType(tableSchema.schema.valueSchema, log.args.schemaIndex),
log.args.data
) as Value<TConfig, keyof TConfig["tables"]>[typeof valueName];
return {
log,
type: "SetField",
...tableId,
keyTuple,
valueName,
value,
};
}

if (log.eventName === "StoreDeleteRecord") {
return {
log,
type: "DeleteRecord",
...tableId,
keyTuple,
};
}

debug("unknown store event or log, skipping", log);
return;
})
.filter(isDefined);

return {
blockNumber: block.blockNumber,
blockHash: block.blockHash,
operations,
};
};
}
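
Since the new package's README isn't part of this diff, here is one possible (hypothetical) way a consumer could wire `blockEventsToStorage` up with simple in-memory registries; a real client would likely persist schemas and metadata in a store cache or database instead:

```ts
import {
  blockEventsToStorage,
  type StoredTableSchema,
  type StoredTableMetadata,
} from "@latticexyz/store-sync";

// Hypothetical in-memory registries keyed by `${namespace}:${name}`.
const schemas = new Map<string, StoredTableSchema>();
const metadata = new Map<string, StoredTableMetadata>();

const toStorageOperations = blockEventsToStorage({
  registerTableSchema: async (data) => {
    schemas.set(`${data.namespace}:${data.name}`, data);
  },
  registerTableMetadata: async (data) => {
    metadata.set(`${data.namespace}:${data.name}`, data);
  },
  getTableSchema: async ({ namespace, name }) => schemas.get(`${namespace}:${name}`),
  getTableMetadata: async ({ namespace, name }) => metadata.get(`${namespace}:${name}`),
});

// `block` would come from grouping fetched store event logs by block number
// (groupLogsByBlockNumber in @latticexyz/block-logs-stream):
// const { blockNumber, blockHash, operations } = await toStorageOperations(block);
// `operations` is the list of SetRecord / SetField / DeleteRecord storage operations.
```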
3 changes: 3 additions & 0 deletions packages/store-sync/src/debug.ts
@@ -0,0 +1,3 @@
import createDebug from "debug";

export const debug = createDebug("mud:store-sync");
1 change: 1 addition & 0 deletions packages/store-sync/src/index.ts
@@ -0,0 +1 @@
export * from "./blockEventsToStorage";
14 changes: 14 additions & 0 deletions packages/store-sync/tsconfig.json
@@ -0,0 +1,14 @@
{
"compilerOptions": {
"target": "es2021",
"module": "esnext",
"moduleResolution": "node",
"declaration": true,
"sourceMap": true,
"outDir": "dist",
"isolatedModules": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"strict": true
}
}
11 changes: 11 additions & 0 deletions packages/store-sync/tsup.config.ts
@@ -0,0 +1,11 @@
import { defineConfig } from "tsup";

export default defineConfig({
entry: ["src/index.ts"],
target: "esnext",
format: ["esm"],
dts: false,
sourcemap: true,
clean: true,
minify: true,
});
3 changes: 3 additions & 0 deletions packages/store/ts/storeEventsAbi.ts
@@ -2,3 +2,6 @@ import { parseAbi, AbiEvent } from "abitype";
import { storeEvents } from "./storeEvents";

export const storeEventsAbi = parseAbi(storeEvents) satisfies readonly AbiEvent[];

export type StoreEventsAbi = typeof storeEventsAbi;
export type StoreEventsAbiItem = (typeof storeEventsAbi)[number];
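
For reference, a type-level sketch of how these new exports are consumed by the store-sync package; the `StoreEventsLog` alias mirrors the one defined in blockEventsToStorage.ts above, and the `Extract` narrowing assumes `parseAbi` preserves literal event names:

```ts
import type { StoreEventsAbi, StoreEventsAbiItem } from "@latticexyz/store";
import type { GetLogsResult } from "@latticexyz/block-logs-stream";

// The shape of a single decoded Store event log, as used in store-sync.
type StoreEventsLog = GetLogsResult<StoreEventsAbi>[number];

// Narrow one event item from the ABI by its literal name.
type StoreSetFieldItem = Extract<StoreEventsAbiItem, { name: "StoreSetField" }>;
```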
42 changes: 41 additions & 1 deletion pnpm-lock.yaml
