diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml
index 67e61c746e..2d536f614b 100644
--- a/.github/workflows/e2e.yml
+++ b/.github/workflows/e2e.yml
@@ -12,18 +12,13 @@ jobs:
     runs-on: ubuntu-latest
     services:
       postgres:
-        image: ghcr.io/latticexyz/postgres-wal-logical:latest
-        env:
-          POSTGRES_USER: runner
-          POSTGRES_DB: mode_ephemeral
-          POSTGRES_HOST_AUTH_METHOD: trust
-        options: >-
-          --health-cmd pg_isready
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
+        image: postgres:12.1-alpine
+        env:
+          POSTGRES_HOST_AUTH_METHOD: trust
         ports:
           - 5432:5432
+        # needed because the postgres container does not provide a healthcheck
+        options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -56,5 +51,7 @@ jobs:
         uses: ./.github/actions/require-empty-diff
 
       - name: Run sync tests
+        env:
+          DATABASE_URL: "postgres://postgres@localhost:5432/postgres"
         working-directory: ./e2e/packages/sync-test
         run: pnpm test
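The workflow points the sync tests at the service container through a passwordless DATABASE_URL. A minimal connectivity sketch in TypeScript, assuming the `postgres` client added as a dependency later in this diff and the DATABASE_URL exported above:

```ts
import postgres from "postgres";

// Fallback mirrors the workflow env; CI connects without a password.
const sql = postgres(process.env.DATABASE_URL ?? "postgres://postgres@localhost:5432/postgres");
const [{ ok }] = await sql`select 1 as ok`;
console.log("postgres reachable:", ok === 1);
await sql.end();
```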
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 939a09e004..8ec86755a4 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -10,6 +10,15 @@ jobs:
   test:
     name: Run tests
     runs-on: ubuntu-latest
+    services:
+      postgres:
+        image: postgres:12.1-alpine
+        env:
+          POSTGRES_HOST_AUTH_METHOD: trust
+        ports:
+          - 5432:5432
+        # needed because the postgres container does not provide a healthcheck
+        options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -21,6 +30,8 @@ jobs:
         uses: ./.github/actions/build
 
       - name: Run tests
+        env:
+          DATABASE_URL: "postgres://postgres@localhost:5432/postgres"
         run: pnpm test
 
       - name: Generate gas reports
diff --git a/e2e/packages/sync-test/indexerSync.test.ts b/e2e/packages/sync-test/indexerSync.test.ts
index e86398ba2f..093ff0dff5 100644
--- a/e2e/packages/sync-test/indexerSync.test.ts
+++ b/e2e/packages/sync-test/indexerSync.test.ts
@@ -23,6 +23,17 @@ import {
 import { range } from "@latticexyz/utils";
 import path from "node:path";
 import { rpcHttpUrl } from "./setup/constants";
+import { z } from "zod";
+
+const env = z
+  .object({
+    DATABASE_URL: z.string().default("postgres://127.0.0.1/postgres"),
+  })
+  .parse(process.env, {
+    errorMap: (issue) => ({
+      message: `Missing or invalid environment variable: ${issue.path.join(".")}`,
+    }),
+  });
 
 describe("Sync from indexer", async () => {
   const asyncErrorHandler = createAsyncErrorHandler();
@@ -54,14 +65,21 @@ describe("Sync from indexer", async () => {
     expect(asyncErrorHandler.getErrors()[0]).toContain("error fetching initial state from indexer");
   });
 
-  describe("indexer online", () => {
+  describe.each([["sqlite"], ["postgres"]] as const)("%s indexer", (indexerType) => {
     let indexerIteration = 1;
     let indexer: ReturnType<typeof startIndexer>;
 
     beforeEach(async () => {
       // Start indexer
       const port = 3000 + indexerIteration++;
-      indexer = startIndexer(port, path.join(__dirname, `anvil-${port}.db`), rpcHttpUrl, asyncErrorHandler.reportError);
+      indexer = startIndexer({
+        port,
+        rpcHttpUrl,
+        reportError: asyncErrorHandler.reportError,
+        ...(indexerType === "postgres"
+          ? { indexer: "postgres", databaseUrl: env.DATABASE_URL }
+          : { indexer: "sqlite", sqliteFilename: path.join(__dirname, `anvil-${port}.db`) }),
+      });
       await indexer.doneSyncing;
     });
 
diff --git a/e2e/packages/sync-test/package.json b/e2e/packages/sync-test/package.json
index f17ad472bc..25ce8b27c9 100644
--- a/e2e/packages/sync-test/package.json
+++ b/e2e/packages/sync-test/package.json
@@ -25,6 +25,7 @@
     "typescript": "5.1.6",
     "viem": "1.6.0",
     "vite": "^4.2.1",
-    "vitest": "^0.31.0"
+    "vitest": "^0.31.0",
+    "zod": "^3.22.2"
   }
 }
diff --git a/e2e/packages/sync-test/setup/startIndexer.ts b/e2e/packages/sync-test/setup/startIndexer.ts
index 4d8d69e26e..7225947c71 100644
--- a/e2e/packages/sync-test/setup/startIndexer.ts
+++ b/e2e/packages/sync-test/setup/startIndexer.ts
@@ -3,12 +3,23 @@ import { execa } from "execa";
 import { rmSync } from "node:fs";
 import path from "node:path";
 
-export function startIndexer(
-  port: number,
-  sqliteFilename: string,
-  rpcUrl: string,
-  reportError: (error: string) => void
-) {
+type IndexerOptions =
+  | {
+      indexer: "sqlite";
+      sqliteFilename: string;
+    }
+  | {
+      indexer: "postgres";
+      databaseUrl: string;
+    };
+
+type StartIndexerOptions = {
+  port: number;
+  rpcHttpUrl: string;
+  reportError: (error: string) => void;
+} & IndexerOptions;
+
+export function startIndexer(opts: StartIndexerOptions) {
   let resolve: () => void;
   let reject: (reason?: string) => void;
   const doneSyncing = new Promise<void>((res, rej) => {
@@ -16,17 +27,19 @@ export function startIndexer(
     reject = rej;
   });
 
-  console.log(chalk.magenta("[indexer]:"), "start syncing");
+  const env = {
+    DEBUG: "mud:*",
+    PORT: opts.port.toString(),
+    CHAIN_ID: "31337",
+    RPC_HTTP_URL: opts.rpcHttpUrl,
+    SQLITE_FILENAME: opts.indexer === "sqlite" ? opts.sqliteFilename : undefined,
+    DATABASE_URL: opts.indexer === "postgres" ? opts.databaseUrl : undefined,
+  };
+  console.log(chalk.magenta("[indexer]:"), "starting indexer", env);
 
-  const proc = execa("pnpm", ["start"], {
+  const proc = execa("pnpm", opts.indexer === "postgres" ? ["start:postgres"] : ["start:sqlite"], {
     cwd: path.join(__dirname, "..", "..", "..", "..", "packages", "store-indexer"),
-    env: {
-      DEBUG: "mud:*",
-      PORT: port.toString(),
-      CHAIN_ID: "31337",
-      RPC_HTTP_URL: rpcUrl,
-      SQLITE_FILENAME: sqliteFilename,
-    },
+    env,
   });
 
   proc.on("error", (error) => {
@@ -56,10 +69,12 @@ export function startIndexer(
 
   function cleanUp() {
     // attempt to clean up sqlite file
-    try {
-      rmSync(sqliteFilename);
-    } catch (error) {
-      console.log("could not delete", sqliteFilename, error);
+    if (opts.indexer === "sqlite") {
+      try {
+        rmSync(opts.sqliteFilename);
+      } catch (error) {
+        console.log("could not delete", opts.sqliteFilename, error);
+      }
     }
   }
 
@@ -70,7 +85,7 @@ export function startIndexer(
   });
 
   return {
-    url: `http://127.0.0.1:${port}/trpc`,
+    url: `http://127.0.0.1:${opts.port}/trpc`,
     doneSyncing,
     process: proc,
     kill: () =>
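`startIndexer` now takes a single options object whose `indexer` field discriminates the backend, so the compiler rejects passing `sqliteFilename` to the postgres variant (and vice versa). A usage sketch with hypothetical ports and RPC URL:

```ts
import path from "node:path";
import { startIndexer } from "./setup/startIndexer";

// sqlite variant: requires sqliteFilename
const sqliteIndexer = startIndexer({
  port: 3001,
  rpcHttpUrl: "http://127.0.0.1:8545",
  reportError: console.error,
  indexer: "sqlite",
  sqliteFilename: path.join(__dirname, "anvil-3001.db"),
});

// postgres variant: requires databaseUrl instead
const postgresIndexer = startIndexer({
  port: 3002,
  rpcHttpUrl: "http://127.0.0.1:8545",
  reportError: console.error,
  indexer: "postgres",
  databaseUrl: "postgres://127.0.0.1/postgres",
});

await Promise.all([sqliteIndexer.doneSyncing, postgresIndexer.doneSyncing]);
```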
diff --git a/e2e/pnpm-lock.yaml b/e2e/pnpm-lock.yaml
index 0a2ba0fbab..cda97c459b 100644
--- a/e2e/pnpm-lock.yaml
+++ b/e2e/pnpm-lock.yaml
@@ -79,7 +79,7 @@ importers:
         version: 1.7.0
       viem:
         specifier: 1.6.0
-        version: 1.6.0(typescript@5.1.6)
+        version: 1.6.0(typescript@5.1.6)(zod@3.22.2)
     devDependencies:
       rimraf:
         specifier: ^3.0.2
@@ -170,7 +170,7 @@ importers:
         version: 0.0.6
       abitype:
         specifier: 0.9.3
-        version: 0.9.3(typescript@5.1.6)
+        version: 0.9.3(typescript@5.1.6)(zod@3.22.2)
       chalk:
         specifier: ^5.2.0
         version: 5.2.0
@@ -191,13 +191,16 @@ importers:
         version: 5.1.6
       viem:
         specifier: 1.6.0
-        version: 1.6.0(typescript@5.1.6)
+        version: 1.6.0(typescript@5.1.6)(zod@3.22.2)
       vite:
         specifier: ^4.2.1
         version: 4.3.5(@types/node@20.1.3)
       vitest:
         specifier: ^0.31.0
         version: 0.31.4(jsdom@22.0.0)
+      zod:
+        specifier: ^3.22.2
+        version: 3.22.2
 
 packages:
 
@@ -941,7 +944,7 @@ packages:
     resolution: {integrity: sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==}
     dev: true
 
-  /abitype@0.9.3(typescript@5.1.6):
+  /abitype@0.9.3(typescript@5.1.6)(zod@3.22.2):
     resolution: {integrity: sha512-dz4qCQLurx97FQhnb/EIYTk/ldQ+oafEDUqC0VVIeQS1Q48/YWt/9YNfMmp9SLFqN41ktxny3c8aYxHjmFIB/w==}
     peerDependencies:
       typescript: '>=5.0.4'
@@ -953,6 +956,7 @@ packages:
         optional: true
     dependencies:
       typescript: 5.1.6
+      zod: 3.22.2
 
   /abort-controller-x@0.4.1:
     resolution: {integrity: sha512-lJ2ssrl3FoTK3cX/g15lRCkXFWKiwRTRtBjfwounO2EM/Q65rI/MEZsfsch1juWU2pH2aLSaq0HGowlDP/imrw==}
@@ -2264,7 +2268,7 @@ packages:
     hasBin: true
     dev: true
 
-  /viem@1.6.0(typescript@5.1.6):
+  /viem@1.6.0(typescript@5.1.6)(zod@3.22.2):
     resolution: {integrity: sha512-ae9Twkd0q2Qlj4yYpWjb4DzYAhKY0ibEpRH8FJaTywZXNpTjFidSdBaT0CVn1BaH7O7cnX4/O47zvDUMGJD1AA==}
     peerDependencies:
       typescript: '>=5.0.4'
@@ -2279,7 +2283,7 @@ packages:
       '@scure/bip39': 1.2.0
       '@types/ws': 8.5.5
       '@wagmi/chains': 1.6.0(typescript@5.1.6)
-      abitype: 0.9.3(typescript@5.1.6)
+      abitype: 0.9.3(typescript@5.1.6)(zod@3.22.2)
       isomorphic-ws: 5.0.0(ws@8.12.0)
       typescript: 5.1.6
       ws: 8.12.0
@@ -2561,6 +2565,9 @@ packages:
     engines: {node: '>=12.20'}
     dev: true
 
+  /zod@3.22.2:
+    resolution: {integrity: sha512-wvWkphh5WQsJbVk1tbx1l1Ly4yg+XecD+Mq280uBGt9wa5BKSWf4Mhp6GmrkPixhMxmabYY7RbzlwVP32pbGCg==}
+
   github.com/dapphub/ds-test/e282159d5170298eb2455a6c05280ab5a73a4ef0:
     resolution: {tarball: https://codeload.github.com/dapphub/ds-test/tar.gz/e282159d5170298eb2455a6c05280ab5a73a4ef0}
     name: ds-test
diff --git a/packages/common/src/utils/identity.ts b/packages/common/src/utils/identity.ts
new file mode 100644
index 0000000000..5aca6fd99d
--- /dev/null
+++ b/packages/common/src/utils/identity.ts
@@ -0,0 +1,3 @@
+export function identity<T>(value: T): T {
+  return value;
+}
diff --git a/packages/common/src/utils/index.ts b/packages/common/src/utils/index.ts
index 5f06e434e0..bc40e68700 100644
--- a/packages/common/src/utils/index.ts
+++ b/packages/common/src/utils/index.ts
@@ -4,6 +4,7 @@ export * from "./bigIntMin";
 export * from "./bigIntSort";
 export * from "./chunk";
 export * from "./curry";
+export * from "./identity";
 export * from "./isDefined";
 export * from "./isNotNull";
 export * from "./wait";
diff --git a/packages/store-indexer/bin/postgres-indexer.ts b/packages/store-indexer/bin/postgres-indexer.ts
new file mode 100644
index 0000000000..9a7ea96d1e
--- /dev/null
+++ b/packages/store-indexer/bin/postgres-indexer.ts
@@ -0,0 +1,138 @@
+import { z } from "zod";
+import { DefaultLogger, eq } from "drizzle-orm";
+import { createPublicClient, fallback, webSocket, http, Transport } from "viem";
+import fastify from "fastify";
+import { fastifyTRPCPlugin } from "@trpc/server/adapters/fastify";
+import { AppRouter, createAppRouter } from "@latticexyz/store-sync/trpc-indexer";
+import { createQueryAdapter } from "../src/postgres/createQueryAdapter";
+import type { Chain } from "viem/chains";
+import * as mudChains from "@latticexyz/common/chains";
+import * as chains from "viem/chains";
+import { isNotNull } from "@latticexyz/common/utils";
+import { combineLatest, filter, first } from "rxjs";
+import { drizzle } from "drizzle-orm/postgres-js";
+import postgres from "postgres";
+import { cleanDatabase, postgresStorage, schemaVersion } from "@latticexyz/store-sync/postgres";
+import { createStoreSync } from "@latticexyz/store-sync";
+
+const possibleChains = Object.values({ ...mudChains, ...chains }) as Chain[];
+
+// TODO: refine zod type to be either CHAIN_ID or RPC_HTTP_URL/RPC_WS_URL
+const env = z
+  .object({
+    CHAIN_ID: z.coerce.number().positive().optional(),
+    RPC_HTTP_URL: z.string().optional(),
+    RPC_WS_URL: z.string().optional(),
+    START_BLOCK: z.coerce.bigint().nonnegative().default(0n),
+    MAX_BLOCK_RANGE: z.coerce.bigint().positive().default(1000n),
+    HOST: z.string().default("0.0.0.0"),
+    PORT: z.coerce.number().positive().default(3001),
+    DATABASE_URL: z.string(),
+  })
+  .parse(process.env, {
+    errorMap: (issue) => ({
+      message: `Missing or invalid environment variable: ${issue.path.join(".")}`,
+    }),
+  });
+
+const chain = env.CHAIN_ID != null ? possibleChains.find((c) => c.id === env.CHAIN_ID) : undefined;
+if (env.CHAIN_ID != null && !chain) {
+  console.warn(`No chain found for chain ID ${env.CHAIN_ID}`);
+}
+
+const transports: Transport[] = [
+  env.RPC_WS_URL ? webSocket(env.RPC_WS_URL) : null,
+  env.RPC_HTTP_URL ? http(env.RPC_HTTP_URL) : null,
+].filter(isNotNull);
+
+const publicClient = createPublicClient({
+  chain,
+  transport: fallback(
+    // If one or more RPC URLs are provided, we'll configure the transport with only those RPC URLs
+    transports.length > 0
+      ? transports
+      : // Otherwise use the chain defaults
+        [webSocket(), http()]
+  ),
+  pollingInterval: 1000,
+});
+
+// Fetch the chain ID from the RPC if no chain object was found for the provided chain ID.
+// We do this to match the downstream logic, which also attempts to find the chain ID.
+const chainId = chain?.id ?? (await publicClient.getChainId());
+
+const database = drizzle(postgres(env.DATABASE_URL), {
+  logger: new DefaultLogger(),
+});
+
+let startBlock = env.START_BLOCK;
+
+const storageAdapter = await postgresStorage({ database, publicClient });
+
+// Resume from the latest block stored in the DB. This throws if the internal tables don't exist yet, so we wrap it in a try/catch and ignore the error.
+try {
+  const currentChainStates = await database
+    .select()
+    .from(storageAdapter.internalTables.chain)
+    .where(eq(storageAdapter.internalTables.chain.chainId, chainId))
+    .execute();
+  // TODO: replace this type workaround with `noUncheckedIndexedAccess: true` when we can fix all the issues related (https://github.com/latticexyz/mud/issues/1212)
+  const currentChainState: (typeof currentChainStates)[number] | undefined = currentChainStates[0];
+
+  if (currentChainState != null) {
+    if (currentChainState.schemaVersion != schemaVersion) {
+      console.log(
+        "schema version changed from",
+        currentChainState.schemaVersion,
+        "to",
+        schemaVersion,
+        "cleaning database"
+      );
+      await cleanDatabase(database);
+    } else if (currentChainState.lastUpdatedBlockNumber != null) {
+      console.log("resuming from block number", currentChainState.lastUpdatedBlockNumber + 1n);
+      startBlock = currentChainState.lastUpdatedBlockNumber + 1n;
+    }
+  }
+} catch (error) {
+  // ignore errors, this is optional
+}
+
+const { latestBlockNumber$, blockStorageOperations$ } = await createStoreSync({
+  storageAdapter,
+  publicClient,
+  startBlock,
+  maxBlockRange: env.MAX_BLOCK_RANGE,
+});
+
+combineLatest([latestBlockNumber$, blockStorageOperations$])
+  .pipe(
+    filter(
+      ([latestBlockNumber, { blockNumber: lastBlockNumberProcessed }]) => latestBlockNumber === lastBlockNumberProcessed
+    ),
+    first()
+  )
+  .subscribe(() => {
+    console.log("all caught up");
+  });
+
+// @see https://fastify.dev/docs/latest/
+const server = fastify({
+  maxParamLength: 5000,
+});
+
+await server.register(import("@fastify/cors"));
+
+// @see https://trpc.io/docs/server/adapters/fastify
+server.register(fastifyTRPCPlugin<AppRouter>, {
+  prefix: "/trpc",
+  trpcOptions: {
+    router: createAppRouter(),
+    createContext: async () => ({
+      queryAdapter: await createQueryAdapter(database),
+    }),
+  },
+});
+
+await server.listen({ host: env.HOST, port: env.PORT });
+console.log(`indexer server listening on http://${env.HOST}:${env.PORT}`);
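Once listening, the postgres indexer serves the same tRPC surface as the sqlite one. A query sketch, assuming `createIndexerClient` from `@latticexyz/store-sync/trpc-indexer` takes the `/trpc` URL and exposes the router's `findAll` procedure (the address is hypothetical):

```ts
import { createIndexerClient } from "@latticexyz/store-sync/trpc-indexer";

const indexer = createIndexerClient({ url: "http://127.0.0.1:3001/trpc" });

// Mirrors QueryAdapter.findAll(chainId, address).
const { blockNumber, tables } = await indexer.findAll.query({
  chainId: 31337,
  address: "0xffffffffffffffffffffffffffffffffffffffff",
});
console.log(`synced through block ${blockNumber}, ${tables.length} tables`);
```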
diff --git a/packages/store-indexer/bin/sqlite-indexer.ts b/packages/store-indexer/bin/sqlite-indexer.ts
index c16d7b33aa..1433768aef 100644
--- a/packages/store-indexer/bin/sqlite-indexer.ts
+++ b/packages/store-indexer/bin/sqlite-indexer.ts
@@ -6,15 +6,14 @@ import Database from "better-sqlite3";
 import { createPublicClient, fallback, webSocket, http, Transport } from "viem";
 import fastify from "fastify";
 import { fastifyTRPCPlugin } from "@trpc/server/adapters/fastify";
-import { createAppRouter } from "@latticexyz/store-sync/trpc-indexer";
+import { AppRouter, createAppRouter } from "@latticexyz/store-sync/trpc-indexer";
 import { chainState, schemaVersion, syncToSqlite } from "@latticexyz/store-sync/sqlite";
-import { createStorageAdapter } from "../src/sqlite/createStorageAdapter";
+import { createQueryAdapter } from "../src/sqlite/createQueryAdapter";
 import type { Chain } from "viem/chains";
 import * as mudChains from "@latticexyz/common/chains";
 import * as chains from "viem/chains";
 import { isNotNull } from "@latticexyz/common/utils";
 import { combineLatest, filter, first } from "rxjs";
-import { debug } from "../src/debug";
 
 const possibleChains = Object.values({ ...mudChains, ...chains }) as Chain[];
 
@@ -26,6 +25,7 @@ const env = z
     RPC_WS_URL: z.string().optional(),
     START_BLOCK: z.coerce.bigint().nonnegative().default(0n),
     MAX_BLOCK_RANGE: z.coerce.bigint().positive().default(1000n),
+    HOST: z.string().default("0.0.0.0"),
     PORT: z.coerce.number().positive().default(3001),
     SQLITE_FILENAME: z.string().default("indexer.db"),
   })
@@ -116,15 +116,15 @@ const server = fastify({
 await server.register(import("@fastify/cors"));
 
 // @see https://trpc.io/docs/server/adapters/fastify
-server.register(fastifyTRPCPlugin, {
+server.register(fastifyTRPCPlugin<AppRouter>, {
   prefix: "/trpc",
   trpcOptions: {
     router: createAppRouter(),
     createContext: async () => ({
-      storageAdapter: await createStorageAdapter(database),
+      queryAdapter: await createQueryAdapter(database),
     }),
   },
 });
 
-await server.listen({ port: env.PORT });
-console.log(`indexer server listening on http://127.0.0.1:${env.PORT}`);
+await server.listen({ host: env.HOST, port: env.PORT });
+console.log(`indexer server listening on http://${env.HOST}:${env.PORT}`);
diff --git a/packages/store-indexer/package.json b/packages/store-indexer/package.json
index ac53abbe52..a4f9431668 100644
--- a/packages/store-indexer/package.json
+++ b/packages/store-indexer/package.json
@@ -20,10 +20,14 @@
     "clean:js": "rimraf dist",
     "dev": "tsup --watch",
     "lint": "eslint .",
-    "start": "tsx bin/sqlite-indexer",
-    "start:local": "SQLITE_FILENAME=anvil.db CHAIN_ID=31337 pnpm start",
-    "start:testnet": "SQLITE_FILENAME=testnet.db CHAIN_ID=4242 START_BLOCK=19037160 pnpm start",
-    "start:testnet2": "SQLITE_FILENAME=testnet2.db CHAIN_ID=4243 pnpm start",
+    "start:postgres": "tsx bin/postgres-indexer",
+    "start:postgres:local": "DATABASE_URL=postgres://127.0.0.1/postgres CHAIN_ID=31337 pnpm start:postgres",
+    "start:postgres:testnet": "DATABASE_URL=postgres://127.0.0.1/postgres CHAIN_ID=4242 START_BLOCK=19037160 pnpm start:postgres",
+    "start:postgres:testnet2": "DATABASE_URL=postgres://127.0.0.1/postgres CHAIN_ID=4243 pnpm start:postgres",
+    "start:sqlite": "tsx bin/sqlite-indexer",
+    "start:sqlite:local": "SQLITE_FILENAME=anvil.db CHAIN_ID=31337 pnpm start:sqlite",
+    "start:sqlite:testnet": "SQLITE_FILENAME=testnet.db CHAIN_ID=4242 START_BLOCK=19037160 pnpm start:sqlite",
+    "start:sqlite:testnet2": "SQLITE_FILENAME=testnet2.db CHAIN_ID=4243 pnpm start:sqlite",
     "test": "tsc --noEmit --skipLibCheck"
   },
   "dependencies": {
@@ -39,6 +43,7 @@
     "debug": "^4.3.4",
     "drizzle-orm": "^0.27.0",
     "fastify": "^4.21.0",
+    "postgres": "^3.3.5",
     "rxjs": "7.5.5",
     "superjson": "^1.12.4",
     "viem": "1.6.0",
diff --git a/packages/store-indexer/src/postgres/createQueryAdapter.ts b/packages/store-indexer/src/postgres/createQueryAdapter.ts
new file mode 100644
index 0000000000..17600fca5d
--- /dev/null
+++ b/packages/store-indexer/src/postgres/createQueryAdapter.ts
@@ -0,0 +1,56 @@
+import { eq } from "drizzle-orm";
+import { PgDatabase } from "drizzle-orm/pg-core";
+import { buildTable, buildInternalTables, getTables } from "@latticexyz/store-sync/postgres";
+import { QueryAdapter } from "@latticexyz/store-sync/trpc-indexer";
+import { debug } from "../debug";
+import { getAddress } from "viem";
+
+/**
+ * Creates a query adapter for the tRPC server/client to query data from Postgres.
+ *
+ * @param {PgDatabase<any>} database Postgres database object from Drizzle
+ * @returns {Promise<QueryAdapter>} A set of methods used by tRPC endpoints.
+ */
+export async function createQueryAdapter(database: PgDatabase<any>): Promise<QueryAdapter> {
+  const adapter: QueryAdapter = {
+    async findAll(chainId, address) {
+      const internalTables = buildInternalTables();
+      // Note: if no address is provided, no tables are returned. Addresses are
+      // compared in checksummed form via getAddress.
+      const tables = (await getTables(database)).filter(
+        (table) => address != null && getAddress(address) === getAddress(table.address)
+      );
+
+      const tablesWithRecords = await Promise.all(
+        tables.map(async (table) => {
+          const recordsTable = buildTable(table);
+          const records = await database.select().from(recordsTable).where(eq(recordsTable.__isDeleted, false)).execute();
+          return {
+            ...table,
+            records: records.map((record) => ({
+              key: Object.fromEntries(Object.entries(table.keySchema).map(([name]) => [name, record[name]])),
+              value: Object.fromEntries(Object.entries(table.valueSchema).map(([name]) => [name, record[name]])),
+            })),
+          };
+        })
+      );
+
+      const metadata = await database
+        .select()
+        .from(internalTables.chain)
+        .where(eq(internalTables.chain.chainId, chainId))
+        .execute();
+      const { lastUpdatedBlockNumber } = metadata[0] ?? {};
+
+      const result = {
+        blockNumber: lastUpdatedBlockNumber ?? null,
+        tables: tablesWithRecords,
+      };
+
+      debug("findAll", chainId, address, result);
+
+      return result;
+    },
+  };
+  return adapter;
+}
diff --git a/packages/store-indexer/src/sqlite/createStorageAdapter.ts b/packages/store-indexer/src/sqlite/createQueryAdapter.ts
similarity index 83%
rename from packages/store-indexer/src/sqlite/createStorageAdapter.ts
rename to packages/store-indexer/src/sqlite/createQueryAdapter.ts
index d3d02f7088..eae41e90c4 100644
--- a/packages/store-indexer/src/sqlite/createStorageAdapter.ts
+++ b/packages/store-indexer/src/sqlite/createQueryAdapter.ts
@@ -1,17 +1,17 @@
 import { eq } from "drizzle-orm";
 import { BaseSQLiteDatabase } from "drizzle-orm/sqlite-core";
 import { createSqliteTable, chainState, getTables } from "@latticexyz/store-sync/sqlite";
-import { StorageAdapter } from "@latticexyz/store-sync/trpc-indexer";
+import { QueryAdapter } from "@latticexyz/store-sync/trpc-indexer";
 import { debug } from "../debug";
 
 /**
- * Creates a storage adapter for the tRPC server/client to query data from SQLite.
+ * Creates a query adapter for the tRPC server/client to query data from SQLite.
  *
  * @param {BaseSQLiteDatabase<"sync", any>} database SQLite database object from Drizzle
- * @returns {Promise<StorageAdapter>} A set of methods used by tRPC endpoints.
+ * @returns {Promise<QueryAdapter>} A set of methods used by tRPC endpoints.
  */
-export async function createStorageAdapter(database: BaseSQLiteDatabase<"sync", any>): Promise<StorageAdapter> {
-  const adapter: StorageAdapter = {
+export async function createQueryAdapter(database: BaseSQLiteDatabase<"sync", any>): Promise<QueryAdapter> {
+  const adapter: QueryAdapter = {
     async findAll(chainId, address) {
       const tables = getTables(database).filter((table) => table.address === address);
 
diff --git a/packages/store-sync/package.json b/packages/store-sync/package.json
index 7db4f0a041..406dcb2cb9 100644
--- a/packages/store-sync/package.json
+++ b/packages/store-sync/package.json
@@ -11,8 +11,9 @@
   "type": "module",
   "exports": {
     ".": "./dist/index.js",
-    "./sqlite": "./dist/sqlite/index.js",
+    "./postgres": "./dist/postgres/index.js",
     "./recs": "./dist/recs/index.js",
+    "./sqlite": "./dist/sqlite/index.js",
     "./trpc-indexer": "./dist/trpc-indexer/index.js"
   },
   "typesVersions": {
@@ -20,12 +21,15 @@
       "index": [
         "./src/index.ts"
       ],
-      "sqlite": [
-        "./src/sqlite/index.ts"
+      "postgres": [
+        "./src/postgres/index.ts"
       ],
       "recs": [
         "./src/recs/index.ts"
       ],
+      "sqlite": [
+        "./src/sqlite/index.ts"
+      ],
       "trpc-indexer": [
         "./src/trpc-indexer/index.ts"
       ]
@@ -38,7 +42,8 @@
     "clean:js": "rimraf dist",
     "dev": "tsup --watch",
     "lint": "eslint .",
-    "test": "vitest --run"
+    "test": "vitest --run",
+    "test:local": "DATABASE_URL=http://127.0.0.1:5432/postgres vitest"
   },
   "dependencies": {
     "@latticexyz/block-logs-stream": "workspace:*",
@@ -50,10 +55,10 @@
     "@latticexyz/world": "workspace:*",
     "@trpc/client": "10.34.0",
     "@trpc/server": "10.34.0",
-    "better-sqlite3": "^8.4.0",
     "debug": "^4.3.4",
     "drizzle-orm": "^0.27.0",
     "kysely": "^0.26.1",
+    "postgres": "^3.3.5",
     "rxjs": "7.5.5",
     "sql.js": "^1.8.0",
     "superjson": "^1.12.4",
@@ -61,7 +66,6 @@
     "zod": "^3.21.4"
   },
   "devDependencies": {
-    "@types/better-sqlite3": "^7.6.4",
     "@types/debug": "^4.1.7",
     "@types/sql.js": "^1.4.4",
     "tsup": "^6.7.0",
diff --git a/packages/store-sync/src/blockLogsToStorage.test.ts b/packages/store-sync/src/blockLogsToStorage.test.ts
index f40d569831..e4dc6bdfeb 100644
--- a/packages/store-sync/src/blockLogsToStorage.test.ts
+++ b/packages/store-sync/src/blockLogsToStorage.test.ts
@@ -1,26 +1,21 @@
 import { beforeEach, describe, expect, it, vi } from "vitest";
-import { BlockLogsToStorageOptions, blockLogsToStorage } from "./blockLogsToStorage";
+import { blockLogsToStorage } from "./blockLogsToStorage";
 import storeConfig from "@latticexyz/store/mud.config";
 import { isDefined } from "@latticexyz/common/utils";
 import { tableIdToHex } from "@latticexyz/common";
+import { StorageAdapter } from "./common";
 
 const mockedCallbacks = {
-  registerTables: vi.fn<
-    Parameters<BlockLogsToStorageOptions["registerTables"]>,
-    ReturnType<BlockLogsToStorageOptions["registerTables"]>
-  >(),
-  getTables: vi.fn<
-    Parameters<BlockLogsToStorageOptions["getTables"]>,
-    ReturnType<BlockLogsToStorageOptions["getTables"]>
-  >(),
+  registerTables: vi.fn<Parameters<StorageAdapter["registerTables"]>, ReturnType<StorageAdapter["registerTables"]>>(),
+  getTables: vi.fn<Parameters<StorageAdapter["getTables"]>, ReturnType<StorageAdapter["getTables"]>>(),
   storeOperations: vi.fn<
-    Parameters<BlockLogsToStorageOptions["storeOperations"]>,
-    ReturnType<BlockLogsToStorageOptions["storeOperations"]>
+    Parameters<StorageAdapter["storeOperations"]>,
+    ReturnType<StorageAdapter["storeOperations"]>
   >(),
 };
 
 const mockedDecode = blockLogsToStorage<typeof storeConfig>(
-  mockedCallbacks as any as BlockLogsToStorageOptions<typeof storeConfig>
+  mockedCallbacks as any as StorageAdapter<typeof storeConfig>
 );
 
 describe("blockLogsToStorage", () => {
diff --git a/packages/store-sync/src/blockLogsToStorage.ts b/packages/store-sync/src/blockLogsToStorage.ts
index 41658a5ceb..814fbda4c7 100644
--- a/packages/store-sync/src/blockLogsToStorage.ts
+++ b/packages/store-sync/src/blockLogsToStorage.ts
@@ -8,7 +8,7 @@ import {
 import { decodeAbiParameters, getAddress, parseAbiParameters } from "viem";
 import { debug } from "./debug";
 import { isDefined } from "@latticexyz/common/utils";
-import { BlockLogs, StorageOperation, Table } from "./common";
+import { BlockLogs, StorageAdapter, StorageOperation, Table } from "./common";
 import { hexToTableId, tableIdToHex } from "@latticexyz/common";
 import storeConfig from "@latticexyz/store/mud.config";
 
@@ -16,18 +16,6 @@ import storeConfig from "@latticexyz/store/mud.config";
 const schemasTable = storeConfig.tables.Tables;
 const schemasTableId = tableIdToHex(storeConfig.namespace, schemasTable.name);
 
-export type BlockLogsToStorageOptions<TConfig extends StoreConfig = StoreConfig> = {
-  registerTables: (opts: { blockNumber: BlockLogs["blockNumber"]; tables: Table[] }) => Promise<void>;
-  getTables: (opts: {
-    blockNumber: BlockLogs["blockNumber"];
-    tables: Pick<Table, "address" | "namespace" | "name">[];
-  }) => Promise<Table[]>;
-  storeOperations: (opts: {
-    blockNumber: BlockLogs["blockNumber"];
-    operations: StorageOperation<TConfig>[];
-  }) => Promise<void>;
-};
-
 export type BlockStorageOperations<TConfig extends StoreConfig = StoreConfig> = {
   blockNumber: BlockLogs["blockNumber"];
   operations: StorageOperation<TConfig>[];
@@ -41,7 +29,7 @@ export function blockLogsToStorage<TConfig extends StoreConfig = StoreConfig>({
   registerTables,
   getTables,
   storeOperations,
-}: BlockLogsToStorageOptions<TConfig>): BlockLogsToStorageResult<TConfig> {
+}: StorageAdapter<TConfig>): BlockLogsToStorageResult<TConfig> {
   return async (block) => {
     // Find table schema registration events
     const newTables = block.logs
diff --git a/packages/store-sync/src/common.ts b/packages/store-sync/src/common.ts
index 764fa93fcc..6feccfe802 100644
--- a/packages/store-sync/src/common.ts
+++ b/packages/store-sync/src/common.ts
@@ -121,3 +121,15 @@ export type SyncResult<TConfig extends StoreConfig = StoreConfig> = {
   blockStorageOperations$: Observable<BlockStorageOperations<TConfig>>;
   waitForTransaction: (tx: Hex) => Promise<void>;
 };
+
+export type StorageAdapter<TConfig extends StoreConfig = StoreConfig> = {
+  registerTables: (opts: { blockNumber: BlockLogs["blockNumber"]; tables: Table[] }) => Promise<void>;
+  getTables: (opts: {
+    blockNumber: BlockLogs["blockNumber"];
+    tables: Pick<Table, "address" | "namespace" | "name">[];
+  }) => Promise<Table[]>;
+  storeOperations: (opts: {
+    blockNumber: BlockLogs["blockNumber"];
+    operations: StorageOperation<TConfig>[];
+  }) => Promise<void>;
+};
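`StorageAdapter` is now the shared contract that `blockLogsToStorage`/`createStoreSync` drive and that both the sqlite and postgres writers implement. A minimal in-memory sketch, assuming only the shape above, to illustrate what each method receives:

```ts
import { StorageAdapter, Table } from "@latticexyz/store-sync";

// Tables registered so far, keyed by address/namespace/name.
const registered = new Map<string, Table>();
const tableKey = (table: Pick<Table, "address" | "namespace" | "name">): string =>
  `${table.address}:${table.namespace}:${table.name}`;

export const memoryAdapter: StorageAdapter = {
  async registerTables({ tables }) {
    for (const table of tables) registered.set(tableKey(table), table);
  },
  async getTables({ tables }) {
    return tables.map((table) => registered.get(tableKey(table))).filter((table): table is Table => table != null);
  },
  async storeOperations({ blockNumber, operations }) {
    console.log(`block ${blockNumber}: ${operations.length} storage operations`);
  },
};
```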
diff --git a/packages/store-sync/src/createStoreSync.ts b/packages/store-sync/src/createStoreSync.ts
index 200281d373..2e0cfc0857 100644
--- a/packages/store-sync/src/createStoreSync.ts
+++ b/packages/store-sync/src/createStoreSync.ts
@@ -1,6 +1,6 @@
 import { ConfigToKeyPrimitives, ConfigToValuePrimitives, StoreConfig, storeEventsAbi } from "@latticexyz/store";
 import { Hex, TransactionReceiptNotFoundError } from "viem";
-import { SetRecordOperation, SyncOptions, SyncResult, TableWithRecords } from "./common";
+import { SetRecordOperation, StorageAdapter, SyncOptions, SyncResult, TableWithRecords } from "./common";
 import { createBlockStream, blockRangeToLogs, groupLogsByBlockNumber } from "@latticexyz/block-logs-stream";
 import {
   filter,
@@ -23,14 +23,13 @@ import {
 import { BlockStorageOperations, blockLogsToStorage } from "./blockLogsToStorage";
 import { debug as parentDebug } from "./debug";
 import { createIndexerClient } from "./trpc-indexer";
-import { BlockLogsToStorageOptions } from "./blockLogsToStorage";
 import { SyncStep } from "./SyncStep";
 import { chunk, isDefined } from "@latticexyz/common/utils";
 
 const debug = parentDebug.extend("createStoreSync");
 
 type CreateStoreSyncOptions<TConfig extends StoreConfig = StoreConfig> = SyncOptions<TConfig> & {
-  storageAdapter: BlockLogsToStorageOptions<TConfig>;
+  storageAdapter: StorageAdapter<TConfig>;
   onProgress?: (opts: {
     step: SyncStep;
     percentage: number;
diff --git a/packages/store-sync/src/index.ts b/packages/store-sync/src/index.ts
index a56300146c..01c883d7e2 100644
--- a/packages/store-sync/src/index.ts
+++ b/packages/store-sync/src/index.ts
@@ -1,3 +1,4 @@
 export * from "./blockLogsToStorage";
 export * from "./common";
+export * from "./createStoreSync";
 export * from "./SyncStep";
diff --git a/packages/store-sync/src/postgres/buildColumn.ts b/packages/store-sync/src/postgres/buildColumn.ts
new file mode 100644
index 0000000000..86ea59c268
--- /dev/null
+++ b/packages/store-sync/src/postgres/buildColumn.ts
@@ -0,0 +1,238 @@
+import { AnyPgColumnBuilder, boolean, text } from "drizzle-orm/pg-core";
+import { SchemaAbiType } from "@latticexyz/schema-type";
+import { assertExhaustive } from "@latticexyz/common/utils";
+import { asAddress, asBigInt, asHex, asJson, asNumber } from "./columnTypes";
+
+export function buildColumn(name: string, schemaAbiType: SchemaAbiType): AnyPgColumnBuilder {
+  switch (schemaAbiType) {
+    case "bool":
+      return boolean(name);
+
+    case "uint8":
+    case "uint16":
+    case "int8":
+    case "int16":
+      // smallint = 2 bytes (https://www.postgresql.org/docs/current/datatype-numeric.html#DATATYPE-INT)
+      return asNumber(name, "smallint");
+
+    case "uint24":
+    case "uint32":
+    case "int24":
+    case "int32":
+      // integer = 4 bytes (https://www.postgresql.org/docs/current/datatype-numeric.html#DATATYPE-INT)
+      return asNumber(name, "integer");
+
+    case "uint40":
+    case "uint48":
+    case "int40":
+    case "int48":
+      // bigint = 8 bytes (https://www.postgresql.org/docs/current/datatype-numeric.html#DATATYPE-INT)
+      return asNumber(name, "bigint");
+
+    case "uint56":
+    case "uint64":
+    case "int56":
+    case "int64":
+      // bigint = 8 bytes (https://www.postgresql.org/docs/current/datatype-numeric.html#DATATYPE-INT)
+      return asBigInt(name, "bigint");
+
+    case "uint72":
+    case "uint80":
+    case "uint88":
+    case "uint96":
+    case "uint104":
+    case "uint112":
+    case "uint120":
+    case "uint128":
+    case "uint136":
+    case "uint144":
+    case "uint152":
+    case "uint160":
+    case "uint168":
+    case "uint176":
+    case "uint184":
+    case "uint192":
+    case "uint200":
+    case "uint208":
+    case "uint216":
+    case "uint224":
+    case "uint232":
+    case "uint240":
+    case "uint248":
+    case "uint256":
+    case "int72":
+    case "int80":
+    case "int88":
+    case "int96":
+    case "int104":
+    case "int112":
+    case "int120":
+    case "int128":
+    case "int136":
+    case "int144":
+    case "int152":
+    case "int160":
+    case "int168":
+    case "int176":
+    case "int184":
+    case "int192":
+    case "int200":
+    case "int208":
+    case "int216":
+    case "int224":
+    case "int232":
+    case "int240":
+    case "int248":
+    case "int256":
+      // variable length (https://www.postgresql.org/docs/current/datatype-numeric.html#DATATYPE-NUMERIC-DECIMAL)
+      // we could refine this to the specific length for each type, but maybe not worth it
+      return asBigInt(name, "numeric");
+
+    case "bytes1":
+    case "bytes2":
+    case "bytes3":
+    case "bytes4":
+    case "bytes5":
+    case "bytes6":
+    case "bytes7":
+    case "bytes8":
+    case "bytes9":
+    case "bytes10":
+    case "bytes11":
+    case "bytes12":
+    case "bytes13":
+    case "bytes14":
+    case "bytes15":
+    case "bytes16":
+    case "bytes17":
+    case "bytes18":
+    case "bytes19":
+    case "bytes20":
+    case "bytes21":
+    case "bytes22":
+    case "bytes23":
+    case "bytes24":
+    case "bytes25":
+    case "bytes26":
+    case "bytes27":
+    case "bytes28":
+    case "bytes29":
+    case "bytes30":
+    case "bytes31":
+    case "bytes32":
+    case "bytes":
+      return asHex(name);
+
+    case "address":
+      return asAddress(name);
+
+    case "uint8[]":
+    case "uint16[]":
+    case "uint24[]":
+    case "uint32[]":
+    case "uint40[]":
+    case "uint48[]":
+    case "uint56[]":
+    case "uint64[]":
+    case "uint72[]":
+    case "uint80[]":
+    case "uint88[]":
+    case "uint96[]":
+    case "uint104[]":
+    case "uint112[]":
+    case "uint120[]":
+    case "uint128[]":
+    case "uint136[]":
+    case "uint144[]":
+    case "uint152[]":
+    case "uint160[]":
+    case "uint168[]":
+    case "uint176[]":
+    case "uint184[]":
+    case "uint192[]":
+    case "uint200[]":
+    case "uint208[]":
+    case "uint216[]":
+    case "uint224[]":
+    case "uint232[]":
+    case "uint240[]":
+    case "uint248[]":
+    case "uint256[]":
+    case "int8[]":
+    case "int16[]":
+    case "int24[]":
+    case "int32[]":
+    case "int40[]":
+    case "int48[]":
+    case "int56[]":
+    case "int64[]":
+    case "int72[]":
+    case "int80[]":
+    case "int88[]":
+    case "int96[]":
+    case "int104[]":
+    case "int112[]":
+    case "int120[]":
+    case "int128[]":
+    case "int136[]":
+    case "int144[]":
+    case "int152[]":
+    case "int160[]":
+    case "int168[]":
+    case "int176[]":
+    case "int184[]":
+    case "int192[]":
+    case "int200[]":
+    case "int208[]":
+    case "int216[]":
+    case "int224[]":
+    case "int232[]":
+    case "int240[]":
+    case "int248[]":
+    case "int256[]":
+    case "bytes1[]":
+    case "bytes2[]":
+    case "bytes3[]":
+    case "bytes4[]":
+    case "bytes5[]":
+    case "bytes6[]":
+    case "bytes7[]":
+    case "bytes8[]":
+    case "bytes9[]":
+    case "bytes10[]":
+    case "bytes11[]":
+    case "bytes12[]":
+    case "bytes13[]":
+    case "bytes14[]":
+    case "bytes15[]":
+    case "bytes16[]":
+    case "bytes17[]":
+    case "bytes18[]":
+    case "bytes19[]":
+    case "bytes20[]":
+    case "bytes21[]":
+    case "bytes22[]":
+    case "bytes23[]":
+    case "bytes24[]":
+    case "bytes25[]":
+    case "bytes26[]":
+    case "bytes27[]":
+    case "bytes28[]":
+    case "bytes29[]":
+    case "bytes30[]":
+    case "bytes31[]":
+    case "bytes32[]":
+    case "bool[]":
+      return asJson(name);
+
+    // TODO: normalize like address column type
+    case "address[]":
+      return asJson(name);
+
+    case "string":
+      return text(name);
+
+    default:
+      assertExhaustive(schemaAbiType, `Missing column type for schema ABI type ${schemaAbiType}`);
+  }
+}
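The `asNumber`/`asBigInt`/`asHex`/`asAddress`/`asJson` helpers come from a `columnTypes.ts` module that isn't part of this diff. A plausible reconstruction of `asBigInt` using drizzle's `customType`, consistent with the `customTypeParams` and `sqlName` shapes visible in the snapshot test below:

```ts
import { customType } from "drizzle-orm/pg-core";

// Hypothetical sketch: store JS bigints in a Postgres bigint/numeric column,
// round-tripping through strings at the driver boundary.
export const asBigInt = (name: string, sqlName: "bigint" | "numeric") =>
  customType<{ data: bigint; driverData: string }>({
    dataType: () => sqlName,
    toDriver: (value: bigint): string => value.toString(),
    fromDriver: (value: string): bigint => BigInt(value),
  })(name);
```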
diff --git a/packages/store-sync/src/postgres/buildInternalTables.ts b/packages/store-sync/src/postgres/buildInternalTables.ts
new file mode 100644
index 0000000000..c269a9f9d0
--- /dev/null
+++ b/packages/store-sync/src/postgres/buildInternalTables.ts
@@ -0,0 +1,32 @@
+import { integer, pgSchema, text } from "drizzle-orm/pg-core";
+import { DynamicAbiType, StaticAbiType } from "@latticexyz/schema-type";
+import { transformSchemaName } from "./transformSchemaName";
+import { asAddress, asBigInt, asJson, asNumber } from "./columnTypes";
+
+// eslint-disable-next-line @typescript-eslint/explicit-function-return-type
+export function buildInternalTables() {
+  const schema = pgSchema(transformSchemaName("__mud_internal"));
+  return {
+    chain: schema.table("chain", {
+      // TODO: change schema version to varchar/text?
+      schemaVersion: integer("schema_version").notNull().primaryKey(),
+      chainId: asNumber("chain_id", "bigint").notNull().primaryKey(),
+      lastUpdatedBlockNumber: asBigInt("last_updated_block_number", "numeric"),
+      // TODO: last block hash?
+      lastError: text("last_error"),
+    }),
+    tables: schema.table("tables", {
+      schemaVersion: integer("schema_version").primaryKey(),
+      key: text("key").notNull().primaryKey(),
+      address: asAddress("address").notNull(),
+      tableId: text("table_id").notNull(),
+      namespace: text("namespace").notNull(),
+      name: text("name").notNull(),
+      keySchema: asJson<Record<string, StaticAbiType>>("key_schema").notNull(),
+      valueSchema: asJson<Record<string, StaticAbiType | DynamicAbiType>>("value_schema").notNull(),
+      lastUpdatedBlockNumber: asBigInt("last_updated_block_number", "numeric"),
+      // TODO: last block hash?
+      lastError: text("last_error"),
+    }),
+  };
+}
diff --git a/packages/store-sync/src/postgres/buildTable.test.ts b/packages/store-sync/src/postgres/buildTable.test.ts
new file mode 100644
index 0000000000..5d9d192a88
--- /dev/null
+++ b/packages/store-sync/src/postgres/buildTable.test.ts
@@ -0,0 +1,510 @@
+import { describe, it, expect } from "vitest";
+import { buildTable } from "./buildTable";
+
+describe("buildTable", () => {
+  it("should create table from schema", async () => {
+    const table = buildTable({
+      address: "0xffffffffffffffffffffffffffffffffffffffff",
+      namespace: "test",
+      name: "users",
+      keySchema: { x: "uint32", y: "uint32" },
+      valueSchema: { name: "string", addr: "address" },
+    });
+
+    expect(table).toMatchInlineSnapshot(`
+      PgTable {
+        "__isDeleted": PgBoolean {
+          "config": {
+            "default": undefined,
+            "name": "__isDeleted",
+            "notNull": true,
+            "primaryKey": false,
+          },
+          "default": undefined,
+          "hasDefault": undefined,
+          "name": "__isDeleted",
+          "notNull": true,
+          "primary": false,
+          "table": [Circular],
+        },
+        "__key": PgCustomColumn {
+          "config": {
+            "customTypeParams": {
+              "dataType": [Function],
+              "fromDriver": [Function],
+              "toDriver": [Function],
+            },
+            "default": undefined,
+            "fieldConfig": undefined,
+            "name": "__key",
+            "notNull": true,
+            "primaryKey": true,
+          },
+          "default": undefined,
+          "hasDefault": undefined,
+          "mapFrom": [Function],
+          "mapTo": [Function],
+          "name": "__key",
+          "notNull": true,
+          "primary": true,
+          "sqlName": "bytea",
+          "table": [Circular],
+        },
+        "__lastUpdatedBlockNumber": PgCustomColumn {
+          "config": {
+            "customTypeParams": {
+              "dataType": [Function],
+              "fromDriver": [Function],
+              "toDriver": [Function],
+            },
+            "default": undefined,
+            "fieldConfig": undefined,
+            "name": "__lastUpdatedBlockNumber",
+            "notNull": true,
+            "primaryKey": false,
+          },
+          "default": undefined,
+          "hasDefault": undefined,
+          "mapFrom": [Function],
+          "mapTo": [Function],
+          "name": "__lastUpdatedBlockNumber",
+          "notNull": true,
+          "primary": false,
+          "sqlName": "numeric",
+          "table": [Circular],
+        },
+        "addr": PgCustomColumn {
+          "config": {
+            "customTypeParams": {
+              "dataType": [Function],
+              "fromDriver": [Function],
+              "toDriver": [Function],
+            },
+            "default": undefined,
+            "fieldConfig": undefined,
+            "name": "addr",
+            "notNull": true,
+            "primaryKey": false,
+          },
+          "default": undefined,
+          "hasDefault": undefined,
+          "mapFrom": [Function],
+          "mapTo": [Function],
+          "name": "addr",
+          "notNull": true,
+          "primary": false,
+          "sqlName": "bytea",
+          "table": [Circular],
+        },
+        "name": PgText {
+          "config": {
+            "default": undefined,
+            "enumValues": [],
+            "name": "name",
+            "notNull": true,
+            "primaryKey": false,
+          },
+          "default": undefined,
+          "enumValues": [],
+          "hasDefault": undefined,
+          "name": "name",
+          "notNull": true,
+          "primary": false,
+          "table": [Circular],
+        },
+        "x": PgCustomColumn {
+          "config": {
+            "customTypeParams": {
+              "dataType": [Function],
+              "fromDriver": [Function],
+              "toDriver": [Function],
+            },
+            "default": undefined,
+            "fieldConfig": undefined,
+            "name": "x",
+            "notNull": true,
+            "primaryKey": false,
+          },
+          "default": undefined,
+          "hasDefault": undefined,
+          "mapFrom": [Function],
+          "mapTo": [Function],
+          "name": "x",
+          "notNull": true,
+          "primary": false,
+          "sqlName": "integer",
+          "table": [Circular],
+        },
+        "y": PgCustomColumn {
+          "config": {
+            "customTypeParams": {
+              "dataType": [Function],
+              "fromDriver": [Function],
+              "toDriver": [Function],
+            },
+            "default": undefined,
+            "fieldConfig": undefined,
+            "name": "y",
+            "notNull": true,
+            "primaryKey": false,
+          },
+          "default": undefined,
+          "hasDefault": undefined,
+          "mapFrom": [Function],
+          "mapTo": [Function],
+          "name": "y",
+          "notNull": true,
+          "primary": false,
+          "sqlName": "integer",
+          "table": [Circular],
+        },
+        Symbol(drizzle:Name): "users",
+        Symbol(drizzle:OriginalName): "users",
+        Symbol(drizzle:Schema): "0xFFfFfFffFFfffFFfFFfFFFFFffFFFffffFfFFFfF__test",
+        Symbol(drizzle:Columns): {
+          "__isDeleted": PgBoolean {
+            "config": {
+              "default": undefined,
+              "name": "__isDeleted",
+              "notNull": true,
+              "primaryKey": false,
+            },
+            "default": undefined,
+            "hasDefault": undefined,
+            "name": "__isDeleted",
+            "notNull": true,
+            "primary": false,
+            "table": [Circular],
+          },
+          "__key": PgCustomColumn {
+            "config": {
+              "customTypeParams": {
+                "dataType": [Function],
+                "fromDriver": [Function],
+                "toDriver": [Function],
+              },
+              "default": undefined,
+              "fieldConfig": undefined,
+              "name": "__key",
+              "notNull": true,
+              "primaryKey": true,
+            },
+            "default": undefined,
+            "hasDefault": undefined,
+            "mapFrom": [Function],
+            "mapTo": [Function],
+            "name": "__key",
+            "notNull": true,
+            "primary": true,
+            "sqlName": "bytea",
+            "table": [Circular],
+          },
+          "__lastUpdatedBlockNumber": PgCustomColumn {
+            "config": {
+              "customTypeParams": {
+                "dataType": [Function],
+                "fromDriver": [Function],
+                "toDriver": [Function],
+              },
+              "default": undefined,
+              "fieldConfig": undefined,
+              "name": "__lastUpdatedBlockNumber",
+              "notNull": true,
+              "primaryKey": false,
+            },
+            "default": undefined,
+            "hasDefault": undefined,
+            "mapFrom": [Function],
+            "mapTo": [Function],
+            "name": "__lastUpdatedBlockNumber",
+            "notNull": true,
+            "primary": false,
+            "sqlName": "numeric",
+            "table": [Circular],
+          },
+          "addr": PgCustomColumn {
+            "config": {
+              "customTypeParams": {
+                "dataType": [Function],
+                "fromDriver": [Function],
+                "toDriver": [Function],
+              },
+              "default": undefined,
+              "fieldConfig": undefined,
+              "name": "addr",
+              "notNull": true,
+              "primaryKey": false,
+            },
+            "default": undefined,
+            "hasDefault": undefined,
+            "mapFrom": [Function],
+            "mapTo": [Function],
+            "name": "addr",
+            "notNull": true,
+            "primary": false,
+            "sqlName": "bytea",
+            "table": [Circular],
+          },
+          "name": PgText {
+            "config": {
+              "default": undefined,
+              "enumValues": [],
+              "name": "name",
+              "notNull": true,
+              "primaryKey": false,
+            },
+            "default": undefined,
+            "enumValues": [],
+            "hasDefault": undefined,
+            "name": "name",
+            "notNull": true,
+            "primary": false,
+            "table": [Circular],
+          },
+          "x": PgCustomColumn {
+            "config": {
+              "customTypeParams": {
+                "dataType": [Function],
+                "fromDriver": [Function],
+                "toDriver": [Function],
+              },
+              "default": undefined,
+              "fieldConfig": undefined,
+              "name": "x",
+              "notNull": true,
+              "primaryKey": false,
+            },
+            "default": undefined,
+            "hasDefault": undefined,
+            "mapFrom": [Function],
+            "mapTo": [Function],
+            "name": "x",
+            "notNull": true,
+            "primary": false,
+            "sqlName": "integer",
+            "table": [Circular],
+          },
+          "y": PgCustomColumn {
+            "config": {
+              "customTypeParams": {
+                "dataType": [Function],
+                "fromDriver": [Function],
+                "toDriver": [Function],
+              },
+              "default": undefined,
+              "fieldConfig": undefined,
+              "name": "y",
+              "notNull": true,
+              "primaryKey": false,
+            },
+            "default": undefined,
+            "hasDefault": undefined,
+            "mapFrom": [Function],
+            "mapTo": [Function],
+            "name": "y",
+            "notNull": true,
+            "primary": false,
+            "sqlName": "integer",
+            "table": [Circular],
+          },
+        },
+        Symbol(drizzle:BaseName): "users",
+        Symbol(drizzle:IsAlias): false,
+        Symbol(drizzle:ExtraConfigBuilder): undefined,
+        Symbol(drizzle:IsDrizzleTable): true,
+        Symbol(drizzle:PgInlineForeignKeys): [],
+      }
+    `);
+  });
+
+  it("can create a singleton table", async () => {
+    const table = buildTable({
+      address: "0xffffffffffffffffffffffffffffffffffffffff",
+      namespace: "test",
+      name: "users",
+      keySchema: {},
+      valueSchema: { addrs: "address[]" },
+    });
+
+    expect(table).toMatchInlineSnapshot(`
+      PgTable {
+        "__isDeleted": PgBoolean {
+          "config": {
+            "default": undefined,
+            "name": "__isDeleted",
+            "notNull": true,
+            "primaryKey": false,
+          },
+          "default": undefined,
+          "hasDefault": undefined,
+          "name": "__isDeleted",
+          "notNull": true,
+          "primary": false,
+          "table": [Circular],
+        },
+        "__key": PgCustomColumn {
+          "config": {
+            "customTypeParams": {
+              "dataType": [Function],
+              "fromDriver": [Function],
+              "toDriver": [Function],
+            },
+            "default": undefined,
+            "fieldConfig": undefined,
+            "name": "__key",
+            "notNull": true,
+            "primaryKey": true,
+          },
+          "default": undefined,
+          "hasDefault": undefined,
+          "mapFrom": [Function],
+          "mapTo": [Function],
+          "name": "__key",
+          "notNull": true,
+          "primary": true,
+          "sqlName": "bytea",
+          "table": [Circular],
+        },
+        "__lastUpdatedBlockNumber": PgCustomColumn {
+          "config": {
+            "customTypeParams": {
+              "dataType": [Function],
+              "fromDriver": [Function],
+              "toDriver": [Function],
+            },
+            "default": undefined,
+            "fieldConfig": undefined,
+            "name": "__lastUpdatedBlockNumber",
+            "notNull": true,
+            "primaryKey": false,
+          },
+          "default": undefined,
+          "hasDefault": undefined,
+          "mapFrom": [Function],
+          "mapTo": [Function],
+          "name": "__lastUpdatedBlockNumber",
+          "notNull": true,
+          "primary": false,
+          "sqlName": "numeric",
+          "table": [Circular],
+        },
+        "addrs": PgCustomColumn {
+          "config": {
+            "customTypeParams": {
+              "dataType": [Function],
+              "fromDriver": [Function],
+              "toDriver": [Function],
+            },
+            "default": undefined,
+            "fieldConfig": undefined,
+            "name": "addrs",
+            "notNull": true,
+            "primaryKey": false,
+          },
+          "default": undefined,
+          "hasDefault": undefined,
+          "mapFrom": [Function],
+          "mapTo": [Function],
+          "name": "addrs",
+          "notNull": true,
+          "primary": false,
+          "sqlName": "text",
+          "table": [Circular],
+        },
+        Symbol(drizzle:Name): "users",
+        Symbol(drizzle:OriginalName): "users",
+        Symbol(drizzle:Schema): "0xFFfFfFffFFfffFFfFFfFFFFFffFFFffffFfFFFfF__test",
+        Symbol(drizzle:Columns): {
+          "__isDeleted": PgBoolean {
+            "config": {
+              "default": undefined,
+              "name": "__isDeleted",
+              "notNull": true,
+              "primaryKey": false,
+            },
+            "default": undefined,
+            "hasDefault": undefined,
+            "name": "__isDeleted",
+            "notNull": true,
+            "primary": false,
+            "table": [Circular],
+          },
+          "__key": PgCustomColumn {
+            "config": {
+              "customTypeParams": {
+                "dataType": [Function],
+                "fromDriver": [Function],
+                "toDriver": [Function],
+              },
+              "default": undefined,
+              "fieldConfig": undefined,
+              "name": "__key",
+              "notNull": true,
+              "primaryKey": true,
+            },
+            "default": undefined,
+            "hasDefault": undefined,
+            "mapFrom": [Function],
+            "mapTo": [Function],
+            "name": "__key",
+            "notNull": true,
+            "primary": true,
+            "sqlName": "bytea",
+            "table": [Circular],
+          },
+          "__lastUpdatedBlockNumber": PgCustomColumn {
+            "config": {
+              "customTypeParams": {
+                "dataType": [Function],
+                "fromDriver": [Function],
+                "toDriver": [Function],
+              },
+              "default": undefined,
+              "fieldConfig": undefined,
+              "name": "__lastUpdatedBlockNumber",
+              "notNull": true,
+              "primaryKey": false,
+            },
+            "default": undefined,
+            "hasDefault": undefined,
+            "mapFrom": [Function],
+            "mapTo": [Function],
+            "name": "__lastUpdatedBlockNumber",
+            "notNull": true,
+            "primary": false,
+            "sqlName": "numeric",
+            "table": [Circular],
+          },
+          "addrs": PgCustomColumn {
+            "config": {
+              "customTypeParams": {
+                "dataType": [Function],
+                "fromDriver": [Function],
+                "toDriver": [Function],
+              },
+              "default": undefined,
+              "fieldConfig": undefined,
+              "name": "addrs",
+              "notNull": true,
+              "primaryKey": false,
+            },
+            "default": undefined,
+            "hasDefault": undefined,
+            "mapFrom": [Function],
+            "mapTo": [Function],
+            "name": "addrs",
+            "notNull": true,
+            "primary": false,
+            "sqlName": "text",
+            "table": [Circular],
+          },
+        },
+        Symbol(drizzle:BaseName): "users",
+        Symbol(drizzle:IsAlias): false,
+        Symbol(drizzle:ExtraConfigBuilder): undefined,
+        Symbol(drizzle:IsDrizzleTable): true,
+        Symbol(drizzle:PgInlineForeignKeys): [],
+      }
+    `);
+  });
+});
diff --git a/packages/store-sync/src/postgres/buildTable.ts b/packages/store-sync/src/postgres/buildTable.ts
new file mode 100644
index 0000000000..8974900e2a
--- /dev/null
+++ b/packages/store-sync/src/postgres/buildTable.ts
@@ -0,0 +1,81 @@
+import { AnyPgColumnBuilder, PgTableWithColumns, pgSchema } from "drizzle-orm/pg-core";
+import { SchemaAbiType, StaticAbiType } from "@latticexyz/schema-type";
+import { buildColumn } from "./buildColumn";
+import { Address, getAddress } from "viem";
+import { transformSchemaName } from "./transformSchemaName";
+
+// TODO: convert camel case to snake case for DB storage?
+export const metaColumns = {
+  __key: buildColumn("__key", "bytes").notNull().primaryKey(),
+  __lastUpdatedBlockNumber: buildColumn("__lastUpdatedBlockNumber", "uint256").notNull(),
+  // TODO: last updated block hash?
+  __isDeleted: buildColumn("__isDeleted", "bool").notNull(),
+} as const satisfies Record<string, AnyPgColumnBuilder>;
+
+type PgTableFromSchema<
+  TKeySchema extends Record<string, StaticAbiType>,
+  TValueSchema extends Record<string, SchemaAbiType>
+> = PgTableWithColumns<{
+  name: string;
+  schema: string;
+  columns: {
+    // TODO: figure out column types
+    [metaColumn in keyof typeof metaColumns]: any;
+  } & {
+    // TODO: figure out column types
+    [keyColumn in keyof TKeySchema]: any;
+  } & {
+    // TODO: figure out column types
+    [valueColumn in keyof TValueSchema]: any;
+  };
+}>;
+
+type BuildTableOptions<
+  TKeySchema extends Record<string, StaticAbiType>,
+  TValueSchema extends Record<string, SchemaAbiType>
+> = {
+  address: Address;
+  namespace: string;
+  name: string;
+  keySchema: TKeySchema;
+  valueSchema: TValueSchema;
+};
+
+type BuildTableResult<
+  TKeySchema extends Record<string, StaticAbiType>,
+  TValueSchema extends Record<string, SchemaAbiType>
+> = PgTableFromSchema<TKeySchema, TValueSchema>;
+
+export function buildTable<
+  TKeySchema extends Record<string, StaticAbiType>,
+  TValueSchema extends Record<string, SchemaAbiType>
+>({
+  address,
+  namespace,
+  name,
+  keySchema,
+  valueSchema,
+}: BuildTableOptions<TKeySchema, TValueSchema>): BuildTableResult<TKeySchema, TValueSchema> {
+  const schemaName = transformSchemaName(`${getAddress(address)}__${namespace}`);
+
+  const keyColumns = Object.fromEntries(
+    Object.entries(keySchema).map(([name, type]) => [name, buildColumn(name, type).notNull()])
+  );
+
+  const valueColumns = Object.fromEntries(
+    Object.entries(valueSchema).map(([name, type]) => [name, buildColumn(name, type).notNull()])
+  );
+
+  // TODO: make sure there are no meta columns that overlap with key/value columns
+  // TODO: index meta columns?
+
+  const columns = {
+    ...keyColumns,
+    ...valueColumns,
+    ...metaColumns,
+  };
+
+  const table = pgSchema(schemaName).table(name, columns);
+
+  return table as PgTableFromSchema<TKeySchema, TValueSchema>;
+}
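
For context, a minimal sketch of calling buildTable; the address, table name, and schemas below are hypothetical:

import { buildTable } from "./buildTable";

const table = buildTable({
  address: "0x0000000000000000000000000000000000000000",
  namespace: "",
  name: "Balances",
  keySchema: { owner: "address" },
  valueSchema: { amount: "uint256" },
});
// The resulting Drizzle table lives in a Postgres schema derived from the
// checksummed address and namespace, and carries the __key, __isDeleted, and
// __lastUpdatedBlockNumber meta columns alongside the key/value columns.
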
diff --git a/packages/store-sync/src/postgres/cleanDatabase.ts b/packages/store-sync/src/postgres/cleanDatabase.ts
new file mode 100644
index 0000000000..eff82aaf50
--- /dev/null
+++ b/packages/store-sync/src/postgres/cleanDatabase.ts
@@ -0,0 +1,34 @@
+import { PgDatabase } from "drizzle-orm/pg-core";
+import { buildInternalTables } from "./buildInternalTables";
+import { getTables } from "./getTables";
+import { buildTable } from "./buildTable";
+import { getSchema } from "./getSchema";
+import { isDefined } from "@latticexyz/common/utils";
+import { debug } from "./debug";
+import { getTableName, sql } from "drizzle-orm";
+import { pgDialect } from "./pgDialect";
+
+// This intentionally just cleans up known schemas/tables/rows. We could drop the database but that's scary.
+
+export async function cleanDatabase(db: PgDatabase<any>): Promise<void> {
+  const internalTables = buildInternalTables();
+  // TODO: check if internalTables schema matches, delete if not
+
+  const tables = (await getTables(db)).map(buildTable);
+
+  const schemaNames = [...new Set(tables.map(getSchema))].filter(isDefined);
+
+  for (const schemaName of schemaNames) {
+    try {
+      debug(`dropping namespace ${schemaName} and all of its tables`);
+      await db.execute(sql.raw(pgDialect.schema.dropSchema(schemaName).ifExists().cascade().compile().sql));
+    } catch (error) {
+      debug(`failed to drop namespace ${schemaName}`, error);
+    }
+  }
+
+  for (const internalTable of Object.values(internalTables)) {
+    debug(`deleting all rows from ${getSchema(internalTable)}.${getTableName(internalTable)}`);
+    await db.delete(internalTable);
+  }
+}
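
A brief usage sketch, assuming a local connection string:

import { drizzle } from "drizzle-orm/postgres-js";
import postgres from "postgres";
import { cleanDatabase } from "./cleanDatabase";

const db = drizzle(postgres("postgres://127.0.0.1/postgres"));

// Drops every schema registered in the internal tables, then clears the
// internal tables' rows, leaving the database ready for a fresh sync.
await cleanDatabase(db);
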
diff --git a/packages/store-sync/src/postgres/columnTypes.ts b/packages/store-sync/src/postgres/columnTypes.ts
new file mode 100644
index 0000000000..97fd352080
--- /dev/null
+++ b/packages/store-sync/src/postgres/columnTypes.ts
@@ -0,0 +1,82 @@
+import { customType, PgCustomColumnBuilder } from "drizzle-orm/pg-core";
+import { ColumnBuilderBaseConfig } from "drizzle-orm";
+import superjson from "superjson";
+import { Address, ByteArray, bytesToHex, getAddress, Hex, hexToBytes } from "viem";
+
+export const asJson = <TData>(
+  name: string
+): PgCustomColumnBuilder<ColumnBuilderBaseConfig & { data: TData; driverParam: string }> =>
+  customType<{ data: TData; driverData: string }>({
+    dataType() {
+      // TODO: move to json column type? if we do, we'll probably want to choose something other than superjson since it adds one level of depth (json/meta keys)
+      return "text";
+    },
+    toDriver(data: TData): string {
+      return superjson.stringify(data);
+    },
+    fromDriver(driverData: string): TData {
+      return superjson.parse(driverData);
+    },
+  })(name);
+
+export const asNumber = (
+  name: string,
+  columnType: string
+): PgCustomColumnBuilder<ColumnBuilderBaseConfig & { data: number; driverParam: string }> =>
+  customType<{ data: number; driverData: string }>({
+    dataType() {
+      return columnType;
+    },
+    toDriver(data: number): string {
+      return String(data);
+    },
+    fromDriver(driverData: string): number {
+      return Number(driverData);
+    },
+  })(name);
+
+export const asBigInt = (
+  name: string,
+  columnType: string
+): PgCustomColumnBuilder<ColumnBuilderBaseConfig & { data: bigint; driverParam: string }> =>
+  customType<{ data: bigint; driverData: string }>({
+    dataType() {
+      return columnType;
+    },
+    toDriver(data: bigint): string {
+      return String(data);
+    },
+    fromDriver(driverData: string): bigint {
+      return BigInt(driverData);
+    },
+  })(name);
+
+export const asHex = (
+  name: string
+): PgCustomColumnBuilder<ColumnBuilderBaseConfig & { data: Hex; driverParam: ByteArray }> =>
+  customType<{ data: Hex; driverData: ByteArray }>({
+    dataType() {
+      return "bytea";
+    },
+    toDriver(data: Hex): ByteArray {
+      return hexToBytes(data);
+    },
+    fromDriver(driverData: ByteArray): Hex {
+      return bytesToHex(driverData);
+    },
+  })(name);
+
+export const asAddress = (
+  name: string
+): PgCustomColumnBuilder<ColumnBuilderBaseConfig & { data: Address; driverParam: ByteArray }> =>
+  customType<{ data: Address; driverData: ByteArray }>({
+    dataType() {
+      return "bytea";
+    },
+    toDriver(data: Address): ByteArray {
+      return hexToBytes(data);
+    },
+    fromDriver(driverData: ByteArray): Address {
+      return getAddress(bytesToHex(driverData));
+    },
+  })(name);
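
To illustrate the driver round-trips these custom types perform (the literal values are arbitrary):

import { bytesToHex, hexToBytes } from "viem";

// asBigInt: bigints are stored as strings (e.g. in a "numeric" column) and
// parsed back, so uint256 values survive the round-trip losslessly.
const storedBigInt = String(12345678901234567890n); // toDriver
const loadedBigInt = BigInt(storedBigInt); // fromDriver

// asHex: 0x-prefixed hex is stored as raw bytea and converted back on read.
const storedBytes = hexToBytes("0xdeadbeef"); // toDriver
const loadedHex = bytesToHex(storedBytes); // fromDriver
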
diff --git a/packages/store-sync/src/postgres/debug.ts b/packages/store-sync/src/postgres/debug.ts
new file mode 100644
index 0000000000..306f33b44b
--- /dev/null
+++ b/packages/store-sync/src/postgres/debug.ts
@@ -0,0 +1,3 @@
+import { debug as parentDebug } from "../debug";
+
+export const debug = parentDebug.extend("postgres");
diff --git a/packages/store-sync/src/postgres/getSchema.test.ts b/packages/store-sync/src/postgres/getSchema.test.ts
new file mode 100644
index 0000000000..c0c9a045a5
--- /dev/null
+++ b/packages/store-sync/src/postgres/getSchema.test.ts
@@ -0,0 +1,12 @@
+import { describe, it, expect } from "vitest";
+import { getSchema } from "./getSchema";
+import { pgTable, pgSchema } from "drizzle-orm/pg-core";
+
+// Test to make sure getSchema matches drizzle internals. May need to update getSchema if these tests start failing. Hopefully by then, drizzle will have exposed their own getSchema method.
+
+describe("getSchema", () => {
+  it("should return schema if set", async () => {
+    expect(getSchema(pgTable("no schema", {}))).toBeUndefined();
+    expect(getSchema(pgSchema("some schema").table("with schema", {}))).toBe("some schema");
+  });
+});
diff --git a/packages/store-sync/src/postgres/getSchema.ts b/packages/store-sync/src/postgres/getSchema.ts
new file mode 100644
index 0000000000..240046fcd4
--- /dev/null
+++ b/packages/store-sync/src/postgres/getSchema.ts
@@ -0,0 +1,6 @@
+import { PgTable } from "drizzle-orm/pg-core";
+
+// TODO: PR to drizzle to expose `getSchema` like `getTableName`
+export function getSchema(table: PgTable<any>): string | undefined {
+  return (table as any)[Symbol.for("drizzle:Schema")];
+}
diff --git a/packages/store-sync/src/postgres/getTableKey.ts b/packages/store-sync/src/postgres/getTableKey.ts
new file mode 100644
index 0000000000..423830ce23
--- /dev/null
+++ b/packages/store-sync/src/postgres/getTableKey.ts
@@ -0,0 +1,6 @@
+import { getAddress } from "viem";
+import { Table } from "../common";
+
+export function getTableKey(table: Pick<Table, "address" | "namespace" | "name">): string {
+  return `${getAddress(table.address)}:${table.namespace}:${table.name}`;
+}
diff --git a/packages/store-sync/src/postgres/getTables.ts b/packages/store-sync/src/postgres/getTables.ts
new file mode 100644
index 0000000000..c88919c54d
--- /dev/null
+++ b/packages/store-sync/src/postgres/getTables.ts
@@ -0,0 +1,24 @@
+import { PgDatabase } from "drizzle-orm/pg-core";
+import { inArray } from "drizzle-orm";
+import { Table } from "../common";
+import { buildInternalTables } from "./buildInternalTables";
+import { tableIdToHex } from "@latticexyz/common";
+
+export async function getTables(db: PgDatabase<any>, keys: string[] = []): Promise<Table[]> {
+  const internalTables = buildInternalTables();
+
+  const tables = await db
+    .select()
+    .from(internalTables.tables)
+    .where(keys.length ? inArray(internalTables.tables.key, [...new Set(keys)]) : undefined);
+
+  return tables.map((table) => ({
+    address: table.address,
+    tableId: tableIdToHex(table.namespace, table.name),
+    namespace: table.namespace,
+    name: table.name,
+    keySchema: table.keySchema,
+    valueSchema: table.valueSchema,
+    lastUpdatedBlockNumber: table.lastUpdatedBlockNumber,
+  }));
+}
diff --git a/packages/store-sync/src/postgres/index.ts b/packages/store-sync/src/postgres/index.ts
new file mode 100644
index 0000000000..d743dafd83
--- /dev/null
+++ b/packages/store-sync/src/postgres/index.ts
@@ -0,0 +1,8 @@
+export * from "./buildTable";
+export * from "./cleanDatabase";
+export * from "./getTables";
+export * from "./buildInternalTables";
+export * from "./schemaVersion";
+export * from "./postgresStorage";
+export * from "./setupTables";
+export * from "./syncToPostgres";
diff --git a/packages/store-sync/src/postgres/pgDialect.ts b/packages/store-sync/src/postgres/pgDialect.ts
new file mode 100644
index 0000000000..f5f42a526b
--- /dev/null
+++ b/packages/store-sync/src/postgres/pgDialect.ts
@@ -0,0 +1,10 @@
+import { DummyDriver, Kysely, PostgresAdapter, PostgresIntrospector, PostgresQueryCompiler } from "kysely";
+
+export const pgDialect = new Kysely({
+  dialect: {
+    createAdapter: (): PostgresAdapter => new PostgresAdapter(),
+    createDriver: (): DummyDriver => new DummyDriver(),
+    createIntrospector: (db: Kysely<unknown>): PostgresIntrospector => new PostgresIntrospector(db),
+    createQueryCompiler: (): PostgresQueryCompiler => new PostgresQueryCompiler(),
+  },
+});
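
Because this dialect is backed by Kysely's DummyDriver, it can only compile SQL, never execute it; execution happens elsewhere via drizzle's db.execute(sql.raw(...)). A small sketch:

import { pgDialect } from "./pgDialect";

// Compiles a statement to SQL text without needing a database connection.
const { sql } = pgDialect.schema.createSchema("some_schema").ifNotExists().compile();
// sql is roughly: create schema if not exists "some_schema"
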
diff --git a/packages/store-sync/src/postgres/postgresStorage.test.ts b/packages/store-sync/src/postgres/postgresStorage.test.ts
new file mode 100644
index 0000000000..e3165c7bd7
--- /dev/null
+++ b/packages/store-sync/src/postgres/postgresStorage.test.ts
@@ -0,0 +1,136 @@
+import { beforeEach, describe, expect, it, vi } from "vitest";
+import { DefaultLogger } from "drizzle-orm";
+import { drizzle } from "drizzle-orm/postgres-js";
+import postgres from "postgres";
+import { createPublicClient, http } from "viem";
+import { foundry } from "viem/chains";
+import { blockLogsToStorage } from "../blockLogsToStorage";
+import * as transformSchemaNameExports from "./transformSchemaName";
+import { getTables } from "./getTables";
+import { PostgresStorageAdapter, postgresStorage } from "./postgresStorage";
+import { buildTable } from "./buildTable";
+
+vi.spyOn(transformSchemaNameExports, "transformSchemaName").mockImplementation(
+  (schemaName) => `${process.pid}_${process.env.VITEST_POOL_ID}__${schemaName}`
+);
+
+describe("postgresStorage", async () => {
+  const db = drizzle(postgres(process.env.DATABASE_URL!), {
+    logger: new DefaultLogger(),
+  });
+
+  const publicClient = createPublicClient({
+    chain: foundry,
+    transport: http(),
+  });
+
+  let storageAdapter: PostgresStorageAdapter;
+
+  beforeEach(async () => {
+    storageAdapter = await postgresStorage({ database: db, publicClient });
+    return storageAdapter.cleanUp;
+  });
+
+  it("should create tables and data from block log", async () => {
+    await blockLogsToStorage(storageAdapter)({
+      blockNumber: 5448n,
+      logs: [
+        {
+          address: "0x5fbdb2315678afecb367f032d93f642f64180aa3",
+          topics: ["0xd01f9f1368f831528fc9fe6442366b2b7d957fbfff3bcf7c24d9ab5fe51f8c46"],
+          data: "0x000000000000000000000000000000005265736f7572636554797065000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000496e76656e746f72790000000000000000000000000000000000000000000000000000000000000000000000000000010200000000000000000000000000000000000000000000000000000000000000",
+          blockHash: "0x4ad3752c86f900332e0d2d8903480e7206747d233586574d16f006eebdb5138b",
+          blockNumber: 2n,
+          transactionHash: "0xaa54bf18053cce5d4d2906538a60cb1d9958cc3c10c34b5f9fdc92fe6a6abab4",
+          transactionIndex: 16,
+          logIndex: 54,
+          removed: false,
+          args: {
+            table: "0x000000000000000000000000000000005265736f757263655479706500000000",
+            key: ["0x00000000000000000000000000000000496e76656e746f727900000000000000"],
+            schemaIndex: 0,
+            data: "0x02",
+          },
+          eventName: "StoreSetField",
+        },
+        {
+          address: "0x5fbdb2315678afecb367f032d93f642f64180aa3",
+          topics: ["0x912af873e852235aae78a1d25ae9bb28b616a67c36898c53a14fd8184504ee32"],
+          data: "0x6d756473746f726500000000000000005461626c657300000000000000000000000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000496e76656e746f7279000000000000000000000000000000000000000000000000000000000000000000000000000260001c030061030300000000000000000000000000000000000000000000000000000401000300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a000000001600000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000056f776e657200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000046974656d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000b6974656d56617269616e740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000006616d6f756e740000000000000000000000000000000000000000000000000000",
+          blockHash: "0x4ad3752c86f900332e0d2d8903480e7206747d233586574d16f006eebdb5138b",
+          blockNumber: 2n,
+          transactionHash: "0xaa54bf18053cce5d4d2906538a60cb1d9958cc3c10c34b5f9fdc92fe6a6abab4",
+          transactionIndex: 16,
+          logIndex: 55,
+          removed: false,
+          args: {
+            table: "0x6d756473746f726500000000000000005461626c657300000000000000000000",
+            key: ["0x00000000000000000000000000000000496e76656e746f727900000000000000"],
+            data: "0x001c030061030300000000000000000000000000000000000000000000000000000401000300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a000000001600000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000056f776e657200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000046974656d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000b6974656d56617269616e740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000006616d6f756e740000000000000000000000000000000000000000000000000000",
+          },
+          eventName: "StoreSetRecord",
+        },
+      ],
+    });
+
+    expect(await db.select().from(storageAdapter.internalTables.chain)).toMatchInlineSnapshot(`
+      [
+        {
+          "chainId": 31337,
+          "lastError": null,
+          "lastUpdatedBlockNumber": 5448n,
+          "schemaVersion": 1,
+        },
+      ]
+    `);
+
+    expect(await db.select().from(storageAdapter.internalTables.tables)).toMatchInlineSnapshot(`
+      [
+        {
+          "address": "0x5FbDB2315678afecb367f032d93F642f64180aa3",
+          "key": "0x5FbDB2315678afecb367f032d93F642f64180aa3::Inventory",
+          "keySchema": {
+            "item": "uint32",
+            "itemVariant": "uint32",
+            "owner": "address",
+          },
+          "lastError": null,
+          "lastUpdatedBlockNumber": 5448n,
+          "name": "Inventory",
+          "namespace": "",
+          "schemaVersion": 1,
+          "tableId": "0x00000000000000000000000000000000496e76656e746f727900000000000000",
+          "valueSchema": {
+            "amount": "uint32",
+          },
+        },
+      ]
+    `);
+
+    const tables = await getTables(db, []);
+    expect(tables).toMatchInlineSnapshot(`
+      [
+        {
+          "address": "0x5FbDB2315678afecb367f032d93F642f64180aa3",
+          "keySchema": {
+            "item": "uint32",
+            "itemVariant": "uint32",
+            "owner": "address",
+          },
+          "lastUpdatedBlockNumber": 5448n,
+          "name": "Inventory",
+          "namespace": "",
+          "tableId": "0x00000000000000000000000000000000496e76656e746f727900000000000000",
+          "valueSchema": {
+            "amount": "uint32",
+          },
+        },
+      ]
+    `);
+
+    const sqlTable = buildTable(tables[0]);
+    expect(await db.select().from(sqlTable)).toMatchInlineSnapshot("[]");
+
+    await storageAdapter.cleanUp();
+  });
+});
diff --git a/packages/store-sync/src/postgres/postgresStorage.ts b/packages/store-sync/src/postgres/postgresStorage.ts
new file mode 100644
index 0000000000..1937a9796a
--- /dev/null
+++ b/packages/store-sync/src/postgres/postgresStorage.ts
@@ -0,0 +1,192 @@
+import { PublicClient, concatHex, encodeAbiParameters } from "viem";
+import { PgDatabase, QueryResultHKT } from "drizzle-orm/pg-core";
+import { eq, inArray } from "drizzle-orm";
+import { buildTable } from "./buildTable";
+import { schemaToDefaults } from "../schemaToDefaults";
+import { StoreConfig } from "@latticexyz/store";
+import { debug } from "./debug";
+import { buildInternalTables } from "./buildInternalTables";
+import { getTables } from "./getTables";
+import { schemaVersion } from "./schemaVersion";
+import { tableIdToHex } from "@latticexyz/common";
+import { setupTables } from "./setupTables";
+import { getTableKey } from "./getTableKey";
+import { StorageAdapter } from "../common";
+
+// Currently assumes one DB per chain ID
+
+export type PostgresStorageAdapter<TConfig extends StoreConfig = StoreConfig> = StorageAdapter<TConfig> & {
+  internalTables: ReturnType<typeof buildInternalTables>;
+  cleanUp: () => Promise<void>;
+};
+
+export async function postgresStorage<TConfig extends StoreConfig = StoreConfig>({
+  database,
+  publicClient,
+}: {
+  database: PgDatabase<QueryResultHKT>;
+  publicClient: PublicClient;
+  config?: TConfig;
+}): Promise<PostgresStorageAdapter<TConfig>> {
+  const cleanUp: (() => Promise<void>)[] = [];
+
+  const chainId = publicClient.chain?.id ?? (await publicClient.getChainId());
+
+  const internalTables = buildInternalTables();
+  cleanUp.push(await setupTables(database, Object.values(internalTables)));
+
+  const storageAdapter = {
+    async registerTables({ blockNumber, tables }) {
+      const sqlTables = tables.map((table) =>
+        buildTable({
+          address: table.address,
+          namespace: table.namespace,
+          name: table.name,
+          keySchema: table.keySchema,
+          valueSchema: table.valueSchema,
+        })
+      );
+
+      cleanUp.push(await setupTables(database, sqlTables));
+
+      await database.transaction(async (tx) => {
+        for (const table of tables) {
+          await tx
+            .insert(internalTables.tables)
+            .values({
+              schemaVersion,
+              key: getTableKey(table),
+              address: table.address,
+              tableId: tableIdToHex(table.namespace, table.name),
+              namespace: table.namespace,
+              name: table.name,
+              keySchema: table.keySchema,
+              valueSchema: table.valueSchema,
+              lastUpdatedBlockNumber: blockNumber,
+            })
+            .onConflictDoNothing()
+            .execute();
+        }
+      });
+    },
+    async getTables({ tables }) {
+      // TODO: fetch any missing schemas from RPC
+      // TODO: cache schemas in memory?
+      return getTables(database, tables.map(getTableKey));
+    },
+    async storeOperations({ blockNumber, operations }) {
+      // This is currently parallelized per world (each world has its own database).
+      // This may need to change if we decide to put multiple worlds into one DB (e.g. a namespace per world, but all under one DB).
+      // If so, we'll probably want to wrap the entire block worth of operations in a transaction.
+
+      const tables = await getTables(database, operations.map(getTableKey));
+
+      await database.transaction(async (tx) => {
+        const tablesWithOperations = tables.filter((table) =>
+          operations.some((op) => getTableKey(op) === getTableKey(table))
+        );
+        if (tablesWithOperations.length) {
+          await tx
+            .update(internalTables.tables)
+            .set({ lastUpdatedBlockNumber: blockNumber })
+            .where(inArray(internalTables.tables.key, [...new Set(tablesWithOperations.map(getTableKey))]))
+            .execute();
+        }
+
+        for (const operation of operations) {
+          const table = tables.find((table) => getTableKey(table) === getTableKey(operation));
+          if (!table) {
+            debug(`table ${operation.namespace}:${operation.name} not found, skipping operation`, operation);
+            continue;
+          }
+
+          const sqlTable = buildTable(table);
+          const key = concatHex(
+            Object.entries(table.keySchema).map(([keyName, type]) =>
+              encodeAbiParameters([{ type }], [operation.key[keyName]])
+            )
+          );
+
+          if (operation.type === "SetRecord") {
+            debug("SetRecord", operation);
+            await tx
+              .insert(sqlTable)
+              .values({
+                __key: key,
+                __lastUpdatedBlockNumber: blockNumber,
+                __isDeleted: false,
+                ...operation.key,
+                ...operation.value,
+              })
+              .onConflictDoUpdate({
+                target: sqlTable.__key,
+                set: {
+                  __lastUpdatedBlockNumber: blockNumber,
+                  __isDeleted: false,
+                  ...operation.value,
+                },
+              })
+              .execute();
+          } else if (operation.type === "SetField") {
+            debug("SetField", operation);
+            await tx
+              .insert(sqlTable)
+              .values({
+                __key: key,
+                __lastUpdatedBlockNumber: blockNumber,
+                __isDeleted: false,
+                ...operation.key,
+                ...schemaToDefaults(table.valueSchema),
+                [operation.fieldName]: operation.fieldValue,
+              })
+              .onConflictDoUpdate({
+                target: sqlTable.__key,
+                set: {
+                  __lastUpdatedBlockNumber: blockNumber,
+                  __isDeleted: false,
+                  [operation.fieldName]: operation.fieldValue,
+                },
+              })
+              .execute();
+          } else if (operation.type === "DeleteRecord") {
+            // TODO: should we upsert so we at least have a DB record of when a thing was created/deleted within the same block?
+            debug("DeleteRecord", operation);
+            await tx
+              .update(sqlTable)
+              .set({
+                __lastUpdatedBlockNumber: blockNumber,
+                __isDeleted: true,
+              })
+              .where(eq(sqlTable.__key, key))
+              .execute();
+          }
+        }
+
+        await tx
+          .insert(internalTables.chain)
+          .values({
+            schemaVersion,
+            chainId,
+            lastUpdatedBlockNumber: blockNumber,
+          })
+          .onConflictDoUpdate({
+            target: [internalTables.chain.schemaVersion, internalTables.chain.chainId],
+            set: {
+              lastUpdatedBlockNumber: blockNumber,
+            },
+          })
+          .execute();
+      });
+    },
+  } as StorageAdapter<TConfig>;
+
+  return {
+    ...storageAdapter,
+    internalTables,
+    cleanUp: async (): Promise<void> => {
+      for (const fn of cleanUp) {
+        await fn();
+      }
+    },
+  };
+}
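
As a worked example of how storeOperations derives the __key primary key (the key schema and values here are hypothetical): each key field is ABI-encoded to 32 bytes with encodeAbiParameters, then concatenated in key-schema order:

import { concatHex, encodeAbiParameters } from "viem";

// For a key schema of { owner: "address", item: "uint32" }:
const __key = concatHex([
  encodeAbiParameters([{ type: "address" }], ["0x5FbDB2315678afecb367f032d93F642f64180aa3"]),
  encodeAbiParameters([{ type: "uint32" }], [42]),
]);
// __key is 64 bytes of hex (32 bytes per key field), stored as bytea via asHex.
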
diff --git a/packages/store-sync/src/postgres/schemaVersion.ts b/packages/store-sync/src/postgres/schemaVersion.ts
new file mode 100644
index 0000000000..397579c6b3
--- /dev/null
+++ b/packages/store-sync/src/postgres/schemaVersion.ts
@@ -0,0 +1,4 @@
+// When this is incremented, it forces all indexers to reindex from scratch the next time they start up.
+// Only use this when the schemas change, until we get proper schema migrations.
+// TODO: instead of this, detect schema changes and drop/recreate tables as needed
+export const schemaVersion = 1;
diff --git a/packages/store-sync/src/postgres/setupTables.test.ts b/packages/store-sync/src/postgres/setupTables.test.ts
new file mode 100644
index 0000000000..93ab5b7dc8
--- /dev/null
+++ b/packages/store-sync/src/postgres/setupTables.test.ts
@@ -0,0 +1,46 @@
+import { beforeEach, describe, expect, it, vi } from "vitest";
+import { buildInternalTables } from "./buildInternalTables";
+import { PgDatabase, QueryResultHKT } from "drizzle-orm/pg-core";
+import { DefaultLogger } from "drizzle-orm";
+import { drizzle } from "drizzle-orm/postgres-js";
+import postgres from "postgres";
+import { setupTables } from "./setupTables";
+import * as transformSchemaNameExports from "./transformSchemaName";
+
+vi.spyOn(transformSchemaNameExports, "transformSchemaName").mockImplementation(
+  (schemaName) => `${process.pid}_${process.env.VITEST_POOL_ID}__${schemaName}`
+);
+
+describe("setupTables", async () => {
+  let db: PgDatabase<QueryResultHKT>;
+  const internalTables = buildInternalTables();
+
+  beforeEach(async () => {
+    db = drizzle(postgres(process.env.DATABASE_URL!), {
+      logger: new DefaultLogger(),
+    });
+  });
+
+  describe("before running", () => {
+    it("should be missing schemas", async () => {
+      await expect(db.select().from(internalTables.chain)).rejects.toThrow(
+        /relation "\w+mud_internal.chain" does not exist/
+      );
+      await expect(db.select().from(internalTables.tables)).rejects.toThrow(
+        /relation "\w+mud_internal.tables" does not exist/
+      );
+    });
+  });
+
+  describe("after running", () => {
+    beforeEach(async () => {
+      const cleanUp = await setupTables(db, Object.values(internalTables));
+      return cleanUp;
+    });
+
+    it("should have schemas", async () => {
+      expect(await db.select().from(internalTables.chain)).toMatchInlineSnapshot("[]");
+      expect(await db.select().from(internalTables.tables)).toMatchInlineSnapshot("[]");
+    });
+  });
+});
diff --git a/packages/store-sync/src/postgres/setupTables.ts b/packages/store-sync/src/postgres/setupTables.ts
new file mode 100644
index 0000000000..1ddcedaa6f
--- /dev/null
+++ b/packages/store-sync/src/postgres/setupTables.ts
@@ -0,0 +1,65 @@
+import { AnyPgColumn, PgTableWithColumns, PgDatabase } from "drizzle-orm/pg-core";
+import { getTableColumns, getTableName, sql } from "drizzle-orm";
+import { ColumnDataType } from "kysely";
+import { getSchema } from "./getSchema";
+import { isDefined } from "@latticexyz/common/utils";
+import { debug } from "./debug";
+import { pgDialect } from "./pgDialect";
+
+export async function setupTables(
+  db: PgDatabase<any>,
+  tables: PgTableWithColumns<any>[]
+): Promise<() => Promise<void>> {
+  // TODO: add table to internal tables here
+  // TODO: look up table schema and check if it matches expected schema, drop if not
+
+  const schemaNames = [...new Set(tables.map(getSchema).filter(isDefined))];
+
+  await db.transaction(async (tx) => {
+    for (const schemaName of schemaNames) {
+      debug(`creating namespace ${schemaName}`);
+      await tx.execute(sql.raw(pgDialect.schema.createSchema(schemaName).ifNotExists().compile().sql));
+    }
+
+    for (const table of tables) {
+      const schemaName = getSchema(table);
+      const scopedDb = schemaName ? pgDialect.withSchema(schemaName) : pgDialect;
+
+      const tableName = getTableName(table);
+
+      let query = scopedDb.schema.createTable(tableName).ifNotExists();
+
+      const columns = Object.values(getTableColumns(table)) as AnyPgColumn[];
+      for (const column of columns) {
+        query = query.addColumn(column.name, column.getSQLType() as ColumnDataType, (col) => {
+          if (column.notNull) {
+            col = col.notNull();
+          }
+          if (column.hasDefault && typeof column.default !== "undefined") {
+            col = col.defaultTo(column.default);
+          }
+          return col;
+        });
+      }
+
+      const primaryKeys = columns.filter((column) => column.primary).map((column) => column.name);
+      if (primaryKeys.length) {
+        query = query.addPrimaryKeyConstraint(`${tableName}__pk`, primaryKeys as any);
+      }
+
+      debug(`creating table ${tableName} in namespace ${schemaName}`);
+      await tx.execute(sql.raw(query.compile().sql));
+    }
+  });
+
+  return async () => {
+    for (const schemaName of schemaNames) {
+      try {
+        debug(`dropping namespace ${schemaName} and all of its tables`);
+        await db.execute(sql.raw(pgDialect.schema.dropSchema(schemaName).ifExists().cascade().compile().sql));
+      } catch (error) {
+        debug(`failed to drop namespace ${schemaName}`, error);
+      }
+    }
+  };
+}
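
A sketch of the create/cleanup lifecycle setupTables provides, mirroring how the tests use it (connection string assumed):

import { drizzle } from "drizzle-orm/postgres-js";
import postgres from "postgres";
import { buildInternalTables } from "./buildInternalTables";
import { setupTables } from "./setupTables";

const db = drizzle(postgres("postgres://127.0.0.1/postgres"));
const internalTables = buildInternalTables();

// Creates any missing schemas/tables, and returns a function that drops the
// schemas again so tests leave no residue behind.
const cleanUp = await setupTables(db, Object.values(internalTables));
try {
  // ... read/write data ...
} finally {
  await cleanUp();
}
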
diff --git a/packages/store-sync/src/postgres/syncToPostgres.ts b/packages/store-sync/src/postgres/syncToPostgres.ts
new file mode 100644
index 0000000000..be649f4e78
--- /dev/null
+++ b/packages/store-sync/src/postgres/syncToPostgres.ts
@@ -0,0 +1,58 @@
+import { StoreConfig } from "@latticexyz/store";
+import { PgDatabase } from "drizzle-orm/pg-core";
+import { SyncOptions, SyncResult } from "../common";
+import { postgresStorage } from "./postgresStorage";
+import { createStoreSync } from "../createStoreSync";
+
+type SyncToPostgresOptions<TConfig extends StoreConfig = StoreConfig> = SyncOptions<TConfig> & {
+  /**
+   * [Postgres database object from Drizzle][0].
+   *
+   * [0]: https://orm.drizzle.team/docs/installation-and-db-connection/postgresql/postgresjs
+   */
+  database: PgDatabase<any>;
+  startSync?: boolean;
+};
+
+type SyncToPostgresResult<TConfig extends StoreConfig = StoreConfig> = SyncResult<TConfig> & {
+  stopSync: () => void;
+};
+
+/**
+ * Creates a store sync that processes blockchain events and stores them in a Postgres database.
+ *
+ * @param {SyncToPostgresOptions} options See `SyncToPostgresOptions`.
+ * @returns A sync result that includes a `stopSync` method to unsubscribe from the block stream, effectively stopping the sync.
+ */
+export async function syncToPostgres<TConfig extends StoreConfig = StoreConfig>({
+  config,
+  database,
+  publicClient,
+  address,
+  startBlock,
+  maxBlockRange,
+  indexerUrl,
+  initialState,
+  startSync = true,
+}: SyncToPostgresOptions<TConfig>): Promise<SyncToPostgresResult<TConfig>> {
+  const storeSync = await createStoreSync({
+    storageAdapter: await postgresStorage({ database, publicClient, config }),
+    config,
+    address,
+    publicClient,
+    startBlock,
+    maxBlockRange,
+    indexerUrl,
+    initialState,
+  });
+
+  const sub = startSync ? storeSync.blockStorageOperations$.subscribe() : null;
+  const stopSync = (): void => {
+    sub?.unsubscribe();
+  };
+
+  return {
+    ...storeSync,
+    stopSync,
+  };
+}
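
A minimal end-to-end sketch of wiring syncToPostgres up; the RPC transport and connection string are assumptions, and optional settings like address and startBlock are omitted:

import { createPublicClient, http } from "viem";
import { foundry } from "viem/chains";
import { drizzle } from "drizzle-orm/postgres-js";
import postgres from "postgres";
import { syncToPostgres } from "./syncToPostgres";

const publicClient = createPublicClient({ chain: foundry, transport: http() });
const database = drizzle(postgres("postgres://127.0.0.1/postgres"));

const { stopSync } = await syncToPostgres({ database, publicClient });
// ... later, stop following the block stream:
stopSync();
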
diff --git a/packages/store-sync/src/postgres/transformSchemaName.ts b/packages/store-sync/src/postgres/transformSchemaName.ts
new file mode 100644
index 0000000000..63693eafbb
--- /dev/null
+++ b/packages/store-sync/src/postgres/transformSchemaName.ts
@@ -0,0 +1,4 @@
+// This is overridden in tests so that parallel test runs against the same database use distinct schema names
+export function transformSchemaName(schemaName: string): string {
+  return schemaName;
+}
diff --git a/packages/store-sync/src/recs/recsStorage.ts b/packages/store-sync/src/recs/recsStorage.ts
index 71cee4dd4e..5881ed2bf6 100644
--- a/packages/store-sync/src/recs/recsStorage.ts
+++ b/packages/store-sync/src/recs/recsStorage.ts
@@ -1,4 +1,3 @@
-import { BlockLogsToStorageOptions } from "../blockLogsToStorage";
 import { StoreConfig } from "@latticexyz/store";
 import { debug } from "./debug";
 import {
@@ -17,6 +16,7 @@ import { getTableEntity } from "./getTableEntity";
 import { StoreComponentMetadata } from "./common";
 import { tableIdToHex } from "@latticexyz/common";
 import { encodeEntity } from "./encodeEntity";
+import { StorageAdapter } from "../common";
 
 export function recsStorage<TConfig extends StoreConfig = StoreConfig>({
   components,
@@ -24,7 +24,7 @@ export function recsStorage<TConfig extends StoreConfig = StoreConfig>({
   components: ReturnType<typeof defineInternalComponents> &
     Record<string, RecsComponent<RecsSchema, StoreComponentMetadata>>;
   config?: TConfig;
-}): BlockLogsToStorageOptions<TConfig> {
+}): StorageAdapter<TConfig> {
   // TODO: do we need to store block number?
 
   const componentsByTableId = Object.fromEntries(
@@ -87,5 +87,5 @@ export function recsStorage<TConfig extends StoreConfig = StoreConfig>({
         }
       }
     },
-  } as BlockLogsToStorageOptions<TConfig>;
+  } as StorageAdapter<TConfig>;
 }
diff --git a/packages/store-sync/src/sqlite/internalTables.ts b/packages/store-sync/src/sqlite/internalTables.ts
index 6920469345..057336839a 100644
--- a/packages/store-sync/src/sqlite/internalTables.ts
+++ b/packages/store-sync/src/sqlite/internalTables.ts
@@ -4,7 +4,7 @@ import { DynamicAbiType, StaticAbiType } from "@latticexyz/schema-type";
 
 export const chainState = sqliteTable("__chainState", {
   schemaVersion: integer("schema_version").notNull().primaryKey(),
-  chainId: integer("chainId").notNull().primaryKey(),
+  chainId: integer("chain_id").notNull().primaryKey(),
   lastUpdatedBlockNumber: blob("last_updated_block_number", { mode: "bigint" }),
   // TODO: last block hash?
   lastError: text("last_error"),
diff --git a/packages/store-sync/src/sqlite/sqliteStorage.ts b/packages/store-sync/src/sqlite/sqliteStorage.ts
index f5f32a2989..fb2073b11c 100644
--- a/packages/store-sync/src/sqlite/sqliteStorage.ts
+++ b/packages/store-sync/src/sqlite/sqliteStorage.ts
@@ -5,13 +5,13 @@ import { sqliteTableToSql } from "./sqliteTableToSql";
 import { createSqliteTable } from "./createSqliteTable";
 import { schemaToDefaults } from "../schemaToDefaults";
 import { TableId } from "@latticexyz/common/deprecated";
-import { BlockLogsToStorageOptions } from "../blockLogsToStorage";
 import { StoreConfig } from "@latticexyz/store";
 import { debug } from "./debug";
 import { getTableName } from "./getTableName";
 import { chainState, mudStoreTables } from "./internalTables";
 import { getTables } from "./getTables";
 import { schemaVersion } from "./schemaVersion";
+import { StorageAdapter } from "../common";
 
 export async function sqliteStorage<TConfig extends StoreConfig = StoreConfig>({
   database,
@@ -20,7 +20,7 @@ export async function sqliteStorage<TConfig extends StoreConfig = StoreConfig>({
   database: BaseSQLiteDatabase<"sync", void>;
   publicClient: PublicClient;
   config?: TConfig;
-}): Promise<BlockLogsToStorageOptions<TConfig>> {
+}): Promise<StorageAdapter<TConfig>> {
   const chainId = publicClient.chain?.id ?? (await publicClient.getChainId());
 
   // TODO: should these run lazily before first `registerTables`?
@@ -185,5 +185,5 @@ export async function sqliteStorage<TConfig extends StoreConfig = StoreConfig>({
           .run();
       });
     },
-  } as BlockLogsToStorageOptions<TConfig>;
+  } as StorageAdapter<TConfig>;
 }
diff --git a/packages/store-sync/src/sqlite/sqliteTableToSql.ts b/packages/store-sync/src/sqlite/sqliteTableToSql.ts
index a23b311e40..7ba7f085f7 100644
--- a/packages/store-sync/src/sqlite/sqliteTableToSql.ts
+++ b/packages/store-sync/src/sqlite/sqliteTableToSql.ts
@@ -1,10 +1,14 @@
 import { AnySQLiteColumn, SQLiteTableWithColumns } from "drizzle-orm/sqlite-core";
-import { ColumnDataType, Kysely, SqliteDialect } from "kysely";
-import SqliteDatabase from "better-sqlite3";
+import { ColumnDataType, DummyDriver, Kysely, SqliteAdapter, SqliteIntrospector, SqliteQueryCompiler } from "kysely";
 import { getTableColumns, getTableName } from "drizzle-orm";
 
 const db = new Kysely<any>({
-  dialect: new SqliteDialect({ database: new SqliteDatabase(":memory:") }),
+  dialect: {
+    createAdapter: (): SqliteAdapter => new SqliteAdapter(),
+    createDriver: (): DummyDriver => new DummyDriver(),
+    createIntrospector: (db: Kysely<unknown>): SqliteIntrospector => new SqliteIntrospector(db),
+    createQueryCompiler: (): SqliteQueryCompiler => new SqliteQueryCompiler(),
+  },
 });
 
 export function sqliteTableToSql(table: SQLiteTableWithColumns<any>): string {
diff --git a/packages/store-sync/src/trpc-indexer/common.ts b/packages/store-sync/src/trpc-indexer/common.ts
index 7089b74b0b..c9ca712880 100644
--- a/packages/store-sync/src/trpc-indexer/common.ts
+++ b/packages/store-sync/src/trpc-indexer/common.ts
@@ -1,7 +1,7 @@
 import { Hex } from "viem";
 import { TableWithRecords } from "../common";
 
-export type StorageAdapter = {
+export type QueryAdapter = {
   findAll: (
     chainId: number,
     address?: Hex
diff --git a/packages/store-sync/src/trpc-indexer/createAppRouter.ts b/packages/store-sync/src/trpc-indexer/createAppRouter.ts
index 2e91c0c3cf..c965dac7c5 100644
--- a/packages/store-sync/src/trpc-indexer/createAppRouter.ts
+++ b/packages/store-sync/src/trpc-indexer/createAppRouter.ts
@@ -1,12 +1,12 @@
 import { z } from "zod";
-import { StorageAdapter } from "./common";
+import { QueryAdapter } from "./common";
 import { isHex } from "viem";
 import { initTRPC } from "@trpc/server";
 import superjson from "superjson";
 
 // eslint-disable-next-line @typescript-eslint/explicit-function-return-type
 export function createAppRouter() {
-  const t = initTRPC.context<{ storageAdapter: StorageAdapter }>().create({
+  const t = initTRPC.context<{ queryAdapter: QueryAdapter }>().create({
     transformer: superjson,
   });
 
@@ -18,10 +18,10 @@ export function createAppRouter() {
           address: z.string().refine(isHex).optional(),
         })
       )
-      .query(async (opts): ReturnType<StorageAdapter["findAll"]> => {
-        const { storageAdapter } = opts.ctx;
+      .query(async (opts): ReturnType<QueryAdapter["findAll"]> => {
+        const { queryAdapter } = opts.ctx;
         const { chainId, address } = opts.input;
-        return storageAdapter.findAll(chainId, address);
+        return queryAdapter.findAll(chainId, address);
       }),
   });
 }
diff --git a/packages/store-sync/tsup.config.ts b/packages/store-sync/tsup.config.ts
index f2e8f2539d..7bc63a088a 100644
--- a/packages/store-sync/tsup.config.ts
+++ b/packages/store-sync/tsup.config.ts
@@ -1,7 +1,13 @@
 import { defineConfig } from "tsup";
 
 export default defineConfig({
-  entry: ["src/index.ts", "src/sqlite/index.ts", "src/recs/index.ts", "src/trpc-indexer/index.ts"],
+  entry: [
+    "src/index.ts",
+    "src/sqlite/index.ts",
+    "src/postgres/index.ts",
+    "src/recs/index.ts",
+    "src/trpc-indexer/index.ts",
+  ],
   target: "esnext",
   format: ["esm"],
   dts: false,
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 2234634ee1..7d61b0d740 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -779,10 +779,13 @@ importers:
         version: 4.3.4(supports-color@8.1.1)
       drizzle-orm:
         specifier: ^0.27.0
-        version: 0.27.0(@types/better-sqlite3@7.6.4)(@types/sql.js@1.4.4)(better-sqlite3@8.4.0)(kysely@0.26.1)(sql.js@1.8.0)
+        version: 0.27.0(@types/better-sqlite3@7.6.4)(better-sqlite3@8.4.0)(postgres@3.3.5)
       fastify:
         specifier: ^4.21.0
         version: 4.21.0
+      postgres:
+        specifier: ^3.3.5
+        version: 3.3.5
       rxjs:
         specifier: 7.5.5
         version: 7.5.5
@@ -844,18 +847,18 @@ importers:
       '@trpc/server':
         specifier: 10.34.0
         version: 10.34.0
-      better-sqlite3:
-        specifier: ^8.4.0
-        version: 8.4.0
       debug:
         specifier: ^4.3.4
         version: 4.3.4(supports-color@8.1.1)
       drizzle-orm:
         specifier: ^0.27.0
-        version: 0.27.0(@types/better-sqlite3@7.6.4)(@types/sql.js@1.4.4)(better-sqlite3@8.4.0)(kysely@0.26.1)(sql.js@1.8.0)
+        version: 0.27.0(@types/sql.js@1.4.4)(kysely@0.26.1)(postgres@3.3.5)(sql.js@1.8.0)
       kysely:
         specifier: ^0.26.1
         version: 0.26.1
+      postgres:
+        specifier: ^3.3.5
+        version: 3.3.5
       rxjs:
         specifier: 7.5.5
         version: 7.5.5
@@ -872,9 +875,6 @@ importers:
         specifier: ^3.21.4
         version: 3.21.4
     devDependencies:
-      '@types/better-sqlite3':
-        specifier: ^7.6.4
-        version: 7.6.4
       '@types/debug':
         specifier: ^4.1.7
         version: 4.1.7
@@ -5013,7 +5013,7 @@ packages:
       detect-libc: 1.0.3
     dev: true
 
-  /drizzle-orm@0.27.0(@types/better-sqlite3@7.6.4)(@types/sql.js@1.4.4)(better-sqlite3@8.4.0)(kysely@0.26.1)(sql.js@1.8.0):
+  /drizzle-orm@0.27.0(@types/better-sqlite3@7.6.4)(better-sqlite3@8.4.0)(postgres@3.3.5):
     resolution: {integrity: sha512-LGiJ0icB+wQwgbSCOvAjONY8Ec6G/EDzQQP5PmUaQYeI9OqgpVKHC2T1fFIbvk5dabWsbokJ5NOciVAxriStig==}
     peerDependencies:
       '@aws-sdk/client-rds-data': '>=3'
@@ -5076,9 +5076,75 @@ packages:
         optional: true
     dependencies:
       '@types/better-sqlite3': 7.6.4
-      '@types/sql.js': 1.4.4
       better-sqlite3: 8.4.0
+      postgres: 3.3.5
+    dev: false
+
+  /drizzle-orm@0.27.0(@types/sql.js@1.4.4)(kysely@0.26.1)(postgres@3.3.5)(sql.js@1.8.0):
+    resolution: {integrity: sha512-LGiJ0icB+wQwgbSCOvAjONY8Ec6G/EDzQQP5PmUaQYeI9OqgpVKHC2T1fFIbvk5dabWsbokJ5NOciVAxriStig==}
+    peerDependencies:
+      '@aws-sdk/client-rds-data': '>=3'
+      '@cloudflare/workers-types': '>=3'
+      '@libsql/client': '*'
+      '@neondatabase/serverless': '>=0.1'
+      '@opentelemetry/api': ^1.4.1
+      '@planetscale/database': '>=1'
+      '@types/better-sqlite3': '*'
+      '@types/pg': '*'
+      '@types/sql.js': '*'
+      '@vercel/postgres': '*'
+      better-sqlite3: '>=7'
+      bun-types: '*'
+      knex: '*'
+      kysely: '*'
+      mysql2: '>=2'
+      pg: '>=8'
+      postgres: '>=3'
+      sql.js: '>=1'
+      sqlite3: '>=5'
+    peerDependenciesMeta:
+      '@aws-sdk/client-rds-data':
+        optional: true
+      '@cloudflare/workers-types':
+        optional: true
+      '@libsql/client':
+        optional: true
+      '@neondatabase/serverless':
+        optional: true
+      '@opentelemetry/api':
+        optional: true
+      '@planetscale/database':
+        optional: true
+      '@types/better-sqlite3':
+        optional: true
+      '@types/pg':
+        optional: true
+      '@types/sql.js':
+        optional: true
+      '@vercel/postgres':
+        optional: true
+      better-sqlite3:
+        optional: true
+      bun-types:
+        optional: true
+      knex:
+        optional: true
+      kysely:
+        optional: true
+      mysql2:
+        optional: true
+      pg:
+        optional: true
+      postgres:
+        optional: true
+      sql.js:
+        optional: true
+      sqlite3:
+        optional: true
+    dependencies:
+      '@types/sql.js': 1.4.4
       kysely: 0.26.1
+      postgres: 3.3.5
       sql.js: 1.8.0
     dev: false
 
@@ -8964,6 +9030,10 @@ packages:
       source-map-js: 1.0.2
     dev: true
 
+  /postgres@3.3.5:
+    resolution: {integrity: sha512-+JD93VELV9gHkqpV5gdL5/70HdGtEw4/XE1S4BC8f1mcPmdib3K5XsKVbnR1XcAyC41zOnifJ+9YRKxdIsXiUw==}
+    dev: false
+
   /prebuild-install@7.1.1:
     resolution: {integrity: sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==}
     engines: {node: '>=10'}