diff --git a/yarn-project/blob-sink/.eslintrc.cjs b/yarn-project/blob-sink/.eslintrc.cjs new file mode 100644 index 00000000000..e659927475c --- /dev/null +++ b/yarn-project/blob-sink/.eslintrc.cjs @@ -0,0 +1 @@ +module.exports = require('@aztec/foundation/eslint');
diff --git a/yarn-project/blob-sink/README.md b/yarn-project/blob-sink/README.md new file mode 100644 index 00000000000..649e8eab867 --- /dev/null +++ b/yarn-project/blob-sink/README.md @@ -0,0 +1,21 @@ +## Blob Sink
+
+An HTTP API that loosely emulates the https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars API.
+We do not support all of the possible values of `block_id` (namely `genesis`, `head`, `finalized`), as we do not use any of these values in our
+blobs integration.
+
+## When is this used?
+
+This service runs alongside end-to-end tests to capture the blob transactions that are sent alongside a `propose` transaction.
+
+### Why?
+
+Once we make the transition to blob transactions, we will need to be able to query for blobs. One way to do this is to run an entire L1 execution layer and consensus layer pair alongside all of our e2e tests and inside the sandbox. But this is a bit much, so instead the blob sink can be used to store and request blobs, without needing to run an entire consensus layer client.
+
+### Other Use Cases
+
+Blobs are only held in the L1 consensus layer for a period of ~3 weeks; the blob sink can be used to store blobs for longer.
+
+### How?
+
+The blob sink is a simple HTTP server that can be run alongside the e2e tests. It stores the blobs in a local file system and provides an API to query for them. diff --git a/yarn-project/blob-sink/package.json b/yarn-project/blob-sink/package.json new file mode 100644 index 00000000000..7090bf99527 --- /dev/null +++ b/yarn-project/blob-sink/package.json @@ -0,0 +1,84 @@ +{ + "name": "@aztec/blob-sink", + "version": "0.1.0", + "type": "module", + "exports": { + ".": "./dest/index.js" + }, + "inherits": [ + "../package.common.json" + ], + "scripts": { + "build": "yarn clean && tsc -b", + "build:dev": "tsc -b --watch", + "clean": "rm -rf ./dest .tsbuildinfo", + "formatting": "run -T prettier --check ./src && run -T eslint ./src", + "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", + "test": "HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-16} RAYON_NUM_THREADS=${RAYON_NUM_THREADS:-4} NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests --maxWorkers=${JEST_MAX_WORKERS:-8}" + }, + "jest": { + "moduleNameMapper": { + "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" + }, + "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", + "rootDir": "./src", + "transform": { + "^.+\\.tsx?$": [ + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" + } + } + } + ] + }, + "extensionsToTreatAsEsm": [ + ".ts" + ], + "reporters": [ + "default" + ], + "testTimeout": 30000, + "setupFiles": [ + "../../foundation/src/jest/setup.mjs" + ] + }, + "dependencies": { + "@aztec/circuit-types": "workspace:^", + "@aztec/foundation": "workspace:^", + "@aztec/kv-store": "workspace:*", + "@aztec/telemetry-client": "workspace:*", + "express": "^4.21.1", + "source-map-support": "^0.5.21", + "tslib": "^2.4.0", + "zod": "^3.23.8" + }, + "devDependencies": { + "@jest/globals": "^29.5.0", + "@types/jest": "^29.5.0", + "@types/memdown": "^3.0.0", + "@types/node": "^18.7.23", + "@types/source-map-support": "^0.5.10", 
"@types/supertest": "^6.0.2", + "jest": "^29.5.0", + "jest-mock-extended": "^3.0.3", + "supertest": "^7.0.0", + "ts-node": "^10.9.1", + "typescript": "^5.0.4" + }, + "files": [ + "dest", + "src", + "!*.test.*" + ], + "types": "./dest/index.d.ts", + "engines": { + "node": ">=18" + } +} diff --git a/yarn-project/blob-sink/src/blob-sink.test.ts b/yarn-project/blob-sink/src/blob-sink.test.ts new file mode 100644 index 00000000000..3107b953d47 --- /dev/null +++ b/yarn-project/blob-sink/src/blob-sink.test.ts @@ -0,0 +1,134 @@ +import { Blob } from '@aztec/foundation/blob'; +import { Fr } from '@aztec/foundation/fields'; + +import request from 'supertest'; + +import { BlobSinkServer } from './server.js'; + +describe('BlobSinkService', () => { + let service: BlobSinkServer; + + beforeEach(async () => { + service = new BlobSinkServer({ + port: 0, // Using port 0 lets the OS assign a random available port + }); + await service.start(); + }); + + afterEach(async () => { + await service.stop(); + }); + + describe('should store and retrieve a blob sidecar', () => { + const testFields = [Fr.random(), Fr.random(), Fr.random()]; + const testFields2 = [Fr.random(), Fr.random(), Fr.random()]; + const blob = Blob.fromFields(testFields); + const blob2 = Blob.fromFields(testFields2); + const blockId = '0x1234'; + + beforeEach(async () => { + // Post the blob + const postResponse = await request(service.getApp()) + .post('/blob_sidecar') + .send({ + // eslint-disable-next-line camelcase + block_id: blockId, + blobs: [ + { + index: 0, + blob: blob.toBuffer(), + }, + { + index: 1, + blob: blob2.toBuffer(), + }, + ], + }); + + expect(postResponse.status).toBe(200); + }); + + it('should retrieve the blob', async () => { + // Retrieve the blob + const getResponse = await request(service.getApp()).get(`/eth/v1/beacon/blob_sidecars/${blockId}`); + + expect(getResponse.status).toBe(200); + + // Convert the response blob back to a Blob object and verify it matches + const retrievedBlobs = getResponse.body.data; + + const retrievedBlob = Blob.fromBuffer(Buffer.from(retrievedBlobs[0].blob, 'hex')); + const retrievedBlob2 = Blob.fromBuffer(Buffer.from(retrievedBlobs[1].blob, 'hex')); + expect(retrievedBlob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + expect(retrievedBlob2.fieldsHash.toString()).toBe(blob2.fieldsHash.toString()); + expect(retrievedBlob2.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + }); + + it('should retrieve specific indicies', async () => { + // We can also request specific indicies + const getWithIndicies = await request(service.getApp()).get( + `/eth/v1/beacon/blob_sidecars/${blockId}?indices=0,1`, + ); + + expect(getWithIndicies.status).toBe(200); + expect(getWithIndicies.body.data.length).toBe(2); + + const retrievedBlobs = getWithIndicies.body.data; + const retrievedBlob = Blob.fromBuffer(Buffer.from(retrievedBlobs[0].blob, 'hex')); + const retrievedBlob2 = Blob.fromBuffer(Buffer.from(retrievedBlobs[1].blob, 'hex')); + expect(retrievedBlob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + expect(retrievedBlob2.fieldsHash.toString()).toBe(blob2.fieldsHash.toString()); + expect(retrievedBlob2.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + }); + + it('should retreive a single index', async () => { + const getWithIndicies = await 
request(service.getApp()).get(`/eth/v1/beacon/blob_sidecars/${blockId}?indices=1`);
+
+ expect(getWithIndices.status).toBe(200);
+ expect(getWithIndices.body.data.length).toBe(1);
+
+ const retrievedBlobs = getWithIndices.body.data;
+ const retrievedBlob = Blob.fromBuffer(Buffer.from(retrievedBlobs[0].blob, 'hex'));
+ expect(retrievedBlob.fieldsHash.toString()).toBe(blob2.fieldsHash.toString());
+ expect(retrievedBlob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex'));
+ });
+ });
+
+ it('should return an error if invalid indices are provided', async () => {
+ const blockId = '0x1234';
+
+ const response = await request(service.getApp()).get(`/eth/v1/beacon/blob_sidecars/${blockId}?indices=word`);
+ expect(response.status).toBe(400);
+ expect(response.body.error).toBe('Invalid indices parameter');
+ });
+
+ it('should return an error if the block ID is invalid (POST)', async () => {
+ const response = await request(service.getApp()).post('/blob_sidecar').send({
+ // eslint-disable-next-line camelcase
+ block_id: undefined,
+ });
+
+ expect(response.status).toBe(400);
+ });
+
+ it('should return an error if the block ID is invalid (GET)', async () => {
+ const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/invalid-id');
+
+ expect(response.status).toBe(400);
+ });
+
+ it('should return 404 for non-existent blob', async () => {
+ const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/0x999999');
+
+ expect(response.status).toBe(404);
+ });
+
+ it('should reject negative block IDs', async () => {
+ const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/-123');
+
+ expect(response.status).toBe(400);
+ expect(response.body.error).toBe('Invalid block_id parameter');
+ });
+});
diff --git a/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts b/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts new file mode 100644 index 00000000000..2636ec726d9 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts @@ -0,0 +1,142 @@ +import { Blob } from '@aztec/foundation/blob';
+import { Fr } from '@aztec/foundation/fields';
+
+import { BlobWithIndex } from '../types/index.js';
+import { type BlobStore } from './interface.js';
+
+export function describeBlobStore(getBlobStore: () => BlobStore) {
+ let blobStore: BlobStore;
+
+ beforeEach(() => {
+ blobStore = getBlobStore();
+ });
+
+ it('should store and retrieve a blob', async () => {
+ // Create a test blob with random fields
+ const testFields = [Fr.random(), Fr.random(), Fr.random()];
+ const blob = Blob.fromFields(testFields);
+ const blockId = '0x12345';
+ const blobWithIndex = new BlobWithIndex(blob, 0);
+
+ // Store the blob
+ await blobStore.addBlobSidecars(blockId, [blobWithIndex]);
+
+ // Retrieve the blob
+ const retrievedBlobs = await blobStore.getBlobSidecars(blockId);
+ const [retrievedBlob] = retrievedBlobs!;
+
+ // Verify the blob was retrieved and matches
+ expect(retrievedBlob).toBeDefined();
+ expect(retrievedBlob.blob.fieldsHash.toString()).toBe(blob.fieldsHash.toString());
+ expect(retrievedBlob.blob.commitment.toString('hex')).toBe(blob.commitment.toString('hex'));
+ });
+
+ it('should allow requesting a specific index of a blob', async () => {
+ const testFields = [Fr.random(), Fr.random(), Fr.random()];
+ const blob = Blob.fromFields(testFields);
+ const blockId = '0x12345';
+ const blobWithIndex = new BlobWithIndex(blob, 0);
+ const blobWithIndex2 = new BlobWithIndex(blob, 1);
+ 
await blobStore.addBlobSidecars(blockId, [blobWithIndex, blobWithIndex2]); + + const retrievedBlobs = await blobStore.getBlobSidecars(blockId, [0]); + const [retrievedBlob] = retrievedBlobs!; + + expect(retrievedBlob.blob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.blob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + + const retrievedBlobs2 = await blobStore.getBlobSidecars(blockId, [1]); + const [retrievedBlob2] = retrievedBlobs2!; + + expect(retrievedBlob2.blob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + }); + + it('Differentiate between blockHash and slot', async () => { + const testFields = [Fr.random(), Fr.random(), Fr.random()]; + const testFieldsSlot = [Fr.random(), Fr.random(), Fr.random()]; + const blob = Blob.fromFields(testFields); + const blobSlot = Blob.fromFields(testFieldsSlot); + const blockId = '0x12345'; + const slot = '12345'; + const blobWithIndex = new BlobWithIndex(blob, 0); + const blobWithIndexSlot = new BlobWithIndex(blobSlot, 0); + + await blobStore.addBlobSidecars(blockId, [blobWithIndex]); + await blobStore.addBlobSidecars(slot, [blobWithIndexSlot]); + + const retrievedBlobs = await blobStore.getBlobSidecars(blockId, [0]); + const [retrievedBlob] = retrievedBlobs!; + + expect(retrievedBlob.blob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.blob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + + const retrievedBlobs2 = await blobStore.getBlobSidecars(slot, [0]); + const [retrievedBlob2] = retrievedBlobs2!; + + expect(retrievedBlob2.blob.fieldsHash.toString()).toBe(blobSlot.fieldsHash.toString()); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blobSlot.commitment.toString('hex')); + }); + + it('should return undefined for non-existent blob', async () => { + const nonExistentBlob = await blobStore.getBlobSidecars('999999'); + expect(nonExistentBlob).toBeUndefined(); + }); + + it('should handle multiple blobs with different block IDs', async () => { + // Create two different blobs + const blob1 = Blob.fromFields([Fr.random(), Fr.random()]); + const blob2 = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); + const blobWithIndex1 = new BlobWithIndex(blob1, 0); + const blobWithIndex2 = new BlobWithIndex(blob2, 0); + + // Store both blobs + await blobStore.addBlobSidecars('1', [blobWithIndex1]); + await blobStore.addBlobSidecars('2', [blobWithIndex2]); + + // Retrieve and verify both blobs + const retrieved1 = await blobStore.getBlobSidecars('1'); + const retrieved2 = await blobStore.getBlobSidecars('2'); + const [retrievedBlob1] = retrieved1!; + const [retrievedBlob2] = retrieved2!; + + expect(retrievedBlob1.blob.commitment.toString('hex')).toBe(blob1.commitment.toString('hex')); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + }); + + it('should overwrite blob when using same block ID', async () => { + // Create two different blobs + const originalBlob = Blob.fromFields([Fr.random()]); + const newBlob = Blob.fromFields([Fr.random(), Fr.random()]); + const blockId = '1'; + const originalBlobWithIndex = new BlobWithIndex(originalBlob, 0); + const newBlobWithIndex = new BlobWithIndex(newBlob, 0); + + // Store original blob + await blobStore.addBlobSidecars(blockId, [originalBlobWithIndex]); + + // Overwrite with new blob + await blobStore.addBlobSidecars(blockId, 
[newBlobWithIndex]);
+
+ // Retrieve and verify it's the new blob
+ const retrievedBlobs = await blobStore.getBlobSidecars(blockId);
+ const [retrievedBlob] = retrievedBlobs!;
+ expect(retrievedBlob.blob.commitment.toString('hex')).toBe(newBlob.commitment.toString('hex'));
+ expect(retrievedBlob.blob.commitment.toString('hex')).not.toBe(originalBlob.commitment.toString('hex'));
+ });
+
+ it('should handle multiple blobs with the same block ID', async () => {
+ const blob1 = Blob.fromFields([Fr.random()]);
+ const blob2 = Blob.fromFields([Fr.random()]);
+ const blobWithIndex1 = new BlobWithIndex(blob1, 0);
+ const blobWithIndex2 = new BlobWithIndex(blob2, 0);
+
+ await blobStore.addBlobSidecars('1', [blobWithIndex1, blobWithIndex2]);
+ const retrievedBlobs = await blobStore.getBlobSidecars('1');
+ const [retrievedBlob1, retrievedBlob2] = retrievedBlobs!;
+
+ expect(retrievedBlob1.blob.commitment.toString('hex')).toBe(blob1.commitment.toString('hex'));
+ expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex'));
+ });
+}
diff --git a/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts b/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts new file mode 100644 index 00000000000..8b523dbaef1 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts @@ -0,0 +1,8 @@ +import { openTmpStore } from '@aztec/kv-store/lmdb';
+
+import { describeBlobStore } from './blob_store_test_suite.js';
+import { DiskBlobStore } from './disk_blob_store.js';
+
+describe('DiskBlobStore', () => {
+ describeBlobStore(() => new DiskBlobStore(openTmpStore()));
+});
diff --git a/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts b/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts new file mode 100644 index 00000000000..63e4dc10ab6 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts @@ -0,0 +1,32 @@ +import { type AztecKVStore, type AztecMap } from '@aztec/kv-store';
+
+import { type BlobWithIndex, BlobsWithIndexes } from '../types/index.js';
+import { type BlobStore } from './interface.js';
+
+export class DiskBlobStore implements BlobStore {
+ blobs: AztecMap<string, Buffer>;
+
+ constructor(store: AztecKVStore) {
+ this.blobs = store.openMap('blobs');
+ }
+
+ public getBlobSidecars(blockId: string, indices?: number[]): Promise<BlobWithIndex[] | undefined> {
+ const blobBuffer = this.blobs.get(`${blockId}`);
+ if (!blobBuffer) {
+ return Promise.resolve(undefined);
+ }
+
+ const blobsWithIndexes = BlobsWithIndexes.fromBuffer(blobBuffer);
+ if (indices) {
+ // If indices are provided, return the blobs at the specified indices
+ return Promise.resolve(blobsWithIndexes.getBlobsFromIndices(indices));
+ }
+ // If no indices are provided, return all blobs
+ return Promise.resolve(blobsWithIndexes.blobs);
+ }
+
+ public async addBlobSidecars(blockId: string, blobSidecars: BlobWithIndex[]): Promise<void> {
+ await this.blobs.set(blockId, new BlobsWithIndexes(blobSidecars).toBuffer());
+ return Promise.resolve();
+ }
+}
diff --git a/yarn-project/blob-sink/src/blobstore/index.ts b/yarn-project/blob-sink/src/blobstore/index.ts new file mode 100644 index 00000000000..fd3901930cf --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/index.ts @@ -0,0 +1,3 @@ +export * from './memory_blob_store.js';
+export * from './disk_blob_store.js';
+export * from './interface.js';
diff --git a/yarn-project/blob-sink/src/blobstore/interface.ts b/yarn-project/blob-sink/src/blobstore/interface.ts new file mode 100644 index 00000000000..27d7fac25c2 --- /dev/null +++ 
b/yarn-project/blob-sink/src/blobstore/interface.ts @@ -0,0 +1,12 @@ +import { type BlobWithIndex } from '../types/index.js';
+
+export interface BlobStore {
+ /**
+ * Get the blob sidecars for a given block id, optionally filtered by indices
+ */
+ getBlobSidecars: (blockId: string, indices?: number[]) => Promise<BlobWithIndex[] | undefined>;
+ /**
+ * Add blob sidecars to the store for a given block id
+ */
+ addBlobSidecars: (blockId: string, blobSidecars: BlobWithIndex[]) => Promise<void>;
+}
diff --git a/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts b/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts new file mode 100644 index 00000000000..2f13926cd1a --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts @@ -0,0 +1,6 @@ +import { describeBlobStore } from './blob_store_test_suite.js';
+import { MemoryBlobStore } from './memory_blob_store.js';
+
+describe('MemoryBlobStore', () => {
+ describeBlobStore(() => new MemoryBlobStore());
+});
diff --git a/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts b/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts new file mode 100644 index 00000000000..efe013f9b01 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts @@ -0,0 +1,25 @@ +import { type BlobWithIndex, BlobsWithIndexes } from '../types/index.js';
+import { type BlobStore } from './interface.js';
+
+export class MemoryBlobStore implements BlobStore {
+ private blobs: Map<string, Buffer> = new Map();
+
+ public getBlobSidecars(blockId: string, indices?: number[]): Promise<BlobWithIndex[] | undefined> {
+ const blobBuffer = this.blobs.get(blockId);
+ if (!blobBuffer) {
+ return Promise.resolve(undefined);
+ }
+ const blobsWithIndexes = BlobsWithIndexes.fromBuffer(blobBuffer);
+ if (indices) {
+ // If indices are provided, return the blobs at the specified indices
+ return Promise.resolve(blobsWithIndexes.getBlobsFromIndices(indices));
+ }
+ // If no indices are provided, return all blobs
+ return Promise.resolve(blobsWithIndexes.blobs);
+ }
+
+ public addBlobSidecars(blockId: string, blobSidecars: BlobWithIndex[]): Promise<void> {
+ this.blobs.set(blockId, new BlobsWithIndexes(blobSidecars).toBuffer());
+ return Promise.resolve();
+ }
+}
diff --git a/yarn-project/blob-sink/src/config.ts b/yarn-project/blob-sink/src/config.ts new file mode 100644 index 00000000000..e18311f9f1d --- /dev/null +++ b/yarn-project/blob-sink/src/config.ts @@ -0,0 +1,7 @@ +import { type DataStoreConfig } from '@aztec/kv-store/config';
+
+export interface BlobSinkConfig {
+ port?: number;
+ dataStoreConfig?: DataStoreConfig;
+ otelMetricsCollectorUrl?: string;
+}
diff --git a/yarn-project/blob-sink/src/factory.ts b/yarn-project/blob-sink/src/factory.ts new file mode 100644 index 00000000000..43a0df8e6c3 --- /dev/null +++ b/yarn-project/blob-sink/src/factory.ts @@ -0,0 +1,27 @@ +import { type AztecKVStore } from '@aztec/kv-store';
+import { createStore } from '@aztec/kv-store/lmdb';
+import { type TelemetryClient } from '@aztec/telemetry-client';
+
+import { type BlobSinkConfig } from './config.js';
+import { BlobSinkServer } from './server.js';
+
+// If data store settings are provided, the store is created and returned.
+// Otherwise, undefined is returned and an in-memory store will be used.
+async function getDataStore(config?: BlobSinkConfig): Promise<AztecKVStore | undefined> {
+ if (!config?.dataStoreConfig) {
+ return undefined;
+ }
+ return await createStore('blob-sink', config.dataStoreConfig);
+}
+
+/**
+ * Creates a blob sink service from the provided config. 
+ */
+export async function createBlobSinkServer(
+ config?: BlobSinkConfig,
+ telemetry?: TelemetryClient,
+): Promise<BlobSinkServer> {
+ const store = await getDataStore(config);
+
+ return new BlobSinkServer(config, store, telemetry);
+}
diff --git a/yarn-project/blob-sink/src/index.ts b/yarn-project/blob-sink/src/index.ts new file mode 100644 index 00000000000..25844130c2f --- /dev/null +++ b/yarn-project/blob-sink/src/index.ts @@ -0,0 +1,3 @@ +export * from './server.js';
+export * from './config.js';
+export * from './factory.js';
diff --git a/yarn-project/blob-sink/src/metrics.ts b/yarn-project/blob-sink/src/metrics.ts new file mode 100644 index 00000000000..28e2b6308c0 --- /dev/null +++ b/yarn-project/blob-sink/src/metrics.ts @@ -0,0 +1,27 @@ +import { type Histogram, Metrics, type TelemetryClient, type UpDownCounter } from '@aztec/telemetry-client';
+
+import { type BlobWithIndex } from './types/blob_with_index.js';
+
+export class BlobSinkMetrics {
+ /** The number of blobs in the blob store */
+ private objectsInBlobStore: UpDownCounter;
+
+ /** Tracks blob size */
+ private blobSize: Histogram;
+
+ constructor(telemetry: TelemetryClient) {
+ const name = 'BlobSink';
+ this.objectsInBlobStore = telemetry.getMeter(name).createUpDownCounter(Metrics.BLOB_SINK_OBJECTS_IN_BLOB_STORE, {
+ description: 'The current number of blobs in the blob store',
+ });
+
+ this.blobSize = telemetry.getMeter(name).createHistogram(Metrics.BLOB_SINK_BLOB_SIZE, {
+ description: 'The non-zero size of blobs in the blob store',
+ });
+ }
+
+ public recordBlobReceipt(blobs: BlobWithIndex[]) {
+ this.objectsInBlobStore.add(blobs.length);
+ blobs.forEach(b => this.blobSize.record(b.blob.getSize()));
+ }
+}
diff --git a/yarn-project/blob-sink/src/server.ts b/yarn-project/blob-sink/src/server.ts new file mode 100644 index 00000000000..45c79f6991d --- /dev/null +++ b/yarn-project/blob-sink/src/server.ts @@ -0,0 +1,170 @@ +import { Blob } from '@aztec/foundation/blob';
+import { type Logger, createLogger } from '@aztec/foundation/log';
+import { type AztecKVStore } from '@aztec/kv-store';
+import { type TelemetryClient } from '@aztec/telemetry-client';
+import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';
+
+import express, { type Express, type Request, type Response, json } from 'express';
+import { type Server } from 'http';
+import { z } from 'zod';
+
+import { type BlobStore, DiskBlobStore } from './blobstore/index.js';
+import { MemoryBlobStore } from './blobstore/memory_blob_store.js';
+import { type BlobSinkConfig } from './config.js';
+import { BlobSinkMetrics } from './metrics.js';
+import { type PostBlobSidecarRequest, blockIdSchema, indicesSchema } from './types/api.js';
+import { BlobWithIndex } from './types/index.js';
+
+/**
+ * Example usage:
+ * const service = new BlobSinkServer({ port: 5052 });
+ * await service.start();
+ * ... later ...
+ * await service.stop();
+ */
+export class BlobSinkServer {
+ public readonly port: number;
+
+ private app: Express;
+ private server: Server | null = null;
+ private blobStore: BlobStore;
+ private metrics: BlobSinkMetrics;
+ private log: Logger = createLogger('aztec:blob-sink');
+
+ constructor(config?: BlobSinkConfig, store?: AztecKVStore, telemetry: TelemetryClient = new NoopTelemetryClient()) {
+ this.port = config?.port ?? 
5052; // 5052 is the default beacon chain HTTP port
+ this.app = express();
+
+ // Setup middleware
+ this.app.use(json({ limit: '1mb' })); // Increase the limit to allow for a blob to be sent
+
+ this.metrics = new BlobSinkMetrics(telemetry);
+
+ this.blobStore = store === undefined ? new MemoryBlobStore() : new DiskBlobStore(store);
+
+ // Setup routes
+ this.setupRoutes();
+ }
+
+ private setupRoutes() {
+ this.app.get('/eth/v1/beacon/blob_sidecars/:block_id', this.handleGetBlobSidecar.bind(this));
+ this.app.post('/blob_sidecar', this.handlePostBlobSidecar.bind(this));
+ }
+
+ private async handleGetBlobSidecar(req: Request, res: Response) {
+ // eslint-disable-next-line camelcase
+ const { block_id } = req.params;
+ const { indices } = req.query;
+
+ try {
+ // eslint-disable-next-line camelcase
+ const parsedBlockId = blockIdSchema.safeParse(block_id);
+ if (!parsedBlockId.success) {
+ res.status(400).json({
+ error: 'Invalid block_id parameter',
+ });
+ return;
+ }
+
+ const parsedIndices = indicesSchema.safeParse(indices);
+ if (!parsedIndices.success) {
+ res.status(400).json({
+ error: 'Invalid indices parameter',
+ });
+ return;
+ }
+
+ const blobs = await this.blobStore.getBlobSidecars(parsedBlockId.data.toString(), parsedIndices.data);
+
+ if (!blobs) {
+ res.status(404).json({ error: 'Blob not found' });
+ return;
+ }
+
+ res.json({
+ version: 'deneb',
+ data: blobs.map(blob => blob.toJSON()),
+ });
+ } catch (error) {
+ if (error instanceof z.ZodError) {
+ res.status(400).json({
+ error: 'Invalid block_id parameter',
+ details: error.errors,
+ });
+ } else {
+ res.status(500).json({
+ error: 'Internal server error',
+ });
+ }
+ }
+ }
+
+ private async handlePostBlobSidecar(req: Request, res: Response) {
+ // eslint-disable-next-line camelcase
+ const { block_id, blobs } = req.body;
+
+ try {
+ // eslint-disable-next-line camelcase
+ const parsedBlockId = blockIdSchema.parse(block_id);
+ if (!parsedBlockId) {
+ res.status(400).json({
+ error: 'Invalid block_id parameter',
+ });
+ return;
+ }
+
+ this.log.info(`Received blob sidecar for block ${parsedBlockId}`);
+
+ const blobObjects: BlobWithIndex[] = this.parseBlobData(blobs);
+
+ await this.blobStore.addBlobSidecars(parsedBlockId.toString(), blobObjects);
+ this.metrics.recordBlobReceipt(blobObjects);
+
+ this.log.info(`Blob sidecar stored successfully for block ${parsedBlockId}`);
+
+ res.json({ message: 'Blob sidecar stored successfully' });
+ } catch (error) {
+ res.status(400).json({
+ error: 'Invalid blob data',
+ });
+ }
+ }
+
+ private parseBlobData(blobs: PostBlobSidecarRequest['blobs']): BlobWithIndex[] {
+ return blobs.map(({ index, blob }) => new BlobWithIndex(Blob.fromBuffer(Buffer.from(blob.data)), index));
+ }
+
+ public start(): Promise<void> {
+ return new Promise(resolve => {
+ this.server = this.app.listen(this.port, () => {
+ this.log.info(`Server is running on http://localhost:${this.port}`);
+ resolve();
+ });
+ });
+ }
+
+ public stop(): Promise<void> {
+ this.log.info('Stopping blob sink');
+ return new Promise((resolve, reject) => {
+ if (!this.server) {
+ resolve();
+ this.log.info('Blob sink already stopped');
+ return;
+ }
+
+ this.server.close(err => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ this.server = null;
+ this.log.info('Blob sink stopped');
+ resolve();
+ });
+ });
+ }
+
+ public getApp(): Express {
+ return this.app;
+ }
+}
diff --git a/yarn-project/blob-sink/src/types/api.ts b/yarn-project/blob-sink/src/types/api.ts new file mode 100644 index 00000000000..cd408ecdedb --- /dev/null +++ 
b/yarn-project/blob-sink/src/types/api.ts @@ -0,0 +1,49 @@ +import { z } from 'zod';
+
+export interface PostBlobSidecarRequest {
+ // eslint-disable-next-line camelcase
+ block_id: string;
+ blobs: Array<{
+ index: number;
+ blob: {
+ type: string;
+ data: string;
+ };
+ }>;
+}
+
+export const blockRootSchema = z
+ .string()
+ .regex(/^0x[0-9a-fA-F]{0,64}$/)
+ .max(66);
+export const slotSchema = z.number().int().positive();
+
+// Define the Zod schema for an array of numbers
+export const indicesSchema = z.optional(
+ z
+ .string()
+ .refine(str => str.split(',').every(item => !isNaN(Number(item))), {
+ message: 'All items in the query must be valid numbers.',
+ })
+ .transform(str => str.split(',').map(Number)),
+); // Convert to an array of numbers
+
+// Validation schemas
+// Block identifier. Can be one of: <slot>, <hex encoded block root with 0x prefix>.
+// Note the spec https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars does allow for "head", "genesis", "finalized" as valid block ids,
+// but we explicitly do not support these values.
+export const blockIdSchema = blockRootSchema.or(slotSchema);
+
+export const postBlobSidecarSchema = z.object({
+ // eslint-disable-next-line camelcase
+ block_id: blockIdSchema,
+ blobs: z.array(
+ z.object({
+ index: z.number(),
+ blob: z.object({
+ type: z.string(),
+ data: z.string(),
+ }),
+ }),
+ ),
+});
diff --git a/yarn-project/blob-sink/src/types/blob_with_index.test.ts b/yarn-project/blob-sink/src/types/blob_with_index.test.ts new file mode 100644 index 00000000000..d29c6b98b88 --- /dev/null +++ b/yarn-project/blob-sink/src/types/blob_with_index.test.ts @@ -0,0 +1,31 @@ +import { Blob } from '@aztec/foundation/blob';
+import { Fr } from '@aztec/foundation/fields';
+
+import { BlobWithIndex, BlobsWithIndexes } from './blob_with_index.js';
+
+describe('BlobWithIndex Serde', () => {
+ it('should serialize and deserialize', () => {
+ const blob = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]);
+ const blobWithIndex = new BlobWithIndex(blob, 0);
+ const serialized = blobWithIndex.toBuffer();
+
+ const deserialized = BlobWithIndex.fromBuffer(serialized);
+
+ expect(blobWithIndex).toEqual(deserialized);
+ });
+});
+
+describe('BlobsWithIndexes Serde', () => {
+ it('should serialize and deserialize', () => {
+ const blobs = [
+ new BlobWithIndex(Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]), 0),
+ new BlobWithIndex(Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]), 1),
+ ];
+ const blobsWithIndexes = new BlobsWithIndexes(blobs);
+
+ const serialized = blobsWithIndexes.toBuffer();
+ const deserialized = BlobsWithIndexes.fromBuffer(serialized);
+
+ expect(deserialized).toEqual(blobsWithIndexes);
+ });
+});
diff --git a/yarn-project/blob-sink/src/types/blob_with_index.ts b/yarn-project/blob-sink/src/types/blob_with_index.ts new file mode 100644 index 00000000000..60446f2ff16 --- /dev/null +++ b/yarn-project/blob-sink/src/types/blob_with_index.ts @@ -0,0 +1,51 @@ +import { Blob } from '@aztec/foundation/blob';
+import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
+
+/** Serializes an array of blobs with their indexes to be stored at a given block id */
+export class BlobsWithIndexes {
+ constructor(public blobs: BlobWithIndex[]) {}
+
+ public toBuffer(): Buffer {
+ return serializeToBuffer(this.blobs.length, this.blobs);
+ }
+
+ public static fromBuffer(buffer: Buffer | BufferReader): BlobsWithIndexes {
+ const reader = BufferReader.asReader(buffer);
+ return new 
BlobsWithIndexes(reader.readArray(reader.readNumber(), BlobWithIndex)); + } + + public getBlobsFromIndices(indices: number[]): BlobWithIndex[] { + return this.blobs.filter((_, index) => indices.includes(index)); + } +} + +/** We store blobs alongside their index in the block */ +export class BlobWithIndex { + constructor( + /** The blob */ + public blob: Blob, + /** The index of the blob in the block */ + public index: number, + ) {} + + public toBuffer(): Buffer { + return serializeToBuffer([this.blob, this.index]); + } + + public static fromBuffer(buffer: Buffer | BufferReader): BlobWithIndex { + const reader = BufferReader.asReader(buffer); + return new BlobWithIndex(reader.readObject(Blob), reader.readNumber()); + } + + // Follows the structure the beacon node api expects + public toJSON(): { blob: string; index: number; kzg_commitment: string; kzg_proof: string } { + return { + blob: this.blob.toBuffer().toString('hex'), + index: this.index, + // eslint-disable-next-line camelcase + kzg_commitment: this.blob.commitment.toString('hex'), + // eslint-disable-next-line camelcase + kzg_proof: this.blob.proof.toString('hex'), + }; + } +} diff --git a/yarn-project/blob-sink/src/types/index.ts b/yarn-project/blob-sink/src/types/index.ts new file mode 100644 index 00000000000..396b8fc805e --- /dev/null +++ b/yarn-project/blob-sink/src/types/index.ts @@ -0,0 +1 @@ +export * from './blob_with_index.js'; diff --git a/yarn-project/blob-sink/tsconfig.json b/yarn-project/blob-sink/tsconfig.json new file mode 100644 index 00000000000..535eabe5863 --- /dev/null +++ b/yarn-project/blob-sink/tsconfig.json @@ -0,0 +1,23 @@ +{ + "extends": "..", + "compilerOptions": { + "outDir": "dest", + "rootDir": "src", + "tsBuildInfoFile": ".tsbuildinfo" + }, + "references": [ + { + "path": "../circuit-types" + }, + { + "path": "../foundation" + }, + { + "path": "../kv-store" + }, + { + "path": "../telemetry-client" + } + ], + "include": ["src"] +} diff --git a/yarn-project/circuits.js/src/structs/blobs/blob_public_inputs.test.ts b/yarn-project/circuits.js/src/structs/blobs/blob_public_inputs.test.ts index 7624b088a5e..aa996cae5a9 100644 --- a/yarn-project/circuits.js/src/structs/blobs/blob_public_inputs.test.ts +++ b/yarn-project/circuits.js/src/structs/blobs/blob_public_inputs.test.ts @@ -20,7 +20,7 @@ describe('BlobPublicInputs', () => { }); it('converts correctly from Blob class', () => { - const blob = new Blob(Array(400).fill(new Fr(3))); + const blob = Blob.fromFields(Array(400).fill(new Fr(3))); const converted = BlobPublicInputs.fromBlob(blob); expect(converted.z).toEqual(blob.challengeZ); expect(Buffer.from(converted.y.toString(16), 'hex')).toEqual(blob.evaluationY); @@ -55,7 +55,7 @@ describe('BlockBlobPublicInputs', () => { }); it('converts correctly from Blob class', () => { - const blobs = Array.from({ length: BLOBS_PER_BLOCK }, (_, i) => new Blob(Array(400).fill(new Fr(i + 1)))); + const blobs = Array.from({ length: BLOBS_PER_BLOCK }, (_, i) => Blob.fromFields(Array(400).fill(new Fr(i + 1)))); const converted = BlockBlobPublicInputs.fromBlobs(blobs); converted.inner.forEach((blobPI, i) => { expect(blobPI.z).toEqual(blobs[i].challengeZ); diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index b84ebb2f27e..a9b1f9d2d68 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -32,6 +32,7 @@ "@aztec/aztec-node": "workspace:^", "@aztec/aztec.js": "workspace:^", "@aztec/bb-prover": "workspace:^", + "@aztec/blob-sink": "workspace:^", 
"@aztec/bot": "workspace:^", "@aztec/circuit-types": "workspace:^", "@aztec/circuits.js": "workspace:^", diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index 7b61fc01c02..de355e1fae6 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -71,6 +71,9 @@ config.l1RpcUrl = config.l1RpcUrl || 'http://127.0.0.1:8545'; const numberOfConsecutiveBlocks = 2; +const BLOB_SINK_PORT = 5052; +const BLOB_SINK_URL = `http://localhost:${BLOB_SINK_PORT}`; + describe('L1Publisher integration', () => { let publicClient: PublicClient; let walletClient: WalletClient; @@ -182,6 +185,7 @@ describe('L1Publisher integration', () => { l1ChainId: 31337, viemPollingIntervalMS: 100, ethereumSlotDuration: config.ethereumSlotDuration, + blobSinkUrl: BLOB_SINK_URL, }, new NoopTelemetryClient(), ); diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index 50857c56baf..a618bc6ec2e 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -360,13 +360,23 @@ describe('e2e_synching', () => { return; } - const { teardown, logger, deployL1ContractsValues, config, cheatCodes, aztecNode, sequencer, watcher, pxe } = - await setup(0, { - salt: SALT, - l1StartTime: START_TIME, - skipProtocolContracts: true, - assumeProvenThrough, - }); + const { + teardown, + logger, + deployL1ContractsValues, + config, + cheatCodes, + aztecNode, + sequencer, + watcher, + pxe, + blobSink, + } = await setup(0, { + salt: SALT, + l1StartTime: START_TIME, + skipProtocolContracts: true, + assumeProvenThrough, + }); await (aztecNode as any).stop(); await (sequencer as any).stop(); @@ -383,6 +393,7 @@ describe('e2e_synching', () => { l1ChainId: 31337, viemPollingIntervalMS: 100, ethereumSlotDuration: ETHEREUM_SLOT_DURATION, + blobSinkUrl: `http://localhost:${blobSink?.port ?? 
5052}`, }, new NoopTelemetryClient(), ); diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 929768285c7..42663cf4cc0 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -14,6 +14,7 @@ import { type Wallet, } from '@aztec/aztec.js'; import { deployInstance, registerContractClass } from '@aztec/aztec.js/deployment'; +import { type BlobSinkServer, createBlobSinkServer } from '@aztec/blob-sink'; import { type DeployL1ContractsArgs, createL1Clients, getL1ContractsConfigEnvVars, l1Artifacts } from '@aztec/ethereum'; import { EthCheatCodesWithState, startAnvil } from '@aztec/ethereum/test'; import { asyncMap } from '@aztec/foundation/async-map'; @@ -29,6 +30,7 @@ import { type Anvil } from '@viem/anvil'; import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; import { copySync, removeSync } from 'fs-extra/esm'; import fs from 'fs/promises'; +import getPort from 'get-port'; import { tmpdir } from 'os'; import path, { join } from 'path'; import { type Hex, getContract } from 'viem'; @@ -53,6 +55,7 @@ export type SubsystemsContext = { watcher: AnvilTestWatcher; cheatCodes: CheatCodes; dateProvider: TestDateProvider; + blobSink: BlobSinkServer; directoryToCleanup?: string; }; @@ -254,6 +257,7 @@ async function teardown(context: SubsystemsContext | undefined) { await context.bbConfig?.cleanup(); await context.anvil.stop(); await context.watcher.stop(); + await context.blobSink.stop(); if (context.directoryToCleanup) { await fs.rm(context.directoryToCleanup, { recursive: true, force: true }); } @@ -278,6 +282,8 @@ async function setupFromFresh( ): Promise { logger.verbose(`Initializing state...`); + const blobSinkPort = await getPort(); + // Fetch the AztecNode config. // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. const aztecNodeConfig: AztecNodeConfig & SetupOptions = { ...getConfigEnvVars(), ...opts }; @@ -291,6 +297,17 @@ async function setupFromFresh( } else { aztecNodeConfig.dataDirectory = statePath; } + aztecNodeConfig.blobSinkUrl = `http://localhost:${blobSinkPort}`; + + // Setup blob sink service + const blobSink = await createBlobSinkServer({ + port: blobSinkPort, + dataStoreConfig: { + dataDirectory: aztecNodeConfig.dataDirectory, + dataStoreMapSizeKB: aztecNodeConfig.dataStoreMapSizeKB, + }, + }); + await blobSink.start(); // Start anvil. We go via a wrapper script to ensure if the parent dies, anvil dies. logger.verbose('Starting anvil...'); @@ -407,6 +424,7 @@ async function setupFromFresh( watcher, cheatCodes, dateProvider, + blobSink, directoryToCleanup, }; } @@ -420,12 +438,25 @@ async function setupFromState(statePath: string, logger: Logger): Promise Promise; }; @@ -382,6 +387,11 @@ export async function setup( return await setupWithRemoteEnvironment(publisherHdAccount!, config, logger, numberOfAccounts); } + // Blob sink service - blobs get posted here and served from here + const blobSinkPort = await getPort(); + const blobSink = await createBlobSinkServer({ port: blobSinkPort }); + config.blobSinkUrl = `http://127.0.0.1:${blobSinkPort}`; + const deployL1ContractsValues = opts.deployL1ContractsValues ?? 
(await setupL1Contracts(config.l1RpcUrl, publisherHdAccount!, logger, opts, chain)); @@ -494,6 +504,7 @@ export async function setup( await anvil?.stop(); await watcher.stop(); + await blobSink?.stop(); if (directoryToCleanup) { logger.verbose(`Cleaning up data directory at ${directoryToCleanup}`); @@ -514,6 +525,7 @@ export async function setup( sequencer, watcher, dateProvider, + blobSink, teardown, }; }
diff --git a/yarn-project/end-to-end/tsconfig.json b/yarn-project/end-to-end/tsconfig.json index 08932fbdb4a..a8117b5a5db 100644 --- a/yarn-project/end-to-end/tsconfig.json +++ b/yarn-project/end-to-end/tsconfig.json @@ -21,6 +21,9 @@ { "path": "../bb-prover" }, + { + "path": "../blob-sink" + }, { "path": "../bot" },
diff --git a/yarn-project/foundation/src/blob/blob.test.ts b/yarn-project/foundation/src/blob/blob.test.ts index e4a5746ec06..da4caa8fc74 100644 --- a/yarn-project/foundation/src/blob/blob.test.ts +++ b/yarn-project/foundation/src/blob/blob.test.ts @@ -78,15 +78,19 @@ describe('blob', () => { // This test ensures that the Blob class correctly matches the c-kzg lib // The values here are used to test Noir's blob evaluation in noir-projects/noir-protocol-circuits/crates/blob/src/blob.nr -> test_400 const blobItems = Array(400).fill(new Fr(3));
- const ourBlob = new Blob(blobItems);
+ const ourBlob = Blob.fromFields(blobItems);
 const blobItemsHash = poseidon2Hash(Array(400).fill(new Fr(3)));
 expect(blobItemsHash).toEqual(ourBlob.fieldsHash);
-
 expect(blobToKzgCommitment(ourBlob.data)).toEqual(ourBlob.commitment);
+
+ // We add zeros before getting the commitment, as we do not store the blob along with
+ // all of the zeros
+ const dataWithZeros = Buffer.concat([ourBlob.data], BYTES_PER_BLOB);
+ expect(blobToKzgCommitment(dataWithZeros)).toEqual(ourBlob.commitment);
 const z = poseidon2Hash([blobItemsHash, ...ourBlob.commitmentToFields()]);
 expect(z).toEqual(ourBlob.challengeZ);
- const res = computeKzgProof(ourBlob.data, ourBlob.challengeZ.toBuffer());
+ const res = computeKzgProof(dataWithZeros, ourBlob.challengeZ.toBuffer());
 expect(res[0]).toEqual(ourBlob.proof);
 expect(res[1]).toEqual(ourBlob.evaluationY);
@@ -112,8 +116,9 @@ describe('blob', () => { const blobItemsHash = poseidon2Hash(blobItems); const blobs = Blob.getBlobs(blobItems); blobs.forEach(ourBlob => {
- // const ourBlob = new Blob(blobItems.slice(j * FIELD_ELEMENTS_PER_BLOB, (j + 1) * FIELD_ELEMENTS_PER_BLOB), blobItemsHash);
+ // const ourBlob = Blob.fromFields(blobItems.slice(j * FIELD_ELEMENTS_PER_BLOB, (j + 1) * FIELD_ELEMENTS_PER_BLOB), blobItemsHash);
 expect(blobItemsHash).toEqual(ourBlob.fieldsHash);
+
 expect(blobToKzgCommitment(ourBlob.data)).toEqual(ourBlob.commitment);
 const z = poseidon2Hash([blobItemsHash, ...ourBlob.commitmentToFields()]);
@@ -132,4 +137,11 @@ expect(isValid).toBe(true); }); });
+
+ it('should serialise and deserialise a blob', () => {
+ const blob = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]);
+ const blobBuffer = blob.toBuffer();
+ const deserialisedBlob = Blob.fromBuffer(blobBuffer);
+ expect(blob.fieldsHash.equals(deserialisedBlob.fieldsHash)).toBe(true);
+ });
});
diff --git a/yarn-project/foundation/src/blob/index.ts b/yarn-project/foundation/src/blob/index.ts index 6c1651f4c56..dddb124f1d3 100644 --- a/yarn-project/foundation/src/blob/index.ts +++ b/yarn-project/foundation/src/blob/index.ts @@ -3,7 +3,7 @@ import type { Blob as BlobBuffer } from 'c-kzg'; import { poseidon2Hash, sha256 } from '../crypto/index.js'; import { Fr } from 
'../fields/index.js';
-import { serializeToBuffer } from '../serialize/index.js';
+import { BufferReader, serializeToBuffer } from '../serialize/index.js';
 // Importing directly from 'c-kzg' does not work, ignoring import/no-named-as-default-member err: /* eslint-disable import/no-named-as-default-member */ @@ -36,48 +36,47 @@ export const VERSIONED_HASH_VERSION_KZG = 0x01; * A class to create, manage, and prove EVM blobs. */ export class Blob {
- /** The blob to be broadcast on L1 in bytes form. */
- public readonly data: BlobBuffer;
- /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */
- public readonly fieldsHash: Fr;
- /** Challenge point z (= H(H(tx_effects), kzgCommmitment). Used such that p(z) = y. */
- public readonly challengeZ: Fr;
- /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */
- public readonly evaluationY: Buffer;
- /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */
- public readonly commitment: Buffer;
- /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */
- public readonly proof: Buffer;
- constructor(
- /** All fields to be broadcast in the blob. */
- fields: Fr[],
- /** If we want to broadcast more fields than fit into a blob, we hash those and used it as the fieldsHash across all blobs.
- * This is much simpler and cheaper in the circuit to do, but MUST BE CHECKED before injecting here.
- */
- multiBlobFieldsHash?: Fr,
- ) {
+ /** The blob to be broadcast on L1 in bytes form. */
+ public readonly data: BlobBuffer,
+ /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */
+ public readonly fieldsHash: Fr,
+ /** Challenge point z (= H(H(tx_effects), kzgCommitment)). Used such that p(z) = y. */
+ public readonly challengeZ: Fr,
+ /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */
+ public readonly evaluationY: Buffer,
+ /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */
+ public readonly commitment: Buffer,
+ /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */
+ public readonly proof: Buffer,
+ ) {}
+
+ static fromFields(fields: Fr[], multiBlobFieldsHash?: Fr): Blob {
 if (fields.length > FIELD_ELEMENTS_PER_BLOB) { throw new Error( `Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELD_ELEMENTS_PER_BLOB}`, ); }
- this.data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB);
+ const dataWithoutZeros = serializeToBuffer(fields);
+ const data = Buffer.concat([dataWithoutZeros], BYTES_PER_BLOB);
+
 // This matches the output of SpongeBlob.squeeze() in the blob circuit
- this.fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : poseidon2Hash(fields);
- this.commitment = Buffer.from(blobToKzgCommitment(this.data));
- this.challengeZ = poseidon2Hash([this.fieldsHash, ...this.commitmentToFields()]);
- const res = computeKzgProof(this.data, this.challengeZ.toBuffer());
- if (!verifyKzgProof(this.commitment, this.challengeZ.toBuffer(), res[1], res[0])) {
+ const fieldsHash = multiBlobFieldsHash ? 
multiBlobFieldsHash : poseidon2Hash(fields);
+ const commitment = Buffer.from(blobToKzgCommitment(data));
+ const challengeZ = poseidon2Hash([fieldsHash, ...commitmentToFields(commitment)]);
+ const res = computeKzgProof(data, challengeZ.toBuffer());
+ if (!verifyKzgProof(commitment, challengeZ.toBuffer(), res[1], res[0])) {
 throw new Error(`KZG proof did not verify.`); }
- this.proof = Buffer.from(res[0]);
- this.evaluationY = Buffer.from(res[1]);
+ const proof = Buffer.from(res[0]);
+ const evaluationY = Buffer.from(res[1]);
+
+ return new Blob(dataWithoutZeros, fieldsHash, challengeZ, evaluationY, commitment, proof);
 }
 // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48] commitmentToFields(): [Fr, Fr] {
- return [new Fr(this.commitment.subarray(0, 31)), new Fr(this.commitment.subarray(31, 48))];
+ return commitmentToFields(this.commitment);
 }
 // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers @@ -93,6 +92,49 @@ export class Blob { return hash; }
+ toBuffer(): Buffer {
+ return Buffer.from(
+ serializeToBuffer(
+ this.data.length,
+ this.data,
+ this.fieldsHash,
+ this.challengeZ,
+ this.evaluationY.length,
+ this.evaluationY,
+ this.commitment.length,
+ this.commitment,
+ this.proof.length,
+ this.proof,
+ ),
+ );
+ }
+
+ static fromBuffer(buf: Buffer | BufferReader): Blob {
+ const reader = BufferReader.asReader(buf);
+ return new Blob(
+ reader.readUint8Array(),
+ reader.readObject(Fr),
+ reader.readObject(Fr),
+ reader.readBuffer(),
+ reader.readBuffer(),
+ reader.readBuffer(),
+ );
+ }
+
+ /**
+ * Pad the blob data to its full size before posting
+ */
+ get dataWithZeros(): BlobBuffer {
+ return Buffer.concat([this.data], BYTES_PER_BLOB);
+ }
+
+ /**
+ * Get the size of the blob in bytes
+ */
+ getSize() {
+ return this.data.length;
+ }
+
 // Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile: // * input[:32] - versioned_hash // * input[32:64] - z @@ -145,8 +187,13 @@ export class Blob { const res = []; for (let i = 0; i < numBlobs; i++) { const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ? 
fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB;
- res.push(new Blob(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash));
+ res.push(Blob.fromFields(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash));
 } return res; } }
+
+// 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
+function commitmentToFields(commitment: Buffer): [Fr, Fr] {
+ return [new Fr(commitment.subarray(0, 31)), new Fr(commitment.subarray(31, 48))];
+}
diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 4ce23b9946e..3fa142074e0 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -100,6 +100,7 @@ export type EnvVar = | 'P2P_UDP_ANNOUNCE_ADDR' | 'P2P_UDP_LISTEN_ADDR' | 'PEER_ID_PRIVATE_KEY' + | 'PROVER_BLOB_SINK_URL' | 'PROOF_VERIFIER_L1_START_BLOCK' | 'PROOF_VERIFIER_POLL_INTERVAL_MS' | 'PROVER_AGENT_ENABLED' @@ -136,6 +137,7 @@ export type EnvVar = | 'REGISTRY_CONTRACT_ADDRESS' | 'ROLLUP_CONTRACT_ADDRESS' | 'SEQ_ALLOWED_SETUP_FN' + | 'SEQ_BLOB_SINK_URL' | 'SEQ_MAX_BLOCK_SIZE_IN_BYTES' | 'SEQ_MAX_TX_PER_BLOCK' | 'SEQ_MIN_TX_PER_BLOCK'
diff --git a/yarn-project/foundation/src/serialize/buffer_reader.ts b/yarn-project/foundation/src/serialize/buffer_reader.ts index 7abe3f59336..84b2ea86277 100644 --- a/yarn-project/foundation/src/serialize/buffer_reader.ts +++ b/yarn-project/foundation/src/serialize/buffer_reader.ts @@ -307,6 +307,20 @@ export class BufferReader { return this.readBytes(size); }
+ /**
+ * Reads a byte array from the current position of the reader and advances the index.
+ * The method first reads the size (number) of bytes to be read, and then returns
+ * a Uint8Array of that size containing the bytes. Useful for reading variable-length
+ * binary data encoded as (size, data) format.
+ *
+ * @returns A Uint8Array containing the read bytes.
+ */
+ public readUint8Array(): Uint8Array {
+ const size = this.readNumber();
+ this.#rangeCheck(size);
+ return this.readBytes(size);
+ }
+
 /** * Reads and constructs a map object from the current buffer using the provided deserializer. * The method reads the number of entries in the map, followed by iterating through each key-value pair. 
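For orientation, the new `readUint8Array` above is the read-side counterpart of `Blob.toBuffer` from the previous hunk: every variable-length field is written as a (size, data) pair. A minimal round-trip sketch under that assumption (illustrative only, not part of the diff; the `Fr.random` values are arbitrary):

import { Blob } from '@aztec/foundation/blob';
import { Fr } from '@aztec/foundation/fields';

// Serialize a small blob and read it back; `data` is stored without its
// trailing zeros, so the serialized form stays compact.
const blob = Blob.fromFields([Fr.random(), Fr.random()]);
const roundTripped = Blob.fromBuffer(blob.toBuffer());
console.assert(roundTripped.commitment.equals(blob.commitment));
// `dataWithZeros` re-pads to BYTES_PER_BLOB before any KZG or L1 call.
console.assert(roundTripped.dataWithZeros.length > roundTripped.data.length);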
diff --git a/yarn-project/foundation/src/serialize/serialize.ts b/yarn-project/foundation/src/serialize/serialize.ts index 6698a7081e2..fc2638ac3e7 100644 --- a/yarn-project/foundation/src/serialize/serialize.ts +++ b/yarn-project/foundation/src/serialize/serialize.ts @@ -109,6 +109,7 @@ export function deserializeField(buf: Buffer, offset = 0) { export type Bufferable = | boolean | Buffer + | Uint8Array | number | bigint | string diff --git a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts index dfc5df7f105..cb3abd077e3 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts @@ -4,7 +4,7 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { AztecKVTxPool } from './aztec_kv_tx_pool.js'; import { describeTxPool } from './tx_pool_test_suite.js'; -describe('In-Memory TX pool', () => { +describe('KV TX pool', () => { let txPool: AztecKVTxPool; beforeEach(() => { txPool = new AztecKVTxPool(openTmpStore(), new NoopTelemetryClient()); diff --git a/yarn-project/package.json b/yarn-project/package.json index 0a2e6fbc9eb..68b32f53c60 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -26,6 +26,7 @@ "aztec-node", "validator-client", "bb-prover", + "blob-sink", "bot", "builder", "pxe", diff --git a/yarn-project/sequencer-client/package.json b/yarn-project/sequencer-client/package.json index 6721e3080b5..fb874975daa 100644 --- a/yarn-project/sequencer-client/package.json +++ b/yarn-project/sequencer-client/package.json @@ -64,6 +64,7 @@ "@types/node": "^18.7.23", "concurrently": "^7.6.0", "eslint": "^8.37.0", + "express": "^4.21.1", "jest": "^29.5.0", "jest-mock-extended": "^3.0.3", "levelup": "^5.1.1", diff --git a/yarn-project/sequencer-client/src/publisher/config.ts b/yarn-project/sequencer-client/src/publisher/config.ts index 367f2aa6677..d77efa57ca2 100644 --- a/yarn-project/sequencer-client/src/publisher/config.ts +++ b/yarn-project/sequencer-client/src/publisher/config.ts @@ -24,6 +24,11 @@ export type PublisherConfig = L1TxUtilsConfig & { * The interval to wait between publish retries. */ l1PublishRetryIntervalMS: number; + + /** + * The URL of the blob sink. 
+ */
+ blobSinkUrl?: string;
 }; export const getTxSenderConfigMappings: ( @@ -72,6 +77,11 @@ export const getPublisherConfigMappings: ( description: 'The interval to wait between publish retries.', }, ...l1TxUtilsConfigMappings,
+ blobSinkUrl: {
+ env: `${scope}_BLOB_SINK_URL`,
+ description: 'The URL of the blob sink.',
+ parseEnv: (val?: string) => val,
+ },
 }); export function getPublisherConfigFromEnv(scope: 'PROVER' | 'SEQ'): PublisherConfig {
diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 64ac88119d5..689c03c71d1 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -13,6 +13,9 @@ import { sleep } from '@aztec/foundation/sleep'; import { RollupAbi } from '@aztec/l1-artifacts'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';
+import { jest } from '@jest/globals';
+import express, { json } from 'express';
+import { type Server } from 'http';
 import { type MockProxy, mock } from 'jest-mock-extended'; import { type GetTransactionReceiptReturnType, @@ -68,6 +71,9 @@ class MockRollupContract { } }
+const BLOB_SINK_PORT = 5052;
+const BLOB_SINK_URL = `http://localhost:${BLOB_SINK_PORT}`;
+
 describe('L1Publisher', () => { let rollupContractRead: MockProxy; let rollupContractWrite: MockProxy; let publicClient: MockProxy; let l1TxUtils: MockProxy; let proposeTxHash: `0x${string}`; let proposeTxReceipt: GetTransactionReceiptReturnType; let l2Block: L2Block; let header: Buffer; let archive: Buffer; let blockHash: Buffer; let body: Buffer;
+ let mockBlobSinkServer: Server | undefined = undefined;
+
+ // An l1 publisher with some private methods exposed
 let publisher: L1Publisher; const GAS_GUESS = 300_000n; beforeEach(() => {
+ mockBlobSinkServer = undefined;
+
 l2Block = L2Block.random(42); header = l2Block.header.toBuffer(); archive = l2Block.archive.root.toBuffer(); blockHash = l2Block.header.hash().toBuffer(); body = l2Block.body.toBuffer(); proposeTxHash = `0x${Buffer.from('txHashPropose').toString('hex')}`; proposeTxReceipt = { transactionHash: proposeTxHash, status: 'success', logs: [], } as unknown as GetTransactionReceiptReturnType; rollupContractWrite = mock(); rollupContractRead = mock(); const rollupContract = new MockRollupContract(rollupContractWrite, rollupContractRead); publicClient = mock(); l1TxUtils = mock(); const config = {
+ blobSinkUrl: BLOB_SINK_URL,
 l1RpcUrl: `http://127.0.0.1:8545`, l1ChainId: 1, publisherPrivateKey: `0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80`, l1Contracts: { rollupAddress: EthAddress.ZERO.toString() }, l1PublishRetryIntervalMS: 1, ethereumSlotDuration: ETHEREUM_SLOT_DURATION, } as unknown as TxSenderConfig & PublisherConfig & Pick<L1ReaderConfig, 'l1ChainId'>; publisher = new L1Publisher(config, new NoopTelemetryClient()); (publisher as any)['rollupContract'] = rollupContract; (publisher as any)['l1TxUtils'] = l1TxUtils; publisher as any; (l1TxUtils as any).sendAndMonitorTransaction.mockResolvedValue({ transactionHash: proposeTxHash, status: 'success' }); (l1TxUtils as any).estimateGas.mockResolvedValue(GAS_GUESS); });
+ const closeServer = (server: Server): Promise<void> => {
+ return new Promise((resolve, reject) => {
+ server.close(err => {
+ if (err) {
+ reject(err);
+ return;
+ }
+ resolve();
+ });
+ });
+ };
+
+ afterEach(async () => {
+ if (mockBlobSinkServer) {
+ await closeServer(mockBlobSinkServer);
+ mockBlobSinkServer = undefined;
+ }
+ });
+
+ // Run a mock blob sink in the background, and test that the correct data is sent to it
+ const runBlobSinkServer = (blobs: Blob[]) => {
+ const app = express();
+ app.use(json({ limit: '10mb' }));
+
+ app.post('/blob_sidecar', (req, res) => {
+ const blobsBuffers = req.body.blobs.map((b: { index: number; blob: { type: string; data: string } }) =>
+ Blob.fromBuffer(Buffer.from(b.blob.data)),
+ );
+
+ expect(blobsBuffers).toEqual(blobs);
+ res.status(200).send();
+ });
+
+ return new Promise<void>(resolve => {
+ mockBlobSinkServer = app.listen(BLOB_SINK_PORT, () => {
+ // Resolve when the server is listening
+ resolve();
+ });
+ });
+ };
+
 it('publishes and proposes l2 block to l1', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash);
- const result = await publisher.proposeL2Block(l2Block);
+ const kzg = Blob.getViemKzgInstance();
- 
expect(result).toEqual(true); + const expectedBlobs = Blob.getBlobs(l2Block.body.toBlobFields()); - const kzg = Blob.getViemKzgInstance(); + // Spy on the blob sink sender so we can check that the blobs were forwarded + const sendToBlobSinkSpy = jest.spyOn(publisher as any, 'sendBlobsToBlobSink'); - const blobs = Blob.getBlobs(l2Block.body.toBlobFields()); + // Start the mock blob sink server, whose handler asserts that it receives the expected blobs + await runBlobSinkServer(expectedBlobs); - const blobInput = Blob.getEthBlobEvaluationInputs(blobs); + const result = await publisher.proposeL2Block(l2Block); + + expect(result).toEqual(true); + + const blobInput = Blob.getEthBlobEvaluationInputs(expectedBlobs); const args = [ { @@ -173,8 +232,14 @@ describe('L1Publisher', () => { data: encodeFunctionData({ abi: rollupContract.abi, functionName: 'propose', args }), }, { fixedGas: GAS_GUESS + L1Publisher.PROPOSE_GAS_GUESS }, - { blobs: blobs.map(b => b.data), kzg, maxFeePerBlobGas: 10000000000n }, + { blobs: expectedBlobs.map(b => b.dataWithZeros), kzg, maxFeePerBlobGas: 10000000000n }, ); + + expect(sendToBlobSinkSpy).toHaveBeenCalledTimes(1); + // If this does not resolve to true, the mock server's handler errored and + // the assertions that ran inside it failed + const returnValuePromise = sendToBlobSinkSpy.mock.results[0].value; + expect(await returnValuePromise).toBe(true); }); it('does not retry if sending a propose tx fails', async () => { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index c27d4710031..1d82c69b33d 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -95,6 +95,8 @@ export type MinimalTransactionReceipt = { logs: any[]; /** Block number in which this tx was mined. */ blockNumber: bigint; + /** Hash of the block in which this tx was mined. */ + blockHash: `0x${string}`; }; /** Arguments to the process method of the rollup contract */ @@ -175,6 +177,8 @@ export class L1Publisher { protected account: PrivateKeyAccount; protected ethereumSlotDuration: bigint; + private blobSinkUrl: string | undefined; + // @note - with blobs, the below estimate seems too large. // Total used for full block from int_l1_pub e2e test: 1m (of which 86k is 1x blob) // Total used for emptier block from above test: 429k (of which 84k is 1x blob) @@ -189,6 +193,7 @@ export class L1Publisher { ) { this.sleepTimeMs = config?.l1PublishRetryIntervalMS ?? 60_000; this.ethereumSlotDuration = BigInt(config.ethereumSlotDuration); + this.blobSinkUrl = config.blobSinkUrl; this.metrics = new L1PublisherMetrics(client, 'L1Publisher'); const { l1RpcUrl: rpcUrl, l1ChainId: chainId, publisherPrivateKey, l1Contracts } = config; @@ -594,15 +599,18 @@ export class L1Publisher { const consensusPayload = new ConsensusPayload(block.header, block.archive.root, txHashes ?? []); const digest = getHashedSignaturePayload(consensusPayload, SignatureDomainSeperator.blockAttestation); + + const blobs = Blob.getBlobs(block.body.toBlobFields()); const proposeTxArgs = { header: block.header.toBuffer(), archive: block.archive.root.toBuffer(), blockHash: block.header.hash().toBuffer(), body: block.body.toBuffer(), - blobs: Blob.getBlobs(block.body.toBlobFields()), + blobs, attestations, txHashes: txHashes ??
[], }; + + // Publish body and propose block (if not already published) if (this.interrupted) { this.log.verbose('L2 block data syncing interrupted while processing blocks.', ctx); @@ -647,6 +655,12 @@ export class L1Publisher { }; this.log.verbose(`Published L2 block to L1 rollup contract`, { ...stats, ...ctx }); this.metrics.recordProcessBlockTx(timer.ms(), stats); + + // Send the blobs to the blob sink + this.sendBlobsToBlobSink(receipt.blockHash, blobs).catch(err => { + this.log.error('Failed to send blobs to blob sink', err); + }); + return true; } @@ -661,7 +675,7 @@ export class L1Publisher { address: this.rollupContract.address, }, { - blobs: proposeTxArgs.blobs.map(b => b.data), + blobs: proposeTxArgs.blobs.map(b => b.dataWithZeros), kzg, maxFeePerBlobGas: 10000000000n, }, @@ -966,7 +980,7 @@ export class L1Publisher { }, {}, { - blobs: encodedData.blobs.map(b => b.data), + blobs: encodedData.blobs.map(b => b.dataWithZeros), kzg, maxFeePerBlobGas: 10000000000n, //This is 10 gwei, taken from DEFAULT_MAX_FEE_PER_GAS }, @@ -1056,7 +1070,7 @@ export class L1Publisher { fixedGas: gas, }, { - blobs: encodedData.blobs.map(b => b.data), + blobs: encodedData.blobs.map(b => b.dataWithZeros), kzg, maxFeePerBlobGas: 10000000000n, //This is 10 gwei, taken from DEFAULT_MAX_FEE_PER_GAS }, @@ -1095,7 +1109,7 @@ export class L1Publisher { }, { fixedGas: gas }, { - blobs: encodedData.blobs.map(b => b.data), + blobs: encodedData.blobs.map(b => b.dataWithZeros), kzg, maxFeePerBlobGas: 10000000000n, //This is 10 gwei, taken from DEFAULT_MAX_FEE_PER_GAS }, @@ -1137,6 +1151,7 @@ export class L1Publisher { gasPrice: receipt.effectiveGasPrice, logs: receipt.logs, blockNumber: receipt.blockNumber, + blockHash: receipt.blockHash, }; } @@ -1152,9 +1167,51 @@ export class L1Publisher { protected async sleepOrInterrupted() { await this.interruptibleSleep.sleep(this.sleepTimeMs); } + + /** + * Send blobs to the blob sink. + * + * If a blob sink URL is configured, the blobs are forwarded to it. + * For now we use the blockHash as the identifier for the blobs; + * in the future this will move to the beacon block id, which takes a bit more work + * to calculate and will need to be mocked in e2e tests. + */ + protected async sendBlobsToBlobSink(blockHash: string, blobs: Blob[]): Promise<boolean> { + // TODO(md): for now we assume the blob indexes will be 0, 1, 2, ... + // In reality they will not be, but for testing purposes this is fine + if (!this.blobSinkUrl) { + this.log.verbose('No blob sink url configured'); + return false; + } + + this.log.verbose(`Sending ${blobs.length} blobs to blob sink`); + try { + const res = await fetch(`${this.blobSinkUrl}/blob_sidecar`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + // eslint-disable-next-line camelcase + block_id: blockHash, + blobs: blobs.map((b, i) => ({ blob: b.toBuffer(), index: i })), + }), + }); + + if (res.ok) { + return true; + } + + this.log.error('Failed to send blobs to blob sink', res.status); + return false; + } catch (err) { + this.log.error(`Error sending blobs to blob sink`, err); + return false; + } + } } -/** +/* * Returns cost of calldata usage in Ethereum. * @param data - Calldata. * @returns 4 for each zero byte, 16 for each nonzero.
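A note on the JSON wire format used by `sendBlobsToBlobSink` above: `JSON.stringify` serializes a Node `Buffer` as `{ "type": "Buffer", "data": number[] }`, which is why the mock blob sink in the test rebuilds each blob with `Buffer.from(b.blob.data)` and types the incoming `data` field as a number array. The sketch below is illustrative only, not part of this diff: the `block_id` value and blob bytes are made up, and plain `assert` stands in for the test framework. It shows the round trip under that assumption:

import { strict as assert } from 'assert';

// Publisher side: a Buffer embedded in a JSON body, shaped like the one
// sendBlobsToBlobSink builds.
const blobBytes = Buffer.from('0123456789abcdef', 'hex'); // illustrative blob bytes
const wireBody = JSON.stringify({
  // eslint-disable-next-line camelcase
  block_id: '0x1234', // illustrative block hash
  blobs: [{ blob: blobBytes, index: 0 }],
});

// Sink side: express's json() middleware yields the same shape as a plain
// JSON.parse, so the Buffer arrives as { type: 'Buffer', data: number[] }.
const parsed = JSON.parse(wireBody) as {
  block_id: string;
  blobs: { blob: { type: 'Buffer'; data: number[] }; index: number }[];
};

// The original bytes are recovered from the data array. Passing a string to
// Buffer.from here would be decoded as utf8 instead, so the number[] shape matters
// for the equality assertion in the mock server.
const recovered = Buffer.from(parsed.blobs[0].blob.data);
assert.ok(recovered.equals(blobBytes));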
diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index 5e2d33befe8..d68b39399c6 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -6,6 +6,9 @@ * @see {@link https://opentelemetry.io/docs/specs/semconv/general/metrics/ | OpenTelemetry Metrics} for naming conventions. */ +export const BLOB_SINK_OBJECTS_IN_BLOB_STORE = 'aztec.blob_sink.objects_in_blob_store'; +export const BLOB_SINK_BLOB_SIZE = 'aztec.blob_sink.blob_size'; + /** How long it takes to simulate a circuit */ export const CIRCUIT_SIMULATION_DURATION = 'aztec.circuit.simulation.duration'; export const CIRCUIT_SIMULATION_INPUT_SIZE = 'aztec.circuit.simulation.input_size'; diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index c4806a57b34..e1fa34cd008 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -334,6 +334,32 @@ __metadata: languageName: node linkType: soft +"@aztec/blob-sink@workspace:^, @aztec/blob-sink@workspace:blob-sink": + version: 0.0.0-use.local + resolution: "@aztec/blob-sink@workspace:blob-sink" + dependencies: + "@aztec/circuit-types": "workspace:^" + "@aztec/foundation": "workspace:^" + "@aztec/kv-store": "workspace:*" + "@aztec/telemetry-client": "workspace:*" + "@jest/globals": "npm:^29.5.0" + "@types/jest": "npm:^29.5.0" + "@types/memdown": "npm:^3.0.0" + "@types/node": "npm:^18.7.23" + "@types/source-map-support": "npm:^0.5.10" + "@types/supertest": "npm:^6.0.2" + express: "npm:^4.21.1" + jest: "npm:^29.5.0" + jest-mock-extended: "npm:^3.0.3" + source-map-support: "npm:^0.5.21" + supertest: "npm:^7.0.0" + ts-node: "npm:^10.9.1" + tslib: "npm:^2.4.0" + typescript: "npm:^5.0.4" + zod: "npm:^3.23.8" + languageName: unknown + linkType: soft + "@aztec/bot@workspace:^, @aztec/bot@workspace:bot": version: 0.0.0-use.local resolution: "@aztec/bot@workspace:bot" @@ -531,6 +557,7 @@ __metadata: "@aztec/aztec-node": "workspace:^" "@aztec/aztec.js": "workspace:^" "@aztec/bb-prover": "workspace:^" + "@aztec/blob-sink": "workspace:^" "@aztec/bot": "workspace:^" "@aztec/circuit-types": "workspace:^" "@aztec/circuits.js": "workspace:^" @@ -795,7 +822,7 @@ __metadata: languageName: unknown linkType: soft -"@aztec/kv-store@workspace:^, @aztec/kv-store@workspace:kv-store": +"@aztec/kv-store@workspace:*, @aztec/kv-store@workspace:^, @aztec/kv-store@workspace:kv-store": version: 0.0.0-use.local resolution: "@aztec/kv-store@workspace:kv-store" dependencies: @@ -1188,6 +1215,7 @@ __metadata: "@types/node": "npm:^18.7.23" concurrently: "npm:^7.6.0" eslint: "npm:^8.37.0" + express: "npm:^4.21.1" jest: "npm:^29.5.0" jest-mock-extended: "npm:^3.0.3" levelup: "npm:^5.1.1" @@ -1240,7 +1268,7 @@ __metadata: languageName: unknown linkType: soft -"@aztec/telemetry-client@workspace:^, @aztec/telemetry-client@workspace:telemetry-client": +"@aztec/telemetry-client@workspace:*, @aztec/telemetry-client@workspace:^, @aztec/telemetry-client@workspace:telemetry-client": version: 0.0.0-use.local resolution: "@aztec/telemetry-client@workspace:telemetry-client" dependencies: @@ -5713,6 +5741,18 @@ __metadata: languageName: node linkType: hard +"@types/superagent@npm:^8.1.0": + version: 8.1.9 + resolution: "@types/superagent@npm:8.1.9" + dependencies: + "@types/cookiejar": "npm:^2.1.5" + "@types/methods": "npm:^1.1.4" + "@types/node": "npm:*" + form-data: "npm:^4.0.0" + checksum: 10/6d9687b0bc3d693b900ef76000b02437a70879c3219b28606879c086d786bb1e48429813e72e32dd0aafc94c053a78a2aa8be67c45bc8e6b968ca62d6d5cc554 + 
languageName: node + linkType: hard + "@types/supertest@npm:^2.0.12": version: 2.0.16 resolution: "@types/supertest@npm:2.0.16" @@ -5722,6 +5762,16 @@ __metadata: languageName: node linkType: hard +"@types/supertest@npm:^6.0.2": + version: 6.0.2 + resolution: "@types/supertest@npm:6.0.2" + dependencies: + "@types/methods": "npm:^1.1.4" + "@types/superagent": "npm:^8.1.0" + checksum: 10/4b67fb2d1bfbb7ff0a7dfaaf190cdf2e0014522615fb2dc53c214bdac95b4ee42696dd1df13332c90a7765cc52934c9cc0c428bf0f9e8189167aef01042e7448 + languageName: node + linkType: hard + "@types/wrap-ansi@npm:^3.0.0": version: 3.0.0 resolution: "@types/wrap-ansi@npm:3.0.0" @@ -10699,7 +10749,7 @@ __metadata: languageName: node linkType: hard -"express@npm:^4.19.2": +"express@npm:^4.19.2, express@npm:^4.21.1": version: 4.21.1 resolution: "express@npm:4.21.1" dependencies: @@ -11072,6 +11122,17 @@ __metadata: languageName: node linkType: hard +"formidable@npm:^3.5.1": + version: 3.5.2 + resolution: "formidable@npm:3.5.2" + dependencies: + dezalgo: "npm:^1.0.4" + hexoid: "npm:^2.0.0" + once: "npm:^1.4.0" + checksum: 10/b9d87af44be8ba82f8f4955c240e65c559aedb84fecce6b294d97b256db66e6a20d50e799776fdf29ee46cb83857231d12c416c735696b18d3895b85620704f4 + languageName: node + linkType: hard + "forwarded@npm:0.2.0": version: 0.2.0 resolution: "forwarded@npm:0.2.0" @@ -11670,6 +11731,13 @@ __metadata: languageName: node linkType: hard +"hexoid@npm:^2.0.0": + version: 2.0.0 + resolution: "hexoid@npm:2.0.0" + checksum: 10/73d8e135bdd9326d0fa9ea05356741d48a3e67fbd3b2ce14c4f7b523a1cdabe70fa42f2c53447244886a0aecdf7873d4124abc30093a72d15188805f7a7ee406 + languageName: node + linkType: hard + "hmac-drbg@npm:^1.0.1": version: 1.0.1 resolution: "hmac-drbg@npm:1.0.1" @@ -18300,6 +18368,23 @@ __metadata: languageName: node linkType: hard +"superagent@npm:^9.0.1": + version: 9.0.2 + resolution: "superagent@npm:9.0.2" + dependencies: + component-emitter: "npm:^1.3.0" + cookiejar: "npm:^2.1.4" + debug: "npm:^4.3.4" + fast-safe-stringify: "npm:^2.1.1" + form-data: "npm:^4.0.0" + formidable: "npm:^3.5.1" + methods: "npm:^1.1.2" + mime: "npm:2.6.0" + qs: "npm:^6.11.0" + checksum: 10/d3c0c9051ceec84d5b431eaa410ad81bcd53255cea57af1fc66d683a24c34f3ba4761b411072a9bf489a70e3d5b586a78a0e6f2eac6a561067e7d196ddab0907 + languageName: node + linkType: hard + "supertest@npm:^6.3.3": version: 6.3.4 resolution: "supertest@npm:6.3.4" @@ -18310,6 +18395,16 @@ __metadata: languageName: node linkType: hard +"supertest@npm:^7.0.0": + version: 7.0.0 + resolution: "supertest@npm:7.0.0" + dependencies: + methods: "npm:^1.1.2" + superagent: "npm:^9.0.1" + checksum: 10/73bf2a37e13856a1b3e6a37b9df5cec8e506aa0360a5f5ecd989d1f4b0edf168883e306012e81e371d5252c17d4c7bef4ba30633dbf3877cbf52fc7af51cca9b + languageName: node + linkType: hard + "supports-color@npm:^2.0.0": version: 2.0.0 resolution: "supports-color@npm:2.0.0"