
Commit

feat: blob sink in sandbox without extra process (#11032)
Maddiaa0 authored Jan 9, 2025
1 parent 44e01f4 commit 4600f54
Showing 42 changed files with 390 additions and 71 deletions.
1 change: 1 addition & 0 deletions yarn-project/archiver/package.json
@@ -64,6 +64,7 @@
]
},
"dependencies": {
"@aztec/blob-sink": "workspace:^",
"@aztec/circuit-types": "workspace:^",
"@aztec/circuits.js": "workspace:^",
"@aztec/ethereum": "workspace:^",
4 changes: 4 additions & 0 deletions yarn-project/archiver/src/archiver/archiver.test.ts
@@ -1,3 +1,4 @@
import { type BlobSinkClientInterface } from '@aztec/blob-sink/client';
import { InboxLeaf, type L1RollupConstants, L2Block } from '@aztec/circuit-types';
import { GENESIS_ARCHIVE_ROOT, PrivateLog } from '@aztec/circuits.js';
import { DefaultL1ContractsConfig } from '@aztec/ethereum';
@@ -53,6 +54,7 @@ describe('Archiver', () => {

let publicClient: MockProxy<PublicClient<HttpTransport, Chain>>;
let instrumentation: MockProxy<ArchiverInstrumentation>;
let blobSinkClient: MockProxy<BlobSinkClientInterface>;
let archiverStore: ArchiverDataStore;
let now: number;
let l1Constants: L1RollupConstants;
@@ -92,6 +94,7 @@ describe('Archiver', () => {
);
}) as any,
});
blobSinkClient = mock<BlobSinkClientInterface>();

const tracer = new NoopTelemetryClient().getTracer();
instrumentation = mock<ArchiverInstrumentation>({ isEnabled: () => true, tracer });
@@ -109,6 +112,7 @@ describe('Archiver', () => {
{ rollupAddress, inboxAddress, registryAddress },
archiverStore,
{ pollingIntervalMs: 1000, batchSize: 1000 },
blobSinkClient,
instrumentation,
l1Constants,
);
7 changes: 5 additions & 2 deletions yarn-project/archiver/src/archiver/archiver.ts
@@ -1,3 +1,4 @@
import { type BlobSinkClientInterface } from '@aztec/blob-sink/client';
import {
type GetUnencryptedLogsResponse,
type InBlock,
@@ -115,6 +116,7 @@ export class Archiver implements ArchiveSource, Traceable {
private readonly l1Addresses: { rollupAddress: EthAddress; inboxAddress: EthAddress; registryAddress: EthAddress },
readonly dataStore: ArchiverDataStore,
private readonly config: { pollingIntervalMs: number; batchSize: number },
private readonly _blobSinkClient: BlobSinkClientInterface,
private readonly instrumentation: ArchiverInstrumentation,
private readonly l1constants: L1RollupConstants,
private readonly log: Logger = createLogger('archiver'),
@@ -145,7 +147,7 @@
public static async createAndSync(
config: ArchiverConfig,
archiverStore: ArchiverDataStore,
telemetry: TelemetryClient,
deps: { telemetry: TelemetryClient; blobSinkClient: BlobSinkClientInterface },
blockUntilSynced = true,
): Promise<Archiver> {
const chain = createEthereumChain(config.l1RpcUrl, config.l1ChainId);
@@ -176,7 +178,8 @@
pollingIntervalMs: config.archiverPollingIntervalMS ?? 10_000,
batchSize: config.archiverBatchSize ?? 100,
},
await ArchiverInstrumentation.new(telemetry, () => archiverStore.estimateSize()),
deps.blobSinkClient,
await ArchiverInstrumentation.new(deps.telemetry, () => archiverStore.estimateSize()),
{ l1StartBlock, l1GenesisTime, epochDuration, slotDuration, ethereumSlotDuration },
);
await archiver.start(blockUntilSynced);
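The createAndSync signature above now takes its external dependencies as a single deps object rather than a positional telemetry argument. A minimal before/after sketch of the call-site change, with config, archiverStore, telemetry and blobSinkClient as placeholders:

// before this commit
const archiver = await Archiver.createAndSync(config, archiverStore, telemetry, true);

// after this commit
const archiver = await Archiver.createAndSync(config, archiverStore, { telemetry, blobSinkClient }, true);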
4 changes: 3 additions & 1 deletion yarn-project/archiver/src/factory.ts
@@ -1,3 +1,4 @@
import { type BlobSinkClientInterface } from '@aztec/blob-sink/client';
import { type ArchiverApi, type Service } from '@aztec/circuit-types';
import {
type ContractClassPublic,
@@ -23,6 +24,7 @@ import { createArchiverClient } from './rpc/index.js';

export async function createArchiver(
config: ArchiverConfig & DataStoreConfig,
blobSinkClient: BlobSinkClientInterface,
telemetry: TelemetryClient = new NoopTelemetryClient(),
opts: { blockUntilSync: boolean } = { blockUntilSync: true },
): Promise<ArchiverApi & Maybe<Service>> {
@@ -31,7 +33,7 @@ export async function createArchiver(
const archiverStore = new KVArchiverDataStore(store, config.maxLogs);
await registerProtocolContracts(archiverStore);
await registerCommonContracts(archiverStore);
return Archiver.createAndSync(config, archiverStore, telemetry, opts.blockUntilSync);
return Archiver.createAndSync(config, archiverStore, { telemetry, blobSinkClient }, opts.blockUntilSync);
} else {
return createArchiverClient(config.archiverUrl);
}
3 changes: 3 additions & 0 deletions yarn-project/archiver/tsconfig.json
@@ -6,6 +6,9 @@
"tsBuildInfoFile": ".tsbuildinfo"
},
"references": [
{
"path": "../blob-sink"
},
{
"path": "../circuit-types"
},
1 change: 1 addition & 0 deletions yarn-project/aztec-node/package.json
@@ -64,6 +64,7 @@
"dependencies": {
"@aztec/archiver": "workspace:^",
"@aztec/bb-prover": "workspace:^",
"@aztec/blob-sink": "workspace:^",
"@aztec/circuit-types": "workspace:^",
"@aztec/circuits.js": "workspace:^",
"@aztec/epoch-cache": "workspace:^",
8 changes: 6 additions & 2 deletions yarn-project/aztec-node/src/aztec-node/server.ts
@@ -1,5 +1,6 @@
import { createArchiver } from '@aztec/archiver';
import { BBCircuitVerifier, TestCircuitVerifier } from '@aztec/bb-prover';
import { type BlobSinkClientInterface, createBlobSinkClient } from '@aztec/blob-sink/client';
import {
type AztecNode,
type ClientProtocolCircuitVerifier,
@@ -139,11 +140,13 @@ export class AztecNodeService implements AztecNode, Traceable {
logger?: Logger;
publisher?: L1Publisher;
dateProvider?: DateProvider;
blobSinkClient?: BlobSinkClientInterface;
} = {},
): Promise<AztecNodeService> {
const telemetry = deps.telemetry ?? new NoopTelemetryClient();
const log = deps.logger ?? createLogger('node');
const dateProvider = deps.dateProvider ?? new DateProvider();
const blobSinkClient = deps.blobSinkClient ?? createBlobSinkClient(config.blobSinkUrl);
const ethereumChain = createEthereumChain(config.l1RpcUrl, config.l1ChainId);
//validate that the actual chain id matches that specified in configuration
if (config.l1ChainId !== ethereumChain.chainInfo.id) {
@@ -152,7 +155,7 @@
);
}

const archiver = await createArchiver(config, telemetry, { blockUntilSync: true });
const archiver = await createArchiver(config, blobSinkClient, telemetry, { blockUntilSync: true });

// we identify the P2P transaction protocol by using the rollup contract address.
// this may well change in future
@@ -190,6 +193,7 @@
const sequencer = config.disableValidator
? undefined
: await SequencerClient.new(config, {
...deps,
validatorClient,
p2pClient,
worldStateSynchronizer,
@@ -199,7 +203,7 @@
l1ToL2MessageSource: archiver,
telemetry,
dateProvider,
...deps,
blobSinkClient,
});

return new AztecNodeService(
3 changes: 3 additions & 0 deletions yarn-project/aztec-node/tsconfig.json
@@ -12,6 +12,9 @@
{
"path": "../bb-prover"
},
{
"path": "../blob-sink"
},
{
"path": "../circuit-types"
},
1 change: 1 addition & 0 deletions yarn-project/aztec/package.json
@@ -35,6 +35,7 @@
"@aztec/aztec-node": "workspace:^",
"@aztec/aztec.js": "workspace:^",
"@aztec/bb-prover": "workspace:^",
"@aztec/blob-sink": "workspace:^",
"@aztec/bot": "workspace:^",
"@aztec/builder": "workspace:^",
"@aztec/circuit-types": "workspace:^",
5 changes: 4 additions & 1 deletion yarn-project/aztec/src/cli/cmds/start_archiver.ts
@@ -1,5 +1,6 @@
import { Archiver, type ArchiverConfig, KVArchiverDataStore, archiverConfigMappings } from '@aztec/archiver';
import { createLogger } from '@aztec/aztec.js';
import { createBlobSinkClient } from '@aztec/blob-sink/client';
import { ArchiverApiSchema } from '@aztec/circuit-types';
import { type NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server';
import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config';
@@ -31,7 +32,9 @@ export async function startArchiver(
const archiverStore = new KVArchiverDataStore(store, archiverConfig.maxLogs);

const telemetry = await createAndStartTelemetryClient(getTelemetryClientConfig());
const archiver = await Archiver.createAndSync(archiverConfig, archiverStore, telemetry, true);
// TODO(https://github.com/AztecProtocol/aztec-packages/issues/10056): place CL url in config here
const blobSinkClient = createBlobSinkClient();
const archiver = await Archiver.createAndSync(archiverConfig, archiverStore, { telemetry, blobSinkClient }, true);
services.archiver = [archiver, ArchiverApiSchema];
signalHandlers.push(archiver.stop);
return services;
2 changes: 1 addition & 1 deletion yarn-project/aztec/src/cli/cmds/start_node.ts
@@ -91,7 +91,7 @@ export async function startNode(
const telemetry = await createAndStartTelemetryClient(telemetryConfig);

// Create and start Aztec Node
const node = await createAztecNode(nodeConfig, telemetry);
const node = await createAztecNode(nodeConfig, { telemetry });

// Add node and p2p to services list
services.node = [node, AztecNodeApiSchema];
14 changes: 10 additions & 4 deletions yarn-project/aztec/src/sandbox.ts
@@ -2,6 +2,7 @@
import { type AztecNodeConfig, AztecNodeService, getConfigEnvVars } from '@aztec/aztec-node';
import { AnvilTestWatcher, EthCheatCodes, SignerlessWallet, retryUntil } from '@aztec/aztec.js';
import { DefaultMultiCallEntrypoint } from '@aztec/aztec.js/entrypoint';
import { type BlobSinkClientInterface, createBlobSinkClient } from '@aztec/blob-sink/client';
import { type AztecNode } from '@aztec/circuit-types';
import { setupCanonicalL2FeeJuice } from '@aztec/cli/setup-contracts';
import {
@@ -143,8 +144,10 @@ export async function createSandbox(config: Partial<SandboxConfig> = {}) {
await watcher.start();
}

const client = await createAndStartTelemetryClient(getTelemetryClientConfig());
const node = await createAztecNode(aztecNodeConfig, client);
const telemetry = await createAndStartTelemetryClient(getTelemetryClientConfig());
// Create a local blob sink client inside the sandbox, no http connectivity
const blobSinkClient = createBlobSinkClient();
const node = await createAztecNode(aztecNodeConfig, { telemetry, blobSinkClient });
const pxe = await createAztecPXE(node);

if (config.enableGas) {
@@ -168,9 +171,12 @@ export async function createSandbox(config: Partial<SandboxConfig> = {}) {
* Create and start a new Aztec RPC HTTP Server
* @param config - Optional Aztec node settings.
*/
export async function createAztecNode(config: Partial<AztecNodeConfig> = {}, telemetryClient?: TelemetryClient) {
export async function createAztecNode(
config: Partial<AztecNodeConfig> = {},
deps: { telemetry?: TelemetryClient; blobSinkClient?: BlobSinkClientInterface } = {},
) {
const aztecNodeConfig: AztecNodeConfig = { ...getConfigEnvVars(), ...config };
const node = await AztecNodeService.createAndSync(aztecNodeConfig, { telemetry: telemetryClient });
const node = await AztecNodeService.createAndSync(aztecNodeConfig, deps);
return node;
}

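Because createAztecNode now accepts a deps object, a caller can hand it a blob sink client of its own instead of letting the node build one from config.blobSinkUrl. A minimal sketch under that assumption (how createAztecNode is imported depends on how the package is consumed):

import { createBlobSinkClient } from '@aztec/blob-sink/client';

// Omitting the URL yields an in-memory client, so no separate blob sink process is required.
const blobSinkClient = createBlobSinkClient();
const node = await createAztecNode({}, { blobSinkClient });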
3 changes: 3 additions & 0 deletions yarn-project/aztec/tsconfig.json
@@ -24,6 +24,9 @@
{
"path": "../bb-prover"
},
{
"path": "../blob-sink"
},
{
"path": "../bot"
},
3 changes: 2 additions & 1 deletion yarn-project/blob-sink/package.json
@@ -3,7 +3,8 @@
"version": "0.1.0",
"type": "module",
"exports": {
".": "./dest/index.js"
"./server": "./dest/server/index.js",
"./client": "./dest/client/index.js"
},
"inherits": [
"../package.common.json"
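The package root export is replaced by dedicated ./server and ./client entry points, so consumers import from the matching subpath. A short sketch of both import styles, assuming BlobSinkServer is re-exported from the server index:

import { BlobSinkServer } from '@aztec/blob-sink/server';
import { type BlobSinkClientInterface, createBlobSinkClient } from '@aztec/blob-sink/client';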
72 changes: 72 additions & 0 deletions yarn-project/blob-sink/src/client/blob-sink-client-tests.ts
@@ -0,0 +1,72 @@
import { Blob } from '@aztec/foundation/blob';
import { Fr } from '@aztec/foundation/fields';

import { type BlobSinkClientInterface } from './interface.js';

/**
* Shared test suite for blob sink clients
* @param createClient - Function that creates a client instance for testing
* @param cleanup - Optional cleanup function to run after each test
*/
export function runBlobSinkClientTests(
createClient: () => Promise<{ client: BlobSinkClientInterface; cleanup: () => Promise<void> }>,
) {
let client: BlobSinkClientInterface;
let cleanup: () => Promise<void>;

beforeEach(async () => {
const setup = await createClient();
client = setup.client;
cleanup = setup.cleanup;
});

afterEach(async () => {
await cleanup();
});

it('should send and retrieve blobs', async () => {
const testFields = [Fr.random(), Fr.random(), Fr.random()];
const blob = Blob.fromFields(testFields);
const blockId = '0x1234';

const success = await client.sendBlobsToBlobSink(blockId, [blob]);
expect(success).toBe(true);

const retrievedBlobs = await client.getBlobSidecar(blockId);
expect(retrievedBlobs).toHaveLength(1);
expect(retrievedBlobs[0].fieldsHash.toString()).toBe(blob.fieldsHash.toString());
expect(retrievedBlobs[0].commitment.toString('hex')).toBe(blob.commitment.toString('hex'));
});

it('should handle multiple blobs', async () => {
const blobs = [
Blob.fromFields([Fr.random(), Fr.random()]),
Blob.fromFields([Fr.random(), Fr.random()]),
Blob.fromFields([Fr.random(), Fr.random()]),
];
const blockId = '0x5678';

const success = await client.sendBlobsToBlobSink(blockId, blobs);
expect(success).toBe(true);

const retrievedBlobs = await client.getBlobSidecar(blockId);
expect(retrievedBlobs).toHaveLength(3);

for (let i = 0; i < blobs.length; i++) {
expect(retrievedBlobs[i].fieldsHash.toString()).toBe(blobs[i].fieldsHash.toString());
expect(retrievedBlobs[i].commitment.toString('hex')).toBe(blobs[i].commitment.toString('hex'));
}

// Can request blobs by index
const retrievedBlobsByIndex = await client.getBlobSidecar(blockId, [0, 2]);
expect(retrievedBlobsByIndex).toHaveLength(2);
expect(retrievedBlobsByIndex[0].fieldsHash.toString()).toBe(blobs[0].fieldsHash.toString());
expect(retrievedBlobsByIndex[1].fieldsHash.toString()).toBe(blobs[2].fieldsHash.toString());
});

it('should return empty array for non-existent block', async () => {
const blockId = '0xnonexistent';
const retrievedBlobs = await client.getBlobSidecar(blockId);
expect(retrievedBlobs).toEqual([]);
});
}
13 changes: 13 additions & 0 deletions yarn-project/blob-sink/src/client/factory.ts
@@ -0,0 +1,13 @@
import { MemoryBlobStore } from '../blobstore/memory_blob_store.js';
import { HttpBlobSinkClient } from './http.js';
import { type BlobSinkClientInterface } from './interface.js';
import { LocalBlobSinkClient } from './local.js';

export function createBlobSinkClient(blobSinkUrl?: string): BlobSinkClientInterface {
if (!blobSinkUrl) {
const blobStore = new MemoryBlobStore();
return new LocalBlobSinkClient(blobStore);
}

return new HttpBlobSinkClient(blobSinkUrl);
}
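The factory picks the implementation from the presence of a URL: without blobSinkUrl it wires a LocalBlobSinkClient over an in-memory MemoryBlobStore, which is what lets the sandbox run a blob sink without an extra process, and with a URL it returns an HttpBlobSinkClient that talks to a remote sink. A usage sketch, with an illustrative URL:

const localClient = createBlobSinkClient(); // in-memory, stays inside the current process
const remoteClient = createBlobSinkClient('http://localhost:5052'); // hypothetical blob sink server address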
35 changes: 35 additions & 0 deletions yarn-project/blob-sink/src/client/http.test.ts
@@ -0,0 +1,35 @@
import { Blob } from '@aztec/foundation/blob';
import { Fr } from '@aztec/foundation/fields';

import { BlobSinkServer } from '../server/server.js';
import { runBlobSinkClientTests } from './blob-sink-client-tests.js';
import { HttpBlobSinkClient } from './http.js';

describe('HttpBlobSinkClient', () => {
runBlobSinkClientTests(async () => {
const server = new BlobSinkServer({
port: 0,
});
await server.start();

const client = new HttpBlobSinkClient(`http://localhost:${server.port}`);

return {
client,
cleanup: async () => {
await server.stop();
},
};
});

it('should handle server connection errors gracefully', async () => {
const client = new HttpBlobSinkClient('http://localhost:12345'); // Invalid port
const blob = Blob.fromFields([Fr.random()]);

const success = await client.sendBlobsToBlobSink('0x1234', [blob]);
expect(success).toBe(false);

const retrievedBlobs = await client.getBlobSidecar('0x1234');
expect(retrievedBlobs).toEqual([]);
});
});
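The shared suite can exercise the local client in the same way; a hypothetical local.test.ts (not part of this commit) wiring LocalBlobSinkClient over a MemoryBlobStore might look like:

import { MemoryBlobStore } from '../blobstore/memory_blob_store.js';
import { runBlobSinkClientTests } from './blob-sink-client-tests.js';
import { LocalBlobSinkClient } from './local.js';

describe('LocalBlobSinkClient', () => {
  runBlobSinkClientTests(() => {
    const client = new LocalBlobSinkClient(new MemoryBlobStore());
    // Nothing external to tear down for the in-memory client.
    return Promise.resolve({ client, cleanup: () => Promise.resolve() });
  });
});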
