Skip to content

Commit

Permalink
initial integration tests working with UltraHonk
Browse files Browse the repository at this point in the history
  • Loading branch information
vezenovm committed Jul 31, 2024
1 parent daad75c commit 7617ed6
Show file tree
Hide file tree
Showing 4 changed files with 308 additions and 3 deletions.
141 changes: 140 additions & 1 deletion compiler/integration-tests/test/node/prove_and_verify.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,11 @@ import { expect } from 'chai';
import assert_lt_json from '../../circuits/assert_lt/target/assert_lt.json' assert { type: 'json' };
import fold_fibonacci_json from '../../circuits/fold_fibonacci/target/fold_fibonacci.json' assert { type: 'json' };
import { Noir } from '@noir-lang/noir_js';
import { BarretenbergBackend as Backend, BarretenbergVerifier as Verifier } from '@noir-lang/backend_barretenberg';
import {
BarretenbergBackend as Backend,
BarretenbergVerifier as Verifier,
UltraHonkBackend,
} from '@noir-lang/backend_barretenberg';
import { CompiledCircuit } from '@noir-lang/types';

const assert_lt_program = assert_lt_json as CompiledCircuit;
Expand Down Expand Up @@ -150,3 +154,138 @@ it('end-to-end proof creation and verification for multiple ACIR circuits (inner
const isValid = await backend.verifyProof(proof);
expect(isValid).to.be.true;
});

// Shared UltraHonk backend for the assert_lt tests below; constructed once at
// module load and reused across test cases.
const honkBackend = new UltraHonkBackend(assert_lt_program);

it('UltraHonk end-to-end proof creation and verification (outer)', async () => {
  // Noir.js: execute the circuit to produce a witness.
  const program = new Noir(assert_lt_program);
  const { witness } = await program.execute({
    x: '2',
    y: '3',
  });

  // bb.js: prove and then verify with the shared UltraHonk backend.
  const proof = await honkBackend.generateProof(witness);
  const verified = await honkBackend.verifyProof(proof);
  expect(verified).to.be.true;
});

it('UltraHonk end-to-end proof creation and verification (outer) -- Verifier API', async () => {
// Noir.Js part
const inputs = {
x: '2',
y: '3',
};

// Execute program
const program = new Noir(assert_lt_program);
const { witness } = await program.execute(inputs);

// Generate proof
const proof = await honkBackend.generateProof(witness);

const verificationKey = await honkBackend.getVerificationKey();

// Proof verification
const verifier = new Verifier();
const isValid = await verifier.verifyUltraHonkProof(proof, verificationKey);
expect(isValid).to.be.true;
});

it('UltraHonk end-to-end proof creation and verification (inner)', async () => {
// Noir.Js part
const inputs = {
x: '2',
y: '3',
};

const program = new Noir(assert_lt_program);

const { witness } = await program.execute(inputs);

// bb.js part
//
// Proof creation
const proof = await honkBackend.generateProof(witness);

// Proof verification
const isValid = await honkBackend.verifyProof(proof);
expect(isValid).to.be.true;
});

it('UltraHonk end-to-end proving and verification with different instances', async () => {
  // Noir.js: execute the circuit to produce a witness.
  const { witness } = await new Noir(assert_lt_program).execute({
    x: '2',
    y: '3',
  });

  // Prove with the shared backend instance...
  const proof = await honkBackend.generateProof(witness);

  // ...and verify with a completely separate backend instance for the same circuit.
  const freshBackend = new UltraHonkBackend(assert_lt_program);
  const proof_is_valid = await freshBackend.verifyProof(proof);
  expect(proof_is_valid).to.be.true;
});

it('[BUG] -- UltraHonk bb.js null function or function signature mismatch (outer-inner) ', async () => {
  // Noir.Js part
  const inputs = {
    x: '2',
    y: '3',
  };

  const program = new Noir(assert_lt_program);

  const { witness } = await program.execute(inputs);

  // bb.js part
  //
  // Proof creation
  //
  // Generate two proofs from the same witness. With the Plonk backend these
  // would be an outer and an inner (intermediate) proof; the UltraHonk API
  // used here only exposes a single kind of proof generation.
  const proofOuter = await honkBackend.generateProof(witness);
  // Renamed from `_proofInner`: the underscore prefix signalled an unused
  // binding, but the value is verified below.
  const proofInner = await honkBackend.generateProof(witness);

  // Proof verification
  //
  const isValidOuter = await honkBackend.verifyProof(proofOuter);
  expect(isValidOuter).to.be.true;
  // Both proofs are expected to verify, since both were produced by the same
  // API from the same witness. (A previous comment claimed this second
  // verification would fail, contradicting the assertion below.)
  const isValidInner = await honkBackend.verifyProof(proofInner);
  expect(isValidInner).to.be.true;
});

it('UltraHonk end-to-end proof creation and verification for multiple ACIR circuits (inner)', async () => {
// Noir.Js part
const inputs = {
x: '10',
};

const program = new Noir(fold_fibonacci_program);

const { witness } = await program.execute(inputs);

// bb.js part
//
// Proof creation
const honkBackend = new UltraHonkBackend(fold_fibonacci_program);
const proof = await honkBackend.generateProof(witness);

// Proof verification
const isValid = await honkBackend.verifyProof(proof);
expect(isValid).to.be.true;
});
140 changes: 139 additions & 1 deletion tooling/noir_js_backend_barretenberg/src/backend.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { acirToUint8Array } from './serialize.js';
import { Backend, CompiledCircuit, ProofData, VerifierBackend } from '@noir-lang/types';
import { BackendOptions } from './types.js';
import { deflattenPublicInputs } from './public_inputs.js';
import { reconstructProofWithPublicInputs } from './verifier.js';
import { reconstructProofWithPublicInputs, reconstructProofWithPublicInputsHonk } from './verifier.js';
import { type Barretenberg } from '@aztec/bb.js';

// This is the number of bytes in a UltraPlonk proof
Expand Down Expand Up @@ -50,6 +50,7 @@ export class BarretenbergBackend implements Backend, VerifierBackend {
this.acirUncompressedBytecode,
honkRecursion,
);

const crs = await Crs.new(subgroupSize + 1);
await api.commonInitSlabAllocator(subgroupSize);
await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data()));
Expand Down Expand Up @@ -143,3 +144,140 @@ export class BarretenbergBackend implements Backend, VerifierBackend {
await this.api.destroy();
}
}

// Buffers are prepended with their size. The size takes 4 bytes.
const serializedBufferSize = 4;
// Each field element in a serialized proof occupies 32 bytes.
const fieldByteSize = 32;
// Number of leading proof fields that precede the public inputs.
// NOTE(review): the meaning of these three fields is assumed from the bb.js
// UltraHonk proof layout — confirm against the bb.js serialization format.
const publicInputOffset = 3;
// Byte offset (excluding the 4-byte size prefix) at which public inputs begin.
const publicInputsOffsetBytes = publicInputOffset * fieldByteSize;

/**
 * Proving/verifying backend for the UltraHonk proving system, backed by
 * `@aztec/bb.js`. Mirrors the shape of `BarretenbergBackend`.
 */
export class UltraHonkBackend implements Backend, VerifierBackend {
  // These type assertions are used so that we don't
  // have to initialize `api` and `acirComposer` in the constructor.
  // These are initialized asynchronously in the `init` function,
  // constructors cannot be asynchronous which is why we do this.

  protected api!: Barretenberg;
  // NOTE(review): unused in the Honk flow below; kept for structural parity
  // with `BarretenbergBackend`.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  protected acirComposer: any;
  protected acirUncompressedBytecode: Uint8Array;

  constructor(
    acirCircuit: CompiledCircuit,
    protected options: BackendOptions = { threads: 1 },
  ) {
    // The compiled circuit carries base64-encoded, compressed ACIR bytecode;
    // decode it once up front.
    const acirBytecodeBase64 = acirCircuit.bytecode;
    this.acirUncompressedBytecode = acirToUint8Array(acirBytecodeBase64);
  }

  /** @ignore */
  async instantiate(): Promise<void> {
    if (!this.api) {
      // NOTE(review): this overrides any caller-supplied `options.threads`
      // with the detected hardware concurrency — confirm that is intended.
      if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) {
        this.options.threads = navigator.hardwareConcurrency;
      } else {
        try {
          const os = await import('os');
          this.options.threads = os.cpus().length;
        } catch (e) {
          console.log('Could not detect environment. Falling back to one thread.', e);
        }
      }
      const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js');
      const api = await Barretenberg.new(this.options);

      const honkRecursion = true;
      const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes(
        this.acirUncompressedBytecode,
        honkRecursion,
      );
      // The CRS must cover the circuit's subgroup size (+1 for the commitment scheme).
      const crs = await Crs.new(subgroupSize + 1);
      await api.commonInitSlabAllocator(subgroupSize);
      await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data()));

      // We don't init a proving key here in the Honk API
      // await api.acirInitProvingKey(this.acirComposer, this.acirUncompressedBytecode);
      this.api = api;
    }
  }

  /**
   * Generates an UltraHonk proof from a (gzipped) witness.
   *
   * bb.js returns a single buffer containing the proof with the public inputs
   * embedded; this splits them into the `{ proof, publicInputs }` shape of
   * `ProofData`.
   */
  async generateProof(decompressedWitness: Uint8Array): Promise<ProofData> {
    await this.instantiate();
    const proofWithPublicInputs = await this.api.acirProveUltraHonk(
      this.acirUncompressedBytecode,
      gunzip(decompressedWitness),
    );
    // Skip the 4-byte size prefix, then read the proof as field strings.
    const proofAsStrings = deflattenPublicInputs(proofWithPublicInputs.slice(4));

    // NOTE(review): assumes the second field of the proof encodes the number
    // of public inputs — confirm against the bb.js UltraHonk proof layout.
    const numPublicInputs = Number(proofAsStrings[1]);

    // Account for the serialized buffer size at start
    const publicInputsOffset = publicInputsOffsetBytes + serializedBufferSize;
    // Get the part before and after the public inputs
    const proofStart = proofWithPublicInputs.slice(0, publicInputsOffset);
    const publicInputsSplitIndex = numPublicInputs * fieldByteSize;
    const proofEnd = proofWithPublicInputs.slice(publicInputsOffset + publicInputsSplitIndex);
    // Construct the proof without the public inputs
    const proof = new Uint8Array([...proofStart, ...proofEnd]);

    // Extract the public inputs themselves out of the combined buffer.
    const publicInputsConcatenated = proofWithPublicInputs.slice(
      publicInputsOffset,
      publicInputsOffset + publicInputsSplitIndex,
    );
    const publicInputs = deflattenPublicInputs(publicInputsConcatenated);

    return { proof, publicInputs };
  }

  /** Verifies an UltraHonk proof against this circuit's verification key. */
  async verifyProof(proofData: ProofData): Promise<boolean> {
    const { RawBuffer } = await import('@aztec/bb.js');

    // Re-embed the public inputs into the proof buffer, as bb.js expects.
    const proof = reconstructProofWithPublicInputsHonk(proofData);

    // Reuse `getVerificationKey` (which also ensures the backend is
    // instantiated) instead of duplicating the VK derivation here.
    const vkBuf = await this.getVerificationKey();

    return await this.api.acirVerifyUltraHonk(proof, new RawBuffer(vkBuf));
  }

  /** Derives the circuit's UltraHonk verification key. */
  async getVerificationKey(): Promise<Uint8Array> {
    await this.instantiate();
    return await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode);
  }

  // TODO: Make this accurate to handle Honk recursive aggregation in the browser
  async generateRecursiveProofArtifacts(
    _proofData: ProofData,
    _numOfPublicInputs: number,
  ): Promise<{ proofAsFields: string[]; vkAsFields: string[]; vkHash: string }> {
    await this.instantiate();
    // TODO: This needs to be updated to handle recursive aggregation.
    // There is still a proofAsFields method but we could consider getting rid of it as the proof itself
    // is a list of field elements.
    // const proof = reconstructProofWithPublicInputs(proofData);
    // const proofAsFields = (await this.api.acirProofAsFieldsUltraHonk(proof)).slice(numOfPublicInputs);

    // TODO: perhaps we should put this in the init function. Need to benchmark
    // TODO how long it takes.
    const vkBuf = await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode);
    const vk = await this.api.acirVkAsFieldsUltraHonk(vkBuf);

    return {
      // TODO: broken
      proofAsFields: [],
      // `field` avoids shadowing the outer `vk` binding.
      vkAsFields: vk.map((field) => field.toString()),
      // We use an empty string for the vk hash here as it is unneeded as part of the recursive artifacts
      // The user can be expected to hash the vk inside their circuit to check whether the vk is the circuit
      // they expect
      vkHash: '',
    };
  }

  /** Releases the underlying bb.js resources, if the backend was instantiated. */
  async destroy(): Promise<void> {
    if (!this.api) {
      return;
    }
    await this.api.destroy();
  }
}
2 changes: 1 addition & 1 deletion tooling/noir_js_backend_barretenberg/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
export { BarretenbergBackend } from './backend.js';
export { BarretenbergBackend, UltraHonkBackend } from './backend.js';
export { BarretenbergVerifier } from './verifier.js';

// typedoc exports
Expand Down
28 changes: 28 additions & 0 deletions tooling/noir_js_backend_barretenberg/src/verifier.ts
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,16 @@ export class BarretenbergVerifier {
return await this.api.acirVerifyProof(this.acirComposer, proof);
}

/** @description Verifies an UltraHonk proof against the supplied verification key */
async verifyUltraHonkProof(proofData: ProofData, verificationKey: Uint8Array): Promise<boolean> {
  const { RawBuffer } = await import('@aztec/bb.js');

  await this.instantiate();

  // bb.js expects the public inputs embedded back into the proof buffer.
  const reconstructedProof = reconstructProofWithPublicInputsHonk(proofData);
  const verified = await this.api.acirVerifyUltraHonk(reconstructedProof, new RawBuffer(verificationKey));
  return verified;
}

async destroy(): Promise<void> {
if (!this.api) {
return;
Expand All @@ -76,3 +86,21 @@ export function reconstructProofWithPublicInputs(proofData: ProofData): Uint8Arr

return proofWithPublicInputs;
}

// Serialized buffers are prepended with their size; the size prefix takes 4 bytes.
// NOTE(review): these constants duplicate the ones in backend.ts — consider
// extracting a shared definition.
const serializedBufferSize = 4;
// Each field element in a serialized proof occupies 32 bytes.
const fieldByteSize = 32;
// Number of leading proof fields that precede the public inputs.
const publicInputOffset = 3;
// Byte offset (excluding the size prefix) at which public inputs begin.
const publicInputsOffsetBytes = publicInputOffset * fieldByteSize;

export function reconstructProofWithPublicInputsHonk(proofData: ProofData): Uint8Array {
// Flatten publicInputs
const publicInputsConcatenated = flattenPublicInputsAsArray(proofData.publicInputs);

const proofStart = proofData.proof.slice(0, publicInputsOffsetBytes + serializedBufferSize);
const proofEnd = proofData.proof.slice(publicInputsOffsetBytes + serializedBufferSize);

// Concatenate publicInputs and proof
const proofWithPublicInputs = Uint8Array.from([...proofStart, ...publicInputsConcatenated, ...proofEnd]);

return proofWithPublicInputs;
}

0 comments on commit 7617ed6

Please sign in to comment.