diff --git a/CHANGELOG.md b/CHANGELOG.md
index bfd5ad8..7acfde5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -12,13 +12,27 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 #### Circuit Builds

 #### Artifacts
 - **Circuit sizes:**
+  - `plaintext_authentication_1024b` (with `--O1` build):
+    - non-linear constraints: `383,300`
+    - linear constraints: `27,418`
+    - R1CS file: `83.9MB`
+    - Graph file: `20.7MB`
   - `http_verification_1024b` (with `--O1` build):
-    - non-linear constaints: `128,653`
-    - linear-constraints: `77,400`
-    - Theoretical storage size: `(128,653 + 77,400) * 3 * 32 bytes = 19,781,088 bytes ≈ 19.7 MB`
-    - R1CS file: `46.9MB`
-    - Graph file: N/A
-    - **WARNING:** Seems to not build with `--O2` flag. Need to investigate.
+    - non-linear constraints: `121,835`
+    - linear constraints: `64,974`
+    - R1CS file: `25.7MB`
+    - Graph file: `5MB`
+    - **WARNING:** Extremely slow build with `--O2` flag. Need to investigate.
+  - `json_extraction_1024b` (with `--O1` build):
+    - non-linear constraints: `460,102`
+    - linear constraints: `225,781`
+    - R1CS file: `95.3MB`
+    - Graph file: `13.1MB`
+  - **Total size:** `243.7MB`
+- **Circuit param file sizes (SNARK):**
+  - `aux_params`: `112.5MB`
+  - `prover_key`: `100.7MB`
+  - `verifier_key`: `321.3MB`

 ### Notes
diff --git a/circuits.json b/circuits.json
index 9b6742b..06e5fd9 100644
--- a/circuits.json
+++ b/circuits.json
@@ -15,8 +15,8 @@
     ]
   },
   "json_extraction_1024b": {
-    "file": "json/parser/hash_parser",
-    "template": "ParserHasher",
+    "file": "json/extraction",
+    "template": "JSONExtraction",
     "params": [
       1024,
       10
diff --git a/circuits/chacha20/nivc/chacha20_nivc.circom b/circuits/chacha20/nivc/chacha20_nivc.circom
index ae5f71d..4198ce7 100644
--- a/circuits/chacha20/nivc/chacha20_nivc.circom
+++ b/circuits/chacha20/nivc/chacha20_nivc.circom
@@ -7,6 +7,7 @@ include "../chacha-qr.circom";
 include "../../utils/bits.circom";
 include "../../utils/hash.circom";
 include "../../utils/array.circom";
+include "circomlib/circuits/poseidon.circom";

 /** ChaCha20 in counter mode */
@@ -35,13 +36,13 @@ template ChaCha20_NIVC(DATA_BYTES) {
   // in => N 32-bit words => N 4 byte words
   signal input plainText[DATA_BYTES];

-  // step_in should be the ciphertext digest
+  // step_in should be the ciphertext digest + the HTTP digests + JSON sequence digest
   signal input step_in[1];

   // step_out should be the plaintext digest
   signal output step_out[1];

-  signal isPadding[DATA_BYTES];
+  signal isPadding[DATA_BYTES]; // == 1 when we hit a padding byte (-1)
   signal plaintextBits[DATA_BYTES / 4][32];
   component toBits[DATA_BYTES / 4];
   for (var i = 0 ; i < DATA_BYTES / 4 ; i++) {
@@ -141,10 +142,16 @@ template ChaCha20_NIVC(DATA_BYTES) {
     }
   }

+  signal ciphertext_digest <== DataHasher(DATA_BYTES)(bigEndianCiphertext);
-  signal ciphertext_hash <== DataHasher(DATA_BYTES)(bigEndianCiphertext);
-  step_in[0] === ciphertext_hash;
+  signal zeroed_plaintext[DATA_BYTES];
+  for(var i = 0 ; i < DATA_BYTES ; i++) {
+    // Sets any padding bytes to zero (which are presumably at the end) so they don't accumulate into the poly hash
+    zeroed_plaintext[i] <== (1 - isPadding[i]) * plainText[i];
+  }
+  signal plaintext_digest <== PolynomialDigest(DATA_BYTES)(zeroed_plaintext, ciphertext_digest);
+  signal plaintext_digest_hashed <== Poseidon(1)([plaintext_digest]);

-  signal plaintext_hash <== DataHasher(DATA_BYTES)(plainText);
-  step_out[0] <== plaintext_hash;
+  // TODO: I'm not sure we need to subtract the CT digest
+  step_out[0] <== step_in[0] - ciphertext_digest + plaintext_digest_hashed;
 }
\ No newline at end of file
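The new `step_out` relation can be reproduced off-circuit for review. A minimal sketch (not part of the diff), assuming the `DataHasher`/`PolynomialDigest`/`modAdd` helpers from `circuits/test/common` and `poseidon1` from `poseidon-lite`; the import paths are illustrative:

```typescript
import { poseidon1 } from "poseidon-lite";
import { DataHasher } from "../test/common/poseidon"; // hypothetical relative path
import { PolynomialDigest, modAdd } from "../test/common"; // hypothetical relative path

// Mirrors ChaCha20_NIVC: step_out = step_in - ciphertext_digest + Poseidon(plaintext_digest)
function plaintextAuthenticationStepOut(
  stepIn: bigint,
  plaintext: number[], // padded with -1 up to DATA_BYTES
  ciphertext: number[],
): bigint {
  const ciphertextDigest = DataHasher(ciphertext);
  // The circuit zeroes padding bytes before the polynomial digest; mirror that here.
  const zeroed = plaintext.map(b => (b === -1 ? 0 : b));
  const plaintextDigestHashed = poseidon1([PolynomialDigest(zeroed, ciphertextDigest)]);
  return modAdd(stepIn - ciphertextDigest, plaintextDigestHashed);
}
```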
diff --git a/circuits/http/verification.circom b/circuits/http/verification.circom
index eb7237b..daefd19 100644
--- a/circuits/http/verification.circom
+++ b/circuits/http/verification.circom
@@ -7,19 +7,24 @@ include "../utils/hash.circom";
 template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS) {
     signal input step_in[1];
     signal output step_out[1];
+
+    signal input ciphertext_digest;

-    // Authenticate the plaintext we are passing in
     signal input data[DATA_BYTES];
-    // TODO: we don't need this if we do a poly digest of the plaintext in authentication circuit
-    signal data_hash <== DataHasher(DATA_BYTES)(data);
-    data_hash === step_in[0];
+    signal isPadding[DATA_BYTES]; // == 1 when we hit a padding byte (-1)
+    signal zeroed_data[DATA_BYTES];
+    for (var i = 0 ; i < DATA_BYTES ; i++) {
+        isPadding[i]   <== IsEqual()([data[i], -1]);
+        zeroed_data[i] <== (1 - isPadding[i]) * data[i];
+    }
+    signal data_digest <== PolynomialDigest(DATA_BYTES)(zeroed_data, ciphertext_digest);

     signal input main_digests[MAX_NUMBER_OF_HEADERS + 1]; // Contains digests of start line and all intended headers (up to `MAX_NUMBER_OF_HEADERS`)
-    signal contained[MAX_NUMBER_OF_HEADERS + 1];
+    signal not_contained[MAX_NUMBER_OF_HEADERS + 1];
     var num_to_match = MAX_NUMBER_OF_HEADERS + 1;
     for(var i = 0 ; i < MAX_NUMBER_OF_HEADERS + 1 ; i++) {
-        contained[i] <== IsZero()(main_digests[i]);
-        num_to_match -= contained[i];
+        not_contained[i] <== IsZero()(main_digests[i]);
+        num_to_match -= not_contained[i];
     }

     component State[DATA_BYTES];
@@ -55,7 +60,7 @@ template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS) {
         is_line_change[i] <== Contains(2)(data[i + 1], [10, 13]); // capture if we hit an end line sequence
         was_cleared[i] <== IsZero()(main_monomials[i]);
         not_body_and_not_line_change[i] <== (1 - State[i + 1].parsing_body) * (1 - is_line_change[i]);
-        rescaled_or_was_cleared[i] <== (main_monomials[i] * step_in[0] + was_cleared[i]);
+        rescaled_or_was_cleared[i] <== (main_monomials[i] * ciphertext_digest + was_cleared[i]);
         main_monomials[i + 1] <== not_body_and_not_line_change[i] * rescaled_or_was_cleared[i];
     }

@@ -80,22 +85,19 @@ template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS) {
     // BODY
     signal body_monomials[DATA_BYTES];
-    body_monomials[0] <== 0;
     signal body_accum[DATA_BYTES];
-    body_accum[0] <== 0;
     signal body_switch[DATA_BYTES -1];
     signal body_digest[DATA_BYTES];
-    body_digest[0] <== 0;
+    body_monomials[0] <== 0;
+    body_accum[0] <== 0;
+    body_digest[0] <== 0;
     for(var i = 0 ; i < DATA_BYTES - 1 ; i++) {
         body_accum[i + 1] <== body_accum[i] + State[i + 1].parsing_body;
         body_switch[i] <== IsEqual()([body_accum[i + 1], 1]);
-        body_monomials[i + 1] <== body_monomials[i] * step_in[0] + body_switch[i];
-        body_digest[i + 1] <== body_digest[i] + body_monomials[i + 1] * data[i + 1];
+        body_monomials[i + 1] <== body_monomials[i] * ciphertext_digest + body_switch[i];
+        body_digest[i + 1] <== body_digest[i] + body_monomials[i + 1] * data[i + 1];
     }

-    // TODO: This, for now, passes back out the hash of body_digest and the plaintext_hash so it can be properly verified in the JSON
-    step_out[0] <== PoseidonChainer()([body_digest[DATA_BYTES - 1], step_in[0]]);
-
     // Verify machine ends in a valid state
     State[DATA_BYTES - 1].next_parsing_start === 0;
     State[DATA_BYTES - 1].next_parsing_header === 0;
@@ -103,4 +105,18 @@
     State[DATA_BYTES - 1].next_parsing_field_value === 0;
    State[DATA_BYTES - 1].next_parsing_body === 1;
     State[DATA_BYTES - 1].next_line_status === 0;
+
+    // TODO: Need to subtract all the header digests here and also wrap them in poseidon. We can use the ones from the input to make this cheaper since they're verified in this circuit!
+    signal body_digest_hashed <== Poseidon(1)([body_digest[DATA_BYTES - 1]]);
+    signal data_digest_hashed <== Poseidon(1)([data_digest]);
+    signal option_hash[MAX_NUMBER_OF_HEADERS + 1];
+    signal main_digests_hashed[MAX_NUMBER_OF_HEADERS + 1];
+    var accumulated_main_digests_hashed = 0;
+    for(var i = 0 ; i < MAX_NUMBER_OF_HEADERS + 1 ; i++) {
+        option_hash[i] <== Poseidon(1)([(1 - not_contained[i]) * main_digests[i]]);
+        main_digests_hashed[i] <== (1 - not_contained[i]) * option_hash[i];
+        accumulated_main_digests_hashed += main_digests_hashed[i];
+    }
+
+    step_out[0] <== step_in[0] + body_digest_hashed - accumulated_main_digests_hashed - data_digest_hashed; // TODO: data_digest is really plaintext_digest from before, consider changing names
 }
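This ledger-style update (add the hashed body digest, remove the hashed start-line/header digests and the hashed plaintext digest) can also be mirrored off-circuit. A sketch (not part of the diff), under the same assumptions as the earlier snippet:

```typescript
import { poseidon1 } from "poseidon-lite";
import { PolynomialDigest, modAdd } from "../test/common"; // hypothetical relative path

// Mirrors HTTPVerification:
// step_out = step_in + H(body_digest) - sum_i H(main_digests[i]) - H(data_digest)
function httpVerificationStepOut(
  stepIn: bigint,
  ciphertextDigest: bigint,
  body: number[],
  mainParts: number[][], // start line and each header, as byte arrays
  plaintext: number[],
): bigint {
  const bodyHashed = poseidon1([PolynomialDigest(body, ciphertextDigest)]);
  const mainHashed = mainParts
    .map(part => poseidon1([PolynomialDigest(part, ciphertextDigest)]))
    .reduce((acc, h) => modAdd(acc, h), BigInt(0));
  const plaintextHashed = poseidon1([PolynomialDigest(plaintext, ciphertextDigest)]);
  return modAdd(stepIn + bodyHashed - mainHashed - plaintextHashed, BigInt(0));
}
```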
diff --git a/circuits/json/extraction.circom b/circuits/json/extraction.circom
index adc6da2..e6390bb 100644
--- a/circuits/json/extraction.circom
+++ b/circuits/json/extraction.circom
@@ -5,15 +5,13 @@ include "hash_machine.circom";

 template JSONExtraction(DATA_BYTES, MAX_STACK_HEIGHT) {
     signal input data[DATA_BYTES];
-    signal input polynomial_input;
-    signal input sequence_digest;
+    signal input ciphertext_digest;
+    signal input sequence_digest;
+    signal input value_digest;

     signal input step_in[1];
     signal output step_out[1];

-    // TODO: Change this
-    step_out[0] <== step_in[0];
-
     //--------------------------------------------------------------------------------------------//
     // Initialze the parser
     component State[DATA_BYTES];
@@ -23,18 +21,18 @@ template JSONExtraction(DATA_BYTES, MAX_STACK_HEIGHT) {
         State[0].tree_hash[i] <== [0,0];
     }
     State[0].byte <== data[0];
-    State[0].polynomial_input <== polynomial_input;
+    State[0].polynomial_input <== ciphertext_digest;
     State[0].monomial <== 0;
     State[0].parsing_string <== 0;
     State[0].parsing_number <== 0;

     // Set up monomials for stack/tree digesting
-    signal monomials[4 * MAX_STACK_HEIGHT];
+    signal monomials[3 * MAX_STACK_HEIGHT];
     monomials[0] <== 1;
-    for(var i = 1 ; i < 4 * MAX_STACK_HEIGHT ; i++) {
-        monomials[i] <== monomials[i - 1] * polynomial_input;
+    for(var i = 1 ; i < 3 * MAX_STACK_HEIGHT ; i++) {
+        monomials[i] <== monomials[i - 1] * ciphertext_digest;
     }
-    signal intermediate_digest[DATA_BYTES][4 * MAX_STACK_HEIGHT];
+    signal intermediate_digest[DATA_BYTES][3 * MAX_STACK_HEIGHT];
     signal state_digest[DATA_BYTES];

     // Debugging
@@ -50,29 +48,39 @@
     // log("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx");

     var total_matches = 0;
-    signal is_matched[DATA_BYTES];
+    signal sequence_is_matched[DATA_BYTES];
+    signal value_is_matched[DATA_BYTES];
+    signal sequence_and_value_matched[DATA_BYTES];
     for(var data_idx = 1; data_idx < DATA_BYTES; data_idx++) {
         State[data_idx] = StateUpdateHasher(MAX_STACK_HEIGHT);
         State[data_idx].byte <== data[data_idx];
-        State[data_idx].polynomial_input <== polynomial_input;
+        State[data_idx].polynomial_input <== ciphertext_digest;
         State[data_idx].stack <== State[data_idx - 1].next_stack;
         State[data_idx].parsing_string <== State[data_idx - 1].next_parsing_string;
         State[data_idx].parsing_number <== State[data_idx - 1].next_parsing_number;
         State[data_idx].monomial <== State[data_idx - 1].next_monomial;
         State[data_idx].tree_hash <== State[data_idx - 1].next_tree_hash;

-        // Digest the whole stack and tree hash
+        // Digest the whole stack and key tree hash
         var accumulator = 0;
         for(var i = 0 ; i < MAX_STACK_HEIGHT ; i++) {
-            intermediate_digest[data_idx][4 * i] <== State[data_idx].next_stack[i][0] * monomials[4 * i];
-            intermediate_digest[data_idx][4 * i + 1] <== State[data_idx].next_stack[i][1] * monomials[4 * i + 1];
-            intermediate_digest[data_idx][4 * i + 2] <== State[data_idx].next_tree_hash[i][0] * monomials[4 * i + 2];
-            intermediate_digest[data_idx][4 * i + 3] <== State[data_idx].next_tree_hash[i][1] * monomials[4 * i + 3];
-            accumulator += intermediate_digest[data_idx][4 * i] + intermediate_digest[data_idx][4 * i + 1] + intermediate_digest[data_idx][4 * i + 2] + intermediate_digest[data_idx][4 * i + 3];
+            intermediate_digest[data_idx][3 * i] <== State[data_idx].next_stack[i][0] * monomials[3 * i];
+            intermediate_digest[data_idx][3 * i + 1] <== State[data_idx].next_stack[i][1] * monomials[3 * i + 1];
+            intermediate_digest[data_idx][3 * i + 2] <== State[data_idx].next_tree_hash[i][0] * monomials[3 * i + 2];
+            accumulator += intermediate_digest[data_idx][3 * i] + intermediate_digest[data_idx][3 * i + 1] + intermediate_digest[data_idx][3 * i + 2];
         }
         state_digest[data_idx] <== accumulator;

-        is_matched[data_idx] <== IsEqual()([state_digest[data_idx], sequence_digest]);
-        total_matches += is_matched[data_idx];
+        sequence_is_matched[data_idx] <== IsEqual()([state_digest[data_idx], sequence_digest]);
+
+        // Now check whether the value digest appears
+        var value_digest_in_stack = 0;
+        for(var i = 0 ; i < MAX_STACK_HEIGHT ; i++) {
+            // Only a single value can be present, and it sits at index 1, so we can just accumulate
+            value_digest_in_stack += State[data_idx].next_tree_hash[i][1];
+        }
+        value_is_matched[data_idx] <== IsEqual()([value_digest, value_digest_in_stack]);
+        sequence_and_value_matched[data_idx] <== sequence_is_matched[data_idx] * value_is_matched[data_idx];
+        total_matches += sequence_and_value_matched[data_idx];

        // Debugging
        // for(var i = 0; i { @@ -55,9 +56,13 @@ describe("chacha20-nivc", () => {
        nonce: toInput(Buffer.from(nonceBytes)),
        counter: counterBits,
        plainText: plaintextBytes,
-       step_in: DataHasher(ciphertextBytes)
+       step_in: 0
      }, (["step_out"]));
-     assert.deepEqual(w.step_out, DataHasher(plaintextBytes));
+     // Output
+     let ciphertext_digest = DataHasher(ciphertextBytes);
+     let plaintext_digest_hashed = poseidon1([PolynomialDigest(plaintextBytes, ciphertext_digest)]);
+     let output = modAdd(plaintext_digest_hashed - ciphertext_digest, BigInt(0));
+     assert.deepEqual(w.step_out, output);
    });
  });

@@ -105,57 +110,16 @@ describe("chacha20-nivc", () => {
      let paddedPlaintextBytes = plaintextBytes.concat(Array(totalLength - plaintextBytes.length).fill(-1));
      const counterBits = uintArray32ToBits([1])[0]
      let w = await circuit.compute({
-       key: toInput(Buffer.from(keyBytes)),
-       nonce: toInput(Buffer.from(nonceBytes)),
-       counter: counterBits,
-       plainText: paddedPlaintextBytes,
-       step_in: DataHasher(ciphertextBytes)
-     }, (["step_out"]));
-     assert.deepEqual(w.step_out, DataHasher(paddedPlaintextBytes));
-   });
- });
-
- describe("wrong ciphertext hash", () => {
-   it("should fail", async () => {
-     circuit = await circomkit.WitnessTester(`ChaCha20`, {
-       file: "chacha20/nivc/chacha20_nivc",
-       template: "ChaCha20_NIVC",
-       params: [128] // number of bytes in plaintext
-     });
-     // Test case from RCF https://www.rfc-editor.org/rfc/rfc7539.html#section-2.4.2
-     // the input encoding here is not the most intuitive. inputs are serialized as little endian.
-     // i.e. "e4e7f110" is serialized as "10 f1 e7 e4". So the way i am reading in inputs is
"e4e7f110" is serialized as "10 f1 e7 e4". So the way i am reading in inputs is - // to ensure that every 32 bit word is byte reversed before being turned into bits. - // i think this should be easy when we compute witness in rust. - let keyBytes = [ - 0x00, 0x01, 0x02, 0x03, - 0x04, 0x05, 0x06, 0x07, - 0x08, 0x09, 0x0a, 0x0b, - 0x0c, 0x0d, 0x0e, 0x0f, - 0x10, 0x11, 0x12, 0x13, - 0x14, 0x15, 0x16, 0x17, - 0x18, 0x19, 0x1a, 0x1b, - 0x1c, 0x1d, 0x1e, 0x1f - ]; - - let nonceBytes = - [ - 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x4a, - 0x00, 0x00, 0x00, 0x00 - ]; - let plaintextBytes = - toByte("Ladies and Gentlemen of the class of '99: If I could offer you only one tip "); - let totalLength = 128; - let paddedPlaintextBytes = plaintextBytes.concat(Array(totalLength - plaintextBytes.length).fill(-1)); - const counterBits = uintArray32ToBits([1])[0] - await circuit.expectFail({ key: toInput(Buffer.from(keyBytes)), nonce: toInput(Buffer.from(nonceBytes)), counter: counterBits, plainText: paddedPlaintextBytes, step_in: 0 - }); + }, (["step_out"])); + let ciphertext_digest = DataHasher(ciphertextBytes); + let plaintext_digest = poseidon1([PolynomialDigest(plaintextBytes, ciphertext_digest)]); + let output = modAdd(plaintext_digest - ciphertext_digest, BigInt(0)); + assert.deepEqual(w.step_out, output); }); }); }); @@ -175,4 +139,4 @@ export function fromInput(bits: number[]) { buffer.writeUInt32LE(uint32Array[i], i * 4); } return buffer; -} \ No newline at end of file +} diff --git a/circuits/test/common/index.ts b/circuits/test/common/index.ts index 07993ca..3f143eb 100644 --- a/circuits/test/common/index.ts +++ b/circuits/test/common/index.ts @@ -62,6 +62,8 @@ export function readJSONInputFile(filename: string, key: any[]): [number[], numb } import fs from 'fs'; +import { DataHasher } from './poseidon'; +import { poseidon1 } from 'poseidon-lite'; export function readJsonFile(filePath: string): T { // Read the file synchronously @@ -286,7 +288,7 @@ export const http_response_plaintext = [ 10, 32, 32, 32, 125, 13, 10, 125, ]; -export const chacha20_http_response_ciphertext = [ +export const http_response_ciphertext = [ 2, 125, 219, 141, 140, 93, 49, 129, 95, 178, 135, 109, 48, 36, 194, 46, 239, 155, 160, 70, 208, 147, 37, 212, 17, 195, 149, 190, 38, 215, 23, 241, 84, 204, 167, 184, 179, 172, 187, 145, 38, 75, 123, 96, 81, 6, 149, 36, 135, 227, 226, 254, 177, 90, 241, 159, 0, 230, 183, 163, 210, 88, 133, @@ -343,8 +345,8 @@ const PRIME = BigInt("2188824287183927522224640574525727508854836440041603434369 const ONE = BigInt(1); const ZERO = BigInt(0); -function modAdd(a: bigint, b: bigint): bigint { - return (a + b) % PRIME; +export function modAdd(a: bigint, b: bigint): bigint { + return ((a + b) % PRIME + PRIME) % PRIME; } function modMul(a: bigint, b: bigint): bigint { @@ -354,15 +356,14 @@ function modMul(a: bigint, b: bigint): bigint { export function jsonTreeHasher( polynomialInput: bigint, keySequence: JsonMaskType[], - targetValue: number[], // Changed from Uint8Array to number[] maxStackHeight: number -): [Array<[bigint, bigint]>, Array<[bigint, bigint]>] { - if (keySequence.length >= maxStackHeight) { +): [Array<[bigint, bigint]>, Array] { + if (keySequence.length > maxStackHeight) { throw new Error("Key sequence length exceeds max stack height"); } const stack: Array<[bigint, bigint]> = []; - const treeHashes: Array<[bigint, bigint]> = []; + const treeHashes: Array = []; for (const valType of keySequence) { if (valType.type === "Object") { @@ -374,29 +375,19 @@ export function 
@@ -354,15 +356,14 @@ export function jsonTreeHasher(
   polynomialInput: bigint,
   keySequence: JsonMaskType[],
-  targetValue: number[], // Changed from Uint8Array to number[]
   maxStackHeight: number
-): [Array<[bigint, bigint]>, Array<[bigint, bigint]>] {
-  if (keySequence.length >= maxStackHeight) {
+): [Array<[bigint, bigint]>, Array<bigint>] {
+  if (keySequence.length > maxStackHeight) {
     throw new Error("Key sequence length exceeds max stack height");
   }

   const stack: Array<[bigint, bigint]> = [];
-  const treeHashes: Array<[bigint, bigint]> = [];
+  const treeHashes: Array<bigint> = [];

   for (const valType of keySequence) {
     if (valType.type === "Object") {
@@ -374,29 +375,19 @@ export function jsonTreeHasher(
       stringHash = modAdd(stringHash, modMul(monomial, BigInt(byte)));
       monomial = modMul(monomial, polynomialInput);
     }
-      treeHashes.push([stringHash, ZERO]);
+      treeHashes.push(stringHash);
     } else { // ArrayIndex
-      treeHashes.push([ZERO, ZERO]);
+      treeHashes.push(ZERO);
       stack.push([BigInt(2), BigInt(valType.value)]);
     }
   }

-  let targetValueHash = ZERO;
-  let monomial = ONE;
-
-  for (const byte of targetValue) {
-    targetValueHash = modAdd(targetValueHash, modMul(monomial, BigInt(byte)));
-    monomial = modMul(monomial, polynomialInput);
-  }
-
-  treeHashes[keySequence.length - 1] = [treeHashes[keySequence.length - 1][0], targetValueHash];
-
   return [stack, treeHashes];
 }

 export function compressTreeHash(
   polynomialInput: bigint,
-  stackAndTreeHashes: [Array<[bigint, bigint]>, Array<[bigint, bigint]>]
+  stackAndTreeHashes: [Array<[bigint, bigint]>, Array<bigint>]
 ): bigint {
   const [stack, treeHashes] = stackAndTreeHashes;
@@ -414,12 +405,104 @@ export function compressTreeHash(
     accumulated = modAdd(accumulated, modMul(stack[idx][1], monomial));
     monomial = modMul(monomial, polynomialInput);

-    accumulated = modAdd(accumulated, modMul(treeHashes[idx][0], monomial));
-    monomial = modMul(monomial, polynomialInput);
-
-    accumulated = modAdd(accumulated, modMul(treeHashes[idx][1], monomial));
+    accumulated = modAdd(accumulated, modMul(treeHashes[idx], monomial));
     monomial = modMul(monomial, polynomialInput);
   }

   return accumulated;
+}
+
+interface ManifestResponse {
+  version: string;
+  status: string;
+  message: string;
+  headers: Record<string, string[]>;
+  body: {
+    json: JsonMaskType[];
+  };
+}
+
+interface Manifest {
+  response: ManifestResponse;
+}
+
+function headersToBytes(headers: Record<string, string[]>): number[][] {
+  const result: number[][] = [];
+
+  for (const [key, values] of Object.entries(headers)) {
+    for (const value of values) {
+      // In HTTP/1.1, headers are formatted as "key: value"
+      const headerLine = `${key}: ${value}`;
+      result.push(strToBytes(headerLine));
+    }
+  }
+
+  return result;
+}
+
+export function InitialDigest(
+  manifest: Manifest,
+  ciphertext: number[],
+  maxStackHeight: number
+): [bigint, bigint] {
+  // Create a digest of the ciphertext itself
+  const ciphertextDigest = DataHasher(ciphertext);
+
+  // Digest the start line using the ciphertext_digest as a random input
+  const startLineBytes = strToBytes(
+    `${manifest.response.version} ${manifest.response.status} ${manifest.response.message}`
+  );
+  const startLineDigest = PolynomialDigest(startLineBytes, ciphertextDigest);
+
+  // Digest all the headers
+  const headerBytes = headersToBytes(manifest.response.headers);
+  const headersDigest = headerBytes.map(bytes =>
+    PolynomialDigest(bytes, ciphertextDigest)
+  );
+
+  // Digest the JSON sequence
+  const jsonTreeHash = jsonTreeHasher(
+    ciphertextDigest,
+    manifest.response.body.json,
+    maxStackHeight
+  );
+  const jsonSequenceDigest = compressTreeHash(ciphertextDigest, jsonTreeHash);
+
+  // Put all the digests into an array
+  const allDigests: bigint[] = [jsonSequenceDigest, startLineDigest, ...headersDigest];
+
+  // Calculate manifest digest
+  const manifestDigest = modAdd(
+    ciphertextDigest,
+    allDigests.map(d => poseidon1([d])).reduce((a, b) => modAdd(a, b), ZERO)
+  );
+
+  return [ciphertextDigest, manifestDigest];
+}
+
+export function MockManifest(): Manifest {
+  const headers: Record<string, string[]> = {
+    "content-type": ["application/json; charset=utf-8"],
+    "content-encoding": ["gzip"]
+  };
+
+  const jsonSequence: JsonMaskType[] = [
+    { type: "Object", value: strToBytes("data") },
+    { type: "Object", value: 
strToBytes("items") }, + { type: "ArrayIndex", value: 0 }, + { type: "Object", value: strToBytes("profile") }, + { type: "Object", value: strToBytes("name") } + ]; + + return { + response: { + status: "200", + version: "HTTP/1.1", + message: "OK", + headers: headers, + body: { + json: jsonSequence + } + } + }; } \ No newline at end of file diff --git a/circuits/test/full/full.test.ts b/circuits/test/full/full.test.ts index 361dbcc..1ecb107 100644 --- a/circuits/test/full/full.test.ts +++ b/circuits/test/full/full.test.ts @@ -1,7 +1,8 @@ import { assert } from "chai"; -import { circomkit, WitnessTester, toByte, uintArray32ToBits, http_response_plaintext, chacha20_http_response_ciphertext, http_start_line, http_header_0, http_header_1, http_body, PolynomialDigest } from "../common"; +import { circomkit, WitnessTester, toByte, uintArray32ToBits, http_response_plaintext, http_response_ciphertext, http_start_line, http_header_0, http_header_1, http_body, PolynomialDigest, strToBytes, JsonMaskType, jsonTreeHasher, compressTreeHash, modAdd, InitialDigest, MockManifest } from "../common"; import { DataHasher } from "../common/poseidon"; import { toInput } from "../chacha20/chacha20-nivc.test"; +import { poseidon1 } from "poseidon-lite"; // HTTP/1.1 200 OK // content-type: application/json; charset=utf-8 @@ -23,188 +24,119 @@ import { toInput } from "../chacha20/chacha20-nivc.test"; // 320 bytes in the HTTP response -// Concatenate the padding with http_body -const http_response_hash = DataHasher(http_response_plaintext); -const start_line_digest = PolynomialDigest(http_start_line, http_response_hash); -const header_0_digest = PolynomialDigest(http_header_0, http_response_hash); -const header_1_digest = PolynomialDigest(http_header_1, http_response_hash); - -// TODO: The consts below can probably be gotten rid of soon, needed for relic JSON -const padded_http_body = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 123, 13, 10, 32, 32, 32, 34, - 100, 97, 116, 97, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 34, 105, 116, 101, 109, - 115, 34, 58, 32, 91, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 123, 13, 10, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 34, 65, 114, - 116, 105, 115, 116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 34, 112, 114, 111, 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, - 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, - 32, 32, 93, 13, 10, 32, 32, 32, 125, 13, 10, 125, -]; - -const json_key0_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 
0, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 34, 105, 116, 101, 109, 115, 34, 58, 32, 91, - 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 34, 65, 114, 116, 105, 115, 116, - 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 112, 114, 111, - 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, - 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 93, 13, 0, - 0, 0, 0, 0, 0, 0, 0, -]; -const json_key0_mask_hash = DataHasher(json_key0_mask); - -const json_key1_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 91, 13, 10, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 34, 100, 97, 116, 97, 34, 58, 32, 34, 65, 114, 116, 105, 115, 116, 34, 44, 13, 10, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 112, 114, 111, 102, 105, 108, 101, 34, - 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, 97, - 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 93, 0, 0, 0, 0, 0, 0, 0, 0, 0, -]; -const json_key1_mask_hash = DataHasher(json_key1_mask); - -const json_arr_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, - 97, 116, 97, 34, 58, 32, 34, 65, 114, 116, 105, 115, 116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 112, 114, 111, 102, 105, 108, 101, 34, 58, 32, 123, 13, - 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, 97, 109, 101, 34, - 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -]; -const json_arr_mask_hash = DataHasher(json_arr_mask); - -const json_key2_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, - 97, 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -]; -const json_key2_mask_hash = DataHasher(json_key2_mask); - -const json_key3_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, - 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, -]; -const json_key3_mask_hash = DataHasher(json_key3_mask); +const DATA_BYTES = 320; +const MAX_NUMBER_OF_HEADERS = 2; +const MAX_STACK_HEIGHT = 5; -describe("Example NIVC Proof", async () => { - let PlaintextAuthentication: WitnessTester<["key", "nonce", "counter", "plainText", "step_in"], ["step_out"]>; - let HTTPVerification: WitnessTester<["step_in", "data", "main_digests"], ["step_out"]>; +// These `check_*` are currently from Rust +const check_ciphertext_digest = BigInt("5947802862726868637928743536818722886587721698845887498686185738472802646104"); +const check_init_nivc_input = BigInt("1004047589511714647691705222985203827421588749970619269541141824992822853087"); + +const [ciphertext_digest, init_nivc_input] = InitialDigest(MockManifest(), http_response_ciphertext, MAX_STACK_HEIGHT); +// TODO: Fix these +// assert.deepEqual(ciphertext_digest, check_ciphertext_digest); +// assert.deepEqual(init_nivc_input, check_init_nivc_input); - const MAX_NUMBER_OF_HEADERS = 2; - const DATA_BYTES = 320; - const MAX_STACK_HEIGHT = 5; - const MAX_KEY_LENGTH = 8; - const MAX_VALUE_LENGTH = 32; +const value = strToBytes("Taylor Swift"); + +describe("Example NIVC Proof", async () => { + let PlaintextAuthentication: WitnessTester<["step_in", "plainText", "key", "nonce", "counter"], ["step_out"]>; + let HTTPVerification: WitnessTester<["step_in", "ciphertext_digest", "data", "main_digests"], ["step_out"]>; + let JSONExtraction: WitnessTester<["step_in", "ciphertext_digest", "data", "sequence_digest", "value_digest"], ["step_out"]>; before(async () => { - 
PlaintextAuthentication = await circomkit.WitnessTester("CHACHA20", { + PlaintextAuthentication = await circomkit.WitnessTester("PlaintextAuthentication", { file: "chacha20/nivc/chacha20_nivc", template: "ChaCha20_NIVC", - params: [320] + params: [DATA_BYTES] }); console.log("#constraints (PlaintextAuthentication):", await PlaintextAuthentication.getConstraintCount()); - HTTPVerification = await circomkit.WitnessTester(`HttpNIVC`, { + HTTPVerification = await circomkit.WitnessTester("HTTPVerification", { file: "http/verification", template: "HTTPVerification", params: [DATA_BYTES, MAX_NUMBER_OF_HEADERS], }); console.log("#constraints (HTTPVerification):", await HTTPVerification.getConstraintCount()); + JSONExtraction = await circomkit.WitnessTester(`JSONExtraction`, { + file: "json/extraction", + template: "JSONExtraction", + params: [DATA_BYTES, MAX_STACK_HEIGHT], + }); + console.log("#constraints (JSONExtraction):", await JSONExtraction.getConstraintCount()); }); it("Spotify Example", async () => { - const init_nivc_input = DataHasher(chacha20_http_response_ciphertext); - // Run ChaCha20 + // Run PlaintextAuthentication const counterBits = uintArray32ToBits([1])[0] const keyIn = toInput(Buffer.from(Array(32).fill(0))); const nonceIn = toInput(Buffer.from([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4a, 0x00, 0x00, 0x00, 0x00])); - let chacha20 = await PlaintextAuthentication.compute({ key: keyIn, nonce: nonceIn, counter: counterBits, plainText: http_response_plaintext, step_in: init_nivc_input }, ["step_out"]); - console.log("ChaCha20 `step_out`:", chacha20.step_out); - assert.deepEqual(http_response_hash, chacha20.step_out); - - let http = await HTTPVerification.compute({ - step_in: chacha20.step_out, + let plaintext_authentication = await PlaintextAuthentication.compute({ + step_in: init_nivc_input, + plainText: http_response_plaintext, + key: keyIn, + nonce: nonceIn, + counter: counterBits, + }, ["step_out"]); + console.log("Plaintext Authentication `step_out`:", plaintext_authentication.step_out); + const http_response_plaintext_digest = PolynomialDigest(http_response_plaintext, ciphertext_digest); + const http_response_plaintext_digest_hashed = poseidon1([http_response_plaintext_digest]); + const correct_plaintext_authentication_step_out = modAdd(init_nivc_input - ciphertext_digest, http_response_plaintext_digest_hashed); + assert.deepEqual(plaintext_authentication.step_out, correct_plaintext_authentication_step_out); + + // Run HTTPVerification + const start_line_digest = PolynomialDigest(http_start_line, ciphertext_digest); + const header_0_digest = PolynomialDigest(http_header_0, ciphertext_digest); + const header_1_digest = PolynomialDigest(http_header_1, ciphertext_digest); + const padded_http_body = http_body.concat(Array(320 - http_body.length).fill(-1)); + let step_in = BigInt(plaintext_authentication.step_out.toString(10)); + console.log("http_step_in: ", step_in); + let http_verification = await HTTPVerification.compute({ + step_in, + ciphertext_digest, data: http_response_plaintext, main_digests: [start_line_digest, header_0_digest, header_1_digest], }, ["step_out"]); - console.log("HttpNIVC `step_out`:", http.step_out); - - let key0 = [100, 97, 116, 97, 0, 0, 0, 0]; // "data" - let key0Len = 4; - let key1 = [105, 116, 101, 109, 115, 0, 0, 0]; // "items" - let key1Len = 5; - let key2 = [112, 114, 111, 102, 105, 108, 101, 0]; // "profile" - let key2Len = 7; - let key3 = [110, 97, 109, 101, 0, 0, 0, 0]; // "name" - let key3Len = 4; // (autoparallel) This next line gives me 
an aneurysm - let http_step_out = (http.step_out as number[])[0]; - // let json_extract_key0 = await json_mask_object_circuit.compute({ step_in: http_step_out, data: padded_http_body, key: key0, keyLen: key0Len }, ["step_out"]); - // console.log("JSON Extract key0 `step_out`:", json_extract_key0.step_out); - // assert.deepEqual(json_extract_key0.step_out, json_key0_mask_hash); - - // let json_extract_key1 = await json_mask_object_circuit.compute({ step_in: json_extract_key0.step_out, data: json_key0_mask, key: key1, keyLen: key1Len }, ["step_out"]); - // assert.deepEqual(json_extract_key1.step_out, json_key1_mask_hash); - // console.log("JSON Extract key1 `step_out`:", json_extract_key1.step_out); - - // let json_extract_arr = await json_mask_arr_circuit.compute({ step_in: json_extract_key1.step_out, data: json_key1_mask, index: 0 }, ["step_out"]); - // assert.deepEqual(json_extract_arr.step_out, json_arr_mask_hash); - // console.log("JSON Extract arr `step_out`:", json_extract_arr.step_out); - - // let json_extract_key2 = await json_mask_object_circuit.compute({ step_in: json_extract_arr.step_out, data: json_arr_mask, key: key2, keyLen: key2Len }, ["step_out"]); - // assert.deepEqual(json_extract_key2.step_out, json_key2_mask_hash); - // console.log("JSON Extract key2 `step_out`:", json_extract_key2.step_out); - - // let json_extract_key3 = await json_mask_object_circuit.compute({ step_in: json_extract_key2.step_out, data: json_key2_mask, key: key3, keyLen: key3Len }, ["step_out"]); - // assert.deepEqual(json_extract_key3.step_out, json_key3_mask_hash); - // console.log("JSON Extract key3 `step_out`:", json_extract_key3.step_out); - - // // TODO (autoparallel): we need to rethink extraction here. - // let finalOutput = toByte("\"Taylor Swift\""); - // let finalOutputPadded = finalOutput.concat(Array(Math.max(0, MAX_VALUE_LENGTH - finalOutput.length)).fill(0)); - // let final_value_hash = DataHasher(finalOutputPadded); - // let extractValue = await extract_value_circuit.compute({ step_in: json_extract_key3.step_out, data: json_key3_mask }, ["step_out"]); - // console.log("finalValue", extractValue.step_out); - // assert.deepEqual(extractValue.step_out, final_value_hash); + let http_verification_step_out = BigInt((http_verification.step_out as number[])[0]); + console.log("HTTP Verification `step_out`:", http_verification_step_out); + const body_digest_hashed = poseidon1([PolynomialDigest(http_body, ciphertext_digest)]); + const start_line_digest_digest_hashed = poseidon1([start_line_digest]); + const header_0_digest_hashed = poseidon1([header_0_digest]); + const header_1_digest_hashed = poseidon1([header_1_digest]); + const correct_http_verification_step_out = modAdd(step_in - start_line_digest_digest_hashed - header_0_digest_hashed - header_1_digest_hashed - http_response_plaintext_digest_hashed, body_digest_hashed); + assert.deepEqual(http_verification_step_out, correct_http_verification_step_out); + + // Run JSONExtraction + const KEY0 = strToBytes("data"); + const KEY1 = strToBytes("items"); + const KEY2 = strToBytes("profile"); + const KEY3 = strToBytes("name"); + const targetValue = strToBytes("Taylor Swift"); + const keySequence: JsonMaskType[] = [ + { type: "Object", value: KEY0 }, + { type: "Object", value: KEY1 }, + { type: "ArrayIndex", value: 0 }, + { type: "Object", value: KEY2 }, + { type: "Object", value: KEY3 }, + ]; + + const [stack, treeHashes] = jsonTreeHasher(ciphertext_digest, keySequence, MAX_STACK_HEIGHT); + const sequence_digest = compressTreeHash(ciphertext_digest, 
[stack, treeHashes]); + const value_digest = PolynomialDigest(targetValue, ciphertext_digest); + + const sequence_digest_hashed = poseidon1([sequence_digest]); + console.log("sequence_digest_hashed = ", sequence_digest_hashed); + + let json_extraction = await JSONExtraction.compute({ + step_in: http_verification_step_out, + ciphertext_digest, + data: padded_http_body, + value_digest, + sequence_digest, + }, ["step_out"]); + console.log("JSON Extraction `step_out`:", json_extraction.step_out); + assert.deepEqual(json_extraction.step_out, value_digest); }); }); diff --git a/circuits/test/http/verification.test.ts b/circuits/test/http/verification.test.ts index b192ecb..3a9011b 100644 --- a/circuits/test/http/verification.test.ts +++ b/circuits/test/http/verification.test.ts @@ -1,7 +1,7 @@ -import { circomkit, WitnessTester, PolynomialDigest, http_response_plaintext, http_start_line, http_header_0, http_header_1, http_body } from "../common"; +import { circomkit, WitnessTester, PolynomialDigest, http_response_plaintext, http_start_line, http_header_0, http_header_1, http_body, modAdd } from "../common"; import { assert } from "chai"; import { DataHasher } from "../common/poseidon"; -import { poseidon2 } from "poseidon-lite"; +import { poseidon1, poseidon2 } from "poseidon-lite"; // HTTP/1.1 200 OK // content-type: application/json; charset=utf-8 @@ -24,8 +24,8 @@ import { poseidon2 } from "poseidon-lite"; const DATA_BYTES = 320; const MAX_NUMBER_OF_HEADERS = 2; -describe("HTTP Verfication", async () => { - let HTTPVerification: WitnessTester<["step_in", "data", "main_digests"], ["step_out"]>; +describe("HTTP Verification", async () => { + let HTTPVerification: WitnessTester<["step_in", "data", "main_digests", "ciphertext_digest"], ["step_out"]>; before(async () => { HTTPVerification = await circomkit.WitnessTester("http_nivc", { file: "http/verification", @@ -33,89 +33,115 @@ describe("HTTP Verfication", async () => { params: [DATA_BYTES, MAX_NUMBER_OF_HEADERS] }); }); + const mock_ct_digest = poseidon1([69]); it("witness: http_response_plaintext, no header", async () => { // Get all the hashes we need - let plaintext_hash = DataHasher(http_response_plaintext); + let data_digest = PolynomialDigest(http_response_plaintext, mock_ct_digest); + let data_digest_hashed = poseidon1([data_digest]); // Compute the HTTP info digest - let main_digest = PolynomialDigest(http_start_line, plaintext_hash); - let body_digest = PolynomialDigest(http_body, plaintext_hash); - let step_out = poseidon2([body_digest, plaintext_hash]); + let start_line_digest = PolynomialDigest(http_start_line, mock_ct_digest); + let start_line_digest_hashed = poseidon1([start_line_digest]); + let body_digest = PolynomialDigest(http_body, mock_ct_digest); + let body_digest_hashed = poseidon1([body_digest]); + // Use `modAdd` to get back a number between 0 and PRIME + let output_difference = modAdd(body_digest_hashed - start_line_digest_hashed - data_digest_hashed, BigInt(0)); // Run the HTTP circuit // POTENTIAL BUG: I didn't get this to work with `expectPass` as it didn't compute `step_out` that way??? 
let http_nivc_compute = await HTTPVerification.compute({ - step_in: plaintext_hash, + step_in: 0, // This doesn't really matter for this test data: http_response_plaintext, - main_digests: [main_digest].concat(Array(2).fill(0)), + main_digests: [start_line_digest].concat(Array(2).fill(0)), + ciphertext_digest: mock_ct_digest }, ["step_out"]); // I fucking hate circomkit - assert.deepEqual((http_nivc_compute.step_out as BigInt[])[0], step_out); + assert.deepEqual((http_nivc_compute.step_out as BigInt[])[0], output_difference); }); it("witness: http_response_plaintext, one header", async () => { // Get all the hashes we need - let plaintext_hash = DataHasher(http_response_plaintext); + let data_digest = PolynomialDigest(http_response_plaintext, mock_ct_digest); + let data_digest_hashed = poseidon1([data_digest]); // Compute the HTTP info digest - let start_line_digest = PolynomialDigest(http_start_line, plaintext_hash); - let header_0_digest = PolynomialDigest(http_header_0, plaintext_hash); - let body_digest = PolynomialDigest(http_body, plaintext_hash); - let step_out = poseidon2([body_digest, plaintext_hash]); + let start_line_digest = PolynomialDigest(http_start_line, mock_ct_digest); + let start_line_digest_hashed = poseidon1([start_line_digest]); + let header_0_digest = PolynomialDigest(http_header_0, mock_ct_digest); + let header_0_digest_hashed = poseidon1([header_0_digest]); + let body_digest = PolynomialDigest(http_body, mock_ct_digest); + let body_digest_hashed = poseidon1([body_digest]); + // Use `modAdd` to get back a number between 0 and PRIME + let output_difference = modAdd(body_digest_hashed - start_line_digest_hashed - header_0_digest_hashed - data_digest_hashed, BigInt(0)); // Run the HTTP circuit // POTENTIAL BUG: I didn't get this to work with `expectPass` as it didn't compute `step_out` that way??? 
let http_nivc_compute = await HTTPVerification.compute({ - step_in: plaintext_hash, + step_in: 0, // This doesn't really matter for this test data: http_response_plaintext, main_digests: [start_line_digest, header_0_digest].concat(Array(1).fill(0)), + ciphertext_digest: mock_ct_digest }, ["step_out"]); // I fucking hate circomkit - assert.deepEqual((http_nivc_compute.step_out as BigInt[])[0], step_out); + assert.deepEqual((http_nivc_compute.step_out as BigInt[])[0], output_difference); }); it("witness: http_response_plaintext, two headers", async () => { // Get all the hashes we need - let plaintext_hash = DataHasher(http_response_plaintext); + let data_digest = PolynomialDigest(http_response_plaintext, mock_ct_digest); + let data_digest_hashed = poseidon1([data_digest]); // Compute the HTTP info digest - let start_line_digest = PolynomialDigest(http_start_line, plaintext_hash); - let header_0_digest = PolynomialDigest(http_header_0, plaintext_hash); - let header_1_digest = PolynomialDigest(http_header_1, plaintext_hash); - let body_digest = PolynomialDigest(http_body, plaintext_hash); - let step_out = poseidon2([body_digest, plaintext_hash]); + let start_line_digest = PolynomialDigest(http_start_line, mock_ct_digest); + let start_line_digest_hashed = poseidon1([start_line_digest]); + let header_0_digest = PolynomialDigest(http_header_0, mock_ct_digest); + let header_0_digest_hashed = poseidon1([header_0_digest]); + let header_1_digest = PolynomialDigest(http_header_1, mock_ct_digest); + let header_1_digest_hashed = poseidon1([header_1_digest]); + let body_digest = PolynomialDigest(http_body, mock_ct_digest); + let body_digest_hashed = poseidon1([body_digest]); + // Use `modAdd` to get back a number between 0 and PRIME + let output_difference = modAdd(body_digest_hashed - start_line_digest_hashed - header_0_digest_hashed - header_1_digest_hashed - data_digest_hashed, BigInt(0)); // Run the HTTP circuit // POTENTIAL BUG: I didn't get this to work with `expectPass` as it didn't compute `step_out` that way??? 
let http_nivc_compute = await HTTPVerification.compute({ - step_in: plaintext_hash, + step_in: 0, // This doesn't really matter for this test data: http_response_plaintext, main_digests: [start_line_digest, header_0_digest, header_1_digest], + ciphertext_digest: mock_ct_digest }, ["step_out"]); // I fucking hate circomkit - assert.deepEqual((http_nivc_compute.step_out as BigInt[])[0], step_out); + assert.deepEqual((http_nivc_compute.step_out as BigInt[])[0], output_difference); }); it("witness: http_response_plaintext, two headers, order does not matter", async () => { // Get all the hashes we need - let plaintext_hash = DataHasher(http_response_plaintext); + let data_digest = PolynomialDigest(http_response_plaintext, mock_ct_digest); + let data_digest_hashed = poseidon1([data_digest]); // Compute the HTTP info digest - let start_line_digest = PolynomialDigest(http_start_line, plaintext_hash); - let header_0_digest = PolynomialDigest(http_header_0, plaintext_hash); - let header_1_digest = PolynomialDigest(http_header_1, plaintext_hash); - let body_digest = PolynomialDigest(http_body, plaintext_hash); - let step_out = poseidon2([body_digest, plaintext_hash]); + let start_line_digest = PolynomialDigest(http_start_line, mock_ct_digest); + let start_line_digest_hashed = poseidon1([start_line_digest]); + let header_0_digest = PolynomialDigest(http_header_0, mock_ct_digest); + let header_0_digest_hashed = poseidon1([header_0_digest]); + let header_1_digest = PolynomialDigest(http_header_1, mock_ct_digest); + let header_1_digest_hashed = poseidon1([header_1_digest]); + let body_digest = PolynomialDigest(http_body, mock_ct_digest); + let body_digest_hashed = poseidon1([body_digest]); + // Use `modAdd` to get back a number between 0 and PRIME + let output_difference = modAdd(body_digest_hashed - start_line_digest_hashed - header_0_digest_hashed - header_1_digest_hashed - data_digest_hashed, BigInt(0)); // Run the HTTP circuit // POTENTIAL BUG: I didn't get this to work with `expectPass` as it didn't compute `step_out` that way??? 
let http_nivc_compute = await HTTPVerification.compute({ - step_in: plaintext_hash, + step_in: 0, // This doesn't really matter for this test data: http_response_plaintext, main_digests: [header_1_digest, start_line_digest, header_0_digest], + ciphertext_digest: mock_ct_digest }, ["step_out"]); // I fucking hate circomkit - assert.deepEqual((http_nivc_compute.step_out as BigInt[])[0], step_out); + assert.deepEqual((http_nivc_compute.step_out as BigInt[])[0], output_difference); }); }); \ No newline at end of file diff --git a/circuits/test/json/hash_parser.test.ts b/circuits/test/json/extraction.test.ts similarity index 55% rename from circuits/test/json/hash_parser.test.ts rename to circuits/test/json/extraction.test.ts index 94a47e7..b910888 100644 --- a/circuits/test/json/hash_parser.test.ts +++ b/circuits/test/json/extraction.test.ts @@ -1,8 +1,9 @@ -import { poseidon2 } from "poseidon-lite"; -import { circomkit, WitnessTester, readJSONInputFile, strToBytes, JsonMaskType, jsonTreeHasher, compressTreeHash } from "../common"; +import { poseidon1, poseidon2 } from "poseidon-lite"; +import { circomkit, WitnessTester, readJSONInputFile, strToBytes, JsonMaskType, jsonTreeHasher, compressTreeHash, PolynomialDigest, modAdd } from "../common"; -describe("Hash Parser", () => { - let hash_parser: WitnessTester<["data", "polynomial_input", "sequence_digest", "step_in"]>; +describe("JSON Extraction", () => { + let hash_parser: WitnessTester<["step_in", "ciphertext_digest", "data", "sequence_digest", "value_digest"]>; + const mock_ct_digest = poseidon2([69, 420]); it(`input: array_only`, async () => { let filename = "array_only"; @@ -17,35 +18,45 @@ describe("Hash Parser", () => { console.log("#constraints:", await hash_parser.getConstraintCount()); // Test `42` in 0th slot - let polynomial_input = poseidon2([69, 420]); let targetValue = strToBytes("42"); let keySequence: JsonMaskType[] = [ { type: "ArrayIndex", value: 0 }, ]; - let [stack, treeHashes] = jsonTreeHasher(polynomial_input, keySequence, targetValue, MAX_STACK_HEIGHT); - let sequence_digest = compressTreeHash(polynomial_input, [stack, treeHashes]); + let [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, MAX_STACK_HEIGHT); + let sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]); + let sequence_digest_hashed = poseidon1([sequence_digest]); + let value_digest = PolynomialDigest(targetValue, mock_ct_digest); + let data_digest = PolynomialDigest(input, mock_ct_digest); + let data_digest_hashed = poseidon1([data_digest]); + let step_in = modAdd(sequence_digest_hashed, data_digest_hashed); + await hash_parser.expectPass({ data: input, - polynomial_input, + ciphertext_digest: mock_ct_digest, sequence_digest, - step_in: 0 + value_digest, + step_in }); console.log("> First subtest passed."); // Test `"b"` in 1st slot object - polynomial_input = poseidon2([69, 420]); targetValue = strToBytes("b"); keySequence = [ { type: "ArrayIndex", value: 1 }, { type: "Object", value: strToBytes("a") }, ]; - [stack, treeHashes] = jsonTreeHasher(polynomial_input, keySequence, targetValue, MAX_STACK_HEIGHT); - sequence_digest = compressTreeHash(polynomial_input, [stack, treeHashes]); + [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, MAX_STACK_HEIGHT); + sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]); + sequence_digest_hashed = poseidon1([sequence_digest]); + value_digest = PolynomialDigest(targetValue, mock_ct_digest); + step_in = modAdd(sequence_digest_hashed, data_digest_hashed); + 
await hash_parser.expectPass({ data: input, - polynomial_input, + ciphertext_digest: mock_ct_digest, sequence_digest, - step_in: 0 + value_digest, + step_in }); console.log("> Second subtest passed."); }); @@ -63,36 +74,45 @@ describe("Hash Parser", () => { console.log("#constraints:", await hash_parser.getConstraintCount()); // Test `420` in "k"'s 0th slot - let polynomial_input = poseidon2([69, 420]); let targetValue = strToBytes("420"); let keySequence: JsonMaskType[] = [ { type: "Object", value: strToBytes("k") }, { type: "ArrayIndex", value: 0 }, ]; - let [stack, treeHashes] = jsonTreeHasher(polynomial_input, keySequence, targetValue, MAX_STACK_HEIGHT); - let sequence_digest = compressTreeHash(polynomial_input, [stack, treeHashes]); + let [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, MAX_STACK_HEIGHT); + let sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]); + let sequence_digest_hashed = poseidon1([sequence_digest]); + let data_digest = PolynomialDigest(input, mock_ct_digest); + let data_digest_hashed = poseidon1([data_digest]); + let value_digest = PolynomialDigest(targetValue, mock_ct_digest); + let step_in = modAdd(sequence_digest_hashed, data_digest_hashed); + await hash_parser.expectPass({ data: input, - polynomial_input, + ciphertext_digest: mock_ct_digest, sequence_digest, - step_in: 0 + value_digest, + step_in }); console.log("> First subtest passed."); // Test `"d"` in "b"'s 3rd slot - polynomial_input = poseidon2([69, 420]); targetValue = strToBytes("d"); keySequence = [ { type: "Object", value: strToBytes("b") }, { type: "ArrayIndex", value: 3 }, ]; - [stack, treeHashes] = jsonTreeHasher(polynomial_input, keySequence, targetValue, MAX_STACK_HEIGHT); - sequence_digest = compressTreeHash(polynomial_input, [stack, treeHashes]); + [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, MAX_STACK_HEIGHT); + sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]); + sequence_digest_hashed = poseidon1([sequence_digest]); + value_digest = PolynomialDigest(targetValue, mock_ct_digest); + step_in = modAdd(sequence_digest_hashed, data_digest_hashed); await hash_parser.expectPass({ data: input, - polynomial_input, + ciphertext_digest: mock_ct_digest, sequence_digest, - step_in: 0 + value_digest, + step_in }); console.log("> Second subtest passed."); }); @@ -107,7 +127,6 @@ describe("Hash Parser", () => { }); console.log("#constraints:", await hash_parser.getConstraintCount()); - const polynomial_input = poseidon2([69, 420]); const KEY0 = strToBytes("a"); const KEY1 = strToBytes("b"); const targetValue = strToBytes("4"); @@ -119,14 +138,20 @@ describe("Hash Parser", () => { { type: "ArrayIndex", value: 1 }, ]; - const [stack, treeHashes] = jsonTreeHasher(polynomial_input, keySequence, targetValue, 10); - const sequence_digest = compressTreeHash(polynomial_input, [stack, treeHashes]); + const [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, 10); + const sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]); + const sequence_digest_hashed = poseidon1([sequence_digest]); + const data_digest = PolynomialDigest(input, mock_ct_digest); + const data_digest_hashed = poseidon1([data_digest]); + const value_digest = PolynomialDigest(targetValue, mock_ct_digest); + const step_in = modAdd(sequence_digest_hashed, data_digest_hashed); await hash_parser.expectPass({ data: input, - polynomial_input, + ciphertext_digest: mock_ct_digest, sequence_digest, - step_in: 0 + value_digest, + step_in }); }); 
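Each subtest above builds `step_in` the same way; a small helper capturing the recipe (a sketch, assuming the helpers already imported at the top of this test file):

```typescript
import { poseidon1 } from "poseidon-lite";
import { PolynomialDigest, jsonTreeHasher, compressTreeHash, modAdd, JsonMaskType } from "../common";

// step_in for the JSONExtraction tests: poseidon1(sequence_digest) + poseidon1(data_digest)
function jsonExtractionStepIn(
  ctDigest: bigint,
  keySequence: JsonMaskType[],
  data: number[],
  maxStackHeight: number,
): bigint {
  const sequenceDigest = compressTreeHash(
    ctDigest,
    jsonTreeHasher(ctDigest, keySequence, maxStackHeight),
  );
  const dataDigest = PolynomialDigest(data, ctDigest);
  return modAdd(poseidon1([sequenceDigest]), poseidon1([dataDigest]));
}
```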
@@ -140,7 +165,6 @@ describe("Hash Parser", () => {
     });
     console.log("#constraints:", await hash_parser.getConstraintCount());

-    const polynomial_input = poseidon2([69, 420]);
     const KEY0 = strToBytes("data");
     const KEY1 = strToBytes("items");
     const KEY2 = strToBytes("profile");
     const KEY3 = strToBytes("name");
@@ -155,14 +179,20 @@
       { type: "Object", value: KEY3 },
     ];

-    const [stack, treeHashes] = jsonTreeHasher(polynomial_input, keySequence, targetValue, 10);
-    const sequence_digest = compressTreeHash(polynomial_input, [stack, treeHashes]);
+    const [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, 10);
+    const sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]);
+    const sequence_digest_hashed = poseidon1([sequence_digest]);
+    const data_digest = PolynomialDigest(input, mock_ct_digest);
+    const data_digest_hashed = poseidon1([data_digest]);
+    const value_digest = PolynomialDigest(targetValue, mock_ct_digest);
+    const step_in = modAdd(sequence_digest_hashed, data_digest_hashed);
     await hash_parser.expectPass({
       data: input,
-      polynomial_input,
+      ciphertext_digest: mock_ct_digest,
       sequence_digest,
-      step_in: 0
+      value_digest,
+      step_in
     });
   });
})
\ No newline at end of file
diff --git a/circuits/test/utils/hash.test.ts b/circuits/test/utils/hash.test.ts
index c62b969..94733b9 100644
--- a/circuits/test/utils/hash.test.ts
+++ b/circuits/test/utils/hash.test.ts
@@ -1,6 +1,7 @@
 import assert from "assert";
-import { circomkit, WitnessTester } from "../common";
+import { circomkit, PolynomialDigest, WitnessTester } from "../common";
 import { DataHasher, PoseidonModular } from "../common/poseidon";
+import { poseidon1 } from "poseidon-lite";

 describe("hash", () => {
   describe("PoseidonModular_16", () => {
@@ -181,4 +182,49 @@ describe("hash", () => {
       await circuit_small.expectPass({ in: http_start_line.concat(Array(32 - http_start_line.length).fill(-1)) }, { out: hash });
     });
   });
+
+  describe("PolynomialDigest", () => {
+    let circuit: WitnessTester<["bytes", "polynomial_input"], ["digest"]>;
+
+    before(async () => {
+      circuit = await circomkit.WitnessTester(`PolynomialDigest`, {
+        file: "utils/hash",
+        template: "PolynomialDigest",
+        params: [4],
+      });
+      console.log("#constraints:", await circuit.getConstraintCount());
+    });
+
+    it("witness: bytes = [0,0,0,0], polynomial_input = 0", async () => {
+      const bytes = [0, 0, 0, 0];
+      const polynomial_input = 0;
+
+      await circuit.expectPass(
+        { bytes, polynomial_input },
+        { digest: 0 }
+      );
+    });
+
+    it("witness: bytes = [1,2,3,4], polynomial_input = 7", async () => {
+      const bytes = [1, 2, 3, 4];
+      const polynomial_input = 7;
+
+      await circuit.expectPass(
+        { bytes, polynomial_input },
+        { digest: 1 + 2 * 7 + 3 * 7 ** 2 + 4 * 7 ** 3 }
+      );
+    });
+
+    it("witness: bytes = [4*random], polynomial_input = random", async () => {
+      const bytes = Array.from({ length: 4 }, () => Math.floor(Math.random() * 256));
+      const polynomial_input = poseidon1([BigInt(Math.floor(Math.random() * 694206942069420))]);
+      const digest = PolynomialDigest(bytes, polynomial_input);
+
+      await circuit.expectPass(
+        { bytes, polynomial_input },
+        { digest }
+      );
+    });
+
+  });
 });
diff --git a/circuits/utils/hash.circom b/circuits/utils/hash.circom
index 12fa0ea..fd6e4df 100644
--- a/circuits/utils/hash.circom
+++ b/circuits/utils/hash.circom
@@ -111,4 +111,23 @@ template DataHasher(DATA_BYTES) {
         hashes[i+1] <== not_to_hash[i] * (hashes[i] - option_hash[i]) + option_hash[i]; // same as: (1 - not_to_hash[i]) * option_hash[i] + not_to_hash[i] * hash[i];
     }
out <== hashes[DATA_BYTES \ 16]; +} + +template PolynomialDigest(N) { + signal input bytes[N]; + signal input polynomial_input; + + signal output digest; + + signal monomials[N]; + signal terms[N]; + monomials[0] <== 1; + terms[0] <== bytes[0] * monomials[0]; + var accumulation = terms[0]; + for(var i = 1 ; i < N ; i++) { + monomials[i] <== monomials[i - 1] * polynomial_input; + terms[i] <== monomials[i] * bytes[i]; + accumulation += terms[i]; + } + digest <== accumulation; } \ No newline at end of file
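The template above is a plain monomial-basis digest, `digest = Σ bytes[i] · x^i mod p`. For cross-checking witnesses in new tests, a TypeScript mirror (a sketch; assumes the BN254 scalar field used throughout these circuits):

```typescript
// TypeScript mirror of the PolynomialDigest template above.
const P = BigInt(
  "21888242871839275222246405745257275088548364400416034343698204186575808495617"
);

function polynomialDigest(bytes: number[], polynomialInput: bigint): bigint {
  let digest = BigInt(0);
  let monomial = BigInt(1); // x^0
  for (const b of bytes) {
    digest = (digest + monomial * BigInt(b)) % P;     // accumulate bytes[i] * x^i
    monomial = (monomial * polynomialInput) % P;      // advance to x^(i+1)
  }
  return digest;
}
```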