feat: optimized HTTPVerification (#82)
* wip: better HTTP

- Trying to reduce file size and constraints substantially
- Also need to make this work regardless of any padding around headers, etc.

* WIP: improving HTTP digesting

* WIP: http rewrite

* WIP: almost working no-header test

* WIP: working start/body

* working tests!

* cleanup

* Update masker.circom

* Update CHANGELOG.md
Autoparallel authored Dec 7, 2024
1 parent a7971e3 commit da99172
Showing 9 changed files with 440 additions and 321 deletions.
9 changes: 9 additions & 0 deletions CHANGELOG.md
@@ -11,6 +11,15 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
### Changed
#### Circuit Builds
#### Artifacts
- **Circuit sizes:**
  - `http_verification_1024b` (with `--O1` build):
    - non-linear constraints: `128,653`
    - linear constraints: `77,400`
    - Theoretical storage size: `(128,653 + 77,400) * 3 * 32 bytes = 19,781,088 bytes ≈ 19.8 MB` (see the sketch below this hunk)
    - R1CS file: `46.9 MB`
    - Graph file: N/A
  - **WARNING:** Does not seem to build with the `--O2` flag; needs investigation.

### Notes

---
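
The theoretical storage estimate in the changelog entry above can be reproduced with a quick sketch (the assumption of 3 field elements of 32 bytes per constraint comes from that entry, not from any R1CS spec):

```typescript
// Back-of-the-envelope check of the changelog's theoretical storage size,
// assuming 3 field elements (A, B, C coefficients) of 32 bytes per constraint.
const nonLinearConstraints = 128_653;
const linearConstraints = 77_400;

const theoreticalBytes = (nonLinearConstraints + linearConstraints) * 3 * 32;

console.log(theoreticalBytes); // 19781088 bytes ≈ 19.8 MB
```

The gap between this estimate and the actual `46.9 MB` R1CS file is unsurprising: the binary format also stores wire indices alongside each coefficient, and constraints are not limited to three coefficients each.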
1 change: 1 addition & 0 deletions circuits/chacha20/nivc/chacha20_nivc.circom
@@ -141,6 +141,7 @@ template ChaCha20_NIVC(DATA_BYTES) {
}
}


signal ciphertext_hash <== DataHasher(DATA_BYTES)(bigEndianCiphertext);
step_in[0] === ciphertext_hash;

93 changes: 55 additions & 38 deletions circuits/http/verification.circom
@@ -1,6 +1,7 @@
pragma circom 2.1.9;

include "machine.circom";
// TODO: we don't need this if we do a poly digest of the plaintext in authentication circuit
include "../utils/hash.circom";

template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS) {
@@ -9,14 +10,18 @@ template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS) {

// Authenticate the plaintext we are passing in
signal input data[DATA_BYTES];
// TODO: we don't need this if we do a poly digest of the plaintext in authentication circuit
signal data_hash <== DataHasher(DATA_BYTES)(data);
data_hash === step_in[0];

signal input start_line_hash;
signal input header_hashes[MAX_NUMBER_OF_HEADERS];
signal input body_hash;
signal input main_digests[MAX_NUMBER_OF_HEADERS + 1]; // Contains digests of start line and all intended headers (up to `MAX_NUMBER_OF_HEADERS`)
signal contained[MAX_NUMBER_OF_HEADERS + 1];
var num_to_match = MAX_NUMBER_OF_HEADERS + 1;
for(var i = 0 ; i < MAX_NUMBER_OF_HEADERS + 1 ; i++) {
contained[i] <== IsZero()(main_digests[i]);
num_to_match -= contained[i];
}

// TODO: could just have a parser template and reduce code here
component State[DATA_BYTES];
State[0] = HttpStateUpdate();
State[0].byte <== data[0];
@@ -38,46 +43,58 @@ template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS) {
State[data_idx].line_status <== State[data_idx - 1].next_line_status;
}

// Get the start line shit
signal start_line[DATA_BYTES];
signal not_start_line_mask[DATA_BYTES];
for(var i = 0 ; i < DATA_BYTES ; i++) {
not_start_line_mask[i] <== IsZero()(State[i].parsing_start);
start_line[i] <== data[i] * (1 - not_start_line_mask[i]);
}
signal inner_start_line_hash <== DataHasher(DATA_BYTES)(start_line);
signal start_line_hash_equal_check <== IsEqual()([inner_start_line_hash, start_line_hash]);
start_line_hash_equal_check === 1;
signal main_monomials[DATA_BYTES];
main_monomials[0] <== 1;

// Get the header shit
signal header[MAX_NUMBER_OF_HEADERS][DATA_BYTES];
signal header_masks[MAX_NUMBER_OF_HEADERS][DATA_BYTES];
for(var i = 0 ; i < MAX_NUMBER_OF_HEADERS ; i++) {
for(var j = 0 ; j < DATA_BYTES ; j++) {
header_masks[i][j] <== IsEqual()([State[j].parsing_header, i + 1]);
header[i][j] <== data[j] * header_masks[i][j];
}
}
signal inner_header_hashes[MAX_NUMBER_OF_HEADERS];
signal header_is_unused[MAX_NUMBER_OF_HEADERS]; // If a header hash is passed in as 0, it is not used (no way to compute preimage of 0)
signal header_hashes_equal_check[MAX_NUMBER_OF_HEADERS];
for(var i = 0 ; i < MAX_NUMBER_OF_HEADERS ; i++) {
header_is_unused[i] <== IsZero()(header_hashes[i]);
inner_header_hashes[i] <== DataHasher(DATA_BYTES)(header[i]);
header_hashes_equal_check[i] <== IsEqual()([(1 - header_is_unused[i]) * inner_header_hashes[i], header_hashes[i]]);
header_hashes_equal_check[i] === 1;
signal is_line_change[DATA_BYTES-1];
signal was_cleared[DATA_BYTES-1];
signal not_body_and_not_line_change[DATA_BYTES-1];

signal rescaled_or_was_cleared[DATA_BYTES-1];
for(var i = 0 ; i < DATA_BYTES - 1 ; i++) {
is_line_change[i] <== Contains(2)(data[i + 1], [10, 13]); // capture if we hit an end line sequence
was_cleared[i] <== IsZero()(main_monomials[i]);
not_body_and_not_line_change[i] <== (1 - State[i + 1].parsing_body) * (1 - is_line_change[i]);
rescaled_or_was_cleared[i] <== (main_monomials[i] * step_in[0] + was_cleared[i]);
main_monomials[i + 1] <== not_body_and_not_line_change[i] * rescaled_or_was_cleared[i];
}

// Get the body shit
signal body[DATA_BYTES];
signal is_match[DATA_BYTES];
signal contains[DATA_BYTES];
signal is_zero[DATA_BYTES];
signal monomial_is_zero[DATA_BYTES];
signal accum_prev[DATA_BYTES];
var num_matched = 0;
signal inner_main_digest[DATA_BYTES + 1];
inner_main_digest[0] <== 0;
for(var i = 0 ; i < DATA_BYTES ; i++) {
body[i] <== data[i] * State[i].parsing_body;
monomial_is_zero[i] <== IsZero()(main_monomials[i]);
accum_prev[i] <== (1 - monomial_is_zero[i]) * inner_main_digest[i];
inner_main_digest[i+1] <== accum_prev[i] + data[i] * main_monomials[i];
is_zero[i] <== IsZero()(inner_main_digest[i+1]);
contains[i] <== Contains(MAX_NUMBER_OF_HEADERS + 1)(inner_main_digest[i+1], main_digests);
is_match[i] <== (1 - is_zero[i]) * contains[i];
num_matched += is_match[i];
}
num_matched === num_to_match;

// BODY
signal body_monomials[DATA_BYTES];
body_monomials[0] <== 0;
signal body_accum[DATA_BYTES];
body_accum[0] <== 0;
signal body_switch[DATA_BYTES -1];
signal body_digest[DATA_BYTES];
body_digest[0] <== 0;
for(var i = 0 ; i < DATA_BYTES - 1 ; i++) {
body_accum[i + 1] <== body_accum[i] + State[i + 1].parsing_body;
body_switch[i] <== IsEqual()([body_accum[i + 1], 1]);
body_monomials[i + 1] <== body_monomials[i] * step_in[0] + body_switch[i];
body_digest[i + 1] <== body_digest[i] + body_monomials[i + 1] * data[i + 1];
}
signal inner_body_hash <== DataHasher(DATA_BYTES)(body);
signal body_hash_equal_check <== IsEqual()([inner_body_hash, body_hash]);
body_hash_equal_check === 1;

step_out[0] <== inner_body_hash;
// TODO: This, for now, passes back out the hash of body_digest and the plaintext_hash so it can be properly verified in the JSON
step_out[0] <== PoseidonChainer()([body_digest[DATA_BYTES - 1], step_in[0]]);

// Verify machine ends in a valid state
State[DATA_BYTES - 1].next_parsing_start === 0;
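
For intuition, the rewrite above replaces the per-section `DataHasher` checks with polynomial digests keyed by `step_in[0]`: each start line/header is folded into `inner_main_digest` and matched against `main_digests`, and the body is folded into `body_digest`, which is chained into `step_out[0]`. Below is a minimal off-circuit TypeScript sketch of the body accumulation; the function and flag names are hypothetical, and the prime is the BN254 scalar field also used by the `PolynomialDigest` test helper further down.

```typescript
const PRIME = BigInt(
  "21888242871839275222246405745257275088548364400416034343698204186575808495617"
);

// Mirrors body_monomials / body_digest in the circuit: the monomial switches on
// at the first body byte, then is multiplied by the key for every later byte.
function bodyDigest(data: number[], parsingBody: boolean[], key: bigint): bigint {
  let monomial = BigInt(0);
  let digest = BigInt(0);
  let bodyStarted = false;
  for (let i = 0; i < data.length; i++) {
    if (!bodyStarted && parsingBody[i]) {
      monomial = BigInt(1); // body_switch fires exactly once
      bodyStarted = true;
    } else if (bodyStarted) {
      monomial = (monomial * key) % PRIME;
    }
    digest = (digest + BigInt(data[i]) * monomial) % PRIME;
  }
  return digest;
}
```

Given the plaintext and the parser's `parsing_body` flags, this should agree with `PolynomialDigest(http_body, key)` from the test fixtures, i.e. the value the circuit feeds into `PoseidonChainer` together with `step_in[0]`.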
168 changes: 131 additions & 37 deletions circuits/test/common/index.ts
@@ -90,13 +90,13 @@ export function toByte(data: string): number[] {

export function hexToBytes(hex: any) {
return hex.match(/.{1,2}/g).map((byte: any) => parseInt(byte, 16));
}
}

export function hexBytesToBigInt(hexBytes: number[]): any[] {
return hexBytes.map(byte => {
let n = BigInt(byte);
return n;
});
return hexBytes.map(byte => {
let n = BigInt(byte);
return n;
});
}

export function hexToBits(hex: string): number[] {
@@ -178,51 +178,51 @@ export function binaryStringToHex(binaryString: string): string {
* BE order.
*/
export function uint8ArrayToBitsBE(buff: Uint8Array | number[]) {
const res: number[] = []
for (let i = 0; i < buff.length; i++) {
for (let j = 0; j < 8; j++) {
if ((buff[i] >> 7-j) & 1) {
res.push(1);
} else {
res.push(0);
}
}
}
return res;
const res: number[] = []
for (let i = 0; i < buff.length; i++) {
for (let j = 0; j < 8; j++) {
if ((buff[i] >> 7 - j) & 1) {
res.push(1);
} else {
res.push(0);
}
}
}
return res;
}

export function toUint32Array(buf: Uint8Array) {
const arr = new Uint32Array(buf.length / 4)
const arrView = new DataView(buf.buffer, buf.byteOffset, buf.byteLength)
for(let i = 0;i < arr.length;i++) {
arr[i] = arrView.getUint32(i * 4, true)
}
return arr
const arr = new Uint32Array(buf.length / 4)
const arrView = new DataView(buf.buffer, buf.byteOffset, buf.byteLength)
for (let i = 0; i < arr.length; i++) {
arr[i] = arrView.getUint32(i * 4, true)
}
return arr
}

/**
* Converts a Uint32Array to an array of bits.
* LE order.
*/
export function uintArray32ToBits(uintArray: Uint32Array | number[]) {
const bits: number[][] = []
for (let i = 0; i < uintArray.length; i++) {
const uint = uintArray[i]
bits.push(numToBitsNumerical(uint))
}
const bits: number[][] = []
for (let i = 0; i < uintArray.length; i++) {
const uint = uintArray[i]
bits.push(numToBitsNumerical(uint))
}

return bits
return bits
}

export function numToBitsNumerical(num: number, bitCount = 32) {
const bits: number[] = []
for(let i = 2 ** (bitCount - 1);i >= 1;i /= 2) {
const bit = num >= i ? 1 : 0
bits.push(bit)
num -= bit * i
}
const bits: number[] = []
for (let i = 2 ** (bitCount - 1); i >= 1; i /= 2) {
const bit = num >= i ? 1 : 0
bits.push(bit)
num -= bit * i
}

return bits
return bits
}

export function bytesToBigInt(bytes: number[] | Uint8Array): bigint {
@@ -233,4 +233,98 @@ export function bytesToBigInt(bytes: number[] | Uint8Array): bigint {
}

return result;
}
}

export function PolynomialDigest(coeffs: number[], input: bigint): bigint {
const prime = BigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617");

let result = BigInt(0);
let power = BigInt(1);

for (let i = 0; i < coeffs.length; i++) {
result = (result + BigInt(coeffs[i]) * power) % prime;
power = (power * input) % prime;
}

return result;
}

// HTTP/1.1 200 OK
// content-type: application/json; charset=utf-8
// content-encoding: gzip
// Transfer-Encoding: chunked
//
// {
// "data": {
// "items": [
// {
// "data": "Artist",
// "profile": {
// "name": "Taylor Swift"
// }
// }
// ]
// }
// }

// 320 bytes in the HTTP response
export const http_response_plaintext = [
72, 84, 84, 80, 47, 49, 46, 49, 32, 50, 48, 48, 32, 79, 75, 13, 10, 99, 111, 110, 116, 101, 110,
116, 45, 116, 121, 112, 101, 58, 32, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 47, 106,
115, 111, 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 116, 102, 45, 56, 13, 10, 99,
111, 110, 116, 101, 110, 116, 45, 101, 110, 99, 111, 100, 105, 110, 103, 58, 32, 103, 122, 105,
112, 13, 10, 84, 114, 97, 110, 115, 102, 101, 114, 45, 69, 110, 99, 111, 100, 105, 110, 103, 58,
32, 99, 104, 117, 110, 107, 101, 100, 13, 10, 13, 10, 123, 13, 10, 32, 32, 32, 34, 100, 97, 116,
97, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 34, 105, 116, 101, 109, 115, 34, 58, 32,
91, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32,
32, 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 34, 65, 114, 116, 105, 115,
116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 112, 114,
111, 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
32, 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119,
105, 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13,
10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 93, 13,
10, 32, 32, 32, 125, 13, 10, 125,
];

export const chacha20_http_response_ciphertext = [
2, 125, 219, 141, 140, 93, 49, 129, 95, 178, 135, 109, 48, 36, 194, 46, 239, 155, 160, 70, 208,
147, 37, 212, 17, 195, 149, 190, 38, 215, 23, 241, 84, 204, 167, 184, 179, 172, 187, 145, 38, 75,
123, 96, 81, 6, 149, 36, 135, 227, 226, 254, 177, 90, 241, 159, 0, 230, 183, 163, 210, 88, 133,
176, 9, 122, 225, 83, 171, 157, 185, 85, 122, 4, 110, 52, 2, 90, 36, 189, 145, 63, 122, 75, 94,
21, 163, 24, 77, 85, 110, 90, 228, 157, 103, 41, 59, 128, 233, 149, 57, 175, 121, 163, 185, 144,
162, 100, 17, 34, 9, 252, 162, 223, 59, 221, 106, 127, 104, 11, 121, 129, 154, 49, 66, 220, 65,
130, 171, 165, 43, 8, 21, 248, 12, 214, 33, 6, 109, 3, 144, 52, 124, 225, 206, 223, 213, 86, 186,
93, 170, 146, 141, 145, 140, 57, 152, 226, 218, 57, 30, 4, 131, 161, 0, 248, 172, 49, 206, 181,
47, 231, 87, 72, 96, 139, 145, 117, 45, 77, 134, 249, 71, 87, 178, 239, 30, 244, 156, 70, 118,
180, 176, 90, 92, 80, 221, 177, 86, 120, 222, 223, 244, 109, 150, 226, 142, 97, 171, 210, 38,
117, 143, 163, 204, 25, 223, 238, 209, 58, 59, 100, 1, 86, 241, 103, 152, 228, 37, 187, 79, 36,
136, 133, 171, 41, 184, 145, 146, 45, 192, 173, 219, 146, 133, 12, 246, 190, 5, 54, 99, 155, 8,
198, 156, 174, 99, 12, 210, 95, 5, 128, 166, 118, 50, 66, 26, 20, 3, 129, 232, 1, 192, 104, 23,
152, 212, 94, 97, 138, 162, 90, 185, 108, 221, 211, 247, 184, 253, 15, 16, 24, 32, 240, 240, 3,
148, 89, 30, 54, 161, 131, 230, 161, 217, 29, 229, 251, 33, 220, 230, 102, 131, 245, 27, 141,
220, 67, 16, 26,
];

export const http_start_line = [72, 84, 84, 80, 47, 49, 46, 49, 32, 50, 48, 48, 32, 79, 75];

export const http_header_0 = [
99, 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 58, 32, 97, 112, 112, 108, 105, 99, 97,
116, 105, 111, 110, 47, 106, 115, 111, 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117,
116, 102, 45, 56,
];

export const http_header_1 = [
99, 111, 110, 116, 101, 110, 116, 45, 101, 110, 99, 111, 100, 105, 110, 103, 58, 32, 103, 122,
105, 112,
];
export const http_body = [
123, 13, 10, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32,
32, 34, 105, 116, 101, 109, 115, 34, 58, 32, 91, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116,
97, 34, 58, 32, 34, 65, 114, 116, 105, 115, 116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32,
32, 32, 32, 32, 32, 32, 32, 34, 112, 114, 111, 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32,
32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32,
34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32,
32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125,
13, 10, 32, 32, 32, 32, 32, 32, 32, 93, 13, 10, 32, 32, 32, 125, 13, 10, 125,
];
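
As a usage note, these fixtures are what `PolynomialDigest` above is applied to when preparing the `main_digests` input of `HTTPVerification`. A sketch follows; the key value and `MAX_NUMBER_OF_HEADERS` here are illustrative, and in the circuit the key is the plaintext digest carried in `step_in[0]`.

```typescript
// Illustrative key; in practice the digests are keyed by step_in[0].
const key = BigInt("0x1234");
const MAX_NUMBER_OF_HEADERS = 5;

const lineDigests = [http_start_line, http_header_0, http_header_1].map(
  (line) => PolynomialDigest(line, key)
);

// main_digests has MAX_NUMBER_OF_HEADERS + 1 slots (start line + headers);
// unused slots are 0, which the circuit treats as "nothing to match".
const mainDigests = [
  ...lineDigests,
  ...Array(MAX_NUMBER_OF_HEADERS + 1 - lineDigests.length).fill(BigInt(0)),
];
```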
2 changes: 1 addition & 1 deletion circuits/test/common/poseidon.ts
@@ -105,4 +105,4 @@ export function DataHasher(input: number[]): bigint {

// Return the last hash
return hashes[Math.ceil(input.length / 16)];
}
}
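
As a usage note, this TypeScript `DataHasher` is the test-side counterpart of the circuit's `DataHasher`, so hashing the plaintext fixture off-circuit should reproduce the value the circuits compare against `step_in[0]`. A sketch (import paths are illustrative):

```typescript
import { DataHasher } from "./poseidon"; // illustrative path
import { http_response_plaintext } from "./index"; // illustrative path

// Should match data_hash in HTTPVerification; hashing the ciphertext fixture
// the same way should match ciphertext_hash in ChaCha20_NIVC.
const plaintextDigest: bigint = DataHasher(http_response_plaintext);
console.log(plaintextDigest.toString());
```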
