* feat: hash based JSON verification
* WIP: save
* resetting for clearer approach
* good save state
* feat: working hash version. Though this will be too expensive, the idea works!
* WIP: need to clear after comma
* WIP: good progress
* WIP: getting keys also now
* feat: (mostly?) working tree hasher
* seems to be correct for spotify
* perf: first optimization
* wip: brain hurty. Left a note to myself.
* fix: tree hasher seems correct now
* TODO: note to self
* cleanup from rebase
* cleanup
* WIP: seems to monomial correctly
* rename
* add in value to eval at
* WIP: start looking for matches
* made some fixes
* it may be working!
* now i can write tests!
* more tests
* more JSON hasher tests
* cleanup
1 parent da99172 · commit 06c6920 · 18 changed files with 793 additions and 81 deletions
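The circuit shown below implements the idea sketched in these commits: the JSON parser's stack and tree-hash state is compressed into a single field element by evaluating it as a polynomial at a random point. In notation of my own, read off from the accumulation loop in the circuit (with $r = \texttt{polynomial\_input}$ and $h = \texttt{MAX\_STACK\_HEIGHT}$):

$$
\mathrm{state\_digest} = \sum_{i=0}^{h-1} \left( \mathrm{stack}_{i,0}\, r^{4i} + \mathrm{stack}_{i,1}\, r^{4i+1} + \mathrm{tree\_hash}_{i,0}\, r^{4i+2} + \mathrm{tree\_hash}_{i,1}\, r^{4i+3} \right)
$$

A byte position "matches" when this running digest equals the public sequence_digest, and the circuit requires exactly one such match.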
@@ -0,0 +1,95 @@
pragma circom 2.1.9;

include "../../utils/bits.circom";
include "hash_machine.circom";

template ParserHasher(DATA_BYTES, MAX_STACK_HEIGHT) {
    signal input data[DATA_BYTES];
    signal input polynomial_input;
    signal input sequence_digest;

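    // data:             the raw JSON bytes to check
    // polynomial_input: the random point at which all stack/tree digests are evaluated
    // sequence_digest:  the digest of the target parser state to search for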
    //--------------------------------------------------------------------------------------------//
    // Initialize the parser
    component State[DATA_BYTES];
    State[0] = StateUpdateHasher(MAX_STACK_HEIGHT);
    for(var i = 0; i < MAX_STACK_HEIGHT; i++) {
        State[0].stack[i] <== [0, 0];
        State[0].tree_hash[i] <== [0, 0];
    }
    State[0].byte <== data[0];
    State[0].polynomial_input <== polynomial_input;
    State[0].monomial <== 0;
    State[0].parsing_string <== 0;
    State[0].parsing_number <== 0;

    // Set up monomials for stack/tree digesting
    signal monomials[4 * MAX_STACK_HEIGHT];
    monomials[0] <== 1;
    for(var i = 1 ; i < 4 * MAX_STACK_HEIGHT ; i++) {
        monomials[i] <== monomials[i - 1] * polynomial_input;
    }
    signal intermediate_digest[DATA_BYTES][4 * MAX_STACK_HEIGHT];
    signal state_digest[DATA_BYTES];

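    // monomials[i] = polynomial_input^i: the coefficient basis used to compress
    // each (stack, tree_hash) slot pair into the running state digest below.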
    // Debugging
    // for(var i = 0; i < MAX_STACK_HEIGHT; i++) {
    //     log("State[", 0, "].next_stack[", i, "] = [", State[0].next_stack[i][0], "][", State[0].next_stack[i][1], "]");
    // }
    // for(var i = 0; i < MAX_STACK_HEIGHT; i++) {
    //     log("State[", 0, "].next_tree_hash[", i, "] = [", State[0].next_tree_hash[i][0], "][", State[0].next_tree_hash[i][1], "]");
    // }
    // log("State[", 0, "].next_monomial =", State[0].next_monomial);
    // log("State[", 0, "].next_parsing_string =", State[0].next_parsing_string);
    // log("State[", 0, "].next_parsing_number =", State[0].next_parsing_number);
    // log("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx");

    var total_matches = 0;
    signal is_matched[DATA_BYTES];
    for(var data_idx = 1; data_idx < DATA_BYTES; data_idx++) {
        // Chain the hasher state: each byte feeds the previous state's outputs
        // into the next StateUpdateHasher instance.
        State[data_idx] = StateUpdateHasher(MAX_STACK_HEIGHT);
        State[data_idx].byte <== data[data_idx];
        State[data_idx].polynomial_input <== polynomial_input;
        State[data_idx].stack <== State[data_idx - 1].next_stack;
        State[data_idx].parsing_string <== State[data_idx - 1].next_parsing_string;
        State[data_idx].parsing_number <== State[data_idx - 1].next_parsing_number;
        State[data_idx].monomial <== State[data_idx - 1].next_monomial;
        State[data_idx].tree_hash <== State[data_idx - 1].next_tree_hash;

        // Digest the whole stack and tree hash
        var accumulator = 0;
        for(var i = 0 ; i < MAX_STACK_HEIGHT ; i++) {
            intermediate_digest[data_idx][4 * i]     <== State[data_idx].next_stack[i][0] * monomials[4 * i];
            intermediate_digest[data_idx][4 * i + 1] <== State[data_idx].next_stack[i][1] * monomials[4 * i + 1];
            intermediate_digest[data_idx][4 * i + 2] <== State[data_idx].next_tree_hash[i][0] * monomials[4 * i + 2];
            intermediate_digest[data_idx][4 * i + 3] <== State[data_idx].next_tree_hash[i][1] * monomials[4 * i + 3];
            accumulator += intermediate_digest[data_idx][4 * i] + intermediate_digest[data_idx][4 * i + 1]
                         + intermediate_digest[data_idx][4 * i + 2] + intermediate_digest[data_idx][4 * i + 3];
        }
        state_digest[data_idx] <== accumulator;
        is_matched[data_idx] <== IsEqual()([state_digest[data_idx], sequence_digest]);
        total_matches += is_matched[data_idx];

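        // A byte position matches when the digest of the full parser state equals
        // the public sequence_digest; total_matches counts such positions.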
        // Debugging
        // for(var i = 0; i < MAX_STACK_HEIGHT; i++) {
        //     log("State[", data_idx, "].next_stack[", i, "] = [", State[data_idx].next_stack[i][0], "][", State[data_idx].next_stack[i][1], "]");
        // }
        // for(var i = 0; i < MAX_STACK_HEIGHT; i++) {
        //     log("State[", data_idx, "].next_tree_hash[", i, "] = [", State[data_idx].next_tree_hash[i][0], "][", State[data_idx].next_tree_hash[i][1], "]");
        // }
        // log("State[", data_idx, "].next_monomial =", State[data_idx].next_monomial);
        // log("State[", data_idx, "].next_parsing_string =", State[data_idx].next_parsing_string);
        // log("State[", data_idx, "].next_parsing_number =", State[data_idx].next_parsing_number);
        // log("++++++++++++++++++++++++++++++++++++++++++++++++");
        // log("state_digest[", data_idx, "] = ", state_digest[data_idx]);
        // log("total_matches = ", total_matches);
        // log("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx");
    }

    // TODO: Assert something about total matches, but keep in mind we should try to output the target value hash
    total_matches === 1;

    // Constrain the input to be valid JSON: every container must be closed, so the
    // final stack and tree hash must be fully zeroed out.
    for(var i = 0; i < MAX_STACK_HEIGHT; i++) {
        State[DATA_BYTES - 1].next_stack[i] === [0, 0];
        State[DATA_BYTES - 1].next_tree_hash[i] === [0, 0];
    }
}
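For reference, a minimal sketch of how this template might be instantiated as a main component; the parameter values and the choice of public signal are illustrative assumptions, not taken from this commit:

// Hypothetical instantiation: DATA_BYTES = 1024 and MAX_STACK_HEIGHT = 10 are
// placeholder values, and making sequence_digest public is an assumption.
component main { public [sequence_digest] } = ParserHasher(1024, 10);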