From e4da9c143dd5f247c752823962b8c9827810fd72 Mon Sep 17 00:00:00 2001 From: RogerTaule Date: Tue, 5 Mar 2024 13:09:16 +0100 Subject: [PATCH] MerkleTreeBN128 to work with any power-of-two arity --- .github/workflows/on-pull-request.yml | 27 +++ circuits.bn128/linearhash.circom | 17 +- circuits.bn128/merkle.circom | 86 +++---- circuits.bn128/merklehash.circom | 21 +- circuits.bn128/stark_verifier.circom.ejs | 52 ++--- package-lock.json | 21 -- package.json | 10 + .../hash/linearhash/linearhash.bn128.js | 8 +- .../hash/merklehash/merklehash_bn128.js | 41 ++-- .../hash/merklehash/merklehash_bn128_p.js | 69 +++--- .../merklehash/merklehash_bn128_worker.js | 40 ++-- src/helpers/transcript/transcript.bn128.js | 11 +- src/main_buildconsttree.js | 4 +- src/main_pil2circom.js | 8 +- src/main_prover.js | 14 +- src/main_verifier.js | 10 +- src/pil2circom.js | 6 +- src/stark/stark_buildConstTree.js | 7 +- src/stark/stark_gen.js | 8 +- src/stark/stark_setup.js | 5 +- src/stark/stark_verify.js | 7 +- .../linearhash.bn128.custom.test.circom | 6 - .../linearhash100.bn128.custom.test.circom | 6 - .../linearhash110.bn128.custom.test.circom | 6 - ....circom => linearhash16.bn128.test.circom} | 2 +- .../circom/linearhash16_100.bn128.test.circom | 5 + ...t.circom => linearhash4.bn128.test.circom} | 2 +- .../circom/linearhash4_100.bn128.test.circom | 5 + .../merklehash16.bn128.custom.test.circom | 6 - ....circom => merklehash16.bn128.test.circom} | 2 +- .../merklehash4.bn128.custom.test.circom | 6 - .../circom/merklehash4.bn128.test.circom | 5 + .../bn128/linearhash.bn128.circuit.test.js | 79 +++++-- .../bn128/merklehash.bn128.circuit.test.js | 78 +++++-- .../bn128/verifyEvals.bn128.circuit.test.js | 4 +- test/circuits/gl/circom/cmuladd.test.circom | 2 +- test/f3g.test.js | 2 +- test/merklehash.bn128.test.js | 111 ++++++++-- test/merklehash.test.js | 2 +- test/merklehash_bn128_p.test.js | 209 +++++++++++++++--- 40 files changed, 676 insertions(+), 334 deletions(-) delete mode 100644 test/circuits/bn128/circom/linearhash.bn128.custom.test.circom delete mode 100644 test/circuits/bn128/circom/linearhash100.bn128.custom.test.circom delete mode 100644 test/circuits/bn128/circom/linearhash110.bn128.custom.test.circom rename test/circuits/bn128/circom/{linearhash100.bn128.test.circom => linearhash16.bn128.test.circom} (67%) create mode 100644 test/circuits/bn128/circom/linearhash16_100.bn128.test.circom rename test/circuits/bn128/circom/{linearhash.bn128.test.circom => linearhash4.bn128.test.circom} (68%) create mode 100644 test/circuits/bn128/circom/linearhash4_100.bn128.test.circom delete mode 100644 test/circuits/bn128/circom/merklehash16.bn128.custom.test.circom rename test/circuits/bn128/circom/{merklehash.bn128.test.circom => merklehash16.bn128.test.circom} (61%) delete mode 100644 test/circuits/bn128/circom/merklehash4.bn128.custom.test.circom create mode 100644 test/circuits/bn128/circom/merklehash4.bn128.test.circom diff --git a/.github/workflows/on-pull-request.yml b/.github/workflows/on-pull-request.yml index 6f0c9d1b..d26d5fa5 100644 --- a/.github/workflows/on-pull-request.yml +++ b/.github/workflows/on-pull-request.yml @@ -122,6 +122,33 @@ jobs: - name: Check C12 run: npm run test:C12 + c12-custom-test: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + with: + node-version: '16.17.0' + check-latest: true + cache: "npm" + + - name: "Install circom" + run: | + curl https://sh.rustup.rs -sSf -o rust.sh + bash -f rust.sh -y + git clone 
https://github.com/iden3/circom.git + cd circom + cargo build --release + cargo install --path circom + + - name: Install dependencies + run: npm ci + + - name: Create tmp directory + run: mkdir tmp + + - name: Check C12 with arity 4 + run: npm run test:C12:custom c18-test: runs-on: ubuntu-20.04 diff --git a/circuits.bn128/linearhash.circom b/circuits.bn128/linearhash.circom index 46963159..e872f6b5 100644 --- a/circuits.bn128/linearhash.circom +++ b/circuits.bn128/linearhash.circom @@ -5,7 +5,7 @@ include "poseidon.circom"; // Given a list on inputs over GL³, compute the linear hash of the list, mapping from GL³ to BN // via the map (x,y,z) |-> x + y·2⁶⁴ + z·2¹²⁸, which is injective but not surjective; // and hashing the resulting BN elements in chunks of 16 using Poseidon. -template LinearHash(nInputs, eSize) { +template LinearHash(nInputs, eSize, arity) { signal input in[nInputs][eSize]; signal output out; @@ -25,7 +25,8 @@ template LinearHash(nInputs, eSize) { out <== sAc; nHashes = 0; } else { - nHashes = (nElements256 - 1)\16 + 1; + + nHashes = (nElements256 - 1)\arity +1; } component hash[nHashes > 0 ? nHashes - 1 : 0]; @@ -33,11 +34,11 @@ template LinearHash(nInputs, eSize) { component lastHash; for (var i=0; i 0) { - nLastHash = nElements256 - (nHashes - 1)*16; + if (nHashes>0) { + nLastHash = nElements256 - (nHashes - 1)*arity; lastHash = PoseidonEx(nLastHash, 1); } @@ -58,7 +59,7 @@ template LinearHash(nInputs, eSize) { sAc = 0; nAc = 0; curHashIdx ++; - if (curHashIdx == 16) { + if (curHashIdx == arity) { curHash++; curHashIdx = 0; } @@ -72,7 +73,7 @@ template LinearHash(nInputs, eSize) { hash[curHash].inputs[curHashIdx] <== sAc; } curHashIdx ++; - if (curHashIdx == 16) { + if (curHashIdx == arity) { curHash = 0; curHashIdx = 0; } @@ -84,6 +85,7 @@ template LinearHash(nInputs, eSize) { } else { hash[i].initialState <== hash[i-1].out[0]; } + _ <== hash[i].out; } if (nHashes == 1) { lastHash.initialState <== 0; @@ -91,6 +93,7 @@ template LinearHash(nInputs, eSize) { lastHash.initialState <== hash[nHashes-2].out[0]; } + _ <== lastHash.out; out <== lastHash.out[0]; } } diff --git a/circuits.bn128/merkle.circom b/circuits.bn128/merkle.circom index 5a5f0655..acc634fe 100644 --- a/circuits.bn128/merkle.circom +++ b/circuits.bn128/merkle.circom @@ -1,14 +1,16 @@ pragma circom 2.1.0; +include "bitify.circom"; +include "comparators.circom"; include "poseidon.circom"; /* Given a leaf value, its sibling path and a key indicating the hashing position for each element in the path, calculate the merkle tree root - keyBits: number of bits in the key */ -template Merkle(keyBits) { - var arity = 16; +template Merkle(keyBits, arity) { var nLevels = 0; + var nBits = log2(arity); var n = 1 << keyBits; var nn = n; while (nn > 1) { @@ -21,80 +23,44 @@ template Merkle(keyBits) { signal input key[keyBits]; signal output root; - signal s[16]; - signal a, b, c, d, ab, ac, ad, bc, bd, cd, abc, abd, acd, bcd, abcd; + signal s[arity]; component mNext; component hash; + component keyNum; + if (nLevels == 0) { root <== value; - s <== [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]; - (a, b, c, d, ab, ac, ad, bc, bd, cd, abc, abd, acd, bcd, abcd) <== (0,0,0,0,0,0,0,0,0,0,0,0,0,0,0); - } else { - if (keyBits>=1) { - d <== key[0]; - } else { - d <== 0; - } - if (keyBits>=2) { - c <== key[1]; - } else { - c <== 0; - } - if (keyBits>=3) { - b <== key[2]; - } else { - b <== 0; - } - if (keyBits>=4) { - a <== key[3]; - } else { - a <== 0; + for(var i = 0; i < arity; i++) { + s[i] <== 0; } - ab <== a*b; - ac <== a*c; - ad <== a*d; 
- bc <== b*c; - bd <== b*d; - cd <== c*d; - - abc <== ab*c; - abd <== ab*d; - acd <== ac*d; - bcd <== bc*d; - - abcd <== ab*cd; - - s[0] <== 1-d-c + cd-b + bd + bc-bcd-a + ad + ac-acd + ab-abd-abc + abcd; - s[1] <== d-cd-bd + bcd-ad + acd + abd-abcd; - s[2] <== c-cd-bc + bcd-ac + acd + abc-abcd; - s[3] <== cd-bcd-acd + abcd; - s[4] <== b-bd-bc + bcd-ab + abd + abc-abcd; - s[5] <== bd-bcd-abd + abcd; - s[6] <== bc-bcd-abc + abcd; - s[7] <== bcd-abcd; - s[8] <== a-ad-ac + acd-ab + abd + abc-abcd; - s[9] <== ad-acd-abd + abcd; - s[10] <== ac-acd-abc + abcd; - s[11] <== acd-abcd; - s[12] <== ab-abd-abc + abcd; - s[13] <== abd-abcd; - s[14] <== abc-abcd; - s[15] <== abcd; + } else { + keyNum = Bits2Num(nBits); + for(var i = 0; i < nBits; i++) { + if(keyBits >= i + 1) { + keyNum.in[i] <== key[i]; + } else { + keyNum.in[i] <== 0; + } + } + + for(var i = 0; i < arity; i++) { + s[i] <== IsEqual()([keyNum.out, i]); + } hash = Poseidon(arity); for (var i=0; i 0) { const firstUnused = Math.max(this.hiCnt, 1); - if(firstUnused < 17) { - this.code.push(`for(var i = ${firstUnused}; i < 17; i++){ + if(firstUnused < (arityTranscript + 1)) { + this.code.push(`for(var i = ${firstUnused}; i < ${arityTranscript + 1}; i++){ _ <== transcriptHash_${this.hCnt -1}[i]; // Unused transcript values }`) } } - while (this.pending.length<16) { + while (this.pending.length < arityTranscript) { this.pending.push("0"); } - this.code.push(`\n signal transcriptHash_${this.hCnt++}[17] <== PoseidonEx(16,17)([${this.pending.join(',')}], ${this.state});`); - for (let i=0; i<17; i++) { + this.code.push(`\n signal transcriptHash_${this.hCnt++}[${Number(arityTranscript) + 1}] <== PoseidonEx(${arityTranscript}, ${arityTranscript + 1})([${this.pending.join(',')}], ${this.state});`); + for (let i=0; i][<%- starkInfo.nConstants%>]; // Merkle proofs for each of the evaluations - signal input s0_siblings1[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/4)+1 %>][16]; + signal input s0_siblings1[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/nBitsArity)+1 %>][<%- arity %>]; <% if (starkInfo.mapSectionsN.cm2_2ns > 0) { -%> - signal input s0_siblings2[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/4)+1 %>][16]; + signal input s0_siblings2[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/nBitsArity)+1 %>][<%- arity %>]; <% } -%> <% if (starkInfo.mapSectionsN.cm3_2ns > 0) { -%> - signal input s0_siblings3[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/4)+1 %>][16]; + signal input s0_siblings3[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/nBitsArity)+1 %>][<%- arity %>]; <% } -%> - signal input s0_siblings4[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/4)+1 %>][16]; - signal input s0_siblingsC[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/4)+1 %>][16]; + signal input s0_siblings4[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/nBitsArity)+1 %>][<%- arity %>]; + signal input s0_siblingsC[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/nBitsArity)+1 %>][<%- arity %>]; // Contains the root of the original polynomial and all the intermediate FRI polynomials except for the last step <% let si_roots = []; -%> @@ -690,7 +690,7 @@ template StarkVerifier() { // and the sibling paths for each query. 
<% for (let s=1; s signal input s<%- s %>_vals[<%- starkStruct.nQueries %>][<%- (1 << (starkStruct.steps[s-1].nBits - starkStruct.steps[s].nBits))*3 %>]; - signal input s<%- s %>_siblings[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[s].nBits -1) /4) +1 %>][16]; + signal input s<%- s %>_siblings[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[s].nBits -1) /nBitsArity) +1 %>][<%- arity %>]; <% } -%> // Evaluations of the final FRI polynomial over a set of points of size bounded its degree @@ -865,24 +865,24 @@ template StarkVerifier() { //Calculate merkle root for s0 vals for (var q=0; q<<%- starkStruct.nQueries %>; q++) { - VerifyMerkleHash(1, <%- starkInfo.mapSectionsN.cm1_2ns %>, <%- 1 << starkStruct.steps[0].nBits %>)(s0_vals1_p[q], s0_siblings1[q], ys[q], root1, enable); + VerifyMerkleHash(1, <%- starkInfo.mapSectionsN.cm1_2ns %>, <%- 1 << starkStruct.steps[0].nBits %>, <%- arity %>)(s0_vals1_p[q], s0_siblings1[q], ys[q], root1, enable); } <% if (starkInfo.mapSectionsN.cm2_2ns > 0) { -%> for (var q=0; q<<%- starkStruct.nQueries %>; q++) { - VerifyMerkleHash(1, <%- starkInfo.mapSectionsN.cm2_2ns %>, <%- 1 << starkStruct.steps[0].nBits %>)(s0_vals2_p[q], s0_siblings2[q], ys[q], root2, enable); + VerifyMerkleHash(1, <%- starkInfo.mapSectionsN.cm2_2ns %>, <%- 1 << starkStruct.steps[0].nBits %>, <%- arity %>)(s0_vals2_p[q], s0_siblings2[q], ys[q], root2, enable); } <% } -%> <% if (starkInfo.mapSectionsN.cm3_2ns > 0) { -%> for (var q=0; q<<%- starkStruct.nQueries %>; q++) { - VerifyMerkleHash(1, <%- starkInfo.mapSectionsN.cm3_2ns %>, <%- 1 << starkStruct.steps[0].nBits %>)(s0_vals3_p[q], s0_siblings3[q], ys[q], root3, enable); + VerifyMerkleHash(1, <%- starkInfo.mapSectionsN.cm3_2ns %>, <%- 1 << starkStruct.steps[0].nBits %>, <%- arity %>)(s0_vals3_p[q], s0_siblings3[q], ys[q], root3, enable); } <% } -%> for (var q=0; q<<%- starkStruct.nQueries %>; q++) { - VerifyMerkleHash(1, <%- starkInfo.mapSectionsN.cm4_2ns %>, <%- 1 << starkStruct.steps[0].nBits %>)(s0_vals4_p[q], s0_siblings4[q], ys[q], root4, enable); + VerifyMerkleHash(1, <%- starkInfo.mapSectionsN.cm4_2ns %>, <%- 1 << starkStruct.steps[0].nBits %>,<%- arity %>)(s0_vals4_p[q], s0_siblings4[q], ys[q], root4, enable); } for (var q=0; q<<%- starkStruct.nQueries %>; q++) { - VerifyMerkleHash(1, <%- starkInfo.nConstants %>, <%- 1 << starkStruct.steps[0].nBits %>)(s0_valsC_p[q], s0_siblingsC[q], ys[q], rootC, enable); + VerifyMerkleHash(1, <%- starkInfo.nConstants %>, <%- 1<< starkStruct.steps[0].nBits %>, <%- arity %>)(s0_valsC_p[q], s0_siblingsC[q], ys[q], rootC, enable); } <% for (let s=1; s @@ -890,7 +890,7 @@ template StarkVerifier() { for (var q=0; q<<%- starkStruct.nQueries %>; q++) { // Calculate merkle root for s<%- s %> vals for(var i = 0; i < <%-starkStruct.steps[s].nBits %>; i++) { s<%- s %>_keys_merkle[q][i] <== ys[q][i]; } - VerifyMerkleHash(3, <%- 1 << (starkStruct.steps[s-1].nBits - starkStruct.steps[s].nBits) %>, <%- 1 << starkStruct.steps[s].nBits %>)(s<%- s %>_vals_p[q], s<%- s %>_siblings[q], s<%- s %>_keys_merkle[q], s<%- s %>_root, enable); + VerifyMerkleHash(3, <%- 1 << (starkStruct.steps[s-1].nBits - starkStruct.steps[s].nBits) %>, <%- 1 << starkStruct.steps[s].nBits %>,<%- arity %>)(s<%- s %>_vals_p[q], s<%- s %>_siblings[q], s<%- s %>_keys_merkle[q], s<%- s %>_root, enable); } <% } -%> @@ -967,15 +967,15 @@ template Main() { <% } -%> signal input s0_vals4[<%- starkStruct.nQueries %>][<%- starkInfo.mapSectionsN.cm4_2ns %>]; signal input s0_valsC[<%- starkStruct.nQueries %>][<%- 
starkInfo.nConstants%>]; - signal input s0_siblings1[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/4)+1 %>][16]; + signal input s0_siblings1[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/nBitsArity)+1 %>][<%- arity %>]; <% if (starkInfo.mapSectionsN.cm2_2ns > 0) { -%> - signal input s0_siblings2[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/4)+1 %>][16]; + signal input s0_siblings2[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/nBitsArity)+1 %>][<%- arity %>]; <% } -%> <% if (starkInfo.mapSectionsN.cm3_2ns > 0) { -%> - signal input s0_siblings3[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/4)+1 %>][16]; + signal input s0_siblings3[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/nBitsArity)+1 %>][<%- arity %>]; <% } -%> - signal input s0_siblings4[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/4)+1 %>][16]; - signal input s0_siblingsC[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/4)+1 %>][16]; + signal input s0_siblings4[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/nBitsArity)+1 %>][<%- arity %>]; + signal input s0_siblingsC[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[0].nBits - 1)/nBitsArity)+1 %>][<%- arity %>]; <% for (let s=0; s signal input s<%- s+1 %>_root; @@ -986,7 +986,7 @@ template Main() { // and the sibling paths for each query. <% for (let s=1; s signal input s<%- s %>_vals[<%- starkStruct.nQueries %>][<%- (1 << (starkStruct.steps[s-1].nBits - starkStruct.steps[s].nBits))*3 %>]; - signal input s<%- s %>_siblings[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[s].nBits -1) /4) +1 %>][16]; + signal input s<%- s %>_siblings[<%- starkStruct.nQueries %>][<%- Math.floor((starkStruct.steps[s].nBits -1) /nBitsArity) +1 %>][<%- arity %>]; <% } -%> signal input finalPol[<%- 1 << starkStruct.steps[starkStruct.steps.length-1].nBits %>][3]; diff --git a/package-lock.json b/package-lock.json index 4c969cfa..c80f65da 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1594,20 +1594,6 @@ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", "dev": true }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, "node_modules/function-bind": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", @@ -3950,13 +3936,6 @@ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", "dev": true }, - "fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "optional": true - }, "function-bind": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", diff --git a/package.json b/package.json index 3913a8c1..90c8790a 100644 --- a/package.json +++ b/package.json @@ -14,12 +14,14 
@@ "test:fibonacci:gpu": "npm run fibonacci_buildconst && npm run fibonacci_exec && npm run fibonacci_pil_verify && npm run fibonacci_buildstarkinfo_gpu && npm run fibonacci_buildconsttree_gpu && npm run fibonacci_prove && npm run fibonacci_verify && npm run fibonacci_starkVerifier_gpu", "test:fibonacci:C12:pil": "npm run fibonacci_C12_setup && npm run fibonacci_C12_exec && npm run fibonacci_C12_pil_verify", "test:fibonacci:C12:prove": "npm run fibonacci_C12_buildstarkinfo && npm run fibonacci_C12_buildconsttree && npm run fibonacci_C12_prove && npm run fibonacci_C12_verify && npm run fibonacci_C12_gencircomBN128 && npm run fibonacci_C12_starkVerifierBN128", + "test:fibonacci:C12:prove:custom": "npm run fibonacci_C12_buildstarkinfo && npm run fibonacci_C12_buildconsttree_custom && npm run fibonacci_C12_prove_custom && npm run fibonacci_C12_verify_custom && npm run fibonacci_C12_gencircomBN128_custom && npm run fibonacci_C12_starkVerifierBN128", "test:all": "npm run all_buildconst && npm run all_exec && npm run all_pil_verify && npm run all_buildstarkinfo && npm run all_buildconsttree && npm run all_prove && npm run all_verify && npm run all_starkVerifier", "test:all:gpu": "npm run all_buildconst && npm run all_exec && npm run all_pil_verify && npm run all_buildstarkinfo_gpu && npm run all_buildconsttree_gpu && npm run all_prove && npm run all_verify && npm run all_starkVerifier_gpu", "test:all:C18:pil": "npm run all_C18_setup && npm run all_C18_exec && npm run all_C18_pil_verify", "test:all:C18:prove": "npm run all_C18_buildstarkinfo && npm run all_C18_buildconsttree && npm run all_C18_prove && npm run all_C18_verify && npm run all_C18_gencircomBN128 && npm run all_C18_starkVerifierBN128", "test:C12": "npm run verifier_C12_setup && npm run verifier_C12_exec && npm run verifier_C12_pil_verify && npm run verifier_C12_buildstarkinfo && npm run verifier_C12_buildconsttree && npm run verifier_C12_prove && npm run verifier_C12_verify && npm run verifier_C12_gencircomBN128 && npm run verifier_C12_starkVerifierBN128", "test:C18": "npm run verifier_C18_setup && npm run verifier_C18_exec && npm run verifier_C18_pil_verify && npm run verifier_C18_buildstarkinfo && npm run verifier_C18_buildconsttree && npm run verifier_C18_prove && npm run verifier_C18_verify && npm run verifier_C18_gencircomBN128 && npm run verifier_C18_starkVerifierBN128", + "test:C12:custom": "npm run verifier_C12_setup && npm run verifier_C12_exec && npm run verifier_C12_pil_verify && npm run verifier_C12_buildstarkinfo && npm run verifier_C12_buildconsttree_custom && npm run verifier_C12_prove_custom && npm run verifier_C12_verify_custom && npm run verifier_C12_gencircomBN128_custom && npm run verifier_C12_starkVerifierBN128", "fibonacci_buildconst": "NODE_OPTIONS=--max-old-space-size=32000 node test/state_machines/sm_fibonacci/main_buildconst_fibonacci.js -o tmp/fibonacci.const", "fibonacci_exec": "NODE_OPTIONS=--max-old-space-size=32000 node test/state_machines/sm_fibonacci/main_exec_fibonacci.js -i test/state_machines/sm_fibonacci/fibonacci.input.json -o tmp/fibonacci.commit", "fibonacci_pil_verify": "NODE_OPTIONS=--max-old-space-size=32000 node node_modules/pilcom/src/main_pilverifier.js tmp/fibonacci.commit -p test/state_machines/sm_fibonacci/fibonacci_main.pil -c tmp/fibonacci.const", @@ -39,9 +41,13 @@ "fibonacci_C12_pil_verify": "NODE_OPTIONS=--max-old-space-size=32000 node node_modules/pilcom/src/main_pilverifier.js tmp/fibonacci.c12.commit -p tmp/fibonacci.c12.pil -c tmp/fibonacci.c12.const", 
"fibonacci_C12_buildstarkinfo": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_genstarkinfo.js -p tmp/fibonacci.c12.pil -s test/state_machines/sm_fibonacci/fibonacci.c12.starkstruct.json -i tmp/fibonacci.c12.starkinfo.json", "fibonacci_C12_buildconsttree": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_buildconsttree.js -c tmp/fibonacci.c12.const -p tmp/fibonacci.c12.pil -s test/state_machines/sm_fibonacci/fibonacci.c12.starkstruct.json -t tmp/fibonacci.c12.consttree -v tmp/fibonacci.c12.verkey.json", + "fibonacci_C12_buildconsttree_custom": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_buildconsttree.js -c tmp/fibonacci.c12.const -p tmp/fibonacci.c12.pil -s test/state_machines/sm_fibonacci/fibonacci.c12.starkstruct.json -t tmp/fibonacci.c12.consttree -v tmp/fibonacci.c12.verkey.json --arity=4", "fibonacci_C12_prove": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_prover.js -m tmp/fibonacci.c12.commit -c tmp/fibonacci.c12.const -t tmp/fibonacci.c12.consttree -p tmp/fibonacci.c12.pil -s tmp/fibonacci.c12.starkinfo.json -o tmp/fibonacci.c12.proof.json -z tmp/fibonacci.c12.proof.zkin.json -b tmp/fibonacci.c12.public.json --proverAddr=0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266", + "fibonacci_C12_prove_custom": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_prover.js -m tmp/fibonacci.c12.commit -c tmp/fibonacci.c12.const -t tmp/fibonacci.c12.consttree -p tmp/fibonacci.c12.pil -s tmp/fibonacci.c12.starkinfo.json -o tmp/fibonacci.c12.proof.json -z tmp/fibonacci.c12.proof.zkin.json -b tmp/fibonacci.c12.public.json --proverAddr=0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266 --arity=4", "fibonacci_C12_verify": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_verifier.js -s tmp/fibonacci.c12.starkinfo.json -o tmp/fibonacci.c12.proof.json -b tmp/fibonacci.c12.public.json -v tmp/fibonacci.c12.verkey.json", + "fibonacci_C12_verify_custom": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_verifier.js -s tmp/fibonacci.c12.starkinfo.json -o tmp/fibonacci.c12.proof.json -b tmp/fibonacci.c12.public.json -v tmp/fibonacci.c12.verkey.json --arity=4", "fibonacci_C12_gencircomBN128": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_pil2circom.js -p tmp/fibonacci.c12.pil -s tmp/fibonacci.c12.starkinfo.json -v tmp/fibonacci.c12.verkey.json -o tmp/fibonacci.c12.verifier.circom", + "fibonacci_C12_gencircomBN128_custom": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_pil2circom.js -p tmp/fibonacci.c12.pil -s tmp/fibonacci.c12.starkinfo.json -v tmp/fibonacci.c12.verkey.json -o tmp/fibonacci.c12.verifier.circom --arity=4", "fibonacci_C12_compileverifier": "circom -l circuits.bn128 -l node_modules/circomlib/circuits --O1 --r1cs --sym --wasm --verbose tmp/fibonacci.c12.verifier.circom -o tmp", "fibonacci_C12_starkVerifierBN128": "mocha test/stark/fibonacci_bn128_verifier.circuit.test.js", "all_buildconst": "NODE_OPTIONS=--max-old-space-size=32000 node test/state_machines/sm_all/main_buildconst_all.js -o tmp/all.const", @@ -83,9 +89,13 @@ "verifier_C12_pil_verify": "NODE_OPTIONS=--max-old-space-size=32000 node src/compressor/main_compressor_pil_verify.js -t tmp/test.c12.commit -p tmp/test.c12.pil -c tmp/test.c12.const", "verifier_C12_buildstarkinfo": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_genstarkinfo.js -p tmp/test.c12.pil -s test/state_machines/sm_fibonacci/fibonacci.c12.starkstruct.json -i tmp/test.c12.starkinfo.json", "verifier_C12_buildconsttree": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_buildconsttree.js -c tmp/test.c12.const 
-p tmp/test.c12.pil -s test/state_machines/sm_fibonacci/fibonacci.c12.starkstruct.json -t tmp/test.c12.consttree -v tmp/test.c12.verkey.json", + "verifier_C12_buildconsttree_custom": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_buildconsttree.js -c tmp/test.c12.const -p tmp/test.c12.pil -s test/state_machines/sm_fibonacci/fibonacci.c12.starkstruct.json -t tmp/test.c12.consttree -v tmp/test.c12.verkey.json --arity=4", "verifier_C12_prove": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_prover.js -m tmp/test.c12.commit -c tmp/test.c12.const -t tmp/test.c12.consttree -p tmp/test.c12.pil -s tmp/test.c12.starkinfo.json -o tmp/test.c12.proof.json -z tmp/test.c12.proof.zkin.json -b tmp/test.c12.public.json --proverAddr=0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266", + "verifier_C12_prove_custom": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_prover.js -m tmp/test.c12.commit -c tmp/test.c12.const -t tmp/test.c12.consttree -p tmp/test.c12.pil -s tmp/test.c12.starkinfo.json -o tmp/test.c12.proof.json -z tmp/test.c12.proof.zkin.json -b tmp/test.c12.public.json --proverAddr=0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266 --arity=4", "verifier_C12_verify": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_verifier.js -s tmp/test.c12.starkinfo.json -o tmp/test.c12.proof.json -b tmp/test.c12.public.json -v tmp/test.c12.verkey.json", + "verifier_C12_verify_custom": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_verifier.js -s tmp/test.c12.starkinfo.json -o tmp/test.c12.proof.json -b tmp/test.c12.public.json -v tmp/test.c12.verkey.json --arity=4", "verifier_C12_gencircomBN128": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_pil2circom.js -s tmp/test.c12.starkinfo.json -p tmp/test.c12.pil -v tmp/test.c12.verkey.json -o tmp/test.c12.verifier.circom", + "verifier_C12_gencircomBN128_custom": "NODE_OPTIONS=--max-old-space-size=32000 node src/main_pil2circom.js -s tmp/test.c12.starkinfo.json -p tmp/test.c12.pil -v tmp/test.c12.verkey.json -o tmp/test.c12.verifier.circom --arity=4", "verifier_C12_starkVerifierBN128": "mocha test/stark/test_c12_bn128_verifier.circuit.test.js", "plonktest_compile": "circom --O1 --prime goldilocks --r1cs --sym --wasm --c --verbose test/circuits/plonktest.circom -o tmp", "plonktest_wc": "snarkjs wc tmp/plonktest_js/plonktest.wasm test/circuits/plonkinput.json tmp/plonktest.wtns" diff --git a/src/helpers/hash/linearhash/linearhash.bn128.js b/src/helpers/hash/linearhash/linearhash.bn128.js index 536c38f4..3e1e742e 100644 --- a/src/helpers/hash/linearhash/linearhash.bn128.js +++ b/src/helpers/hash/linearhash/linearhash.bn128.js @@ -3,9 +3,10 @@ module.exports = class LinearHashBN { - constructor(poseidon) { + constructor(poseidon, arity) { this.H = poseidon; this.F = poseidon.F; + this.arity = arity; } hash(vals) { @@ -47,15 +48,14 @@ module.exports = class LinearHashBN { let inHash = []; for (let i=0; i0) { -// while (inHash.length<16) inHash.push(this.F.zero); st = this.H(inHash, st); } return st; } -} \ No newline at end of file +} diff --git a/src/helpers/hash/merklehash/merklehash_bn128.js b/src/helpers/hash/merklehash/merklehash_bn128.js index fa688161..50580959 100644 --- a/src/helpers/hash/merklehash/merklehash_bn128.js +++ b/src/helpers/hash/merklehash/merklehash_bn128.js @@ -1,12 +1,14 @@ const LinearHash = require("../linearhash/linearhash.bn128"); const assert = require("assert"); +const { log2 } = require("pilcom/src/utils.js"); module.exports = class MerkleHash { - constructor(poseidon) { + constructor(poseidon, arity) { 
this.poseidon = poseidon; this.F = poseidon.F; - this.lh = new LinearHash(poseidon); + this.lh = new LinearHash(poseidon, arity); + this.arity = arity; } async merkelize(vals, elementSize, elementsInLinear, nLinears, interleaved) { @@ -17,7 +19,7 @@ module.exports = class MerkleHash { let nTree = nLinears; while (nTree>1) { sizeTree += nTree; - nTree = Math.floor((nTree-1)/16)+1; + nTree = Math.floor((nTree-1)/this.arity)+1; } const buff = new BigUint64Array(3 + elementsInLinear*nLinears*elementSize + 4*sizeTree); @@ -83,16 +85,16 @@ module.exports = class MerkleHash { o=nextO; let n = nLinears; - let nextN = Math.floor((n-1)/16)+1; - const auxBuff = new Uint8Array(16*32); + let nextN = Math.floor((n-1)/this.arity)+1; + const auxBuff = new Uint8Array(this.arity*32); while (n>1) { nextO = pp; for (let i=0; i> 4; + const nBitsArity = Math.ceil(Math.log2(self.arity)); + const nextIdx = idx >> nBitsArity; - const si = idx ^ (idx & 0xF); + const si = idx ^ (idx & (self.arity - 1)); const sibs = []; - for (let i=0; i<16; i++) { + for (let i=0; i> 4; + const nBitsArity = Math.ceil(Math.log2(self.arity)); - const buff = new Uint8Array(32*16); - for (let i=0; i<16; i++) { + const curIdx = idx & (self.arity - 1); + const nextIdx = idx >> nBitsArity; + + const buff = new Uint8Array(32*self.arity); + for (let i=0; i1) { // FIll with zeros if n nodes in the leve is not even n = nextN; - nextN = (Math.floor((n-1)/16)+1); + nextN = Math.floor((n-1)/this.arity)+1; if (n>1) { - acc += nextN*16; + acc += nextN*this.arity; } else { acc +=1; } @@ -52,21 +53,22 @@ class MerkleHash { }; const pool = workerpool.pool(__dirname + '/merklehash_bn128_worker.js'); -//const pool = {maxWorkers: 15}; const promisesLH = []; let res = []; let nPerThreadF = Math.floor((height-1)/pool.maxWorkers)+1; - const minPT = Math.floor(this.minOpsPerThread / (Math.floor((width -1) / (3*16)) + 1)); + + const minPT = Math.floor(this.minOpsPerThread / (Math.floor((width -1) / (3*this.arity)) + 1)); + if (nPerThreadF < minPT) nPerThreadF = minPT; if (nPerThreadF > this.maxOpsPerThread) nPerThreadF = this.maxOpsPerThread; for (let i=0; i< height; i+=nPerThreadF) { const curN = Math.min(nPerThreadF, height-i); const bb = tree.elements.slice(i*width, (i+ curN)*width); if (self.useThreads) { - promisesLH.push(pool.exec("linearHash", [self.wasmModule, bb, width, i, height])); + promisesLH.push(pool.exec("linearHash", [self.wasmModule, bb, width, i, height, this.arity])); } else { - res.push(await linearHash(self.wasmModule, bb, width, i, height)); + res.push(await linearHash(self.wasmModule, bb, width, i, height, this.arity)); } let st = pool.stats(); @@ -85,16 +87,16 @@ class MerkleHash { let pIn = 0; let n256 = height; - let nextN256 = (Math.floor((n256-1)/16)+1); - let pOut = pIn + nextN256*16*32; + let nextN256 = (Math.floor((n256-1)/self.arity)+1); + let pOut = pIn + nextN256*self.arity*32; while (n256>1) { // FIll with zeros if n nodes in the leve is not even await _merkelizeLevel(pIn, nextN256, pOut); n256 = nextN256; - nextN256 = (Math.floor((n256-1)/16)+1); + nextN256 = (Math.floor((n256-1)/self.arity)+1); pIn = pOut; - pOut = pIn + nextN256*16*32; + pOut = pIn + nextN256*self.arity*32; } pool.terminate(); @@ -109,11 +111,11 @@ class MerkleHash { for (let i=0; i< nOps; i+=nOpsPerThread) { const curNOps = Math.min(nOpsPerThread, nOps-i); - const bb = tree.nodes.slice(pIn/8 + i*64, pIn/8 + (i+curNOps)*64); + const bb = tree.nodes.slice(pIn/8 + i*4*self.arity, pIn/8 + (i+curNOps)*4*self.arity); if (self.useThreads) { - 
promises.push(pool.exec("merkelizeLevel", [self.wasmModule, bb, i, nOps])); + promises.push(pool.exec("merkelizeLevel", [self.wasmModule, bb, i, nOps, self.arity])); } else { - res.push(await merkelizeLevel(self.wasmModule, bb, i, nOps)); + res.push(await merkelizeLevel(self.wasmModule, bb, i, nOps, self.arity)); } } if (self.useThreads) { @@ -147,20 +149,26 @@ class MerkleHash { function merkle_genMerkleProof(tree, idx, offset, n) { if (n<=1) return []; - const nextIdx = idx >> 4; + const nBitsArity = Math.ceil(Math.log2(self.arity)); + + const nextIdx = idx >> nBitsArity; - const si = idx & 0xFFFFFFF0; + const si = idx ^ (idx & (self.arity - 1)); const sibs = []; - for (let i=0; i<16; i++) { - const buff8 = new Uint8Array(tree.nodes.buffer, offset + (si+i)*32, 32 ); - sibs.push(self.F.toObject(buff8)); + for (let i=0; i> 4; + const nBitsArity = Math.ceil(Math.log2(self.arity)); + const curIdx = idx & (self.arity - 1); + const nextIdx = idx >> nBitsArity; - const buff = new Uint8Array(32*16); - for (let i=0; i<16; i++) { + const buff = new Uint8Array(32*self.arity); + for (let i=0; i prime) vals[i] -= prime; in64[p] = vals[i]; p++; - if (p==16*4) { - instance.exports.poseidon(pSt, pIn, 16, pSt, 1); + if (p==arity*4) { + instance.exports.poseidon(pSt, pIn, arity, pSt, 1); p=0; } if (i%3 == 2) { in64[p] = 0n; p++; instance.exports.frm_toMontgomery(pIn + p*8 - 32,pIn + p*8 - 32); - if (p==16*4) { - instance.exports.poseidon(pSt, pIn, 16, pSt, 1); + if (p==arity*4) { + instance.exports.poseidon(pSt, pIn, arity, pSt, 1); p=0; } } @@ -95,12 +93,12 @@ async function linearHash(wasmModule, buffIn, width, st_i, st_n) { // a deliberately inefficient implementation of the fibonacci sequence -async function merkelizeLevel(wasmModule, buffIn, st_i, st_n) { +async function merkelizeLevel(wasmModule, buffIn, st_i, st_n, arity) { console.log(`merkelizing bn128 hash start.... 
${st_i}/${st_n}`); - const nOps = buffIn.byteLength / (32*16); + const nOps = buffIn.byteLength / (32*arity); + + const bytesRequired = arity*32 + 32 + 32; - // const bytesRequired = nOps*32*16 + 32 + nOps*32; - const bytesRequired = 16*32 + 32 + 32; const pagesRequired = Math.floor((bytesRequired - 1)/(1<<16)) +10000; const wasmMem = new WebAssembly.Memory({initial:pagesRequired}); @@ -110,11 +108,11 @@ async function merkelizeLevel(wasmModule, buffIn, st_i, st_n) { } }); - const pIn = alloc(wasmMem, 16*32); + const pIn = alloc(wasmMem, arity*32); const pSt = alloc(wasmMem, 32); const pOut = alloc(wasmMem, 32); - const in64 =new BigUint64Array(wasmMem.buffer, pIn, 4*16); + const in64 =new BigUint64Array(wasmMem.buffer, pIn, 4*arity); const st64 =new BigUint64Array(wasmMem.buffer, pSt, 4); const out64 =new BigUint64Array(wasmMem.buffer, pOut, 4); @@ -127,9 +125,9 @@ async function merkelizeLevel(wasmModule, buffIn, st_i, st_n) { st64[2] = 0n; st64[3] = 0n; - const sBuff = new BigUint64Array(buffIn.buffer, buffIn.byteOffset + i*(16*32), 16*4); + const sBuff = new BigUint64Array(buffIn.buffer, buffIn.byteOffset + i*(arity*32), arity*4); in64.set(sBuff); - instance.exports.poseidon(pSt, pIn, 16, pOut, 1); + instance.exports.poseidon(pSt, pIn, arity, pOut, 1); buffOut64.set(out64, i*4); } @@ -145,4 +143,4 @@ if (!workerpool.isMainThread) { } module.exports.linearHash = linearHash; -module.exports.merkelizeLevel = merkelizeLevel; \ No newline at end of file +module.exports.merkelizeLevel = merkelizeLevel; diff --git a/src/helpers/transcript/transcript.bn128.js b/src/helpers/transcript/transcript.bn128.js index 0f11f01d..fa46da04 100644 --- a/src/helpers/transcript/transcript.bn128.js +++ b/src/helpers/transcript/transcript.bn128.js @@ -1,7 +1,8 @@ class Transcript { - constructor(poseidon) { + constructor(poseidon, nInputs) { this.poseidon = poseidon; + this.nInputs = nInputs; this.F = poseidon.F; this.state = this.F.zero; this.pending = []; @@ -41,10 +42,10 @@ class Transcript { } updateState() { - while (this.pending.length<16) { + while (this.pending.length -v -s [--skipMain] [--enableInput] [--verkeyInput]") + .usage("node main_pil2circom.js -o -v -s [--skipMain] [--enableInput] [--verkeyInput] [--arity]") .alias("s", "starkinfo") .alias("v", "verkey") .alias("o", "output") + .string("arity") .argv; async function run() { @@ -34,6 +35,11 @@ async function run() { verkeyInput: argv.verkeyInput || false } + if(starkInfo.starkStruct.verificationHashType === "BN128") { + options.arity = Number(argv.arity) || 16; + console.log(`Arity: ${options.arity}`); + } + const verifier = await pil2circom(constRoot, starkInfo, options); await fs.promises.writeFile(outputFile, verifier, "utf8"); diff --git a/src/main_prover.js b/src/main_prover.js index b44d6c7a..70dc3236 100644 --- a/src/main_prover.js +++ b/src/main_prover.js @@ -6,7 +6,7 @@ const starkGen = require("./stark/stark_gen.js"); const JSONbig = require('json-bigint')({ useNativeBigInt: true, alwaysParseAsBig: true, storeAsString: true }); const { proof2zkin } = require("./proof2zkin"); const buildMerklehashGL = require("./helpers/hash/merklehash/merklehash_p.js"); -const buildMerklehashBN128 = require("./helpers/hash/merklehash/merklehash_bn128_p.js"); +const buildMerkleHashBN128 = require("./helpers/hash/merklehash/merklehash_bn128_p.js"); const F3g = require("./helpers/f3g.js"); const { createHash } = require("crypto"); @@ -26,6 +26,7 @@ const argv = require("yargs") .alias("z", "zkin") .alias("b", "public") .string("proverAddr") + 
.string("arity") .argv; async function run() { @@ -51,18 +52,24 @@ async function run() { const cmPols = newCommitPolsArray(pil); await cmPols.loadFromFile(commitFile); + let options = {}; let MH; if (starkInfo.starkStruct.verificationHashType == "GL") { MH = await buildMerklehashGL(); } else if (starkInfo.starkStruct.verificationHashType == "BN128") { - MH = await buildMerklehashBN128(); + let arity = Number(argv.arity) || 16; + + options = {arity}; + + console.log(`Arity: ${arity}`); + MH = await buildMerkleHashBN128(arity); } else { throw new Error("Invalid Hash Type: "+ starkInfo.starkStruct.verificationHashType); } const constTree = await MH.readFromFile(constTreeFile); - const resP = await starkGen(cmPols, constPols, constTree, starkInfo); + const resP = await starkGen(cmPols, constPols, constTree, starkInfo, options); await fs.promises.writeFile(proofFile, JSONbig.stringify(resP.proof, null, 1), "utf8"); @@ -109,4 +116,3 @@ run().then(()=> { console.log(err.stack); process.exit(1); }); - diff --git a/src/main_verifier.js b/src/main_verifier.js index 401c7dda..1c3b4afd 100644 --- a/src/main_verifier.js +++ b/src/main_verifier.js @@ -10,6 +10,7 @@ const argv = require("yargs") .alias("v", "verkey") .alias("o", "proof") .alias("b", "public") + .string("arity") .argv; async function run() { @@ -27,7 +28,14 @@ async function run() { proof = str2bigInt(proof); - const resV = await starkVerify(proof, public, constRoot, starkInfo); + let options = {}; + if (starkInfo.starkStruct.verificationHashType === "BN128") { + options.arity = Number(argv.arity) || 16; + console.log(`Arity: ${options.arity}`); + + } + + const resV = await starkVerify(proof, public, constRoot, starkInfo, options); if (resV === true) { console.log("Verification Ok!!") diff --git a/src/pil2circom.js b/src/pil2circom.js index 6aad4372..e9b81d75 100644 --- a/src/pil2circom.js +++ b/src/pil2circom.js @@ -2,6 +2,7 @@ const ejs = require("ejs"); const F3g = require("./helpers/f3g.js"); const fs = require("fs"); const path = require("path"); +const { log2 } = require("pilcom/src/utils.js"); module.exports = async function pil2circom(constRoot, starkInfo, options) { @@ -31,7 +32,10 @@ module.exports = async function pil2circom(constRoot, starkInfo, options) { starkInfo: starkInfo, starkStruct: starkStruct, constRoot: constRoot, - options: options + options: options, + arity: Number(options.arity), + nBitsArity: log2(options.arity), + arityTranscript: 16, }; return ejs.render(template , obj); diff --git a/src/stark/stark_buildConstTree.js b/src/stark/stark_buildConstTree.js index 96876618..16b2155f 100644 --- a/src/stark/stark_buildConstTree.js +++ b/src/stark/stark_buildConstTree.js @@ -3,7 +3,7 @@ const buildMerkleHashGL = require("../helpers/hash/merklehash/merklehash_p.js"); const buildMerkleHashBN128 = require("../helpers/hash/merklehash/merklehash_bn128_p.js"); const {interpolate} = require("../helpers/fft/fft_p"); -module.exports.buildConstTree = async function buildConstTree(starkStruct, pil, constPols) { +module.exports.buildConstTree = async function buildConstTree(starkStruct, pil, constPols, arity = 16) { const nBits = starkStruct.nBits; const nBitsExt = starkStruct.nBitsExt; const extN = 1 << nBitsExt; @@ -18,7 +18,8 @@ module.exports.buildConstTree = async function buildConstTree(starkStruct, pil, if (starkStruct.verificationHashType == "GL") { MH = await buildMerkleHashGL(starkStruct.splitLinearHash); } else if (starkStruct.verificationHashType == "BN128") { - MH = await buildMerkleHashBN128(); + 
console.log(`Arity: ${arity}`); + MH = await buildMerkleHashBN128(arity); } else { throw new Error("Invalid Hash Type: "+ starkStruct.verificationHashType); } @@ -39,4 +40,4 @@ module.exports.buildConstTree = async function buildConstTree(starkStruct, pil, verKey, } -} \ No newline at end of file +} diff --git a/src/stark/stark_gen.js b/src/stark/stark_gen.js index 7fde0327..59e608b4 100644 --- a/src/stark/stark_gen.js +++ b/src/stark/stark_gen.js @@ -22,7 +22,7 @@ const maxNperThread = 1<<18; const minNperThread = 1<<12; -module.exports = async function starkGen(cmPols, constPols, constTree, starkInfo) { +module.exports = async function starkGen(cmPols, constPols, constTree, starkInfo, options) { const starkStruct = starkInfo.starkStruct; const N = 1 << starkStruct.nBits; const extendBits = starkStruct.nBitsExt - starkStruct.nBits; @@ -40,8 +40,10 @@ module.exports = async function starkGen(cmPols, constPols, constTree, starkInfo transcript = new Transcript(poseidon); } else if (starkStruct.verificationHashType == "BN128") { const poseidonBN128 = await buildPoseidonBN128(); - MH = await buildMerkleHashBN128(); - transcript = new TranscriptBN128(poseidonBN128); + let arity = options.arity || 16; + console.log(`Arity: ${arity}`); + MH = await buildMerkleHashBN128(arity); + transcript = new TranscriptBN128(poseidonBN128, 16); } else { throw new Error("Invalid Hash Type: "+ starkStruct.verificationHashType); } diff --git a/src/stark/stark_setup.js b/src/stark/stark_setup.js index 751286eb..e58af48c 100644 --- a/src/stark/stark_setup.js +++ b/src/stark/stark_setup.js @@ -5,7 +5,7 @@ const starkInfoGen = require("./stark_info.js") const { interpolate } = require("../helpers/fft/fft_p"); -module.exports = async function starkSetup(constPols, pil, starkStruct) { +module.exports = async function starkSetup(constPols, pil, starkStruct, options = {}) { const nBits = starkStruct.nBits; const nBitsExt = starkStruct.nBitsExt; @@ -15,11 +15,12 @@ module.exports = async function starkSetup(constPols, pil, starkStruct) { const constBuff = constPols.writeToBuff(); await interpolate(constBuff, pil.nConstants, nBits, constPolsArrayE, nBitsExt ); + let arity = options.arity || 16; let MH; if (starkStruct.verificationHashType == "GL") { MH = await buildMerklehashGL(); } else if (starkStruct.verificationHashType == "BN128") { - MH = await buildMerklehashBN128(); + MH = await buildMerkleHashBN128(arity); } else { throw new Error("Invalid Hash Type: "+ starkStruct.verificationHashType); } diff --git a/src/stark/stark_verify.js b/src/stark/stark_verify.js index 0669e5fd..c861080f 100644 --- a/src/stark/stark_verify.js +++ b/src/stark/stark_verify.js @@ -7,7 +7,7 @@ const { assert } = require("chai"); const buildPoseidonGL = require("../helpers/hash/poseidon/poseidon"); const buildPoseidonBN128 = require("circomlibjs").buildPoseidon; -module.exports = async function starkVerify(proof, publics, constRoot, starkInfo) { +module.exports = async function starkVerify(proof, publics, constRoot, starkInfo, options) { const starkStruct = starkInfo.starkStruct; @@ -21,8 +21,9 @@ module.exports = async function starkVerify(proof, publics, constRoot, starkInfo transcript = new Transcript(poseidonGL); } else if (starkStruct.verificationHashType == "BN128") { const poseidonBN128 = await buildPoseidonBN128(); - MH = await buildMerkleHashBN128(); - transcript = new TranscriptBN128(poseidonBN128); + let arity = options.arity || 16; + MH = await buildMerkleHashBN128(arity); + transcript = new TranscriptBN128(poseidonBN128, 16); } 
else { throw new Error("Invalid Hash Type: "+ starkStruct.verificationHashType); } diff --git a/test/circuits/bn128/circom/linearhash.bn128.custom.test.circom b/test/circuits/bn128/circom/linearhash.bn128.custom.test.circom deleted file mode 100644 index b5a0a17d..00000000 --- a/test/circuits/bn128/circom/linearhash.bn128.custom.test.circom +++ /dev/null @@ -1,6 +0,0 @@ -pragma circom 2.1.0; -pragma custom_templates; - -include "../../../../circuits.bn128.custom/linearhash.circom"; - -component main = LinearHash(9, 3, 16); \ No newline at end of file diff --git a/test/circuits/bn128/circom/linearhash100.bn128.custom.test.circom b/test/circuits/bn128/circom/linearhash100.bn128.custom.test.circom deleted file mode 100644 index 9cb50d45..00000000 --- a/test/circuits/bn128/circom/linearhash100.bn128.custom.test.circom +++ /dev/null @@ -1,6 +0,0 @@ -pragma circom 2.1.0; -pragma custom_templates; - -include "../../../../circuits.bn128.custom/linearhash.circom"; - -component main = LinearHash(100, 3, 16); \ No newline at end of file diff --git a/test/circuits/bn128/circom/linearhash110.bn128.custom.test.circom b/test/circuits/bn128/circom/linearhash110.bn128.custom.test.circom deleted file mode 100644 index 532228d2..00000000 --- a/test/circuits/bn128/circom/linearhash110.bn128.custom.test.circom +++ /dev/null @@ -1,6 +0,0 @@ -pragma circom 2.1.0; -pragma custom_templates; - -include "../../../../circuits.bn128.custom/linearhash.circom"; - -component main = LinearHash(110, 3, 4); \ No newline at end of file diff --git a/test/circuits/bn128/circom/linearhash100.bn128.test.circom b/test/circuits/bn128/circom/linearhash16.bn128.test.circom similarity index 67% rename from test/circuits/bn128/circom/linearhash100.bn128.test.circom rename to test/circuits/bn128/circom/linearhash16.bn128.test.circom index 3ac065d3..b614e6e9 100644 --- a/test/circuits/bn128/circom/linearhash100.bn128.test.circom +++ b/test/circuits/bn128/circom/linearhash16.bn128.test.circom @@ -2,4 +2,4 @@ pragma circom 2.1.0; include "../../../../circuits.bn128/linearhash.circom"; -component main = LinearHash(100, 3); \ No newline at end of file +component main = LinearHash(9, 3, 16); \ No newline at end of file diff --git a/test/circuits/bn128/circom/linearhash16_100.bn128.test.circom b/test/circuits/bn128/circom/linearhash16_100.bn128.test.circom new file mode 100644 index 00000000..9aa3a353 --- /dev/null +++ b/test/circuits/bn128/circom/linearhash16_100.bn128.test.circom @@ -0,0 +1,5 @@ +pragma circom 2.1.0; + +include "../../../../circuits.bn128/linearhash.circom"; + +component main = LinearHash(100, 3, 16); \ No newline at end of file diff --git a/test/circuits/bn128/circom/linearhash.bn128.test.circom b/test/circuits/bn128/circom/linearhash4.bn128.test.circom similarity index 68% rename from test/circuits/bn128/circom/linearhash.bn128.test.circom rename to test/circuits/bn128/circom/linearhash4.bn128.test.circom index 12127eaf..67082684 100644 --- a/test/circuits/bn128/circom/linearhash.bn128.test.circom +++ b/test/circuits/bn128/circom/linearhash4.bn128.test.circom @@ -2,4 +2,4 @@ pragma circom 2.1.0; include "../../../../circuits.bn128/linearhash.circom"; -component main = LinearHash(9, 3); \ No newline at end of file +component main = LinearHash(9, 3, 4); \ No newline at end of file diff --git a/test/circuits/bn128/circom/linearhash4_100.bn128.test.circom b/test/circuits/bn128/circom/linearhash4_100.bn128.test.circom new file mode 100644 index 00000000..c559092e --- /dev/null +++ 
b/test/circuits/bn128/circom/linearhash4_100.bn128.test.circom @@ -0,0 +1,5 @@ +pragma circom 2.1.0; + +include "../../../../circuits.bn128/linearhash.circom"; + +component main = LinearHash(100, 3, 4); \ No newline at end of file diff --git a/test/circuits/bn128/circom/merklehash16.bn128.custom.test.circom b/test/circuits/bn128/circom/merklehash16.bn128.custom.test.circom deleted file mode 100644 index 77cb1aec..00000000 --- a/test/circuits/bn128/circom/merklehash16.bn128.custom.test.circom +++ /dev/null @@ -1,6 +0,0 @@ -pragma circom 2.1.0; -pragma custom_templates; - -include "../../../../circuits.bn128.custom/merklehash.circom"; - -component main = VerifyMerkleHash(3, 9, 8, 16); \ No newline at end of file diff --git a/test/circuits/bn128/circom/merklehash.bn128.test.circom b/test/circuits/bn128/circom/merklehash16.bn128.test.circom similarity index 61% rename from test/circuits/bn128/circom/merklehash.bn128.test.circom rename to test/circuits/bn128/circom/merklehash16.bn128.test.circom index 9a779b0a..c0a53340 100644 --- a/test/circuits/bn128/circom/merklehash.bn128.test.circom +++ b/test/circuits/bn128/circom/merklehash16.bn128.test.circom @@ -2,4 +2,4 @@ pragma circom 2.1.0; include "../../../../circuits.bn128/merklehash.circom"; -component main = VerifyMerkleHash(3, 9, 32); +component main = VerifyMerkleHash(3, 9, 256, 16); \ No newline at end of file diff --git a/test/circuits/bn128/circom/merklehash4.bn128.custom.test.circom b/test/circuits/bn128/circom/merklehash4.bn128.custom.test.circom deleted file mode 100644 index e9080583..00000000 --- a/test/circuits/bn128/circom/merklehash4.bn128.custom.test.circom +++ /dev/null @@ -1,6 +0,0 @@ -pragma circom 2.1.0; -pragma custom_templates; - -include "../../../../circuits.bn128.custom/merklehash.circom"; - -component main = VerifyMerkleHash(3, 9, 14, 4); \ No newline at end of file diff --git a/test/circuits/bn128/circom/merklehash4.bn128.test.circom b/test/circuits/bn128/circom/merklehash4.bn128.test.circom new file mode 100644 index 00000000..71a244a2 --- /dev/null +++ b/test/circuits/bn128/circom/merklehash4.bn128.test.circom @@ -0,0 +1,5 @@ +pragma circom 2.1.0; + +include "../../../../circuits.bn128/merklehash.circom"; + +component main = VerifyMerkleHash(3, 9, 16384, 4); \ No newline at end of file diff --git a/test/circuits/bn128/linearhash.bn128.circuit.test.js b/test/circuits/bn128/linearhash.bn128.circuit.test.js index c64b4e83..385c0475 100644 --- a/test/circuits/bn128/linearhash.bn128.circuit.test.js +++ b/test/circuits/bn128/linearhash.bn128.circuit.test.js @@ -6,18 +6,71 @@ const LinearHash = require("../../../src/helpers/hash/linearhash/linearhash.bn12 const wasm_tester = require("circom_tester").wasm; describe("Linear Hash Circuit Test", function () { - let eddsa; - let F; - let circuit; + let circuit4; + let circuit16; + + let circuit4_100; + let circuit16_100; + this.timeout(10000000); before( async() => { - circuit = await wasm_tester(path.join(__dirname, "circom", "linearhash.bn128.test.circom"), {O:1, verbose:false, include: ["circuits.bn128", "node_modules/circomlib/circuits"]}); - circuit100 = await wasm_tester(path.join(__dirname, "circom", "linearhash100.bn128.test.circom"), {O:1, verbose:false, include: ["circuits.bn128", "node_modules/circomlib/circuits"]}); + circuit16 = await wasm_tester(path.join(__dirname, "circom", "linearhash16.bn128.test.circom"), {O:1, verbose:false, include: ["circuits.bn128", "node_modules/circomlib/circuits"]}); + circuit16_100 = await wasm_tester(path.join(__dirname, "circom", 
"linearhash16_100.bn128.test.circom"), {O:1, verbose:false, include: ["circuits.bn128", "node_modules/circomlib/circuits"]}); + + circuit4 = await wasm_tester(path.join(__dirname, "circom", "linearhash4.bn128.test.circom"), {O:1, verbose:false, include: ["circuits.bn128", "node_modules/circomlib/circuits"]}); + circuit4_100 = await wasm_tester(path.join(__dirname, "circom", "linearhash4_100.bn128.test.circom"), {O:1, verbose:false, include: ["circuits.bn128", "node_modules/circomlib/circuits"]}); + }); + + it("Should calculate linear hash of 9 complex elements and arity 16", async () => { + const poseidon = await buildPoseidon(); + const F = poseidon.F; + + const input={ + in: [ + [1n,2n,3n], + [4n,5n,6n], + [7n,8n,9n], + [10n,11n,12n], + [13n,14n,15n], + [16n,17n,18n], + [19n,20n,21n], + [22n,23n,24n], + [25n,26n,27n] + ] + }; + + const w1 = await circuit16.calculateWitness(input, true); + + const lh = new LinearHash(poseidon, 16); + + const res = lh.hash(input.in); + + await circuit16.assertOut(w1, {out: F.toObject(res)}); + }); + it("Should calculate linear hash of 100 complex elements and arity 16", async () => { + const poseidon = await buildPoseidon(); + const F = poseidon.F; + + const input={ + in: [] + }; + + for (let i=0; i<100; i++) { + input.in.push([i, i*1000, i*1000000]) + } + + const w1 = await circuit16_100.calculateWitness(input, true); + + const lh = new LinearHash(poseidon, 16); + + const res = lh.hash(input.in); + + await circuit16_100.assertOut(w1, {out: F.toObject(res)}); }); - it("Should calculate linear hash of 9 complex elements", async () => { + it("Should calculate linear hash of 9 complex elements and arity 4", async () => { const poseidon = await buildPoseidon(); const F = poseidon.F; @@ -35,15 +88,15 @@ describe("Linear Hash Circuit Test", function () { ] }; - const w1 = await circuit.calculateWitness(input, true); + const w1 = await circuit4.calculateWitness(input, true); - const lh = new LinearHash(poseidon); + const lh = new LinearHash(poseidon, 4); const res = lh.hash(input.in); - await circuit.assertOut(w1, {out: F.toObject(res)}); + await circuit4.assertOut(w1, {out: F.toObject(res)}); }); - it("Should calculate linear hash of 100 complex elements", async () => { + it("Should calculate linear hash of 100 complex elements and arity 4", async () => { const poseidon = await buildPoseidon(); const F = poseidon.F; @@ -55,12 +108,12 @@ describe("Linear Hash Circuit Test", function () { input.in.push([i, i*1000, i*1000000]) } - const w1 = await circuit100.calculateWitness(input, true); + const w1 = await circuit4_100.calculateWitness(input, true); - const lh = new LinearHash(poseidon); + const lh = new LinearHash(poseidon, 4); const res = lh.hash(input.in); - await circuit100.assertOut(w1, {out: F.toObject(res)}); + await circuit4_100.assertOut(w1, {out: F.toObject(res)}); }); }); diff --git a/test/circuits/bn128/merklehash.bn128.circuit.test.js b/test/circuits/bn128/merklehash.bn128.circuit.test.js index 512d8351..3580950a 100644 --- a/test/circuits/bn128/merklehash.bn128.circuit.test.js +++ b/test/circuits/bn128/merklehash.bn128.circuit.test.js @@ -15,9 +15,12 @@ function getBits(idx, nBits) { return res; } -describe("Linear Hash Circuit Test", function () { - let circuit; - let MH; +describe("Merkle Hash Circuit Test", function () { + let circuit16; + let circuit4; + + let MH16; + let MH4; let poseidon; @@ -25,17 +28,22 @@ describe("Linear Hash Circuit Test", function () { before( async() => { poseidon = await buildPoseidon(); - MH = new MerkleHash(poseidon); 
+        MH16 = new MerkleHash(poseidon, 16);
+        MH4 = new MerkleHash(poseidon, 4);
+
+
+        circuit16 = await wasm_tester(path.join(__dirname, "circom", "merklehash16.bn128.test.circom"), {O:1, include: ["circuits.bn128", "node_modules/circomlib/circuits"]});
+        circuit4 = await wasm_tester(path.join(__dirname, "circom", "merklehash4.bn128.test.circom"), {O:1, include: ["circuits.bn128", "node_modules/circomlib/circuits"]});

-        circuit = await wasm_tester(path.join(__dirname, "circom", "merklehash.bn128.test.circom"), {O:1, include: ["circuits.bn128", "node_modules/circomlib/circuits"]});
     });

-    it("Should calculate linear hash of 9 complex elements", async () => {
+    it("Should calculate linear hash of 9 complex elements and arity 16", async () => {
         const NPols = 9;
-        const nBits = 5;
+        const arity = 16;
+        const nBitsArity = Math.ceil(Math.log2(arity));
+        const nBits = Math.ceil(NPols*3/arity)*nBitsArity;
         const idx = 9;
-        const poseidon = await buildPoseidon();

         const N = 1<<nBits;
+    it("Should calculate linear hash of 9 complex elements and arity 4", async () => {
+        const NPols = 9;
+        const arity = 4;
+        const nBitsArity = Math.ceil(Math.log2(arity));
+        const nBits = Math.ceil(NPols*3/arity)*nBitsArity;
+        const idx = 9;
+
+        const N = 1<<nBits;
diff --git a/test/circuits/bn128/verifyEvals.bn128.circuit.test.js b/test/circuits/bn128/verifyEvals.bn128.circuit.test.js
--- a/test/circuits/bn128/verifyEvals.bn128.circuit.test.js
+++ b/test/circuits/bn128/verifyEvals.bn128.circuit.test.js
-        const res = executeCode(F, ctx, starkInfo.verifierCode.code);
+        const res = executeCode(F, ctx, starkInfo.verifierCode.first);

         const w1 = await circuitVerifyEvals.calculateWitness({}, true);

@@ -63,7 +63,7 @@ describe("Verify Evals Goldilocks Circuit Test", function () {

     it("Should check that verify eval calculates proper output if all inputs are set to max value and all subs are zero", async () => {

-        const res = executeCode(F, ctx, starkInfo.verifierCode.code, true);
+        const res = executeCode(F, ctx, starkInfo.verifierCode.first, true);

         const w2 = await circuitVerifyEvalsMin.calculateWitness({}, true);

diff --git a/test/circuits/gl/circom/cmuladd.test.circom b/test/circuits/gl/circom/cmuladd.test.circom
index 644d5637..087753d2 100644
--- a/test/circuits/gl/circom/cmuladd.test.circom
+++ b/test/circuits/gl/circom/cmuladd.test.circom
@@ -14,4 +14,4 @@ template CMulAdd() {

     out <== [inc[0] + cmul[0], inc[1] + cmul[1], inc[2] + cmul[2]];
 }
-component main = CMulAdd();
\ No newline at end of file
+component main = CMulAdd();
diff --git a/test/f3g.test.js b/test/f3g.test.js
index 0af19a59..d56a2199 100644
--- a/test/f3g.test.js
+++ b/test/f3g.test.js
@@ -52,4 +52,4 @@ describe("f3g", function () {
         }
     });

-});
\ No newline at end of file
+});
diff --git a/test/merklehash.bn128.test.js b/test/merklehash.bn128.test.js
index 662bcb24..cc130d99 100644
--- a/test/merklehash.bn128.test.js
+++ b/test/merklehash.bn128.test.js
@@ -6,14 +6,17 @@ const { buildPoseidon } = require("circomlibjs");
 describe("merkle hash", async function () {
     this.timeout(10000000);
     let poseidon;
-    let MH;
+    let MH4;
+    let MH16;

     before( async() => {
         poseidon = await buildPoseidon();
-        MH = new MerkleHash(poseidon);
+        MH16 = new MerkleHash(poseidon, 16);
+        MH4 = new MerkleHash(poseidon, 4);
+
     });

-    it("It should merkelize and return the right number of elements", async () => {
+    it("It should merkelize and return the right number of elements with arity 16", async () => {

         const N = 256;
         const idx = 3;
@@ -28,14 +31,15 @@

         }

-        const tree = await MH.merkelize(pols, 1, nPols, N);
+        const tree = await MH16.merkelize(pols, 1, nPols, N);

-        const [groupElements, mp] = MH.getGroupProof(tree, idx);
-        const root = MH.root(tree);
+        const [groupElements, mp] = MH16.getGroupProof(tree, idx);
+        const root = MH16.root(tree);

-        assert(MH.verifyGroupProof(root, mp, idx, groupElements));
+        assert(MH16.verifyGroupProof(root, mp, idx, groupElements));
     });
-    it("It should merkelize polynomials in ext 3", async () => {
+
+    it("It should merkelize polynomials in ext 3 with arity 16", async () => {
         const N = 256;
         const idx = 3;
         const nPols = 9;
@@ -48,14 +52,14 @@
             }
         }

-        const tree = await MH.merkelize(pols, 3, nPols, N);
+        const tree = await MH16.merkelize(pols, 3, nPols, N);

-        const [groupElements, mp] = MH.getGroupProof(tree, idx);
-        const root = MH.root(tree);
+        const [groupElements, mp] = MH16.getGroupProof(tree, idx);
+        const root = MH16.root(tree);

-        assert(MH.verifyGroupProof(root, mp, idx, groupElements));
+        assert(MH16.verifyGroupProof(root, mp, idx, groupElements));
     });
-    it("It should merkelize polynomials in ext 3", async () => {
+    it("It should merkelize polynomials in ext 3 with arity 16", async () => {
         const N = 256;
         const idx = 3;
         const groupSize = 4;
         const nGroups = N/groupSize;
@@ -67,12 +71,12 @@
             pol.push([ BigInt(i), BigInt(i+10), BigInt(i+20)]);
         }

-        const tree = await MH.merkelize(pol, 3, groupSize, nGroups);
+        const tree = await MH16.merkelize(pol, 3, groupSize, nGroups);

-        const [groupElements, mp] = MH.getGroupProof(tree, idx);
-        const root = MH.root(tree);
+        const [groupElements, mp] = MH16.getGroupProof(tree, idx);
+        const root = MH16.root(tree);

-        assert(MH.verifyGroupProof(root, mp, idx, groupElements));
+        assert(MH16.verifyGroupProof(root, mp, idx, groupElements));
     });

     it("It should merkelize and return the right number of elements", async () => {
@@ -89,11 +93,74 @@
             }
         }

-        const tree = await MH.merkelize(pols, 1, nPols, N);
+        const tree = await MH16.merkelize(pols, 1, nPols, N);
+
+        const [groupElements, mp] = MH16.getGroupProof(tree, idx);
+        const root = MH16.root(tree);
+
+        assert(MH16.verifyGroupProof(root, mp, idx, groupElements));
+    });
+
+    it("It should merkelize and return the right number of elements with arity 4", async () => {
+
+        const N = 256;
+        const idx = 3;
+        const nPols = 9;
+
+        const pols = [];
+        for (let i=0; i
+    it("It should merkelize polynomials in ext 3 with arity 4", async () => {
+        const N = 256;
+        const idx = 3;
+        const nPols = 9;
+
+        const pols = [];
+        for (let i=0; i
+    it("It should merkelize polynomials in ext 3 with arity 4", async () => {
+        const N = 256;
+        const idx = 3;
+        const groupSize = 4;
+        const nGroups = N/groupSize;
+
+
+        const pol = [];
+        for (let i=0; i
diff --git a/test/merklehash_bn128_p.test.js b/test/merklehash_bn128_p.test.js
--- a/test/merklehash_bn128_p.test.js
+++ b/test/merklehash_bn128_p.test.js

     before( async() => {
-        MH = await buildMH();
+        MH16 = await buildMH(16);
+        MH4 = await buildMH(4);
+        MH8 = await buildMH(8);
     });

-    it("It should merkelize and validate a proof of a very small tree", async () => {
+    it("It should merkelize and validate a proof of a very small tree with arity 16", async () => {
         const N = 256;
         const idx = 3;
         const nPols = 3;
@@ -25,13 +29,32 @@
             }
         }

-        const tree = await MH.merkelize(pols, nPols, N);
+        const tree = await MH16.merkelize(pols, nPols, N);

-        const [groupElements, mp] = MH.getGroupProof(tree, idx);
-        const root = MH.root(tree);
-        console.log(root);
+        const [groupElements, mp] = MH16.getGroupProof(tree, idx);
+        const root = MH16.root(tree);
+        assert(MH16.verifyGroupProof(root, mp, idx, groupElements));
+    });
+
+    it("It should merkelize and validate a proof of a very small tree with arity 4", async () => {
+        const N = 256;
+        const idx = 3;
+        const nPols = 3;
+
+        const pols = new BigBuffer(nPols*N);
+        for (let i=0; i
@@ -46,16 +69,16 @@
             }
         }

-        const tree = await MH.merkelize(pols, nPols, N);
+        const tree = await MH16.merkelize(pols, nPols, N);

-        const [groupElements, mp] = MH.getGroupProof(tree, idx);
-        const root = MH.root(tree);
+        const [groupElements, mp] = MH16.getGroupProof(tree, idx);
+        const root = MH16.root(tree);

         console.log(root);

-        assert(MH.verifyGroupProof(root, mp, idx, groupElements));
+        assert(MH16.verifyGroupProof(root, mp, idx, groupElements));
     });

-    it("It should merkelize and validate a proof not multiple of 2", async () => {
+    it("It should merkelize and validate a proof not multiple of 2 with arity 16", async () => {
         const N = 33;
         const idx = 32;
         const nPols = 6;
@@ -67,15 +90,39 @@
             }
         }

-        const tree = await MH.merkelize(pols, nPols, N);
+        const tree = await MH16.merkelize(pols, nPols, N);

-        const [groupElements, mp] = MH.getGroupProof(tree, idx);
-        const root = MH.root(tree);
+        const [groupElements, mp] = MH16.getGroupProof(tree, idx);
+        const root = MH16.root(tree);

         console.log(root);

-        assert(MH.verifyGroupProof(root, mp, idx, groupElements));
+        assert(MH16.verifyGroupProof(root, mp, idx, groupElements));
     });

-    it("Big one (parallel)", async () => {
+
+    it("It should merkelize and validate a proof not multiple of 2 with arity 4", async () => {
+        const N = 33;
+        const idx = 32;
+        const nPols = 6;
+
+        const pols = new BigBuffer(nPols*N);
+        for (let i=0; i
+    it("Big one (parallel) with arity 16", async () => {
         const N = 1<<17;
         const idx = 32;
         const nPols = 10;
@@ -87,18 +134,62 @@
             }
         }

-        const tree = await MH.merkelize(pols, nPols, N);
+        const tree = await MH16.merkelize(pols, nPols, N);
+
+        const [groupElements, mp] = MH16.getGroupProof(tree, idx);
+        const root = MH16.root(tree);
+
+        console.log(root);
+
+        assert(MH16.verifyGroupProof(root, mp, idx, groupElements));
+    });
+
+    it("Big one (parallel) with arity 4", async () => {
+        const N = 1<<14;
+        const idx = 32;
+        const nPols = 31;
+
+        const pols = new BigBuffer(nPols*N);
+        for (let i=0; i
+        const N = 1<<14 + 3;
+        const idx = 32;
+        const nPols = 10;

-    it("Should save and restore to file", async() => {
+        const pols = new BigBuffer(nPols*N);
+        for (let i=0; i
+    it("Should save and restore to file with arity 16", async() => {
         const N = 1<<18;
         const nPols = 10;
@@ -109,12 +200,78 @@
             }
         }

-        const tree = await MH.merkelize(pols, nPols, N);
+        const tree = await MH16.merkelize(pols, nPols, N);
+
+        fileName = await tmp.tmpName();
+        await MH16.writeToFile(tree, fileName);
+
+        const tree2 = await MH16.readFromFile(fileName);
+
+        assert.equal(tree2.width, tree.width);
+        assert.equal(tree2.heigth, tree.heigth);
+        assert.equal(tree2.elements.length, tree.elements.length);
+        assert.equal(tree2.nodes.length, tree.nodes.length);
+        for (let i=0; i
+        const N = 1<<14;
+        const nPols = 10;
+
+        const pols = new BigBuffer(nPols*N);
+        for (let i=0; i
+        const N = 1<<14;
+        const nPols = 10;
+
+        const pols = new BigBuffer(nPols*N);
+        for (let i=0; i