diff --git a/encoding/kzgrs/prover/parametrized_prover.go b/encoding/kzgrs/prover/parametrized_prover.go
deleted file mode 100644
index ddedc7f99f..0000000000
--- a/encoding/kzgrs/prover/parametrized_prover.go
+++ /dev/null
@@ -1,285 +0,0 @@
-package prover
-
-import (
-    "fmt"
-    "log"
-    "math"
-    "time"
-
-    "github.com/Layr-Labs/eigenda/encoding"
-
-    "github.com/Layr-Labs/eigenda/encoding/fft"
-    "github.com/Layr-Labs/eigenda/encoding/kzg"
-    "github.com/Layr-Labs/eigenda/encoding/rs"
-    "github.com/Layr-Labs/eigenda/encoding/utils/toeplitz"
-    "github.com/consensys/gnark-crypto/ecc"
-    "github.com/consensys/gnark-crypto/ecc/bn254"
-    "github.com/consensys/gnark-crypto/ecc/bn254/fr"
-)
-
-type ParametrizedProver struct {
-    *rs.Encoder
-
-    *kzg.KzgConfig
-    Srs        *kzg.SRS
-    G2Trailing []bn254.G2Affine
-
-    Fs         *fft.FFTSettings
-    Ks         *kzg.KZGSettings
-    SFs        *fft.FFTSettings // FFT settings used for the submatrix product helper
-    FFTPoints  [][]bn254.G1Affine
-    FFTPointsT [][]bn254.G1Affine // transpose of FFTPoints
-}
-
-type WorkerResult struct {
-    points []bn254.G1Affine
-    err    error
-}
-
-// EncodeBytes is a wrapper that takes bytes rather than fr.Element values.
-func (g *ParametrizedProver) EncodeBytes(inputBytes []byte) (*bn254.G1Affine, *bn254.G2Affine, *bn254.G2Affine, []encoding.Frame, []uint32, error) {
-    inputFr := rs.ToFrArray(inputBytes)
-    return g.Encode(inputFr)
-}
-
-func (g *ParametrizedProver) Encode(inputFr []fr.Element) (*bn254.G1Affine, *bn254.G2Affine, *bn254.G2Affine, []encoding.Frame, []uint32, error) {
-    startTime := time.Now()
-    poly, frames, indices, err := g.Encoder.Encode(inputFr)
-    if err != nil {
-        return nil, nil, nil, nil, nil, err
-    }
-
-    if len(poly.Coeffs) > int(g.KzgConfig.SRSNumberToLoad) {
-        return nil, nil, nil, nil, nil, fmt.Errorf("poly Coeff length %v is greater than Loaded SRS points %v", len(poly.Coeffs), int(g.KzgConfig.SRSNumberToLoad))
-    }
-
-    // compute the commitment for the full polynomial
-    commit, err := g.Commit(poly.Coeffs)
-    if err != nil {
-        return nil, nil, nil, nil, nil, err
-    }
-    //lowDegreeCommitment := bls.LinCombG2(g.Srs.G2[:len(poly.Coeffs)], poly.Coeffs)
-
-    config := ecc.MultiExpConfig{}
-
-    var lowDegreeCommitment bn254.G2Affine
-    _, err = lowDegreeCommitment.MultiExp(g.Srs.G2[:len(poly.Coeffs)], poly.Coeffs, config)
-    if err != nil {
-        return nil, nil, nil, nil, nil, err
-    }
-
-    intermediate := time.Now()
-
-    polyDegreePlus1 := uint64(len(inputFr))
-
-    if g.Verbose {
-        log.Printf("    Committing takes %v\n", time.Since(intermediate))
-        intermediate = time.Now()
-
-        log.Printf("shift %v\n", g.SRSOrder-polyDegreePlus1)
-        log.Printf("order %v\n", len(g.Srs.G2))
-        log.Println("low degree verification info")
-    }
-
-    shiftedSecret := g.G2Trailing[g.KzgConfig.SRSNumberToLoad-polyDegreePlus1:]
-
-    // The low degree proof is a commitment to the polynomial shifted to the largest SRS degree.
-    //lowDegreeProof := bls.LinCombG2(shiftedSecret, poly.Coeffs[:polyDegreePlus1])
-    var lowDegreeProof bn254.G2Affine
-    _, err = lowDegreeProof.MultiExp(shiftedSecret, poly.Coeffs, config)
-    if err != nil {
-        return nil, nil, nil, nil, nil, err
-    }
-
-    //fmt.Println("kzgFFT lowDegreeProof", lowDegreeProof, "poly len ", len(fullCoeffsPoly), "order", len(g.Ks.SecretG2))
-    //ok := VerifyLowDegreeProof(&commit, lowDegreeProof, polyDegreePlus1-1, g.SRSOrder, g.Srs.G2)
-    //if !ok {
-    //    log.Printf("Kzg FFT Cannot Verify low degree proof %v", lowDegreeProof)
-    //    return nil, nil, nil, nil, errors.New("cannot verify low degree proof")
-    //} else {
-    //    log.Printf("Kzg FFT Verify low degree proof PASS %v", lowDegreeProof)
-    //}
-
-    if g.Verbose {
-        log.Printf("    Generating Low Degree Proof takes %v\n", time.Since(intermediate))
-        intermediate = time.Now()
-    }
-
-    // compute proofs
-    paddedCoeffs := make([]fr.Element, g.NumEvaluations())
-    copy(paddedCoeffs, poly.Coeffs)
-
-    proofs, err := g.ProveAllCosetThreads(paddedCoeffs, g.NumChunks, g.ChunkLength, g.NumWorker)
-    if err != nil {
-        return nil, nil, nil, nil, nil, fmt.Errorf("could not generate proofs: %v", err)
-    }
-
-    if g.Verbose {
-        log.Printf("    Proving takes %v\n", time.Since(intermediate))
-    }
-
-    kzgFrames := make([]encoding.Frame, len(frames))
-    for i, index := range indices {
-        kzgFrames[i] = encoding.Frame{
-            Proof:  proofs[index],
-            Coeffs: frames[i].Coeffs,
-        }
-    }
-
-    if g.Verbose {
-        log.Printf("Total encoding took %v\n", time.Since(startTime))
-    }
-    return &commit, &lowDegreeCommitment, &lowDegreeProof, kzgFrames, indices, nil
-}
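For context (not part of this diff): the commented-out `VerifyLowDegreeProof` call hints at how `lowDegreeProof` is meant to be checked. A hedged sketch of one consistent pairing check, assuming a separately loaded shift point `shiftG2 = [τ^(SRSOrder−d)]₂` with `d = polyDegreePlus1` (imports as in the surrounding file):

```go
// Sketch only, not the repo's VerifyLowDegreeProof: since
// lowDegreeProof = sum_i c_i * [tau^(SRSOrder-d+i)]_2 and
// commit = sum_i c_i * [tau^i]_1, the claim to check is
//   e(commit, shiftG2) == e([1]_1, lowDegreeProof).
func checkLowDegreeSketch(commit bn254.G1Affine, lowDegreeProof, shiftG2 bn254.G2Affine) (bool, error) {
    var negGen bn254.G1Affine
    negGen.Neg(&kzg.GenG1)
    // PairingCheck returns true iff the product of all pairings is one:
    // e(commit, shiftG2) * e(-[1]_1, lowDegreeProof) == 1.
    return bn254.PairingCheck(
        []bn254.G1Affine{commit, negGen},
        []bn254.G2Affine{shiftG2, lowDegreeProof},
    )
}
```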
-func (g *ParametrizedProver) Commit(polyFr []fr.Element) (bn254.G1Affine, error) {
-    commit, err := g.Ks.CommitToPoly(polyFr)
-    return *commit, err
-}
-
-func (p *ParametrizedProver) ProveAllCosetThreads(polyFr []fr.Element, numChunks, chunkLen, numWorker uint64) ([]bn254.G1Affine, error) {
-    begin := time.Now()
-    // Robert: Standardizing this to use the same math used in precomputeSRS
-    dimE := numChunks
-    l := chunkLen
-
-    sumVec := make([]bn254.G1Affine, dimE*2)
-
-    jobChan := make(chan uint64, numWorker)
-    results := make(chan WorkerResult, numWorker)
-
-    // create storage for intermediate FFT outputs
-    coeffStore := make([][]fr.Element, dimE*2)
-    for i := range coeffStore {
-        coeffStore[i] = make([]fr.Element, l)
-    }
-
-    for w := uint64(0); w < numWorker; w++ {
-        go p.proofWorker(polyFr, jobChan, l, dimE, coeffStore, results)
-    }
-
-    for j := uint64(0); j < l; j++ {
-        jobChan <- j
-    }
-    close(jobChan)
-
-    // return only the first error
-    var err error
-    for w := uint64(0); w < numWorker; w++ {
-        wr := <-results
-        if wr.err != nil {
-            err = wr.err
-        }
-    }
-
-    if err != nil {
-        return nil, fmt.Errorf("proof worker error: %v", err)
-    }
-
-    t0 := time.Now()
-
-    msmErrors := make(chan error, dimE*2)
-
-    // compute proofs by multi-scalar multiplication
-    for i := uint64(0); i < dimE*2; i++ {
-        go func(k uint64) {
-            //sumVec[k] = *bls.LinCombG1(p.FFTPointsT[k], coeffStore[k])
-            _, err := sumVec[k].MultiExp(p.FFTPointsT[k], coeffStore[k], ecc.MultiExpConfig{})
-            // TODO: handle error
-            msmErrors <- err
-        }(i)
-    }
-
-    for i := uint64(0); i < dimE*2; i++ {
-        err := <-msmErrors
-        if err != nil {
-            fmt.Println("MSM while adding points", err)
-            return nil, err
-        }
-    }
-
-    t1 := time.Now()
-
-    // only 1 inverse FFT is needed
-    sumVecInv, err := p.Fs.FFTG1(sumVec, true)
-    if err != nil {
-        return nil, fmt.Errorf("fft error: %v", err)
-    }
-
-    t2 := time.Now()
-
-    // outputs are out of order - butterfly
-    proofs, err := p.Fs.FFTG1(sumVecInv[:dimE], false)
-    if err != nil {
-        return nil, err
-    }
-
-    t3 := time.Now()
-
-    fmt.Printf("mult-th %v, msm %v, fft1 %v, fft2 %v\n", t0.Sub(begin), t1.Sub(t0), t2.Sub(t1), t3.Sub(t2))
-
-    //rb.ReverseBitOrderG1Point(proofs)
-    return proofs, nil
-}
-
-func (p *ParametrizedProver) proofWorker(
-    polyFr []fr.Element,
-    jobChan <-chan uint64,
-    l uint64,
-    dimE uint64,
-    coeffStore [][]fr.Element,
-    results chan<- WorkerResult,
-) {
-    for j := range jobChan {
-        coeffs, err := p.GetSlicesCoeff(polyFr, dimE, j, l)
-        if err != nil {
-            results <- WorkerResult{
-                points: nil,
-                err:    err,
-            }
-        }
-
-        for i := 0; i < len(coeffs); i++ {
-            coeffStore[i][j] = coeffs[i]
-        }
-    }
-
-    results <- WorkerResult{
-        err: nil,
-    }
-}
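The hot path above is the `MultiExp` calls, one per column of the transposed FFT table. For readers unfamiliar with the gnark-crypto API, here is a minimal standalone MSM example (the toy basis and sizes are mine, not from this code):

```go
package main

import (
    "fmt"
    "math/big"

    "github.com/consensys/gnark-crypto/ecc"
    "github.com/consensys/gnark-crypto/ecc/bn254"
    "github.com/consensys/gnark-crypto/ecc/bn254/fr"
)

func main() {
    const n = 8
    _, _, g1, _ := bn254.Generators()

    // build a toy basis [i+1]*g1 and random scalars
    points := make([]bn254.G1Affine, n)
    scalars := make([]fr.Element, n)
    for i := 0; i < n; i++ {
        points[i].ScalarMultiplication(&g1, big.NewInt(int64(i+1)))
        scalars[i].SetRandom()
    }

    // one multi-scalar multiplication: sum_i scalars[i] * points[i]
    var acc bn254.G1Affine
    if _, err := acc.MultiExp(points, scalars, ecc.MultiExpConfig{}); err != nil {
        panic(err)
    }
    fmt.Println("MSM result:", acc.String())
}
```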
-// The output is in the form described in the primeField Toeplitz code:
-//
-// phi ^ (coset size) = 1
-//
-// Slices are implicitly padded to a power of 2.
-func (p *ParametrizedProver) GetSlicesCoeff(polyFr []fr.Element, dimE, j, l uint64) ([]fr.Element, error) {
-    // there is a constant term
-    m := uint64(len(polyFr)) - 1
-    dim := (m - j) / l
-
-    toeV := make([]fr.Element, 2*dimE-1)
-    for i := uint64(0); i < dim; i++ {
-        //bls.CopyFr(&toeV[i], &polyFr[m-(j+i*l)])
-        toeV[i].Set(&polyFr[m-(j+i*l)])
-    }
-
-    // use the precomputed table
-    tm, err := toeplitz.NewToeplitz(toeV, p.SFs)
-    if err != nil {
-        return nil, err
-    }
-    return tm.GetFFTCoeff()
-}
-
-/*
-returns the power of 2 which is immediately bigger than the input
-*/
-func CeilIntPowerOf2Num(d uint64) uint64 {
-    nextPower := math.Ceil(math.Log2(float64(d)))
-    return uint64(math.Pow(2.0, nextPower))
-}
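`CeilIntPowerOf2Num` round-trips through `float64`, which is exact for the sizes used here but would lose precision as `d` approaches 2^53. An integer-only alternative (my sketch, not part of the deleted file) gives the same ceiling behavior:

```go
import "math/bits"

// nextPowerOf2 returns the smallest power of two >= d, using integer math only.
func nextPowerOf2(d uint64) uint64 {
    if d <= 1 {
        return 1
    }
    return 1 << bits.Len64(d-1)
}
```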
diff --git a/encoding/kzgrs/prover/precompute.go b/encoding/kzgrs/prover/precompute.go
deleted file mode 100644
index c29c2314d9..0000000000
--- a/encoding/kzgrs/prover/precompute.go
+++ /dev/null
@@ -1,331 +0,0 @@
-package prover
-
-import (
-    "bufio"
-    "fmt"
-    "io"
-    "log"
-    "math"
-    "os"
-    "path"
-    "strconv"
-    "strings"
-    "sync"
-    "time"
-
-    "github.com/Layr-Labs/eigenda/encoding/fft"
-    "github.com/Layr-Labs/eigenda/encoding/kzg"
-    "github.com/consensys/gnark-crypto/ecc/bn254"
-)
-
-type SubTable struct {
-    //SizeLow uint64
-    //SizeUp  uint64
-    FilePath string
-}
-
-type TableParam struct {
-    DimE      uint64
-    CosetSize uint64
-}
-
-type SRSTable struct {
-    Tables    map[TableParam]SubTable
-    TableDir  string
-    NumWorker uint64
-    s1        []bn254.G1Affine
-}
-
-func NewSRSTable(tableDir string, s1 []bn254.G1Affine, numWorker uint64) (*SRSTable, error) {
-    err := os.MkdirAll(tableDir, os.ModePerm)
-    if err != nil {
-        log.Println("NEWSRSTABLE.ERR.1", err)
-        return nil, err
-    }
-
-    files, err := os.ReadDir(tableDir)
-    if err != nil {
-        log.Println("NEWSRSTABLE.ERR.2", err)
-        return nil, err
-    }
-
-    tables := make(map[TableParam]SubTable)
-    for _, file := range files {
-        filename := file.Name()
-
-        tokens := strings.Split(filename, ".")
-
-        dimEValue, err := strconv.Atoi(tokens[0][4:])
-        if err != nil {
-            log.Println("NEWSRSTABLE.ERR.3", err)
-            return nil, err
-        }
-        cosetSizeValue, err := strconv.Atoi(tokens[1][5:])
-        if err != nil {
-            log.Println("NEWSRSTABLE.ERR.4", err)
-            return nil, err
-        }
-
-        param := TableParam{
-            DimE:      uint64(dimEValue),
-            CosetSize: uint64(cosetSizeValue),
-        }
-
-        filePath := path.Join(tableDir, filename)
-        tables[param] = SubTable{FilePath: filePath}
-    }
-
-    return &SRSTable{
-        Tables:    tables,
-        TableDir:  tableDir,
-        NumWorker: numWorker,
-        s1:        s1, // g1 points
-    }, nil
-}
-
-func (p *SRSTable) GetSubTables(
-    numChunks uint64,
-    chunkLen uint64,
-) ([][]bn254.G1Affine, error) {
-    cosetSize := chunkLen
-    dimE := numChunks
-    m := numChunks*chunkLen - 1
-    dim := m / cosetSize
-
-    param := TableParam{
-        DimE:      dimE,
-        CosetSize: cosetSize,
-    }
-
-    start := time.Now()
-    table, ok := p.Tables[param]
-    if !ok {
-        log.Printf("Table with params: DimE=%v CosetSize=%v does not exist\n", dimE, cosetSize)
-        log.Printf("Generating the table. May take a while\n")
-        log.Printf("... ...\n")
-        filename := fmt.Sprintf("dimE%v.coset%v", dimE, cosetSize)
-        dstFilePath := path.Join(p.TableDir, filename)
-        fftPoints := p.Precompute(dim, dimE, cosetSize, m, dstFilePath, p.NumWorker)
-
-        elapsed := time.Since(start)
-        log.Printf("    Precompute finishes using %v\n", elapsed)
-
-        return fftPoints, nil
-    } else {
-        log.Printf("Detected precomputed FFT sliced G1 table\n")
-        fftPoints, err := p.TableReaderThreads(table.FilePath, dimE, cosetSize, p.NumWorker)
-        if err != nil {
-            log.Println("GetSubTables.ERR.0", err)
-            return nil, err
-        }
-
-        elapsed := time.Since(start)
-        log.Printf("    Loading table uses %v\n", elapsed)
-
-        return fftPoints, nil
-    }
-}
-
-type DispatchReturn struct {
-    points []bn254.G1Affine
-    j      uint64
-}
-
-// m = len(poly) - 1, i.e. the degree
-func (p *SRSTable) Precompute(dim, dimE, l, m uint64, filePath string, numWorker uint64) [][]bn254.G1Affine {
-    order := dimE * l
-    if l == 1 {
-        order = dimE * 2
-    }
-    // TODO: create a function that reads only g1 points
-    //s1 := ReadG1Points(p.SrsFilePath, order)
-    n := uint8(math.Log2(float64(order)))
-    fs := fft.NewFFTSettings(n)
-
-    fftPoints := make([][]bn254.G1Affine, l)
-
-    numJob := l
-    jobChan := make(chan uint64, numJob)
-    results := make(chan DispatchReturn, l)
-
-    for w := uint64(0); w < numWorker; w++ {
-        go p.precomputeWorker(fs, m, dim, dimE, jobChan, l, results)
-    }
-
-    for j := uint64(0); j < l; j++ {
-        jobChan <- j
-    }
-    close(jobChan)
-
-    for w := uint64(0); w < l; w++ {
-        computeResult := <-results
-        fftPoints[computeResult.j] = computeResult.points
-    }
-
-    err := p.TableWriter(fftPoints, dimE, filePath)
-    if err != nil {
-        log.Println("Precompute error:", err)
-    }
-    return fftPoints
-}
-
-func (p *SRSTable) precomputeWorker(fs *fft.FFTSettings, m, dim, dimE uint64, jobChan <-chan uint64, l uint64, results chan DispatchReturn) {
-    for j := range jobChan {
-        dr, err := p.PrecomputeSubTable(fs, m, dim, dimE, j, l)
-        if err != nil {
-            log.Println("precomputeWorker.ERR.1", err)
-            return
-        }
-        results <- dr
-    }
-}
-
-func (p *SRSTable) PrecomputeSubTable(fs *fft.FFTSettings, m, dim, dimE, j, l uint64) (DispatchReturn, error) {
-    // there is a constant term
-    points := make([]bn254.G1Affine, 2*dimE)
-    k := m - l - j
-
-    for i := uint64(0); i < dim; i++ {
-        points[i].Set(&p.s1[k])
-        k -= l
-    }
-    for i := dim; i < 2*dimE; i++ {
-        points[i].Set(&kzg.ZeroG1)
-    }
-
-    y, err := fs.FFTG1(points, false)
-    if err != nil {
-        log.Println("PrecomputeSubTable.ERR.1", err)
-        return DispatchReturn{}, err
-    }
-
-    return DispatchReturn{
-        points: y,
-        j:      j,
-    }, nil
-}
-
-type Boundary struct {
-    start   uint64
-    end     uint64 // informational
-    sliceAt uint64
-}
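The table file layout that `Precompute` writes and `TableReaderThreads` expects: `l` rows, each containing `dimE*2` serialized G1 points followed by a single `'\n'` delimiter. The offset arithmetic, factored into a helper for illustration (the helper name is mine):

```go
// rowBounds: row i occupies bytes [(rowSize+1)*i, (rowSize+1)*(i+1)),
// where rowSize = dimE*2*pointBytes and the final byte is the '\n' delimiter.
func rowBounds(i, dimE, pointBytes uint64) (start, end uint64) {
    rowSize := dimE * 2 * pointBytes
    start = (rowSize + 1) * i
    end = (rowSize+1)*(i+1) - 1 // excludes the '\n' delimiter
    return start, end
}
```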
-func (p *SRSTable) TableReaderThreads(filePath string, dimE, l uint64, numWorker uint64) ([][]bn254.G1Affine, error) {
-    g1f, err := os.Open(filePath)
-    if err != nil {
-        log.Println("TableReaderThreads.ERR.0", err)
-        return nil, err
-    }
-    //todo: resolve panic
-    defer func() {
-        if err := g1f.Close(); err != nil {
-            panic(err)
-        }
-    }()
-
-    // 2x due to the circular FFT multiplication
-    subTableSize := dimE * 2 * kzg.G1PointBytes
-    totalSubTableSize := subTableSize * l
-
-    if numWorker > l {
-        numWorker = l
-    }
-
-    reader := bufio.NewReaderSize(g1f, int(totalSubTableSize+l))
-    buf := make([]byte, totalSubTableSize+l)
-    if _, err := io.ReadFull(reader, buf); err != nil {
-        log.Println("TableReaderThreads.ERR.1", err, "file path:", filePath)
-        return nil, err
-    }
-
-    boundaries := make([]Boundary, l)
-    for i := uint64(0); i < uint64(l); i++ {
-        start := (subTableSize + 1) * i
-        end := (subTableSize+1)*(i+1) - 1 // exclude \n
-        boundary := Boundary{
-            start:   start,
-            end:     end,
-            sliceAt: i,
-        }
-        boundaries[i] = boundary
-    }
-
-    fftPoints := make([][]bn254.G1Affine, l)
-
-    jobChan := make(chan Boundary, l)
-
-    var wg sync.WaitGroup
-    wg.Add(int(numWorker))
-    for i := uint64(0); i < numWorker; i++ {
-        go p.readWorker(buf, fftPoints, jobChan, dimE, &wg)
-    }
-
-    for i := uint64(0); i < l; i++ {
-        jobChan <- boundaries[i]
-    }
-    close(jobChan)
-    wg.Wait()
-    return fftPoints, nil
-}
-
-func (p *SRSTable) readWorker(
-    buf []byte,
-    fftPoints [][]bn254.G1Affine,
-    jobChan <-chan Boundary,
-    dimE uint64,
-    wg *sync.WaitGroup,
-) {
-    for b := range jobChan {
-        slicePoints := make([]bn254.G1Affine, dimE*2)
-        for i := uint64(0); i < dimE*2; i++ {
-            g1 := buf[b.start+i*kzg.G1PointBytes : b.start+(i+1)*kzg.G1PointBytes]
-            _, err := slicePoints[i].SetBytes(g1[:]) //UnmarshalText(g1[:])
-            if err != nil {
-                log.Printf("Error. From %v to %v. %v", b.start, b.end, err)
-                log.Println("readWorker.ERR.0", err)
-                return
-            }
-        }
-        fftPoints[b.sliceAt] = slicePoints
-    }
-    wg.Done()
-}
-
-func (p *SRSTable) TableWriter(fftPoints [][]bn254.G1Affine, dimE uint64, filePath string) error {
-    wf, err := os.Create(filePath)
-    if err != nil {
-        log.Println("TableWriter.ERR.0", err)
-        return err
-    }
-
-    writer := bufio.NewWriter(wf)
-    l := uint64(len(fftPoints))
-
-    delimiter := [1]byte{'\n'}
-
-    for j := uint64(0); j < l; j++ {
-        for i := uint64(0); i < dimE*2; i++ {
-            g1Bytes := fftPoints[j][i].Bytes()
-            if _, err := writer.Write(g1Bytes[:]); err != nil {
-                log.Println("TableWriter.ERR.2", err)
-                return err
-            }
-        }
-        // one line per slice
-        if _, err := writer.Write(delimiter[:]); err != nil {
-            log.Println("TableWriter.ERR.3", err)
-            return err
-        }
-    }
-
-    if err = writer.Flush(); err != nil {
-        log.Println("TableWriter.ERR.4", err)
-        return err
-    }
-    return nil
-}
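`TableWriter` serializes each point with `G1Affine.Bytes()` and `readWorker` parses it back with `SetBytes()`; a minimal round trip of that gnark-crypto (de)serialization looks like this:

```go
package main

import (
    "fmt"
    "math/big"

    "github.com/consensys/gnark-crypto/ecc/bn254"
)

func main() {
    _, _, g1, _ := bn254.Generators()

    var p bn254.G1Affine
    p.ScalarMultiplication(&g1, big.NewInt(42))

    buf := p.Bytes() // compressed encoding, bn254.SizeOfG1AffineCompressed bytes

    var q bn254.G1Affine
    if _, err := q.SetBytes(buf[:]); err != nil { // decompress and validate
        panic(err)
    }
    fmt.Println("round trip ok:", p.Equal(&q))
}
```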
diff --git a/encoding/kzgrs/verifier/batch_commit_equivalence.go b/encoding/kzgrs/verifier/batch_commit_equivalence.go
deleted file mode 100644
index b5e0a92d25..0000000000
--- a/encoding/kzgrs/verifier/batch_commit_equivalence.go
+++ /dev/null
@@ -1,125 +0,0 @@
-package verifier
-
-import (
-    "bytes"
-    "encoding/gob"
-    "errors"
-
-    "github.com/Layr-Labs/eigenda/encoding"
-    "github.com/Layr-Labs/eigenda/encoding/kzg"
-
-    "github.com/consensys/gnark-crypto/ecc"
-    "github.com/consensys/gnark-crypto/ecc/bn254"
-    "github.com/consensys/gnark-crypto/ecc/bn254/fr"
-)
-
-type CommitmentPair struct {
-    Commitment       bn254.G1Affine
-    LengthCommitment bn254.G2Affine
-}
-
-// GenRandomFactorForEquivalence generates a random value using the Fiat-Shamir transform.
-// We could also use pseudo-randomness generated locally, but then we would have to ensure
-// no adversary can manipulate it. Hashing everything takes about 1ms, so the Fiat-Shamir
-// transform does not incur much cost.
-func GenRandomFactorForEquivalence(g1commits []bn254.G1Affine, g2commits []bn254.G2Affine) (fr.Element, error) {
-    var buffer bytes.Buffer
-    enc := gob.NewEncoder(&buffer)
-
-    for _, commit := range g1commits {
-        err := enc.Encode(commit)
-        if err != nil {
-            return fr.Element{}, err
-        }
-    }
-
-    for _, commit := range g2commits {
-        err := enc.Encode(commit)
-        if err != nil {
-            return fr.Element{}, err
-        }
-    }
-
-    var randomFr fr.Element
-
-    err := kzg.HashToSingleField(&randomFr, buffer.Bytes())
-    if err != nil {
-        return fr.Element{}, err
-    }
-
-    return randomFr, nil
-}
-
-func CreateRandomnessVector(g1commits []bn254.G1Affine, g2commits []bn254.G2Affine) ([]fr.Element, error) {
-    r, err := GenRandomFactorForEquivalence(g1commits, g2commits)
-    if err != nil {
-        return nil, err
-    }
-    n := len(g1commits)
-
-    if len(g1commits) != len(g2commits) {
-        return nil, errors.New("inconsistent number of blobs for g1 and g2")
-    }
-
-    randomsFr := make([]fr.Element, n)
-    //bn254.CopyFr(&randomsFr[0], &r)
-    randomsFr[0].Set(&r)
-
-    // powers of r
-    for j := 0; j < n-1; j++ {
-        randomsFr[j+1].Mul(&randomsFr[j], &r)
-        //bn254.MulModFr(&randomsFr[j+1], &randomsFr[j], &r)
-    }
-
-    return randomsFr, nil
-}
-
-func (v *Verifier) VerifyCommitEquivalenceBatch(commitments []encoding.BlobCommitments) error {
-    commitmentsPair := make([]CommitmentPair, len(commitments))
-
-    for i, c := range commitments {
-        commitmentsPair[i] = CommitmentPair{
-            Commitment:       (bn254.G1Affine)(*c.Commitment),
-            LengthCommitment: (bn254.G2Affine)(*c.LengthCommitment),
-        }
-    }
-    return v.BatchVerifyCommitEquivalence(commitmentsPair)
-}
-
-func (group *Verifier) BatchVerifyCommitEquivalence(commitmentsPair []CommitmentPair) error {
-    g1commits := make([]bn254.G1Affine, len(commitmentsPair))
-    g2commits := make([]bn254.G2Affine, len(commitmentsPair))
-    for i := 0; i < len(commitmentsPair); i++ {
-        g1commits[i] = commitmentsPair[i].Commitment
-        g2commits[i] = commitmentsPair[i].LengthCommitment
-    }
-
-    randomsFr, err := CreateRandomnessVector(g1commits, g2commits)
-    if err != nil {
-        return err
-    }
-
-    var lhsG1 bn254.G1Affine
-    _, err = lhsG1.MultiExp(g1commits, randomsFr, ecc.MultiExpConfig{})
-    if err != nil {
-        return err
-    }
-
-    //lhsG1 := bn254.LinCombG1(g1commits, randomsFr)
-    lhsG2 := &kzg.GenG2
-
-    //rhsG2 := bn254.LinCombG2(g2commits, randomsFr)
-    var rhsG2 bn254.G2Affine
-    _, err = rhsG2.MultiExp(g2commits, randomsFr, ecc.MultiExpConfig{})
-    if err != nil {
-        return err
-    }
-    rhsG1 := &kzg.GenG1
-
-    err = PairingsVerify(&lhsG1, lhsG2, rhsG1, &rhsG2)
-    if err == nil {
-        return nil
-    } else {
-        return errors.New("universal verify: incorrect pairing")
-    }
-}
diff --git a/encoding/kzgrs/verifier/multiframe.go b/encoding/kzgrs/verifier/multiframe.go
deleted file mode 100644
index c6b5890b51..0000000000
--- a/encoding/kzgrs/verifier/multiframe.go
+++ /dev/null
@@ -1,317 +0,0 @@
-package verifier
-
-import (
-    "bytes"
-    "encoding/gob"
-    "errors"
-    "fmt"
-    "math"
-
-    "github.com/Layr-Labs/eigenda/encoding"
-
-    "github.com/Layr-Labs/eigenda/encoding/kzg"
-    "github.com/Layr-Labs/eigenda/encoding/rs"
-    "github.com/consensys/gnark-crypto/ecc"
-    "github.com/consensys/gnark-crypto/ecc/bn254"
-    "github.com/consensys/gnark-crypto/ecc/bn254/fr"
-)
-
-// Sample is the basic unit for a verification.
-// A blob may contain multiple Samples.
-type Sample struct {
-    Commitment bn254.G1Affine
-    Proof      bn254.G1Affine
-    RowIndex   int // corresponds to a row in the verification matrix
-    Coeffs     []fr.Element
-    X          uint // X is the evaluating index which corresponds to the leading coset
-}
-
-// GenRandomFactor generates a random value using the Fiat-Shamir transform.
-// We could also use pseudo-randomness generated locally, but then we would have to ensure
-// no adversary can manipulate it. Hashing everything takes about 1ms, so the Fiat-Shamir
-// transform does not incur much cost.
-func GenRandomFactor(samples []Sample) (fr.Element, error) {
-    var buffer bytes.Buffer
-    enc := gob.NewEncoder(&buffer)
-
-    for _, sample := range samples {
-        err := enc.Encode(sample.Commitment)
-        if err != nil {
-            return fr.Element{}, err
-        }
-    }
-
-    var randomFr fr.Element
-
-    err := kzg.HashToSingleField(&randomFr, buffer.Bytes())
-    if err != nil {
-        return fr.Element{}, err
-    }
-
-    return randomFr, nil
-}
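`kzg.HashToSingleField` is the repository's hash-to-field helper. As a rough, simplified illustration of the Fiat-Shamir step (a sketch with a slightly biased reduction, not the actual helper):

```go
package main

import (
    "crypto/sha256"
    "fmt"

    "github.com/consensys/gnark-crypto/ecc/bn254/fr"
)

// hashToFr reduces a SHA-256 digest of the transcript into an fr element.
// A plain reduction has slight bias; this sketches the idea and is not a
// drop-in replacement for kzg.HashToSingleField.
func hashToFr(transcript []byte) fr.Element {
    digest := sha256.Sum256(transcript)
    var e fr.Element
    e.SetBytes(digest[:]) // interprets big-endian bytes and reduces mod r
    return e
}

func main() {
    r := hashToFr([]byte("commitments serialized with gob"))
    fmt.Println("challenge:", r.String())
}
```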
-// GenRandomnessVector assigns each sample its own randomness, even though multiple samples
-// may come from an identical blob. The randomness for each sample is computed by repeatedly
-// raising the power of the root randomness.
-func GenRandomnessVector(samples []Sample) ([]fr.Element, error) {
-    // root randomness
-    r, err := GenRandomFactor(samples)
-    if err != nil {
-        return nil, err
-    }
-
-    n := len(samples)
-
-    randomsFr := make([]fr.Element, n)
-    randomsFr[0].Set(&r)
-
-    // powers of r
-    for j := 0; j < n-1; j++ {
-        randomsFr[j+1].Mul(&randomsFr[j], &r)
-    }
-    return randomsFr, nil
-}
-
-// rhsG1 comprises three terms, see https://ethresear.ch/t/a-universal-verification-equation-for-data-availability-sampling/13240/1
-func genRhsG1(samples []Sample, randomsFr []fr.Element, m int, params encoding.EncodingParams, ks *kzg.KZGSettings, proofs []bn254.G1Affine) (*bn254.G1Affine, error) {
-    n := len(samples)
-    commits := make([]bn254.G1Affine, m)
-    D := params.ChunkLength
-
-    var tmp fr.Element
-
-    // first term
-    // get coeffs to compute the aggregated commitment
-    // note the coeff is affected by how many chunks are validated per blob:
-    // if x chunks are sampled from one blob, we need the sum of the x random field elements corresponding to those samples
-    aggCommitCoeffs := make([]fr.Element, m)
-    setCommit := make([]bool, m)
-    for k := 0; k < n; k++ {
-        s := samples[k]
-        row := s.RowIndex
-        //bls.AddModFr(&aggCommitCoeffs[row], &aggCommitCoeffs[row], &randomsFr[k])
-        aggCommitCoeffs[row].Add(&aggCommitCoeffs[row], &randomsFr[k])
-
-        if !setCommit[row] {
-            commits[row].Set(&s.Commitment)
-            //bls.CopyG1(&commits[row], &s.Commitment)
-            setCommit[row] = true
-        } else {
-            //bls.EqualG1(&commits[row], &s.Commitment)
-            if !commits[row].Equal(&s.Commitment) {
-                return nil, errors.New("samples of the same row have different commitments")
-            }
-        }
-    }
-
-    var aggCommit bn254.G1Affine
-    _, err := aggCommit.MultiExp(commits, aggCommitCoeffs, ecc.MultiExpConfig{})
-    if err != nil {
-        return nil, err
-    }
-
-    // second term
-    // compute the aggregated interpolation polynomial
-    aggPolyCoeffs := make([]fr.Element, D)
-
-    // we sum the coefficients weighted by the random field elements over all D monomials in all n samples
-    for k := 0; k < n; k++ {
-        coeffs := samples[k].Coeffs
-
-        rk := randomsFr[k]
-        // For each monomial in a given polynomial, multiply its coefficient with the corresponding
-        // random field element, then sum it with the others. Given that ChunkLen (D) is identical
-        // for all samples in a subBatch, the operation is always valid.
-        for j := uint64(0); j < D; j++ {
-            tmp.Mul(&coeffs[j], &rk)
-            //bls.MulModFr(&tmp, &coeffs[j], &rk)
-            //bls.AddModFr(&aggPolyCoeffs[j], &aggPolyCoeffs[j], &tmp)
-            aggPolyCoeffs[j].Add(&aggPolyCoeffs[j], &tmp)
-        }
-    }
-
-    // All samples in a subBatch have an identical chunkLen.
-    var aggPolyG1 bn254.G1Affine
-    _, err = aggPolyG1.MultiExp(ks.Srs.G1[:D], aggPolyCoeffs, ecc.MultiExpConfig{})
-    if err != nil {
-        return nil, err
-    }
-    //aggPolyG1 := bls.LinCombG1(ks.Srs.G1[:D], aggPolyCoeffs)
-
-    // third term
-    // the leading coset is an evaluation index; here we compute the leading coset evaluations
-    // weighted by the random field elements
-    lcCoeffs := make([]fr.Element, n)
-
-    // get leading coset powers
-    leadingDs := make([]fr.Element, n)
-
-    for k := 0; k < n; k++ {
-        // get the leading coset field element
-        h := ks.ExpandedRootsOfUnity[samples[k].X]
-        var hPow fr.Element
-        hPow.SetOne()
-        //bls.CopyFr(&hPow, &bls.ONE)
-
-        // raise the power for each leading coset
-        for j := uint64(0); j < D; j++ {
-            hPow.Mul(&hPow, &h)
-            //bls.MulModFr(&tmp, &hPow, &h)
-            //bls.CopyFr(&hPow, &tmp)
-        }
-        //bls.CopyFr(&leadingDs[k], &hPow)
-        leadingDs[k].Set(&hPow)
-    }
-
-    // apply the random weights to the leading coset elements
-    for k := 0; k < n; k++ {
-        rk := randomsFr[k]
-        //bls.MulModFr(&lcCoeffs[k], &rk, &leadingDs[k])
-        lcCoeffs[k].Mul(&rk, &leadingDs[k])
-    }
-
-    var offsetG1 bn254.G1Affine
-    _, err = offsetG1.MultiExp(proofs, lcCoeffs, ecc.MultiExpConfig{})
-    if err != nil {
-        return nil, err
-    }
-
-    //offsetG1 := bls.LinCombG1(proofs, lcCoeffs)
-
-    var rhsG1 bn254.G1Affine
-    //bls.SubG1(&rhsG1, aggCommit, aggPolyG1)
-    rhsG1.Sub(&aggCommit, &aggPolyG1)
-    //bls.AddG1(&rhsG1, &rhsG1, offsetG1)
-    rhsG1.Add(&rhsG1, &offsetG1)
-    return &rhsG1, nil
-}
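The inner loop above computes h^D with D sequential field multiplications; since D is a power of two here, `fr.Element.Exp` (square-and-multiply) would compute the same value in O(log D) multiplications. A sketch of the alternative, not a change this diff makes:

```go
import (
    "math/big"

    "github.com/consensys/gnark-crypto/ecc/bn254/fr"
)

// hPowD computes h^D by square-and-multiply instead of D sequential Muls.
func hPowD(h fr.Element, D uint64) fr.Element {
    var hPow fr.Element
    hPow.Exp(h, new(big.Int).SetUint64(D))
    return hPow
}
```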
-// TODO(mooselumph): Clean up this function
-func (v *Verifier) UniversalVerifySubBatch(params encoding.EncodingParams, samplesCore []encoding.Sample, numBlobs int) error {
-    samples := make([]Sample, len(samplesCore))
-
-    for i, sc := range samplesCore {
-        x, err := rs.GetLeadingCosetIndex(
-            uint64(sc.AssignmentIndex),
-            params.NumChunks,
-        )
-        if err != nil {
-            return err
-        }
-
-        sample := Sample{
-            Commitment: (bn254.G1Affine)(*sc.Commitment),
-            Proof:      sc.Chunk.Proof,
-            RowIndex:   sc.BlobIndex,
-            Coeffs:     sc.Chunk.Coeffs,
-            X:          uint(x),
-        }
-        samples[i] = sample
-    }
-
-    return v.UniversalVerify(params, samples, numBlobs)
-}
-
-// UniversalVerify implements batch verification on a set of chunks sharing the same chunk dimensions (chunkLen, numChunk).
-// The details are given in the Ethereum Research post by George Kadianakis, Ansgar Dietrichs, and Dankrad Feist:
-// https://ethresear.ch/t/a-universal-verification-equation-for-data-availability-sampling/13240
-//
-// m is the number of blobs; samples is a list of chunks.
-//
-// The order of the samples does not matter.
-// Each sample need not have a unique row; it is possible that multiple chunks of the same blob are validated together.
-func (group *Verifier) UniversalVerify(params encoding.EncodingParams, samples []Sample, m int) error {
-    // precheck
-    for i, s := range samples {
-        if s.RowIndex >= m {
-            fmt.Printf("sample %v has row %v, but there are only %v blobs\n", i, s.RowIndex, m)
-            return errors.New("sample.RowIndex and numBlob are inconsistent")
-        }
-    }
-
-    verifier, err := group.GetKzgVerifier(params)
-    if err != nil {
-        return err
-    }
-    ks := verifier.Ks
-
-    D := params.ChunkLength
-
-    if D > group.SRSNumberToLoad {
-        return fmt.Errorf("requested chunkLen %v is larger than loaded SRS points %v", D, group.SRSNumberToLoad)
-    }
-
-    n := len(samples)
-    fmt.Printf("Batch verify %v frames of %v symbols out of %v blobs\n", n, params.ChunkLength, m)
-
-    // generate random field elements to aggregate the equality check
-    randomsFr, err := GenRandomnessVector(samples)
-    if err != nil {
-        return err
-    }
-
-    // array of proofs
-    proofs := make([]bn254.G1Affine, n)
-    for i := 0; i < n; i++ {
-        //bls.CopyG1(&proofs[i], &samples[i].Proof)
-        proofs[i].Set(&samples[i].Proof)
-    }
-
-    // lhs g1
-    //lhsG1 := bls.LinCombG1(proofs, randomsFr)
-    var lhsG1 bn254.G1Affine
-    _, err = lhsG1.MultiExp(proofs, randomsFr, ecc.MultiExpConfig{})
-    if err != nil {
-        return err
-    }
-
-    // lhs g2
-    exponent := uint64(math.Log2(float64(D)))
-    G2atD, err := kzg.ReadG2PointOnPowerOf2(exponent, group.KzgConfig)
-    if err != nil {
-        // fall back to the full list of G2 SRS points, if available
-        G2atD, err = kzg.ReadG2Point(D, group.KzgConfig)
-        if err != nil {
-            return err
-        }
-        fmt.Println("Accessed the entire G2")
-    }
-
-    lhsG2 := &G2atD
-
-    // rhs g2
-    rhsG2 := &kzg.GenG2
-
-    // rhs g1
-    rhsG1, err := genRhsG1(
-        samples,
-        randomsFr,
-        m,
-        params,
-        ks,
-        proofs,
-    )
-    if err != nil {
-        return err
-    }
-
-    return PairingsVerify(&lhsG1, lhsG2, rhsG1, rhsG2)
-}
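Putting `UniversalVerify` and `genRhsG1` together, the single pairing equation being checked is, as I read the code (following the cited ethresear.ch post):

$$e\Big(\sum_k r_k \pi_k,\ [\tau^D]_2\Big) \;=\; e\Big(\sum_k r_k C_{\mathrm{row}(k)} \;-\; \sum_{j=0}^{D-1}\Big(\sum_k r_k c_{k,j}\Big)[\tau^j]_1 \;+\; \sum_k r_k h_k^{D}\,\pi_k,\ [1]_2\Big)$$

where the $r_k$ are the Fiat-Shamir weights, $\pi_k$ the chunk proofs, $C_{\mathrm{row}(k)}$ the blob commitments, $c_{k,j}$ the chunk coefficients, and $h_k$ the leading-coset element of sample $k$. Each term on the right corresponds to one of the three MultiExp aggregations in `genRhsG1`.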
-func PairingsVerify(a1 *bn254.G1Affine, a2 *bn254.G2Affine, b1 *bn254.G1Affine, b2 *bn254.G2Affine) error {
-    var negB1 bn254.G1Affine
-    negB1.Neg((*bn254.G1Affine)(b1))
-
-    P := [2]bn254.G1Affine{*(*bn254.G1Affine)(a1), negB1}
-    Q := [2]bn254.G2Affine{*(*bn254.G2Affine)(a2), *(*bn254.G2Affine)(b2)}
-
-    ok, err := bn254.PairingCheck(P[:], Q[:])
-    if err != nil {
-        return err
-    }
-    if !ok {
-        return fmt.Errorf("PairingCheck pairing not ok. SRS is invalid")
-    }
-
-    return nil
-}
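For reference, a tiny self-contained check in the same style as `PairingsVerify`: it validates e(s·g1, g2) = e(g1, s·g2) by negating one side and feeding both pairs to `bn254.PairingCheck`, which is the bilinearity identity that `BatchVerifyCommitEquivalence` relies on (written against a recent gnark-crypto):

```go
package main

import (
    "fmt"
    "math/big"

    "github.com/consensys/gnark-crypto/ecc/bn254"
    "github.com/consensys/gnark-crypto/ecc/bn254/fr"
)

func main() {
    _, _, g1, g2 := bn254.Generators()

    var s fr.Element
    s.SetRandom()
    sBig := new(big.Int)
    s.BigInt(sBig)

    // a1 = s*g1 paired with g2; b2 = s*g2 paired with g1 (negated)
    var a1, negB1 bn254.G1Affine
    a1.ScalarMultiplication(&g1, sBig)
    negB1.Neg(&g1)

    var b2 bn254.G2Affine
    b2.ScalarMultiplication(&g2, sBig)

    // e(a1, g2) * e(-g1, b2) == 1  <=>  e(s*g1, g2) == e(g1, s*g2)
    ok, err := bn254.PairingCheck(
        []bn254.G1Affine{a1, negB1},
        []bn254.G2Affine{g2, b2},
    )
    if err != nil {
        panic(err)
    }
    fmt.Println("pairing equality holds:", ok)
}
```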