chore: Update kzg_multi package (#90)
* update kzg_multi

* update EIP-7594 batch method to use the efficient version

* update errors.go

* linter
kevaundray authored Aug 20, 2024
1 parent eac30b9 commit 2dfd18f
Showing 9 changed files with 332 additions and 141 deletions.
103 changes: 37 additions & 66 deletions api_eip7594.go
@@ -1,9 +1,9 @@
package goethkzg

import (
"errors"
"slices"

bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381"
"github.com/consensys/gnark-crypto/ecc/bls12-381/fr"
"github.com/crate-crypto/go-eth-kzg/internal/domain"
kzgmulti "github.com/crate-crypto/go-eth-kzg/internal/kzg_multi"
@@ -31,12 +31,11 @@ func (ctx *Context) computeCellsAndKZGProofsFromPolyCoeff(polyCoeff []fr.Element
return [CellsPerExtBlob]*Cell{}, [CellsPerExtBlob]KZGProof{}, err
}

// TODO: We could return an error -- though its unrecoverable
if len(cosetEvaluations) != CellsPerExtBlob {
panic("expected coset evaluations to be of length `CellsPerExtBlob`")
return [CellsPerExtBlob]*Cell{}, [CellsPerExtBlob]KZGProof{}, ErrNumCosetEvaluationsCheck
}
if len(proofs) != CellsPerExtBlob {
panic("expected proofs to be of length `CellsPerExtBlob`")
return [CellsPerExtBlob]*Cell{}, [CellsPerExtBlob]KZGProof{}, ErrNumProofsCheck
}

// Serialize proofs
@@ -48,9 +47,8 @@ func (ctx *Context) computeCellsAndKZGProofsFromPolyCoeff(polyCoeff []fr.Element
// Serialize Cells
var Cells [CellsPerExtBlob]*Cell
for i, cosetEval := range cosetEvaluations {
// TODO: We could return an error -- though its unrecoverable
if len(cosetEval) != scalarsPerCell {
panic("expected cell to be of length `scalarsPerCell`")
return [CellsPerExtBlob]*Cell{}, [CellsPerExtBlob]KZGProof{}, ErrCosetEvaluationLengthCheck
}
cosetEvalArr := (*[scalarsPerCell]fr.Element)(cosetEval)

@@ -62,8 +60,7 @@ func (ctx *Context) computeCellsAndKZGProofsFromPolyCoeff(polyCoeff []fr.Element

func (ctx *Context) RecoverCellsAndComputeKZGProofs(cellIDs []uint64, cells []*Cell, _proofs []KZGProof, numGoRoutines int) ([CellsPerExtBlob]*Cell, [CellsPerExtBlob]KZGProof, error) {
// Check each proof can be deserialized
// TODO: This seems somewhat useless as we should not be calling this method with proofs
// TODO: that are not valid.
// TODO: This gets removed when we update the specs.
for _, proof := range _proofs {
_, err := DeserializeKZGProof(proof)
if err != nil {
@@ -72,27 +69,27 @@ func (ctx *Context) RecoverCellsAndComputeKZGProofs(cellIDs []uint64, cells []*C
}

if len(cellIDs) != len(cells) {
return [CellsPerExtBlob]*Cell{}, [CellsPerExtBlob]KZGProof{}, errors.New("number of cell IDs should be equal to the number of cells")
return [CellsPerExtBlob]*Cell{}, [CellsPerExtBlob]KZGProof{}, ErrNumCellIDsNotEqualNumCells
}
if len(cellIDs) != len(_proofs) {
return [CellsPerExtBlob]*Cell{}, [CellsPerExtBlob]KZGProof{}, errors.New("number of cell IDs should be equal to the number of proofs")
return [CellsPerExtBlob]*Cell{}, [CellsPerExtBlob]KZGProof{}, ErrNumCellIDsNotEqualNumProofs
}

// Check that the cell Ids are unique
if !isUniqueUint64(cellIDs) {
return [CellsPerExtBlob]*Cell{}, [CellsPerExtBlob]KZGProof{}, errors.New("cell IDs should be unique")
return [CellsPerExtBlob]*Cell{}, [CellsPerExtBlob]KZGProof{}, ErrCellIDsNotUnique
}

// Check that each CellId is less than CellsPerExtBlob
for _, cellID := range cellIDs {
if cellID >= CellsPerExtBlob {
return [CellsPerExtBlob]*Cell{}, [CellsPerExtBlob]KZGProof{}, errors.New("cell ID should be less than CellsPerExtBlob")
return [CellsPerExtBlob]*Cell{}, [CellsPerExtBlob]KZGProof{}, ErrFoundInvalidCellID
}
}

// Check that we have enough cells to perform reconstruction
if len(cellIDs) < ctx.dataRecovery.NumBlocksNeededToReconstruct() {
return [CellsPerExtBlob]*Cell{}, [CellsPerExtBlob]KZGProof{}, errors.New("not enough cells to perform reconstruction")
return [CellsPerExtBlob]*Cell{}, [CellsPerExtBlob]KZGProof{}, ErrNotEnoughCellsForReconstruction
}

// Find the missing cell IDs and bit reverse them
@@ -130,33 +127,7 @@ func (ctx *Context) RecoverCellsAndComputeKZGProofs(cellIDs []uint64, cells []*C
}

func (ctx *Context) VerifyCellKZGProof(commitment KZGCommitment, cellID uint64, cell *Cell, proof KZGProof) error {
// Check if the cell ID is less than CellsPerExtBlob
if cellID >= CellsPerExtBlob {
return ErrInvalidCellID
}

// Deserialize the commitment
commitmentG1, err := DeserializeKZGCommitment(commitment)
if err != nil {
return err
}

// Deserialize the proof
proofG1, err := DeserializeKZGProof(proof)
if err != nil {
return err
}

// Deserialize the cell
cosetEvals, err := deserializeCell(cell)
if err != nil {
return err
}

// partition the extended roots to form cosets
cosets := partition(ctx.domainExtended.Roots, scalarsPerCell)

return kzgmulti.VerifyMultiPointKZGProof(commitmentG1, proofG1, cosetEvals, cosets[cellID], ctx.openKey7594)
return ctx.VerifyCellKZGProofBatch([]KZGCommitment{commitment}, []uint64{0}, []uint64{cellID}, []*Cell{cell}, []KZGProof{proof})
}

func (ctx *Context) VerifyCellKZGProofBatch(rowCommitments []KZGCommitment, rowIndices, columnIndices []uint64, cells []*Cell, proofs []KZGProof) error {
@@ -178,40 +149,40 @@ func (ctx *Context) VerifyCellKZGProofBatch(rowCommitments []KZGCommitment, rowI
}
}

for i := 0; i < batchSize; i++ {
err := ctx.VerifyCellKZGProof(rowCommitments[rowIndices[i]], columnIndices[i], cells[i], proofs[i])
for _, cellIndex := range columnIndices {
if cellIndex >= CellsPerExtBlob {
return ErrInvalidCellID
}
}

commitments := make([]bls12381.G1Affine, len(rowCommitments))
for i := 0; i < len(rowCommitments); i++ {
comm, err := DeserializeKZGCommitment(rowCommitments[i])
if err != nil {
return err
}
commitments[i] = comm
}

return nil
}

// partition groups a slice into chunks of size k
// Example:
// Input: [1, 2, 3, 4, 5, 6, 7, 8, 9], k: 3
// Output: [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
//
// Panics if the slice cannot be divided into chunks of size k
// TODO: Remove, once we make verification not require the cosets
// TODO: These are not needed in a optimized version
func partition(slice []fr.Element, k int) [][]fr.Element {
var result [][]fr.Element

for i := 0; i < len(slice); i += k {
end := i + k
if end > len(slice) {
panic("all partitions should have the same size")
proofsG1 := make([]bls12381.G1Affine, len(proofs))
for i := 0; i < len(proofs); i++ {
proof, err := DeserializeKZGProof(proofs[i])
if err != nil {
return err
}
result = append(result, slice[i:end])
proofsG1[i] = proof
}

return result
cosetsEvals := make([][]fr.Element, len(cells))
for i := 0; i < len(cells); i++ {
cosetEvals, err := deserializeCell(cells[i])
if err != nil {
return err
}
cosetsEvals[i] = cosetEvals
}
return kzgmulti.VerifyMultiPointKZGProofBatch(commitments, rowIndices, columnIndices, proofsG1, cosetsEvals, ctx.openKey7594)
}

// isUniqueUint64 returns true if all elements
// in the slice are unique
// isUniqueUint64 returns true if the slice contains no duplicate elements
func isUniqueUint64(slice []uint64) bool {
elementMap := make(map[uint64]bool)

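As the hunk above shows, VerifyCellKZGProof is now a thin wrapper over the batch path. Below is a minimal caller-side sketch of that call shape, assuming the module path github.com/crate-crypto/go-eth-kzg and a Context obtained elsewhere; everything outside this diff is illustrative.

package example

import (
	goethkzg "github.com/crate-crypto/go-eth-kzg"
)

// verifyOneCell mirrors what VerifyCellKZGProof now does internally:
// a batch of size one, with a single row commitment referenced at index 0.
func verifyOneCell(ctx *goethkzg.Context, commitment goethkzg.KZGCommitment, cellID uint64, cell *goethkzg.Cell, proof goethkzg.KZGProof) error {
	return ctx.VerifyCellKZGProofBatch(
		[]goethkzg.KZGCommitment{commitment}, // row commitments
		[]uint64{0},                          // row index into the commitments slice
		[]uint64{cellID},                     // column index, i.e. the cell's position
		[]*goethkzg.Cell{cell},
		[]goethkzg.KZGProof{proof},
	)
}

Treating the single-cell case as a batch of one keeps a single verification code path, at the cost of the slightly more general argument list.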
12 changes: 12 additions & 0 deletions errors.go
@@ -7,4 +7,16 @@ var (
ErrNonCanonicalScalar = errors.New("scalar is not canonical when interpreted as a big integer in big-endian")
ErrInvalidCellID = errors.New("cell ID should be less than CellsPerExtBlob")
ErrInvalidRowIndex = errors.New("row index should be less than the number of row commitments")

ErrNumCellIDsNotEqualNumCells = errors.New("number of cell IDs should be equal to the number of cells")
ErrNumCellIDsNotEqualNumProofs = errors.New("number of cell IDs should be equal to the number of proofs")
ErrCellIDsNotUnique = errors.New("cell IDs are not unique")
ErrFoundInvalidCellID = errors.New("cell ID should be less than CellsPerExtBlob")
ErrNotEnoughCellsForReconstruction = errors.New("not enough cells to perform reconstruction")

// The following errors indicate that the library constants have not been setup properly.
// These should never happen unless the library has been incorrectly modified.
ErrNumCosetEvaluationsCheck = errors.New("expected number of coset evaluations to be `CellsPerExtBlob`")
ErrCosetEvaluationLengthCheck = errors.New("expected coset evaluations to have `ScalarsPerCell` number of field elements")
ErrNumProofsCheck = errors.New("expected number of proofs to be `CellsPerExtBlob`")
)
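Because these are package-level sentinel values, callers can branch on them with errors.Is rather than comparing error strings. A minimal sketch, assuming the root package is imported as goethkzg; the surrounding handler is illustrative, not part of this commit.

package example

import (
	"errors"
	"log"

	goethkzg "github.com/crate-crypto/go-eth-kzg"
)

func tryRecover(ctx *goethkzg.Context, cellIDs []uint64, cells []*goethkzg.Cell, proofs []goethkzg.KZGProof, numGoRoutines int) {
	_, _, err := ctx.RecoverCellsAndComputeKZGProofs(cellIDs, cells, proofs, numGoRoutines)
	switch {
	case err == nil:
		// reconstruction succeeded
	case errors.Is(err, goethkzg.ErrNotEnoughCellsForReconstruction):
		log.Println("not enough cells yet; wait for more before retrying")
	case errors.Is(err, goethkzg.ErrCellIDsNotUnique), errors.Is(err, goethkzg.ErrFoundInvalidCellID):
		log.Println("malformed cell IDs; reject the input")
	default:
		log.Println("recovery failed:", err)
	}
}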
48 changes: 26 additions & 22 deletions internal/kzg_multi/fk20/fk20.go
@@ -43,57 +43,67 @@ func NewFK20(srs []bls12381.G1Affine, numPointsToOpen, evalSetSize int) FK20 {
extDomain := domain.NewDomain(uint64(numPointsToOpen))

return FK20{
batchMulAgg: batchMul,
proofDomain: *proofDomain,
extDomain: *extDomain,

batchMulAgg: batchMul,
proofDomain: *proofDomain,
extDomain: *extDomain,
numPointsToOpen: numPointsToOpen,
evalSetSize: evalSetSize,
}
}

// TODO: move to reed-solomon, though it is somewhat hard to figure out
// TODO: what points we are opening for
func (fk *FK20) ComputeEvaluationSet(polyCoeff []fr.Element) [][]fr.Element {
// computeEvaluationSet evaluates `polyCoeff` on all of the cosets
// that `ComputeMultiOpenProof` has created proofs for.
//
// Note: `polyCoeff` is not mutated in-place, i.e. it should be treated as an immutable reference.
func (fk *FK20) computeEvaluationSet(polyCoeff []fr.Element) [][]fr.Element {
polyCoeff = slices.Clone(polyCoeff)
// Pad to the correct length
for i := len(polyCoeff); i < len(fk.extDomain.Roots); i++ {
polyCoeff = append(polyCoeff, fr.Element{})
}

evaluations := fk.extDomain.FftFr(polyCoeff)
// TODO: move this to top level, same comment in ComputeMultiOpenProof
domain.BitReverse(evaluations)

return partition(evaluations, fk.evalSetSize)
}

func (fk *FK20) ComputeMultiOpenProof(poly []fr.Element) ([]bls12381.G1Affine, error) {
func (fk *FK20) ComputeMultiOpenProof(poly []fr.Element) ([]bls12381.G1Affine, [][]fr.Element, error) {
// Note: `computeEvaluationSet` will create a copy of `poly`
// and pad it. Hence, the rest of this method does not use the padded
// version of `poly`.
outputSets := fk.computeEvaluationSet(poly)

hComms, err := fk.computeHPolysComm(poly)
if err != nil {
return nil, err
return nil, nil, err
}

// Padd hComms since fft does not do this
// Pad hComms since fft does not do this
numProofs := len(fk.proofDomain.Roots)
for i := len(hComms); i < numProofs; i++ {
hComms = append(hComms, bls12381.G1Affine{})
}

proofs := fk.proofDomain.FftG1(hComms)
// TODO: move this to top level
domain.BitReverse(proofs)

return proofs, nil
return proofs, outputSets, nil
}

// computeHPolysComm computes commitments to the polynomials that are common amongst all
// of the opening proofs across the cosets. These are denoted as `hPolys` and the naming
// follows the FK20 paper.
//
// Note: `polyCoeff` is not mutated in-place, i.e. it should be treated as an immutable reference.
func (fk *FK20) computeHPolysComm(polyCoeff []fr.Element) ([]bls12381.G1Affine, error) {
if !utils.IsPowerOfTwo(uint64(len(polyCoeff))) {
return nil, errors.New("expected the polynomial to have power of two number of coefficients")
}

// Reverse polynomial so that we have the highest coefficient
// be first.
polyCoeff = slices.Clone(polyCoeff) // TODO: Clone since we reverse and use PolyCoeff to evaluate after this call
// Note: we clone since we want to reverse the order of the coefficients
polyCoeff = slices.Clone(polyCoeff)
slices.Reverse(polyCoeff)

toeplitzRows := takeEveryNth(polyCoeff, fk.evalSetSize)
@@ -115,7 +125,7 @@ func takeEveryNth[T any](list []T, n int) [][]T {
result := make([][]T, n)

for i := 0; i < n; i++ {
subList := make([]T, 0, (len(list)+n-1)/n) // Pre-allocate capacity
subList := make([]T, 0)
for j := i; j < len(list); j += n {
subList = append(subList, list[j])
}
@@ -135,12 +145,6 @@ func nextPowerOfTwo(n int) int {
k <<= 1
}
return k

// p := 1
// for p < n {
// p *= 2
// }
// return p
}

// padToPowerOfTwo pads each inner slice to the next power of two in-place
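computeHPolysComm above builds the Toeplitz rows by striding the reversed coefficients with takeEveryNth. That helper is unexported inside internal/kzg_multi/fk20, so the snippet below is an illustrative re-implementation (not the library's code) showing what the striding produces.

package main

import "fmt"

// takeEveryNth mirrors the unexported helper above: input element j ends up
// in sub-list j mod n, with relative order preserved.
func takeEveryNth[T any](list []T, n int) [][]T {
	result := make([][]T, n)
	for i := 0; i < n; i++ {
		sub := make([]T, 0)
		for j := i; j < len(list); j += n {
			sub = append(sub, list[j])
		}
		result[i] = sub
	}
	return result
}

func main() {
	coeffs := []int{0, 1, 2, 3, 4, 5, 6, 7}
	// With n = 4, prints [[0 4] [1 5] [2 6] [3 7]]
	fmt.Println(takeEveryNth(coeffs, 4))
}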
10 changes: 4 additions & 6 deletions internal/kzg_multi/fk20/toeplitz.go
@@ -1,8 +1,6 @@
package fk20

import (
"slices"

bls12381 "github.com/consensys/gnark-crypto/ecc/bls12-381"
"github.com/consensys/gnark-crypto/ecc/bls12-381/fr"
"github.com/crate-crypto/go-eth-kzg/internal/domain"
@@ -46,6 +44,9 @@ type BatchToeplitzMatrixVecMul struct {
circulantDomain domain.Domain
}

// newBatchToeplitzMatrixVecMul creates a new instance of `BatchToeplitzMatrixVecMul`.
//
// Note: `fixedVectors` is mutated in place, i.e. it is treated as a mutable reference.
func newBatchToeplitzMatrixVecMul(fixedVectors [][]bls12381.G1Affine) BatchToeplitzMatrixVecMul {
// We assume that the length of the vector is at least one.
// If this is not true, then we panic on startup.
@@ -76,10 +77,7 @@ func newBatchToeplitzMatrixVecMul(fixedVectors [][]bls12381.G1Affine) BatchToepl

circulantDomain := domain.NewDomain(uint64(circulantPaddedVecSize))

// TODO: grep for these Clones and remove them if the caller does not need it
// TODO: once the function is completed.
// TODO: Also comment the function to mention that the slice is mutated in-place.
fftFixedVectors := slices.Clone(fixedVectors)
fftFixedVectors := fixedVectors
// Before performing the fft, pad the vector so that it is the correct size.
padToPowerOfTwo(fftFixedVectors)

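The clone removed in this hunk is what previously shielded the caller's slice; the new doc comment on newBatchToeplitzMatrixVecMul makes the in-place contract explicit. Below is a small illustrative sketch of how the caller observes the padding once the clone is gone; padInnerInPlace is a hypothetical stand-in for padToPowerOfTwo, whose body is not shown here.

package main

import "fmt"

// padInnerInPlace replaces each inner slice with a zero-padded version by
// writing back through the shared outer slice, so the caller sees the change.
func padInnerInPlace(vecs [][]int, size int) {
	for i, v := range vecs {
		for len(v) < size {
			v = append(v, 0)
		}
		vecs[i] = v
	}
}

func main() {
	fixedVectors := [][]int{{1}, {2, 3}}
	padInnerInPlace(fixedVectors, 4)
	// Prints [[1 0 0 0] [2 3 0 0]]: the original inner slices were replaced.
	fmt.Println(fixedVectors)
}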
8 changes: 1 addition & 7 deletions internal/kzg_multi/kzg_prove.go
@@ -8,11 +8,5 @@ import (
)

func ComputeMultiPointKZGProofs(fk20 *fk20.FK20, poly poly.PolynomialCoeff) ([]bls12381.G1Affine, [][]fr.Element, error) {
proofs, err := fk20.ComputeMultiOpenProof(poly)
if err != nil {
return nil, nil, err
}
outputPointsSet := fk20.ComputeEvaluationSet(poly)

return proofs, outputPointsSet, nil
return fk20.ComputeMultiOpenProof(poly)
}