diff --git a/ecc/bls12-377/fr/pedersen/pedersen.go b/ecc/bls12-377/fr/pedersen/pedersen.go index cee17840b..26c7842bb 100644 --- a/ecc/bls12-377/fr/pedersen/pedersen.go +++ b/ecc/bls12-377/fr/pedersen/pedersen.go @@ -18,10 +18,12 @@ package pedersen import ( "crypto/rand" + "crypto/sha256" "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "io" "math/big" ) @@ -51,7 +53,7 @@ func randomOnG2() (curve.G2Affine, error) { // TODO: Add to G2.go? } } -func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { +func Setup(bases ...[]curve.G1Affine) (pk []ProvingKey, vk VerifyingKey, err error) { if vk.g, err = randomOnG2(); err != nil { return @@ -70,34 +72,139 @@ func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { sigmaInvNeg.Sub(fr.Modulus(), &sigmaInvNeg) vk.gRootSigmaNeg.ScalarMultiplication(&vk.g, &sigmaInvNeg) - pk.basisExpSigma = make([]curve.G1Affine, len(basis)) - for i := range basis { - pk.basisExpSigma[i].ScalarMultiplication(&basis[i], sigma) + pk = make([]ProvingKey, len(bases)) + for i := range bases { + pk[i].basisExpSigma = make([]curve.G1Affine, len(bases[i])) + for j := range bases[i] { + pk[i].basisExpSigma[j].ScalarMultiplication(&bases[i][j], sigma) + } + pk[i].basis = bases[i] + } + return +} + +func (pk *ProvingKey) ProveKnowledge(values []fr.Element) (pok curve.G1Affine, err error) { + if len(values) != len(pk.basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, // TODO Experiment } - pk.basis = basis + _, err = pok.MultiExp(pk.basisExpSigma, values, config) return } -func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, knowledgeProof curve.G1Affine, err error) { +func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, err error) { if len(values) != len(pk.basis) { - err = fmt.Errorf("unexpected number of values") + err = fmt.Errorf("must have as many values as basis elements") return } // TODO @gbotrel this will spawn more than one task, see // https://github.com/ConsenSys/gnark-crypto/issues/269 config := ecc.MultiExpConfig{ - NbTasks: 1, // TODO Experiment + NbTasks: 1, + } + _, err = commitment.MultiExp(pk.basis, values, config) + + return +} + +// BatchProve generates a single proof of knowledge for multiple commitments for faster verification +func BatchProve(pk []ProvingKey, values [][]fr.Element, fiatshamirSeeds ...[]byte) (pok curve.G1Affine, err error) { + if len(pk) != len(values) { + err = fmt.Errorf("must have as many value vectors as bases") + return } - if _, err = commitment.MultiExp(pk.basis, values, config); err != nil { + if len(pk) == 1 { // no need to fold + return pk[0].ProveKnowledge(values[0]) + } else if len(pk) == 0 { // nothing to do at all return } - _, err = knowledgeProof.MultiExp(pk.basisExpSigma, values, config) + offset := 0 + for i := range pk { + if len(values[i]) != len(pk[i].basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + offset += len(values[i]) + } + + var r fr.Element + if r, err = getChallenge(fiatshamirSeeds); err != nil { + return + } + + // prepare one amalgamated MSM + scaledValues := make([]fr.Element, offset) + basis := 
make([]curve.G1Affine, offset) + + copy(basis, pk[0].basisExpSigma) + copy(scaledValues, values[0]) + + offset = len(values[0]) + rI := r + for i := 1; i < len(pk); i++ { + copy(basis[offset:], pk[i].basisExpSigma) + for j := range pk[i].basis { + scaledValues[offset].Mul(&values[i][j], &rI) + offset++ + } + if i+1 < len(pk) { + rI.Mul(&rI, &r) + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = pok.MultiExp(basis, scaledValues, config) + return +} + +// FoldCommitments amalgamates multiple commitments into one, which can be verifier against a folded proof obtained from BatchProve +func FoldCommitments(commitments []curve.G1Affine, fiatshamirSeeds ...[]byte) (commitment curve.G1Affine, err error) { + + if len(commitments) == 1 { // no need to fold + commitment = commitments[0] + return + } else if len(commitments) == 0 { // nothing to do at all + return + } + + r := make([]fr.Element, len(commitments)) + r[0].SetOne() + if r[1], err = getChallenge(fiatshamirSeeds); err != nil { + return + } + for i := 2; i < len(commitments); i++ { + r[i].Mul(&r[i-1], &r[1]) + } + + for i := range commitments { // TODO @Tabaie Remove if MSM does subgroup check for you + if !commitments[i].IsInSubGroup() { + err = fmt.Errorf("subgroup check failed") + return + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = commitment.MultiExp(commitments, r, config) return } @@ -108,21 +215,36 @@ func (vk *VerifyingKey) Verify(commitment curve.G1Affine, knowledgeProof curve.G return fmt.Errorf("subgroup check failed") } - product, err := curve.Pair([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}) - if err != nil { + if isOne, err := curve.PairingCheck([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}); err != nil { return err + } else if !isOne { + return fmt.Errorf("proof rejected") } - if product.IsOne() { - return nil + return nil +} + +func getChallenge(fiatshamirSeeds [][]byte) (r fr.Element, err error) { + // incorporate user-provided seeds into the transcript + t := fiatshamir.NewTranscript(sha256.New(), "r") + for i := range fiatshamirSeeds { + if err = t.Bind("r", fiatshamirSeeds[i]); err != nil { + return + } + } + + // obtain the challenge + var rBytes []byte + + if rBytes, err = t.ComputeChallenge("r"); err != nil { + return } - return fmt.Errorf("proof rejected") + r.SetBytes(rBytes) // TODO @Tabaie Plonk challenge generation done the same way; replace both with hash to fr? 
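// Added usage sketch (illustrative, not from the patch; the seed literal is hypothetical):
// the challenge r returned here is what makes BatchProve and FoldCommitments agree, so both
// sides must be called with identical fiatshamirSeeds:
//
//	pok, _ := BatchProve(pk, values, []byte("my-domain"))          // prover folds proofs with powers of r
//	folded, _ := FoldCommitments(commitments, []byte("my-domain")) // verifier folds commitments with the same powers
//	err := vk.Verify(folded, pok)
//
// Different seeds on the two sides yield different challenges and Verify rejects.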
+ return } // Marshal -func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) - +func (pk *ProvingKey) writeTo(enc *curve.Encoder) (int64, error) { if err := enc.Encode(pk.basis); err != nil { return enc.BytesWritten(), err } @@ -132,6 +254,14 @@ func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { return enc.BytesWritten(), err } +func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w)) +} + +func (pk *ProvingKey) WriteRawTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { dec := curve.NewDecoder(r) @@ -150,7 +280,14 @@ func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { } func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) + return vk.writeTo(curve.NewEncoder(w)) +} + +func (vk *VerifyingKey) WriteRawTo(w io.Writer) (int64, error) { + return vk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + +func (vk *VerifyingKey) writeTo(enc *curve.Encoder) (int64, error) { var err error if err = enc.Encode(&vk.g); err != nil { @@ -161,7 +298,15 @@ func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { } func (vk *VerifyingKey) ReadFrom(r io.Reader) (int64, error) { - dec := curve.NewDecoder(r) + return vk.readFrom(r) +} + +func (vk *VerifyingKey) UnsafeReadFrom(r io.Reader) (int64, error) { + return vk.readFrom(r, curve.NoSubgroupChecks()) +} + +func (vk *VerifyingKey) readFrom(r io.Reader, decOptions ...func(*curve.Decoder)) (int64, error) { + dec := curve.NewDecoder(r, decOptions...) var err error if err = dec.Decode(&vk.g); err != nil { diff --git a/ecc/bls12-377/fr/pedersen/pedersen_test.go b/ecc/bls12-377/fr/pedersen/pedersen_test.go index 939642488..b7fdeeaa0 100644 --- a/ecc/bls12-377/fr/pedersen/pedersen_test.go +++ b/ecc/bls12-377/fr/pedersen/pedersen_test.go @@ -17,6 +17,7 @@ package pedersen import ( + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/gnark-crypto/utils" @@ -67,15 +68,18 @@ func testCommit(t *testing.T, values ...interface{}) { basis := randomG1Slice(t, len(values)) var ( - pk ProvingKey + pk []ProvingKey vk VerifyingKey err error commitment, pok curve.G1Affine ) + valuesFr := interfaceSliceToFrSlice(t, values...) pk, vk, err = Setup(basis) assert.NoError(t, err) - commitment, pok, err = pk.Commit(interfaceSliceToFrSlice(t, values...)) + commitment, err = pk[0].Commit(valuesFr) + assert.NoError(t, err) + pok, err = pk[0].ProveKnowledge(valuesFr) assert.NoError(t, err) assert.NoError(t, vk.Verify(commitment, pok)) @@ -83,6 +87,71 @@ func testCommit(t *testing.T, values ...interface{}) { assert.NotNil(t, vk.Verify(commitment, pok)) } +func TestFoldProofs(t *testing.T) { + + values := [][]fr.Element{ + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + } + + bases := make([][]curve.G1Affine, len(values)) + for i := range bases { + bases[i] = randomG1Slice(t, len(values[i])) + } + + pk, vk, err := Setup(bases...) 
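// Added sketch of the basic (unfolded) flow with the new API; `basis` and `values` stand for
// caller-provided data and are hypothetical here. Setup is now variadic: one secret σ and one
// G2 point serve every basis, returning a ProvingKey per basis and a single shared VerifyingKey.
//
//	pks, vk, err := Setup(basis)               // len(pks) == 1 for a single basis
//	c, err := pks[0].Commit(values)            // c = Σ values[j]·basis[j]
//	pok, err := pks[0].ProveKnowledge(values)  // pok = Σ values[j]·(σ·basis[j]) = σ·c
//	err = vk.Verify(c, pok)                    // pairing check relating c and pok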
+ assert.NoError(t, err) + + commitments := make([]curve.G1Affine, len(values)) + for i := range values { + commitments[i], err = pk[i].Commit(values[i]) + assert.NoError(t, err) + } + + t.Run("folding with zeros", func(t *testing.T) { + pokFolded, err := BatchProve(pk[:2], [][]fr.Element{ + values[0], + make([]fr.Element, len(values[1])), + }, []byte("test")) + assert.NoError(t, err) + var pok curve.G1Affine + pok, err = pk[0].ProveKnowledge(values[0]) + assert.NoError(t, err) + assert.Equal(t, pok, pokFolded) + }) + + t.Run("run empty", func(t *testing.T) { + var foldedCommitment curve.G1Affine + pok, err := BatchProve([]ProvingKey{}, [][]fr.Element{}, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments([]curve.G1Affine{}, []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + }) + + run := func(values [][]fr.Element) func(t *testing.T) { + return func(t *testing.T) { + + var foldedCommitment curve.G1Affine + pok, err := BatchProve(pk[:len(values)], values, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments(commitments[:len(values)], []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + + pok.Neg(&pok) + assert.NotNil(t, vk.Verify(foldedCommitment, pok)) + } + } + + for i := range values { + t.Run(fmt.Sprintf("folding %d", i+1), run(values[:i+1])) + } +} + func TestCommitToOne(t *testing.T) { testCommit(t, 1) } @@ -110,5 +179,7 @@ func TestMarshal(t *testing.T) { assert.NoError(t, err) t.Run("ProvingKey -> Bytes -> ProvingKey must remain identical.", utils.SerializationRoundTrip(&pk)) + t.Run("ProvingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&pk)) t.Run("VerifyingKey -> Bytes -> VerifyingKey must remain identical.", utils.SerializationRoundTrip(&vk)) + t.Run("VerifyingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&vk)) } diff --git a/ecc/bls12-377/marshal.go b/ecc/bls12-377/marshal.go index 48450677d..1071fe912 100644 --- a/ecc/bls12-377/marshal.go +++ b/ecc/bls12-377/marshal.go @@ -103,24 +103,36 @@ func (dec *Decoder) Decode(v interface{}) (err error) { var buf [SizeOfG2AffineUncompressed]byte var read int + var sliceLen uint32 switch t := v.(type) { + case *[][]uint64: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + *t = make([][]uint64, sliceLen) + + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + (*t)[i] = make([]uint64, sliceLen) + for j := range (*t)[i] { + if (*t)[i][j], err = dec.readUint64(); err != nil { + return + } + } + } + return case *[]uint64: - buf64 := buf[:64/8] - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if sliceLen, err = dec.readUint32(); err != nil { return } - length := binary.BigEndian.Uint64(buf64) - *t = make([]uint64, length) + *t = make([]uint64, sliceLen) for i := range *t { - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if (*t)[i], err = dec.readUint64(); err != nil { return } - (*t)[i] = binary.BigEndian.Uint64(buf64) } return case *fr.Element: @@ -148,7 +160,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 return case *[][]fr.Element: - var sliceLen uint32 if sliceLen, err = dec.readUint32(); err != nil { return } @@ -215,7 +226,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { _, err = t.setBytes(buf[:nbBytes], dec.subGroupCheck) return case 
*[]G1Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -255,7 +265,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -281,7 +291,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { return nil case *[]G2Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -321,7 +330,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -376,6 +385,18 @@ func (dec *Decoder) readUint32() (r uint32, err error) { return } +func (dec *Decoder) readUint64() (r uint64, err error) { + var read int + var buf [8]byte + read, err = io.ReadFull(dec.r, buf[:]) + dec.n += int64(read) + if err != nil { + return + } + r = binary.BigEndian.Uint64(buf[:]) + return +} + // isMaskInvalid returns true if the mask is invalid func isMaskInvalid(msb byte) bool { mData := msb & mMask @@ -467,6 +488,8 @@ func (enc *Encoder) encode(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -583,6 +606,8 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -679,25 +704,51 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { } } -func (enc *Encoder) writeUint64Slice(t []uint64) error { - buff := make([]byte, 64/8) - binary.BigEndian.PutUint64(buff, uint64(len(t))) - written, err := enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err +func (enc *Encoder) writeUint64Slice(t []uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return } for i := range t { - binary.BigEndian.PutUint64(buff, t[i]) - written, err = enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err + if err = enc.writeUint64(t[i]); err != nil { + return } } return nil } +func (enc *Encoder) writeUint64SliceSlice(t [][]uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return + } + for i := range t { + if err = enc.writeUint32(uint32(len(t[i]))); err != nil { + return + } + for j := range t[i] { + if err = enc.writeUint64(t[i][j]); err != nil { + return + } + } + } + return nil +} + +func (enc *Encoder) writeUint64(a uint64) error { + var buff [64 / 8]byte + binary.BigEndian.PutUint64(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + +func (enc *Encoder) writeUint32(a uint32) error { + var buff [32 / 8]byte + binary.BigEndian.PutUint32(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + // SizeOfG1AffineCompressed represents the size in bytes that a G1Affine need in binary form, compressed const SizeOfG1AffineCompressed = 48 diff --git a/ecc/bls12-377/marshal_test.go b/ecc/bls12-377/marshal_test.go index 72e4a000d..3bd60d610 100644 --- a/ecc/bls12-377/marshal_test.go +++ 
b/ecc/bls12-377/marshal_test.go @@ -53,6 +53,7 @@ func TestEncoder(t *testing.T) { var inJ []fr.Element var inK fr.Vector var inL [][]fr.Element + var inM [][]uint64 // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -70,12 +71,13 @@ func TestEncoder(t *testing.T) { inK = make(fr.Vector, 42) inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} + inM = [][]uint64{{1, 2}, {4}, {}} // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -101,8 +103,9 @@ func TestEncoder(t *testing.T) { var outJ []fr.Element var outK fr.Vector var outL [][]fr.Element + var outM [][]uint64 - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -145,6 +148,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inL, outL) { t.Fatal("decode(encode(slice²(elements))) failed") } + if !reflect.DeepEqual(inM, outM) { + t.Fatal("decode(encode(slice²(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bls12-378/fr/pedersen/pedersen.go b/ecc/bls12-378/fr/pedersen/pedersen.go index a526dd71c..e9720a649 100644 --- a/ecc/bls12-378/fr/pedersen/pedersen.go +++ b/ecc/bls12-378/fr/pedersen/pedersen.go @@ -18,10 +18,12 @@ package pedersen import ( "crypto/rand" + "crypto/sha256" "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-378" "github.com/consensys/gnark-crypto/ecc/bls12-378/fr" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "io" "math/big" ) @@ -51,7 +53,7 @@ func randomOnG2() (curve.G2Affine, error) { // TODO: Add to G2.go? 
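// Added note on the marshal.go/marshal_test.go hunks above (the same change is repeated per curve
// below); the byte layout here is read off the new helpers and is a sketch, not normative
// documentation. A []uint64 is now written as a big-endian uint32 length followed by 8-byte
// big-endian entries (previously the length field itself was 8 bytes), and a [][]uint64 as a
// uint32 outer length followed by each inner slice in the []uint64 format. The test value
// [][]uint64{{1, 2}, {4}, {}} therefore serializes as
//
//	00000003 | 00000002 0000000000000001 0000000000000002 | 00000001 0000000000000004 | 00000000
//
// so data written with the old []uint64 encoding is not readable with the new decoder.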
} } -func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { +func Setup(bases ...[]curve.G1Affine) (pk []ProvingKey, vk VerifyingKey, err error) { if vk.g, err = randomOnG2(); err != nil { return @@ -70,34 +72,139 @@ func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { sigmaInvNeg.Sub(fr.Modulus(), &sigmaInvNeg) vk.gRootSigmaNeg.ScalarMultiplication(&vk.g, &sigmaInvNeg) - pk.basisExpSigma = make([]curve.G1Affine, len(basis)) - for i := range basis { - pk.basisExpSigma[i].ScalarMultiplication(&basis[i], sigma) + pk = make([]ProvingKey, len(bases)) + for i := range bases { + pk[i].basisExpSigma = make([]curve.G1Affine, len(bases[i])) + for j := range bases[i] { + pk[i].basisExpSigma[j].ScalarMultiplication(&bases[i][j], sigma) + } + pk[i].basis = bases[i] + } + return +} + +func (pk *ProvingKey) ProveKnowledge(values []fr.Element) (pok curve.G1Affine, err error) { + if len(values) != len(pk.basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, // TODO Experiment } - pk.basis = basis + _, err = pok.MultiExp(pk.basisExpSigma, values, config) return } -func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, knowledgeProof curve.G1Affine, err error) { +func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, err error) { if len(values) != len(pk.basis) { - err = fmt.Errorf("unexpected number of values") + err = fmt.Errorf("must have as many values as basis elements") return } // TODO @gbotrel this will spawn more than one task, see // https://github.com/ConsenSys/gnark-crypto/issues/269 config := ecc.MultiExpConfig{ - NbTasks: 1, // TODO Experiment + NbTasks: 1, + } + _, err = commitment.MultiExp(pk.basis, values, config) + + return +} + +// BatchProve generates a single proof of knowledge for multiple commitments for faster verification +func BatchProve(pk []ProvingKey, values [][]fr.Element, fiatshamirSeeds ...[]byte) (pok curve.G1Affine, err error) { + if len(pk) != len(values) { + err = fmt.Errorf("must have as many value vectors as bases") + return } - if _, err = commitment.MultiExp(pk.basis, values, config); err != nil { + if len(pk) == 1 { // no need to fold + return pk[0].ProveKnowledge(values[0]) + } else if len(pk) == 0 { // nothing to do at all return } - _, err = knowledgeProof.MultiExp(pk.basisExpSigma, values, config) + offset := 0 + for i := range pk { + if len(values[i]) != len(pk[i].basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + offset += len(values[i]) + } + + var r fr.Element + if r, err = getChallenge(fiatshamirSeeds); err != nil { + return + } + + // prepare one amalgamated MSM + scaledValues := make([]fr.Element, offset) + basis := make([]curve.G1Affine, offset) + + copy(basis, pk[0].basisExpSigma) + copy(scaledValues, values[0]) + + offset = len(values[0]) + rI := r + for i := 1; i < len(pk); i++ { + copy(basis[offset:], pk[i].basisExpSigma) + for j := range pk[i].basis { + scaledValues[offset].Mul(&values[i][j], &rI) + offset++ + } + if i+1 < len(pk) { + rI.Mul(&rI, &r) + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = pok.MultiExp(basis, scaledValues, config) + return +} + +// FoldCommitments amalgamates 
multiple commitments into one, which can be verifier against a folded proof obtained from BatchProve +func FoldCommitments(commitments []curve.G1Affine, fiatshamirSeeds ...[]byte) (commitment curve.G1Affine, err error) { + + if len(commitments) == 1 { // no need to fold + commitment = commitments[0] + return + } else if len(commitments) == 0 { // nothing to do at all + return + } + + r := make([]fr.Element, len(commitments)) + r[0].SetOne() + if r[1], err = getChallenge(fiatshamirSeeds); err != nil { + return + } + for i := 2; i < len(commitments); i++ { + r[i].Mul(&r[i-1], &r[1]) + } + + for i := range commitments { // TODO @Tabaie Remove if MSM does subgroup check for you + if !commitments[i].IsInSubGroup() { + err = fmt.Errorf("subgroup check failed") + return + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = commitment.MultiExp(commitments, r, config) return } @@ -108,21 +215,36 @@ func (vk *VerifyingKey) Verify(commitment curve.G1Affine, knowledgeProof curve.G return fmt.Errorf("subgroup check failed") } - product, err := curve.Pair([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}) - if err != nil { + if isOne, err := curve.PairingCheck([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}); err != nil { return err + } else if !isOne { + return fmt.Errorf("proof rejected") } - if product.IsOne() { - return nil + return nil +} + +func getChallenge(fiatshamirSeeds [][]byte) (r fr.Element, err error) { + // incorporate user-provided seeds into the transcript + t := fiatshamir.NewTranscript(sha256.New(), "r") + for i := range fiatshamirSeeds { + if err = t.Bind("r", fiatshamirSeeds[i]); err != nil { + return + } + } + + // obtain the challenge + var rBytes []byte + + if rBytes, err = t.ComputeChallenge("r"); err != nil { + return } - return fmt.Errorf("proof rejected") + r.SetBytes(rBytes) // TODO @Tabaie Plonk challenge generation done the same way; replace both with hash to fr? 
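// Added restatement of the challenge derivation above: a SHA-256 backed transcript is created with
// a single challenge named "r", each caller-supplied seed is bound to "r" in order, and
// ComputeChallenge("r") hashes the bound data; SetBytes then reduces those bytes into an fr.Element.
// The seeds therefore act as domain separation: any change to them changes r.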
+ return } // Marshal -func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) - +func (pk *ProvingKey) writeTo(enc *curve.Encoder) (int64, error) { if err := enc.Encode(pk.basis); err != nil { return enc.BytesWritten(), err } @@ -132,6 +254,14 @@ func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { return enc.BytesWritten(), err } +func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w)) +} + +func (pk *ProvingKey) WriteRawTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { dec := curve.NewDecoder(r) @@ -150,7 +280,14 @@ func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { } func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) + return vk.writeTo(curve.NewEncoder(w)) +} + +func (vk *VerifyingKey) WriteRawTo(w io.Writer) (int64, error) { + return vk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + +func (vk *VerifyingKey) writeTo(enc *curve.Encoder) (int64, error) { var err error if err = enc.Encode(&vk.g); err != nil { @@ -161,7 +298,15 @@ func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { } func (vk *VerifyingKey) ReadFrom(r io.Reader) (int64, error) { - dec := curve.NewDecoder(r) + return vk.readFrom(r) +} + +func (vk *VerifyingKey) UnsafeReadFrom(r io.Reader) (int64, error) { + return vk.readFrom(r, curve.NoSubgroupChecks()) +} + +func (vk *VerifyingKey) readFrom(r io.Reader, decOptions ...func(*curve.Decoder)) (int64, error) { + dec := curve.NewDecoder(r, decOptions...) var err error if err = dec.Decode(&vk.g); err != nil { diff --git a/ecc/bls12-378/fr/pedersen/pedersen_test.go b/ecc/bls12-378/fr/pedersen/pedersen_test.go index b2119020d..f6d85ec2d 100644 --- a/ecc/bls12-378/fr/pedersen/pedersen_test.go +++ b/ecc/bls12-378/fr/pedersen/pedersen_test.go @@ -17,6 +17,7 @@ package pedersen import ( + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls12-378" "github.com/consensys/gnark-crypto/ecc/bls12-378/fr" "github.com/consensys/gnark-crypto/utils" @@ -67,15 +68,18 @@ func testCommit(t *testing.T, values ...interface{}) { basis := randomG1Slice(t, len(values)) var ( - pk ProvingKey + pk []ProvingKey vk VerifyingKey err error commitment, pok curve.G1Affine ) + valuesFr := interfaceSliceToFrSlice(t, values...) pk, vk, err = Setup(basis) assert.NoError(t, err) - commitment, pok, err = pk.Commit(interfaceSliceToFrSlice(t, values...)) + commitment, err = pk[0].Commit(valuesFr) + assert.NoError(t, err) + pok, err = pk[0].ProveKnowledge(valuesFr) assert.NoError(t, err) assert.NoError(t, vk.Verify(commitment, pok)) @@ -83,6 +87,71 @@ func testCommit(t *testing.T, values ...interface{}) { assert.NotNil(t, vk.Verify(commitment, pok)) } +func TestFoldProofs(t *testing.T) { + + values := [][]fr.Element{ + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + } + + bases := make([][]curve.G1Affine, len(values)) + for i := range bases { + bases[i] = randomG1Slice(t, len(values[i])) + } + + pk, vk, err := Setup(bases...) 
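// Added worked identity behind vk.Verify (restating the code, not new behaviour): with vk.g = g,
// vk.gRootSigmaNeg = -(1/σ)·g, and a well-formed pair (c, pok = σ·c),
//
//	e(c, g) · e(pok, -(1/σ)·g) = e(c, g) · e(σ·c, g)^(-1/σ) = e(c, g) · e(c, g)^(-1) = 1
//
// so PairingCheck returns true exactly when pok = σ·c (for points in the correct subgroups).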
+ assert.NoError(t, err) + + commitments := make([]curve.G1Affine, len(values)) + for i := range values { + commitments[i], err = pk[i].Commit(values[i]) + assert.NoError(t, err) + } + + t.Run("folding with zeros", func(t *testing.T) { + pokFolded, err := BatchProve(pk[:2], [][]fr.Element{ + values[0], + make([]fr.Element, len(values[1])), + }, []byte("test")) + assert.NoError(t, err) + var pok curve.G1Affine + pok, err = pk[0].ProveKnowledge(values[0]) + assert.NoError(t, err) + assert.Equal(t, pok, pokFolded) + }) + + t.Run("run empty", func(t *testing.T) { + var foldedCommitment curve.G1Affine + pok, err := BatchProve([]ProvingKey{}, [][]fr.Element{}, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments([]curve.G1Affine{}, []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + }) + + run := func(values [][]fr.Element) func(t *testing.T) { + return func(t *testing.T) { + + var foldedCommitment curve.G1Affine + pok, err := BatchProve(pk[:len(values)], values, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments(commitments[:len(values)], []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + + pok.Neg(&pok) + assert.NotNil(t, vk.Verify(foldedCommitment, pok)) + } + } + + for i := range values { + t.Run(fmt.Sprintf("folding %d", i+1), run(values[:i+1])) + } +} + func TestCommitToOne(t *testing.T) { testCommit(t, 1) } @@ -110,5 +179,7 @@ func TestMarshal(t *testing.T) { assert.NoError(t, err) t.Run("ProvingKey -> Bytes -> ProvingKey must remain identical.", utils.SerializationRoundTrip(&pk)) + t.Run("ProvingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&pk)) t.Run("VerifyingKey -> Bytes -> VerifyingKey must remain identical.", utils.SerializationRoundTrip(&vk)) + t.Run("VerifyingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&vk)) } diff --git a/ecc/bls12-378/marshal.go b/ecc/bls12-378/marshal.go index a69a7b3d0..c033fdaf4 100644 --- a/ecc/bls12-378/marshal.go +++ b/ecc/bls12-378/marshal.go @@ -103,24 +103,36 @@ func (dec *Decoder) Decode(v interface{}) (err error) { var buf [SizeOfG2AffineUncompressed]byte var read int + var sliceLen uint32 switch t := v.(type) { + case *[][]uint64: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + *t = make([][]uint64, sliceLen) + + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + (*t)[i] = make([]uint64, sliceLen) + for j := range (*t)[i] { + if (*t)[i][j], err = dec.readUint64(); err != nil { + return + } + } + } + return case *[]uint64: - buf64 := buf[:64/8] - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if sliceLen, err = dec.readUint32(); err != nil { return } - length := binary.BigEndian.Uint64(buf64) - *t = make([]uint64, length) + *t = make([]uint64, sliceLen) for i := range *t { - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if (*t)[i], err = dec.readUint64(); err != nil { return } - (*t)[i] = binary.BigEndian.Uint64(buf64) } return case *fr.Element: @@ -148,7 +160,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 return case *[][]fr.Element: - var sliceLen uint32 if sliceLen, err = dec.readUint32(); err != nil { return } @@ -215,7 +226,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { _, err = t.setBytes(buf[:nbBytes], dec.subGroupCheck) return case 
*[]G1Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -255,7 +265,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -281,7 +291,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { return nil case *[]G2Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -321,7 +330,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -376,6 +385,18 @@ func (dec *Decoder) readUint32() (r uint32, err error) { return } +func (dec *Decoder) readUint64() (r uint64, err error) { + var read int + var buf [8]byte + read, err = io.ReadFull(dec.r, buf[:]) + dec.n += int64(read) + if err != nil { + return + } + r = binary.BigEndian.Uint64(buf[:]) + return +} + // isMaskInvalid returns true if the mask is invalid func isMaskInvalid(msb byte) bool { mData := msb & mMask @@ -467,6 +488,8 @@ func (enc *Encoder) encode(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -583,6 +606,8 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -679,25 +704,51 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { } } -func (enc *Encoder) writeUint64Slice(t []uint64) error { - buff := make([]byte, 64/8) - binary.BigEndian.PutUint64(buff, uint64(len(t))) - written, err := enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err +func (enc *Encoder) writeUint64Slice(t []uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return } for i := range t { - binary.BigEndian.PutUint64(buff, t[i]) - written, err = enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err + if err = enc.writeUint64(t[i]); err != nil { + return } } return nil } +func (enc *Encoder) writeUint64SliceSlice(t [][]uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return + } + for i := range t { + if err = enc.writeUint32(uint32(len(t[i]))); err != nil { + return + } + for j := range t[i] { + if err = enc.writeUint64(t[i][j]); err != nil { + return + } + } + } + return nil +} + +func (enc *Encoder) writeUint64(a uint64) error { + var buff [64 / 8]byte + binary.BigEndian.PutUint64(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + +func (enc *Encoder) writeUint32(a uint32) error { + var buff [32 / 8]byte + binary.BigEndian.PutUint32(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + // SizeOfG1AffineCompressed represents the size in bytes that a G1Affine need in binary form, compressed const SizeOfG1AffineCompressed = 48 diff --git a/ecc/bls12-378/marshal_test.go b/ecc/bls12-378/marshal_test.go index 4e4b08e3d..8a4d7c04a 100644 --- a/ecc/bls12-378/marshal_test.go +++ 
b/ecc/bls12-378/marshal_test.go @@ -53,6 +53,7 @@ func TestEncoder(t *testing.T) { var inJ []fr.Element var inK fr.Vector var inL [][]fr.Element + var inM [][]uint64 // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -70,12 +71,13 @@ func TestEncoder(t *testing.T) { inK = make(fr.Vector, 42) inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} + inM = [][]uint64{{1, 2}, {4}, {}} // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -101,8 +103,9 @@ func TestEncoder(t *testing.T) { var outJ []fr.Element var outK fr.Vector var outL [][]fr.Element + var outM [][]uint64 - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -145,6 +148,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inL, outL) { t.Fatal("decode(encode(slice²(elements))) failed") } + if !reflect.DeepEqual(inM, outM) { + t.Fatal("decode(encode(slice²(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bls12-381/fr/pedersen/pedersen.go b/ecc/bls12-381/fr/pedersen/pedersen.go index e9efb6008..1ff86fc10 100644 --- a/ecc/bls12-381/fr/pedersen/pedersen.go +++ b/ecc/bls12-381/fr/pedersen/pedersen.go @@ -18,10 +18,12 @@ package pedersen import ( "crypto/rand" + "crypto/sha256" "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "io" "math/big" ) @@ -51,7 +53,7 @@ func randomOnG2() (curve.G2Affine, error) { // TODO: Add to G2.go? 
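// Added observation on the Setup function below: σ is sampled inside Setup, used to derive
// basisExpSigma and gRootSigmaNeg, and then discarded. Anyone who learns σ can forge a proof of
// knowledge for an arbitrary commitment c by computing σ·c, so Setup is effectively a trusted
// setup and should be run by a trusted party (or replaced with an MPC-style ceremony).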
} } -func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { +func Setup(bases ...[]curve.G1Affine) (pk []ProvingKey, vk VerifyingKey, err error) { if vk.g, err = randomOnG2(); err != nil { return @@ -70,34 +72,139 @@ func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { sigmaInvNeg.Sub(fr.Modulus(), &sigmaInvNeg) vk.gRootSigmaNeg.ScalarMultiplication(&vk.g, &sigmaInvNeg) - pk.basisExpSigma = make([]curve.G1Affine, len(basis)) - for i := range basis { - pk.basisExpSigma[i].ScalarMultiplication(&basis[i], sigma) + pk = make([]ProvingKey, len(bases)) + for i := range bases { + pk[i].basisExpSigma = make([]curve.G1Affine, len(bases[i])) + for j := range bases[i] { + pk[i].basisExpSigma[j].ScalarMultiplication(&bases[i][j], sigma) + } + pk[i].basis = bases[i] + } + return +} + +func (pk *ProvingKey) ProveKnowledge(values []fr.Element) (pok curve.G1Affine, err error) { + if len(values) != len(pk.basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, // TODO Experiment } - pk.basis = basis + _, err = pok.MultiExp(pk.basisExpSigma, values, config) return } -func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, knowledgeProof curve.G1Affine, err error) { +func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, err error) { if len(values) != len(pk.basis) { - err = fmt.Errorf("unexpected number of values") + err = fmt.Errorf("must have as many values as basis elements") return } // TODO @gbotrel this will spawn more than one task, see // https://github.com/ConsenSys/gnark-crypto/issues/269 config := ecc.MultiExpConfig{ - NbTasks: 1, // TODO Experiment + NbTasks: 1, + } + _, err = commitment.MultiExp(pk.basis, values, config) + + return +} + +// BatchProve generates a single proof of knowledge for multiple commitments for faster verification +func BatchProve(pk []ProvingKey, values [][]fr.Element, fiatshamirSeeds ...[]byte) (pok curve.G1Affine, err error) { + if len(pk) != len(values) { + err = fmt.Errorf("must have as many value vectors as bases") + return } - if _, err = commitment.MultiExp(pk.basis, values, config); err != nil { + if len(pk) == 1 { // no need to fold + return pk[0].ProveKnowledge(values[0]) + } else if len(pk) == 0 { // nothing to do at all return } - _, err = knowledgeProof.MultiExp(pk.basisExpSigma, values, config) + offset := 0 + for i := range pk { + if len(values[i]) != len(pk[i].basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + offset += len(values[i]) + } + + var r fr.Element + if r, err = getChallenge(fiatshamirSeeds); err != nil { + return + } + + // prepare one amalgamated MSM + scaledValues := make([]fr.Element, offset) + basis := make([]curve.G1Affine, offset) + + copy(basis, pk[0].basisExpSigma) + copy(scaledValues, values[0]) + + offset = len(values[0]) + rI := r + for i := 1; i < len(pk); i++ { + copy(basis[offset:], pk[i].basisExpSigma) + for j := range pk[i].basis { + scaledValues[offset].Mul(&values[i][j], &rI) + offset++ + } + if i+1 < len(pk) { + rI.Mul(&rI, &r) + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = pok.MultiExp(basis, scaledValues, config) + return +} + +// FoldCommitments amalgamates 
multiple commitments into one, which can be verifier against a folded proof obtained from BatchProve +func FoldCommitments(commitments []curve.G1Affine, fiatshamirSeeds ...[]byte) (commitment curve.G1Affine, err error) { + + if len(commitments) == 1 { // no need to fold + commitment = commitments[0] + return + } else if len(commitments) == 0 { // nothing to do at all + return + } + + r := make([]fr.Element, len(commitments)) + r[0].SetOne() + if r[1], err = getChallenge(fiatshamirSeeds); err != nil { + return + } + for i := 2; i < len(commitments); i++ { + r[i].Mul(&r[i-1], &r[1]) + } + + for i := range commitments { // TODO @Tabaie Remove if MSM does subgroup check for you + if !commitments[i].IsInSubGroup() { + err = fmt.Errorf("subgroup check failed") + return + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = commitment.MultiExp(commitments, r, config) return } @@ -108,21 +215,36 @@ func (vk *VerifyingKey) Verify(commitment curve.G1Affine, knowledgeProof curve.G return fmt.Errorf("subgroup check failed") } - product, err := curve.Pair([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}) - if err != nil { + if isOne, err := curve.PairingCheck([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}); err != nil { return err + } else if !isOne { + return fmt.Errorf("proof rejected") } - if product.IsOne() { - return nil + return nil +} + +func getChallenge(fiatshamirSeeds [][]byte) (r fr.Element, err error) { + // incorporate user-provided seeds into the transcript + t := fiatshamir.NewTranscript(sha256.New(), "r") + for i := range fiatshamirSeeds { + if err = t.Bind("r", fiatshamirSeeds[i]); err != nil { + return + } + } + + // obtain the challenge + var rBytes []byte + + if rBytes, err = t.ComputeChallenge("r"); err != nil { + return } - return fmt.Errorf("proof rejected") + r.SetBytes(rBytes) // TODO @Tabaie Plonk challenge generation done the same way; replace both with hash to fr? 
+ return } // Marshal -func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) - +func (pk *ProvingKey) writeTo(enc *curve.Encoder) (int64, error) { if err := enc.Encode(pk.basis); err != nil { return enc.BytesWritten(), err } @@ -132,6 +254,14 @@ func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { return enc.BytesWritten(), err } +func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w)) +} + +func (pk *ProvingKey) WriteRawTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { dec := curve.NewDecoder(r) @@ -150,7 +280,14 @@ func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { } func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) + return vk.writeTo(curve.NewEncoder(w)) +} + +func (vk *VerifyingKey) WriteRawTo(w io.Writer) (int64, error) { + return vk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + +func (vk *VerifyingKey) writeTo(enc *curve.Encoder) (int64, error) { var err error if err = enc.Encode(&vk.g); err != nil { @@ -161,7 +298,15 @@ func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { } func (vk *VerifyingKey) ReadFrom(r io.Reader) (int64, error) { - dec := curve.NewDecoder(r) + return vk.readFrom(r) +} + +func (vk *VerifyingKey) UnsafeReadFrom(r io.Reader) (int64, error) { + return vk.readFrom(r, curve.NoSubgroupChecks()) +} + +func (vk *VerifyingKey) readFrom(r io.Reader, decOptions ...func(*curve.Decoder)) (int64, error) { + dec := curve.NewDecoder(r, decOptions...) var err error if err = dec.Decode(&vk.g); err != nil { diff --git a/ecc/bls12-381/fr/pedersen/pedersen_test.go b/ecc/bls12-381/fr/pedersen/pedersen_test.go index 78e04273e..5f499c29e 100644 --- a/ecc/bls12-381/fr/pedersen/pedersen_test.go +++ b/ecc/bls12-381/fr/pedersen/pedersen_test.go @@ -17,6 +17,7 @@ package pedersen import ( + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" "github.com/consensys/gnark-crypto/utils" @@ -67,15 +68,18 @@ func testCommit(t *testing.T, values ...interface{}) { basis := randomG1Slice(t, len(values)) var ( - pk ProvingKey + pk []ProvingKey vk VerifyingKey err error commitment, pok curve.G1Affine ) + valuesFr := interfaceSliceToFrSlice(t, values...) pk, vk, err = Setup(basis) assert.NoError(t, err) - commitment, pok, err = pk.Commit(interfaceSliceToFrSlice(t, values...)) + commitment, err = pk[0].Commit(valuesFr) + assert.NoError(t, err) + pok, err = pk[0].ProveKnowledge(valuesFr) assert.NoError(t, err) assert.NoError(t, vk.Verify(commitment, pok)) @@ -83,6 +87,71 @@ func testCommit(t *testing.T, values ...interface{}) { assert.NotNil(t, vk.Verify(commitment, pok)) } +func TestFoldProofs(t *testing.T) { + + values := [][]fr.Element{ + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + } + + bases := make([][]curve.G1Affine, len(values)) + for i := range bases { + bases[i] = randomG1Slice(t, len(values[i])) + } + + pk, vk, err := Setup(bases...) 
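// Added sketch of the folding identity this test exercises: with challenge r derived from the
// shared seeds, BatchProve computes pok = Σ_i r^i·(σ·c_i) and FoldCommitments computes
// c = Σ_i r^i·c_i, where c_i is the commitment under pk[i]; the first term carries weight
// r^0 = 1, matching values[0] being copied unscaled in BatchProve. By linearity pok = σ·c, so
// vk.Verify(c, pok) accepts, while a proof that is wrong for any single c_i survives the random
// weighting only with negligible probability.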
+ assert.NoError(t, err) + + commitments := make([]curve.G1Affine, len(values)) + for i := range values { + commitments[i], err = pk[i].Commit(values[i]) + assert.NoError(t, err) + } + + t.Run("folding with zeros", func(t *testing.T) { + pokFolded, err := BatchProve(pk[:2], [][]fr.Element{ + values[0], + make([]fr.Element, len(values[1])), + }, []byte("test")) + assert.NoError(t, err) + var pok curve.G1Affine + pok, err = pk[0].ProveKnowledge(values[0]) + assert.NoError(t, err) + assert.Equal(t, pok, pokFolded) + }) + + t.Run("run empty", func(t *testing.T) { + var foldedCommitment curve.G1Affine + pok, err := BatchProve([]ProvingKey{}, [][]fr.Element{}, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments([]curve.G1Affine{}, []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + }) + + run := func(values [][]fr.Element) func(t *testing.T) { + return func(t *testing.T) { + + var foldedCommitment curve.G1Affine + pok, err := BatchProve(pk[:len(values)], values, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments(commitments[:len(values)], []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + + pok.Neg(&pok) + assert.NotNil(t, vk.Verify(foldedCommitment, pok)) + } + } + + for i := range values { + t.Run(fmt.Sprintf("folding %d", i+1), run(values[:i+1])) + } +} + func TestCommitToOne(t *testing.T) { testCommit(t, 1) } @@ -110,5 +179,7 @@ func TestMarshal(t *testing.T) { assert.NoError(t, err) t.Run("ProvingKey -> Bytes -> ProvingKey must remain identical.", utils.SerializationRoundTrip(&pk)) + t.Run("ProvingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&pk)) t.Run("VerifyingKey -> Bytes -> VerifyingKey must remain identical.", utils.SerializationRoundTrip(&vk)) + t.Run("VerifyingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&vk)) } diff --git a/ecc/bls12-381/marshal.go b/ecc/bls12-381/marshal.go index dc1e76eec..324950e09 100644 --- a/ecc/bls12-381/marshal.go +++ b/ecc/bls12-381/marshal.go @@ -103,24 +103,36 @@ func (dec *Decoder) Decode(v interface{}) (err error) { var buf [SizeOfG2AffineUncompressed]byte var read int + var sliceLen uint32 switch t := v.(type) { + case *[][]uint64: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + *t = make([][]uint64, sliceLen) + + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + (*t)[i] = make([]uint64, sliceLen) + for j := range (*t)[i] { + if (*t)[i][j], err = dec.readUint64(); err != nil { + return + } + } + } + return case *[]uint64: - buf64 := buf[:64/8] - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if sliceLen, err = dec.readUint32(); err != nil { return } - length := binary.BigEndian.Uint64(buf64) - *t = make([]uint64, length) + *t = make([]uint64, sliceLen) for i := range *t { - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if (*t)[i], err = dec.readUint64(); err != nil { return } - (*t)[i] = binary.BigEndian.Uint64(buf64) } return case *fr.Element: @@ -148,7 +160,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 return case *[][]fr.Element: - var sliceLen uint32 if sliceLen, err = dec.readUint32(); err != nil { return } @@ -215,7 +226,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { _, err = t.setBytes(buf[:nbBytes], dec.subGroupCheck) return case 
*[]G1Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -255,7 +265,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -281,7 +291,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { return nil case *[]G2Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -321,7 +330,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -376,6 +385,18 @@ func (dec *Decoder) readUint32() (r uint32, err error) { return } +func (dec *Decoder) readUint64() (r uint64, err error) { + var read int + var buf [8]byte + read, err = io.ReadFull(dec.r, buf[:]) + dec.n += int64(read) + if err != nil { + return + } + r = binary.BigEndian.Uint64(buf[:]) + return +} + // isMaskInvalid returns true if the mask is invalid func isMaskInvalid(msb byte) bool { mData := msb & mMask @@ -467,6 +488,8 @@ func (enc *Encoder) encode(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -583,6 +606,8 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -679,25 +704,51 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { } } -func (enc *Encoder) writeUint64Slice(t []uint64) error { - buff := make([]byte, 64/8) - binary.BigEndian.PutUint64(buff, uint64(len(t))) - written, err := enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err +func (enc *Encoder) writeUint64Slice(t []uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return } for i := range t { - binary.BigEndian.PutUint64(buff, t[i]) - written, err = enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err + if err = enc.writeUint64(t[i]); err != nil { + return } } return nil } +func (enc *Encoder) writeUint64SliceSlice(t [][]uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return + } + for i := range t { + if err = enc.writeUint32(uint32(len(t[i]))); err != nil { + return + } + for j := range t[i] { + if err = enc.writeUint64(t[i][j]); err != nil { + return + } + } + } + return nil +} + +func (enc *Encoder) writeUint64(a uint64) error { + var buff [64 / 8]byte + binary.BigEndian.PutUint64(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + +func (enc *Encoder) writeUint32(a uint32) error { + var buff [32 / 8]byte + binary.BigEndian.PutUint32(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + // SizeOfG1AffineCompressed represents the size in bytes that a G1Affine need in binary form, compressed const SizeOfG1AffineCompressed = 48 diff --git a/ecc/bls12-381/marshal_test.go b/ecc/bls12-381/marshal_test.go index 3c0d03376..6836ab92f 100644 --- a/ecc/bls12-381/marshal_test.go +++ 
b/ecc/bls12-381/marshal_test.go @@ -53,6 +53,7 @@ func TestEncoder(t *testing.T) { var inJ []fr.Element var inK fr.Vector var inL [][]fr.Element + var inM [][]uint64 // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -70,12 +71,13 @@ func TestEncoder(t *testing.T) { inK = make(fr.Vector, 42) inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} + inM = [][]uint64{{1, 2}, {4}, {}} // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -101,8 +103,9 @@ func TestEncoder(t *testing.T) { var outJ []fr.Element var outK fr.Vector var outL [][]fr.Element + var outM [][]uint64 - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -145,6 +148,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inL, outL) { t.Fatal("decode(encode(slice²(elements))) failed") } + if !reflect.DeepEqual(inM, outM) { + t.Fatal("decode(encode(slice²(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bls24-315/fr/pedersen/pedersen.go b/ecc/bls24-315/fr/pedersen/pedersen.go index 0580e8449..661351926 100644 --- a/ecc/bls24-315/fr/pedersen/pedersen.go +++ b/ecc/bls24-315/fr/pedersen/pedersen.go @@ -18,10 +18,12 @@ package pedersen import ( "crypto/rand" + "crypto/sha256" "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "io" "math/big" ) @@ -51,7 +53,7 @@ func randomOnG2() (curve.G2Affine, error) { // TODO: Add to G2.go? 
} } -func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { +func Setup(bases ...[]curve.G1Affine) (pk []ProvingKey, vk VerifyingKey, err error) { if vk.g, err = randomOnG2(); err != nil { return @@ -70,34 +72,139 @@ func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { sigmaInvNeg.Sub(fr.Modulus(), &sigmaInvNeg) vk.gRootSigmaNeg.ScalarMultiplication(&vk.g, &sigmaInvNeg) - pk.basisExpSigma = make([]curve.G1Affine, len(basis)) - for i := range basis { - pk.basisExpSigma[i].ScalarMultiplication(&basis[i], sigma) + pk = make([]ProvingKey, len(bases)) + for i := range bases { + pk[i].basisExpSigma = make([]curve.G1Affine, len(bases[i])) + for j := range bases[i] { + pk[i].basisExpSigma[j].ScalarMultiplication(&bases[i][j], sigma) + } + pk[i].basis = bases[i] + } + return +} + +func (pk *ProvingKey) ProveKnowledge(values []fr.Element) (pok curve.G1Affine, err error) { + if len(values) != len(pk.basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, // TODO Experiment } - pk.basis = basis + _, err = pok.MultiExp(pk.basisExpSigma, values, config) return } -func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, knowledgeProof curve.G1Affine, err error) { +func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, err error) { if len(values) != len(pk.basis) { - err = fmt.Errorf("unexpected number of values") + err = fmt.Errorf("must have as many values as basis elements") return } // TODO @gbotrel this will spawn more than one task, see // https://github.com/ConsenSys/gnark-crypto/issues/269 config := ecc.MultiExpConfig{ - NbTasks: 1, // TODO Experiment + NbTasks: 1, + } + _, err = commitment.MultiExp(pk.basis, values, config) + + return +} + +// BatchProve generates a single proof of knowledge for multiple commitments for faster verification +func BatchProve(pk []ProvingKey, values [][]fr.Element, fiatshamirSeeds ...[]byte) (pok curve.G1Affine, err error) { + if len(pk) != len(values) { + err = fmt.Errorf("must have as many value vectors as bases") + return } - if _, err = commitment.MultiExp(pk.basis, values, config); err != nil { + if len(pk) == 1 { // no need to fold + return pk[0].ProveKnowledge(values[0]) + } else if len(pk) == 0 { // nothing to do at all return } - _, err = knowledgeProof.MultiExp(pk.basisExpSigma, values, config) + offset := 0 + for i := range pk { + if len(values[i]) != len(pk[i].basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + offset += len(values[i]) + } + + var r fr.Element + if r, err = getChallenge(fiatshamirSeeds); err != nil { + return + } + + // prepare one amalgamated MSM + scaledValues := make([]fr.Element, offset) + basis := make([]curve.G1Affine, offset) + + copy(basis, pk[0].basisExpSigma) + copy(scaledValues, values[0]) + + offset = len(values[0]) + rI := r + for i := 1; i < len(pk); i++ { + copy(basis[offset:], pk[i].basisExpSigma) + for j := range pk[i].basis { + scaledValues[offset].Mul(&values[i][j], &rI) + offset++ + } + if i+1 < len(pk) { + rI.Mul(&rI, &r) + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = pok.MultiExp(basis, scaledValues, config) + return +} + +// FoldCommitments amalgamates 
multiple commitments into one, which can be verified against a folded proof obtained from BatchProve +func FoldCommitments(commitments []curve.G1Affine, fiatshamirSeeds ...[]byte) (commitment curve.G1Affine, err error) { + + if len(commitments) == 1 { // no need to fold + commitment = commitments[0] + return + } else if len(commitments) == 0 { // nothing to do at all + return + } + + r := make([]fr.Element, len(commitments)) + r[0].SetOne() + if r[1], err = getChallenge(fiatshamirSeeds); err != nil { + return + } + for i := 2; i < len(commitments); i++ { + r[i].Mul(&r[i-1], &r[1]) + } + + for i := range commitments { // TODO @Tabaie Remove if MSM does subgroup check for you + if !commitments[i].IsInSubGroup() { + err = fmt.Errorf("subgroup check failed") + return + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = commitment.MultiExp(commitments, r, config) return } @@ -108,21 +215,36 @@ func (vk *VerifyingKey) Verify(commitment curve.G1Affine, knowledgeProof curve.G return fmt.Errorf("subgroup check failed") } - product, err := curve.Pair([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}) - if err != nil { + if isOne, err := curve.PairingCheck([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}); err != nil { return err + } else if !isOne { + return fmt.Errorf("proof rejected") } - if product.IsOne() { - return nil + return nil +} + +func getChallenge(fiatshamirSeeds [][]byte) (r fr.Element, err error) { + // incorporate user-provided seeds into the transcript + t := fiatshamir.NewTranscript(sha256.New(), "r") + for i := range fiatshamirSeeds { + if err = t.Bind("r", fiatshamirSeeds[i]); err != nil { + return + } + } + + // obtain the challenge + var rBytes []byte + + if rBytes, err = t.ComputeChallenge("r"); err != nil { + return } - return fmt.Errorf("proof rejected") + r.SetBytes(rBytes) // TODO @Tabaie Plonk challenge generation done the same way; replace both with hash to fr? 
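// Illustrative usage sketch (editor's addition, not part of the diff): end-to-end use of the
// API introduced above: Setup over several bases, per-key Commit, a single folded proof via
// BatchProve, and one Verify call against the folded commitment. It assumes the bls24-315
// packages from this file; the hash-to-curve domain tag and the Fiat-Shamir seed are arbitrary.
package main

import (
	"fmt"

	curve "github.com/consensys/gnark-crypto/ecc/bls24-315"
	"github.com/consensys/gnark-crypto/ecc/bls24-315/fr"
	"github.com/consensys/gnark-crypto/ecc/bls24-315/fr/pedersen"
)

func main() {
	// two bases of lengths 2 and 3; in practice bases must be sampled so that nobody knows
	// their discrete logarithms (hash-to-curve is one way to do that)
	bases := make([][]curve.G1Affine, 2)
	for i, n := range []int{2, 3} {
		bases[i] = make([]curve.G1Affine, n)
		for j := range bases[i] {
			bases[i][j], _ = curve.HashToG1([]byte{byte(i), byte(j)}, []byte("pedersen-example"))
		}
	}

	// one setup produces a proving key per basis and a single verifying key
	pk, vk, err := pedersen.Setup(bases...)
	if err != nil {
		panic(err)
	}

	// one value vector and one commitment per basis
	values := [][]fr.Element{make([]fr.Element, 2), make([]fr.Element, 3)}
	commitments := make([]curve.G1Affine, len(values))
	for i := range values {
		for j := range values[i] {
			values[i][j].SetRandom()
		}
		if commitments[i], err = pk[i].Commit(values[i]); err != nil {
			panic(err)
		}
	}

	// BatchProve and FoldCommitments must be given the same Fiat-Shamir seeds,
	// since the folding challenge is derived from the seeds alone
	pok, err := pedersen.BatchProve(pk, values, []byte("example-transcript"))
	if err != nil {
		panic(err)
	}
	folded, err := pedersen.FoldCommitments(commitments, []byte("example-transcript"))
	if err != nil {
		panic(err)
	}
	fmt.Println("batch verification:", vk.Verify(folded, pok)) // expect <nil>
}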
+ return } // Marshal -func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) - +func (pk *ProvingKey) writeTo(enc *curve.Encoder) (int64, error) { if err := enc.Encode(pk.basis); err != nil { return enc.BytesWritten(), err } @@ -132,6 +254,14 @@ func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { return enc.BytesWritten(), err } +func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w)) +} + +func (pk *ProvingKey) WriteRawTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { dec := curve.NewDecoder(r) @@ -150,7 +280,14 @@ func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { } func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) + return vk.writeTo(curve.NewEncoder(w)) +} + +func (vk *VerifyingKey) WriteRawTo(w io.Writer) (int64, error) { + return vk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + +func (vk *VerifyingKey) writeTo(enc *curve.Encoder) (int64, error) { var err error if err = enc.Encode(&vk.g); err != nil { @@ -161,7 +298,15 @@ func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { } func (vk *VerifyingKey) ReadFrom(r io.Reader) (int64, error) { - dec := curve.NewDecoder(r) + return vk.readFrom(r) +} + +func (vk *VerifyingKey) UnsafeReadFrom(r io.Reader) (int64, error) { + return vk.readFrom(r, curve.NoSubgroupChecks()) +} + +func (vk *VerifyingKey) readFrom(r io.Reader, decOptions ...func(*curve.Decoder)) (int64, error) { + dec := curve.NewDecoder(r, decOptions...) var err error if err = dec.Decode(&vk.g); err != nil { diff --git a/ecc/bls24-315/fr/pedersen/pedersen_test.go b/ecc/bls24-315/fr/pedersen/pedersen_test.go index 475026061..cec9770c1 100644 --- a/ecc/bls24-315/fr/pedersen/pedersen_test.go +++ b/ecc/bls24-315/fr/pedersen/pedersen_test.go @@ -17,6 +17,7 @@ package pedersen import ( + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" "github.com/consensys/gnark-crypto/utils" @@ -67,15 +68,18 @@ func testCommit(t *testing.T, values ...interface{}) { basis := randomG1Slice(t, len(values)) var ( - pk ProvingKey + pk []ProvingKey vk VerifyingKey err error commitment, pok curve.G1Affine ) + valuesFr := interfaceSliceToFrSlice(t, values...) pk, vk, err = Setup(basis) assert.NoError(t, err) - commitment, pok, err = pk.Commit(interfaceSliceToFrSlice(t, values...)) + commitment, err = pk[0].Commit(valuesFr) + assert.NoError(t, err) + pok, err = pk[0].ProveKnowledge(valuesFr) assert.NoError(t, err) assert.NoError(t, vk.Verify(commitment, pok)) @@ -83,6 +87,71 @@ func testCommit(t *testing.T, values ...interface{}) { assert.NotNil(t, vk.Verify(commitment, pok)) } +func TestFoldProofs(t *testing.T) { + + values := [][]fr.Element{ + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + } + + bases := make([][]curve.G1Affine, len(values)) + for i := range bases { + bases[i] = randomG1Slice(t, len(values[i])) + } + + pk, vk, err := Setup(bases...) 
+ assert.NoError(t, err) + + commitments := make([]curve.G1Affine, len(values)) + for i := range values { + commitments[i], err = pk[i].Commit(values[i]) + assert.NoError(t, err) + } + + t.Run("folding with zeros", func(t *testing.T) { + pokFolded, err := BatchProve(pk[:2], [][]fr.Element{ + values[0], + make([]fr.Element, len(values[1])), + }, []byte("test")) + assert.NoError(t, err) + var pok curve.G1Affine + pok, err = pk[0].ProveKnowledge(values[0]) + assert.NoError(t, err) + assert.Equal(t, pok, pokFolded) + }) + + t.Run("run empty", func(t *testing.T) { + var foldedCommitment curve.G1Affine + pok, err := BatchProve([]ProvingKey{}, [][]fr.Element{}, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments([]curve.G1Affine{}, []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + }) + + run := func(values [][]fr.Element) func(t *testing.T) { + return func(t *testing.T) { + + var foldedCommitment curve.G1Affine + pok, err := BatchProve(pk[:len(values)], values, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments(commitments[:len(values)], []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + + pok.Neg(&pok) + assert.NotNil(t, vk.Verify(foldedCommitment, pok)) + } + } + + for i := range values { + t.Run(fmt.Sprintf("folding %d", i+1), run(values[:i+1])) + } +} + func TestCommitToOne(t *testing.T) { testCommit(t, 1) } @@ -110,5 +179,7 @@ func TestMarshal(t *testing.T) { assert.NoError(t, err) t.Run("ProvingKey -> Bytes -> ProvingKey must remain identical.", utils.SerializationRoundTrip(&pk)) + t.Run("ProvingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&pk)) t.Run("VerifyingKey -> Bytes -> VerifyingKey must remain identical.", utils.SerializationRoundTrip(&vk)) + t.Run("VerifyingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&vk)) } diff --git a/ecc/bls24-315/marshal.go b/ecc/bls24-315/marshal.go index 0b22441ef..97dab9a10 100644 --- a/ecc/bls24-315/marshal.go +++ b/ecc/bls24-315/marshal.go @@ -103,24 +103,36 @@ func (dec *Decoder) Decode(v interface{}) (err error) { var buf [SizeOfG2AffineUncompressed]byte var read int + var sliceLen uint32 switch t := v.(type) { + case *[][]uint64: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + *t = make([][]uint64, sliceLen) + + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + (*t)[i] = make([]uint64, sliceLen) + for j := range (*t)[i] { + if (*t)[i][j], err = dec.readUint64(); err != nil { + return + } + } + } + return case *[]uint64: - buf64 := buf[:64/8] - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if sliceLen, err = dec.readUint32(); err != nil { return } - length := binary.BigEndian.Uint64(buf64) - *t = make([]uint64, length) + *t = make([]uint64, sliceLen) for i := range *t { - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if (*t)[i], err = dec.readUint64(); err != nil { return } - (*t)[i] = binary.BigEndian.Uint64(buf64) } return case *fr.Element: @@ -148,7 +160,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 return case *[][]fr.Element: - var sliceLen uint32 if sliceLen, err = dec.readUint32(); err != nil { return } @@ -215,7 +226,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { _, err = t.setBytes(buf[:nbBytes], dec.subGroupCheck) return case 
*[]G1Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -255,7 +265,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -281,7 +291,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { return nil case *[]G2Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -321,7 +330,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -376,6 +385,18 @@ func (dec *Decoder) readUint32() (r uint32, err error) { return } +func (dec *Decoder) readUint64() (r uint64, err error) { + var read int + var buf [8]byte + read, err = io.ReadFull(dec.r, buf[:]) + dec.n += int64(read) + if err != nil { + return + } + r = binary.BigEndian.Uint64(buf[:]) + return +} + // isMaskInvalid returns true if the mask is invalid func isMaskInvalid(msb byte) bool { mData := msb & mMask @@ -467,6 +488,8 @@ func (enc *Encoder) encode(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -583,6 +606,8 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -679,25 +704,51 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { } } -func (enc *Encoder) writeUint64Slice(t []uint64) error { - buff := make([]byte, 64/8) - binary.BigEndian.PutUint64(buff, uint64(len(t))) - written, err := enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err +func (enc *Encoder) writeUint64Slice(t []uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return } for i := range t { - binary.BigEndian.PutUint64(buff, t[i]) - written, err = enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err + if err = enc.writeUint64(t[i]); err != nil { + return } } return nil } +func (enc *Encoder) writeUint64SliceSlice(t [][]uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return + } + for i := range t { + if err = enc.writeUint32(uint32(len(t[i]))); err != nil { + return + } + for j := range t[i] { + if err = enc.writeUint64(t[i][j]); err != nil { + return + } + } + } + return nil +} + +func (enc *Encoder) writeUint64(a uint64) error { + var buff [64 / 8]byte + binary.BigEndian.PutUint64(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + +func (enc *Encoder) writeUint32(a uint32) error { + var buff [32 / 8]byte + binary.BigEndian.PutUint32(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + // SizeOfG1AffineCompressed represents the size in bytes that a G1Affine need in binary form, compressed const SizeOfG1AffineCompressed = 40 diff --git a/ecc/bls24-315/marshal_test.go b/ecc/bls24-315/marshal_test.go index e5c665fa0..db14955a3 100644 --- a/ecc/bls24-315/marshal_test.go +++ 
b/ecc/bls24-315/marshal_test.go @@ -53,6 +53,7 @@ func TestEncoder(t *testing.T) { var inJ []fr.Element var inK fr.Vector var inL [][]fr.Element + var inM [][]uint64 // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -70,12 +71,13 @@ func TestEncoder(t *testing.T) { inK = make(fr.Vector, 42) inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} + inM = [][]uint64{{1, 2}, {4}, {}} // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -101,8 +103,9 @@ func TestEncoder(t *testing.T) { var outJ []fr.Element var outK fr.Vector var outL [][]fr.Element + var outM [][]uint64 - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -145,6 +148,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inL, outL) { t.Fatal("decode(encode(slice²(elements))) failed") } + if !reflect.DeepEqual(inM, outM) { + t.Fatal("decode(encode(slice²(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bls24-317/fr/pedersen/pedersen.go b/ecc/bls24-317/fr/pedersen/pedersen.go index 3b8fc2509..0e06ebcc9 100644 --- a/ecc/bls24-317/fr/pedersen/pedersen.go +++ b/ecc/bls24-317/fr/pedersen/pedersen.go @@ -18,10 +18,12 @@ package pedersen import ( "crypto/rand" + "crypto/sha256" "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "io" "math/big" ) @@ -51,7 +53,7 @@ func randomOnG2() (curve.G2Affine, error) { // TODO: Add to G2.go? 
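// Illustrative sketch (editor's addition, not part of the diff): the wire format implied by
// writeUint64SliceSlice/readUint64 above is a big-endian uint32 outer length, then for each
// inner slice a big-endian uint32 length followed by its big-endian uint64 words. Note that
// []uint64 now carries a uint32 length prefix where the previous code wrote a uint64 one, so
// the encoding of uint64 slices is not backward compatible.
package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

// encodeUint64SliceSlice mirrors the layout produced by the curve Encoder for [][]uint64.
func encodeUint64SliceSlice(t [][]uint64) []byte {
	var buf bytes.Buffer
	binary.Write(&buf, binary.BigEndian, uint32(len(t))) // outer length
	for _, inner := range t {
		binary.Write(&buf, binary.BigEndian, uint32(len(inner))) // inner length
		for _, w := range inner {
			binary.Write(&buf, binary.BigEndian, w) // 8-byte big-endian word
		}
	}
	return buf.Bytes()
}

func main() {
	// same test vector as marshal_test.go: three inner slices of lengths 2, 1 and 0
	fmt.Printf("% x\n", encodeUint64SliceSlice([][]uint64{{1, 2}, {4}, {}}))
	// layout: 00000003 | 00000002 then words 1, 2 | 00000001 then word 4 | 00000000
}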
} } -func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { +func Setup(bases ...[]curve.G1Affine) (pk []ProvingKey, vk VerifyingKey, err error) { if vk.g, err = randomOnG2(); err != nil { return @@ -70,34 +72,139 @@ func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { sigmaInvNeg.Sub(fr.Modulus(), &sigmaInvNeg) vk.gRootSigmaNeg.ScalarMultiplication(&vk.g, &sigmaInvNeg) - pk.basisExpSigma = make([]curve.G1Affine, len(basis)) - for i := range basis { - pk.basisExpSigma[i].ScalarMultiplication(&basis[i], sigma) + pk = make([]ProvingKey, len(bases)) + for i := range bases { + pk[i].basisExpSigma = make([]curve.G1Affine, len(bases[i])) + for j := range bases[i] { + pk[i].basisExpSigma[j].ScalarMultiplication(&bases[i][j], sigma) + } + pk[i].basis = bases[i] + } + return +} + +func (pk *ProvingKey) ProveKnowledge(values []fr.Element) (pok curve.G1Affine, err error) { + if len(values) != len(pk.basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, // TODO Experiment } - pk.basis = basis + _, err = pok.MultiExp(pk.basisExpSigma, values, config) return } -func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, knowledgeProof curve.G1Affine, err error) { +func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, err error) { if len(values) != len(pk.basis) { - err = fmt.Errorf("unexpected number of values") + err = fmt.Errorf("must have as many values as basis elements") return } // TODO @gbotrel this will spawn more than one task, see // https://github.com/ConsenSys/gnark-crypto/issues/269 config := ecc.MultiExpConfig{ - NbTasks: 1, // TODO Experiment + NbTasks: 1, + } + _, err = commitment.MultiExp(pk.basis, values, config) + + return +} + +// BatchProve generates a single proof of knowledge for multiple commitments for faster verification +func BatchProve(pk []ProvingKey, values [][]fr.Element, fiatshamirSeeds ...[]byte) (pok curve.G1Affine, err error) { + if len(pk) != len(values) { + err = fmt.Errorf("must have as many value vectors as bases") + return } - if _, err = commitment.MultiExp(pk.basis, values, config); err != nil { + if len(pk) == 1 { // no need to fold + return pk[0].ProveKnowledge(values[0]) + } else if len(pk) == 0 { // nothing to do at all return } - _, err = knowledgeProof.MultiExp(pk.basisExpSigma, values, config) + offset := 0 + for i := range pk { + if len(values[i]) != len(pk[i].basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + offset += len(values[i]) + } + + var r fr.Element + if r, err = getChallenge(fiatshamirSeeds); err != nil { + return + } + + // prepare one amalgamated MSM + scaledValues := make([]fr.Element, offset) + basis := make([]curve.G1Affine, offset) + + copy(basis, pk[0].basisExpSigma) + copy(scaledValues, values[0]) + + offset = len(values[0]) + rI := r + for i := 1; i < len(pk); i++ { + copy(basis[offset:], pk[i].basisExpSigma) + for j := range pk[i].basis { + scaledValues[offset].Mul(&values[i][j], &rI) + offset++ + } + if i+1 < len(pk) { + rI.Mul(&rI, &r) + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = pok.MultiExp(basis, scaledValues, config) + return +} + +// FoldCommitments amalgamates 
multiple commitments into one, which can be verified against a folded proof obtained from BatchProve +func FoldCommitments(commitments []curve.G1Affine, fiatshamirSeeds ...[]byte) (commitment curve.G1Affine, err error) { + + if len(commitments) == 1 { // no need to fold + commitment = commitments[0] + return + } else if len(commitments) == 0 { // nothing to do at all + return + } + + r := make([]fr.Element, len(commitments)) + r[0].SetOne() + if r[1], err = getChallenge(fiatshamirSeeds); err != nil { + return + } + for i := 2; i < len(commitments); i++ { + r[i].Mul(&r[i-1], &r[1]) + } + + for i := range commitments { // TODO @Tabaie Remove if MSM does subgroup check for you + if !commitments[i].IsInSubGroup() { + err = fmt.Errorf("subgroup check failed") + return + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = commitment.MultiExp(commitments, r, config) return } @@ -108,21 +215,36 @@ func (vk *VerifyingKey) Verify(commitment curve.G1Affine, knowledgeProof curve.G return fmt.Errorf("subgroup check failed") } - product, err := curve.Pair([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}) - if err != nil { + if isOne, err := curve.PairingCheck([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}); err != nil { return err + } else if !isOne { + return fmt.Errorf("proof rejected") } - if product.IsOne() { - return nil + return nil +} + +func getChallenge(fiatshamirSeeds [][]byte) (r fr.Element, err error) { + // incorporate user-provided seeds into the transcript + t := fiatshamir.NewTranscript(sha256.New(), "r") + for i := range fiatshamirSeeds { + if err = t.Bind("r", fiatshamirSeeds[i]); err != nil { + return + } + } + + // obtain the challenge + var rBytes []byte + + if rBytes, err = t.ComputeChallenge("r"); err != nil { + return } - return fmt.Errorf("proof rejected") + r.SetBytes(rBytes) // TODO @Tabaie Plonk challenge generation done the same way; replace both with hash to fr? 
+ return } // Marshal -func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) - +func (pk *ProvingKey) writeTo(enc *curve.Encoder) (int64, error) { if err := enc.Encode(pk.basis); err != nil { return enc.BytesWritten(), err } @@ -132,6 +254,14 @@ func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { return enc.BytesWritten(), err } +func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w)) +} + +func (pk *ProvingKey) WriteRawTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { dec := curve.NewDecoder(r) @@ -150,7 +280,14 @@ func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { } func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) + return vk.writeTo(curve.NewEncoder(w)) +} + +func (vk *VerifyingKey) WriteRawTo(w io.Writer) (int64, error) { + return vk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + +func (vk *VerifyingKey) writeTo(enc *curve.Encoder) (int64, error) { var err error if err = enc.Encode(&vk.g); err != nil { @@ -161,7 +298,15 @@ func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { } func (vk *VerifyingKey) ReadFrom(r io.Reader) (int64, error) { - dec := curve.NewDecoder(r) + return vk.readFrom(r) +} + +func (vk *VerifyingKey) UnsafeReadFrom(r io.Reader) (int64, error) { + return vk.readFrom(r, curve.NoSubgroupChecks()) +} + +func (vk *VerifyingKey) readFrom(r io.Reader, decOptions ...func(*curve.Decoder)) (int64, error) { + dec := curve.NewDecoder(r, decOptions...) var err error if err = dec.Decode(&vk.g); err != nil { diff --git a/ecc/bls24-317/fr/pedersen/pedersen_test.go b/ecc/bls24-317/fr/pedersen/pedersen_test.go index ee64c9aa3..f299af085 100644 --- a/ecc/bls24-317/fr/pedersen/pedersen_test.go +++ b/ecc/bls24-317/fr/pedersen/pedersen_test.go @@ -17,6 +17,7 @@ package pedersen import ( + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" "github.com/consensys/gnark-crypto/utils" @@ -67,15 +68,18 @@ func testCommit(t *testing.T, values ...interface{}) { basis := randomG1Slice(t, len(values)) var ( - pk ProvingKey + pk []ProvingKey vk VerifyingKey err error commitment, pok curve.G1Affine ) + valuesFr := interfaceSliceToFrSlice(t, values...) pk, vk, err = Setup(basis) assert.NoError(t, err) - commitment, pok, err = pk.Commit(interfaceSliceToFrSlice(t, values...)) + commitment, err = pk[0].Commit(valuesFr) + assert.NoError(t, err) + pok, err = pk[0].ProveKnowledge(valuesFr) assert.NoError(t, err) assert.NoError(t, vk.Verify(commitment, pok)) @@ -83,6 +87,71 @@ func testCommit(t *testing.T, values ...interface{}) { assert.NotNil(t, vk.Verify(commitment, pok)) } +func TestFoldProofs(t *testing.T) { + + values := [][]fr.Element{ + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + } + + bases := make([][]curve.G1Affine, len(values)) + for i := range bases { + bases[i] = randomG1Slice(t, len(values[i])) + } + + pk, vk, err := Setup(bases...) 
+ assert.NoError(t, err) + + commitments := make([]curve.G1Affine, len(values)) + for i := range values { + commitments[i], err = pk[i].Commit(values[i]) + assert.NoError(t, err) + } + + t.Run("folding with zeros", func(t *testing.T) { + pokFolded, err := BatchProve(pk[:2], [][]fr.Element{ + values[0], + make([]fr.Element, len(values[1])), + }, []byte("test")) + assert.NoError(t, err) + var pok curve.G1Affine + pok, err = pk[0].ProveKnowledge(values[0]) + assert.NoError(t, err) + assert.Equal(t, pok, pokFolded) + }) + + t.Run("run empty", func(t *testing.T) { + var foldedCommitment curve.G1Affine + pok, err := BatchProve([]ProvingKey{}, [][]fr.Element{}, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments([]curve.G1Affine{}, []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + }) + + run := func(values [][]fr.Element) func(t *testing.T) { + return func(t *testing.T) { + + var foldedCommitment curve.G1Affine + pok, err := BatchProve(pk[:len(values)], values, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments(commitments[:len(values)], []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + + pok.Neg(&pok) + assert.NotNil(t, vk.Verify(foldedCommitment, pok)) + } + } + + for i := range values { + t.Run(fmt.Sprintf("folding %d", i+1), run(values[:i+1])) + } +} + func TestCommitToOne(t *testing.T) { testCommit(t, 1) } @@ -110,5 +179,7 @@ func TestMarshal(t *testing.T) { assert.NoError(t, err) t.Run("ProvingKey -> Bytes -> ProvingKey must remain identical.", utils.SerializationRoundTrip(&pk)) + t.Run("ProvingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&pk)) t.Run("VerifyingKey -> Bytes -> VerifyingKey must remain identical.", utils.SerializationRoundTrip(&vk)) + t.Run("VerifyingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&vk)) } diff --git a/ecc/bls24-317/marshal.go b/ecc/bls24-317/marshal.go index 89e7bcf5c..e68738af3 100644 --- a/ecc/bls24-317/marshal.go +++ b/ecc/bls24-317/marshal.go @@ -103,24 +103,36 @@ func (dec *Decoder) Decode(v interface{}) (err error) { var buf [SizeOfG2AffineUncompressed]byte var read int + var sliceLen uint32 switch t := v.(type) { + case *[][]uint64: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + *t = make([][]uint64, sliceLen) + + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + (*t)[i] = make([]uint64, sliceLen) + for j := range (*t)[i] { + if (*t)[i][j], err = dec.readUint64(); err != nil { + return + } + } + } + return case *[]uint64: - buf64 := buf[:64/8] - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if sliceLen, err = dec.readUint32(); err != nil { return } - length := binary.BigEndian.Uint64(buf64) - *t = make([]uint64, length) + *t = make([]uint64, sliceLen) for i := range *t { - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if (*t)[i], err = dec.readUint64(); err != nil { return } - (*t)[i] = binary.BigEndian.Uint64(buf64) } return case *fr.Element: @@ -148,7 +160,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 return case *[][]fr.Element: - var sliceLen uint32 if sliceLen, err = dec.readUint32(); err != nil { return } @@ -215,7 +226,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { _, err = t.setBytes(buf[:nbBytes], dec.subGroupCheck) return case 
*[]G1Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -255,7 +265,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -281,7 +291,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { return nil case *[]G2Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -321,7 +330,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -376,6 +385,18 @@ func (dec *Decoder) readUint32() (r uint32, err error) { return } +func (dec *Decoder) readUint64() (r uint64, err error) { + var read int + var buf [8]byte + read, err = io.ReadFull(dec.r, buf[:]) + dec.n += int64(read) + if err != nil { + return + } + r = binary.BigEndian.Uint64(buf[:]) + return +} + // isMaskInvalid returns true if the mask is invalid func isMaskInvalid(msb byte) bool { mData := msb & mMask @@ -467,6 +488,8 @@ func (enc *Encoder) encode(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -583,6 +606,8 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -679,25 +704,51 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { } } -func (enc *Encoder) writeUint64Slice(t []uint64) error { - buff := make([]byte, 64/8) - binary.BigEndian.PutUint64(buff, uint64(len(t))) - written, err := enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err +func (enc *Encoder) writeUint64Slice(t []uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return } for i := range t { - binary.BigEndian.PutUint64(buff, t[i]) - written, err = enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err + if err = enc.writeUint64(t[i]); err != nil { + return } } return nil } +func (enc *Encoder) writeUint64SliceSlice(t [][]uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return + } + for i := range t { + if err = enc.writeUint32(uint32(len(t[i]))); err != nil { + return + } + for j := range t[i] { + if err = enc.writeUint64(t[i][j]); err != nil { + return + } + } + } + return nil +} + +func (enc *Encoder) writeUint64(a uint64) error { + var buff [64 / 8]byte + binary.BigEndian.PutUint64(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + +func (enc *Encoder) writeUint32(a uint32) error { + var buff [32 / 8]byte + binary.BigEndian.PutUint32(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + // SizeOfG1AffineCompressed represents the size in bytes that a G1Affine need in binary form, compressed const SizeOfG1AffineCompressed = 40 diff --git a/ecc/bls24-317/marshal_test.go b/ecc/bls24-317/marshal_test.go index 23f3b8a10..d922c7c12 100644 --- a/ecc/bls24-317/marshal_test.go +++ 
b/ecc/bls24-317/marshal_test.go @@ -53,6 +53,7 @@ func TestEncoder(t *testing.T) { var inJ []fr.Element var inK fr.Vector var inL [][]fr.Element + var inM [][]uint64 // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -70,12 +71,13 @@ func TestEncoder(t *testing.T) { inK = make(fr.Vector, 42) inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} + inM = [][]uint64{{1, 2}, {4}, {}} // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -101,8 +103,9 @@ func TestEncoder(t *testing.T) { var outJ []fr.Element var outK fr.Vector var outL [][]fr.Element + var outM [][]uint64 - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -145,6 +148,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inL, outL) { t.Fatal("decode(encode(slice²(elements))) failed") } + if !reflect.DeepEqual(inM, outM) { + t.Fatal("decode(encode(slice²(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bn254/fr/pedersen/pedersen.go b/ecc/bn254/fr/pedersen/pedersen.go index b7f60bf2e..aa19032cd 100644 --- a/ecc/bn254/fr/pedersen/pedersen.go +++ b/ecc/bn254/fr/pedersen/pedersen.go @@ -18,10 +18,12 @@ package pedersen import ( "crypto/rand" + "crypto/sha256" "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "io" "math/big" ) @@ -51,7 +53,7 @@ func randomOnG2() (curve.G2Affine, error) { // TODO: Add to G2.go? 
} } -func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { +func Setup(bases ...[]curve.G1Affine) (pk []ProvingKey, vk VerifyingKey, err error) { if vk.g, err = randomOnG2(); err != nil { return @@ -70,34 +72,139 @@ func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { sigmaInvNeg.Sub(fr.Modulus(), &sigmaInvNeg) vk.gRootSigmaNeg.ScalarMultiplication(&vk.g, &sigmaInvNeg) - pk.basisExpSigma = make([]curve.G1Affine, len(basis)) - for i := range basis { - pk.basisExpSigma[i].ScalarMultiplication(&basis[i], sigma) + pk = make([]ProvingKey, len(bases)) + for i := range bases { + pk[i].basisExpSigma = make([]curve.G1Affine, len(bases[i])) + for j := range bases[i] { + pk[i].basisExpSigma[j].ScalarMultiplication(&bases[i][j], sigma) + } + pk[i].basis = bases[i] + } + return +} + +func (pk *ProvingKey) ProveKnowledge(values []fr.Element) (pok curve.G1Affine, err error) { + if len(values) != len(pk.basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, // TODO Experiment } - pk.basis = basis + _, err = pok.MultiExp(pk.basisExpSigma, values, config) return } -func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, knowledgeProof curve.G1Affine, err error) { +func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, err error) { if len(values) != len(pk.basis) { - err = fmt.Errorf("unexpected number of values") + err = fmt.Errorf("must have as many values as basis elements") return } // TODO @gbotrel this will spawn more than one task, see // https://github.com/ConsenSys/gnark-crypto/issues/269 config := ecc.MultiExpConfig{ - NbTasks: 1, // TODO Experiment + NbTasks: 1, + } + _, err = commitment.MultiExp(pk.basis, values, config) + + return +} + +// BatchProve generates a single proof of knowledge for multiple commitments for faster verification +func BatchProve(pk []ProvingKey, values [][]fr.Element, fiatshamirSeeds ...[]byte) (pok curve.G1Affine, err error) { + if len(pk) != len(values) { + err = fmt.Errorf("must have as many value vectors as bases") + return } - if _, err = commitment.MultiExp(pk.basis, values, config); err != nil { + if len(pk) == 1 { // no need to fold + return pk[0].ProveKnowledge(values[0]) + } else if len(pk) == 0 { // nothing to do at all return } - _, err = knowledgeProof.MultiExp(pk.basisExpSigma, values, config) + offset := 0 + for i := range pk { + if len(values[i]) != len(pk[i].basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + offset += len(values[i]) + } + + var r fr.Element + if r, err = getChallenge(fiatshamirSeeds); err != nil { + return + } + + // prepare one amalgamated MSM + scaledValues := make([]fr.Element, offset) + basis := make([]curve.G1Affine, offset) + + copy(basis, pk[0].basisExpSigma) + copy(scaledValues, values[0]) + + offset = len(values[0]) + rI := r + for i := 1; i < len(pk); i++ { + copy(basis[offset:], pk[i].basisExpSigma) + for j := range pk[i].basis { + scaledValues[offset].Mul(&values[i][j], &rI) + offset++ + } + if i+1 < len(pk) { + rI.Mul(&rI, &r) + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = pok.MultiExp(basis, scaledValues, config) + return +} + +// FoldCommitments amalgamates 
multiple commitments into one, which can be verified against a folded proof obtained from BatchProve +func FoldCommitments(commitments []curve.G1Affine, fiatshamirSeeds ...[]byte) (commitment curve.G1Affine, err error) { + + if len(commitments) == 1 { // no need to fold + commitment = commitments[0] + return + } else if len(commitments) == 0 { // nothing to do at all + return + } + + r := make([]fr.Element, len(commitments)) + r[0].SetOne() + if r[1], err = getChallenge(fiatshamirSeeds); err != nil { + return + } + for i := 2; i < len(commitments); i++ { + r[i].Mul(&r[i-1], &r[1]) + } + + for i := range commitments { // TODO @Tabaie Remove if MSM does subgroup check for you + if !commitments[i].IsInSubGroup() { + err = fmt.Errorf("subgroup check failed") + return + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = commitment.MultiExp(commitments, r, config) return } @@ -108,21 +215,36 @@ func (vk *VerifyingKey) Verify(commitment curve.G1Affine, knowledgeProof curve.G return fmt.Errorf("subgroup check failed") } - product, err := curve.Pair([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}) - if err != nil { + if isOne, err := curve.PairingCheck([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}); err != nil { return err + } else if !isOne { + return fmt.Errorf("proof rejected") } - if product.IsOne() { - return nil + return nil +} + +func getChallenge(fiatshamirSeeds [][]byte) (r fr.Element, err error) { + // incorporate user-provided seeds into the transcript + t := fiatshamir.NewTranscript(sha256.New(), "r") + for i := range fiatshamirSeeds { + if err = t.Bind("r", fiatshamirSeeds[i]); err != nil { + return + } + } + + // obtain the challenge + var rBytes []byte + + if rBytes, err = t.ComputeChallenge("r"); err != nil { + return } - return fmt.Errorf("proof rejected") + r.SetBytes(rBytes) // TODO @Tabaie Plonk challenge generation done the same way; replace both with hash to fr? 
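// Editor's note on the algebra behind the functions above (illustrative, notation not from the
// diff): Setup picks one σ and sets pk[i].basisExpSigma[j] = σ·G[i][j] and
// vk.gRootSigmaNeg = (-1/σ)·g. For values v[i], Commit gives C_i = Σ_j v[i][j]·G[i][j] and
// ProveKnowledge gives π_i = Σ_j v[i][j]·σ·G[i][j] = σ·C_i. With the Fiat-Shamir challenge r
// derived from the caller-supplied seeds (and from nothing else), BatchProve returns
// π = Σ_i r^i·π_i and FoldCommitments returns C = Σ_i r^i·C_i, so π = σ·C still holds and
// Verify accepts because e(C, g)·e(π, (-1/σ)·g) = e(C, g)·e(σ·C, (-1/σ)·g) = 1.
// Consequently BatchProve and FoldCommitments must be called with identical fiatshamirSeeds,
// and the commitments and value vectors must be passed to both in the same order.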
+ return } // Marshal -func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) - +func (pk *ProvingKey) writeTo(enc *curve.Encoder) (int64, error) { if err := enc.Encode(pk.basis); err != nil { return enc.BytesWritten(), err } @@ -132,6 +254,14 @@ func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { return enc.BytesWritten(), err } +func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w)) +} + +func (pk *ProvingKey) WriteRawTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { dec := curve.NewDecoder(r) @@ -150,7 +280,14 @@ func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { } func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) + return vk.writeTo(curve.NewEncoder(w)) +} + +func (vk *VerifyingKey) WriteRawTo(w io.Writer) (int64, error) { + return vk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + +func (vk *VerifyingKey) writeTo(enc *curve.Encoder) (int64, error) { var err error if err = enc.Encode(&vk.g); err != nil { @@ -161,7 +298,15 @@ func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { } func (vk *VerifyingKey) ReadFrom(r io.Reader) (int64, error) { - dec := curve.NewDecoder(r) + return vk.readFrom(r) +} + +func (vk *VerifyingKey) UnsafeReadFrom(r io.Reader) (int64, error) { + return vk.readFrom(r, curve.NoSubgroupChecks()) +} + +func (vk *VerifyingKey) readFrom(r io.Reader, decOptions ...func(*curve.Decoder)) (int64, error) { + dec := curve.NewDecoder(r, decOptions...) var err error if err = dec.Decode(&vk.g); err != nil { diff --git a/ecc/bn254/fr/pedersen/pedersen_test.go b/ecc/bn254/fr/pedersen/pedersen_test.go index 14fa00d83..1939b3bf9 100644 --- a/ecc/bn254/fr/pedersen/pedersen_test.go +++ b/ecc/bn254/fr/pedersen/pedersen_test.go @@ -17,6 +17,7 @@ package pedersen import ( + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/utils" @@ -67,15 +68,18 @@ func testCommit(t *testing.T, values ...interface{}) { basis := randomG1Slice(t, len(values)) var ( - pk ProvingKey + pk []ProvingKey vk VerifyingKey err error commitment, pok curve.G1Affine ) + valuesFr := interfaceSliceToFrSlice(t, values...) pk, vk, err = Setup(basis) assert.NoError(t, err) - commitment, pok, err = pk.Commit(interfaceSliceToFrSlice(t, values...)) + commitment, err = pk[0].Commit(valuesFr) + assert.NoError(t, err) + pok, err = pk[0].ProveKnowledge(valuesFr) assert.NoError(t, err) assert.NoError(t, vk.Verify(commitment, pok)) @@ -83,6 +87,71 @@ func testCommit(t *testing.T, values ...interface{}) { assert.NotNil(t, vk.Verify(commitment, pok)) } +func TestFoldProofs(t *testing.T) { + + values := [][]fr.Element{ + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + } + + bases := make([][]curve.G1Affine, len(values)) + for i := range bases { + bases[i] = randomG1Slice(t, len(values[i])) + } + + pk, vk, err := Setup(bases...) 
+ assert.NoError(t, err) + + commitments := make([]curve.G1Affine, len(values)) + for i := range values { + commitments[i], err = pk[i].Commit(values[i]) + assert.NoError(t, err) + } + + t.Run("folding with zeros", func(t *testing.T) { + pokFolded, err := BatchProve(pk[:2], [][]fr.Element{ + values[0], + make([]fr.Element, len(values[1])), + }, []byte("test")) + assert.NoError(t, err) + var pok curve.G1Affine + pok, err = pk[0].ProveKnowledge(values[0]) + assert.NoError(t, err) + assert.Equal(t, pok, pokFolded) + }) + + t.Run("run empty", func(t *testing.T) { + var foldedCommitment curve.G1Affine + pok, err := BatchProve([]ProvingKey{}, [][]fr.Element{}, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments([]curve.G1Affine{}, []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + }) + + run := func(values [][]fr.Element) func(t *testing.T) { + return func(t *testing.T) { + + var foldedCommitment curve.G1Affine + pok, err := BatchProve(pk[:len(values)], values, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments(commitments[:len(values)], []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + + pok.Neg(&pok) + assert.NotNil(t, vk.Verify(foldedCommitment, pok)) + } + } + + for i := range values { + t.Run(fmt.Sprintf("folding %d", i+1), run(values[:i+1])) + } +} + func TestCommitToOne(t *testing.T) { testCommit(t, 1) } @@ -110,5 +179,7 @@ func TestMarshal(t *testing.T) { assert.NoError(t, err) t.Run("ProvingKey -> Bytes -> ProvingKey must remain identical.", utils.SerializationRoundTrip(&pk)) + t.Run("ProvingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&pk)) t.Run("VerifyingKey -> Bytes -> VerifyingKey must remain identical.", utils.SerializationRoundTrip(&vk)) + t.Run("VerifyingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&vk)) } diff --git a/ecc/bn254/marshal.go b/ecc/bn254/marshal.go index 3ef63696d..2350345f2 100644 --- a/ecc/bn254/marshal.go +++ b/ecc/bn254/marshal.go @@ -98,24 +98,36 @@ func (dec *Decoder) Decode(v interface{}) (err error) { var buf [SizeOfG2AffineUncompressed]byte var read int + var sliceLen uint32 switch t := v.(type) { + case *[][]uint64: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + *t = make([][]uint64, sliceLen) + + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + (*t)[i] = make([]uint64, sliceLen) + for j := range (*t)[i] { + if (*t)[i][j], err = dec.readUint64(); err != nil { + return + } + } + } + return case *[]uint64: - buf64 := buf[:64/8] - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if sliceLen, err = dec.readUint32(); err != nil { return } - length := binary.BigEndian.Uint64(buf64) - *t = make([]uint64, length) + *t = make([]uint64, sliceLen) for i := range *t { - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if (*t)[i], err = dec.readUint64(); err != nil { return } - (*t)[i] = binary.BigEndian.Uint64(buf64) } return case *fr.Element: @@ -143,7 +155,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 return case *[][]fr.Element: - var sliceLen uint32 if sliceLen, err = dec.readUint32(); err != nil { return } @@ -198,7 +209,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { _, err = t.setBytes(buf[:nbBytes], dec.subGroupCheck) return case *[]G1Affine: - var 
sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -232,7 +242,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -258,7 +268,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { return nil case *[]G2Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -292,7 +301,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -347,6 +356,18 @@ func (dec *Decoder) readUint32() (r uint32, err error) { return } +func (dec *Decoder) readUint64() (r uint64, err error) { + var read int + var buf [8]byte + read, err = io.ReadFull(dec.r, buf[:]) + dec.n += int64(read) + if err != nil { + return + } + r = binary.BigEndian.Uint64(buf[:]) + return +} + func isCompressed(msb byte) bool { mData := msb & mMask return !(mData == mUncompressed) @@ -432,6 +453,8 @@ func (enc *Encoder) encode(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -548,6 +571,8 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -644,25 +669,51 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { } } -func (enc *Encoder) writeUint64Slice(t []uint64) error { - buff := make([]byte, 64/8) - binary.BigEndian.PutUint64(buff, uint64(len(t))) - written, err := enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err +func (enc *Encoder) writeUint64Slice(t []uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return } for i := range t { - binary.BigEndian.PutUint64(buff, t[i]) - written, err = enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err + if err = enc.writeUint64(t[i]); err != nil { + return } } return nil } +func (enc *Encoder) writeUint64SliceSlice(t [][]uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return + } + for i := range t { + if err = enc.writeUint32(uint32(len(t[i]))); err != nil { + return + } + for j := range t[i] { + if err = enc.writeUint64(t[i][j]); err != nil { + return + } + } + } + return nil +} + +func (enc *Encoder) writeUint64(a uint64) error { + var buff [64 / 8]byte + binary.BigEndian.PutUint64(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + +func (enc *Encoder) writeUint32(a uint32) error { + var buff [32 / 8]byte + binary.BigEndian.PutUint32(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + // SizeOfG1AffineCompressed represents the size in bytes that a G1Affine need in binary form, compressed const SizeOfG1AffineCompressed = 32 diff --git a/ecc/bn254/marshal_test.go b/ecc/bn254/marshal_test.go index ddc699b9c..b6d51546a 100644 --- a/ecc/bn254/marshal_test.go +++ b/ecc/bn254/marshal_test.go @@ -53,6 +53,7 @@ func TestEncoder(t *testing.T) { 
var inJ []fr.Element var inK fr.Vector var inL [][]fr.Element + var inM [][]uint64 // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -70,12 +71,13 @@ func TestEncoder(t *testing.T) { inK = make(fr.Vector, 42) inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} + inM = [][]uint64{{1, 2}, {4}, {}} // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -101,8 +103,9 @@ func TestEncoder(t *testing.T) { var outJ []fr.Element var outK fr.Vector var outL [][]fr.Element + var outM [][]uint64 - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -145,6 +148,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inL, outL) { t.Fatal("decode(encode(slice²(elements))) failed") } + if !reflect.DeepEqual(inM, outM) { + t.Fatal("decode(encode(slice²(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bw6-633/fr/pedersen/pedersen.go b/ecc/bw6-633/fr/pedersen/pedersen.go index a680a17c6..8068986bf 100644 --- a/ecc/bw6-633/fr/pedersen/pedersen.go +++ b/ecc/bw6-633/fr/pedersen/pedersen.go @@ -18,10 +18,12 @@ package pedersen import ( "crypto/rand" + "crypto/sha256" "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "io" "math/big" ) @@ -51,7 +53,7 @@ func randomOnG2() (curve.G2Affine, error) { // TODO: Add to G2.go? 
} } -func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { +func Setup(bases ...[]curve.G1Affine) (pk []ProvingKey, vk VerifyingKey, err error) { if vk.g, err = randomOnG2(); err != nil { return @@ -70,34 +72,139 @@ func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { sigmaInvNeg.Sub(fr.Modulus(), &sigmaInvNeg) vk.gRootSigmaNeg.ScalarMultiplication(&vk.g, &sigmaInvNeg) - pk.basisExpSigma = make([]curve.G1Affine, len(basis)) - for i := range basis { - pk.basisExpSigma[i].ScalarMultiplication(&basis[i], sigma) + pk = make([]ProvingKey, len(bases)) + for i := range bases { + pk[i].basisExpSigma = make([]curve.G1Affine, len(bases[i])) + for j := range bases[i] { + pk[i].basisExpSigma[j].ScalarMultiplication(&bases[i][j], sigma) + } + pk[i].basis = bases[i] + } + return +} + +func (pk *ProvingKey) ProveKnowledge(values []fr.Element) (pok curve.G1Affine, err error) { + if len(values) != len(pk.basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, // TODO Experiment } - pk.basis = basis + _, err = pok.MultiExp(pk.basisExpSigma, values, config) return } -func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, knowledgeProof curve.G1Affine, err error) { +func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, err error) { if len(values) != len(pk.basis) { - err = fmt.Errorf("unexpected number of values") + err = fmt.Errorf("must have as many values as basis elements") return } // TODO @gbotrel this will spawn more than one task, see // https://github.com/ConsenSys/gnark-crypto/issues/269 config := ecc.MultiExpConfig{ - NbTasks: 1, // TODO Experiment + NbTasks: 1, + } + _, err = commitment.MultiExp(pk.basis, values, config) + + return +} + +// BatchProve generates a single proof of knowledge for multiple commitments for faster verification +func BatchProve(pk []ProvingKey, values [][]fr.Element, fiatshamirSeeds ...[]byte) (pok curve.G1Affine, err error) { + if len(pk) != len(values) { + err = fmt.Errorf("must have as many value vectors as bases") + return } - if _, err = commitment.MultiExp(pk.basis, values, config); err != nil { + if len(pk) == 1 { // no need to fold + return pk[0].ProveKnowledge(values[0]) + } else if len(pk) == 0 { // nothing to do at all return } - _, err = knowledgeProof.MultiExp(pk.basisExpSigma, values, config) + offset := 0 + for i := range pk { + if len(values[i]) != len(pk[i].basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + offset += len(values[i]) + } + + var r fr.Element + if r, err = getChallenge(fiatshamirSeeds); err != nil { + return + } + + // prepare one amalgamated MSM + scaledValues := make([]fr.Element, offset) + basis := make([]curve.G1Affine, offset) + + copy(basis, pk[0].basisExpSigma) + copy(scaledValues, values[0]) + + offset = len(values[0]) + rI := r + for i := 1; i < len(pk); i++ { + copy(basis[offset:], pk[i].basisExpSigma) + for j := range pk[i].basis { + scaledValues[offset].Mul(&values[i][j], &rI) + offset++ + } + if i+1 < len(pk) { + rI.Mul(&rI, &r) + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = pok.MultiExp(basis, scaledValues, config) + return +} + +// FoldCommitments amalgamates 
multiple commitments into one, which can be verified against a folded proof obtained from BatchProve +func FoldCommitments(commitments []curve.G1Affine, fiatshamirSeeds ...[]byte) (commitment curve.G1Affine, err error) { + + if len(commitments) == 1 { // no need to fold + commitment = commitments[0] + return + } else if len(commitments) == 0 { // nothing to do at all + return + } + + r := make([]fr.Element, len(commitments)) + r[0].SetOne() + if r[1], err = getChallenge(fiatshamirSeeds); err != nil { + return + } + for i := 2; i < len(commitments); i++ { + r[i].Mul(&r[i-1], &r[1]) + } + + for i := range commitments { // TODO @Tabaie Remove if MSM does subgroup check for you + if !commitments[i].IsInSubGroup() { + err = fmt.Errorf("subgroup check failed") + return + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = commitment.MultiExp(commitments, r, config) return } @@ -108,21 +215,36 @@ func (vk *VerifyingKey) Verify(commitment curve.G1Affine, knowledgeProof curve.G return fmt.Errorf("subgroup check failed") } - product, err := curve.Pair([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}) - if err != nil { + if isOne, err := curve.PairingCheck([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}); err != nil { return err + } else if !isOne { + return fmt.Errorf("proof rejected") } - if product.IsOne() { - return nil + return nil +} + +func getChallenge(fiatshamirSeeds [][]byte) (r fr.Element, err error) { + // incorporate user-provided seeds into the transcript + t := fiatshamir.NewTranscript(sha256.New(), "r") + for i := range fiatshamirSeeds { + if err = t.Bind("r", fiatshamirSeeds[i]); err != nil { + return + } + } + + // obtain the challenge + var rBytes []byte + + if rBytes, err = t.ComputeChallenge("r"); err != nil { + return } - return fmt.Errorf("proof rejected") + r.SetBytes(rBytes) // TODO @Tabaie Plonk challenge generation done the same way; replace both with hash to fr? 
+ return } // Marshal -func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) - +func (pk *ProvingKey) writeTo(enc *curve.Encoder) (int64, error) { if err := enc.Encode(pk.basis); err != nil { return enc.BytesWritten(), err } @@ -132,6 +254,14 @@ func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { return enc.BytesWritten(), err } +func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w)) +} + +func (pk *ProvingKey) WriteRawTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { dec := curve.NewDecoder(r) @@ -150,7 +280,14 @@ func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { } func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) + return vk.writeTo(curve.NewEncoder(w)) +} + +func (vk *VerifyingKey) WriteRawTo(w io.Writer) (int64, error) { + return vk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + +func (vk *VerifyingKey) writeTo(enc *curve.Encoder) (int64, error) { var err error if err = enc.Encode(&vk.g); err != nil { @@ -161,7 +298,15 @@ func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { } func (vk *VerifyingKey) ReadFrom(r io.Reader) (int64, error) { - dec := curve.NewDecoder(r) + return vk.readFrom(r) +} + +func (vk *VerifyingKey) UnsafeReadFrom(r io.Reader) (int64, error) { + return vk.readFrom(r, curve.NoSubgroupChecks()) +} + +func (vk *VerifyingKey) readFrom(r io.Reader, decOptions ...func(*curve.Decoder)) (int64, error) { + dec := curve.NewDecoder(r, decOptions...) var err error if err = dec.Decode(&vk.g); err != nil { diff --git a/ecc/bw6-633/fr/pedersen/pedersen_test.go b/ecc/bw6-633/fr/pedersen/pedersen_test.go index f2b9bb6e4..df519d9ff 100644 --- a/ecc/bw6-633/fr/pedersen/pedersen_test.go +++ b/ecc/bw6-633/fr/pedersen/pedersen_test.go @@ -17,6 +17,7 @@ package pedersen import ( + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" "github.com/consensys/gnark-crypto/utils" @@ -67,15 +68,18 @@ func testCommit(t *testing.T, values ...interface{}) { basis := randomG1Slice(t, len(values)) var ( - pk ProvingKey + pk []ProvingKey vk VerifyingKey err error commitment, pok curve.G1Affine ) + valuesFr := interfaceSliceToFrSlice(t, values...) pk, vk, err = Setup(basis) assert.NoError(t, err) - commitment, pok, err = pk.Commit(interfaceSliceToFrSlice(t, values...)) + commitment, err = pk[0].Commit(valuesFr) + assert.NoError(t, err) + pok, err = pk[0].ProveKnowledge(valuesFr) assert.NoError(t, err) assert.NoError(t, vk.Verify(commitment, pok)) @@ -83,6 +87,71 @@ func testCommit(t *testing.T, values ...interface{}) { assert.NotNil(t, vk.Verify(commitment, pok)) } +func TestFoldProofs(t *testing.T) { + + values := [][]fr.Element{ + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + } + + bases := make([][]curve.G1Affine, len(values)) + for i := range bases { + bases[i] = randomG1Slice(t, len(values[i])) + } + + pk, vk, err := Setup(bases...) 
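// Illustrative sketch (editor's addition, not part of the diff): the new raw/unsafe
// serialization round trip for a verifying key. WriteRawTo writes points uncompressed
// (curve.RawEncoding) and UnsafeReadFrom skips subgroup checks (curve.NoSubgroupChecks),
// so it should only be used on trusted, locally produced data. Assumes the bw6-633
// pedersen package from this file; roundTripRaw is a hypothetical helper name.
package pedersenexample

import (
	"bytes"

	"github.com/consensys/gnark-crypto/ecc/bw6-633/fr/pedersen"
)

// roundTripRaw serializes vk without point compression and reads it back without
// subgroup checks.
func roundTripRaw(vk *pedersen.VerifyingKey) (pedersen.VerifyingKey, error) {
	var buf bytes.Buffer
	if _, err := vk.WriteRawTo(&buf); err != nil {
		return pedersen.VerifyingKey{}, err
	}
	var back pedersen.VerifyingKey
	_, err := back.UnsafeReadFrom(&buf)
	return back, err
}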
+ assert.NoError(t, err) + + commitments := make([]curve.G1Affine, len(values)) + for i := range values { + commitments[i], err = pk[i].Commit(values[i]) + assert.NoError(t, err) + } + + t.Run("folding with zeros", func(t *testing.T) { + pokFolded, err := BatchProve(pk[:2], [][]fr.Element{ + values[0], + make([]fr.Element, len(values[1])), + }, []byte("test")) + assert.NoError(t, err) + var pok curve.G1Affine + pok, err = pk[0].ProveKnowledge(values[0]) + assert.NoError(t, err) + assert.Equal(t, pok, pokFolded) + }) + + t.Run("run empty", func(t *testing.T) { + var foldedCommitment curve.G1Affine + pok, err := BatchProve([]ProvingKey{}, [][]fr.Element{}, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments([]curve.G1Affine{}, []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + }) + + run := func(values [][]fr.Element) func(t *testing.T) { + return func(t *testing.T) { + + var foldedCommitment curve.G1Affine + pok, err := BatchProve(pk[:len(values)], values, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments(commitments[:len(values)], []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + + pok.Neg(&pok) + assert.NotNil(t, vk.Verify(foldedCommitment, pok)) + } + } + + for i := range values { + t.Run(fmt.Sprintf("folding %d", i+1), run(values[:i+1])) + } +} + func TestCommitToOne(t *testing.T) { testCommit(t, 1) } @@ -110,5 +179,7 @@ func TestMarshal(t *testing.T) { assert.NoError(t, err) t.Run("ProvingKey -> Bytes -> ProvingKey must remain identical.", utils.SerializationRoundTrip(&pk)) + t.Run("ProvingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&pk)) t.Run("VerifyingKey -> Bytes -> VerifyingKey must remain identical.", utils.SerializationRoundTrip(&vk)) + t.Run("VerifyingKey -> Bytes (raw) -> VerifyingKey must remain identical.", utils.SerializationRoundTripRaw(&vk)) } diff --git a/ecc/bw6-633/marshal.go b/ecc/bw6-633/marshal.go index 49f29735e..107b62155 100644 --- a/ecc/bw6-633/marshal.go +++ b/ecc/bw6-633/marshal.go @@ -103,24 +103,36 @@ func (dec *Decoder) Decode(v interface{}) (err error) { var buf [SizeOfG2AffineUncompressed]byte var read int + var sliceLen uint32 switch t := v.(type) { + case *[][]uint64: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + *t = make([][]uint64, sliceLen) + + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + (*t)[i] = make([]uint64, sliceLen) + for j := range (*t)[i] { + if (*t)[i][j], err = dec.readUint64(); err != nil { + return + } + } + } + return case *[]uint64: - buf64 := buf[:64/8] - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if sliceLen, err = dec.readUint32(); err != nil { return } - length := binary.BigEndian.Uint64(buf64) - *t = make([]uint64, length) + *t = make([]uint64, sliceLen) for i := range *t { - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if (*t)[i], err = dec.readUint64(); err != nil { return } - (*t)[i] = binary.BigEndian.Uint64(buf64) } return case *fr.Element: @@ -148,7 +160,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 return case *[][]fr.Element: - var sliceLen uint32 if sliceLen, err = dec.readUint32(); err != nil { return } @@ -215,7 +226,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { _, err = t.setBytes(buf[:nbBytes], dec.subGroupCheck) return case
*[]G1Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -255,7 +265,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -281,7 +291,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { return nil case *[]G2Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -321,7 +330,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -376,6 +385,18 @@ func (dec *Decoder) readUint32() (r uint32, err error) { return } +func (dec *Decoder) readUint64() (r uint64, err error) { + var read int + var buf [8]byte + read, err = io.ReadFull(dec.r, buf[:]) + dec.n += int64(read) + if err != nil { + return + } + r = binary.BigEndian.Uint64(buf[:]) + return +} + // isMaskInvalid returns true if the mask is invalid func isMaskInvalid(msb byte) bool { mData := msb & mMask @@ -467,6 +488,8 @@ func (enc *Encoder) encode(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -583,6 +606,8 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -679,25 +704,51 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { } } -func (enc *Encoder) writeUint64Slice(t []uint64) error { - buff := make([]byte, 64/8) - binary.BigEndian.PutUint64(buff, uint64(len(t))) - written, err := enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err +func (enc *Encoder) writeUint64Slice(t []uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return } for i := range t { - binary.BigEndian.PutUint64(buff, t[i]) - written, err = enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err + if err = enc.writeUint64(t[i]); err != nil { + return } } return nil } +func (enc *Encoder) writeUint64SliceSlice(t [][]uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return + } + for i := range t { + if err = enc.writeUint32(uint32(len(t[i]))); err != nil { + return + } + for j := range t[i] { + if err = enc.writeUint64(t[i][j]); err != nil { + return + } + } + } + return nil +} + +func (enc *Encoder) writeUint64(a uint64) error { + var buff [64 / 8]byte + binary.BigEndian.PutUint64(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + +func (enc *Encoder) writeUint32(a uint32) error { + var buff [32 / 8]byte + binary.BigEndian.PutUint32(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + // SizeOfG1AffineCompressed represents the size in bytes that a G1Affine need in binary form, compressed const SizeOfG1AffineCompressed = 80 diff --git a/ecc/bw6-633/marshal_test.go b/ecc/bw6-633/marshal_test.go index 5e6cab476..c72e93ec8 100644 --- a/ecc/bw6-633/marshal_test.go +++ b/ecc/bw6-633/marshal_test.go @@ 
-53,6 +53,7 @@ func TestEncoder(t *testing.T) { var inJ []fr.Element var inK fr.Vector var inL [][]fr.Element + var inM [][]uint64 // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -70,12 +71,13 @@ func TestEncoder(t *testing.T) { inK = make(fr.Vector, 42) inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} + inM = [][]uint64{{1, 2}, {4}, {}} // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -101,8 +103,9 @@ func TestEncoder(t *testing.T) { var outJ []fr.Element var outK fr.Vector var outL [][]fr.Element + var outM [][]uint64 - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -145,6 +148,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inL, outL) { t.Fatal("decode(encode(slice²(elements))) failed") } + if !reflect.DeepEqual(inM, outM) { + t.Fatal("decode(encode(slice²(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bw6-756/fr/pedersen/pedersen.go b/ecc/bw6-756/fr/pedersen/pedersen.go index e6377e136..d5871890f 100644 --- a/ecc/bw6-756/fr/pedersen/pedersen.go +++ b/ecc/bw6-756/fr/pedersen/pedersen.go @@ -18,10 +18,12 @@ package pedersen import ( "crypto/rand" + "crypto/sha256" "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-756" "github.com/consensys/gnark-crypto/ecc/bw6-756/fr" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "io" "math/big" ) @@ -51,7 +53,7 @@ func randomOnG2() (curve.G2Affine, error) { // TODO: Add to G2.go? 
} } -func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { +func Setup(bases ...[]curve.G1Affine) (pk []ProvingKey, vk VerifyingKey, err error) { if vk.g, err = randomOnG2(); err != nil { return @@ -70,34 +72,139 @@ func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { sigmaInvNeg.Sub(fr.Modulus(), &sigmaInvNeg) vk.gRootSigmaNeg.ScalarMultiplication(&vk.g, &sigmaInvNeg) - pk.basisExpSigma = make([]curve.G1Affine, len(basis)) - for i := range basis { - pk.basisExpSigma[i].ScalarMultiplication(&basis[i], sigma) + pk = make([]ProvingKey, len(bases)) + for i := range bases { + pk[i].basisExpSigma = make([]curve.G1Affine, len(bases[i])) + for j := range bases[i] { + pk[i].basisExpSigma[j].ScalarMultiplication(&bases[i][j], sigma) + } + pk[i].basis = bases[i] + } + return +} + +func (pk *ProvingKey) ProveKnowledge(values []fr.Element) (pok curve.G1Affine, err error) { + if len(values) != len(pk.basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, // TODO Experiment } - pk.basis = basis + _, err = pok.MultiExp(pk.basisExpSigma, values, config) return } -func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, knowledgeProof curve.G1Affine, err error) { +func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, err error) { if len(values) != len(pk.basis) { - err = fmt.Errorf("unexpected number of values") + err = fmt.Errorf("must have as many values as basis elements") return } // TODO @gbotrel this will spawn more than one task, see // https://github.com/ConsenSys/gnark-crypto/issues/269 config := ecc.MultiExpConfig{ - NbTasks: 1, // TODO Experiment + NbTasks: 1, + } + _, err = commitment.MultiExp(pk.basis, values, config) + + return +} + +// BatchProve generates a single proof of knowledge for multiple commitments for faster verification +func BatchProve(pk []ProvingKey, values [][]fr.Element, fiatshamirSeeds ...[]byte) (pok curve.G1Affine, err error) { + if len(pk) != len(values) { + err = fmt.Errorf("must have as many value vectors as bases") + return } - if _, err = commitment.MultiExp(pk.basis, values, config); err != nil { + if len(pk) == 1 { // no need to fold + return pk[0].ProveKnowledge(values[0]) + } else if len(pk) == 0 { // nothing to do at all return } - _, err = knowledgeProof.MultiExp(pk.basisExpSigma, values, config) + offset := 0 + for i := range pk { + if len(values[i]) != len(pk[i].basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + offset += len(values[i]) + } + + var r fr.Element + if r, err = getChallenge(fiatshamirSeeds); err != nil { + return + } + + // prepare one amalgamated MSM + scaledValues := make([]fr.Element, offset) + basis := make([]curve.G1Affine, offset) + + copy(basis, pk[0].basisExpSigma) + copy(scaledValues, values[0]) + + offset = len(values[0]) + rI := r + for i := 1; i < len(pk); i++ { + copy(basis[offset:], pk[i].basisExpSigma) + for j := range pk[i].basis { + scaledValues[offset].Mul(&values[i][j], &rI) + offset++ + } + if i+1 < len(pk) { + rI.Mul(&rI, &r) + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = pok.MultiExp(basis, scaledValues, config) + return +} + +// FoldCommitments amalgamates 
multiple commitments into one, which can be verified against a folded proof obtained from BatchProve +func FoldCommitments(commitments []curve.G1Affine, fiatshamirSeeds ...[]byte) (commitment curve.G1Affine, err error) { + + if len(commitments) == 1 { // no need to fold + commitment = commitments[0] + return + } else if len(commitments) == 0 { // nothing to do at all + return + } + + r := make([]fr.Element, len(commitments)) + r[0].SetOne() + if r[1], err = getChallenge(fiatshamirSeeds); err != nil { + return + } + for i := 2; i < len(commitments); i++ { + r[i].Mul(&r[i-1], &r[1]) + } + + for i := range commitments { // TODO @Tabaie Remove if MSM does subgroup check for you + if !commitments[i].IsInSubGroup() { + err = fmt.Errorf("subgroup check failed") + return + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = commitment.MultiExp(commitments, r, config) return } @@ -108,21 +215,36 @@ func (vk *VerifyingKey) Verify(commitment curve.G1Affine, knowledgeProof curve.G return fmt.Errorf("subgroup check failed") } - product, err := curve.Pair([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}) - if err != nil { + if isOne, err := curve.PairingCheck([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}); err != nil { return err + } else if !isOne { + return fmt.Errorf("proof rejected") } - if product.IsOne() { - return nil + return nil +} + +func getChallenge(fiatshamirSeeds [][]byte) (r fr.Element, err error) { + // incorporate user-provided seeds into the transcript + t := fiatshamir.NewTranscript(sha256.New(), "r") + for i := range fiatshamirSeeds { + if err = t.Bind("r", fiatshamirSeeds[i]); err != nil { + return + } + } + + // obtain the challenge + var rBytes []byte + + if rBytes, err = t.ComputeChallenge("r"); err != nil { + return } - return fmt.Errorf("proof rejected") + r.SetBytes(rBytes) // TODO @Tabaie Plonk challenge generation done the same way; replace both with hash to fr?
+ return } // Marshal -func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) - +func (pk *ProvingKey) writeTo(enc *curve.Encoder) (int64, error) { if err := enc.Encode(pk.basis); err != nil { return enc.BytesWritten(), err } @@ -132,6 +254,14 @@ func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { return enc.BytesWritten(), err } +func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w)) +} + +func (pk *ProvingKey) WriteRawTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { dec := curve.NewDecoder(r) @@ -150,7 +280,14 @@ func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { } func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) + return vk.writeTo(curve.NewEncoder(w)) +} + +func (vk *VerifyingKey) WriteRawTo(w io.Writer) (int64, error) { + return vk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + +func (vk *VerifyingKey) writeTo(enc *curve.Encoder) (int64, error) { var err error if err = enc.Encode(&vk.g); err != nil { @@ -161,7 +298,15 @@ func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { } func (vk *VerifyingKey) ReadFrom(r io.Reader) (int64, error) { - dec := curve.NewDecoder(r) + return vk.readFrom(r) +} + +func (vk *VerifyingKey) UnsafeReadFrom(r io.Reader) (int64, error) { + return vk.readFrom(r, curve.NoSubgroupChecks()) +} + +func (vk *VerifyingKey) readFrom(r io.Reader, decOptions ...func(*curve.Decoder)) (int64, error) { + dec := curve.NewDecoder(r, decOptions...) var err error if err = dec.Decode(&vk.g); err != nil { diff --git a/ecc/bw6-756/fr/pedersen/pedersen_test.go b/ecc/bw6-756/fr/pedersen/pedersen_test.go index 62319db8a..06cf64470 100644 --- a/ecc/bw6-756/fr/pedersen/pedersen_test.go +++ b/ecc/bw6-756/fr/pedersen/pedersen_test.go @@ -17,6 +17,7 @@ package pedersen import ( + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bw6-756" "github.com/consensys/gnark-crypto/ecc/bw6-756/fr" "github.com/consensys/gnark-crypto/utils" @@ -67,15 +68,18 @@ func testCommit(t *testing.T, values ...interface{}) { basis := randomG1Slice(t, len(values)) var ( - pk ProvingKey + pk []ProvingKey vk VerifyingKey err error commitment, pok curve.G1Affine ) + valuesFr := interfaceSliceToFrSlice(t, values...) pk, vk, err = Setup(basis) assert.NoError(t, err) - commitment, pok, err = pk.Commit(interfaceSliceToFrSlice(t, values...)) + commitment, err = pk[0].Commit(valuesFr) + assert.NoError(t, err) + pok, err = pk[0].ProveKnowledge(valuesFr) assert.NoError(t, err) assert.NoError(t, vk.Verify(commitment, pok)) @@ -83,6 +87,71 @@ func testCommit(t *testing.T, values ...interface{}) { assert.NotNil(t, vk.Verify(commitment, pok)) } +func TestFoldProofs(t *testing.T) { + + values := [][]fr.Element{ + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + } + + bases := make([][]curve.G1Affine, len(values)) + for i := range bases { + bases[i] = randomG1Slice(t, len(values[i])) + } + + pk, vk, err := Setup(bases...) 
+ assert.NoError(t, err) + + commitments := make([]curve.G1Affine, len(values)) + for i := range values { + commitments[i], err = pk[i].Commit(values[i]) + assert.NoError(t, err) + } + + t.Run("folding with zeros", func(t *testing.T) { + pokFolded, err := BatchProve(pk[:2], [][]fr.Element{ + values[0], + make([]fr.Element, len(values[1])), + }, []byte("test")) + assert.NoError(t, err) + var pok curve.G1Affine + pok, err = pk[0].ProveKnowledge(values[0]) + assert.NoError(t, err) + assert.Equal(t, pok, pokFolded) + }) + + t.Run("run empty", func(t *testing.T) { + var foldedCommitment curve.G1Affine + pok, err := BatchProve([]ProvingKey{}, [][]fr.Element{}, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments([]curve.G1Affine{}, []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + }) + + run := func(values [][]fr.Element) func(t *testing.T) { + return func(t *testing.T) { + + var foldedCommitment curve.G1Affine + pok, err := BatchProve(pk[:len(values)], values, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments(commitments[:len(values)], []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + + pok.Neg(&pok) + assert.NotNil(t, vk.Verify(foldedCommitment, pok)) + } + } + + for i := range values { + t.Run(fmt.Sprintf("folding %d", i+1), run(values[:i+1])) + } +} + func TestCommitToOne(t *testing.T) { testCommit(t, 1) } @@ -110,5 +179,7 @@ func TestMarshal(t *testing.T) { assert.NoError(t, err) t.Run("ProvingKey -> Bytes -> ProvingKey must remain identical.", utils.SerializationRoundTrip(&pk)) + t.Run("ProvingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&pk)) t.Run("VerifyingKey -> Bytes -> VerifyingKey must remain identical.", utils.SerializationRoundTrip(&vk)) + t.Run("VerifyingKey -> Bytes (raw) -> VerifyingKey must remain identical.", utils.SerializationRoundTripRaw(&vk)) } diff --git a/ecc/bw6-756/marshal.go b/ecc/bw6-756/marshal.go index b62d30e48..b99e2f0fa 100644 --- a/ecc/bw6-756/marshal.go +++ b/ecc/bw6-756/marshal.go @@ -103,24 +103,36 @@ func (dec *Decoder) Decode(v interface{}) (err error) { var buf [SizeOfG2AffineUncompressed]byte var read int + var sliceLen uint32 switch t := v.(type) { + case *[][]uint64: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + *t = make([][]uint64, sliceLen) + + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + (*t)[i] = make([]uint64, sliceLen) + for j := range (*t)[i] { + if (*t)[i][j], err = dec.readUint64(); err != nil { + return + } + } + } + return case *[]uint64: - buf64 := buf[:64/8] - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if sliceLen, err = dec.readUint32(); err != nil { return } - length := binary.BigEndian.Uint64(buf64) - *t = make([]uint64, length) + *t = make([]uint64, sliceLen) for i := range *t { - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if (*t)[i], err = dec.readUint64(); err != nil { return } - (*t)[i] = binary.BigEndian.Uint64(buf64) } return case *fr.Element: @@ -148,7 +160,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 return case *[][]fr.Element: - var sliceLen uint32 if sliceLen, err = dec.readUint32(); err != nil { return } @@ -215,7 +226,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { _, err = t.setBytes(buf[:nbBytes], dec.subGroupCheck) return case
*[]G1Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -255,7 +265,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -281,7 +291,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { return nil case *[]G2Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -321,7 +330,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -376,6 +385,18 @@ func (dec *Decoder) readUint32() (r uint32, err error) { return } +func (dec *Decoder) readUint64() (r uint64, err error) { + var read int + var buf [8]byte + read, err = io.ReadFull(dec.r, buf[:]) + dec.n += int64(read) + if err != nil { + return + } + r = binary.BigEndian.Uint64(buf[:]) + return +} + // isMaskInvalid returns true if the mask is invalid func isMaskInvalid(msb byte) bool { mData := msb & mMask @@ -467,6 +488,8 @@ func (enc *Encoder) encode(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -583,6 +606,8 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -679,25 +704,51 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { } } -func (enc *Encoder) writeUint64Slice(t []uint64) error { - buff := make([]byte, 64/8) - binary.BigEndian.PutUint64(buff, uint64(len(t))) - written, err := enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err +func (enc *Encoder) writeUint64Slice(t []uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return } for i := range t { - binary.BigEndian.PutUint64(buff, t[i]) - written, err = enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err + if err = enc.writeUint64(t[i]); err != nil { + return } } return nil } +func (enc *Encoder) writeUint64SliceSlice(t [][]uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return + } + for i := range t { + if err = enc.writeUint32(uint32(len(t[i]))); err != nil { + return + } + for j := range t[i] { + if err = enc.writeUint64(t[i][j]); err != nil { + return + } + } + } + return nil +} + +func (enc *Encoder) writeUint64(a uint64) error { + var buff [64 / 8]byte + binary.BigEndian.PutUint64(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + +func (enc *Encoder) writeUint32(a uint32) error { + var buff [32 / 8]byte + binary.BigEndian.PutUint32(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + // SizeOfG1AffineCompressed represents the size in bytes that a G1Affine need in binary form, compressed const SizeOfG1AffineCompressed = 96 diff --git a/ecc/bw6-756/marshal_test.go b/ecc/bw6-756/marshal_test.go index fa06db299..422abc90e 100644 --- a/ecc/bw6-756/marshal_test.go +++ b/ecc/bw6-756/marshal_test.go @@ 
-53,6 +53,7 @@ func TestEncoder(t *testing.T) { var inJ []fr.Element var inK fr.Vector var inL [][]fr.Element + var inM [][]uint64 // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -70,12 +71,13 @@ func TestEncoder(t *testing.T) { inK = make(fr.Vector, 42) inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} + inM = [][]uint64{{1, 2}, {4}, {}} // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -101,8 +103,9 @@ func TestEncoder(t *testing.T) { var outJ []fr.Element var outK fr.Vector var outL [][]fr.Element + var outM [][]uint64 - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -145,6 +148,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inL, outL) { t.Fatal("decode(encode(slice²(elements))) failed") } + if !reflect.DeepEqual(inM, outM) { + t.Fatal("decode(encode(slice²(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bw6-761/fr/pedersen/pedersen.go b/ecc/bw6-761/fr/pedersen/pedersen.go index f22b49970..30f14a529 100644 --- a/ecc/bw6-761/fr/pedersen/pedersen.go +++ b/ecc/bw6-761/fr/pedersen/pedersen.go @@ -18,10 +18,12 @@ package pedersen import ( "crypto/rand" + "crypto/sha256" "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "io" "math/big" ) @@ -51,7 +53,7 @@ func randomOnG2() (curve.G2Affine, error) { // TODO: Add to G2.go? 
} } -func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { +func Setup(bases ...[]curve.G1Affine) (pk []ProvingKey, vk VerifyingKey, err error) { if vk.g, err = randomOnG2(); err != nil { return @@ -70,34 +72,139 @@ func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { sigmaInvNeg.Sub(fr.Modulus(), &sigmaInvNeg) vk.gRootSigmaNeg.ScalarMultiplication(&vk.g, &sigmaInvNeg) - pk.basisExpSigma = make([]curve.G1Affine, len(basis)) - for i := range basis { - pk.basisExpSigma[i].ScalarMultiplication(&basis[i], sigma) + pk = make([]ProvingKey, len(bases)) + for i := range bases { + pk[i].basisExpSigma = make([]curve.G1Affine, len(bases[i])) + for j := range bases[i] { + pk[i].basisExpSigma[j].ScalarMultiplication(&bases[i][j], sigma) + } + pk[i].basis = bases[i] + } + return +} + +func (pk *ProvingKey) ProveKnowledge(values []fr.Element) (pok curve.G1Affine, err error) { + if len(values) != len(pk.basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, // TODO Experiment } - pk.basis = basis + _, err = pok.MultiExp(pk.basisExpSigma, values, config) return } -func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, knowledgeProof curve.G1Affine, err error) { +func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, err error) { if len(values) != len(pk.basis) { - err = fmt.Errorf("unexpected number of values") + err = fmt.Errorf("must have as many values as basis elements") return } // TODO @gbotrel this will spawn more than one task, see // https://github.com/ConsenSys/gnark-crypto/issues/269 config := ecc.MultiExpConfig{ - NbTasks: 1, // TODO Experiment + NbTasks: 1, + } + _, err = commitment.MultiExp(pk.basis, values, config) + + return +} + +// BatchProve generates a single proof of knowledge for multiple commitments for faster verification +func BatchProve(pk []ProvingKey, values [][]fr.Element, fiatshamirSeeds ...[]byte) (pok curve.G1Affine, err error) { + if len(pk) != len(values) { + err = fmt.Errorf("must have as many value vectors as bases") + return } - if _, err = commitment.MultiExp(pk.basis, values, config); err != nil { + if len(pk) == 1 { // no need to fold + return pk[0].ProveKnowledge(values[0]) + } else if len(pk) == 0 { // nothing to do at all return } - _, err = knowledgeProof.MultiExp(pk.basisExpSigma, values, config) + offset := 0 + for i := range pk { + if len(values[i]) != len(pk[i].basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + offset += len(values[i]) + } + + var r fr.Element + if r, err = getChallenge(fiatshamirSeeds); err != nil { + return + } + + // prepare one amalgamated MSM + scaledValues := make([]fr.Element, offset) + basis := make([]curve.G1Affine, offset) + + copy(basis, pk[0].basisExpSigma) + copy(scaledValues, values[0]) + + offset = len(values[0]) + rI := r + for i := 1; i < len(pk); i++ { + copy(basis[offset:], pk[i].basisExpSigma) + for j := range pk[i].basis { + scaledValues[offset].Mul(&values[i][j], &rI) + offset++ + } + if i+1 < len(pk) { + rI.Mul(&rI, &r) + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = pok.MultiExp(basis, scaledValues, config) + return +} + +// FoldCommitments amalgamates 
multiple commitments into one, which can be verified against a folded proof obtained from BatchProve +func FoldCommitments(commitments []curve.G1Affine, fiatshamirSeeds ...[]byte) (commitment curve.G1Affine, err error) { + + if len(commitments) == 1 { // no need to fold + commitment = commitments[0] + return + } else if len(commitments) == 0 { // nothing to do at all + return + } + + r := make([]fr.Element, len(commitments)) + r[0].SetOne() + if r[1], err = getChallenge(fiatshamirSeeds); err != nil { + return + } + for i := 2; i < len(commitments); i++ { + r[i].Mul(&r[i-1], &r[1]) + } + + for i := range commitments { // TODO @Tabaie Remove if MSM does subgroup check for you + if !commitments[i].IsInSubGroup() { + err = fmt.Errorf("subgroup check failed") + return + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = commitment.MultiExp(commitments, r, config) return } @@ -108,21 +215,36 @@ func (vk *VerifyingKey) Verify(commitment curve.G1Affine, knowledgeProof curve.G return fmt.Errorf("subgroup check failed") } - product, err := curve.Pair([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}) - if err != nil { + if isOne, err := curve.PairingCheck([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}); err != nil { return err + } else if !isOne { + return fmt.Errorf("proof rejected") } - if product.IsOne() { - return nil + return nil +} + +func getChallenge(fiatshamirSeeds [][]byte) (r fr.Element, err error) { + // incorporate user-provided seeds into the transcript + t := fiatshamir.NewTranscript(sha256.New(), "r") + for i := range fiatshamirSeeds { + if err = t.Bind("r", fiatshamirSeeds[i]); err != nil { + return + } + } + + // obtain the challenge + var rBytes []byte + + if rBytes, err = t.ComputeChallenge("r"); err != nil { + return } - return fmt.Errorf("proof rejected") + r.SetBytes(rBytes) // TODO @Tabaie Plonk challenge generation done the same way; replace both with hash to fr?
+ return } // Marshal -func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) - +func (pk *ProvingKey) writeTo(enc *curve.Encoder) (int64, error) { if err := enc.Encode(pk.basis); err != nil { return enc.BytesWritten(), err } @@ -132,6 +254,14 @@ func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { return enc.BytesWritten(), err } +func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w)) +} + +func (pk *ProvingKey) WriteRawTo(w io.Writer) (int64, error) { + return pk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { dec := curve.NewDecoder(r) @@ -150,7 +280,14 @@ func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) { } func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { - enc := curve.NewEncoder(w) + return vk.writeTo(curve.NewEncoder(w)) +} + +func (vk *VerifyingKey) WriteRawTo(w io.Writer) (int64, error) { + return vk.writeTo(curve.NewEncoder(w, curve.RawEncoding())) +} + +func (vk *VerifyingKey) writeTo(enc *curve.Encoder) (int64, error) { var err error if err = enc.Encode(&vk.g); err != nil { @@ -161,7 +298,15 @@ func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) { } func (vk *VerifyingKey) ReadFrom(r io.Reader) (int64, error) { - dec := curve.NewDecoder(r) + return vk.readFrom(r) +} + +func (vk *VerifyingKey) UnsafeReadFrom(r io.Reader) (int64, error) { + return vk.readFrom(r, curve.NoSubgroupChecks()) +} + +func (vk *VerifyingKey) readFrom(r io.Reader, decOptions ...func(*curve.Decoder)) (int64, error) { + dec := curve.NewDecoder(r, decOptions...) var err error if err = dec.Decode(&vk.g); err != nil { diff --git a/ecc/bw6-761/fr/pedersen/pedersen_test.go b/ecc/bw6-761/fr/pedersen/pedersen_test.go index 646cf0e9b..aacb677c3 100644 --- a/ecc/bw6-761/fr/pedersen/pedersen_test.go +++ b/ecc/bw6-761/fr/pedersen/pedersen_test.go @@ -17,6 +17,7 @@ package pedersen import ( + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" "github.com/consensys/gnark-crypto/utils" @@ -67,15 +68,18 @@ func testCommit(t *testing.T, values ...interface{}) { basis := randomG1Slice(t, len(values)) var ( - pk ProvingKey + pk []ProvingKey vk VerifyingKey err error commitment, pok curve.G1Affine ) + valuesFr := interfaceSliceToFrSlice(t, values...) pk, vk, err = Setup(basis) assert.NoError(t, err) - commitment, pok, err = pk.Commit(interfaceSliceToFrSlice(t, values...)) + commitment, err = pk[0].Commit(valuesFr) + assert.NoError(t, err) + pok, err = pk[0].ProveKnowledge(valuesFr) assert.NoError(t, err) assert.NoError(t, vk.Verify(commitment, pok)) @@ -83,6 +87,71 @@ func testCommit(t *testing.T, values ...interface{}) { assert.NotNil(t, vk.Verify(commitment, pok)) } +func TestFoldProofs(t *testing.T) { + + values := [][]fr.Element{ + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...), + } + + bases := make([][]curve.G1Affine, len(values)) + for i := range bases { + bases[i] = randomG1Slice(t, len(values[i])) + } + + pk, vk, err := Setup(bases...) 
+ assert.NoError(t, err) + + commitments := make([]curve.G1Affine, len(values)) + for i := range values { + commitments[i], err = pk[i].Commit(values[i]) + assert.NoError(t, err) + } + + t.Run("folding with zeros", func(t *testing.T) { + pokFolded, err := BatchProve(pk[:2], [][]fr.Element{ + values[0], + make([]fr.Element, len(values[1])), + }, []byte("test")) + assert.NoError(t, err) + var pok curve.G1Affine + pok, err = pk[0].ProveKnowledge(values[0]) + assert.NoError(t, err) + assert.Equal(t, pok, pokFolded) + }) + + t.Run("run empty", func(t *testing.T) { + var foldedCommitment curve.G1Affine + pok, err := BatchProve([]ProvingKey{}, [][]fr.Element{}, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments([]curve.G1Affine{}, []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + }) + + run := func(values [][]fr.Element) func(t *testing.T) { + return func(t *testing.T) { + + var foldedCommitment curve.G1Affine + pok, err := BatchProve(pk[:len(values)], values, []byte("test")) + assert.NoError(t, err) + + foldedCommitment, err = FoldCommitments(commitments[:len(values)], []byte("test")) + assert.NoError(t, err) + assert.NoError(t, vk.Verify(foldedCommitment, pok)) + + pok.Neg(&pok) + assert.NotNil(t, vk.Verify(foldedCommitment, pok)) + } + } + + for i := range values { + t.Run(fmt.Sprintf("folding %d", i+1), run(values[:i+1])) + } +} + func TestCommitToOne(t *testing.T) { testCommit(t, 1) } @@ -110,5 +179,7 @@ func TestMarshal(t *testing.T) { assert.NoError(t, err) t.Run("ProvingKey -> Bytes -> ProvingKey must remain identical.", utils.SerializationRoundTrip(&pk)) + t.Run("ProvingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&pk)) t.Run("VerifyingKey -> Bytes -> VerifyingKey must remain identical.", utils.SerializationRoundTrip(&vk)) + t.Run("VerifyingKey -> Bytes (raw) -> VerifyingKey must remain identical.", utils.SerializationRoundTripRaw(&vk)) } diff --git a/ecc/bw6-761/marshal.go b/ecc/bw6-761/marshal.go index 1c5cc799b..5a6ae02e2 100644 --- a/ecc/bw6-761/marshal.go +++ b/ecc/bw6-761/marshal.go @@ -103,24 +103,36 @@ func (dec *Decoder) Decode(v interface{}) (err error) { var buf [SizeOfG2AffineUncompressed]byte var read int + var sliceLen uint32 switch t := v.(type) { + case *[][]uint64: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + *t = make([][]uint64, sliceLen) + + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + (*t)[i] = make([]uint64, sliceLen) + for j := range (*t)[i] { + if (*t)[i][j], err = dec.readUint64(); err != nil { + return + } + } + } + return case *[]uint64: - buf64 := buf[:64/8] - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if sliceLen, err = dec.readUint32(); err != nil { return } - length := binary.BigEndian.Uint64(buf64) - *t = make([]uint64, length) + *t = make([]uint64, sliceLen) for i := range *t { - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if (*t)[i], err = dec.readUint64(); err != nil { return } - (*t)[i] = binary.BigEndian.Uint64(buf64) } return case *fr.Element: @@ -148,7 +160,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 return case *[][]fr.Element: - var sliceLen uint32 if sliceLen, err = dec.readUint32(); err != nil { return } @@ -215,7 +226,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { _, err = t.setBytes(buf[:nbBytes], dec.subGroupCheck) return case
*[]G1Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -255,7 +265,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -281,7 +291,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { return nil case *[]G2Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -321,7 +330,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -376,6 +385,18 @@ func (dec *Decoder) readUint32() (r uint32, err error) { return } +func (dec *Decoder) readUint64() (r uint64, err error) { + var read int + var buf [8]byte + read, err = io.ReadFull(dec.r, buf[:]) + dec.n += int64(read) + if err != nil { + return + } + r = binary.BigEndian.Uint64(buf[:]) + return +} + // isMaskInvalid returns true if the mask is invalid func isMaskInvalid(msb byte) bool { mData := msb & mMask @@ -467,6 +488,8 @@ func (enc *Encoder) encode(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -583,6 +606,8 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) @@ -679,25 +704,51 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { } } -func (enc *Encoder) writeUint64Slice(t []uint64) error { - buff := make([]byte, 64/8) - binary.BigEndian.PutUint64(buff, uint64(len(t))) - written, err := enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err +func (enc *Encoder) writeUint64Slice(t []uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return } for i := range t { - binary.BigEndian.PutUint64(buff, t[i]) - written, err = enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err + if err = enc.writeUint64(t[i]); err != nil { + return } } return nil } +func (enc *Encoder) writeUint64SliceSlice(t [][]uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return + } + for i := range t { + if err = enc.writeUint32(uint32(len(t[i]))); err != nil { + return + } + for j := range t[i] { + if err = enc.writeUint64(t[i][j]); err != nil { + return + } + } + } + return nil +} + +func (enc *Encoder) writeUint64(a uint64) error { + var buff [64 / 8]byte + binary.BigEndian.PutUint64(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + +func (enc *Encoder) writeUint32(a uint32) error { + var buff [32 / 8]byte + binary.BigEndian.PutUint32(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + // SizeOfG1AffineCompressed represents the size in bytes that a G1Affine need in binary form, compressed const SizeOfG1AffineCompressed = 96 diff --git a/ecc/bw6-761/marshal_test.go b/ecc/bw6-761/marshal_test.go index 99b6bcfa2..185be8007 100644 --- a/ecc/bw6-761/marshal_test.go +++ b/ecc/bw6-761/marshal_test.go @@ 
-53,6 +53,7 @@ func TestEncoder(t *testing.T) { var inJ []fr.Element var inK fr.Vector var inL [][]fr.Element + var inM [][]uint64 // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -70,12 +71,13 @@ func TestEncoder(t *testing.T) { inK = make(fr.Vector, 42) inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} + inM = [][]uint64{{1, 2}, {4}, {}} // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -101,8 +103,9 @@ func TestEncoder(t *testing.T) { var outJ []fr.Element var outK fr.Vector var outL [][]fr.Element + var outM [][]uint64 - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -145,6 +148,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inL, outL) { t.Fatal("decode(encode(slice²(elements))) failed") } + if !reflect.DeepEqual(inM, outM) { + t.Fatal("decode(encode(slice²(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/internal/generator/ecc/template/marshal.go.tmpl b/internal/generator/ecc/template/marshal.go.tmpl index 795cb13ec..95a26f42d 100644 --- a/internal/generator/ecc/template/marshal.go.tmpl +++ b/internal/generator/ecc/template/marshal.go.tmpl @@ -108,24 +108,36 @@ func (dec *Decoder) Decode(v interface{}) (err error) { var buf [SizeOfG2AffineUncompressed]byte var read int + var sliceLen uint32 switch t := v.(type) { + case *[][]uint64: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + *t = make([][]uint64, sliceLen) + + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + (*t)[i] = make([]uint64, sliceLen) + for j := range (*t)[i] { + if (*t)[i][j], err = dec.readUint64(); err != nil { + return + } + } + } + return case *[]uint64: - buf64 := buf[:64/8] - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if sliceLen, err = dec.readUint32(); err != nil { return } - length := binary.BigEndian.Uint64(buf64) - *t = make([]uint64, length) + *t = make([]uint64, sliceLen) for i := range *t { - read, err = io.ReadFull(dec.r, buf64) - dec.n += int64(read) - if err != nil { + if (*t)[i], err = dec.readUint64(); err != nil { return } - (*t)[i] = binary.BigEndian.Uint64(buf64) } return case *fr.Element: @@ -153,7 +165,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n+=read64 return case *[][]fr.Element: - var sliceLen uint32 if sliceLen, err = dec.readUint32(); err != nil { return } @@ -224,7 +235,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { _, err = t.setBytes(buf[:nbBytes], dec.subGroupCheck) return case *[]G1Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -266,7 +276,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = 
!r @@ -292,7 +302,6 @@ func (dec *Decoder) Decode(v interface{}) (err error) { return nil case *[]G2Affine: - var sliceLen uint32 sliceLen, err = dec.readUint32() if err != nil { return @@ -335,7 +344,7 @@ func (dec *Decoder) Decode(v interface{}) (err error) { } } else { var r bool - if r, err = ((*t)[i].unsafeSetCompressedBytes(buf[:nbBytes])); err != nil { + if r, err = (*t)[i].unsafeSetCompressedBytes(buf[:nbBytes]); err != nil { return } compressed[i] = !r @@ -390,6 +399,18 @@ func (dec *Decoder) readUint32() (r uint32, err error) { return } +func (dec *Decoder) readUint64() (r uint64, err error) { + var read int + var buf [8]byte + read, err = io.ReadFull(dec.r, buf[:]) + dec.n += int64(read) + if err != nil { + return + } + r = binary.BigEndian.Uint64(buf[:]) + return +} + {{ if ge .FpUnusedBits 3}} // isMaskInvalid returns true if the mask is invalid func isMaskInvalid(msb byte) bool { @@ -469,25 +490,51 @@ func isZeroed(firstByte byte, buf []byte) bool { {{template "encode" dict "Raw" ""}} {{template "encode" dict "Raw" "Raw"}} -func (enc *Encoder) writeUint64Slice(t []uint64) error { - buff := make([]byte, 64/8) - binary.BigEndian.PutUint64(buff, uint64(len(t))) - written, err := enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err +func (enc *Encoder) writeUint64Slice(t []uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return } for i := range t { - binary.BigEndian.PutUint64(buff, t[i]) - written, err = enc.w.Write(buff) - enc.n += int64(written) - if err != nil { - return err + if err = enc.writeUint64(t[i]); err != nil { + return + } + } + return nil +} + +func (enc *Encoder) writeUint64SliceSlice(t [][]uint64) (err error) { + if err = enc.writeUint32(uint32(len(t))); err != nil { + return + } + for i := range t { + if err = enc.writeUint32(uint32(len(t[i]))); err != nil { + return + } + for j := range t[i] { + if err = enc.writeUint64(t[i][j]); err != nil { + return + } } } return nil } +func (enc *Encoder) writeUint64(a uint64) error { + var buff [64 / 8]byte + binary.BigEndian.PutUint64(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + +func (enc *Encoder) writeUint32(a uint32) error { + var buff [32 / 8]byte + binary.BigEndian.PutUint32(buff[:], a) + written, err := enc.w.Write(buff[:]) + enc.n += int64(written) + return err +} + {{ define "encode"}} func (enc *Encoder) encode{{- $.Raw}}(v interface{}) (err error) { @@ -510,6 +557,8 @@ func (enc *Encoder) encode{{- $.Raw}}(v interface{}) (err error) { switch t := v.(type) { case []uint64: return enc.writeUint64Slice(t) + case [][]uint64: + return enc.writeUint64SliceSlice(t) case *fr.Element: buf := t.Bytes() written, err = enc.w.Write(buf[:]) diff --git a/internal/generator/ecc/template/tests/marshal.go.tmpl b/internal/generator/ecc/template/tests/marshal.go.tmpl index 88e4269ee..9860884e9 100644 --- a/internal/generator/ecc/template/tests/marshal.go.tmpl +++ b/internal/generator/ecc/template/tests/marshal.go.tmpl @@ -43,6 +43,7 @@ func TestEncoder(t *testing.T) { var inJ []fr.Element var inK fr.Vector var inL [][]fr.Element + var inM [][]uint64 // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -60,13 +61,13 @@ func TestEncoder(t *testing.T) { inK = make(fr.Vector, 42) inK[41].SetUint64(42) inL =[][]fr.Element {inJ, inK} - + inM = [][]uint64{ {1, 2}, {4}, {} } // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) 
- toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -93,8 +94,9 @@ func TestEncoder(t *testing.T) { var outJ []fr.Element var outK fr.Vector var outL [][]fr.Element + var outM [][]uint64 - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -135,7 +137,10 @@ func TestEncoder(t *testing.T) { t.Fatal("decode(encode(vector)) failed") } if !reflect.DeepEqual(inL, outL) { - t.Fatal("decode(encode(slice²(elements))) failed") + t.Fatal("decode(encode(slice²(elements))) failed") + } + if !reflect.DeepEqual(inM, outM) { + t.Fatal("decode(encode(slice²(uint64))) failed") } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") diff --git a/internal/generator/pedersen/template/pedersen.go.tmpl b/internal/generator/pedersen/template/pedersen.go.tmpl index 6957402e1..c84d17559 100644 --- a/internal/generator/pedersen/template/pedersen.go.tmpl +++ b/internal/generator/pedersen/template/pedersen.go.tmpl @@ -1,9 +1,11 @@ import ( "crypto/rand" + "crypto/sha256" "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/{{.Name}}" "github.com/consensys/gnark-crypto/ecc/{{.Name}}/fr" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" "io" "math/big" ) @@ -33,7 +35,7 @@ func randomOnG2() (curve.G2Affine, error) { // TODO: Add to G2.go? 
} } -func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { +func Setup(bases ...[]curve.G1Affine) (pk []ProvingKey, vk VerifyingKey, err error) { if vk.g, err = randomOnG2(); err != nil { return @@ -52,37 +54,142 @@ func Setup(basis []curve.G1Affine) (pk ProvingKey, vk VerifyingKey, err error) { sigmaInvNeg.Sub(fr.Modulus(), &sigmaInvNeg) vk.gRootSigmaNeg.ScalarMultiplication(&vk.g, &sigmaInvNeg) - pk.basisExpSigma = make([]curve.G1Affine, len(basis)) - for i := range basis { - pk.basisExpSigma[i].ScalarMultiplication(&basis[i], sigma) + pk = make([]ProvingKey, len(bases)) + for i := range bases { + pk[i].basisExpSigma = make([]curve.G1Affine, len(bases[i])) + for j := range bases[i] { + pk[i].basisExpSigma[j].ScalarMultiplication(&bases[i][j], sigma) + } + pk[i].basis = bases[i] + } + return +} + +func (pk *ProvingKey) ProveKnowledge(values []fr.Element) (pok curve.G1Affine, err error) { + if len(values) != len(pk.basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, // TODO Experiment } - pk.basis = basis + _, err = pok.MultiExp(pk.basisExpSigma, values, config) return } -func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, knowledgeProof curve.G1Affine, err error) { +func (pk *ProvingKey) Commit(values []fr.Element) (commitment curve.G1Affine, err error) { if len(values) != len(pk.basis) { - err = fmt.Errorf("unexpected number of values") + err = fmt.Errorf("must have as many values as basis elements") return } // TODO @gbotrel this will spawn more than one task, see - // https://github.com/ConsenSys/gnark-crypto/issues/269 + // https://github.com/ConsenSys/gnark-crypto/issues/269 config := ecc.MultiExpConfig{ - NbTasks: 1, // TODO Experiment + NbTasks: 1, + } + _, err = commitment.MultiExp(pk.basis, values, config) + + return +} + +// BatchProve generates a single proof of knowledge for multiple commitments for faster verification +func BatchProve(pk []ProvingKey, values [][]fr.Element, fiatshamirSeeds ...[]byte) (pok curve.G1Affine, err error) { + if len(pk) != len(values) { + err = fmt.Errorf("must have as many value vectors as bases") + return } - if _, err = commitment.MultiExp(pk.basis, values, config); err != nil { + if len(pk) == 1 { // no need to fold + return pk[0].ProveKnowledge(values[0]) + } else if len(pk) == 0 { // nothing to do at all + return + } + + offset := 0 + for i := range pk { + if len(values[i]) != len(pk[i].basis) { + err = fmt.Errorf("must have as many values as basis elements") + return + } + offset += len(values[i]) + } + + var r fr.Element + if r, err = getChallenge(fiatshamirSeeds); err != nil { return } - _, err = knowledgeProof.MultiExp(pk.basisExpSigma, values, config) + // prepare one amalgamated MSM + scaledValues := make([]fr.Element, offset) + basis := make([]curve.G1Affine, offset) + + copy(basis, pk[0].basisExpSigma) + copy(scaledValues, values[0]) + + offset = len(values[0]) + rI := r + for i := 1; i < len(pk); i++ { + copy(basis[offset:], pk[i].basisExpSigma) + for j := range pk[i].basis { + scaledValues[offset].Mul(&values[i][j], &rI) + offset++ + } + if i+1 < len(pk) { + rI.Mul(&rI, &r) + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = pok.MultiExp(basis, 
scaledValues, config) return } +// FoldCommitments amalgamates multiple commitments into one, which can be verified against a folded proof obtained from BatchProve +func FoldCommitments(commitments []curve.G1Affine, fiatshamirSeeds ...[]byte) (commitment curve.G1Affine, err error) { + + if len(commitments) == 1 { // no need to fold + commitment = commitments[0] + return + } else if len(commitments) == 0 { // nothing to do at all + return + } + + r := make([]fr.Element, len(commitments)) + r[0].SetOne() + if r[1], err = getChallenge(fiatshamirSeeds); err != nil { + return + } + for i := 2; i < len(commitments); i++ { + r[i].Mul(&r[i-1], &r[1]) + } + + for i := range commitments { // TODO @Tabaie Remove if MSM does subgroup check for you + if !commitments[i].IsInSubGroup() { + err = fmt.Errorf("subgroup check failed") + return + } + } + + // TODO @gbotrel this will spawn more than one task, see + // https://github.com/ConsenSys/gnark-crypto/issues/269 + config := ecc.MultiExpConfig{ + NbTasks: 1, + } + _, err = commitment.MultiExp(commitments, r, config) + return +} + // Verify checks if the proof of knowledge is valid func (vk *VerifyingKey) Verify(commitment curve.G1Affine, knowledgeProof curve.G1Affine) error { @@ -90,22 +197,36 @@ func (vk *VerifyingKey) Verify(commitment curve.G1Affine, knowledgeProof curve.G return fmt.Errorf("subgroup check failed") } - product, err := curve.Pair([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}) - if err != nil { - return err + if isOne, err := curve.PairingCheck([]curve.G1Affine{commitment, knowledgeProof}, []curve.G2Affine{vk.g, vk.gRootSigmaNeg}); err != nil { + return err + } else if !isOne { + return fmt.Errorf("proof rejected") + } + return nil +} + +func getChallenge(fiatshamirSeeds [][]byte) (r fr.Element, err error) { + // incorporate user-provided seeds into the transcript + t := fiatshamir.NewTranscript(sha256.New(), "r") + for i := range fiatshamirSeeds { + if err = t.Bind("r", fiatshamirSeeds[i]); err != nil { + return + } } - if product.IsOne() { - return nil + + // obtain the challenge + var rBytes []byte + + if rBytes, err = t.ComputeChallenge("r"); err != nil { + return } - return fmt.Errorf("proof rejected") + r.SetBytes(rBytes) // TODO @Tabaie Plonk challenge generation done the same way; replace both with hash to fr?
+	return
 }
 
-// Marshal
-func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) {
-	enc := curve.NewEncoder(w)
-
+func (pk *ProvingKey) writeTo(enc *curve.Encoder) (int64, error) {
 	if err := enc.Encode(pk.basis); err != nil {
 		return enc.BytesWritten(), err
 	}
@@ -115,6 +236,14 @@ func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) {
 	return enc.BytesWritten(), err
 }
 
+func (pk *ProvingKey) WriteTo(w io.Writer) (int64, error) {
+	return pk.writeTo(curve.NewEncoder(w))
+}
+
+func (pk *ProvingKey) WriteRawTo(w io.Writer) (int64, error) {
+	return pk.writeTo(curve.NewEncoder(w, curve.RawEncoding()))
+}
+
 func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) {
 	dec := curve.NewDecoder(r)
 
@@ -133,7 +262,14 @@ func (pk *ProvingKey) ReadFrom(r io.Reader) (int64, error) {
 }
 
 func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) {
-	enc := curve.NewEncoder(w)
+	return vk.writeTo(curve.NewEncoder(w))
+}
+
+func (vk *VerifyingKey) WriteRawTo(w io.Writer) (int64, error) {
+	return vk.writeTo(curve.NewEncoder(w, curve.RawEncoding()))
+}
+
+func (vk *VerifyingKey) writeTo(enc *curve.Encoder) (int64, error) {
 	var err error
 
 	if err = enc.Encode(&vk.g); err != nil {
@@ -144,7 +280,15 @@ func (vk *VerifyingKey) WriteTo(w io.Writer) (int64, error) {
 }
 
 func (vk *VerifyingKey) ReadFrom(r io.Reader) (int64, error) {
-	dec := curve.NewDecoder(r)
+	return vk.readFrom(r)
+}
+
+func (vk *VerifyingKey) UnsafeReadFrom(r io.Reader) (int64, error) {
+	return vk.readFrom(r, curve.NoSubgroupChecks())
+}
+
+func (vk *VerifyingKey) readFrom(r io.Reader, decOptions ...func(*curve.Decoder)) (int64, error) {
+	dec := curve.NewDecoder(r, decOptions...)
 	var err error
 
 	if err = dec.Decode(&vk.g); err != nil {
diff --git a/internal/generator/pedersen/template/pedersen.test.go.tmpl b/internal/generator/pedersen/template/pedersen.test.go.tmpl
index 12f6f8002..73ec97873 100644
--- a/internal/generator/pedersen/template/pedersen.test.go.tmpl
+++ b/internal/generator/pedersen/template/pedersen.test.go.tmpl
@@ -1,4 +1,5 @@
 import (
+	"fmt"
 	curve "github.com/consensys/gnark-crypto/ecc/{{.Name}}"
 	"github.com/consensys/gnark-crypto/ecc/{{.Name}}/fr"
 	"github.com/consensys/gnark-crypto/utils"
@@ -49,15 +50,18 @@ func testCommit(t *testing.T, values ...interface{}) {
 	basis := randomG1Slice(t, len(values))
 
 	var (
-		pk              ProvingKey
+		pk              []ProvingKey
 		vk              VerifyingKey
 		err             error
 		commitment, pok curve.G1Affine
 	)
+	valuesFr := interfaceSliceToFrSlice(t, values...)
 
 	pk, vk, err = Setup(basis)
 	assert.NoError(t, err)
 
-	commitment, pok, err = pk.Commit(interfaceSliceToFrSlice(t, values...))
+	commitment, err = pk[0].Commit(valuesFr)
+	assert.NoError(t, err)
+	pok, err = pk[0].ProveKnowledge(valuesFr)
 	assert.NoError(t, err)
 
 	assert.NoError(t, vk.Verify(commitment, pok))
@@ -65,6 +69,71 @@ func testCommit(t *testing.T, values ...interface{}) {
 	assert.NotNil(t, vk.Verify(commitment, pok))
 }
 
+func TestFoldProofs(t *testing.T) {
+
+	values := [][]fr.Element{
+		interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...),
+		interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...),
+		interfaceSliceToFrSlice(t, randomFrSlice(t, 5)...),
+	}
+
+	bases := make([][]curve.G1Affine, len(values))
+	for i := range bases {
+		bases[i] = randomG1Slice(t, len(values[i]))
+	}
+
+	pk, vk, err := Setup(bases...)
+	assert.NoError(t, err)
+
+	commitments := make([]curve.G1Affine, len(values))
+	for i := range values {
+		commitments[i], err = pk[i].Commit(values[i])
+		assert.NoError(t, err)
+	}
+
+	t.Run("folding with zeros", func(t *testing.T) {
+		pokFolded, err := BatchProve(pk[:2], [][]fr.Element{
+			values[0],
+			make([]fr.Element, len(values[1])),
+		}, []byte("test"))
+		assert.NoError(t, err)
+		var pok curve.G1Affine
+		pok, err = pk[0].ProveKnowledge(values[0])
+		assert.NoError(t, err)
+		assert.Equal(t, pok, pokFolded)
+	})
+
+	t.Run("run empty", func(t *testing.T) {
+		var foldedCommitment curve.G1Affine
+		pok, err := BatchProve([]ProvingKey{}, [][]fr.Element{}, []byte("test"))
+		assert.NoError(t, err)
+
+		foldedCommitment, err = FoldCommitments([]curve.G1Affine{}, []byte("test"))
+		assert.NoError(t, err)
+		assert.NoError(t, vk.Verify(foldedCommitment, pok))
+	})
+
+	run := func(values [][]fr.Element) func(t *testing.T) {
+		return func(t *testing.T) {
+
+			var foldedCommitment curve.G1Affine
+			pok, err := BatchProve(pk[:len(values)], values, []byte("test"))
+			assert.NoError(t, err)
+
+			foldedCommitment, err = FoldCommitments(commitments[:len(values)], []byte("test"))
+			assert.NoError(t, err)
+			assert.NoError(t, vk.Verify(foldedCommitment, pok))
+
+			pok.Neg(&pok)
+			assert.NotNil(t, vk.Verify(foldedCommitment, pok))
+		}
+	}
+
+	for i := range values {
+		t.Run(fmt.Sprintf("folding %d", i+1), run(values[:i+1]))
+	}
+}
+
 func TestCommitToOne(t *testing.T) {
 	testCommit(t, 1)
 }
 
@@ -92,5 +161,7 @@ func TestMarshal(t *testing.T) {
 	assert.NoError(t, err)
 
 	t.Run("ProvingKey -> Bytes -> ProvingKey must remain identical.", utils.SerializationRoundTrip(&pk))
+	t.Run("ProvingKey -> Bytes (raw) -> ProvingKey must remain identical.", utils.SerializationRoundTripRaw(&pk))
 	t.Run("VerifyingKey -> Bytes -> VerifyingKey must remain identical.", utils.SerializationRoundTrip(&vk))
+	t.Run("VerifyingKey -> Bytes (raw) -> VerifyingKey must remain identical.", utils.SerializationRoundTripRaw(&vk))
 }
\ No newline at end of file
diff --git a/utils/testing.go b/utils/testing.go
index af2d37c20..247ec63d5 100644
--- a/utils/testing.go
+++ b/utils/testing.go
@@ -13,6 +13,10 @@ type Serializable interface {
 	io.WriterTo
 }
 
+type RawSerializable interface {
+	WriteRawTo(io.Writer) (int64, error)
+}
+
 func SerializationRoundTrip(o Serializable) func(*testing.T) {
 	return func(t *testing.T) {
 		// serialize it...
@@ -29,3 +33,20 @@ func SerializationRoundTrip(o Serializable) func(*testing.T) {
 		assert.Equal(t, o, _o)
 	}
 }
+
+func SerializationRoundTripRaw(o RawSerializable) func(*testing.T) {
+	return func(t *testing.T) {
+		// serialize it...
+		var buf bytes.Buffer
+		_, err := o.WriteRawTo(&buf)
+		assert.NoError(t, err)
+
+		// reconstruct the object
+		_o := reflect.New(reflect.TypeOf(o).Elem()).Interface().(Serializable)
+		_, err = _o.ReadFrom(&buf)
+		assert.NoError(t, err)
+
+		// compare
+		assert.Equal(t, o, _o)
+	}
+}
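
For reviewers who want to see the new API end to end, below is a minimal usage sketch; it is not part of the patch. It assumes the bls12-377 instantiation of the generated pedersen package; the hashedBasis helper, the domain-separation strings, and the transcript seed are invented for the example. The one invariant worth highlighting is that BatchProve and FoldCommitments must be given the same Fiat-Shamir seeds, since both derive the folding challenge from them through getChallenge.

// Minimal usage sketch of the batched Pedersen API (illustrative only).
package main

import (
	"fmt"

	curve "github.com/consensys/gnark-crypto/ecc/bls12-377"
	"github.com/consensys/gnark-crypto/ecc/bls12-377/fr"
	"github.com/consensys/gnark-crypto/ecc/bls12-377/fr/pedersen"
)

// hashedBasis derives n G1 basis points via hash-to-curve, so no discrete-log
// relations between basis elements are known. Helper name and tags are made up
// for this example.
func hashedBasis(n int, tag string) ([]curve.G1Affine, error) {
	basis := make([]curve.G1Affine, n)
	for i := range basis {
		p, err := curve.HashToG1([]byte(fmt.Sprintf("%s/%d", tag, i)), []byte("pedersen-basis-example"))
		if err != nil {
			return nil, err
		}
		basis[i] = p
	}
	return basis, nil
}

func main() {
	// Two independent bases; Setup returns one ProvingKey per basis and a shared VerifyingKey.
	basis0, _ := hashedBasis(3, "slot0")
	basis1, _ := hashedBasis(4, "slot1")
	pk, vk, err := pedersen.Setup(basis0, basis1)
	if err != nil {
		panic(err)
	}

	// One value vector per basis, with matching lengths.
	values := [][]fr.Element{make([]fr.Element, 3), make([]fr.Element, 4)}
	for i := range values {
		for j := range values[i] {
			values[i][j].SetRandom()
		}
	}

	// Commit to each vector separately...
	commitments := make([]curve.G1Affine, len(values))
	for i := range values {
		if commitments[i], err = pk[i].Commit(values[i]); err != nil {
			panic(err)
		}
	}

	// ...then produce a single folded proof of knowledge. BatchProve and
	// FoldCommitments must see the same Fiat-Shamir seeds so that they derive
	// the same folding challenge.
	seed := []byte("example-transcript")
	pok, err := pedersen.BatchProve(pk, values, seed)
	if err != nil {
		panic(err)
	}
	folded, err := pedersen.FoldCommitments(commitments, seed)
	if err != nil {
		panic(err)
	}

	// A single pairing-based check covers all commitments at once.
	fmt.Println("batched proof accepted:", vk.Verify(folded, pok) == nil)
}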