diff --git a/merklize/binary_encoding.go b/merklize/binary_encoding.go new file mode 100644 index 0000000..4a4a1e7 --- /dev/null +++ b/merklize/binary_encoding.go @@ -0,0 +1,458 @@ +package merklize + +import ( + "bytes" + "context" + "encoding/gob" + "encoding/json" + "errors" + "fmt" + "math/big" + "time" + + "github.com/iden3/go-merkletree-sql/v2" + "github.com/iden3/go-merkletree-sql/v2/db/memory" +) + +const rdfEntryEncodingVersion = 1 + +type entryType uint8 + +const ( + entryTypeInt64 entryType = 0 + entryTypeBool entryType = 1 + entryTypeString entryType = 2 + entryTypeTime entryType = 3 + entryTypeBigInt entryType = 4 +) + +func doEncode[T rdfEntryValueType](enc *gob.Encoder, d entryType, v T) error { + err := enc.Encode(d) + if err != nil { + return err + } + err = enc.Encode(v) + if err != nil { + return err + } + return nil +} + +func doDecode[T rdfEntryValueType](dec *gob.Decoder) (any, error) { + var x T + err := dec.Decode(&x) + if err != nil { + return nil, err + } + return x, nil +} + +func (e *RDFEntry) MarshalBinary() ([]byte, error) { + var buf bytes.Buffer + enc := gob.NewEncoder(&buf) + + err := enc.Encode(rdfEntryEncodingVersion) + if err != nil { + return nil, err + } + + err = enc.Encode(e.key.parts) + if err != nil { + return nil, err + } + + switch v := e.value.(type) { + case int64: + err = doEncode(enc, entryTypeInt64, v) + case int: + err = doEncode(enc, entryTypeInt64, int64(v)) + case bool: + err = doEncode(enc, entryTypeBool, v) + case string: + err = doEncode(enc, entryTypeString, v) + case time.Time: + err = doEncode(enc, entryTypeTime, v) + case *big.Int: + err = doEncode(enc, entryTypeBigInt, v) + default: + err = fmt.Errorf("unsupported entry type: %T", e) + } + if err != nil { + return nil, err + } + + err = enc.Encode(e.datatype) + if err != nil { + return nil, err + } + + return buf.Bytes(), nil +} + +func (e *RDFEntry) UnmarshalBinary(in []byte) error { + dec := gob.NewDecoder(bytes.NewReader(in)) + + var encVersion int + + err := dec.Decode(&encVersion) + if err != nil { + return err + } + + if encVersion != rdfEntryEncodingVersion { + return fmt.Errorf("wrong encoding version: %v", encVersion) + } + + e.key.hasher = e.getHasher() + if e.hasher == nil { + e.hasher = e.key.hasher + } + err = dec.Decode(&e.key.parts) + if err != nil { + return err + } + + var tp entryType + err = dec.Decode(&tp) + if err != nil { + return err + } + switch tp { + case entryTypeInt64: + e.value, err = doDecode[int64](dec) + case entryTypeBool: + e.value, err = doDecode[bool](dec) + case entryTypeString: + e.value, err = doDecode[string](dec) + case entryTypeTime: + e.value, err = doDecode[time.Time](dec) + case entryTypeBigInt: + e.value, err = doDecode[*big.Int](dec) + default: + err = fmt.Errorf("unsupported entry type: %T", e) + } + if err != nil { + return err + } + + err = dec.Decode(&e.datatype) + if err != nil { + return err + } + + return nil +} + +const mzEncodingVersion = 1 + +func MerklizerFromBytes(in []byte, opts ...MerklizeOption) (*Merklizer, error) { + mz := &Merklizer{ + safeMode: true, + hasher: defaultHasher, + } + for _, o := range opts { + o(mz) + } + + err := mz.UnmarshalBinary(in) + return mz, err +} + +func (mz *Merklizer) MarshalBinary() ([]byte, error) { + var buf bytes.Buffer + enc := gob.NewEncoder(&buf) + + err := enc.Encode(mzEncodingVersion) + if err != nil { + return nil, err + } + + err = enc.Encode(mz.srcDoc) + if err != nil { + return nil, err + } + + compactedBytes, err := json.Marshal(mz.compacted) + if err != nil { + return nil, 
err + } + err = enc.Encode(compactedBytes) + if err != nil { + return nil, err + } + + root := mz.mt.Root().BigInt() + err = enc.Encode(root) + if err != nil { + return nil, err + } + + err = enc.Encode(len(mz.entries)) + if err != nil { + return nil, err + } + + for k, e := range mz.entries { + err = enc.Encode(k) + if err != nil { + return nil, err + } + + e := e // actually not needed, but lint complains + err = enc.Encode(&e) + if err != nil { + return nil, err + } + } + + err = enc.Encode(mz.safeMode) + if err != nil { + return nil, err + } + + return buf.Bytes(), nil +} + +func (mz *Merklizer) UnmarshalBinary(in []byte) error { + enc := gob.NewDecoder(bytes.NewReader(in)) + + var encodingVersion int + err := enc.Decode(&encodingVersion) + if err != nil { + return err + } + + if mzEncodingVersion != encodingVersion { + return fmt.Errorf("wrong encoding version: %v", encodingVersion) + } + + err = enc.Decode(&mz.srcDoc) + if err != nil { + return err + } + + var compactedBytes []byte + err = enc.Decode(&compactedBytes) + if err != nil { + return err + } + err = json.Unmarshal(compactedBytes, &mz.compacted) + if err != nil { + return err + } + + var root *big.Int + err = enc.Decode(&root) + if err != nil { + return err + } + + addToMT := false + + if mz.hasher == nil { + mz.hasher = defaultHasher + } + + // if merkletree is not set with options, initialize new in-memory MT. + if mz.mt == nil { + var mt *merkletree.MerkleTree + mt, err = merkletree.NewMerkleTree(context.Background(), + memory.NewMemoryStorage(), 40) + if err != nil { + return err + } + mz.mt = MerkleTreeSQLAdapter(mt) + addToMT = true + } + + if !addToMT && mz.mt.Root().BigInt().Cmp(root) != 0 { + return errors.New("root hash mismatch") + } + + var entriesLen int + err = enc.Decode(&entriesLen) + if err != nil { + return err + } + + entries := make([]RDFEntry, entriesLen) + mz.entries = make(map[string]RDFEntry, entriesLen) + + for i := 0; i < entriesLen; i++ { + var key string + err = enc.Decode(&key) + if err != nil { + return err + } + + var p Path + p, err = mz.Options().NewPath("") + if err != nil { + return err + } + entries[i], err = mz.Options().NewRDFEntry(p, "") + if err != nil { + return err + } + + err = enc.Decode(&entries[i]) + if err != nil { + return err + } + + mz.entries[key] = entries[i] + } + + if addToMT { + err = AddEntriesToMerkleTree(context.Background(), mz.mt, entries) + if err != nil { + return err + } + } + + err = enc.Decode(&mz.safeMode) + if err != nil { + return err + } + + return nil +} + +type gobJTp uint8 + +const ( + gobJTpNull gobJTp = 0 + gobJTpList gobJTp = 1 + gobJTpMap gobJTp = 2 + gobJTpOther gobJTp = 3 +) + +func gobJSONObjectEncode(enc *gob.Encoder, e any) error { + var err error + switch v := e.(type) { + case nil: + err = enc.Encode(gobJTpNull) + if err != nil { + return err + } + case []any: + err = enc.Encode(gobJTpList) + if err != nil { + return err + } + err = enc.Encode(len(v)) + if err != nil { + return err + } + for _, e2 := range v { + err = gobJSONObjectEncode(enc, e2) + if err != nil { + return err + } + } + case map[string]any: + err = enc.Encode(gobJTpMap) + if err != nil { + return err + } + err = gobJSONMapEncode(enc, v) + if err != nil { + return err + } + default: + err = enc.Encode(gobJTpOther) + if err != nil { + return err + } + err = enc.Encode(&e) + if err != nil { + return err + } + } + return nil +} + +func gobJSONMapEncode(enc *gob.Encoder, o map[string]any) error { + err := enc.Encode(len(o)) + if err != nil { + return err + } + + for k, v := range o 
{ + err = enc.Encode(k) + if err != nil { + return err + } + err = gobJSONObjectEncode(enc, v) + if err != nil { + return err + } + } + + return nil +} + +func gobJSONObjectDecode(dec *gob.Decoder) (any, error) { + var tp gobJTp + err := dec.Decode(&tp) + if err != nil { + return nil, err + } + + switch tp { + case gobJTpNull: + return nil, nil + case gobJTpMap: + return gobJSONMapDecode(dec) + case gobJTpList: + return gobJSONListDecode(dec) + case gobJTpOther: + var e any + err = dec.Decode(&e) + if err != nil { + return nil, err + } + return e, nil + default: + return nil, errors.New("unexpected type") + } +} + +func gobJSONListDecode(dec *gob.Decoder) ([]any, error) { + var ln int + err := dec.Decode(&ln) + if err != nil { + return nil, err + } + var l = make([]any, ln) + for i := 0; i < ln; i++ { + l[i], err = gobJSONObjectDecode(dec) + if err != nil { + return nil, err + } + } + return l, nil +} + +func gobJSONMapDecode(dec *gob.Decoder) (map[string]any, error) { + var ln int + err := dec.Decode(&ln) + if err != nil { + return nil, err + } + + var m = make(map[string]any, ln) + for i := 0; i < ln; i++ { + var k string + err = dec.Decode(&k) + if err != nil { + return nil, err + } + m[k], err = gobJSONObjectDecode(dec) + if err != nil { + return nil, err + } + } + + return m, nil +} diff --git a/merklize/binary_encoding_test.go b/merklize/binary_encoding_test.go new file mode 100644 index 0000000..f950810 --- /dev/null +++ b/merklize/binary_encoding_test.go @@ -0,0 +1,422 @@ +package merklize + +import ( + "bytes" + "context" + "crypto/md5" + "encoding/gob" + "encoding/json" + "math/big" + "strings" + "testing" + "time" + + "github.com/iden3/go-merkletree-sql/v2" + "github.com/iden3/go-merkletree-sql/v2/db/memory" + "github.com/stretchr/testify/require" +) + +type md5Hasher struct{} + +func (h *md5Hasher) Hash(inpBI []*big.Int) (*big.Int, error) { + mh := md5.New() + for _, i := range inpBI { + mh.Write(i.Bytes()) + } + sumBytes := mh.Sum(nil) + return new(big.Int).SetBytes(sumBytes), nil +} +func (h *md5Hasher) HashBytes(msg []byte) (*big.Int, error) { + s := md5.Sum(msg) + return new(big.Int).SetBytes(s[:]), nil +} +func (h *md5Hasher) Prime() *big.Int { + bs := make([]byte, md5.Size) + for i := 0; i < md5.Size; i++ { + bs[i] = 0xff + } + return new(big.Int).SetBytes(bs) +} + +func TestRDFEntry_BinaryMashaler(t *testing.T) { + testCases := []struct { + title string + value any + }{ + { + title: "int64", + value: int64(100500), + }, + { + title: "bool", + value: true, + }, + { + title: "string", + value: "xyz", + }, + { + title: "time.Time", + value: time.Date(2020, 1, 1, 10, 20, 0, 0, time.UTC), + }, + { + title: "*big.Int", + value: big.NewInt(100500), + }, + } + + path, err := NewPath("x", "y", 1, "z") + require.NoError(t, err) + + for _, tc := range testCases { + t.Run(tc.title, func(t *testing.T) { + ent, err := NewRDFEntry(path, tc.value) + require.NoError(t, err) + + entBytes, err := ent.MarshalBinary() + require.NoError(t, err) + require.NotEmpty(t, entBytes) + + var version int + err = gob.NewDecoder(bytes.NewReader(entBytes)).Decode(&version) + require.NoError(t, err) + require.Equal(t, rdfEntryEncodingVersion, version) + + var ent2 RDFEntry + err = ent2.UnmarshalBinary(entBytes) + require.NoError(t, err) + + require.Equal(t, ent, ent2) + }) + } +} + +func TestRDFEntry_Gob(t *testing.T) { + testCases := []struct { + title string + value any + }{ + { + title: "int64", + value: int64(100500), + }, + { + title: "bool", + value: true, + }, + { + title: "string", + value: "xyz", 
+ }, + { + title: "time.Time", + value: time.Date(2020, 1, 1, 10, 20, 0, 0, time.UTC), + }, + { + title: "*big.Int", + value: big.NewInt(100500), + }, + } + + path, err := NewPath("x", "y", 1, "z") + require.NoError(t, err) + + for _, tc := range testCases { + t.Run(tc.title, func(t *testing.T) { + ent, err := NewRDFEntry(path, tc.value) + require.NoError(t, err) + + var buf bytes.Buffer + enc := gob.NewEncoder(&buf) + err = enc.Encode(&ent) + require.NoError(t, err) + + require.NotEmpty(t, buf.Bytes()) + + var ent2 RDFEntry + err = gob.NewDecoder(&buf).Decode(&ent2) + require.NoError(t, err) + + require.Equal(t, ent, ent2) + }) + } +} + +func TestRDFEntry_BinaryMashaler_CustomHasher(t *testing.T) { + opts := Options{Hasher: &md5Hasher{}} + path, err := opts.NewPath("x", "y", 1, "z") + require.NoError(t, err) + value := "abc" + ent, err := opts.NewRDFEntry(path, value) + require.NoError(t, err) + + key, val, err := ent.KeyValueMtEntries() + require.NoError(t, err) + + entBytes, err := ent.MarshalBinary() + require.NoError(t, err) + require.NotEmpty(t, entBytes) + + var version int + err = gob.NewDecoder(bytes.NewReader(entBytes)).Decode(&version) + require.NoError(t, err) + + require.Equal(t, rdfEntryEncodingVersion, version) + + var ent2 RDFEntry + + path, err = opts.NewPath("") + require.NoError(t, err) + ent2, err = opts.NewRDFEntry(path, "") + require.NoError(t, err) + + err = ent2.UnmarshalBinary(entBytes) + require.NoError(t, err) + + require.Equal(t, ent, ent2) + + key2, val2, err := ent2.KeyValueMtEntries() + require.NoError(t, err) + + require.Zero(t, key.Cmp(key2)) + require.Zero(t, val.Cmp(val2)) +} + +func testMarshalCompactObjCustomFunction(t testing.TB, obj map[string]any) { + var buf bytes.Buffer + enc := gob.NewEncoder(&buf) + err := gobJSONObjectEncode(enc, obj) + require.NoError(t, err) + require.NotEmpty(t, buf.Bytes()) + + dec := gob.NewDecoder(bytes.NewReader(buf.Bytes())) + obj2, err := gobJSONObjectDecode(dec) + require.NoError(t, err) + require.Equal(t, obj, obj2) +} + +func testMarshalCompactObjGob(t testing.TB, obj map[string]any) { + var buf bytes.Buffer + enc := gob.NewEncoder(&buf) + err := enc.Encode(obj) + require.NoError(t, err) + require.NotEmpty(t, buf.Bytes()) + + var obj2 map[string]any + dec := gob.NewDecoder(bytes.NewReader(buf.Bytes())) + err = dec.Decode(&obj2) + + require.NoError(t, err) + require.Equal(t, obj, obj2) +} + +func testMarshalCompactObjJSONMarshal(t testing.TB, obj map[string]any) { + var buf2 bytes.Buffer + buf2.Grow(1000) + bs, err := json.Marshal(obj) + if err != nil { + t.Fatal(err) + } + var buf bytes.Buffer + err = gob.NewEncoder(&buf).Encode(bs) + if err != nil { + t.Fatal(err) + } + + var bs2 []byte + err = gob.NewDecoder(bytes.NewReader(buf.Bytes())).Decode(&bs2) + if err != nil { + t.Fatal(err) + } + + var obj2 map[string]any + err = json.Unmarshal(bs2, &obj2) + if err != nil { + t.Fatal(err) + } + + require.Equal(t, obj, obj2) +} + +func TestMerklizer_BinaryMashaler(t *testing.T) { + ctx := context.Background() + mz, err := MerklizeJSONLD(ctx, strings.NewReader(testDocument)) + require.NoError(t, err) + + mzBytes, err := mz.MarshalBinary() + require.NoError(t, err) + require.NotEmpty(t, mzBytes) + + mz2, err := MerklizerFromBytes(mzBytes) + require.NoError(t, err) + + require.Equal(t, mz.Root(), mz2.Root()) +} + +func TestMerklizer_BinaryMashaler_WithMT(t *testing.T) { + ctx := context.Background() + mt, err := merkletree.NewMerkleTree(ctx, memory.NewMemoryStorage(), 40) + if err != nil { + t.Fatal(err) + } + mzMT := 
MerkleTreeSQLAdapter(mt) + + mz, err := MerklizeJSONLD(ctx, strings.NewReader(testDocument), WithMerkleTree(mzMT)) + require.NoError(t, err) + + mzBytes, err := mz.MarshalBinary() + require.NoError(t, err) + require.NotEmpty(t, mzBytes) + + mz2, err := MerklizerFromBytes(mzBytes, WithMerkleTree(mzMT)) + require.NoError(t, err) + + require.Equal(t, mz.Root(), mz2.Root()) +} + +func TestMerklizer_BinaryMashaler_3(t *testing.T) { + ctx := context.Background() + mt, err := merkletree.NewMerkleTree(ctx, memory.NewMemoryStorage(), 40) + if err != nil { + t.Fatal(err) + } + mzMT := MerkleTreeSQLAdapter(mt) + + mz, err := MerklizeJSONLD(ctx, strings.NewReader(testDocument), WithMerkleTree(mzMT)) + require.NoError(t, err) + + testMarshalCompactObjCustomFunction(t, mz.compacted) +} + +func BenchmarkMerkalizerSerializationTrims(b *testing.B) { + gob.Register(map[string]interface{}{}) + gob.Register([]interface{}{}) + ctx := context.Background() + mt, err := merkletree.NewMerkleTree(ctx, memory.NewMemoryStorage(), 40) + if err != nil { + b.Fatal(err) + } + mzMT := MerkleTreeSQLAdapter(mt) + + mz, err := MerklizeJSONLD(ctx, strings.NewReader(testDocument), WithMerkleTree(mzMT)) + require.NoError(b, err) + + b.ResetTimer() + + b.ReportAllocs() + + b.Run("custom function", func(b *testing.B) { + for i := 0; i < b.N; i++ { + testMarshalCompactObjCustomFunction(b, mz.compacted) + } + }) + + b.Run("gob", func(b *testing.B) { + for i := 0; i < b.N; i++ { + testMarshalCompactObjGob(b, mz.compacted) + } + }) + + b.Run("through json marshaling", func(b *testing.B) { + for i := 0; i < b.N; i++ { + testMarshalCompactObjJSONMarshal(b, mz.compacted) + } + }) +} + +func BenchmarkMerklizer_BinaryMashaler_2(b *testing.B) { + gob.Register(map[string]interface{}{}) + gob.Register([]interface{}{}) + gob.Register(time.Time{}) + gob.Register(&big.Int{}) + + ctx := context.Background() + mt, err := merkletree.NewMerkleTree(ctx, memory.NewMemoryStorage(), 40) + if err != nil { + b.Fatal(err) + } + mzMT := MerkleTreeSQLAdapter(mt) + + mz, err := MerklizeJSONLD(ctx, strings.NewReader(testDocument), WithMerkleTree(mzMT)) + require.NoError(b, err) + + for i := 0; i < b.N; i++ { + mzBytes, err := mz.MarshalBinary() + if err != nil { + b.Fatal(err) + } + mz2, err := MerklizerFromBytes(mzBytes, WithMerkleTree(mzMT)) + if err != nil { + b.Fatal(err) + } + + if mz.Root().BigInt().Cmp(mz2.Root().BigInt()) != 0 { + b.Fatal("root mismatch") + } + } +} + +func TestName(t *testing.T) { + gob.Register(big.NewInt(0)) + var i = map[string]interface{}{ + "one": 1, + "two": big.NewInt(2), + } + var b bytes.Buffer + err := gob.NewEncoder(&b).Encode(i) + require.NoError(t, err) + + var i2 map[string]interface{} + err = gob.NewDecoder(&b).Decode(&i2) + require.NoError(t, err) + t.Logf("%[1]T, %[1]v", i2["two"]) +} + +func BenchmarkName(b *testing.B) { + gob.Register(map[string]interface{}{}) + gob.Register([]interface{}{}) + x := `{"@context":null,"@id":"https://issuer.oidp.uscis.gov/credentials/83627465","@type":["https://www.w3.org/2018/credentials#VerifiableCredential","https://w3id.org/citizenship#PermanentResidentCard"],"http://schema.org/description":"Government of Example Permanent Resident Card.","http://schema.org/identifier":83627465,"http://schema.org/name":"Permanent Resident 
Card","https://www.w3.org/2018/credentials#credentialSubject":[{"@id":"did:example:b34ca6cd37bbf23","@type":["https://w3id.org/citizenship#PermanentResident","http://schema.org/Person"],"http://schema.org/birthDate":{"@type":"http://www.w3.org/2001/XMLSchema#dateTime","@value":"1958-07-17"},"http://schema.org/familyName":"SMITH","http://schema.org/gender":"Male","http://schema.org/givenName":"JOHN","http://schema.org/image":{"@id":"data:image/png;base64,iVBORw0KGgokJggg=="},"https://w3id.org/citizenship#birthCountry":"Bahamas","https://w3id.org/citizenship#commuterClassification":"C1","https://w3id.org/citizenship#lprCategory":"C09","https://w3id.org/citizenship#lprNumber":"999-999-999","https://w3id.org/citizenship#residentSince":{"@type":"http://www.w3.org/2001/XMLSchema#dateTime","@value":"2015-01-01"}},{"@id":"did:example:b34ca6cd37bbf24","@type":["https://w3id.org/citizenship#PermanentResident","http://schema.org/Person"],"http://schema.org/birthDate":{"@type":"http://www.w3.org/2001/XMLSchema#dateTime","@value":"1958-07-18"},"http://schema.org/familyName":"SMITH","http://schema.org/gender":"Male","http://schema.org/givenName":"JOHN","http://schema.org/image":{"@id":"data:image/png;base64,iVBORw0KGgokJggg=="},"https://w3id.org/citizenship#birthCountry":"Bahamas","https://w3id.org/citizenship#commuterClassification":"C1","https://w3id.org/citizenship#lprCategory":"C09","https://w3id.org/citizenship#lprNumber":"999-999-999","https://w3id.org/citizenship#residentSince":{"@type":"http://www.w3.org/2001/XMLSchema#dateTime","@value":"2015-01-01"}}],"https://www.w3.org/2018/credentials#expirationDate":{"@type":"http://www.w3.org/2001/XMLSchema#dateTime","@value":"2029-12-03T12:19:52Z"},"https://www.w3.org/2018/credentials#issuanceDate":{"@type":"http://www.w3.org/2001/XMLSchema#dateTime","@value":"2019-12-03T12:19:52Z"},"https://www.w3.org/2018/credentials#issuer":{"@id":"did:example:489398593"}}` + var obj interface{} + err := json.Unmarshal([]byte(x), &obj) + require.NoError(b, err) + + buf := make([]byte, 0, 1000) + for i := 0; i < b.N; i++ { + buf2 := bytes.NewBuffer(buf[:0]) + err := gob.NewEncoder(buf2).Encode(&obj) + require.NoError(b, err) + + var obj2 interface{} + err = gob.NewDecoder(bytes.NewReader(buf2.Bytes())).Decode(&obj2) + require.NoError(b, err) + require.Equal(b, obj, obj2) + } +} +func BenchmarkName2(b *testing.B) { + gob.Register(map[string]interface{}{}) + gob.Register([]interface{}{}) + x := `{"@context":null,"@id":"https://issuer.oidp.uscis.gov/credentials/83627465","@type":["https://www.w3.org/2018/credentials#VerifiableCredential","https://w3id.org/citizenship#PermanentResidentCard"],"http://schema.org/description":"Government of Example Permanent Resident Card.","http://schema.org/identifier":83627465,"http://schema.org/name":"Permanent Resident 
Card","https://www.w3.org/2018/credentials#credentialSubject":[{"@id":"did:example:b34ca6cd37bbf23","@type":["https://w3id.org/citizenship#PermanentResident","http://schema.org/Person"],"http://schema.org/birthDate":{"@type":"http://www.w3.org/2001/XMLSchema#dateTime","@value":"1958-07-17"},"http://schema.org/familyName":"SMITH","http://schema.org/gender":"Male","http://schema.org/givenName":"JOHN","http://schema.org/image":{"@id":"data:image/png;base64,iVBORw0KGgokJggg=="},"https://w3id.org/citizenship#birthCountry":"Bahamas","https://w3id.org/citizenship#commuterClassification":"C1","https://w3id.org/citizenship#lprCategory":"C09","https://w3id.org/citizenship#lprNumber":"999-999-999","https://w3id.org/citizenship#residentSince":{"@type":"http://www.w3.org/2001/XMLSchema#dateTime","@value":"2015-01-01"}},{"@id":"did:example:b34ca6cd37bbf24","@type":["https://w3id.org/citizenship#PermanentResident","http://schema.org/Person"],"http://schema.org/birthDate":{"@type":"http://www.w3.org/2001/XMLSchema#dateTime","@value":"1958-07-18"},"http://schema.org/familyName":"SMITH","http://schema.org/gender":"Male","http://schema.org/givenName":"JOHN","http://schema.org/image":{"@id":"data:image/png;base64,iVBORw0KGgokJggg=="},"https://w3id.org/citizenship#birthCountry":"Bahamas","https://w3id.org/citizenship#commuterClassification":"C1","https://w3id.org/citizenship#lprCategory":"C09","https://w3id.org/citizenship#lprNumber":"999-999-999","https://w3id.org/citizenship#residentSince":{"@type":"http://www.w3.org/2001/XMLSchema#dateTime","@value":"2015-01-01"}}],"https://www.w3.org/2018/credentials#expirationDate":{"@type":"http://www.w3.org/2001/XMLSchema#dateTime","@value":"2029-12-03T12:19:52Z"},"https://www.w3.org/2018/credentials#issuanceDate":{"@type":"http://www.w3.org/2001/XMLSchema#dateTime","@value":"2019-12-03T12:19:52Z"},"https://www.w3.org/2018/credentials#issuer":{"@id":"did:example:489398593"}}` + var obj interface{} + err := json.Unmarshal([]byte(x), &obj) + require.NoError(b, err) + + buf := make([]byte, 0, 1000) + for i := 0; i < b.N; i++ { + b1, err := json.Marshal(obj) + require.NoError(b, err) + + buf2 := bytes.NewBuffer(buf[:0]) + err = gob.NewEncoder(buf2).Encode(b1) + require.NoError(b, err) + + var b2 []byte + err = gob.NewDecoder(bytes.NewReader(buf2.Bytes())).Decode(&b2) + require.NoError(b, err) + + var obj2 interface{} + err = json.Unmarshal(b2, &obj2) + require.NoError(b, err) + + require.Equal(b, obj, obj2) + } +} diff --git a/merklize/merklize.go b/merklize/merklize.go index 33aec8d..ed56833 100644 --- a/merklize/merklize.go +++ b/merklize/merklize.go @@ -126,6 +126,8 @@ func (o Options) NewRDFEntry(key Path, value interface{}) (RDFEntry, error) { e.value = int64(v) case int64, string, bool, time.Time: e.value = value + case *big.Int: + e.value = new(big.Int).Set(v) default: return e, fmt.Errorf("incorrect value type: %T", value) } @@ -512,25 +514,6 @@ func (p *Path) Prepend(parts ...interface{}) error { return nil } -// type RDFEntryValueType interface { -// int | int32 | int64 | uint | uint32 | uint64 | string | bool | time.Time -// } - -// type RDFEntry[T RDFEntryValueType] struct { -// key Path -// // valid types are: int64, string, bool, time.Time -// value T -// hasher Hasher -// } - -type RDFEntry struct { - key Path - // valid types are: int64, string, bool, time.Time, *big.Int - value any - datatype string - hasher Hasher -} - type Value interface { MtEntry() (*big.Int, error) @@ -648,34 +631,6 @@ func (v *value) AsBigInt() (*big.Int, error) { return i, nil } 
-func NewRDFEntry(key Path, value any) (RDFEntry, error) { - return Options{}.NewRDFEntry(key, value) -} - -func (e RDFEntry) KeyMtEntry() (*big.Int, error) { - return e.key.MtEntry() -} - -func (e RDFEntry) ValueMtEntry() (*big.Int, error) { - return mkValueMtEntry(e.getHasher(), e.value) -} - -func (e RDFEntry) KeyValueMtEntries() ( - keyMtEntry *big.Int, valueMtEntry *big.Int, err error) { - - keyMtEntry, err = e.KeyMtEntry() - if err != nil { - return nil, nil, err - } - - valueMtEntry, err = e.ValueMtEntry() - if err != nil { - return nil, nil, err - } - - return keyMtEntry, valueMtEntry, nil -} - type nodeType uint8 const ( @@ -1443,14 +1398,6 @@ func AddEntriesToMerkleTree(ctx context.Context, mt mtAppender, return nil } -func (e RDFEntry) getHasher() Hasher { - h := e.hasher - if h == nil { - h = defaultHasher - } - return h -} - // Hasher is an interface to hash data type Hasher interface { Hash(inpBI []*big.Int) (*big.Int, error) @@ -1650,12 +1597,12 @@ func MerklizeJSONLD(ctx context.Context, in io.Reader, return mz, err } -func (m *Merklizer) Entry(path Path) (RDFEntry, error) { +func (mz *Merklizer) Entry(path Path) (RDFEntry, error) { key, err := path.MtEntry() if err != nil { return RDFEntry{}, err } - e, ok := m.entries[key.String()] + e, ok := mz.entries[key.String()] if !ok { return RDFEntry{}, ErrorEntryNotFound } @@ -1663,14 +1610,14 @@ func (m *Merklizer) Entry(path Path) (RDFEntry, error) { return e, nil } -func (m *Merklizer) getDocumentLoader() ld.DocumentLoader { - if m.documentLoader != nil { - return m.documentLoader +func (mz *Merklizer) getDocumentLoader() ld.DocumentLoader { + if mz.documentLoader != nil { + return mz.documentLoader } - if m.ipfsCli == nil && m.ipfsGW == "" { + if mz.ipfsCli == nil && mz.ipfsGW == "" { return defaultDocumentLoader } - return loaders.NewDocumentLoader(m.ipfsCli, m.ipfsGW) + return loaders.NewDocumentLoader(mz.ipfsCli, mz.ipfsGW) } func rvExtractObjField(obj any, field string) (any, error) { @@ -1707,9 +1654,9 @@ func rvExtractArrayIdx(obj any, idx int) (any, error) { return objArr[idx], nil } -func (m *Merklizer) RawValue(path Path) (any, error) { +func (mz *Merklizer) RawValue(path Path) (any, error) { parts := path.Parts() - var obj any = m.compacted + var obj any = mz.compacted var err error var traversedParts []string currentPath := func() string { return strings.Join(traversedParts, " / ") } @@ -1742,40 +1689,40 @@ func (m *Merklizer) RawValue(path Path) (any, error) { // JSONLDType returns the JSON-LD type of the given path. If there is no literal // by this path, it returns an error. 
-func (m *Merklizer) JSONLDType(path Path) (string, error) {
-	entry, err := m.Entry(path)
+func (mz *Merklizer) JSONLDType(path Path) (string, error) {
+	entry, err := mz.Entry(path)
 	if err != nil {
 		return "", err
 	}
 	return entry.datatype, nil
 }
 
-func (m *Merklizer) ResolveDocPath(path string) (Path, error) {
+func (mz *Merklizer) ResolveDocPath(path string) (Path, error) {
 	opts := Options{
-		Hasher:         m.hasher,
-		DocumentLoader: m.getDocumentLoader(),
+		Hasher:         mz.hasher,
+		DocumentLoader: mz.getDocumentLoader(),
 	}
 	if opts.Hasher == nil {
 		opts.Hasher = defaultHasher
 	}
-	realPath, err := opts.NewPathFromDocument(m.srcDoc, path)
+	realPath, err := opts.NewPathFromDocument(mz.srcDoc, path)
 	if err != nil {
 		return Path{}, err
 	}
 	return realPath, nil
 }
 
-func (m *Merklizer) Options() Options {
+func (mz *Merklizer) Options() Options {
 	return Options{
-		Hasher:         m.hasher,
-		DocumentLoader: m.getDocumentLoader(),
+		Hasher:         mz.hasher,
+		DocumentLoader: mz.getDocumentLoader(),
 	}
 }
 
 // Proof generate and return Proof and Value by the given Path.
 // If the path is not found, it returns nil as value interface.
-func (m *Merklizer) Proof(ctx context.Context,
+func (mz *Merklizer) Proof(ctx context.Context,
 	path Path) (*merkletree.Proof, Value, error) {
 
 	keyHash, err := path.MtEntry()
@@ -1783,19 +1730,19 @@ func (m *Merklizer) Proof(ctx context.Context,
 		return nil, nil, err
 	}
 
-	proof, err := m.mt.GenerateProof(ctx, keyHash)
+	proof, err := mz.mt.GenerateProof(ctx, keyHash)
 	if err != nil {
 		return nil, nil, err
 	}
 
 	var value Value
 	if proof.Existence {
-		entry, ok := m.entries[keyHash.String()]
+		entry, ok := mz.entries[keyHash.String()]
 		if !ok {
 			return nil, nil, errors.New(
 				"[assertion] no Entry found while existence is true")
 		}
-		value, err = NewValue(m.hasher, entry.value)
+		value, err = NewValue(mz.hasher, entry.value)
 		if err != nil {
 			return nil, nil, err
 		}
@@ -1804,16 +1751,16 @@ func (m *Merklizer) Proof(ctx context.Context,
 	return proof, value, err
 }
 
-func (m *Merklizer) MkValue(val any) (Value, error) {
-	return NewValue(m.hasher, val)
+func (mz *Merklizer) MkValue(val any) (Value, error) {
+	return NewValue(mz.hasher, val)
 }
 
-func (m *Merklizer) Root() *merkletree.Hash {
-	return m.mt.Root()
+func (mz *Merklizer) Root() *merkletree.Hash {
+	return mz.mt.Root()
 }
 
-func (m *Merklizer) Hasher() Hasher {
-	return m.hasher
+func (mz *Merklizer) Hasher() Hasher {
+	return mz.hasher
 }
 
 func mkValueMtEntry(h Hasher, v interface{}) (*big.Int, error) {
diff --git a/merklize/merklize_test.go b/merklize/merklize_test.go
index 6668b1d..5ca62e7 100644
--- a/merklize/merklize_test.go
+++ b/merklize/merklize_test.go
@@ -622,7 +622,7 @@ func TestProofInteger(t *testing.T) {
 }
 
 func TestMerklizer_Proof(t *testing.T) {
-	defer tst.MockHTTPClient(t, testDocumentURLMaps)()
+	defer tst.MockHTTPClient(t, testDocumentURLMaps, tst.IgnoreUntouchedURLs())()
 	ctx := context.Background()
 	mz, err := MerklizeJSONLD(ctx, strings.NewReader(testDocument))
 	require.NoError(t, err)
diff --git a/merklize/rdfentry.go b/merklize/rdfentry.go
new file mode 100644
index 0000000..c287ec9
--- /dev/null
+++ b/merklize/rdfentry.go
@@ -0,0 +1,61 @@
+package merklize
+
+import (
+	"math/big"
+	"time"
+)
+
+type rdfEntryValueType interface {
+	int64 | string | bool | time.Time | *big.Int
+}
+
+// type RDFEntry[T RDFEntryValueType] struct {
+// 	key Path
+// 	// valid types are: int64, string, bool, time.Time, *big.Int
+// 	value T
+// 	hasher Hasher
+// }
+
+type RDFEntry struct {
+	key Path
+	// valid types are: int64, string, bool, time.Time, *big.Int
+	value    any
+	datatype string
+	hasher   Hasher
+}
+
+func NewRDFEntry(key Path, value any) (RDFEntry, error) {
+	return Options{}.NewRDFEntry(key, value)
+}
+
+func (e RDFEntry) KeyMtEntry() (*big.Int, error) {
+	return e.key.MtEntry()
+}
+
+func (e RDFEntry) ValueMtEntry() (*big.Int, error) {
+	return mkValueMtEntry(e.getHasher(), e.value)
+}
+
+func (e RDFEntry) KeyValueMtEntries() (
+	keyMtEntry *big.Int, valueMtEntry *big.Int, err error) {
+
+	keyMtEntry, err = e.KeyMtEntry()
+	if err != nil {
+		return nil, nil, err
+	}
+
+	valueMtEntry, err = e.ValueMtEntry()
+	if err != nil {
+		return nil, nil, err
+	}
+
+	return keyMtEntry, valueMtEntry, nil
+}
+
+func (e RDFEntry) getHasher() Hasher {
+	h := e.hasher
+	if h == nil {
+		h = defaultHasher
+	}
+	return h
+}
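
Usage note (not part of the change set): the MarshalBinary / MerklizerFromBytes pair added in binary_encoding.go lets a caller persist a merklized JSON-LD document and later restore it without re-running JSON-LD processing; when no merkle tree is supplied via options, UnmarshalBinary replays the stored entries into a fresh in-memory tree. The sketch below mirrors TestMerklizer_BinaryMashaler from the test file above. The module import path and the inline-@context sample document are assumptions for illustration, not taken from this diff.

package main

import (
	"context"
	"fmt"
	"strings"

	// Assumed module path of this repository; adjust to the actual one.
	"github.com/iden3/go-schema-processor/v2/merklize"
)

func main() {
	ctx := context.Background()

	// Stand-in JSON-LD document with an inline @context so no remote
	// context has to be fetched; any JSON-LD document works here.
	doc := `{"@context": {"name": "http://schema.org/name"}, "name": "John"}`

	// Build a merklized representation of the document.
	mz, err := merklize.MerklizeJSONLD(ctx, strings.NewReader(doc))
	if err != nil {
		panic(err)
	}

	// Serialize the Merklizer (source document, compacted form, entries,
	// merkle tree root, safe mode flag) into a self-contained byte slice.
	data, err := mz.MarshalBinary()
	if err != nil {
		panic(err)
	}

	// Restore it later; without WithMerkleTree the entries are replayed
	// into a new in-memory merkle tree and the root is reproduced.
	mz2, err := merklize.MerklizerFromBytes(data)
	if err != nil {
		panic(err)
	}

	fmt.Println(mz.Root().BigInt().Cmp(mz2.Root().BigInt()) == 0) // true
}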