diff --git a/package.json b/package.json
index f2fab2d..22a0688 100644
--- a/package.json
+++ b/package.json
@@ -76,6 +76,7 @@
     "aegir": "^22.0.0",
     "chai": "^4.2.0",
     "dirty-chai": "^2.0.1",
+    "chai-subset": "1.6.0",
     "fs-extra": "^9.0.0",
     "ipfs-block-service": "~0.17.0",
     "ipfs-repo": "^3.0.0",
@@ -84,4 +85,4 @@
     "multibase": "^0.7.0",
     "multihashes": "~0.4.19"
   }
-}
+}
\ No newline at end of file
diff --git a/src/dag-link/dagLink.js b/src/dag-link/dagLink.js
index dc55565..b661845 100644
--- a/src/dag-link/dagLink.js
+++ b/src/dag-link/dagLink.js
@@ -15,14 +15,16 @@ class DAGLink {
     // note - links should include size, but this assert is disabled
     // for now to maintain consistency with go-ipfs pinset
 
-    this._name = name || ''
-    this._nameBuf = null
-    this._size = size
-    this._cid = new CID(cid)
+    Object.defineProperties(this, {
+      Name: { value: name || '', writable: false, enumerable: true },
+      Tsize: { value: size, writable: false, enumerable: true },
+      Hash: { value: new CID(cid), writable: false, enumerable: true },
+      _nameBuf: { value: null, writable: true, enumerable: false }
+    })
   }
 
   toString () {
-    return `DAGLink <${this._cid.toBaseEncodedString()} - name: "${this.Name}", size: ${this.Tsize}>`
+    return `DAGLink <${this.Hash.toBaseEncodedString()} - name: "${this.Name}", size: ${this.Tsize}>`
   }
 
   toJSON () {
@@ -37,10 +39,6 @@ class DAGLink {
     return Object.assign({}, this._json)
   }
 
-  get Name () {
-    return this._name
-  }
-
   // Memoize the Buffer representation of name
   // We need this to sort the links, otherwise
   // we will reallocate new buffers every time
@@ -49,29 +47,9 @@
       return this._nameBuf
     }
 
-    this._nameBuf = Buffer.from(this._name)
+    this._nameBuf = Buffer.from(this.Name)
     return this._nameBuf
   }
-
-  set Name (name) {
-    throw new Error("Can't set property: 'name' is immutable")
-  }
-
-  get Tsize () {
-    return this._size
-  }
-
-  set Tsize (size) {
-    throw new Error("Can't set property: 'size' is immutable")
-  }
-
-  get Hash () {
-    return this._cid
-  }
-
-  set Hash (cid) {
-    throw new Error("Can't set property: 'cid' is immutable")
-  }
 }
 
 exports = module.exports = withIs(DAGLink, { className: 'DAGLink', symbolName: '@ipld/js-ipld-dag-pb/daglink' })
diff --git a/src/dag-node/addLink.js b/src/dag-node/addLink.js
index 1f1eca2..24dbc34 100644
--- a/src/dag-node/addLink.js
+++ b/src/dag-node/addLink.js
@@ -26,8 +26,8 @@ const asDAGLink = (link) => {
 const addLink = (node, link) => {
   const dagLink = asDAGLink(link)
 
-  node._links.push(dagLink)
-  node._links = sortLinks(node._links)
+  node.Links.push(dagLink)
+  sortLinks.inplace(node.Links)
 }
 
 module.exports = addLink
diff --git a/src/dag-node/dagNode.js b/src/dag-node/dagNode.js
index 1184580..948dffd 100644
--- a/src/dag-node/dagNode.js
+++ b/src/dag-node/dagNode.js
@@ -25,24 +25,26 @@
       throw new Error('Passed \'serializedSize\' must be a number!')
     }
 
-    links = links.map((link) => {
+    links = links.map(link => {
       return DAGLink.isDAGLink(link) ? link : DAGLink.util.createDagLinkFromB58EncodedHash(link)
     })
-    links = sortLinks(links)
+    sortLinks.inplace(links)
 
-    this._data = data
-    this._links = links
-    this._serializedSize = serializedSize
-    this._size = null
+    Object.defineProperties(this, {
+      Data: { value: data, writable: false, enumerable: true },
+      Links: { value: links, writable: false, enumerable: true },
+      _serializedSize: { value: serializedSize, writable: true, enumerable: false },
+      _size: { value: null, writable: true, enumerable: false }
+    })
   }
 
   toJSON () {
     if (!this._json) {
       this._json = Object.freeze({
         data: this.Data,
-        links: this._links.map((l) => l.toJSON()),
+        links: this.Links.map((l) => l.toJSON()),
         size: this.size
       })
     }
@@ -75,10 +77,7 @@
   }
 
   serialize () {
-    return serializeDAGNode({
-      Data: this._data,
-      Links: this._links
-    })
+    return serializeDAGNode(this)
   }
 
   get size () {
@@ -86,7 +85,7 @@
       if (this._serializedSize === null) {
        this._serializedSize = this.serialize().length
      }
-      this._size = this._links.reduce((sum, l) => sum + l.Tsize, this._serializedSize)
+      this._size = this.Links.reduce((sum, l) => sum + l.Tsize, this._serializedSize)
     }
 
     return this._size
@@ -95,29 +94,6 @@
   set size (size) {
     throw new Error("Can't set property: 'size' is immutable")
   }
-
-  // Getters for backwards compatible path resolving
-  get Data () {
-    return this._data
-  }
-
-  set Data (_) {
-    throw new Error("Can't set property: 'Data' is immutable")
-  }
-
-  get Links () {
-    return this._links.map((link) => {
-      return {
-        Name: link.Name,
-        Tsize: link.Tsize,
-        Hash: link.Hash
-      }
-    })
-  }
-
-  set Links (_) {
-    throw new Error("Can't set property: 'Links' is immutable")
-  }
 }
 
 exports = module.exports = withIs(DAGNode, { className: 'DAGNode', symbolName: '@ipld/js-ipld-dag-pb/dagnode' })
diff --git a/src/dag-node/rmLink.js b/src/dag-node/rmLink.js
index 4ccd82d..fc1caa3 100644
--- a/src/dag-node/rmLink.js
+++ b/src/dag-node/rmLink.js
@@ -4,12 +4,26 @@ const CID = require('cids')
 const { Buffer } = require('buffer')
 
 const rmLink = (dagNode, nameOrCid) => {
+  let predicate = null
+
   // It's a name
   if (typeof nameOrCid === 'string') {
-    dagNode._links = dagNode._links.filter((link) => link.Name !== nameOrCid)
+    predicate = link => link.Name === nameOrCid
   } else if (Buffer.isBuffer(nameOrCid) || CID.isCID(nameOrCid)) {
-    dagNode._links = dagNode._links.filter(
-      (link) => !link.Hash.equals(nameOrCid))
+    predicate = link => link.Hash.equals(nameOrCid)
+  }
+
+  if (predicate) {
+    const links = dagNode.Links
+    let index = 0
+    while (index < links.length) {
+      const link = links[index]
+      if (predicate(link)) {
+        links.splice(index, 1)
+      } else {
+        index++
+      }
+    }
   } else {
     throw new Error('second arg needs to be a name or CID')
   }
diff --git a/src/dag-node/sortLinks.js b/src/dag-node/sortLinks.js
index 399b3fa..753b538 100644
--- a/src/dag-node/sortLinks.js
+++ b/src/dag-node/sortLinks.js
@@ -8,7 +8,7 @@ const linkSort = (a, b) => {
 }
 
 /**
- *
+ * Returns new sorted links array.
  * @param {Array} links
  * @returns {Array}
  */
@@ -16,4 +16,15 @@ const sortLinks = (links) => {
   return sort(links, linkSort)
 }
 
+/**
+ * Sorts links in place (mutating given array)
+ * @param {Array} links
+ * @returns {void}
+ */
+const sortLinksInPlace = (links) => {
+  sort.inplace(links, linkSort)
+}
+
+sortLinks.inplace = sortLinksInPlace
+
 module.exports = sortLinks
diff --git a/src/serialize.js b/src/serialize.js
index 68d101d..d5fefb1 100644
--- a/src/serialize.js
+++ b/src/serialize.js
@@ -9,8 +9,8 @@ exports = module.exports
 const toProtoBuf = (node) => {
   const pbn = {}
 
-  if (node.Data && node.Data.length > 0) {
-    pbn.Data = node.Data
+  if (node.Data && node.Data.byteLength > 0) {
+    pbn.Data = asBuffer(node.Data)
   } else {
     // NOTE: this has to be null in order to match go-ipfs serialization
     // `null !== new Buffer(0)`
@@ -20,7 +20,7 @@ const toProtoBuf = (node) => {
   if (node.Links && node.Links.length > 0) {
     pbn.Links = node.Links
       .map((link) => ({
-        Hash: link.Hash.buffer,
+        Hash: asBuffer(link.Hash.buffer),
         Name: link.Name,
         Tsize: link.Tsize
       }))
@@ -31,6 +31,24 @@ const toProtoBuf = (node) => {
   return pbn
 }
 
+/**
+ * Takes bytes in various representations and returns `Buffer`
+ * view of the underlying data without copying.
+ * @param {Buffer|ArrayBuffer|ArrayBufferView} bytes
+ * @returns {Buffer}
+ */
+const asBuffer = (bytes) => {
+  if (Buffer.isBuffer(bytes)) {
+    return bytes
+  } else if (bytes instanceof ArrayBuffer) {
+    return Buffer.from(bytes, 0, bytes.byteLength)
+  } else if (ArrayBuffer.isView(bytes)) {
+    return Buffer.from(bytes.buffer, bytes.byteOffset, bytes.byteLength)
+  } else {
+    return bytes
+  }
+}
+
 /**
  * Serialize internal representation into a binary PB block.
  *
diff --git a/test/dag-node-test.js b/test/dag-node-test.js
index 1c9a0be..f219846 100644
--- a/test/dag-node-test.js
+++ b/test/dag-node-test.js
@@ -4,8 +4,10 @@
 const chai = require('chai')
 const { Buffer } = require('buffer')
 const dirtyChai = require('dirty-chai')
+const chaiSubset = require('chai-subset')
 const expect = chai.expect
 chai.use(dirtyChai)
+chai.use(chaiSubset)
 
 const dagPB = require('../src')
 const DAGLink = dagPB.DAGLink
@@ -80,7 +82,7 @@ module.exports = (repo) => {
       })
 
       const node2 = new DAGNode(someData, l2)
-      expect(node2.Links).to.eql([l1[1], l1[0]])
+      expect(node2.Links).to.containSubset([l1[1], l1[0]])
       expect(node1.toJSON()).to.eql(node2.toJSON())
 
       // check sorting
diff --git a/test/resolver.spec.js b/test/resolver.spec.js
index 4139ab9..2818347 100644
--- a/test/resolver.spec.js
+++ b/test/resolver.spec.js
@@ -5,8 +5,10 @@
 const chai = require('chai')
 const { Buffer } = require('buffer')
 const dirtyChai = require('dirty-chai')
+const chaiSubset = require('chai-subset')
 const expect = chai.expect
 chai.use(dirtyChai)
+chai.use(chaiSubset)
 
 const CID = require('cids')
 const { DAGNode, resolver } = require('../src')
@@ -28,186 +30,217 @@ describe('IPLD Format resolver (local)', () => {
     return utils.serialize(node)
   }
 
-  const emptyNodeBlob = create(Buffer.alloc(0), [])
-  const linksNodeBlob = create(Buffer.alloc(0), links)
-  const dataLinksNodeBlob = create(Buffer.from('aaah the data'), links)
-
-  describe('empty node', () => {
-    describe('resolver.resolve', () => {
-      it('links path', () => {
-        const result = resolver.resolve(emptyNodeBlob, 'Links')
-        expect(result.value).to.eql([])
-        expect(result.remainderPath).to.eql('')
-      })
-
-      it('data path', () => {
-        const result = resolver.resolve(emptyNodeBlob, 'Data')
-        expect(result.value).to.eql(Buffer.alloc(0))
-        expect(result.remainderPath).to.eql('')
-      })
-
-      it('non existent path', () => {
-        expect(() =>
-          resolver.resolve(emptyNodeBlob, 'pathThatDoesNotExist')
-        ).to.throw(
-          "Object has no property 'pathThatDoesNotExist'"
-        )
-      })
+  const createPlain = (data, links) => {
+    const node = {
+      Data: data,
+      Links: links
+    }
+    return utils.serialize(node)
+  }
 
-      it('empty path', () => {
-        const result = resolver.resolve(emptyNodeBlob, '')
-        expect(result.value.Data).to.eql(Buffer.alloc(0))
-        expect(result.value.Links).to.eql([])
-        expect(result.remainderPath).to.eql('')
+  const emptyNodeBlobs = [
+    ['DAGNode', create(Buffer.alloc(0), [])],
+    ['{Data:Buffer}', createPlain(Buffer.alloc(0), [])],
+    ['{data:ArrayBuffer}', createPlain(new ArrayBuffer(), [])],
+    ['{data:Uint8Array}', createPlain(new Uint8Array(), [])]
+  ]
+
+  const linksNodeBlobs = [
+    ['DAGNode', create(Buffer.alloc(0), links)],
+    ['{Data:Buffer}', createPlain(Buffer.alloc(0), links)],
+    ['{data:ArrayBuffer}', createPlain(new ArrayBuffer(), links)],
+    ['{data:Uint8Array}', createPlain(new Uint8Array(), links)]
+  ]
+
+  const dataLinksNodeBlobs = [
+    ['DAGNode', create(Buffer.from('aaah the data'), links)],
+    ['{Data:Buffer}', createPlain(Buffer.from('aaah the data'), links)],
+    ['{data:ArrayBuffer}', createPlain(new TextEncoder().encode('aaah the data').buffer, links)],
+    ['{data:Uint8Array}', createPlain(new TextEncoder().encode('aaah the data'), links)]
+  ]
+
+  for (const [kind, emptyNodeBlob] of emptyNodeBlobs) {
+    describe(`empty node (${kind})`, () => {
+      describe('resolver.resolve', () => {
+        it('links path', () => {
+          const result = resolver.resolve(emptyNodeBlob, 'Links')
+          expect(result.value).to.eql([])
+          expect(result.remainderPath).to.eql('')
+        })
+
+        it('data path', () => {
+          const result = resolver.resolve(emptyNodeBlob, 'Data')
+          expect(result.value).to.eql(Buffer.alloc(0))
+          expect(result.remainderPath).to.eql('')
+        })
+
+        it('non existent path', () => {
+          expect(() =>
+            resolver.resolve(emptyNodeBlob, 'pathThatDoesNotExist')
+          ).to.throw(
+            "Object has no property 'pathThatDoesNotExist'"
+          )
+        })
+
+        it('empty path', () => {
+          const result = resolver.resolve(emptyNodeBlob, '')
+          expect(result.value.Data).to.eql(Buffer.alloc(0))
+          expect(result.value.Links).to.eql([])
+          expect(result.remainderPath).to.eql('')
+        })
+      })
+
+      it('resolver.tree', () => {
+        const tree = resolver.tree(emptyNodeBlob)
+        const paths = [...tree]
+        expect(paths).to.have.members([
+          'Links',
+          'Data'
+        ])
       })
     })
+  }
 
-    it('resolver.tree', () => {
-      const tree = resolver.tree(emptyNodeBlob)
-      const paths = [...tree]
-      expect(paths).to.have.members([
-        'Links',
-        'Data'
-      ])
-    })
-  })
-
-  describe('links node', () => {
-    describe('resolver.resolve', () => {
-      it('links path', () => {
-        const result = resolver.resolve(linksNodeBlob, 'Links')
-        expect(result.value).to.eql(links)
-        expect(result.remainderPath).to.eql('')
-      })
-
-      it('links position path Hash', () => {
-        const result = resolver.resolve(linksNodeBlob, 'Links/1/Hash')
-        expect(result.value).to.eql(links[1].Hash)
-        expect(result.remainderPath).to.eql('')
-      })
-
-      it('links position path Name', () => {
-        const result = resolver.resolve(linksNodeBlob, 'Links/1/Name')
-        expect(result.value).to.eql(links[1].Name)
-        expect(result.remainderPath).to.eql('')
-      })
-
-      it('links position path Tsize', () => {
-        const result = resolver.resolve(linksNodeBlob, 'Links/1/Tsize')
-        expect(result.value).to.eql(links[1].Tsize)
-        expect(result.remainderPath).to.eql('')
-      })
-
-      it('links by name', () => {
-        const result = resolver.resolve(linksNodeBlob, 'named link')
-        expect(result.value.equals(links[1].Hash)).to.be.true()
-        expect(result.remainderPath).to.eql('')
-      })
-
-      it('missing link by name', () => {
-        expect(() =>
-          resolver.resolve(linksNodeBlob, 'missing link')
-        ).to.throw(
-          "Object has no property 'missing link'"
-        )
-      })
-
-      it('yield remainderPath if impossible to resolve through (a)', () => {
-        const result = resolver.resolve(linksNodeBlob, 'Links/1/Hash/Data')
-        expect(result.value.equals(
-          new CID('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')
-        )).to.be.true()
-        expect(result.remainderPath).to.equal('Data')
-      })
-
-      it('yield remainderPath if impossible to resolve through (b)', () => {
-        const result = resolver.resolve(linksNodeBlob, 'Links/1/Hash/Links/0/Hash/Data')
-        expect(result.value.equals(
-          new CID('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')
-        )).to.be.true()
-        expect(result.remainderPath).to.equal('Links/0/Hash/Data')
-      })
-
-      it('yield remainderPath if impossible to resolve through named link (a)', () => {
-        const result = resolver.resolve(linksNodeBlob, 'named link/Data')
-        expect(result.value.equals(
-          new CID('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')
-        )).to.be.true()
-        expect(result.remainderPath).to.equal('Data')
-      })
-
-      it('yield remainderPath if impossible to resolve through named link (b)', () => {
-        const result = resolver.resolve(linksNodeBlob, 'named link/Links/0/Hash/Data')
-        expect(result.value.equals(
-          new CID('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')
-        )).to.be.true()
-        expect(result.remainderPath).to.equal('Links/0/Hash/Data')
+  for (const [kind, linksNodeBlob] of linksNodeBlobs) {
+    describe(`links node ${kind}`, () => {
+      describe('resolver.resolve', () => {
+        it('links path', () => {
+          const result = resolver.resolve(linksNodeBlob, 'Links')
+          expect(result.value).to.containSubset(links)
+          expect(result.remainderPath).to.eql('')
+        })
+
+        it('links position path Hash', () => {
+          const result = resolver.resolve(linksNodeBlob, 'Links/1/Hash')
+          expect(result.value).to.eql(links[1].Hash)
+          expect(result.remainderPath).to.eql('')
+        })
+
+        it('links position path Name', () => {
+          const result = resolver.resolve(linksNodeBlob, 'Links/1/Name')
+          expect(result.value).to.eql(links[1].Name)
+          expect(result.remainderPath).to.eql('')
+        })
+
+        it('links position path Tsize', () => {
+          const result = resolver.resolve(linksNodeBlob, 'Links/1/Tsize')
+          expect(result.value).to.eql(links[1].Tsize)
+          expect(result.remainderPath).to.eql('')
+        })
+
+        it('links by name', () => {
+          const result = resolver.resolve(linksNodeBlob, 'named link')
+          expect(result.value.equals(links[1].Hash)).to.be.true()
+          expect(result.remainderPath).to.eql('')
+        })
+
+        it('missing link by name', () => {
+          expect(() =>
+            resolver.resolve(linksNodeBlob, 'missing link')
+          ).to.throw(
+            "Object has no property 'missing link'"
+          )
+        })
+
+        it('yield remainderPath if impossible to resolve through (a)', () => {
+          const result = resolver.resolve(linksNodeBlob, 'Links/1/Hash/Data')
+          expect(result.value.equals(
+            new CID('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')
+          )).to.be.true()
+          expect(result.remainderPath).to.equal('Data')
+        })
+
+        it('yield remainderPath if impossible to resolve through (b)', () => {
+          const result = resolver.resolve(linksNodeBlob, 'Links/1/Hash/Links/0/Hash/Data')
+          expect(result.value.equals(
+            new CID('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')
+          )).to.be.true()
+          expect(result.remainderPath).to.equal('Links/0/Hash/Data')
+        })
+
+        it('yield remainderPath if impossible to resolve through named link (a)', () => {
+          const result = resolver.resolve(linksNodeBlob, 'named link/Data')
+          expect(result.value.equals(
+            new CID('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')
+          )).to.be.true()
+          expect(result.remainderPath).to.equal('Data')
+        })
+
+        it('yield remainderPath if impossible to resolve through named link (b)', () => {
+          const result = resolver.resolve(linksNodeBlob, 'named link/Links/0/Hash/Data')
+          expect(result.value.equals(
+            new CID('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V')
+          )).to.be.true()
+          expect(result.remainderPath).to.equal('Links/0/Hash/Data')
+        })
+      })
+
+      it('resolver.tree', () => {
+        const tree = resolver.tree(linksNodeBlob)
+        const paths = [...tree]
+        expect(paths).to.have.members([
+          'Links',
+          'Links/0',
+          'Links/0/Name',
+          'Links/0/Tsize',
+          'Links/0/Hash',
+          'Links/1',
+          'Links/1/Name',
+          'Links/1/Tsize',
+          'Links/1/Hash',
+          'Data'
+        ])
       })
     })
+  }
 
-    it('resolver.tree', () => {
-      const tree = resolver.tree(linksNodeBlob)
-      const paths = [...tree]
-      expect(paths).to.have.members([
-        'Links',
-        'Links/0',
-        'Links/0/Name',
-        'Links/0/Tsize',
-        'Links/0/Hash',
-        'Links/1',
-        'Links/1/Name',
-        'Links/1/Tsize',
-        'Links/1/Hash',
-        'Data'
-      ])
-    })
-  })
-
-  describe('links and data node', () => {
-    describe('resolver.resolve', () => {
-      it('links path', () => {
-        const result = resolver.resolve(dataLinksNodeBlob, 'Links')
-        expect(result.value).to.eql(links)
-        expect(result.remainderPath).to.eql('')
-      })
-
-      it('data path', () => {
-        const result = resolver.resolve(dataLinksNodeBlob, 'Data')
-        expect(result.value).to.eql(Buffer.from('aaah the data'))
-        expect(result.remainderPath).to.eql('')
-      })
-
-      it('non existent path', () => {
-        expect(() =>
-          resolver.resolve(dataLinksNodeBlob, 'pathThatDoesNotExist')
-        ).to.throw(
-          "Object has no property 'pathThatDoesNotExist'"
-        )
-      })
-
-      it('empty path', () => {
-        const result = resolver.resolve(dataLinksNodeBlob, '')
-        expect(result.value.Data).to.eql(Buffer.from('aaah the data'))
-        expect(result.value.Links).to.eql(links)
-        expect(result.remainderPath).to.eql('')
+  for (const [kind, dataLinksNodeBlob] of dataLinksNodeBlobs) {
+    describe(`links and data node (${kind})`, () => {
+      describe('resolver.resolve', () => {
+        it('links path', () => {
+          const result = resolver.resolve(dataLinksNodeBlob, 'Links')
+          expect(result.value).to.containSubset(links)
+          expect(result.remainderPath).to.eql('')
+        })
+
+        it('data path', () => {
+          const result = resolver.resolve(dataLinksNodeBlob, 'Data')
+          expect(result.value).to.eql(Buffer.from('aaah the data'))
+          expect(result.remainderPath).to.eql('')
+        })
+
+        it('non existent path', () => {
+          expect(() =>
+            resolver.resolve(dataLinksNodeBlob, 'pathThatDoesNotExist')
+          ).to.throw(
+            "Object has no property 'pathThatDoesNotExist'"
+          )
+        })
+
+        it('empty path', () => {
+          const result = resolver.resolve(dataLinksNodeBlob, '')
+          expect(result.value.Data).to.eql(Buffer.from('aaah the data'))
+          expect(result.value.Links).to.containSubset(links)
+          expect(result.remainderPath).to.eql('')
+        })
+      })
+
+      it('resolver.tree', () => {
+        const tree = resolver.tree(dataLinksNodeBlob)
+        const paths = [...tree]
+        expect(paths).to.have.members([
+          'Links',
+          'Links/0',
+          'Links/0/Name',
+          'Links/0/Tsize',
+          'Links/0/Hash',
+          'Links/1',
+          'Links/1/Name',
+          'Links/1/Tsize',
+          'Links/1/Hash',
+          'Data'
+        ])
       })
     })
-
-    it('resolver.tree', () => {
-      const tree = resolver.tree(dataLinksNodeBlob)
-      const paths = [...tree]
-      expect(paths).to.have.members([
-        'Links',
-        'Links/0',
-        'Links/0/Name',
-        'Links/0/Tsize',
-        'Links/0/Hash',
-        'Links/1',
-        'Links/1/Name',
-        'Links/1/Tsize',
-        'Links/1/Hash',
-        'Data'
-      ])
-    })
-  })
+  }
 })
diff --git a/test/util.spec.js b/test/util.spec.js
index 1b91578..8460474 100644
--- a/test/util.spec.js
+++ b/test/util.spec.js
@@ -5,9 +5,11 @@
 const CID = require('cids')
 const { Buffer } = require('buffer')
 const chai = require('chai')
+const chaiSubset = require('chai-subset')
 const dirtyChai = require('dirty-chai')
 const expect = chai.expect
 chai.use(dirtyChai)
+chai.use(chaiSubset)
 
 const {
   DAGLink
@@ -33,6 +35,24 @@ describe('util', () => {
     expect(node.Data).to.deep.equal(data)
   })
 
+  it('should serialize a node with ArrayBuffer data', () => {
+    const data = Uint8Array.from([0, 1, 2, 3]).buffer
+    const result = serialize({ Data: data })
+    expect(result).to.be.an.instanceof(Uint8Array)
+
+    const node = deserialize(result)
+    expect(node.Data).to.deep.equal(Buffer.from([0, 1, 2, 3]))
+  })
+
+  it('should serialize a node with Uint8Array data', () => {
+    const data = Uint8Array.from([0, 1, 2, 3])
+    const result = serialize({ Data: data })
+    expect(result).to.be.an.instanceof(Uint8Array)
+
+    const node = deserialize(result)
+    expect(node.Data).to.deep.equal(Buffer.from([0, 1, 2, 3]))
+  })
+
   it('should serialize a node with links', () => {
     const links = [
       new DAGLink('', 0, 'QmWDtUQj38YLW8v3q4A6LwPn4vYKEbuKWpgSm6bjKW6Xfe')
@@ -41,7 +61,7 @@ describe('util', () => {
     expect(result).to.be.an.instanceof(Uint8Array)
 
     const node = deserialize(result)
-    expect(node.Links).to.deep.equal([{
+    expect(node.Links).to.containSubset([{
       Name: '',
       Tsize: 0,
       Hash: new CID('QmWDtUQj38YLW8v3q4A6LwPn4vYKEbuKWpgSm6bjKW6Xfe')
@@ -58,7 +78,7 @@ describe('util', () => {
     expect(result).to.be.an.instanceof(Uint8Array)
 
     const node = deserialize(result)
-    expect(node.Links).to.deep.equal(links)
+    expect(node.Links).to.containSubset(links)
   })
 
   it('should ignore invalid properties when serializing', () => {