From bf216a975094f215a797355046d196406f93b0f8 Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Wed, 21 Nov 2018 18:11:18 +0000
Subject: [PATCH] feat: split hamt out into separate module, closes #1

---
 package.json                   |   2 +-
 src/hamt/bucket.js             | 289 ---------------------------------
 src/hamt/consumable-buffer.js  |  82 ----------
 src/hamt/consumable-hash.js    | 103 ------------
 src/hamt/index.js              |   9 -
 src/importer/dir-sharded.js    |  81 ++++++---
 test/browser.js                |   5 -
 test/builder-dir-sharding.js   | 116 ++++++-----
 test/hamt-consumable-buffer.js | 103 ------------
 test/hamt-consumable-hash.js   |  83 ----------
 test/hamt.js                   | 190 ----------------------
 test/node.js                   |   5 -
 12 files changed, 131 insertions(+), 937 deletions(-)
 delete mode 100644 src/hamt/bucket.js
 delete mode 100644 src/hamt/consumable-buffer.js
 delete mode 100644 src/hamt/consumable-hash.js
 delete mode 100644 src/hamt/index.js
 delete mode 100644 test/hamt-consumable-buffer.js
 delete mode 100644 test/hamt-consumable-hash.js
 delete mode 100644 test/hamt.js

diff --git a/package.json b/package.json
index b4c9924..a943a93 100644
--- a/package.json
+++ b/package.json
@@ -57,6 +57,7 @@
     "async": "^2.6.1",
     "cids": "~0.5.5",
     "deep-extend": "~0.6.0",
+    "hamt-sharding": "~0.0.1",
     "ipfs-unixfs": "~0.1.16",
     "ipld-dag-pb": "~0.15.0",
     "left-pad": "^1.3.0",
@@ -72,7 +73,6 @@
     "pull-through": "^1.0.18",
     "pull-traverse": "^1.0.3",
     "pull-write": "^1.1.4",
-    "sparse-array": "^1.3.1",
     "stream-to-pull-stream": "^1.7.2"
   },
   "optionalDependencies": {
diff --git a/src/hamt/bucket.js b/src/hamt/bucket.js
deleted file mode 100644
index f15a95a..0000000
--- a/src/hamt/bucket.js
+++ /dev/null
@@ -1,289 +0,0 @@
-'use strict'
-
-const SparseArray = require('sparse-array')
-const map = require('async/map')
-const eachSeries = require('async/eachSeries')
-const wrapHash = require('./consumable-hash')
-
-const defaultOptions = {
-  bits: 8
-}
-
-// TODO: make HAMT a generic NPM package
-
-class Bucket {
-  constructor (options, parent, posAtParent) {
-    this._options = Object.assign({}, defaultOptions, options)
-    this._popCount = 0
-    this._parent = parent
-    this._posAtParent = posAtParent
-
-    if (!this._options.hashFn) {
-      throw new Error('please define an options.hashFn')
-    }
-
-    // make sure we only wrap options.hashFn once in the whole tree
-    if (!this._options.hash) {
-      this._options.hash = wrapHash(this._options.hashFn)
-    }
-    this._children = new SparseArray()
-  }
-
-  static isBucket (o) {
-    return o instanceof Bucket
-  }
-
-  put (key, value, callback) {
-    this._findNewBucketAndPos(key, (err, place) => {
-      if (err) {
-        callback(err)
-        return // early
-      }
-
-      place.bucket._putAt(place, key, value)
-      callback()
-    })
-  }
-
-  get (key, callback) {
-    this._findChild(key, (err, child) => {
-      if (err) {
-        callback(err)
-      } else {
-        callback(null, child && child.value)
-      }
-    })
-  }
-
-  del (key, callback) {
-    this._findPlace(key, (err, place) => {
-      if (err) {
-        callback(err)
-        return // early
-      }
-      const child = place.bucket._at(place.pos)
-      if (child && child.key === key) {
-        place.bucket._delAt(place.pos)
-      }
-      callback(null)
-    })
-  }
-
-  leafCount () {
-    this._children.reduce((acc, child) => {
-      if (child instanceof Bucket) {
-        return acc + child.leafCount()
-      }
-      return acc + 1
-    }, 0)
-  }
-
-  childrenCount () {
-    return this._children.length
-  }
-
-  onlyChild (callback) {
-    process.nextTick(() => callback(null, this._children.get(0)))
-  }
-
-  eachLeafSeries (iterator, callback) {
-    eachSeries(
-      this._children.compactArray(),
-      (child, cb) => {
-        if (child instanceof Bucket) {
-          child.eachLeafSeries(iterator, cb)
-        } else {
-          iterator(child.key, child.value, cb)
-        }
-      },
-      callback)
-  }
-
-  serialize (map, reduce) {
-    // serialize to a custom non-sparse representation
-    return reduce(this._children.reduce((acc, child, index) => {
-      if (child) {
-        if (child instanceof Bucket) {
-          acc.push(child.serialize(map, reduce))
-        } else {
-          acc.push(map(child, index))
-        }
-      }
-      return acc
-    }, []))
-  }
-
-  asyncTransform (asyncMap, asyncReduce, callback) {
-    asyncTransformBucket(this, asyncMap, asyncReduce, callback)
-  }
-
-  toJSON () {
-    return this.serialize(mapNode, reduceNodes)
-  }
-
-  prettyPrint () {
-    return JSON.stringify(this.toJSON(), null, '  ')
-  }
-
-  tableSize () {
-    return Math.pow(2, this._options.bits)
-  }
-
-  _findChild (key, callback) {
-    this._findPlace(key, (err, result) => {
-      if (err) {
-        callback(err)
-        return // early
-      }
-
-      const child = result.bucket._at(result.pos)
-      if (child && child.key === key) {
-        callback(null, child)
-      } else {
-        callback(null, undefined)
-      }
-    })
-  }
-
-  _findPlace (key, callback) {
-    const hashValue = this._options.hash(key)
-    hashValue.take(this._options.bits, (err, index) => {
-      if (err) {
-        callback(err)
-        return // early
-      }
-
-      const child = this._children.get(index)
-      if (child instanceof Bucket) {
-        child._findPlace(hashValue, callback)
-      } else {
-        const place = {
-          bucket: this,
-          pos: index,
-          hash: hashValue
-        }
-        callback(null, place)
-      }
-    })
-  }
-
-  _findNewBucketAndPos (key, callback) {
-    this._findPlace(key, (err, place) => {
-      if (err) {
-        callback(err)
-        return // early
-      }
-      const child = place.bucket._at(place.pos)
-      if (child && child.key !== key) {
-        // conflict
-
-        const bucket = new Bucket(this._options, place.bucket, place.pos)
-        place.bucket._putObjectAt(place.pos, bucket)
-
-        // put the previous value
-        bucket._findPlace(child.hash, (err, newPlace) => {
-          if (err) {
-            callback(err)
-            return // early
-          }
-
-          newPlace.bucket._putAt(newPlace, child.key, child.value)
-          bucket._findNewBucketAndPos(place.hash, callback)
-        })
-      } else {
-        // no conflict, we found the place
-        callback(null, place)
-      }
-    })
-  }
-
-  _putAt (place, key, value) {
-    this._putObjectAt(place.pos, {
-      key: key,
-      value: value,
-      hash: place.hash
-    })
-  }
-
-  _putObjectAt (pos, object) {
-    if (!this._children.get(pos)) {
-      this._popCount++
-    }
-    this._children.set(pos, object)
-  }
-
-  _delAt (pos) {
-    if (this._children.get(pos)) {
-      this._popCount--
-    }
-    this._children.unset(pos)
-    this._level()
-  }
-
-  _level () {
-    if (this._parent && this._popCount <= 1) {
-      if (this._popCount === 1) {
-        // remove myself from parent, replacing me with my only child
-        const onlyChild = this._children.find(exists)
-        if (!(onlyChild instanceof Bucket)) {
-          const hash = onlyChild.hash
-          hash.untake(this._options.bits)
-          const place = {
-            pos: this._posAtParent,
-            hash: hash
-          }
-          this._parent._putAt(place, onlyChild.key, onlyChild.value)
-        }
-      } else {
-        this._parent._delAt(this._posAtParent)
-      }
-    }
-  }
-
-  _at (index) {
-    return this._children.get(index)
-  }
-}
-
-function exists (o) {
-  return Boolean(o)
-}
-
-function mapNode (node, index) {
-  return node.key
-}
-
-function reduceNodes (nodes) {
-  return nodes
-}
-
-function asyncTransformBucket (bucket, asyncMap, asyncReduce, callback) {
-  map(
-    bucket._children.compactArray(),
-    (child, callback) => {
-      if (child instanceof Bucket) {
-        asyncTransformBucket(child, asyncMap, asyncReduce, callback)
-      } else {
-        asyncMap(child, (err, mappedChildren) => {
-          if (err) {
-            callback(err)
-          } else {
-            callback(null, {
-              bitField: bucket._children.bitField(),
-              children: mappedChildren
-            })
-          }
-        })
-      }
-    },
-    (err, mappedChildren) => {
-      if (err) {
-        callback(err)
-      } else {
-        asyncReduce(mappedChildren, callback)
-      }
-    }
-  )
-}
-
-module.exports = Bucket
diff --git a/src/hamt/consumable-buffer.js b/src/hamt/consumable-buffer.js
deleted file mode 100644
index 1b0d6c9..0000000
--- a/src/hamt/consumable-buffer.js
+++ /dev/null
@@ -1,82 +0,0 @@
-'use strict'
-
-const START_MASKS = [
-  0b11111111,
-  0b11111110,
-  0b11111100,
-  0b11111000,
-  0b11110000,
-  0b11100000,
-  0b11000000,
-  0b10000000
-]
-
-const STOP_MASKS = [
-  0b00000001,
-  0b00000011,
-  0b00000111,
-  0b00001111,
-  0b00011111,
-  0b00111111,
-  0b01111111,
-  0b11111111
-]
-
-module.exports = class ConsumableBuffer {
-  constructor (value) {
-    this._value = value
-    this._currentBytePos = value.length - 1
-    this._currentBitPos = 7
-  }
-
-  availableBits () {
-    return this._currentBitPos + 1 + this._currentBytePos * 8
-  }
-
-  totalBits () {
-    return this._value.length * 8
-  }
-
-  take (bits) {
-    let pendingBits = bits
-    let result = 0
-    while (pendingBits && this._haveBits()) {
-      const byte = this._value[this._currentBytePos]
-      const availableBits = this._currentBitPos + 1
-      const taking = Math.min(availableBits, pendingBits)
-      const value = byteBitsToInt(byte, availableBits - taking, taking)
-      result = (result << taking) + value
-
-      pendingBits -= taking
-
-      this._currentBitPos -= taking
-      if (this._currentBitPos < 0) {
-        this._currentBitPos = 7
-        this._currentBytePos--
-      }
-    }
-
-    return result
-  }
-
-  untake (bits) {
-    this._currentBitPos += bits
-    while (this._currentBitPos > 7) {
-      this._currentBitPos -= 8
-      this._currentBytePos += 1
-    }
-  }
-
-  _haveBits () {
-    return this._currentBytePos >= 0
-  }
-}
-
-function byteBitsToInt (byte, start, length) {
-  const mask = maskFor(start, length)
-  return (byte & mask) >>> start
-}
-
-function maskFor (start, length) {
-  return START_MASKS[start] & STOP_MASKS[Math.min(length + start - 1, 7)]
-}
diff --git a/src/hamt/consumable-hash.js b/src/hamt/consumable-hash.js
deleted file mode 100644
index 6fb40aa..0000000
--- a/src/hamt/consumable-hash.js
+++ /dev/null
@@ -1,103 +0,0 @@
-'use strict'
-
-const whilst = require('async/whilst')
-const ConsumableBuffer = require('./consumable-buffer')
-
-module.exports = function wrapHash (hashFn) {
-  return function hashing (value) {
-    if (value instanceof InfiniteHash) {
-      // already a hash. return it
-      return value
-    } else {
-      return new InfiniteHash(value, hashFn)
-    }
-  }
-}
-
-class InfiniteHash {
-  constructor (value, hashFn) {
-    if ((typeof value) !== 'string' && !Buffer.isBuffer(value)) {
-      throw new Error('can only hash strings or buffers')
-    }
-    this._value = value
-    this._hashFn = hashFn
-    this._depth = -1
-    this._availableBits = 0
-    this._currentBufferIndex = 0
-    this._buffers = []
-  }
-
-  take (bits, callback) {
-    let pendingBits = bits
-    whilst(
-      () => this._availableBits < pendingBits,
-      (callback) => {
-        this._produceMoreBits(callback)
-      },
-      (err) => {
-        if (err) {
-          callback(err)
-          return // early
-        }
-
-        let result = 0
-
-        // TODO: this is sync, no need to use whilst
-        whilst(
-          () => pendingBits > 0,
-          (callback) => {
-            const hash = this._buffers[this._currentBufferIndex]
-            const available = Math.min(hash.availableBits(), pendingBits)
-            const took = hash.take(available)
-            result = (result << available) + took
-            pendingBits -= available
-            this._availableBits -= available
-            if (hash.availableBits() === 0) {
-              this._currentBufferIndex++
-            }
-            callback()
-          },
-          (err) => {
-            if (err) {
-              callback(err)
-              return // early
-            }
-
-            process.nextTick(() => callback(null, result))
-          }
-        )
-      }
-    )
-  }
-
-  untake (bits) {
-    let pendingBits = bits
-    while (pendingBits > 0) {
-      const hash = this._buffers[this._currentBufferIndex]
-      const availableForUntake = Math.min(hash.totalBits() - hash.availableBits(), pendingBits)
-      hash.untake(availableForUntake)
-      pendingBits -= availableForUntake
-      this._availableBits += availableForUntake
-      if (this._currentBufferIndex > 0 && hash.totalBits() === hash.availableBits()) {
-        this._depth--
-        this._currentBufferIndex--
-      }
-    }
-  }
-
-  _produceMoreBits (callback) {
-    this._depth++
-    const value = this._depth ? this._value + this._depth : this._value
-    this._hashFn(value, (err, hashValue) => {
-      if (err) {
-        callback(err)
-        return // early
-      }
-
-      const buffer = new ConsumableBuffer(hashValue)
-      this._buffers.push(buffer)
-      this._availableBits += buffer.availableBits()
-      callback()
-    })
-  }
-}
diff --git a/src/hamt/index.js b/src/hamt/index.js
deleted file mode 100644
index bdb5c46..0000000
--- a/src/hamt/index.js
+++ /dev/null
@@ -1,9 +0,0 @@
-'use strict'
-
-const Bucket = require('./bucket')
-
-module.exports = function createHAMT (options) {
-  return new Bucket(options)
-}
-
-module.exports.isBucket = Bucket.isBucket
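The local src/hamt implementation deleted above now ships as the hamt-sharding module added to package.json. A minimal sketch of the extracted API, inferred only from how src/importer/dir-sharded.js consumes it below (the published module may expose more; `hashFn` stands in for the murmur3 helper defined in that file):

    const Bucket = require('hamt-sharding')

    const bucket = Bucket({ hashFn })

    async function example () {
      await bucket.put('key', 'value')      // put/get are now promise-based
      console.log(await bucket.get('key'))  // -> 'value'

      // leaves are yielded as { key, value } objects by an async iterator
      for await (const leaf of bucket.eachLeafSeries()) {
        console.log(leaf.key, leaf.value)
      }
    }
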
diff --git a/src/importer/dir-sharded.js b/src/importer/dir-sharded.js
index 3f8fd8d..911a414 100644
--- a/src/importer/dir-sharded.js
+++ b/src/importer/dir-sharded.js
@@ -11,26 +11,28 @@ const multihashing = require('multihashing-async')
 const Dir = require('./dir')
 const persist = require('../utils/persist')
-const Bucket = require('../hamt')
+const Bucket = require('hamt-sharding')
 
 const hashFn = function (value, callback) {
-  multihashing(value, 'murmur3-128', (err, hash) => {
-    if (err) {
-      callback(err)
-    } else {
-      // Multihashing inserts preamble of 2 bytes. Remove it.
-      // Also, murmur3 outputs 128 bit but, accidently, IPFS Go's
-      // implementation only uses the first 64, so we must do the same
-      // for parity..
-      const justHash = hash.slice(2, 10)
-      const length = justHash.length
-      const result = Buffer.alloc(length)
-      // TODO: invert buffer because that's how Go impl does it
-      for (let i = 0; i < length; i++) {
-        result[length - i - 1] = justHash[i]
-      }
-      callback(null, result)
-    }
-  })
+  return new Promise((resolve, reject) => {
+    multihashing(value, 'murmur3-128', (err, hash) => {
+      if (err) {
+        reject(err)
+      } else {
+        // Multihashing inserts a preamble of 2 bytes. Remove it.
+        // Also, murmur3 outputs 128 bits but, accidentally, the Go IPFS
+        // implementation only uses the first 64, so we must do the same
+        // for parity.
+        const justHash = hash.slice(2, 10)
+        const length = justHash.length
+        const result = Buffer.alloc(length)
+        // TODO: invert buffer because that's how the Go impl does it
+        for (let i = 0; i < length; i++) {
+          result[length - i - 1] = justHash[i]
+        }
+        resolve(result)
+      }
+    })
+  })
 }
 hashFn.code = 0x22 // TODO: get this from multihashing-async?
 
@@ -46,12 +48,23 @@
     this._bucket = Bucket(options)
   }
 
-  put (name, value, callback) {
-    this._bucket.put(name, value, callback)
+  async put (name, value, callback) {
+    try {
+      await this._bucket.put(name, value)
+
+      return callback()
+    } catch (err) {
+      console.error(err)
+      return callback(err)
+    }
   }
 
-  get (name, callback) {
-    this._bucket.get(name, callback)
+  async get (name, callback) {
+    try {
+      return callback(null, await this._bucket.get(name))
+    } catch (err) {
+      return callback(err)
+    }
   }
 
   childCount () {
@@ -63,11 +76,31 @@
   }
 
   onlyChild (callback) {
-    this._bucket.onlyChild(callback)
+    try {
+      return callback(null, this._bucket.onlyChild())
+    } catch (err) {
+      return callback(err)
+    }
   }
 
-  eachChildSeries (iterator, callback) {
-    this._bucket.eachLeafSeries(iterator, callback)
+  async eachChildSeries (iterator, callback) {
+    try {
+      for await (const child of this._bucket.eachLeafSeries()) {
+        await new Promise((resolve, reject) => {
+          iterator(child.key, child.value, (err) => {
+            if (err) {
+              return reject(err)
+            }
+
+            resolve()
+          })
+        })
+      }
+
+      callback()
+    } catch (err) {
+      callback(err)
+    }
   }
 
   flush (path, ipld, source, callback) {
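The hashFn comment is easier to follow with concrete bytes. A runnable sketch of the same trimming and inversion, using an invented digest purely for illustration:

    // 0x22 0x10 is the multihash preamble (murmur3-128 code + digest length),
    // followed here by 16 made-up digest bytes
    const hash = Buffer.from('2210a1b2c3d4e5f60718291a2b3c4d5e6f70', 'hex')

    // drop the 2-byte preamble, keep the first 64 bits of the digest
    const justHash = hash.slice(2, 10)          // <a1 b2 c3 d4 e5 f6 07 18>

    // byte-reverse, matching what the Go implementation does
    const result = Buffer.alloc(justHash.length)
    for (let i = 0; i < justHash.length; i++) {
      result[justHash.length - i - 1] = justHash[i]
    }                                           // <18 07 f6 e5 d4 c3 b2 a1>
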
diff --git a/test/browser.js b/test/browser.js
index 5633b34..e846743 100644
--- a/test/browser.js
+++ b/test/browser.js
@@ -34,11 +34,6 @@ describe('IPFS data importing tests on the Browser', function () {
     ], done)
   })
 
-  // HAMT
-  require('./hamt')
-  require('./hamt-consumable-buffer')
-  require('./hamt-consumable-hash')
-
   // Chunkers
   require('./chunker-fixed-size')
   require('./chunker-rabin-browser')
diff --git a/test/builder-dir-sharding.js b/test/builder-dir-sharding.js
index 6feb4fa..94db073 100644
--- a/test/builder-dir-sharding.js
+++ b/test/builder-dir-sharding.js
@@ -44,13 +44,17 @@
         ]),
         importer(ipld, options),
         pull.collect((err, nodes) => {
-          expect(err).to.not.exist()
-          expect(nodes.length).to.be.eql(2)
-          expect(nodes[0].path).to.be.eql('a/b')
-          expect(nodes[1].path).to.be.eql('a')
-          nonShardedHash = nodes[1].multihash
-          expect(nonShardedHash).to.exist()
-          done()
+          try {
+            expect(err).to.not.exist()
+            expect(nodes.length).to.be.eql(2)
+            expect(nodes[0].path).to.be.eql('a/b')
+            expect(nodes[1].path).to.be.eql('a')
+            nonShardedHash = nodes[1].multihash
+            expect(nonShardedHash).to.exist()
+            done()
+          } catch (err) {
+            done(err)
+          }
         })
       )
     })
@@ -69,14 +73,18 @@
         ]),
         importer(ipld, options),
         pull.collect((err, nodes) => {
-          expect(err).to.not.exist()
-          expect(nodes.length).to.be.eql(2)
-          expect(nodes[0].path).to.be.eql('a/b')
-          expect(nodes[1].path).to.be.eql('a')
-          shardedHash = nodes[1].multihash
-          // hashes are different
-          expect(shardedHash).to.not.equal(nonShardedHash)
-          done()
+          try {
+            expect(err).to.not.exist()
+            expect(nodes.length).to.be.eql(2)
+            expect(nodes[0].path).to.be.eql('a/b')
+            expect(nodes[1].path).to.be.eql('a')
+            shardedHash = nodes[1].multihash
+            // hashes are different
+            expect(shardedHash).to.not.equal(nonShardedHash)
+            done()
+          } catch (err) {
+            done(err)
+          }
         })
       )
     })
@@ -85,13 +93,18 @@
       pull(
        exporter(nonShardedHash, ipld),
        pull.collect((err, nodes) => {
-          expect(err).to.not.exist()
-          expect(nodes.length).to.be.eql(2)
-          const expectedHash = new CID(nonShardedHash).toBaseEncodedString()
-          expect(nodes[0].path).to.be.eql(expectedHash)
-          expect(new CID(nodes[0].hash).toBaseEncodedString()).to.be.eql(expectedHash)
-          expect(nodes[1].path).to.be.eql(expectedHash + '/b')
-          expect(nodes[1].size).to.be.eql(29)
+          try {
+            expect(err).to.not.exist()
+            expect(nodes.length).to.be.eql(2)
+            const expectedHash = new CID(nonShardedHash).toBaseEncodedString()
+            expect(nodes[0].path).to.be.eql(expectedHash)
+            expect(new CID(nodes[0].hash).toBaseEncodedString()).to.be.eql(expectedHash)
+            expect(nodes[1].path).to.be.eql(expectedHash + '/b')
+            expect(nodes[1].size).to.be.eql(29)
+          } catch (err) {
+            return done(err)
+          }
+
           pull(
             nodes[1].content,
             pull.collect(collected)
           )
        })
      )
 
      function collected (err, content) {
-        expect(err).to.not.exist()
-        expect(content.length).to.be.eql(1)
-        expect(content[0].toString()).to.be.eql('i have the best bytes')
-        done()
+        try {
+          expect(err).to.not.exist()
+          expect(content.length).to.be.eql(1)
+          expect(content[0].toString()).to.be.eql('i have the best bytes')
+          done()
+        } catch (err) {
+          done(err)
+        }
      }
    })
@@ -111,13 +128,18 @@
      pull(
        exporter(shardedHash, ipld),
        pull.collect((err, nodes) => {
-          expect(err).to.not.exist()
-          expect(nodes.length).to.be.eql(2)
-          const expectedHash = new CID(shardedHash).toBaseEncodedString()
-          expect(nodes[0].path).to.be.eql(expectedHash)
-          expect(new CID(nodes[0].hash).toBaseEncodedString()).to.be.eql(expectedHash)
-          expect(nodes[1].path).to.be.eql(expectedHash + '/b')
-          expect(nodes[1].size).to.be.eql(21)
+          try {
+            expect(err).to.not.exist()
+            expect(nodes.length).to.be.eql(2)
+            const expectedHash = new CID(shardedHash).toBaseEncodedString()
+            expect(nodes[0].path).to.be.eql(expectedHash)
+            expect(new CID(nodes[0].hash).toBaseEncodedString()).to.be.eql(expectedHash)
+            expect(nodes[1].path).to.be.eql(expectedHash + '/b')
+            expect(nodes[1].size).to.be.eql(21)
+          } catch (err) {
+            return done(err)
+          }
+
           pull(
             nodes[1].content,
             pull.collect(collected)
           )
        })
      )
 
      function collected (err, content) {
-        expect(err).to.not.exist()
-        expect(content.length).to.be.eql(1)
-        expect(content[0].toString()).to.be.eql('i have the best bytes')
-        done()
+        try {
+          expect(err).to.not.exist()
+          expect(content.length).to.be.eql(1)
+          expect(content[0].toString()).to.be.eql('i have the best bytes')
+          done()
+        } catch (err) {
+          done(err)
+        }
      }
    })
@@ -144,12 +170,16 @@
        push,
        importer(ipld),
        pull.collect((err, nodes) => {
-          expect(err).to.not.exist()
-          expect(nodes.length).to.be.eql(maxDirs + 1)
-          const last = nodes[nodes.length - 1]
-          expect(last.path).to.be.eql('big')
-          rootHash = last.multihash
-          done()
+          try {
+            expect(err).to.not.exist()
+            expect(nodes.length).to.be.eql(maxDirs + 1)
+            const last = nodes[nodes.length - 1]
+            expect(last.path).to.be.eql('big')
+            rootHash = last.multihash
+            done()
+          } catch (err) {
+            done(err)
+          }
        })
      )
diff --git a/test/hamt-consumable-buffer.js b/test/hamt-consumable-buffer.js
deleted file mode 100644
index 3fc47c2..0000000
--- a/test/hamt-consumable-buffer.js
+++ /dev/null
@@ -1,103 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const expect = require('chai').expect
-
-const ConsumableBuffer = require('../src/hamt/consumable-buffer')
-
-describe('HAMT: consumable buffer', () => {
-  let buf
-
-  it('can create an empty one', () => {
-    buf = new ConsumableBuffer([])
-  })
-
-  it('from which I can take nothing', () => {
-    expect(buf.take(0)).to.be.eql(0)
-  })
-
-  it('from which I can keep on taking', () => {
-    expect(buf.take(100)).to.be.eql(0)
-    expect(buf.take(1000)).to.be.eql(0)
-  })
-
-  it('can create one with one zeroed byte', () => {
-    buf = new ConsumableBuffer([0])
-  })
-
-  it('from which I can take nothing', () => {
-    expect(buf.take(0)).to.be.eql(0)
-  })
-
-  it('from which I can keep on taking', () => {
-    expect(buf.take(100)).to.be.eql(0)
-    expect(buf.take(1000)).to.be.eql(0)
-  })
-
-  it('can create one with one byte with ones', () => {
-    buf = new ConsumableBuffer([0b11111111])
-  })
-
-  it('from which I can take nothing', () => {
-    expect(buf.take(0)).to.be.eql(0)
-  })
-
-  it('from which I can take one bit at a time', () => {
-    for (let i = 0; i < 8; i++) {
-      expect(buf.take(1)).to.be.eql(1)
-    }
-  })
-
-  it('should be exhausted', () => {
-    expect(buf.take(1)).to.be.eql(0)
-  })
-
-  it('from which I can keep on taking', () => {
-    expect(buf.take(100)).to.be.eql(0)
-    expect(buf.take(1000)).to.be.eql(0)
-  })
-
-  it('can create one with 3 full bytes', () => {
-    buf = new ConsumableBuffer([0xff, 0xff, 0xff])
-  })
-
-  it('from which I can take nothing', () => {
-    expect(buf.take(0)).to.be.eql(0)
-  })
-
-  it('from which I can take one bit at a time', () => {
-    for (let i = 0; i < 24; i++) {
-      expect(buf.take(1)).to.be.eql(1)
-    }
-  })
-
-  it('should be exhausted', () => {
-    expect(buf.take(1)).to.be.eql(0)
-  })
-
-  it('can create one with 3 full bytes', () => {
-    buf = new ConsumableBuffer([0xff, 0xff, 0xff])
-  })
-
-  it('from which I can take 2 bits at a time', () => {
-    for (let i = 0; i < 12; i++) {
-      expect(buf.take(2)).to.be.eql(3)
-    }
-  })
-
-  it('should be exhausted', () => {
-    expect(buf.take(1)).to.be.eql(0)
-  })
-
-  it('can create one with 3 full bytes', () => {
-    buf = new ConsumableBuffer([0xff, 0xff, 0xff])
-  })
-
-  it('from which I can take every bit', () => {
-    expect(buf.take(24)).to.be.eql(0b111111111111111111111111)
-  })
-
-  it('should be exhausted', () => {
-    expect(buf.take(1)).to.be.eql(0)
-  })
-})
diff --git a/test/hamt-consumable-hash.js b/test/hamt-consumable-hash.js
deleted file mode 100644
index 5f76aaf..0000000
--- a/test/hamt-consumable-hash.js
+++ /dev/null
@@ -1,83 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const chai = require('chai')
-chai.use(require('dirty-chai'))
-const expect = chai.expect
-const crypto = require('crypto')
-const whilst = require('async/whilst')
-
-const ConsumableHash = require('../src/hamt/consumable-hash')
-
-describe('HAMT: consumable hash', () => {
-  let hash, h
-  const maxIter = 100
-  const values = []
-
-  it('can create a hashing function', () => {
-    hash = ConsumableHash(hashFn)
-  })
-
-  it('can take a 0 length value', (callback) => {
-    hash('some value').take(0, (err, result) => {
-      expect(err).to.not.exist()
-      expect(result).to.be.eql(0)
-      callback()
-    })
-  })
-
-  it('can take a 10 bit value', (callback) => {
-    hash('some value').take(10, (err, result) => {
-      expect(err).to.not.exist()
-      expect(result).to.be.eql(110)
-      callback()
-    })
-  })
-
-  it('can keep on taking a 10 bit value', (callback) => {
-    h = hash('some value')
-    let iter = maxIter
-    whilst(
-      () => iter > 0,
-      (callback) => {
-        h.take(10, (err, result) => {
-          expect(err).to.not.exist()
-          values.push(result)
-          expect(result).to.be.below(1024)
-          expect(result).to.be.above(0)
-          iter--
-          callback()
-        })
-      },
-      callback
-    )
-  })
-
-  it('can untake all', () => {
-    h.untake(10 * maxIter)
-  })
-
-  it('keeps taking the same values after untaking all', (callback) => {
-    let iter = maxIter
-    whilst(
-      () => iter > 0,
-      (callback) => {
-        h.take(10, (err, result) => {
-          expect(err).to.not.exist()
-          values.push(result)
-          expect(result).to.be.eql(values.shift())
-          iter--
-          callback()
-        })
-      },
-      callback
-    )
-  })
-})
-
-function hashFn (value, callback) {
-  callback(null, crypto
-    .createHash('sha256')
-    .update(value)
-    .digest())
-}
diff --git a/test/hamt.js b/test/hamt.js
deleted file mode 100644
index dc07ca2..0000000
--- a/test/hamt.js
+++ /dev/null
@@ -1,190 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const chai = require('chai')
-chai.use(require('dirty-chai'))
-const expect = chai.expect
-const crypto = require('crypto')
-const each = require('async/each')
-const eachSeries = require('async/eachSeries')
-
-const HAMT = require('../src/hamt')
-
-const hashFn = function (value, callback) {
-  callback(null, crypto
-    .createHash('sha256')
-    .update(value)
-    .digest())
-}
-
-const options = {
-  hashFn: hashFn
-}
-
-describe('HAMT', () => {
-  describe('basic', () => {
-    let bucket
-    it('can create an empty one', () => {
-      bucket = HAMT(options)
-    })
-
-    it('get unknown key returns undefined', (callback) => {
-      bucket.get('unknown', (err, result) => {
-        expect(err).to.not.exist()
-        expect(result).to.be.undefined()
-        callback()
-      })
-    })
-
-    it('can put a value', (callback) => {
-      bucket.put('key', 'value', callback)
-    })
-
-    it('can get that value', (callback) => {
-      bucket.get('key', (err, result) => {
-        expect(err).to.not.exist()
-        expect(result).to.eql('value')
-        callback()
-      })
-    })
-
-    it('can override a value', (callback) => {
-      bucket.put('key', 'a different value', callback)
-    })
-
-    it('can get that value', (callback) => {
-      bucket.get('key', (err, result) => {
-        expect(err).to.not.exist()
-        expect(result).to.eql('a different value')
-        callback()
-      })
-    })
-
-    it('can remove a non existing value', (callback) => {
-      bucket.del('a key which does not exist', callback)
-    })
-
-    it('can remove an existing value', (callback) => {
-      bucket.del('key', callback)
-    })
-
-    it('get deleted key returns undefined', (callback) => {
-      bucket.get('key', (err, result) => {
-        expect(err).to.not.exist()
-        expect(result).to.be.undefined()
-        callback()
-      })
-    })
-  })
-
-  describe('many keys', () => {
-    let bucket
-    let keys
-    let masterHead
-
-    it('can create an empty one', () => {
-      bucket = HAMT(options)
-    })
-
-    it('accepts putting many keys', (done) => {
-      const max = 400
-      keys = new Array(max)
-      for (let i = 1; i <= max; i++) {
-        keys[i - 1] = i.toString()
-      }
-
-      each(keys, (key, callback) => bucket.put(key, key, callback), done)
-    })
-
-    it('can remove all the keys and still find remaining', function (done) {
-      this.timeout(50 * 1000)
-
-      masterHead = keys.pop()
-      iterate()
-
-      function iterate () {
-        const head = keys.shift()
-        if (!head) {
-          done()
-          return // early
-        }
-
-        bucket.get(head, (err, value) => {
-          expect(err).to.not.exist()
-          expect(value).to.eql(head)
-          bucket.del(head, afterDel)
-        })
-
-        function afterDel (err) {
-          expect(err).to.not.exist()
-          bucket.get(head, afterGet)
-        }
-
-        function afterGet (err, value) {
-          expect(err).to.not.exist()
-          expect(value).to.be.undefined()
-
-          each(keys, onEachKey, reiterate)
-        }
-      }
-
-      function onEachKey (key, callback) {
-        bucket.get(key, (err, value) => {
-          expect(err).to.not.exist()
-          expect(value).to.eql(key)
-          callback()
-        })
-      }
-
-      function reiterate (err) {
-        expect(err).to.not.exist()
-        // break from stack on next iteration
-        process.nextTick(iterate)
-      }
-    })
-
-    it('collapsed all the buckets', () => {
-      expect(bucket.toJSON()).to.be.eql([masterHead])
-    })
-
-    it('can still find sole head', (callback) => {
-      bucket.get(masterHead, (err, value) => {
-        expect(err).to.not.exist()
-        expect(value).to.be.eql(masterHead)
-        callback()
-      })
-    })
-  })
-
-  describe('exhausting hash', () => {
-    let bucket
-
-    before(() => {
-      bucket = HAMT({
-        hashFn: smallHashFn,
-        bits: 2
-      })
-    })
-
-    it('iterates', (callback) => {
-      const size = 300
-      const keys = Array(size)
-      for (let i = 0; i < size; i++) {
-        keys[i] = i.toString()
-      }
-
-      eachSeries(keys, (key, callback) => bucket.put(key, key, callback), (err) => {
-        expect(err).to.not.exist()
-        callback()
-      })
-    })
-
-    function smallHashFn (value, callback) {
-      callback(null, crypto
-        .createHash('sha256')
-        .update(value)
-        .digest()
-        .slice(0, 2)) // just return the 2 first bytes of the hash
-    }
-  })
-})
diff --git a/test/node.js b/test/node.js
index cae5d02..196394e 100644
--- a/test/node.js
+++ b/test/node.js
@@ -37,11 +37,6 @@ describe('IPFS UnixFS Engine', () => {
     ], done)
   })
 
-  // HAMT
-  require('./hamt')
-  require('./hamt-consumable-buffer')
-  require('./hamt-consumable-hash')
-
   // Chunkers
   require('./chunker-fixed-size')
   require('./chunker-rabin')
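One pattern repeats throughout the dir-sharded.js changes: DirSharded keeps its Node-style callback API while the bucket is now promise-based, so each method awaits the bucket and reports the outcome through the callback inside try/catch. A generic sketch of that bridge (`callbackify` is illustrative, not part of this codebase or of hamt-sharding):

    function callbackify (fn) {
      return async (...args) => {
        // by convention the callback is the final argument
        const callback = args.pop()

        try {
          return callback(null, await fn(...args))
        } catch (err) {
          return callback(err)
        }
      }
    }

    // e.g. a get-like method: callbackify((name) => bucket.get(name))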