diff --git a/README.md b/README.md
index b3a8032d..de28fffa 100644
--- a/README.md
+++ b/README.md
@@ -115,7 +115,7 @@ The `import` object is a duplex pull stream that takes objects of the form:
 }
 ```
 
-`import` will outoyt file info objects as files get stored in IPFS. When stats on a node are emitted they are guaranteed to have been written.
+`import` will output file info objects as files get stored in IPFS. When stats on a node are emitted they are guaranteed to have been written.
 
 `dag` is an instance of the [`IPLD Resolver`](https://github.com/ipld/js-ipld-resolver) or the [`js-ipfs` `dag api`](https://github.com/ipfs/interface-ipfs-core/tree/master/API/dag)
 
@@ -140,6 +140,7 @@ The input's file paths and directory structure will be preserved in the [`dag-pb
   - bits (positive integer, defaults to `8`): the number of bits at each bucket of the HAMT
 - `progress` (function): a function that will be called with the byte length of chunks as a file is added to ipfs.
 - `onlyHash` (boolean, defaults to false): Only chunk and hash - do not write to disk
+- `hashAlg` (string): multihash hashing algorithm to use
 
 ### Exporter
 
diff --git a/src/builder/reduce.js b/src/builder/reduce.js
index 3b7ea1c9..c520738f 100644
--- a/src/builder/reduce.js
+++ b/src/builder/reduce.js
@@ -32,7 +32,7 @@ module.exports = function (file, ipldResolver, options) {
   })
 
   waterfall([
-    (cb) => DAGNode.create(f.marshal(), links, cb),
+    (cb) => DAGNode.create(f.marshal(), links, options.hashAlg, cb),
     (node, cb) => {
       if (options.onlyHash) return cb(null, node)
 
diff --git a/src/importer/dir-flat.js b/src/importer/dir-flat.js
index 59751c12..f3a9af39 100644
--- a/src/importer/dir-flat.js
+++ b/src/importer/dir-flat.js
@@ -56,12 +56,13 @@ class DirFlat extends Dir {
     })
 
     const dir = new UnixFS('directory')
+    const options = this._options
 
     waterfall(
       [
-        (callback) => DAGNode.create(dir.marshal(), links, callback),
+        (callback) => DAGNode.create(dir.marshal(), links, options.hashAlg, callback),
         (node, callback) => {
-          if (this._options.onlyHash) return callback(null, node)
+          if (options.onlyHash) return callback(null, node)
 
           ipldResolver.put(
             node,
diff --git a/src/importer/dir-sharded.js b/src/importer/dir-sharded.js
index f120614b..aa7b349e 100644
--- a/src/importer/dir-sharded.js
+++ b/src/importer/dir-sharded.js
@@ -144,7 +144,7 @@ function flush (options, bucket, path, ipldResolver, source, callback) {
   dir.hashType = options.hashFn.code
   waterfall(
     [
-      (callback) => DAGNode.create(dir.marshal(), links, callback),
+      (callback) => DAGNode.create(dir.marshal(), links, options.hashAlg, callback),
       (node, callback) => {
         if (options.onlyHash) return callback(null, node)
 
diff --git a/test/test-builder.js b/test/test-builder.js
index 4e5d1780..015a7b0c 100644
--- a/test/test-builder.js
+++ b/test/test-builder.js
@@ -57,5 +57,38 @@ module.exports = (repo) => {
         )
       }, done)
     })
+
+    it('allows multihash hash algorithm to be specified for big file', (done) => {
+      eachSeries(Object.keys(mh.names), (hashAlg, cb) => {
+        const options = { hashAlg, strategy: 'flat' }
+        const content = String(Math.random() + Date.now())
+        const inputFile = {
+          path: content + '.txt',
+          // Bigger than maxChunkSize
+          content: Buffer.alloc(262144 + 5).fill(1)
+        }
+
+        const onCollected = (err, nodes) => {
+          if (err) return cb(err)
+
+          const node = nodes[0]
+
+          try {
+            expect(node).to.exist()
+            expect(mh.decode(node.multihash).name).to.equal(hashAlg)
+          } catch (err) {
+            return cb(err)
+          }
+
+          cb()
+        }
+
+        pull(
+          pull.values([Object.assign({}, inputFile)]),
+          createBuilder(FixedSizeChunker, ipldResolver, options),
+          pull.collect(onCollected)
+        )
+      }, done)
+    })
   })
 }
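
Usage note: with this patch, callers can choose the multihash function for every node the importer writes, rather than always getting the sha2-256 default. A minimal sketch of what that looks like, assuming an `ipldResolver` instance is already in scope and using the `importer` export this README describes (names not shown in the diff are illustrative):

```js
const pull = require('pull-stream')
const mh = require('multihashes')
const importer = require('ipfs-unixfs-engine').importer

pull(
  // Any source of { path, content } objects works here
  pull.values([{
    path: 'hello.txt',
    content: Buffer.from('hello world')
  }]),
  // The new `hashAlg` option: hash every DAGNode with sha2-512
  // instead of the sha2-256 default
  importer(ipldResolver, { hashAlg: 'sha2-512' }),
  pull.collect((err, files) => {
    if (err) throw err
    // The emitted file info carries a multihash produced with the
    // requested algorithm
    console.log(mh.decode(files[0].multihash).name) // => 'sha2-512'
  })
)
```

The option threads through unchanged to `DAGNode.create`, which already accepts a hash algorithm before its callback, so file nodes, flat directories, and sharded directories all pick it up from the same `options` object.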