diff --git a/package.json b/package.json
index 31fdcb9f..6b16b994 100644
--- a/package.json
+++ b/package.json
@@ -37,7 +37,6 @@
     "aegir": "^5.0.1",
     "async": "^1.5.2",
     "block-stream2": "^1.1.0",
-    "bs58": "^3.0.0",
     "buffer-loader": "0.0.1",
     "chai": "^3.5.0",
     "concat-stream": "^1.5.1",
@@ -61,6 +60,7 @@
     "ipfs-unixfs": "^0.1.0",
     "is-ipfs": "^0.2.0",
     "isstream": "^0.1.2",
+    "multihashes": "^0.2.2",
     "readable-stream": "^1.1.13",
     "run-series": "^1.1.4",
     "streamifier": "^0.1.1",
diff --git a/src/clean-multihash.js b/src/clean-multihash.js
new file mode 100644
index 00000000..d30a3ecf
--- /dev/null
+++ b/src/clean-multihash.js
@@ -0,0 +1,14 @@
+'use strict'
+
+const mh = require('multihashes')
+const isIPFS = require('is-ipfs')
+
+module.exports = function (multihash) {
+  if (!isIPFS.multihash(multihash)) {
+    throw new Error('not valid multihash')
+  }
+  if (Buffer.isBuffer(multihash)) {
+    return mh.toB58String(multihash)
+  }
+  return multihash
+}
diff --git a/src/exporter.js b/src/exporter.js
index afafef6c..cabeb039 100644
--- a/src/exporter.js
+++ b/src/exporter.js
@@ -10,6 +10,7 @@ const Readable = require('readable-stream').Readable
 const pathj = require('path')
 const util = require('util')
 const fieldtrip = require('field-trip')
+const cleanMultihash = require('./clean-multihash')
 
 exports = module.exports = Exporter
 
@@ -24,6 +25,7 @@ function Exporter (hash, dagService, options) {
   if (!isIPFS.multihash(hash)) {
     throw new Error('not valid multihash')
   }
+  hash = cleanMultihash(hash)
 
   Readable.call(this, { objectMode: true })
 
diff --git a/test/test-exporter.js b/test/test-exporter.js
index b7920828..e6b76c15 100644
--- a/test/test-exporter.js
+++ b/test/test-exporter.js
@@ -10,6 +10,7 @@ const UnixFS = require('ipfs-unixfs')
 const concat = require('concat-stream')
 const fs = require('fs')
 const path = require('path')
+const bs58 = require('bs58')
 
 let ds
 
@@ -24,6 +25,29 @@ module.exports = function (repo) {
       done()
     })
 
+    it('ensure hash inputs are sanitized', (done) => {
+      const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'
+      const bs = new BlockService(repo)
+      const ds = new DAGService(bs)
+      const mhBuf = new Buffer(bs58.decode(hash))
+      ds.get(hash, (err, fetchedNode) => {
+        expect(err).to.not.exist
+        const unmarsh = UnixFS.unmarshal(fetchedNode.data)
+        const testExport = exporter(mhBuf, ds)
+        testExport.on('error', (err) => {
+          expect(err).to.not.exist
+        })
+        testExport.pipe(concat((files) => {
+          expect(files).to.be.length(1)
+          expect(files[0].path).to.equal(hash)
+          files[0].content.pipe(concat((bldata) => {
+            expect(bldata).to.deep.equal(unmarsh.data)
+            done()
+          }))
+        }))
+      })
+    })
+
    it('export a file with no links', (done) => {
      const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'
      const bs = new BlockService(repo)
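
Taken together, the patch normalizes the Exporter's input at its boundary: the new `clean-multihash` helper accepts either a base58-encoded string or a raw multihash `Buffer` and always returns the base58 string, so the `path` values emitted downstream stay comparable as strings. A minimal sketch of the helper's contract, assuming it is run from the repo root with `bs58` available (the `bs58` round-trip here is illustrative, not part of the patch):

```js
'use strict'

const bs58 = require('bs58')
const cleanMultihash = require('./src/clean-multihash')

// Fixture hash reused from the test suite.
const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'

// A base58 string is returned unchanged.
console.log(cleanMultihash(hash) === hash) // true

// A raw multihash Buffer is converted back to its base58 string form,
// which is what lets Exporter accept either representation.
const mhBuf = new Buffer(bs58.decode(hash))
console.log(cleanMultihash(mhBuf) === hash) // true

// Invalid input fails fast instead of propagating downstream.
try {
  cleanMultihash('not-a-multihash')
} catch (err) {
  console.log(err.message) // 'not valid multihash'
}
```

Normalizing once in the `Exporter` constructor keeps the rest of the traversal code free of `Buffer`-vs-string branching, which is why the test can feed a `Buffer` in and still assert a string `path` out.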