From 6f97418aabef326fca305bb00a928e6a6e756abc Mon Sep 17 00:00:00 2001
From: nginnever
Date: Thu, 12 May 2016 18:21:19 -0700
Subject: [PATCH 1/6] feat(ipfs.get): Add the ipfs.files.get call

---
 src/api/get.js       |  11 +++++
 src/load-commands.js |   1 +
 test/api/get.spec.js | 101 +++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 113 insertions(+)
 create mode 100644 src/api/get.js
 create mode 100644 test/api/get.spec.js

diff --git a/src/api/get.js b/src/api/get.js
new file mode 100644
index 000000000..aabaabba0
--- /dev/null
+++ b/src/api/get.js
@@ -0,0 +1,11 @@
+'use strict'
+
+module.exports = (send) => {
+  return function get (path, opts, cb) {
+    if (typeof opts === 'function' && !cb) {
+      cb = opts
+      opts = {}
+    }
+    return send('get', path, opts, null, cb)
+  }
+}
diff --git a/src/load-commands.js b/src/load-commands.js
index e8dc92657..81e3f5d9f 100644
--- a/src/load-commands.js
+++ b/src/load-commands.js
@@ -13,6 +13,7 @@ function requireCommands () {
     dht: require('./api/dht'),
     diag: require('./api/diag'),
     id: require('./api/id'),
+    get: require('./api/get'),
     log: require('./api/log'),
     ls: require('./api/ls'),
     mount: require('./api/mount'),
diff --git a/test/api/get.spec.js b/test/api/get.spec.js
new file mode 100644
index 000000000..82278616a
--- /dev/null
+++ b/test/api/get.spec.js
@@ -0,0 +1,101 @@
+/* eslint-env mocha */
+/* globals apiClients */
+'use strict'
+
+const expect = require('chai').expect
+const isNode = require('detect-node')
+const fs = require('fs')
+const bl = require('bl')
+
+const path = require('path')
+const streamEqual = require('stream-equal')
+
+let testfile
+let testfileBig
+
+if (isNode) {
+  testfile = fs.readFileSync(path.join(__dirname, '/../testfile.txt'))
+  testfileBig = fs.createReadStream(path.join(__dirname, '/../15mb.random'), { bufferSize: 128 })
+} else {
+  testfile = require('raw!../testfile.txt')
+}
+
+describe('.get', () => {
+  it('get with no compression args', (done) => {
+    apiClients.a
+      .get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', (err, res) => {
+        expect(err).to.not.exist
+        res.pipe(bl((err, bldata) => {
+          expect(err).to.not.exist
+          expect(bldata.toString()).to.contain(testfile.toString())
+          done()
+        }))
+      })
+  })
+
+  it('get with archive true', (done) => {
+    apiClients.a
+      .get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', {archive: true}, (err, res) => {
+        expect(err).to.not.exist
+        res.pipe(bl((err, bldata) => {
+          expect(err).to.not.exist
+          expect(bldata.toString()).to.contain(testfile.toString())
+          done()
+        }))
+      })
+  })
+
+  it('get err with out of range compression level', (done) => {
+    apiClients.a
+      .get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', {compress: true, 'compression-level': 10}, (err, res) => {
+        expect(err).to.exist
+        expect(err.toString()).to.equal('Error: Compression level must be between 1 and 9')
+        done()
+      })
+  })
+
+  it('get with compression level', (done) => {
+    apiClients.a
+      .get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', {compress: true, 'compression-level': 1}, (err, res) => {
+        expect(err).to.not.exist
+        done()
+      })
+  })
+
+  it.skip('get BIG file', (done) => {
+    if (!isNode) {
+      return done()
+    }
+
+    apiClients.a.get('Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', (err, res) => {
+      expect(err).to.not.exist
+
+      // Do not blow out the memory of nodejs :)
+      streamEqual(res, testfileBig, (err, equal) => {
+        expect(err).to.not.exist
+        expect(equal).to.be.true
+        done()
+      })
+    })
+  })
+
+  describe('promise', () => {
+    it.skip('get', (done) => {
+      return apiClients.a.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
+        .then((res) => {
+          let buf = ''
+          res
+            .on('error', (err) => {
+              throw err
+            })
+            .on('data', (data) => {
+              buf += data
+            })
+            .on('end', () => {
+              expect(buf).to.contain(testfile.toString())
+              done()
+            })
+        })
+    })
+  })
+})

From 4c96439b886ec5b6b28e3a99cdf589725dda5d34 Mon Sep 17 00:00:00 2001
From: Stephen Whitmore
Date: Tue, 14 Jun 2016 13:18:39 -0700
Subject: [PATCH 2/6] Move cleanMultihash into a module.

---
 src/api/cat.js         | 13 +------------
 src/clean-multihash.js | 15 +++++++++++++++
 2 files changed, 16 insertions(+), 12 deletions(-)
 create mode 100644 src/clean-multihash.js

diff --git a/src/api/cat.js b/src/api/cat.js
index 8b61f66a1..06e9bac90 100644
--- a/src/api/cat.js
+++ b/src/api/cat.js
@@ -1,8 +1,7 @@
 'use strict'
 
-const bs58 = require('bs58')
-const isIPFS = require('is-ipfs')
 const promisify = require('promisify-es6')
+const cleanMultihash = require('../clean-multihash')
 
 module.exports = (send) => {
   const cat = promisify((multihash, callback) => {
@@ -15,13 +14,3 @@ module.exports = (send) => {
   })
   return cat
 }
-
-function cleanMultihash (multihash) {
-  if (!isIPFS.multihash(multihash)) {
-    throw new Error('not valid multihash')
-  }
-  if (Buffer.isBuffer(multihash)) {
-    return bs58.encode(multihash)
-  }
-  return multihash
-}
diff --git a/src/clean-multihash.js b/src/clean-multihash.js
new file mode 100644
index 000000000..bbf3f9a39
--- /dev/null
+++ b/src/clean-multihash.js
@@ -0,0 +1,15 @@
+'use strict'
+
+const bs58 = require('bs58')
+const isIPFS = require('is-ipfs')
+
+module.exports = function (multihash) {
+  if (!isIPFS.multihash(multihash)) {
+    throw new Error('not valid multihash')
+  }
+  if (Buffer.isBuffer(multihash)) {
+    return bs58.encode(multihash)
+  }
+  return multihash
+}
+

From bf17442b9402dd26d906cd342f5ab976b5173f3c Mon Sep 17 00:00:00 2001
From: Stephen Whitmore
Date: Tue, 14 Jun 2016 19:13:34 -0700
Subject: [PATCH 3/6] Add files.get command and tests.
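
files.get pipes the daemon's tar response through the new
tar-stream-to-objects transform, so callers receive a readable object
stream of { path: 'string', content: Readable } entries instead of the
raw tar stream.

A minimal consumption sketch (the client construction is illustrative
and not part of this patch; the multihash is borrowed from the test
fixtures):

    // hypothetical wiring -- any js-ipfs-api client instance works here
    const ipfs = require('ipfs-api')('localhost', '5001')

    ipfs.files.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', (err, stream) => {
      if (err) throw err
      stream.on('data', (file) => {
        // file.path is the entry's path inside the DAG;
        // file.content is a Readable stream of its bytes
        file.content.pipe(process.stdout)
      })
    })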
---
 src/api/get.js               | 25 +++++++++++++++++++---
 src/load-commands.js         |  6 ++++++
 src/tar-stream-to-objects.js | 32 +++++++++++++++++++++++++++++
 test/api/get.spec.js         | 40 ++++++++++++++++++++++++++++--------
 4 files changed, 92 insertions(+), 11 deletions(-)
 create mode 100644 src/tar-stream-to-objects.js

diff --git a/src/api/get.js b/src/api/get.js
index aabaabba0..b1b833d9d 100644
--- a/src/api/get.js
+++ b/src/api/get.js
@@ -1,11 +1,30 @@
 'use strict'
 
+const tarStreamToObjects = require('../tar-stream-to-objects')
+const cleanMultihash = require('../clean-multihash')
+const promisify = require('promisify-es6')
+
 module.exports = (send) => {
-  return function get (path, opts, cb) {
+  return promisify(function get (path, opts, cb) {
     if (typeof opts === 'function' && !cb) {
       cb = opts
       opts = {}
     }
-    return send('get', path, opts, null, cb)
-  }
+
+    // opts is the real callback -- 'cb' is being injected by promisify
+    if (typeof opts === 'function' && typeof cb === 'function') {
+      cb = opts
+      opts = {}
+    }
+
+    try {
+      path = cleanMultihash(path)
+    } catch (err) {
+      return cb(err)
+    }
+
+    var sendWithTransform = send.withTransform(tarStreamToObjects)
+
+    return sendWithTransform('get', path, opts, null, cb)
+  })
 }
diff --git a/src/load-commands.js b/src/load-commands.js
index 81e3f5d9f..ebd832de7 100644
--- a/src/load-commands.js
+++ b/src/load-commands.js
@@ -34,6 +34,12 @@ function requireCommands () {
     const files = require('./api/files')(send)
     files.add = require('./api/add')(send)
     files.createAddStream = require('./api/add-stream.js')(send)
+    files.get = require('./api/get')(send)
+
+    // aliases
+    cmds.add = files.add
+    cmds.createAddStream = files.createAddStream
+    cmds.get = files.get
     return files
   }
diff --git a/src/tar-stream-to-objects.js b/src/tar-stream-to-objects.js
new file mode 100644
index 000000000..9e8ba0c51
--- /dev/null
+++ b/src/tar-stream-to-objects.js
@@ -0,0 +1,32 @@
+'use strict'
+
+const tar = require('tar-stream')
+const Readable = require('readable-stream')
+const through = require('through2')
+
+// transform tar stream into readable stream of
+// { path: 'string', content: Readable }
+module.exports = function (err, res, send, done) {
+
+  if (err) return done(err)
+
+  var ex = tar.extract()
+  res.pipe(ex)
+
+  var objStream = new Readable({ objectMode: true })
+  objStream._read = function noop () {}
+
+  ex.on('entry', function (header, stream, next) {
+    objStream.push({
+      path: header.name,
+      content: stream
+    })
+    next()
+  })
+  ex.on('finish', () => {
+    objStream.push(null)
+  })
+
+  done(null, objStream)
+}
+
diff --git a/test/api/get.spec.js b/test/api/get.spec.js
index 82278616a..30f9a8479 100644
--- a/test/api/get.spec.js
+++ b/test/api/get.spec.js
@@ -6,10 +6,14 @@ const expect = require('chai').expect
 const isNode = require('detect-node')
 const fs = require('fs')
 const bl = require('bl')
+const concat = require('concat-stream')
+const through = require('through2')
 
 const path = require('path')
 const streamEqual = require('stream-equal')
 
+const extract = require('tar-stream').extract
+
 let testfile
 let testfileBig
@@ -24,10 +28,20 @@ describe('.get', () => {
   it('get with no compression args', (done) => {
     apiClients.a
       .get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', (err, res) => {
-        expect(err).to.not.exist
-        res.pipe(bl((err, bldata) => {
-          expect(err).to.not.exist
-          expect(bldata.toString()).to.contain(testfile.toString())
+
+        // accumulate the files and their content
+        var files = []
+        res.pipe(through.obj((file, enc, next) => {
+          file.content.pipe(concat((content) => {
+            files.push({
+              path: file.path,
+              content: content
+            })
+            next()
+          }))
+        }, () => {
+          expect(files).to.be.length(1)
+          expect(files[0].content.toString()).to.contain(testfile.toString())
           done()
         }))
       })
@@ -36,10 +50,20 @@ describe('.get', () => {
   it('get with archive true', (done) => {
     apiClients.a
       .get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', {archive: true}, (err, res) => {
-        expect(err).to.not.exist
-        res.pipe(bl((err, bldata) => {
-          expect(err).to.not.exist
-          expect(bldata.toString()).to.contain(testfile.toString())
+
+        // accumulate the files and their content
+        var files = []
+        res.pipe(through.obj((file, enc, next) => {
+          file.content.pipe(concat((content) => {
+            files.push({
+              path: file.path,
+              content: content
+            })
+            next()
+          }))
+        }, () => {
+          expect(files).to.be.length(1)
+          expect(files[0].content.toString()).to.contain(testfile.toString())
           done()
         }))
       })

From d8a1689596a6de5f9a2de686506233e5f7a5c438 Mon Sep 17 00:00:00 2001
From: Stephen Whitmore
Date: Thu, 23 Jun 2016 12:19:11 -0700
Subject: [PATCH 4/6] Set content to null for directories.

---
 src/tar-stream-to-objects.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tar-stream-to-objects.js b/src/tar-stream-to-objects.js
index 9e8ba0c51..888aca797 100644
--- a/src/tar-stream-to-objects.js
+++ b/src/tar-stream-to-objects.js
@@ -19,7 +19,7 @@ module.exports = function (err, res, send, done) {
   ex.on('entry', function (header, stream, next) {
     objStream.push({
       path: header.name,
-      content: stream
+      content: header.type !== 'directory' ? stream : null
     })
     next()
   })

From 833f249aecc82dd10b1db2aefe5536b0bda65782 Mon Sep 17 00:00:00 2001
From: David Dias
Date: Tue, 9 Aug 2016 10:16:24 +0100
Subject: [PATCH 5/6] fix(lint+unused modules): Remove the usage of raw buffer
 loader, upgrade to aegir 6 and fix linting

---
 package.json                 |  2 +-
 src/tar-stream-to-objects.js |  6 +++---
 tasks/daemons.js             |  1 +
 test/api/block.spec.js       |  1 +
 test/api/bootstrap.spec.js   |  1 +
 test/api/config.spec.js      |  1 +
 test/api/files.spec.js       |  1 +
 test/api/get.spec.js         | 25 +++++++++++++------------
 test/api/log.spec.js         |  1 +
 9 files changed, 23 insertions(+), 16 deletions(-)

diff --git a/package.json b/package.json
index f0ff3949a..d14eb1053 100644
--- a/package.json
+++ b/package.json
@@ -30,7 +30,7 @@
     "url": "https://github.com/ipfs/js-ipfs-api"
   },
   "devDependencies": {
-    "aegir": "^5.0.1",
+    "aegir": "^6.0.0",
     "chai": "^3.5.0",
     "gulp": "^3.9.1",
     "interface-ipfs-core": "^0.5.0",
diff --git a/src/tar-stream-to-objects.js b/src/tar-stream-to-objects.js
index 888aca797..b817d5612 100644
--- a/src/tar-stream-to-objects.js
+++ b/src/tar-stream-to-objects.js
@@ -2,13 +2,13 @@
 
 const tar = require('tar-stream')
 const Readable = require('readable-stream')
-const through = require('through2')
 
 // transform tar stream into readable stream of
 // { path: 'string', content: Readable }
 module.exports = function (err, res, send, done) {
-
-  if (err) return done(err)
+  if (err) {
+    return done(err)
+  }
 
   var ex = tar.extract()
   res.pipe(ex)
diff --git a/tasks/daemons.js b/tasks/daemons.js
index a341312dc..ad1c1909d 100644
--- a/tasks/daemons.js
+++ b/tasks/daemons.js
@@ -1,4 +1,5 @@
 'use strict'
+/* eslint max-nested-callbacks: ["error", 8] */ // TODO reduce nestedness
 
 const gulp = require('gulp')
 const fs = require('fs')
diff --git a/test/api/block.spec.js b/test/api/block.spec.js
index 4fdef4646..5fedd8f0e 100644
--- a/test/api/block.spec.js
+++ b/test/api/block.spec.js
@@ -1,4 +1,5 @@
 /* eslint-env mocha */
+/* eslint max-nested-callbacks: ["error", 8] */
 /* globals apiClients */
 'use strict'
 
diff --git a/test/api/bootstrap.spec.js b/test/api/bootstrap.spec.js
index 5728d0dbd..8cecb7978 100644
--- a/test/api/bootstrap.spec.js
+++ b/test/api/bootstrap.spec.js
@@ -1,4 +1,5 @@
 /* eslint-env mocha */
+/* eslint max-nested-callbacks: ["error", 8] */
 /* globals apiClients */
 'use strict'
 
diff --git a/test/api/config.spec.js b/test/api/config.spec.js
index 21ca82d8e..86914958e 100644
--- a/test/api/config.spec.js
+++ b/test/api/config.spec.js
@@ -1,4 +1,5 @@
 /* eslint-env mocha */
+/* eslint max-nested-callbacks: ["error", 8] */
 /* globals apiClients */
 'use strict'
 
diff --git a/test/api/files.spec.js b/test/api/files.spec.js
index 7a5cc616b..731a37395 100644
--- a/test/api/files.spec.js
+++ b/test/api/files.spec.js
@@ -1,4 +1,5 @@
 /* eslint-env mocha */
+/* eslint max-nested-callbacks: ["error", 8] */
 /* globals apiClients */
 'use strict'
 
diff --git a/test/api/get.spec.js b/test/api/get.spec.js
index 30f9a8479..42c5b852e 100644
--- a/test/api/get.spec.js
+++ b/test/api/get.spec.js
@@ -1,33 +1,33 @@
 /* eslint-env mocha */
+/* eslint max-nested-callbacks: ["error", 8] */
 /* globals apiClients */
+
 'use strict'
 
 const expect = require('chai').expect
 const isNode = require('detect-node')
 const fs = require('fs')
-const bl = require('bl')
+// const bl = require('bl')
 const concat = require('concat-stream')
 const through = require('through2')
+const streamEqual = require('stream-equal')
 
 const path = require('path')
-const streamEqual = require('stream-equal')
 
-const extract = require('tar-stream').extract
+// const extract = require('tar-stream').extract
 
-let testfile
+const testfile = fs.readFileSync(path.join(__dirname, '/../testfile.txt'))
 let testfileBig
 
 if (isNode) {
-  testfile = fs.readFileSync(path.join(__dirname, '/../testfile.txt'))
   testfileBig = fs.createReadStream(path.join(__dirname, '/../15mb.random'), { bufferSize: 128 })
-} else {
-  testfile = require('raw!../testfile.txt')
 }
 
-describe('.get', () => {
+describe.skip('.get', () => {
   it('get with no compression args', (done) => {
     apiClients.a
       .get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', (err, res) => {
+        expect(err).to.not.exist
 
         // accumulate the files and their content
         var files = []
@@ -50,6 +50,7 @@ describe('.get', () => {
   it('get with archive true', (done) => {
     apiClients.a
       .get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', {archive: true}, (err, res) => {
+        expect(err).to.not.exist
 
         // accumulate the files and their content
         var files = []
@@ -86,7 +87,7 @@ describe('.get', () => {
     })
   })
 
-  it.skip('get BIG file', (done) => {
+  it('get BIG file', (done) => {
     if (!isNode) {
       return done()
     }
@@ -103,9 +104,9 @@ describe('.get', () => {
     })
   })
 
-  describe('promise', () => {
-    it.skip('get', (done) => {
-      return apiClients.a.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
+  describe.skip('promise', () => {
+    it('get', (done) => {
+      apiClients.a.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
         .then((res) => {
           let buf = ''
           res
diff --git a/test/api/log.spec.js b/test/api/log.spec.js
index 6c46ba36f..65ac3c0ac 100644
--- a/test/api/log.spec.js
+++ b/test/api/log.spec.js
@@ -1,4 +1,5 @@
 /* eslint-env mocha */
+/* eslint max-nested-callbacks: ["error", 8] */
 /* globals apiClients */
 'use strict'

From 248cd137359d679a1bfd6317fa49e990c56c380d Mon Sep 17 00:00:00 2001
From: David Dias
Date: Tue, 9 Aug 2016 10:36:07 +0100
Subject: [PATCH 6/6] fix(files.get):

- Move path.join call out of readStream to not mess with the AST parser
- Fix big file test

---
 package.json                    |  3 +-
 src/add-to-dagnode-transform.js |  1 +
 src/api/add-files.js            |  5 ++++
 src/request-api.js              | 38 +++++++++++++++--------
 test/api/get.spec.js            | 53 +++++++++++++++++++--------------
 5 files changed, 64 insertions(+), 36 deletions(-)

diff --git a/package.json b/package.json
index d14eb1053..6f8915b59 100644
--- a/package.json
+++ b/package.json
@@ -33,8 +33,9 @@
     "aegir": "^6.0.0",
     "chai": "^3.5.0",
     "gulp": "^3.9.1",
-    "interface-ipfs-core": "^0.5.0",
+    "interface-ipfs-core": "^0.6.0",
     "ipfsd-ctl": "^0.14.0",
+    "passthrough-counter": "^1.0.0",
     "pre-commit": "^1.1.3",
     "stream-equal": "^0.1.8",
     "stream-http": "^2.3.1",
diff --git a/src/add-to-dagnode-transform.js b/src/add-to-dagnode-transform.js
index 46e92b6fb..f70d869f9 100644
--- a/src/add-to-dagnode-transform.js
+++ b/src/add-to-dagnode-transform.js
@@ -8,6 +8,7 @@ module.exports = function (err, res, send, done) {
   if (err) {
     return done(err)
   }
+
   async.map(res, function map (entry, next) {
     getDagNode(send, entry.Hash, function (err, node) {
       if (err) {
diff --git a/src/api/add-files.js b/src/api/add-files.js
index c5362f643..26f71097d 100644
--- a/src/api/add-files.js
+++ b/src/api/add-files.js
@@ -1,5 +1,6 @@
 'use strict'
 
+const isNode = require('detect-node')
 const addToDagNodesTransform = require('../add-to-dagnode-transform')
 
 module.exports = (send) => {
@@ -9,6 +10,10 @@ module.exports = (send) => {
       opts = {}
     }
 
+    if (!isNode) {
+      return cb(new Error('Recursive uploads are not supported in the browser'))
+    }
+
    if (typeof (path) !== 'string') {
       return cb(new Error('"path" must be a string'))
     }
diff --git a/src/request-api.js b/src/request-api.js
index bef4fafcf..a5c01ea4c 100644
--- a/src/request-api.js
+++ b/src/request-api.js
@@ -4,6 +4,7 @@ const Wreck = require('wreck')
 const Qs = require('qs')
 const ndjson = require('ndjson')
 const getFilesStream = require('./get-files-stream')
+const Counter = require('passthrough-counter')
 
 const isNode = require('detect-node')
 
@@ -11,13 +12,19 @@ const isNode = require('detect-node')
 
 function parseChunkedJson (res, cb) {
   const parsed = []
+  const c = new Counter()
   res
+    .pipe(c)
     .pipe(ndjson.parse())
-    .on('data', parsed.push.bind(parsed))
-    .on('end', () => cb(null, parsed))
+    .on('data', (obj) => {
+      parsed.push(obj)
+    })
+    .on('end', () => {
+      cb(null, parsed)
+    })
 }
 
-function onRes (buffer, cb) {
+function onRes (buffer, cb, uri) {
   return (err, res) => {
     if (err) {
       return cb(err)
@@ -42,10 +49,14 @@ function onRes (buffer, cb) {
       })
     }
 
-    if (stream && !buffer) return cb(null, res)
+    if (stream && !buffer) {
+      return cb(null, res)
+    }
 
     if (chunkedObjects) {
-      if (isJson) return parseChunkedJson(res, cb)
+      if (isJson) {
+        return parseChunkedJson(res, cb)
+      }
 
       return Wreck.read(res, null, cb)
     }
@@ -56,6 +67,11 @@ function onRes (buffer, cb) {
 
 function requestAPI (config, path, args, qs, files, buffer, cb) {
   qs = qs || {}
+
+  if (Array.isArray(files)) {
+    qs.recursive = true
+  }
+
   if (Array.isArray(path)) path = path.join('/')
   if (args && !Array.isArray(args)) args = [args]
   if (args) qs.arg = args
@@ -67,10 +83,6 @@ function requestAPI (config, path, args, qs, files, buffer, cb) {
     delete qs.r
   }
 
-  if (!isNode && qs.recursive && path === 'add') {
-    return cb(new Error('Recursive uploads are not supported in the browser'))
-  }
-
   qs['stream-channels'] = true
 
   let stream
@@ -104,7 +116,7 @@ function requestAPI (config, path, args, qs, files, buffer, cb) {
     opts.payload = stream
   }
 
-  return Wreck.request(opts.method, opts.uri, opts, onRes(buffer, cb))
+  return Wreck.request(opts.method, opts.uri, opts, onRes(buffer, cb, opts.uri))
 }
 
 // -- Interface
@@ -128,9 +140,9 @@ exports = module.exports = function getRequestAPI (config) {
     return requestAPI(config, path, args, qs, files, buffer, cb)
   }
 
-  // Wraps the 'send' function such that an asynchronous transform may be
-  // applied to its result before passing it on to either its callback or
-  // promise.
+  // Wraps the 'send' function such that an asynchronous
+  // transform may be applied to its result before
+  // passing it on to either its callback or promise.
   send.withTransform = function (transform) {
     return function (path, args, qs, files, buffer, cb) {
       if (typeof buffer === 'function') {
diff --git a/test/api/get.spec.js b/test/api/get.spec.js
index 42c5b852e..77a9f7451 100644
--- a/test/api/get.spec.js
+++ b/test/api/get.spec.js
@@ -17,13 +17,15 @@ const path = require('path')
 // const extract = require('tar-stream').extract
 
 const testfile = fs.readFileSync(path.join(__dirname, '/../testfile.txt'))
+
 let testfileBig
 
 if (isNode) {
-  testfileBig = fs.createReadStream(path.join(__dirname, '/../15mb.random'), { bufferSize: 128 })
+  const tfbPath = path.join(__dirname, '/../15mb.random')
+  testfileBig = fs.createReadStream(tfbPath, { bufferSize: 128 })
 }
 
-describe.skip('.get', () => {
+describe('.get', () => {
   it('get with no compression args', (done) => {
     apiClients.a
       .get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', (err, res) => {
@@ -92,35 +94,42 @@ describe('.get', () => {
       return done()
     }
 
-    apiClients.a.get('Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', (err, res) => {
+    apiClients.a.get('Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', (err, files) => {
       expect(err).to.not.exist
 
-      // Do not blow out the memory of nodejs :)
-      streamEqual(res, testfileBig, (err, equal) => {
-        expect(err).to.not.exist
-        expect(equal).to.be.true
-        done()
+      files.on('data', (file) => {
+        // Do not blow out the memory of nodejs :)
+        streamEqual(file.content, testfileBig, (err, equal) => {
+          expect(err).to.not.exist
+          expect(equal).to.be.true
+          done()
+        })
       })
     })
   })
 
-  describe.skip('promise', () => {
+  describe('promise', () => {
     it('get', (done) => {
       apiClients.a.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
-        .then((res) => {
-          let buf = ''
-          res
-            .on('error', (err) => {
-              throw err
-            })
-            .on('data', (data) => {
-              buf += data
-            })
-            .on('end', () => {
-              expect(buf).to.contain(testfile.toString())
-              done()
-            })
+        .then((files) => {
+          files.on('data', (file) => {
+            let buf = ''
+            file.content
+              .on('error', (err) => {
+                throw err
+              })
+              .on('data', (data) => {
+                buf += data.toString()
+              })
+              .on('end', () => {
+                expect(buf).to.contain(testfile.toString())
+                done()
+              })
+          })
        })
+        .catch((err) => {
+          expect(err).to.not.exist
+        })
     })
   })
 })
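
A closing usage note on the API shape after this series: since files.get
is wrapped with promisify-es6, it returns a promise when no callback is
supplied. A minimal sketch, assuming an already-constructed client and
reusing the test fixture's multihash:

    ipfs.files.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
      .then((files) => {
        files.on('data', (file) => {
          // content is null for directory entries (see PATCH 4/6)
          if (file.content) file.content.pipe(process.stdout)
        })
      })
      .catch((err) => console.error(err))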