From 8f34b0eecd7a57acddf5f256b6a45fe92220d646 Mon Sep 17 00:00:00 2001 From: David Dias Date: Fri, 17 Nov 2017 17:41:11 +0000 Subject: [PATCH] New Streaming and Buffered Interfaces (#162) * add function signatures * fix typo * complete update the spec * chore: update deps * structure tests, add placeholders * apply cr * update .add tests * files.add done * .addReadableStream * .addPullStream * files.cat * .catReadableStream * catPulLStream * .get * getReadableStream * chore: fix travis * chore: add ipfs.ls spec * chore: update CI * chore: fix CI and linting * fix directory test * add remaining tests for .ls .lsReadableStream and .lsPullStream * apply spec CR * enable all tests to fix https://github.com/ipfs/js-ipfs-api/issues/339 * remove dup ls test * .add tested in js-ipfs * get tests also passing on js-ipfs * ls is implemented in js-ipfs too --- .travis.yml | 5 - SPEC/FILES.md | 469 ++++++++++++++++--- circle.yml | 9 - package.json | 11 +- src/files.js | 1208 +++++++++++++++++++++++++++++-------------------- 5 files changed, 1129 insertions(+), 573 deletions(-) diff --git a/.travis.yml b/.travis.yml index f87ae1e2..b305ff68 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,12 +9,7 @@ matrix: script: - npm run lint -before_script: - - export DISPLAY=:99.0 - - sh -e /etc/init.d/xvfb start - addons: - firefox: 'latest' apt: sources: - ubuntu-toolchain-r-test diff --git a/SPEC/FILES.md b/SPEC/FILES.md index 353782c2..e595a6f2 100644 --- a/SPEC/FILES.md +++ b/SPEC/FILES.md @@ -1,6 +1,8 @@ files API ========= +> The files API enables users to use the File System abstraction of IPFS. + #### `add` > Add files and data to IPFS. 
@@ -9,24 +11,27 @@ files API ##### `JavaScript` - ipfs.files.add(data, [options], [callback]) -Where `data` may be +Where `data` may be: -- an array of objects, each of the form +- a [`Buffer instance`][b] +- a [`Readable Stream`][rs] +- a [`Pull Stream`][ps] +- a Path (caveat: will only work in Node.js) +- a URL +- an array of objects, each of the form: ```JavaScript { - path: '/tmp/myfile.txt', - content: (Buffer or Readable stream) + path: '/tmp/myfile.txt', // The file path + content: // A Buffer, Readable Stream or Pull Stream with the contents of the file } ``` -- a `Buffer` instance -- a `Readable` stream - If no `content` is passed, then the path is treated as an empty directory `options` is an optional object argument that might include the following keys: - cid-version (integer, default 0): the CID version to use when storing the data (storage keys are based on the CID, including it's version) - progress (function): a function that will be called with the byte length of chunks as a file is added to ipfs. +- recursive (boolean): for when a Path is passed, this option can be enabled to add recursively all the files. - hashAlg || hash (string): multihash hashing algorithm to use `callback` must follow `function (err, res) {}` signature, where `err` is an error if the operation was not successful. `res` will be an array of: @@ -52,88 +57,195 @@ const files = [ ] ipfs.files.add(files, function (err, files) { - // 'files' will be an array of objects + // 'files' will be an array of objects containing paths and the multihashes of the files added }) ``` A great source of [examples][] can be found in the tests for this API. -#### `createAddStream` +#### `addReadableStream` -> Add files and data to IPFS using a transform stream. +> Add files and data to IPFS using a [Readable Stream][rs] of class Duplex. 
##### `Go` **WIP** -##### `JavaScript` - ipfs.files.createAddStream([options], [callback]) +##### `JavaScript` - ipfs.files.addReadableStream([options]) -> [Readable Stream][rs] -Provides a Transform stream, where objects can be written of the forms +Returns a Readable Stream of class Duplex, where objects can be written of the forms ```js { - path: '/tmp/myfile.txt', - content: (Buffer or Readable stream) + path: '/tmp/myfile.txt', // The file path + content: // A Buffer, Readable Stream or Pull Stream with the contents of the file +} +``` + +`options` is an optional object argument that might include the following keys: + +- cid-version (integer, default 0): the CID version to use when storing the data (storage keys are based on the CID, including it's version) +- progress (function): a function that will be called with the byte length of chunks as a file is added to ipfs. +- hashAlg || hash (string): multihash hashing algorithm to use + +If no `callback` is passed, a promise is returned. + +**Example:** + +```JavaScript +const stream = ipfs.files.addReadableStream() +stream.on('data', function (file) { + // 'file' will be of the form + // { + // path: '/tmp/myfile.txt', + // hash: 'QmHash' // base58 encoded multihash + // size: 123 + // } +}) + +stream.write({ + path: + content: +}) +// write as many files as you want + +stream.end() +}) +``` + +A great source of [examples][] can be found in the tests for this API. + +#### `addPullStream` + +> Add files and data to IPFS using a [Pull Stream][ps]. + +##### `Go` **WIP** + +##### `JavaScript` - ipfs.files.addPullStream([options]) -> [Pull Stream][ps] + +Returns a Pull Stream, where objects can be written of the forms + +```js +{ + path: '/tmp/myfile.txt', // The file path + content: // A Buffer, Readable Stream or Pull Stream with the contents of the file } ``` -`options` is an optional object argument containing the [DAG importer options](https://github.com/ipfs/js-ipfs-unixfs-engine#importer-api). 
+`options` is an optional object argument that might include the following keys: -`callback` must follow `function (err, stream) {}` signature, where `err` is an -error if the operation was not successful. `stream` will be a Transform stream, -to which tuples like the above two object formats can be written and [DAGNode][] -objects will be outputted. +- cid-version (integer, default 0): the CID version to use when storing the data (storage keys are based on the CID, including it's version) +- progress (function): a function that will be called with the byte length of chunks as a file is added to ipfs. +- hashAlg || hash (string): multihash hashing algorithm to use If no `callback` is passed, a promise is returned. **Example:** ```JavaScript -ipfs.files.createAddStream(function (err, stream) { - stream.on('data', function (file) { - // 'file' will be of the form +const stream = ipfs.files.addPullStream() + +pull( + pull.values([ + { path: , content: } + ]), + stream, + pull.collect((err, values) => { + // values will be an array of objects, which one of the form // { // path: '/tmp/myfile.txt', // hash: 'QmHash' // base58 encoded multihash // size: 123 // } }) +) +``` - stream.write({ - path: , - content: - }) - // write as many as you want +#### `cat` + +> Returns a file addressed by a valid IPFS Path. 
+ +##### `Go` **WIP** + +##### `JavaScript` - ipfs.files.cat(ipfsPath, [callback]) + +ipfsPath can be of type: + +- [`cid`][cid] of type: + - [Buffer][b], the raw Buffer of the cid + - String, the base58 encoded version of the cid +- String, including the ipfs handler, a cid and a path to traverse to, ie: + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + +`callback` must follow `function (err, file) {}` signature, where `err` is an error if the operation was not successful and `file` is a [Buffer][b] + +If no `callback` is passed, a promise is returned. + +**Example:** - stream.end() +```JavaScript +ipfs.files.cat(ipfsPath, function (err, file) { + if (err) { + throw err + } + + console.log(file.toString('utf8')) }) ``` A great source of [examples][] can be found in the tests for this API. -#### `cat` +#### `catReadableStream` -> Streams the file at the given IPFS multihash. +> Returns a [Readable Stream][rs] containing the contents of a file addressed by a valid IPFS Path. 
##### `Go` **WIP** -##### `JavaScript` - ipfs.files.cat(ipfsPath, [callback]) +##### `JavaScript` - ipfs.files.catReadableStream(ipfsPath) -> [Readable Stream][rs] ipfsPath can be of type: -- `multihash` is a [multihash][] which can be passed as - - Buffer, the raw Buffer of the multihash - - String, the base58 encoded version of the multihash -- String, including the ipfs handler, a multihash and a path to traverse to, ie: +- [`cid`][cid] of type: + - [Buffer][b], the raw Buffer of the cid + - String, the base58 encoded version of the cid +- String, including the ipfs handler, a cid and a path to traverse to, ie: - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' -`callback` must follow `function (err, stream) {}` signature, where `err` is an error if the operation was not successful and `stream` is a readable stream of the file. +Returns a [Readable Stream][rs] with the contents of the file. -If no `callback` is passed, a promise is returned. ```JavaScript -ipfs.files.cat(ipfsPath, function (err, file) { - // file will be a stream containing the data of the file requested +const stream = ipfs.files.catReadableStream(ipfsPath) +// stream will be a stream containing the data of the file requested +``` + +A great source of [examples][] can be found in the tests for this API. + +#### `catPullStream` + +> Returns a [Pull Stream][ps] containing the contents of a file addressed by a valid IPFS Path. 
+
+##### `Go` **WIP**
+
+##### `JavaScript` - ipfs.files.catPullStream(ipfsPath) -> [Pull Stream][ps]
+
+ipfsPath can be of type:
+
+- [`cid`][cid] of type:
+  - [Buffer][b], the raw Buffer of the cid
+  - String, the base58 encoded version of the cid
+- String, including the ipfs handler, a cid and a path to traverse to, ie:
+  - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66'
+  - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
+  - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
+
+Returns a [Pull Stream][ps] with the contents of the file.
+
+```JavaScript
+const stream = ipfs.files.catPullStream(ipfsPath)
+// stream will be a stream containing the data of the file requested
 })
 ```
 
@@ -141,7 +253,7 @@ A great source of [examples][] can be found in the tests for this API.
 
 #### `get`
 
-> Get [UnixFS][] files from IPFS.
+> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path.
 
 ##### `Go` **WIP**
 
@@ -149,18 +261,61 @@ A great source of [examples][] can be found in the tests for this API.
 
 ipfsPath can be of type:
 
-- `multihash` is a [multihash][] which can be passed as
-  - Buffer, the raw Buffer of the multihash
-  - String, the base58 encoded version of the multihash
-- String, including the ipfs handler, a multihash and a path to traverse to, ie:
+- [`cid`][cid] of type:
+  - [Buffer][b], the raw Buffer of the cid
+  - String, the base58 encoded version of the cid
+- String, including the ipfs handler, a cid and a path to traverse to, ie:
+  - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66'
+  - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
+  - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
+
+`callback` must follow `function (err, files) {}` signature, where `err` is an error if the operation was not successful.
`files` is an array containing objects of the following form: + +```js +{ + path: '/tmp/myfile.txt', + content: +} +``` + +Here, each `path` corresponds to the name of a file, and `content` is a regular Readable stream with the raw contents of that file. + +If no `callback` is passed, a promise is returned. + +**Example:** + +```JavaScript +const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' + +ipfs.files.get(validCID, function (err, files) { + files.forEach((file) => { + console.log(file.path) + console.log(file.content.toString('utf8')) + }) +}) +``` + +A great source of [examples][] can be found in the tests for this API. + +#### `getReadableStream` + +> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded as Readable Streams. + +##### `Go` **WIP** + +##### `JavaScript` - ipfs.files.getReadableStream(ipfsPath) -> [Readable Stream][rs] + +ipfsPath can be of type: + +- [`cid`][cid] of type: + - [Buffer][b], the raw Buffer of the cid + - String, the base58 encoded version of the cid +- String, including the ipfs handler, a cid and a path to traverse to, ie: - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' -`callback` must follow `function (err, stream) {}` signature, where `err` is an -error if the operation was not successful. `stream` will be a Readable stream in -[*object mode*](https://nodejs.org/api/stream.html#stream_object_mode), -outputting objects of the form +It returns a [Readable Stream][rs] in [Object mode](https://nodejs.org/api/stream.html#stream_object_mode) that will yield objects of the form: ```js { @@ -169,25 +324,227 @@ outputting objects of the form } ``` -Here, each `path` corresponds to the name of a file, and `content` is a regular -Readable stream with the raw contents of that file. 
+**Example:**
+
+```JavaScript
+const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'
+
+const stream = ipfs.files.getReadableStream(validCID)
+
+stream.on('data', (file) => {
+  // write the file's path and contents to standard out
+  console.log(file.path)
+  console.log(file.path.toString())
+})
+```
+
+A great source of [examples][] can be found in the tests for this API.
+
+#### `getPullStream`
+
+> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded through a Pull Stream.
+
+##### `Go` **WIP**
+
+##### `JavaScript` - ipfs.files.getPullStream(ipfsPath) -> [Pull Stream][ps]
+
+ipfsPath can be of type:
+
+- [`cid`][cid] of type:
+  - [Buffer][b], the raw Buffer of the cid
+  - String, the base58 encoded version of the cid
+- String, including the ipfs handler, a cid and a path to traverse to, ie:
+  - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66'
+  - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
+  - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
+
+It returns a [Pull Stream][ps] that will yield objects of the form:
+
+```js
+{
+  path: '/tmp/myfile.txt',
+  content:
+}
+```
+
+**Example:**
-If no `callback` is passed, a promise is returned with the Readable stream.
+```JavaScript
+const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'
+
+const stream = ipfs.files.getPullStream(validCID)
+
+pull(
+  stream,
+  pull.collect((err, files) => {
+    if (err) {
+      throw err
+    }
+
+    files.forEach((file) => {
+      console.log(file.path)
+      console.log(file.path.toString())
+    })
+  })
+)
+```
+
+A great source of [examples][] can be found in the tests for this API.
+
+#### `ls`
+
+> Lists a directory from IPFS that is addressed by a valid IPFS Path.
+
+##### `Go` **WIP**
+
+##### `JavaScript` - ipfs.ls(ipfsPath, [callback])
+
+> **Note:** ipfs.files.ls is currently only for MFS directories. The goal is to converge both functionality.
+ +ipfsPath can be of type: + +- [`cid`][cid] of type: + - [Buffer][b], the raw Buffer of the cid + - String, the base58 encoded version of the cid +- String, including the ipfs handler, a cid and a path to traverse to, ie: + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + +`callback` must follow `function (err, files) {}` signature, where `err` is an error if the operation was not successful. `files` is an array containing objects of the following form: + +```js +{ + depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11696, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' +} +``` + +If no `callback` is passed, a promise is returned. **Example:** ```JavaScript -const multihashStr = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' +const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' -ipfs.files.get(multihashStr, function (err, stream) { - stream.on('data', (file) => { - // write the file's path and contents to standard out +ipfs.files.ls(validCID, function (err, files) { + files.forEach((file) => { console.log(file.path) - file.content.pipe(process.stdout) }) }) ``` A great source of [examples][] can be found in the tests for this API. +#### `lsReadableStream` + +> Lists a directory from IPFS that is addressed by a valid IPFS Path. The list will be yielded as Readable Streams. + +##### `Go` **WIP** + +##### `JavaScript` - ipfs.lsReadableStream(ipfsPath) -> [Readable Stream][rs] + +> **Note:** ipfs.files.ls is currently only for MFS directories. The goal is to converge both functionality. 
+ +ipfsPath can be of type: + +- [`cid`][cid] of type: + - [Buffer][b], the raw Buffer of the cid + - String, the base58 encoded version of the cid +- String, including the ipfs handler, a cid and a path to traverse to, ie: + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' + - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' + +It returns a [Readable Stream][rs] in [Object mode](https://nodejs.org/api/stream.html#stream_object_mode) that will yield objects of the form: + +```js +{ + depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11696, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' +} +``` + +**Example:** + +```JavaScript +const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' + +const stream = ipfs.files.lsReadableStream(validCID) + +stream.on('data', (file) => { + // write the file's path and contents to standard out + console.log(file.path) +}) +``` + +A great source of [examples][] can be found in the tests for this API. + +#### `lsPullStream` + +> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded through a Pull Stream. + +##### `Go` **WIP** + +##### `JavaScript` - ipfs.lsPullStream(ipfsPath) -> [Pull Stream][ps] + +> **Note:** ipfs.files.ls is currently only for MFS directories. The goal is to converge both functionality. 
+
+
+ipfsPath can be of type:
+
+- [`cid`][cid] of type:
+  - [Buffer][b], the raw Buffer of the cid
+  - String, the base58 encoded version of the cid
+- String, including the ipfs handler, a cid and a path to traverse to, ie:
+  - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66'
+  - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
+  - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
+
+It returns a [Pull Stream][ps] that will yield objects of the form:
+
+```js
+{
+  depth: 1,
+  name: 'alice.txt',
+  path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt',
+  size: 11696,
+  hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi',
+  type: 'file'
+}
+```
+
+**Example:**
+
+```JavaScript
+const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'
+
+const stream = ipfs.lsPullStream(validCID)
+
+pull(
+  stream,
+  pull.collect((err, files) => {
+    if (err) {
+      throw err
+    }
+
+    files.forEach((file) => console.log(file.path))
+  })
+)
+```
+
+A great source of [examples][] can be found in the tests for this API.
+ [examples]: https://github.com/ipfs/interface-ipfs-core/blob/master/src/files.js +[b]: https://www.npmjs.com/package/buffer +[rs]: https://www.npmjs.com/package/readable-stream +[ps]: https://www.npmjs.com/package/pull-stream +[cid]: https://www.npmjs.com/package/cids diff --git a/circle.yml b/circle.yml index 6af3aa6d..355d2261 100644 --- a/circle.yml +++ b/circle.yml @@ -2,15 +2,6 @@ machine: node: version: stable -dependencies: - pre: - - google-chrome --version - - wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add - - - sudo sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' - - sudo apt-get update - - sudo apt-get --only-upgrade install google-chrome-stable - - google-chrome --version - test: override: - npm run lint diff --git a/package.json b/package.json index 6eebd0cf..1f99c919 100644 --- a/package.json +++ b/package.json @@ -6,9 +6,9 @@ "scripts": { "test": "exit 0", "lint": "aegir lint", - "release": "aegir release node --no-docs", - "release-minor": "aegir release node --type minor --no-docs", - "release-major": "aegir release node --type major --no-docs", + "release": "aegir release -t node --no-docs", + "release-minor": "aegir release -t node --type minor --no-docs", + "release-major": "aegir release -t node --type major --no-docs", "coverage": "exit 0", "coverage-publish": "exit 0" }, @@ -35,6 +35,7 @@ "bl": "^1.2.1", "bs58": "^4.0.1", "chai": "^4.1.2", + "dirty-chai": "^2.0.1", "cids": "~0.5.2", "concat-stream": "^1.6.0", "detect-node": "^2.0.3", @@ -45,9 +46,9 @@ "multihashes": "~0.4.12", "multihashing-async": "~0.4.7", "peer-id": "~0.10.2", - "pull-stream": "^3.6.1", - "dirty-chai": "^2.0.1" + "pull-stream": "^3.6.1" }, + "devDependencies": {}, "contributors": [ "David Dias ", "Dmitriy Ryajov ", diff --git a/src/files.js b/src/files.js index cfc086d6..3dde439a 100644 --- a/src/files.js +++ b/src/files.js @@ -7,37 +7,48 @@ const chai = 
require('chai') const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const bs58 = require('bs58') -const Readable = require('stream').Readable const loadFixture = require('aegir/fixtures') -const bl = require('bl') -const isNode = require('detect-node') +const bs58 = require('bs58') +const parallel = require('async/parallel') +const series = require('async/series') +const Readable = require('readable-stream').Readable +const pull = require('pull-stream') const concat = require('concat-stream') const through = require('through2') -const Buffer = require('safe-buffer').Buffer +const bl = require('bl') module.exports = (common) => { describe('.files', function () { - this.timeout(80 * 1000) + this.timeout(5 * 1000) - let smallFile - let bigFile - let directoryContent let ipfs - before((done) => { - smallFile = loadFixture(__dirname, '../test/fixtures/testfile.txt', 'interface-ipfs-core') - bigFile = loadFixture(__dirname, '../test/fixtures/15mb.random', 'interface-ipfs-core') - - directoryContent = { - 'pp.txt': loadFixture(__dirname, '../test/fixtures/test-folder/pp.txt', 'interface-ipfs-core'), - 'holmes.txt': loadFixture(__dirname, '../test/fixtures/test-folder/holmes.txt', 'interface-ipfs-core'), - 'jungle.txt': loadFixture(__dirname, '../test/fixtures/test-folder/jungle.txt', 'interface-ipfs-core'), - 'alice.txt': loadFixture(__dirname, '../test/fixtures/test-folder/alice.txt', 'interface-ipfs-core'), - 'files/hello.txt': loadFixture(__dirname, '../test/fixtures/test-folder/files/hello.txt', 'interface-ipfs-core'), - 'files/ipfs.txt': loadFixture(__dirname, '../test/fixtures/test-folder/files/ipfs.txt', 'interface-ipfs-core') + function fixture (path) { + return loadFixture(__dirname, path, 'interface-ipfs-core') + } + + const smallFile = { + cid: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', + data: fixture('../test/fixtures/testfile.txt') + } + const bigFile = { + cid: 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', + 
data: fixture('../test/fixtures/15mb.random') + } + + const directory = { + cid: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + files: { + 'pp.txt': fixture('../test/fixtures/test-folder/pp.txt'), + 'holmes.txt': fixture('../test/fixtures/test-folder/holmes.txt'), + 'jungle.txt': fixture('../test/fixtures/test-folder/jungle.txt'), + 'alice.txt': fixture('../test/fixtures/test-folder/alice.txt'), + 'files/hello.txt': fixture('../test/fixtures/test-folder/files/hello.txt'), + 'files/ipfs.txt': fixture('../test/fixtures/test-folder/files/ipfs.txt') } + } + before((done) => { common.setup((err, factory) => { expect(err).to.not.exist() factory.spawnNode((err, node) => { @@ -50,425 +61,470 @@ module.exports = (common) => { after((done) => common.teardown(done)) - describe('callback API', () => { - describe('.add', () => { - it('stream', (done) => { - const buffered = Buffer.from('some data') - const expectedMultihash = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' - - const rs = new Readable() - rs.push(buffered) - rs.push(null) + describe('.add', () => { + it('a Buffer', (done) => { + ipfs.files.add(smallFile.data, (err, filesAdded) => { + expect(err).to.not.exist() - const arr = [] - const filePair = { - path: 'data.txt', - content: rs - } + expect(filesAdded).to.have.length(1) + const file = filesAdded[0] + expect(file.hash).to.equal(smallFile.cid) + expect(file.path).to.equal(smallFile.cid) + // file.size counts the overhead by IPLD nodes and unixfs protobuf + expect(file.size).greaterThan(smallFile.data.length) + done() + }) + }) - arr.push(filePair) + it('a BIG buffer', (done) => { + ipfs.files.add(bigFile.data, (err, filesAdded) => { + expect(err).to.not.exist() - ipfs.files.add(arr, (err, res) => { - expect(err).to.not.exist() - expect(res).to.be.length(1) - const file = res[0] - expect(file).to.exist() - expect(file.path).to.equal('data.txt') - expect(file.size).to.equal(17) - expect(file.hash).to.equal(expectedMultihash) - done() - }) + 
expect(filesAdded).to.have.length(1) + const file = filesAdded[0] + expect(file.hash).to.equal(bigFile.cid) + expect(file.path).to.equal(bigFile.cid) + // file.size counts the overhead by IPLD nodes and unixfs protobuf + expect(file.size).greaterThan(bigFile.data.length) + done() }) + }) - it('buffer as tuple', (done) => { - const file = { - path: 'testfile.txt', - content: smallFile - } - const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + it('a BIG buffer with progress enabled', (done) => { + let progCount = 0 + let accumProgress = 0 + function handler (p) { + progCount += 1 + accumProgress = p + } - ipfs.files.add([file], (err, res) => { - expect(err).to.not.exist() + ipfs.files.add(bigFile.data, { progress: handler }, (err, filesAdded) => { + expect(err).to.not.exist() - const file = res[0] - expect(file.hash).to.equal(expectedMultihash) - expect(file.path).to.equal('testfile.txt') - done() - }) + expect(filesAdded).to.have.length(1) + const file = filesAdded[0] + expect(file.hash).to.equal(bigFile.cid) + expect(file.path).to.equal(bigFile.cid) + + expect(progCount).to.equal(58) + expect(accumProgress).to.equal(bigFile.data.length) + done() }) + }) - it('buffer', (done) => { - const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + it('a Buffer as tuple', (done) => { + const tuple = { path: 'testfile.txt', content: smallFile.data } - ipfs.files.add(smallFile, (err, res) => { - expect(err).to.not.exist() - expect(res).to.have.length(1) - const file = res[0] - expect(file.hash).to.equal(expectedMultihash) - expect(file.path).to.equal(file.hash) - done() - }) - }) + ipfs.files.add([ + tuple + ], (err, filesAdded) => { + expect(err).to.not.exist() - it('BIG buffer', (done) => { - const expectedMultihash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' + expect(filesAdded).to.have.length(1) + const file = filesAdded[0] + expect(file.hash).to.equal(smallFile.cid) + expect(file.path).to.equal('testfile.txt') - 
ipfs.files.add(bigFile, (err, res) => { - expect(err).to.not.exist() - expect(res).to.have.length(1) - const file = res[0] - expect(file.hash).to.equal(expectedMultihash) - expect(file.path).to.equal(file.hash) - done() - }) + done() }) + }) - it('BIG buffer with progress', (done) => { - const expectedMultihash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' + it('a Readable Stream', (done) => { + const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' - let progCount = 0 - let accumProgress = 0 - const handler = (p) => { - progCount += 1 - accumProgress = p - } + const rs = new Readable() + rs.push(Buffer.from('some data')) + rs.push(null) - ipfs.files.add(bigFile, { progress: handler }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.have.length(1) - const file = res[0] - expect(file.hash).to.equal(expectedMultihash) - expect(file.path).to.equal(file.hash) - expect(progCount).to.equal(58) - expect(accumProgress).to.equal(bigFile.byteLength) - done() - }) - }) + const tuple = { path: 'data.txt', content: rs } - it('add a nested dir as array', (done) => { - // Needs https://github.com/ipfs/js-ipfs-api/issues/339 to be fixed - // for js-ipfs-api + go-ipfs - if (!isNode) { return done() } + ipfs.files.add([tuple], (err, filesAdded) => { + expect(err).to.not.exist() - const content = (name) => ({ - path: `test-folder/${name}`, - content: directoryContent[name] - }) + expect(filesAdded).to.be.length(1) + const file = filesAdded[0] + expect(file.path).to.equal('data.txt') + expect(file.size).to.equal(17) + expect(file.hash).to.equal(expectedCid) + done() + }) + }) - const emptyDir = (name) => ({ - path: `test-folder/${name}` - }) + it('add a nested directory as array of tupples', (done) => { + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) - const expectedRootMultihash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const emptyDir = (name) => ({ path: `test-folder/${name}` }) - const 
dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] - ipfs.files.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] + ipfs.files.add(dirs, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(expectedRootMultihash) - done() - }) + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + done() }) + }) - it('add a nested dir as array with progress', (done) => { - // Needs https://github.com/ipfs/js-ipfs-api/issues/339 to be fixed - // for js-ipfs-api + go-ipfs - if (!isNode) { return done() } + it('add a nested directory as array of tuppled with progress', (done) => { + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) - const content = (name) => ({ - path: `test-folder/${name}`, - content: directoryContent[name] - }) + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + const total = dirs.reduce((i, entry) => { + return i + (entry.content ? 
entry.content.length : 0) + }, 0) + + let progCount = 0 + let accumProgress = 0 + const handler = (p) => { + progCount += 1 + accumProgress += p + } + + ipfs.files.add(dirs, { progress: handler }, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] - const emptyDir = (name) => ({ - path: `test-folder/${name}` - }) + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + expect(progCount).to.equal(8) + expect(accumProgress).to.be.at.least(total) + done() + }) + }) - const expectedRootMultihash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - const total = dirs.reduce((i, entry) => { - return i + (entry.content ? entry.content.length : 0) - }, 0) - - let progCount = 0 - let accumProgress = 0 - const handler = (p) => { - progCount += 1 - accumProgress += p - } + it('fails in invalid input', (done) => { + const nonValid = 'sfdasfasfs' - ipfs.files.add(dirs, { progress: handler }, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] + ipfs.files.add(nonValid, (err, result) => { + expect(err).to.exist() + done() + }) + }) - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(expectedRootMultihash) - expect(progCount).to.equal(8) - expect(accumProgress).to.be.at.least(total) - done() + it('Promise test', () => { + return ipfs.files.add(smallFile.data) + .then((filesAdded) => { + const file = filesAdded[0] + expect(file.hash).to.equal(smallFile.cid) + expect(file.path).to.equal(smallFile.cid) }) + }) + }) + + describe('.addReadableStream', () => { + it('stream of valid files and dirs', (done) => { + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] }) - describe('.createAddStream', () => { - 
it('stream of valid files and dirs', (done) => { - const content = (name) => ({ - path: `test-folder/${name}`, - content: directoryContent[name] - }) + const emptyDir = (name) => ({ path: `test-folder/${name}` }) - const emptyDir = (name) => ({ - path: `test-folder/${name}` - }) + const files = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] - const expectedRootMultihash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const stream = ipfs.files.addReadableStream() - const files = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] + stream.on('error', (err) => { + expect(err).to.not.exist() + }) - ipfs.files.createAddStream((err, stream) => { - expect(err).to.not.exist() + stream.on('data', (file) => { + if (file.path === 'test-folder') { + expect(file.hash).to.equal(directory.cid) + done() + } + }) - stream.on('data', (file) => { - if (file.path === 'test-folder') { - expect(file.hash).to.equal(expectedRootMultihash) - done() - } - }) + files.forEach((file) => stream.write(file)) + stream.end() + }) + }) - files.forEach((file) => stream.write(file)) + describe('.addPullStream', () => { + it('stream of valid files and dirs', function (done) { + this.timeout(20 * 1000) - stream.end() - }) - }) + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] }) - it('fails in invalid input', (done) => { - const nonValid = 'sfdasfasfs' + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const files = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + 
emptyDir('files/empty') + ] + + const stream = ipfs.files.addPullStream() + + pull( + pull.values(files), + stream, + pull.collect((err, filesAdded) => { + expect(err).to.not.exist() - ipfs.files.add(nonValid, (err, result) => { - expect(err).to.exist() - done() + filesAdded.forEach((file) => { + if (file.path === 'test-folder') { + expect(file.hash).to.equal(directory.cid) + done() + } + }) }) + ) + }) + }) + + describe('.cat', () => { + before((done) => { + parallel([ + (cb) => ipfs.files.add(smallFile.data, cb), + (cb) => ipfs.files.add(bigFile.data, cb) + ], done) + }) + + it('with a base58 string encoded multihash', (done) => { + ipfs.files.cat(smallFile.cid, (err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') + done() }) }) - describe('.cat', () => { - it('with a base58 string encoded multihash', (done) => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + it('with a multihash', (done) => { + const cid = Buffer.from(bs58.decode(smallFile.cid)) - ipfs.files.cat(hash, (err, stream) => { - expect(err).to.not.exist() - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - })) - }) + ipfs.files.cat(cid, (err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') + done() }) + }) - it('with a multihash', (done) => { - const mhBuf = Buffer.from(bs58.decode('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')) - ipfs.files.cat(mhBuf, (err, stream) => { - expect(err).to.not.exist() - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - })) - }) + it('streams a large file', (done) => { + ipfs.files.cat(bigFile.cid, (err, data) => { + expect(err).to.not.exist() + expect(data.length).to.equal(bigFile.data.length) + expect(data).to.eql(bigFile.data) + done() }) + }) - it('streams a large file', (done) => { - const hash = 
'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' + it('with ipfs path', (done) => { + const ipfsPath = '/ipfs/' + smallFile.cid - ipfs.files.cat(hash, (err, stream) => { - expect(err).to.not.exist() - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data).to.deep.equal(bigFile) - done() - })) - }) + ipfs.files.cat(ipfsPath, (err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') + done() }) + }) - it('with ipfs path', (done) => { - const ipfsPath = '/ipfs/Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' + it('with ipfs path, nested value', (done) => { + const file = { path: 'a/testfile.txt', content: smallFile.data } - ipfs.files.cat(ipfsPath, (err, stream) => { - expect(err).to.not.exist() - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - })) + ipfs.files.add([file], (err, filesAdded) => { + expect(err).to.not.exist() + + filesAdded.forEach((file) => { + if (file.path === 'a') { + ipfs.files.cat(`/ipfs/${file.hash}/testfile.txt`, (err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') + done() + }) + } }) }) + }) - it('with ipfs path, nested value', (done) => { - const file = { - path: 'a/testfile.txt', - content: smallFile - } + it('Promise test', () => { + return ipfs.files.cat(smallFile.cid) + .then((data) => { + expect(data.toString()).to.contain('Plz add me!') + }) + }) - ipfs.files.createAddStream((err, stream) => { - expect(err).to.not.exist() + it('errors on invalid key', () => { + const invalidCid = 'somethingNotMultihash' - stream.on('data', (file) => { - if (file.path === 'a') { - ipfs.files.cat(`/ipfs/${file.hash}/testfile.txt`, (err, stream) => { - expect(err).to.not.exist() - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - })) - }) - } - }) + return ipfs.files.cat(invalidCid) + .catch((err) => { + 
expect(err).to.exist() - stream.write(file) - stream.end() + const errString = err.toString() + if (errString === 'Error: invalid ipfs ref path') { + expect(err.toString()).to.contain('Error: invalid ipfs ref path') + } + + if (errString === 'Error: Invalid Key') { + expect(err.toString()).to.contain('Error: Invalid Key') + } }) - }) }) + }) - describe('.get', () => { - it('with a base58 encoded multihash', (done) => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - ipfs.files.get(hash, (err, stream) => { - expect(err).to.not.exist() + describe('.catReadableStream', () => { + before((done) => ipfs.files.add(bigFile.data, done)) + + it('returns a Readable Stream for a cid', (done) => { + const stream = ipfs.files.catReadableStream(bigFile.cid) + + stream.pipe(bl((err, data) => { + expect(err).to.not.exist() + expect(data).to.eql(bigFile.data) + done() + })) + }) + }) + + describe('.catPullStream', () => { + before((done) => ipfs.files.add(smallFile.data, done)) + + it('returns a Pull Stream for a cid', (done) => { + const stream = ipfs.files.catPullStream(smallFile.cid) - let files = [] - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) - next() - })) - }, () => { - expect(files).to.be.length(1) - expect(files[0].path).to.be.eql(hash) - expect(files[0].content.toString()).to.contain('Plz add me!') - done() - })) + pull( + stream, + pull.concat((err, data) => { + expect(err).to.not.exist() + expect(data.length).to.equal(smallFile.data.length) + expect(data).to.eql(smallFile.data.toString()) + done() }) + ) + }) + }) + + describe('.get', () => { + before((done) => { + parallel([ + (cb) => ipfs.files.add(smallFile.data, cb), + (cb) => ipfs.files.add(bigFile.data, cb) + ], done) + }) + + it('with a base58 encoded multihash', (done) => { + ipfs.files.get(smallFile.cid, (err, files) => { + expect(err).to.not.exist() + + expect(files).to.be.length(1) + 
expect(files[0].path).to.eql(smallFile.cid) + expect(files[0].content.toString('utf8')).to.contain('Plz add me!') + done() }) + }) - it('with a multihash', (done) => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - const mhBuf = Buffer.from(bs58.decode(hash)) - ipfs.files.get(mhBuf, (err, stream) => { - expect(err).to.not.exist() + it('with a multihash', (done) => { + const cidBuf = Buffer.from(bs58.decode(smallFile.cid)) + ipfs.files.get(cidBuf, (err, files) => { + expect(err).to.not.exist() - let files = [] - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) - next() - })) - }, () => { - expect(files).to.be.length(1) - expect(files[0].path).to.be.eql(hash) - expect(files[0].content.toString()).to.contain('Plz add me!') - done() - })) - }) + expect(files).to.be.length(1) + expect(files[0].path).to.eql(smallFile.cid) + expect(files[0].content.toString('utf8')).to.contain('Plz add me!') + done() }) + }) - it('large file', (done) => { - const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq' - ipfs.files.get(hash, (err, stream) => { - expect(err).to.not.exist() + it('large file', (done) => { + ipfs.files.get(bigFile.cid, (err, files) => { + expect(err).to.not.exist() - // accumulate the files and their content - var files = [] - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) - next() - })) - }, () => { - expect(files.length).to.equal(1) - expect(files[0].path).to.equal(hash) - expect(files[0].content).to.deep.equal(bigFile) - done() - })) - }) + expect(files.length).to.equal(1) + expect(files[0].path).to.equal(bigFile.cid) + expect(files[0].content.length).to.eql(bigFile.data.length) + expect(files[0].content).to.eql(bigFile.data) + done() }) + }) - it('directory', (done) => { - // Needs https://github.com/ipfs/js-ipfs-api/issues/339 to be fixed - 
// for js-ipfs-api + go-ipfs - if (!isNode) { return done() } + it('directory', (done) => { + series([ + (cb) => { + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) - const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - ipfs.files.get(hash, (err, stream) => { - expect(err).to.not.exist() + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + ipfs.files.add(dirs, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + cb() + }) + }, + (cb) => { + ipfs.files.get(directory.cid, (err, files) => { + expect(err).to.not.exist() - // accumulate the files and their content - var files = [] - stream.pipe(through.obj((file, enc, next) => { - if (file.content) { - file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) - next() - })) - } else { - files.push(file) - next() - } - }, () => { files = files.sort((a, b) => { if (a.path > b.path) return 1 if (a.path < b.path) return -1 return 0 }) + // Check paths - var paths = files.map((file) => { - return file.path - }) + const paths = files.map((file) => { return file.path }) expect(paths).to.include.members([ 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', @@ -484,65 +540,315 @@ module.exports = (common) => { // Check contents const contents = files.map((file) => { - return file.content ? file.content.toString() : null + return file.content + ? 
file.content.toString() + : null }) + expect(contents).to.include.members([ - directoryContent['alice.txt'].toString(), - directoryContent['files/hello.txt'].toString(), - directoryContent['files/ipfs.txt'].toString(), - directoryContent['holmes.txt'].toString(), - directoryContent['jungle.txt'].toString(), - directoryContent['pp.txt'].toString() + directory.files['alice.txt'].toString(), + directory.files['files/hello.txt'].toString(), + directory.files['files/ipfs.txt'].toString(), + directory.files['holmes.txt'].toString(), + directory.files['jungle.txt'].toString(), + directory.files['pp.txt'].toString() ]) - done() - })) + cb() + }) + } + ], done) + }) + + it('with ipfs path, nested value', (done) => { + const file = { + path: 'a/testfile.txt', + content: smallFile.data + } + + ipfs.files.add([file], (err, filesAdded) => { + expect(err).to.not.exist() + + filesAdded.forEach((file) => { + if (file.path === 'a') { + ipfs.files.get(`/ipfs/${file.hash}/testfile.txt`, (err, files) => { + expect(err).to.not.exist() + expect(files).to.be.length(1) + expect(files[0].content.toString('utf8')).to.contain('Plz add me!') + done() + }) + } }) }) + }) - it('with ipfs path, nested value', (done) => { - const file = { - path: 'a/testfile.txt', - content: smallFile - } + it('Promise test', () => { + return ipfs.files.get(smallFile.cid) + .then((files) => { + expect(files).to.be.length(1) + expect(files[0].path).to.equal(smallFile.cid) + expect(files[0].content.toString()).to.contain('Plz add me!') + }) + }) - ipfs.files.createAddStream((err, stream) => { - expect(err).to.not.exist() + it('errors on invalid key', () => { + const invalidCid = 'somethingNotMultihash' - stream.on('data', (file) => { - if (file.path === 'a') { - ipfs.files.get(`/ipfs/${file.hash}/testfile.txt`, (err, stream) => { - expect(err).to.not.exist() - let files = [] - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: 
content - }) - next() - })) - }, () => { - expect(files).to.be.length(1) - expect(files[0].content.toString()).to.contain('Plz add me!') - done() - })) - }) - } - }) + return ipfs.files.get(invalidCid) + .catch((err) => { + expect(err).to.exist() + const errString = err.toString() + if (errString === 'Error: invalid ipfs ref path') { + expect(err.toString()).to.contain('Error: invalid ipfs ref path') + } + if (errString === 'Error: Invalid Key') { + expect(err.toString()).to.contain('Error: Invalid Key') + } + }) + }) + }) + + describe('.getReadableStream', () => { + before((done) => ipfs.files.add(smallFile.data, done)) + + it('returns a Readable Stream of Readable Streams', (done) => { + const stream = ipfs.files.getReadableStream(smallFile.cid) + + let files = [] + stream.pipe(through.obj((file, enc, next) => { + file.content.pipe(concat((content) => { + files.push({ path: file.path, content: content }) + next() + })) + }, () => { + expect(files).to.be.length(1) + expect(files[0].path).to.eql(smallFile.cid) + expect(files[0].content.toString()).to.contain('Plz add me!') + done() + })) + }) + }) - stream.write(file) - stream.end() + describe('.getPullStream', () => { + before((done) => ipfs.files.add(smallFile.data, done)) + + it('returns a Pull Stream of Pull Streams', (done) => { + const stream = ipfs.files.getPullStream(smallFile.cid) + + pull( + stream, + pull.collect((err, files) => { + expect(err).to.not.exist() + expect(files).to.be.length(1) + expect(files[0].path).to.eql(smallFile.cid) + pull( + files[0].content, + pull.concat((err, data) => { + expect(err).to.not.exist() + expect(data.toString()).to.contain('Plz add me!') + done() + }) + ) }) + ) + }) + }) + + describe('.ls', () => { + before((done) => { + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + 
content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + ipfs.files.add(dirs, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + done() }) }) - describe('.ls', () => { - it('with a base58 encoded string', (done) => { - const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - ipfs.ls(hash, (err, files) => { + it('with a base58 encoded CID', (done) => { + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + ipfs.ls(cid, (err, files) => { + expect(err).to.not.exist() + + expect(files).to.eql([ + { depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11696, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' }, + { depth: 1, + name: 'empty-folder', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + size: 4, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + type: 'dir' }, + { depth: 1, + name: 'files', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + size: 183, + hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', + type: 'dir' }, + { depth: 1, + name: 'holmes.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + size: 582072, + hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', + type: 'file' }, + { depth: 1, + name: 'jungle.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + size: 2305, + hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', + type: 'file' }, + { depth: 1, + name: 'pp.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', + size: 4551, + hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', + type: 'file' } + ]) + done() + }) + }) + }) + + describe('.lsReadableStream', () => { + before((done) => { + const 
content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + ipfs.files.add(dirs, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + done() + }) + }) + + it('with a base58 encoded CID', (done) => { + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const stream = ipfs.lsReadableStream(cid) + + stream.pipe(concat((files) => { + expect(files).to.eql([ + { depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11696, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' }, + { depth: 1, + name: 'empty-folder', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + size: 4, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + type: 'dir' }, + { depth: 1, + name: 'files', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + size: 183, + hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', + type: 'dir' }, + { depth: 1, + name: 'holmes.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + size: 582072, + hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', + type: 'file' }, + { depth: 1, + name: 'jungle.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + size: 2305, + hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', + type: 'file' }, + { depth: 1, + name: 'pp.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', + size: 4551, + hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', + type: 'file' } + ]) + done() + })) + }) + }) + 
+ describe('.lsPullStream', () => { + before((done) => { + const content = (name) => ({ + path: `test-folder/${name}`, + content: directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + ipfs.files.add(dirs, (err, res) => { + expect(err).to.not.exist() + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(directory.cid) + done() + }) + }) + + it('with a base58 encoded CID', (done) => { + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const stream = ipfs.lsPullStream(cid) + + pull( + stream, + pull.collect((err, files) => { expect(err).to.not.exist() - files.forEach((file) => delete file.content) - expect(files).to.deep.equal([ + + expect(files).to.eql([ { depth: 1, name: 'alice.txt', path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', @@ -578,105 +884,11 @@ module.exports = (common) => { path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', size: 4551, hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', - type: 'file' } ]) + type: 'file' } + ]) done() }) - }) - }) - }) - - describe('promise API', () => { - describe('.add', () => { - const expectedMultihash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - - it('buffer', () => { - return ipfs.files.add(smallFile) - .then((res) => { - const file = res[0] - expect(file.hash).to.equal(expectedMultihash) - expect(file.path).to.equal(file.hash) - }) - }) - }) - - describe('.cat', () => { - it('with a base58 multihash encoded string', () => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - - return ipfs.files.cat(hash) - .then((stream) => { - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - 
expect(data.toString()).to.contain('Plz add me!') - })) - }) - }) - - it('errors on invalid key', () => { - const hash = 'somethingNotMultihash' - - return ipfs.files.cat(hash) - .catch((err) => { - expect(err).to.exist() - const errString = err.toString() - if (errString === 'Error: invalid ipfs ref path') { - expect(err.toString()).to.contain('Error: invalid ipfs ref path') - } - if (errString === 'Error: Invalid Key') { - expect(err.toString()).to.contain('Error: Invalid Key') - } - }) - }) - - it('with a multihash', () => { - const hash = Buffer.from(bs58.decode('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')) - return ipfs.files.cat(hash) - .then((stream) => { - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - })) - }) - }) - }) - - describe('.get', () => { - it('with a base58 encoded string', () => { - const hash = 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP' - return ipfs.files.get(hash).then((stream) => { - let files = [] - return new Promise((resolve, reject) => { - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - files.push({ - path: file.path, - content: content - }) - next() - })) - }, () => { - expect(files).to.be.length(1) - expect(files[0].path).to.equal(hash) - expect(files[0].content.toString()).to.contain('Plz add me!') - resolve() - })) - }) - }) - }) - - it('errors on invalid key', () => { - const hash = 'somethingNotMultihash' - return ipfs.files.get(hash).catch((err) => { - expect(err).to.exist() - const errString = err.toString() - if (errString === 'Error: invalid ipfs ref path') { - expect(err.toString()).to.contain('Error: invalid ipfs ref path') - } - if (errString === 'Error: Invalid Key') { - expect(err.toString()).to.contain('Error: Invalid Key') - } - }) - }) + ) }) }) })