From 186d007a9cf9eb29f1ec0d9954cacee79eb04ce3 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 17 Dec 2019 10:56:32 +0000 Subject: [PATCH] refactor: async iterables --- SPEC/BITSWAP.md | 3 - SPEC/BLOCK.md | 25 +- SPEC/BOOTSTRAP.md | 5 +- SPEC/CONFIG.md | 3 - SPEC/DAG.md | 3 - SPEC/DHT.md | 189 +++- SPEC/FILES.md | 903 +++--------------- SPEC/KEY.md | 3 - SPEC/MISCELLANEOUS.md | 107 +-- SPEC/NAME.md | 12 +- SPEC/OBJECT.md | 9 +- SPEC/PIN.md | 25 +- SPEC/PUBSUB.md | 16 +- SPEC/REFS.md | 165 +--- SPEC/REPO.md | 14 +- SPEC/STATS.md | 93 +- SPEC/SWARM.md | 49 +- package.json | 33 +- src/add.js | 441 +++++++++ src/bitswap/utils.js | 4 +- src/bitswap/wantlist.js | 2 +- src/block/rm.js | 25 +- src/{files-regular => }/cat.js | 53 +- src/config/get.js | 2 - src/dag/get.js | 13 +- src/dag/tree.js | 10 +- src/dht/find-peer.js | 4 +- src/dht/find-provs.js | 22 +- src/dht/get.js | 7 +- src/dht/provide.js | 23 +- src/dht/query.js | 7 +- src/dht/utils.js | 10 + src/files-mfs/ls-pull-stream.js | 75 -- src/files-mfs/ls-readable-stream.js | 79 -- src/files-mfs/read-pull-stream.js | 48 - src/files-mfs/read-readable-stream.js | 50 - src/files-regular/add-from-fs.js | 82 -- src/files-regular/add-from-stream.js | 41 - src/files-regular/add-from-url.js | 114 --- src/files-regular/add-pull-stream.js | 67 -- src/files-regular/add-readable-stream.js | 58 -- src/files-regular/add.js | 305 ------ src/files-regular/cat-pull-stream.js | 51 - src/files-regular/cat-readable-stream.js | 50 - src/files-regular/get-pull-stream.js | 39 - src/files-regular/get-readable-stream.js | 50 - src/files-regular/index.js | 29 - src/files-regular/ls-pull-stream.js | 109 --- src/files-regular/ls-readable-stream.js | 109 --- src/files-regular/refs-local-pull-stream.js | 13 - .../refs-local-readable-stream.js | 12 - src/files-regular/refs-local.js | 7 - src/files-regular/refs-pull-stream.js | 13 - src/files-regular/refs-readable-stream.js | 12 - src/files-regular/refs.js | 7 - src/{files-mfs => files}/cp.js | 10 +- src/{files-mfs => files}/flush.js | 0 src/{files-mfs => files}/index.js | 4 - src/{files-mfs => files}/ls.js | 74 +- src/{files-mfs => files}/mkdir.js | 0 src/{files-mfs => files}/mv.js | 0 src/{files-mfs => files}/read.js | 11 +- src/{files-mfs => files}/rm.js | 5 +- src/{files-mfs => files}/stat.js | 24 +- src/{files-mfs => files}/write.js | 0 src/{files-regular => }/get.js | 73 +- src/index.js | 14 +- src/key/list.js | 3 +- src/{files-regular => }/ls.js | 68 +- src/miscellaneous/resolve.js | 9 +- src/name-pubsub/cancel.js | 4 +- src/name/publish.js | 14 +- src/name/resolve.js | 56 +- src/object/data.js | 32 +- src/object/get.js | 5 +- src/object/links.js | 5 +- src/object/utils.js | 17 +- src/pin/add.js | 3 +- src/pin/ls.js | 27 +- src/pin/rm.js | 9 +- src/ping/index.js | 4 +- src/ping/ping-pull-stream.js | 61 -- src/ping/ping-readable-stream.js | 92 -- src/ping/ping.js | 7 +- src/pubsub/subscribe.js | 20 +- .../refs-local-tests.js => refs-local.js} | 13 +- src/{files-regular/refs-tests.js => refs.js} | 70 +- src/repo/gc.js | 71 +- src/stats/bw-pull-stream.js | 33 - src/stats/bw-readable-stream.js | 34 - src/stats/bw.js | 3 +- src/stats/index.js | 2 - src/swarm/addrs.js | 8 +- src/swarm/peers.js | 6 +- src/utils/expect-timeout.js | 16 - .../utils.js => utils/index.js} | 0 src/utils/suite.js | 2 +- 97 files changed, 1243 insertions(+), 3366 deletions(-) create mode 100644 src/add.js rename src/{files-regular => }/cat.js (67%) create mode 100644 src/dht/utils.js delete mode 100644 src/files-mfs/ls-pull-stream.js delete mode 
100644 src/files-mfs/ls-readable-stream.js delete mode 100644 src/files-mfs/read-pull-stream.js delete mode 100644 src/files-mfs/read-readable-stream.js delete mode 100644 src/files-regular/add-from-fs.js delete mode 100644 src/files-regular/add-from-stream.js delete mode 100644 src/files-regular/add-from-url.js delete mode 100644 src/files-regular/add-pull-stream.js delete mode 100644 src/files-regular/add-readable-stream.js delete mode 100644 src/files-regular/add.js delete mode 100644 src/files-regular/cat-pull-stream.js delete mode 100644 src/files-regular/cat-readable-stream.js delete mode 100644 src/files-regular/get-pull-stream.js delete mode 100644 src/files-regular/get-readable-stream.js delete mode 100644 src/files-regular/index.js delete mode 100644 src/files-regular/ls-pull-stream.js delete mode 100644 src/files-regular/ls-readable-stream.js delete mode 100644 src/files-regular/refs-local-pull-stream.js delete mode 100644 src/files-regular/refs-local-readable-stream.js delete mode 100644 src/files-regular/refs-local.js delete mode 100644 src/files-regular/refs-pull-stream.js delete mode 100644 src/files-regular/refs-readable-stream.js delete mode 100644 src/files-regular/refs.js rename src/{files-mfs => files}/cp.js (85%) rename src/{files-mfs => files}/flush.js (100%) rename src/{files-mfs => files}/index.js (63%) rename src/{files-mfs => files}/ls.js (50%) rename src/{files-mfs => files}/mkdir.js (100%) rename src/{files-mfs => files}/mv.js (100%) rename src/{files-mfs => files}/read.js (79%) rename src/{files-mfs => files}/rm.js (90%) rename src/{files-mfs => files}/stat.js (83%) rename src/{files-mfs => files}/write.js (100%) rename src/{files-regular => }/get.js (68%) rename src/{files-regular => }/ls.js (64%) delete mode 100644 src/ping/ping-pull-stream.js delete mode 100644 src/ping/ping-readable-stream.js rename src/{files-regular/refs-local-tests.js => refs-local.js} (76%) rename src/{files-regular/refs-tests.js => refs.js} (83%) delete mode 100644 src/stats/bw-pull-stream.js delete mode 100644 src/stats/bw-readable-stream.js delete mode 100644 src/utils/expect-timeout.js rename src/{files-regular/utils.js => utils/index.js} (100%) diff --git a/SPEC/BITSWAP.md b/SPEC/BITSWAP.md index 9b97dc62..6262e7df 100644 --- a/SPEC/BITSWAP.md +++ b/SPEC/BITSWAP.md @@ -3,9 +3,6 @@ * [bitswap.wantlist](#bitswapwantlist) * [bitswap.stat](#bitswapstat) -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. - ### `bitswap.wantlist` > Returns the wantlist, optionally filtered by peer ID diff --git a/SPEC/BLOCK.md b/SPEC/BLOCK.md index 1c303725..a3a20d13 100644 --- a/SPEC/BLOCK.md +++ b/SPEC/BLOCK.md @@ -5,9 +5,6 @@ * [block.rm](#blockrm) * [block.stat](#blockstat) -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. - #### `block.get` > Get a raw IPFS block. @@ -114,22 +111,32 @@ A great source of [examples][] can be found in the tests for this API. `options` is an Object that can contain the following properties: -- force (boolean): Ignores nonexistent blocks. -- quiet (boolean): write minimal output +- `force` (boolean): Ignores nonexistent blocks. 
+- `quiet` (boolean): write minimal output **Returns** | Type | Description | | -------- | -------- | -| `Promise` | An array of objects containing hash and (potentially) error strings | +| `AsyncIterable` | An async iterable that yields objects containing hash and (potentially) error strings | -Note: If an error string is present for a given object in the returned array, the block with that hash was not removed and the string will contain the reason why, for example if the block was pinned. +Each object yielded is of the form: + +```js +{ + hash: string, + error: string +} +``` + +Note: If an error string is present for a given object, the block with that hash was not removed and the string will contain the reason why, for example if the block was pinned. **Example:** ```JavaScript -const result = await ipfs.block.rm(cid) -console.log(result[0].hash) +for await (const result of ipfs.block.rm(cid)) { + console.log(result.hash) +} ``` A great source of [examples][] can be found in the tests for this API. diff --git a/SPEC/BOOTSTRAP.md b/SPEC/BOOTSTRAP.md index ba562a57..2dbe5ce0 100644 --- a/SPEC/BOOTSTRAP.md +++ b/SPEC/BOOTSTRAP.md @@ -4,15 +4,12 @@ the addresses of the bootstrap nodes. These are the trusted peers from which to learn about other peers in the network. -> Only edit this list if you understand the risks of adding or removing nodes from this list. +> Only edit this list if you understand the risks of adding or removing nodes * [bootstrap.add](#bootstrapadd) * [bootstrap.list](#bootstraplist) * [bootstrap.rm](#bootstraprm) -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. - #### `bootstrap.add` > Add a peer address to the bootstrap list diff --git a/SPEC/CONFIG.md b/SPEC/CONFIG.md index 5dd37790..725a59d4 100644 --- a/SPEC/CONFIG.md +++ b/SPEC/CONFIG.md @@ -6,9 +6,6 @@ * [config.profiles.list](#configprofileslist) * [config.profiles.apply](#configprofilesapply) -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. - #### `config.get` > Returns the currently being used config. If the daemon is off, it returns the stored config. diff --git a/SPEC/DAG.md b/SPEC/DAG.md index 270a30f6..c4ef9c49 100644 --- a/SPEC/DAG.md +++ b/SPEC/DAG.md @@ -10,9 +10,6 @@ _Explore the DAG API through interactive coding challenges in our ProtoSchool tu - _[P2P data links with content addressing](https://proto.school/#/basics/) (beginner)_ - _[Blogging on the Decentralized Web](https://proto.school/#/blog/) (intermediate)_ -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. - #### `dag.put` > Store an IPLD format node diff --git a/SPEC/DHT.md b/SPEC/DHT.md index 31c8a5d3..f856555f 100644 --- a/SPEC/DHT.md +++ b/SPEC/DHT.md @@ -7,79 +7,80 @@ * [dht.put](#dhtput) * [dht.query](#dhtquery) -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. - #### `dht.findPeer` -> Retrieve the Peer Info of a reachable node in the network. +> Find the multiaddresses associated with a Peer ID ##### `ipfs.dht.findPeer(peerId)` -Where `peerId` is a IPFS/libp2p Id from [PeerId](https://github.com/libp2p/js-peer-id) type. 
+Where `peerId` is a Peer ID in `String`, [`CID`](https://github.com/multiformats/js-cid) or [`PeerId`](https://github.com/libp2p/js-peer-id) format. **Returns** | Type | Description | | -------- | -------- | -| `Promise` | An object type [`PeerInfo`](https://github.com/libp2p/js-peer-info) | +| `Promise<{ id: CID, addrs: Multiaddr[] }>` | A promise that resolves to an object with `id` and `addrs`. `id` is a [`CID`](https://github.com/multiformats/js-cid) - the peer's ID and `addrs` is an array of [Multiaddr](https://github.com/multiformats/js-multiaddr/) - addresses for the peer. | **Example:** ```JavaScript -var id = PeerId.create() - -const peerInfo = await ipfs.dht.findPeer(id) -// peerInfo will contain the multiaddrs of that peer -const id = peerInfo.id -const addrs = peerInfo.multiaddrs +const info = await ipfs.dht.findPeer('QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt') + +console.log(info.id.toString()) +/* +QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt +*/ + +info.addrs.forEach(addr => console.log(addr.toString())) +/* +/ip4/147.75.94.115/udp/4001/quic +/ip6/2604:1380:3000:1f00::1/udp/4001/quic +/dnsaddr/bootstrap.libp2p.io +/ip6/2604:1380:3000:1f00::1/tcp/4001 +/ip4/147.75.94.115/tcp/4001 +*/ ``` A great source of [examples][] can be found in the tests for this API. #### `dht.findProvs` -> Retrieve the providers for content that is addressed by an hash. +> Find peers that can provide a specific value, given a CID. -##### `ipfs.dht.findProvs(hash, [options])` +##### `ipfs.dht.findProvs(cid, [options])` -Where `hash` is a multihash. +Where `cid` is a CID as a `String`, `Buffer` or [`CID`](https://github.com/multiformats/js-cid) instance. -`options` an optional object with the following properties - - `timeout` - a maximum timeout in milliseconds - - `maxNumProviders` - a maximum number of providers to find +`options` is an optional object with the following properties: + - `numProviders` - the number of providers to find. Default: 20 + +Note that if `options.numProviders` providers are not found, an error will be thrown. **Returns** | Type | Description | | -------- | -------- | -| `Promise` | An array of type [`PeerInfo`](https://github.com/libp2p/js-peer-info) | - -each entry of the returned array is composed by the peerId, as well as an array with its adresses. +| `AsyncIterable<{ id: CID, addrs: Multiaddr[] }>` | An async iterable that yields objects with `id` and `addrs`. `id` is a [`CID`](https://github.com/multiformats/js-cid) - the peer's ID and `addrs` is an array of [Multiaddr](https://github.com/multiformats/js-multiaddr/) - addresses for the peer. | **Example:** ```JavaScript -const provs = await ipfs.dht.findProvs(multihash) -provs.forEach(prov => { - console.log(prov.id.toB58String()) -}) - -const provs2 = await ipfs.dht.findProvs(multihash, { timeout: 4000 }) -provs2.forEach(prov => { - console.log(prov.id.toB58String()) -}) +const providers = ipfs.dht.findProvs('QmdPAhQRxrDKqkGPvQzBvjYe3kU8kiEEAd2J6ETEamKAD9') + +for await (const provider of providers) { + console.log(provider.id.toString()) +} ``` A great source of [examples][] can be found in the tests for this API. #### `dht.get` -> Retrieve a value from DHT +> Given a key, query the routing system for its best value. ##### `ipfs.dht.get(key)` -Where `key` is a Buffer. +Where `key` is a `Buffer`. **Returns** @@ -99,70 +100,158 @@ A great source of [examples][] can be found in the tests for this API. > Announce to the network that you are providing given values.
-##### `ipfs.dht.provide(cid)` +##### `ipfs.dht.provide(cid, [options])` + +Where `cid` is a CID or array of CIDs as a `String`, `Buffer` or [`CID`](https://github.com/multiformats/js-cid) instance. -Where `cid` is a CID or array of CIDs. +`options` is an optional object with the following properties: + - `recursive` - boolean, set to `true` to recursively provide the entire graph. Default `false`. **Returns** | Type | Description | | -------- | -------- | -| `Promise` | If action is successfully completed. Otherwise an error will be thrown | +| `AsyncIterable` | DHT query messages. See example below for structure. | + +Note: You must consume the iterable to completion to complete the provide operation. **Example:** ```JavaScript -await ipfs.dht.provide(cid) +for await (const message of ipfs.dht.provide('QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR')) { + console.log(message) +} + +/* +Prints objects like: + +{ + extra: 'dial backoff', + id: CID(QmWtewmnzJiQevJPSmG9s8aC7yRfK2WXTCdRc1pCbDFu6z), + responses: [ + { + addrs: [ + Multiaddr(/ip4/127.0.0.1/tcp/4001), + Multiaddr(/ip4/172.20.0.3/tcp/4001), + Multiaddr(/ip4/35.178.190.196/tcp/1024) + ], + id: CID(QmRz5Nth4jTFuJJKcjyb6uwvrhxWbruRvamKY2PJxwJKw8) + } + ], + type: 1 +} + +For message `type` values, see: +https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L15-L24 +*/ +``` + +Alternatively you can simply "consume" the iterable: + +```js +const { consume } = require('streaming-iterables') +await consume(ipfs.dht.provide('QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR')) ``` A great source of [examples][] can be found in the tests for this API. #### `dht.put` -> Store a value on the DHT +> Write a key/value pair to the routing system. ##### `ipfs.dht.put(key, value)` -Where `key` is a Buffer and `value` is a Buffer. +Where `key` is a `Buffer` and `value` is a `Buffer`. **Returns** | Type | Description | | -------- | -------- | -| `Promise` | If action is successfully completed. Otherwise an error will be thrown | +| `AsyncIterable` | DHT query messages. See example below for structure. | **Example:** ```JavaScript -await ipfs.dht.put(key, value) +for await (const message of ipfs.dht.put(key, value)) { + console.log(message) +} + +/* +Prints objects like: + +{ + extra: 'dial backoff', + id: CID(QmWtewmnzJiQevJPSmG9s8aC7yRfK2WXTCdRc1pCbDFu6z), + responses: [ + { + addrs: [ + Multiaddr(/ip4/127.0.0.1/tcp/4001), + Multiaddr(/ip4/172.20.0.3/tcp/4001), + Multiaddr(/ip4/35.178.190.196/tcp/1024) + ], + id: CID(QmRz5Nth4jTFuJJKcjyb6uwvrhxWbruRvamKY2PJxwJKw8) + } + ], + type: 1 +} + +For message `type` values, see: +https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L15-L24 +*/ +``` + +Alternatively you can simply "consume" the iterable: + +```js +const { consume } = require('streaming-iterables') +await consume(ipfs.dht.put(key, value)) ``` A great source of [examples][] can be found in the tests for this API. #### `dht.query` -> Queries the network for the 'closest peers' to a given key. 'closest' is defined by the rules of the underlying Peer Routing mechanism. +> Find the closest Peer IDs to a given Peer ID by querying the DHT. ##### `ipfs.dht.query(peerId)` -Where `peerId` is a IPFS/libp2p Id of type [PeerId](https://github.com/libp2p/js-peer-id). +Where `peerId` is a Peer ID in `String`, [`CID`](https://github.com/multiformats/js-cid) or [`PeerId`](https://github.com/libp2p/js-peer-id) format.
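+For illustration, a minimal sketch of constructing the three accepted `peerId` forms, assuming the `cids` and `peer-id` packages are available:
+
+```js
+const CID = require('cids')
+const PeerId = require('peer-id')
+
+const str = 'QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt'
+
+// Any one of these three values may be passed to ipfs.dht.query:
+const asString = str                              // base58 string form
+const asCid = new CID(str)                        // CID instance form
+const asPeerId = PeerId.createFromB58String(str)  // PeerId instance form
+```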
**Returns** | Type | Description | | -------- | -------- | -| `Promise` | An array of objects of type [PeerInfo](https://github.com/libp2p/js-peer-info) | +| `AsyncIterable` | DHT query messages. See example below for structure. | **Example:** ```JavaScript -const id = PeerId.create() - -const peerInfos = await ipfs.dht.query(id) - -peerInfos.forEach(p => { - console.log(p.id.toB58String()) -}) +for await (const info of ipfs.dht.query('QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt')) { + console.log(info) +} + +/* +Prints objects like: + +{ + extra: 'dial backoff', + id: CID(QmWtewmnzJiQevJPSmG9s8aC7yRfK2WXTCdRc1pCbDFu6z), + responses: [ + { + addrs: [ + Multiaddr(/ip4/127.0.0.1/tcp/4001), + Multiaddr(/ip4/172.20.0.3/tcp/4001), + Multiaddr(/ip4/35.178.190.196/tcp/1024) + ], + id: CID(QmRz5Nth4jTFuJJKcjyb6uwvrhxWbruRvamKY2PJxwJKw8) + } + ], + type: 1 +} + +For message `type` values, see: +https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L15-L24 +*/ ``` A great source of [examples][] can be found in the tests for this API. diff --git a/SPEC/FILES.md b/SPEC/FILES.md index b9b6aaf1..984e7ca7 100644 --- a/SPEC/FILES.md +++ b/SPEC/FILES.md @@ -4,330 +4,137 @@ #### The Regular API The regular, top-level API for add, cat, get and ls Files on IPFS - - [add](#add) - - [addFromFs](#addfromfs) - - [addFromStream](#addfromstream) - - [addFromURL](#addfromurl) - - [addPullStream](#addpullstream) - - [addReadableStream](#addreadablestream) - - [cat](#cat) - - [catPullStream](#catpullstream) - - [catReadableStream](#catreadablestream) - - [get](#get) - - [getPullStream](#getpullstream) - - [getReadableStream](#getreadablestream) - - [ls](#ls) - - [lsPullStream](#lspullstream) - - [lsReadableStream](#lsreadablestream) +- [add](#add) +- [cat](#cat) +- [get](#get) +- [ls](#ls) #### The Files API The Files API, aka MFS (Mutable File System) +- [files.cp](#filescp) +- [files.flush](#filesflush) +- [files.ls](#filesls) +- [files.mkdir](#filesmkdir) +- [files.mv](#filesmv) +- [files.read](#filesread) +- [files.rm](#filesrm) +- [files.stat](#filesstat) +- [files.write](#fileswrite) _Explore the Mutable File System through interactive coding challenges in our [ProtoSchool tutorial](https://proto.school/#/mutable-file-system/)._ - - [files.cp](#filescp) - - [files.flush](#filesflush) - - [files.ls](#filesls) - - [files.lsReadableStream](#fileslsreadablestream) - - [files.lsPullStream](#fileslspullstream) - - [files.mkdir](#filesmkdir) - - [files.mv](#filesmv) - - [files.read](#filesread) - - [files.readPullStream](#filesreadpullstream) - - [files.readReadableStream](#filesreadreadablestream) - - [files.rm](#filesrm) - - [files.stat](#filesstat) - - [files.write](#fileswrite) - -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. #### `add` -> Add files and data to IPFS. +> Import files and data into IPFS. 
##### `ipfs.add(data, [options])` Where `data` may be: -- a [`Buffer instance`][b] -- a [`Readable Stream`][rs] -- a [`Pull Stream`][ps] -- a [`File`][file] -- an array of objects, each of the form: -```JavaScript -{ - path: '/tmp/myfile.txt', // The file path - content: // A Buffer, Readable Stream, Pull Stream or File with the contents of the file -} -``` -If no `content` is passed, then the path is treated as an empty directory +* `Bytes` (alias for `Buffer`|`ArrayBuffer`|`TypedArray`) [single file] +* `Bloby` (alias for: `Blob`|`File`) [single file] +* `string` [single file] +* `{ path: string, content: Bytes }` [single file] +* `{ path: string, content: Bloby }` [single file] +* `{ path: string, content: string }` [single file] +* `{ path: string, content: Iterable<number> }` [single file] +* `{ path: string, content: Iterable<Bytes> }` [single file] +* `{ path: string, content: AsyncIterable<Bytes> }` [single file] +* `Iterable<number>` [single file] +* `Iterable<Bytes>` [single file] +* `Iterable<Bloby>` [multiple files] +* `Iterable<string>` [multiple files] +* `Iterable<{ path: string, content: Bytes }>` [multiple files] +* `Iterable<{ path: string, content: Bloby }>` [multiple files] +* `Iterable<{ path: string, content: string }>` [multiple files] +* `Iterable<{ path: string, content: Iterable<number> }>` [multiple files] +* `Iterable<{ path: string, content: Iterable<Bytes> }>` [multiple files] +* `Iterable<{ path: string, content: AsyncIterable<Bytes> }>` [multiple files] +* `AsyncIterable<Bytes>` [single file] +* `AsyncIterable<Bloby>` [multiple files] +* `AsyncIterable<String>` [multiple files] +* `AsyncIterable<{ path: string, content: Bytes }>` [multiple files] +* `AsyncIterable<{ path: string, content: Bloby }>` [multiple files] +* `AsyncIterable<{ path: string, content: String }>` [multiple files] +* `AsyncIterable<{ path: string, content: Iterable<number> }>` [multiple files] +* `AsyncIterable<{ path: string, content: Iterable<Bytes> }>` [multiple files] +* `AsyncIterable<{ path: string, content: AsyncIterable<Bytes> }>` [multiple files] + +Typically when adding multiple files you'll want to provide an object `{ path, content }`, where `path` is the path you want the file to be accessible at from the root CID _after_ it has been added and `content` is the contents of the file. If no `content` is passed, then the path is treated as an empty directory. `options` is an optional object argument that might include the following keys: -- chunker (string, default `size-262144`): chunking algorithm used to build ipfs DAGs. Available formats: +- `chunker` (string, default `size-262144`): chunking algorithm used to build ipfs DAGs. Available formats: - size-{size} - rabin - rabin-{avg} - rabin-{min}-{avg}-{max} -- cidVersion (integer, default 0): the CID version to use when storing the data (storage keys are based on the CID, including its version). -- cidBase (string, default `base58btc`): Number base to display CIDs in. [The list of all possible values](https://github.com/multiformats/js-multibase/blob/master/src/constants.js). -- enableShardingExperiment: allows to create directories with an unlimited number of entries currently size of unixfs directories is limited by the maximum block size. Note that this is an experimental feature. -- hashAlg || hash (string, default `sha2-256`): multihash hashing algorithm to use. [The list of all possible values]( https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343). -- onlyHash (boolean, default false): doesn't actually add the file to IPFS, but rather calculates its hash.
-- pin (boolean, default true): pin this object when adding. -- progress (function): a function that will be called with the byte length of chunks as a file is added to ipfs. -- quiet (boolean, default false): writes a minimal output. -- quieter (boolean, default false): writes only final hash. -- rawLeaves (boolean, default false): if true, DAG leaves will contain raw file data and not be wrapped in a protobuf. -- recursive (boolean, default false): for when a Path is passed, this option can be enabled to add recursively all the files. -- shardSplitThreshold (integer, default 1000): specifies the maximum size of unsharded directory that can be generated. -- silent (boolean, default false): writes no output. -- trickle (boolean, default false): if true will use the trickle DAG format for DAG generation. +- `cidVersion` (integer, default `0`): the CID version to use when storing the data (storage keys are based on the CID, including its version). +- `enableShardingExperiment`: allows creating directories with an unlimited number of entries; currently the size of unixfs directories is limited by the maximum block size. Note that this is an experimental feature. +- `hashAlg` (string, default `sha2-256`): multihash hashing algorithm to use. [The list of all possible values]( https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343). +- `onlyHash` (boolean, default `false`): doesn't actually add the file to IPFS, but rather calculates its hash. +- `pin` (boolean, default `true`): pin this object when adding. +- `progress` (function): a function that will be called with the byte length of chunks as a file is added to ipfs. +- `rawLeaves` (boolean, default `false`): if true, DAG leaves will contain raw file data and not be wrapped in a protobuf. +- `shardSplitThreshold` (integer, default `1000`): specifies the maximum size of unsharded directory that can be generated. +- `trickle` (boolean, default `false`): if true will use the trickle DAG format for DAG generation. [Trickle definition from go-ipfs documentation](https://godoc.org/github.com/ipsn/go-ipfs/gxlibs/github.com/ipfs/go-unixfs/importer/trickle). -- wrapWithDirectory (boolean, default false): adds a wrapping node around the content. +- `wrapWithDirectory` (boolean, default `false`): adds a wrapping node around the content. **Returns** | Type | Description | | -------- | -------- | -| `Promise` | An array of objects describing the added data | +| `AsyncIterable` | An async iterable that yields objects describing the added data | -an array of objects is returned, each of the form: +Each yielded object is of the form: ```JavaScript { path: '/tmp/myfile.txt', - hash: 'QmHash', // base58 encoded multihash + cid: CID('QmHash'), size: 123 } ``` **Example:** -In the browser, assuming `ipfs = new Ipfs(...)`: - ```js -const content = Ipfs.Buffer.from('ABC') -const results = await ipfs.add(content) -const hash = results[0].hash // "Qm...WW" -``` - -Now [ipfs.io/ipfs/Qm...WW](https://ipfs.io/ipfs/QmNz1UBzpdd4HfZ3qir3aPiRdX5a93XwTuDNyXRc6PKhWW) -returns the "ABC" string. - -The following allows you to add multiple files at once.
Note that intermediate directories in file paths will be automatically created and returned in the response along with files: - -```JavaScript -const files = [ - { - path: '/tmp/myfile.txt', - content: Ipfs.Buffer.from('ABC') - } -] +const content = 'ABC' -const results = await ipfs.add(files) -``` - -The `results` array: - -```json -[ - { - "path": "tmp", - "hash": "QmWXdjNC362aPDtwHPUE9o2VMqPeNeCQuTBTv1NsKtwypg", - "size": 67 - }, - { - "path": "/tmp/myfile.txt", - "hash": "QmNz1UBzpdd4HfZ3qir3aPiRdX5a93XwTuDNyXRc6PKhWW", - "size": 11 - } -] -``` - -A great source of [examples][] can be found in the tests for this API. - -#### `addReadableStream` - -> Add files and data to IPFS using a [Readable Stream][rs] of class Duplex. - -##### `ipfs.addReadableStream([options])` -> [Readable Stream][rs] - -Returns a Readable Stream of class Duplex, where objects can be written of the forms - -```js -{ - path: '/tmp/myfile.txt', // The file path - content: // A Buffer, Readable Stream, Pull Stream or File with the contents of the file +for await (const result of ipfs.add(content)) { + console.log(result.cid.toString()) // prints QmNz1UBzpdd4HfZ3qir3aPiRdX5a93XwTuDNyXRc6PKhWW } ``` -`options` is an optional object argument that might include the following keys: - -- cidVersion (integer, default 0): the CID version to use when storing the data (storage keys are based on the CID, including its version) -- progress (function): a function that will be called with the byte length of chunks as a file is added to ipfs. -- hashAlg || hash (string): multihash hashing algorithm to use. (default: `sha2-256`) [The list of all possible values]( https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343) -- wrapWithDirectory (boolean): adds a wrapping node around the content -- pin (boolean, default true): pin this object when adding. +Now [ipfs.io/ipfs/Qm...WW](https://ipfs.io/ipfs/QmNz1UBzpdd4HfZ3qir3aPiRdX5a93XwTuDNyXRc6PKhWW) returns the "ABC" string. -**Example:** +The following allows you to add multiple files at once. Note that intermediate directories in file paths will be automatically created and returned in the response along with files: ```JavaScript -const stream = ipfs.addReadableStream() -stream.on('data', function (file) { - // 'file' will be of the form - // { - // path: '/tmp/myfile.txt', - // hash: 'QmHash' // base58 encoded multihash - // size: 123 - // } -}) - -stream.write({ - path: - content: -}) -// write as many files as you want - -stream.end() -}) -``` - -A great source of [examples][] can be found in the tests for this API. - -#### `addPullStream` - -> Add files and data to IPFS using a [Pull Stream][ps]. - -##### `ipfs.addPullStream([options])` -> [Pull Stream][ps] - -Returns a Pull Stream, where objects can be written of the forms +const files = [{ + path: '/tmp/myfile.txt', + content: 'ABC' +}] -```js -{ - path: '/tmp/myfile.txt', // The file path - content: // A Buffer, Readable Stream, Pull Stream or File with the contents of the file +for await (const result of ipfs.add(content)) { + console.log(result) } -``` - -`options` is an optional object argument that might include the following keys: - -- cidVersion (integer, default 0): the CID version to use when storing the data (storage keys are based on the CID, including its version) -- progress (function): a function that will be called with the byte length of chunks as a file is added to ipfs. -- hashAlg || hash (string): multihash hashing algorithm to use. 
(default: `sha2-256`) [The list of all possible values]( https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343) -- wrapWithDirectory (boolean): adds a wrapping node around the content -- pin (boolean, default true): pin this object when adding. **Example:** ```JavaScript -const stream = ipfs.addPullStream() - -pull( - pull.values([ - { path: , content: } - ]), - stream, - pull.collect((err, values) => { - // values will be an array of objects, which one of the form - // { - // path: '/tmp/myfile.txt', - // hash: 'QmHash' // base58 encoded multihash - // size: 123 - // } - }) -) -``` - -#### `addFromFs` - -> Add files or entire directories from the FileSystem to IPFS - -##### `ipfs.addFromFs(path, [options])` - -Reads a file or folder from `path` on the filesystem and adds it to IPFS. - -Options: - - **recursive**: If `path` is a directory, use option `{ recursive: true }` to add the directory and all its sub-directories. - - **ignore**: To exclude file globs from the directory, use option `{ ignore: ['ignore/this/folder/**', 'and/this/file'] }`. - - **hidden**: hidden/dot files (files or folders starting with a `.`, for example, `.git/`) are not included by default. To add them, use the option `{ hidden: true }`. - -**Returns** - -| Type | Description | -| -------- | -------- | -| `Promise` | An array of objects describing the files that were added | - -an array of objects is returned, each of the form: - -```js +for await (const result of ipfs.add(files)) { + console.log(result) } +/* { - path: 'test-folder', - hash: 'QmRNjDeKStKGTQXnJ2NFqeQ9oW23WcpbmvCVrpDHgDg3T6', - size: 123 + "path": "tmp", + "hash": "QmWXdjNC362aPDtwHPUE9o2VMqPeNeCQuTBTv1NsKtwypg", + "size": 67 } -``` - -**Example** - -```JavaScript -const results = await ipfs.addFromFs('path/to/a/folder', { recursive: true , ignore: ['subfolder/to/ignore/**']}) -console.log(results) -``` - -#### `addFromURL` - -> Add a file from a URL to IPFS - -##### `ipfs.addFromURL(url, [options])` - -`options` is an optional object that argument that might include the same keys of [`ipfs.add(data, [options])`](#add) - -**Returns** - -| Type | Description | -| -------- | -------- | -| `Promise` | An object describing the added file | - -**Example** - -```JavaScript -const result = await ipfs.addFromURL('http://example.com/') -console.log('result') -``` - -#### `addFromStream` - -> Add a file from a stream to IPFS - -##### `ipfs.addFromStream(stream, [options])` - -This is very similar to `ipfs.add({ path:'', content: stream })`. It is like the reverse of cat. - -`options` is an optional object that argument that might include the same keys of [`ipfs.add(data, [options])`](#add) - -**Returns** - -| Type | Description | -| -------- | -------- | -| `Promise` | An array of objects describing the added data | - -an array of objects is returned, each of the form: - -```JavaScript { - path: '/tmp/myfile.txt', - hash: 'QmHash', // base58 encoded multihash - size: 123 + "path": "/tmp/myfile.txt", + "hash": "QmNz1UBzpdd4HfZ3qir3aPiRdX5a93XwTuDNyXRc6PKhWW", + "size": 11 } +*/ ``` - -**Example** - -```JavaScript -const result = await ipfs.addFromStream() -console.log(result) -```
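+A minimal sketch of the documented `progress` option, assuming an instantiated `ipfs` object:
+
+```js
+// `progress` is called with the byte length of chunks as the input is added
+const data = Buffer.from('hello world')
+
+for await (const result of ipfs.add(data, {
+  progress: (bytes) => console.log(`${bytes} bytes processed so far`)
+})) {
+  console.log(result.cid.toString(), result.size)
+}
+```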
#### `cat` @@ -337,11 +144,11 @@ console.log(result) `ipfsPath` can be of type: -- [`cid`][cid] of type: - - a [CID](https://github.com/ipfs/js-cid) instance - - [Buffer][b], the raw Buffer of the cid - - String, the base58 encoded version of the cid -- String, including the ipfs handler, a cid and a path to traverse to, ie: +- [`CID`][cid] of type: + - `string` - the base encoded version of the CID + - [CID](https://github.com/ipfs/js-cid) - a CID instance + - [Buffer][b] - the raw Buffer of the CID +- `string` - including the ipfs handler, a CID and a path to traverse to, e.g. - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' @@ -354,85 +161,16 @@ console.log(result) | Type | Description | | -------- | -------- | -| `Promise` | A [`Buffer`][b] with the contents of `path` | +| `AsyncIterable` | An async iterable that yields [`Buffer`][b] objects with the contents of `path` | **Example:** ```JavaScript -const file = await ipfs.cat(ipfsPath) { -console.log(file.toString('utf8')) -``` - -A great source of [examples][] can be found in the tests for this API. - -#### `catReadableStream` - -> Returns a [Readable Stream][rs] containing the contents of a file addressed by a valid IPFS Path. - -##### `ipfs.catReadableStream(ipfsPath, [options])` -> [Readable Stream][rs] - -`ipfsPath` can be of type: - -- [`cid`][cid] of type: - - a [CID](https://github.com/ipfs/js-cid) instance - - [Buffer][b], the raw Buffer of the cid - - String, the base58 encoded version of the cid -- String, including the ipfs handler, a cid and a path to traverse to, ie: - - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - -`options` is an optional object that may contain the following keys: - - `offset` is an optional byte offset to start the stream at - - `length` is an optional number of bytes to read from the stream - -**Returns** - -| Type | Description | -| -------- | -------- | -| `ReadableStream` | A [Readable Stream][rs] with the contents of the file | - -**Example** - -```JavaScript -const stream = ipfs.catReadableStream(ipfsPath) -// stream will be a stream containing the data of the file requested -``` - -A great source of [examples][] can be found in the tests for this API. - -#### `catPullStream` - -> Returns a [Pull Stream][ps] containing the contents of a file addressed by a valid IPFS Path. 
- -##### `ipfs.catPullStream(ipfsPath, [options])` -> [Pull Stream][rs] - -`ipfsPath` can be of type: - -- [`cid`][cid] of type: - - [Buffer][b], the raw Buffer of the cid - - String, the base58 encoded version of the cid -- String, including the ipfs handler, a cid and a path to traverse to, ie: - - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - -`options` is an optional object that may contain the following keys: - - `offset` is an optional byte offset to start the stream at - - `length` is an optional number of bytes to read from the stream - -**Returns** - -| Type | Description | -| -------- | -------- | -| `PullStream` | A [Pull Stream][ps] with the contents of the file | - -**Example** - -```JavaScript -const stream = ipfs.catPullStream(ipfsPath) -// stream will be a stream containing the data of the file requested -}) ``` A great source of [examples][] can be found in the tests for this API. #### `get` > Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. ##### `ipfs.get(ipfsPath)` -ipfsPath can be of type: +`ipfsPath` can be of type: -- [`cid`][cid] of type: - - [Buffer][b], the raw Buffer of the cid - - String, the base58 encoded version of the cid -- String, including the ipfs handler, a cid and a path to traverse to, ie: +- [`CID`][cid] of type: + - `string` - the base encoded version of the CID + - [CID](https://github.com/ipfs/js-cid) - a CID instance + - [Buffer][b] - the raw Buffer of the CID +- String, including the ipfs handler, a cid and a path to traverse to, e.g. - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' **Returns** | Type | Description | | -------- | -------- | -| `Promise` | An array of objects representing the files | +| `AsyncIterable` | An async iterable that yields objects representing the files | -an array of objects is returned, each of the form: +Each yielded object is of the form: ```js { path: '/tmp/myfile.txt', - content: + content: <AsyncIterable<Buffer>> } ``` -Here, each `path` corresponds to the name of a file, and `content` is a regular Readable stream with the raw contents of that file. +Here, each `path` corresponds to the name of a file, and `content` is an async iterable with the file contents. **Example:** ```JavaScript +const BufferList = require('bl/BufferList') const cid = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' -const files = await ipfs.get(validCID) -files.forEach((file) => { +for await (const file of ipfs.get(cid)) { console.log(file.path) - console.log(file.content.toString('utf8')) -}) + + const content = new BufferList() + for await (const chunk of file.content) { + content.append(chunk) + } + + console.log(content.toString()) +} ``` A great source of [examples][] can be found in the tests for this API. #### `getReadableStream` > Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded as Readable Streams.
- -##### `ipfs.getReadableStream(ipfsPath)` -> [Readable Stream][rs] - -ipfsPath can be of type: - -- [`cid`][cid] of type: - - [Buffer][b], the raw Buffer of the cid - - String, the base58 encoded version of the cid -- String, including the ipfs handler, a cid and a path to traverse to, ie: - - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - -**Returns** - -| Type | Description | -| -------- | -------- | -| `ReadableStream` | A [Readable Stream][rs] in [Object mode](https://nodejs.org/api/stream.html#stream_object_mode) that will yield objects | - -the yielded objects are of the form: - -```js -{ - path: '/tmp/myfile.txt', - content: -} -``` - -**Example:** -```JavaScript -const validCID = 'QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG' - -const stream = ipfs.getReadableStream(validCID) - -stream.on('data', (file) => { - // write the file's path and contents to standard out - console.log(file.path) - if(file.type !== 'dir') { - file.content.on('data', (data) => { - console.log(data.toString()) - }) - file.content.resume() + const content = new BufferList() + for await (const chunk of file.content) { + content.append(chunk) } -}) -``` - -A great source of [examples][] can be found in the tests for this API. - -#### `getPullStream` - -> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded as Readable Streams. - -##### `ipfs.getPullStream(ipfsPath)` -> [Pull Stream][ps] - -ipfsPath can be of type: - -- [`cid`][cid] of type: - - [Buffer][b], the raw Buffer of the cid - - String, the base58 encoded version of the cid -- String, including the ipfs handler, a cid and a path to traverse to, ie: - - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - -**Returns** - -| Type | Description | -| -------- | -------- | -| `PullStream` | A [Pull Stream][ps] that will yield objects | - -the yielded objects are of the form: -```js -{ - path: '/tmp/myfile.txt', - content: + console.log(content.toString()) } ``` -**Example:** - -```JavaScript -const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' - -const stream = ipfs.getReadableStream(validCID) - -pull( - stream, - pull.collect((err, files) => { - if (err) { - throw err - } - - files.forEach((file) => { - console.log(file.path) - console.log(file.path.toString()) - }) - }) -) -``` - A great source of [examples][] can be found in the tests for this API. #### `ls` @@ -597,64 +235,13 @@ A great source of [examples][] can be found in the tests for this API. ##### `ipfs.ls(ipfsPath)` -> **Note:** ipfs.files.ls is currently only for MFS directories. The goal is to converge both functionalities. 
- -ipfsPath can be of type: - -- [`cid`][cid] of type: - - [Buffer][b], the raw Buffer of the cid - - String, the base58 encoded version of the cid -- String, including the ipfs handler, a cid and a path to traverse to, ie: - - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - -**Returns** - -| Type | Description | -| -------- | -------- | -| `Promise` | An array of objects representing the files | - -an array of objects is returned, each of the form: - -```js -{ - depth: 1, - name: 'alice.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - size: 11696, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', - type: 'file' -} -``` - -**Example:** - -```JavaScript -const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' - -const files = await ipfs.ls(validCID) -files.forEach((file) => { - console.log(file.path) -}) -``` - -A great source of [examples][] can be found in the tests for this API. - -#### `lsReadableStream` - -> Lists a directory from IPFS that is addressed by a valid IPFS Path. The list will be yielded as Readable Streams. - -##### `ipfs.lsReadableStream(ipfsPath)` -> [Readable Stream][rs] - -> **Note:** ipfs.files.ls is currently only for MFS directories. The goal is to converge both functionalities. - -ipfsPath can be of type: +`ipfsPath` can be of type: -- [`cid`][cid] of type: - - [Buffer][b], the raw Buffer of the cid - - String, the base58 encoded version of the cid -- String, including the ipfs handler, a cid and a path to traverse to, ie: +- [`CID`][cid] of type: + - `string` - the base encoded version of the CID + - [CID](https://github.com/ipfs/js-cid) - a CID instance + - [Buffer][b] - the raw Buffer of the CID +- String, including the ipfs handler, a cid and a path to traverse to, e.g. - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' @@ -663,9 +250,9 @@ ipfsPath can be of type: | Type | Description | | -------- | -------- | -| `ReadableStream` | A [Readable Stream][rs] in [Object mode](https://nodejs.org/api/stream.html#stream_object_mode) that will yield objects | +| `AsyncIterable` | An async iterable that yields objects representing the files | -the yielded objects are of the form: +Each yielded object is of the form: ```js { @@ -673,7 +260,7 @@ the yielded objects are of the form: name: 'alice.txt', path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', size: 11696, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + cid: CID('QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi'), type: 'file' } ``` @@ -681,79 +268,16 @@ the yielded objects are of the form: **Example:** ```JavaScript -const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' - -const stream = ipfs.lsReadableStream(validCID) +const cid = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' -stream.on('data', (file) => { - // write the file's path and contents to standard out +for await (const file of ipfs.ls(cid)) { console.log(file.path) -}) -``` - -A great source of [examples][] can be found in the tests for this API. - -#### `lsPullStream` - -> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded through a Pull Stream. 
-##### `ipfs.lsPullStream(ipfsPath)` -> [Pull Stream][ps] - -> **Note:** ipfs.files.ls is currently only for MFS directories. The goal is to converge both functionalities. - - -ipfsPath can be of type: - -- [`cid`][cid] of type: - - [Buffer][b], the raw Buffer of the cid - - String, the base58 encoded version of the cid -- String, including the ipfs handler, a cid and a path to traverse to, ie: - - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66' - - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt' - -**Returns** - -| Type | Description | -| -------- | -------- | -| `PullStream` | A [Pull Stream][ps] that will yield objects | - -the yielded objects are of the form: - -```js -{ - depth: 1, - name: 'alice.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - size: 11696, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', - type: 'file' -} -``` - -**Example:** - -```JavaScript -const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF' - -const stream = ipfs.lsPullStream(validCID) - -pull( - stream, - pull.collect((err, files) => { - if (err) { - throw err - } - - files.forEach((file) => console.log(file.path)) - }) -) -``` - A great source of [examples][] can be found in the tests for this API. ------------------------------------------------------------------------- ------------------------------------------------------------------------- +--- ## The Files API aka MFS (The Mutable File System) @@ -850,7 +374,6 @@ Where: - `hash` is a Boolean value to return only the hash (default: false) - `size` is a Boolean value to return only the size (default: false) - `withLocal` is a Boolean value to compute the amount of the dag that is local, and if possible the total size (default: false) - - `cidBase` is which number base to use to format hashes - e.g. `base32`, `base64` etc (default: `base58btc`) **Returns** @@ -860,7 +383,7 @@ Where: the returned object has the following keys: -- `hash` is a string with the hash +- `cid` is a [CID][cid] instance - `size` is an integer with the file size in Bytes - `cumulativeSize` is an integer with the size of the DAGNodes making up the file in Bytes - `type` is a string that can be either `directory` or `file` @@ -876,7 +399,7 @@ const stats = await ipfs.files.stat('/file.txt') console.log(stats) // { -// hash: 'QmXmJBmnYqXVuicUfn9uDCC8kxCEEzQpsAbeq1iJvLAmVs', +// cid: CID('QmXmJBmnYqXVuicUfn9uDCC8kxCEEzQpsAbeq1iJvLAmVs'), // size: 60, // cumulativeSize: 118, // blocks: 1, @@ -917,7 +440,7 @@ await ipfs.files.rm('/my/beautiful/directory', { recursive: true }) #### `files.read` -> Read a file into a [`Buffer`][b]. +> Read a file ##### `ipfs.files.read(path, [options])` @@ -935,81 +458,18 @@ Where: | Type | Description | | -------- | -------- | -| `Promise` | A [`Buffer`][b] with the contents of `path` | +| `AsyncIterable` | An async iterable that yields [`Buffer`][b] objects with the contents of `path` | -N.b. this method is likely to result in high memory usage, you should use [files.readReadableStream](#filesreadreadablestream) or [files.readPullStream](#filesreadpullstream) instead where possible. **Example:** ```JavaScript -const buf = await ipfs.files.read('/hello-world') -console.log(buf.toString('utf8')) - -// Hello, World! -``` - -#### `files.readReadableStream` - -> Read a file into a [`ReadableStream`][rs]. - -##### `ipfs.files.readReadableStream(path, [options])` - -Where: - -- `path` is the path of the file to read and must point to a file (and not a directory). It might be: - - An existing MFS path to a file (e.g.
`/my-dir/a.txt`) - - An IPFS path (e.g. `/ipfs/QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks`) - - A [CID][cid] instance (e.g. `new CID('QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks')`) -- `options` is an optional Object that might contain the following keys: - - `offset` is an Integer with the byte offset to begin reading from (default: 0) - - `length` is an Integer with the maximum number of bytes to read (default: Read to the end of stream) - -**Returns** - -| Type | Description | -| -------- | -------- | -| `ReadableStream` | A [Readable Stream][rs] with the contents of `path` | +| `AsyncIterable` | An async iterable that yields [`Buffer`][b] objects with the contents of `path` | **Example:** ```JavaScript -const stream = ipfs.files.readReadableStream('/hello-world') -stream.on('data', (buf) => console.log(buf.toString('utf8'))) - -// Hello, World! -``` - -#### `files.readPullStream` - -> Read a file into a [`PullStream`][ps]. - -##### `ipfs.files.readPullStream(path, [options])` - -Where: - -- `path` is the path of the file to read and must point to a file (and not a directory). It might be: - - An existing MFS path to a file (e.g. `/my-dir/a.txt`) - - An IPFS path (e.g. `/ipfs/QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks`) - - A [CID][cid] instance (e.g. `new CID('QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks')`) -- `options` is an optional Object that might contain the following keys: - - `offset` is an Integer with the byte offset to begin reading from (default: 0) - - `length` is an Integer with the maximum number of bytes to read (default: Read to the end of stream) - -**Returns** +const chunks = [] -| Type | Description | -| -------- | -------- | -| `PullStream` | A [`PullStream`][ps] with the contents of `path` | - -**Example:** - -```JavaScript -pull( - ipfs.files.readPullStream('/hello-world'), - through(buf => console.log(buf.toString('utf8'))), - collect(err => {}) -) +for await (const chunk of ipfs.files.read('/hello-world')) { + chunks.push(chunk) +} +console.log(Buffer.concat(chunks).toString()) // Hello, World! ``` @@ -1024,8 +484,7 @@ Where: - `path` is the path of the file to write - `content` can be: - a [`Buffer`][b] - - a [`PullStream`][ps] - - a [`ReadableStream`][rs] + - an `AsyncIterable` (note: Node.js readable streams are iterable) - a [`Blob`][blob] (caveat: will only work in the browser) - a string path to a file (caveat: will only work in Node.js) - `options` is an optional Object that might contain the following keys: @@ -1128,127 +587,33 @@ Where: - An IPFS path (e.g. `/ipfs/QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks`) - A [CID][cid] instance (e.g. `new CID('QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks')`) - `options` is an optional Object that might contain the following keys: - - `long` is a Boolean value to decide whether or not to populate `type`, `size` and `hash` (default: false) - - `cidBase` is which number base to use to format hashes - e.g. `base32`, `base64` etc (default: `base58btc`) - `sort` is a Boolean value. 
If true entries will be sorted by filename (default: false) **Returns** | Type | Description | | -------- | -------- | -| `Promise` | An array of objects representing the files | +| `AsyncIterable` | An async iterable that yields objects representing the files | -each object contains the following keys: +Each object contains the following keys: - `name` which is the file's name - `type` which is the object's type (`directory` or `file`) - `size` the size of the file in bytes -- `hash` the hash of the file +- `cid` the hash of the file (A [CID][cid] instance) **Example:** ```JavaScript -const files = await ipfs.files.ls('/screenshots') - -files.forEach((file) => { - console.log(file.name) -}) - +for await (const file of ipfs.files.ls('/screenshots')) { + console.log(file.name) +} // 2018-01-22T18:08:46.775Z.png // 2018-01-22T18:08:49.184Z.png ``` -#### `files.lsReadableStream` - -> Lists a directory from the local mutable namespace that is addressed by a valid IPFS Path. The list will be yielded as Readable Streams. - -##### `Go` **WIP** - -##### `JavaScript` - ipfs.files.lsReadableStream([path], [options]) -> [Readable Stream][rs] - -Where: - -- `path` is an optional string to show listing for (default: `/`). It might be: - - An existing MFS path to a directory (e.g. `/my-dir`) - - An IPFS path (e.g. `/ipfs/QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks`) - - A [CID][cid] instance (e.g. `new CID('QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks')`) -- `options` is an optional Object that might contain the following keys: - - `long` is a Boolean value to decide whether or not to populate `type`, `size` and `hash` (default: false) - - `cidBase` is which number base to use to format hashes - e.g. `base32`, `base64` etc (default: `base58btc`) - -**Returns** - -| Type | Description | -| -------- | -------- | -| `ReadableStream` | A [Readable Stream][rs] in [Object mode](https://nodejs.org/api/stream.html#stream_object_mode) that will yield objects | - -the yielded objects contain the following keys: - -- `name` which is the file's name -- `type` which is the object's type (`directory` or `file`) -- `size` the size of the file in bytes -- `hash` the hash of the file - -**Example:** - -```JavaScript -const stream = ipfs.lsReadableStream('/some-dir') - -stream.on('data', (file) => { - // write the file's path and contents to standard out - console.log(file.name) -}) -``` - -#### `files.lsPullStream` - -> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded through a Pull Stream. - -##### `Go` **WIP** - -##### `JavaScript` - ipfs.lsPullStream([path], [options]) -> [Pull Stream][ps] - -Where: - -- `path` is an optional string to show listing for (default: `/`). It might be: - - An existing MFS path to a directory (e.g. `/my-dir`) - - An IPFS path (e.g. `/ipfs/QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks`) - - A [CID][cid] instance (e.g. `new CID('QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks')`) -- `options` is an optional Object that might contain the following keys: - - `long` is a Boolean value to decide whether or not to populate `type`, `size` and `hash` (default: false) - - `cidBase` is which number base to use to format hashes - e.g. 
`base32`, `base64` etc (default: `base58btc`) - -**Returns** - -| Type | Description | -| -------- | -------- | -| `PullStream` | A [Pull Stream][os] that will yield objects | - -the yielded objects contain the following keys: - - - `name` which is the file's name - - `type` which is the object's type (`directory` or `file`) - - `size` the size of the file in bytes - - `hash` the hash of the file - -**Example:** - -```JavaScript -pull( - ipfs.lsPullStream('/some-dir'), - pull.through(file => { - console.log(file.name) - }) - pull.onEnd(...) -) -``` - -A great source of [examples][] can be found in the tests for this API. - [examples]: https://github.com/ipfs/interface-ipfs-core/blob/master/src/files-regular [b]: https://www.npmjs.com/package/buffer -[rs]: https://www.npmjs.com/package/readable-stream -[ps]: https://www.npmjs.com/package/pull-stream [file]: https://developer.mozilla.org/en-US/docs/Web/API/File [cid]: https://www.npmjs.com/package/cids [blob]: https://developer.mozilla.org/en-US/docs/Web/API/Blob diff --git a/SPEC/KEY.md b/SPEC/KEY.md index 82aad9a2..4d36532f 100644 --- a/SPEC/KEY.md +++ b/SPEC/KEY.md @@ -7,9 +7,6 @@ * [key.export](#keyexport) * [key.import](#keyimport) -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. - #### `key.gen` > Generate a new key diff --git a/SPEC/MISCELLANEOUS.md b/SPEC/MISCELLANEOUS.md index b09641a5..ae1af409 100644 --- a/SPEC/MISCELLANEOUS.md +++ b/SPEC/MISCELLANEOUS.md @@ -5,13 +5,8 @@ * [dns](#dns) * [stop](#stop) * [ping](#ping) -* [pingPullStream](#pingpullstream) -* [pingReadableStream](#pingreadablestream) * [resolve](#resolve) -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. - #### `id` > Returns the identity of the Peer @@ -116,9 +111,9 @@ Where: | Type | Description | | -------- | -------- | -| `Promise` | An array of ping response objects | +| `AsyncIterable` | An async iterable that yields ping response objects | -an array of objects is returned, each of the form: +Each yielded object is of the form: ```js { @@ -133,111 +128,15 @@ Note that not all ping response objects are "pongs". A "pong" message can be ide **Example:** ```JavaScript -const responses = await ipfs.ping('Qmhash') -responses.forEach((res) => { +for await (const res of ipfs.ping('Qmhash')) { if (res.time) { console.log(`Pong received: time=${res.time} ms`) } else { console.log(res.text) } -}) -``` - -A great source of [examples](https://github.com/ipfs/interface-ipfs-core/tree/master/src/ping) can be found in the tests for this API. - -#### `pingPullStream` - -> Stream echo request packets to IPFS hosts - -##### `ipfs.pingPullStream(peerId, [options])` - -Where: - -- `peerId` (string) ID of the peer to be pinged. -- `options` is an optional object argument that might include the following properties: - - `count` (integer, default 10): the number of ping messages to send - -**Returns** - -| Type | Description | -| -------- | -------- | -| `PullStream` | A [`PullStream`][ps] of ping response objects | - -example of the returned objects: - -```js -{ - success: true, - time: 1234, - text: '' -} -``` - -Note that not all ping response objects are "pongs". A "pong" message can be identified by a truthy `success` property and an empty `text` property. Other ping responses are failures or status updates. 
- -**Example:** - -```JavaScript -const pull = require('pull-stream') - -pull( - ipfs.pingPullStream('Qmhash'), - pull.drain((res) => { - if (res.time) { - console.log(`Pong received: time=${res.time} ms`) - } else { - console.log(res.text) - } - }) -) -``` - -A great source of [examples](https://github.com/ipfs/interface-ipfs-core/tree/master/src/ping) can be found in the tests for this API. - -#### `pingReadableStream` - -> Stream echo request packets to IPFS hosts - -##### `ipfs.pingReadableStream(peerId, [options])` - -Where: - -- `peerId` (string) ID of the peer to be pinged. - -- `options` is an optional object argument that might include the following properties: - - `count` (integer, default 10): the number of ping messages to send - -**Returns** - -| Type | Description | -| -------- | -------- | -| `ReadableStream` | A [`ReadableStream`][rs] of ping response objects | - -example of the returned objects: - -```js -{ - success: true, - time: 1234, - text: '' -} -``` - -Note that not all ping response objects are "pongs". A "pong" message can be identified by a truthy `success` property and an empty `text` property. Other ping responses are failures or status updates. - -**Example:** - -```JavaScript -const stream = ipfs.pingReadableStream('Qmhash') - -stream.on('data', (res) => { - if (res.time) { - console.log(`Pong received: time=${res.time} ms`) - } else { - console.log(res.text) - } -}) -``` - -A great source of [examples](https://github.com/ipfs/interface-ipfs-core/tree/master/src/ping) can be found in the tests for this API. #### `resolve` diff --git a/SPEC/NAME.md b/SPEC/NAME.md index e6705953..319ff84d 100644 --- a/SPEC/NAME.md +++ b/SPEC/NAME.md @@ -6,9 +6,6 @@ * [name.pubsub.subs](#namepubsubsubs) * [name.resolve](#nameresolve) -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. - #### `name.publish` > Publish an IPNS name with a given value. @@ -180,7 +177,7 @@ A great source of [examples][examples-pubsub] can be found in the tests for this | Type | Description | | -------- | -------- | -| `Promise` | A string that contains the IPFS hash | +| `AsyncIterable` | An async iterable that yields strings that are increasingly accurate resolved paths. | **Example:** @@ -188,9 +185,10 @@ A great source of [examples][examples-pubsub] can be found in the tests for this // The IPNS address you want to resolve. const addr = '/ipns/ipfs.io' -const name = await ipfs.name.resolve(addr) -console.log(name) -// /ipfs/QmQrX8hka2BtNHa8N8arAq16TCVx5qHcb46c5yPewRycLm +for await (const name of ipfs.name.resolve(addr)) { + console.log(name) + // /ipfs/QmQrX8hka2BtNHa8N8arAq16TCVx5qHcb46c5yPewRycLm +} ``` A great source of [examples][] can be found in the tests for this API. diff --git a/SPEC/OBJECT.md b/SPEC/OBJECT.md index d8db3618..a74e8d72 100644 --- a/SPEC/OBJECT.md +++ b/SPEC/OBJECT.md @@ -11,9 +11,6 @@ * [object.patch.appendData](#objectpatchappenddata) * [object.patch.setData](#objectpatchsetdata) -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. - #### `object.new` > Create a new MerkleDAG node, using a specific layout. Caveat: So far, only UnixFS object layouts are supported. @@ -129,14 +126,14 @@ A great source of [examples][] can be found in the tests for this API. 
| Type | Description | | -------- | -------- | -| `Promise` | A Buffer with the data that the MerkleDAG node contained | +| `Promise` | A Promise that resolves to a Buffer with the data that the MerkleDAG node contained | **Example:** ```JavaScript -const multihash = 'QmPb5f92FxKPYdT3QNBd1GKiL4tZUXUrzF4Hkpdr3Gf1gK' +const cid = 'QmPb5f92FxKPYdT3QNBd1GKiL4tZUXUrzF4Hkpdr3Gf1gK' -const data = await ipfs.object.data(multihash) +const data = await ipfs.object.data(cid) console.log(data.toString()) // Logs: // some data diff --git a/SPEC/PIN.md b/SPEC/PIN.md index f8628a0a..285905d5 100644 --- a/SPEC/PIN.md +++ b/SPEC/PIN.md @@ -4,9 +4,6 @@ * [pin.ls](#pinls) * [pin.rm](#pinrm) -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. - #### `pin.add` > Adds an IPFS object to the pinset and also stores it to the IPFS repo. pinset is the set of hashes currently pinned (not gc'able). @@ -53,28 +50,24 @@ A great source of [examples][] can be found in the tests for this API. Where: - `cid` - a [CID][cid] instance or CID as a string or an array of CIDs. -- `options` is an object that can contain the following keys: - - 'type' - Return also the type of pin (direct, indirect or recursive) +- `options` - is an object that can contain the following keys: + - `type` - filter by this type of pin ("recursive", "direct" or "indirect") **Returns** | Type | Description | | -------- | -------- | -| `Promise` | An array of current pinned objects | - -an array of objects with keys `hash` and `type` is returned. +| `AsyncIterable<{ hash: string, type: string }>` | An async iterable that yields currently pinned objects with `hash` and `type` properties. `hash` is a string CID of the pinned node, `type` is the pin type ("recursive", "direct" or "indirect") | **Example:** ```JavaScript -const pinset = await ipfs.pin.ls() -console.log(pinset) -// Logs -// [ -// { hash: Qmc5XkteJdb337s7VwFBAGtiaoj2QCEzyxtNRy3iMudc3E, type: 'recursive' }, -// { hash: QmZbj5ruYneZb8FuR9wnLqJCpCXMQudhSdWhdhp5U1oPWJ, type: 'indirect' }, -// { hash: QmSo73bmN47gBxMNqbdV6rZ4KJiqaArqJ1nu5TvFhqqj1R, type: 'indirect' } -// ] +for await (const { hash, type } of ipfs.pin.ls()) { + console.log({ hash, type }) +} +// { hash: Qmc5XkteJdb337s7VwFBAGtiaoj2QCEzyxtNRy3iMudc3E, type: 'recursive' } +// { hash: QmZbj5ruYneZb8FuR9wnLqJCpCXMQudhSdWhdhp5U1oPWJ, type: 'indirect' } +// { hash: QmSo73bmN47gBxMNqbdV6rZ4KJiqaArqJ1nu5TvFhqqj1R, type: 'indirect' } ``` A great source of [examples][] can be found in the tests for this API. diff --git a/SPEC/PUBSUB.md b/SPEC/PUBSUB.md index 1f794a0d..63d505b3 100644 --- a/SPEC/PUBSUB.md +++ b/SPEC/PUBSUB.md @@ -6,9 +6,6 @@ * [pubsub.ls](#pubsubls) * [pubsub.peers](#pubsubpeers) -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. - #### `pubsub.subscribe` > Subscribe to a pubsub topic. @@ -53,12 +50,6 @@ If the `topic` and `handler` are provided, the `handler` will no longer receive If **only** the `topic` param is provided, unsubscribe will remove **all** handlers for the `topic`. This behaves like [EventEmitter.removeAllListeners](https://nodejs.org/dist/latest/docs/api/events.html#events_emitter_removealllisteners_eventname). Use this if you would like to no longer receive any updates for the `topic`. 
-**WARNING:** Unsubscribe is an async operation, but removing **all** handlers for a topic can only be done using the Promises API (due to the difficulty in distinguishing between a "handler" and a "callback" - they are both functions). If you _need_ to know when unsubscribe has completed you must use `await` or `.then` on the return value from - -```JavaScript -ipfs.pubsub.unsubscribe('topic') -``` - **Returns** | Type | Description | @@ -79,6 +70,7 @@ console.log(`unsubscribed from ${topic}`) ``` Or removing all listeners: + ```JavaScript const topic = 'fruit-of-the-day' const receiveMsg = (msg) => console.log(msg.toString()) @@ -130,7 +122,7 @@ A great source of [examples][] can be found in the tests for this API. | Type | Description | | -------- | -------- | -| `Promise` | An array of topicIDs that the peer is subscribed to | +| `Promise` | An array of topicIDs that the peer is subscribed to | **Example:** @@ -153,14 +145,14 @@ A great source of [examples][] can be found in the tests for this API. | Type | Description | | -------- | -------- | -| `Promise` | An array of peer IDs subscribed to the `topic` | +| `Promise` | An array of peer IDs subscribed to the `topic` | **Example:** ```JavaScript const topic = 'fruit-of-the-day' -const peerIds = ipfs.pubsub.peers(topic) +const peerIds = await ipfs.pubsub.peers(topic) console.log(peerIds) ``` diff --git a/SPEC/REFS.md b/SPEC/REFS.md index c2439a60..5c1bd08e 100644 --- a/SPEC/REFS.md +++ b/SPEC/REFS.md @@ -1,14 +1,7 @@ # Refs API * [refs](#refs) -* [refsReadableStream](#refsreadablestream) -* [refsPullStream](#refspullstream) * [refs.local](#refslocal) -* [refs.localReadableStream](#refslocalreadablestream) -* [refs.localPullStream](#refslocalpullstream) - -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. #### `refs` @@ -38,22 +31,21 @@ Although not listed in the documentation, all the following APIs that actually r | Type | Description | | -------- | -------- | -| `Promise` | An array representing the links (references) | +| `AsyncIterable` | An async iterable that yields objects representing the links (references) | + +Each yielded object is of the form: -example of the returned array: ```js { - ref: "myref", - err: "error msg" + ref: string, + err: Error | null } ``` **Example:** ```JavaScript -const refs = await ipfs.refs(ipfsPath, { recursive: true }) - -for (const ref of refs) { +for await (const ref of ipfs.refs(ipfsPath, { recursive: true })) { if (ref.err) { console.error(ref.err) } else { @@ -63,64 +55,6 @@ for (const ref of refs) { } ``` -#### `refsReadableStream` - -> Output references using a [Readable Stream][rs] - -##### `ipfs.refsReadableStream(ipfsPath, [options])` - -`options` is an optional object argument identical to the options for [ipfs.refs](#refs) - -**Returns** - -| Type | Description | -| -------- | -------- | -| `ReadableStream` | A [Readable Stream][rs] representing the references | - -**Example:** - -```JavaScript -const stream = ipfs.refsReadableStream(ipfsPath, { recursive: true }) -stream.on('data', function (ref) { - // 'ref' will be of the form - // { - // ref: 'QmHash', - // err: 'err message' - // } -}) -``` - -#### `refsPullStream` - -> Output references using a [Pull Stream][ps]. 
- -##### `ipfs.refsReadableStream(ipfsPath, [options])` - -`options` is an optional object argument identical to the options for [ipfs.refs](#refs) - -**Returns** - -| Type | Description | -| -------- | -------- | -| `PullStream` | A [Pull Stream][ps] representing the references | - -**Example:** - -```JavaScript -const stream = ipfs.refsPullStream(ipfsPath, { recursive: true }) - -pull( - stream, - pull.collect((err, values) => { - // values will be an array of objects, each one of the form - // { - // ref: 'QmHash', - // err: 'err message' - // } - }) -) -``` - #### `refs.local` > Output all local references (CIDs of all blocks in the blockstore) @@ -131,94 +65,31 @@ pull( | Type | Description | | -------- | -------- | -| `Promise` | An array representing all the local references | +| `AsyncIterable` | An async iterable that yields objects representing the links (references) | + +Each yielded object is of the form: -example of the returned array: ```js { - ref: "myref", - err: "error msg" + ref: string, + err: Error | null } ``` **Example:** ```JavaScript -ipfs.refs.local(function (err, refs) { - if (err) { - throw err - } - - for (const ref of refs) { - if (ref.err) { - console.error(ref.err) - } else { - console.log(ref.ref) - // output: "QmHash" - } +for await (const ref of ipfs.refs.local()) { + if (ref.err) { + console.error(ref.err) + } else { + console.log(ref.ref) + // output: "QmHash" } -}) -``` - -#### `refs.localReadableStream` - -> Output all local references using a [Readable Stream][rs] - -##### `ipfs.localReadableStream()` - -**Returns** - -| Type | Description | -| -------- | -------- | -| `ReadableStream` | A [Readable Stream][rs] representing all the local references | - -**Example:** - -```JavaScript -const stream = ipfs.refs.localReadableStream() -stream.on('data', function (ref) { - // 'ref' will be of the form - // { - // ref: 'QmHash', - // err: 'err message' - // } -}) -``` - -#### `refs.localPullStream` - -> Output all local references using a [Pull Stream][ps]. - -##### `ipfs.refs.localReadableStream()` - -**Returns** - -| Type | Description | -| -------- | -------- | -| `PullStream` | A [Pull Stream][ps] representing all the local references | - -**Example:** - -```JavaScript -const stream = ipfs.refs.localPullStream() - -pull( - stream, - pull.collect((err, values) => { - // values will be an array of objects, each one of the form - // { - // ref: 'QmHash', - // err: 'err message' - // } - }) -) +} ``` -A great source of [examples][] can be found in the tests for this API. - [examples]: https://github.com/ipfs/interface-ipfs-core/blob/master/src/files-regular [b]: https://www.npmjs.com/package/buffer -[rs]: https://www.npmjs.com/package/readable-stream -[ps]: https://www.npmjs.com/package/pull-stream [cid]: https://www.npmjs.com/package/cids [blob]: https://developer.mozilla.org/en-US/docs/Web/API/Blob diff --git a/SPEC/REPO.md b/SPEC/REPO.md index 03789f5d..37c8a930 100644 --- a/SPEC/REPO.md +++ b/SPEC/REPO.md @@ -4,9 +4,6 @@ * [repo.stat](#repostat) * [repo.version](#repoversion) -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. - #### `repo.gc` > Perform a garbage collection sweep on the repo. 
@@ -23,18 +20,19 @@ Where: | Type | Description | | -------- | -------- | -| `Promise` | An array of objects | +| `AsyncIterable` | An async iterable that yields objects describing nodes that were garbage collected | -each object contains the following properties: +Each yielded object contains the following properties: -- `err` is an Error if it was not possible to GC a particular block. +- `err` is an `Error` if it was not possible to GC a particular block. - `cid` is the [CID][cid] of the block that was Garbage Collected. **Example:** ```JavaScript -const res = await ipfs.repo.gc() -console.log(res) +for await (const res of ipfs.repo.gc()) { + console.log(res) +} ``` #### `repo.stat` diff --git a/SPEC/STATS.md b/SPEC/STATS.md index 67cf0119..7d350cd7 100644 --- a/SPEC/STATS.md +++ b/SPEC/STATS.md @@ -3,11 +3,6 @@ * [stats.bitswap](#statsbitswap) * [stats.repo](#statsrepo) * [stats.bw](#statsbw) -* [stats.bwPullStream](#statsbwpullstream) -* [stats.bwReadableStream](#statsbwreadablestream) - -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. #### `stats.bitswap` @@ -23,7 +18,7 @@ Note: `stats.repo` and `repo.stat` can be used interchangeably. See [`repo.stat` #### `stats.bw` -> Get IPFS bandwidth information as an object. +> Get IPFS bandwidth information. ##### `ipfs.stats.bw([options])` @@ -32,16 +27,16 @@ Where: - `options` is an optional object that might contain the following keys: - `peer` specifies a peer to print bandwidth for. - `proto` specifies a protocol to print bandwidth for. - - `poll` is used to print bandwidth at an interval. - - `interval` is the time interval to wait between updating output, if `poll` is true. + - `poll` is used to yield bandwidth info at an interval. + - `interval` is the time interval to wait between updating output, if `poll` is `true`. **Returns** | Type | Description | | -------- | -------- | -| `Promise` | An object representing IPFS bandwidth information | +| `AsyncIterable` | An async iterable that yields IPFS bandwidth information | -the returned object contains the following keys: +Each yielded object contains the following keys: - `totalIn` - is a [BigNumber Int][bigNumber], in bytes. - `totalOut` - is a [BigNumber Int][bigNumber], in bytes. @@ -51,88 +46,16 @@ the returned object contains the following keys: **Example:** ```JavaScript -const stats = await ipfs.stats.bw() - -console.log(stats) -// { totalIn: BigNumber {...}, -// totalOut: BigNumber {...}, -// rateIn: BigNumber {...}, -// rateOut: BigNumber {...} } -``` - -A great source of [examples][] can be found in the tests for this API. - -#### `stats.bwPullStream` - -> Get IPFS bandwidth information as a [Pull Stream][ps]. - -##### `ipfs.stats.bwPullStream([options])` - -Options are described on [`ipfs.stats.bw`](#bw). - -**Returns** - -| Type | Description | -| -------- | -------- | -| `PullStream` | A [Pull Stream][ps] representing IPFS bandwidth information | - -**Example:** - -```JavaScript -const pull = require('pull-stream') -const log = require('pull-stream/sinks/log') - -const stream = ipfs.stats.bwPullStream({ poll: true }) - -pull( - stream, - log() -) - -// { totalIn: BigNumber {...}, -// totalOut: BigNumber {...}, -// rateIn: BigNumber {...}, -// rateOut: BigNumber {...} } -// ... -// Ad infinitum -``` - -A great source of [examples][] can be found in the tests for this API. 
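The continuous ("ad infinitum") polling shown in the removed `stats.bwPullStream` example maps directly onto the new `stats.bw` API via the `poll` option described above. A minimal sketch (assuming an `ipfs` instance in scope, as in the other examples; the millisecond `interval` value here is an assumption, not specified by this spec):

```JavaScript
// With `poll: true` the iterable yields a fresh stats object at each
// interval and never ends on its own - break out of the loop to stop.
for await (const stats of ipfs.stats.bw({ poll: true, interval: 2000 })) {
  console.log(stats)
  // { totalIn: BigNumber {...},
  //   totalOut: BigNumber {...},
  //   rateIn: BigNumber {...},
  //   rateOut: BigNumber {...} }
}
```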
- -#### `stats.bwReadableStream` - -> Get IPFS bandwidth information as a [Readable Stream][rs]. - -##### `ipfs.stats.bwReadableStream([options])` - -Options are described on [`ipfs.stats.bw`](#bw). - -**Returns** - -| Type | Description | -| -------- | -------- | -| `ReadableStream` | A [Readable Stream][rs] representing IPFS bandwidth information | - -**Example:** - -```JavaScript -const stream = ipfs.stats.bwReadableStream({ poll: true }) - -stream.on('data', (data) => { - console.log(data) -})) - +for await (const stats of ipfs.stats.bw()) { + console.log(stats) +} // { totalIn: BigNumber {...}, // totalOut: BigNumber {...}, // rateIn: BigNumber {...}, // rateOut: BigNumber {...} } -// ... -// Ad infinitum ``` A great source of [examples][] can be found in the tests for this API. [bigNumber]: https://github.com/MikeMcl/bignumber.js/ -[rs]: https://www.npmjs.com/package/readable-stream -[ps]: https://www.npmjs.com/package/pull-stream [examples]: https://github.com/ipfs/interface-ipfs-core/blob/master/src/stats diff --git a/SPEC/SWARM.md b/SPEC/SWARM.md index e61a8bf0..b82eb258 100644 --- a/SPEC/SWARM.md +++ b/SPEC/SWARM.md @@ -8,9 +8,6 @@ * [swarm.filters.add](#swarmfiltersadd) (not implemented yet) * [swarm.filters.rm](#swarmfiltersrm) (not implemented yet) -### ⚠️ Note -Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter. - #### `swarm.addrs` > List of known addresses of each peer connected. @@ -21,13 +18,29 @@ Although not listed in the documentation, all the following APIs that actually r | Type | Description | | -------- | -------- | -| `Promise` | An array of of [`PeerInfo`](https://github.com/libp2p/js-peer-info)s | +| `Promise<{ id: CID, addrs: Multiaddr[] }[]>` | A promise that resolves to an array of objects, each with `id` and `addrs`. `id` is a [`CID`](https://github.com/multiformats/js-cid) - the peer's ID and `addrs` is an array of [Multiaddr](https://github.com/multiformats/js-multiaddr/) - addresses for the peer. | **Example:** ```JavaScript const peerInfos = await ipfs.swarm.addrs() -console.log(peerInfos) + +peerInfos.forEach(info => { + console.log(info.id.toString()) + /* + QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt + */ + + info.addrs.forEach(addr => console.log(addr.toString())) + /* + /ip4/147.75.94.115/udp/4001/quic + /ip6/2604:1380:3000:1f00::1/udp/4001/quic + /dnsaddr/bootstrap.libp2p.io + /ip6/2604:1380:3000:1f00::1/tcp/4001 + /ip4/147.75.94.115/tcp/4001 + */ +}) + ``` A great source of [examples][] can be found in the tests for this API. @@ -86,7 +99,7 @@ A great source of [examples][] can be found in the tests for this API. | Type | Description | | -------- | -------- | -| `Promise` | An array of [`MultiAddr`](https://github.com/multiformats/js-multiaddr) representing the local addresses the node is listening | +| `Promise` | An array of [`Multiaddr`](https://github.com/multiformats/js-multiaddr) representing the local addresses the node is listening | **Example:** @@ -103,31 +116,33 @@ A great source of [examples][] can be found in the tests for this API. ##### `ipfs.swarm.peers([options])` -If `options.verbose` is set to `true` additional information, such as `latency` is provided. +`options` is an optional object with the following properties: + - `direction` - set to `true` to return connection direction information. Default `false` + - `streams` - set to `true` to return information about open muxed streams. 
Default `false` + - `verbose` - set to `true` to return all extra information. Default `false` + - `latency` - set to `true` to return latency information. Default `false` **Returns** | Type | Description | | -------- | -------- | -| `Promise` | An array with the list of peers that the node have connections with | +| `Promise` | An array with the list of peers that the node has connections with | -the returned array has the following form: +The returned array has the following form: - `addr: Multiaddr` -- `peer: PeerId` -- `latency: String` Only if `verbose: true` was passed - -Starting with `go-ipfs 0.4.5` these additional properties are provided - +- `peer: CID` +- `latency: String` - Only if `verbose: true` was passed - `muxer: String` - The type of stream muxer the peer is using -- `streams: []String` - Only if `verbose: true`, a list of currently open streams +- `streams: string[]` - Only if `verbose: true`, a list of currently open streams +- `direction: number` - Inbound or outbound connection -If an error occurs trying to create an individual `peerInfo` object, it will have the properties +If an error occurs trying to create an individual object, it will have the properties: - `error: Error` - the error that occurred - `rawPeerInfo: Object` - the raw data for the peer -and all other properties may be undefined. +All other properties may be `undefined`. **Example:** diff --git a/package.json b/package.json index a563b6b7..54162dea 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,9 @@ "leadMaintainer": "Alan Shaw ", "main": "src/index.js", "browser": { - "fs": false + "fs": false, + "os": false, + "ipfs-utils/src/files/glob-source": false }, "scripts": { "test": "exit 0", @@ -36,43 +38,28 @@ }, "homepage": "https://github.com/ipfs/interface-ipfs-core#readme", "dependencies": { - "bl": "^3.0.0", - "bs58": "^4.0.1", - "callbackify": "^1.1.0", "chai": "^4.2.0", "chai-as-promised": "^7.1.1", "cids": "~0.7.1", - "concat-stream": "^2.0.0", "delay": "^4.3.0", "dirty-chai": "^2.0.1", - "es6-promisify": "^6.0.1", - "get-stream": "^5.1.0", "hat": "0.0.3", "ipfs-block": "~0.8.0", "ipfs-unixfs": "~0.1.16", - "ipfs-utils": "~0.4.0", + "ipfs-utils": "^0.5.0", "ipld-dag-cbor": "~0.15.0", "ipld-dag-pb": "^0.18.1", "is-ipfs": "~0.6.1", - "is-plain-object": "^3.0.0", + "it-all": "^1.0.1", + "it-concat": "^1.0.0", + "it-last": "^1.0.1", "it-pushable": "^1.2.1", - "libp2p-crypto": "~0.16.0", "multiaddr": "^6.0.0", "multibase": "~0.6.0", "multihashes": "~0.4.14", - "multihashing-async": "~0.6.0", - "p-each-series": "^2.1.0", - "p-map-series": "^2.1.0", - "p-timeout": "^3.2.0", - "p-times": "^2.1.0", - "peer-id": "~0.12.2", - "peer-info": "~0.15.1", - "pull-stream": "^3.6.11", - "pull-to-promise": "^1.0.1", - "pump": "^3.0.0", - "readable-stream": "^3.1.1", - "streaming-iterables": "^4.1.0", - "through2": "^3.0.0" + "multihashing-async": "^0.7.0", + "peer-id": "~0.13.5", + "readable-stream": "^3.4.0" }, "devDependencies": { "aegir": "^20.0.0" } diff --git a/src/add.js b/src/add.js new file mode 100644 index 00000000..5254690f --- /dev/null +++ b/src/add.js @@ -0,0 +1,441 @@ +/* eslint-env mocha, browser */ +'use strict' + +const { fixtures } = require('./utils') +const { Readable } = require('readable-stream') +const all = require('it-all') +const fs = require('fs') +const os = require('os') +const path = require('path') +const { supportsFileReader } = require('ipfs-utils/src/supports') +const globSource = require('ipfs-utils/src/files/glob-source') +const urlSource = 
require('ipfs-utils/src/files/url-source') +const { isNode } = require('ipfs-utils/src/env') +const { getDescribe, getIt, expect } = require('./utils/mocha') +const { echoUrl, redirectUrl } = require('./utils/echo-http-server') + +const fixturesPath = path.join(__dirname, '..', 'test', 'fixtures') + +module.exports = (createCommon, options) => { + const describe = getDescribe(options) + const it = getIt(options) + const common = createCommon() + + describe('.add', function () { + this.timeout(40 * 1000) + + let ipfs + + before(async function () { + // CI takes longer to instantiate the daemon, so we need to increase the + // timeout for the before step + this.timeout(60 * 1000) + + ipfs = await common.setup() + }) + + after(() => common.teardown()) + + it('should add a File', async function () { + if (!supportsFileReader) return this.skip('skip in node') + + const filesAdded = await all(ipfs.add(new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' }))) + expect(filesAdded[0].cid.toString()).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') + }) + + it('should add a File as tuple', async function () { + if (!supportsFileReader) return this.skip('skip in node') + + const tuple = { + path: 'filename.txt', + content: new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' }) + } + + const filesAdded = await all(ipfs.add(tuple)) + expect(filesAdded[0].cid.toString()).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') + }) + + it('should add a File as array of tuple', async function () { + if (!supportsFileReader) return this.skip('skip in node') + + const tuple = { + path: 'filename.txt', + content: new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' }) + } + + const filesAdded = await all(ipfs.add([tuple])) + expect(filesAdded[0].cid.toString()).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') + }) + + it('should add a Buffer', async () => { + const filesAdded = await all(ipfs.add(fixtures.smallFile.data)) + expect(filesAdded).to.have.length(1) + + const file = filesAdded[0] + expect(file.cid.toString()).to.equal(fixtures.smallFile.cid) + expect(file.path).to.equal(fixtures.smallFile.cid) + // file.size counts the overhead by IPLD nodes and unixfs protobuf + expect(file.size).greaterThan(fixtures.smallFile.data.length) + }) + + it('should add a BIG Buffer', async () => { + const filesAdded = await all(ipfs.add(fixtures.bigFile.data)) + expect(filesAdded).to.have.length(1) + + const file = filesAdded[0] + expect(file.cid.toString()).to.equal(fixtures.bigFile.cid) + expect(file.path).to.equal(fixtures.bigFile.cid) + // file.size counts the overhead by IPLD nodes and unixfs protobuf + expect(file.size).greaterThan(fixtures.bigFile.data.length) + }) + + it('should add a BIG Buffer with progress enabled', async () => { + let progCalled = false + let accumProgress = 0 + function handler (p) { + progCalled = true + accumProgress = p + } + + const filesAdded = await all(ipfs.add(fixtures.bigFile.data, { progress: handler })) + expect(filesAdded).to.have.length(1) + + const file = filesAdded[0] + expect(file.cid.toString()).to.equal(fixtures.bigFile.cid) + expect(file.path).to.equal(fixtures.bigFile.cid) + expect(progCalled).to.be.true() + expect(accumProgress).to.equal(fixtures.bigFile.data.length) + }) + + it('should add a Buffer as tuple', async () => { + const tuple = { path: 'testfile.txt', content: fixtures.smallFile.data } + + const filesAdded = await all(ipfs.add([tuple])) + 
expect(filesAdded).to.have.length(1) + + const file = filesAdded[0] + expect(file.cid.toString()).to.equal(fixtures.smallFile.cid) + expect(file.path).to.equal('testfile.txt') + }) + + it('should add a string', async () => { + const data = 'a string' + const expectedCid = 'QmQFRCwEpwQZ5aQMqCsCaFbdjNLLHoyZYDjr92v1F7HeqX' + + const filesAdded = await all(ipfs.add(data)) + expect(filesAdded).to.be.length(1) + + const { path, size, cid } = filesAdded[0] + expect(path).to.equal(expectedCid) + expect(size).to.equal(16) + expect(cid.toString()).to.equal(expectedCid) + }) + + it('should add a TypedArray', async () => { + const data = Uint8Array.from([1, 3, 8]) + const expectedCid = 'QmRyUEkVCuHC8eKNNJS9BDM9jqorUvnQJK1DM81hfngFqd' + + const filesAdded = await all(ipfs.add(data)) + expect(filesAdded).to.be.length(1) + + const { path, size, cid } = filesAdded[0] + expect(path).to.equal(expectedCid) + expect(size).to.equal(11) + expect(cid.toString()).to.equal(expectedCid) + }) + + it('should add readable stream', async () => { + const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' + + const rs = new Readable() + rs.push(Buffer.from('some data')) + rs.push(null) + + const filesAdded = await all(ipfs.add(rs)) + expect(filesAdded).to.be.length(1) + + const file = filesAdded[0] + expect(file.path).to.equal(expectedCid) + expect(file.size).to.equal(17) + expect(file.cid.toString()).to.equal(expectedCid) + }) + + it('should add array of objects with readable stream content', async () => { + const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' + + const rs = new Readable() + rs.push(Buffer.from('some data')) + rs.push(null) + + const tuple = { path: 'data.txt', content: rs } + + const filesAdded = await all(ipfs.add([tuple])) + expect(filesAdded).to.be.length(1) + + const file = filesAdded[0] + expect(file.path).to.equal('data.txt') + expect(file.size).to.equal(17) + expect(file.cid.toString()).to.equal(expectedCid) + }) + + it('should add a nested directory as array of tuples', async function () { + const content = (name) => ({ + path: `test-folder/${name}`, + content: fixtures.directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + const res = await all(ipfs.add(dirs)) + + const root = res[res.length - 1] + expect(root.path).to.equal('test-folder') + expect(root.cid.toString()).to.equal(fixtures.directory.cid) + }) + + it('should add a nested directory as array of tuples with progress', async function () { + const content = (name) => ({ + path: `test-folder/${name}`, + content: fixtures.directory.files[name] + }) + + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + const total = dirs.reduce((i, entry) => { + return i + (entry.content ? 
entry.content.length : 0) + }, 0) + + let progCalled = false + let accumProgress = 0 + const handler = (p) => { + progCalled = true + accumProgress += p + } + + const filesAdded = await all(ipfs.add(dirs, { progress: handler })) + + const root = filesAdded[filesAdded.length - 1] + expect(progCalled).to.be.true() + expect(accumProgress).to.be.at.least(total) + expect(root.path).to.equal('test-folder') + expect(root.cid.toString()).to.equal(fixtures.directory.cid) + }) + + it('should add files to a directory non sequentially', async function () { + const content = path => ({ + path: `test-dir/${path}`, + content: fixtures.directory.files[path.split('/').pop()] + }) + + const input = [ + content('a/pp.txt'), + content('a/holmes.txt'), + content('b/jungle.txt'), + content('a/alice.txt') + ] + + const filesAdded = await all(ipfs.add(input)) + + const toPath = ({ path }) => path + const nonSeqDirFilePaths = input.map(toPath).filter(p => p.includes('/a/')) + const filesAddedPaths = filesAdded.map(toPath) + + expect(nonSeqDirFilePaths.every(p => filesAddedPaths.includes(p))).to.be.true() + }) + + it('should fail when passed invalid input', () => { + const nonValid = 138 + + return expect(all(ipfs.add(nonValid))).to.eventually.be.rejected() + }) + + it('should wrap content in a directory', async () => { + const data = { path: 'testfile.txt', content: fixtures.smallFile.data } + + const filesAdded = await all(ipfs.add(data, { wrapWithDirectory: true })) + expect(filesAdded).to.have.length(2) + + const file = filesAdded[0] + const wrapped = filesAdded[1] + expect(file.cid.toString()).to.equal(fixtures.smallFile.cid) + expect(file.path).to.equal('testfile.txt') + expect(wrapped.path).to.equal('') + }) + + it('should add with only-hash=true', async function () { + this.slow(10 * 1000) + const content = String(Math.random() + Date.now()) + + const files = await all(ipfs.add(Buffer.from(content), { onlyHash: true })) + expect(files).to.have.length(1) + + await expect(ipfs.object.get(files[0].cid, { timeout: 4000 })) + .to.eventually.be.rejected() + .and.to.have.property('name').that.equals('TimeoutError') + }) + + it('should add a directory from the file system', async function () { + if (!isNode) this.skip() + + const filesPath = path.join(fixturesPath, 'test-folder') + + const result = await all(ipfs.add(globSource(filesPath, { recursive: true }))) + expect(result.length).to.be.above(8) + }) + + it('should add a directory from the file system with an odd name', async function () { + if (!isNode) this.skip() + + const filesPath = path.join(fixturesPath, 'weird name folder [v0]') + + const result = await all(ipfs.add(globSource(filesPath, { recursive: true }))) + expect(result.length).to.be.above(8) + }) + + it('should ignore a directory from the file system', async function () { + if (!isNode) this.skip() + + const filesPath = path.join(fixturesPath, 'test-folder') + + const result = await all(ipfs.add(globSource(filesPath, { recursive: true, ignore: ['files/**'] }))) + expect(result.length).to.be.below(9) + }) + + it('should add a file from the file system', async function () { + if (!isNode) this.skip() + + const filePath = path.join(fixturesPath, 'testfile.txt') + + const result = await all(ipfs.add(globSource(filePath))) + expect(result.length).to.equal(1) + expect(result[0].path).to.equal('testfile.txt') + }) + + it('should add a hidden file in a directory from the file system', async function () { + if (!isNode) this.skip() + + const filesPath = path.join(fixturesPath, 'hidden-files-folder') + + 
const result = await all(ipfs.add(globSource(filesPath, { recursive: true, hidden: true }))) + expect(result.length).to.be.above(10) + expect(result.map(object => object.path)).to.include('hidden-files-folder/.hiddenTest.txt') + expect(result.map(object => object.cid.toString())).to.include('QmdbAjVmLRdpFyi8FFvjPfhTGB2cVXvWLuK7Sbt38HXrtt') + }) + + it('should add a file from the file system with only-hash=true', async function () { + if (!isNode) this.skip() + + this.slow(10 * 1000) + + const content = String(Math.random() + Date.now()) + const filepath = path.join(os.tmpdir(), `${content}.txt`) + fs.writeFileSync(filepath, content) + + const out = await all(ipfs.add(globSource(filepath), { onlyHash: true })) + + fs.unlinkSync(filepath) + + await expect(ipfs.object.get(out[0].cid, { timeout: 500 })) + .to.eventually.be.rejected() + .and.to.have.property('name').that.equals('TimeoutError') + }) + + it('should add from a HTTP URL', async () => { + const text = `TEST${Date.now()}` + const url = echoUrl(text) + + const [result, expectedResult] = await Promise.all([ + all(ipfs.add(urlSource(url))), + all(ipfs.add(Buffer.from(text))) + ]) + + expect(result.err).to.not.exist() + expect(expectedResult.err).to.not.exist() + expect(result[0].cid.toString()).to.equal(expectedResult[0].cid.toString()) + expect(result[0].size).to.equal(expectedResult[0].size) + expect(result[0].path).to.equal(text) + }) + + it('should add from a HTTP URL with redirection', async () => { + const text = `TEST${Date.now()}` + const url = echoUrl(text) + '?foo=bar#buzz' + + const [result, expectedResult] = await Promise.all([ + all(ipfs.add(urlSource(redirectUrl(url)))), + all(ipfs.add(Buffer.from(text))) + ]) + + expect(result.err).to.not.exist() + expect(expectedResult.err).to.not.exist() + expect(result[0].cid.toString()).to.equal(expectedResult[0].cid.toString()) + expect(result[0].size).to.equal(expectedResult[0].size) + expect(result[0].path).to.equal(text) + }) + + it('should add from a URL with only-hash=true', async function () { + const text = `TEST${Date.now()}` + const url = echoUrl(text) + + const res = await all(ipfs.add(urlSource(url), { onlyHash: true })) + + await expect(ipfs.object.get(res[0].cid, { timeout: 500 })) + .to.eventually.be.rejected() + .and.to.have.property('name').that.equals('TimeoutError') + }) + + it('should add from a URL with wrap-with-directory=true', async () => { + const filename = `TEST${Date.now()}.txt` // also acts as data + const url = echoUrl(filename) + '?foo=bar#buzz' + const addOpts = { wrapWithDirectory: true } + + const [result, expectedResult] = await Promise.all([ + all(ipfs.add(urlSource(url), addOpts)), + all(ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts)) + ]) + expect(result.err).to.not.exist() + expect(expectedResult.err).to.not.exist() + expect(result).to.deep.equal(expectedResult) + }) + + it('should add from a URL with wrap-with-directory=true and URL-escaped file name', async () => { + const filename = `320px-Domažlice,_Jiráskova_43_(${Date.now()}).jpg` // also acts as data + const url = echoUrl(filename) + '?foo=bar#buzz' + const addOpts = { wrapWithDirectory: true } + + const [result, expectedResult] = await Promise.all([ + all(ipfs.add(urlSource(url), addOpts)), + all(ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts)) + ]) + + expect(result.err).to.not.exist() + expect(expectedResult.err).to.not.exist() + expect(result).to.deep.equal(expectedResult) + }) + + it('should not add from an invalid url', () => { + 
return expect(all(ipfs.add(urlSource('123http://invalid')))).to.eventually.be.rejected() + }) + }) +} diff --git a/src/bitswap/utils.js b/src/bitswap/utils.js index 7e6ae646..48636b2b 100644 --- a/src/bitswap/utils.js +++ b/src/bitswap/utils.js @@ -4,6 +4,8 @@ const delay = require('delay') async function waitForWantlistKey (ipfs, key, opts = {}) { opts.timeout = opts.timeout || 10000 + opts.interval = opts.interval || 100 + const end = Date.now() + opts.timeout while (Date.now() < end) { @@ -13,7 +15,7 @@ async function waitForWantlistKey (ipfs, key, opts = {}) { return } - await delay(500) + await delay(opts.interval) } throw new Error(`Timed out waiting for ${key} in wantlist`) diff --git a/src/bitswap/wantlist.js b/src/bitswap/wantlist.js index 63ca125d..08c63b12 100644 --- a/src/bitswap/wantlist.js +++ b/src/bitswap/wantlist.js @@ -23,7 +23,7 @@ module.exports = (createCommon, options) => { ipfsB = await common.setup() // Add key to the wantlist for ipfsB - ipfsB.block.get(key).catch(() => {}) + ipfsB.block.get(key).catch(() => { /* is ok, expected on teardown */ }) await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) }) diff --git a/src/block/rm.js b/src/block/rm.js index e5fe1fe0..5bc6ebab 100644 --- a/src/block/rm.js +++ b/src/block/rm.js @@ -3,6 +3,7 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const hat = require('hat') +const all = require('it-all') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -29,17 +30,17 @@ module.exports = (createCommon, options) => { }) // block should be present in the local store - const localRefs = await ipfs.refs.local() + const localRefs = await all(ipfs.refs.local()) expect(localRefs).to.have.property('length').that.is.greaterThan(0) expect(localRefs.find(ref => ref.ref === cid.toString())).to.be.ok() - const result = await ipfs.block.rm(cid) + const result = await all(ipfs.block.rm(cid)) expect(result).to.be.an('array').and.to.have.lengthOf(1) expect(result[0]).to.have.property('hash', cid.toString()) expect(result[0]).to.not.have.property('error') // did we actually remove the block? 
- const localRefsAfterRemove = await ipfs.refs.local() + const localRefsAfterRemove = await all(ipfs.refs.local()) expect(localRefsAfterRemove).to.have.property('length').that.is.greaterThan(0) expect(localRefsAfterRemove.find(ref => ref.ref === cid.toString())).to.not.be.ok() }) @@ -49,7 +50,7 @@ module.exports = (createCommon, options) => { format: 'raw', hashAlg: 'sha2-256' }) - const result = await ipfs.block.rm(cid.toString()) + const result = await all(ipfs.block.rm(cid.toString())) expect(result).to.be.an('array').and.to.have.lengthOf(1) expect(result[0]).to.have.property('hash', cid.toString()) @@ -61,7 +62,7 @@ module.exports = (createCommon, options) => { format: 'raw', hashAlg: 'sha2-256' }) - const result = await ipfs.block.rm(cid.buffer) + const result = await all(ipfs.block.rm(cid.buffer)) expect(result).to.be.an('array').and.to.have.lengthOf(1) expect(result[0]).to.have.property('hash', cid.toString()) @@ -84,7 +85,7 @@ module.exports = (createCommon, options) => { }) ] - const result = await ipfs.block.rm(cids) + const result = await all(ipfs.block.rm(cids)) expect(result).to.be.an('array').and.to.have.lengthOf(3) @@ -101,10 +102,10 @@ module.exports = (createCommon, options) => { }) // remove it - await ipfs.block.rm(cid) + await all(ipfs.block.rm(cid)) // remove it again - const result = await ipfs.block.rm(cid) + const result = await all(ipfs.block.rm(cid)) expect(result).to.be.an('array').and.to.have.lengthOf(1) expect(result[0]).to.have.property('error').and.to.include('block not found') @@ -117,10 +118,10 @@ module.exports = (createCommon, options) => { }) // remove it - await ipfs.block.rm(cid) + await all(ipfs.block.rm(cid)) // remove it again - const result = await ipfs.block.rm(cid, { force: true }) + const result = await all(ipfs.block.rm(cid, { force: true })) expect(result).to.be.an('array').and.to.have.lengthOf(1) expect(result[0]).to.have.property('hash', cid.toString()) @@ -132,7 +133,7 @@ module.exports = (createCommon, options) => { format: 'raw', hashAlg: 'sha2-256' }) - const result = await ipfs.block.rm(cid, { quiet: true }) + const result = await all(ipfs.block.rm(cid, { quiet: true })) expect(result).to.be.an('array').and.to.have.lengthOf(0) }) @@ -144,7 +145,7 @@ module.exports = (createCommon, options) => { }) await ipfs.pin.add(cid.toString()) - const result = await ipfs.block.rm(cid) + const result = await all(ipfs.block.rm(cid)) expect(result).to.be.an('array').and.to.have.lengthOf(1) expect(result[0]).to.have.property('error').and.to.include('pinned') diff --git a/src/files-regular/cat.js b/src/cat.js similarity index 67% rename from src/files-regular/cat.js rename to src/cat.js index 41523adf..e90e2a33 100644 --- a/src/files-regular/cat.js +++ b/src/cat.js @@ -2,9 +2,10 @@ 'use strict' const { fixtures } = require('./utils') -const bs58 = require('bs58') const CID = require('cids') -const { getDescribe, getIt, expect } = require('../utils/mocha') +const concat = require('it-concat') +const all = require('it-all') +const { getDescribe, getIt, expect } = require('./utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -27,79 +28,79 @@ module.exports = (createCommon, options) => { after(() => common.teardown()) before(() => Promise.all([ - ipfs.add(fixtures.smallFile.data), - ipfs.add(fixtures.bigFile.data) + all(ipfs.add(fixtures.smallFile.data)), + all(ipfs.add(fixtures.bigFile.data)) ])) it('should cat with a base58 string encoded multihash', async () => { - const data = await 
ipfs.cat(fixtures.smallFile.cid) + const data = await concat(ipfs.cat(fixtures.smallFile.cid)) expect(data.toString()).to.contain('Plz add me!') }) it('should cat with a Buffer multihash', async () => { - const cid = Buffer.from(bs58.decode(fixtures.smallFile.cid)) + const cid = new CID(fixtures.smallFile.cid).multihash - const data = await ipfs.cat(cid) + const data = await concat(ipfs.cat(cid)) expect(data.toString()).to.contain('Plz add me!') }) it('should cat with a CID object', async () => { const cid = new CID(fixtures.smallFile.cid) - const data = await ipfs.cat(cid) + const data = await concat(ipfs.cat(cid)) expect(data.toString()).to.contain('Plz add me!') }) it('should cat a file added as CIDv0 with a CIDv1', async () => { const input = Buffer.from(`TEST${Date.now()}`) - const res = await ipfs.add(input, { cidVersion: 0 }) + const res = await all(ipfs.add(input, { cidVersion: 0 })) const cidv0 = new CID(res[0].hash) expect(cidv0.version).to.equal(0) const cidv1 = cidv0.toV1() - const output = await ipfs.cat(cidv1) - expect(output).to.eql(input) + const output = await concat(ipfs.cat(cidv1)) + expect(output.slice()).to.eql(input) }) it('should cat a file added as CIDv1 with a CIDv0', async () => { const input = Buffer.from(`TEST${Date.now()}`) - const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false }) + const res = await all(ipfs.add(input, { cidVersion: 1, rawLeaves: false })) const cidv1 = new CID(res[0].hash) expect(cidv1.version).to.equal(1) const cidv0 = cidv1.toV0() - const output = await ipfs.cat(cidv0) - expect(output).to.eql(input) + const output = await concat(ipfs.cat(cidv0)) + expect(output.slice()).to.eql(input) }) it('should cat a BIG file', async () => { - const data = await ipfs.cat(fixtures.bigFile.cid) + const data = await concat(ipfs.cat(fixtures.bigFile.cid)) expect(data.length).to.equal(fixtures.bigFile.data.length) - expect(data).to.eql(fixtures.bigFile.data) + expect(data.slice()).to.eql(fixtures.bigFile.data) }) it('should cat with IPFS path', async () => { const ipfsPath = '/ipfs/' + fixtures.smallFile.cid - const data = await ipfs.cat(ipfsPath) + const data = await concat(ipfs.cat(ipfsPath)) expect(data.toString()).to.contain('Plz add me!') }) it('should cat with IPFS path, nested value', async () => { const fileToAdd = { path: 'a/testfile.txt', content: fixtures.smallFile.data } - const filesAdded = await ipfs.add([fileToAdd]) + const filesAdded = await all(ipfs.add([fileToAdd])) const file = await filesAdded.find((f) => f.path === 'a') expect(file).to.exist() - const data = await ipfs.cat(`/ipfs/${file.hash}/testfile.txt`) + const data = await concat(ipfs.cat(`/ipfs/${file.hash}/testfile.txt`)) expect(data.toString()).to.contain('Plz add me!') }) @@ -107,23 +108,23 @@ module.exports = (createCommon, options) => { it('should cat with IPFS path, deeply nested value', async () => { const fileToAdd = { path: 'a/b/testfile.txt', content: fixtures.smallFile.data } - const filesAdded = await ipfs.add([fileToAdd]) + const filesAdded = await all(ipfs.add([fileToAdd])) const file = filesAdded.find((f) => f.path === 'a') expect(file).to.exist() - const data = await ipfs.cat(`/ipfs/${file.hash}/b/testfile.txt`) + const data = await concat(ipfs.cat(`/ipfs/${file.hash}/b/testfile.txt`)) expect(data.toString()).to.contain('Plz add me!') }) it('should error on invalid key', () => { const invalidCid = 'somethingNotMultihash' - return expect(ipfs.cat(invalidCid)).to.eventually.be.rejected() + return 
expect(concat(ipfs.cat(invalidCid))).to.eventually.be.rejected() }) it('should error on unknown path', () => { - return expect(ipfs.cat(fixtures.smallFile.cid + '/does-not-exist')).to.eventually.be.rejected() + return expect(concat(ipfs.cat(fixtures.smallFile.cid + '/does-not-exist'))).to.eventually.be.rejected() .and.be.an.instanceOf(Error) .and.to.have.property('message') .to.be.oneOf([ @@ -135,7 +136,7 @@ module.exports = (createCommon, options) => { it('should error on dir path', async () => { const file = { path: 'dir/testfile.txt', content: fixtures.smallFile.data } - const filesAdded = await ipfs.add([file]) + const filesAdded = await all(ipfs.add([file])) expect(filesAdded.length).to.equal(2) const files = filesAdded.filter((file) => file.path === 'dir') @@ -143,7 +144,7 @@ module.exports = (createCommon, options) => { const dir = files[0] - const err = await expect(ipfs.cat(dir.hash)).to.be.rejected() + const err = await expect(concat(ipfs.cat(dir.hash))).to.be.rejected() expect(err.message).to.contain('this dag node is a directory') }) @@ -151,7 +152,7 @@ module.exports = (createCommon, options) => { const offset = 1 const length = 3 - const data = await ipfs.cat(fixtures.smallFile.cid, { offset, length }) + const data = await concat(ipfs.cat(fixtures.smallFile.cid, { offset, length })) expect(data.toString()).to.equal('lz ') }) }) diff --git a/src/config/get.js b/src/config/get.js index 9470c1b6..4f2f01d2 100644 --- a/src/config/get.js +++ b/src/config/get.js @@ -2,7 +2,6 @@ 'use strict' const { getDescribe, getIt, expect } = require('../utils/mocha') -const isPlainObject = require('is-plain-object') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -27,7 +26,6 @@ module.exports = (createCommon, options) => { const config = await ipfs.config.get() expect(config).to.be.an('object') - expect(isPlainObject(config)).to.equal(true) }) it('should retrieve a value through a key', async () => { diff --git a/src/dag/get.js b/src/dag/get.js index dc8ee9ee..0de6d02a 100644 --- a/src/dag/get.js +++ b/src/dag/get.js @@ -1,12 +1,12 @@ /* eslint-env mocha */ 'use strict' -const pEachSeries = require('p-each-series') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') const Unixfs = require('ipfs-unixfs') const CID = require('cids') +const all = require('it-all') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -50,13 +50,8 @@ module.exports = (createCommon, options) => { cidCbor = await dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor)) - await pEachSeries([ - { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' }, - { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' } - ], (el) => ipfs.dag.put(el.node, { - format: el.multicodec, - hashAlg: el.hashAlg - })) + await ipfs.dag.put(nodePb, { format: 'dag-pb', hashAlg: 'sha2-256' }) + await ipfs.dag.put(nodeCbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }) }) it('should get a dag-pb node', async () => { @@ -163,7 +158,7 @@ module.exports = (createCommon, options) => { it('should get a node added as CIDv1 with a CIDv0', async () => { const input = Buffer.from(`TEST${Date.now()}`) - const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false }) + const res = await all(ipfs.add(input, { cidVersion: 1, rawLeaves: false })) const cidv1 = new CID(res[0].hash) expect(cidv1.version).to.equal(1) diff --git a/src/dag/tree.js b/src/dag/tree.js index fe982620..2838fcb8 100644 --- 
a/src/dag/tree.js +++ b/src/dag/tree.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const pEachSeries = require('p-each-series') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') @@ -40,13 +39,8 @@ module.exports = (createCommon, options) => { } cidCbor = await dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor)) - await pEachSeries([ - { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' }, - { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' } - ], (el) => ipfs.dag.put(el.node, { - format: el.multicodec, - hashAlg: el.hashAlg - })) + await ipfs.dag.put(nodePb, { format: 'dag-pb', hashAlg: 'sha2-256' }) + await ipfs.dag.put(nodeCbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }) }) it('should get tree with CID', async () => { diff --git a/src/dht/find-peer.js b/src/dht/find-peer.js index 5c6c3b4e..460f3b14 100644 --- a/src/dht/find-peer.js +++ b/src/dht/find-peer.js @@ -33,9 +33,9 @@ module.exports = (createCommon, options) => { it('should find other peers', async () => { const res = await nodeA.dht.findPeer(nodeB.peerId.id) - const id = res.id.toB58String() + const id = res.id.toString() const nodeAddresses = nodeB.peerId.addresses.map((addr) => addr.split('/ipfs/')[0]) // remove '/ipfs/' - const peerAddresses = res.multiaddrs.toArray().map((ma) => ma.toString().split('/ipfs/')[0]) + const peerAddresses = res.addrs.map(ma => ma.toString().split('/ipfs/')[0]) expect(id).to.be.eql(nodeB.peerId.id) expect(nodeAddresses).to.include(peerAddresses[0]) diff --git a/src/dht/find-provs.js b/src/dht/find-provs.js index 9e301666..12f7a569 100644 --- a/src/dht/find-provs.js +++ b/src/dht/find-provs.js @@ -1,17 +1,9 @@ /* eslint-env mocha */ 'use strict' -const multihashing = require('multihashing-async') -const CID = require('cids') const { getDescribe, getIt, expect } = require('../utils/mocha') - -async function fakeCid () { - const bytes = Buffer.from(`TEST${Date.now()}`) - - const mh = await multihashing(bytes, 'sha2-256') - - return new CID(0, 'dag-pb', mh) -} +const all = require('it-all') +const { fakeCid } = require('./utils') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -55,16 +47,16 @@ module.exports = (createCommon, options) => { providedCid = cids[0] await Promise.all([ - nodeB.dht.provide(providedCid), - nodeC.dht.provide(providedCid) + all(nodeB.dht.provide(providedCid)), + all(nodeC.dht.provide(providedCid)) ]) }) it('should be able to find providers', async function () { this.timeout(20 * 1000) - const provs = await nodeA.dht.findProvs(providedCid) - const providerIds = provs.map((p) => p.id.toB58String()) + const provs = await all(nodeA.dht.findProvs(providedCid, { numProviders: 2 })) + const providerIds = provs.map((p) => p.id.toString()) expect(providerIds).to.have.members([ nodeB.peerId.id, @@ -79,7 +71,7 @@ module.exports = (createCommon, options) => { const cidV0 = await fakeCid() - await expect(nodeA.dht.findProvs(cidV0, options)).to.be.rejected() + await expect(all(nodeA.dht.findProvs(cidV0, options))).to.be.rejected() }) }) } diff --git a/src/dht/get.js b/src/dht/get.js index 0fe6df9e..b8ebe822 100644 --- a/src/dht/get.js +++ b/src/dht/get.js @@ -36,9 +36,10 @@ module.exports = (createCommon, options) => { .and.be.an.instanceOf(Error) }) - it('should get a value after it was put on another node', async function () { - this.timeout(80 * 1000) - + // TODO: revisit this test - it puts an invalid key and so go-ipfs throws + // "invalid record keytype" - it 
needs to put a valid key and value for it to + // be a useful test. + it.skip('should get a value after it was put on another node', async () => { const key = Buffer.from(hat()) const value = Buffer.from(hat()) diff --git a/src/dht/provide.js b/src/dht/provide.js index c4c6fafd..ae2dea32 100644 --- a/src/dht/provide.js +++ b/src/dht/provide.js @@ -2,6 +2,7 @@ 'use strict' const CID = require('cids') +const all = require('it-all') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -31,46 +32,46 @@ module.exports = (createCommon, options) => { }) it('should provide local CID', async () => { - const res = await ipfs.add(Buffer.from('test')) + const res = await all(ipfs.add(Buffer.from('test'))) - await ipfs.dht.provide(new CID(res[0].hash)) + await all(ipfs.dht.provide(new CID(res[0].hash))) }) it('should not provide if block not found locally', () => { const cid = new CID('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ') - return expect(ipfs.dht.provide(cid)).to.eventually.be.rejected + return expect(all(ipfs.dht.provide(cid))).to.eventually.be.rejected .and.be.an.instanceOf(Error) .and.have.property('message') .that.include('not found locally') }) it('should allow multiple CIDs to be passed', async () => { - const res = await ipfs.add([ + const res = await all(ipfs.add([ { content: Buffer.from('t0') }, { content: Buffer.from('t1') } - ]) + ])) - await ipfs.dht.provide([ + await all(ipfs.dht.provide([ new CID(res[0].hash), new CID(res[1].hash) - ]) + ])) }) it('should provide a CIDv1', async () => { - const res = await ipfs.add(Buffer.from('test'), { cidVersion: 1 }) + const res = await all(ipfs.add(Buffer.from('test'), { cidVersion: 1 })) const cid = new CID(res[0].hash) - await ipfs.dht.provide(cid) + await all(ipfs.dht.provide(cid)) }) it('should error on non CID arg', () => { - return expect(ipfs.dht.provide({})).to.eventually.be.rejected() + return expect(all(ipfs.dht.provide({}))).to.eventually.be.rejected() }) it('should error on array containing non CID arg', () => { - return expect(ipfs.dht.provide([{}])).to.eventually.be.rejected() + return expect(all(ipfs.dht.provide([{}]))).to.eventually.be.rejected() }) }) } diff --git a/src/dht/query.js b/src/dht/query.js index 015147d4..4b99d232 100644 --- a/src/dht/query.js +++ b/src/dht/query.js @@ -1,8 +1,8 @@ /* eslint-env mocha */ 'use strict' -const pTimeout = require('p-timeout') const { getDescribe, getIt, expect } = require('../utils/mocha') +const all = require('it-all') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -36,9 +36,8 @@ module.exports = (createCommon, options) => { this.timeout(timeout) try { - const peers = await pTimeout(nodeA.dht.query(nodeB.peerId.id), timeout - 1000) - - expect(peers.map((p) => p.id.toB58String())).to.include(nodeB.peerId.id) + const peers = await all(nodeA.dht.query(nodeB.peerId.id, { timeout: timeout - 1000 })) + expect(peers.map(p => p.id.toString())).to.include(nodeB.peerId.id) } catch (err) { if (err.name === 'TimeoutError') { // This test is meh. DHT works best with >= 20 nodes. 
Therefore a diff --git a/src/dht/utils.js b/src/dht/utils.js new file mode 100644 index 00000000..9beceb78 --- /dev/null +++ b/src/dht/utils.js @@ -0,0 +1,10 @@ +'use strict' + +const multihashing = require('multihashing-async') +const CID = require('cids') + +exports.fakeCid = async (data) => { + const bytes = data || Buffer.from(`TEST${Date.now()}`) + const mh = await multihashing(bytes, 'sha2-256') + return new CID(0, 'dag-pb', mh) +} diff --git a/src/files-mfs/ls-pull-stream.js b/src/files-mfs/ls-pull-stream.js deleted file mode 100644 index 72f6563f..00000000 --- a/src/files-mfs/ls-pull-stream.js +++ /dev/null @@ -1,75 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const hat = require('hat') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const pullToPromise = require('pull-to-promise') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.files.lsPullStream', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - after(() => common.teardown()) - - it('should not ls not found file/dir, expect error', () => { - const testDir = `/test-${hat()}` - - return expect(pullToPromise.any(ipfs.files.lsPullStream(`${testDir}/404`))).to.eventually.be.rejected - .and.be.an.instanceOf(Error) - .and.have.property('message') - .that.include('does not exist') - }) - - it('should ls directory', async () => { - const testDir = `/test-${hat()}` - - await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }) - await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) - - const entries = await pullToPromise.any(ipfs.files.lsPullStream(testDir)) - - expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ - { name: 'b', type: 0, size: 0, hash: '' }, - { name: 'lv1', type: 0, size: 0, hash: '' } - ]) - }) - - it('should ls directory with long option', async () => { - const testDir = `/test-${hat()}` - - await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }) - await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) - - const entries = await pullToPromise.any(ipfs.files.lsPullStream(testDir, { long: true })) - - expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ - { - name: 'b', - type: 0, - size: 13, - hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T' - }, - { - name: 'lv1', - type: 1, - size: 0, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' - } - ]) - }) - }) -} diff --git a/src/files-mfs/ls-readable-stream.js b/src/files-mfs/ls-readable-stream.js deleted file mode 100644 index c32cfd70..00000000 --- a/src/files-mfs/ls-readable-stream.js +++ /dev/null @@ -1,79 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const hat = require('hat') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const getStream = require('get-stream') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.files.lsReadableStream', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() 
- }) - - after(() => common.teardown()) - - it('should not ls not found file/dir, expect error', () => { - const testDir = `/test-${hat()}` - const stream = ipfs.files.lsReadableStream(`${testDir}/404`) - - return expect(getStream(stream)).to.eventually.be.rejected - .and.be.an.instanceOf(Error) - .and.have.property('message') - .that.include('does not exist') - }) - - it('should ls directory', async () => { - const testDir = `/test-${hat()}` - - await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }) - await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) - - const stream = ipfs.files.lsReadableStream(testDir) - - const entries = await getStream.array(stream) - - expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ - { name: 'b', type: 0, size: 0, hash: '' }, - { name: 'lv1', type: 0, size: 0, hash: '' } - ]) - }) - - it('should ls directory with long option', async () => { - const testDir = `/test-${hat()}` - - await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }) - await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) - - const stream = ipfs.files.lsReadableStream(testDir, { long: true }) - const entries = await getStream.array(stream) - - expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ - { - name: 'b', - type: 0, - size: 13, - hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T' - }, - { - name: 'lv1', - type: 1, - size: 0, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' - } - ]) - }) - }) -} diff --git a/src/files-mfs/read-pull-stream.js b/src/files-mfs/read-pull-stream.js deleted file mode 100644 index 45b9af8d..00000000 --- a/src/files-mfs/read-pull-stream.js +++ /dev/null @@ -1,48 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const hat = require('hat') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const pullToPromise = require('pull-to-promise') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.files.readPullStream', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - after(() => common.teardown()) - - it('should not read not found, expect error', () => { - const testDir = `/test-${hat()}` - - return expect(pullToPromise.any(ipfs.files.readPullStream(`${testDir}/404`))).to.eventually.be.rejected - .and.be.an.instanceOf(Error) - .and.have.property('message') - .that.include('does not exist') - }) - - it('should read file', async () => { - const testDir = `/test-${hat()}` - - await ipfs.files.mkdir(testDir) - await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }) - - const bufs = await pullToPromise.any(ipfs.files.readPullStream(`${testDir}/a`)) - - expect(bufs).to.eql([Buffer.from('Hello, world!')]) - }) - }) -} diff --git a/src/files-mfs/read-readable-stream.js b/src/files-mfs/read-readable-stream.js deleted file mode 100644 index d8344ee1..00000000 --- a/src/files-mfs/read-readable-stream.js +++ /dev/null @@ -1,50 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const hat = require('hat') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const getStream = require('get-stream') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - 
const it = getIt(options) - const common = createCommon() - - describe('.files.readReadableStream', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - after(() => common.teardown()) - - it('should not read not found, expect error', () => { - const testDir = `/test-${hat()}` - const stream = ipfs.files.readReadableStream(`${testDir}/404`) - - return expect(getStream(stream)).to.eventually.be.rejected - .and.be.an.instanceOf(Error) - .and.have.property('message') - .that.include('does not exist') - }) - - it('should read file', async () => { - const testDir = `/test-${hat()}` - - await ipfs.files.mkdir(testDir) - await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }) - - const stream = ipfs.files.readReadableStream(`${testDir}/a`) - - const buf = await getStream(stream) - expect(buf).to.eql('Hello, world!') - }) - }) -} diff --git a/src/files-regular/add-from-fs.js b/src/files-regular/add-from-fs.js deleted file mode 100644 index dc5f892d..00000000 --- a/src/files-regular/add-from-fs.js +++ /dev/null @@ -1,82 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const path = require('path') -const expectTimeout = require('../utils/expect-timeout') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const fs = require('fs') -const os = require('os') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.addFromFs', function () { - this.timeout(40 * 1000) - - const fixturesPath = path.join(__dirname, '../../test/fixtures') - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - after(() => common.teardown()) - - it('should add a directory from the file system', async () => { - const filesPath = path.join(fixturesPath, 'test-folder') - - const result = await ipfs.addFromFs(filesPath, { recursive: true }) - expect(result.length).to.be.above(8) - }) - - it('should add a directory from the file system with an odd name', async () => { - const filesPath = path.join(fixturesPath, 'weird name folder [v0]') - - const result = await ipfs.addFromFs(filesPath, { recursive: true }) - expect(result.length).to.be.above(8) - }) - - it('should ignore a directory from the file system', async () => { - const filesPath = path.join(fixturesPath, 'test-folder') - - const result = await ipfs.addFromFs(filesPath, { recursive: true, ignore: ['files/**'] }) - expect(result.length).to.be.below(9) - }) - - it('should add a file from the file system', async () => { - const filePath = path.join(fixturesPath, 'testfile.txt') - - const result = await ipfs.addFromFs(filePath) - expect(result.length).to.equal(1) - expect(result[0].path).to.equal('testfile.txt') - }) - - it('should add a hidden file in a directory from the file system', async () => { - const filesPath = path.join(fixturesPath, 'hidden-files-folder') - - const result = await ipfs.addFromFs(filesPath, { recursive: true, hidden: true }) - expect(result.length).to.be.above(10) - expect(result.map(object => object.path)).to.include('hidden-files-folder/.hiddenTest.txt') - expect(result.map(object => 
object.hash)).to.include('QmdbAjVmLRdpFyi8FFvjPfhTGB2cVXvWLuK7Sbt38HXrtt') - }) - - it('should add a file from the file system with only-hash=true', async function () { - this.slow(10 * 1000) - - const content = String(Math.random() + Date.now()) - const filepath = path.join(os.tmpdir(), `${content}.txt`) - fs.writeFileSync(filepath, content) - - const out = await ipfs.addFromFs(filepath, { onlyHash: true }) - - fs.unlinkSync(filepath) - await expectTimeout(ipfs.object.get(out[0].hash), 4000) - }) - }) -} diff --git a/src/files-regular/add-from-stream.js b/src/files-regular/add-from-stream.js deleted file mode 100644 index 9df66ee3..00000000 --- a/src/files-regular/add-from-stream.js +++ /dev/null @@ -1,41 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { Readable } = require('readable-stream') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const { fixtures } = require('./utils') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.addFromStream', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - after(() => common.teardown()) - - it('should add from a stream', async () => { - const stream = new Readable({ - read () { - this.push(fixtures.bigFile.data) - this.push(null) - } - }) - - const result = await ipfs.addFromStream(stream) - expect(result.length).to.equal(1) - expect(result[0].hash).to.equal(fixtures.bigFile.cid) - }) - }) -} diff --git a/src/files-regular/add-from-url.js b/src/files-regular/add-from-url.js deleted file mode 100644 index d1e68baa..00000000 --- a/src/files-regular/add-from-url.js +++ /dev/null @@ -1,114 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const pTimeout = require('p-timeout') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const { echoUrl, redirectUrl } = require('../utils/echo-http-server') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.addFromURL', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - after(() => common.teardown()) - - it('should add from a HTTP URL', async () => { - const text = `TEST${Date.now()}` - const url = echoUrl(text) - - const [result, expectedResult] = await Promise.all([ - ipfs.addFromURL(url), - ipfs.add(Buffer.from(text)) - ]) - - expect(result.err).to.not.exist() - expect(expectedResult.err).to.not.exist() - expect(result[0].hash).to.equal(expectedResult[0].hash) - expect(result[0].size).to.equal(expectedResult[0].size) - expect(result[0].path).to.equal(text) - }) - - it('should add from a HTTP URL with redirection', async () => { - const text = `TEST${Date.now()}` - const url = echoUrl(text) + '?foo=bar#buzz' - - const [result, expectedResult] = await Promise.all([ - ipfs.addFromURL(redirectUrl(url)), - ipfs.add(Buffer.from(text)) - ]) - - expect(result.err).to.not.exist() - expect(expectedResult.err).to.not.exist() - expect(result[0].hash).to.equal(expectedResult[0].hash) - expect(result[0].size).to.equal(expectedResult[0].size) - 
expect(result[0].path).to.equal(text) - }) - - it('should add from a URL with only-hash=true', async function () { - const text = `TEST${Date.now()}` - const url = echoUrl(text) - - const res = await ipfs.addFromURL(url, { onlyHash: true }) - - try { - // A successful object.get for this size data took my laptop ~14ms - await pTimeout(ipfs.object.get(res[0].hash), 500) - } catch (err) { - if (err.name === 'TimeoutError') { - // This doesn't seem to be the right approach: - // the test shouldn't be passing when it gets a timeout error - // but this is pretty the same logic as the previous callback one - return Promise.resolve() - } - - throw err - } - }) - - it('should add from a URL with wrap-with-directory=true', async () => { - const filename = `TEST${Date.now()}.txt` // also acts as data - const url = echoUrl(filename) + '?foo=bar#buzz' - const addOpts = { wrapWithDirectory: true } - - const [result, expectedResult] = await Promise.all([ - ipfs.addFromURL(url, addOpts), - ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts) - ]) - expect(result.err).to.not.exist() - expect(expectedResult.err).to.not.exist() - expect(result).to.deep.equal(expectedResult) - }) - - it('should add from a URL with wrap-with-directory=true and URL-escaped file name', async () => { - const filename = `320px-Domažlice,_Jiráskova_43_(${Date.now()}).jpg` // also acts as data - const url = echoUrl(filename) + '?foo=bar#buzz' - const addOpts = { wrapWithDirectory: true } - - const [result, expectedResult] = await Promise.all([ - ipfs.addFromURL(url, addOpts), - ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts) - ]) - - expect(result.err).to.not.exist() - expect(expectedResult.err).to.not.exist() - expect(result).to.deep.equal(expectedResult) - }) - - it('should not add from an invalid url', () => { - return expect(ipfs.addFromURL('123http://invalid')).to.eventually.be.rejected() - }) - }) -} diff --git a/src/files-regular/add-pull-stream.js b/src/files-regular/add-pull-stream.js deleted file mode 100644 index 9606e249..00000000 --- a/src/files-regular/add-pull-stream.js +++ /dev/null @@ -1,67 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { fixtures } = require('./utils') -const pull = require('pull-stream') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const pullToPromise = require('pull-to-promise') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.addPullStream', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - after(() => common.teardown()) - - it('should add pull stream of valid files and dirs', async function () { - const content = (name) => ({ - path: `test-folder/${name}`, - content: fixtures.directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const files = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - const stream = ipfs.addPullStream() - - const filesAdded = await pullToPromise.any(pull(pull.values(files), stream)) - const testFolderIndex = filesAdded.length - 1 - - 
expect(filesAdded).to.have.nested.property(`[${testFolderIndex}].path`, 'test-folder') - expect(filesAdded).to.have.nested.property(`[${testFolderIndex}].hash`, fixtures.directory.cid) - }) - - it('should add with object chunks and pull stream content', async () => { - const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' - const data = [{ content: pull.values([Buffer.from('test')]) }] - const stream = ipfs.addPullStream() - - const res = await pullToPromise.any(pull(pull.values(data), stream)) - expect(res).to.have.property('length', 1) - expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) - }) - }) -} diff --git a/src/files-regular/add-readable-stream.js b/src/files-regular/add-readable-stream.js deleted file mode 100644 index 6b5d33ea..00000000 --- a/src/files-regular/add-readable-stream.js +++ /dev/null @@ -1,58 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { fixtures } = require('./utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const getStream = require('get-stream') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.addReadableStream', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - after(() => common.teardown()) - - it('should add readable stream of valid files and dirs', async function () { - const content = (name) => ({ - path: `test-folder/${name}`, - content: fixtures.directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const files = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - const stream = ipfs.addReadableStream() - - files.forEach((file) => stream.write(file)) - stream.end() - - const filesArray = await getStream.array(stream) - const file = filesArray[filesArray.length - 1] - - expect(file.hash).to.equal(fixtures.directory.cid) - }) - }) -} diff --git a/src/files-regular/add.js b/src/files-regular/add.js deleted file mode 100644 index 9faa3775..00000000 --- a/src/files-regular/add.js +++ /dev/null @@ -1,305 +0,0 @@ -/* eslint-env mocha, browser */ -'use strict' - -const { fixtures } = require('./utils') -const Readable = require('readable-stream').Readable -const pull = require('pull-stream') -const expectTimeout = require('../utils/expect-timeout') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const { supportsFileReader } = require('ipfs-utils/src/supports') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.add', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - after(() => common.teardown()) - - it('should add a File', async function () { - if (!supportsFileReader) return this.skip('skip in node') - - const filesAdded = await ipfs.add(new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' })) - 
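// For comparison, the async-iterable `add` that replaces this suite
// (src/add.js in this patch) yields results that are collected with it-all;
// a minimal sketch of the equivalent call, assuming the same fixtures:
//
//   const all = require('it-all')
//   const filesAdded = await all(ipfs.add(new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' })))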
expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') - }) - - it('should add a File as tuple', async function () { - if (!supportsFileReader) return this.skip('skip in node') - - const tuple = { - path: 'filename.txt', - content: new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' }) - } - - const filesAdded = await ipfs.add(tuple) - expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') - }) - - it('should add a File as array of tuple', async function () { - if (!supportsFileReader) return this.skip('skip in node') - - const tuple = { - path: 'filename.txt', - content: new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' }) - } - - const filesAdded = await ipfs.add([tuple]) - expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') - }) - - it('should add a Buffer', async () => { - const filesAdded = await ipfs.add(fixtures.smallFile.data) - expect(filesAdded).to.have.length(1) - - const file = filesAdded[0] - expect(file.hash).to.equal(fixtures.smallFile.cid) - expect(file.path).to.equal(fixtures.smallFile.cid) - // file.size counts the overhead by IPLD nodes and unixfs protobuf - expect(file.size).greaterThan(fixtures.smallFile.data.length) - }) - - it('should add a BIG Buffer', async () => { - const filesAdded = await ipfs.add(fixtures.bigFile.data) - expect(filesAdded).to.have.length(1) - - const file = filesAdded[0] - expect(file.hash).to.equal(fixtures.bigFile.cid) - expect(file.path).to.equal(fixtures.bigFile.cid) - // file.size counts the overhead by IPLD nodes and unixfs protobuf - expect(file.size).greaterThan(fixtures.bigFile.data.length) - }) - - it('should add a BIG Buffer with progress enabled', async () => { - let progCalled = false - let accumProgress = 0 - function handler (p) { - progCalled = true - accumProgress = p - } - - const filesAdded = await ipfs.add(fixtures.bigFile.data, { progress: handler }) - expect(filesAdded).to.have.length(1) - - const file = filesAdded[0] - expect(file.hash).to.equal(fixtures.bigFile.cid) - expect(file.path).to.equal(fixtures.bigFile.cid) - expect(progCalled).to.be.true() - expect(accumProgress).to.equal(fixtures.bigFile.data.length) - }) - - it('should add a Buffer as tuple', async () => { - const tuple = { path: 'testfile.txt', content: fixtures.smallFile.data } - - const filesAdded = await ipfs.add([tuple]) - expect(filesAdded).to.have.length(1) - - const file = filesAdded[0] - expect(file.hash).to.equal(fixtures.smallFile.cid) - expect(file.path).to.equal('testfile.txt') - }) - - it('should add a string', async () => { - const data = 'a string' - const expectedCid = 'QmQFRCwEpwQZ5aQMqCsCaFbdjNLLHoyZYDjr92v1F7HeqX' - - const filesAdded = await ipfs.add(data) - expect(filesAdded).to.be.length(1) - - const { path, size, hash } = filesAdded[0] - expect(path).to.equal(expectedCid) - expect(size).to.equal(16) - expect(hash).to.equal(expectedCid) - }) - - it('should add a TypedArray', async () => { - const data = Uint8Array.from([1, 3, 8]) - const expectedCid = 'QmRyUEkVCuHC8eKNNJS9BDM9jqorUvnQJK1DM81hfngFqd' - - const filesAdded = await ipfs.add(data) - expect(filesAdded).to.be.length(1) - - const { path, size, hash } = filesAdded[0] - expect(path).to.equal(expectedCid) - expect(size).to.equal(11) - expect(hash).to.equal(expectedCid) - }) - - it('should add readable stream', async () => { - const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' - - const rs = new Readable() - 
rs.push(Buffer.from('some data')) - rs.push(null) - - const filesAdded = await ipfs.add(rs) - expect(filesAdded).to.be.length(1) - - const file = filesAdded[0] - expect(file.path).to.equal(expectedCid) - expect(file.size).to.equal(17) - expect(file.hash).to.equal(expectedCid) - }) - - it('should add array of objects with readable stream content', async () => { - const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' - - const rs = new Readable() - rs.push(Buffer.from('some data')) - rs.push(null) - - const tuple = { path: 'data.txt', content: rs } - - const filesAdded = await ipfs.add([tuple]) - expect(filesAdded).to.be.length(1) - - const file = filesAdded[0] - expect(file.path).to.equal('data.txt') - expect(file.size).to.equal(17) - expect(file.hash).to.equal(expectedCid) - }) - - it('should add pull stream', async () => { - const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' - - const res = await ipfs.add(pull.values([Buffer.from('test')])) - expect(res).to.have.length(1) - expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) - }) - - it('should add array of objects with pull stream content', async () => { - const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' - - const res = await ipfs.add([{ content: pull.values([Buffer.from('test')]) }]) - expect(res).to.have.length(1) - expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) - }) - - it('should add a nested directory as array of tupples', async function () { - const content = (name) => ({ - path: `test-folder/${name}`, - content: fixtures.directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - const res = await ipfs.add(dirs) - - const root = res[res.length - 1] - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(fixtures.directory.cid) - }) - - it('should add a nested directory as array of tupples with progress', async function () { - const content = (name) => ({ - path: `test-folder/${name}`, - content: fixtures.directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - const total = dirs.reduce((i, entry) => { - return i + (entry.content ? 
entry.content.length : 0) - }, 0) - - let progCalled = false - let accumProgress = 0 - const handler = (p) => { - progCalled = true - accumProgress += p - } - - const filesAdded = await ipfs.add(dirs, { progress: handler }) - - const root = filesAdded[filesAdded.length - 1] - expect(progCalled).to.be.true() - expect(accumProgress).to.be.at.least(total) - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(fixtures.directory.cid) - }) - - it('should add files to a directory non sequentially', async function () { - const content = path => ({ - path: `test-dir/${path}`, - content: fixtures.directory.files[path.split('/').pop()] - }) - - const input = [ - content('a/pp.txt'), - content('a/holmes.txt'), - content('b/jungle.txt'), - content('a/alice.txt') - ] - - const filesAdded = await ipfs.add(input) - - const toPath = ({ path }) => path - const nonSeqDirFilePaths = input.map(toPath).filter(p => p.includes('/a/')) - const filesAddedPaths = filesAdded.map(toPath) - - expect(nonSeqDirFilePaths.every(p => filesAddedPaths.includes(p))).to.be.true() - }) - - it('should fail when passed invalid input', () => { - const nonValid = 138 - - return expect(ipfs.add(nonValid)).to.eventually.be.rejected() - }) - - it('should wrap content in a directory', async () => { - const data = { path: 'testfile.txt', content: fixtures.smallFile.data } - - const filesAdded = await ipfs.add(data, { wrapWithDirectory: true }) - expect(filesAdded).to.have.length(2) - - const file = filesAdded[0] - const wrapped = filesAdded[1] - expect(file.hash).to.equal(fixtures.smallFile.cid) - expect(file.path).to.equal('testfile.txt') - expect(wrapped.path).to.equal('') - }) - - it('should add with only-hash=true', async function () { - this.slow(10 * 1000) - const content = String(Math.random() + Date.now()) - - const files = await ipfs.add(Buffer.from(content), { onlyHash: true }) - expect(files).to.have.length(1) - - await expectTimeout(ipfs.object.get(files[0].hash), 4000) - }) - }) -} diff --git a/src/files-regular/cat-pull-stream.js b/src/files-regular/cat-pull-stream.js deleted file mode 100644 index 18842d13..00000000 --- a/src/files-regular/cat-pull-stream.js +++ /dev/null @@ -1,51 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { fixtures } = require('./utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const pullToPromise = require('pull-to-promise') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.catPullStream', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - before(() => ipfs.add(fixtures.smallFile.data)) - after(() => common.teardown()) - - it('should return a Pull Stream for a CID', async () => { - const stream = ipfs.catPullStream(fixtures.smallFile.cid) - - const data = Buffer.concat(await pullToPromise.any(stream)) - - expect(data.length).to.equal(fixtures.smallFile.data.length) - expect(data.toString()).to.deep.equal(fixtures.smallFile.data.toString()) - }) - - it('should export a chunk of a file in a Pull Stream', async () => { - const offset = 1 - const length = 3 - - const stream = ipfs.catPullStream(fixtures.smallFile.cid, { - offset, - length - }) - - const data = Buffer.concat(await pullToPromise.any(stream)) - 
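// For comparison, the async-iterable `cat` that replaces this suite accepts
// the same offset/length options; a minimal sketch using the it-concat
// helper required elsewhere in this patch:
//
//   const concat = require('it-concat')
//   const data = await concat(ipfs.cat(fixtures.smallFile.cid, { offset: 1, length: 3 }))
//   expect(data.toString()).to.equal('lz ')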
expect(data.toString()).to.equal('lz ') - }) - }) -} diff --git a/src/files-regular/cat-readable-stream.js b/src/files-regular/cat-readable-stream.js deleted file mode 100644 index 17778b66..00000000 --- a/src/files-regular/cat-readable-stream.js +++ /dev/null @@ -1,50 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { fixtures } = require('./utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const getStream = require('get-stream') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.catReadableStream', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - await ipfs.add(fixtures.bigFile.data) - await ipfs.add(fixtures.smallFile.data) - }) - - after(() => common.teardown()) - - it('should return a Readable Stream for a CID', async () => { - const stream = ipfs.catReadableStream(fixtures.bigFile.cid) - const data = await getStream.buffer(stream) - - expect(data).to.eql(fixtures.bigFile.data) - }) - - it('should export a chunk of a file in a Readable Stream', async () => { - const offset = 1 - const length = 3 - - const stream = ipfs.catReadableStream(fixtures.smallFile.cid, { - offset, - length - }) - - const data = await getStream.buffer(stream) - expect(data.toString()).to.equal('lz ') - }) - }) -} diff --git a/src/files-regular/get-pull-stream.js b/src/files-regular/get-pull-stream.js deleted file mode 100644 index ee50075e..00000000 --- a/src/files-regular/get-pull-stream.js +++ /dev/null @@ -1,39 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { fixtures } = require('./utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const pullToPromise = require('pull-to-promise') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.getPullStream', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - before(() => ipfs.add(fixtures.smallFile.data)) - - after(() => common.teardown()) - - it('should return a Pull Stream of Pull Streams', async () => { - const stream = ipfs.getPullStream(fixtures.smallFile.cid) - - const files = await pullToPromise.any(stream) - - const data = Buffer.concat(await pullToPromise.any(files[0].content)) - expect(data.toString()).to.contain('Plz add me!') - }) - }) -} diff --git a/src/files-regular/get-readable-stream.js b/src/files-regular/get-readable-stream.js deleted file mode 100644 index ed1837bb..00000000 --- a/src/files-regular/get-readable-stream.js +++ /dev/null @@ -1,50 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { fixtures } = require('./utils') -const through = require('through2') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const getStream = require('get-stream') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.getReadableStream', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to 
instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - await ipfs.add(fixtures.smallFile.data) - }) - - after(() => common.teardown()) - - it('should return a Readable Stream of Readable Streams', async () => { - const stream = ipfs.getReadableStream(fixtures.smallFile.cid) - - // I was not able to use 'get-stream' module here - // as it exceeds the timeout. I think it might be related - // to 'pump' module that get-stream uses - const files = await new Promise((resolve, reject) => { - const filesArr = [] - stream.pipe(through.obj(async (file, enc, next) => { - const content = await getStream.buffer(file.content) - filesArr.push({ path: file.path, content: content }) - next() - }, () => resolve(filesArr))) - }) - - expect(files).to.be.length(1) - expect(files[0].path).to.eql(fixtures.smallFile.cid) - expect(files[0].content.toString()).to.contain('Plz add me!') - }) - }) -} diff --git a/src/files-regular/index.js b/src/files-regular/index.js deleted file mode 100644 index d097ad9c..00000000 --- a/src/files-regular/index.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const { createSuite } = require('../utils/suite') - -const tests = { - add: require('./add'), - addReadableStream: require('./add-readable-stream'), - addPullStream: require('./add-pull-stream'), - addFromStream: require('./add-from-stream'), - addFromURL: require('./add-from-url'), - addFromFs: require('./add-from-fs'), - cat: require('./cat'), - catReadableStream: require('./cat-readable-stream'), - catPullStream: require('./cat-pull-stream'), - get: require('./get'), - getReadableStream: require('./get-readable-stream'), - getPullStream: require('./get-pull-stream'), - ls: require('./ls'), - lsReadableStream: require('./ls-readable-stream'), - lsPullStream: require('./ls-pull-stream'), - refs: require('./refs'), - refsReadableStream: require('./refs-readable-stream'), - refsPullStream: require('./refs-pull-stream'), - refsLocal: require('./refs-local'), - refsLocalPullStream: require('./refs-local-pull-stream'), - refsLocalReadableStream: require('./refs-local-readable-stream') -} - -module.exports = createSuite(tests) diff --git a/src/files-regular/ls-pull-stream.js b/src/files-regular/ls-pull-stream.js deleted file mode 100644 index 8b9ff173..00000000 --- a/src/files-regular/ls-pull-stream.js +++ /dev/null @@ -1,109 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { fixtures } = require('./utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const pullToPromise = require('pull-to-promise') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.lsPullStream', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - after(() => common.teardown()) - - it('should pull stream ls with a base58 encoded CID', async function () { - const content = (name) => ({ - path: `test-folder/${name}`, - content: fixtures.directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - 
emptyDir('files/empty') - ] - - const res = await ipfs.add(dirs) - - const root = res[res.length - 1] - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(fixtures.directory.cid) - - const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - const stream = ipfs.lsPullStream(cid) - - const files = await pullToPromise.any(stream) - expect(files).to.eql([ - { - depth: 1, - name: 'alice.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - size: 11685, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', - type: 'file' - }, - { - depth: 1, - name: 'empty-folder', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - size: 0, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - type: 'dir' - }, - { - depth: 1, - name: 'files', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - size: 0, - hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', - type: 'dir' - }, - { - depth: 1, - name: 'holmes.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - size: 581878, - hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', - type: 'file' - }, - { - depth: 1, - name: 'jungle.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - size: 2294, - hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', - type: 'file' - }, - { - depth: 1, - name: 'pp.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', - size: 4540, - hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', - type: 'file' - } - ]) - }) - }) -} diff --git a/src/files-regular/ls-readable-stream.js b/src/files-regular/ls-readable-stream.js deleted file mode 100644 index ccf27dc6..00000000 --- a/src/files-regular/ls-readable-stream.js +++ /dev/null @@ -1,109 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { fixtures } = require('./utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const getStream = require('get-stream') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.lsReadableStream', function () { - this.timeout(40 * 1000) - - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - after(() => common.teardown()) - - it('should readable stream ls with a base58 encoded CID', async function () { - const content = (name) => ({ - path: `test-folder/${name}`, - content: fixtures.directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - const res = await ipfs.add(dirs) - - const root = res[res.length - 1] - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(fixtures.directory.cid) - - const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - const stream = ipfs.lsReadableStream(cid) - - const files = await getStream.array(stream) - expect(files).to.eql([ - { - depth: 1, - name: 'alice.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - size: 11685, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', - type: 'file' - }, - { - depth: 1, - name: 'empty-folder', - path: 
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - size: 0, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - type: 'dir' - }, - { - depth: 1, - name: 'files', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - size: 0, - hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', - type: 'dir' - }, - { - depth: 1, - name: 'holmes.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - size: 581878, - hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', - type: 'file' - }, - { - depth: 1, - name: 'jungle.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - size: 2294, - hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', - type: 'file' - }, - { - depth: 1, - name: 'pp.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', - size: 4540, - hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', - type: 'file' - } - ]) - }) - }) -} diff --git a/src/files-regular/refs-local-pull-stream.js b/src/files-regular/refs-local-pull-stream.js deleted file mode 100644 index 88774247..00000000 --- a/src/files-regular/refs-local-pull-stream.js +++ /dev/null @@ -1,13 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const pullToPromise = require('pull-to-promise') - -module.exports = (createCommon, options) => { - const ipfsRefsLocal = (ipfs) => { - const stream = ipfs.refs.localPullStream() - - return pullToPromise.any(stream) - } - require('./refs-local-tests')(createCommon, '.refs.localPullStream', ipfsRefsLocal, options) -} diff --git a/src/files-regular/refs-local-readable-stream.js b/src/files-regular/refs-local-readable-stream.js deleted file mode 100644 index 236961d1..00000000 --- a/src/files-regular/refs-local-readable-stream.js +++ /dev/null @@ -1,12 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const getStream = require('get-stream') - -module.exports = (createCommon, options) => { - const ipfsRefsLocal = (ipfs) => { - const stream = ipfs.refs.localReadableStream() - return getStream.array(stream) - } - require('./refs-local-tests')(createCommon, '.refs.localReadableStream', ipfsRefsLocal, options) -} diff --git a/src/files-regular/refs-local.js b/src/files-regular/refs-local.js deleted file mode 100644 index 53737e5d..00000000 --- a/src/files-regular/refs-local.js +++ /dev/null @@ -1,7 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -module.exports = (createCommon, options) => { - const ipfsRefsLocal = (ipfs) => ipfs.refs.local() - require('./refs-local-tests')(createCommon, '.refs.local', ipfsRefsLocal, options) -} diff --git a/src/files-regular/refs-pull-stream.js b/src/files-regular/refs-pull-stream.js deleted file mode 100644 index 51885754..00000000 --- a/src/files-regular/refs-pull-stream.js +++ /dev/null @@ -1,13 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const pullToPromise = require('pull-to-promise') - -module.exports = (createCommon, options) => { - const ipfsRefs = (ipfs) => (path, params) => { - const stream = ipfs.refsPullStream(path, params) - - return pullToPromise.any(stream) - } - require('./refs-tests')(createCommon, '.refsPullStream', ipfsRefs, options) -} diff --git a/src/files-regular/refs-readable-stream.js b/src/files-regular/refs-readable-stream.js deleted file mode 100644 index b49072ea..00000000 --- a/src/files-regular/refs-readable-stream.js +++ /dev/null @@ -1,12 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const getStream = require('get-stream') - -module.exports = (createCommon, options) => { - const ipfsRefs = (ipfs) => (path, params) => { - const 
stream = ipfs.refsReadableStream(path, params) - return getStream.array(stream) - } - require('./refs-tests')(createCommon, '.refsReadableStream', ipfsRefs, options) -} diff --git a/src/files-regular/refs.js b/src/files-regular/refs.js deleted file mode 100644 index 41dd8c03..00000000 --- a/src/files-regular/refs.js +++ /dev/null @@ -1,7 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -module.exports = (createCommon, options) => { - const ipfsRefs = (ipfs) => ipfs.refs.bind(ipfs) - require('./refs-tests')(createCommon, '.refs', ipfsRefs, options) -} diff --git a/src/files-mfs/cp.js b/src/files/cp.js similarity index 85% rename from src/files-mfs/cp.js rename to src/files/cp.js index 400fe88a..01c2a0d9 100644 --- a/src/files-mfs/cp.js +++ b/src/files/cp.js @@ -2,7 +2,9 @@ 'use strict' const hat = require('hat') -const { fixtures } = require('../files-regular/utils') +const all = require('it-all') +const concat = require('it-concat') +const { fixtures } = require('../utils') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -55,11 +57,11 @@ module.exports = (createCommon, options) => { it('should copy from outside of mfs', async () => { const [{ hash - }] = await ipfs.add(fixtures.smallFile.data) + }] = await all(ipfs.add(fixtures.smallFile.data)) const testFilePath = `/${hat()}` await ipfs.files.cp(`/ipfs/${hash}`, testFilePath) - const testFileData = await ipfs.files.read(testFilePath) - expect(testFileData).to.eql(fixtures.smallFile.data) + const testFileData = await concat(ipfs.files.read(testFilePath)) + expect(testFileData.slice()).to.eql(fixtures.smallFile.data) }) }) } diff --git a/src/files-mfs/flush.js b/src/files/flush.js similarity index 100% rename from src/files-mfs/flush.js rename to src/files/flush.js diff --git a/src/files-mfs/index.js b/src/files/index.js similarity index 63% rename from src/files-mfs/index.js rename to src/files/index.js index 6f70a80d..d532707c 100644 --- a/src/files-mfs/index.js +++ b/src/files/index.js @@ -10,11 +10,7 @@ const tests = { rm: require('./rm'), stat: require('./stat'), read: require('./read'), - readReadableStream: require('./read-readable-stream'), - readPullStream: require('./read-pull-stream'), ls: require('./ls'), - lsReadableStream: require('./ls-readable-stream'), - lsPullStream: require('./ls-pull-stream'), flush: require('./flush') } diff --git a/src/files-mfs/ls.js b/src/files/ls.js similarity index 50% rename from src/files-mfs/ls.js rename to src/files/ls.js index 0cf36c66..627f0622 100644 --- a/src/files-mfs/ls.js +++ b/src/files/ls.js @@ -2,7 +2,9 @@ 'use strict' const hat = require('hat') -const { fixtures } = require('../files-regular/utils') +const all = require('it-all') +const CID = require('cids') +const { fixtures } = require('../utils') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -28,7 +30,7 @@ module.exports = (createCommon, options) => { it('should not ls not found file/dir, expect error', () => { const testDir = `/test-${hat()}` - return expect(ipfs.files.ls(`${testDir}/404`)).to.eventually.be.rejected() + return expect(all(ipfs.files.ls(`${testDir}/404`))).to.eventually.be.rejected() }) it('should ls directory', async () => { @@ -37,52 +39,42 @@ module.exports = (createCommon, options) => { await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }) await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) - const info = await ipfs.files.ls(testDir) + 
const info = await all(ipfs.files.ls(testDir)) - expect(info.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ - { name: 'b', type: 0, size: 0, hash: '' }, - { name: 'lv1', type: 0, size: 0, hash: '' } - ]) - }) - - it('should ls directory with long option', async () => { - const testDir = `/test-${hat()}` - - await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }) - await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) - - const info = await ipfs.files.ls(testDir, { long: true }) - - expect(info.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ - { - name: 'b', - type: 0, - size: 13, - hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T' - }, - { - name: 'lv1', - type: 1, - size: 0, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' - } - ]) + const expectedListing = [{ + name: 'b', + type: 0, + size: 13, + cid: new CID('QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T') + }, { + name: 'lv1', + type: 1, + size: 0, + cid: new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') + }] + + info.sort((a, b) => a.name.localeCompare(b.name)).forEach((f, i) => { + expect(f.name).to.equal(expectedListing[i].name) + expect(f.type).to.equal(expectedListing[i].type) + expect(f.size).to.equal(expectedListing[i].size) + expect(f.cid.toString()).to.equal(expectedListing[i].cid.toString()) + }) }) it('should ls from outside of mfs', async () => { const testFileName = hat() const [{ - hash - }] = await ipfs.add({ path: `/test/${testFileName}`, content: fixtures.smallFile.data }) - const listing = await ipfs.files.ls('/ipfs/' + hash) + cid + }] = await all(ipfs.add({ path: `/test/${testFileName}`, content: fixtures.smallFile.data })) + const listing = await ipfs.files.ls('/ipfs/' + cid) expect(listing).to.have.length(1) - expect(listing[0].name).to.equal(hash) + expect(listing[0].name).to.equal(cid.toString()) }) it('should list an empty directory', async () => { const testDir = `/test-${hat()}` await ipfs.files.mkdir(testDir) - const contents = await ipfs.files.ls(testDir) + const contents = await all(ipfs.files.ls(testDir)) expect(contents).to.be.an('array').and.to.be.empty() }) @@ -93,14 +85,10 @@ module.exports = (createCommon, options) => { await ipfs.files.write(filePath, Buffer.from('Hello world'), { create: true }) - const contents = await ipfs.files.ls(filePath) + const contents = await all(ipfs.files.ls(filePath)) - expect(contents).to.be.an('array').and.have.lengthOf(1).and.to.deep.equal([{ - hash: '', - name: fileName, - size: 0, - type: 0 - }]) + expect(contents).to.be.an('array').and.have.lengthOf(1) + expect(contents[0].name).to.equal(fileName) }) }) } diff --git a/src/files-mfs/mkdir.js b/src/files/mkdir.js similarity index 100% rename from src/files-mfs/mkdir.js rename to src/files/mkdir.js diff --git a/src/files-mfs/mv.js b/src/files/mv.js similarity index 100% rename from src/files-mfs/mv.js rename to src/files/mv.js diff --git a/src/files-mfs/read.js b/src/files/read.js similarity index 79% rename from src/files-mfs/read.js rename to src/files/read.js index 91c6e533..61a2fdfc 100644 --- a/src/files-mfs/read.js +++ b/src/files/read.js @@ -2,7 +2,8 @@ 'use strict' const hat = require('hat') -const { fixtures } = require('../files-regular/utils') +const concat = require('it-concat') +const { fixtures } = require('../utils') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -40,17 +41,17 @@ module.exports = (createCommon, options) => { await ipfs.files.mkdir(testDir) await 
ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }) - const buf = await ipfs.files.read(`${testDir}/a`) + const buf = await concat(ipfs.files.read(`${testDir}/a`)) - expect(buf).to.eql(Buffer.from('Hello, world!')) + expect(buf.slice()).to.eql(Buffer.from('Hello, world!')) }) it('should read from outside of mfs', async () => { const [{ hash }] = await ipfs.add(fixtures.smallFile.data) - const testFileData = await ipfs.files.read(`/ipfs/${hash}`) - expect(testFileData).to.eql(fixtures.smallFile.data) + const testFileData = await concat(ipfs.files.read(`/ipfs/${hash}`)) + expect(testFileData.slice()).to.eql(fixtures.smallFile.data) }) }) } diff --git a/src/files-mfs/rm.js b/src/files/rm.js similarity index 90% rename from src/files-mfs/rm.js rename to src/files/rm.js index 9d13ca06..a639b903 100644 --- a/src/files-mfs/rm.js +++ b/src/files/rm.js @@ -2,6 +2,7 @@ 'use strict' const hat = require('hat') +const all = require('it-all') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -38,7 +39,7 @@ module.exports = (createCommon, options) => { await ipfs.files.rm(`${testDir}/c`) - const contents = await ipfs.files.ls(testDir) + const contents = await all(ipfs.files.ls(testDir)) expect(contents).to.be.an('array').and.to.be.empty() }) @@ -49,7 +50,7 @@ module.exports = (createCommon, options) => { await ipfs.files.rm(`${testDir}/lv1/lv2`, { recursive: true }) - const lv1Contents = await ipfs.files.ls(`${testDir}/lv1`) + const lv1Contents = await all(ipfs.files.ls(`${testDir}/lv1`)) expect(lv1Contents).to.be.an('array').and.to.be.empty() }) }) diff --git a/src/files-mfs/stat.js b/src/files/stat.js similarity index 83% rename from src/files-mfs/stat.js rename to src/files/stat.js index b0619ea1..758c904e 100644 --- a/src/files-mfs/stat.js +++ b/src/files/stat.js @@ -2,7 +2,8 @@ 'use strict' const hat = require('hat') -const { fixtures } = require('../files-regular/utils') +const all = require('it-all') +const { fixtures } = require('../utils') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -16,13 +17,11 @@ module.exports = (createCommon, options) => { let ipfs before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step this.timeout(60 * 1000) - ipfs = await common.setup() }) - before(async () => { await ipfs.add(fixtures.smallFile.data) }) + + before(() => all(ipfs.add(fixtures.smallFile.data))) after(() => common.teardown()) @@ -39,12 +38,13 @@ module.exports = (createCommon, options) => { await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) const stat = await ipfs.files.stat(`${testDir}/b`) + stat.cid = stat.cid.toString() expect(stat).to.include({ type: 'file', blocks: 1, size: 13, - hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', + cid: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', cumulativeSize: 71, withLocality: false }) @@ -59,12 +59,13 @@ module.exports = (createCommon, options) => { await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }) const stat = await ipfs.files.stat(testDir) + stat.cid = stat.cid.toString() expect(stat).to.include({ type: 'directory', blocks: 1, size: 0, - hash: 'QmQGn7EvzJZRbhcwHrp4UeMeS56WsLmrey9JhfkymjzXQu', + cid: 'QmQGn7EvzJZRbhcwHrp4UeMeS56WsLmrey9JhfkymjzXQu', cumulativeSize: 118, withLocality: false }) @@ -75,12 +76,13 @@ module.exports = (createCommon, 
options) => { // TODO enable this test when this feature gets released on go-ipfs it.skip('should stat withLocal file', async function () { const stat = await ipfs.files.stat('/test/b', { withLocal: true }) + stat.cid = stat.cid.toString() expect(stat).to.eql({ type: 'file', blocks: 1, size: 13, - hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', + cid: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', cumulativeSize: 71, withLocality: true, local: true, @@ -91,12 +93,13 @@ module.exports = (createCommon, options) => { // TODO enable this test when this feature gets released on go-ipfs it.skip('should stat withLocal dir', async function () { const stat = await ipfs.files.stat('/test', { withLocal: true }) + stat.cid = stat.cid.toString() expect(stat).to.eql({ type: 'directory', blocks: 2, size: 0, - hash: 'QmVrkkNurBCeJvPRohW5JTvJG4AxGrFg7FnmsZZUS6nJto', + cid: 'QmVrkkNurBCeJvPRohW5JTvJG4AxGrFg7FnmsZZUS6nJto', cumulativeSize: 216, withLocality: true, local: true, @@ -106,12 +109,13 @@ module.exports = (createCommon, options) => { it('should stat outside of mfs', async () => { const stat = await ipfs.files.stat('/ipfs/' + fixtures.smallFile.cid) + stat.cid = stat.cid.toString() expect(stat).to.include({ type: 'file', blocks: 0, size: 12, - hash: fixtures.smallFile.cid, + cid: fixtures.smallFile.cid, cumulativeSize: 20, withLocality: false }) diff --git a/src/files-mfs/write.js b/src/files/write.js similarity index 100% rename from src/files-mfs/write.js rename to src/files/write.js diff --git a/src/files-regular/get.js b/src/get.js similarity index 68% rename from src/files-regular/get.js rename to src/get.js index 8a940f2b..dc48d24e 100644 --- a/src/files-regular/get.js +++ b/src/get.js @@ -2,9 +2,10 @@ 'use strict' const { fixtures } = require('./utils') -const bs58 = require('bs58') const CID = require('cids') -const { getDescribe, getIt, expect } = require('../utils/mocha') +const all = require('it-all') +const concat = require('it-concat') +const { getDescribe, getIt, expect } = require('./utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -22,62 +23,63 @@ module.exports = (createCommon, options) => { this.timeout(60 * 1000) ipfs = await common.setup() - await ipfs.add(fixtures.smallFile.data) - await ipfs.add(fixtures.bigFile.data) + await all(ipfs.add(fixtures.smallFile.data)) + await all(ipfs.add(fixtures.bigFile.data)) }) after(() => common.teardown()) it('should get with a base58 encoded multihash', async () => { - const files = await ipfs.get(fixtures.smallFile.cid) + const files = await all(ipfs.get(fixtures.smallFile.cid)) expect(files).to.be.length(1) expect(files[0].path).to.eql(fixtures.smallFile.cid) - expect(files[0].content.toString('utf8')).to.contain('Plz add me!') + expect((await concat(files[0].content)).toString()).to.contain('Plz add me!') }) it('should get with a Buffer multihash', async () => { - const cidBuf = Buffer.from(bs58.decode(fixtures.smallFile.cid)) + const cidBuf = new CID(fixtures.smallFile.cid).multihash - const files = await ipfs.get(cidBuf) + const files = await all(ipfs.get(cidBuf)) expect(files).to.be.length(1) expect(files[0].path).to.eql(fixtures.smallFile.cid) - expect(files[0].content.toString('utf8')).to.contain('Plz add me!') + expect((await concat(files[0].content)).toString()).to.contain('Plz add me!') }) it('should get a file added as CIDv0 with a CIDv1', async () => { const input = Buffer.from(`TEST${Date.now()}`) - const res = await ipfs.add(input, { cidVersion: 0 }) + const res = 
+const all = require('it-all')
+const concat = require('it-concat')
+const { getDescribe, getIt, expect } = require('./utils/mocha')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -22,62 +23,63 @@ module.exports = (createCommon, options) => {
     this.timeout(60 * 1000)
 
     ipfs = await common.setup()
-    await ipfs.add(fixtures.smallFile.data)
-    await ipfs.add(fixtures.bigFile.data)
+    await all(ipfs.add(fixtures.smallFile.data))
+    await all(ipfs.add(fixtures.bigFile.data))
   })
 
   after(() => common.teardown())
 
   it('should get with a base58 encoded multihash', async () => {
-    const files = await ipfs.get(fixtures.smallFile.cid)
+    const files = await all(ipfs.get(fixtures.smallFile.cid))
    expect(files).to.be.length(1)
 
    expect(files[0].path).to.eql(fixtures.smallFile.cid)
-    expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
+    expect((await concat(files[0].content)).toString()).to.contain('Plz add me!')
  })
 
  it('should get with a Buffer multihash', async () => {
-    const cidBuf = Buffer.from(bs58.decode(fixtures.smallFile.cid))
+    const cidBuf = new CID(fixtures.smallFile.cid).multihash
 
-    const files = await ipfs.get(cidBuf)
+    const files = await all(ipfs.get(cidBuf))
    expect(files).to.be.length(1)
 
    expect(files[0].path).to.eql(fixtures.smallFile.cid)
-    expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
+    expect((await concat(files[0].content)).toString()).to.contain('Plz add me!')
  })
 
  it('should get a file added as CIDv0 with a CIDv1', async () => {
    const input = Buffer.from(`TEST${Date.now()}`)
 
-    const res = await ipfs.add(input, { cidVersion: 0 })
+    const res = await all(ipfs.add(input, { cidVersion: 0 }))
 
    const cidv0 = new CID(res[0].hash)
    expect(cidv0.version).to.equal(0)
 
    const cidv1 = cidv0.toV1()
 
-    const output = await ipfs.get(cidv1)
-    expect(output[0].content).to.eql(input)
+    const output = await all(ipfs.get(cidv1))
+    expect((await concat(output[0].content)).slice()).to.eql(input)
  })
 
  it('should get a file added as CIDv1 with a CIDv0', async () => {
    const input = Buffer.from(`TEST${Date.now()}`)
 
-    const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false })
+    const res = await all(ipfs.add(input, { cidVersion: 1, rawLeaves: false }))
 
    const cidv1 = new CID(res[0].hash)
    expect(cidv1.version).to.equal(1)
 
    const cidv0 = cidv1.toV0()
 
-    const output = await ipfs.get(cidv0)
-    expect(output[0].content).to.eql(input)
+    const output = await all(ipfs.get(cidv0))
+    expect((await concat(output[0].content)).slice()).to.eql(input)
  })
 
  it('should get a BIG file', async () => {
-    const files = await ipfs.get(fixtures.bigFile.cid)
-    expect(files.length).to.equal(1)
-    expect(files[0].path).to.equal(fixtures.bigFile.cid)
-    expect(files[0].content.length).to.eql(fixtures.bigFile.data.length)
-    expect(files[0].content).to.eql(fixtures.bigFile.data)
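+    // consume the stream one file at a time so the big file's content is
+    // never fully buffered in memory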
+    for await (const file of ipfs.get(fixtures.bigFile.cid)) {
+      expect(file.path).to.equal(fixtures.bigFile.cid)
+      const content = await concat(file.content)
+      expect(content.length).to.eql(fixtures.bigFile.data.length)
+      expect(content.slice()).to.eql(fixtures.bigFile.data)
+    }
  })
 
  it('should get a directory', async function () {
@@ -99,13 +101,18 @@ module.exports = (createCommon, options) => {
        emptyDir('files/empty')
      ]
 
-      const res = await ipfs.add(dirs)
+      const res = await all(ipfs.add(dirs))
 
      const root = res[res.length - 1]
      expect(root.path).to.equal('test-folder')
      expect(root.hash).to.equal(fixtures.directory.cid)
 
-      let files = await ipfs.get(fixtures.directory.cid)
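+      // flatten the stream into [{ path, content }] pairs, stringifying file
+      // content (directories yield null) so the comparisons below stay simple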
+      let files = await all((async function * () {
+        for await (let { path, content } of ipfs.get(fixtures.directory.cid)) {
+          content = content ? (await concat(content)).toString() : null
+          yield { path, content }
+        }
+      })())
 
      files = files.sort((a, b) => {
        if (a.path > b.path) return 1
@@ -129,13 +136,7 @@ module.exports = (createCommon, options) => {
      ])
 
      // Check contents
-      const contents = files.map((file) => {
-        return file.content
-          ? file.content.toString()
-          : null
-      })
-
-      expect(contents).to.include.members([
+      expect(files.map(f => f.content)).to.include.members([
        fixtures.directory.files['alice.txt'].toString(),
        fixtures.directory.files['files/hello.txt'].toString(),
        fixtures.directory.files['files/ipfs.txt'].toString(),
@@ -151,13 +152,13 @@ module.exports = (createCommon, options) => {
      content: fixtures.smallFile.data
    }
 
-    const filesAdded = await ipfs.add(file)
+    const filesAdded = await all(ipfs.add(file))
 
    filesAdded.forEach(async (file) => {
      if (file.path === 'a') {
-        const files = await ipfs.get(`/ipfs/${file.hash}/testfile.txt`)
+        const files = await all(ipfs.get(`/ipfs/${file.hash}/testfile.txt`))
        expect(files).to.be.length(1)
-        expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
+        expect((await concat(files[0].content)).toString()).to.contain('Plz add me!')
      }
    })
  })
@@ -168,13 +169,13 @@ module.exports = (createCommon, options) => {
      content: fixtures.smallFile.data
    }
 
-    const filesAdded = await ipfs.add([file])
+    const filesAdded = await all(ipfs.add([file]))
 
    filesAdded.forEach(async (file) => {
      if (file.path === 'a') {
-        const files = await ipfs.get(`/ipfs/${file.hash}/testfile.txt`)
+        const files = await all(ipfs.get(`/ipfs/${file.hash}/testfile.txt`))
        expect(files).to.be.length(1)
-        expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
+        expect((await concat(files[0].content)).toString()).to.contain('Plz add me!')
      }
    })
  })
@@ -182,7 +183,7 @@ module.exports = (createCommon, options) => {
  it('should error on invalid key', async () => {
    const invalidCid = 'somethingNotMultihash'
 
-    const err = await expect(ipfs.get(invalidCid)).to.be.rejected()
+    const err = await expect(all(ipfs.get(invalidCid))).to.be.rejected()
 
    switch (err.toString()) {
      case 'Error: invalid ipfs ref path':
diff --git a/src/index.js b/src/index.js
index 2c353fae..3e8bd774 100644
--- a/src/index.js
+++ b/src/index.js
@@ -1,7 +1,17 @@
 'use strict'
 
-exports.filesRegular = require('./files-regular')
-exports.filesMFS = require('./files-mfs')
+const { createSuite } = require('./utils/suite')
+
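+// the former files-regular commands now live at the root of the API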
+exports.root = createSuite({
+  add: require('./add'),
+  cat: require('./cat'),
+  get: require('./get'),
+  ls: require('./ls'),
+  refs: require('./refs'),
+  refsLocal: require('./refs-local')
+})
+
+exports.files = require('./files')
 exports.bitswap = require('./bitswap')
 exports.block = require('./block')
diff --git a/src/key/list.js b/src/key/list.js
index b8b1af78..aa718069 100644
--- a/src/key/list.js
+++ b/src/key/list.js
@@ -1,7 +1,6 @@
 /* eslint-env mocha */
 'use strict'
 
-const pTimes = require('p-times')
 const hat = require('hat')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
@@ -26,7 +25,7 @@ module.exports = (createCommon, options) => {
    it('should list all the keys', async function () {
      this.timeout(60 * 1000)
 
-      const keys = await pTimes(3, () => ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }), { concurrency: 1 })
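+      // generate three keys; note Promise.all runs the generations
+      // concurrently, where p-times with concurrency 1 ran them one at a time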
+      const keys = await Promise.all([1, 2, 3].map(() => ipfs.key.gen(hat(), { type: 'rsa', size: 2048 })))
 
      const res = await ipfs.key.list()
      expect(res).to.exist()
diff --git a/src/files-regular/ls.js b/src/ls.js
similarity index 64%
rename from src/files-regular/ls.js
rename to src/ls.js
index 6e32e6bc..86394804 100644
--- a/src/files-regular/ls.js
+++ b/src/ls.js
@@ -2,8 +2,9 @@
 'use strict'
 
 const { fixtures } = require('./utils')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
+const { getDescribe, getIt, expect } = require('./utils/mocha')
 const CID = require('cids')
+const all = require('it-all')
 
 const randomName = prefix => `${prefix}${Math.round(Math.random() * 1000)}`
@@ -46,22 +47,22 @@ module.exports = (createCommon, options) => {
        emptyDir('files/empty')
      ]
 
-      const res = await ipfs.add(dirs)
+      const res = await all(ipfs.add(dirs))
 
      const root = res[res.length - 1]
      expect(root.path).to.equal('test-folder')
-      expect(root.hash).to.equal(fixtures.directory.cid)
+      expect(root.cid.toString()).to.equal(fixtures.directory.cid)
 
      const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'
-      const files = await ipfs.ls(cid)
+      const files = await all(ipfs.ls(cid))
 
-      expect(files).to.eql([
+      const expectedFiles = [
        {
          depth: 1,
          name: 'alice.txt',
          path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt',
          size: 11685,
-          hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi',
+          cid: new CID('QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi'),
          type: 'file'
        },
        {
@@ -69,7 +70,7 @@ module.exports = (createCommon, options) => {
          name: 'empty-folder',
          path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder',
          size: 0,
-          hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn',
+          cid: new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'),
          type: 'dir'
        },
        {
@@ -77,7 +78,7 @@ module.exports = (createCommon, options) => {
          name: 'files',
          path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files',
          size: 0,
-          hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74',
+          cid: new CID('QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74'),
          type: 'dir'
        },
        {
@@ -85,7 +86,7 @@ module.exports = (createCommon, options) => {
          name: 'holmes.txt',
          path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt',
          size: 581878,
-          hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr',
+          cid: new CID('QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr'),
          type: 'file'
        },
        {
@@ -93,7 +94,7 @@ module.exports = (createCommon, options) => {
          name: 'jungle.txt',
          path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt',
          size: 2294,
-          hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9',
+          cid: new CID('QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9'),
          type: 'file'
        },
        {
@@ -101,10 +102,21 @@ module.exports = (createCommon, options) => {
          name: 'pp.txt',
          path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt',
          size: 4540,
-          hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn',
+          cid: new CID('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn'),
          type: 'file'
        }
-      ])
+      ]
+
+      expect(files).to.have.length(expectedFiles.length)
+
+      expectedFiles.forEach((f, i) => {
+        expect(f.depth).to.equal(files[i].depth)
+        expect(f.name).to.equal(files[i].name)
+        expect(f.path).to.equal(files[i].path)
+        expect(f.size).to.equal(files[i].size)
+        expect(f.cid.toString()).to.equal(files[i].cid.toString())
+        expect(f.type).to.equal(files[i].type)
+      })
    })
 
    it('should ls files added as CIDv0 with a CIDv1', async () => {
@@ -115,18 +127,18 @@ module.exports = (createCommon, options) => {
        { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) }
      ]
 
-      const res = await ipfs.add(input, { cidVersion: 0 })
+      const res = await all(ipfs.add(input, { cidVersion: 0 }))
 
-      const cidv0 = new CID(res[res.length - 1].hash)
+      const cidv0 = res[res.length - 1].cid
      expect(cidv0.version).to.equal(0)
 
      const cidv1 = cidv0.toV1()
 
-      const output = await ipfs.ls(cidv1)
+      const output = await all(ipfs.ls(cidv1))
      expect(output.length).to.equal(input.length)
 
-      output.forEach(({ hash }) => {
-        expect(res.find(file => file.hash === hash)).to.exist()
+      output.forEach(({ cid }) => {
+        expect(res.find(file => file.cid.toString() === cid.toString())).to.exist()
      })
    })
 
    it('should ls files added as CIDv1 with a CIDv0', async () => {
@@ -138,27 +150,27 @@ module.exports = (createCommon, options) => {
        { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) }
      ]
 
-      const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false })
+      const res = await all(ipfs.add(input, { cidVersion: 1, rawLeaves: false }))
 
-      const cidv1 = new CID(res[res.length - 1].hash)
+      const cidv1 = res[res.length - 1].cid
      expect(cidv1.version).to.equal(1)
 
      const cidv0 = cidv1.toV0()
 
-      const output = await ipfs.ls(cidv0)
+      const output = await all(ipfs.ls(cidv0))
      expect(output.length).to.equal(input.length)
 
-      output.forEach(({ hash }) => {
-        expect(res.find(file => file.hash === hash)).to.exist()
+      output.forEach(({ cid }) => {
+        expect(res.find(file => file.cid.toString() === cid.toString())).to.exist()
      })
    })
 
    it('should correctly handle a non existing hash', () => {
-      return expect(ipfs.ls('surelynotavalidhashheh?')).to.eventually.be.rejected()
+      return expect(all(ipfs.ls('surelynotavalidhashheh?'))).to.eventually.be.rejected()
    })
 
    it('should correctly handle a non existing path', () => {
-      return expect(ipfs.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW/folder_that_isnt_there')).to.eventually.be.rejected()
+      return expect(all(ipfs.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW/folder_that_isnt_there'))).to.eventually.be.rejected()
    })
 
    it('should ls files by path', async () => {
@@ -169,12 +181,12 @@ module.exports = (createCommon, options) => {
        { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) }
      ]
 
-      const res = await ipfs.add(input)
-      const output = await ipfs.ls(`/ipfs/${res[res.length - 1].hash}`)
+      const res = await all(ipfs.add(input))
+      const output = await all(ipfs.ls(`/ipfs/${res[res.length - 1].cid}`))
      expect(output.length).to.equal(input.length)
 
-      output.forEach(({ hash }) => {
-        expect(res.find(file => file.hash === hash)).to.exist()
+      output.forEach(({ cid }) => {
+        expect(res.find(file => file.cid.toString() === cid.toString())).to.exist()
      })
    })
  })
diff --git a/src/miscellaneous/resolve.js b/src/miscellaneous/resolve.js
index 6eb65819..d44b3ed4 100644
--- a/src/miscellaneous/resolve.js
+++ b/src/miscellaneous/resolve.js
@@ -6,6 +6,7 @@
 const loadFixture = require('aegir/fixtures')
 const hat = require('hat')
 const multibase = require('multibase')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const all = require('it-all')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -25,13 +26,13 @@ module.exports = (createCommon, options) => {
    it('should resolve an IPFS hash', async () => {
      const content = loadFixture('test/fixtures/testfile.txt', 'interface-ipfs-core')
 
-      const [{ hash }] = await ipfs.add(content)
+      const [{ hash }] = await all(ipfs.add(content))
      const path = await ipfs.resolve(`/ipfs/${hash}`)
      expect(path).to.equal(`/ipfs/${hash}`)
    })
 
    it('should resolve an IPFS hash and return a base64url encoded CID in path', async () => {
-      const [{ hash }] = await ipfs.add(Buffer.from('base64url encoded'))
+      const [{ hash }] = await all(ipfs.add(Buffer.from('base64url encoded')))
      const path = await ipfs.resolve(`/ipfs/${hash}`, { cidBase: 'base64url' })
      const [,, cid] = path.split('/')
@@ -42,7 +43,7 @@ module.exports = (createCommon, options) => {
    it('should resolve an IPFS path link', async () => {
      const path = 'path/to/testfile.txt'
      const content = loadFixture('test/fixtures/testfile.txt', 'interface-ipfs-core')
 
-      const [{ hash: fileHash }, , , { hash: rootHash }] = await ipfs.add([{ path, content }], { wrapWithDirectory: true })
+      const [{ hash: fileHash }, , , { hash: rootHash }] = await all(ipfs.add([{ path, content }], { wrapWithDirectory: true }))
 
      const resolve = await ipfs.resolve(`/ipfs/${rootHash}/${path}`)
      expect(resolve).to.equal(`/ipfs/${fileHash}`)
@@ -87,7 +88,7 @@ module.exports = (createCommon, options) => {
        await ipfs.swarm.connect(node.peerId.addresses.find((a) => a.includes('127.0.0.1')))
      }
 
-      const [{ path }] = await ipfs.add(Buffer.from('should resolve a record recursive === true'))
+      const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record recursive === true')))
      const { id: keyId } = await ipfs.key.gen('key-name', { type: 'rsa', size: 2048 })
 
      await ipfs.name.publish(path, { allowOffline: true })
diff --git a/src/name-pubsub/cancel.js b/src/name-pubsub/cancel.js
index 2a887ca1..fb592036 100644
--- a/src/name-pubsub/cancel.js
+++ b/src/name-pubsub/cancel.js
@@ -2,8 +2,6 @@
 'use strict'
 
 const PeerId = require('peer-id')
-const { promisify } = require('es6-promisify')
-
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 
 module.exports = (createCommon, options) => {
@@ -38,7 +36,7 @@ module.exports = (createCommon, options) => {
    it('should cancel a subscription correctly returning true', async function () {
      this.timeout(300 * 1000)
 
-      const peerId = await promisify(PeerId.create.bind(PeerId))({ bits: 512 })
+      const peerId = await PeerId.create({ bits: 512 })
      const id = peerId.toB58String()
      const ipnsPath = `/ipns/${id}`
diff --git a/src/name/publish.js b/src/name/publish.js
index 7c4733db..5587e216 100644
--- a/src/name/publish.js
+++ b/src/name/publish.js
@@ -5,6 +5,8 @@
 const hat = require('hat')
 const { fixture } = require('./utils')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const all = require('it-all')
+const last = require('it-last')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -17,13 +19,10 @@ module.exports = (createCommon, options) => {
    let nodeId
 
    before(async function () {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
      this.timeout(60 * 1000)
-
      ipfs = await common.setup()
      nodeId = ipfs.peerId.id
-      await ipfs.add(fixture.data, { pin: false })
+      await all(ipfs.add(fixture.data, { pin: false }))
    })
 
    after(() => common.teardown())
@@ -40,10 +39,9 @@ module.exports = (createCommon, options) => {
    })
 
    it('should publish correctly with the lifetime option and resolve', async () => {
-      const [{ path }] = await ipfs.add(Buffer.from('should publish correctly with the lifetime option and resolve'))
+      const [{ path }] = await all(ipfs.add(Buffer.from('should publish correctly with the lifetime option and resolve')))
      await ipfs.name.publish(path, { allowOffline: true, resolve: false, lifetime: '2h' })
-
-      return expect(await ipfs.name.resolve(`/ipns/${nodeId}`)).to.eq(`/ipfs/${path}`)
+      expect(await last(ipfs.name.resolve(`/ipns/${nodeId}`))).to.eq(`/ipfs/${path}`)
    })
 
    it('should publish correctly when the file was not added but resolve is disabled', async function () {
@@ -78,8 +76,8 @@ module.exports = (createCommon, options) => {
      }
 
      const key = await ipfs.key.gen(keyName, { type: 'rsa', size: 2048 })
-
      const res = await ipfs.name.publish(value, options)
+
      expect(res).to.exist()
      expect(res.name).to.equal(key.id)
      expect(res.value).to.equal(`/ipfs/${value}`)
diff --git a/src/name/resolve.js b/src/name/resolve.js
index 94ff8249..5216dc2f 100644
--- a/src/name/resolve.js
+++ b/src/name/resolve.js
@@ -4,6 +4,8 @@
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 const delay = require('delay')
 const CID = require('cids')
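+// name.resolve now returns an async iterable; it-last picks out its final value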
+const all = require('it-all')
+const last = require('it-last')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -24,84 +26,80 @@ module.exports = (createCommon, options) => {
    it('should resolve a record default options', async function () {
      this.timeout(20 * 1000)
 
-      const [{ path }] = await ipfs.add(Buffer.from('should resolve a record default options'))
-
+      const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record default options')))
      const { id: keyId } = await ipfs.key.gen('key-name-default', { type: 'rsa', size: 2048 })
 
      await ipfs.name.publish(path, { allowOffline: true })
      await ipfs.name.publish(`/ipns/${nodeId}`, { allowOffline: true, key: 'key-name-default' })
 
-      return expect(await ipfs.name.resolve(`/ipns/${keyId}`))
+      expect(await last(ipfs.name.resolve(`/ipns/${keyId}`)))
        .to.eq(`/ipfs/${path}`)
    })
 
    it('should resolve a record from peerid as cidv1 in base32', async function () {
      this.timeout(20 * 1000)
 
-      const [{ path }] = await ipfs.add(Buffer.from('should resolve a record from cidv1b32'))
+      const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record from cidv1b32')))
      const { id: peerId } = await ipfs.id()
      await ipfs.name.publish(path, { allowOffline: true })
 
      // Represent Peer ID as CIDv1 Base32
      // https://github.com/libp2p/specs/blob/master/RFC/0001-text-peerid-cid.md
      const keyCid = new CID(peerId).toV1().toString('base32')
-      const resolvedPath = await ipfs.name.resolve(`/ipns/${keyCid}`)
+      const resolvedPath = await last(ipfs.name.resolve(`/ipns/${keyCid}`))
 
-      return expect(resolvedPath).to.equal(`/ipfs/${path}`)
+      expect(resolvedPath).to.equal(`/ipfs/${path}`)
    })
 
    it('should resolve a record recursive === false', async () => {
-      const [{ path }] = await ipfs.add(Buffer.from('should resolve a record recursive === false'))
+      const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record recursive === false')))
      await ipfs.name.publish(path, { allowOffline: true })
 
-      return expect(await ipfs.name.resolve(`/ipns/${nodeId}`, { recursive: false }))
+      expect(await last(ipfs.name.resolve(`/ipns/${nodeId}`, { recursive: false })))
        .to.eq(`/ipfs/${path}`)
    })
 
    it('should resolve a record recursive === true', async function () {
      this.timeout(20 * 1000)
 
-      const [{ path }] = await ipfs.add(Buffer.from('should resolve a record recursive === true'))
-
+      const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record recursive === true')))
      const { id: keyId } = await ipfs.key.gen('key-name', { type: 'rsa', size: 2048 })
 
      await ipfs.name.publish(path, { allowOffline: true })
      await ipfs.name.publish(`/ipns/${nodeId}`, { allowOffline: true, key: 'key-name' })
 
-      return expect(await ipfs.name.resolve(`/ipns/${keyId}`, { recursive: true }))
+      expect(await last(ipfs.name.resolve(`/ipns/${keyId}`, { recursive: true })))
        .to.eq(`/ipfs/${path}`)
    })
 
    it('should resolve a record default options with remainder', async function () {
      this.timeout(20 * 1000)
 
-      const [{ path }] = await ipfs.add(Buffer.from('should resolve a record default options with remainder'))
-
+      const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record default options with remainder')))
      const { id: keyId } = await ipfs.key.gen('key-name-remainder-default', { type: 'rsa', size: 2048 })
 
      await ipfs.name.publish(path, { allowOffline: true })
      await ipfs.name.publish(`/ipns/${nodeId}`, { allowOffline: true, key: 'key-name-remainder-default' })
 
-      return expect(await ipfs.name.resolve(`/ipns/${keyId}/remainder/file.txt`))
+      expect(await last(ipfs.name.resolve(`/ipns/${keyId}/remainder/file.txt`)))
        .to.eq(`/ipfs/${path}/remainder/file.txt`)
    })
 
    it('should resolve a record recursive === false with remainder', async () => {
-      const [{ path }] = await ipfs.add(Buffer.from('should resolve a record recursive = false with remainder'))
+      const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record recursive = false with remainder')))
      await ipfs.name.publish(path, { allowOffline: true })
 
-      return expect(await ipfs.name.resolve(`/ipns/${nodeId}/remainder/file.txt`, { recursive: false }))
+      expect(await last(ipfs.name.resolve(`/ipns/${nodeId}/remainder/file.txt`, { recursive: false })))
        .to.eq(`/ipfs/${path}/remainder/file.txt`)
    })
 
    it('should resolve a record recursive === true with remainder', async function () {
      this.timeout(20 * 1000)
 
-      const [{ path }] = await ipfs.add(Buffer.from('should resolve a record recursive = true with remainder'))
-
+      const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record recursive = true with remainder')))
      const { id: keyId } = await ipfs.key.gen('key-name-remainder', { type: 'rsa', size: 2048 })
 
      await ipfs.name.publish(path, { allowOffline: true })
      await ipfs.name.publish(`/ipns/${nodeId}`, { allowOffline: true, key: 'key-name-remainder' })
 
-      return expect(await ipfs.name.resolve(`/ipns/${keyId}/remainder/file.txt`, { recursive: true }))
+      expect(await last(ipfs.name.resolve(`/ipns/${keyId}/remainder/file.txt`, { recursive: true })))
        .to.eq(`/ipfs/${path}/remainder/file.txt`)
    })
 
@@ -113,13 +111,13 @@ module.exports = (createCommon, options) => {
      }
 
      // we add new data instead of re-using fixture to make sure lifetime handling doesn't break
-      const [{ path }] = await ipfs.add(Buffer.from('should not get the entry if its validity time expired'))
+      const [{ path }] = await all(ipfs.add(Buffer.from('should not get the entry if its validity time expired')))
      await ipfs.name.publish(path, publishOptions)
      await delay(500)
      // go only has 1 possible error https://github.com/ipfs/go-ipfs/blob/master/namesys/interface.go#L51
      // so here we just expect an Error and don't match the error type to expiration
      try {
-        await ipfs.name.resolve(nodeId)
+        await last(ipfs.name.resolve(nodeId))
      } catch (error) {
        expect(error).to.exist()
      }
@@ -138,45 +136,45 @@ module.exports = (createCommon, options) => {
    after(() => common.teardown())
 
    it('should resolve /ipns/ipfs.io', async () => {
-      return expect(await ipfs.name.resolve('/ipns/ipfs.io'))
+      expect(await last(ipfs.name.resolve('/ipns/ipfs.io')))
        .to.match(/\/ipfs\/.+$/)
    })
 
    it('should resolve /ipns/ipfs.io recursive === false', async () => {
-      return expect(await ipfs.name.resolve('/ipns/ipfs.io', { recursive: false }))
+      expect(await last(ipfs.name.resolve('/ipns/ipfs.io', { recursive: false })))
        .to.match(/\/ipns\/.+$/)
    })
 
    it('should resolve /ipns/ipfs.io recursive === true', async () => {
-      return expect(await ipfs.name.resolve('/ipns/ipfs.io', { recursive: true }))
+      expect(await last(ipfs.name.resolve('/ipns/ipfs.io', { recursive: true })))
        .to.match(/\/ipfs\/.+$/)
    })
 
    it('should resolve /ipns/ipfs.io with remainder', async () => {
-      return expect(await ipfs.name.resolve('/ipns/ipfs.io/images/ipfs-logo.svg'))
+      expect(await last(ipfs.name.resolve('/ipns/ipfs.io/images/ipfs-logo.svg')))
        .to.match(/\/ipfs\/.+\/images\/ipfs-logo.svg$/)
    })
 
    it('should resolve /ipns/ipfs.io with remainder recursive === false', async () => {
-      return expect(await ipfs.name.resolve('/ipns/ipfs.io/images/ipfs-logo.svg', { recursive: false }))
+      expect(await last(ipfs.name.resolve('/ipns/ipfs.io/images/ipfs-logo.svg', { recursive: false })))
        .to.match(/\/ipns\/.+\/images\/ipfs-logo.svg$/)
    })
 
    it('should resolve /ipns/ipfs.io with remainder recursive === true', async () => {
-      return expect(await ipfs.name.resolve('/ipns/ipfs.io/images/ipfs-logo.svg', { recursive: true }))
+      expect(await last(ipfs.name.resolve('/ipns/ipfs.io/images/ipfs-logo.svg', { recursive: true })))
        .to.match(/\/ipfs\/.+\/images\/ipfs-logo.svg$/)
    })
 
    it('should fail to resolve /ipns/ipfs.a', async () => {
      try {
-        await ipfs.name.resolve('ipfs.a')
+        await last(ipfs.name.resolve('ipfs.a'))
      } catch (error) {
        expect(error).to.exist()
      }
    })
 
    it('should resolve ipns path with hamt-shard recursive === true', async () => {
-      return expect(await ipfs.name.resolve('/ipns/tr.wikipedia-on-ipfs.org/wiki/Anasayfa.html', { recursive: true }))
+      expect(await last(ipfs.name.resolve('/ipns/tr.wikipedia-on-ipfs.org/wiki/Anasayfa.html', { recursive: true })))
        .to.match(/\/ipfs\/.+$/)
    })
  })
diff --git a/src/object/data.js b/src/object/data.js
index e2eeb515..fa301bbe 100644
--- a/src/object/data.js
+++ b/src/object/data.js
@@ -1,7 +1,6 @@
 /* eslint-env mocha */
 'use strict'
 
-const bs58 = require('bs58')
 const hat = require('hat')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
@@ -33,29 +32,7 @@ module.exports = (createCommon, options) => {
 
      const nodeCid = await ipfs.object.put(testObj)
 
-      let data = await ipfs.object.data(nodeCid)
-      // because js-ipfs-api can't infer
-      // if the returned Data is Buffer or String
-      if (typeof data === 'string') {
-        data = Buffer.from(data)
-      }
-      expect(testObj.Data).to.deep.equal(data)
-    })
-
-    it('should get data by base58 encoded multihash', async () => {
-      const testObj = {
-        Data: Buffer.from(hat()),
-        Links: []
-      }
-
-      const nodeCid = await ipfs.object.put(testObj)
-
-      let data = await ipfs.object.data(bs58.encode(nodeCid.buffer), { enc: 'base58' })
-      // because js-ipfs-api can't infer
-      // if the returned Data is Buffer or String
-      if (typeof data === 'string') {
-        data = Buffer.from(data)
-      }
+      const data = await ipfs.object.data(nodeCid)
      expect(testObj.Data).to.deep.equal(data)
    })
 
@@ -67,12 +44,7 @@ module.exports = (createCommon, options) => {
 
      const nodeCid = await ipfs.object.put(testObj)
 
-      let data = await ipfs.object.data(bs58.encode(nodeCid.buffer).toString(), { enc: 'base58' })
-      // because js-ipfs-api can't infer if the
-      // returned Data is Buffer or String
-      if (typeof data === 'string') {
-        data = Buffer.from(data)
-      }
+      const data = await ipfs.object.data(nodeCid.toV0().toString(), { enc: 'base58' })
      expect(testObj.Data).to.eql(data)
    })
 
diff --git a/src/object/get.js b/src/object/get.js
index c4f662df..fb3eb888 100644
--- a/src/object/get.js
+++ b/src/object/get.js
@@ -8,6 +8,7 @@
 const UnixFs = require('ipfs-unixfs')
 const crypto = require('crypto')
 const { asDAGLink } = require('./utils')
+const all = require('it-all')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -132,10 +133,10 @@ module.exports = (createCommon, options) => {
      // has to be big enough to span several DAGNodes
      const data = crypto.randomBytes(1024 * 3000)
 
-      const result = await ipfs.add({
+      const result = await all(ipfs.add({
        path: '',
        content: data
-      })
+      }))
 
      const node = await ipfs.object.get(result[0].hash)
      const meta = UnixFs.unmarshal(node.Data)
diff --git a/src/object/links.js b/src/object/links.js
index 0770ef21..ecc065bc 100644
--- a/src/object/links.js
+++ b/src/object/links.js
@@ -7,6 +7,7 @@
 const hat = require('hat')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 const { asDAGLink } = require('./utils')
 const CID = require('cids')
+const all = require('it-all')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -87,10 +88,10 @@ module.exports = (createCommon, options) => {
    it('should get links from CBOR object', async () => {
      const hashes = []
 
-      const res1 = await ipfs.add(Buffer.from('test data'))
+      const res1 = await all(ipfs.add(Buffer.from('test data')))
      hashes.push(res1[0].hash)
 
-      const res2 = await ipfs.add(Buffer.from('more test data'))
+      const res2 = await all(ipfs.add(Buffer.from('more test data')))
      hashes.push(res2[0].hash)
 
      const obj = {
diff --git a/src/object/utils.js b/src/object/utils.js
index f426dfd9..e16547e0 100644
--- a/src/object/utils.js
+++ b/src/object/utils.js
@@ -1,28 +1,15 @@
 'use strict'
 
-const { promisify } = require('es6-promisify')
 const dagPB = require('ipld-dag-pb')
-const { DAGNode, DAGLink } = dagPB
 
-const calculateCid = (node) => dagPB.util.cid(node.serialize(), { cidVersion: 0 })
-
-const createDAGNode = promisify((data, links, cb) => {
-  cb(null, new DAGNode(data, links))
-})
-
-const addLinkToDAGNode = promisify((parent, link, cb) => {
-  cb(null, new DAGNode(parent.Data, parent.Links.concat(link)))
-})
+const calculateCid = node => dagPB.util.cid(node.serialize(), { cidVersion: 0 })
 
 const asDAGLink = async (node, name = '') => {
   const cid = await calculateCid(node)
-
-  return new DAGLink(name, node.size, cid)
+  return new dagPB.DAGLink(name, node.size, cid)
 }
 
 module.exports = {
   calculateCid,
-  createDAGNode,
-  addLinkToDAGNode,
   asDAGLink
 }
diff --git a/src/pin/add.js b/src/pin/add.js
index d8789080..a22a3b03 100644
--- a/src/pin/add.js
+++ b/src/pin/add.js
@@ -3,6 +3,7 @@
 
 const { fixtures } = require('./utils')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const all = require('it-all')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -21,7 +22,7 @@ module.exports = (createCommon, options) => {
      ipfs = await common.setup()
 
      await Promise.all(fixtures.files.map(file => {
-        return ipfs.add(file.data, { pin: false })
+        return all(ipfs.add(file.data, { pin: false }))
      }))
    })
 
diff --git a/src/pin/ls.js b/src/pin/ls.js
index 47caed6c..40291c04 100644
--- a/src/pin/ls.js
+++ b/src/pin/ls.js
@@ -3,6 +3,7 @@
 
 const { fixtures } = require('./utils')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const all = require('it-all')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -22,13 +23,13 @@ module.exports = (createCommon, options) => {
      ipfs = await common.setup()
      // two files wrapped in directories, only root CID pinned recursively
      const dir = fixtures.directory.files.map((file) => ({ path: file.path, content: file.data }))
-      await ipfs.add(dir, { pin: false, cidVersion: 0 })
+      await all(ipfs.add(dir, { pin: false, cidVersion: 0 }))
      await ipfs.pin.add(fixtures.directory.cid, { recursive: true })
      // a file (CID pinned recursively)
-      await ipfs.add(fixtures.files[0].data, { pin: false, cidVersion: 0 })
+      await all(ipfs.add(fixtures.files[0].data, { pin: false, cidVersion: 0 }))
      await ipfs.pin.add(fixtures.files[0].cid, { recursive: true })
      // a single CID (pinned directly)
-      await ipfs.add(fixtures.files[1].data, { pin: false, cidVersion: 0 })
+      await all(ipfs.add(fixtures.files[1].data, { pin: false, cidVersion: 0 }))
      await ipfs.pin.add(fixtures.files[1].cid, { recursive: false })
    })
 
@@ -36,7 +37,7 @@ module.exports = (createCommon, options) => {
    // 1st, because ipfs.add pins automatically
    it('should list all recursive pins', async () => {
-      const pinset = await ipfs.pin.ls({ type: 'recursive' })
+      const pinset = await all(ipfs.pin.ls({ type: 'recursive' }))
 
      expect(pinset).to.deep.include({
        type: 'recursive',
        hash: fixtures.files[0].cid
@@ -48,7 +49,7 @@ module.exports = (createCommon, options) => {
    })
 
    it('should list all indirect pins', async () => {
-      const pinset = await ipfs.pin.ls({ type: 'indirect' })
+      const pinset = await all(ipfs.pin.ls({ type: 'indirect' }))
 
      expect(pinset).to.not.deep.include({
        type: 'recursive',
        hash: fixtures.files[0].cid
@@ -72,7 +73,7 @@ module.exports = (createCommon, options) => {
    })
 
    it('should list all types of pins', async () => {
-      const pinset = await ipfs.pin.ls()
+      const pinset = await all(ipfs.pin.ls())
 
      expect(pinset).to.not.be.empty()
      // check the three "roots"
      expect(pinset).to.deep.include({
@@ -98,7 +99,7 @@ module.exports = (createCommon, options) => {
    })
 
    it('should list all direct pins', async () => {
-      const pinset = await ipfs.pin.ls({ type: 'direct' })
+      const pinset = await all(ipfs.pin.ls({ type: 'direct' }))
      expect(pinset).to.have.lengthOf(1)
      expect(pinset).to.deep.include({
        type: 'direct',
@@ -107,7 +108,7 @@ module.exports = (createCommon, options) => {
    })
 
    it('should list pins for a specific hash', async () => {
-      const pinset = await ipfs.pin.ls(fixtures.files[0].cid)
+      const pinset = await all(ipfs.pin.ls(fixtures.files[0].cid))
 
      expect(pinset).to.deep.equal([{
        type: 'recursive',
        hash: fixtures.files[0].cid
@@ -116,21 +117,21 @@ module.exports = (createCommon, options) => {
 
    it('should throw an error on missing direct pins for existing path', () => {
      // ipfs.txt is an indirect pin, so lookup for direct one should throw an error
-      return expect(ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'direct' }))
+      return expect(all(ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'direct' })))
        .to.eventually.be.rejected
        .and.be.an.instanceOf(Error)
        .and.to.have.property('message', `path '/ipfs/${fixtures.directory.cid}/files/ipfs.txt' is not pinned`)
    })
 
    it('should throw an error on missing link for a specific path', () => {
-      return expect(ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/I-DONT-EXIST.txt`, { type: 'direct' }))
+      return expect(all(ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/I-DONT-EXIST.txt`, { type: 'direct' })))
        .to.eventually.be.rejected
        .and.be.an.instanceOf(Error)
        .and.to.have.property('message', `no link named "I-DONT-EXIST.txt" under ${fixtures.directory.cid}`)
    })
 
    it('should list indirect pins for a specific path', async () => {
-      const pinset = await ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'indirect' })
+      const pinset = await all(ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'indirect' }))
 
      expect(pinset).to.deep.include({
        type: `indirect through ${fixtures.directory.cid}`,
        hash: fixtures.directory.files[1].cid
@@ -138,7 +139,7 @@ module.exports = (createCommon, options) => {
    })
 
    it('should list recursive pins for a specific hash', async () => {
-      const pinset = await ipfs.pin.ls(fixtures.files[0].cid, { type: 'recursive' })
+      const pinset = await all(ipfs.pin.ls(fixtures.files[0].cid, { type: 'recursive' }))
 
      expect(pinset).to.deep.equal([{
        type: 'recursive',
        hash: fixtures.files[0].cid
@@ -146,7 +147,7 @@ module.exports = (createCommon, options) => {
    })
 
    it('should list pins for multiple CIDs', async () => {
-      const pinset = await ipfs.pin.ls([fixtures.files[0].cid, fixtures.files[1].cid])
+      const pinset = await all(ipfs.pin.ls([fixtures.files[0].cid, fixtures.files[1].cid]))
      const cids = pinset.map(({ hash }) => hash)
 
      expect(cids).to.include(fixtures.files[0].cid)
      expect(cids).to.include(fixtures.files[1].cid)
diff --git a/src/pin/rm.js b/src/pin/rm.js
index 6be13eff..1cec3f01 100644
--- a/src/pin/rm.js
+++ b/src/pin/rm.js
@@ -3,6 +3,7 @@
 
 const { fixtures } = require('./utils')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const all = require('it-all')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -20,9 +21,9 @@ module.exports = (createCommon, options) => {
      this.timeout(60 * 1000)
 
      ipfs = await common.setup()
-      await ipfs.add(fixtures.files[0].data, { pin: false })
+      await all(ipfs.add(fixtures.files[0].data, { pin: false }))
      await ipfs.pin.add(fixtures.files[0].cid, { recursive: true })
-      await ipfs.add(fixtures.files[1].data, { pin: false })
+      await all(ipfs.add(fixtures.files[1].data, { pin: false }))
      await ipfs.pin.add(fixtures.files[1].cid, { recursive: false })
    })
 
@@ -34,7 +35,7 @@ module.exports = (createCommon, options) => {
        hash: fixtures.files[0].cid
      }])
 
-      const pinset = await ipfs.pin.ls({ type: 'recursive' })
+      const pinset = await all(ipfs.pin.ls({ type: 'recursive' }))
      expect(pinset).to.not.deep.include({
        hash: fixtures.files[0].cid,
        type: 'recursive'
@@ -47,7 +48,7 @@ module.exports = (createCommon, options) => {
        hash: fixtures.files[1].cid
      }])
 
-      const pinset = await ipfs.pin.ls({ type: 'direct' })
+      const pinset = await all(ipfs.pin.ls({ type: 'direct' }))
      expect(pinset).to.not.deep.include({
        hash: fixtures.files[1].cid
      })
diff --git a/src/ping/index.js b/src/ping/index.js
index a33bbddc..1994eced 100644
--- a/src/ping/index.js
+++ b/src/ping/index.js
@@ -2,9 +2,7 @@
 const { createSuite } = require('../utils/suite')
 
 const tests = {
-  ping: require('./ping'),
-  pingPullStream: require('./ping-pull-stream'),
-  pingReadableStream: require('./ping-readable-stream')
+  ping: require('./ping')
 }
 
 module.exports = createSuite(tests)
diff --git a/src/ping/ping-pull-stream.js b/src/ping/ping-pull-stream.js
deleted file mode 100644
index e9e65add..00000000
--- a/src/ping/ping-pull-stream.js
+++ /dev/null
@@ -1,61 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const pullToPromise = require('pull-to-promise')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const { isPong } = require('./utils.js')
-
-module.exports = (createCommon, options) => {
-  const describe = getDescribe(options)
-  const it = getIt(options)
-  const common = createCommon()
-
-  describe('.pingPullStream', function () {
-    this.timeout(60 * 1000)
-
-    let ipfsA
-    let ipfsB
-
-    before(async () => {
-      ipfsA = await common.setup()
-      ipfsB = await common.setup()
-      await ipfsA.swarm.connect(ipfsB.peerId.addresses[0])
-    })
-
-    after(() => common.teardown())
-
-    it('should send the specified number of packets over pull stream', async () => {
-      const count = 3
-
-      const results = await pullToPromise.any(ipfsA.pingPullStream(ipfsB.peerId.id, { count }))
-
-      const packetNum = results.reduce((acc, result) => {
-        expect(result.success).to.be.true()
-
-        if (isPong(result)) {
-          acc++
-        }
-
-        return acc
-      }, 0)
-
-      expect(packetNum).to.equal(count)
-    })
-
-    it('should fail when pinging an unknown peer over pull stream', () => {
-      const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn'
-      const count = 2
-
-      return expect(pullToPromise.any(ipfsA.pingPullStream(unknownPeerId, { count })))
-        .to.eventually.be.rejected()
-    })
-
-    it('should fail when pinging an invalid peer id over pull stream', () => {
-      const invalidPeerId = 'not a peer ID'
-      const count = 2
-
-      return expect(pullToPromise.any(ipfsA.pingPullStream(invalidPeerId, { count })))
-        .to.eventually.be.rejected()
-    })
-  })
-}
diff --git a/src/ping/ping-readable-stream.js b/src/ping/ping-readable-stream.js
deleted file mode 100644
index 3c69fc36..00000000
--- a/src/ping/ping-readable-stream.js
+++ /dev/null
@@ -1,92 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const pump = require('pump')
-const { Writable } = require('stream')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const { isPong } = require('./utils.js')
-
-module.exports = (createCommon, options) => {
-  const describe = getDescribe(options)
-  const it = getIt(options)
-  const common = createCommon()
-
-  describe('.pingReadableStream', function () {
-    this.timeout(60 * 1000)
-
-    let ipfsA
-    let ipfsB
-
-    before(async () => {
-      ipfsA = await common.setup()
-      ipfsB = await common.setup()
-      await ipfsA.swarm.connect(ipfsB.peerId.addresses[0])
-    })
-
-    after(() => common.teardown())
-
-    it('should send the specified number of packets over readable stream', () => {
-      let packetNum = 0
-      const count = 3
-
-      return new Promise((resolve, reject) => {
-        pump(
-          ipfsA.pingReadableStream(ipfsB.peerId.id, { count }),
-          new Writable({
-            objectMode: true,
-            write (res, enc, cb) {
-              expect(res.success).to.be.true()
-              // It's a pong
-              if (isPong(res)) {
-                packetNum++
-              }
-
-              cb()
-            }
-          }),
-          (err) => {
-            expect(err).to.not.exist()
-            expect(packetNum).to.equal(count)
-            resolve()
-          }
-        )
-      })
-    })
-
-    it('should fail when pinging peer that is not available over readable stream', () => {
-      const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn'
-
-      return new Promise((resolve, reject) => {
-        pump(
-          ipfsA.pingReadableStream(unknownPeerId, {}),
-          new Writable({
-            objectMode: true,
-            write: (res, enc, cb) => cb()
-          }),
-          (err) => {
-            expect(err).to.exist()
-            resolve()
-          }
-        )
-      })
-    })
-
-    it('should fail when pinging an invalid peer id over readable stream', () => {
-      const invalidPeerId = 'not a peer ID'
-
-      return new Promise((resolve, reject) => {
-        pump(
-          ipfsA.pingReadableStream(invalidPeerId, {}),
-          new Writable({
-            objectMode: true,
-            write: (chunk, enc, cb) => cb()
-          }),
-          (err) => {
-            expect(err).to.exist()
-            resolve()
-          }
-        )
-      })
-    })
-  })
-}
diff --git a/src/ping/ping.js b/src/ping/ping.js
index 030a77f1..be1204ed 100644
--- a/src/ping/ping.js
+++ b/src/ping/ping.js
@@ -3,6 +3,7 @@
 
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 const { expectIsPingResponse, isPong } = require('./utils')
+const all = require('it-all')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -29,7 +30,7 @@ module.exports = (createCommon, options) => {
    it('should send the specified number of packets', async () => {
      const count = 3
-      const responses = await ipfsA.ping(ipfsB.peerId.id, { count })
+      const responses = await all(ipfsA.ping(ipfsB.peerId.id, { count }))
      responses.forEach(expectIsPingResponse)
 
      const pongs = responses.filter(isPong)
@@ -40,14 +41,14 @@ module.exports = (createCommon, options) => {
      const notAvailablePeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn'
      const count = 2
 
-      return expect(ipfsA.ping(notAvailablePeerId, { count })).to.eventually.be.rejected()
+      return expect(all(ipfsA.ping(notAvailablePeerId, { count }))).to.eventually.be.rejected()
    })
 
    it('should fail when pinging an invalid peer Id', () => {
      const invalidPeerId = 'not a peer ID'
      const count = 2
 
-      return expect(ipfsA.ping(invalidPeerId, { count })).to.eventually.be.rejected()
+      return expect(all(ipfsA.ping(invalidPeerId, { count }))).to.eventually.be.rejected()
    })
  })
 }
diff --git a/src/pubsub/subscribe.js b/src/pubsub/subscribe.js
index 103aa937..7af21955 100644
--- a/src/pubsub/subscribe.js
+++ b/src/pubsub/subscribe.js
@@ -2,7 +2,7 @@
 'use strict'
 
 const pushable = require('it-pushable')
-const { collect } = require('streaming-iterables')
+const all = require('it-all')
 const { waitForPeers, getTopic } = require('./utils')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 const delay = require('delay')
@@ -106,10 +106,10 @@ module.exports = (createCommon, options) => {
 
      await ipfs1.pubsub.publish(topic, Buffer.from('hello'))
 
-      const [handler1Msg] = await collect(msgStream1)
+      const [handler1Msg] = await all(msgStream1)
      expect(handler1Msg.data.toString()).to.eql('hello')
 
-      const [handler2Msg] = await collect(msgStream2)
+      const [handler2Msg] = await all(msgStream2)
      expect(handler2Msg.data.toString()).to.eql('hello')
 
      await ipfs1.pubsub.unsubscribe(topic, handler1)
@@ -179,11 +179,11 @@ module.exports = (createCommon, options) => {
 
      await ipfs2.pubsub.publish(topic, Buffer.from(expectedString))
 
-      const [sub1Msg] = await collect(msgStream1)
+      const [sub1Msg] = await all(msgStream1)
      expect(sub1Msg.data.toString()).to.be.eql(expectedString)
      expect(sub1Msg.from).to.eql(ipfs2.peerId.id)
 
-      const [sub2Msg] = await collect(msgStream2)
+      const [sub2Msg] = await all(msgStream2)
      expect(sub2Msg.data.toString()).to.be.eql(expectedString)
      expect(sub2Msg.from).to.eql(ipfs2.peerId.id)
    })
@@ -213,11 +213,11 @@ module.exports = (createCommon, options) => {
 
      await ipfs2.pubsub.publish(topic, buffer)
 
-      const [sub1Msg] = await collect(msgStream1)
+      const [sub1Msg] = await all(msgStream1)
      expect(sub1Msg.data.toString('hex')).to.be.eql(expectedHex)
      expect(sub1Msg.from).to.eql(ipfs2.peerId.id)
 
-      const [sub2Msg] = await collect(msgStream2)
+      const [sub2Msg] = await all(msgStream2)
      expect(sub2Msg.data.toString('hex')).to.be.eql(expectedHex)
      expect(sub2Msg.from).to.eql(ipfs2.peerId.id)
    })
@@ -251,12 +251,12 @@ module.exports = (createCommon, options) => {
 
      outbox.forEach(msg => ipfs2.pubsub.publish(topic, Buffer.from(msg)))
 
-      const sub1Msgs = await collect(msgStream1)
+      const sub1Msgs = await all(msgStream1)
      sub1Msgs.forEach(msg => expect(msg.from).to.eql(ipfs2.peerId.id))
      const inbox1 = sub1Msgs.map(msg => msg.data.toString())
      expect(inbox1.sort()).to.eql(outbox.sort())
 
-      const sub2Msgs = await collect(msgStream2)
+      const sub2Msgs = await all(msgStream2)
      sub2Msgs.forEach(msg => expect(msg.from).to.eql(ipfs2.peerId.id))
      const inbox2 = sub2Msgs.map(msg => msg.data.toString())
      expect(inbox2.sort()).to.eql(outbox.sort())
@@ -290,7 +290,7 @@ module.exports = (createCommon, options) => {
        await ipfs2.pubsub.publish(topic, msgData)
      }
 
-      const msgs = await collect(msgStream)
+      const msgs = await all(msgStream)
      const duration = new Date().getTime() - startTime
      const opsPerSec = Math.floor(count / (duration / 1000))
 
diff --git a/src/files-regular/refs-local-tests.js b/src/refs-local.js
similarity index 76%
rename from src/files-regular/refs-local-tests.js
rename to src/refs-local.js
index 6d5cda04..39b307ab 100644
--- a/src/files-regular/refs-local-tests.js
+++ b/src/refs-local.js
@@ -2,15 +2,16 @@
 'use strict'
 
 const { fixtures } = require('./utils')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
+const { getDescribe, getIt, expect } = require('./utils/mocha')
+const all = require('it-all')
 
-module.exports = (createCommon, suiteName, ipfsRefsLocal, options) => {
+module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
   const it = getIt(options)
   const common = createCommon()
 
-  describe(suiteName, function () {
-    this.timeout(40 * 1000)
+  describe('.refs.local', function () {
+    this.timeout(60 * 1000)
 
    let ipfs
 
@@ -35,9 +36,9 @@ module.exports = (createCommon, suiteName, ipfsRefsLocal, options) => {
        content('holmes.txt')
      ]
 
-      await ipfs.add(dirs)
+      await all(ipfs.add(dirs))
 
-      const refs = await ipfsRefsLocal(ipfs)
+      const refs = await all(ipfs.refs.local())
 
      const cids = refs.map(r => r.ref)
      expect(cids).to.include('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn')
diff --git a/src/files-regular/refs-tests.js b/src/refs.js
similarity index 83%
rename from src/files-regular/refs-tests.js
rename to src/refs.js
index 3725800d..709eb850 100644
--- a/src/files-regular/refs-tests.js
+++ b/src/refs.js
@@ -1,19 +1,18 @@
 /* eslint-env mocha */
 'use strict'
 
-const pMapSeries = require('p-map-series')
-const pTimeout = require('p-timeout')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
+const { getDescribe, getIt, expect } = require('./utils/mocha')
 const loadFixture = require('aegir/fixtures')
 const CID = require('cids')
+const all = require('it-all')
 
-module.exports = (createCommon, suiteName, ipfsRefs, options) => {
+module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
   const it = getIt(options)
   const common = createCommon()
 
-  describe(suiteName, function () {
-    this.timeout(40 * 1000)
+  describe('.refs', function () {
+    this.timeout(60 * 1000)
 
    let ipfs, pbRootCb, dagRootCid
 
@@ -47,28 +46,28 @@ module.exports = (createCommon, suiteName, ipfsRefs, options) => {
      const p = (path ? path(pbRootCb) : pbRootCb)
 
      if (expectTimeout) {
-        return expect(pTimeout(ipfsRefs(ipfs)(p, params), expectTimeout)).to.eventually.be.rejected
+        return expect(all(ipfs.refs(p, params))).to.eventually.be.rejected
          .and.be.an.instanceOf(Error)
          .and.to.have.property('name')
          .to.eql('TimeoutError')
      }
 
      if (expectError) {
-        return expect(ipfsRefs(ipfs)(p, params)).to.be.eventually.rejected.and.be.an.instanceOf(Error)
+        return expect(all(ipfs.refs(p, params))).to.be.eventually.rejected.and.be.an.instanceOf(Error)
      }
 
-      const refs = await ipfsRefs(ipfs)(p, params)
+      const refs = await all(ipfs.refs(p, params))
 
      // Check there was no error and the refs match what was expected
      expect(refs.map(r => r.ref)).to.eql(expected)
    })
  }
 
-  it('dag refs test', async function () {
+  it('should get refs with cbor links', async function () {
    this.timeout(20 * 1000)
 
    // Call out to IPFS
-    const refs = await ipfsRefs(ipfs)(`/ipfs/${dagRootCid}`, { recursive: true })
+    const refs = await all(ipfs.refs(`/ipfs/${dagRootCid}`, { recursive: true }))
    // Check the refs match what was expected
    expect(refs.map(r => r.ref).sort()).to.eql([
      'QmPDqvcuA4AkhBLBuh2y49yhUB98rCnxPxa3eVNC1kAbSC',
@@ -111,7 +110,7 @@ function getMockObjects () {
 function getRefsTests () {
   return {
-    'prints added files': {
+    'should print added files': {
      params: {},
      expected: [
        'QmYEJ7qQNZUvBnv4SZ3rEbksagaan3sGvnUq948vSG8Z34',
@@ -121,7 +120,7 @@ function getRefsTests () {
      ]
    },
 
-    'prints files in edges format': {
+    'should print files in edges format': {
      params: { edges: true },
      expected: [
        'Qmd5MhNjx3NSZm3L2QKG1TFvqkTRbtZwGJinqEfqpfHH7s -> QmYEJ7qQNZUvBnv4SZ3rEbksagaan3sGvnUq948vSG8Z34',
@@ -131,7 +130,7 @@ function getRefsTests () {
      ]
    },
 
-    'prints files in custom format': {
+    'should print files in custom format': {
      params: { format: '<linkname>: <src> => <dst>' },
      expected: [
        'animals: Qmd5MhNjx3NSZm3L2QKG1TFvqkTRbtZwGJinqEfqpfHH7s => QmYEJ7qQNZUvBnv4SZ3rEbksagaan3sGvnUq948vSG8Z34',
@@ -141,7 +140,7 @@ function getRefsTests () {
      ]
    },
 
-    'follows a path, <hash>/<subdir>': {
+    'should follow a path, <hash>/<subdir>': {
      path: (cid) => `/ipfs/${cid}/animals`,
      params: { format: '<linkname>' },
      expected: [
@@ -150,7 +149,7 @@ function getRefsTests () {
      ]
    },
 
-    'follows a path, <hash>/<subdir1>/<subdir2>': {
+    'should follow a path, <hash>/<subdir1>/<subdir2>': {
      path: (cid) => `/ipfs/${cid}/animals/land`,
      params: { format: '<linkname>' },
      expected: [
@@ -160,7 +159,7 @@ function getRefsTests () {
      ]
    },
 
-    'follows a path with recursion, <hash>/<subdir>': {
+    'should follow a path with recursion, <hash>/<subdir>': {
      path: (cid) => `/ipfs/${cid}/animals`,
      params: { format: '<linkname>', recursive: true },
      expected: [
@@ -174,7 +173,7 @@ function getRefsTests () {
      ]
    },
 
-    'recursively follows folders, -r': {
+    'should recursively follow folders, -r': {
      params: { format: '<linkname>', recursive: true },
      expected: [
        'animals',
@@ -192,7 +191,7 @@ function getRefsTests () {
      ]
    },
 
-    'recursive with unique option': {
+    'should get refs with recursive and unique option': {
      params: { format: '<linkname>', recursive: true, unique: true },
      expected: [
        'animals',
@@ -209,7 +208,7 @@ function getRefsTests () {
      ]
    },
 
-    'max depth of 1': {
+    'should get refs with max depth of 1': {
      params: { format: '<linkname>', recursive: true, maxDepth: 1 },
      expected: [
        'animals',
@@ -219,7 +218,7 @@ function getRefsTests () {
      ]
    },
 
-    'max depth of 2': {
+    'should get refs with max depth of 2': {
      params: { format: '<linkname>', recursive: true, maxDepth: 2 },
      expected: [
        'animals',
@@ -232,7 +231,7 @@ function getRefsTests () {
      ]
    },
 
-    'max depth of 3': {
+    'should get refs with max depth of 3': {
      params: { format: '<linkname>', recursive: true, maxDepth: 3 },
      expected: [
        'animals',
@@ -250,12 +249,12 @@ function getRefsTests () {
      ]
    },
 
-    'max depth of 0': {
+    'should get refs with max depth of 0': {
      params: { recursive: true, maxDepth: 0 },
      expected: []
    },
 
-    'follows a path with max depth 1, <hash>/<subdir>': {
+    'should follow a path with max depth 1, <hash>/<subdir>': {
      path: (cid) => `/ipfs/${cid}/animals`,
      params: { format: '<linkname>', recursive: true, maxDepth: 1 },
      expected: [
@@ -264,7 +263,7 @@ function getRefsTests () {
      ]
    },
 
-    'follows a path with max depth 2, <hash>/<subdir>': {
+    'should follow a path with max depth 2, <hash>/<subdir>': {
      path: (cid) => `/ipfs/${cid}/animals`,
      params: { format: '<linkname>', recursive: true, maxDepth: 2 },
      expected: [
@@ -278,7 +277,7 @@ function getRefsTests () {
      ]
    },
 
-    'prints refs for multiple paths': {
+    'should print refs for multiple paths': {
      path: (cid) => [`/ipfs/${cid}/animals`, `/ipfs/${cid}/fruits`],
      params: { format: '<linkname>', recursive: true },
      expected: [
@@ -293,14 +292,15 @@ function getRefsTests () {
      ]
    },
 
-    'cannot specify edges and format': {
+    'should not be able to specify edges and format': {
      params: { format: '<linkname>', edges: true },
      expectError: true
    },
 
-    'prints nothing for non-existent hashes': {
+    'should print nothing for non-existent hashes': {
      path: () => 'QmYmW4HiZhotsoSqnv2o1oSssvkRM8b9RweBoH7ao5nki2',
-      expectTimeout: 4000
+      params: { timeout: 2000 },
+      expectTimeout: true
    }
  }
 }
@@ -320,7 +320,7 @@ function loadPbContent (ipfs, node) {
 function loadDagContent (ipfs, node) {
   const store = {
     putData: async (data) => {
-      const res = await ipfs.add(data)
+      const res = await all(ipfs.add(data))
       return res[0].hash
     },
     putLinks: (links) => {
@@ -350,10 +350,12 @@ async function loadContent (ipfs, store, node) {
    return 0
  })
 
-  const res = await pMapSeries(sorted, async ([name, child]) => {
-    const cid = await loadContent(ipfs, store, child)
-    return { name, cid: cid && cid.toString() }
-  })
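+  // an inline async generator stands in for p-map-series here: each child is
+  // loaded and yielded strictly in sequence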
+  const res = await all((async function * () {
+    for (const [name, child] of sorted) {
+      const cid = await loadContent(ipfs, store, child)
+      yield { name, cid: cid && cid.toString() }
+    }
+  })())
 
   return store.putLinks(res)
 }
diff --git a/src/repo/gc.js b/src/repo/gc.js
index 2c29c2de..3953eac6 100644
--- a/src/repo/gc.js
+++ b/src/repo/gc.js
@@ -3,6 +3,7 @@
 
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 const { DAGNode } = require('ipld-dag-pb')
+const all = require('it-all')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -23,56 +24,56 @@ module.exports = (createCommon, options) => {
    after(() => common.teardown())
 
    it('should run garbage collection', async () => {
-      const res = await ipfs.add(Buffer.from('apples'))
+      const res = await all(ipfs.add(Buffer.from('apples')))
 
-      const pinset = await ipfs.pin.ls()
+      const pinset = await all(ipfs.pin.ls())
      expect(pinset.map((obj) => obj.hash)).includes(res[0].hash)
 
      await ipfs.pin.rm(res[0].hash)
-      await ipfs.repo.gc()
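+      // repo.gc() now returns an async iterable of GC results; all() drains it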
expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length) const hashesAfterAdd = refsAfterAdd.map(r => r.ref) expect(hashesAfterAdd).includes(dataHash) // Run garbage collection - await ipfs.repo.gc() + await all(ipfs.repo.gc()) // Get the list of local blocks after GC, should still contain the hash, // because the file is pinned and in MFS - const refsAfterGc = await ipfs.refs.local() + const refsAfterGc = await all(ipfs.refs.local()) const hashesAfterGc = refsAfterGc.map(r => r.ref) expect(hashesAfterGc).includes(dataHash) @@ -142,11 +143,11 @@ module.exports = (createCommon, options) => { await ipfs.files.rm('/test') // Run garbage collection - await ipfs.repo.gc() + await all(ipfs.repo.gc()) // Get the list of local blocks after GC, should still contain the hash, // because the file is still pinned - const refsAfterRmAndGc = await ipfs.refs.local() + const refsAfterRmAndGc = await all(ipfs.refs.local()) const hashesAfterRmAndGc = refsAfterRmAndGc.map(r => r.ref) expect(hashesAfterRmAndGc).not.includes(mfsFileHash) expect(hashesAfterRmAndGc).includes(dataHash) @@ -155,10 +156,10 @@ module.exports = (createCommon, options) => { await ipfs.pin.rm(dataHash) // Run garbage collection - await ipfs.repo.gc() + await all(ipfs.repo.gc()) // The list of local blocks should no longer contain the hashes - const refsAfterUnpinAndGc = await ipfs.refs.local() + const refsAfterUnpinAndGc = await all(ipfs.refs.local()) const hashesAfterUnpinAndGc = refsAfterUnpinAndGc.map(r => r.ref) expect(hashesAfterUnpinAndGc).not.includes(mfsFileHash) expect(hashesAfterUnpinAndGc).not.includes(dataHash) @@ -166,10 +167,10 @@ module.exports = (createCommon, options) => { it('should clean up indirectly pinned data after recursive pin removal', async () => { // Get initial list of local blocks - const refsBeforeAdd = await ipfs.refs.local() + const refsBeforeAdd = await all(ipfs.refs.local()) // Add some data - const addRes = await ipfs.add(Buffer.from('pears')) + const addRes = await all(ipfs.add(Buffer.from('pears'))) const dataHash = addRes[0].hash // Unpin the data @@ -186,11 +187,11 @@ module.exports = (createCommon, options) => { const objHash = (await ipfs.object.put(obj)).toString() // Putting an object doesn't pin it - expect((await ipfs.pin.ls()).map(p => p.hash)).not.includes(objHash) + expect((await all(ipfs.pin.ls())).map(p => p.hash)).not.includes(objHash) // Get the list of local blocks after the add, should be bigger than // the initial list and contain data and object hash - const refsAfterAdd = await ipfs.refs.local() + const refsAfterAdd = await all(ipfs.refs.local()) expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length) const hashesAfterAdd = refsAfterAdd.map(r => r.ref) expect(hashesAfterAdd).includes(objHash) @@ -200,25 +201,25 @@ module.exports = (createCommon, options) => { await ipfs.pin.add(objHash, { recursive: true }) // The data should now be indirectly pinned - const pins = await ipfs.pin.ls() + const pins = await all(ipfs.pin.ls()) expect(pins.find(p => p.hash === dataHash).type).to.eql('indirect') // Run garbage collection - await ipfs.repo.gc() + await all(ipfs.repo.gc()) // Get the list of local blocks after GC, should still contain the data // hash, because the data is still (indirectly) pinned - const refsAfterGc = await ipfs.refs.local() + const refsAfterGc = await all(ipfs.refs.local()) expect(refsAfterGc.map(r => r.ref)).includes(dataHash) // Recursively unpin the object await ipfs.pin.rm(objHash) // Run garbage collection - await ipfs.repo.gc() + await all(ipfs.repo.gc()) 
// The list of local blocks should no longer contain the hashes - const refsAfterUnpinAndGc = await ipfs.refs.local() + const refsAfterUnpinAndGc = await all(ipfs.refs.local()) const hashesAfterUnpinAndGc = refsAfterUnpinAndGc.map(r => r.ref) expect(hashesAfterUnpinAndGc).not.includes(objHash) expect(hashesAfterUnpinAndGc).not.includes(dataHash) diff --git a/src/stats/bw-pull-stream.js b/src/stats/bw-pull-stream.js deleted file mode 100644 index 97d96193..00000000 --- a/src/stats/bw-pull-stream.js +++ /dev/null @@ -1,33 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { expectIsBandwidth } = require('./utils') -const pullToPromise = require('pull-to-promise') -const { getDescribe, getIt } = require('../utils/mocha') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.stats.bwPullStream', () => { - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - after(() => common.teardown()) - - it('should get bandwidth stats over pull stream', async () => { - const stream = ipfs.stats.bwPullStream() - - const data = await pullToPromise.any(stream) - expectIsBandwidth(null, data[0]) - }) - }) -} diff --git a/src/stats/bw-readable-stream.js b/src/stats/bw-readable-stream.js deleted file mode 100644 index 50e0a8c0..00000000 --- a/src/stats/bw-readable-stream.js +++ /dev/null @@ -1,34 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { expectIsBandwidth } = require('./utils') -const { getDescribe, getIt } = require('../utils/mocha') -const getStream = require('get-stream') - -module.exports = (createCommon, options) => { - const describe = getDescribe(options) - const it = getIt(options) - const common = createCommon() - - describe('.stats.bwReadableStream', () => { - let ipfs - - before(async function () { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - ipfs = await common.setup() - }) - - after(() => common.teardown()) - - it('should get bandwidth stats over readable stream', async () => { - const stream = ipfs.stats.bwReadableStream() - - const [data] = await getStream.array(stream) - - expectIsBandwidth(null, data) - }) - }) -} diff --git a/src/stats/bw.js b/src/stats/bw.js index 7994bca8..04208ac1 100644 --- a/src/stats/bw.js +++ b/src/stats/bw.js @@ -3,6 +3,7 @@ const { expectIsBandwidth } = require('./utils') const { getDescribe, getIt } = require('../utils/mocha') +const last = require('it-last') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -23,7 +24,7 @@ module.exports = (createCommon, options) => { after(() => common.teardown()) it('should get bandwidth stats ', async () => { - const res = await ipfs.stats.bw() + const res = await last(ipfs.stats.bw()) expectIsBandwidth(null, res) }) }) diff --git a/src/stats/index.js b/src/stats/index.js index e07efd47..17aa13ba 100644 --- a/src/stats/index.js +++ b/src/stats/index.js @@ -4,8 +4,6 @@ const { createSuite } = require('../utils/suite') const tests = { bitswap: require('./bitswap'), bw: require('./bw'), - bwPullStream: require('./bw-pull-stream'), - bwReadableStream: require('./bw-readable-stream'), repo: require('./repo') } diff --git a/src/swarm/addrs.js b/src/swarm/addrs.js index 03196577..32616faf 100644 --- a/src/swarm/addrs.js +++ 
b/src/swarm/addrs.js
@@ -1,7 +1,8 @@
 /* eslint-env mocha */
 'use strict'
 
-const PeerInfo = require('peer-info')
+const CID = require('cids')
+const Multiaddr = require('multiaddr')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 
 module.exports = (createCommon, options) => {
@@ -27,7 +28,10 @@ module.exports = (createCommon, options) => {
       const peerInfos = await ipfsA.swarm.addrs()
       expect(peerInfos).to.not.be.empty()
       expect(peerInfos).to.be.an('array')
-      peerInfos.forEach(m => expect(PeerInfo.isPeerInfo(m)).to.be.true())
+      peerInfos.forEach(m => {
+        expect(CID.isCID(m.id)).to.be.true()
+        m.addrs.forEach(addr => expect(Multiaddr.isMultiaddr(addr)).to.be.true())
+      })
     })
   })
 }
diff --git a/src/swarm/peers.js b/src/swarm/peers.js
index f37471c9..84323519 100644
--- a/src/swarm/peers.js
+++ b/src/swarm/peers.js
@@ -2,7 +2,7 @@
 'use strict'
 
 const multiaddr = require('multiaddr')
-const PeerId = require('peer-id')
+const CID = require('cids')
 const delay = require('delay')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 
@@ -21,7 +21,7 @@ module.exports = (createCommon, options) => {
     ipfsA = await common.setup()
     ipfsB = await common.setup()
     await ipfsA.swarm.connect(ipfsB.peerId.addresses[0])
-    await delay(60 * 1000) // wait for open streams in the connection available
+    await delay(10 * 1000) // wait for open streams in the connection to become available
   })
 
   after(() => common.teardown())
@@ -35,7 +35,7 @@ module.exports = (createCommon, options) => {
     expect(peer).to.have.a.property('addr')
     expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true)
     expect(peer).to.have.a.property('peer')
-    expect(PeerId.isPeerId(peer.peer)).to.equal(true)
+    expect(CID.isCID(peer.peer)).to.equal(true)
     expect(peer).to.not.have.a.property('latency')
 
     /* TODO: These assertions must be uncommented as soon as
diff --git a/src/utils/expect-timeout.js b/src/utils/expect-timeout.js
deleted file mode 100644
index 51c73307..00000000
--- a/src/utils/expect-timeout.js
+++ /dev/null
@@ -1,16 +0,0 @@
-'use strict'
-
-/**
- * Resolve if @param promise hangs for at least @param ms, throw otherwise
- * @param {Promise} promise promise that you expect to hang
- * @param {Number} ms millis to wait
- * @return {Promise}
- */
-module.exports = (promise, ms) => {
-  return Promise.race([
-    promise.then((out) => {
-      throw new Error('Expected Promise to timeout but it was successful.')
-    }),
-    new Promise((resolve, reject) => setTimeout(resolve, ms))
-  ])
-}
diff --git a/src/files-regular/utils.js b/src/utils/index.js
similarity index 100%
rename from src/files-regular/utils.js
rename to src/utils/index.js
diff --git a/src/utils/suite.js b/src/utils/suite.js
index b2ad5268..ce940298 100644
--- a/src/utils/suite.js
+++ b/src/utils/suite.js
@@ -31,4 +31,4 @@ function createSuite (tests, parent) {
   return Object.assign(suite, tests)
 }
 
-module.exports.createSuite = createSuite
+exports.createSuite = createSuite
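
For reviewers trying the migrated APIs locally, a minimal sketch of the
consumption pattern this refactor standardizes on. It relies only on the
it-all and it-last helpers already used by the patched tests; the ipfs
instance and the payloads below are illustrative assumptions, not part of
this patch.

    'use strict'

    const all = require('it-all')   // collects an async iterable into an array
    const last = require('it-last') // keeps only the final yielded value

    async function demo (ipfs) {
      // Collect every result: replaces the buffered add() return value and
      // the deleted *-pull-stream / *-readable-stream variants
      const added = await all(ipfs.add(Buffer.from('apples')))
      console.log(added[0].hash)

      // Keep only the last value: replaces one-shot results such as stats.bw()
      const bw = await last(ipfs.stats.bw())
      console.log(bw)

      // Or consume lazily with for-await, without buffering anything
      for await (const { ref } of ipfs.refs.local()) {
        console.log(ref)
      }
    }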