From 3b81e072a40608796d164c6a0b2734297de7bcfb Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 17 Dec 2019 21:59:09 +0000 Subject: [PATCH] refactor: async iterables --- package.json | 51 ++- src/cli/commands/add.js | 12 +- src/cli/commands/bitswap/stat.js | 2 +- src/cli/commands/bitswap/wantlist.js | 2 +- src/cli/commands/block/rm.js | 2 +- src/cli/commands/block/stat.js | 2 +- src/cli/commands/dag/resolve.js | 3 +- src/cli/commands/init.js | 2 +- src/cli/commands/pin/add.js | 2 +- src/cli/commands/pin/ls.js | 7 +- src/cli/commands/pin/rm.js | 2 +- src/cli/daemon.js | 4 +- src/core/api-manager.js | 21 + src/core/boot.js | 90 ---- src/core/components/add/index.js | 121 +++++ .../{files-regular => add}/utils.js | 51 +-- src/core/components/bitswap.js | 72 --- src/core/components/bitswap/stat.js | 22 + src/core/components/bitswap/unwant.js | 20 + src/core/components/bitswap/wantlist.js | 13 + src/core/components/block/get.js | 16 + src/core/components/block/put.js | 51 +++ src/core/components/block/rm.js | 65 +++ src/core/components/block/stat.js | 18 + src/core/components/block/utils.js | 17 + src/core/components/bootstrap.js | 66 --- src/core/components/bootstrap/add.js | 26 ++ src/core/components/bootstrap/list.js | 8 + src/core/components/bootstrap/rm.js | 31 ++ src/core/components/bootstrap/utils.js | 11 + .../cat-async-iterator.js => cat.js} | 12 +- src/core/components/config.js | 17 +- src/core/components/dag.js | 170 ------- src/core/components/dag/get.js | 34 ++ src/core/components/dag/put.js | 70 +++ src/core/components/dag/resolve.js | 15 + src/core/components/dag/tree.js | 15 + src/core/components/dag/utils.js | 48 ++ src/core/components/dns.js | 5 +- src/core/components/files-mfs.js | 374 --------------- .../files-regular/add-async-iterator.js | 155 ------- .../components/files-regular/add-from-fs.js | 3 - .../files-regular/add-from-stream.js | 3 - .../components/files-regular/add-from-url.js | 23 - .../files-regular/add-pull-stream.js | 11 - .../files-regular/add-readable-stream.js | 13 - src/core/components/files-regular/add.js | 22 - .../files-regular/cat-pull-stream.js | 9 - .../files-regular/cat-readable-stream.js | 11 - src/core/components/files-regular/cat.js | 10 - .../files-regular/get-pull-stream.js | 20 - .../files-regular/get-readable-stream.js | 19 - src/core/components/files-regular/get.js | 18 - src/core/components/files-regular/index.js | 34 -- .../files-regular/ls-pull-stream.js | 9 - .../files-regular/ls-readable-stream.js | 11 - src/core/components/files-regular/ls.js | 10 - .../files-regular/refs-local-pull-stream.js | 9 - .../refs-local-readable-stream.js | 11 - .../components/files-regular/refs-local.js | 10 - .../files-regular/refs-pull-stream.js | 9 - .../files-regular/refs-readable-stream.js | 11 - src/core/components/files-regular/refs.js | 16 - src/core/components/files.js | 178 ++++++++ .../get-async-iterator.js => get.js} | 12 +- src/core/components/id.js | 13 +- src/core/components/index.js | 104 ++++- src/core/components/init-assets.js | 20 - src/core/components/init.js | 427 +++++++++++++----- src/core/components/is-online.js | 6 +- .../ls-async-iterator.js => ls.js} | 14 +- src/core/components/name-pubsub.js | 78 ---- src/core/components/name.js | 179 -------- src/core/components/name/publish.js | 102 +++++ src/core/components/name/pubsub/cancel.js | 17 + src/core/components/name/pubsub/state.js | 18 + src/core/components/name/pubsub/subs.js | 16 + src/core/components/name/pubsub/utils.js | 25 + src/core/components/name/resolve.js | 89 ++++ 
src/core/components/name/utils.js | 15 + src/core/components/object.js | 302 ------------- src/core/components/object/data.js | 9 + src/core/components/object/get.js | 48 ++ src/core/components/object/links.js | 58 +++ src/core/components/object/new.js | 43 ++ src/core/components/object/patch/add-link.js | 12 + .../components/object/patch/append-data.js | 14 + src/core/components/object/patch/rm-link.js | 12 + src/core/components/object/patch/set-data.js | 13 + src/core/components/object/put.js | 85 ++++ src/core/components/object/stat.js | 28 ++ src/core/components/pin.js | 248 ---------- src/core/components/pin/add.js | 74 +++ src/core/components/pin/gc-lock.js | 83 ---- src/core/components/pin/gc.js | 153 ------- src/core/components/pin/ls.js | 89 ++++ src/core/components/pin/rm.js | 64 +++ src/core/components/ping-pull-stream.js | 100 ---- src/core/components/ping-readable-stream.js | 7 - src/core/components/ping.js | 52 ++- src/core/components/pre-start.js | 75 --- src/core/components/pubsub.js | 92 +--- .../refs-async-iterator.js => refs/index.js} | 38 +- .../local.js} | 6 +- src/core/components/repo/gc.js | 110 +++++ src/core/components/repo/stat.js | 15 + src/core/components/repo/version.js | 33 ++ src/core/components/resolve.js | 33 +- src/core/components/start.js | 240 ++++++++-- src/core/components/stats.js | 83 ---- src/core/components/stats/bw.js | 63 +++ src/core/components/stop.js | 208 +++++++-- src/core/components/swarm.js | 79 ---- src/core/components/swarm/addrs.js | 13 + src/core/components/swarm/connect.js | 7 + src/core/components/swarm/disconnect.js | 7 + src/core/components/swarm/local-addrs.js | 7 + src/core/components/swarm/peers.js | 34 ++ src/core/components/version.js | 9 +- src/core/config.js | 101 ----- src/core/errors.js | 54 +++ src/core/index.js | 211 +++------ src/core/ipns/index.js | 3 - src/core/ipns/path.js | 25 - src/core/preload.js | 4 +- src/core/runtime/add-from-fs-browser.js | 9 - src/core/runtime/add-from-fs-nodejs.js | 13 - src/core/runtime/init-assets-browser.js | 1 + src/core/runtime/init-assets-nodejs.js | 15 + src/core/runtime/repo-browser.js | 5 +- src/core/runtime/repo-nodejs.js | 8 +- src/core/utils.js | 56 ++- src/http/api/resources/bitswap.js | 8 +- src/http/api/resources/block.js | 4 +- src/http/api/resources/dag.js | 2 +- src/http/api/resources/files-regular.js | 4 +- src/http/api/resources/pin.js | 27 +- src/index.js | 2 +- src/utils/mutex.js | 52 --- test/cli/files.js | 2 +- test/core/config.spec.js | 223 --------- test/core/files.spec.js | 13 +- test/core/interface.spec.js | 12 +- 143 files changed, 3083 insertions(+), 3747 deletions(-) create mode 100644 src/core/api-manager.js delete mode 100644 src/core/boot.js create mode 100644 src/core/components/add/index.js rename src/core/components/{files-regular => add}/utils.js (67%) delete mode 100644 src/core/components/bitswap.js create mode 100644 src/core/components/bitswap/stat.js create mode 100644 src/core/components/bitswap/unwant.js create mode 100644 src/core/components/bitswap/wantlist.js create mode 100644 src/core/components/block/get.js create mode 100644 src/core/components/block/put.js create mode 100644 src/core/components/block/rm.js create mode 100644 src/core/components/block/stat.js create mode 100644 src/core/components/block/utils.js delete mode 100644 src/core/components/bootstrap.js create mode 100644 src/core/components/bootstrap/add.js create mode 100644 src/core/components/bootstrap/list.js create mode 100644 src/core/components/bootstrap/rm.js create mode 
100644 src/core/components/bootstrap/utils.js rename src/core/components/{files-regular/cat-async-iterator.js => cat.js} (64%) delete mode 100644 src/core/components/dag.js create mode 100644 src/core/components/dag/get.js create mode 100644 src/core/components/dag/put.js create mode 100644 src/core/components/dag/resolve.js create mode 100644 src/core/components/dag/tree.js create mode 100644 src/core/components/dag/utils.js delete mode 100644 src/core/components/files-mfs.js delete mode 100644 src/core/components/files-regular/add-async-iterator.js delete mode 100644 src/core/components/files-regular/add-from-fs.js delete mode 100644 src/core/components/files-regular/add-from-stream.js delete mode 100644 src/core/components/files-regular/add-from-url.js delete mode 100644 src/core/components/files-regular/add-pull-stream.js delete mode 100644 src/core/components/files-regular/add-readable-stream.js delete mode 100644 src/core/components/files-regular/add.js delete mode 100644 src/core/components/files-regular/cat-pull-stream.js delete mode 100644 src/core/components/files-regular/cat-readable-stream.js delete mode 100644 src/core/components/files-regular/cat.js delete mode 100644 src/core/components/files-regular/get-pull-stream.js delete mode 100644 src/core/components/files-regular/get-readable-stream.js delete mode 100644 src/core/components/files-regular/get.js delete mode 100644 src/core/components/files-regular/index.js delete mode 100644 src/core/components/files-regular/ls-pull-stream.js delete mode 100644 src/core/components/files-regular/ls-readable-stream.js delete mode 100644 src/core/components/files-regular/ls.js delete mode 100644 src/core/components/files-regular/refs-local-pull-stream.js delete mode 100644 src/core/components/files-regular/refs-local-readable-stream.js delete mode 100644 src/core/components/files-regular/refs-local.js delete mode 100644 src/core/components/files-regular/refs-pull-stream.js delete mode 100644 src/core/components/files-regular/refs-readable-stream.js delete mode 100644 src/core/components/files-regular/refs.js create mode 100644 src/core/components/files.js rename src/core/components/{files-regular/get-async-iterator.js => get.js} (53%) delete mode 100644 src/core/components/init-assets.js rename src/core/components/{files-regular/ls-async-iterator.js => ls.js} (71%) delete mode 100644 src/core/components/name-pubsub.js delete mode 100644 src/core/components/name.js create mode 100644 src/core/components/name/publish.js create mode 100644 src/core/components/name/pubsub/cancel.js create mode 100644 src/core/components/name/pubsub/state.js create mode 100644 src/core/components/name/pubsub/subs.js create mode 100644 src/core/components/name/pubsub/utils.js create mode 100644 src/core/components/name/resolve.js create mode 100644 src/core/components/name/utils.js delete mode 100644 src/core/components/object.js create mode 100644 src/core/components/object/data.js create mode 100644 src/core/components/object/get.js create mode 100644 src/core/components/object/links.js create mode 100644 src/core/components/object/new.js create mode 100644 src/core/components/object/patch/add-link.js create mode 100644 src/core/components/object/patch/append-data.js create mode 100644 src/core/components/object/patch/rm-link.js create mode 100644 src/core/components/object/patch/set-data.js create mode 100644 src/core/components/object/put.js create mode 100644 src/core/components/object/stat.js delete mode 100644 src/core/components/pin.js create mode 
100644 src/core/components/pin/add.js delete mode 100644 src/core/components/pin/gc-lock.js delete mode 100644 src/core/components/pin/gc.js create mode 100644 src/core/components/pin/ls.js create mode 100644 src/core/components/pin/rm.js delete mode 100644 src/core/components/ping-pull-stream.js delete mode 100644 src/core/components/ping-readable-stream.js delete mode 100644 src/core/components/pre-start.js rename src/core/components/{files-regular/refs-async-iterator.js => refs/index.js} (78%) rename src/core/components/{files-regular/refs-local-async-iterator.js => refs/local.js} (75%) create mode 100644 src/core/components/repo/gc.js create mode 100644 src/core/components/repo/stat.js create mode 100644 src/core/components/repo/version.js delete mode 100644 src/core/components/stats.js create mode 100644 src/core/components/stats/bw.js delete mode 100644 src/core/components/swarm.js create mode 100644 src/core/components/swarm/addrs.js create mode 100644 src/core/components/swarm/connect.js create mode 100644 src/core/components/swarm/disconnect.js create mode 100644 src/core/components/swarm/local-addrs.js create mode 100644 src/core/components/swarm/peers.js delete mode 100644 src/core/config.js create mode 100644 src/core/errors.js delete mode 100644 src/core/ipns/path.js delete mode 100644 src/core/runtime/add-from-fs-browser.js delete mode 100644 src/core/runtime/add-from-fs-nodejs.js create mode 100644 src/core/runtime/init-assets-browser.js create mode 100644 src/core/runtime/init-assets-nodejs.js delete mode 100644 src/utils/mutex.js delete mode 100644 test/core/config.spec.js diff --git a/package.json b/package.json index 0300c5b8d1..9633ad0af7 100644 --- a/package.json +++ b/package.json @@ -15,8 +15,7 @@ ], "main": "src/core/index.js", "browser": { - "./src/core/components/init-assets.js": false, - "./src/core/runtime/add-from-fs-nodejs.js": "./src/core/runtime/add-from-fs-browser.js", + "./src/core/runtime/init-assets-nodejs.js": "./src/core/runtime/init-assets-browser.js", "./src/core/runtime/config-nodejs.js": "./src/core/runtime/config-browser.js", "./src/core/runtime/dns-nodejs.js": "./src/core/runtime/dns-browser.js", "./src/core/runtime/libp2p-nodejs.js": "./src/core/runtime/libp2p-browser.js", @@ -25,7 +24,8 @@ "./src/core/runtime/repo-nodejs.js": "./src/core/runtime/repo-browser.js", "./src/core/runtime/ipld-nodejs.js": "./src/core/runtime/ipld-browser.js", "./test/utils/create-repo-nodejs.js": "./test/utils/create-repo-browser.js", - "stream": "readable-stream" + "stream": "readable-stream", + "ipfs-utils/src/files/glob-source": false }, "browser-all-ipld-formats": { "./src/core/runtime/ipld-browser.js": "./src/core/runtime/ipld-browser-all.js" @@ -95,20 +95,19 @@ "glob": "^7.1.3", "hapi-pino": "^6.1.0", "hashlru": "^2.3.0", - "human-to-milliseconds": "^2.0.0", "interface-datastore": "~0.8.0", "ipfs-bitswap": "^0.26.0", "ipfs-block": "~0.8.1", "ipfs-block-service": "~0.16.0", - "ipfs-http-client": "^40.0.1", + "ipfs-http-client": "github:ipfs/js-ipfs-http-client#refactor/async-iterables2", "ipfs-http-response": "~0.4.0", - "ipfs-mfs": "^0.13.2", + "ipfs-mfs": "github:ipfs/js-ipfs-mfs#refactor/remove-streams", "ipfs-multipart": "^0.2.0", "ipfs-repo": "^0.30.0", "ipfs-unixfs": "~0.1.16", "ipfs-unixfs-exporter": "^0.38.0", "ipfs-unixfs-importer": "^0.40.0", - "ipfs-utils": "~0.4.0", + "ipfs-utils": "^0.5.0", "ipld": "~0.25.0", "ipld-bitcoin": "~0.3.0", "ipld-dag-cbor": "~0.15.0", @@ -125,34 +124,35 @@ "iso-url": "~0.4.6", "it-pipe": "^1.0.1", "it-to-stream": 
"^0.1.1", + "iterable-ndjson": "^1.1.0", "jsondiffpatch": "~0.3.11", "just-safe-set": "^2.1.0", "kind-of": "^6.0.2", "ky": "^0.15.0", "ky-universal": "~0.3.0", - "libp2p": "^0.26.2", - "libp2p-bootstrap": "~0.9.3", - "libp2p-crypto": "^0.16.2", + "libp2p": "^0.27.0-pre.1", + "libp2p-bootstrap": "^0.10.2", + "libp2p-crypto": "^0.17.1", "libp2p-delegated-content-routing": "^0.4.1", - "libp2p-delegated-peer-routing": "^0.3.1", - "libp2p-floodsub": "^0.18.0", - "libp2p-gossipsub": "~0.0.5", - "libp2p-kad-dht": "~0.16.0", + "libp2p-delegated-peer-routing": "^0.4.0", + "libp2p-floodsub": "^0.20.0", + "libp2p-gossipsub": "^0.2.0", + "libp2p-kad-dht": "^0.18.3", "libp2p-keychain": "^0.5.2", - "libp2p-mdns": "~0.12.0", + "libp2p-mdns": "^0.13.0", "libp2p-record": "~0.7.0", - "libp2p-secio": "~0.11.0", - "libp2p-tcp": "^0.13.0", - "libp2p-webrtc-star": "~0.16.0", + "libp2p-secio": "^0.12.1", + "libp2p-tcp": "^0.14.2", + "libp2p-webrtc-star": "^0.17.0", "libp2p-websocket-star-multi": "~0.4.3", - "libp2p-websockets": "~0.12.3", + "libp2p-websockets": "^0.13.0", "lodash.flatten": "^4.4.0", "mafmt": "^6.0.10", "merge-options": "^2.0.0", "mime-types": "^2.1.21", "mkdirp": "~0.5.1", "mortice": "^2.0.0", - "multiaddr": "^6.1.1", + "multiaddr": "^7.2.1", "multiaddr-to-uri": "^5.0.0", "multibase": "~0.6.0", "multicodec": "~0.5.5", @@ -161,9 +161,9 @@ "node-fetch": "^2.3.0", "p-iteration": "^1.1.8", "p-queue": "^6.1.0", - "peer-book": "^0.9.1", - "peer-id": "~0.12.2", - "peer-info": "~0.15.1", + "parse-duration": "^0.1.2", + "peer-id": "^0.13.5", + "peer-info": "^0.17.0", "pretty-bytes": "^5.3.0", "progress": "^2.0.1", "promise-nodeify": "^3.0.1", @@ -205,9 +205,10 @@ "execa": "^3.0.0", "form-data": "^3.0.0", "hat": "0.0.3", - "interface-ipfs-core": "^0.124.1", + "interface-ipfs-core": "github:ipfs/interface-js-ipfs-core#refactor/async-iterables", "ipfs-interop": "^0.1.1", - "ipfsd-ctl": "^0.47.2", + "ipfsd-ctl": "github:ipfs/js-ipfsd-ctl#fix/do-not-call-shutdown-twice", + "it-all": "^1.0.1", "libp2p-websocket-star": "~0.10.2", "lodash": "^4.17.15", "ncp": "^2.0.0", diff --git a/src/cli/commands/add.js b/src/cli/commands/add.js index 3bfe6f9866..b482522e02 100644 --- a/src/cli/commands/add.js +++ b/src/cli/commands/add.js @@ -156,20 +156,20 @@ module.exports = { ? 
globSource(argv.file, { recursive: argv.recursive }) : process.stdin // Pipe directly to ipfs.add - let finalHash + let finalCid try { - for await (const file of ipfs._addAsyncIterator(source, options)) { + for await (const file of ipfs.add(source, options)) { if (argv.silent) { continue } if (argv.quieter) { - finalHash = file.hash + finalCid = file.cid continue } - const cid = cidToString(file.hash, { base: argv.cidBase }) + const cid = cidToString(file.cid, { base: argv.cidBase }) let message = cid if (!argv.quiet) { @@ -184,7 +184,7 @@ module.exports = { bar.terminate() } - // Tweak the error message and add more relevant infor for the CLI + // Tweak the error message and add more relevant info for the CLI if (err.code === 'ERR_DIR_NON_RECURSIVE') { err.message = `'${err.path}' is a directory, use the '-r' flag to specify directories` } @@ -197,7 +197,7 @@ module.exports = { } if (argv.quieter) { - log(cidToString(finalHash, { base: argv.cidBase })) + log(cidToString(finalCid, { base: argv.cidBase })) } })()) } diff --git a/src/cli/commands/bitswap/stat.js b/src/cli/commands/bitswap/stat.js index e333e6b137..db55ef3339 100644 --- a/src/cli/commands/bitswap/stat.js +++ b/src/cli/commands/bitswap/stat.js @@ -35,7 +35,7 @@ module.exports = { stats.dupDataReceived = prettyBytes(stats.dupDataReceived.toNumber()).toUpperCase() stats.wantlist = `[${stats.wantlist.length} keys]` } else { - const wantlist = stats.wantlist.map((elem) => cidToString(elem['/'], { base: cidBase, upgrade: false })) + const wantlist = stats.wantlist.map(cid => cidToString(cid, { base: cidBase, upgrade: false })) stats.wantlist = `[${wantlist.length} keys] ${wantlist.join('\n ')}` } diff --git a/src/cli/commands/bitswap/wantlist.js b/src/cli/commands/bitswap/wantlist.js index bcd4d73783..4c2d4c4941 100644 --- a/src/cli/commands/bitswap/wantlist.js +++ b/src/cli/commands/bitswap/wantlist.js @@ -25,7 +25,7 @@ module.exports = { resolve((async () => { const ipfs = await getIpfs() const list = await ipfs.bitswap.wantlist(peer) - list.Keys.forEach(k => print(cidToString(k['/'], { base: cidBase, upgrade: false }))) + list.forEach(cid => print(cidToString(cid, { base: cidBase, upgrade: false }))) })()) } } diff --git a/src/cli/commands/block/rm.js b/src/cli/commands/block/rm.js index 1f92ed1a06..83982e2273 100644 --- a/src/cli/commands/block/rm.js +++ b/src/cli/commands/block/rm.js @@ -25,7 +25,7 @@ module.exports = { const ipfs = await getIpfs() let errored = false - for await (const result of ipfs.block._rmAsyncIterator(hash, { + for await (const result of ipfs.block.rm(hash, { force, quiet })) { diff --git a/src/cli/commands/block/stat.js b/src/cli/commands/block/stat.js index b268ff6e2d..c60f28c0f2 100644 --- a/src/cli/commands/block/stat.js +++ b/src/cli/commands/block/stat.js @@ -20,7 +20,7 @@ module.exports = { resolve((async () => { const ipfs = await getIpfs() const stats = await ipfs.block.stat(key) - print('Key: ' + cidToString(stats.key, { base: cidBase })) + print('Key: ' + cidToString(stats.cid, { base: cidBase })) print('Size: ' + stats.size) })()) } diff --git a/src/cli/commands/dag/resolve.js b/src/cli/commands/dag/resolve.js index bba7886034..7a9907f427 100644 --- a/src/cli/commands/dag/resolve.js +++ b/src/cli/commands/dag/resolve.js @@ -19,10 +19,9 @@ module.exports = { const options = {} try { - const result = await ipfs.dag.resolve(ref, options) let lastCid - for (const res of result) { + for await (const res of ipfs.dag.resolve(ref, options)) { if (CID.isCID(res.value)) { lastCid = res.value } diff 
--git a/src/cli/commands/init.js index a8c4f01b5f..899d2883ac 100644 --- a/src/cli/commands/init.js +++ b/src/cli/commands/init.js @@ -65,7 +65,7 @@ module.exports = { const IPFS = require('../../core') const Repo = require('ipfs-repo') - const node = new IPFS({ + const node = await IPFS.create({ repo: new Repo(path), init: false, start: false, diff --git a/src/cli/commands/pin/add.js b/src/cli/commands/pin/add.js index a97fa28f48..39dde1550d 100644 --- a/src/cli/commands/pin/add.js +++ b/src/cli/commands/pin/add.js @@ -28,7 +28,7 @@ module.exports = { const ipfs = await getIpfs() const results = await ipfs.pin.add(ipfsPath, { recursive }) results.forEach((res) => { - print(`pinned ${cidToString(res.hash, { base: cidBase })} ${type}ly`) + print(`pinned ${cidToString(res.cid, { base: cidBase })} ${type}ly`) }) })()) } diff --git a/src/cli/commands/pin/ls.js b/src/cli/commands/pin/ls.js index 5f75b6e410..18b69e928b 100644 --- a/src/cli/commands/pin/ls.js +++ b/src/cli/commands/pin/ls.js @@ -34,14 +34,13 @@ module.exports = { resolve((async () => { const paths = ipfsPath const ipfs = await getIpfs() - const results = await ipfs.pin.ls(paths, { type }) - results.forEach((res) => { - let line = cidToString(res.hash, { base: cidBase }) + for await (const res of ipfs.pin.ls(paths, { type })) { + let line = cidToString(res.cid, { base: cidBase }) if (!quiet) { line += ` ${res.type}` } print(line) - }) + } })()) } } diff --git a/src/cli/commands/pin/rm.js b/src/cli/commands/pin/rm.js index 3e08374c99..9b4e750509 100644 --- a/src/cli/commands/pin/rm.js +++ b/src/cli/commands/pin/rm.js @@ -27,7 +27,7 @@ module.exports = { const ipfs = await getIpfs() const results = await ipfs.pin.rm(ipfsPath, { recursive }) results.forEach((res) => { - print(`unpinned ${cidToString(res.hash, { base: cidBase })}`) + print(`unpinned ${cidToString(res.cid, { base: cidBase })}`) }) })()) } diff --git a/src/cli/daemon.js b/src/cli/daemon.js index c2dc556a03..9c320b8098 100644 --- a/src/cli/daemon.js +++ b/src/cli/daemon.js @@ -55,9 +55,7 @@ class Daemon { // start the daemon const ipfsOpts = Object.assign({}, { init: true, start: true, libp2p }, this._options) - const ipfs = await IPFS.create(ipfsOpts) - - this._ipfs = ipfs + const ipfs = this._ipfs = await IPFS.create(ipfsOpts) // start HTTP servers (if API or Gateway is enabled in options) const httpApi = new HttpApi(ipfs, ipfsOpts) diff --git a/src/core/api-manager.js b/src/core/api-manager.js new file mode 100644 index 0000000000..1000a28b22 --- /dev/null +++ b/src/core/api-manager.js @@ -0,0 +1,21 @@ +module.exports = class ApiManager { + constructor () { + this._api = {} + this._onUndef = () => undefined + this.api = new Proxy({}, { + get: (_, prop) => { + if (prop === 'then') return undefined // Not a promise! + return this._api[prop] === undefined ? this._onUndef(prop) : this._api[prop] + }, + has: (_, prop) => prop in this._api + }) + } + + update (nextApi, onUndef) { + const prevApi = this._api + const prevUndef = this._onUndef + this._api = nextApi + if (onUndef) this._onUndef = onUndef + return { cancel: () => this.update(prevApi, prevUndef), api: this.api } + } +}
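For context, a minimal sketch of how the ApiManager above is meant to be used (the `startedApi` object and error message are illustrative, not part of this diff): callers hold one stable `api` proxy while the node swaps real implementations in and out as it changes state.

const ApiManager = require('./api-manager')
const apiManager = new ApiManager()

// Expose the API for the current node state; the second argument handles
// access to properties that are undefined in this state
const { api, cancel } = apiManager.update(startedApi, () => {
  throw new Error('not valid in current node state')
})

// If a state transition fails part-way, cancel() rolls back to the previous API
// await cancel()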
diff --git a/src/core/boot.js b/src/core/boot.js deleted file mode 100644 index aa47be16af..0000000000 --- a/src/core/boot.js +++ /dev/null @@ -1,90 +0,0 @@ -'use strict' - -const RepoErrors = require('ipfs-repo').errors - -// Boot an IPFS node depending on the options set -module.exports = async (self) => { - self.log('booting') - const options = self._options - const doInit = options.init - const doStart = options.start - - // Checks if a repo exists, and if so opens it - // Will return callback with a bool indicating the existence - // of the repo - async function repoOpened () { - // nothing to do - if (!self._repo.closed) { - return true - } - - try { - await self._repo.open() - } catch (err) { - if (isRepoUninitializedError(err)) { - return false - } - - if (err) { - throw err - } - } - - return true - } - - // Do the actual boot sequence - try { - // Init with existing initialized, opened, repo - if (await repoOpened()) { - try { - await self.init({ repo: self._repo }) - } catch (err) { - throw Object.assign(err, { emitted: true }) - } - } else if (doInit) { - const defaultInitOptions = { - bits: 2048, - pass: self._options.pass - } - - const initOptions = Object.assign(defaultInitOptions, typeof options.init === 'object' ? options.init : {}) - - await self.init(initOptions) - } - - if (doStart) { - await self.start() - } - - self.log('booted') - self.emit('ready') - } catch (err) { - if (!err.emitted) { - self.emit('error', err) - } - } -} - -function isRepoUninitializedError (err) { - // If the error is that no repo exists, - // which happens when the version file is not found - // we just want to signal that no repo exist, not - // fail the whole process. - - // Use standardized errors as much as possible - if (err.code === RepoErrors.ERR_REPO_NOT_INITIALIZED) { - return true - } - - // TODO: As error codes continue to be standardized, this logic can be phase out; - // it is here to maintain compatibility - if (err.message.match(/not found/) || // indexeddb - err.message.match(/ENOENT/) || // fs - err.message.match(/No value/) // memory - ) { - return true - } - - return false -}
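With boot.js deleted, init/start sequencing moves behind the promise returned by IPFS.create, so boot failures reject instead of being emitted as events. A before/after sketch with illustrative options:

// Before: constructor + events
// const node = new IPFS(options)
// node.on('ready', () => { /* use node */ })
// node.on('error', err => { /* boot failed */ })

// After: one awaitable call; errors can be caught with try/catch
const node = await IPFS.create({ init: true, start: true })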
diff --git a/src/core/components/add/index.js b/src/core/components/add/index.js new file mode 100644 index 0000000000..872ffbacd6 --- /dev/null +++ b/src/core/components/add/index.js @@ -0,0 +1,121 @@ +'use strict' + +const importer = require('ipfs-unixfs-importer') +const normaliseAddInput = require('ipfs-utils/src/files/normalise-input') +const { parseChunkerString } = require('./utils') +const pipe = require('it-pipe') + +module.exports = ({ ipld, dag, gcLock, preload, pin, options: constructorOptions }) => { + const isShardingEnabled = constructorOptions.EXPERIMENTAL && constructorOptions.EXPERIMENTAL.sharding + return async function * add (source, options) { + options = options || {} + + const opts = { + shardSplitThreshold: isShardingEnabled ? 1000 : Infinity, + ...options, + strategy: 'balanced', + ...parseChunkerString(options.chunker) + } + + // CID v0 only supports sha2-256 multihashes, so a custom hashAlg implies CID v1 + if (opts.hashAlg && opts.cidVersion !== 1) { + opts.cidVersion = 1 + } + + if (opts.trickle) { + opts.strategy = 'trickle' + } + + delete opts.trickle + + if (opts.progress) { + let total = 0 + const prog = opts.progress + + opts.progress = (bytes) => { + total += bytes + prog(total) + } + } + + const iterator = pipe( + normaliseAddInput(source), + source => importer(source, ipld, opts), + transformFile(dag, opts), + preloadFile(preload, opts), + pinFile(pin, opts) + ) + + const releaseLock = await gcLock.readLock() + + try { + yield * iterator + } finally { + releaseLock() + } + } +} + +function transformFile (dag, opts) { + return async function * (source) { + for await (const { cid, path, unixfs } of source) { + if (opts.onlyHash) { + yield { + cid, + path: path || cid.toString(), + size: unixfs.fileSize() + } + + continue + } + + const { value: node } = await dag.get(cid, { ...opts, preload: false }) + + yield { + cid, + path: path || cid.toString(), + size: Buffer.isBuffer(node) ? node.length : node.size + } + } + } +} + +function preloadFile (preload, opts) { + return async function * (source) { + for await (const file of source) { + const isRootFile = !file.path || opts.wrapWithDirectory + ? file.path === '' + : !file.path.includes('/') + + const shouldPreload = isRootFile && !opts.onlyHash && opts.preload !== false + + if (shouldPreload) { + preload(file.cid) + } + + yield file + } + } +} + +function pinFile (pin, opts) { + return async function * (source) { + for await (const file of source) { + // Pin a file if it is the root dir of a recursive add or the single file + // of a direct add. + const isRootDir = !file.path.includes('/') + const shouldPin = (opts.pin == null ? true : opts.pin) && isRootDir && !opts.onlyHash + + if (shouldPin) { + // Note: add() has already taken a GC lock, so tell + // pin.add() not to take a (second) GC lock + await pin.add(file.cid, { + preload: false, + lock: false + }) + } + + yield file + } + } +}
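The add pipeline above yields `{ cid, path, size }` entries as an async iterable. A consumption sketch (the source path is illustrative; globSource comes from ipfs-utils as in the CLI add command, and it-all is added to devDependencies above):

const all = require('it-all')
const globSource = require('ipfs-utils/src/files/glob-source')

// Stream entries as they are produced...
for await (const file of ipfs.add(globSource('./docs', { recursive: true }))) {
  console.log(file.cid.toString(), file.path, file.size)
}

// ...or collect everything up front
const files = await all(ipfs.add(Buffer.from('hello world')))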
diff --git a/src/core/components/files-regular/utils.js b/src/core/components/add/utils.js similarity index 67% rename from src/core/components/files-regular/utils.js rename to src/core/components/add/utils.js index 876d0b0d48..5c3ee6cc2a 100644 --- a/src/core/components/files-regular/utils.js +++ b/src/core/components/add/utils.js @@ -1,25 +1,5 @@ 'use strict' -const CID = require('cids') -const { Buffer } = require('buffer') -const { cidToString } = require('../../../utils/cid') - -const normalizePath = (path) => { - if (Buffer.isBuffer(path)) { - return new CID(path).toString() - } - if (CID.isCID(path)) { - return path.toString() - } - if (path.indexOf('/ipfs/') === 0) { - path = path.substring('/ipfs/'.length) - } - if (path.charAt(path.length - 1) === '/') { - path = path.substring(0, path.length - 1) - } - return path -} - /** * Parses chunker string into options used by DAGBuilder in ipfs-unixfs-engine * @@ -100,37 +80,8 @@ const parseChunkSize = (str, name) => { return size } -const mapFile = (file, options) => { - options = options || {} - - let size = 0 - let type = 'dir' - - if (file.unixfs && file.unixfs.type === 'file') { - size = file.unixfs.fileSize() - type = 'file' - } - - const output = { - hash: cidToString(file.cid, { base: options.cidBase }), - path: file.path, - name: file.name, - depth: file.path.split('/').length, - size, - type - } - - if (options.includeContent && file.unixfs && file.unixfs.type === 'file') { - output.content = file.content - } - - return output -} - module.exports = { - normalizePath, parseChunkSize, parseRabinString, - parseChunkerString, - mapFile + parseChunkerString } diff --git a/src/core/components/bitswap.js b/src/core/components/bitswap.js deleted file mode 100644 index 654f9f045b..0000000000 --- a/src/core/components/bitswap.js +++ /dev/null @@ -1,72 +0,0 @@ -'use strict' - -const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR -const callbackify = require('callbackify') -const Big = require('bignumber.js') -const CID = require('cids') -const PeerId = require('peer-id') -const errCode = require('err-code') - -function formatWantlist (list, cidBase) { - return Array.from(list).map((e) => ({ '/': e[1].cid.toBaseEncodedString(cidBase) })) -} - -module.exports = function bitswap (self) { - return { - wantlist: callbackify.variadic(async (peerId) => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - let list - - if (peerId) { - peerId = PeerId.createFromB58String(peerId) - - list = self._bitswap.wantlistForPeer(peerId) - } else { - list = self._bitswap.getWantlist() - } - - return { Keys: formatWantlist(list) } - }), - - stat: callbackify(async () => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - const snapshot = self._bitswap.stat().snapshot - - return { - provideBufLen: parseInt(snapshot.providesBufferLength.toString()), - blocksReceived: new Big(snapshot.blocksReceived), - wantlist: formatWantlist(self._bitswap.getWantlist()), - peers: self._bitswap.peers().map((id) => id.toB58String()), - dupBlksReceived: new Big(snapshot.dupBlksReceived), - dupDataReceived: new Big(snapshot.dupDataReceived), - dataReceived: new Big(snapshot.dataReceived),
blocksSent: new Big(snapshot.blocksSent), - dataSent: new Big(snapshot.dataSent) - } - }), - - unwant: callbackify(async (keys) => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - if (!Array.isArray(keys)) { - keys = [keys] - } - - try { - keys = keys.map((key) => new CID(key)) - } catch (err) { - throw errCode(err, 'ERR_INVALID_CID') - } - - return self._bitswap.unwant(keys) - }) - } -} diff --git a/src/core/components/bitswap/stat.js b/src/core/components/bitswap/stat.js new file mode 100644 index 0000000000..ae57e4e5b1 --- /dev/null +++ b/src/core/components/bitswap/stat.js @@ -0,0 +1,22 @@ +'use strict' + +const Big = require('bignumber.js') +const CID = require('cids') + +module.exports = ({ bitswap }) => { + return async function stat () { // eslint-disable-line require-await + const snapshot = bitswap.stat().snapshot + + return { + provideBufLen: parseInt(snapshot.providesBufferLength.toString()), + blocksReceived: new Big(snapshot.blocksReceived), + wantlist: Array.from(bitswap.getWantlist()).map(e => e[1].cid), + peers: bitswap.peers().map(id => new CID(id.toB58String())), + dupBlksReceived: new Big(snapshot.dupBlksReceived), + dupDataReceived: new Big(snapshot.dupDataReceived), + dataReceived: new Big(snapshot.dataReceived), + blocksSent: new Big(snapshot.blocksSent), + dataSent: new Big(snapshot.dataSent) + } + } +} diff --git a/src/core/components/bitswap/unwant.js b/src/core/components/bitswap/unwant.js new file mode 100644 index 0000000000..9f71172ef4 --- /dev/null +++ b/src/core/components/bitswap/unwant.js @@ -0,0 +1,20 @@ +'use strict' + +const CID = require('cids') +const errCode = require('err-code') + +module.exports = ({ bitswap }) => { + return async function unwant (keys) { // eslint-disable-line require-await + if (!Array.isArray(keys)) { + keys = [keys] + } + + try { + keys = keys.map((key) => new CID(key)) + } catch (err) { + throw errCode(err, 'ERR_INVALID_CID') + } + + return bitswap.unwant(keys) + } +} diff --git a/src/core/components/bitswap/wantlist.js b/src/core/components/bitswap/wantlist.js new file mode 100644 index 0000000000..9878bb52fa --- /dev/null +++ b/src/core/components/bitswap/wantlist.js @@ -0,0 +1,13 @@ +'use strict' + +const PeerId = require('peer-id') + +module.exports = ({ bitswap }) => { + return async function wantlist (peerId) { // eslint-disable-line require-await + const list = peerId + ? 
bitswap.wantlistForPeer(PeerId.createFromCID(peerId)) + : bitswap.getWantlist() + + return Array.from(list).map(e => e[1].cid) + } +} diff --git a/src/core/components/block/get.js b/src/core/components/block/get.js new file mode 100644 index 0000000000..afc95d8b45 --- /dev/null +++ b/src/core/components/block/get.js @@ -0,0 +1,16 @@ +'use strict' + +const { cleanCid } = require('./utils') + +module.exports = ({ blockService, preload }) => { + return async function get (cid, options) { // eslint-disable-line require-await + options = options || {} + cid = cleanCid(cid) + + if (options.preload !== false) { + preload(cid) + } + + return blockService.get(cid) + } +} diff --git a/src/core/components/block/put.js b/src/core/components/block/put.js new file mode 100644 index 0000000000..526bc23e7f --- /dev/null +++ b/src/core/components/block/put.js @@ -0,0 +1,51 @@ +'use strict' + +const Block = require('ipfs-block') +const multihashing = require('multihashing-async') +const CID = require('cids') + +module.exports = ({ blockService, gcLock, preload }) => { + return async function put (block, options) { + options = options || {} + + if (Array.isArray(block)) { + throw new Error('Array is not supported') + } + + if (!Block.isBlock(block)) { + if (options.cid && CID.isCID(options.cid)) { + block = new Block(block, options.cid) + } else { + const mhtype = options.mhtype || 'sha2-256' + const format = options.format || 'dag-pb' + let cidVersion + + if (options.version == null) { + // Pick appropriate CID version + cidVersion = mhtype === 'sha2-256' && format === 'dag-pb' ? 0 : 1 + } else { + cidVersion = options.version + } + + const multihash = await multihashing(block, mhtype) + const cid = new CID(cidVersion, format, multihash) + + block = new Block(block, cid) + } + } + + const release = await gcLock.readLock() + + try { + await blockService.put(block) + + if (options.preload !== false) { + preload(block.cid) + } + + return block + } finally { + release() + } + } +} diff --git a/src/core/components/block/rm.js b/src/core/components/block/rm.js new file mode 100644 index 0000000000..eba0a8fdf5 --- /dev/null +++ b/src/core/components/block/rm.js @@ -0,0 +1,65 @@ +'use strict' + +const CID = require('cids') +const errCode = require('err-code') +const { parallelMap, filter } = require('streaming-iterables') +const pipe = require('it-pipe') +const { PinTypes } = require('../pin/pin-manager') +const { cleanCid } = require('./utils') + +const BLOCK_RM_CONCURRENCY = 8 + +module.exports = ({ blockService, gcLock, pinManager }) => { + return async function * rm (cids, options) { + options = options || {} + + if (!Array.isArray(cids)) { + cids = [cids] + } + + // We need to take a write lock here to ensure that adding and removing + // blocks are exclusive operations + const release = await gcLock.writeLock() + + try { + yield * pipe( + cids, + parallelMap(BLOCK_RM_CONCURRENCY, async cid => { + cid = cleanCid(cid) + + const result = { hash: cid.toString() } + + try { + const pinResult = await pinManager.isPinnedWithType(cid, PinTypes.all) + + if (pinResult.pinned) { + if (CID.isCID(pinResult.reason)) { // eslint-disable-line max-depth + throw errCode(new Error(`pinned via ${pinResult.reason}`)) + } + + throw errCode(new Error(`pinned: ${pinResult.reason}`)) + } + + // remove has check when https://github.com/ipfs/js-ipfs-block-service/pull/88 is merged + const has = await blockService._repo.blocks.has(cid) + + if (!has) { + throw errCode(new Error('block not found'), 'ERR_BLOCK_NOT_FOUND') + } + + await 
blockService.delete(cid) + } catch (err) { + if (!options.force) { + result.error = `cannot remove ${cid}: ${err.message}` + } + } + + return result + }), + filter(() => !options.quiet) + ) + } finally { + release() + } + } +} diff --git a/src/core/components/block/stat.js b/src/core/components/block/stat.js new file mode 100644 index 0000000000..22f1169fe0 --- /dev/null +++ b/src/core/components/block/stat.js @@ -0,0 +1,18 @@ +'use strict' + +const { cleanCid } = require('./utils') + +module.exports = ({ blockService, preload }) => { + return async function stat (cid, options) { + options = options || {} + cid = cleanCid(cid) + + if (options.preload !== false) { + preload(cid) + } + + const block = await blockService.get(cid) + + return { cid, size: block.data.length } + } +} diff --git a/src/core/components/block/utils.js b/src/core/components/block/utils.js new file mode 100644 index 0000000000..76ca4fa293 --- /dev/null +++ b/src/core/components/block/utils.js @@ -0,0 +1,17 @@ +'use strict' + +const CID = require('cids') +const errCode = require('err-code') + +exports.cleanCid = cid => { + if (CID.isCID(cid)) { + return cid + } + + // CID constructor knows how to do the cleaning :) + try { + return new CID(cid) + } catch (err) { + throw errCode(err, 'ERR_INVALID_CID') + } +} diff --git a/src/core/components/bootstrap.js b/src/core/components/bootstrap.js deleted file mode 100644 index dad39cdd26..0000000000 --- a/src/core/components/bootstrap.js +++ /dev/null @@ -1,66 +0,0 @@ -'use strict' - -const defaultConfig = require('../runtime/config-nodejs.js') -const isMultiaddr = require('mafmt').IPFS.matches -const callbackify = require('callbackify') - -function isValidMultiaddr (ma) { - try { - return isMultiaddr(ma) - } catch (err) { - return false - } -} - -function invalidMultiaddrError (ma) { - return new Error(`${ma} is not a valid Multiaddr`) -} - -module.exports = function bootstrap (self) { - return { - list: callbackify(async () => { - const config = await self._repo.config.get() - - return { Peers: config.Bootstrap } - }), - add: callbackify.variadic(async (multiaddr, args = { default: false }) => { - if (multiaddr && !isValidMultiaddr(multiaddr)) { - throw invalidMultiaddrError(multiaddr) - } - - const config = await self._repo.config.get() - if (args.default) { - config.Bootstrap = defaultConfig().Bootstrap - } else if (multiaddr && config.Bootstrap.indexOf(multiaddr) === -1) { - config.Bootstrap.push(multiaddr) - } - await self._repo.config.set(config) - - return { - Peers: args.default ? 
defaultConfig().Bootstrap : [multiaddr] - } - }), - rm: callbackify.variadic(async (multiaddr, args = { all: false }) => { - if (multiaddr && !isValidMultiaddr(multiaddr)) { - throw invalidMultiaddrError(multiaddr) - } - - let res = [] - const config = await self._repo.config.get() - if (args.all) { - res = config.Bootstrap - config.Bootstrap = [] - } else { - config.Bootstrap = config.Bootstrap.filter((mh) => mh !== multiaddr) - } - - await self._repo.config.set(config) - - if (!args.all && multiaddr) { - res.push(multiaddr) - } - - return { Peers: res } - }) - } -} diff --git a/src/core/components/bootstrap/add.js b/src/core/components/bootstrap/add.js new file mode 100644 index 0000000000..791d41a38a --- /dev/null +++ b/src/core/components/bootstrap/add.js @@ -0,0 +1,26 @@ +'use strict' + +const defaultConfig = require('../../runtime/config-nodejs.js') +const { isValidMultiaddr } = require('./utils') + +module.exports = ({ repo }) => { + return async function add (multiaddr, options) { + options = options || {} + + if (multiaddr && !isValidMultiaddr(multiaddr)) { + throw new Error(`${multiaddr} is not a valid Multiaddr`) + } + + const config = await repo.config.get() + if (options.default) { + config.Bootstrap = defaultConfig().Bootstrap + } else if (multiaddr && config.Bootstrap.indexOf(multiaddr) === -1) { + config.Bootstrap.push(multiaddr) + } + await repo.config.set(config) + + return { + Peers: options.default ? defaultConfig().Bootstrap : [multiaddr] + } + } +} diff --git a/src/core/components/bootstrap/list.js b/src/core/components/bootstrap/list.js new file mode 100644 index 0000000000..bc21c6e708 --- /dev/null +++ b/src/core/components/bootstrap/list.js @@ -0,0 +1,8 @@ +'use strict' + +module.exports = ({ repo }) => { + return async function list () { + const config = await repo.config.get() + return { Peers: config.Bootstrap || [] } + } +} diff --git a/src/core/components/bootstrap/rm.js b/src/core/components/bootstrap/rm.js new file mode 100644 index 0000000000..070ae9bb14 --- /dev/null +++ b/src/core/components/bootstrap/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const { isValidMultiaddr } = require('./utils') + +module.exports = ({ repo }) => { + return async function rm (multiaddr, options) { + options = options || {} + + if (multiaddr && !isValidMultiaddr(multiaddr)) { + throw new Error(`${multiaddr} is not a valid Multiaddr`) + } + + let res = [] + const config = await repo.config.get() + + if (options.all) { + res = config.Bootstrap || [] + config.Bootstrap = [] + } else { + config.Bootstrap = (config.Bootstrap || []).filter(ma => ma !== multiaddr) + } + + await repo.config.set(config) + + if (!options.all && multiaddr) { + res.push(multiaddr) + } + + return { Peers: res } + } +} diff --git a/src/core/components/bootstrap/utils.js b/src/core/components/bootstrap/utils.js new file mode 100644 index 0000000000..4e525ce021 --- /dev/null +++ b/src/core/components/bootstrap/utils.js @@ -0,0 +1,11 @@ +'use strict' + +const isMultiaddr = require('mafmt').IPFS.matches + +exports.isValidMultiaddr = ma => { + try { + return isMultiaddr(ma) + } catch (err) { + return false + } +} diff --git a/src/core/components/files-regular/cat-async-iterator.js b/src/core/components/cat.js similarity index 64% rename from src/core/components/files-regular/cat-async-iterator.js rename to src/core/components/cat.js index 6b7f1af116..14a85978d4 100644 --- a/src/core/components/files-regular/cat-async-iterator.js +++ b/src/core/components/cat.js @@ -1,20 +1,20 @@ 'use strict' const exporter = 
require('ipfs-unixfs-exporter') -const { normalizePath } = require('./utils') +const { normalizeCidPath } = require('../utils') -module.exports = function (self) { - return async function * catAsyncIterator (ipfsPath, options) { +module.exports = function ({ ipld, preload }) { + return async function * cat (ipfsPath, options) { options = options || {} - ipfsPath = normalizePath(ipfsPath) + ipfsPath = normalizeCidPath(ipfsPath) if (options.preload !== false) { const pathComponents = ipfsPath.split('/') - self._preload(pathComponents[0]) + preload(pathComponents[0]) } - const file = await exporter(ipfsPath, self._ipld, options) + const file = await exporter(ipfsPath, ipld, options) // File may not have unixfs prop if small & imported with rawLeaves true if (file.unixfs && file.unixfs.type.includes('dir')) { diff --git a/src/core/components/config.js b/src/core/components/config.js index 381a36ce61..c747387c73 100644 --- a/src/core/components/config.js +++ b/src/core/components/config.js @@ -1,17 +1,16 @@ 'use strict' -const callbackify = require('callbackify') const getDefaultConfig = require('../runtime/config-nodejs.js') const log = require('debug')('ipfs:core:config') -module.exports = function config (self) { +module.exports = ({ repo }) => { return { - get: callbackify.variadic(self._repo.config.get), - set: callbackify(self._repo.config.set), - replace: callbackify.variadic(self._repo.config.set), + get: repo.config.get, + set: repo.config.set, + replace: repo.config.set, profiles: { - apply: callbackify.variadic(applyProfile), - list: callbackify.variadic(listProfiles) + apply: applyProfile, + list: listProfiles } } @@ -26,12 +25,12 @@ module.exports = function config (self) { } try { - const oldCfg = await self.config.get() + const oldCfg = await repo.config.get() let newCfg = JSON.parse(JSON.stringify(oldCfg)) // clone newCfg = profile.transform(newCfg) if (!dryRun) { - await self.config.replace(newCfg) + await repo.config.set(newCfg) } // Scrub private key from output diff --git a/src/core/components/dag.js b/src/core/components/dag.js deleted file mode 100644 index fd704e8139..0000000000 --- a/src/core/components/dag.js +++ /dev/null @@ -1,170 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const CID = require('cids') -const all = require('async-iterator-all') -const errCode = require('err-code') -const multicodec = require('multicodec') - -function parseArgs (cid, path, options) { - options = options || {} - - // Allow options in path position - if (path !== undefined && typeof path !== 'string') { - options = path - path = undefined - } - - if (typeof cid === 'string') { - if (cid.startsWith('/ipfs/')) { - cid = cid.substring(6) - } - - const split = cid.split('/') - - try { - cid = new CID(split[0]) - } catch (err) { - throw errCode(err, 'ERR_INVALID_CID') - } - - split.shift() - - if (split.length > 0) { - path = split.join('/') - } else { - path = path || '/' - } - } else if (Buffer.isBuffer(cid)) { - try { - cid = new CID(cid) - } catch (err) { - throw errCode(err, 'ERR_INVALID_CID') - } - } - - return [ - cid, - path, - options - ] -} - -module.exports = function dag (self) { - return { - put: callbackify.variadic(async (dagNode, options) => { - options = options || {} - - if (options.cid && (options.format || options.hashAlg)) { - throw new Error('Can\'t put dag node. 
Please provide either `cid` OR `format` and `hashAlg` options.') - } else if (((options.format && !options.hashAlg) || (!options.format && options.hashAlg))) { - throw new Error('Can\'t put dag node. Please provide `format` AND `hashAlg` options.') - } - - const optionDefaults = { - format: multicodec.DAG_CBOR, - hashAlg: multicodec.SHA2_256 - } - - // The IPLD expects the format and hashAlg as constants - if (options.format && typeof options.format === 'string') { - const constantName = options.format.toUpperCase().replace(/-/g, '_') - options.format = multicodec[constantName] - } - if (options.hashAlg && typeof options.hashAlg === 'string') { - const constantName = options.hashAlg.toUpperCase().replace(/-/g, '_') - options.hashAlg = multicodec[constantName] - } - - options = options.cid ? options : Object.assign({}, optionDefaults, options) - - // js-ipld defaults to verion 1 CIDs. Hence set version 0 explicitly for - // dag-pb nodes - if (options.version === undefined) { - if (options.format === multicodec.DAG_PB && options.hashAlg === multicodec.SHA2_256) { - options.version = 0 - } else { - options.version = 1 - } - } - - let release - - if (options.pin) { - release = await self._gcLock.readLock() - } - - try { - const cid = await self._ipld.put(dagNode, options.format, { - hashAlg: options.hashAlg, - cidVersion: options.version - }) - - if (options.pin) { - await self.pin.add(cid, { - lock: false - }) - } - - if (options.preload !== false) { - self._preload(cid) - } - - return cid - } finally { - if (release) { - release() - } - } - }), - - get: callbackify.variadic(async (cid, path, options) => { - [cid, path, options] = parseArgs(cid, path, options) - - if (options.preload !== false) { - self._preload(cid) - } - - if (path == null || path === '/') { - const value = await self._ipld.get(cid) - - return { - value, - remainderPath: '' - } - } else { - let result - - for await (const entry of self._ipld.resolve(cid, path)) { - if (options.localResolve) { - return entry - } - - result = entry - } - - return result - } - }), - - tree: callbackify.variadic(async (cid, path, options) => { // eslint-disable-line require-await - [cid, path, options] = parseArgs(cid, path, options) - - if (options.preload !== false) { - self._preload(cid) - } - - return all(self._ipld.tree(cid, path, options)) - }), - - resolve: callbackify.variadic(async (cid, path, options) => { // eslint-disable-line require-await - [cid, path, options] = parseArgs(cid, path, options) - - if (options.preload !== false) { - self._preload(cid) - } - - return all(self._ipld.resolve(cid, path)) - }) - } -} diff --git a/src/core/components/dag/get.js b/src/core/components/dag/get.js new file mode 100644 index 0000000000..11c17152bc --- /dev/null +++ b/src/core/components/dag/get.js @@ -0,0 +1,34 @@ +'use strict' + +const { parseArgs } = require('./utils') + +module.exports = ({ ipld, preload }) => { + return async function get (cid, path, options) { + [cid, path, options] = parseArgs(cid, path, options) + + if (options.preload !== false) { + preload(cid) + } + + if (path == null || path === '/') { + const value = await ipld.get(cid) + + return { + value, + remainderPath: '' + } + } else { + let result + + for await (const entry of ipld.resolve(cid, path)) { + if (options.localResolve) { + return entry + } + + result = entry + } + + return result + } + } +}
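A usage sketch for the extracted dag.get (the CID and path are illustrative):

// Fetch a whole node
const { value } = await ipfs.dag.get(cid)

// Traverse a path below the node; the last resolved entry is returned
const result = await ipfs.dag.get(cid, 'a/b/c')

// With localResolve the first entry is returned instead, and its
// remainderPath is the part of the path that was not yet resolved
const local = await ipfs.dag.get(cid, 'a/b/c', { localResolve: true })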
diff --git a/src/core/components/dag/put.js b/src/core/components/dag/put.js new file mode 100644 index 0000000000..301c87ba8c --- /dev/null +++ b/src/core/components/dag/put.js @@ -0,0 +1,70 @@ +'use strict' + +const multicodec = require('multicodec') +const nameToCodec = name => multicodec[name.toUpperCase().replace(/-/g, '_')] + +module.exports = ({ ipld, pin, gcLock, preload }) => { + return async function put (dagNode, options) { + options = options || {} + + if (options.cid && (options.format || options.hashAlg)) { + throw new Error('Can\'t put dag node. Please provide either `cid` OR `format` and `hashAlg` options.') + } else if (((options.format && !options.hashAlg) || (!options.format && options.hashAlg))) { + throw new Error('Can\'t put dag node. Please provide `format` AND `hashAlg` options.') + } + + const optionDefaults = { + format: multicodec.DAG_CBOR, + hashAlg: multicodec.SHA2_256 + } + + // The IPLD expects the format and hashAlg as constants + if (options.format && typeof options.format === 'string') { + options.format = nameToCodec(options.format) + } + if (options.hashAlg && typeof options.hashAlg === 'string') { + options.hashAlg = nameToCodec(options.hashAlg) + } + + options = options.cid ? options : Object.assign({}, optionDefaults, options) + + // js-ipld defaults to version 1 CIDs. Hence set version 0 explicitly for + // dag-pb nodes + if (options.version === undefined) { + if (options.format === multicodec.DAG_PB && options.hashAlg === multicodec.SHA2_256) { + options.version = 0 + } else { + options.version = 1 + } + } + + let release + + if (options.pin) { + release = await gcLock.readLock() + } + + try { + const cid = await ipld.put(dagNode, options.format, { + hashAlg: options.hashAlg, + cidVersion: options.version + }) + + if (options.pin) { + await pin.add(cid, { + lock: false + }) + } + + if (options.preload !== false) { + preload(cid) + } + + return cid + } finally { + if (release) { + release() + } + } + } +} diff --git a/src/core/components/dag/resolve.js b/src/core/components/dag/resolve.js new file mode 100644 index 0000000000..e95e5b526f --- /dev/null +++ b/src/core/components/dag/resolve.js @@ -0,0 +1,15 @@ +'use strict' + +const { parseArgs } = require('./utils') + +module.exports = ({ ipld, preload }) => { + return async function * resolve (cid, path, options) { // eslint-disable-line require-await + [cid, path, options] = parseArgs(cid, path, options) + + if (options.preload !== false) { + preload(cid) + } + + yield * ipld.resolve(cid, path) + } +} diff --git a/src/core/components/dag/tree.js b/src/core/components/dag/tree.js new file mode 100644 index 0000000000..07d2d03e65 --- /dev/null +++ b/src/core/components/dag/tree.js @@ -0,0 +1,15 @@ +'use strict' + +const { parseArgs } = require('./utils') + +module.exports = ({ ipld, preload }) => { + return async function * tree (cid, path, options) { // eslint-disable-line require-await + [cid, path, options] = parseArgs(cid, path, options) + + if (options.preload !== false) { + preload(cid) + } + + yield * ipld.tree(cid, path, options) + } +} diff --git a/src/core/components/dag/utils.js b/src/core/components/dag/utils.js new file mode 100644 index 0000000000..810b0e2f9a --- /dev/null +++ b/src/core/components/dag/utils.js @@ -0,0 +1,48 @@ +'use strict' + +const CID = require('cids') +const errCode = require('err-code') + +exports.parseArgs = (cid, path, options) => { + options = options || {} + + // Allow options in path position + if (path !== undefined && typeof path !== 'string') { + options = path + path = undefined + } + + if (typeof cid === 'string') { + if (cid.startsWith('/ipfs/')) { + cid = cid.substring(6) + } + + const split = 
cid.split('/') + + try { + cid = new CID(split[0]) + } catch (err) { + throw errCode(err, 'ERR_INVALID_CID') + } + + split.shift() + + if (split.length > 0) { + path = split.join('/') + } else { + path = path || '/' + } + } else if (Buffer.isBuffer(cid)) { + try { + cid = new CID(cid) + } catch (err) { + throw errCode(err, 'ERR_INVALID_CID') + } + } + + return [ + cid, + path, + options + ] +} diff --git a/src/core/components/dns.js b/src/core/components/dns.js index 380be30329..3769d4f14e 100644 --- a/src/core/components/dns.js +++ b/src/core/components/dns.js @@ -2,7 +2,6 @@ // dns-nodejs gets replaced by dns-browser when webpacked/browserified const dns = require('../runtime/dns-nodejs') -const callbackify = require('callbackify') function fqdnFixups (domain) { // Allow resolution of .eth names via .eth.link @@ -14,7 +13,7 @@ function fqdnFixups (domain) { } module.exports = () => { - return callbackify.variadic(async (domain, opts) => { // eslint-disable-line require-await + return async (domain, opts) => { // eslint-disable-line require-await opts = opts || {} if (typeof domain !== 'string') { @@ -24,5 +23,5 @@ module.exports = () => { domain = fqdnFixups(domain) return dns(domain, opts) - }) + } } diff --git a/src/core/components/files-mfs.js b/src/core/components/files-mfs.js deleted file mode 100644 index 9d621ad60a..0000000000 --- a/src/core/components/files-mfs.js +++ /dev/null @@ -1,374 +0,0 @@ -'use strict' - -const mfs = require('ipfs-mfs/core') -const isPullStream = require('is-pull-stream') -const toPullStream = require('async-iterator-to-pull-stream') -const toReadableStream = require('async-iterator-to-stream') -const pullStreamToAsyncIterator = require('pull-stream-to-async-iterator') -const all = require('async-iterator-all') -const nodeify = require('promise-nodeify') -const PassThrough = require('stream').PassThrough -const pull = require('pull-stream/pull') -const map = require('pull-stream/throughs/map') -const isIpfs = require('is-ipfs') -const { cidToString } = require('../../utils/cid') - -/** - * @typedef { import("readable-stream").Readable } ReadableStream - * @typedef { import("pull-stream") } PullStream - */ - -const mapLsFile = (options) => { - options = options || {} - - const long = options.long || options.l - - return (file) => { - return { - hash: long ? cidToString(file.cid, { base: options.cidBase }) : '', - name: file.name, - type: long ? file.type : 0, - size: long ? file.size || 0 : 0 - } - } -} - -module.exports = (/** @type { import("../index") } */ ipfs) => { - const methodsOriginal = mfs({ - ipld: ipfs._ipld, - blocks: ipfs._blockService, - datastore: ipfs._repo.root, - repoOwner: ipfs._options.repoOwner - }) - - const withPreload = fn => (...args) => { - const paths = args.filter(arg => isIpfs.ipfsPath(arg) || isIpfs.cid(arg)) - - if (paths.length) { - const options = args[args.length - 1] - if (options && options.preload !== false) { - paths.forEach(path => ipfs._preload(path)) - } - } - - return fn(...args) - } - - const methods = { - ...methodsOriginal, - cp: withPreload(methodsOriginal.cp), - ls: withPreload(methodsOriginal.ls), - mv: withPreload(methodsOriginal.mv), - read: withPreload(methodsOriginal.read), - stat: withPreload(methodsOriginal.stat) - } - - return { - /** - * Copy files - * - * @param {String | Array} from - The path(s) of the source to copy. - * @param {String} to - The path of the destination to copy to. - * @param {Object} [opts] - Options for copy. 
- * @param {boolean} [opts.parents=false] - Whether or not to make the parent directories if they don't exist. (default: false) - * @param {String} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb) - * @param {String} [opts.hashAlg=sha2-256] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} - * @param {boolean} [opts.flush=true] - Whether or not to immediately flush MFS changes to disk (default: true). - * @param {function(Error): void} [cb] - Callback function. - * @returns {Promise | void} When callback is provided nothing is returned. - */ - cp: (from, to, opts, cb) => { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - return nodeify(methods.cp(from, to, opts), cb) - }, - - /** - * Make a directory - * - * @param {String} path - The path to the directory to make. - * @param {Object} [opts] - Options for mkdir. - * @param {boolean} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false) - * @param {String} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb). - * @param {String} [opts.hashAlg] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} - * @param {boolean} [opts.flush=true] - Whether or not to immediately flush MFS changes to disk (default: true). - * @param {function(Error): void} [cb] - Callback function. - * @returns {Promise | void} When callback is provided nothing is returned. - */ - mkdir: (path, opts, cb) => { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - return nodeify(methods.mkdir(path, opts), cb) - }, - - /** - * @typedef {Object} StatOutput - * @prop {String} hash - Output hash. - * @prop {number} size - File size in bytes. - * @prop {number} cumulativeSize - Integer with the size of the DAGNodes making up the file in Bytes. - * @prop {string} type - Output type either 'directory' or 'file'. - * @prop {number} blocks - If type is directory, this is the number of files in the directory. If it is file it is the number of blocks that make up the file. - * @prop {boolean} withLocality - Indicate if locality information is present. - * @prop {boolean} local - Indicate if the queried dag is fully present locally. - * @prop {number} sizeLocal - Integer indicating the cumulative size of the data present locally. - */ - - /** - * Get file or directory status. - * - * @param {String} path - Path to the file or directory to stat. - * @param {Object} [opts] - Options for stat. - * @param {boolean} [opts.hash=false] - Return only the hash. (default: false) - * @param {boolean} [opts.size=false] - Return only the size. (default: false) - * @param {boolean} [opts.withLocal=false] - Compute the amount of the dag that is local, and if possible the total size. (default: false) - * @param {String} [opts.cidBase=base58btc] - Which number base to use to format hashes - e.g. base32, base64 etc. (default: base58btc) - * @param {function(Error, StatOutput): void} [cb] - Callback function. - * @returns {Promise | void} When callback is provided nothing is returned. 
- */ - stat: (path, opts, cb) => { - const stat = async (path, opts = {}) => { - const stats = await methods.stat(path, opts) - - stats.hash = stats.cid.toBaseEncodedString(opts && opts.cidBase) - delete stats.cid - - return stats - } - - if (typeof opts === 'function') { - cb = opts - opts = {} - } - - return nodeify(stat(path, opts), cb) - }, - - /** - * Remove a file or directory. - * - * @param {String | Array} paths - One or more paths to remove. - * @param {Object} [opts] - Options for remove. - * @param {boolean} [opts.recursive=false] - Whether or not to remove directories recursively. (default: false) - * @param {function(Error): void} [cb] - Callback function. - * @returns {Promise | void} When callback is provided nothing is returned. - */ - rm: (paths, opts, cb) => { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - return nodeify(methods.rm(paths, opts), cb) - }, - - /** - * @typedef {Object} ReadOptions - * @prop {number} [opts.offset=0] - Integer with the byte offset to begin reading from (default: 0). - * @prop {number} [opts.length] - Integer with the maximum number of bytes to read (default: Read to the end of stream). - */ - - /** - * Read a file into a Buffer. - * - * @param {string} path - Path of the file to read and must point to a file (and not a directory). - * @param {ReadOptions} [opts] - Object for read. - * @param {function(Error, Buffer): void} [cb] - Callback function. - * @returns {Promise | void} When callback is provided nothing is returned. - */ - read: (path, opts, cb) => { - const read = async (path, opts = {}) => { - return Buffer.concat(await all(methods.read(path, opts))) - } - - if (typeof opts === 'function') { - cb = opts - opts = {} - } - return nodeify(read(path, opts), cb) - }, - - /** - * Read a file into a ReadableStream. - * - * @param {string} path - Path of the file to read and must point to a file (and not a directory). - * @param {ReadOptions} [opts] - Object for read. - * @returns {ReadableStream} Returns a ReadableStream with the contents of path. - */ - readReadableStream: (path, opts = {}) => toReadableStream(methods.read(path, opts)), - - /** - * Read a file into a PullStrean. - * - * @param {string} path - Path of the file to read and must point to a file (and not a directory). - * @param {ReadOptions} [opts] - Object for read. - * @returns {PullStream} Returns a PullStream with the contents of path. - */ - readPullStream: (path, opts = {}) => toPullStream.source(methods.read(path, opts)), - - /** - * Write to a file. - * - * @param {string} path - Path of the file to write. - * @param {Buffer | PullStream | ReadableStream | Blob | string} content - Content to write. - * @param {Object} opts - Options for write. - * @param {number} [opts.offset=0] - Integer with the byte offset to begin writing at. (default: 0) - * @param {boolean} [opts.create=false] - Indicate to create the file if it doesn't exist. (default: false) - * @param {boolean} [opts.truncate=false] - Indicate if the file should be truncated after writing all the bytes from content. (default: false) - * @param {boolena} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false) - * @param {number} [opts.length] - Maximum number of bytes to read. (default: Read all bytes from content) - * @param {boolean} [opts.rawLeaves=false] - If true, DAG leaves will contain raw file data and not be wrapped in a protobuf. 
(default: false) - * @param {number} [opts.cidVersion=0] - The CID version to use when storing the data (storage keys are based on the CID, including its version). (default: 0) - * @param {function(Error): void} [cb] - Callback function. - * @returns {Promise | void} When callback is provided nothing is returned. - */ - write: (path, content, opts, cb) => { - const write = async (path, content, opts = {}) => { - if (isPullStream.isSource(content)) { - content = pullStreamToAsyncIterator(content) - } - - await methods.write(path, content, opts) - } - if (typeof opts === 'function') { - cb = opts - opts = {} - } - return nodeify(write(path, content, opts), cb) - }, - - /** - * Move files. - * - * @param {string | Array} from - Path(s) of the source to move. - * @param {string} to - Path of the destination to move to. - * @param {Object} opts - Options for mv. - * @param {boolean} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false) - * @param {String} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb). - * @param {String} [opts.hashAlg] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} - * @param {boolean} [opts.flush=true] - Value to decide whether or not to immediately flush MFS changes to disk. (default: true) - * @param {function(Error): void} [cb] - Callback function. - * @returns {Promise | void} When callback is provided nothing is returned. - * @description - * If from has multiple values then to must be a directory. - * - * If from has a single value and to exists and is a directory, from will be moved into to. - * - * If from has a single value and to exists and is a file, from must be a file and the contents of to will be replaced with the contents of from otherwise an error will be returned. - * - * If from is an IPFS path, and an MFS path exists with the same name, the IPFS path will be chosen. - * - * All values of from will be removed after the operation is complete unless they are an IPFS path. - */ - mv: (from, to, opts, cb) => { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - return nodeify(methods.mv(from, to, opts), cb) - }, - - /** - * Flush a given path's data to the disk. - * - * @param {string | Array} [paths] - String paths to flush. (default: /) - * @param {function(Error): void} [cb] - Callback function. - * @returns {Promise | void} When callback is provided nothing is returned. - */ - flush: (paths, cb) => { - if (typeof paths === 'function') { - cb = paths - paths = undefined - } - return nodeify(methods.flush(paths), cb) - }, - - /** - * @typedef {Object} ListOutputFile - * @prop {string} name - Which is the file's name. - * @prop {string} type - Which is the object's type (directory or file). - * @prop {number} size - The size of the file in bytes. - * @prop {string} hash - The hash of the file. - */ - - /** - * @typedef {Object} ListOptions - * @prop {boolean} [long=false] - Value to decide whether or not to populate type, size and hash. (default: false) - * @prop {string} [cidBase=base58btc] - Which number base to use to format hashes - e.g. base32, base64 etc. (default: base58btc) - * @prop {boolean} [sort=false] - If true entries will be sorted by filename. (default: false) - */ - - /** - * List directories in the local mutable namespace. 
- * - * @param {string} [path="/"] - String to show listing for. (default: /) - * @param {ListOptions} [opts] - Options for list. - * @param {function(Error, Array): void} [cb] - Callback function. - * @returns {Promise> | void} When callback is provided nothing is returned. - */ - ls: (path, opts, cb) => { - const ls = async (path, opts = {}) => { - const files = await all(methods.ls(path, opts)) - - return files.map(mapLsFile(opts)) - } - - if (typeof path === 'function') { - cb = path - path = '/' - opts = {} - } - - if (typeof opts === 'function') { - cb = opts - opts = {} - } - return nodeify(ls(path, opts), cb) - }, - - /** - * Lists a directory from the local mutable namespace that is addressed by a valid IPFS Path. The list will be yielded as Readable Streams. - * - * @param {string} [path="/"] - String to show listing for. (default: /) - * @param {ListOptions} [opts] - Options for list. - * @returns {ReadableStream} It returns a Readable Stream in Object mode that will yield {@link ListOutputFile} - */ - lsReadableStream: (path, opts = {}) => { - const stream = toReadableStream.obj(methods.ls(path, opts)) - const through = new PassThrough({ - objectMode: true - }) - stream.on('data', (file) => { - through.write(mapLsFile(opts)(file)) - }) - stream.on('error', (err) => { - through.destroy(err) - }) - stream.on('end', (file, enc, cb) => { - if (file) { - file = mapLsFile(opts)(file) - } - - through.end(file, enc, cb) - }) - - return through - }, - - /** - * Lists a directory from the local mutable namespace that is addressed by a valid IPFS Path. The list will be yielded as PullStreams. - * - * @param {string} [path="/"] - String to show listing for. (default: /) - * @param {ListOptions} [opts] - Options for list. - * @returns {PullStream} It returns a PullStream that will yield {@link ListOutputFile} - */ - lsPullStream: (path, opts = {}) => { - return pull( - toPullStream.source(methods.ls(path, opts)), - map(mapLsFile(opts)) - ) - } - } -} diff --git a/src/core/components/files-regular/add-async-iterator.js b/src/core/components/files-regular/add-async-iterator.js deleted file mode 100644 index e138a1cd66..0000000000 --- a/src/core/components/files-regular/add-async-iterator.js +++ /dev/null @@ -1,155 +0,0 @@ -'use strict' - -const importer = require('ipfs-unixfs-importer') -const normaliseAddInput = require('ipfs-utils/src/files/normalise-input') -const { parseChunkerString } = require('./utils') -const pipe = require('it-pipe') -const log = require('debug')('ipfs:add') -log.error = require('debug')('ipfs:add:error') - -function noop () {} - -module.exports = function (self) { - // Internal add func that gets used by all add funcs - return async function * addAsyncIterator (source, options) { - options = options || {} - - const chunkerOptions = parseChunkerString(options.chunker) - - const opts = Object.assign({}, { - shardSplitThreshold: self._options.EXPERIMENTAL.sharding - ? 
1000 - : Infinity - }, options, { - strategy: 'balanced', - chunker: chunkerOptions.chunker, - chunkerOptions: chunkerOptions.chunkerOptions - }) - - // CID v0 is for multihashes encoded with sha2-256 - if (opts.hashAlg && opts.cidVersion !== 1) { - opts.cidVersion = 1 - } - - if (opts.trickle) { - opts.strategy = 'trickle' - } - - delete opts.trickle - - let total = 0 - - const prog = opts.progress || noop - const progress = (bytes) => { - total += bytes - prog(total) - } - - opts.progress = progress - - const iterator = pipe( - normaliseAddInput(source), - doImport(self, opts), - transformFile(self, opts), - preloadFile(self, opts), - pinFile(self, opts) - ) - - const releaseLock = await self._gcLock.readLock() - - try { - yield * iterator - } finally { - releaseLock() - } - } -} - -function doImport (ipfs, opts) { - return async function * (source) { // eslint-disable-line require-await - yield * importer(source, ipfs._ipld, opts) - } -} - -function transformFile (ipfs, opts) { - return async function * (source) { - for await (const file of source) { - let cid = file.cid - const hash = cid.toBaseEncodedString() - let path = file.path ? file.path : hash - - if (opts.wrapWithDirectory && !file.path) { - path = '' - } - - if (opts.onlyHash) { - yield { - path, - hash, - size: file.unixfs.fileSize() - } - - return - } - - const node = await ipfs.object.get(file.cid, Object.assign({}, opts, { preload: false })) - - if (opts.cidVersion === 1) { - cid = cid.toV1() - } - - let size = node.size - - if (Buffer.isBuffer(node)) { - size = node.length - } - - yield { - path, - hash, - size - } - } - } -} - -function preloadFile (ipfs, opts) { - return async function * (source) { - for await (const file of source) { - const isRootFile = !file.path || opts.wrapWithDirectory - ? file.path === '' - : !file.path.includes('/') - - const shouldPreload = isRootFile && !opts.onlyHash && opts.preload !== false - - if (shouldPreload) { - ipfs._preload(file.hash) - } - - yield file - } - } -} - -function pinFile (ipfs, opts) { - return async function * (source) { - for await (const file of source) { - // Pin a file if it is the root dir of a recursive add or the single file - // of a direct add. - const pin = 'pin' in opts ? 
opts.pin : true - const isRootDir = !file.path.includes('/') - const shouldPin = pin && isRootDir && !opts.onlyHash && !opts.hashAlg - - if (shouldPin) { - // Note: addAsyncIterator() has already taken a GC lock, so tell - // pin.add() not to take a (second) GC lock - await ipfs.pin.add(file.hash, { - preload: false, - lock: false - }) - } - - yield file - } - } -} diff --git a/src/core/components/files-regular/add-from-fs.js b/src/core/components/files-regular/add-from-fs.js deleted file mode 100644 index 409c4e8d7e..0000000000 --- a/src/core/components/files-regular/add-from-fs.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' - -module.exports = (self) => require('../../runtime/add-from-fs-nodejs')(self) diff --git a/src/core/components/files-regular/add-from-stream.js b/src/core/components/files-regular/add-from-stream.js deleted file mode 100644 index 6925dce2ba..0000000000 --- a/src/core/components/files-regular/add-from-stream.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' - -module.exports = self => require('./add')(self) diff --git a/src/core/components/files-regular/add-from-url.js b/src/core/components/files-regular/add-from-url.js deleted file mode 100644 index bc12850cbe..0000000000 --- a/src/core/components/files-regular/add-from-url.js +++ /dev/null @@ -1,23 +0,0 @@ -'use strict' - -const { URL } = require('iso-url') -const nodeify = require('promise-nodeify') -const { default: ky } = require('ky-universal') - -module.exports = (ipfs) => { - const addFromURL = async (url, opts) => { - opts = opts || {} - const res = await ky.get(url) - const path = decodeURIComponent(new URL(res.url).pathname.split('/').pop()) - const content = Buffer.from(await res.arrayBuffer()) - return ipfs.add({ content, path }, opts) - } - - return (name, opts, cb) => { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - return nodeify(addFromURL(name, opts), cb) - } -} diff --git a/src/core/components/files-regular/add-pull-stream.js b/src/core/components/files-regular/add-pull-stream.js deleted file mode 100644 index e3c1519531..0000000000 --- a/src/core/components/files-regular/add-pull-stream.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -const toPullStream = require('async-iterator-to-pull-stream') - -module.exports = function (self) { - return function addPullStream (options) { - return toPullStream.transform((source) => { - return self._addAsyncIterator(source, options) - }) - } -} diff --git a/src/core/components/files-regular/add-readable-stream.js b/src/core/components/files-regular/add-readable-stream.js deleted file mode 100644 index 914a1fd9fb..0000000000 --- a/src/core/components/files-regular/add-readable-stream.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -const toStream = require('it-to-stream') - -module.exports = function (self) { - return function addReadableStream (options) { - return toStream.transform(source => { - return self._addAsyncIterator(source, options) - }, { - objectMode: true - }) - } -} diff --git a/src/core/components/files-regular/add.js b/src/core/components/files-regular/add.js deleted file mode 100644 index dcf9e7bf52..0000000000 --- a/src/core/components/files-regular/add.js +++ /dev/null @@ -1,22 +0,0 @@ -'use strict' - -const all = require('async-iterator-all') - -module.exports = function (self) { - // can't use callbackify because if `data` is a pull stream - // it thinks we are passing a callback. This is why we can't have nice things. 
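The comment above deserves a gloss: a pull-stream source is itself a function of the shape `(abort, cb) => ...`, so any wrapper that treats a trailing function argument as a node-style callback (as variadic callbackify-style wrappers do) would misclassify the stream. A tiny illustration of the ambiguity (the `isCallbackCall` helper is hypothetical, not callbackify's actual code):

```js
// Naive trailing-callback detection, standing in for what variadic
// callbackify-style wrappers do
function isCallbackCall (args) {
  return typeof args[args.length - 1] === 'function'
}

// A pull-stream source is a function too...
const pullSource = (abort, cb) => { /* pushes values via cb */ }

// ...so by type alone it is indistinguishable from a callback
console.log(isCallbackCall([pullSource])) // true - wrongly treated as a callback
```

Hence the hand-rolled argument handling in the deleted `add` below.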
- return function add (data, options, callback) { - if (!callback && typeof options === 'function') { - callback = options - options = {} - } - - const result = all(self._addAsyncIterator(data, options)) - - if (!callback) { - return result - } - - result.then((result) => callback(null, result), callback) - } -} diff --git a/src/core/components/files-regular/cat-pull-stream.js b/src/core/components/files-regular/cat-pull-stream.js deleted file mode 100644 index 5ad3f79411..0000000000 --- a/src/core/components/files-regular/cat-pull-stream.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -const toPullStream = require('async-iterator-to-pull-stream') - -module.exports = function (self) { - return function catPullStream (ipfsPath, options) { - return toPullStream.source(self._catAsyncIterator(ipfsPath, options)) - } -} diff --git a/src/core/components/files-regular/cat-readable-stream.js b/src/core/components/files-regular/cat-readable-stream.js deleted file mode 100644 index 70df087f16..0000000000 --- a/src/core/components/files-regular/cat-readable-stream.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -const toStream = require('it-to-stream') - -module.exports = function (self) { - return function catReadableStream (ipfsPath, options) { - return toStream.readable(self._catAsyncIterator(ipfsPath, options), { - objectMode: true - }) - } -} diff --git a/src/core/components/files-regular/cat.js b/src/core/components/files-regular/cat.js deleted file mode 100644 index 656946ad03..0000000000 --- a/src/core/components/files-regular/cat.js +++ /dev/null @@ -1,10 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const all = require('async-iterator-all') - -module.exports = function (self) { - return callbackify.variadic(async function cat (ipfsPath, options) { - return Buffer.concat(await all(self._catAsyncIterator(ipfsPath, options))) - }) -} diff --git a/src/core/components/files-regular/get-pull-stream.js b/src/core/components/files-regular/get-pull-stream.js deleted file mode 100644 index fa769aa89c..0000000000 --- a/src/core/components/files-regular/get-pull-stream.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -const toPullStream = require('async-iterator-to-pull-stream') -const pull = require('pull-stream/pull') -const map = require('pull-stream/throughs/map') - -module.exports = function (self) { - return function getPullStream (ipfsPath, options) { - return pull( - toPullStream.source(self._getAsyncIterator(ipfsPath, options)), - map(file => { - if (file.content) { - file.content = toPullStream.source(file.content()) - } - - return file - }) - ) - } -} diff --git a/src/core/components/files-regular/get-readable-stream.js b/src/core/components/files-regular/get-readable-stream.js deleted file mode 100644 index 3b6b78b3dd..0000000000 --- a/src/core/components/files-regular/get-readable-stream.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict' - -const toStream = require('it-to-stream') - -module.exports = function (self) { - return function getReadableStream (ipfsPath, options) { - return toStream.readable((async function * mapStreamFileContents () { - for await (const file of self._getAsyncIterator(ipfsPath, options)) { - if (file.content) { - file.content = toStream.readable(file.content()) - } - - yield file - } - })(), { - objectMode: true - }) - } -} diff --git a/src/core/components/files-regular/get.js b/src/core/components/files-regular/get.js deleted file mode 100644 index 58e0434dfc..0000000000 --- a/src/core/components/files-regular/get.js +++ /dev/null @@ 
-1,18 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const all = require('async-iterator-all') - -module.exports = function (self) { - return callbackify.variadic(async function get (ipfsPath, options) { // eslint-disable-line require-await - return all(async function * () { - for await (const file of self._getAsyncIterator(ipfsPath, options)) { - if (file.content) { - file.content = Buffer.concat(await all(file.content())) - } - - yield file - } - }()) - }) -} diff --git a/src/core/components/files-regular/index.js b/src/core/components/files-regular/index.js deleted file mode 100644 index 8bff57d331..0000000000 --- a/src/core/components/files-regular/index.js +++ /dev/null @@ -1,34 +0,0 @@ -'use strict' - -module.exports = (self) => { - const filesRegular = { - add: require('./add')(self), - addFromFs: require('./add-from-fs')(self), - addFromStream: require('./add-from-stream')(self), - addFromURL: require('./add-from-url')(self), - addPullStream: require('./add-pull-stream')(self), - addReadableStream: require('./add-readable-stream')(self), - _addAsyncIterator: require('./add-async-iterator')(self), - cat: require('./cat')(self), - catPullStream: require('./cat-pull-stream')(self), - catReadableStream: require('./cat-readable-stream')(self), - _catAsyncIterator: require('./cat-async-iterator')(self), - get: require('./get')(self), - getPullStream: require('./get-pull-stream')(self), - getReadableStream: require('./get-readable-stream')(self), - _getAsyncIterator: require('./get-async-iterator')(self), - ls: require('./ls')(self), - lsPullStream: require('./ls-pull-stream')(self), - lsReadableStream: require('./ls-readable-stream')(self), - _lsAsyncIterator: require('./ls-async-iterator')(self), - refs: require('./refs')(self), - refsReadableStream: require('./refs-readable-stream')(self), - refsPullStream: require('./refs-pull-stream')(self), - _refsAsyncIterator: require('./refs-async-iterator')(self) - } - filesRegular.refs.local = require('./refs-local')(self) - filesRegular.refs.localReadableStream = require('./refs-local-readable-stream')(self) - filesRegular.refs.localPullStream = require('./refs-local-pull-stream')(self) - filesRegular.refs._localAsyncIterator = require('./refs-local-async-iterator')(self) - return filesRegular -} diff --git a/src/core/components/files-regular/ls-pull-stream.js b/src/core/components/files-regular/ls-pull-stream.js deleted file mode 100644 index 3cf5a6cd74..0000000000 --- a/src/core/components/files-regular/ls-pull-stream.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -const toPullStream = require('async-iterator-to-pull-stream') - -module.exports = function (self) { - return function lsPullStream (ipfsPath, options) { - return toPullStream.source(self._lsAsyncIterator(ipfsPath, options)) - } -} diff --git a/src/core/components/files-regular/ls-readable-stream.js b/src/core/components/files-regular/ls-readable-stream.js deleted file mode 100644 index 794095f752..0000000000 --- a/src/core/components/files-regular/ls-readable-stream.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -const toStream = require('it-to-stream') - -module.exports = function (self) { - return function lsReadableStream (ipfsPath, options) { - return toStream.readable(self._lsAsyncIterator(ipfsPath, options), { - objectMode: true - }) - } -} diff --git a/src/core/components/files-regular/ls.js b/src/core/components/files-regular/ls.js deleted file mode 100644 index 9ae4a71a97..0000000000 --- a/src/core/components/files-regular/ls.js +++ /dev/null @@ 
-1,10 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const all = require('async-iterator-all') - -module.exports = function (self) { - return callbackify.variadic(async function ls (ipfsPath, options) { // eslint-disable-line require-await - return all(self._lsAsyncIterator(ipfsPath, options)) - }) -} diff --git a/src/core/components/files-regular/refs-local-pull-stream.js b/src/core/components/files-regular/refs-local-pull-stream.js deleted file mode 100644 index 1bb02cec51..0000000000 --- a/src/core/components/files-regular/refs-local-pull-stream.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -const toPullStream = require('async-iterator-to-pull-stream') - -module.exports = function (self) { - return function refsLocalPullStream () { - return toPullStream.source(self.refs._localAsyncIterator()) - } -} diff --git a/src/core/components/files-regular/refs-local-readable-stream.js b/src/core/components/files-regular/refs-local-readable-stream.js deleted file mode 100644 index f66920ef51..0000000000 --- a/src/core/components/files-regular/refs-local-readable-stream.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -const toStream = require('it-to-stream') - -module.exports = function (self) { - return function refsLocalReadableStream () { - return toStream.readable(self.refs._localAsyncIterator(), { - objectMode: true - }) - } -} diff --git a/src/core/components/files-regular/refs-local.js b/src/core/components/files-regular/refs-local.js deleted file mode 100644 index 799b384e30..0000000000 --- a/src/core/components/files-regular/refs-local.js +++ /dev/null @@ -1,10 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const all = require('async-iterator-all') - -module.exports = function (self) { - return callbackify.variadic(async function refsLocal (ipfsPath, options) { // eslint-disable-line require-await - return all(self.refs._localAsyncIterator(ipfsPath, options)) - }) -} diff --git a/src/core/components/files-regular/refs-pull-stream.js b/src/core/components/files-regular/refs-pull-stream.js deleted file mode 100644 index 4d10812c37..0000000000 --- a/src/core/components/files-regular/refs-pull-stream.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -const toPullStream = require('async-iterator-to-pull-stream') - -module.exports = function (self) { - return function refsPullStream (ipfsPath, options) { - return toPullStream.source(self._refsAsyncIterator(ipfsPath, options)) - } -} diff --git a/src/core/components/files-regular/refs-readable-stream.js b/src/core/components/files-regular/refs-readable-stream.js deleted file mode 100644 index 509e65a508..0000000000 --- a/src/core/components/files-regular/refs-readable-stream.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -const toStream = require('it-to-stream') - -module.exports = function (self) { - return function refsReadableStream (ipfsPath, options) { - return toStream.readable(self._refsAsyncIterator(ipfsPath, options), { - objectMode: true - }) - } -} diff --git a/src/core/components/files-regular/refs.js b/src/core/components/files-regular/refs.js deleted file mode 100644 index 4876457606..0000000000 --- a/src/core/components/files-regular/refs.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const all = require('async-iterator-all') - -module.exports = function (self) { - return callbackify.variadic(async function refs (ipfsPath, options) { // eslint-disable-line require-await - return all(self._refsAsyncIterator(ipfsPath, options)) - }) -} 
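These per-style wrappers (callback, readable stream, pull stream) are deleted across the board in favour of a single async-iterable form. A rough before/after sketch from the consumer's side, assuming an initialized node (`<cid>` is a placeholder):

```js
// Before: one variant per consumption style (all deleted in this patch)
// ipfs.refs('/ipfs/<cid>', { recursive: true }, (err, refs) => { /* ... */ })
// ipfs.refsReadableStream('/ipfs/<cid>').on('data', ref => { /* ... */ })

// After: a single method returning an async iterable
async function printRefs (ipfs, path) {
  for await (const ref of ipfs.refs(path, { recursive: true })) {
    console.log(ref.ref)
  }
}
```

Callers that still need streams can wrap the iterable themselves (e.g. with it-to-stream, used elsewhere in the deleted wrappers).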
- -// Preset format strings -module.exports.Format = { - default: '', - edges: ' -> ' -} diff --git a/src/core/components/files.js b/src/core/components/files.js new file mode 100644 index 0000000000..971eb88dca --- /dev/null +++ b/src/core/components/files.js @@ -0,0 +1,178 @@ +'use strict' + +const mfs = require('ipfs-mfs/core') +const isIpfs = require('is-ipfs') + +module.exports = ({ ipld, blockService, repo, preload, options: constructorOptions }) => { + const methods = mfs({ + ipld, + blocks: blockService, + datastore: repo.root, + repoOwner: constructorOptions.repoOwner + }) + + const withPreload = fn => (...args) => { + const paths = args.filter(arg => isIpfs.ipfsPath(arg) || isIpfs.cid(arg)) + + if (paths.length) { + const options = args[args.length - 1] + if (options && options.preload !== false) { + paths.forEach(path => preload(path)) + } + } + + return fn(...args) + } + + return { + ...methods, + + /** + * Copy files + * + * @param {String | Array} from - The path(s) of the source to copy. + * @param {String} to - The path of the destination to copy to. + * @param {Object} [opts] - Options for copy. + * @param {boolean} [opts.parents=false] - Whether or not to make the parent directories if they don't exist. (default: false) + * @param {String} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb) + * @param {String} [opts.hashAlg=sha2-256] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} + * @param {boolean} [opts.flush=true] - Whether or not to immediately flush MFS changes to disk (default: true). + * @returns {Promise} + */ + cp: withPreload(methods.cp), + + /** + * Make a directory + * + * @param {String} path - The path to the directory to make. + * @param {Object} [opts] - Options for mkdir. + * @param {boolean} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false) + * @param {String} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb). + * @param {String} [opts.hashAlg] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} + * @param {boolean} [opts.flush=true] - Whether or not to immediately flush MFS changes to disk (default: true). + * @returns {Promise} + */ + mkdir: methods.mkdir, + + /** + * @typedef {Object} StatOutput + * @prop {String} hash - Output hash. + * @prop {number} size - File size in bytes. + * @prop {number} cumulativeSize - Integer with the size of the DAGNodes making up the file in Bytes. + * @prop {string} type - Output type either 'directory' or 'file'. + * @prop {number} blocks - If type is directory, this is the number of files in the directory. If it is file it is the number of blocks that make up the file. + * @prop {boolean} withLocality - Indicate if locality information is present. + * @prop {boolean} local - Indicate if the queried dag is fully present locally. + * @prop {number} sizeLocal - Integer indicating the cumulative size of the data present locally. + */ + + /** + * Get file or directory status. + * + * @param {String} path - Path to the file or directory to stat. + * @param {Object} [opts] - Options for stat. 
+ * @param {boolean} [opts.hash=false] - Return only the hash. (default: false) + * @param {boolean} [opts.size=false] - Return only the size. (default: false) + * @param {boolean} [opts.withLocal=false] - Compute the amount of the dag that is local, and if possible the total size. (default: false) + * @returns {Promise} + */ + stat: withPreload(methods.stat), + + /** + * Remove a file or directory. + * + * @param {String | Array} paths - One or more paths to remove. + * @param {Object} [opts] - Options for remove. + * @param {boolean} [opts.recursive=false] - Whether or not to remove directories recursively. (default: false) + * @returns {Promise} + */ + rm: methods.rm, + + /** + * @typedef {Object} ReadOptions + * @prop {number} [opts.offset=0] - Integer with the byte offset to begin reading from (default: 0). + * @prop {number} [opts.length] - Integer with the maximum number of bytes to read (default: Read to the end of stream). + */ + + /** + * Read a file into a Buffer. + * + * @param {string} path - Path of the file to read and must point to a file (and not a directory). + * @param {ReadOptions} [opts] - Object for read. + * @returns {AsyncIterable} + */ + read: withPreload(methods.read), + + /** + * Write to a file. + * + * @param {string} path - Path of the file to write. + * @param {Buffer | PullStream | ReadableStream | Blob | string} content - Content to write. + * @param {Object} opts - Options for write. + * @param {number} [opts.offset=0] - Integer with the byte offset to begin writing at. (default: 0) + * @param {boolean} [opts.create=false] - Indicate to create the file if it doesn't exist. (default: false) + * @param {boolean} [opts.truncate=false] - Indicate if the file should be truncated after writing all the bytes from content. (default: false) + * @param {boolean} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false) + * @param {number} [opts.length] - Maximum number of bytes to read. (default: Read all bytes from content) + * @param {boolean} [opts.rawLeaves=false] - If true, DAG leaves will contain raw file data and not be wrapped in a protobuf. (default: false) + * @param {number} [opts.cidVersion=0] - The CID version to use when storing the data (storage keys are based on the CID, including its version). (default: 0) + * @returns {Promise} + */ + write: methods.write, + + /** + * Move files. + * + * @param {string | Array} from - Path(s) of the source to move. + * @param {string} to - Path of the destination to move to. + * @param {Object} opts - Options for mv. + * @param {boolean} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false) + * @param {String} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb). + * @param {String} [opts.hashAlg] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} + * @param {boolean} [opts.flush=true] - Value to decide whether or not to immediately flush MFS changes to disk. (default: true) + * @returns {Promise} + * @description + * If from has multiple values then to must be a directory. + * + * If from has a single value and to exists and is a directory, from will be moved into to. + * + * If from has a single value and to exists and is a file, from must be a file and the contents of to will be replaced with the contents of from, otherwise an error will be returned. + * + * If from is an IPFS path, and an MFS path exists with the same name, the IPFS path will be chosen. + * + * All values of from will be removed after the operation is complete unless they are an IPFS path. + */ + mv: withPreload(methods.mv), + + /** + * Flush a given path's data to the disk. + * + * @param {string | Array} [paths] - String paths to flush. (default: /) + * @returns {Promise} + */ + flush: methods.flush, + + /** + * @typedef {Object} ListOutputFile + * @prop {string} name - Which is the file's name. + * @prop {string} type - Which is the object's type (directory or file). + * @prop {number} size - The size of the file in bytes. + * @prop {string} hash - The hash of the file. + */ + + /** + * @typedef {Object} ListOptions + * @prop {boolean} [long=false] - Value to decide whether or not to populate type, size and hash. (default: false) + * @prop {boolean} [sort=false] - If true entries will be sorted by filename. (default: false) + */ + + /** + * List directories in the local mutable namespace. + * + * @param {string} [path="/"] - String to show listing for. (default: /) + * @param {ListOptions} [opts] - Options for list. + * @returns {AsyncIterable} + */ + ls: withPreload(methods.ls) + } +}
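With the stream variants (`readReadableStream`, `readPullStream`, `lsReadableStream`, `lsPullStream`) gone, MFS consumption moves to `for await...of`. A sketch, assuming an initialized `ipfs` instance (the paths are illustrative):

```js
// read() now yields an AsyncIterable of Buffer chunks instead of a stream
async function readToBuffer (ipfs, path) {
  const chunks = []
  for await (const chunk of ipfs.files.read(path)) {
    chunks.push(chunk)
  }
  return Buffer.concat(chunks)
}

// ls() is also async-iterable - entries arrive as they are resolved
async function listDir (ipfs, path) {
  for await (const entry of ipfs.files.ls(path, { long: true })) {
    console.log(entry.name)
  }
}
```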
diff --git a/src/core/components/files-regular/get-async-iterator.js b/src/core/components/get.js similarity index 53% rename from src/core/components/files-regular/get-async-iterator.js rename to src/core/components/get.js index b9ad234f4b..4872a7e7f5 100644 --- a/src/core/components/files-regular/get-async-iterator.js +++ b/src/core/components/get.js @@ -2,25 +2,25 @@ const exporter = require('ipfs-unixfs-exporter') const errCode = require('err-code') -const { normalizePath, mapFile } = require('./utils') +const { normalizeCidPath, mapFile } = require('../utils') -module.exports = function (self) { - return async function * getAsyncIterator (ipfsPath, options) { +module.exports = function ({ ipld, preload }) { + return async function * get (ipfsPath, options) { options = options || {} if (options.preload !== false) { let pathComponents try { - pathComponents = normalizePath(ipfsPath).split('/') + pathComponents = normalizeCidPath(ipfsPath).split('/') } catch (err) { throw errCode(err, 'ERR_INVALID_PATH') } - self._preload(pathComponents[0]) + preload(pathComponents[0]) } - for await (const file of exporter.recursive(ipfsPath, self._ipld, options)) { + for await (const file of exporter.recursive(ipfsPath, ipld, options)) { yield mapFile(file, { ...options, includeContent: true diff --git a/src/core/components/id.js b/src/core/components/id.js index a8fd75f92d..f848d96e8f 100644 --- a/src/core/components/id.js +++ b/src/core/components/id.js @@ -1,14 +1,13 @@ 'use strict' -const callbackify = require('callbackify') const pkgversion = require('../../../package.json').version -module.exports = function id (self) { - return callbackify(async () => { // eslint-disable-line require-await +module.exports = ({ peerInfo }) => { + return async function id () { // eslint-disable-line require-await return { - id: self._peerInfo.id.toB58String(), - publicKey: self._peerInfo.id.pubKey.bytes.toString('base64'), - addresses: self._peerInfo.multiaddrs + id: peerInfo.id.toB58String(), + publicKey: peerInfo.id.pubKey.bytes.toString('base64'), + addresses: peerInfo.multiaddrs .toArray() .map((ma) => ma.toString())
.filter((ma) => ma.indexOf('ipfs') >= 0) @@ -16,5 +15,5 @@ module.exports = function id (self) { agentVersion: `js-ipfs/${pkgversion}`, protocolVersion: '9000' } - }) + } } diff --git a/src/core/components/index.js b/src/core/components/index.js index ac893efbdd..3220c00295 100644 --- a/src/core/components/index.js +++ b/src/core/components/index.js @@ -1,31 +1,85 @@ 'use strict' -exports.preStart = require('./pre-start') -exports.start = require('./start') -exports.stop = require('./stop') -exports.isOnline = require('./is-online') -exports.version = require('./version') +exports.add = require('./add') +exports.block = { + get: require('./block/get'), + put: require('./block/put'), + rm: require('./block/rm'), + stat: require('./block/stat') +} +exports.bitswap = { + stat: require('./bitswap/stat'), + unwant: require('./bitswap/unwant'), + wantlist: require('./bitswap/wantlist') +} +exports.bootstrap = { + add: require('./bootstrap/add'), + list: require('./bootstrap/list'), + rm: require('./bootstrap/rm') +} +exports.cat = require('./cat') +exports.config = require('./config') +exports.dag = { + get: require('./dag/get'), + put: require('./dag/put'), + resolve: require('./dag/resolve'), + tree: require('./dag/tree') +} +exports.dns = require('./dns') +exports.files = require('./files') +exports.get = require('./get') exports.id = require('./id') -exports.repo = require('./repo') exports.init = require('./init') -exports.bootstrap = require('./bootstrap') -exports.config = require('./config') -exports.block = require('./block') -exports.object = require('./object') -exports.dag = require('./dag') -exports.libp2p = require('./libp2p') -exports.swarm = require('./swarm') +exports.isOnline = require('./is-online') +exports.ls = require('./ls') +exports.name = { + publish: require('./name/publish'), + pubsub: { + cancel: require('./name/pubsub/cancel'), + state: require('./name/pubsub/state'), + subs: require('./name/pubsub/subs') + }, + resolve: require('./name/resolve') +} +exports.object = { + data: require('./object/data'), + get: require('./object/get'), + links: require('./object/links'), + new: require('./object/new'), + patch: { + addLink: require('./object/patch/add-link'), + appendData: require('./object/patch/append-data'), + rmLink: require('./object/patch/rm-link'), + setData: require('./object/patch/set-data') + }, + put: require('./object/put'), + stat: require('./object/stat') +} +exports.pin = { + add: require('./pin/add'), + ls: require('./pin/ls'), + rm: require('./pin/rm') +} exports.ping = require('./ping') -exports.pingPullStream = require('./ping-pull-stream') -exports.pingReadableStream = require('./ping-readable-stream') -exports.pin = require('./pin') -exports.filesRegular = require('./files-regular') -exports.filesMFS = require('./files-mfs') -exports.bitswap = require('./bitswap') -exports.pubsub = require('./pubsub') -exports.dht = require('./dht') -exports.dns = require('./dns') -exports.key = require('./key') -exports.stats = require('./stats') +exports.refs = require('./refs') +exports.refs.local = require('./refs/local') +exports.repo = { + gc: require('./repo/gc'), + stat: require('./repo/stat'), + version: require('./repo/version') +} exports.resolve = require('./resolve') -exports.name = require('./name') +exports.start = require('./start') +exports.stop = require('./stop') +exports.swarm = { + addrs: require('./swarm/addrs'), + connect: require('./swarm/connect'), + disconnect: require('./swarm/disconnect'), + localAddrs: require('./swarm/local-addrs'), + 
peers: require('./swarm/peers') +} +exports.version = require('./version') + +exports.legacy = { // TODO: these will be removed as the new API is completed + libp2p: require('./libp2p') +} diff --git a/src/core/components/init-assets.js b/src/core/components/init-assets.js deleted file mode 100644 index 6f0e4799d3..0000000000 --- a/src/core/components/init-assets.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -const path = require('path') -const CID = require('cids') - -// Add the default assets to the repo. -module.exports = async function addDefaultAssets (self, log) { - const initDocsPath = path.join(__dirname, '../../init-files/init-docs') - - const results = await self.addFromFs(initDocsPath, { - recursive: true, - preload: false - }) - - const dir = results.filter(file => file.path === 'init-docs').pop() - const cid = new CID(dir.hash) - - log('to get started, enter:\n') - log(`\tjsipfs cat /ipfs/${cid.toBaseEncodedString()}/readme\n`) -} diff --git a/src/core/components/init.js b/src/core/components/init.js index 5b2a1ec2df..c42db1fc4e 100644 --- a/src/core/components/init.js +++ b/src/core/components/init.js @@ -1,164 +1,383 @@ 'use strict' -const peerId = require('peer-id') +const log = require('debug')('ipfs:components:init') +const PeerId = require('peer-id') +const PeerInfo = require('peer-info') const mergeOptions = require('merge-options') -const callbackify = require('callbackify') const promisify = require('promisify-es6') -const defaultConfig = require('../runtime/config-nodejs.js') +const getDefaultConfig = require('../runtime/config-nodejs.js') +const createRepo = require('../runtime/repo-nodejs') const Keychain = require('libp2p-keychain') -const { - DAGNode -} = require('ipld-dag-pb') +const NoKeychain = require('./no-keychain') +const mortice = require('mortice') +const { DAGNode } = require('ipld-dag-pb') const UnixFs = require('ipfs-unixfs') const multicodec = require('multicodec') - +const multiaddr = require('multiaddr') +const { + AlreadyInitializingError, + AlreadyInitializedError, + NotStartedError +} = require('../errors') +const BlockService = require('ipfs-block-service') +const Ipld = require('ipld') +const getDefaultIpldOptions = require('../runtime/ipld-nodejs') +const createPreloader = require('../preload') +const { ERR_REPO_NOT_INITIALIZED } = require('ipfs-repo').errors const IPNS = require('../ipns') const OfflineDatastore = require('../ipns/routing/offline-datastore') +const initAssets = require('../runtime/init-assets-nodejs') +const PinManager = require('./pin/pin-manager') +const Components = require('./') + +module.exports = ({ + apiManager, + print, + options: constructorOptions +}) => async function init (options) { + const { cancel } = apiManager.update({ init: () => { throw new AlreadyInitializingError() } }) + + try { + options = options || {} + + if (typeof constructorOptions.init === 'object') { + options = mergeOptions(options, constructorOptions.init) + } -const addDefaultAssets = require('./init-assets') -const { profiles } = require('./config') + if (constructorOptions.pass) { + options.pass = constructorOptions.pass + } -function createPeerId (self, opts) { - if (opts.privateKey) { - self.log('using user-supplied private-key') - if (typeof opts.privateKey === 'object') { - return opts.privateKey - } else { - return promisify(peerId.createFromPrivKey)(Buffer.from(opts.privateKey, 'base64')) + if (constructorOptions.config) { + options.config = constructorOptions.config } - } else { - // Generate peer identity keypair + transform to 
desired format + add to config. - opts.log(`generating ${opts.bits}-bit RSA keypair...`, false) - self.log('generating peer id: %s bits', opts.bits) - return promisify(peerId.create)({ bits: opts.bits }) - } -} + const repo = typeof options.repo === 'string' || options.repo == null + ? createRepo({ path: options.repo, autoMigrate: options.repoAutoMigrate }) + : options.repo + + let isInitialized = true + + if (repo.closed) { + try { + await repo.open() + } catch (err) { + if (err.code === ERR_REPO_NOT_INITIALIZED) { + isInitialized = false + } else { + throw err + } + } + } -async function createRepo (self, opts) { - if (self.state.state() !== 'uninitialized') { - throw new Error('Not able to init from state: ' + self.state.state()) - } + const { peerId, config, keychain } = isInitialized + ? await initExistingRepo(repo, options) + : await initNewRepo(repo, { ...options, print }) + + log('peer created') + const peerInfo = new PeerInfo(peerId) + + if (config.Addresses && config.Addresses.Swarm) { + config.Addresses.Swarm.forEach(addr => { + let ma = multiaddr(addr) + + if (ma.getPeerId()) { + ma = ma.encapsulate(`/p2p/${peerInfo.id.toB58String()}`) + } - self.state.init() - self.log('init') + peerInfo.multiaddrs.add(ma) + }) + } + + const blockService = new BlockService(repo) + const ipld = new Ipld(getDefaultIpldOptions(blockService, constructorOptions.ipld, log)) + + const preload = createPreloader(constructorOptions.preload) + await preload.start() + + // Make sure GC lock is specific to repo, for tests where there are + // multiple instances of IPFS + const gcLock = mortice(repo.path, { singleProcess: constructorOptions.repoOwner }) + const dag = { + get: Components.dag.get({ ipld, preload }), + resolve: Components.dag.resolve({ ipld, preload }), + tree: Components.dag.tree({ ipld, preload }) + } + const object = { + data: Components.object.data({ ipld, preload }), + get: Components.object.get({ ipld, preload }), + links: Components.object.links({ dag }), + new: Components.object.new({ ipld, preload }), + patch: { + addLink: Components.object.patch.addLink({ ipld, gcLock, preload }), + appendData: Components.object.patch.appendData({ ipld, gcLock, preload }), + rmLink: Components.object.patch.rmLink({ ipld, gcLock, preload }), + setData: Components.object.patch.setData({ ipld, gcLock, preload }) + }, + put: Components.object.put({ ipld, gcLock, preload }), + stat: Components.object.stat({ ipld, preload }) + } - // An initialized, open repo was passed, use this one! - if (opts.repo) { - self._repo = opts.repo + const pinManager = new PinManager(repo, dag) + await pinManager.load() + + const pin = { + add: Components.pin.add({ pinManager, gcLock, dag, object }), + ls: Components.pin.ls({ pinManager, object }), + rm: Components.pin.rm({ pinManager, gcLock, object }) + } - return + // FIXME: resolve this circular dependency + dag.put = Components.dag.put({ ipld, pin, gcLock, preload }) + + const add = Components.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) + + if (!isInitialized && !options.emptyRepo) { + // add empty unixfs dir object (go-ipfs assumes this exists) + const emptyDirCid = await addEmptyDir({ dag }) + + log('adding default assets') + await initAssets({ add, print }) + + log('initializing IPNS keyspace') + // Setup the offline routing for IPNS. + // This is primarily used for offline ipns modifications, such as the initializeKeyspace feature.
+ const offlineDatastore = new OfflineDatastore(repo) + const ipns = new IPNS(offlineDatastore, repo.datastore, peerInfo, keychain, { pass: options.pass }) + await ipns.initializeKeyspace(peerId.privKey.bytes, emptyDirCid.toString()) + } + + const api = createApi({ + add, + apiManager, + constructorOptions, + blockService, + dag, + gcLock, + initOptions: options, + ipld, + keychain, + object, + peerInfo, + pin, + pinManager, + preload, + print, + repo + }) + + apiManager.update(api, () => { throw new NotStartedError() }) + } catch (err) { + cancel() + throw err } - opts.emptyRepo = opts.emptyRepo || false - opts.bits = Number(opts.bits) || 2048 - opts.log = opts.log || function () {} + return apiManager.api +} - const config = mergeOptions(defaultConfig(), self._options.config) +async function initNewRepo (repo, { privateKey, emptyRepo, bits, profiles, config, pass, print }) { + emptyRepo = emptyRepo || false + bits = bits == null ? 2048 : Number(bits) - applyProfile(self, config, opts) + config = mergeOptions(getDefaultConfig(), config) + config = applyProfiles(profiles, config) // Verify repo does not exist yet - const exists = await self._repo.exists() - self.log('repo exists?', exists) + const exists = await repo.exists() + log('repo exists?', exists) + if (exists === true) { - throw Error('repo already exists') + throw new Error('repo already exists') } - const peerId = await createPeerId(self, opts) + const peerId = await createPeerId({ privateKey, bits, print }) + let keychain = new NoKeychain() - self.log('identity generated') + log('identity generated') config.Identity = { PeerID: peerId.toB58String(), PrivKey: peerId.privKey.bytes.toString('base64') } - const privateKey = peerId.privKey - if (opts.pass) { - config.Keychain = Keychain.generateOptions() + privateKey = peerId.privKey + + config.Keychain = Keychain.generateOptions() + + log('peer identity: %s', config.Identity.PeerID) + + await repo.init(config) + await repo.open() + + log('repo opened') + + if (pass) { + log('creating keychain') + const keychainOptions = { passPhrase: pass, ...config.Keychain } + keychain = new Keychain(repo.keys, keychainOptions) + await keychain.importPeer('self', { privKey: privateKey }) } - opts.log('done') - opts.log('peer identity: ' + config.Identity.PeerID) + return { peerId, keychain, config } +} - await self._repo.init(config) - await self._repo.open() +async function initExistingRepo (repo, { config: newConfig, profiles, pass }) { + let config = await repo.config.get() - self.log('repo opened') + if (newConfig || profiles) { + if (newConfig) { + config = mergeOptions(config, newConfig) + } + if (profiles) { + config = applyProfiles(profiles, config) + } + await repo.config.set(config) + } - if (opts.pass) { - self.log('creating keychain') - const keychainOptions = Object.assign({ passPhrase: opts.pass }, config.Keychain) - self._keychain = new Keychain(self._repo.keys, keychainOptions) + let keychain = new NoKeychain() - await self._keychain.importPeer('self', { privKey: privateKey }) + if (pass) { + const keychainOptions = { passPhrase: pass, ...config.Keychain } + keychain = new Keychain(repo.keys, keychainOptions) + log('keychain constructed') } - // Setup the offline routing for IPNS. - // This is primarily used for offline ipns modifications, such as the initializeKeyspace feature. 
- const offlineDatastore = new OfflineDatastore(self._repo) + const peerId = await promisify(PeerId.createFromPrivKey)(config.Identity.PrivKey) - self._ipns = new IPNS(offlineDatastore, self._repo.datastore, self._peerInfo, self._keychain, self._options) + // Import the private key as 'self', if needed. + if (pass) { + try { + await keychain.findKeyByName('self') + } catch (err) { + log('Creating "self" key') + await keychain.importPeer('self', peerId) + } + } - // add empty unixfs dir object (go-ipfs assumes this exists) - return addRepoAssets(self, privateKey, opts) + return { peerId, keychain, config } } -async function addRepoAssets (self, privateKey, opts) { - if (opts.emptyRepo) { - return +function createPeerId ({ privateKey, bits, print }) { + if (privateKey) { + log('using user-supplied private-key') + return typeof privateKey === 'object' + ? privateKey + : promisify(PeerId.createFromPrivKey)(Buffer.from(privateKey, 'base64')) + } else { + // Generate peer identity keypair + transform to desired format + add to config. + print('generating %s-bit RSA keypair...', bits) + return promisify(PeerId.create)({ bits }) } +} - self.log('adding assets') - +async function addEmptyDir ({ dag }) { const node = new DAGNode(new UnixFs('directory').marshal()) - const cid = await self.dag.put(node, { + return dag.put(node, { version: 0, format: multicodec.DAG_PB, hashAlg: multicodec.SHA2_256, preload: false }) - - await self._ipns.initializeKeyspace(privateKey, cid.toBaseEncodedString()) - - self.log('Initialised keyspace') - - if (typeof addDefaultAssets === 'function') { - self.log('Adding default assets') - // addDefaultAssets is undefined on browsers. - // See package.json browser config - return addDefaultAssets(self, opts.log) - } } -// Apply profiles (eg "server,lowpower") to config -function applyProfile (self, config, opts) { - if (opts.profiles) { - for (const name of opts.profiles) { - const profile = profiles[name] - - if (!profile) { - throw new Error(`Could not find profile with name '${name}'`) - } - - self.log(`applying profile ${name}`) - profile.transform(config) +// Apply profiles (e.g. 
['server', 'lowpower']) to config +function applyProfiles (profiles, config) { + return (profiles || []).reduce((config, name) => { + const profile = require('./config').profiles[name] + if (!profile) { + throw new Error(`No profile with name '${name}'`) + } + log('applying profile %s', name) + return profile.transform(config) + }, config) +} + +function createApi ({ + add, + apiManager, + constructorOptions, + blockService, + dag, + gcLock, + initOptions, + ipld, + keychain, + object, + peerInfo, + pin, + pinManager, + preload, + print, + repo +}) { + const refs = () => { throw new NotStartedError() } + refs.local = Components.refs.local({ repo }) + + const api = { + add, + bootstrap: { + add: Components.bootstrap.add({ repo }), + list: Components.bootstrap.list({ repo }), + rm: Components.bootstrap.rm({ repo }) + }, + block: { + get: Components.block.get({ blockService, preload }), + put: Components.block.put({ blockService, gcLock, preload }), + rm: Components.block.rm({ blockService, gcLock, pinManager }), + stat: Components.block.stat({ blockService, preload }) + }, + cat: Components.cat({ ipld, preload }), + config: Components.config({ repo }), + dag, + dns: Components.dns(), + files: Components.files({ ipld, blockService, repo, preload, options: constructorOptions }), + get: Components.get({ ipld, preload }), + id: Components.id({ peerInfo }), + init: () => { throw new AlreadyInitializedError() }, + isOnline: Components.isOnline({}), + ls: Components.ls({ ipld, preload }), + object, + pin, + refs, + repo: { + // TODO: gc should be available after init. + // `resolve` (passed to the `refs` API) is a dependency of the `gc` API and + // needs to be altered to make its `name` API dependency optional, so that + // `resolve` is also available when not started and `gc` can be run when not started.
diff --git a/src/core/components/is-online.js b/src/core/components/is-online.js
index 68abdebe61..3aad832f57 100644
--- a/src/core/components/is-online.js
+++ b/src/core/components/is-online.js
@@ -1,7 +1,5 @@
 'use strict'
 
-module.exports = function isOnline (self) {
-  return () => {
-    return Boolean(self._bitswap && self.libp2p && self.libp2p.isStarted())
-  }
+module.exports = ({ libp2p }) => {
+  return () => Boolean(libp2p && libp2p.isStarted())
 }
diff --git a/src/core/components/files-regular/ls-async-iterator.js b/src/core/components/ls.js
similarity index 71%
rename from src/core/components/files-regular/ls-async-iterator.js
rename to src/core/components/ls.js
index 34777a523e..df3e0596dc 100644
--- a/src/core/components/files-regular/ls-async-iterator.js
+++ b/src/core/components/ls.js
@@ -2,21 +2,21 @@
 
 const exporter = require('ipfs-unixfs-exporter')
 const errCode = require('err-code')
-const { normalizePath, mapFile } = require('./utils')
+const { normalizeCidPath, mapFile } = require('../utils')
 
-module.exports = function (self) {
-  return async function * lsAsyncIterator (ipfsPath, options) {
+module.exports = function ({ ipld, preload }) {
+  return async function * ls (ipfsPath, options) {
     options = options || {}
 
-    const path = normalizePath(ipfsPath)
+    const path = normalizeCidPath(ipfsPath)
     const recursive = options.recursive
     const pathComponents = path.split('/')
 
     if (options.preload !== false) {
-      self._preload(pathComponents[0])
+      preload(pathComponents[0])
     }
 
-    const file = await exporter(ipfsPath, self._ipld, options)
+    const file = await exporter(ipfsPath, ipld, options)
 
     if (!file.unixfs) {
       throw errCode(new Error('dag node was not a UnixFS node'), 'ERR_NOT_UNIXFS')
@@ -28,7 +28,7 @@ module.exports = function (self) {
 
     if (file.unixfs.type.includes('dir')) {
       if (recursive) {
-        for await (const child of exporter.recursive(file.cid, self._ipld, options)) {
+        for await (const child of exporter.recursive(file.cid, ipld, options)) {
           if (file.cid.toBaseEncodedString() === child.cid.toBaseEncodedString()) {
             continue
           }
diff --git a/src/core/components/name-pubsub.js b/src/core/components/name-pubsub.js
deleted file mode 100644
index 4fc4775713..0000000000
--- a/src/core/components/name-pubsub.js
+++ /dev/null
@@ -1,78 +0,0 @@
-'use strict'
-
-const debug = require('debug')
-const errcode = require('err-code')
-const callbackify = require('callbackify')
-
-const IpnsPubsubDatastore = require('../ipns/routing/pubsub-datastore')
-
-const log = debug('ipfs:name-pubsub')
-log.error = debug('ipfs:name-pubsub:error')
-
-// Is pubsub enabled
-const isNamePubsubEnabled = (node) => {
-  try {
-    
return Boolean(getPubsubRouting(node)) - } catch (err) { - return false - } -} - -// Get pubsub from IPNS routing -const getPubsubRouting = (node) => { - if (!node._ipns || !node._options.EXPERIMENTAL.ipnsPubsub) { - throw errcode(new Error('IPNS pubsub subsystem is not enabled'), 'ERR_IPNS_PUBSUB_NOT_ENABLED') - } - - // Only one store and it is pubsub - if (IpnsPubsubDatastore.isIpnsPubsubDatastore(node._ipns.routing)) { - return node._ipns.routing - } - - // Find in tiered - const pubsub = (node._ipns.routing.stores || []).find(s => IpnsPubsubDatastore.isIpnsPubsubDatastore(s)) - - if (!pubsub) { - throw errcode(new Error('IPNS pubsub datastore not found'), 'ERR_PUBSUB_DATASTORE_NOT_FOUND') - } - - return pubsub -} - -module.exports = function namePubsub (self) { - return { - /** - * Query the state of IPNS pubsub. - * - * @returns {Promise|void} - */ - state: callbackify(async () => { // eslint-disable-line require-await - return { - enabled: isNamePubsubEnabled(self) - } - }), - /** - * Cancel a name subscription. - * - * @param {String} name subscription name. - * @param {function(Error)} [callback] - * @returns {Promise|void} - */ - cancel: callbackify(async (name) => { // eslint-disable-line require-await - const pubsub = getPubsubRouting(self) - - return pubsub.cancel(name) - }), - /** - * Show current name subscriptions. - * - * @param {function(Error)} [callback] - * @returns {Promise|void} - */ - subs: callbackify(async () => { // eslint-disable-line require-await - const pubsub = getPubsubRouting(self) - - return pubsub.getSubscriptions() - }) - } -} diff --git a/src/core/components/name.js b/src/core/components/name.js deleted file mode 100644 index 96614eda4d..0000000000 --- a/src/core/components/name.js +++ /dev/null @@ -1,179 +0,0 @@ -'use strict' - -const debug = require('debug') -const callbackify = require('callbackify') -const human = require('human-to-milliseconds') -const crypto = require('libp2p-crypto') -const errcode = require('err-code') -const mergeOptions = require('merge-options') -const CID = require('cids') -const isDomain = require('is-domain-name') -const promisify = require('promisify-es6') - -const log = debug('ipfs:name') -log.error = debug('ipfs:name:error') - -const namePubsub = require('./name-pubsub') -const utils = require('../utils') -const path = require('../ipns/path') - -const keyLookup = async (ipfsNode, kname) => { - if (kname === 'self') { - return ipfsNode._peerInfo.id.privKey - } - - try { - const pass = ipfsNode._options.pass - const pem = await ipfsNode._keychain.exportKey(kname, pass) - const privateKey = await promisify(crypto.keys.import.bind(crypto.keys))(pem, pass) - - return privateKey - } catch (err) { - log.error(err) - - throw errcode(err, 'ERR_CANNOT_GET_KEY') - } -} - -const appendRemainder = async (result, remainder) => { - result = await result - - if (remainder.length) { - return result + '/' + remainder.join('/') - } - - return result -} - -/** - * @typedef { import("../index") } IPFS - */ - -/** - * IPNS - Inter-Planetary Naming System - * - * @param {IPFS} self - * @returns {Object} - */ -module.exports = function name (self) { - return { - /** - * IPNS is a PKI namespace, where names are the hashes of public keys, and - * the private key enables publishing new (signed) values. In both publish - * and resolve, the default name used is the node's own PeerID, - * which is the hash of its public key. - * - * @param {String} value ipfs path of the object to be published. - * @param {Object} options ipfs publish options. 
- * @param {boolean} options.resolve resolve given path before publishing. - * @param {String} options.lifetime time duration that the record will be valid for. - This accepts durations such as "300s", "1.5h" or "2h45m". Valid time units are - "ns", "ms", "s", "m", "h". Default is 24h. - * @param {String} options.ttl time duration this record should be cached for (NOT IMPLEMENTED YET). - * This accepts durations such as "300s", "1.5h" or "2h45m". Valid time units are - "ns", "ms", "s", "m", "h" (caution: experimental). - * @param {String} options.key name of the key to be used, as listed by 'ipfs key list -l'. - * @param {function(Error)} [callback] - * @returns {Promise|void} - */ - publish: callbackify.variadic(async (value, options) => { - options = options || {} - - const resolve = !(options.resolve === false) - const lifetime = options.lifetime || '24h' - const key = options.key || 'self' - - if (!self.isOnline()) { - throw errcode(new Error(utils.OFFLINE_ERROR), 'OFFLINE_ERROR') - } - - // TODO: params related logic should be in the core implementation - - // Normalize path value - try { - value = utils.normalizePath(value) - } catch (err) { - log.error(err) - - throw err - } - - let pubLifetime - try { - pubLifetime = human(lifetime) - - // Calculate lifetime with nanoseconds precision - pubLifetime = pubLifetime.toFixed(6) - } catch (err) { - log.error(err) - - throw err - } - - // TODO: ttl human for cache - const results = await Promise.all([ - // verify if the path exists, if not, an error will stop the execution - keyLookup(self, key), - resolve.toString() === 'true' ? path.resolvePath(self, value) : Promise.resolve() - ]) - - // Start publishing process - return self._ipns.publish(results[0], value, pubLifetime) - }), - - /** - * Given a key, query the DHT for its best value. - * - * @param {String} name ipns name to resolve. Defaults to your node's peerID. - * @param {Object} options ipfs resolve options. - * @param {boolean} options.nocache do not use cached entries. - * @param {boolean} options.recursive resolve until the result is not an IPNS name. - * @param {function(Error)} [callback] - * @returns {Promise|void} - */ - resolve: callbackify.variadic(async (name, options) => { // eslint-disable-line require-await - options = mergeOptions({ - nocache: false, - recursive: true - }, options || {}) - - const offline = self._options.offline - - // TODO: params related logic should be in the core implementation - if (offline && options.nocache) { - throw errcode(new Error('cannot specify both offline and nocache'), 'ERR_NOCACHE_AND_OFFLINE') - } - - // Set node id as name for being resolved, if it is not received - if (!name) { - name = self._peerInfo.id.toB58String() - } - - if (!name.startsWith('/ipns/')) { - name = `/ipns/${name}` - } - - const [namespace, hash, ...remainder] = name.slice(1).split('/') - try { - new CID(hash) // eslint-disable-line no-new - } catch (err) { - // lets check if we have a domain ex. 
/ipns/ipfs.io and resolve with dns - if (isDomain(hash)) { - return appendRemainder(self.dns(hash, options), remainder) - } - - log.error(err) - throw errcode(new Error('Invalid IPNS name'), 'ERR_IPNS_INVALID_NAME') - } - - // multihash is valid lets resolve with IPNS - // IPNS resolve needs a online daemon - if (!self.isOnline() && !offline) { - throw errcode(new Error(utils.OFFLINE_ERROR), 'OFFLINE_ERROR') - } - - return appendRemainder(self._ipns.resolve(`/${namespace}/${hash}`, options), remainder) - }), - pubsub: namePubsub(self) - } -} diff --git a/src/core/components/name/publish.js b/src/core/components/name/publish.js new file mode 100644 index 0000000000..cf43f7e8dd --- /dev/null +++ b/src/core/components/name/publish.js @@ -0,0 +1,102 @@ +'use strict' + +const debug = require('debug') +const parseDuration = require('parse-duration') +const crypto = require('libp2p-crypto') +const errcode = require('err-code') + +const log = debug('ipfs:name:publish') +log.error = debug('ipfs:name:publish:error') + +const { OFFLINE_ERROR, normalizePath } = require('../../utils') +const { resolvePath } = require('./utils') + +/** + * @typedef { import("../index") } IPFS + */ + +/** + * IPNS - Inter-Planetary Naming System + * + * @param {IPFS} self + * @returns {Object} + */ +module.exports = ({ ipns, dag, peerInfo, isOnline, keychain, options: constructorOptions }) => { + const lookupKey = async keyName => { + if (keyName === 'self') { + return peerInfo.id.privKey + } + + try { + const pass = constructorOptions.pass + const pem = await keychain.exportKey(keyName, pass) + const privateKey = await crypto.keys.import(pem, pass) + return privateKey + } catch (err) { + log.error(err) + throw errcode(err, 'ERR_CANNOT_GET_KEY') + } + } + + /** + * IPNS is a PKI namespace, where names are the hashes of public keys, and + * the private key enables publishing new (signed) values. In both publish + * and resolve, the default name used is the node's own PeerID, + * which is the hash of its public key. + * + * @param {String} value ipfs path of the object to be published. + * @param {Object} options ipfs publish options. + * @param {boolean} options.resolve resolve given path before publishing. + * @param {String} options.lifetime time duration that the record will be valid for. + This accepts durations such as "300s", "1.5h" or "2h45m". Valid time units are + "ns", "ms", "s", "m", "h". Default is 24h. + * @param {String} options.ttl time duration this record should be cached for (NOT IMPLEMENTED YET). + * This accepts durations such as "300s", "1.5h" or "2h45m". Valid time units are + "ns", "ms", "s", "m", "h" (caution: experimental). + * @param {String} options.key name of the key to be used, as listed by 'ipfs key list -l'. 
+ * @param {function(Error)} [callback]
+ * @returns {Promise|void}
+ */
+  return async function publish (value, options) {
+    options = options || {}
+
+    const resolve = !(options.resolve === false)
+    const lifetime = options.lifetime || '24h'
+    const key = options.key || 'self'
+
+    if (!isOnline()) {
+      throw errcode(new Error(OFFLINE_ERROR), 'OFFLINE_ERROR')
+    }
+
+    // TODO: params related logic should be in the core implementation
+
+    // Normalize path value
+    try {
+      value = normalizePath(value)
+    } catch (err) {
+      log.error(err)
+      throw err
+    }
+
+    let pubLifetime
+    try {
+      pubLifetime = parseDuration(lifetime)
+
+      // Calculate lifetime with nanoseconds precision
+      pubLifetime = pubLifetime.toFixed(6)
+    } catch (err) {
+      log.error(err)
+      throw err
+    }
+
+    // TODO: ttl human for cache
+    const results = await Promise.all([
+      // verify if the path exists, if not, an error will stop the execution
+      lookupKey(key),
+      resolve ? resolvePath({ ipns, dag }, value) : Promise.resolve()
+    ])
+
+    // Start publishing process
+    return ipns.publish(results[0], value, pubLifetime)
+  }
+}
diff --git a/src/core/components/name/pubsub/cancel.js b/src/core/components/name/pubsub/cancel.js
new file mode 100644
index 0000000000..b0ce98c6ef
--- /dev/null
+++ b/src/core/components/name/pubsub/cancel.js
@@ -0,0 +1,17 @@
+'use strict'
+
+const { getPubsubRouting } = require('./utils')
+
+module.exports = ({ ipns, options: constructorOptions }) => {
+  /**
+   * Cancel a name subscription.
+   *
+   * @param {String} name subscription name.
+   * @param {function(Error)} [callback]
+   * @returns {Promise<{ canceled: boolean }>}
+   */
+  return function cancel (name) {
+    const pubsub = getPubsubRouting(ipns, constructorOptions)
+    return pubsub.cancel(name)
+  }
+}
diff --git a/src/core/components/name/pubsub/state.js b/src/core/components/name/pubsub/state.js
new file mode 100644
index 0000000000..83033c7875
--- /dev/null
+++ b/src/core/components/name/pubsub/state.js
@@ -0,0 +1,18 @@
+'use strict'
+
+const { getPubsubRouting } = require('./utils')
+
+module.exports = ({ ipns, options: constructorOptions }) => {
+  /**
+   * Query the state of IPNS pubsub.
+   *
+   * @returns {Promise}
+   */
+  return async function state () { // eslint-disable-line require-await
+    try {
+      return { enabled: Boolean(getPubsubRouting(ipns, constructorOptions)) }
+    } catch (err) {
+      return { enabled: false }
+    }
+  }
+}
diff --git a/src/core/components/name/pubsub/subs.js b/src/core/components/name/pubsub/subs.js
new file mode 100644
index 0000000000..bcf6ede16e
--- /dev/null
+++ b/src/core/components/name/pubsub/subs.js
@@ -0,0 +1,16 @@
+'use strict'
+
+const { getPubsubRouting } = require('./utils')
+
+module.exports = ({ ipns, options: constructorOptions }) => {
+  /**
+   * Show current name subscriptions.
+ * + * @param {function(Error)} [callback] + * @returns {Promise} + */ + return function subs () { + const pubsub = getPubsubRouting(ipns, constructorOptions) + return pubsub.getSubscriptions() + } +} diff --git a/src/core/components/name/pubsub/utils.js b/src/core/components/name/pubsub/utils.js new file mode 100644 index 0000000000..ee53a96f9c --- /dev/null +++ b/src/core/components/name/pubsub/utils.js @@ -0,0 +1,25 @@ +'use strict' + +const IpnsPubsubDatastore = require('../../../ipns/routing/pubsub-datastore') +const errcode = require('err-code') + +// Get pubsub from IPNS routing +exports.getPubsubRouting = (ipns, options) => { + if (!ipns || !(options.EXPERIMENTAL && options.EXPERIMENTAL.ipnsPubsub)) { + throw errcode(new Error('IPNS pubsub subsystem is not enabled'), 'ERR_IPNS_PUBSUB_NOT_ENABLED') + } + + // Only one store and it is pubsub + if (IpnsPubsubDatastore.isIpnsPubsubDatastore(ipns.routing)) { + return ipns.routing + } + + // Find in tiered + const pubsub = (ipns.routing.stores || []).find(s => IpnsPubsubDatastore.isIpnsPubsubDatastore(s)) + + if (!pubsub) { + throw errcode(new Error('IPNS pubsub datastore not found'), 'ERR_PUBSUB_DATASTORE_NOT_FOUND') + } + + return pubsub +} diff --git a/src/core/components/name/resolve.js b/src/core/components/name/resolve.js new file mode 100644 index 0000000000..04734fd985 --- /dev/null +++ b/src/core/components/name/resolve.js @@ -0,0 +1,89 @@ +'use strict' + +const debug = require('debug') +const errcode = require('err-code') +const mergeOptions = require('merge-options') +const CID = require('cids') +const isDomain = require('is-domain-name') + +const log = debug('ipfs:name:resolve') +log.error = debug('ipfs:name:resolve:error') + +const { OFFLINE_ERROR } = require('../../utils') + +const appendRemainder = async (result, remainder) => { + result = await result + + if (remainder.length) { + return result + '/' + remainder.join('/') + } + + return result +} + +/** + * @typedef { import("../index") } IPFS + */ + +/** + * IPNS - Inter-Planetary Naming System + * + * @param {IPFS} self + * @returns {Object} + */ +module.exports = ({ dns, ipns, peerInfo, isOnline, options: constructorOptions }) => { + /** + * Given a key, query the DHT for its best value. + * + * @param {String} name ipns name to resolve. Defaults to your node's peerID. + * @param {Object} options ipfs resolve options. + * @param {boolean} options.nocache do not use cached entries. + * @param {boolean} options.recursive resolve until the result is not an IPNS name. + * @param {function(Error)} [callback] + * @returns {Promise|void} + */ + return async function * resolve (name, options) { // eslint-disable-line require-await + options = mergeOptions({ + nocache: false, + recursive: true + }, options || {}) + + const { offline } = constructorOptions + + // TODO: params related logic should be in the core implementation + if (offline && options.nocache) { + throw errcode(new Error('cannot specify both offline and nocache'), 'ERR_NOCACHE_AND_OFFLINE') + } + + // Set node id as name for being resolved, if it is not received + if (!name) { + name = peerInfo.id.toB58String() + } + + if (!name.startsWith('/ipns/')) { + name = `/ipns/${name}` + } + + const [namespace, hash, ...remainder] = name.slice(1).split('/') + try { + new CID(hash) // eslint-disable-line no-new + } catch (err) { + // lets check if we have a domain ex. 
/ipns/ipfs.io and resolve with dns
+      if (isDomain(hash)) {
+        yield appendRemainder(dns(hash, options), remainder)
+        return
+      }
+
+      log.error(err)
+      throw errcode(new Error('Invalid IPNS name'), 'ERR_IPNS_INVALID_NAME')
+    }
+
+    // multihash is valid lets resolve with IPNS
+    // IPNS resolve needs a online daemon
+    if (!isOnline() && !offline) {
+      throw errcode(new Error(OFFLINE_ERROR), 'OFFLINE_ERROR')
+    }
+
+    // TODO: convert ipns.resolve to return an iterator
+    yield appendRemainder(ipns.resolve(`/${namespace}/${hash}`, options), remainder)
+  }
+}
diff --git a/src/core/components/name/utils.js b/src/core/components/name/utils.js
new file mode 100644
index 0000000000..acfb307fbd
--- /dev/null
+++ b/src/core/components/name/utils.js
@@ -0,0 +1,15 @@
+'use strict'
+
+const isIPFS = require('is-ipfs')
+
+// resolves the given path by parsing out protocol-specific entries
+// (e.g. /ipns/) and then going through the /ipfs/ entries and returning the final node
+exports.resolvePath = ({ ipns, dag }, name) => {
+  // ipns path
+  if (isIPFS.ipnsPath(name)) {
+    return ipns.resolve(name)
+  }
+
+  // ipfs path
+  return dag.get(name.substring('/ipfs/'.length))
+}
diff --git a/src/core/components/object.js b/src/core/components/object.js
deleted file mode 100644
index 1f7e3f7cbe..0000000000
--- a/src/core/components/object.js
+++ /dev/null
@@ -1,302 +0,0 @@
-'use strict'
-
-const callbackify = require('callbackify')
-const dagPB = require('ipld-dag-pb')
-const DAGNode = dagPB.DAGNode
-const DAGLink = dagPB.DAGLink
-const CID = require('cids')
-const mh = require('multihashes')
-const multicodec = require('multicodec')
-const Unixfs = require('ipfs-unixfs')
-const errCode = require('err-code')
-
-function normalizeMultihash (multihash, enc) {
-  if (typeof multihash === 'string') {
-    if (enc === 'base58' || !enc) {
-      return multihash
-    }
-
-    return Buffer.from(multihash, enc)
-  } else if (Buffer.isBuffer(multihash)) {
-    return multihash
-  } else if (CID.isCID(multihash)) {
-    return multihash.buffer
-  } else {
-    throw new Error('unsupported multihash')
-  }
-}
-
-function parseBuffer (buf, encoding) {
-  switch (encoding) {
-    case 'json':
-      return parseJSONBuffer(buf)
-    case 'protobuf':
-      return parseProtoBuffer(buf)
-    default:
-      throw new Error(`unkown encoding: ${encoding}`)
-  }
-}
-
-function parseJSONBuffer (buf) {
-  let data
-  let links
-
-  try {
-    const parsed = JSON.parse(buf.toString())
-
-    links = (parsed.Links || []).map((link) => {
-      return new DAGLink(
-        link.Name || link.name,
-        link.Size || link.size,
-        mh.fromB58String(link.Hash || link.hash || link.multihash)
-      )
-    })
-    data = Buffer.from(parsed.Data)
-  } catch (err) {
-    throw new Error('failed to parse JSON: ' + err)
-  }
-
-  return new DAGNode(data, links)
-}
-
-function parseProtoBuffer (buf) {
-  return dagPB.util.deserialize(buf)
-}
-
-function findLinks (node, links = []) {
-  for (const key in node) {
-    const val = node[key]
-
-    if (key === '/' && Object.keys(node).length === 1) {
-      try {
-        links.push(new DAGLink('', 0, new CID(val)))
-        continue
-      } catch (_) {
-        // not a CID
-      }
-    }
-
-    if (CID.isCID(val)) {
-      links.push(new DAGLink('', 0, val))
-
-      continue
-    }
-
-    if (Array.isArray(val)) {
-      findLinks(val, links)
-    }
-
-    if (typeof val === 'object' && !(val instanceof String)) {
-      findLinks(val, links)
-    }
-  }
-
-  return links
-}
-
-module.exports = function object (self) {
-  async function editAndSave (multihash, edit, options) {
-    options = options || {}
-
-    const node = await self.object.get(multihash, options)
-
-    // edit applies the edit
func passed to - // editAndSave - const cid = await self._ipld.put(edit(node), multicodec.DAG_PB, { - cidVersion: 0, - hashAlg: multicodec.SHA2_256 - }) - - if (options.preload !== false) { - self._preload(cid) - } - - return cid - } - - return { - new: callbackify.variadic(async (template, options) => { - options = options || {} - - // allow options in the template position - if (template && typeof template !== 'string') { - options = template - template = null - } - - let data - - if (template) { - if (template === 'unixfs-dir') { - data = (new Unixfs('directory')).marshal() - } else { - throw new Error('unknown template') - } - } else { - data = Buffer.alloc(0) - } - - const node = new DAGNode(data) - - const cid = await self._ipld.put(node, multicodec.DAG_PB, { - cidVersion: 0, - hashAlg: multicodec.SHA2_256 - }) - - if (options.preload !== false) { - self._preload(cid) - } - - return cid - }), - put: callbackify.variadic(async (obj, options) => { - options = options || {} - - const encoding = options.enc - let node - - if (Buffer.isBuffer(obj)) { - if (encoding) { - node = await parseBuffer(obj, encoding) - } else { - node = new DAGNode(obj) - } - } else if (DAGNode.isDAGNode(obj)) { - // already a dag node - node = obj - } else if (typeof obj === 'object') { - node = new DAGNode(obj.Data, obj.Links) - } else { - throw new Error('obj not recognized') - } - - const release = await self._gcLock.readLock() - - try { - const cid = await self._ipld.put(node, multicodec.DAG_PB, { - cidVersion: 0, - hashAlg: multicodec.SHA2_256 - }) - - if (options.preload !== false) { - self._preload(cid) - } - - return cid - } finally { - release() - } - }), - - get: callbackify.variadic(async (multihash, options) => { // eslint-disable-line require-await - options = options || {} - - let mh, cid - - try { - mh = normalizeMultihash(multihash, options.enc) - } catch (err) { - throw errCode(err, 'ERR_INVALID_MULTIHASH') - } - - try { - cid = new CID(mh) - } catch (err) { - throw errCode(err, 'ERR_INVALID_CID') - } - - if (options.cidVersion === 1) { - cid = cid.toV1() - } - - if (options.preload !== false) { - self._preload(cid) - } - - return self._ipld.get(cid) - }), - - data: callbackify.variadic(async (multihash, options) => { - options = options || {} - - const node = await self.object.get(multihash, options) - - return node.Data - }), - - links: callbackify.variadic(async (multihash, options) => { - options = options || {} - - const cid = new CID(multihash) - const result = await self.dag.get(cid, options) - - if (cid.codec === 'raw') { - return [] - } - - if (cid.codec === 'dag-pb') { - return result.value.Links - } - - if (cid.codec === 'dag-cbor') { - return findLinks(result) - } - - throw new Error(`Cannot resolve links from codec ${cid.codec}`) - }), - - stat: callbackify.variadic(async (multihash, options) => { - options = options || {} - - const node = await self.object.get(multihash, options) - const serialized = dagPB.util.serialize(node) - const cid = await dagPB.util.cid(serialized, { - cidVersion: 0 - }) - - const blockSize = serialized.length - const linkLength = node.Links.reduce((a, l) => a + l.Tsize, 0) - - return { - Hash: cid.toBaseEncodedString(), - NumLinks: node.Links.length, - BlockSize: blockSize, - LinksSize: blockSize - node.Data.length, - DataSize: node.Data.length, - CumulativeSize: blockSize + linkLength - } - }), - - patch: { - addLink: callbackify.variadic(async (multihash, link, options) => { // eslint-disable-line require-await - return editAndSave(multihash, (node) => { 
- node.addLink(link) - - return node - }, options) - }), - - rmLink: callbackify.variadic(async (multihash, linkRef, options) => { // eslint-disable-line require-await - return editAndSave(multihash, (node) => { - node.rmLink(linkRef.Name || linkRef.name) - - return node - }, options) - }), - - appendData: callbackify.variadic(async (multihash, data, options) => { // eslint-disable-line require-await - return editAndSave(multihash, (node) => { - const newData = Buffer.concat([node.Data, data]) - - return new DAGNode(newData, node.Links) - }, options) - }), - - setData: callbackify.variadic(async (multihash, data, options) => { // eslint-disable-line require-await - return editAndSave(multihash, (node) => { - return new DAGNode(data, node.Links) - }, options) - }) - } - } -} diff --git a/src/core/components/object/data.js b/src/core/components/object/data.js new file mode 100644 index 0000000000..e7066f3d74 --- /dev/null +++ b/src/core/components/object/data.js @@ -0,0 +1,9 @@ +'use strict' + +module.exports = ({ ipld, preload }) => { + const get = require('./get')({ ipld, preload }) + return async function data (multihash, options) { + const node = await get(multihash, options) + return node.Data + } +} diff --git a/src/core/components/object/get.js b/src/core/components/object/get.js new file mode 100644 index 0000000000..394235cc6c --- /dev/null +++ b/src/core/components/object/get.js @@ -0,0 +1,48 @@ +'use strict' + +const CID = require('cids') +const errCode = require('err-code') + +function normalizeMultihash (multihash, enc) { + if (typeof multihash === 'string') { + if (enc === 'base58' || !enc) { + return multihash + } + return Buffer.from(multihash, enc) + } else if (Buffer.isBuffer(multihash)) { + return multihash + } else if (CID.isCID(multihash)) { + return multihash.buffer + } + throw new Error('unsupported multihash') +} + +module.exports = ({ ipld, preload }) => { + return async function get (multihash, options) { // eslint-disable-line require-await + options = options || {} + + let mh, cid + + try { + mh = normalizeMultihash(multihash, options.enc) + } catch (err) { + throw errCode(err, 'ERR_INVALID_MULTIHASH') + } + + try { + cid = new CID(mh) + } catch (err) { + throw errCode(err, 'ERR_INVALID_CID') + } + + if (options.cidVersion === 1) { + cid = cid.toV1() + } + + if (options.preload !== false) { + preload(cid) + } + + return ipld.get(cid) + } +} diff --git a/src/core/components/object/links.js b/src/core/components/object/links.js new file mode 100644 index 0000000000..8e6a58f177 --- /dev/null +++ b/src/core/components/object/links.js @@ -0,0 +1,58 @@ +'use strict' + +const dagPB = require('ipld-dag-pb') +const DAGLink = dagPB.DAGLink +const CID = require('cids') + +function findLinks (node, links = []) { + for (const key in node) { + const val = node[key] + + if (key === '/' && Object.keys(node).length === 1) { + try { + links.push(new DAGLink('', 0, new CID(val))) + continue + } catch (_) { + // not a CID + } + } + + if (CID.isCID(val)) { + links.push(new DAGLink('', 0, val)) + continue + } + + if (Array.isArray(val)) { + findLinks(val, links) + } + + if (val && typeof val === 'object') { + findLinks(val, links) + } + } + + return links +} + +module.exports = ({ dag }) => { + return async function links (multihash, options) { + options = options || {} + + const cid = new CID(multihash) + const result = await dag.get(cid, options) + + if (cid.codec === 'raw') { + return [] + } + + if (cid.codec === 'dag-pb') { + return result.value.Links + } + + if (cid.codec === 
'dag-cbor') { + return findLinks(result) + } + + throw new Error(`Cannot resolve links from codec ${cid.codec}`) + } +} diff --git a/src/core/components/object/new.js b/src/core/components/object/new.js new file mode 100644 index 0000000000..4d6e6291b0 --- /dev/null +++ b/src/core/components/object/new.js @@ -0,0 +1,43 @@ +'use strict' + +const dagPB = require('ipld-dag-pb') +const DAGNode = dagPB.DAGNode +const multicodec = require('multicodec') +const Unixfs = require('ipfs-unixfs') + +module.exports = ({ ipld, preload }) => { + return async function _new (template, options) { + options = options || {} + + // allow options in the template position + if (template && typeof template !== 'string') { + options = template + template = null + } + + let data + + if (template) { + if (template === 'unixfs-dir') { + data = (new Unixfs('directory')).marshal() + } else { + throw new Error('unknown template') + } + } else { + data = Buffer.alloc(0) + } + + const node = new DAGNode(data) + + const cid = await ipld.put(node, multicodec.DAG_PB, { + cidVersion: 0, + hashAlg: multicodec.SHA2_256 + }) + + if (options.preload !== false) { + preload(cid) + } + + return cid + } +} diff --git a/src/core/components/object/patch/add-link.js b/src/core/components/object/patch/add-link.js new file mode 100644 index 0000000000..2cdd990749 --- /dev/null +++ b/src/core/components/object/patch/add-link.js @@ -0,0 +1,12 @@ +'use strict' + +module.exports = ({ ipld, gcLock, preload }) => { + const get = require('../get')({ ipld, preload }) + const put = require('../put')({ ipld, gcLock, preload }) + + return async function addLink (multihash, link, options) { + const node = await get(multihash, options) + node.addLink(link) + return put(node, options) + } +} diff --git a/src/core/components/object/patch/append-data.js b/src/core/components/object/patch/append-data.js new file mode 100644 index 0000000000..511d79feb3 --- /dev/null +++ b/src/core/components/object/patch/append-data.js @@ -0,0 +1,14 @@ +'use strict' + +const { DAGNode } = require('ipld-dag-pb') + +module.exports = ({ ipld, gcLock, preload }) => { + const get = require('../get')({ ipld, preload }) + const put = require('../put')({ ipld, gcLock, preload }) + + return async function appendData (multihash, data, options) { + const node = await get(multihash, options) + const newData = Buffer.concat([node.Data, data]) + return put(new DAGNode(newData, node.Links), options) + } +} diff --git a/src/core/components/object/patch/rm-link.js b/src/core/components/object/patch/rm-link.js new file mode 100644 index 0000000000..bd3033a06b --- /dev/null +++ b/src/core/components/object/patch/rm-link.js @@ -0,0 +1,12 @@ +'use strict' + +module.exports = ({ ipld, gcLock, preload }) => { + const get = require('../get')({ ipld, preload }) + const put = require('../put')({ ipld, gcLock, preload }) + + return async function rmLink (multihash, linkRef, options) { + const node = await get(multihash, options) + node.rmLink(linkRef.Name || linkRef.name) + return put(node, options) + } +} diff --git a/src/core/components/object/patch/set-data.js b/src/core/components/object/patch/set-data.js new file mode 100644 index 0000000000..7693a5b5ba --- /dev/null +++ b/src/core/components/object/patch/set-data.js @@ -0,0 +1,13 @@ +'use strict' + +const { DAGNode } = require('ipld-dag-pb') + +module.exports = ({ ipld, gcLock, preload }) => { + const get = require('../get')({ ipld, preload }) + const put = require('../put')({ ipld, gcLock, preload }) + + return async function setData 
(multihash, data, options) {
+    const node = await get(multihash, options)
+    return put(new DAGNode(data, node.Links), options)
+  }
+}
diff --git a/src/core/components/object/put.js b/src/core/components/object/put.js
new file mode 100644
index 0000000000..2a8a195f53
--- /dev/null
+++ b/src/core/components/object/put.js
@@ -0,0 +1,85 @@
+'use strict'
+
+const dagPB = require('ipld-dag-pb')
+const DAGNode = dagPB.DAGNode
+const DAGLink = dagPB.DAGLink
+const mh = require('multihashes')
+const multicodec = require('multicodec')
+
+function parseBuffer (buf, encoding) {
+  switch (encoding) {
+    case 'json':
+      return parseJSONBuffer(buf)
+    case 'protobuf':
+      return parseProtoBuffer(buf)
+    default:
+      throw new Error(`unknown encoding: ${encoding}`)
+  }
+}
+
+function parseJSONBuffer (buf) {
+  let data
+  let links
+
+  try {
+    const parsed = JSON.parse(buf.toString())
+
+    links = (parsed.Links || []).map((link) => {
+      return new DAGLink(
+        link.Name || link.name,
+        link.Size || link.size,
+        mh.fromB58String(link.Hash || link.hash || link.multihash)
+      )
+    })
+    data = Buffer.from(parsed.Data)
+  } catch (err) {
+    throw new Error('failed to parse JSON: ' + err)
+  }
+
+  return new DAGNode(data, links)
+}
+
+function parseProtoBuffer (buf) {
+  return dagPB.util.deserialize(buf)
+}
+
+module.exports = ({ ipld, gcLock, preload }) => {
+  return async function put (obj, options) {
+    options = options || {}
+
+    const encoding = options.enc
+    let node
+
+    if (Buffer.isBuffer(obj)) {
+      if (encoding) {
+        node = await parseBuffer(obj, encoding)
+      } else {
+        node = new DAGNode(obj)
+      }
+    } else if (DAGNode.isDAGNode(obj)) {
+      // already a dag node
+      node = obj
+    } else if (typeof obj === 'object') {
+      node = new DAGNode(obj.Data, obj.Links)
+    } else {
+      throw new Error('obj not recognized')
+    }
+
+    const release = await gcLock.readLock()
+
+    try {
+      const cid = await ipld.put(node, multicodec.DAG_PB, {
+        cidVersion: 0,
+        hashAlg: multicodec.SHA2_256
+      })
+
+      if (options.preload !== false) {
+        preload(cid)
+      }
+
+      return cid
+    } finally {
+      release()
+    }
+  }
+}
diff --git a/src/core/components/object/stat.js b/src/core/components/object/stat.js
new file mode 100644
index 0000000000..ea2f06c72c
--- /dev/null
+++ b/src/core/components/object/stat.js
@@ -0,0 +1,28 @@
+'use strict'
+
+const dagPB = require('ipld-dag-pb')
+
+module.exports = ({ ipld, preload }) => {
+  const get = require('./get')({ ipld, preload })
+  return async function stat (multihash, options) {
+    options = options || {}
+
+    const node = await get(multihash, options)
+    const serialized = dagPB.util.serialize(node)
+    const cid = await dagPB.util.cid(serialized, {
+      cidVersion: 0
+    })
+
+    const blockSize = serialized.length
+    const linkLength = node.Links.reduce((a, l) => a + l.Tsize, 0)
+
+    return {
+      Hash: cid.toBaseEncodedString(),
+      NumLinks: node.Links.length,
+      BlockSize: blockSize,
+      LinksSize: blockSize - node.Data.length,
+      DataSize: node.Data.length,
+      CumulativeSize: blockSize + linkLength
+    }
+  }
+}
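The new `object.put` above takes the GC read lock around the IPLD write and releases it in `finally`. A hypothetical helper makes that discipline explicit (`withReadLock` is not part of the patch; it assumes the mutex API used throughout this refactor, where `readLock()` resolves to a release function):

    // Take the GC read lock, run the critical section, always release.
    async function withReadLock (gcLock, fn) {
      const release = await gcLock.readLock()
      try {
        return await fn()
      } finally {
        release() // runs whether fn resolved or threw
      }
    }

    // e.g. the locked section of put could be expressed as:
    // return withReadLock(gcLock, () => ipld.put(node, multicodec.DAG_PB, {
    //   cidVersion: 0, hashAlg: multicodec.SHA2_256
    // }))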
diff --git a/src/core/components/pin.js b/src/core/components/pin.js
deleted file mode 100644
index cbe0c8a250..0000000000
--- a/src/core/components/pin.js
+++ /dev/null
@@ -1,248 +0,0 @@
-/* eslint max-nested-callbacks: ["error", 8] */
-'use strict'
-
-const callbackify = require('callbackify')
-const errCode = require('err-code')
-const multibase = require('multibase')
-const { resolvePath } = require('../utils')
-const PinManager = require('./pin/pin-manager')
-const PinTypes = PinManager.PinTypes
-
-module.exports = (self) => {
-  const dag = self.dag
-  const pinManager = new PinManager(self._repo, dag)
-
-  const pin = {
-    add: callbackify.variadic(async (paths, options) => {
-      options = options || {}
-
-      const recursive = options.recursive !== false
-      const cids = await resolvePath(self.object, paths)
-      const pinAdd = async () => {
-        const results = []
-
-        // verify that each hash can be pinned
-        for (const cid of cids) {
-          const key = cid.toBaseEncodedString()
-
-          if (recursive) {
-            if (pinManager.recursivePins.has(key)) {
-              // it's already pinned recursively
-              results.push(key)
-
-              continue
-            }
-
-            // entire graph of nested links should be pinned,
-            // so make sure we have all the objects
-            await pinManager.fetchCompleteDag(key, { preload: options.preload })
-
-            // found all objects, we can add the pin
-            results.push(key)
-          } else {
-            if (pinManager.recursivePins.has(key)) {
-              // recursive supersedes direct, can't have both
-              throw new Error(`${key} already pinned recursively`)
-            }
-
-            if (!pinManager.directPins.has(key)) {
-              // make sure we have the object
-              await dag.get(cid, { preload: options.preload })
-            }
-
-            results.push(key)
-          }
-        }
-
-        // update the pin sets in memory
-        const pinset = recursive ? pinManager.recursivePins : pinManager.directPins
-        results.forEach(key => pinset.add(key))
-
-        // persist updated pin sets to datastore
-        await pinManager.flushPins()
-
-        return results.map(hash => ({ hash }))
-      }
-
-      // When adding a file, we take a lock that gets released after pinning
-      // is complete, so don't take a second lock here
-      const lock = Boolean(options.lock)
-
-      if (!lock) {
-        return pinAdd()
-      }
-
-      const release = await self._gcLock.readLock()
-
-      try {
-        await pinAdd()
-      } finally {
-        release()
-      }
-    }),
-
-    rm: callbackify.variadic(async (paths, options) => {
-      options = options || {}
-
-      const recursive = options.recursive == null ?
true : options.recursive - - if (options.cidBase && !multibase.names.includes(options.cidBase)) { - throw errCode(new Error('invalid multibase'), 'ERR_INVALID_MULTIBASE') - } - - const cids = await resolvePath(self.object, paths) - const release = await self._gcLock.readLock() - const results = [] - - try { - // verify that each hash can be unpinned - for (const cid of cids) { - const res = await pinManager.isPinnedWithType(cid, PinTypes.all) - - const { pinned, reason } = res - const key = cid.toBaseEncodedString() - - if (!pinned) { - throw new Error(`${key} is not pinned`) - } - - switch (reason) { - case (PinTypes.recursive): - if (!recursive) { - throw new Error(`${key} is pinned recursively`) - } - - results.push(key) - - break - case (PinTypes.direct): - results.push(key) - - break - default: - throw new Error(`${key} is pinned indirectly under ${reason}`) - } - } - - // update the pin sets in memory - results.forEach(key => { - if (recursive && pinManager.recursivePins.has(key)) { - pinManager.recursivePins.delete(key) - } else { - pinManager.directPins.delete(key) - } - }) - - // persist updated pin sets to datastore - await pinManager.flushPins() - - self.log(`Removed pins: ${results}`) - - return results.map(hash => ({ hash })) - } finally { - release() - } - }), - - ls: callbackify.variadic(async (paths, options) => { - options = options || {} - - let type = PinTypes.all - - if (paths && paths.type) { - options = paths - paths = null - } - - if (options.type) { - type = options.type - if (typeof options.type === 'string') { - type = options.type.toLowerCase() - } - const err = PinManager.checkPinType(type) - if (err) { - throw err - } - } - - if (paths) { - // check the pinned state of specific hashes - const cids = await resolvePath(self.object, paths) - const results = [] - - for (const cid of cids) { - const { key, reason, pinned } = await pinManager.isPinnedWithType(cid, type) - - if (pinned) { - switch (reason) { - case PinTypes.direct: - case PinTypes.recursive: - results.push({ - hash: key, - type: reason - }) - break - default: - results.push({ - hash: key, - type: `${PinTypes.indirect} through ${reason}` - }) - } - } - } - - if (!results.length) { - throw new Error(`path '${paths}' is not pinned`) - } - - return results - } - - // show all pinned items of type - let pins = [] - - if (type === PinTypes.direct || type === PinTypes.all) { - pins = pins.concat( - Array.from(pinManager.directPins).map(hash => ({ - type: PinTypes.direct, - hash - })) - ) - } - - if (type === PinTypes.recursive || type === PinTypes.all) { - pins = pins.concat( - Array.from(pinManager.recursivePins).map(hash => ({ - type: PinTypes.recursive, - hash - })) - ) - } - - if (type === PinTypes.indirect || type === PinTypes.all) { - const indirects = await pinManager.getIndirectKeys(options) - - pins = pins - // if something is pinned both directly and indirectly, - // report the indirect entry - .filter(({ hash }) => - !indirects.includes(hash) || - (indirects.includes(hash) && !pinManager.directPins.has(hash)) - ) - .concat(indirects.map(hash => ({ - type: PinTypes.indirect, - hash - }))) - - return pins - } - - return pins - }), - - // used by tests - pinManager - } - - return pin -} diff --git a/src/core/components/pin/add.js b/src/core/components/pin/add.js new file mode 100644 index 0000000000..39e809676f --- /dev/null +++ b/src/core/components/pin/add.js @@ -0,0 +1,74 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +'use strict' + +const { resolvePath } = require('../../utils') + 
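The pin commands rewritten in these hunks drop `callbackify` in favour of plain promises and async iterables. A usage-level sketch of the difference, assuming an `ipfs` instance (the rewritten `pin.ls` further below yields `{ cid, type }` entries):

    // before: one callbackified call resolving to a full array
    //   const pins = await ipfs.pin.ls()
    // after: entries stream out as they are discovered
    async function printPins (ipfs) {
      for await (const { cid, type } of ipfs.pin.ls()) {
        console.log(type, cid.toString())
      }
    }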
+module.exports = ({ pinManager, gcLock, dag, object }) => {
+  return async function add (paths, options) {
+    options = options || {}
+
+    const recursive = options.recursive !== false
+    const cids = await resolvePath(object, paths)
+    const pinAdd = async () => {
+      const results = []
+
+      // verify that each hash can be pinned
+      for (const cid of cids) {
+        const key = cid.toBaseEncodedString()
+
+        if (recursive) {
+          if (pinManager.recursivePins.has(key)) {
+            // it's already pinned recursively
+            results.push(cid)
+
+            continue
+          }
+
+          // entire graph of nested links should be pinned,
+          // so make sure we have all the objects
+          await pinManager.fetchCompleteDag(key, { preload: options.preload })
+
+          // found all objects, we can add the pin
+          results.push(cid)
+        } else {
+          if (pinManager.recursivePins.has(key)) {
+            // recursive supersedes direct, can't have both
+            throw new Error(`${key} already pinned recursively`)
+          }
+
+          if (!pinManager.directPins.has(key)) {
+            // make sure we have the object
+            await dag.get(cid, { preload: options.preload })
+          }
+
+          results.push(cid)
+        }
+      }
+
+      // update the pin sets in memory (they are keyed by base58 string)
+      const pinset = recursive ? pinManager.recursivePins : pinManager.directPins
+      results.forEach(cid => pinset.add(cid.toBaseEncodedString()))
+
+      // persist updated pin sets to datastore
+      await pinManager.flushPins()
+
+      return results.map(cid => ({ cid }))
+    }
+
+    // When adding a file, we take a lock that gets released after pinning
+    // is complete, so don't take a second lock here
+    const lock = Boolean(options.lock)
+
+    if (!lock) {
+      return pinAdd()
+    }
+
+    const release = await gcLock.readLock()
+
+    try {
+      return await pinAdd()
+    } finally {
+      release()
+    }
+  }
+}
diff --git a/src/core/components/pin/gc-lock.js b/src/core/components/pin/gc-lock.js
deleted file mode 100644
index faceea12cf..0000000000
--- a/src/core/components/pin/gc-lock.js
+++ /dev/null
@@ -1,83 +0,0 @@
-'use strict'
-
-const pull = require('pull-stream/pull')
-const pullThrough = require('pull-stream/throughs/through')
-const pullAsyncMap = require('pull-stream/throughs/async-map')
-const Mutex = require('../../../utils/mutex')
-const log = require('debug')('ipfs:gc:lock')
-
-class GCLock {
-  constructor (repoOwner, options) {
-    options = options || {}
-
-    this.mutex = new Mutex(repoOwner, { ...options, log })
-  }
-
-  readLock () {
-    return this.mutex.readLock()
-  }
-
-  writeLock () {
-    return this.mutex.writeLock()
-  }
-
-  pullReadLock (lockedPullFn) {
-    return this.pullLock('readLock', lockedPullFn)
-  }
-
-  pullWriteLock (lockedPullFn) {
-    return this.pullLock('writeLock', lockedPullFn)
-  }
-
-  pullLock (type, lockedPullFn) {
-    const pullLocker = new PullLocker(this.mutex, type)
-
-    return pull(
-      pullLocker.take(),
-      lockedPullFn(),
-      pullLocker.release()
-    )
-  }
-}
-
-class PullLocker {
-  constructor (mutex, type) {
-    this.mutex = mutex
-    this.type = type
-
-    // The function to call to release the lock. It is set when the lock is taken
-    this.releaseLock = null
-  }
-
-  take () {
-    return pullAsyncMap((i, cb) => {
-      // Check if the lock has already been acquired.
-      // Note: new items will only come through the pull stream once the first
-      // item has acquired a lock.
- if (this.releaseLock) { - // The lock has been acquired so return immediately - return cb(null, i) - } - - // Request the lock - this.mutex[this.type]() - .then(release => { - // Save the release function to be called when the stream completes - this.releaseLock = release - - // The lock has been granted, so run the locked piece of code - cb(null, i) - }, cb) - }) - } - - // Releases the lock - release () { - return pullThrough(null, (err) => { - // When the stream completes, release the lock - this.releaseLock(err) - }) - } -} - -module.exports = GCLock diff --git a/src/core/components/pin/gc.js b/src/core/components/pin/gc.js deleted file mode 100644 index a974a85de5..0000000000 --- a/src/core/components/pin/gc.js +++ /dev/null @@ -1,153 +0,0 @@ -'use strict' - -const CID = require('cids') -const base32 = require('base32.js') -const callbackify = require('callbackify') -const { cidToString } = require('../../../utils/cid') -const log = require('debug')('ipfs:gc') -const { default: Queue } = require('p-queue') -// TODO: Use exported key from root when upgraded to ipfs-mfs@>=13 -// https://github.com/ipfs/js-ipfs-mfs/pull/58 -const { MFS_ROOT_KEY } = require('ipfs-mfs/src/core/utils/constants') - -const { Errors } = require('interface-datastore') -const ERR_NOT_FOUND = Errors.notFoundError().code - -// Limit on the number of parallel block remove operations -const BLOCK_RM_CONCURRENCY = 256 - -// Perform mark and sweep garbage collection -module.exports = function gc (self) { - return callbackify(async () => { - const start = Date.now() - log('Creating set of marked blocks') - - const release = await self._gcLock.writeLock() - - try { - const [ - blockKeys, markedSet - ] = await Promise.all([ - // Get all blocks keys from the blockstore - self._repo.blocks.query({ keysOnly: true }), - - // Mark all blocks that are being used - createMarkedSet(self) - ]) - - // Delete blocks that are not being used - const res = await deleteUnmarkedBlocks(self, markedSet, blockKeys) - - log(`Complete (${Date.now() - start}ms)`) - - return res - } finally { - release() - } - }) -} - -// Get Set of CIDs of blocks to keep -async function createMarkedSet (ipfs) { - const output = new Set() - - const addPins = pins => { - log(`Found ${pins.length} pinned blocks`) - - pins.forEach(pin => { - output.add(cidToString(new CID(pin), { base: 'base32' })) - }) - } - - await Promise.all([ - // All pins, direct and indirect - ipfs.pin.ls() - .then(pins => pins.map(pin => pin.hash)) - .then(addPins), - - // Blocks used internally by the pinner - ipfs.pin.pinManager.getInternalBlocks() - .then(addPins), - - // The MFS root and all its descendants - ipfs._repo.root.get(MFS_ROOT_KEY) - .then(mh => getDescendants(ipfs, new CID(mh))) - .then(addPins) - .catch(err => { - if (err.code === ERR_NOT_FOUND) { - log('No blocks in MFS') - return [] - } - - throw err - }) - ]) - - return output -} - -// Recursively get descendants of the given CID -async function getDescendants (ipfs, cid) { - const refs = await ipfs.refs(cid, { recursive: true }) - const cids = [cid, ...refs.map(r => new CID(r.ref))] - log(`Found ${cids.length} MFS blocks`) - // log(' ' + cids.join('\n ')) - - return cids -} - -// Delete all blocks that are not marked as in use -async function deleteUnmarkedBlocks (ipfs, markedSet, blockKeys) { - // Iterate through all blocks and find those that are not in the marked set - // The blockKeys variable has the form [ { key: Key() }, { key: Key() }, ... 
] - const unreferenced = [] - const result = [] - - const queue = new Queue({ - concurrency: BLOCK_RM_CONCURRENCY - }) - - for await (const { key: k } of blockKeys) { - try { - const cid = dsKeyToCid(k) - const b32 = cid.toV1().toString('base32') - if (!markedSet.has(b32)) { - unreferenced.push(cid) - - queue.add(async () => { - const res = { - cid - } - - try { - await ipfs._repo.blocks.delete(cid) - } catch (err) { - res.err = new Error(`Could not delete block with CID ${cid}: ${err.message}`) - } - - result.push(res) - }) - } - } catch (err) { - const msg = `Could not convert block with key '${k}' to CID` - log(msg, err) - result.push({ err: new Error(msg + `: ${err.message}`) }) - } - } - - await queue.onIdle() - - log(`Marked set has ${markedSet.size} unique blocks. Blockstore has ${blockKeys.length} blocks. ` + - `Deleted ${unreferenced.length} blocks.`) - - return result -} - -// TODO: Use exported utility when upgrade to ipfs-repo@>=0.27.1 -// https://github.com/ipfs/js-ipfs-repo/pull/206 -function dsKeyToCid (key) { - // Block key is of the form / - const decoder = new base32.Decoder() - const buff = decoder.write(key.toString().slice(1)).finalize() - return new CID(Buffer.from(buff)) -} diff --git a/src/core/components/pin/ls.js b/src/core/components/pin/ls.js new file mode 100644 index 0000000000..663543ec26 --- /dev/null +++ b/src/core/components/pin/ls.js @@ -0,0 +1,89 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +'use strict' + +const { parallelMap } = require('streaming-iterables') +const CID = require('cids') +const { resolvePath } = require('../../utils') +const PinManager = require('./pin-manager') +const { PinTypes } = PinManager + +const PIN_LS_CONCURRENCY = 8 + +module.exports = ({ pinManager, object }) => { + return async function * ls (paths, options) { + options = options || {} + + let type = PinTypes.all + + if (paths && paths.type) { + options = paths + paths = null + } + + if (options.type) { + type = options.type + if (typeof options.type === 'string') { + type = options.type.toLowerCase() + } + const err = PinManager.checkPinType(type) + if (err) { + throw err + } + } + + if (paths) { + // check the pinned state of specific hashes + const cids = await resolvePath(object, paths) + + yield * parallelMap(PIN_LS_CONCURRENCY, async cid => { + const { reason, pinned } = await pinManager.isPinnedWithType(cid, type) + + if (!pinned) { + throw new Error(`path '${paths[cids.indexOf(cid)]}' is not pinned`) + } + + if (reason === PinTypes.direct || reason === PinTypes.recursive) { + return { cid, type: reason } + } + + return { cid, type: `${PinTypes.indirect} through ${reason}` } + }, cids) + + return + } + + // show all pinned items of type + let pins = [] + + if (type === PinTypes.direct || type === PinTypes.all) { + pins = pins.concat( + Array.from(pinManager.directPins).map(cid => ({ + type: PinTypes.direct, + cid: new CID(cid) + })) + ) + } + + if (type === PinTypes.recursive || type === PinTypes.all) { + pins = pins.concat( + Array.from(pinManager.recursivePins).map(cid => ({ + type: PinTypes.recursive, + cid: new CID(cid) + })) + ) + } + + if (type === PinTypes.indirect || type === PinTypes.all) { + const indirects = await pinManager.getIndirectKeys(options) + + pins = pins + // if something is pinned both directly and indirectly, + // report the indirect entry + .filter(({ cid }) => !indirects.includes(cid.toString()) || !pinManager.directPins.has(cid.toString())) + .concat(indirects.map(cid => ({ type: PinTypes.indirect, cid: new CID(cid) }))) + } + + // 
FIXME: https://github.com/ipfs/js-ipfs/issues/2244 + yield * pins + } +} diff --git a/src/core/components/pin/rm.js b/src/core/components/pin/rm.js new file mode 100644 index 0000000000..5fec489c3c --- /dev/null +++ b/src/core/components/pin/rm.js @@ -0,0 +1,64 @@ +'use strict' + +const errCode = require('err-code') +const multibase = require('multibase') +const { parallelMap, collect } = require('streaming-iterables') +const pipe = require('it-pipe') +const { resolvePath } = require('../../utils') +const { PinTypes } = require('./pin-manager') + +const PIN_RM_CONCURRENCY = 8 + +module.exports = ({ pinManager, gcLock, object }) => { + return async function rm (paths, options) { + options = options || {} + + const recursive = options.recursive !== false + + if (options.cidBase && !multibase.names.includes(options.cidBase)) { + throw errCode(new Error('invalid multibase'), 'ERR_INVALID_MULTIBASE') + } + + const cids = await resolvePath(object, paths) + const release = await gcLock.readLock() + + try { + // verify that each hash can be unpinned + const results = await pipe( + cids, + parallelMap(PIN_RM_CONCURRENCY, async cid => { + const { pinned, reason } = await pinManager.isPinnedWithType(cid, PinTypes.all) + + if (!pinned) { + throw new Error(`${cid} is not pinned`) + } + if (reason !== PinTypes.recursive && reason !== PinTypes.direct) { + throw new Error(`${cid} is pinned indirectly under ${reason}`) + } + if (reason === PinTypes.recursive && !recursive) { + throw new Error(`${cid} is pinned recursively`) + } + + return cid + }), + collect + ) + + // update the pin sets in memory + results.forEach(cid => { + if (recursive && pinManager.recursivePins.has(cid.toString())) { + pinManager.recursivePins.delete(cid.toString()) + } else { + pinManager.directPins.delete(cid.toString()) + } + }) + + // persist updated pin sets to datastore + await pinManager.flushPins() + + return results.map(cid => ({ cid })) + } finally { + release() + } + } +} diff --git a/src/core/components/ping-pull-stream.js b/src/core/components/ping-pull-stream.js deleted file mode 100644 index 838378bace..0000000000 --- a/src/core/components/ping-pull-stream.js +++ /dev/null @@ -1,100 +0,0 @@ -'use strict' - -const debug = require('debug') -const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR -const PeerId = require('peer-id') -const pull = require('pull-stream') -const Pushable = require('pull-pushable') - -const log = debug('ipfs:pingPullStream') -log.error = debug('ipfs:pingPullStream:error') - -module.exports = function pingPullStream (self) { - return (peerId, opts) => { - if (!self.isOnline()) { - return pull.error(new Error(OFFLINE_ERROR)) - } - - opts = Object.assign({ count: 10 }, opts) - - const source = Pushable() - - getPeer(self.libp2p, source, peerId, (err, peer) => { - if (err) { - log.error(err) - source.end(err) - return - } - - runPing(self.libp2p, source, opts.count, peer, (err) => { - if (err) { - log.error(err) - source.push(getPacket({ success: false, text: err.toString() })) - source.end() - } - }) - }) - - return source - } -} - -function getPacket (msg) { - // Default msg - const basePacket = { success: true, time: 0, text: '' } - return Object.assign(basePacket, msg) -} - -function getPeer (libp2pNode, statusStream, peerIdStr, cb) { - let peerId - - try { - peerId = PeerId.createFromB58String(peerIdStr) - } catch (err) { - return cb(err) - } - - let peerInfo - - try { - peerInfo = libp2pNode.peerBook.get(peerId) - } catch (err) { - log('Peer not found in peer book, trying peer routing') - - 
// Share lookup status just as in the go implemmentation
-    statusStream.push(getPacket({ text: `Looking up peer ${peerIdStr}` }))
-    return libp2pNode.peerRouting.findPeer(peerId, cb)
-  }
-
-  cb(null, peerInfo)
-}
-
-function runPing (libp2pNode, statusStream, count, peer, cb) {
-  libp2pNode.ping(peer, (err, p) => {
-    if (err) { return cb(err) }
-
-    let packetCount = 0
-    let totalTime = 0
-    statusStream.push(getPacket({ text: `PING ${peer.id.toB58String()}` }))
-
-    p.on('ping', (time) => {
-      statusStream.push(getPacket({ time }))
-      totalTime += time
-      packetCount++
-      if (packetCount >= count) {
-        const average = totalTime / count
-        p.stop()
-        statusStream.push(getPacket({ text: `Average latency: ${average}ms` }))
-        statusStream.end()
-      }
-    })
-
-    p.on('error', (err) => {
-      log.error(err)
-      p.stop()
-      cb(err)
-    })
-
-    p.start()
-  })
-}
diff --git a/src/core/components/ping-readable-stream.js b/src/core/components/ping-readable-stream.js
deleted file mode 100644
index b6809ffb48..0000000000
--- a/src/core/components/ping-readable-stream.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-const toStream = require('pull-stream-to-stream')
-
-module.exports = function pingReadableStream (self) {
-  return (peerId, opts) => toStream.source(self.pingPullStream(peerId, opts))
-}
diff --git a/src/core/components/ping.js b/src/core/components/ping.js
index 5f0aa61be3..efc0e1bc33 100644
--- a/src/core/components/ping.js
+++ b/src/core/components/ping.js
@@ -1,18 +1,44 @@
 'use strict'
 
-const promisify = require('promisify-es6')
-const pull = require('pull-stream/pull')
-
-module.exports = function ping (self) {
-  return promisify((peerId, opts, callback) => {
-    if (typeof opts === 'function') {
-      callback = opts
-      opts = {}
+const PeerId = require('peer-id')
+const basePacket = { success: true, time: 0, text: '' }
+
+module.exports = ({ libp2p }) => {
+  return async function * (peerId, options) {
+    options = options || {}
+    options.count = options.count || 10
+
+    if (!PeerId.isPeerId(peerId)) {
+      peerId = PeerId.createFromCID(peerId)
     }
 
-    pull(
-      self.pingPullStream(peerId, opts),
-      pull.collect(callback)
-    )
-  })
+    let peerInfo
+    if (libp2p.peerStore.has(peerId)) {
+      peerInfo = libp2p.peerStore.get(peerId)
+    } else {
+      yield { ...basePacket, text: `Looking up peer ${peerId}` }
+      peerInfo = await libp2p.peerRouting.findPeer(peerId)
+    }
+
+    yield { ...basePacket, text: `PING ${peerInfo.id.toB58String()}` }
+
+    let packetCount = 0
+    let totalTime = 0
+
+    for (let i = 0; i < options.count; i++) {
+      try {
+        const time = await libp2p.ping(peerInfo)
+        totalTime += time
+        packetCount++
+        yield { ...basePacket, time }
+      } catch (err) {
+        yield { ...basePacket, success: false, text: err.toString() }
+      }
+    }
+
+    if (packetCount) {
+      const average = totalTime / packetCount
+      yield { ...basePacket, text: `Average latency: ${average}ms` }
+    }
+  }
 }
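The rewritten `ping` is now an async generator that yields `{ success, time, text }` packets instead of pushing them into a pull-stream. A usage sketch, assuming an `ipfs` instance and a peer ID string:

    async function pingPeer (ipfs, peerId) {
      for await (const pong of ipfs.ping(peerId, { count: 3 })) {
        if (!pong.success) {
          throw new Error(pong.text)
        }
        // time is 0 for status packets, which carry text instead
        console.log(pong.time ? `${pong.time}ms` : pong.text)
      }
    }

Because errors surface as `{ success: false }` packets rather than a stream teardown, a single failed round-trip no longer aborts the remaining pings.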
diff --git a/src/core/components/pre-start.js b/src/core/components/pre-start.js deleted file mode 100644 index 639b94a61f..0000000000 --- a/src/core/components/pre-start.js +++ /dev/null @@ -1,75 +0,0 @@ -'use strict' - -const peerId = require('peer-id') -const PeerInfo = require('peer-info') -const multiaddr = require('multiaddr') -const Keychain = require('libp2p-keychain') -const mergeOptions = require('merge-options') -const NoKeychain = require('./no-keychain') -const callbackify = require('callbackify') -const promisify = require('promisify-es6') - -/* - * Load stuff from Repo into memory - */ -module.exports = function preStart (self) { - return callbackify(async () => { - self.log('pre-start') - - const pass = self._options.pass - let config = await self._repo.config.get() - - if (self._options.config) { - config = mergeOptions(config, self._options.config) - await self.config.replace(config) - } - - // Create keychain configuration, if needed. - if (!config.Keychain) { - config.Keychain = Keychain.generateOptions() - await self.config.set('Keychain', config.Keychain) - self.log('using default keychain options') - } - - // Construct the keychain - if (self._keychain) { - // most likely an init or upgrade has happened - } else if (pass) { - const keychainOptions = Object.assign({ passPhrase: pass }, config.Keychain) - self._keychain = new Keychain(self._repo.keys, keychainOptions) - self.log('keychain constructed') - } else { - self._keychain = new NoKeychain() - self.log('no keychain, use --pass') - } - - const privKey = config.Identity.PrivKey - const id = await promisify(peerId.createFromPrivKey)(privKey) - - // Import the private key as 'self', if needed. - if (pass) { - try { - await self._keychain.findKeyByName('self') - } catch (err) { - self.log('Creating "self" key') - await self._keychain.importPeer('self', id) - } - } - - self.log('peer created') - self._peerInfo = new PeerInfo(id) - if (config.Addresses && config.Addresses.Swarm) { - config.Addresses.Swarm.forEach((addr) => { - let ma = multiaddr(addr) - - if (ma.getPeerId()) { - ma = ma.encapsulate('/ipfs/' + self._peerInfo.id.toB58String()) - } - - self._peerInfo.multiaddrs.add(ma) - }) - } - - await self.pin.pinManager.load() - }) -} diff --git a/src/core/components/pubsub.js b/src/core/components/pubsub.js index 8c5916b906..0954304f46 100644 --- a/src/core/components/pubsub.js +++ b/src/core/components/pubsub.js @@ -1,90 +1,12 @@ 'use strict' -const callbackify = require('callbackify') -const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR -const errcode = require('err-code') -module.exports = function pubsub (self) { - function checkOnlineAndEnabled () { - if (!self.isOnline()) { - throw errcode(new Error(OFFLINE_ERROR), 'ERR_OFFLINE') - } - - if (!self.libp2p.pubsub) { - throw errcode(new Error('pubsub is not enabled'), 'ERR_PUBSUB_DISABLED') - } - } - +module.exports = ({ libp2p }) => { return { - subscribe: (topic, handler, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - - if (typeof callback === 'function') { - try { - checkOnlineAndEnabled() - } catch (err) { - return callback(err) - } - - self.libp2p.pubsub.subscribe(topic, handler, options, callback) - return - } - - try { - checkOnlineAndEnabled() - } catch (err) { - return Promise.reject(err) - } - - return self.libp2p.pubsub.subscribe(topic, handler, options) - }, - - unsubscribe: (topic, handler, callback) => { - if (typeof callback === 'function') { - try { - checkOnlineAndEnabled() - } catch (err) { - return callback(err) - } - - self.libp2p.pubsub.unsubscribe(topic, handler, callback) - return - } - - try { - checkOnlineAndEnabled() - } catch (err) { - return Promise.reject(err) - } - - return self.libp2p.pubsub.unsubscribe(topic, handler) - }, - - publish: callbackify(async (topic, data) => { // eslint-disable-line require-await - checkOnlineAndEnabled() - - await self.libp2p.pubsub.publish(topic, data) - }), - - ls: callbackify(async () => { // eslint-disable-line require-await - checkOnlineAndEnabled() - - return self.libp2p.pubsub.ls() - }), - - peers: callbackify(async (topic) => { // eslint-disable-line require-await - checkOnlineAndEnabled() - - return
self.libp2p.pubsub.peers(topic) - }), - - setMaxListeners (n) { - checkOnlineAndEnabled() - - self.libp2p.pubsub.setMaxListeners(n) - } + subscribe: (...args) => libp2p.pubsub.subscribe(...args), + unsubscribe: (...args) => libp2p.pubsub.unsubscribe(...args), + publish: (...args) => libp2p.pubsub.publish(...args), + ls: (...args) => libp2p.pubsub.getTopics(...args), + peers: (...args) => libp2p.pubsub.getSubscribers(...args), + setMaxListeners: (n) => libp2p.pubsub.setMaxListeners(n) } } diff --git a/src/core/components/files-regular/refs-async-iterator.js b/src/core/components/refs/index.js similarity index 78% rename from src/core/components/files-regular/refs-async-iterator.js rename to src/core/components/refs/index.js index 0d3dbe08d3..39e61b4468 100644 --- a/src/core/components/files-regular/refs-async-iterator.js +++ b/src/core/components/refs/index.js @@ -3,13 +3,17 @@ const isIpfs = require('is-ipfs') const CID = require('cids') const { DAGNode } = require('ipld-dag-pb') -const { normalizePath } = require('./utils') -const { Format } = require('./refs') +const { normalizeCidPath } = require('../../utils') const { Errors } = require('interface-datastore') const ERR_NOT_FOUND = Errors.notFoundError().code -module.exports = function (self) { - return async function * refsAsyncIterator (ipfsPath, options) { // eslint-disable-line require-await +const Format = { + default: '<dst>', + edges: '<src> -> <dst>' +} + +module.exports = function ({ ipld, resolve, preload }) { + return async function * refs (ipfsPath, options) { // eslint-disable-line require-await options = options || {} if (options.maxDepth === 0) { @@ -27,18 +31,18 @@ module.exports = function (self) { } const rawPaths = Array.isArray(ipfsPath) ? ipfsPath : [ipfsPath] - const paths = rawPaths.map(p => getFullPath(self, p, options)) + const paths = rawPaths.map(p => getFullPath(preload, p, options)) for (const path of paths) { - yield * refsStream(self, path, options) + yield * refsStream(resolve, ipld, path, options) } } } -function getFullPath (ipfs, ipfsPath, options) { - // normalizePath() strips /ipfs/ off the front of the path so the CID will +function getFullPath (preload, ipfsPath, options) { - // normalizeCidPath() strips /ipfs/ off the front of the path so the CID will // be at the front of the path - const path = normalizePath(ipfsPath) + const path = normalizeCidPath(ipfsPath) const pathComponents = path.split('/') const cid = pathComponents[0] @@ -47,22 +51,22 @@ function getFullPath (ipfs, ipfsPath, options) { } if (options.preload !== false) { - ipfs._preload(cid) + preload(cid) } return '/ipfs/' + path } // Get a stream of refs at the given path -async function * refsStream (ipfs, path, options) { +async function * refsStream (resolve, ipld, path, options) { // Resolve to the target CID of the path - const resPath = await ipfs.resolve(path) + const resPath = await resolve(path) // path is /ipfs/<cid> const parts = resPath.split('/') const cid = parts[2] // Traverse the DAG, converting it into a stream - for await (const obj of objectStream(ipfs, cid, options.maxDepth, options.unique)) { + for await (const obj of objectStream(ipld, cid, options.maxDepth, options.unique)) { // Root object will not have a parent if (!obj.parent) { continue } @@ -90,7 +94,7 @@ function formatLink (srcCid, dstCid, linkName, format) { }
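Consumers drive the traversal with `for await...of`. A sketch of the call pattern (the root CID is a placeholder; the `format` string uses the `<src>`/`<dst>` placeholders from the `Format` table above):

```js
// Stream every edge under a root CID, one formatted ref per iteration
for await (const { ref } of ipfs.refs('QmRootCidPlaceholder', {
  recursive: true,
  unique: true,
  format: '<src> -> <dst>'
})) {
  console.log(ref)
}
```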
// Do a depth first search of the DAG, starting from the given root cid -async function * objectStream (ipfs, rootCid, maxDepth, uniqueOnly) { // eslint-disable-line require-await +async function * objectStream (ipld, rootCid, maxDepth, uniqueOnly) { // eslint-disable-line require-await const seen = new Set() async function * traverseLevel (parent, depth) { @@ -104,7 +108,7 @@ async function * objectStream (ipfs, rootCid, maxDepth, uniqueOnly) { // eslint- // Get this object's links try { // Look at each link, parent and the new depth - for (const link of await getLinks(ipfs, parent.cid)) { + for (const link of await getLinks(ipld, parent.cid)) { yield { parent: parent, node: link, @@ -130,8 +134,8 @@ async function * objectStream (ipfs, rootCid, maxDepth, uniqueOnly) { // eslint- } // Fetch a node from IPLD then get all its links -async function getLinks (ipfs, cid) { - const node = await ipfs._ipld.get(new CID(cid)) +async function getLinks (ipld, cid) { + const node = await ipld.get(new CID(cid)) if (DAGNode.isDAGNode(node)) { return node.Links.map(({ Name, Hash }) => ({ name: Name, cid: new CID(Hash) })) diff --git a/src/core/components/files-regular/refs-local-async-iterator.js b/src/core/components/refs/local.js similarity index 75% rename from src/core/components/files-regular/refs-local-async-iterator.js rename to src/core/components/refs/local.js index 62029cbac9..365f82d326 100644 --- a/src/core/components/files-regular/refs-local-async-iterator.js +++ b/src/core/components/refs/local.js @@ -3,9 +3,9 @@ const CID = require('cids') const base32 = require('base32.js') -module.exports = function (self) { - return async function * refsLocalAsyncIterator () { - for await (const result of self._repo.blocks.query({ keysOnly: true })) { +module.exports = function ({ repo }) { + return async function * refsLocal () { + for await (const result of repo.blocks.query({ keysOnly: true })) { yield dsKeyToRef(result.key) } } diff --git a/src/core/components/repo/gc.js b/src/core/components/repo/gc.js new file mode 100644 index 0000000000..f5337cec05 --- /dev/null +++ b/src/core/components/repo/gc.js @@ -0,0 +1,110 @@ +'use strict' + +const CID = require('cids') +const { cidToString } = require('../../../utils/cid') +const log = require('debug')('ipfs:repo:gc') +const { MFS_ROOT_KEY } = require('ipfs-mfs') +const Repo = require('ipfs-repo') +const { Errors } = require('interface-datastore') +const ERR_NOT_FOUND = Errors.notFoundError().code +const { parallelMerge, transform, map } = require('streaming-iterables') + +// Limit on the number of parallel block remove operations +const BLOCK_RM_CONCURRENCY = 256 + +// Perform mark and sweep garbage collection +module.exports = ({ gcLock, pin, pinManager, refs, repo }) => { + return async function * gc () { + const start = Date.now() + log('Creating set of marked blocks') + + const release = await gcLock.writeLock() + + try { + // Mark all blocks that are being used + const markedSet = await createMarkedSet({ pin, pinManager, refs, repo }) + // Get all blocks keys from the blockstore + const blockKeys = repo.blocks.query({ keysOnly: true }) + + // Delete blocks that are not being used + yield * deleteUnmarkedBlocks({ repo, refs }, markedSet, blockKeys) + + log(`Complete (${Date.now() - start}ms)`) + } finally { + release() + } + } +} + +// Get Set of CIDs of blocks to keep +async function createMarkedSet ({ pin, pinManager, refs, repo }) { + const pinsSource = map(({ cid }) => cid, pin.ls()) + + const pinInternalsSource = async function * () { + const cids = await pinManager.getInternalBlocks() + yield * cids + } + + const mfsSource = async function * () { + const mh = await repo.root.get(MFS_ROOT_KEY) + const rootCid = new CID(mh) + yield rootCid + try { + for await
(const { ref } of refs(rootCid, { recursive: true })) { + yield new CID(ref) + } + } catch (err) { + if (err.code === ERR_NOT_FOUND) { + log('No blocks in MFS') + return + } + throw err + } + } + + const output = new Set() + for await (const cid of parallelMerge(pinsSource, pinInternalsSource, mfsSource)) { + output.add(cidToString(cid, { base: 'base32' })) + } + return output +} + +// Delete all blocks that are not marked as in use +async function * deleteUnmarkedBlocks ({ repo, refs }, markedSet, blockKeys) { + // Iterate through all blocks and find those that are not in the marked set + // blockKeys yields { key: Key() } + let blocksCount = 0 + let removedBlocksCount = 0 + + const removeBlock = async ({ key: k }) => { + blocksCount++ + + try { + const cid = Repo.utils.blockstore.keyToCid(k) + const b32 = cid.toV1().toString('base32') + if (markedSet.has(b32)) return null + const res = { cid } + + try { + await repo.blocks.delete(cid) + removedBlocksCount++ + } catch (err) { + res.err = new Error(`Could not delete block with CID ${cid}: ${err.message}`) + } + + return res + } catch (err) { + const msg = `Could not convert block with key '${k}' to CID` + log(msg, err) + return { err: new Error(msg + `: ${err.message}`) } + } + } + + for await (const res of transform(BLOCK_RM_CONCURRENCY, removeBlock, blockKeys)) { + // filter nulls (blocks that were retained) + if (res) yield res + } + + log(`Marked set has ${markedSet.size} unique blocks. Blockstore has ${blocksCount} blocks. ` + + `Deleted ${removedBlocksCount} blocks.`) +} diff --git a/src/core/components/repo/stat.js b/src/core/components/repo/stat.js new file mode 100644 index 0000000000..d6310c8746 --- /dev/null +++ b/src/core/components/repo/stat.js @@ -0,0 +1,15 @@ +'use strict' + +module.exports = ({ repo }) => { + return async function stat () { + const stats = await repo.stat() + + return { + numObjects: stats.numObjects, + repoSize: stats.repoSize, + repoPath: stats.repoPath, + version: stats.version.toString(), + storageMax: stats.storageMax + } + } +} diff --git a/src/core/components/repo/version.js b/src/core/components/repo/version.js new file mode 100644 index 0000000000..9af7b07735 --- /dev/null +++ b/src/core/components/repo/version.js @@ -0,0 +1,33 @@ +'use strict' + +const { repoVersion } = require('ipfs-repo') + +module.exports = ({ repo }) => { + /** + * If the repo has been initialized, report the current version. + * Otherwise report the version that would be initialized. 
+ * + * @returns {number} + */ + return async function version () { + try { + await repo._checkInitialized() + } catch (err) { + // TODO: (dryajov) This is really hacky, there must be a better way + const match = [ + /Key not found in database \[\/version\]/, + /ENOENT/, + /repo is not initialized yet/ + ].some((m) => { + return m.test(err.message) + }) + if (match) { + // this repo has not been initialized + return repoVersion + } + throw err + } + + return repo.version.get() + } +} diff --git a/src/core/components/resolve.js b/src/core/components/resolve.js index 268952dfe7..c8eac5bb64 100644 --- a/src/core/components/resolve.js +++ b/src/core/components/resolve.js @@ -2,7 +2,6 @@ const isIpfs = require('is-ipfs') const CID = require('cids') -const nodeify = require('promise-nodeify') const { cidToString } = require('../../utils/cid') /** @@ -32,26 +31,28 @@ const { cidToString } = require('../../utils/cid') * @param {IPFS} ipfs * @returns {ResolveWrapper} */ -module.exports = (ipfs) => { +module.exports = ({ name, ipld }) => { /** * IPFS Resolve - Resolve the value of names to IPFS * - * @param {String} name + * @param {String} path * @param {ResolveOptions} [opts={}] * @returns {Promise} */ - const resolve = async (name, opts) => { + return async function resolve (path, opts) { opts = opts || {} - if (!isIpfs.path(name)) { - throw new Error('invalid argument ' + name) + if (!isIpfs.path(path)) { + throw new Error('invalid argument ' + path) } - if (isIpfs.ipnsPath(name)) { - name = await ipfs.name.resolve(name, opts) + if (isIpfs.ipnsPath(path)) { + for await (const resolvedPath of name.resolve(path, opts)) { + path = resolvedPath + } } - const [, , hash, ...rest] = name.split('/') // ['', 'ipfs', 'hash', ...path] + const [, , hash, ...rest] = path.split('/') // ['', 'ipfs', 'hash', ...path] const cid = new CID(hash) // nothing to resolve return the input @@ -59,8 +60,9 @@ module.exports = (ipfs) => { return `/ipfs/${cidToString(cid, { base: opts.cidBase })}` } - const path = rest.join('/') - const results = ipfs._ipld.resolve(cid, path) + path = rest.join('/') + + const results = ipld.resolve(cid, path) let value = cid let remainderPath = path @@ -73,13 +75,4 @@ module.exports = (ipfs) => { return `/ipfs/${cidToString(value, { base: opts.cidBase })}${remainderPath ? 
'/' + remainderPath : ''}` } - - return (name, opts, cb) => { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - opts = opts || {} - return nodeify(resolve(name, opts), cb) - } } diff --git a/src/core/components/start.js b/src/core/components/start.js index b3ea02bfa3..5074f8748b 100644 --- a/src/core/components/start.js +++ b/src/core/components/start.js @@ -1,51 +1,227 @@ 'use strict' const Bitswap = require('ipfs-bitswap') -const callbackify = require('callbackify') - +const PeerBook = require('peer-book') const IPNS = require('../ipns') const routingConfig = require('../ipns/routing/config') -const createLibp2pBundle = require('./libp2p') - -module.exports = (self) => { - return callbackify(async () => { - if (self.state.state() !== 'stopped') { - throw new Error(`Not able to start from state: ${self.state.state()}`) - } +const defer = require('p-defer') +const { AlreadyInitializedError, NotEnabledError } = require('../errors') +const Components = require('./') - self.log('starting') - self.state.start() +module.exports = ({ + apiManager, + options: constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo +}) => async function start () { + const startPromise = defer() + const { cancel } = apiManager.update({ start: () => startPromise.promise }) + try { // The repo may be closed if previously stopped - if (self._repo.closed) { - await self._repo.open() + if (repo.closed) { + await repo.open() } - const config = await self._repo.config.get() - const libp2p = createLibp2pBundle(self, config) + const config = await repo.config.get() + + const peerBook = new PeerBook() + const libp2p = Components.legacy.libp2p({ + _options: constructorOptions, + _repo: repo, + _peerInfo: peerInfo, + _peerInfoBook: peerBook, + _print: print + }, config) await libp2p.start() - self.libp2p = libp2p - const ipnsRouting = routingConfig(self) - self._ipns = new IPNS(ipnsRouting, self._repo.datastore, self._peerInfo, self._keychain, self._options) + const ipnsRouting = routingConfig({ + _options: constructorOptions, + libp2p, + _repo: repo, + _peerInfo: peerInfo + }) + const ipns = new IPNS(ipnsRouting, repo.datastore, peerInfo, keychain, { pass: initOptions.pass }) + const bitswap = new Bitswap(libp2p, repo.blocks, { statsEnabled: true }) + + await bitswap.start() - self._bitswap = new Bitswap( - self.libp2p, - self._repo.blocks, { - statsEnabled: true - } - ) + blockService.setExchange(bitswap) - await self._bitswap.start() + await preload.start() + await ipns.republisher.start() + // TODO: start mfs preload here + + const api = createApi({ + apiManager, + bitswap, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + peerInfo, + pinManager, + preload, + print, + repo + }) + + apiManager.update(api, () => undefined) + } catch (err) { + cancel() + startPromise.reject(err) + throw err + } + + startPromise.resolve(apiManager.api) + return apiManager.api +} - self._blockService.setExchange(self._bitswap) +function createApi ({ + apiManager, + bitswap, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + peerInfo, + pinManager, + preload, + print, + repo +}) { + const dag = { + get: Components.dag.get({ ipld, preload }), + resolve: Components.dag.resolve({ ipld, preload }), + tree: Components.dag.tree({ ipld, preload }) + } + const object = { + data: Components.object.data({ ipld, preload }), + get: Components.object.get({ ipld, 
preload }), + links: Components.object.links({ dag }), + new: Components.object.new({ ipld, preload }), + patch: { + addLink: Components.object.patch.addLink({ ipld, gcLock, preload }), + appendData: Components.object.patch.appendData({ ipld, gcLock, preload }), + rmLink: Components.object.patch.rmLink({ ipld, gcLock, preload }), + setData: Components.object.patch.setData({ ipld, gcLock, preload }) + }, + put: Components.object.put({ ipld, gcLock, preload }), + stat: Components.object.stat({ ipld, preload }) + } + const pin = { + add: Components.pin.add({ pinManager, gcLock, dag, object }), + ls: Components.pin.ls({ pinManager, object }), + rm: Components.pin.rm({ pinManager, gcLock, object }) + } + // FIXME: resolve this circular dependency + dag.put = Components.dag.put({ ipld, pin, gcLock, preload }) + const add = Components.add({ ipld, dag, preload, pin, gcLock, options: constructorOptions }) + const isOnline = Components.isOnline({ libp2p }) + const dns = Components.dns() + const name = { + pubsub: { + cancel: Components.name.pubsub.cancel({ ipns, options: constructorOptions }), + state: Components.name.pubsub.state({ ipns, options: constructorOptions }), + subs: Components.name.pubsub.subs({ ipns, options: constructorOptions }) + }, + publish: Components.name.publish({ ipns, dag, peerInfo, isOnline, keychain, options: constructorOptions }), + resolve: Components.name.resolve({ dns, ipns, peerInfo, isOnline, options: constructorOptions }) + } + const resolve = Components.resolve({ name, ipld }) + const refs = Components.refs({ ipld, resolve, preload }) + refs.local = Components.refs.local({ repo }) - await self._preload.start() - await self._ipns.republisher.start() - await self._mfsPreload.start() + const api = { + add, + bitswap: { + stat: Components.bitswap.stat({ bitswap }), + unwant: Components.bitswap.unwant({ bitswap }), + wantlist: Components.bitswap.wantlist({ bitswap }) + }, + block: { + get: Components.block.get({ blockService, preload }), + put: Components.block.put({ blockService, gcLock, preload }), + rm: Components.block.rm({ blockService, gcLock, pinManager }), + stat: Components.block.stat({ blockService, preload }) + }, + bootstrap: { + add: Components.bootstrap.add({ repo }), + list: Components.bootstrap.list({ repo }), + rm: Components.bootstrap.rm({ repo }) + }, + cat: Components.cat({ ipld, preload }), + config: Components.config({ repo }), + dns, + files: Components.files({ ipld, blockService, repo, preload, options: constructorOptions }), + get: Components.get({ ipld, preload }), + id: Components.id({ peerInfo }), + init: () => { throw new AlreadyInitializedError() }, + ls: Components.ls({ ipld, preload }), + name, + pin, + ping: Components.ping({ libp2p }), + pubsub: libp2p.pubsub + ? 
Components.pubsub({ libp2p }) + : () => { throw new NotEnabledError('pubsub not enabled') }, + refs, + repo: { + // TODO: this PR depends on `refs` refactor and the `pins` refactor + // https://github.com/ipfs/js-ipfs/pull/2658 + // https://github.com/ipfs/js-ipfs/pull/2660 + // gc: Commands.repo.gc({ gcLock, pin, pinManager, refs, repo }), + stat: Components.repo.stat({ repo }), + version: Components.repo.version({ repo }) + }, + resolve, + start: () => apiManager.api, + stats: { + bitswap: Components.bitswap.stat({ bitswap }), + bw: Components.stats.bw({ libp2p }), + repo: Components.repo.stat({ repo }) + }, + stop: Components.stop({ + apiManager, + bitswap, + options: constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + peerInfo, + pinManager, + preload, + print, + repo + }), + swarm: { + addrs: Components.swarm.addrs({ libp2p }), + connect: Components.swarm.connect({ libp2p }), + disconnect: Components.swarm.disconnect({ libp2p }), + localAddrs: Components.swarm.localAddrs({ peerInfo }), + peers: Components.swarm.peers({ libp2p }) + }, + version: Components.version({ repo }) + } - self.state.started() - self.emit('start') - }) + return api } diff --git a/src/core/components/stats.js b/src/core/components/stats.js deleted file mode 100644 index 88c19b352e..0000000000 --- a/src/core/components/stats.js +++ /dev/null @@ -1,83 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const Big = require('bignumber.js') -const Pushable = require('pull-pushable') -const human = require('human-to-milliseconds') -const toStream = require('pull-stream-to-stream') -const errCode = require('err-code') - -function bandwidthStats (self, opts) { - let stats - - if (opts.peer) { - stats = self.libp2p.stats.forPeer(opts.peer) - } else if (opts.proto) { - stats = self.libp2p.stats.forProtocol(opts.proto) - } else { - stats = self.libp2p.stats.global - } - - if (!stats) { - return { - totalIn: new Big(0), - totalOut: new Big(0), - rateIn: new Big(0), - rateOut: new Big(0) - } - } - - const snapshot = stats.snapshot - const movingAverages = stats.movingAverages - - return { - totalIn: snapshot.dataReceived, - totalOut: snapshot.dataSent, - rateIn: new Big(movingAverages.dataReceived['60000'].movingAverage() / 60), - rateOut: new Big(movingAverages.dataSent['60000'].movingAverage() / 60) - } -} - -module.exports = function stats (self) { - const _bwPullStream = (opts) => { - opts = opts || {} - let interval = null - const stream = Pushable(true, () => { - if (interval) { - clearInterval(interval) - } - }) - - if (opts.poll) { - let value - try { - value = human(opts.interval || '1s') - } catch (err) { - // Pull stream expects async work, so we need to simulate it.
- process.nextTick(() => { - stream.end(errCode(err, 'ERR_INVALID_POLL_INTERVAL')) - }) - } - - interval = setInterval(() => { - stream.push(bandwidthStats(self, opts)) - }, value) - } else { - stream.push(bandwidthStats(self, opts)) - stream.end() - } - - return stream.source - } - - return { - bitswap: require('./bitswap')(self).stat, - repo: require('./repo')(self).stat, - bw: callbackify.variadic(async (opts) => { // eslint-disable-line require-await - opts = opts || {} - return bandwidthStats(self, opts) - }), - bwReadableStream: (opts) => toStream.source(_bwPullStream(opts)), - bwPullStream: _bwPullStream - } -} diff --git a/src/core/components/stats/bw.js b/src/core/components/stats/bw.js new file mode 100644 index 0000000000..d7f72dae3e --- /dev/null +++ b/src/core/components/stats/bw.js @@ -0,0 +1,63 @@ +'use strict' + +const Big = require('bignumber.js') +const human = require('human-to-milliseconds') +const errCode = require('err-code') + +function getBandwidthStats (libp2p, opts) { + let stats + + if (opts.peer) { + stats = libp2p.metrics.forPeer(opts.peer) + } else if (opts.proto) { + stats = libp2p.metrics.forProtocol(opts.proto) + } else { + stats = libp2p.metrics.global + } + + if (!stats) { + return { + totalIn: new Big(0), + totalOut: new Big(0), + rateIn: new Big(0), + rateOut: new Big(0) + } + } + + const { movingAverages, snapshot } = stats + + return { + totalIn: snapshot.dataReceived, + totalOut: snapshot.dataSent, + rateIn: new Big(movingAverages.dataReceived[60000].movingAverage() / 60), + rateOut: new Big(movingAverages.dataSent[60000].movingAverage() / 60) + } +} + +module.exports = ({ libp2p }) => { + return async function * (options) { + options = options || {} + + if (!options.poll) { + yield getBandwidthStats(libp2p, options) + return + } + + let interval + try { + interval = human(options.interval || '1s') + } catch (err) { + throw errCode(err, 'ERR_INVALID_POLL_INTERVAL') + } + + let timeoutId + try { + while (true) { + yield getBandwidthStats(libp2p, options) + await new Promise(resolve => { timeoutId = setTimeout(resolve, interval) }) + } + } finally { + clearTimeout(timeoutId) + } + } +} diff --git a/src/core/components/stop.js b/src/core/components/stop.js index 1ee7bb9518..6b7fbaee9e 100644 --- a/src/core/components/stop.js +++ b/src/core/components/stop.js @@ -1,40 +1,174 @@ 'use strict' -const callbackify = require('callbackify') - -module.exports = (self) => { - return callbackify(async () => { - self.log('stop') - - if (self.state.state() === 'stopped') { - throw new Error('Already stopped') - } - - if (self.state.state() !== 'running') { - throw new Error('Not able to stop from state: ' + self.state.state()) - } - - self.state.stop() - self._blockService.unsetExchange() - self._bitswap.stop() - self._preload.stop() - - const libp2p = self.libp2p - self.libp2p = null - - try { - await Promise.all([ - self._ipns.republisher.stop(), - self._mfsPreload.stop(), - libp2p.stop(), - self._repo.close() - ]) - - self.state.stopped() - self.emit('stop') - } catch (err) { - self.emit('error', err) - throw err - } - }) +const defer = require('p-defer') +const { NotStartedError, AlreadyInitializedError } = require('../errors') +const Components = require('./') + +module.exports = ({ + apiManager, + options: constructorOptions, + bitswap, + blockService, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + peerInfo, + pinManager, + preload, + print, + repo +}) => async function stop () { + const stopPromise = defer() + const { cancel } = 
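Once a node is started, polling consumers just iterate; breaking out of the loop runs the generator's `finally` block, so the pending timer is always cleared. A minimal sketch (assumes a started `ipfs` instance):

```js
// Sample bandwidth once per second; rateIn/rateOut are bignumber.js values
for await (const bw of ipfs.stats.bw({ poll: true, interval: '1s' })) {
  console.log(`in: ${bw.rateIn.toFixed(0)} B/s, out: ${bw.rateOut.toFixed(0)} B/s`)
  break // a real consumer keeps iterating; break exercises the cleanup path
}
```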
apiManager.update({ stop: () => stopPromise.promise }) + + try { + blockService.unsetExchange() + bitswap.stop() + preload.stop() + + await Promise.all([ + ipns.republisher.stop(), + // mfsPreload.stop(), + libp2p.stop(), + repo.close() + ]) + + const api = createApi({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo + }) + + apiManager.update(api, () => { throw new NotStartedError() }) + } catch (err) { + cancel() + stopPromise.reject(err) + throw err + } + + stopPromise.resolve(apiManager.api) + return apiManager.api +} + +function createApi ({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo +}) { + const dag = { + get: Components.dag.get({ ipld, preload }), + resolve: Components.dag.resolve({ ipld, preload }), + tree: Components.dag.tree({ ipld, preload }) + } + const object = { + data: Components.object.data({ ipld, preload }), + get: Components.object.get({ ipld, preload }), + links: Components.object.links({ dag }), + new: Components.object.new({ ipld, preload }), + patch: { + addLink: Components.object.patch.addLink({ ipld, gcLock, preload }), + appendData: Components.object.patch.appendData({ ipld, gcLock, preload }), + rmLink: Components.object.patch.rmLink({ ipld, gcLock, preload }), + setData: Components.object.patch.setData({ ipld, gcLock, preload }) + }, + put: Components.object.put({ ipld, gcLock, preload }), + stat: Components.object.stat({ ipld, preload }) + } + const pin = { + add: Components.pin.add({ pinManager, gcLock, dag, object }), + ls: Components.pin.ls({ pinManager, object }), + rm: Components.pin.rm({ pinManager, gcLock, object }) + } + // FIXME: resolve this circular dependency + dag.put = Components.dag.put({ ipld, pin, gcLock, preload }) + const add = Components.add({ ipld, dag, preload, pin, gcLock, options: constructorOptions }) + const refs = () => { throw new NotStartedError() } + refs.local = Components.refs.local({ repo }) + + const api = { + add, + block: { + get: Components.block.get({ blockService, preload }), + put: Components.block.put({ blockService, gcLock, preload }), + rm: Components.block.rm({ blockService, gcLock, pinManager }), + stat: Components.block.stat({ blockService, preload }) + }, + bootstrap: { + add: Components.bootstrap.add({ repo }), + list: Components.bootstrap.list({ repo }), + rm: Components.bootstrap.rm({ repo }) + }, + cat: Components.cat({ ipld, preload }), + config: Components.config({ repo }), + dns: Components.dns(), + files: Components.files({ ipld, blockService, repo, preload, options: constructorOptions }), + get: Components.get({ ipld, preload }), + id: Components.id({ peerInfo }), + init: () => { throw new AlreadyInitializedError() }, + isOnline: Components.isOnline({}), + ls: Components.ls({ ipld, preload }), + pin, + refs, + repo: { + // TODO: gc should be available when stopped + // `resolve` (passed to `refs` API) which is a dependency for `gc` API + // needs to be altered to allow `name` API dependency to be optional, so + // that `resolve` can also be available when not started, and so `gc` can + // be run when not started. 
+ // gc: Commands.repo.gc({ gcLock, pin, pinManager, refs, repo }), + stat: Components.repo.stat({ repo }), + version: Components.repo.version({ repo }) + }, + start: Components.start({ + apiManager, + options: constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo + }), + stats: { + bitswap: () => { throw new NotStartedError() }, + bw: () => { throw new NotStartedError() }, + repo: Components.repo.stat({ repo }) + }, + stop: () => apiManager.api, + swarm: { + addrs: () => { throw new NotStartedError() }, + connect: () => { throw new NotStartedError() }, + disconnect: () => { throw new NotStartedError() }, + localAddrs: Components.swarm.localAddrs({ peerInfo }), + peers: () => { throw new NotStartedError() } + }, + version: Components.version({ repo }) + } + + return api } diff --git a/src/core/components/swarm.js b/src/core/components/swarm.js deleted file mode 100644 index 45d1b8ebe5..0000000000 --- a/src/core/components/swarm.js +++ /dev/null @@ -1,79 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR - -module.exports = function swarm (self) { - return { - peers: callbackify.variadic(async (opts) => { // eslint-disable-line require-await - opts = opts || {} - - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - const verbose = opts.v || opts.verbose - // TODO: return latency and streams when verbose is set - // we currently don't have this information - - const peers = [] - - Object.values(self._peerInfoBook.getAll()).forEach((peer) => { - const connectedAddr = peer.isConnected() - - if (!connectedAddr) { return } - - const tupple = { - addr: connectedAddr, - peer: peer.id - } - if (verbose) { - tupple.latency = 'n/a' - } - - peers.push(tupple) - }) - - return peers - }), - - // all the addrs we know - addrs: callbackify(async () => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - const peers = Object.values(self._peerInfoBook.getAll()) - - return peers - }), - - localAddrs: callbackify(async () => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - return self.libp2p.peerInfo.multiaddrs.toArray() - }), - - connect: callbackify(async (maddr) => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - return self.libp2p.dial(maddr) - }), - - disconnect: callbackify(async (maddr) => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - return self.libp2p.hangUp(maddr) - }), - - filters: callbackify(async () => { // eslint-disable-line require-await - throw new Error('Not implemented') - }) - } -} diff --git a/src/core/components/swarm/addrs.js b/src/core/components/swarm/addrs.js new file mode 100644 index 0000000000..ee095be07a --- /dev/null +++ b/src/core/components/swarm/addrs.js @@ -0,0 +1,13 @@ +'use strict' + +const CID = require('cids') + +module.exports = ({ libp2p }) => { + return async function addrs () { // eslint-disable-line require-await + const peers = [] + for (const [peerId, peerInfo] of libp2p.peerStore.entries()) { + peers.push({ id: new CID(peerId), addrs: peerInfo.multiaddrs.toArray() }) + } + return peers + } +} diff --git a/src/core/components/swarm/connect.js b/src/core/components/swarm/connect.js new file mode 100644 index 0000000000..98f7217f71 --- /dev/null +++ 
b/src/core/components/swarm/connect.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = ({ libp2p }) => { + return function connect (addr) { + return libp2p.dial(addr) + } +} diff --git a/src/core/components/swarm/disconnect.js b/src/core/components/swarm/disconnect.js new file mode 100644 index 0000000000..3e9aadae52 --- /dev/null +++ b/src/core/components/swarm/disconnect.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = ({ libp2p }) => { + return function disconnect (addr) { + return libp2p.hangUp(addr) + } +} diff --git a/src/core/components/swarm/local-addrs.js b/src/core/components/swarm/local-addrs.js new file mode 100644 index 0000000000..bc2ee7df71 --- /dev/null +++ b/src/core/components/swarm/local-addrs.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = ({ peerInfo }) => { + return async function localAddrs () { // eslint-disable-line require-await + return peerInfo.multiaddrs.toArray() + } +} diff --git a/src/core/components/swarm/peers.js b/src/core/components/swarm/peers.js new file mode 100644 index 0000000000..3fbc45c9c8 --- /dev/null +++ b/src/core/components/swarm/peers.js @@ -0,0 +1,34 @@ +'use strict' + +const CID = require('cids') + +module.exports = ({ libp2p }) => { + return async function peers (options) { // eslint-disable-line require-await + options = options || {} + + const verbose = options.v || options.verbose + const peers = [] + + for (const [peerId, connections] of libp2p.connections) { + for (const connection of connections) { + const tupple = { + addr: connection.remoteAddr, + peer: new CID(peerId) + } + + if (verbose || options.direction) { + tupple.direction = connection.stat.direction + } + + if (verbose) { + tupple.muxer = connection.stat.multiplexer + tupple.latency = 'n/a' + } + + peers.push(tupple) + } + } + + return peers + } +}
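End to end, the refactored swarm API reads like this (the multiaddr and peer ID are placeholders; `peers()` resolves to the tupples built above):

```js
// Dial a peer, list open connections, then hang up
await ipfs.swarm.connect('/ip4/127.0.0.1/tcp/4001/p2p/QmPeerIdPlaceholder')

for (const { addr, peer, direction } of await ipfs.swarm.peers({ verbose: true })) {
  console.log(direction, peer.toString(), addr.toString())
}

await ipfs.swarm.disconnect('/ip4/127.0.0.1/tcp/4001/p2p/QmPeerIdPlaceholder')
```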
diff --git a/src/core/components/version.js b/src/core/components/version.js index cc850c465d..7b9d963b14 100644 --- a/src/core/components/version.js +++ b/src/core/components/version.js @@ -1,17 +1,16 @@ 'use strict' const pkg = require('../../../package.json') -const callbackify = require('callbackify') // TODO add the commit hash of the current ipfs version to the response. -module.exports = function version (self) { - return callbackify(async () => { - const repoVersion = await self.repo.version() +module.exports = ({ repo }) => { + return async function version () { + const repoVersion = await repo.version.get() return { version: pkg.version, repo: repoVersion, commit: '' } - }) + } } diff --git a/src/core/config.js b/src/core/config.js deleted file mode 100644 index 6f2353efb1..0000000000 --- a/src/core/config.js +++ /dev/null @@ -1,101 +0,0 @@ -'use strict' - -const Multiaddr = require('multiaddr') -const mafmt = require('mafmt') -const { struct, superstruct } = require('superstruct') -const { isTest } = require('ipfs-utils/src/env') - -const { optional, union } = struct -const s = superstruct({ - types: { - multiaddr: v => { - if (v === null) { - return `multiaddr invalid, value must be a string, Buffer, or another Multiaddr got ${v}` - } - - try { - Multiaddr(v) - } catch (err) { - return `multiaddr invalid, ${err.message}` - } - - return true - }, - 'multiaddr-ipfs': v => mafmt.IPFS.matches(v) ? true : 'multiaddr IPFS invalid' - } -}) - -const configSchema = s({ - repo: optional(s('object|string')), - repoOwner: 'boolean?', - repoAutoMigrate: 'boolean?', - preload: s({ - enabled: 'boolean?', - addresses: optional(s(['multiaddr'])), - interval: 'number?' - }, { enabled: !isTest, interval: 30 * 1000 }), - init: optional(union(['boolean', s({ - bits: 'number?', - emptyRepo: 'boolean?', - privateKey: optional(s('object|string')), // object should be a custom type for PeerId using 'kind-of' - pass: 'string?', - profiles: 'array?' - })])), - start: 'boolean?', - offline: 'boolean?', - pass: 'string?', - silent: 'boolean?', - relay: 'object?', // relay validates in libp2p - EXPERIMENTAL: optional(s({ - pubsub: 'boolean?', - ipnsPubsub: 'boolean?', - sharding: 'boolean?', - dht: 'boolean?' - })), - connectionManager: 'object?', - config: optional(s({ - API: 'object?', - Addresses: optional(s({ - Delegates: optional(s(['multiaddr'])), - Swarm: optional(s(['multiaddr'])), - API: optional(union([s('multiaddr'), s(['multiaddr'])])), - Gateway: optional(union([s('multiaddr'), s(['multiaddr'])])) - })), - Discovery: optional(s({ - MDNS: optional(s({ - Enabled: 'boolean?', - Interval: 'number?' - })), - webRTCStar: optional(s({ - Enabled: 'boolean?' - })) - })), - Bootstrap: optional(s(['multiaddr-ipfs'])), - Pubsub: optional(s({ - Router: 'string?', - Enabled: 'boolean?' - })), - Swarm: optional(s({ - ConnMgr: optional(s({ - LowWater: 'number?', - HighWater: 'number?' - })) - })) - })), - ipld: 'object?', - libp2p: optional(union(['function', 'object'])) // libp2p validates this -}, { - repoOwner: true -}) - -const validate = (opts) => { - const [err, options] = configSchema.validate(opts) - - if (err) { - throw err - } - - return options -} - -module.exports = { validate } diff --git a/src/core/errors.js b/src/core/errors.js new file mode 100644 index 0000000000..c7cf7ac938 --- /dev/null +++ b/src/core/errors.js @@ -0,0 +1,54 @@ +class NotInitializedError extends Error { + constructor (message = 'not initialized') { + super(message) + this.name = 'NotInitializedError' + this.code = NotInitializedError.code + } +} + +NotInitializedError.code = 'ERR_NOT_INITIALIZED' +exports.NotInitializedError = NotInitializedError + +class AlreadyInitializingError extends Error { + constructor (message = 'cannot initialize an initializing node') { + super(message) + this.name = 'AlreadyInitializingError' + this.code = AlreadyInitializingError.code + } +} + +AlreadyInitializingError.code = 'ERR_ALREADY_INITIALIZING' +exports.AlreadyInitializingError = AlreadyInitializingError + +class AlreadyInitializedError extends Error { + constructor (message = 'cannot re-initialize an initialized node') { + super(message) + this.name = 'AlreadyInitializedError' + this.code = AlreadyInitializedError.code + } +} + +AlreadyInitializedError.code = 'ERR_ALREADY_INITIALIZED' +exports.AlreadyInitializedError = AlreadyInitializedError + +class NotStartedError extends Error { + constructor (message = 'not started') { + super(message) + this.name = 'NotStartedError' + this.code = NotStartedError.code + } +} + +NotStartedError.code = 'ERR_NOT_STARTED' +exports.NotStartedError = NotStartedError + +class NotEnabledError extends Error { + constructor (message = 'not enabled') { + super(message) + this.name = 'NotEnabledError' + this.code = NotEnabledError.code + } +} + +NotEnabledError.code = 'ERR_NOT_ENABLED' +exports.NotEnabledError = NotEnabledError diff --git a/src/core/index.js b/src/core/index.js index a5ad33edf9..92530e2779 100644 --- a/src/core/index.js +++ b/src/core/index.js @@ -1,181 +1,78 @@ 'use strict' -const BlockService = require('ipfs-block-service') -const Ipld = require('ipld') +const log = require('debug')('ipfs') +const mergeOptions = require('merge-options') +const { isTest
} = require('ipfs-utils/src/env') +const globSource = require('ipfs-utils/src/files/glob-source') +const urlSource = require('ipfs-utils/src/files/url-source') +const { Buffer } = require('buffer') const PeerId = require('peer-id') const PeerInfo = require('peer-info') const crypto = require('libp2p-crypto') const isIPFS = require('is-ipfs') const multiaddr = require('multiaddr') const multihash = require('multihashes') -const PeerBook = require('peer-book') const multibase = require('multibase') const multicodec = require('multicodec') const multihashing = require('multihashing-async') const CID = require('cids') -const debug = require('debug') -const mergeOptions = require('merge-options') -const EventEmitter = require('events') - -const config = require('./config') -const boot = require('./boot') -const components = require('./components') -const GCLock = require('./components/pin/gc-lock') - -// replaced by repo-browser when running in the browser -const defaultRepo = require('./runtime/repo-nodejs') -const preload = require('./preload') -const mfsPreload = require('./mfs-preload') -const ipldOptions = require('./runtime/ipld-nodejs') -const { isTest } = require('ipfs-utils/src/env') - -/** - * @typedef { import("./ipns/index") } IPNS - */ - -/** - * - * - * @class IPFS - * @extends {EventEmitter} - */ -class IPFS extends EventEmitter { - constructor (options) { - super() - - const defaults = { - init: true, - start: true, - EXPERIMENTAL: {}, - preload: { - enabled: !isTest, // preload by default, unless in test env - addresses: [ - '/dnsaddr/node0.preload.ipfs.io/https', - '/dnsaddr/node1.preload.ipfs.io/https' - ] - } - } - - options = config.validate(options || {}) - - this._options = mergeOptions(defaults, options) - - if (options.init === false) { - this._options.init = false - } - - if (!(options.start === false)) { - this._options.start = true - } - - if (typeof options.repo === 'string' || - options.repo === undefined) { - this._repo = defaultRepo(options) - } else { - this._repo = options.repo - } - - // IPFS utils - this.log = debug('ipfs') - this.log.err = debug('ipfs:err') - - // IPFS Core Internals - // this._repo - assigned above - this._peerInfoBook = new PeerBook() - this._peerInfo = undefined - this._bitswap = undefined - this._blockService = new BlockService(this._repo) - this._ipld = new Ipld(ipldOptions(this._blockService, this._options.ipld, this.log)) - this._preload = preload(this) - this._mfsPreload = mfsPreload(this) - /** @type {IPNS} */ - this._ipns = undefined - // eslint-disable-next-line no-console - this._print = this._options.silent ? 
this.log : console.log - this._gcLock = new GCLock(this._options.repoOwner, { - // Make sure GCLock is specific to repo, for tests where there are - // multiple instances of IPFS - morticeId: this._repo.path - }) - - // IPFS Core exposed components - // - for booting up a node - this.init = components.init(this) - this.preStart = components.preStart(this) - this.start = components.start(this) - this.stop = components.stop(this) - this.shutdown = this.stop - this.isOnline = components.isOnline(this) - // - interface-ipfs-core defined API - Object.assign(this, components.filesRegular(this)) - this.version = components.version(this) - this.id = components.id(this) - this.repo = components.repo(this) - this.bootstrap = components.bootstrap(this) - this.config = components.config(this) - this.block = components.block(this) - this.object = components.object(this) - this.dag = components.dag(this) - this.files = components.filesMFS(this) - this.libp2p = null // assigned on start - this.swarm = components.swarm(this) - this.name = components.name(this) - this.bitswap = components.bitswap(this) - this.pin = components.pin(this) - this.ping = components.ping(this) - this.pingPullStream = components.pingPullStream(this) - this.pingReadableStream = components.pingReadableStream(this) - this.pubsub = components.pubsub(this) - this.dht = components.dht(this) - this.dns = components.dns(this) - this.key = components.key(this) - this.stats = components.stats(this) - this.resolve = components.resolve(this) - - if (this._options.EXPERIMENTAL.ipnsPubsub) { - this.log('EXPERIMENTAL IPNS pubsub is enabled') - } - if (this._options.EXPERIMENTAL.sharding) { - this.log('EXPERIMENTAL sharding is enabled') - } +const { NotInitializedError } = require('./errors') +const Components = require('./components') +const ApiManager = require('./api-manager') + +const getDefaultOptions = () => ({ + init: true, + start: true, + EXPERIMENTAL: {}, + preload: { + enabled: !isTest, // preload by default, unless in test env + addresses: [ + '/dns4/node0.preload.ipfs.io/https', + '/dns4/node1.preload.ipfs.io/https' + ] + } +}) - this.state = require('./state')(this) +async function create (options) { + options = mergeOptions(getDefaultOptions(), options) - const onReady = () => { - this.removeListener('error', onError) - this._ready = true - } + // eslint-disable-next-line no-console + const print = options.silent ? 
log : console.log - const onError = err => { - this.removeListener('ready', onReady) - this._readyError = err - } + const apiManager = new ApiManager() - this.once('ready', onReady).once('error', onError) + const { api } = apiManager.update({ + init: Components.init({ apiManager, print, constructorOptions: options }), + dns: Components.dns(), + isOnline: Components.isOnline({}) + }, () => { throw new NotInitializedError() }) - boot(this) + if (!options.init) { + return api } - get ready () { - return new Promise((resolve, reject) => { - if (this._ready) return resolve(this) - if (this._readyError) return reject(this._readyError) - this.once('ready', () => resolve(this)) - this.once('error', reject) - }) - } -} + await api.init() -module.exports = IPFS - -// Note: We need to do this to force browserify to load the Buffer module -const BufferImpl = Buffer -Object.assign(module.exports, { crypto, isIPFS, Buffer: BufferImpl, CID, multiaddr, multibase, multihash, multihashing, multicodec, PeerId, PeerInfo }) + if (!options.start) { + return api + } -module.exports.createNode = (options) => { - return new IPFS(options) + return api.start() } -module.exports.create = (options) => { - return new IPFS(options).ready +module.exports = { + create, + crypto, + isIPFS, + Buffer, + CID, + multiaddr, + multibase, + multihash, + multihashing, + multicodec, + PeerId, + PeerInfo, + globSource, + urlSource } diff --git a/src/core/ipns/index.js b/src/core/ipns/index.js index c96fad80a2..f7bcc40108 100644 --- a/src/core/ipns/index.js +++ b/src/core/ipns/index.js @@ -11,7 +11,6 @@ log.error = debug('ipfs:ipns:error') const IpnsPublisher = require('./publisher') const IpnsRepublisher = require('./republisher') const IpnsResolver = require('./resolver') -const path = require('./path') const { normalizePath } = require('../utils') const TLRU = require('../../utils/tlru') const defaultRecordTtl = 60 * 1000 @@ -94,6 +93,4 @@ class IPNS { } } -IPNS.path = path - module.exports = IPNS diff --git a/src/core/ipns/path.js b/src/core/ipns/path.js deleted file mode 100644 index 0fb9e34ff7..0000000000 --- a/src/core/ipns/path.js +++ /dev/null @@ -1,25 +0,0 @@ -'use strict' - -const isIPFS = require('is-ipfs') - -const debug = require('debug') -const log = debug('ipfs:ipns:path') -log.error = debug('ipfs:ipns:path:error') - -// resolves the given path by parsing out protocol-specific entries -// (e.g. 
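The constructor-and-events boot dance is gone: `create()` resolves once init and start have run (both on by default). A minimal boot sketch:

```js
const IPFS = require('ipfs')

async function main () {
  // init and start both default to true; pass { init: false } or
  // { start: false } to get the intermediate APIs instead
  const ipfs = await IPFS.create({ silent: true })

  const { version } = await ipfs.version()
  console.log(`js-ipfs version ${version}`)

  await ipfs.stop()
}

main().catch(console.error)
```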
/ipns/) and then going through the /ipfs/ entries and returning the final node -const resolvePath = (ipfsNode, name) => { - // ipns path - if (isIPFS.ipnsPath(name)) { - log(`resolve ipns path ${name}`) - - return ipfsNode._ipns.resolve(name) - } - - // ipfs path - return ipfsNode.dag.get(name.substring('/ipfs/'.length)) -} - -module.exports = { - resolvePath -} diff --git a/src/core/preload.js b/src/core/preload.js index 5427a2ecd0..053103c648 100644 --- a/src/core/preload.js +++ b/src/core/preload.js @@ -10,8 +10,8 @@ const preload = require('./runtime/preload-nodejs') const log = debug('ipfs:preload') log.error = debug('ipfs:preload:error') -module.exports = self => { - const options = self._options.preload || {} +module.exports = options => { + options = options || {} options.enabled = Boolean(options.enabled) options.addresses = options.addresses || [] diff --git a/src/core/runtime/add-from-fs-browser.js b/src/core/runtime/add-from-fs-browser.js deleted file mode 100644 index aaf9691c7c..0000000000 --- a/src/core/runtime/add-from-fs-browser.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') - -module.exports = () => { - return callbackify(async () => { // eslint-disable-line require-await - throw new Error('not available in the browser') - }) -} diff --git a/src/core/runtime/add-from-fs-nodejs.js b/src/core/runtime/add-from-fs-nodejs.js deleted file mode 100644 index 33bc3954e2..0000000000 --- a/src/core/runtime/add-from-fs-nodejs.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const globSource = require('ipfs-utils/src/files/glob-source') -const all = require('async-iterator-all') - -module.exports = self => { - return callbackify.variadic(async (...args) => { // eslint-disable-line require-await - const options = typeof args[args.length - 1] === 'string' ? {} : args.pop() - - return all(self._addAsyncIterator(globSource(...args, options), options)) - }) -} diff --git a/src/core/runtime/init-assets-browser.js b/src/core/runtime/init-assets-browser.js new file mode 100644 index 0000000000..0c0c42d5b5 --- /dev/null +++ b/src/core/runtime/init-assets-browser.js @@ -0,0 +1 @@ +module.exports = () => {} diff --git a/src/core/runtime/init-assets-nodejs.js b/src/core/runtime/init-assets-nodejs.js new file mode 100644 index 0000000000..81c0a34832 --- /dev/null +++ b/src/core/runtime/init-assets-nodejs.js @@ -0,0 +1,15 @@ +'use strict' + +const path = require('path') +const globSource = require('ipfs-utils/src/files/glob-source') +const all = require('async-iterator-all') + +// Add the default assets to the repo. 
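+// Illustrative call site (hypothetical wiring): init would pass the freshly +// built `add` component and the configured `print`, e.g. +// await initAssets({ add, print })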
+module.exports = async function initAssets ({ add, print }) { + const initDocsPath = path.join(__dirname, '..', '..', 'init-files', 'init-docs') + const results = await all(add(globSource(initDocsPath, { recursive: true }), { preload: false })) + const dir = results.filter(file => file.path === 'init-docs').pop() + + print('to get started, enter:\n') + print(`\tjsipfs cat /ipfs/${dir.cid}/readme\n`) +} diff --git a/src/core/runtime/repo-browser.js b/src/core/runtime/repo-browser.js index 8bd0f330e2..de4c9f59bf 100644 --- a/src/core/runtime/repo-browser.js +++ b/src/core/runtime/repo-browser.js @@ -3,6 +3,7 @@ const IPFSRepo = require('ipfs-repo') module.exports = (options) => { - const repoPath = options.repo || 'ipfs' - return new IPFSRepo(repoPath, { autoMigrate: options.repoAutoMigrate }) + options = options || {} + const repoPath = options.path || 'ipfs' + return new IPFSRepo(repoPath, { autoMigrate: options.autoMigrate }) } diff --git a/src/core/runtime/repo-nodejs.js b/src/core/runtime/repo-nodejs.js index 431d59b377..d8581b7e32 100644 --- a/src/core/runtime/repo-nodejs.js +++ b/src/core/runtime/repo-nodejs.js @@ -4,8 +4,8 @@ const os = require('os') const IPFSRepo = require('ipfs-repo') const path = require('path') -module.exports = (options) => { - const repoPath = options.repo || path.join(os.homedir(), '.jsipfs') - - return new IPFSRepo(repoPath, { autoMigrate: options.repoAutoMigrate }) +module.exports = options => { + options = options || {} + const repoPath = options.path || path.join(os.homedir(), '.jsipfs') + return new IPFSRepo(repoPath, { autoMigrate: options.autoMigrate }) } diff --git a/src/core/utils.js b/src/core/utils.js index 8373797dde..f2675bf20c 100644 --- a/src/core/utils.js +++ b/src/core/utils.js @@ -2,6 +2,7 @@ const isIpfs = require('is-ipfs') const CID = require('cids') +const { cidToString } = require('../utils/cid') const ERR_BAD_PATH = 'ERR_BAD_PATH' exports.OFFLINE_ERROR = 'This command must be run in online mode. Try running \'ipfs daemon\' first.' @@ -39,11 +40,6 @@ function parseIpfsPath (ipfsPath) { /** * Returns a well-formed ipfs Path. * The returned path will always be prefixed with /ipfs/ or /ipns/. - * If the received string is not a valid ipfs path, an error will be returned - * examples: - * b58Hash -> { hash: 'b58Hash', links: [] } - * b58Hash/mercury/venus -> { hash: 'b58Hash', links: ['mercury', 'venus']} - * /ipfs/b58Hash/links/by/name -> { hash: 'b58Hash', links: ['links', 'by', 'name'] } * * @param {String} pathStr An ipfs-path, or ipns-path or a cid * @return {String} ipfs-path or ipns-path @@ -51,12 +47,29 @@ function parseIpfsPath (ipfsPath) { */ const normalizePath = (pathStr) => { if (isIpfs.cid(pathStr)) { - return `/ipfs/${pathStr}` + return `/ipfs/${new CID(pathStr)}` } else if (isIpfs.path(pathStr)) { return pathStr } else { - throw Object.assign(new Error(`invalid ${pathStr} path`), { code: ERR_BAD_PATH }) + throw Object.assign(new Error(`invalid path: ${pathStr}`), { code: ERR_BAD_PATH }) + } +} + +// TODO: do we need both normalizePath and normalizeCidPath? 
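+// Illustrative behaviour (CIDs are placeholders): +// normalizeCidPath('/ipfs/QmHash/a/b/') -> 'QmHash/a/b' +// normalizeCidPath(new CID('QmHash')) -> 'QmHash'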
+const normalizeCidPath = (path) => { + if (Buffer.isBuffer(path)) { + return new CID(path).toString() + } + if (CID.isCID(path)) { + return path.toString() + } + if (path.indexOf('/ipfs/') === 0) { + path = path.substring('/ipfs/'.length) + } + if (path.charAt(path.length - 1) === '/') { + path = path.substring(0, path.length - 1) } + return path } /** @@ -124,6 +137,35 @@ const resolvePath = async function (objectAPI, ipfsPaths) { return cids } +const mapFile = (file, options) => { + options = options || {} + + let size = 0 + let type = 'dir' + + if (file.unixfs && file.unixfs.type === 'file') { + size = file.unixfs.fileSize() + type = 'file' + } + + const output = { + hash: cidToString(file.cid, { base: options.cidBase }), + path: file.path, + name: file.name, + depth: file.path.split('/').length, + size, + type + } + + if (options.includeContent && file.unixfs && file.unixfs.type === 'file') { + output.content = file.content + } + + return output +} + exports.normalizePath = normalizePath +exports.normalizeCidPath = normalizeCidPath exports.parseIpfsPath = parseIpfsPath exports.resolvePath = resolvePath +exports.mapFile = mapFile diff --git a/src/http/api/resources/bitswap.js b/src/http/api/resources/bitswap.js index 0a8d9debf1..04e8080644 100644 --- a/src/http/api/resources/bitswap.js +++ b/src/http/api/resources/bitswap.js @@ -20,8 +20,8 @@ exports.wantlist = { const list = await ipfs.bitswap.wantlist(peerId) return h.response({ - Keys: list.Keys.map(k => ({ - '/': cidToString(k['/'], { base: cidBase, upgrade: false }) + Keys: list.map(cid => ({ + '/': cidToString(cid, { base: cidBase, upgrade: false }) })) }) } @@ -40,8 +40,8 @@ exports.stat = { const stats = await ipfs.bitswap.stat() - stats.wantlist = stats.wantlist.map(k => ({ - '/': cidToString(k['/'], { base: cidBase, upgrade: false }) + stats.wantlist = stats.wantlist.map(cid => ({ + '/': cidToString(cid, { base: cidBase, upgrade: false }) })) return h.response({ diff --git a/src/http/api/resources/block.js b/src/http/api/resources/block.js index c88b25b15f..6d12f6158a 100644 --- a/src/http/api/resources/block.js +++ b/src/http/api/resources/block.js @@ -132,7 +132,7 @@ exports.rm = { return streamResponse(request, h, async (output) => { try { - for await (const result of request.server.app.ipfs.block._rmAsyncIterator(arg, { + for await (const result of request.server.app.ipfs.block.rm(arg, { force, quiet })) { @@ -170,7 +170,7 @@ exports.stat = { } return h.response({ - Key: cidToString(stats.key, { base: request.query['cid-base'] }), + Key: cidToString(stats.cid, { base: request.query['cid-base'] }), Size: stats.size }) } diff --git a/src/http/api/resources/dag.js b/src/http/api/resources/dag.js index 436382bc38..f3dffc1b4f 100644 --- a/src/http/api/resources/dag.js +++ b/src/http/api/resources/dag.js @@ -248,7 +248,7 @@ exports.resolve = { let lastRemainderPath = path if (path) { - const result = ipfs._ipld.resolve(lastCid, path) + const result = ipfs.dag.resolve(lastCid, path) while (true) { const resolveResult = (await result.next()).value if (!CID.isCID(resolveResult.value)) { diff --git a/src/http/api/resources/files-regular.js b/src/http/api/resources/files-regular.js index bc963ce3d1..1f949fb7a0 100644 --- a/src/http/api/resources/files-regular.js +++ b/src/http/api/resources/files-regular.js @@ -208,7 +208,7 @@ exports.add = { } }, function (source) { - return ipfs._addAsyncIterator(source, { + return ipfs.add(source, { cidVersion: request.query['cid-version'], rawLeaves: request.query['raw-leaves'], progress: 
request.query.progress ? progressHandler : null, @@ -225,7 +225,7 @@ exports.add = { for await (const file of source) { output.write(JSON.stringify({ Name: file.path, - Hash: cidToString(file.hash, { base: request.query['cid-base'] }), + Hash: cidToString(file.cid, { base: request.query['cid-base'] }), Size: file.size }) + '\n') } diff --git a/src/http/api/resources/pin.js b/src/http/api/resources/pin.js index 576d9be88d..1e15822b0b 100644 --- a/src/http/api/resources/pin.js +++ b/src/http/api/resources/pin.js @@ -4,6 +4,10 @@ const multibase = require('multibase') const Joi = require('@hapi/joi') const Boom = require('@hapi/boom') const isIpfs = require('is-ipfs') +const toStream = require('it-to-stream') +const { map } = require('streaming-iterables') +const pipe = require('it-pipe') +const ndjson = require('iterable-ndjson') const { cidToString } = require('../../../utils/cid') function parseArgs (request, h) { @@ -53,20 +57,13 @@ exports.ls = { const { ipfs } = request.server.app const { path, type } = request.pre.args - let result - try { - result = await ipfs.pin.ls(path, { type }) - } catch (err) { - throw Boom.boomify(err) - } + const response = pipe( + ipfs.pin.ls(path, { type }), + map(({ type, cid }) => ({ Type: type, Hash: cidToString(cid, { base: request.query['cid-base'] }) })), + ndjson.stringify + ) - return h.response({ - Keys: result.reduce((acc, v) => { - const prop = cidToString(v.hash, { base: request.query['cid-base'] }) - acc[prop] = { Type: v.type } - return acc - }, {}) - }) + return h.response(toStream.readable(response)) } } @@ -94,7 +91,7 @@ exports.add = { } return h.response({ - Pins: result.map(obj => cidToString(obj.hash, { base: request.query['cid-base'] })) + Pins: result.map(obj => cidToString(obj.cid, { base: request.query['cid-base'] })) }) } } @@ -120,7 +117,7 @@ exports.rm = { } return h.response({ - Pins: result.map(obj => cidToString(obj.hash, { base: request.query['cid-base'] })) + Pins: result.map(obj => cidToString(obj.cid, { base: request.query['cid-base'] })) }) } } diff --git a/src/index.js b/src/index.js index aec25fb4ae..8140941bf4 100644 --- a/src/index.js +++ b/src/index.js @@ -2,4 +2,4 @@ const IPFS = require('./core') -exports = module.exports = IPFS +module.exports = IPFS diff --git a/src/utils/mutex.js b/src/utils/mutex.js deleted file mode 100644 index 8cb3df36cc..0000000000 --- a/src/utils/mutex.js +++ /dev/null @@ -1,52 +0,0 @@ -'use strict' - -const assert = require('assert') -const mortice = require('mortice') -const noop = () => {} - -// Wrap mortice to present a callback interface -class Mutex { - constructor (repoOwner, options) { - options = options || {} - - this.mutex = mortice(options.morticeId, { - singleProcess: repoOwner - }) - - this.log = options.log || noop - this.lockId = 0 - } - - readLock () { - return this._lock('readLock') - } - - writeLock () { - return this._lock('writeLock') - } - - /** - * Request a read or write lock - * - * @param {String} type The type of lock: readLock / writeLock - * @returns {Promise} - */ - async _lock (type) { - assert(typeof type === 'string', `first argument to Mutex.${type}() must be a string, got ${typeof type}`) - - const lockId = this.lockId++ - this.log(`[${lockId}] ${type} requested`) - - // Get a Promise for the lock, wrap it for logging - const release = await this.mutex[type]() - - this.log(`[${lockId}] ${type} started`) - - return () => { - this.log(`[${lockId}] ${type} released`) - release() - } - } -} - -module.exports = Mutex
diff --git a/test/core/config.spec.js b/test/core/config.spec.js deleted file mode 100644 index ee1fa4a00e..0000000000 --- a/test/core/config.spec.js +++ /dev/null @@ -1,223 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { expect } = require('interface-ipfs-core/src/utils/mocha') -const config = require('../../src/core/config') - -describe('config', () => { - it('should allow empty config', () => { - const cfg = {} - expect(() => config.validate(cfg)).to.not.throw() - }) - - it('should allow undefined config', () => { - const cfg = undefined - expect(() => config.validate(cfg)).to.not.throw() - }) - - it('should validate valid repo', () => { - const cfgs = [ - { repo: { unknown: 'value' } }, - { repo: '/path/to-repo' }, - { repo: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid repo', () => { - const cfgs = [ - { repo: 138 } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid init', () => { - const cfgs = [ - { init: { bits: 138 } }, - { init: true }, - { init: false }, - { init: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid init', () => { - const cfgs = [ - { init: 138 }, - { init: { bits: 'not an int' } } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid start', () => { - const cfgs = [ - { start: true }, - { start: false }, - { start: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid start', () => { - const cfgs = [ - { start: 138 }, - { start: 'make it so number 1' }, - { start: null } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid pass', () => { - const cfgs = [ - { pass: 'correctbatteryhorsestaple' }, - { pass: '' }, - { pass: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid pass', () => { - const cfgs = [ - { pass: 138 }, - { pass: null } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid EXPERIMENTAL', () => { - const cfgs = [ - { EXPERIMENTAL: { dht: true, sharding: true } }, - { EXPERIMENTAL: { dht: false, sharding: false } }, - { EXPERIMENTAL: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid EXPERIMENTAL', () => { - const cfgs = [ - { EXPERIMENTAL: { dht: 138 } }, - { EXPERIMENTAL: { sharding: 138 } } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid config', () => { - const cfgs = [ - { config: { Addresses: { Swarm: ['/ip4/0.0.0.0/tcp/4002'] } } }, - { config: { Addresses: { Swarm: [] } } }, - { config: { Addresses: { Swarm: undefined } } }, - - { config: { Addresses: { API: '/ip4/127.0.0.1/tcp/5002' } } }, - { config: { Addresses: { API: ['/ip4/127.0.0.1/tcp/5002', '/ip4/127.0.0.1/tcp/5003'] } } }, - { config:
{ Addresses: { API: undefined } } }, - - { config: { Addresses: { Gateway: '/ip4/127.0.0.1/tcp/9090' } } }, - { config: { Addresses: { Gateway: ['/ip4/127.0.0.1/tcp/9090', '/ip4/127.0.0.1/tcp/9091'] } } }, - { config: { Addresses: { Gateway: undefined } } }, - - { config: { Addresses: { Delegates: ['/dns4/node0.preload.ipfs.io/tcp/443/https'] } } }, - { config: { Addresses: { Delegates: [] } } }, - { config: { Addresses: { Delegates: undefined } } }, - - { config: { Addresses: undefined } }, - - { config: { Discovery: { MDNS: { Enabled: true } } } }, - { config: { Discovery: { MDNS: { Enabled: false } } } }, - { config: { Discovery: { MDNS: { Interval: 138 } } } }, - { config: { Discovery: { MDNS: undefined } } }, - - { config: { Discovery: { webRTCStar: { Enabled: true } } } }, - { config: { Discovery: { webRTCStar: { Enabled: false } } } }, - { config: { Discovery: { webRTCStar: undefined } } }, - - { config: { Discovery: undefined } }, - - { config: { Bootstrap: ['/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z'] } }, - { config: { Bootstrap: [] } }, - - { config: { Swarm: { ConnMgr: { LowWater: 200, HighWater: 500 } } } }, - { config: { Swarm: { ConnMgr: { LowWater: undefined, HighWater: undefined } } } }, - { config: { Swarm: { ConnMgr: undefined } } }, - { config: { Swarm: undefined } }, - - { config: { Pubsub: { Enabled: true, Router: 'gossipsub' } } }, - { config: { Pubsub: { Enabled: false } } }, - - { config: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid config', () => { - const cfgs = [ - { config: { Addresses: { Swarm: 138 } } }, - { config: { Addresses: { Swarm: null } } }, - - { config: { Addresses: { API: 138 } } }, - { config: { Addresses: { API: null } } }, - - { config: { Addresses: { Gateway: 138 } } }, - { config: { Addresses: { Gateway: null } } }, - - { config: { Discovery: { MDNS: { Enabled: 138 } } } }, - { config: { Discovery: { MDNS: { Interval: true } } } }, - - { config: { Discovery: { webRTCStar: { Enabled: 138 } } } }, - - { config: { Bootstrap: ['/ip4/0.0.0.0/tcp/4002'] } }, - { config: { Bootstrap: 138 } }, - - { config: { Swarm: { ConnMgr: { LowWater: 200, HighWater: {} } } } }, - { config: { Swarm: { ConnMgr: { LowWater: {}, HighWater: 500 } } } }, - { config: { Swarm: { ConnMgr: 138 } } }, - { config: { Swarm: 138 } }, - - { config: { Pubsub: { Enabled: 1 } } }, - - { config: 138 } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid libp2p', () => { - const cfgs = [ - { libp2p: { modules: {} } }, - { libp2p: () => {} }, - { libp2p: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid libp2p', () => { - const cfgs = [ - { libp2p: 'error' }, - { libp2p: 138 } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid profiles', () => { - expect( - () => config.validate({ init: { profiles: ['test'] } }) - ).to.not.throw() - }) - it('should validate invalid profiles', () => { - expect( - () => config.validate({ init: { profiles: 'test' } }) - ).to.throw() - }) -}) diff --git a/test/core/files.spec.js b/test/core/files.spec.js index ea8ca2380a..484c06ba43 100644 --- a/test/core/files.spec.js +++ b/test/core/files.spec.js @@ -6,9 +6,10 @@ const { expect } = require('interface-ipfs-core/src/utils/mocha') const hat = require('hat') const pull = require('pull-stream') 
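+// Note: `add` now returns an async iterable rather than a promise of an array,
+// so the tests below gather its output with it-all, e.g. (illustrative;
+// `data` is a placeholder Buffer): `const files = await all(ipfs.add(data))`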
const IPFSFactory = require('ipfsd-ctl') +const all = require('it-all') const IPFS = require('../../src/core') describe('files', function () { this.timeout(10 * 1000) let ipfsd, ipfs @@ -74,13 +75,13 @@ describe('files', function () { describe('add', () => { it('should not error when passed null options', async () => { - await ipfs.add(Buffer.from(hat()), null) + await all(ipfs.add(Buffer.from(hat()), null)) }) it('should add a file with a v1 CID', async () => { - const files = await ipfs.add(Buffer.from([0, 1, 2]), { + const files = await all(ipfs.add(Buffer.from([0, 1, 2]), { cidVersion: 1 - }) + })) expect(files.length).to.equal(1) - expect(files[0].hash).to.equal('bafkreifojmzibzlof6xyh5auu3r5vpu5l67brf3fitaf73isdlglqw2t7q') + expect(files[0].cid.toString()).to.equal('bafkreifojmzibzlof6xyh5auu3r5vpu5l67brf3fitaf73isdlglqw2t7q') @@ -88,10 +89,10 @@ }) it('should add a file with a v1 CID and not raw leaves', async () => { - const files = await ipfs.add(Buffer.from([0, 1, 2]), { + const files = await all(ipfs.add(Buffer.from([0, 1, 2]), { cidVersion: 1, rawLeaves: false - }) + })) expect(files.length).to.equal(1) - expect(files[0].hash).to.equal('bafybeide2caf5we5a7izifzwzz5ds2gla67vsfgrzvbzpnyyirnfzgwf5e') + expect(files[0].cid.toString()).to.equal('bafybeide2caf5we5a7izifzwzz5ds2gla67vsfgrzvbzpnyyirnfzgwf5e') diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js index bfc0cb6508..ff5cc32391 100644 --- a/test/core/interface.spec.js +++ b/test/core/interface.spec.js @@ -41,17 +41,7 @@ describe('interface-ipfs-core tests', function () { } }) - tests.filesRegular(defaultCommonFactory, { - skip: isNode ? null : [{ - name: 'addFromStream', - reason: 'Not designed to run in the browser' - }, { - name: 'addFromFs', - reason: 'Not designed to run in the browser' - }] - }) - - tests.filesMFS(defaultCommonFactory) + tests.files(defaultCommonFactory) tests.key(CommonFactory.createAsync({ spawnOptions: {