From c9639c123b26992ec425943558acd1684815cc3c Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 18 Jun 2019 08:40:49 +0200 Subject: [PATCH 01/19] chore: update contributors --- package.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 60225e8303..b63f227685 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "ipfs", - "version": "0.36.3", + "version": "0.36.4", "description": "JavaScript implementation of the IPFS specification", "keywords": [ "IPFS" @@ -248,8 +248,8 @@ "Henrique Dias ", "Henry Rodrick ", "Heo Sangmin ", - "Hugo Dias ", "Hugo Dias ", + "Hugo Dias ", "Irakli Gozalishvili ", "Jacob Heun ", "Jacob Heun ", @@ -282,6 +282,7 @@ "Paulo Rodrigues ", "Pedro Teixeira ", "Portia Burton ", + "Prabhakar Poudel ", "Raoul Millais ", "RasmusErik Voel Jensen ", "Richard Littauer ", From 88f4e264ee14986c2b2eb16263f747631ef58958 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 18 Jun 2019 08:40:50 +0200 Subject: [PATCH 02/19] chore: release version v0.36.4 --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 444001f7c9..18a8c00fba 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ + +## [0.36.4](https://github.com/ipfs/js-ipfs/compare/v0.36.3...v0.36.4) (2019-06-18) + + + ## [0.36.3](https://github.com/ipfs/js-ipfs/compare/v0.36.2...v0.36.3) (2019-05-30) From c5322b5067c17075d51f8fb13212b254662f36f2 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Mon, 24 Jun 2019 09:14:15 +0100 Subject: [PATCH 03/19] chore(package): update pull-stream to version 3.6.12 (#2143) --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index b63f227685..260345a3ea 100644 --- a/package.json +++ b/package.json @@ -155,7 +155,7 @@ "pull-ndjson": "~0.1.1", "pull-pushable": "^2.2.0", "pull-sort": "^1.0.1", - "pull-stream": "^3.6.9", + "pull-stream": "^3.6.12", "pull-stream-to-async-iterator": "^1.0.1", "pull-stream-to-stream": "^1.3.4", "pull-traverse": "^1.0.3", From e46e6ad98d6decfab7560ef4c21579d5ceba5ac5 Mon Sep 17 00:00:00 2001 From: Qmstream <51881352+Qmstream@users.noreply.github.com> Date: Mon, 24 Jun 2019 06:06:26 -0300 Subject: [PATCH 04/19] fix: ipns reference to libp2p dht config (#2182) --- src/core/ipns/routing/config.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/core/ipns/routing/config.js b/src/core/ipns/routing/config.js index 7faa469258..09f2f3aedd 100644 --- a/src/core/ipns/routing/config.js +++ b/src/core/ipns/routing/config.js @@ -22,7 +22,7 @@ module.exports = (ipfs) => { } // DHT should not be added as routing if we are offline or it is disabled - if (get(ipfs._options, 'offline') || !get(ipfs._options, 'libp2p.dht.enabled', false)) { + if (get(ipfs._options, 'offline') || !get(ipfs._options, 'libp2p.config.dht.enabled', false)) { const offlineDatastore = new OfflineDatastore(ipfs._repo) ipnsStores.push(offlineDatastore) } else { From 3e63ef282e26371cc5083b24ebdb54573a45cc78 Mon Sep 17 00:00:00 2001 From: "greenkeeper[bot]" <23040076+greenkeeper[bot]@users.noreply.github.com> Date: Mon, 24 Jun 2019 10:43:21 +0100 Subject: [PATCH 05/19] fix(package): update file-type to version 12.0.0 (#2176) --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 260345a3ea..68067f570f 100644 --- a/package.json +++ b/package.json @@ -81,7 +81,7 @@ "debug": "^4.1.0", "dlv": "^1.1.3", "err-code": "^1.1.2", - "file-type": 
"^11.1.0", + "file-type": "^12.0.0", "fnv1a": "^1.0.1", "fsm-event": "^2.1.0", "get-folder-size": "^2.0.0", From 9e2171b4f21e73836888c5b385617df960a3e46d Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Mon, 24 Jun 2019 11:45:47 +0100 Subject: [PATCH 06/19] docs: remove missing tutorial resolves #2190 --- examples/README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/examples/README.md b/examples/README.md index 9bd33aa125..73e3769e0b 100644 --- a/examples/README.md +++ b/examples/README.md @@ -8,7 +8,6 @@ Let us know if you find any issue or if you want to contribute and add a new tut - [Tutorial: IPFS 101, spawn a node and add a file to IPFS](./ipfs-101) - [Tutorial: Build a tiny browser app to exchange files between nodes](./exchange-files-in-browser) -- [Tutorial: Interact with IPFS directly from your Terminal](./ipfs-cli-fun) - [Tutorial: Resolve through IPLD graphs with the dag API](./traverse-ipld-graphs) - [Tutorial: Use IPFS to explore the Ethereum BlockChain](./explore-ethereum-blockchain) - [Tutorial (Video): How to build an application with IPFS PubSub Room](https://www.youtube.com/watch?v=Nv_Teb--1zg) From b1388a105230d27b4b2b50c23294281d42d5cff5 Mon Sep 17 00:00:00 2001 From: Alex Potsides Date: Mon, 24 Jun 2019 12:49:51 +0200 Subject: [PATCH 07/19] chore: encode buffers before printing (#2164) Printing raw buffers can end up outputting characters into the terminal that mess up the encoding for all subsequent lines, which is a pain when debugging things. This change just encodes the buffer before printing to stop that from happening. --- src/core/ipns/publisher.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/core/ipns/publisher.js b/src/core/ipns/publisher.js index 8caa9c8dba..4a3fb1a4de 100644 --- a/src/core/ipns/publisher.js +++ b/src/core/ipns/publisher.js @@ -108,13 +108,13 @@ class IpnsPublisher { // Add record to routing (buffer key) this._routing.put(key.toBuffer(), entryData, (err, res) => { if (err) { - const errMsg = `ipns record for ${key.toString()} could not be stored in the routing` + const errMsg = `ipns record for ${key.toString('base64')} could not be stored in the routing` log.error(errMsg) return callback(errcode(new Error(errMsg), 'ERR_PUTTING_TO_ROUTING')) } - log(`ipns record for ${key.toString()} was stored in the routing`) + log(`ipns record for ${key.toString('base64')} was stored in the routing`) callback(null, res) }) } @@ -137,13 +137,13 @@ class IpnsPublisher { // Add public key to routing (buffer key) this._routing.put(key.toBuffer(), publicKey.bytes, (err, res) => { if (err) { - const errMsg = `public key for ${key.toString()} could not be stored in the routing` + const errMsg = `public key for ${key.toString('base64')} could not be stored in the routing` log.error(errMsg) return callback(errcode(new Error(errMsg), 'ERR_PUTTING_TO_ROUTING')) } - log(`public key for ${key.toString()} was stored in the routing`) + log(`public key for ${key.toString('base64')} was stored in the routing`) callback(null, res) }) } From 103e35954ee03cba10b09e4d38331d01a9b027b7 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 25 Jun 2019 14:54:05 +0100 Subject: [PATCH 08/19] docs: update bitswap maintainer (#2199) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index fabaf55e07..82c538d27b 100644 --- a/README.md +++ b/README.md @@ -1008,7 +1008,7 @@ Listing of the main packages used in the IPFS ecosystem. 
There are also three sp | [`ipfs-repo`](//github.com/ipfs/js-ipfs-repo) | [![npm](https://img.shields.io/npm/v/ipfs-repo.svg?maxAge=86400&style=flat)](//github.com/ipfs/js-ipfs-repo/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-repo.svg?style=flat)](https://david-dm.org/ipfs/js-ipfs-repo) | [![Travis CI](https://travis-ci.com/ipfs/js-ipfs-repo.svg?branch=master)](https://travis-ci.com/ipfs/js-ipfs-repo) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs-repo/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-repo) | [Jacob Heun](mailto:jacobheun@gmail.com) | | **Exchange** | | [`ipfs-block-service`](//github.com/ipfs/js-ipfs-block-service) | [![npm](https://img.shields.io/npm/v/ipfs-block-service.svg?maxAge=86400&style=flat)](//github.com/ipfs/js-ipfs-block-service/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-block-service.svg?style=flat)](https://david-dm.org/ipfs/js-ipfs-block-service) | [![Travis CI](https://travis-ci.com/ipfs/js-ipfs-block-service.svg?branch=master)](https://travis-ci.com/ipfs/js-ipfs-block-service) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs-block-service/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-block-service) | [Volker Mische](mailto:volker.mische@gmail.com) | -| [`ipfs-bitswap`](//github.com/ipfs/js-ipfs-bitswap) | [![npm](https://img.shields.io/npm/v/ipfs-bitswap.svg?maxAge=86400&style=flat)](//github.com/ipfs/js-ipfs-bitswap/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-bitswap.svg?style=flat)](https://david-dm.org/ipfs/js-ipfs-bitswap) | [![Travis CI](https://travis-ci.com/ipfs/js-ipfs-bitswap.svg?branch=master)](https://travis-ci.com/ipfs/js-ipfs-bitswap) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs-bitswap/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-bitswap) | [Volker Mische](mailto:volker.mische@gmail.com) | +| [`ipfs-bitswap`](//github.com/ipfs/js-ipfs-bitswap) | [![npm](https://img.shields.io/npm/v/ipfs-bitswap.svg?maxAge=86400&style=flat)](//github.com/ipfs/js-ipfs-bitswap/releases) | [![Deps](https://david-dm.org/ipfs/js-ipfs-bitswap.svg?style=flat)](https://david-dm.org/ipfs/js-ipfs-bitswap) | [![Travis CI](https://travis-ci.com/ipfs/js-ipfs-bitswap.svg?branch=master)](https://travis-ci.com/ipfs/js-ipfs-bitswap) | [![codecov](https://codecov.io/gh/ipfs/js-ipfs-bitswap/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-bitswap) | [Dirk McCormick](mailto:dirk@protocol.ai) | | **libp2p** | | [`libp2p`](//github.com/libp2p/js-libp2p) | [![npm](https://img.shields.io/npm/v/libp2p.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p/releases) | [![Deps](https://david-dm.org/libp2p/js-libp2p.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p) | [![Travis CI](https://travis-ci.com/libp2p/js-libp2p.svg?branch=master)](https://travis-ci.com/libp2p/js-libp2p) | [![codecov](https://codecov.io/gh/libp2p/js-libp2p/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p) | [Jacob Heun](mailto:jacobheun@gmail.com) | | [`libp2p-circuit`](//github.com/libp2p/js-libp2p-circuit) | [![npm](https://img.shields.io/npm/v/libp2p-circuit.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p-circuit/releases) | [![Deps](https://david-dm.org/libp2p/js-libp2p-circuit.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p-circuit) | [![Travis CI](https://travis-ci.com/libp2p/js-libp2p-circuit.svg?branch=master)](https://travis-ci.com/libp2p/js-libp2p-circuit) | 
[![codecov](https://codecov.io/gh/libp2p/js-libp2p-circuit/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-circuit) | [Jacob Heun](mailto:jacobheun@gmail.com) | From 5044a300afc04df83041364cb68d6f2ce9636c69 Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Wed, 26 Jun 2019 23:37:38 +0200 Subject: [PATCH 09/19] feat: add support for ipns name resolve /ipns/ (#2002) fixes: #1918 `ipns name resolve` dns tests moved to interface-core resolve call now returns a string as per documention. --- .travis.yml | 6 +- package.json | 9 +- src/cli/commands/name/publish.js | 22 +- src/cli/commands/name/resolve.js | 8 +- src/core/components/name.js | 66 ++++-- src/core/index.js | 12 +- src/core/ipns/index.js | 34 +-- src/core/ipns/publisher.js | 5 +- src/http/api/resources/name.js | 4 +- src/utils/tlru.js | 87 +++++++ test/cli/name-pubsub.js | 50 ----- test/cli/name.js | 336 ++++------------------------ test/core/files-regular-utils.js | 33 +-- test/core/interface.spec.js | 34 +-- test/core/kad-dht.node.js | 2 +- test/core/name-pubsub.js | 33 ++- test/core/{name.js => name.spec.js} | 126 +---------- test/core/node.js | 1 - test/http-api/interface.js | 24 ++ test/utils/dns-fetch-stub.js | 16 -- 20 files changed, 311 insertions(+), 597 deletions(-) create mode 100644 src/utils/tlru.js rename test/core/{name.js => name.spec.js} (80%) delete mode 100644 test/utils/dns-fetch-stub.js diff --git a/.travis.yml b/.travis.yml index 19ece97bfc..995a49a2f2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,7 +18,7 @@ os: - osx - windows -script: npx nyc -s npm run test:node --timeout=10000 -- --bail +script: npx nyc -s npx aegir test -t node --timeout 10000 --bail after_success: npx nyc report --reporter=text-lcov > coverage.lcov && npx codecov jobs: @@ -47,12 +47,12 @@ jobs: - stage: test name: electron-main script: - - xvfb-run npx aegir test -t electron-main -- --bail + - xvfb-run npx aegir test -t electron-main -- --bail --timeout 10000 - stage: test name: electron-renderer script: - - xvfb-run npx aegir test -t electron-renderer -- --bail + - xvfb-run npx aegir test -t electron-renderer -- --bail --timeout 10000 notifications: email: false diff --git a/package.json b/package.json index 68067f570f..ad8413336d 100644 --- a/package.json +++ b/package.json @@ -87,12 +87,13 @@ "get-folder-size": "^2.0.0", "glob": "^7.1.3", "hapi-pino": "^6.0.0", + "hashlru": "^2.3.0", "human-to-milliseconds": "^1.0.0", "interface-datastore": "~0.6.0", "ipfs-bitswap": "~0.24.1", "ipfs-block": "~0.8.1", "ipfs-block-service": "~0.15.1", - "ipfs-http-client": "^32.0.0", + "ipfs-http-client": "^32.0.1", "ipfs-http-response": "~0.3.0", "ipfs-mfs": "~0.11.4", "ipfs-multipart": "~0.1.0", @@ -110,6 +111,7 @@ "ipld-raw": "^4.0.0", "ipld-zcash": "~0.3.0", "ipns": "~0.5.2", + "is-domain-name": "^1.0.1", "is-ipfs": "~0.6.1", "is-pull-stream": "~0.0.0", "is-stream": "^2.0.0", @@ -176,6 +178,7 @@ "aegir": "^19.0.3", "base64url": "^3.0.1", "chai": "^4.2.0", + "clear-module": "^3.2.0", "delay": "^4.1.0", "detect-node": "^2.0.4", "dir-compare": "^1.4.0", @@ -183,8 +186,8 @@ "execa": "^1.0.0", "form-data": "^2.3.3", "hat": "0.0.3", - "interface-ipfs-core": "~0.104.0", - "ipfsd-ctl": "~0.42.0", + "interface-ipfs-core": "~0.105.0", + "ipfsd-ctl": "~0.43.0", "libp2p-websocket-star": "~0.10.2", "ncp": "^2.0.0", "qs": "^6.5.2", diff --git a/src/cli/commands/name/publish.js b/src/cli/commands/name/publish.js index 8452f23248..b84227a5d6 100644 --- a/src/cli/commands/name/publish.js +++ b/src/cli/commands/name/publish.js @@ -1,6 +1,6 @@ 'use 
strict' -const print = require('../../utils').print +const { print } = require('../../utils') module.exports = { command: 'publish ', @@ -11,21 +11,25 @@ module.exports = { resolve: { alias: 'r', describe: 'Resolve given path before publishing. Default: true.', - default: true + default: true, + type: 'boolean' }, lifetime: { alias: 't', describe: 'Time duration that the record will be valid for. Default: 24h.', - default: '24h' + default: '24h', + type: 'string' }, key: { alias: 'k', describe: 'Name of the key to be used, as listed by "ipfs key list -l". Default: self.', - default: 'self' + default: 'self', + type: 'string' }, ttl: { describe: 'Time duration this record should be cached for (caution: experimental).', - default: '' + default: '', + type: 'string' } }, @@ -33,14 +37,8 @@ module.exports = { argv.resolve((async () => { // yargs-promise adds resolve/reject properties to argv // resolve should use the alias as resolve will always be overwritten to a function - let resolve = true - - if (argv.r === false || argv.r === 'false') { - resolve = false - } - const opts = { - resolve, + resolve: argv.r, lifetime: argv.lifetime, key: argv.key, ttl: argv.ttl diff --git a/src/cli/commands/name/resolve.js b/src/cli/commands/name/resolve.js index e8871a1d17..ca1dd219a3 100644 --- a/src/cli/commands/name/resolve.js +++ b/src/cli/commands/name/resolve.js @@ -18,7 +18,7 @@ module.exports = { type: 'boolean', alias: 'r', describe: 'Resolve until the result is not an IPNS name. Default: false.', - default: false + default: true } }, @@ -32,11 +32,7 @@ module.exports = { const ipfs = await argv.getIpfs() const result = await ipfs.name.resolve(argv.name, opts) - if (result && result.path) { - print(result.path) - } else { - print(result) - } + print(result) })()) } } diff --git a/src/core/components/name.js b/src/core/components/name.js index 239d8ecc32..aee8dfba29 100644 --- a/src/core/components/name.js +++ b/src/core/components/name.js @@ -7,6 +7,9 @@ const parallel = require('async/parallel') const human = require('human-to-milliseconds') const crypto = require('libp2p-crypto') const errcode = require('err-code') +const mergeOptions = require('merge-options') +const mh = require('multihashes') +const isDomain = require('is-domain-name') const log = debug('ipfs:name') log.error = debug('ipfs:name:error') @@ -35,6 +38,28 @@ const keyLookup = (ipfsNode, kname, callback) => { }) } +const appendRemainder = (cb, remainder) => { + return (err, result) => { + if (err) { + return cb(err) + } + if (remainder.length) { + return cb(null, result + '/' + remainder.join('/')) + } + return cb(null, result) + } +} + +/** + * @typedef { import("../index") } IPFS + */ + +/** + * IPNS - Inter-Planetary Naming System + * + * @param {IPFS} self + * @returns {Object} + */ module.exports = function name (self) { return { /** @@ -125,22 +150,15 @@ module.exports = function name (self) { options = {} } - options = options || {} - const nocache = options.nocache && options.nocache.toString() === 'true' - const recursive = options.recursive && options.recursive.toString() === 'true' + options = mergeOptions({ + nocache: false, + recursive: true + }, options) const offline = self._options.offline - if (!self.isOnline() && !offline) { - const errMsg = utils.OFFLINE_ERROR - - log.error(errMsg) - return callback(errcode(errMsg, 'OFFLINE_ERROR')) - } - // TODO: params related logic should be in the core implementation - - if (offline && nocache) { + if (offline && options.nocache) { const error = 'cannot specify both offline 
and nocache' log.error(error) @@ -156,12 +174,28 @@ module.exports = function name (self) { name = `/ipns/${name}` } - const resolveOptions = { - nocache, - recursive + const [ namespace, hash, ...remainder ] = name.slice(1).split('/') + try { + mh.fromB58String(hash) + } catch (err) { + // lets check if we have a domain ex. /ipns/ipfs.io and resolve with dns + if (isDomain(hash)) { + return self.dns(hash, options, appendRemainder(callback, remainder)) + } + + log.error(err) + return callback(errcode(new Error('Invalid IPNS name.'), 'ERR_IPNS_INVALID_NAME')) } - self._ipns.resolve(name, resolveOptions, callback) + // multihash is valid lets resolve with IPNS + // IPNS resolve needs a online daemon + if (!self.isOnline() && !offline) { + const errMsg = utils.OFFLINE_ERROR + + log.error(errMsg) + return callback(errcode(errMsg, 'OFFLINE_ERROR')) + } + self._ipns.resolve(`/${namespace}/${hash}`, options, appendRemainder(callback, remainder)) }), pubsub: namePubsub(self) } diff --git a/src/core/index.js b/src/core/index.js index d457cb6149..3245a6cb9a 100644 --- a/src/core/index.js +++ b/src/core/index.js @@ -26,7 +26,16 @@ const defaultRepo = require('./runtime/repo-nodejs') const preload = require('./preload') const mfsPreload = require('./mfs-preload') const ipldOptions = require('./runtime/ipld-nodejs') - +/** + * @typedef { import("./ipns/index") } IPNS + */ + +/** + * + * + * @class IPFS + * @extends {EventEmitter} + */ class IPFS extends EventEmitter { constructor (options) { super() @@ -76,6 +85,7 @@ class IPFS extends EventEmitter { this._ipld = new Ipld(ipldOptions(this._blockService, this._options.ipld, this.log)) this._preload = preload(this) this._mfsPreload = mfsPreload(this) + /** @type {IPNS} */ this._ipns = undefined // eslint-disable-next-line no-console this._print = this._options.silent ? this.log : console.log diff --git a/src/core/ipns/index.js b/src/core/ipns/index.js index a064ece4d5..d7405ca3e7 100644 --- a/src/core/ipns/index.js +++ b/src/core/ipns/index.js @@ -2,7 +2,6 @@ const { createFromPrivKey } = require('peer-id') const series = require('async/series') -const Receptacle = require('receptacle') const errcode = require('err-code') const debug = require('debug') @@ -13,7 +12,8 @@ const IpnsPublisher = require('./publisher') const IpnsRepublisher = require('./republisher') const IpnsResolver = require('./resolver') const path = require('./path') - +const { normalizePath } = require('../utils') +const TLRU = require('../../utils/tlru') const defaultRecordTtl = 60 * 1000 class IPNS { @@ -21,12 +21,19 @@ class IPNS { this.publisher = new IpnsPublisher(routing, datastore) this.republisher = new IpnsRepublisher(this.publisher, datastore, peerInfo, keychain, options) this.resolver = new IpnsResolver(routing) - this.cache = new Receptacle({ max: 1000 }) // Create an LRU cache with max 1000 items + this.cache = new TLRU(1000) this.routing = routing } // Publish - publish (privKey, value, lifetime, callback) { + publish (privKey, value, lifetime = IpnsPublisher.defaultRecordLifetime, callback) { + try { + value = normalizePath(value) + } catch (err) { + log.error(err) + return callback(err) + } + series([ (cb) => createFromPrivKey(privKey.bytes, cb), (cb) => this.publisher.publishWithEOL(privKey, value, lifetime, cb) @@ -38,12 +45,12 @@ class IPNS { log(`IPNS value ${value} was published correctly`) - // Add to cache + // // Add to cache const id = results[0].toB58String() const ttEol = parseFloat(lifetime) const ttl = (ttEol < defaultRecordTtl) ? 
ttEol : defaultRecordTtl - this.cache.set(id, value, { ttl: ttl }) + this.cache.set(id, value, ttl) log(`IPNS value ${value} was cached correctly`) @@ -77,9 +84,7 @@ class IPNS { const result = this.cache.get(id) if (result) { - return callback(null, { - path: result - }) + return callback(null, result) } } @@ -91,18 +96,17 @@ class IPNS { log(`IPNS record from ${name} was resolved correctly`) - callback(null, { - path: result - }) + callback(null, result) }) } // Initialize keyspace // sets the ipns record for the given key to point to an empty directory initializeKeyspace (privKey, value, callback) { - this.publisher.publish(privKey, value, callback) + this.publish(privKey, value, IpnsPublisher.defaultRecordLifetime, callback) } } -exports = module.exports = IPNS -exports.path = path +IPNS.path = path + +module.exports = IPNS diff --git a/src/core/ipns/publisher.js b/src/core/ipns/publisher.js index 4a3fb1a4de..1137d006cb 100644 --- a/src/core/ipns/publisher.js +++ b/src/core/ipns/publisher.js @@ -11,7 +11,7 @@ log.error = debug('ipfs:ipns:publisher:error') const ipns = require('ipns') -const defaultRecordTtl = 60 * 60 * 1000 +const defaultRecordLifetime = 60 * 60 * 1000 // IpnsPublisher is capable of publishing and resolving names to the IPFS routing system. class IpnsPublisher { @@ -46,7 +46,7 @@ class IpnsPublisher { // Accepts a keypair, as well as a value (ipfsPath), and publishes it out to the routing system publish (privKey, value, callback) { - this.publishWithEOL(privKey, value, defaultRecordTtl, callback) + this.publishWithEOL(privKey, value, defaultRecordLifetime, callback) } _putRecordToRouting (record, peerId, callback) { @@ -269,4 +269,5 @@ class IpnsPublisher { } } +IpnsPublisher.defaultRecordLifetime = defaultRecordLifetime exports = module.exports = IpnsPublisher diff --git a/src/http/api/resources/name.js b/src/http/api/resources/name.js index 7c8206444c..02b1eb7326 100644 --- a/src/http/api/resources/name.js +++ b/src/http/api/resources/name.js @@ -7,7 +7,7 @@ exports.resolve = { query: Joi.object().keys({ arg: Joi.string(), nocache: Joi.boolean().default(false), - recursive: Joi.boolean().default(false) + recursive: Joi.boolean().default(true) }).unknown() }, async handler (request, h) { @@ -17,7 +17,7 @@ exports.resolve = { const res = await ipfs.name.resolve(arg, request.query) return h.response({ - Path: res.path + Path: res }) } } diff --git a/src/utils/tlru.js b/src/utils/tlru.js new file mode 100644 index 0000000000..ba3b26e8c6 --- /dev/null +++ b/src/utils/tlru.js @@ -0,0 +1,87 @@ +'use strict' +const hashlru = require('hashlru') + +/** + * Time Aware Least Recent Used Cache + * @see https://arxiv.org/pdf/1801.00390 + * @todo move this to ipfs-utils or it's own package + * + * @class TLRU + */ +class TLRU { + /** + * Creates an instance of TLRU. 
+ * + * @param {number} maxSize + * @memberof TLRU + */ + constructor (maxSize) { + this.lru = hashlru(maxSize) + } + + /** + * Get the value from the a key + * + * @param {string} key + * @returns {any} + * @memberof TLRU + */ + get (key) { + const value = this.lru.get(key) + if (value) { + if ((value.expire) && (value.expire < Date.now())) { + this.lru.remove(key) + return undefined + } + } + return value.value + } + + /** + * Set a key value pair + * + * @param {string} key + * @param {any} value + * @param {number} ttl - in miliseconds + * @memberof TLRU + */ + set (key, value, ttl) { + this.lru.set(key, { value, expire: Date.now() + ttl }) + } + + /** + * Find if the cache has the key + * + * @param {string} key + * @returns {boolean} + * @memberof TLRU + */ + has (key) { + const value = this.get(key) + if (value) { + return true + } + return false + } + + /** + * Remove key + * + * @param {string} key + * @memberof TLRU + */ + remove (key) { + this.lru.remove(key) + } + + /** + * Clears the cache + * + * @memberof TLRU + */ + clear () { + this.lru.clear() + } +} + +module.exports = TLRU diff --git a/test/cli/name-pubsub.js b/test/cli/name-pubsub.js index fef64295de..cfeb5cf140 100644 --- a/test/cli/name-pubsub.js +++ b/test/cli/name-pubsub.js @@ -14,9 +14,6 @@ const ipfsExec = require('../utils/ipfs-exec') const DaemonFactory = require('ipfsd-ctl') const df = DaemonFactory.create({ type: 'js' }) -const checkAll = (bits) => string => bits.every(bit => string.includes(bit)) -const emptyDirCid = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' - const spawnDaemon = (callback) => { df.spawn({ exec: path.resolve(`${__dirname}/../../src/cli/bin.js`), @@ -165,53 +162,6 @@ describe('name-pubsub', () => { }) }) }) - - describe('pubsub records', () => { - let cidAdded - - before(function (done) { - this.timeout(50 * 1000) - ipfsA(`add ${path.resolve(`${__dirname}/../../src/init-files/init-docs/readme`)}`) - .then((out) => { - cidAdded = out.split(' ')[1] - done() - }) - }) - - it('should publish the received record to the subscriber', function () { - this.timeout(80 * 1000) - - return ipfsB(`name resolve ${nodeBId.id}`) - .then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([emptyDirCid])) // Empty dir received (subscribed) - - return ipfsA(`name resolve ${nodeBId.id}`) - }) - .catch((err) => { - expect(err).to.exist() // Not available (subscribed now) - - return ipfsB(`name publish ${cidAdded}`) - }) - .then((res) => { - // published to IpfsB and published through pubsub to ipfsa - expect(res).to.exist() - expect(res).to.satisfy(checkAll([cidAdded, nodeBId.id])) - - return ipfsB(`name resolve ${nodeBId.id}`) - }) - .then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([cidAdded])) - - return ipfsA(`name resolve ${nodeBId.id}`) - }) - .then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([cidAdded])) // value propagated to node B - }) - }) - }) }) describe('disabled', () => { diff --git a/test/cli/name.js b/test/cli/name.js index 1c0638503f..984d777da8 100644 --- a/test/cli/name.js +++ b/test/cli/name.js @@ -1,305 +1,59 @@ -/* eslint max-nested-callbacks: ["error", 6] */ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) -const path = require('path') -const hat = require('hat') -const ipfsExec = require('../utils/ipfs-exec') - -const DaemonFactory = require('ipfsd-ctl') -const df = DaemonFactory.create({ type: 'js' }) - 
-const checkAll = (bits) => string => bits.every(bit => string.includes(bit)) +const sinon = require('sinon') +const YargsPromise = require('yargs-promise') +const clearModule = require('clear-module') describe('name', () => { - describe('working locally', () => { - const passPhrase = hat() - const pass = '--pass ' + passPhrase - const name = 'test-key-' + hat() - - let ipfs - let ipfsd - - let cidAdded - let nodeId - let keyId - - before(function (done) { - this.timeout(80 * 1000) - - df.spawn({ - exec: path.resolve(`${__dirname}/../../src/cli/bin.js`), - config: { - Bootstrap: [] - }, - args: ['--pass', passPhrase, '--offline'], - initOptions: { bits: 512 } - }, (err, _ipfsd) => { - expect(err).to.not.exist() - - ipfsd = _ipfsd - ipfs = ipfsExec(_ipfsd.repoPath) - - ipfs(`${pass} key gen ${name} --type rsa --size 2048`) - .then((out) => { - expect(out).to.include(name) - keyId = out.split(' ')[1] - - return ipfs('id') - }) - .then((res) => { - const id = JSON.parse(res) - expect(id).to.have.property('id') - nodeId = id.id - - return ipfs(`add ${path.resolve(`${__dirname}/../../src/init-files/init-docs/readme`)}`) - }) - .then((out) => { - cidAdded = out.split(' ')[1] - done() - }) - }) - }) - - after(function (done) { - if (ipfsd) { - ipfsd.stop(() => done()) - } else { - done() - } - }) - - it('should publish correctly when the file was already added', function () { - this.timeout(70 * 1000) - - return ipfs(`name publish ${cidAdded}`).then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([cidAdded, nodeId])) - }) - }) - - it('should publish and resolve an entry with the default options', function () { - this.timeout(70 * 1000) - - return ipfs(`name publish ${cidAdded}`) - .then((res) => { - expect(res).to.exist() - - return ipfs('name resolve') - }) - .then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([cidAdded])) - }) - }) - - it('should publish correctly when the file was not added but resolve is disabled', function () { - this.timeout(70 * 1000) - - const notAddedCid = 'QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU' - - return ipfs(`name publish ${notAddedCid} --resolve false`).then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([notAddedCid, nodeId])) - }) - }) - - it('should not get the entry correctly if its validity time expired', function () { - this.timeout(70 * 1000) - - return ipfs(`name publish ${cidAdded} --lifetime 10ns`) - .then((res) => { - expect(res).to.exist() - - setTimeout(function () { - return ipfs('name resolve') - .then((res) => { - expect(res).to.not.exist() - }) - .catch((err) => { - expect(err).to.exist() - }) - }, 1) - }) - }) - - it('should publish correctly when a new key is used', function () { - this.timeout(70 * 1000) - - return ipfs(`name publish ${cidAdded} --key ${name}`).then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([cidAdded, keyId])) - }) - }) - - it('should return the immediate pointing record, unless using the recursive parameter', function () { - this.timeout(90 * 1000) - - return ipfs(`name publish ${cidAdded}`) - .then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([cidAdded, nodeId])) - - return ipfs(`name publish /ipns/${nodeId} --key ${name}`) - }) - .then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([nodeId, keyId])) - - return ipfs(`name resolve ${keyId}`) - }) - .then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([nodeId])) - }) - }) - - it('should go recursively until 
finding an ipfs hash', function () { - this.timeout(90 * 1000) - - return ipfs(`name publish ${cidAdded}`) - .then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([cidAdded, nodeId])) - - return ipfs(`name publish /ipns/${nodeId} --key ${name}`) - }) - .then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([nodeId, keyId])) - - return ipfs(`name resolve ${keyId} --recursive`) - }) - .then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([cidAdded])) - }) - }) + let cli + let cliUtils + beforeEach(() => { + cliUtils = require('../../src/cli/utils') + cli = new YargsPromise(require('../../src/cli/parser')) + }) + afterEach(() => { + sinon.restore() + // TODO: the lines below shouldn't be necessary, cli needs refactor to simplify testability + // Force the next require to not use require cache + clearModule('../../src/cli/utils') + clearModule('../../src/cli/parser') }) - describe('using dht', () => { - const passPhrase = hat() - const pass = '--pass ' + passPhrase - const name = 'test-key-' + hat() - - let ipfs - let ipfsd - - let cidAdded - let nodeId - let keyId - - before(function (done) { - this.timeout(80 * 1000) - - df.spawn({ - exec: path.resolve(`${__dirname}/../../src/cli/bin.js`), - config: { - Bootstrap: [], - Discovery: { - MDNS: { - Enabled: false - }, - webRTCStar: { - Enabled: false - } - } - }, - args: ['--pass', passPhrase], - initOptions: { bits: 512 } - }, (err, _ipfsd) => { - expect(err).to.not.exist() - - ipfsd = _ipfsd - ipfs = ipfsExec(_ipfsd.repoPath) - - ipfs(`${pass} key gen ${name} --type rsa --size 2048`) - .then((out) => { - expect(out).to.include(name) - keyId = out.split(' ')[1] - - return ipfs('id') - }) - .then((res) => { - const id = JSON.parse(res) - expect(id).to.have.property('id') - nodeId = id.id - - return ipfs(`add ${path.resolve(`${__dirname}/../../src/init-files/init-docs/readme`)}`) - }) - .then((out) => { - cidAdded = out.split(' ')[1] - done() - }) - }) - }) - - after(function (done) { - if (ipfsd) { - ipfsd.stop(() => done()) - } else { - done() - } - }) - - it('should publish and resolve an entry with the default options', function () { - this.timeout(70 * 1000) - - return ipfs(`name publish ${cidAdded}`) - .then((res) => { - expect(res).to.exist() - - return ipfs('name resolve') - }) - .then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([cidAdded])) - }) - }) - - it('should not get the entry correctly if its validity time expired', function () { - this.timeout(70 * 1000) - - return ipfs(`name publish ${cidAdded} --lifetime 10ns`) - .then((res) => { - expect(res).to.exist() - - setTimeout(function () { - return ipfs('name resolve') - .then((res) => { - expect(res).to.not.exist() - }) - .catch((err) => { - expect(err).to.exist() - }) - }, 1) - }) - }) + it('resolve', async () => { + const resolveFake = sinon.fake() - it('should return the immediate pointing record, unless using the recursive parameter', function () { - this.timeout(90 * 1000) + sinon + .stub(cliUtils, 'getIPFS') + .callsArgWith(1, null, { name: { resolve: resolveFake } }) - return ipfs(`name publish ${cidAdded}`) - .then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([cidAdded, nodeId])) + // TODO: the lines below shouldn't be necessary, cli needs refactor to simplify testability + // Force the next require to not use require cache + clearModule('../../src/cli/commands/name/resolve.js') - return ipfs(`name publish /ipns/${nodeId} --key ${name}`) - }) - .then((res) => { 
- expect(res).to.exist() - expect(res).to.satisfy(checkAll([nodeId, keyId])) + await cli.parse(`name resolve test`) + sinon.assert.calledWith(resolveFake, 'test', { nocache: false, recursive: true }) + }) - return ipfs(`name resolve ${keyId}`) - }) - .then((res) => { - expect(res).to.exist() - expect(res).to.satisfy(checkAll([nodeId])) - }) + it('publish', async () => { + const publishFake = sinon.fake.returns({ name: 'name', value: 'value' }) + const printSpy = sinon.spy(cliUtils, 'print') + + sinon + .stub(cliUtils, 'getIPFS') + .callsArgWith(1, null, { name: { publish: publishFake } }) + + // TODO: the lines below shouldn't be necessary, cli needs refactor to simplify testability + // Force the next require to not use require cache + clearModule('../../src/cli/commands/name/publish.js') + + await cli.parse(`name publish test --silent`) + sinon.assert.calledWith(printSpy, 'Published to name: value') + sinon.assert.calledWith(publishFake, 'test', { + resolve: true, + lifetime: '24h', + key: 'self', + ttl: '' }) }) }) diff --git a/test/core/files-regular-utils.js b/test/core/files-regular-utils.js index 3b86020d3f..380ff3f3e8 100644 --- a/test/core/files-regular-utils.js +++ b/test/core/files-regular-utils.js @@ -12,47 +12,38 @@ describe('files-regular/utils', () => { describe('parseChunkerString', () => { it('handles an empty string', () => { const options = utils.parseChunkerString('') - expect(options).to.have.property('chunker').to.equal('fixed') + expect(options.chunker).to.equal('fixed') }) it('handles a null chunker string', () => { const options = utils.parseChunkerString(null) - expect(options).to.have.property('chunker').to.equal('fixed') + expect(options.chunker).to.equal('fixed') }) it('parses a fixed size string', () => { const options = utils.parseChunkerString('size-512') - expect(options).to.have.property('chunker').to.equal('fixed') - expect(options) - .to.have.property('chunkerOptions') - .to.have.property('maxChunkSize') - .to.equal(512) + expect(options.chunker).to.equal('fixed') + expect(options.chunkerOptions.maxChunkSize).to.equal(512) }) it('parses a rabin string without size', () => { const options = utils.parseChunkerString('rabin') - expect(options).to.have.property('chunker').to.equal('rabin') - expect(options) - .to.have.property('chunkerOptions') - .to.have.property('avgChunkSize') + expect(options.chunker).to.equal('rabin') + expect(options.chunkerOptions.avgChunkSize).to.equal(262144) }) it('parses a rabin string with only avg size', () => { const options = utils.parseChunkerString('rabin-512') - expect(options).to.have.property('chunker').to.equal('rabin') - expect(options) - .to.have.property('chunkerOptions') - .to.have.property('avgChunkSize') - .to.equal(512) + expect(options.chunker).to.equal('rabin') + expect(options.chunkerOptions.avgChunkSize).to.equal(512) }) it('parses a rabin string with min, avg, and max', () => { const options = utils.parseChunkerString('rabin-42-92-184') - expect(options).to.have.property('chunker').to.equal('rabin') - expect(options).to.have.property('chunkerOptions') - expect(options.chunkerOptions).to.have.property('minChunkSize').to.equal(42) - expect(options.chunkerOptions).to.have.property('avgChunkSize').to.equal(92) - expect(options.chunkerOptions).to.have.property('maxChunkSize').to.equal(184) + expect(options.chunker).to.equal('rabin') + expect(options.chunkerOptions.minChunkSize).to.equal(42) + expect(options.chunkerOptions.avgChunkSize).to.equal(92) + expect(options.chunkerOptions.maxChunkSize).to.equal(184) }) 
it('throws an error for unsupported chunker type', () => { diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js index df572c19e2..a091dd0ea4 100644 --- a/test/core/interface.spec.js +++ b/test/core/interface.spec.js @@ -4,26 +4,10 @@ const tests = require('interface-ipfs-core') const CommonFactory = require('../utils/interface-common-factory') const isNode = require('detect-node') -const dnsFetchStub = require('../utils/dns-fetch-stub') describe('interface-ipfs-core tests', function () { this.timeout(20 * 1000) - // ipfs.dns in the browser calls out to https://ipfs.io/api/v0/dns. - // The following code stubs self.fetch to return a static CID for calls - // to https://ipfs.io/api/v0/dns?arg=ipfs.io. - if (!isNode) { - const fetch = self.fetch - - before(() => { - self.fetch = dnsFetchStub(fetch) - }) - - after(() => { - self.fetch = fetch - }) - } - const defaultCommonFactory = CommonFactory.create() tests.bitswap(defaultCommonFactory, { skip: !isNode }) @@ -107,29 +91,13 @@ describe('interface-ipfs-core tests', function () { { name: 'should resolve IPNS link recursively', reason: 'TODO: IPNS resolve not yet implemented https://github.com/ipfs/js-ipfs/issues/1918' - }, - { - name: 'should recursively resolve ipfs.io', - reason: 'TODO: ipfs.io dnslink=/ipns/website.ipfs.io & IPNS resolve not yet implemented https://github.com/ipfs/js-ipfs/issues/1918' } ] }) tests.name(CommonFactory.create({ spawnOptions: { - args: ['--pass ipfs-is-awesome-software', '--offline'], - initOptions: { bits: 512 }, - config: { - Bootstrap: [], - Discovery: { - MDNS: { - Enabled: false - }, - webRTCStar: { - Enabled: false - } - } - } + args: ['--pass ipfs-is-awesome-software', '--offline'] } })) diff --git a/test/core/kad-dht.node.js b/test/core/kad-dht.node.js index d07be33f20..d06392abbc 100644 --- a/test/core/kad-dht.node.js +++ b/test/core/kad-dht.node.js @@ -32,7 +32,7 @@ function createNode (callback) { }, callback) } -describe('kad-dht is routing content and peers correctly', () => { +describe.skip('kad-dht is routing content and peers correctly', () => { let nodeA let nodeB let nodeC diff --git a/test/core/name-pubsub.js b/test/core/name-pubsub.js index 575fb54fad..884d0a55b7 100644 --- a/test/core/name-pubsub.js +++ b/test/core/name-pubsub.js @@ -18,6 +18,7 @@ const isNode = require('detect-node') const ipns = require('ipns') const IPFS = require('../../src') const waitFor = require('../utils/wait-for') +const delay = require('interface-ipfs-core/src/utils/delay') const DaemonFactory = require('ipfsd-ctl') const df = DaemonFactory.create({ type: 'proc' }) @@ -34,6 +35,7 @@ describe('name-pubsub', function () { let nodeA let nodeB let idA + let idB const createNode = (callback) => { df.spawn({ @@ -73,6 +75,7 @@ describe('name-pubsub', function () { expect(err).to.not.exist() idA = ids[0] + idB = ids[1] nodeA.swarm.connect(ids[1].addresses[0], done) }) }) @@ -130,10 +133,36 @@ describe('name-pubsub', function () { expect(err).to.not.exist() expect(res).to.exist() - expect(res[5]).to.exist() - expect(res[5].path).to.equal(ipfsRef) + expect(res[5]).to.equal(ipfsRef) done() }) }) }) + + it('should self resolve, publish and then resolve correctly', async function () { + this.timeout(6000) + const emptyDirCid = '/ipfs/QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' + const [{ path }] = await nodeA.add(Buffer.from('pubsub records')) + + const resolvesEmpty = await nodeB.name.resolve(idB.id) + expect(resolvesEmpty).to.be.eq(emptyDirCid) + + try { + await nodeA.name.resolve(idB.id) + } catch 
(error) { + expect(error).to.exist() + } + + const publish = await nodeB.name.publish(path) + expect(publish).to.be.eql({ + name: idB.id, + value: `/ipfs/${path}` + }) + + const resolveB = await nodeB.name.resolve(idB.id) + expect(resolveB).to.be.eq(`/ipfs/${path}`) + await delay(5000) + const resolveA = await nodeA.name.resolve(idB.id) + expect(resolveA).to.be.eq(`/ipfs/${path}`) + }) }) diff --git a/test/core/name.js b/test/core/name.spec.js similarity index 80% rename from test/core/name.js rename to test/core/name.spec.js index 99b8257251..1191eba711 100644 --- a/test/core/name.js +++ b/test/core/name.spec.js @@ -9,11 +9,9 @@ const expect = chai.expect chai.use(dirtyChai) const sinon = require('sinon') -const fs = require('fs') const parallel = require('async/parallel') const series = require('async/series') -const isNode = require('detect-node') const IPFS = require('../../src') const ipnsPath = require('../../src/core/ipns/path') const ipnsRouting = require('../../src/core/ipns/routing/config') @@ -34,120 +32,13 @@ const publishAndResolve = (publisher, resolver, ipfsRef, publishOpts, nodeId, re expect(err).to.not.exist() expect(res[0]).to.exist() expect(res[1]).to.exist() - expect(res[1].path).to.equal(ipfsRef) + expect(res[1]).to.equal(ipfsRef) callback() }) } describe('name', function () { - if (!isNode) { - return - } - - describe('working locally', function () { - let node - let nodeId - let ipfsd - - before(function (done) { - this.timeout(50 * 1000) - df.spawn({ - exec: IPFS, - args: [`--pass ${hat()}`, '--offline'], - config: { Bootstrap: [] } - }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - node = _ipfsd.api - - node.id().then((res) => { - expect(res.id).to.exist() - - nodeId = res.id - done() - }) - }) - }) - - after((done) => ipfsd.stop(done)) - - it('should publish and then resolve correctly with the default options', function (done) { - this.timeout(50 * 1000) - - publishAndResolve(node, node, ipfsRef, { resolve: false }, nodeId, {}, done) - }) - - it('should publish correctly with the lifetime option and resolve', function (done) { - this.timeout(50 * 1000) - - const publishOpts = { - resolve: false, - lifetime: '2h' - } - - publishAndResolve(node, node, ipfsRef, publishOpts, nodeId, {}, done) - }) - - it('should not get the entry correctly if its validity time expired', function (done) { - this.timeout(50 * 1000) - - node.name.publish(ipfsRef, { resolve: false, lifetime: '1ms' }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - - setTimeout(function () { - node.name.resolve(nodeId, (err) => { - expect(err).to.exist() - done() - }) - }, 2) - }) - }) - - it('should recursively resolve to an IPFS hash', function (done) { - this.timeout(90 * 1000) - const keyName = hat() - - node.key.gen(keyName, { type: 'rsa', size: 2048 }, function (err, key) { - expect(err).to.not.exist() - series([ - (cb) => node.name.publish(ipfsRef, { resolve: false }, cb), - (cb) => node.name.publish(`/ipns/${nodeId}`, { resolve: false, key: keyName }, cb), - (cb) => node.name.resolve(key.id, { recursive: true }, cb) - ], (err, res) => { - expect(err).to.not.exist() - expect(res[2]).to.exist() - expect(res[2].path).to.equal(ipfsRef) - done() - }) - }) - }) - - it('should not recursively resolve to an IPFS hash if the option recursive is not provided', function (done) { - this.timeout(90 * 1000) - const keyName = hat() - - node.key.gen(keyName, { type: 'rsa', size: 2048 }, function (err, key) { - expect(err).to.not.exist() - series([ - (cb) => 
node.name.publish(ipfsRef, { resolve: false }, cb), - (cb) => node.name.publish(`/ipns/${nodeId}`, { resolve: false, key: keyName }, cb), - (cb) => node.name.resolve(key.id, cb) - ], (err, res) => { - expect(err).to.not.exist() - expect(res[2]).to.exist() - expect(res[2].path).to.equal(`/ipns/${nodeId}`) - done() - }) - }) - }) - }) - describe('republisher', function () { - if (!isNode) { - return - } - let node let ipfsd @@ -277,7 +168,7 @@ describe('name', function () { ], (err, res) => { expect(err).to.not.exist() expect(res[2]).to.exist() - expect(res[2].path).to.equal(ipfsRef) + expect(res[2]).to.equal(ipfsRef) done() }) }) @@ -285,10 +176,6 @@ describe('name', function () { }) describe('errors', function () { - if (!isNode) { - return - } - let node let nodeId let ipfsd @@ -460,20 +347,15 @@ describe('name', function () { }) describe('ipns.path', function () { - const path = 'test/fixtures/planets/solar-system.md' const fixture = { - path, - content: fs.readFileSync(path) + path: 'test/fixtures/planets/solar-system.md', + content: Buffer.from('ipns.path') } let node let ipfsd let nodeId - if (!isNode) { - return - } - before(function (done) { this.timeout(40 * 1000) df.spawn({ diff --git a/test/core/node.js b/test/core/node.js index b9297cce42..35f185f1d3 100644 --- a/test/core/node.js +++ b/test/core/node.js @@ -2,7 +2,6 @@ require('./circuit-relay') require('./files-regular-utils') -require('./name') require('./name-pubsub') require('./key-exchange') require('./pin') diff --git a/test/http-api/interface.js b/test/http-api/interface.js index 43c6ff6a12..d1607dfb71 100644 --- a/test/http-api/interface.js +++ b/test/http-api/interface.js @@ -96,6 +96,30 @@ describe('interface-ipfs-core over ipfs-http-client tests', () => { ] }) + tests.name(CommonFactory.create({ + spawnOptions: { + args: ['--pass ipfs-is-awesome-software', '--offline'] + } + })) + + tests.namePubsub(CommonFactory.create({ + spawnOptions: { + args: ['--enable-namesys-pubsub'], + initOptions: { bits: 1024 }, + config: { + Bootstrap: [], + Discovery: { + MDNS: { + Enabled: false + }, + webRTCStar: { + Enabled: false + } + } + } + } + })) + tests.object(defaultCommonFactory) tests.pin(defaultCommonFactory) diff --git a/test/utils/dns-fetch-stub.js b/test/utils/dns-fetch-stub.js deleted file mode 100644 index a1e24a122c..0000000000 --- a/test/utils/dns-fetch-stub.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict' - -// Create a fetch stub with a fall through to the provided fetch implementation -// if the URL doesn't match https://ipfs.io/api/v0/dns?arg=ipfs.io. 
-module.exports = (fetch) => { - return function () { - if (arguments[0].startsWith('https://ipfs.io/api/v0/dns?arg=ipfs.io')) { - return Promise.resolve({ - json: () => Promise.resolve({ - Path: '/ipfs/QmYNQJoKGNHTpPxCBPh9KkDpaExgd2duMa3aF6ytMpHdao' - }) - }) - } - return fetch.apply(this, arguments) - } -} From 03dcab94cf98fe9dad1dad82f3a30ece50bf23ec Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Thu, 18 Apr 2019 16:53:49 +0100 Subject: [PATCH 10/19] feat: add support to ipns resolve /ipns/ fixes: #1918 --- package.json | 1 + test/core/name.spec.js | 39 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 40 insertions(+) diff --git a/package.json b/package.json index ad8413336d..d156c7ae1c 100644 --- a/package.json +++ b/package.json @@ -113,6 +113,7 @@ "ipns": "~0.5.2", "is-domain-name": "^1.0.1", "is-ipfs": "~0.6.1", + "is-domain-name": "^1.0.1", "is-pull-stream": "~0.0.0", "is-stream": "^2.0.0", "iso-url": "~0.4.6", diff --git a/test/core/name.spec.js b/test/core/name.spec.js index 1191eba711..9cdb4309dd 100644 --- a/test/core/name.spec.js +++ b/test/core/name.spec.js @@ -500,4 +500,43 @@ describe('name', function () { done() }) }) + + describe('working with dns', function () { + let node + let ipfsd + + before(function (done) { + df.spawn({ + exec: IPFS, + args: [`--pass ${hat()}`, '--offline'], + config: { Bootstrap: [] } + }, (err, _ipfsd) => { + expect(err).to.not.exist() + ipfsd = _ipfsd + node = _ipfsd.api + done() + }) + }) + + after((done) => ipfsd.stop(done)) + + it('should resolve ipfs.io', async () => { + const r = await node.name.resolve('ipfs.io', { recursive: false }) + return expect(r).to.eq('/ipns/website.ipfs.io') + }) + + it('should resolve /ipns/ipfs.io recursive', async () => { + const r = await node.name.resolve('ipfs.io', { recursive: true }) + + return expect(r.substr(0, 6)).to.eql('/ipfs/') + }) + + it('should fail to resolve /ipns/ipfs.a', async () => { + try { + await node.name.resolve('ipfs.a') + } catch (err) { + expect(err).to.exist() + } + }) + }) }) From 29058161cdd963459381b7d66acae647e8d39a53 Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Mon, 22 Apr 2019 14:32:38 +0100 Subject: [PATCH 11/19] fix: moves tests to interface-core and removes path from output `ipns name resolve` dns tests moved to interface-core resolve call now return a string as per documention --- test/core/name.spec.js | 39 --------------------------------------- 1 file changed, 39 deletions(-) diff --git a/test/core/name.spec.js b/test/core/name.spec.js index 9cdb4309dd..1191eba711 100644 --- a/test/core/name.spec.js +++ b/test/core/name.spec.js @@ -500,43 +500,4 @@ describe('name', function () { done() }) }) - - describe('working with dns', function () { - let node - let ipfsd - - before(function (done) { - df.spawn({ - exec: IPFS, - args: [`--pass ${hat()}`, '--offline'], - config: { Bootstrap: [] } - }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - node = _ipfsd.api - done() - }) - }) - - after((done) => ipfsd.stop(done)) - - it('should resolve ipfs.io', async () => { - const r = await node.name.resolve('ipfs.io', { recursive: false }) - return expect(r).to.eq('/ipns/website.ipfs.io') - }) - - it('should resolve /ipns/ipfs.io recursive', async () => { - const r = await node.name.resolve('ipfs.io', { recursive: true }) - - return expect(r.substr(0, 6)).to.eql('/ipfs/') - }) - - it('should fail to resolve /ipns/ipfs.a', async () => { - try { - await node.name.resolve('ipfs.a') - } catch (err) { - expect(err).to.exist() - } - }) - }) }) From 
2f5bc695b929bf313ae26ddaaaf2d5472f981ce9 Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Tue, 18 Jun 2019 17:02:53 +0100 Subject: [PATCH 12/19] fix: update deps and fix tests --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index d156c7ae1c..1746692fb6 100644 --- a/package.json +++ b/package.json @@ -76,6 +76,7 @@ "cid-tool": "~0.3.0", "cids": "~0.7.1", "class-is": "^1.1.0", + "clear-module": "^3.2.0", "datastore-core": "~0.6.0", "datastore-pubsub": "~0.1.1", "debug": "^4.1.0", @@ -113,7 +114,6 @@ "ipns": "~0.5.2", "is-domain-name": "^1.0.1", "is-ipfs": "~0.6.1", - "is-domain-name": "^1.0.1", "is-pull-stream": "~0.0.0", "is-stream": "^2.0.0", "iso-url": "~0.4.6", From f85d018de9e5fa5088869b30bfd63750061eb2aa Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Mon, 17 Jun 2019 15:48:50 +0100 Subject: [PATCH 13/19] WIP dns datastore --- package.json | 4 + src/core/ipns/routing/config.js | 1 + src/core/ipns/routing/dns-datastore.js | 162 +++++++++++++++++++++++++ 3 files changed, 167 insertions(+) create mode 100644 src/core/ipns/routing/dns-datastore.js diff --git a/package.json b/package.json index 1746692fb6..7f8b30eac9 100644 --- a/package.json +++ b/package.json @@ -81,6 +81,8 @@ "datastore-pubsub": "~0.1.1", "debug": "^4.1.0", "dlv": "^1.1.3", + "dns-packet": "^5.2.1", + "dns-socket": "^4.2.0", "err-code": "^1.1.2", "file-type": "^12.0.0", "fnv1a": "^1.0.1", @@ -120,6 +122,8 @@ "just-flatten-it": "^2.1.0", "just-safe-set": "^2.1.0", "kind-of": "^6.0.2", + "ky": "^0.11.1", + "ky-universal": "^0.2.1", "libp2p": "~0.25.3", "libp2p-bootstrap": "~0.9.3", "libp2p-crypto": "~0.16.0", diff --git a/src/core/ipns/routing/config.js b/src/core/ipns/routing/config.js index 09f2f3aedd..ef3569c56f 100644 --- a/src/core/ipns/routing/config.js +++ b/src/core/ipns/routing/config.js @@ -5,6 +5,7 @@ const get = require('dlv') const PubsubDatastore = require('./pubsub-datastore') const OfflineDatastore = require('./offline-datastore') +const DnsDatastore = require('./dns-datastore') module.exports = (ipfs) => { // Setup online routing for IPNS with a tiered routing composed by a DHT and a Pubsub router (if properly enabled) diff --git a/src/core/ipns/routing/dns-datastore.js b/src/core/ipns/routing/dns-datastore.js new file mode 100644 index 0000000000..ae605b355b --- /dev/null +++ b/src/core/ipns/routing/dns-datastore.js @@ -0,0 +1,162 @@ +'use strict' + +const ipns = require('ipns') +const ky = require('ky-universal').default +const { Record } = require('libp2p-record') +const dnsSocket = require('dns-socket') +const dnsPacket = require('dns-packet') +const Cid = require('cids') + +const errcode = require('err-code') +const debug = require('debug') +const log = debug('ipfs:ipns:workers-api-datastore') +log.error = debug('ipfs:ipns:workers-api:error') + +// DNS datastore aims to mimic the same encoding as routing when storing records +// to the local datastore +class DNSDataStore { + constructor (repo) { + this._repo = repo + } + + /** + * Put a value to the local datastore indexed by the received key properly encoded. + * @param {Buffer} key identifier of the value. + * @param {Buffer} value value to be stored. 
+ * @param {function(Error)} callback + * @returns {void} + */ + put (key, value, callback) { + if (key.toString().startsWith('/pk/')) { + return callback() + } + + if (!Buffer.isBuffer(key)) { + return callback(errcode(new Error('DNS datastore key must be a buffer'), 'ERR_INVALID_KEY')) + } + + if (!Buffer.isBuffer(value)) { + return callback(errcode(new Error(`DNS datastore value must be a buffer`), 'ERR_INVALID_VALUE')) + } + + const cid = new Cid(key.slice(ipns.namespaceLength)) + + // http://localhost:8000 + // https://ipns.dev + ky.put( + 'https://ipns.dev', + { + json: { + key: cid.toV1().toString(), + record: value.toString('base64') + } + }) + .then(data => { + setImmediate(() => callback()) + }) + .catch(err => { + setImmediate(() => callback(err)) + }) + } + + /** + * Get a value from the local datastore indexed by the received key properly encoded. + * @param {Buffer} key identifier of the value to be obtained. + * @param {function(Error, Buffer)} callback + * @returns {void} + */ + get (key, callback) { + if (!Buffer.isBuffer(key)) { + return callback(errcode(new Error(`DNS datastore key must be a buffer`), 'ERR_INVALID_KEY')) + } + + dohBinary(key, callback) + } +} + +exports = module.exports = DNSDataStore +function dns (key, callback) { + const socket = dnsSocket() + const cid = new Cid(key.slice(ipns.namespaceLength)) + + socket.query({ + questions: [{ + type: 'TXT', + name: `${cid.toV1().toString()}.dns.ipns.dev` + }] + }, 5300, 'localhost', (err, res) => { + console.log(err, res) // prints the A record for google.com + }) +} +function dohBinary (key, callback) { + const cid = new Cid(key.slice(ipns.namespaceLength)) + const buf = dnsPacket.encode({ + type: 'query', + id: getRandomInt(1, 65534), + flags: dnsPacket.RECURSION_DESIRED, + questions: [{ + type: 'TXT', + name: `${cid.toV1().toString()}.dns.ipns.dev` + }] + }) + // https://dns.google.com/experimental + // https://cloudflare-dns.com/dns-query + // https://mozilla.cloudflare-dns.com/dns-query + ky + .get('https://cloudflare-dns.com/dns-query', { + searchParams: { + dns: buf.toString('base64') + }, + headers: { + accept: 'application/dns-message' + } + }) + .arrayBuffer() + .then(data => { + data = dnsPacket.decode(Buffer.from(data)) + console.log('TCL: dohBinary -> data', data) + + if (!data && data.answers.length < 1) { + throw errcode(new Error('Record not found'), 'ERR_NOT_FOUND') + } + console.log('TCL: doh -> data', data) + const record = new Record(key, Buffer.from(Buffer.concat(data.answers[0].data).toString(), 'base64')) + setImmediate(() => callback(null, record.value)) + }) + .catch(err => { + setImmediate(() => callback(err)) + }) +} + +function dohJson (key, callback) { + const cid = new Cid(key.slice(ipns.namespaceLength)) + + // https://dns.google.com/resolve + // https://cloudflare-dns.com/dns-query + // https://mozilla.cloudflare-dns.com/dns-query + ky + .get('https://cloudflare-dns.com/dns-query', { + searchParams: { + name: `${cid.toV1().toString()}.dns.ipns.dev`, + type: 'TXT', + cd: 1, + ad: 0, + ct: 'application/dns-json' + } + }) + .json() + .then(data => { + if (!data && !data.Answer && data.Answer.length < 1) { + throw errcode(new Error('Record not found'), 'ERR_NOT_FOUND') + } + const record = new Record(key, Buffer.from(data.Answer[0].data, 'base64')) + setImmediate(() => callback(null, record.value)) + }) + .catch(err => { + setImmediate(() => callback(err)) + }) +} + +function getRandomInt (min, max) { + return Math.floor(Math.random() * (max - min + 1)) + min +} From 
a980b52bf12a7f2cc28fc056a67ac79652e66e8f Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Wed, 19 Jun 2019 21:18:16 +0100 Subject: [PATCH 14/19] fix: remove check --- src/core/ipns/routing/dns-datastore.js | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/core/ipns/routing/dns-datastore.js b/src/core/ipns/routing/dns-datastore.js index ae605b355b..0545913683 100644 --- a/src/core/ipns/routing/dns-datastore.js +++ b/src/core/ipns/routing/dns-datastore.js @@ -27,10 +27,6 @@ class DNSDataStore { * @returns {void} */ put (key, value, callback) { - if (key.toString().startsWith('/pk/')) { - return callback() - } - if (!Buffer.isBuffer(key)) { return callback(errcode(new Error('DNS datastore key must be a buffer'), 'ERR_INVALID_KEY')) } From 4920e034a302ec2aef07ee52f6c59d36e92bcb12 Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Fri, 21 Jun 2019 15:28:12 +0100 Subject: [PATCH 15/19] feat: add fast ipns datastores --- src/core/config.js | 2 +- src/core/ipns/publisher.js | 15 +-- src/core/ipns/routing/config.js | 38 ++++--- src/core/ipns/routing/dns-datastore.js | 126 ++++----------------- src/core/ipns/routing/mdns-datastore.js | 72 ++++++++++++ src/core/ipns/routing/utils.js | 63 +++++++++++ src/core/ipns/routing/workers-datastore.js | 101 +++++++++++++++++ 7 files changed, 287 insertions(+), 130 deletions(-) create mode 100644 src/core/ipns/routing/mdns-datastore.js create mode 100644 src/core/ipns/routing/workers-datastore.js diff --git a/src/core/config.js b/src/core/config.js index 2fb66bb558..41c6501829 100644 --- a/src/core/config.js +++ b/src/core/config.js @@ -48,7 +48,7 @@ const configSchema = s({ ipnsPubsub: 'boolean?', sharding: 'boolean?', dht: 'boolean?' - })), + }, { dht: false, pubsub: false, ipnsPubsub: false, sharding: false })), connectionManager: 'object?', config: optional(s({ API: 'object?', diff --git a/src/core/ipns/publisher.js b/src/core/ipns/publisher.js index 1137d006cb..12171e3b42 100644 --- a/src/core/ipns/publisher.js +++ b/src/core/ipns/publisher.js @@ -79,7 +79,6 @@ class IpnsPublisher { (cb) => this._publishPublicKey(keys.routingPubKey, publicKey, peerId, cb) ], (err) => { if (err) { - log.error(err) return callback(err) } @@ -108,13 +107,10 @@ class IpnsPublisher { // Add record to routing (buffer key) this._routing.put(key.toBuffer(), entryData, (err, res) => { if (err) { - const errMsg = `ipns record for ${key.toString('base64')} could not be stored in the routing` - - log.error(errMsg) - return callback(errcode(new Error(errMsg), 'ERR_PUTTING_TO_ROUTING')) + return callback(errcode(new Error(`ipns record for /ipns/${peerId.toB58String()} could not be stored in the routing`), 'ERR_PUTTING_TO_ROUTING')) } - log(`ipns record for ${key.toString('base64')} was stored in the routing`) + log(`ipns record for /ipns/${peerId.toB58String()} was stored in the routing`) callback(null, res) }) } @@ -137,13 +133,10 @@ class IpnsPublisher { // Add public key to routing (buffer key) this._routing.put(key.toBuffer(), publicKey.bytes, (err, res) => { if (err) { - const errMsg = `public key for ${key.toString('base64')} could not be stored in the routing` - - log.error(errMsg) - return callback(errcode(new Error(errMsg), 'ERR_PUTTING_TO_ROUTING')) + return callback(errcode(new Error(`public key for /ipns/${peerId.toB58String()} could not be stored in the routing`), 'ERR_PUTTING_TO_ROUTING')) } - log(`public key for ${key.toString('base64')} was stored in the routing`) + log(`public key for /ipns/${peerId.toB58String()} was stored in the routing`) callback(null, res) }) } 
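Note: the log messages above now print /ipns/<peerId> because the experimental datastores added in this patch address records by a DNS-friendly label rather than the raw routing-key buffer. The label is produced by the keyToBase32 helper introduced further down in routing/utils.js: strip the /ipns/ namespace prefix and re-encode the remaining peer-ID multihash as a CIDv1 string, which the cids release pinned in package.json stringifies as base32 (hence the helper's name). A minimal sketch of that conversion, reusing the ipns and cids packages this patch already depends on; the function name and the usage comments are illustrative and not part of the diff:

    'use strict'

    const ipns = require('ipns')
    const Cid = require('cids')

    // Strip the '/ipns/' namespace prefix from a routing key and
    // re-encode the remaining peer-ID multihash as a CIDv1 string.
    // CIDv1 stringifies to base32 here, so the result is usable as
    // a DNS subdomain label (lower case, no padding).
    function routingKeyToLabel (routingKey) {
      const cid = new Cid(routingKey.slice(ipns.namespaceLength))
      return cid.toV1().toString()
    }

    // Assumed usage, mirroring publisher.js above:
    //   const keys = ipns.getIdKeys(peerId.toBytes())
    //   const label = routingKeyToLabel(keys.routingKey.toBuffer())
    //   // the label is then queried as `${label}.dns.ipns.dev`
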
diff --git a/src/core/ipns/routing/config.js b/src/core/ipns/routing/config.js index ef3569c56f..0b8c30b945 100644 --- a/src/core/ipns/routing/config.js +++ b/src/core/ipns/routing/config.js @@ -1,35 +1,41 @@ 'use strict' const { TieredDatastore } = require('datastore-core') -const get = require('dlv') - const PubsubDatastore = require('./pubsub-datastore') const OfflineDatastore = require('./offline-datastore') const DnsDatastore = require('./dns-datastore') +const MDnsDatastore = require('./mdns-datastore') +const WorkersDatastore = require('./workers-datastore') +/** + * @typedef { import("../../index") } IPFS + */ +/** + * IPNS routing config + * + * @param {IPFS} ipfs + * @returns {function} + */ module.exports = (ipfs) => { // Setup online routing for IPNS with a tiered routing composed by a DHT and a Pubsub router (if properly enabled) const ipnsStores = [] - // Add IPNS pubsub if enabled - let pubsubDs - if (get(ipfs._options, 'EXPERIMENTAL.ipnsPubsub', false)) { - const pubsub = ipfs.libp2p.pubsub - const localDatastore = ipfs._repo.datastore - const peerId = ipfs._peerInfo.id - - pubsubDs = new PubsubDatastore(pubsub, localDatastore, peerId) - ipnsStores.push(pubsubDs) + // // Add IPNS pubsub if enabled + if (ipfs._options.EXPERIMENTAL.ipnsPubsub) { + ipnsStores.push(new PubsubDatastore(ipfs.libp2p.pubsub, ipfs._repo.datastore, ipfs._peerInfo.id)) } - // DHT should not be added as routing if we are offline or it is disabled - if (get(ipfs._options, 'offline') || !get(ipfs._options, 'libp2p.config.dht.enabled', false)) { - const offlineDatastore = new OfflineDatastore(ipfs._repo) - ipnsStores.push(offlineDatastore) - } else { + // Add DHT if we are online + if (ipfs.isOnline()) { ipnsStores.push(ipfs.libp2p.dht) + } else { + ipnsStores.push(new OfflineDatastore(ipfs._repo)) } + ipnsStores.push(new MDnsDatastore()) + ipnsStores.push(new DnsDatastore()) + ipnsStores.push(new WorkersDatastore()) + // Create ipns routing with a set of datastores return new TieredDatastore(ipnsStores) } diff --git a/src/core/ipns/routing/dns-datastore.js b/src/core/ipns/routing/dns-datastore.js index 0545913683..4956603d10 100644 --- a/src/core/ipns/routing/dns-datastore.js +++ b/src/core/ipns/routing/dns-datastore.js @@ -1,16 +1,12 @@ 'use strict' -const ipns = require('ipns') const ky = require('ky-universal').default -const { Record } = require('libp2p-record') -const dnsSocket = require('dns-socket') -const dnsPacket = require('dns-packet') -const Cid = require('cids') - const errcode = require('err-code') const debug = require('debug') -const log = debug('ipfs:ipns:workers-api-datastore') -log.error = debug('ipfs:ipns:workers-api:error') +const { dohBinary, keyToBase32 } = require('./utils') + +const log = debug('ipfs:ipns:dns-datastore') +log.error = debug('ipfs:ipns:dns-datastore:error') // DNS datastore aims to mimic the same encoding as routing when storing records // to the local datastore @@ -27,30 +23,39 @@ class DNSDataStore { * @returns {void} */ put (key, value, callback) { + if (key.toString().startsWith('/pk/')) { + return callback() + } if (!Buffer.isBuffer(key)) { return callback(errcode(new Error('DNS datastore key must be a buffer'), 'ERR_INVALID_KEY')) } - if (!Buffer.isBuffer(value)) { return callback(errcode(new Error(`DNS datastore value must be a buffer`), 'ERR_INVALID_VALUE')) } - const cid = new Cid(key.slice(ipns.namespaceLength)) + let keyStr + try { + keyStr = keyToBase32(key) + } catch (err) { + log.error(err) + return callback(err) + } - // http://localhost:8000 - // 
https://ipns.dev ky.put( 'https://ipns.dev', { json: { - key: cid.toV1().toString(), - record: value.toString('base64') + key: keyStr, + record: value.toString('base64'), + subdomain: true } }) .then(data => { + log(`publish key: ${keyStr}`) setImmediate(() => callback()) }) .catch(err => { + log.error(err) setImmediate(() => callback(err)) }) } @@ -65,94 +70,11 @@ class DNSDataStore { if (!Buffer.isBuffer(key)) { return callback(errcode(new Error(`DNS datastore key must be a buffer`), 'ERR_INVALID_KEY')) } - - dohBinary(key, callback) + // https://dns.google.com/experimental + // https://cloudflare-dns.com/dns-query + // https://mozilla.cloudflare-dns.com/dns-query + dohBinary('https://cloudflare-dns.com/dns-query', 'dns.ipns.dev', key, callback) } } -exports = module.exports = DNSDataStore -function dns (key, callback) { - const socket = dnsSocket() - const cid = new Cid(key.slice(ipns.namespaceLength)) - - socket.query({ - questions: [{ - type: 'TXT', - name: `${cid.toV1().toString()}.dns.ipns.dev` - }] - }, 5300, 'localhost', (err, res) => { - console.log(err, res) // prints the A record for google.com - }) -} -function dohBinary (key, callback) { - const cid = new Cid(key.slice(ipns.namespaceLength)) - const buf = dnsPacket.encode({ - type: 'query', - id: getRandomInt(1, 65534), - flags: dnsPacket.RECURSION_DESIRED, - questions: [{ - type: 'TXT', - name: `${cid.toV1().toString()}.dns.ipns.dev` - }] - }) - // https://dns.google.com/experimental - // https://cloudflare-dns.com/dns-query - // https://mozilla.cloudflare-dns.com/dns-query - ky - .get('https://cloudflare-dns.com/dns-query', { - searchParams: { - dns: buf.toString('base64') - }, - headers: { - accept: 'application/dns-message' - } - }) - .arrayBuffer() - .then(data => { - data = dnsPacket.decode(Buffer.from(data)) - console.log('TCL: dohBinary -> data', data) - - if (!data && data.answers.length < 1) { - throw errcode(new Error('Record not found'), 'ERR_NOT_FOUND') - } - console.log('TCL: doh -> data', data) - const record = new Record(key, Buffer.from(Buffer.concat(data.answers[0].data).toString(), 'base64')) - setImmediate(() => callback(null, record.value)) - }) - .catch(err => { - setImmediate(() => callback(err)) - }) -} - -function dohJson (key, callback) { - const cid = new Cid(key.slice(ipns.namespaceLength)) - - // https://dns.google.com/resolve - // https://cloudflare-dns.com/dns-query - // https://mozilla.cloudflare-dns.com/dns-query - ky - .get('https://cloudflare-dns.com/dns-query', { - searchParams: { - name: `${cid.toV1().toString()}.dns.ipns.dev`, - type: 'TXT', - cd: 1, - ad: 0, - ct: 'application/dns-json' - } - }) - .json() - .then(data => { - if (!data && !data.Answer && data.Answer.length < 1) { - throw errcode(new Error('Record not found'), 'ERR_NOT_FOUND') - } - const record = new Record(key, Buffer.from(data.Answer[0].data, 'base64')) - setImmediate(() => callback(null, record.value)) - }) - .catch(err => { - setImmediate(() => callback(err)) - }) -} - -function getRandomInt (min, max) { - return Math.floor(Math.random() * (max - min + 1)) + min -} +module.exports = DNSDataStore diff --git a/src/core/ipns/routing/mdns-datastore.js b/src/core/ipns/routing/mdns-datastore.js new file mode 100644 index 0000000000..cf0d8cfb92 --- /dev/null +++ b/src/core/ipns/routing/mdns-datastore.js @@ -0,0 +1,72 @@ +'use strict' + +const ky = require('ky-universal').default +const errcode = require('err-code') +const debug = require('debug') +const { dohBinary, keyToBase32 } = require('./utils') + +const log = 
debug('ipfs:ipns:mdns-datastore') +log.error = debug('ipfs:ipns:mdns-datastore:error') + +// DNS datastore aims to mimic the same encoding as routing when storing records +// to the local datastore +class MDNSDataStore { + /** + * Put a value to the local datastore indexed by the received key properly encoded. + * @param {Buffer} key identifier of the value. + * @param {Buffer} value value to be stored. + * @param {function(Error)} callback + * @returns {void} + */ + put (key, value, callback) { + if (key.toString().startsWith('/pk/')) { + return callback() + } + if (!Buffer.isBuffer(key)) { + return callback(errcode(new Error('MDNS datastore key must be a buffer'), 'ERR_INVALID_KEY')) + } + if (!Buffer.isBuffer(value)) { + return callback(errcode(new Error(`MDNS datastore value must be a buffer`), 'ERR_INVALID_VALUE')) + } + + let keyStr + try { + keyStr = keyToBase32(key) + } catch (err) { + log.error(err) + return callback(err) + } + ky.put( + 'http://ipns.local:8000', + { + json: { + key: keyStr, + record: value.toString('base64') + } + }) + .then(data => { + log(`publish key: ${keyStr}`) + setImmediate(() => callback()) + }) + .catch(err => { + log.error(err) + setImmediate(() => callback(err)) + }) + } + + /** + * Get a value from the local datastore indexed by the received key properly encoded. + * @param {Buffer} key identifier of the value to be obtained. + * @param {function(Error, Buffer)} callback + * @returns {void} + */ + get (key, callback) { + if (!Buffer.isBuffer(key)) { + return callback(errcode(new Error(`MDNS datastore key must be a buffer`), 'ERR_INVALID_KEY')) + } + + dohBinary('http://ipns.local:8000/dns-query', 'ipns.local', key, callback) + } +} + +module.exports = MDNSDataStore diff --git a/src/core/ipns/routing/utils.js b/src/core/ipns/routing/utils.js index 0d960618ce..b82d1efbb3 100644 --- a/src/core/ipns/routing/utils.js +++ b/src/core/ipns/routing/utils.js @@ -2,8 +2,71 @@ const multibase = require('multibase') const ipns = require('ipns') +const { Record } = require('libp2p-record') +const dnsPacket = require('dns-packet') +const Cid = require('cids') +const errcode = require('err-code') +const debug = require('debug') +const ky = require('ky-universal').default +const log = debug('ipfs:ipns:doh') +log.error = debug('ipfs:ipns:doh:error') + +function dohBinary (url, domain, key, callback) { + const start = Date.now() + let keyStr + let buf + try { + keyStr = keyToBase32(key) + buf = dnsPacket.encode({ + type: 'query', + questions: [{ + type: 'TXT', + name: `${keyStr}.${domain}` + }] + }) + } catch (err) { + log.error(err) + return callback(err) + } + ky + .get(url, { + searchParams: { + dns: buf.toString('base64') + }, + headers: { + accept: 'application/dns-message' + } + }) + .arrayBuffer() + .then(data => { + data = dnsPacket.decode(Buffer.from(data)) + + if (!data && data.answers.length < 1) { + throw errcode(new Error('Record not found'), 'ERR_NOT_FOUND') + } + const record = new Record(key, Buffer.from(Buffer.concat(data.answers[0].data).toString(), 'base64')) + log(`${domain} time: ${(Date.now() - start)}ms`) + setImmediate(() => callback(null, record.value)) + }) + .catch(err => { + setImmediate(() => callback(err)) + }) +} + +/** + * Libp2p Key to base32 encoded string + * + * @param {Buffer} key + * @returns {string} + */ +function keyToBase32 (key) { + const cid = new Cid(key.slice(ipns.namespaceLength)) + return cid.toV1().toString() +} module.exports = { + dohBinary, + keyToBase32, encodeBase32: (buf) => { const m = multibase.encode('base32', 
buf).slice(1) // slice off multibase codec diff --git a/src/core/ipns/routing/workers-datastore.js b/src/core/ipns/routing/workers-datastore.js new file mode 100644 index 0000000000..d5e5b0808e --- /dev/null +++ b/src/core/ipns/routing/workers-datastore.js @@ -0,0 +1,101 @@ +'use strict' + +const ky = require('ky-universal').default +const errcode = require('err-code') +const debug = require('debug') +const { Record } = require('libp2p-record') +const { keyToBase32 } = require('./utils') + +const log = debug('ipfs:ipns:workers-datastore') +log.error = debug('ipfs:ipns:workers-datastore:error') + +// Workers datastore aims to mimic the same encoding as routing when storing records +// to the local datastore +class WorkersDataStore { + /** + * Put a value to the local datastore indexed by the received key properly encoded. + * @param {Buffer} key identifier of the value. + * @param {Buffer} value value to be stored. + * @param {function(Error)} callback + * @returns {void} + */ + put (key, value, callback) { + if (key.toString().startsWith('/pk/')) { + return callback() + } + if (!Buffer.isBuffer(key)) { + return callback(errcode(new Error('Workers datastore key must be a buffer'), 'ERR_INVALID_KEY')) + } + if (!Buffer.isBuffer(value)) { + return callback(errcode(new Error(`Workers datastore value must be a buffer`), 'ERR_INVALID_VALUE')) + } + + let keyStr + try { + keyStr = keyToBase32(key) + } catch (err) { + log.error(err) + return callback(err) + } + ky.put( + 'https://workers.ipns.dev', + { + json: { + key: keyStr, + value: value.toString('base64') + } + }) + .text() + .then(data => { + log(`publish key: ${keyStr}`) + setImmediate(() => callback()) + }) + .catch(err => { + log.error(err) + setImmediate(() => callback(err)) + }) + } + + /** + * Get a value from the local datastore indexed by the received key properly encoded. + * @param {Buffer} key identifier of the value to be obtained. 
+ * @param {function(Error, Buffer)} callback + * @returns {void} + */ + get (key, callback) { + const start = Date.now() + + if (!Buffer.isBuffer(key)) { + return callback(errcode(new Error(`Workers datastore key must be a buffer`), 'ERR_INVALID_KEY')) + } + + let keyStr + try { + keyStr = keyToBase32(key) + } catch (err) { + log.error(err) + return callback(err) + } + + ky + .get('https://workers.ipns.dev', { + searchParams: { + key: keyStr + } + }) + .text() + .then(data => { + const record = new Record(key, Buffer.from(data, 'base64')) + log(`resolved: ${keyStr}`) + log(`time: ${(Date.now() - start)}ms`) + + setImmediate(() => callback(null, record.value)) + }) + .catch(err => { + log.error(err) + setImmediate(() => callback(err)) + }) + } +} + +module.exports = WorkersDataStore From debcff9e6b374a2cffb65432f6c602cfec685b36 Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Wed, 26 Jun 2019 17:31:56 +0200 Subject: [PATCH 16/19] fix: make it experimental --- package.json | 2 + src/cli/commands/daemon.js | 14 +++ src/core/config.js | 7 +- src/core/ipns/routing/config.js | 17 +-- src/core/ipns/routing/dns-datastore.js | 80 -------------- .../routing/experimental/dns-datastore.js | 76 +++++++++++++ .../{ => experimental}/mdns-datastore.js | 51 ++++----- .../routing/experimental/tiered-datastore.js | 45 ++++++++ src/core/ipns/routing/experimental/utils.js | 61 +++++++++++ .../routing/experimental/workers-datastore.js | 81 ++++++++++++++ src/core/ipns/routing/utils.js | 63 ----------- src/core/ipns/routing/workers-datastore.js | 101 ------------------ 12 files changed, 317 insertions(+), 281 deletions(-) delete mode 100644 src/core/ipns/routing/dns-datastore.js create mode 100644 src/core/ipns/routing/experimental/dns-datastore.js rename src/core/ipns/routing/{ => experimental}/mdns-datastore.js (50%) create mode 100644 src/core/ipns/routing/experimental/tiered-datastore.js create mode 100644 src/core/ipns/routing/experimental/utils.js create mode 100644 src/core/ipns/routing/experimental/workers-datastore.js delete mode 100644 src/core/ipns/routing/workers-datastore.js diff --git a/package.json b/package.json index 7f8b30eac9..5cda27c2fa 100644 --- a/package.json +++ b/package.json @@ -148,6 +148,8 @@ "multihashes": "~0.4.14", "multihashing-async": "~0.6.0", "node-fetch": "^2.3.0", + "p-any": "^2.1.0", + "p-settle": "^3.1.0", "peer-book": "~0.9.0", "peer-id": "~0.12.0", "peer-info": "~0.15.0", diff --git a/src/cli/commands/daemon.js b/src/cli/commands/daemon.js index 570cd56a86..3ec0bb98b8 100644 --- a/src/cli/commands/daemon.js +++ b/src/cli/commands/daemon.js @@ -20,6 +20,16 @@ module.exports = { type: 'boolean', default: false }) + .option('enable-ipns-experiment', { + type: 'boolean', + default: false, + desc: 'EXPERIMENTAL ipns routers.' + }) + .option('experimental-ipns-alias', { + type: 'string', + default: '', + desc: 'EXPERIMENTAL human readable alias for ipns subdomains.' + }) .option('offline', { type: 'boolean', desc: 'Run offline. 
Do not connect to the rest of the network but provide local API.', @@ -54,9 +64,13 @@ module.exports = { preload: { enabled: argv.enablePreload }, EXPERIMENTAL: { pubsub: argv.enablePubsubExperiment, + ipnsDNS: argv.enableIpnsExperiment, ipnsPubsub: argv.enableNamesysPubsub, dht: argv.enableDhtExperiment, sharding: argv.enableShardingExperiment + }, + ipns: { + alias: argv.experimentalIpnsAlias } }) diff --git a/src/core/config.js b/src/core/config.js index 41c6501829..4cc4eb99a2 100644 --- a/src/core/config.js +++ b/src/core/config.js @@ -46,9 +46,10 @@ const configSchema = s({ EXPERIMENTAL: optional(s({ pubsub: 'boolean?', ipnsPubsub: 'boolean?', + ipnsDNS: 'boolean?', sharding: 'boolean?', dht: 'boolean?' - }, { dht: false, pubsub: false, ipnsPubsub: false, sharding: false })), + }, { dht: false, pubsub: false, ipnsDNS: false, ipnsPubsub: false, sharding: false })), connectionManager: 'object?', config: optional(s({ API: 'object?', @@ -69,9 +70,11 @@ const configSchema = s({ Bootstrap: optional(s(['multiaddr-ipfs'])) })), ipld: 'object?', + ipns: 'object?', libp2p: optional(union(['function', 'object'])) // libp2p validates this }, { - repoOwner: true + repoOwner: true, + ipns: {} }) const validate = (opts) => { diff --git a/src/core/ipns/routing/config.js b/src/core/ipns/routing/config.js index 0b8c30b945..227b78be25 100644 --- a/src/core/ipns/routing/config.js +++ b/src/core/ipns/routing/config.js @@ -3,9 +3,10 @@ const { TieredDatastore } = require('datastore-core') const PubsubDatastore = require('./pubsub-datastore') const OfflineDatastore = require('./offline-datastore') -const DnsDatastore = require('./dns-datastore') -const MDnsDatastore = require('./mdns-datastore') -const WorkersDatastore = require('./workers-datastore') +const DnsDatastore = require('./experimental/dns-datastore') +const MDnsDatastore = require('./experimental/mdns-datastore') +const WorkersDatastore = require('./experimental/workers-datastore') +const ExperimentalTieredDatastore = require('./experimental/tiered-datastore') /** * @typedef { import("../../index") } IPFS */ @@ -19,6 +20,12 @@ const WorkersDatastore = require('./workers-datastore') module.exports = (ipfs) => { // Setup online routing for IPNS with a tiered routing composed by a DHT and a Pubsub router (if properly enabled) const ipnsStores = [] + if (ipfs._options.EXPERIMENTAL.ipnsDNS) { + ipnsStores.push(new WorkersDatastore(ipfs._options.ipns)) + ipnsStores.push(new DnsDatastore(ipfs._options.ipns)) + ipnsStores.push(new MDnsDatastore(ipfs._options.ipns)) + return new ExperimentalTieredDatastore(ipnsStores) + } // // Add IPNS pubsub if enabled if (ipfs._options.EXPERIMENTAL.ipnsPubsub) { @@ -32,10 +39,6 @@ module.exports = (ipfs) => { ipnsStores.push(new OfflineDatastore(ipfs._repo)) } - ipnsStores.push(new MDnsDatastore()) - ipnsStores.push(new DnsDatastore()) - ipnsStores.push(new WorkersDatastore()) - // Create ipns routing with a set of datastores return new TieredDatastore(ipnsStores) } diff --git a/src/core/ipns/routing/dns-datastore.js b/src/core/ipns/routing/dns-datastore.js deleted file mode 100644 index 4956603d10..0000000000 --- a/src/core/ipns/routing/dns-datastore.js +++ /dev/null @@ -1,80 +0,0 @@ -'use strict' - -const ky = require('ky-universal').default -const errcode = require('err-code') -const debug = require('debug') -const { dohBinary, keyToBase32 } = require('./utils') - -const log = debug('ipfs:ipns:dns-datastore') -log.error = debug('ipfs:ipns:dns-datastore:error') - -// DNS datastore aims to mimic the same encoding as 
routing when storing records -// to the local datastore -class DNSDataStore { - constructor (repo) { - this._repo = repo - } - - /** - * Put a value to the local datastore indexed by the received key properly encoded. - * @param {Buffer} key identifier of the value. - * @param {Buffer} value value to be stored. - * @param {function(Error)} callback - * @returns {void} - */ - put (key, value, callback) { - if (key.toString().startsWith('/pk/')) { - return callback() - } - if (!Buffer.isBuffer(key)) { - return callback(errcode(new Error('DNS datastore key must be a buffer'), 'ERR_INVALID_KEY')) - } - if (!Buffer.isBuffer(value)) { - return callback(errcode(new Error(`DNS datastore value must be a buffer`), 'ERR_INVALID_VALUE')) - } - - let keyStr - try { - keyStr = keyToBase32(key) - } catch (err) { - log.error(err) - return callback(err) - } - - ky.put( - 'https://ipns.dev', - { - json: { - key: keyStr, - record: value.toString('base64'), - subdomain: true - } - }) - .then(data => { - log(`publish key: ${keyStr}`) - setImmediate(() => callback()) - }) - .catch(err => { - log.error(err) - setImmediate(() => callback(err)) - }) - } - - /** - * Get a value from the local datastore indexed by the received key properly encoded. - * @param {Buffer} key identifier of the value to be obtained. - * @param {function(Error, Buffer)} callback - * @returns {void} - */ - get (key, callback) { - if (!Buffer.isBuffer(key)) { - return callback(errcode(new Error(`DNS datastore key must be a buffer`), 'ERR_INVALID_KEY')) - } - // https://dns.google.com/experimental - // https://cloudflare-dns.com/dns-query - // https://mozilla.cloudflare-dns.com/dns-query - dohBinary('https://cloudflare-dns.com/dns-query', 'dns.ipns.dev', key, callback) - } -} - -module.exports = DNSDataStore diff --git a/src/core/ipns/routing/experimental/dns-datastore.js b/src/core/ipns/routing/experimental/dns-datastore.js new file mode 100644 index 0000000000..b633046643 --- /dev/null +++ b/src/core/ipns/routing/experimental/dns-datastore.js @@ -0,0 +1,76 @@ +/* eslint-disable no-console */ +'use strict' + +const ky = require('ky-universal').default +const errcode = require('err-code') +const debug = require('debug') +const { dohBinary, keyToBase32 } = require('./utils') + +const log = debug('ipfs:ipns:dns-datastore') +log.error = debug('ipfs:ipns:dns-datastore:error') + +class DNSDataStore { + constructor (options) { + this.options = options + } + + /** + * Put a key value pair into the datastore + * @param {Buffer} key identifier of the value. + * @param {Buffer} value value to be stored. + * @returns {Promise} + */ + async put (key, value) { + const start = Date.now() + if (key.toString().startsWith('/pk/')) { + return + } + if (!Buffer.isBuffer(key)) { + throw errcode(new Error('DNS datastore key must be a buffer'), 'ERR_INVALID_KEY') + } + if (!Buffer.isBuffer(value)) { + throw errcode(new Error(`DNS datastore value must be a buffer`), 'ERR_INVALID_VALUE') + } + + const keyStr = keyToBase32(key) + const data = await ky + .put( + 'https://ipns.dev', + { + json: { + key: keyStr, + record: value.toString('base64'), + subdomain: true, + alias: this.options.alias + } + } + ) + .json() + + console.log(` + DNS Store + Domain: ipns.dev + Key: ${keyStr} + Subdomain: ${data.subdomain} + Alias: ${data.alias} + Time: ${(Date.now() - start)}ms + `) + } + + /** + * Get a value from the local datastore indexed by the received key properly encoded. + * @param {Buffer} key identifier of the value to be obtained. 
+ * @returns {Promise} + */ + get (key) { + if (!Buffer.isBuffer(key)) { + throw errcode(new Error(`DNS datastore key must be a buffer`), 'ERR_INVALID_KEY') + } + // https://dns.google.com/experimental + // https://cloudflare-dns.com/dns-query + // https://mozilla.cloudflare-dns.com/dns-query + return dohBinary('https://cloudflare-dns.com/dns-query', 'dns.ipns.dev', key) + } +} + +module.exports = DNSDataStore diff --git a/src/core/ipns/routing/mdns-datastore.js b/src/core/ipns/routing/experimental/mdns-datastore.js similarity index 50% rename from src/core/ipns/routing/mdns-datastore.js rename to src/core/ipns/routing/experimental/mdns-datastore.js index cf0d8cfb92..80c78a0955 100644 --- a/src/core/ipns/routing/mdns-datastore.js +++ b/src/core/ipns/routing/experimental/mdns-datastore.js @@ -1,3 +1,4 @@ +/* eslint-disable no-console */ 'use strict' const ky = require('ky-universal').default @@ -11,32 +12,29 @@ log.error = debug('ipfs:ipns:mdns-datastore:error') // DNS datastore aims to mimic the same encoding as routing when storing records // to the local datastore class MDNSDataStore { + constructor (options) { + this.options = options + } /** - * Put a value to the local datastore indexed by the received key properly encoded. + * Put a key value pair into the datastore * @param {Buffer} key identifier of the value. * @param {Buffer} value value to be stored. - * @param {function(Error)} callback - * @returns {void} + * @returns {Promise} */ - put (key, value, callback) { + async put (key, value) { + const start = Date.now() if (key.toString().startsWith('/pk/')) { - return callback() + return } if (!Buffer.isBuffer(key)) { - return callback(errcode(new Error('MDNS datastore key must be a buffer'), 'ERR_INVALID_KEY')) + throw errcode(new Error('MDNS datastore key must be a buffer'), 'ERR_INVALID_KEY') } if (!Buffer.isBuffer(value)) { - return callback(errcode(new Error(`MDNS datastore value must be a buffer`), 'ERR_INVALID_VALUE')) + throw errcode(new Error(`MDNS datastore value must be a buffer`), 'ERR_INVALID_VALUE') } - let keyStr - try { - keyStr = keyToBase32(key) - } catch (err) { - log.error(err) - return callback(err) - } - ky.put( + const keyStr = keyToBase32(key) + await ky.put( 'http://ipns.local:8000', { json: { @@ -44,28 +42,25 @@ class MDNSDataStore { record: value.toString('base64') } }) - .then(data => { - log(`publish key: ${keyStr}`) - setImmediate(() => callback()) - }) - .catch(err => { - log.error(err) - setImmediate(() => callback(err)) - }) + console.log(` + Local Store + Domain: ipns.local + Key: ${keyStr} + Time: ${(Date.now() - start)}ms + `) } /** * Get a value from the local datastore indexed by the received key properly encoded. * @param {Buffer} key identifier of the value to be obtained. 
- * @param {function(Error, Buffer)} callback - * @returns {void} + * @returns {Promise} */ - get (key, callback) { + get (key) { if (!Buffer.isBuffer(key)) { - return callback(errcode(new Error(`MDNS datastore key must be a buffer`), 'ERR_INVALID_KEY')) + throw errcode(new Error(`MDNS datastore key must be a buffer`), 'ERR_INVALID_KEY') } - dohBinary('http://ipns.local:8000/dns-query', 'ipns.local', key, callback) + return dohBinary('http://ipns.local:8000/dns-query', 'ipns.local', key) } } diff --git a/src/core/ipns/routing/experimental/tiered-datastore.js b/src/core/ipns/routing/experimental/tiered-datastore.js new file mode 100644 index 0000000000..aa571ba1be --- /dev/null +++ b/src/core/ipns/routing/experimental/tiered-datastore.js @@ -0,0 +1,45 @@ +'use strict' + +const pany = require('p-any') +const pSettle = require('p-settle') +const debug = require('debug') +const Errors = require('interface-datastore').Errors + +const log = debug('ipfs:ipns:tiered-datastore') +log.error = debug('ipfs:ipns:tiered-datastore:error') + +class TieredDatastore { + constructor (stores) { + this.stores = stores.slice() + } + + put (key, value, callback) { + pSettle(this.stores.map(s => s.put(key, value))) + .then(results => { + let fulfilled = false + results.forEach(r => { + if (r.isFulfilled) { + fulfilled = true + } else { + log.error(r.reason) + } + }) + + if (fulfilled) { + return setImmediate(() => callback()) + } + setImmediate(() => callback(Errors.dbWriteFailedError())) + }) + } + + get (key, callback) { + pany(this.stores.map(s => s.get(key))) + .then(r => setImmediate(() => callback(null, r))) + .catch(err => { + log.error(err) + setImmediate(() => callback(Errors.notFoundError())) + }) + } +} + +module.exports = TieredDatastore diff --git a/src/core/ipns/routing/experimental/utils.js b/src/core/ipns/routing/experimental/utils.js new file mode 100644 index 0000000000..99e31a791b --- /dev/null +++ b/src/core/ipns/routing/experimental/utils.js @@ -0,0 +1,61 @@ +/* eslint-disable no-console */ +'use strict' + +const ipns = require('ipns') +const { Record } = require('libp2p-record') +const dnsPacket = require('dns-packet') +const Cid = require('cids') +const errcode = require('err-code') +const debug = require('debug') +const ky = require('ky-universal').default + +const log = debug('ipfs:ipns:doh') +log.error = debug('ipfs:ipns:doh:error') + +async function dohBinary (url, domain, key) { + const start = Date.now() + const keyStr = keyToBase32(key) + const buf = dnsPacket.encode({ + type: 'query', + questions: [{ + type: 'TXT', + name: `${keyStr}.${domain}` + }] + }) + + const result = await ky + .get(url, { + searchParams: { + dns: buf.toString('base64') + }, + headers: { + accept: 'application/dns-message' + } + }) + .arrayBuffer() + + const data = dnsPacket.decode(Buffer.from(result)) + if (!data && data.answers.length < 1) { + throw errcode(new Error('Record not found'), 'ERR_NOT_FOUND') + } + const record = new Record(key, Buffer.from(Buffer.concat(data.answers[0].data).toString(), 'base64')) + console.log(`Resolved ${keyStr}.${domain} in ${(Date.now() - start)}ms`) + + return record.value +} + +/** + * Libp2p Key to base32 encoded string + * + * @param {Buffer} key + * @returns {string} + */ +function keyToBase32 (key) { + const cid = new Cid(key.slice(ipns.namespaceLength)) + return cid.toV1().toString() +} + +module.exports = { + dohBinary, + keyToBase32 +} diff --git a/src/core/ipns/routing/experimental/workers-datastore.js b/src/core/ipns/routing/experimental/workers-datastore.js new 
file mode 100644 index 0000000000..b7bc382942 --- /dev/null +++ b/src/core/ipns/routing/experimental/workers-datastore.js @@ -0,0 +1,81 @@ +/* eslint-disable no-console */ +'use strict' + +const ky = require('ky-universal').default +const errcode = require('err-code') +const debug = require('debug') +const { Record } = require('libp2p-record') +const { keyToBase32 } = require('./utils') + +const log = debug('ipfs:ipns:workers-datastore') +log.error = debug('ipfs:ipns:workers-datastore:error') + +// Workers datastore aims to mimic the same encoding as routing when storing records +// to the local datastore +class WorkersDataStore { + /** + * Put a key value pair into the datastore + * @param {Buffer} key identifier of the value. + * @param {Buffer} value value to be stored. + * @returns {Promise} + */ + async put (key, value) { + const start = Date.now() + if (key.toString().startsWith('/pk/')) { + return + } + if (!Buffer.isBuffer(key)) { + throw errcode(new Error('Workers datastore key must be a buffer'), 'ERR_INVALID_KEY') + } + if (!Buffer.isBuffer(value)) { + throw errcode(new Error(`Workers datastore value must be a buffer`), 'ERR_INVALID_VALUE') + } + + const keyStr = keyToBase32(key) + await ky.put( + 'https://workers.ipns.dev', + { + json: { + key: keyStr, + value: value.toString('base64') + } + }) + + console.log(` + Workers Store + Domain: workers.ipns.dev + Key: ${keyStr} + Time: ${(Date.now() - start)}ms + `) + } + + /** + * Get a value from the local datastore indexed by the received key properly encoded. + * @param {Buffer} key identifier of the value to be obtained. + * @returns {Promise} + */ + async get (key) { + const start = Date.now() + + if (!Buffer.isBuffer(key)) { + throw errcode(new Error(`Workers datastore key must be a buffer`), 'ERR_INVALID_KEY') + } + + const keyStr = keyToBase32(key) + + const data = await ky + .get('https://workers.ipns.dev', { + searchParams: { + key: keyStr + } + }) + .text() + + const record = new Record(key, Buffer.from(data, 'base64')) + console.log(`Resolved ${keyStr} with workers in: ${(Date.now() - start)}ms`) + + return record.value + } +} + +module.exports = WorkersDataStore diff --git a/src/core/ipns/routing/utils.js b/src/core/ipns/routing/utils.js index b82d1efbb3..0d960618ce 100644 --- a/src/core/ipns/routing/utils.js +++ b/src/core/ipns/routing/utils.js @@ -2,71 +2,8 @@ const multibase = require('multibase') const ipns = require('ipns') -const { Record } = require('libp2p-record') -const dnsPacket = require('dns-packet') -const Cid = require('cids') -const errcode = require('err-code') -const debug = require('debug') -const ky = require('ky-universal').default -const log = debug('ipfs:ipns:doh') -log.error = debug('ipfs:ipns:doh:error') - -function dohBinary (url, domain, key, callback) { - const start = Date.now() - let keyStr - let buf - try { - keyStr = keyToBase32(key) - buf = dnsPacket.encode({ - type: 'query', - questions: [{ - type: 'TXT', - name: `${keyStr}.${domain}` - }] - }) - } catch (err) { - log.error(err) - return callback(err) - } - ky - .get(url, { - searchParams: { - dns: buf.toString('base64') - }, - headers: { - accept: 'application/dns-message' - } - }) - .arrayBuffer() - .then(data => { - data = dnsPacket.decode(Buffer.from(data)) - - if (!data && data.answers.length < 1) { - throw errcode(new Error('Record not found'), 'ERR_NOT_FOUND') - } - const record = new Record(key, Buffer.from(Buffer.concat(data.answers[0].data).toString(), 'base64')) - log(`${domain} time: ${(Date.now() - start)}ms`) - 
setImmediate(() => callback(null, record.value)) - }) - .catch(err => { - setImmediate(() => callback(err)) - }) -} - -/** - * Libp2p Key to base32 encoded string - * - * @param {Buffer} key - * @returns {string} - */ -function keyToBase32 (key) { - const cid = new Cid(key.slice(ipns.namespaceLength)) - return cid.toV1().toString() -} module.exports = { - dohBinary, - keyToBase32, encodeBase32: (buf) => { const m = multibase.encode('base32', buf).slice(1) // slice off multibase codec diff --git a/src/core/ipns/routing/workers-datastore.js b/src/core/ipns/routing/workers-datastore.js deleted file mode 100644 index d5e5b0808e..0000000000 --- a/src/core/ipns/routing/workers-datastore.js +++ /dev/null @@ -1,101 +0,0 @@ -'use strict' - -const ky = require('ky-universal').default -const errcode = require('err-code') -const debug = require('debug') -const { Record } = require('libp2p-record') -const { keyToBase32 } = require('./utils') - -const log = debug('ipfs:ipns:workers-datastore') -log.error = debug('ipfs:ipns:workers-datastore:error') - -// Workers datastore aims to mimic the same encoding as routing when storing records -// to the local datastore -class WorkersDataStore { - /** - * Put a value to the local datastore indexed by the received key properly encoded. - * @param {Buffer} key identifier of the value. - * @param {Buffer} value value to be stored. - * @param {function(Error)} callback - * @returns {void} - */ - put (key, value, callback) { - if (key.toString().startsWith('/pk/')) { - return callback() - } - if (!Buffer.isBuffer(key)) { - return callback(errcode(new Error('Workers datastore key must be a buffer'), 'ERR_INVALID_KEY')) - } - if (!Buffer.isBuffer(value)) { - return callback(errcode(new Error(`Workers datastore value must be a buffer`), 'ERR_INVALID_VALUE')) - } - - let keyStr - try { - keyStr = keyToBase32(key) - } catch (err) { - log.error(err) - return callback(err) - } - ky.put( - 'https://workers.ipns.dev', - { - json: { - key: keyStr, - value: value.toString('base64') - } - }) - .text() - .then(data => { - log(`publish key: ${keyStr}`) - setImmediate(() => callback()) - }) - .catch(err => { - log.error(err) - setImmediate(() => callback(err)) - }) - } - - /** - * Get a value from the local datastore indexed by the received key properly encoded. - * @param {Buffer} key identifier of the value to be obtained. 
- * @param {function(Error, Buffer)} callback - * @returns {void} - */ - get (key, callback) { - const start = Date.now() - - if (!Buffer.isBuffer(key)) { - return callback(errcode(new Error(`Workers datastore key must be a buffer`), 'ERR_INVALID_KEY')) - } - - let keyStr - try { - keyStr = keyToBase32(key) - } catch (err) { - log.error(err) - return callback(err) - } - - ky - .get('https://workers.ipns.dev', { - searchParams: { - key: keyStr - } - }) - .text() - .then(data => { - const record = new Record(key, Buffer.from(data, 'base64')) - log(`resolved: ${keyStr}`) - log(`time: ${(Date.now() - start)}ms`) - - setImmediate(() => callback(null, record.value)) - }) - .catch(err => { - log.error(err) - setImmediate(() => callback(err)) - }) - } -} - -module.exports = WorkersDataStore From f6748260ebe649a47aadd700a30e5aacb867b5af Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Thu, 27 Jun 2019 11:12:00 +0200 Subject: [PATCH 17/19] fix: disable workers router --- src/core/ipns/routing/config.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/core/ipns/routing/config.js b/src/core/ipns/routing/config.js index 227b78be25..69e680c812 100644 --- a/src/core/ipns/routing/config.js +++ b/src/core/ipns/routing/config.js @@ -5,7 +5,7 @@ const PubsubDatastore = require('./pubsub-datastore') const OfflineDatastore = require('./offline-datastore') const DnsDatastore = require('./experimental/dns-datastore') const MDnsDatastore = require('./experimental/mdns-datastore') -const WorkersDatastore = require('./experimental/workers-datastore') +// const WorkersDatastore = require('./experimental/workers-datastore') const ExperimentalTieredDatastore = require('./experimental/tiered-datastore') /** * @typedef { import("../../index") } IPFS @@ -21,7 +21,8 @@ module.exports = (ipfs) => { // Setup online routing for IPNS with a tiered routing composed by a DHT and a Pubsub router (if properly enabled) const ipnsStores = [] if (ipfs._options.EXPERIMENTAL.ipnsDNS) { - ipnsStores.push(new WorkersDatastore(ipfs._options.ipns)) + // something is wrong with the workers code disabled for now + // ipnsStores.push(new WorkersDatastore(ipfs._options.ipns)) ipnsStores.push(new DnsDatastore(ipfs._options.ipns)) ipnsStores.push(new MDnsDatastore(ipfs._options.ipns)) return new ExperimentalTieredDatastore(ipnsStores) From 1405e1f95567765c73608c69c0bdca15cc8f6dfe Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Thu, 27 Jun 2019 20:03:30 +0200 Subject: [PATCH 18/19] fix: fix resolve response validation --- src/core/ipns/routing/experimental/utils.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/core/ipns/routing/experimental/utils.js b/src/core/ipns/routing/experimental/utils.js index 99e31a791b..7c599f8292 100644 --- a/src/core/ipns/routing/experimental/utils.js +++ b/src/core/ipns/routing/experimental/utils.js @@ -35,7 +35,7 @@ async function dohBinary (url, domain, key) { .arrayBuffer() const data = dnsPacket.decode(Buffer.from(result)) - if (!data && data.answers.length < 1) { + if (!data || data.answers.length < 1) { throw errcode(new Error('Record not found'), 'ERR_NOT_FOUND') } const record = new Record(key, Buffer.from(Buffer.concat(data.answers[0].data).toString(), 'base64')) From 19fc85dca5a20dbbac0cdd3112abf472696b9612 Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Fri, 28 Jun 2019 08:17:56 +0200 Subject: [PATCH 19/19] fix: bring back dht check in ipns routing --- src/core/ipns/routing/config.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/src/core/ipns/routing/config.js b/src/core/ipns/routing/config.js index 69e680c812..664963bf03 100644 --- a/src/core/ipns/routing/config.js +++ b/src/core/ipns/routing/config.js @@ -1,5 +1,6 @@ 'use strict' +const get = require('dlv') const { TieredDatastore } = require('datastore-core') const PubsubDatastore = require('./pubsub-datastore') const OfflineDatastore = require('./offline-datastore') @@ -34,7 +35,7 @@ module.exports = (ipfs) => { } // Add DHT if we are online - if (ipfs.isOnline()) { + if (get(ipfs._options, 'offline') || !get(ipfs._options, 'libp2p.config.dht.enabled', false)) { ipnsStores.push(ipfs.libp2p.dht) } else { ipnsStores.push(new OfflineDatastore(ipfs._repo))
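Note: with this series applied, none of the experimental routers run by default. They are only wired in when the daemon is started with --enable-ipns-experiment (optionally plus --experimental-ipns-alias=<name>), which patch 16 maps onto the EXPERIMENTAL.ipnsDNS and ipns.alias constructor options. A rough sketch of the programmatic equivalent follows; the option names come from the patches above, while the publish call and the CID are placeholders, not part of the series:

    // CLI (from the daemon options added in patch 16):
    //   jsipfs daemon --enable-ipns-experiment --experimental-ipns-alias=myname

    // Programmatic sketch (js-ipfs 0.36.x constructor style):
    const IPFS = require('ipfs')

    const node = new IPFS({
      EXPERIMENTAL: {
        ipnsDNS: true     // route IPNS through the experimental DNS/mDNS datastores
      },
      ipns: {
        alias: 'myname'   // human readable alias for ipns subdomains
      }
    })

    node.on('ready', () => {
      // With ipnsDNS enabled, routing/config.js returns the
      // ExperimentalTieredDatastore instead of the DHT/pubsub tier,
      // so publish and resolve go through ipns.dev and ipns.local.
      node.name.publish('/ipfs/<cid>', (err, res) => {
        if (err) throw err
        console.log('published as', res.name)
      })
    })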