Skip to content
This repository was archived by the owner on Aug 1, 2023. It is now read-only.
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit e2cb915

Browse files
committedSep 16, 2022
feat!: upgrade to esm libp2p/ipfs (#462)
Updates all dependencies and adds types to tests. BREAKING CHANGE: uses ESM release of ipfs/libp2p
1 parent 03132ac commit e2cb915

34 files changed

+28053
-18798
lines changed
 

‎.aegir.cjs ‎.aegir.js

+28-16
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,23 @@
1-
'use strict'
1+
import path from 'path'
2+
import { createServer } from 'ipfsd-ctl'
3+
import { sigServer } from '@libp2p/webrtc-star-signalling-server'
4+
import { fileURLToPath } from 'url'
5+
import { resolve } from 'import-meta-resolve'
26

3-
const path = require('path')
4-
const createServer = require('ipfsd-ctl').createServer
5-
const signaller = require('libp2p-webrtc-star-signalling-server')
7+
const __dirname = path.dirname(fileURLToPath(import.meta.url))
8+
const ipfsModule = await resolve(process.env.IPFS_JS_HTTP_MODULE || 'ipfs', import.meta.url)
9+
const ipfsHttpModule = await resolve(process.env.IPFS_JS_HTTP_MODULE || 'ipfs-http-client', import.meta.url)
10+
11+
async function findGoIpfsBin () {
12+
if (process.env.IPFS_GO_EXEC != null) {
13+
return process.env.IPFS_GO_EXEC
14+
}
15+
16+
const modulePath = await resolve(process.env.IPFS_GO_IPFS_MODULE || 'go-ipfs', import.meta.url)
17+
const module = await import(modulePath.replace('file://', ''))
18+
19+
return module.path()
20+
}
621

722
/** @type {import('aegir').Options["build"]["config"]} */
823
const esbuild = {
@@ -11,33 +26,29 @@ const esbuild = {
1126
{
1227
name: 'node built ins',
1328
setup (build) {
14-
build.onResolve({ filter: /^stream$/ }, () => {
15-
return { path: require.resolve('readable-stream') }
16-
})
17-
1829
build.onResolve({ filter: /^ipfs$/ }, () => {
19-
return { path: require.resolve(process.env.IPFS_JS_MODULE || 'ipfs') }
30+
return { path: ipfsModule.replace('file://', '') }
2031
})
2132
build.onResolve({ filter: /^ipfs-http-client$/ }, () => {
22-
return { path: require.resolve(process.env.IPFS_JS_HTTP_MODULE || 'ipfs-http-client') }
33+
return { path: ipfsHttpModule.replace('file://', '') }
2334
})
2435
}
2536
}
2637
]
2738
}
2839

29-
const ipfsHttpModule = require(process.env.IPFS_JS_HTTP_MODULE || 'ipfs-http-client')
30-
const ipfsModule = require(process.env.IPFS_JS_MODULE || 'ipfs')
31-
3240
/** @type {import('aegir').PartialOptions} */
33-
module.exports = {
41+
export default {
3442
test: {
3543
browser: {
3644
config: {
3745
buildConfig: esbuild
3846
}
3947
},
4048
async before (options) {
49+
const ipfsHttpModule = await import(process.env.IPFS_JS_HTTP_MODULE || 'ipfs-http-client')
50+
const ipfsModule = await import(process.env.IPFS_JS_MODULE || 'ipfs')
51+
4152
if (options.runner !== 'node') {
4253
const ipfsdServer = await createServer({
4354
host: '127.0.0.1',
@@ -48,7 +59,7 @@ module.exports = {
4859
ipfsHttpModule
4960
}, {
5061
go: {
51-
ipfsBin: process.env.IPFS_GO_EXEC || require(process.env.IPFS_GO_IPFS_MODULE || 'go-ipfs').path()
62+
ipfsBin: await findGoIpfsBin()
5263
},
5364
js: {
5465
ipfsOptions: {
@@ -65,11 +76,12 @@ module.exports = {
6576
}
6677
}).start()
6778

68-
const signallingServer = await signaller.start({
79+
const signallingServer = await sigServer({
6980
port: 24642,
7081
host: '0.0.0.0',
7182
metrics: false
7283
})
84+
7385
return {
7486
ipfsdServer,
7587
signallingServer

‎.github/dependabot.yml

+3
Original file line numberDiff line numberDiff line change
@@ -6,3 +6,6 @@ updates:
66
interval: daily
77
time: "10:00"
88
open-pull-requests-limit: 10
9+
commit-message:
10+
prefix: "deps"
11+
prefix-development: "deps(dev)"

‎.github/workflows/automerge.yml

+8
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
name: Automerge
2+
on: [ pull_request ]
3+
4+
jobs:
5+
automerge:
6+
uses: protocol/.github/.github/workflows/automerge.yml@master
7+
with:
8+
job: 'automerge'
+145
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,145 @@
1+
name: test & maybe release
2+
on:
3+
push:
4+
branches:
5+
- master # with #262 - ${{{ github.default_branch }}}
6+
pull_request:
7+
branches:
8+
- master # with #262 - ${{{ github.default_branch }}}
9+
10+
jobs:
11+
12+
check:
13+
runs-on: ubuntu-latest
14+
steps:
15+
- uses: actions/checkout@v2
16+
- uses: actions/setup-node@v2
17+
with:
18+
node-version: lts/*
19+
- uses: ipfs/aegir/actions/cache-node-modules@master
20+
- run: npm run --if-present lint
21+
- run: npm run --if-present dep-check
22+
23+
test-node:
24+
needs: check
25+
runs-on: ${{ matrix.os }}
26+
strategy:
27+
matrix:
28+
os: [windows-latest, ubuntu-latest, macos-latest]
29+
node: [16]
30+
fail-fast: true
31+
steps:
32+
- uses: actions/checkout@v2
33+
- uses: actions/setup-node@v2
34+
with:
35+
node-version: ${{ matrix.node }}
36+
- uses: ipfs/aegir/actions/cache-node-modules@master
37+
- run: npm run --if-present test:node
38+
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # v3.1.0
39+
with:
40+
flags: node
41+
42+
test-chrome:
43+
needs: check
44+
runs-on: ubuntu-latest
45+
steps:
46+
- uses: actions/checkout@v2
47+
- uses: actions/setup-node@v2
48+
with:
49+
node-version: lts/*
50+
- uses: ipfs/aegir/actions/cache-node-modules@master
51+
- run: npm run --if-present test:chrome
52+
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # v3.1.0
53+
with:
54+
flags: chrome
55+
56+
test-chrome-webworker:
57+
needs: check
58+
runs-on: ubuntu-latest
59+
steps:
60+
- uses: actions/checkout@v2
61+
- uses: actions/setup-node@v2
62+
with:
63+
node-version: lts/*
64+
- uses: ipfs/aegir/actions/cache-node-modules@master
65+
- run: npm run --if-present test:chrome-webworker
66+
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # v3.1.0
67+
with:
68+
flags: chrome-webworker
69+
70+
test-firefox:
71+
needs: check
72+
runs-on: ubuntu-latest
73+
steps:
74+
- uses: actions/checkout@v2
75+
- uses: actions/setup-node@v2
76+
with:
77+
node-version: lts/*
78+
- uses: ipfs/aegir/actions/cache-node-modules@master
79+
- run: npm run --if-present test:firefox
80+
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # v3.1.0
81+
with:
82+
flags: firefox
83+
84+
test-firefox-webworker:
85+
needs: check
86+
runs-on: ubuntu-latest
87+
steps:
88+
- uses: actions/checkout@v2
89+
- uses: actions/setup-node@v2
90+
with:
91+
node-version: lts/*
92+
- uses: ipfs/aegir/actions/cache-node-modules@master
93+
- run: npm run --if-present test:firefox-webworker
94+
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # v3.1.0
95+
with:
96+
flags: firefox-webworker
97+
98+
test-electron-main:
99+
needs: check
100+
runs-on: ubuntu-latest
101+
steps:
102+
- uses: actions/checkout@v2
103+
- uses: actions/setup-node@v2
104+
with:
105+
node-version: lts/*
106+
- uses: ipfs/aegir/actions/cache-node-modules@master
107+
- run: npx xvfb-maybe npm run --if-present test:electron-main
108+
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # v3.1.0
109+
with:
110+
flags: electron-main
111+
112+
test-electron-renderer:
113+
needs: check
114+
runs-on: ubuntu-latest
115+
steps:
116+
- uses: actions/checkout@v2
117+
- uses: actions/setup-node@v2
118+
with:
119+
node-version: lts/*
120+
- uses: ipfs/aegir/actions/cache-node-modules@master
121+
- run: npx xvfb-maybe npm run --if-present test:electron-renderer
122+
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # v3.1.0
123+
with:
124+
flags: electron-renderer
125+
126+
release:
127+
needs: [test-node, test-chrome, test-chrome-webworker, test-firefox, test-firefox-webworker, test-electron-main, test-electron-renderer]
128+
runs-on: ubuntu-latest
129+
if: github.event_name == 'push' && github.ref == 'refs/heads/master' # with #262 - 'refs/heads/${{{ github.default_branch }}}'
130+
steps:
131+
- uses: actions/checkout@v2
132+
with:
133+
fetch-depth: 0
134+
- uses: actions/setup-node@v2
135+
with:
136+
node-version: lts/*
137+
- uses: ipfs/aegir/actions/cache-node-modules@master
138+
- uses: ipfs/aegir/actions/docker-login@master
139+
with:
140+
docker-token: ${{ secrets.DOCKER_TOKEN }}
141+
docker-username: ${{ secrets.DOCKER_USERNAME }}
142+
- run: npm run --if-present release
143+
env:
144+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
145+
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

‎.github/workflows/test.yml

-158
This file was deleted.

‎.gitignore

+1
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ logs
88
*.log
99

1010
coverage
11+
.coverage
1112

1213
# Runtime data
1314
pids

‎README.md

+52-29
Original file line numberDiff line numberDiff line change
@@ -1,23 +1,39 @@
1-
# Interoperability Tests for IPFS
1+
# ipfs-interop <!-- omit in toc -->
2+
3+
[![ipfs.io](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io)
4+
[![IRC](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs)
5+
[![Discord](https://img.shields.io/discord/806902334369824788?style=flat-square)](https://discord.gg/ipfs)
6+
[![codecov](https://img.shields.io/codecov/c/github/ipfs/interop.svg?style=flat-square)](https://codecov.io/gh/ipfs/interop)
7+
[![CI](https://img.shields.io/github/workflow/status/ipfs/interop/test%20&%20maybe%20release/master?style=flat-square)](https://github.com/ipfs/interop/actions/workflows/js-test-and-release.yml)
8+
9+
> Interoperability Tests for IPFS
10+
11+
## Table of contents <!-- omit in toc -->
12+
13+
- [Install](#install)
14+
- [Usage](#usage)
15+
- [Run the tests](#run-the-tests)
16+
- [Run a particular test locally](#run-a-particular-test-locally)
17+
- [Testing with different versions of go/js IPFS](#testing-with-different-versions-of-gojs-ipfs)
18+
- [As a project](#as-a-project)
19+
- [As environmental variables](#as-environmental-variables)
20+
- [As a custom runtime](#as-a-custom-runtime)
21+
- [Releasing a new version](#releasing-a-new-version)
22+
- [Interop release process for when breaking changes are introduced](#interop-release-process-for-when-breaking-changes-are-introduced)
23+
- [Contribute](#contribute)
24+
- [License](#license)
25+
- [Contribute](#contribute-1)
26+
27+
## Install
228

3-
[![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io)
4-
[![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/)
5-
[![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs)
6-
[![standard-readme compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme)
7-
[![Travis CI](https://travis-ci.com/ipfs/interop.svg?branch=master)](https://travis-ci.com/ipfs/interop)
8-
9-
> Interoperability tests for IPFS Implementations
29+
```console
30+
$ npm i ipfs-interop
31+
```
1032

1133
This repository will be used for interop tests. Please jump into the issues if you'd like to help out setting this up!
1234

1335
## Usage
1436

15-
### Install
16-
17-
```console
18-
$ npm install -g ipfs-interop
19-
```
20-
2137
### Run the tests
2238

2339
```console
@@ -76,7 +92,7 @@ $ IPFS_GO_EXEC=/path IPFS_JS_EXEC=/path IPFS_JS_MODULE=/path IPFS_JS_HTTP_MODULE
7692
### As a custom runtime
7793

7894
If you want to run interop on CI against specific repo and git revision of
79-
go-ipfs or js-ipfs* then set everything up in `./scripts/custom-runtime.sh`
95+
go-ipfs or js-ipfs\* then set everything up in `./scripts/custom-runtime.sh`
8096
and enable it by uncommenting `env:` `IPFS_(..)` definitions in `.github/workflows/test.yml`
8197

8298
If you want to test against unrelased things locally, make sure the same env
@@ -85,25 +101,21 @@ variables are set on your machine.
85101
For example, to run pubsub tests against go-ipfs and js-ipfs revision defined
86102
in `./scripts/custom-runtime.sh`, one can:
87103

88-
```
89-
export IPFS_GO_EXEC=/tmp/go-ipfs/cmd/ipfs/ipfs
90-
export IPFS_JS_EXEC=/tmp/js-ipfs/packages/ipfs/src/cli.js
91-
export IPFS_JS_MODULE=/tmp/js-ipfs/packages/ipfs/dist/cjs/src/index.js
92-
export IPFS_JS_HTTP_MODULE=/tmp/js-ipfs/packages/ipfs-http-client/dist/cjs/src/index.js
93-
./scripts/custom-runtime.sh
94-
node bin/ipfs-interop.js -- -t node --grep "pubsub"
95-
```
104+
export IPFS_GO_EXEC=/tmp/go-ipfs/cmd/ipfs/ipfs
105+
export IPFS_JS_EXEC=/tmp/js-ipfs/packages/ipfs/src/cli.js
106+
export IPFS_JS_MODULE=/tmp/js-ipfs/packages/ipfs/src/index.js
107+
export IPFS_JS_HTTP_MODULE=/tmp/js-ipfs/packages/ipfs-http-client/src/index.js
108+
./scripts/custom-runtime.sh
109+
node bin/ipfs-interop.js -- -t node --grep "pubsub"
96110

97111
## Releasing a new version
98112

99113
This repo does not use aegir for releases.
100114
Use `npm` directly and publish entire root (CI in go-ipfs requires it).
101115

102-
```
103-
npm version [major|minor|patch]
104-
npm publish
105-
npm push origin && npm push origin v[N.N.N]
106-
```
116+
npm version [major|minor|patch]
117+
npm publish
118+
npm push origin && npm push origin v[N.N.N]
107119

108120
## Interop release process for when breaking changes are introduced
109121

@@ -124,4 +136,15 @@ This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/c
124136

125137
## License
126138

127-
[MIT](./LICENSE)
139+
Licensed under either of
140+
141+
- Apache 2.0, ([LICENSE-APACHE](LICENSE-APACHE) / <http://www.apache.org/licenses/LICENSE-2.0>)
142+
- MIT ([LICENSE-MIT](LICENSE-MIT) / <http://opensource.org/licenses/MIT>)
143+
144+
## Contribute
145+
146+
Feel free to join in. All welcome. Open an [issue](https://github.com/ipfs/js-ipfs-unixfs-importer/issues)!
147+
148+
This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md).
149+
150+
[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md)

‎bin/package.json

-3
This file was deleted.

‎package-lock.json

+26,582-18,040
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

‎package.json

+62-52
Original file line numberDiff line numberDiff line change
@@ -22,37 +22,43 @@
2222
"bin": {
2323
"ipfs-interop": "bin/ipfs-interop.js"
2424
},
25-
"main": "src/index.js",
2625
"type": "module",
27-
"types": "types/src/index.d.ts",
26+
"types": "./dist/src/index.d.ts",
2827
"typesVersions": {
2928
"*": {
3029
"*": [
31-
"types/*",
32-
"types/src/*"
30+
"*",
31+
"dist/*",
32+
"dist/src/*",
33+
"dist/src/*/index"
3334
],
34-
"types/*": [
35-
"types/*",
36-
"types/src/*"
35+
"src/*": [
36+
"*",
37+
"dist/*",
38+
"dist/src/*",
39+
"dist/src/*/index"
3740
]
3841
}
3942
},
4043
"files": [
41-
"*",
42-
"!**/*.tsbuildinfo",
43-
"!**/browser-test",
44-
"!**/node-test"
44+
".aegir.js",
45+
"bin",
46+
"src",
47+
"scripts",
48+
"test",
49+
"dist",
50+
"!**/*.tsbuildinfo"
4551
],
4652
"exports": {
4753
".": {
54+
"types": "./dist/src/index.d.ts",
4855
"import": "./src/index.js"
4956
}
5057
},
5158
"eslintConfig": {
5259
"extends": "ipfs",
5360
"parserOptions": {
54-
"sourceType": "module",
55-
"ecmaVersion": "2020"
61+
"sourceType": "module"
5662
},
5763
"ignorePatterns": [
5864
"package.json"
@@ -85,15 +91,15 @@
8591
"release": "patch"
8692
},
8793
{
88-
"type": "chore",
94+
"type": "docs",
8995
"release": "patch"
9096
},
9197
{
92-
"type": "docs",
98+
"type": "test",
9399
"release": "patch"
94100
},
95101
{
96-
"type": "test",
102+
"type": "deps",
97103
"release": "patch"
98104
},
99105
{
@@ -123,7 +129,11 @@
123129
},
124130
{
125131
"type": "docs",
126-
"section": "Trivial Changes"
132+
"section": "Documentation"
133+
},
134+
{
135+
"type": "deps",
136+
"section": "Dependencies"
127137
},
128138
{
129139
"type": "test",
@@ -134,77 +144,77 @@
134144
}
135145
],
136146
"@semantic-release/changelog",
137-
[
138-
"@semantic-release/npm",
139-
{
140-
"pkgRoot": "dist"
141-
}
142-
],
147+
"@semantic-release/npm",
143148
"@semantic-release/github",
144149
"@semantic-release/git"
145150
]
146151
},
147152
"scripts": {
148-
"clean": "rimraf /tmp/js-ipfs /tmp/go-ipfs ./go-libp2p-relay-daemon dist types",
153+
"clean": "rimraf scripts/libp2p-relay-daemon dist",
149154
"lint": "aegir lint",
150-
"build": "aegir build --esm-tests && cp -R types scripts bin test src .aegir.cjs dist && cp bin/package.json dist/src && cp bin/package.json dist/test && npx json -I -f dist/package.json -e 'this.browser.execa=false; this.browser.fs=false; this.browser[\"go-ipfs\"]=false'",
151-
"release": "semantic-release",
152-
"postinstall": "cross-env node ./scripts/setup-libp2p-relay-daemon.js",
153-
"pretest": "aegir build --esm-tests",
155+
"build": "aegir build",
156+
"release": "aegir release",
154157
"test": "cross-env LIBP2P_TCP_REUSEPORT=false aegir test",
155-
"test:node": "cross-env LIBP2P_TCP_REUSEPORT=false aegir test -t node -f test/node.js",
156-
"test:browser": "cross-env LIBP2P_TCP_REUSEPORT=false aegir test -t browser -f test/browser.js",
157-
"dep-check": "aegir dep-check package.json"
158+
"test:node": "cross-env LIBP2P_TCP_REUSEPORT=false aegir test -t node -f test/node.js --cov",
159+
"test:chrome": "cross-env LIBP2P_TCP_REUSEPORT=false aegir test -t browser -f test/browser.js --cov",
160+
"test:electron-main": "LIBP2P_TCP_REUSEPORT=false aegir test -t electron-main -f test/node.js",
161+
"dep-check": "aegir dep-check"
158162
},
159163
"dependencies": {
160-
"aegir": "^36.2.3",
164+
"@libp2p/interface-peer-id": "^1.0.4",
165+
"@libp2p/peer-id": "^1.1.10",
166+
"@libp2p/webrtc-star-signalling-server": "^2.0.1",
167+
"@libp2p/websockets": "^3.0.0",
168+
"@multiformats/multiaddr": "^10.4.3",
169+
"@types/is-ci": "^3.0.0",
170+
"aegir": "^37.0.15",
161171
"assert": "^2.0.0",
162172
"buffer": "^6.0.3",
163173
"cachedir": "^2.3.0",
164174
"cross-env": "^7.0.2",
165175
"delay": "^5.0.0",
166-
"detect-node": "^2.0.4",
167-
"execa": "^5.1.1",
176+
"execa": "^6.1.0",
168177
"go-platform": "^1.0.0",
169178
"got": "^12.0.0",
170179
"gunzip-maybe": "^1.4.2",
171180
"hasha": "^5.2.2",
172-
"ipfs-unixfs": "^6.0.3",
181+
"import-meta-resolve": "^2.0.3",
182+
"ipfs-unixfs": "^7.0.0",
173183
"ipfs-utils": "^9.0.1",
174-
"ipfsd-ctl": "^10.0.3",
175-
"ipns": "^0.16.0",
184+
"ipfsd-ctl": "^12.0.0",
185+
"ipns": "^2.0.0",
176186
"is-ci": "^3.0.0",
177-
"is-os": "^1.0.1",
178187
"iso-random-stream": "^2.0.0",
179188
"it-all": "^1.0.1",
180189
"it-concat": "^2.0.0",
181190
"it-drain": "^1.0.0",
182191
"it-last": "^1.0.1",
183192
"it-to-buffer": "^2.0.1",
184-
"libp2p-webrtc-star-signalling-server": "^0.1.0",
185-
"libp2p-websockets": "^0.16.1",
186193
"multiformats": "^9.3.0",
187-
"nanoid": "^3.1.10",
188-
"p-defer": "^3.0.0",
189-
"p-retry": "^4.1.0",
190-
"peer-id": "^0.16.0",
191-
"pretty-bytes": "^5.1.0",
194+
"nanoid": "^4.0.0",
195+
"p-defer": "^4.0.0",
196+
"p-retry": "^5.1.1",
197+
"p-timeout": "^6.0.0",
198+
"p-wait-for": "^5.0.0",
199+
"pretty-bytes": "^6.0.0",
192200
"promisify-es6": "^1.0.3",
193-
"random-fs": "^1.0.3",
194-
"rimraf": "^3.0.2",
195201
"tar-fs": "^2.1.1",
196202
"uint8arrays": "^3.0.0",
197203
"unzip-stream": "^0.3.1",
198-
"wherearewe": "^1.0.0"
204+
"wherearewe": "^2.0.1"
199205
},
200206
"devDependencies": {
201-
"go-ipfs": "^0.12.2",
202-
"ipfs": "^0.61.0",
203-
"ipfs-http-client": "^55.0.0"
207+
"go-ipfs": "^0.15.0",
208+
"ipfs": "^0.64.0",
209+
"ipfs-http-client": "^58.0.0"
204210
},
205211
"browser": {
206212
"go-ipfs": false,
207213
"fs": false,
208-
"execa": false
214+
"execa": false,
215+
"os": false,
216+
"path": false,
217+
"./scripts/download-relay-daemon.js": false,
218+
"./test/utils/is-windows.js": "./test/utils/is-windows.browser.js"
209219
}
210220
}

‎scripts/custom-runtime.sh

+2-2
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,8 @@
1010
#
1111
# export IPFS_GO_EXEC=/tmp/go-ipfs/cmd/ipfs/ipfs
1212
# export IPFS_JS_EXEC=/tmp/js-ipfs/packages/ipfs/src/cli.js
13-
# export IPFS_JS_MODULE=/tmp/js-ipfs/packages/ipfs/dist/cjs/src/index.js
14-
# export IPFS_JS_HTTP_MODULE=/tmp/js-ipfs/packages/ipfs-http-client/dist/cjs/src/index.js
13+
# export IPFS_JS_MODULE=/tmp/js-ipfs/packages/ipfs/src/index.js
14+
# export IPFS_JS_HTTP_MODULE=/tmp/js-ipfs/packages/ipfs-http-client/src/index.js
1515
# ./scripts/custom-runtime.sh
1616
# node bin/ipfs-interop.js -- -t node --grep "pubsub"
1717

‎scripts/download-relay-daemon.js

+150
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,150 @@
1+
#!/usr/bin/env node
2+
3+
// this script will download the libp2p-relay-daemon binary
4+
// for windows/macos/linux. based on parts of https://github.com/ipfs/npm-go-ipfs
5+
6+
// @ts-expect-error no types
7+
import gunzip from 'gunzip-maybe'
8+
import got from 'got'
9+
import path from 'path'
10+
// @ts-expect-error no types
11+
import tarFS from 'tar-fs'
12+
// @ts-expect-error no types
13+
import unzip from 'unzip-stream'
14+
// @ts-expect-error no types
15+
import cachedir from 'cachedir'
16+
import fs from 'fs'
17+
import os from 'os'
18+
import hasha from 'hasha'
19+
20+
// libp2p-relay-daemon version from https://dist.ipfs.io/libp2p-relay-daemon/
21+
export const LIBP2P_RELAY_DAEMON_VERSION = 'v0.1.0'
22+
23+
/**
24+
* avoid expensive fetch if file is already in cache
25+
* @param {string} url
26+
*/
27+
async function cachingFetchAndVerify (url) {
28+
const cacheDir = process.env.NPM_GO_LIBP2P_RELAY_DAEMON_CACHE || cachedir('npm-go-libp2p-relay-daemon')
29+
const filename = url.split('/').pop()
30+
31+
if (!filename) {
32+
throw new Error('Invalid URL')
33+
}
34+
35+
const cachedFilePath = path.join(cacheDir, filename)
36+
const cachedHashPath = `${cachedFilePath}.sha512`
37+
38+
if (!fs.existsSync(cacheDir)) {
39+
fs.mkdirSync(cacheDir, { recursive: true })
40+
}
41+
if (!fs.existsSync(cachedFilePath)) {
42+
console.info(`Downloading ${url} to ${cacheDir}`)
43+
// download file
44+
fs.writeFileSync(cachedFilePath, await got(url).buffer())
45+
console.info(`Downloaded ${url}`)
46+
47+
// ..and checksum
48+
console.info(`Downloading ${filename}.sha512`)
49+
fs.writeFileSync(cachedHashPath, await got(`${url}.sha512`).buffer())
50+
console.info(`Downloaded ${filename}.sha512`)
51+
} else {
52+
console.info(`Found ${cachedFilePath}`)
53+
}
54+
55+
console.info(`Verifying ${filename}.sha512`)
56+
57+
const digest = Buffer.alloc(128)
58+
const fd = fs.openSync(cachedHashPath, 'r')
59+
fs.readSync(fd, digest, 0, digest.length, 0)
60+
fs.closeSync(fd)
61+
const expectedSha = digest.toString('utf8')
62+
const calculatedSha = await hasha.fromFile(cachedFilePath, { encoding: 'hex', algorithm: 'sha512' })
63+
if (calculatedSha !== expectedSha) {
64+
console.log(`Expected SHA512: ${expectedSha}`)
65+
console.log(`Calculated SHA512: ${calculatedSha}`)
66+
throw new Error(`SHA512 of ${cachedFilePath}' (${calculatedSha}) does not match expected value from ${cachedFilePath}.sha512 (${expectedSha})`)
67+
}
68+
console.log(`OK (${expectedSha})`)
69+
70+
return fs.createReadStream(cachedFilePath)
71+
}
72+
73+
/**
74+
* @param {string} version
75+
* @param {string} platform
76+
* @param {string} arch
77+
* @param {string} distUrl
78+
*/
79+
async function getDownloadURL (version, platform, arch, distUrl) {
80+
const data = await got(`${distUrl}/libp2p-relay-daemon/${version}/dist.json`).json()
81+
82+
if (!data.platforms[platform]) {
83+
throw new Error(`No binary available for platform '${platform}'. Available platforms: ${Object.keys(data.platforms).join(', ')}`)
84+
}
85+
86+
if (!data.platforms[platform].archs[arch]) {
87+
throw new Error(`No binary available for platform '${platform}' and arch '${arch}'. Available architectures: ${Object.keys(data.platforms[platform].archs)}`)
88+
}
89+
90+
const link = data.platforms[platform].archs[arch].link
91+
return `${distUrl}/libp2p-relay-daemon/${version}${link}`
92+
}
93+
94+
/**
95+
* @param {string} url
96+
* @param {string} installPath
97+
* @param {import('stream').Readable} stream
98+
*/
99+
function unpack (url, installPath, stream) {
100+
return new Promise((resolve, reject) => {
101+
if (url.endsWith('.zip')) {
102+
return stream.pipe(
103+
unzip
104+
.Extract({ path: installPath })
105+
.on('close', resolve)
106+
.on('error', reject)
107+
)
108+
}
109+
110+
return stream
111+
.pipe(gunzip())
112+
.pipe(
113+
tarFS
114+
.extract(installPath)
115+
.on('finish', resolve)
116+
.on('error', reject)
117+
)
118+
})
119+
}
120+
121+
/**
122+
* @param {object} options
123+
* @param {string} options.version
124+
* @param {string} options.platform
125+
* @param {string} options.arch
126+
* @param {string} options.installPath
127+
* @param {string} options.distUrl
128+
*/
129+
export async function download ({ version, platform, arch, installPath, distUrl }) {
130+
const url = await getDownloadURL(version, platform, arch, distUrl)
131+
const data = await cachingFetchAndVerify(url)
132+
133+
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tmp-libp2p-relay-daemon'))
134+
await unpack(url, tmpDir, data)
135+
136+
const filename = `libp2p-relay-daemon${platform === 'windows' ? '.exe' : ''}`
137+
const tmpPath = path.join(tmpDir, 'libp2p-relay-daemon', filename)
138+
const finalPath = path.join(installPath, filename)
139+
try {
140+
// will throw if the file cannot be accessed
141+
fs.accessSync(finalPath)
142+
// remove old binary, just to be sure we always use the correct version
143+
fs.rmSync(finalPath, { recursive: true })
144+
} catch (e) {}
145+
fs.cpSync(tmpPath, finalPath)
146+
fs.rmSync(tmpDir, { recursive: true })
147+
console.info(`Unpacked binary placed in ${finalPath}`)
148+
149+
return finalPath
150+
}

‎scripts/package.json

-3
This file was deleted.

‎scripts/setup-libp2p-relay-daemon.js

+4-141
Original file line numberDiff line numberDiff line change
@@ -3,148 +3,11 @@
33
// this script will download the libp2p-relay-daemon binary
44
// for windows/macos/linux. based on parts of https://github.com/ipfs/npm-go-ipfs
55

6-
import goenv from 'go-platform'
7-
import gunzip from 'gunzip-maybe'
8-
import got from 'got'
9-
import path from 'path'
10-
import tarFS from 'tar-fs'
11-
import unzip from 'unzip-stream'
12-
import cachedir from 'cachedir'
13-
import fs from 'fs'
6+
import { download, LIBP2P_RELAY_DAEMON_VERSION } from './download-relay-daemon.js'
147
import os from 'os'
15-
import hasha from 'hasha'
16-
17-
// libp2p-relay-daemon version from https://dist.ipfs.io/libp2p-relay-daemon/
18-
const LIBP2P_RELAY_DAEMON_VERSION = 'v0.1.0'
19-
20-
/**
21-
* avoid expensive fetch if file is already in cache
22-
* @param {string} url
23-
*/
24-
async function cachingFetchAndVerify (url) {
25-
const cacheDir = process.env.NPM_GO_LIBP2P_RELAY_DAEMON_CACHE || cachedir('npm-go-libp2p-relay-daemon')
26-
const filename = url.split('/').pop()
27-
28-
if (!filename) {
29-
throw new Error('Invalid URL')
30-
}
31-
32-
const cachedFilePath = path.join(cacheDir, filename)
33-
const cachedHashPath = `${cachedFilePath}.sha512`
34-
35-
if (!fs.existsSync(cacheDir)) {
36-
fs.mkdirSync(cacheDir, { recursive: true })
37-
}
38-
if (!fs.existsSync(cachedFilePath)) {
39-
console.info(`Downloading ${url} to ${cacheDir}`)
40-
// download file
41-
fs.writeFileSync(cachedFilePath, await got(url).buffer())
42-
console.info(`Downloaded ${url}`)
43-
44-
// ..and checksum
45-
console.info(`Downloading ${filename}.sha512`)
46-
fs.writeFileSync(cachedHashPath, await got(`${url}.sha512`).buffer())
47-
console.info(`Downloaded ${filename}.sha512`)
48-
} else {
49-
console.info(`Found ${cachedFilePath}`)
50-
}
51-
52-
console.info(`Verifying ${filename}.sha512`)
53-
54-
const digest = Buffer.alloc(128)
55-
const fd = fs.openSync(cachedHashPath, 'r')
56-
fs.readSync(fd, digest, 0, digest.length, 0)
57-
fs.closeSync(fd)
58-
const expectedSha = digest.toString('utf8')
59-
const calculatedSha = await hasha.fromFile(cachedFilePath, { encoding: 'hex', algorithm: 'sha512' })
60-
if (calculatedSha !== expectedSha) {
61-
console.log(`Expected SHA512: ${expectedSha}`)
62-
console.log(`Calculated SHA512: ${calculatedSha}`)
63-
throw new Error(`SHA512 of ${cachedFilePath}' (${calculatedSha}) does not match expected value from ${cachedFilePath}.sha512 (${expectedSha})`)
64-
}
65-
console.log(`OK (${expectedSha})`)
66-
67-
return fs.createReadStream(cachedFilePath)
68-
}
69-
70-
/**
71-
* @param {string} version
72-
* @param {string} platform
73-
* @param {string} arch
74-
* @param {string} distUrl
75-
*/
76-
async function getDownloadURL (version, platform, arch, distUrl) {
77-
const data = await got(`${distUrl}/libp2p-relay-daemon/${version}/dist.json`).json()
78-
79-
if (!data.platforms[platform]) {
80-
throw new Error(`No binary available for platform '${platform}'. Available platforms: ${Object.keys(data.platforms).join(', ')}`)
81-
}
82-
83-
if (!data.platforms[platform].archs[arch]) {
84-
throw new Error(`No binary available for platform '${platform}' and arch '${arch}'. Available architectures: ${Object.keys(data.platforms[platform].archs)}`)
85-
}
86-
87-
const link = data.platforms[platform].archs[arch].link
88-
return `${distUrl}/libp2p-relay-daemon/${version}${link}`
89-
}
90-
91-
/**
92-
* @param {string} url
93-
* @param {string} installPath
94-
* @param {import('stream').Readable} stream
95-
*/
96-
function unpack (url, installPath, stream) {
97-
return new Promise((resolve, reject) => {
98-
if (url.endsWith('.zip')) {
99-
return stream.pipe(
100-
unzip
101-
.Extract({ path: installPath })
102-
.on('close', resolve)
103-
.on('error', reject)
104-
)
105-
}
106-
107-
return stream
108-
.pipe(gunzip())
109-
.pipe(
110-
tarFS
111-
.extract(installPath)
112-
.on('finish', resolve)
113-
.on('error', reject)
114-
)
115-
})
116-
}
117-
118-
/**
119-
* @param {object} options
120-
* @param {string} options.version
121-
* @param {string} options.platform
122-
* @param {string} options.arch
123-
* @param {string} options.installPath
124-
* @param {string} options.distUrl
125-
*/
126-
async function download ({ version, platform, arch, installPath, distUrl }) {
127-
const url = await getDownloadURL(version, platform, arch, distUrl)
128-
const data = await cachingFetchAndVerify(url)
129-
130-
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tmp-libp2p-relay-daemon'))
131-
await unpack(url, tmpDir, data)
132-
133-
const filename = `libp2p-relay-daemon${platform === 'windows' ? '.exe' : ''}`
134-
const tmpPath = path.join(tmpDir, 'libp2p-relay-daemon', filename)
135-
const finalPath = path.join(installPath, filename)
136-
try {
137-
// remove old binary, just to be sure we always use the correct version
138-
if (fs.accessSync(finalPath)) {
139-
fs.rmSync(finalPath, { recursive: true })
140-
}
141-
} catch (e) {}
142-
fs.cpSync(tmpPath, finalPath)
143-
fs.rmSync(tmpDir, { recursive: true })
144-
console.info(`Unpacked binary placed in ${finalPath}`)
145-
146-
return finalPath
147-
}
8+
import path from 'path'
9+
// @ts-expect-error no types
10+
import goenv from 'go-platform'
14811

14912
download({
15013
version: LIBP2P_RELAY_DAEMON_VERSION,

‎test/cid-version-agnostic.js

+17-8
Original file line numberDiff line numberDiff line change
@@ -2,13 +2,22 @@
22

33
import { nanoid } from 'nanoid'
44
import concat from 'it-concat'
5-
import { expect } from 'aegir/utils/chai.js'
5+
import { expect } from 'aegir/chai'
66
import { daemonFactory } from './utils/daemon-factory.js'
77
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
88

9+
/**
10+
* @typedef {import('ipfsd-ctl').Controller} Controller
11+
* @typedef {import('ipfsd-ctl').Factory} Factory
12+
*/
13+
914
describe('CID version agnostic', function () {
10-
this.timeout(50 * 1000)
15+
this.timeout(50e3)
16+
/** @type {Record<string, Controller>} */
1117
const daemons = {}
18+
/**
19+
* @type {Factory}
20+
*/
1221
let factory
1322

1423
before(async function () {
@@ -23,12 +32,12 @@ describe('CID version agnostic', function () {
2332
Object.assign(daemons, { js0, js1, go0, go1 })
2433

2534
await Promise.all([
26-
js0.api.swarm.connect(js1.api.peerId.addresses[0]),
27-
js1.api.swarm.connect(js0.api.peerId.addresses[0]),
28-
go0.api.swarm.connect(go1.api.peerId.addresses[0]),
29-
go1.api.swarm.connect(go0.api.peerId.addresses[0]),
30-
js0.api.swarm.connect(go0.api.peerId.addresses[0]),
31-
go0.api.swarm.connect(js0.api.peerId.addresses[0])
35+
js0.api.swarm.connect(js1.peer.addresses[0]),
36+
js1.api.swarm.connect(js0.peer.addresses[0]),
37+
go0.api.swarm.connect(go1.peer.addresses[0]),
38+
go1.api.swarm.connect(go0.peer.addresses[0]),
39+
js0.api.swarm.connect(go0.peer.addresses[0]),
40+
go0.api.swarm.connect(js0.peer.addresses[0])
3241
])
3342
})
3443

‎test/circuit.js

+18-3
Original file line numberDiff line numberDiff line change
@@ -5,11 +5,16 @@ import allV1 from './circuit/v1/all.js'
55
import allV2 from './circuit/v2/all.js'
66
import browserV1 from './circuit/v1/browser.js'
77
import browserV2 from './circuit/v2/browser.js'
8-
import isNode from 'detect-node'
8+
import { isNode, isElectronMain } from 'wherearewe'
99
import { connect, send, clean } from './utils/circuit.js'
1010
import { closeRelays } from './utils/relayd.js'
1111
import { daemonFactory } from './utils/daemon-factory.js'
1212

13+
/**
14+
* @typedef {import('ipfsd-ctl').Controller} Controller
15+
* @typedef {import('ipfsd-ctl').Factory} Factory
16+
*/
17+
1318
const timeout = 80 * 1000
1419
const baseTest = {
1520
connect,
@@ -22,17 +27,22 @@ describe('circuit', () => {
2227

2328
// Legacy v1 (unlimited relay)
2429
describe('v1', () => {
30+
/** @type {Factory} */
2531
let factory
2632

2733
before(async () => {
2834
factory = await daemonFactory()
2935
})
3036

31-
const tests = isNode ? allV1 : browserV1
37+
/** @type {*} */
38+
const tests = isNode || isElectronMain ? allV1 : browserV1
3239

3340
Object.keys(tests).forEach((test) => {
41+
/** @type {Controller} */
3442
let nodeA
43+
/** @type {Controller} */
3544
let relay
45+
/** @type {Controller} */
3646
let nodeB
3747

3848
tests[test] = Object.assign({}, baseTest, tests[test])
@@ -64,17 +74,22 @@ describe('circuit', () => {
6474
// Modern v2 (limited relay)
6575
// https://github.com/libp2p/specs/blob/master/relay/circuit-v2.md
6676
describe('v2', () => {
77+
/** @type {Factory} */
6778
let factory
6879

6980
before(async () => {
7081
factory = await daemonFactory()
7182
})
7283

73-
const tests = isNode ? allV2 : browserV2
84+
/** @type {*} */
85+
const tests = isNode || isElectronMain ? allV2 : browserV2
7486

7587
Object.keys(tests).forEach((test) => {
88+
/** @type {Controller} */
7689
let nodeA
90+
/** @type {Controller} */
7791
let relay
92+
/** @type {Controller} */
7893
let nodeB
7994

8095
tests[test] = Object.assign({}, baseTest, tests[test])

‎test/circuit/v1/all.js

+28
Original file line numberDiff line numberDiff line change
@@ -4,12 +4,19 @@
44
import { createJs, createGo, randomWsAddr } from '../../utils/circuit.js'
55
import { getRelayV } from '../../utils/relayd.js'
66

7+
/**
8+
* @typedef {import('ipfsd-ctl').Factory} Factory
9+
*/
10+
711
export default {
812

913
// rv1 is a standalone, reference implementation of circuit relay v1
1014
// (https://github.com/libp2p/go-libp2p-relay-daemon)
1115

1216
'go-rv1-go': {
17+
/**
18+
* @param {Factory} factory
19+
*/
1320
create: async (factory) => {
1421
const relay = await getRelayV(1)
1522
return Promise.all([
@@ -21,6 +28,9 @@ export default {
2128
},
2229

2330
'js-rv1-js': {
31+
/**
32+
* @param {Factory} factory
33+
*/
2434
create: async (factory) => {
2535
const relay = await getRelayV(1)
2636
return Promise.all([
@@ -32,6 +42,9 @@ export default {
3242
},
3343

3444
'js-rv1-go': {
45+
/**
46+
* @param {Factory} factory
47+
*/
3548
create: async (factory) => {
3649
const relay = await getRelayV(1)
3750
return Promise.all([
@@ -43,6 +56,9 @@ export default {
4356
},
4457

4558
'go-rv1-js': {
59+
/**
60+
* @param {Factory} factory
61+
*/
4662
create: async (factory) => {
4763
const relay = await getRelayV(1)
4864
return Promise.all([
@@ -58,6 +74,9 @@ export default {
5874
// FIXME: remove after js-ipfs migrates to v2
5975

6076
'go-js-go': {
77+
/**
78+
* @param {Factory} factory
79+
*/
6180
create: async (factory) => {
6281
return Promise.all([
6382
createGo([randomWsAddr], factory),
@@ -67,6 +86,9 @@ export default {
6786
}
6887
},
6988
'js-js-go': {
89+
/**
90+
* @param {Factory} factory
91+
*/
7092
create: async (factory) => {
7193
return Promise.all([
7294
createJs([randomWsAddr], factory),
@@ -76,13 +98,19 @@ export default {
7698
}
7799
},
78100
'go-js-js': {
101+
/**
102+
* @param {Factory} factory
103+
*/
79104
create: (factory) => Promise.all([
80105
createGo([randomWsAddr], factory),
81106
createJs([randomWsAddr], factory),
82107
createJs([randomWsAddr], factory)
83108
])
84109
},
85110
'js-js-js': {
111+
/**
112+
* @param {Factory} factory
113+
*/
86114
create: (factory) => Promise.all([
87115
createJs([randomWsAddr], factory),
88116
createJs([randomWsAddr], factory),

‎test/circuit/v1/browser.js

+66-4
Original file line numberDiff line numberDiff line change
@@ -13,10 +13,19 @@ import {
1313
getWsAddr,
1414
getWrtcStarAddr
1515
} from '../../utils/circuit.js'
16+
import { Multiaddr } from '@multiformats/multiaddr'
17+
18+
/**
19+
* @typedef {import('ipfsd-ctl').Controller} Controller
20+
* @typedef {import('ipfsd-ctl').Factory} Factory
21+
*/
1622

1723
export default {
1824
'browser-go-js': {
1925
skip: () => true, // FIXME when we have circuit v2 in js-ipfs and webrtc signaling
26+
/**
27+
* @param {Factory} factory
28+
*/
2029
create: async (factory) => {
2130
const relay = await createGoRelay([randomWsAddr], factory)
2231
return Promise.all([
@@ -29,6 +38,9 @@ export default {
2938
},
3039
'browser-go-go': {
3140
skip: () => true, // FIXME when we have circuit v2 in js-ipfs and webrtc signaling
41+
/**
42+
* @param {Factory} factory
43+
*/
3244
create: async (factory) => {
3345
const relay = await createGoRelay([randomWsAddr], factory)
3446
return Promise.all([
@@ -40,6 +52,9 @@ export default {
4052
connect: connWithTimeout(1500)
4153
},
4254
'browser-js-js': {
55+
/**
56+
* @param {Factory} factory
57+
*/
4358
create: (factory) => Promise.all([
4459
createProc([], factory),
4560
createJs([randomWsAddr], factory),
@@ -48,6 +63,9 @@ export default {
4863
connect: connWithTimeout(1500)
4964
},
5065
'browser-js-go': {
66+
/**
67+
* @param {Factory} factory
68+
*/
5169
create: (factory) => Promise.all([
5270
createProc([], factory),
5371
createJs([randomWsAddr], factory),
@@ -57,6 +75,9 @@ export default {
5775
},
5876
'js-go-browser': {
5977
skip: () => true, // FIXME when we have circuit v2 in js-ipfs and webrtc signaling
78+
/**
79+
* @param {Factory} factory
80+
*/
6081
create: async (factory) => {
6182
const relay = await createGoRelay([randomWsAddr], factory)
6283
return Promise.all([
@@ -69,6 +90,9 @@ export default {
6990
},
7091
'go-go-browser': {
7192
skip: () => true, // FIXME when we have circuit v2 in js-ipfs and webrtc signaling
93+
/**
94+
* @param {Factory} factory
95+
*/
7296
create: async (factory) => {
7397
const relay = await createGoRelay([randomWsAddr], factory)
7498
return Promise.all([
@@ -80,6 +104,9 @@ export default {
80104
connect: connWithTimeout(1500)
81105
},
82106
'js-js-browser': {
107+
/**
108+
* @param {Factory} factory
109+
*/
83110
create: (factory) => Promise.all([
84111
createJs([randomWsAddr], factory),
85112
createJs([randomWsAddr], factory),
@@ -88,6 +115,9 @@ export default {
88115
connect: connWithTimeout(1500)
89116
},
90117
'go-js-browser': {
118+
/**
119+
* @param {Factory} factory
120+
*/
91121
create: (factory) => Promise.all([
92122
createGo([randomWsAddr], factory),
93123
createJs([randomWsAddr], factory),
@@ -96,65 +126,97 @@ export default {
96126
connect: connWithTimeout(1500)
97127
},
98128
'go-browser-browser': {
129+
/**
130+
* @param {Factory} factory
131+
*/
99132
create: (factory) => Promise.all([
100133
createGo([randomWsAddr], factory),
101134
createProc(['/ip4/127.0.0.1/tcp/24642/ws/p2p-webrtc-star'], factory),
102135
createProc(['/ip4/127.0.0.1/tcp/24642/ws/p2p-webrtc-star'], factory)
103136
]),
137+
/**
138+
* @param {Controller} nodeA
139+
* @param {Controller} nodeB
140+
* @param {Controller} relay
141+
*/
104142
connect: async (nodeA, nodeB, relay) => {
105143
await relay.api.swarm.connect(await getWsAddr(nodeA.api))
106144
await relay.api.swarm.connect(await getWrtcStarAddr(nodeB.api))
107145
// TODO: needed until https://github.com/ipfs/interop/issues/17 is resolved
108146
await delay(5000)
109-
const nodeBCircuitAddr = `${await getWrtcStarAddr(relay.api)}/p2p-circuit/p2p/${nodeB.api.peerId.id}`
147+
const nodeBCircuitAddr = new Multiaddr(`${await getWrtcStarAddr(relay.api)}/p2p-circuit/p2p/${nodeB.peer.id.toString()}`)
110148
await nodeA.api.swarm.connect(nodeBCircuitAddr)
111149
},
112150
skip: () => true // go-ipfs does not know what p2p-webrtc-star is
113151
},
114152
'js-browser-browser': {
153+
/**
154+
* @param {Factory} factory
155+
*/
115156
create: (factory) => Promise.all([
116157
createJs([randomWsAddr], factory),
117158
createProc(['/ip4/127.0.0.1/tcp/24642/ws/p2p-webrtc-star'], factory),
118159
createProc(['/ip4/127.0.0.1/tcp/24642/ws/p2p-webrtc-star'], factory)
119160
]),
161+
/**
162+
* @param {Controller} nodeA
163+
* @param {Controller} nodeB
164+
* @param {Controller} relay
165+
*/
120166
connect: async (nodeA, nodeB, relay) => {
121167
await relay.api.swarm.connect(await getWsAddr(nodeA.api))
122168
await relay.api.swarm.connect(await getWrtcStarAddr(nodeB.api))
123169
// TODO: needed until https://github.com/ipfs/interop/issues/17 is resolved
124170
await delay(3000)
125-
const nodeBCircuitAddr = `${await getWrtcStarAddr(relay.api)}/p2p-circuit/p2p/${nodeB.api.peerId.id}`
171+
const nodeBCircuitAddr = new Multiaddr(`${await getWrtcStarAddr(relay.api)}/p2p-circuit/p2p/${(await nodeB.api.id()).id.toString()}`)
126172
await nodeA.api.swarm.connect(nodeBCircuitAddr)
127173
},
128174
skip: () => isWebWorker // no webrtc support in webworkers
129175
},
130176
'browser-browser-go': {
177+
/**
178+
* @param {Factory} factory
179+
*/
131180
create: (factory) => Promise.all([
132181
createProc(['/ip4/127.0.0.1/tcp/24642/wss/p2p-webrtc-star'], factory),
133182
createProc(['/ip4/127.0.0.1/tcp/24642/wss/p2p-webrtc-star'], factory),
134183
createGo([randomWsAddr], factory)
135184
]),
185+
/**
186+
* @param {Controller} nodeA
187+
* @param {Controller} nodeB
188+
* @param {Controller} relay
189+
*/
136190
connect: async (nodeA, nodeB, relay) => {
137191
await relay.api.swarm.connect(await getWrtcStarAddr(nodeA.api))
138192
await relay.api.swarm.connect(await getWsAddr(nodeB.api))
139193
// TODO: needed until https://github.com/ipfs/interop/issues/17 is resolved
140194
await delay(5000)
141-
const nodeBCircuitAddr = `${await getWrtcStarAddr(relay.api)}/p2p-circuit/p2p/${nodeB.api.peerId.id}`
195+
const nodeBCircuitAddr = new Multiaddr(`${await getWrtcStarAddr(relay.api)}/p2p-circuit/p2p/${(await nodeB.api.id()).id.toString()}`)
142196
await nodeA.api.swarm.connect(nodeBCircuitAddr)
143197
},
144198
skip: () => isWebWorker // no webrtc support in webworkers
145199
},
146200
'browser-browser-js': {
201+
/**
202+
* @param {Factory} factory
203+
*/
147204
create: (factory) => Promise.all([
148205
createProc(['/ip4/127.0.0.1/tcp/24642/wss/p2p-webrtc-star'], factory),
149206
createProc(['/ip4/127.0.0.1/tcp/24642/wss/p2p-webrtc-star'], factory),
150207
createJs([randomWsAddr], factory)
151208
]),
209+
/**
210+
* @param {Controller} nodeA
211+
* @param {Controller} nodeB
212+
* @param {Controller} relay
213+
*/
152214
connect: async (nodeA, nodeB, relay) => {
153215
await relay.api.swarm.connect(await getWrtcStarAddr(nodeA.api))
154216
await relay.api.swarm.connect(await getWsAddr(nodeB.api))
155217
// TODO: needed until https://github.com/ipfs/interop/issues/17 is resolved
156218
await delay(3000)
157-
const nodeBCircuitAddr = `${await getWrtcStarAddr(relay.api)}/p2p-circuit/p2p/${nodeB.api.peerId.id}`
219+
const nodeBCircuitAddr = new Multiaddr(`${await getWrtcStarAddr(relay.api)}/p2p-circuit/p2p/${(await nodeB.api.id()).id.toString()}`)
158220
await nodeA.api.swarm.connect(nodeBCircuitAddr)
159221
},
160222
skip: () => isWebWorker // no webrtc support in webworkers

‎test/circuit/v2/all.js

+10
Original file line numberDiff line numberDiff line change
@@ -4,12 +4,19 @@
44
import { createGo, createGoRelay, randomWsAddr } from '../../utils/circuit.js'
55
import { getRelayV } from '../../utils/relayd.js'
66

7+
/**
8+
* @typedef {import('ipfsd-ctl').Factory} Factory
9+
*/
10+
711
export default {
812

913
// rv2 is a standalone, reference implementation of circuit relay v2
1014
// (https://github.com/libp2p/go-libp2p-relay-daemon)
1115

1216
'go-rv2-go': {
17+
/**
18+
* @param {Factory} factory
19+
*/
1320
create: async (factory) => {
1421
const relay = await getRelayV(2)
1522
return Promise.all([
@@ -35,6 +42,9 @@ export default {
3542
// relay v2 implementation in go-ipfs 0.11+
3643

3744
'go-go-go': {
45+
/**
46+
* @param {Factory} factory
47+
*/
3848
create: async (factory) => {
3949
const relay = await createGoRelay([randomWsAddr], factory)
4050
return Promise.all([

‎test/exchange-files.js

+45-37
Original file line numberDiff line numberDiff line change
@@ -3,24 +3,35 @@
33

44
import randomBytes from 'iso-random-stream/src/random.js'
55
import pretty from 'pretty-bytes'
6-
import randomFs from 'random-fs'
7-
import { promisify } from 'util'
8-
import rimraf from 'rimraf'
9-
import { join } from 'path'
106
import { nanoid } from 'nanoid'
117
import isCi from 'is-ci'
12-
import { isWindows } from 'is-os'
13-
import os from 'os'
148
import concat from 'it-concat'
15-
import globSource from 'ipfs-utils/src/files/glob-source.js'
16-
import { expect } from 'aegir/utils/chai.js'
9+
import { expect } from 'aegir/chai'
1710
import { daemonFactory } from './utils/daemon-factory.js'
1811
import last from 'it-last'
1912

20-
const rmDir = promisify(rimraf)
21-
22-
function tmpDir () {
23-
return join(os.tmpdir(), `ipfs_${nanoid()}`)
13+
/**
14+
* @typedef {import('ipfsd-ctl').Controller} Controller
15+
* @typedef {import('ipfsd-ctl').Factory} Factory
16+
*/
17+
18+
/**
19+
* @param {string} dir
20+
* @param {number} depth
21+
* @param {number} num
22+
*/
23+
async function * randomDir (dir, depth, num) {
24+
const dirs = new Array(depth).fill(0).map(() => nanoid())
25+
26+
for (let i = 0; i < num; i++) {
27+
const index = Math.round(Math.random() * depth)
28+
const path = `${dir}/${dirs.slice(0, index).join('/')}/${nanoid()}.txt`
29+
30+
yield {
31+
path,
32+
content: randomBytes(5)
33+
}
34+
}
2435
}
2536

2637
const KB = 1024
@@ -89,18 +100,20 @@ if (isCi) {
89100
}
90101

91102
const min = 60 * 1000
92-
const timeout = isCi ? 15 * min : 10 * min
103+
const timeout = isCi ? 2 * min : min
93104

94105
describe('exchange files', function () {
95106
this.timeout(timeout)
96107

108+
/** @type {Record<string, ('go' | 'js')[]>} */
97109
const tests = {
98110
'go -> js': ['go', 'js'],
99111
'go -> go2': ['go', 'go'],
100112
'js -> go': ['js', 'go'],
101113
'js -> js2': ['js', 'js']
102114
}
103115

116+
/** @type {Factory} */
104117
let factory
105118

106119
before(async () => {
@@ -109,7 +122,9 @@ describe('exchange files', function () {
109122

110123
Object.keys(tests).forEach((name) => {
111124
describe(name, () => {
125+
/** @type {Controller} */
112126
let daemon1
127+
/** @type {Controller} */
113128
let daemon2
114129

115130
before('spawn nodes', async function () {
@@ -119,24 +134,22 @@ describe('exchange files', function () {
119134
before('connect', async function () {
120135
this.timeout(timeout); // eslint-disable-line
121136

122-
await daemon1.api.swarm.connect(daemon2.api.peerId.addresses[0])
123-
await daemon2.api.swarm.connect(daemon1.api.peerId.addresses[0])
137+
await daemon1.api.swarm.connect(daemon2.peer.addresses[0])
138+
await daemon2.api.swarm.connect(daemon1.peer.addresses[0])
124139

125140
const [peer1, peer2] = await Promise.all([
126141
daemon1.api.swarm.peers(),
127142
daemon2.api.swarm.peers()
128143
])
129144

130-
expect(peer1.map((p) => p.peer.toString())).to.include(daemon2.api.peerId.id)
131-
expect(peer2.map((p) => p.peer.toString())).to.include(daemon1.api.peerId.id)
145+
expect(peer1.map((p) => p.peer.toString())).to.include(daemon2.peer.id.toString())
146+
expect(peer2.map((p) => p.peer.toString())).to.include(daemon1.peer.id.toString())
132147
})
133148

134149
after(() => factory.clean())
135150

136151
describe('cat file', () => sizes.forEach((size) => {
137152
it(`${name}: ${pretty(size)}`, async function () {
138-
this.timeout(timeout)
139-
140153
const data = randomBytes(size)
141154

142155
const { cid } = await daemon1.api.add(data)
@@ -146,36 +159,31 @@ describe('exchange files', function () {
146159
})
147160
}))
148161

149-
if (isWindows()) { return }
150-
// TODO fix dir tests on Windows
151-
152162
describe('get directory', () => depth.forEach((d) => dirs.forEach((num) => {
153163
it(`${name}: depth: ${d}, num: ${num}`, async function () {
154-
this.timeout(timeout)
164+
const dir = `/${nanoid()}`
155165

156-
const dir = tmpDir()
166+
const res = await last(daemon1.api.addAll(randomDir(dir, d, num), {
167+
wrapWithDirectory: true
168+
}))
157169

158-
try {
159-
await randomFs({
160-
path: dir,
161-
depth: d,
162-
number: num
163-
})
170+
if (res == null) {
171+
throw new Error('Nothing added')
172+
}
164173

165-
const { cid } = await last(daemon1.api.addAll(globSource(dir, '**/*'), {
166-
wrapWithDirectory: true
167-
}))
174+
const { cid } = res
168175

169-
await expect(countFiles(cid, daemon2.api)).to.eventually.equal(num)
170-
} finally {
171-
await rmDir(dir)
172-
}
176+
await expect(countFiles(cid, daemon2.api)).to.eventually.equal(num)
173177
})
174178
})))
175179
})
176180
})
177181
})
178182

183+
/**
184+
* @param {import('multiformats/cid').CID} cid
185+
* @param {Controller["api"]} ipfs
186+
*/
179187
async function countFiles (cid, ipfs) {
180188
let fileCount = 0
181189

‎test/files.js

+131-11
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,12 @@ import { daemonFactory } from './utils/daemon-factory.js'
66
import concat from 'it-concat'
77
import all from 'it-all'
88
import last from 'it-last'
9-
import { expect } from 'aegir/utils/chai.js'
9+
import { expect } from 'aegir/chai'
10+
11+
/**
12+
* @typedef {import('ipfsd-ctl').Controller} Controller
13+
* @typedef {import('ipfsd-ctl').Factory} Factory
14+
*/
1015

1116
const SHARD_THRESHOLD = 1000
1217

@@ -30,13 +35,30 @@ const jsOptions = {
3035
args: ['--enable-sharding-experiment']
3136
}
3237

38+
/**
39+
* @param {Controller} daemon
40+
* @param {string} path
41+
* @param {Parameters<Controller["api"]["files"]["mkdir"]>[1]} [options]
42+
*/
3343
const createDirectory = (daemon, path, options) => {
3444
return daemon.api.files.mkdir(path, options)
3545
}
3646

47+
/**
48+
* @param {Controller} daemon
49+
* @param {Awaited<ReturnType<Controller["api"]["add"]>>} [file]
50+
*/
3751
async function checkNodeTypes (daemon, file) {
52+
if (file == null) {
53+
throw new Error('No file specified')
54+
}
55+
3856
const node = await daemon.api.object.get(file.cid)
3957

58+
if (node.Data == null) {
59+
throw new Error('No data found on pb node')
60+
}
61+
4062
const meta = UnixFS.unmarshal(node.Data)
4163

4264
expect(meta.type).to.equal('file')
@@ -45,13 +67,22 @@ async function checkNodeTypes (daemon, file) {
4567
return Promise.all(
4668
node.Links.map(async (link) => {
4769
const child = await daemon.api.object.get(link.Hash)
70+
71+
if (child.Data == null) {
72+
throw new Error('No data found on pb node')
73+
}
74+
4875
const childMeta = UnixFS.unmarshal(child.Data)
4976

5077
expect(childMeta.type).to.equal('raw')
5178
})
5279
)
5380
}
5481

82+
/**
83+
* @param {Controller} daemon
84+
* @param {Uint8Array} data
85+
*/
5586
async function addFile (daemon, data) {
5687
const fileName = 'test-file'
5788

@@ -72,6 +103,9 @@ function createDataStream (size = 262144) {
72103
}())
73104
}
74105

106+
/**
107+
* @param {...any} ops
108+
*/
75109
const compare = async (...ops) => {
76110
expect(ops).to.have.property('length').that.is.above(1)
77111

@@ -84,6 +118,10 @@ const compare = async (...ops) => {
84118
results.forEach(res => expect(res).to.deep.equal(result))
85119
}
86120

121+
/**
122+
* @param {string} expectedMessage
123+
* @param {...any} ops
124+
*/
87125
const compareErrors = async (expectedMessage, ...ops) => {
88126
expect(ops).to.have.property('length').that.is.above(1)
89127

@@ -92,7 +130,7 @@ const compareErrors = async (expectedMessage, ...ops) => {
92130
try {
93131
await op
94132
throw new ExpectedError('Expected operation to fail')
95-
} catch (error) {
133+
} catch (/** @type {any} */ error) {
96134
if (error instanceof ExpectedError) {
97135
throw error
98136
}
@@ -113,18 +151,24 @@ const compareErrors = async (expectedMessage, ...ops) => {
113151

114152
const result = results.pop()
115153

154+
if (result == null) {
155+
throw new Error('No result found')
156+
}
157+
116158
// all implementations should have the same error code
117159
results.forEach(res => {
118160
expect(res).to.have.property('code', result.code)
119161
})
120162
}
121163

122164
describe('files', function () {
123-
this.timeout(500 * 1000)
165+
this.timeout(500e3)
124166

167+
/** @type {Controller} */
125168
let go
169+
/** @type {Controller} */
126170
let js
127-
171+
/** @type {Factory} */
128172
let factory
129173

130174
before(async () => {
@@ -145,6 +189,9 @@ describe('files', function () {
145189
after(() => factory.clean())
146190

147191
it('returns an error when reading non-existent files', () => {
192+
/**
193+
* @param {Controller} daemon
194+
*/
148195
const readNonExistentFile = (daemon) => {
149196
return concat(daemon.api.files.read(`/i-do-not-exist-${Math.random()}`))
150197
}
@@ -157,6 +204,9 @@ describe('files', function () {
157204
})
158205

159206
it('returns an error when writing deeply nested files and the parents do not exist', () => {
207+
/**
208+
* @param {Controller} daemon
209+
*/
160210
const writeNonExistentFile = (daemon) => {
161211
return daemon.api.files.write(`/foo-${Math.random()}/bar-${Math.random()}/baz-${Math.random()}/i-do-not-exist-${Math.random()}`, Uint8Array.from([0, 1, 2, 3]))
162212
}
@@ -174,8 +224,12 @@ describe('files', function () {
174224
// https://github.com/ipfs/js-ipfs-http-client/blob/d7eb0e8ffb15e207a8a6062e292a3b5babf35a9e/src/lib/error-handler.js#L12-L23
175225
it.skip('uses raw nodes for leaf data', () => {
176226
const data = randomBytes(1024 * 300)
227+
/**
228+
* @param {Controller} daemon
229+
*/
177230
const testLeavesAreRaw = async (daemon) => {
178231
const file = await addFile(daemon, data)
232+
// @ts-expect-error types do not have sufficient overlap
179233
await checkNodeTypes(daemon, file)
180234
}
181235

@@ -187,6 +241,9 @@ describe('files', function () {
187241

188242
it('errors when creating the same directory twice', () => {
189243
const path = `/test-dir-${Math.random()}`
244+
/**
245+
* @param {Controller} daemon
246+
*/
190247
const createSameDirectory = async (daemon) => {
191248
await createDirectory(daemon, path)
192249
await createDirectory(daemon, path)
@@ -201,6 +258,9 @@ describe('files', function () {
201258

202259
it('does not error when creating the same directory twice and parents option is passed', () => {
203260
const path = `/test-dir-${Math.random()}`
261+
/**
262+
* @param {Controller} daemon
263+
*/
204264
const createSameDirectory = async (daemon) => {
205265
await createDirectory(daemon, path)
206266
await createDirectory(daemon, path, { parents: true })
@@ -214,6 +274,9 @@ describe('files', function () {
214274

215275
it('errors when creating the root directory', () => {
216276
const path = '/'
277+
/**
278+
* @param {Controller} daemon
279+
*/
217280
const createSameDirectory = async (daemon) => {
218281
await createDirectory(daemon, path)
219282
await createDirectory(daemon, path)
@@ -227,33 +290,66 @@ describe('files', function () {
227290
})
228291

229292
describe('has the same hashes for', () => {
293+
/**
294+
* @param {Controller} daemon
295+
* @param {Parameters<Controller["api"]["add"]>[0]} data
296+
* @param {Parameters<Controller["api"]["add"]>[1]} options
297+
*/
230298
const testHashesAreEqual = async (daemon, data, options = {}) => {
231299
const { cid } = await daemon.api.add(data, options)
232300

233301
return cid
234302
}
235303

304+
/**
305+
* @param {Controller} daemon
306+
* @param {Parameters<Controller["api"]["addAll"]>[0]} data
307+
* @param {Parameters<Controller["api"]["addAll"]>[1]} options
308+
*/
236309
const testDirectoryHashesAreEqual = async (daemon, data, options = {}) => {
237-
const { cid } = await last(daemon.api.addAll(data, options))
310+
const res = await last(daemon.api.addAll(data, options))
311+
312+
if (res == null) {
313+
throw new Error('Nothing added')
314+
}
315+
316+
const { cid } = res
238317

239318
return cid
240319
}
241320

242-
const _writeData = async (daemon, initialData, newData, options) => {
321+
/**
322+
* @param {Controller} daemon
323+
* @param {Parameters<Controller["api"]["files"]["write"]>[1]} initialData
324+
* @param {Parameters<Controller["api"]["files"]["write"]>[1]} newData
325+
* @param {Parameters<Controller["api"]["files"]["write"]>[2]} options
326+
*/
327+
const _writeData = async (daemon, initialData, newData, options = {}) => {
243328
const fileName = `file-${Math.random()}.txt`
244329

245330
await daemon.api.files.write(`/${fileName}`, initialData, { create: true })
246-
const files = await all(daemon.api.files.ls('/'))
331+
await daemon.api.files.write(`/${fileName}`, newData, options)
332+
const { cid } = await daemon.api.files.stat(`/${fileName}`)
247333

248-
return files.filter(file => file.name === fileName).pop().cid
334+
return cid.toString()
249335
}
250336

337+
/**
338+
* @param {Controller} daemon
339+
* @param {Uint8Array} initialData
340+
* @param {Uint8Array} appendedData
341+
*/
251342
const appendData = (daemon, initialData, appendedData) => {
252343
return _writeData(daemon, initialData, appendedData, {
253344
offset: initialData.length
254345
})
255346
}
256347

348+
/**
349+
* @param {Controller} daemon
350+
* @param {Uint8Array} initialData
351+
* @param {Uint8Array} newData
352+
*/
257353
const overwriteData = (daemon, initialData, newData) => {
258354
return _writeData(daemon, initialData, newData, {
259355
offset: 0
@@ -287,7 +383,7 @@ describe('files', function () {
287383
)
288384
})
289385

290-
it('files that have had data appended', () => {
386+
it.skip('files that have had data appended', () => {
291387
const initialData = randomBytes(1024 * 300)
292388
const appendedData = randomBytes(1024 * 300)
293389

@@ -310,6 +406,7 @@ describe('files', function () {
310406

311407
it('small files with CIDv1', () => {
312408
const data = Uint8Array.from([0x00, 0x01, 0x02])
409+
/** @type {Parameters<Controller["api"]["add"]>[1]} */
313410
const options = {
314411
cidVersion: 1
315412
}
@@ -322,6 +419,7 @@ describe('files', function () {
322419

323420
it('big files with CIDv1', () => {
324421
const data = randomBytes(1024 * 3000)
422+
/** @type {Parameters<Controller["api"]["add"]>[1]} */
325423
const options = {
326424
cidVersion: 1
327425
}
@@ -333,6 +431,7 @@ describe('files', function () {
333431
})
334432

335433
it('trickle DAGs', () => {
434+
/** @type {Parameters<Controller["api"]["add"]>[1]} */
336435
const options = {
337436
cidVersion: 0,
338437
trickle: true,
@@ -423,13 +522,29 @@ describe('files', function () {
423522
})
424523
}
425524

426-
// will operate on sub-shard three levels deep
525+
/**
526+
* will operate on sub-shard three levels deep
527+
*
528+
* @param {Controller} daemon
529+
* @param {Parameters<Controller["api"]["addAll"]>[0]} data
530+
*/
427531
const testHamtShardHashesAreEqual = async (daemon, data) => {
428-
const { cid } = await last(daemon.api.addAll(data))
532+
const res = await last(daemon.api.addAll(data))
533+
534+
if (res == null) {
535+
throw new Error('Nothing added')
536+
}
537+
538+
const { cid } = res
429539

430540
await daemon.api.files.cp(`/ipfs/${cid}`, dir)
431541

432542
const node = await daemon.api.object.get(cid)
543+
544+
if (node.Data == null) {
545+
throw new Error('No data found on pb node')
546+
}
547+
433548
const meta = UnixFS.unmarshal(node.Data)
434549

435550
expect(meta.type).to.equal('hamt-sharded-directory')
@@ -452,6 +567,11 @@ describe('files', function () {
452567

453568
const stats = await daemon.api.files.stat(dir)
454569
const nodeAfterUpdates = await daemon.api.object.get(stats.cid)
570+
571+
if (nodeAfterUpdates.Data == null) {
572+
throw new Error('No data found on pb node')
573+
}
574+
455575
const metaAfterUpdates = UnixFS.unmarshal(nodeAfterUpdates.Data)
456576

457577
expect(metaAfterUpdates.type).to.equal('hamt-sharded-directory')

‎test/ipns-pubsub.js

+113-98
Original file line numberDiff line numberDiff line change
@@ -1,142 +1,157 @@
11
/* eslint-env mocha */
22

3-
import PeerID from 'peer-id'
4-
import { base58btc } from 'multiformats/bases/base58'
5-
import { getIdKeys } from 'ipns'
6-
import last from 'it-last'
7-
import pRetry from 'p-retry'
8-
import { waitFor } from './utils/wait-for.js'
9-
import { expect } from 'aegir/utils/chai.js'
3+
import { peerIdToRoutingKey } from 'ipns'
4+
import { expect } from 'aegir/chai'
105
import { daemonFactory } from './utils/daemon-factory.js'
116
import { toString as uint8ArrayToString } from 'uint8arrays/to-string'
7+
import last from 'it-last'
8+
import { peerIdFromString } from '@libp2p/peer-id'
9+
import defer from 'p-defer'
10+
11+
/**
12+
* @typedef {import('ipfsd-ctl').Controller} Controller
13+
* @typedef {import('ipfsd-ctl').Factory} Factory
14+
*/
1215

1316
const daemonsOptions = {
1417
args: ['--enable-namesys-pubsub'] // enable ipns over pubsub
1518
}
1619

17-
const retryOptions = {
18-
retries: 5
19-
}
20-
2120
const namespace = '/record/'
2221

2322
const ipfsRef = '/ipfs/QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU'
2423

2524
describe('ipns-pubsub', function () {
26-
let nodes = []
25+
/** @type {Controller} */
26+
let goNode0
27+
28+
/** @type {Controller} */
29+
let goNode1
30+
31+
/** @type {Controller} */
32+
let jsNode0
33+
34+
/** @type {Controller} */
35+
let jsNode1
36+
37+
/** @type {Factory} */
2738
let factory
2839

29-
before(async () => {
40+
beforeEach(async () => {
3041
factory = await daemonFactory()
3142
})
3243

3344
// Spawn daemons
34-
before('create the nodes', async function () {
35-
this.timeout(20e3)
36-
nodes = await Promise.all([
37-
factory.spawn({
38-
type: 'go',
39-
test: true,
40-
...daemonsOptions
41-
}),
42-
factory.spawn({
43-
type: 'js',
44-
test: true,
45-
...daemonsOptions
46-
}),
47-
// TODO: go-ipfs needs two nodes in the DHT to be able to publish a record
48-
// Remove this when js-ipfs has a DHT
49-
factory.spawn({
50-
type: 'go',
51-
test: true,
52-
...daemonsOptions
53-
})
54-
])
45+
beforeEach('create the nodes', async function () {
46+
this.timeout(120e3)
47+
48+
goNode0 = await factory.spawn({
49+
type: 'go',
50+
test: true,
51+
...daemonsOptions
52+
})
53+
goNode1 = await factory.spawn({
54+
type: 'go',
55+
test: true,
56+
...daemonsOptions
57+
})
58+
59+
jsNode0 = await factory.spawn({
60+
type: 'js',
61+
test: true,
62+
...daemonsOptions
63+
})
64+
jsNode1 = await factory.spawn({
65+
type: 'js',
66+
test: true,
67+
...daemonsOptions
68+
})
5569
})
5670

5771
// Connect nodes and wait for republish
58-
before('connect the nodes', async function () {
59-
this.timeout(10e3)
60-
// TODO: go-ipfs needs two nodes in the DHT to be able to publish a record
61-
// Remove the second connect when js-ipfs runs a DHT server
62-
await Promise.all([
63-
nodes[0].api.swarm.connect(nodes[1].api.peerId.addresses[0]),
64-
nodes[0].api.swarm.connect(nodes[2].api.peerId.addresses[0])
65-
])
72+
beforeEach('connect the nodes', async function () {
73+
this.timeout(60e3)
74+
await goNode0.api.swarm.connect(goNode1.peer.addresses[0])
75+
await goNode0.api.swarm.connect(jsNode0.peer.addresses[0])
76+
await goNode0.api.swarm.connect(jsNode1.peer.addresses[0])
77+
78+
await jsNode0.api.swarm.connect(jsNode1.peer.addresses[0])
79+
await jsNode0.api.swarm.connect(goNode1.peer.addresses[0])
6680
})
6781

68-
after(() => factory.clean())
82+
afterEach(() => factory.clean())
6983

7084
it('should get enabled state of pubsub', async function () {
71-
for (const node of nodes) {
72-
const state = await node.api.name.pubsub.state()
73-
expect(state).to.exist()
74-
expect(state.enabled).to.equal(true)
85+
for (const node of [goNode0, goNode1, jsNode0, jsNode1]) {
86+
await expect(node.api.name.pubsub.state()).to.eventually.have.property('enabled', true)
7587
}
7688
})
7789

7890
it('should publish the received record to a go node and a js subscriber should receive it', async function () {
79-
// TODO find out why JS doesn't resolve, might be just missing a DHT
80-
await Promise.all([
81-
subscribeToReceiveByPubsub(nodes[0], nodes[1], nodes[0].api.peerId.id, nodes[1].api.peerId.id),
82-
expect(last(nodes[1].api.name.resolve(nodes[0].api.peerId.id, { stream: false }))).to.eventually.be.rejected.with(/was not found in the network/)
83-
])
91+
await resolveByPubSub(goNode0, jsNode0)
8492
})
8593

8694
it('should publish the received record to a js node and a go subscriber should receive it', async function () {
87-
await Promise.all([
88-
subscribeToReceiveByPubsub(nodes[1], nodes[0], nodes[1].api.peerId.id, nodes[0].api.peerId.id),
89-
last(nodes[0].api.name.resolve(nodes[1].api.peerId.id, { stream: false }))
90-
])
95+
await resolveByPubSub(goNode0, jsNode0)
96+
})
97+
98+
it('should publish the received record to a go node and a go subscriber should receive it', async function () {
99+
await resolveByPubSub(goNode0, goNode1)
100+
})
101+
102+
it('should publish the received record to a js node and a js subscriber should receive it', async function () {
103+
await resolveByPubSub(jsNode0, jsNode1)
91104
})
92105
})
93106

94107
// * IPNS resolve subscription test
95-
// * 1) name.resolve() , which subscribes the topic
96-
// * 2) wait to guarantee the subscription
97-
// * 3) subscribe again just to know until when to wait (inside the scope of the test)
98-
// * 4) wait for the other peer to get notified of the subscription
99-
// * 5) publish new ipns record
100-
// * 6) wait until the record is received in the test scope subscribe
108+
// * 1) name.resolve(), which subscribes the topic
109+
// * 2) check we are subscribed via name.subs
110+
// * 3) subscribe to the actual pubsub topic
111+
// * 4) publish new ipns record
112+
// * 6) ensure we have received an update via pubsub
101113
// * 7) resolve ipns record
102-
const subscribeToReceiveByPubsub = async (nodeA, nodeB, idA, idB) => {
103-
let subscribed = false
104-
function checkMessage (msg) {
105-
subscribed = true
106-
}
107-
108-
const keys = getIdKeys(base58btc.decode(`z${idA}`))
109-
const topic = `${namespace}${uint8ArrayToString(keys.routingKey.uint8Array(), 'base64url')}`
110-
111-
await waitForPeerToSubscribe(nodeB.api, topic)
112-
await nodeB.api.pubsub.subscribe(topic, checkMessage)
113-
await waitForNotificationOfSubscription(nodeA.api, topic, idB)
114-
const res1 = await nodeA.api.name.publish(ipfsRef, { resolve: false })
115-
await waitFor(() => subscribed === true, (50 * 1000))
116-
const res2 = await last(nodeB.api.name.resolve(idA))
117-
118-
expect(PeerID.parse(res1.name).toString()).to.equal(PeerID.parse(idA).toString()) // Published to Node A ID
119-
expect(res2).to.equal(ipfsRef)
120-
}
121-
122-
// wait until a peer know about other peer to subscribe a topic
123-
const waitForNotificationOfSubscription = (daemon, topic, peerId) => pRetry(async () => {
124-
const res = await daemon.pubsub.peers(topic)
114+
/**
115+
* @param {Controller} publisher
116+
* @param {Controller} subscriber
117+
*/
118+
const resolveByPubSub = async (publisher, subscriber) => {
119+
const routingKey = peerIdToRoutingKey(publisher.peer.id)
120+
const topic = `${namespace}${uint8ArrayToString(routingKey, 'base64url')}`
121+
122+
// should not be subscribed to anything
123+
await expect(subscriber.api.name.pubsub.subs()).to.eventually.have.lengthOf(0)
124+
125+
try {
126+
// attempt to resolve the peer id - n.b js throws here, go does not because streaming API errors don't work over http
127+
await last(subscriber.api.name.resolve(publisher.peer.id, {
128+
timeout: 1000
129+
}))
130+
} catch {}
131+
132+
// should now be subscribed to updates for the publisher's peer id
133+
const subs = await subscriber.api.name.pubsub.subs()
134+
expect(subs).to.have.lengthOf(1)
135+
const subbed = peerIdFromString(subs[0].split('/ipns/').pop() || '')
136+
expect(subbed.equals(publisher.peer.id)).to.be.true()
137+
138+
// set up a listener for the pubsub topic for the IPNS key
139+
const deferred = defer()
140+
await subscriber.api.pubsub.subscribe(topic, () => {
141+
deferred.resolve()
142+
})
125143

126-
if (!res || !res.length || !res.includes(peerId)) {
127-
throw new Error('Could not find peer subscribing')
128-
}
129-
}, retryOptions)
144+
// publish an update
145+
const res1 = await publisher.api.name.publish(ipfsRef, { resolve: false })
130146

131-
// Wait until a peer subscribes a topic
132-
const waitForPeerToSubscribe = async (daemon, topic) => {
133-
await pRetry(async () => {
134-
const res = await daemon.pubsub.ls()
147+
// should receive a message on the topic for the IPNS key
148+
await deferred.promise
135149

136-
if (!res || !res.length || !res.includes(topic)) {
137-
throw new Error(`Could not find subscription to ${topic} in "${JSON.stringify(res)}"`)
138-
}
150+
// should succeed and be fast
151+
const res2 = await last(subscriber.api.name.resolve(publisher.peer.id, {
152+
timeout: 1000
153+
}))
139154

140-
return res[0]
141-
}, retryOptions)
155+
expect(peerIdFromString(res1.name).toString()).to.equal(publisher.peer.id.toString()) // Published to Node A ID
156+
expect(res2).to.equal(ipfsRef)
142157
}

‎test/ipns.js

+18-5
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,14 @@ import path from 'path'
55
import { nanoid } from 'nanoid'
66
import delay from 'delay'
77
import last from 'it-last'
8-
import { expect } from 'aegir/utils/chai.js'
8+
import { expect } from 'aegir/chai'
99
import { daemonFactory } from './utils/daemon-factory.js'
1010

11+
/**
12+
* @typedef {import('ipfsd-ctl').Controller} Controller
13+
* @typedef {import('ipfsd-ctl').Factory} Factory
14+
*/
15+
1116
const dir = path.join(os.tmpdir(), nanoid())
1217

1318
const daemonOptions = {
@@ -20,6 +25,10 @@ const daemonOptions = {
2025

2126
const ipfsRef = '/ipfs/QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU'
2227

28+
/**
29+
* @param {Controller} publisherDaemon
30+
* @param {Controller} [resolverDaemon]
31+
*/
2332
const publishAndResolve = async (publisherDaemon, resolverDaemon) => {
2433
let sameDaemon = false
2534

@@ -31,13 +40,16 @@ const publishAndResolve = async (publisherDaemon, resolverDaemon) => {
3140
const stopPublisherAndStartResolverDaemon = async () => {
3241
await publisherDaemon.stop()
3342
await delay(2000)
34-
await resolverDaemon.start()
43+
44+
if (resolverDaemon != null) {
45+
await resolverDaemon.start()
46+
}
3547
}
3648

3749
await publisherDaemon.init()
3850
await publisherDaemon.start()
3951

40-
const nodeId = publisherDaemon.api.peerId.id
52+
const nodeId = publisherDaemon.peer.id
4153

4254
await publisherDaemon.api.name.publish(ipfsRef, {
4355
resolve: false,
@@ -46,7 +58,7 @@ const publishAndResolve = async (publisherDaemon, resolverDaemon) => {
4658

4759
!sameDaemon && await stopPublisherAndStartResolverDaemon()
4860

49-
const res = await last(resolverDaemon.api.name.resolve(nodeId, { local: true }))
61+
const res = await last(resolverDaemon.api.name.resolve(nodeId))
5062
expect(res).to.equal(ipfsRef)
5163

5264
await resolverDaemon.stop()
@@ -56,8 +68,9 @@ const publishAndResolve = async (publisherDaemon, resolverDaemon) => {
5668
}
5769

5870
describe('ipns locally using the same repo across implementations', function () {
59-
this.timeout(160 * 1000)
71+
this.timeout(160e3)
6072

73+
/** @type {Factory} */
6174
let factory
6275

6376
before(async () => {

‎test/kad-dht.js

+177-69
Large diffs are not rendered by default.

‎test/pin.js

+69-9
Original file line numberDiff line numberDiff line change
@@ -5,13 +5,19 @@ import all from 'it-all'
55
import last from 'it-last'
66
import drain from 'it-drain'
77
import { tmpPath, removeAllPins } from './utils/pin-utils.js'
8-
import { expect } from 'aegir/utils/chai.js'
8+
import { expect } from 'aegir/chai'
99
import { daemonFactory } from './utils/daemon-factory.js'
1010

11+
/**
12+
* @typedef {import('ipfsd-ctl').Controller} Controller
13+
* @typedef {import('ipfsd-ctl').Factory} Factory
14+
*/
15+
1116
describe('pin', function () {
12-
this.timeout(60 * 1000)
13-
this.slow(30 * 1000)
17+
this.timeout(60e3)
18+
this.slow(30e3)
1419

20+
/** @type {Factory} */
1521
let factory
1622

1723
before(async () => {
@@ -25,6 +31,10 @@ describe('pin', function () {
2531
}]
2632

2733
let daemons = []
34+
/**
35+
* @param {'go' | 'js'} type
36+
* @param {string} [repoPath]
37+
*/
2838
async function spawnAndStart (type, repoPath = tmpPath()) {
2939
const daemonOptions = {
3040
ipfsOptions: {
@@ -50,6 +60,11 @@ describe('pin', function () {
5060
return daemon
5161
}
5262

63+
/**
64+
* @template T
65+
* @param {(controller: Controller) => Promise<T>} pipeline
66+
* @returns {Promise<[T, T]>}
67+
*/
5368
async function withDaemons (pipeline) {
5469
const goDaemon = await spawnAndStart('go')
5570
await removeAllPins(goDaemon)
@@ -61,16 +76,26 @@ describe('pin', function () {
6176
}
6277

6378
afterEach(async function () {
64-
this.timeout(25 * 1000)
79+
this.timeout(25e3)
6580
await factory.clean()
6681
daemons = []
6782
})
6883

6984
describe('pin add', function () {
7085
// Pinning a large file recursively results in the same pins
7186
it('pin recursively', async function () {
87+
/**
88+
* @param {Controller} daemon
89+
*/
7290
async function pipeline (daemon) {
73-
const { cid } = await last(daemon.api.addAll(jupiter, { pin: false }))
91+
const res = await last(daemon.api.addAll(jupiter, { pin: false }))
92+
93+
if (!res) {
94+
throw new Error('Nothing added')
95+
}
96+
97+
const { cid } = res
98+
7499
await daemon.api.pin.add(cid)
75100

76101
return all(daemon.api.pin.ls())
@@ -85,8 +110,18 @@ describe('pin', function () {
85110

86111
// Pinning a large file with recursive=false results in the same direct pin
87112
it('pin directly', async function () {
113+
/**
114+
* @param {Controller} daemon
115+
*/
88116
async function pipeline (daemon) {
89-
const { cid } = await last(daemon.api.addAll(jupiter, { pin: false }))
117+
const res = await last(daemon.api.addAll(jupiter, { pin: false }))
118+
119+
if (!res) {
120+
throw new Error('Nothing added')
121+
}
122+
123+
const { cid } = res
124+
90125
await daemon.api.pin.add(cid, { recursive: false })
91126

92127
return all(daemon.api.pin.ls())
@@ -104,8 +139,18 @@ describe('pin', function () {
104139
// removing a root pin removes children as long as they're
105140
// not part of another pin's dag
106141
it('pin recursively, remove the root pin', async function () {
142+
/**
143+
* @param {Controller} daemon
144+
*/
107145
async function pipeline (daemon) {
108-
const { cid } = await last(daemon.api.addAll(jupiter))
146+
const res = await last(daemon.api.addAll(jupiter))
147+
148+
if (!res) {
149+
throw new Error('Nothing added')
150+
}
151+
152+
const { cid } = res
153+
109154
await daemon.api.pin.rm(cid)
110155

111156
return all(daemon.api.pin.ls())
@@ -120,9 +165,21 @@ describe('pin', function () {
120165
// When a pin contains the root of another pin and we remove it, it is
121166
// instead kept but its type is changed to 'indirect'
122167
it('remove a child shared by multiple pins', async function () {
168+
/** @type {Awaited<ReturnType<Controller["api"]["files"]["stat"]>>} */
123169
let jupiterDir
170+
171+
/**
172+
* @param {Controller} daemon
173+
*/
124174
async function pipeline (daemon) {
125-
const { cid } = await last(daemon.api.addAll(jupiter, { pin: false, wrapWithDirectory: true }))
175+
const res = await last(daemon.api.addAll(jupiter, { pin: false, wrapWithDirectory: true }))
176+
177+
if (!res) {
178+
throw new Error('Nothing added')
179+
}
180+
181+
const { cid } = res
182+
126183
jupiterDir = jupiterDir || await daemon.api.files.stat(`/ipfs/${cid}/test/fixtures/planets`)
127184

128185
// by separately pinning all the DAG nodes created when adding,
@@ -149,12 +206,15 @@ describe('pin', function () {
149206
expect(jsPins).to.deep.include.members(goPins)
150207

151208
const dirPin = goPins.find(pin => pin.cid.equals(jupiterDir.cid))
152-
expect(dirPin.type).to.eql('indirect')
209+
expect(dirPin).to.have.property('type', 'indirect')
153210
})
154211
})
155212

156213
describe('ls', function () {
157214
it('print same pins', async function () {
215+
/**
216+
* @param {Controller} daemon
217+
*/
158218
async function pipeline (daemon) {
159219
await drain(daemon.api.addAll(jupiter))
160220

‎test/pubsub.js

+127-51
Original file line numberDiff line numberDiff line change
@@ -2,20 +2,32 @@
22
/* eslint-env mocha */
33

44
import pRetry from 'p-retry'
5-
import { expect } from 'aegir/utils/chai.js'
5+
import { expect } from 'aegir/chai'
66
import { daemonFactory } from './utils/daemon-factory.js'
77
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
88
import { equals as uint8ArrayEquals } from 'uint8arrays/equals'
9+
import { isPeerId } from '@libp2p/interface-peer-id'
10+
import pTimeout from 'p-timeout'
11+
12+
/**
13+
* @typedef {import('ipfsd-ctl').Controller} Controller
14+
* @typedef {import('ipfsd-ctl').Factory} Factory
15+
*/
916

1017
const retryOptions = {
1118
retries: 5
1219
}
1320

21+
/**
22+
* @param {string} topic
23+
* @param {Controller["peer"]} peer
24+
* @param {Controller} daemon
25+
*/
1426
const waitForTopicPeer = (topic, peer, daemon) => {
1527
return pRetry(async () => {
1628
const peers = await daemon.api.pubsub.peers(topic)
1729

18-
if (!peers.includes(peer.id)) {
30+
if (!peers.map(p => p.toString()).includes(peer.id.toString())) {
1931
throw new Error(`Could not find peer ${peer.id}`)
2032
}
2133
}, retryOptions)
@@ -28,8 +40,9 @@ const daemonOptions = {
2840
const timeout = 20e3
2941

3042
describe('pubsub', function () {
31-
this.timeout(60 * 1000)
43+
this.timeout(60e3)
3244

45+
/** @type {Record<string, ((fac: Factory) => Promise<Controller>)[]>} */
3346
const tests = {
3447
'publish from Go, subscribe on Go': [(factory) => factory.spawn({ ...daemonOptions, type: 'go' }), (factory) => factory.spawn({ ...daemonOptions, type: 'go' })],
3548
'publish from JS, subscribe on JS': [(factory) => factory.spawn({ type: 'js' }), (factory) => factory.spawn({ type: 'js' })],
@@ -39,9 +52,11 @@ describe('pubsub', function () {
3952

4053
Object.keys(tests).forEach((name) => {
4154
describe(name, function () {
55+
/** @type {Controller} */
4256
let daemon1
57+
/** @type {Controller} */
4358
let daemon2
44-
59+
/** @type {Factory} */
4560
let factory
4661

4762
before(async () => {
@@ -51,23 +66,20 @@ describe('pubsub', function () {
5166
after(() => factory.clean())
5267

5368
before('spawn nodes', async function () {
54-
this.timeout(timeout)
5569
;[daemon1, daemon2] = await Promise.all(tests[name].map(fn => fn(factory)))
5670
})
5771

5872
before('connect', async function () {
59-
this.timeout(timeout)
60-
61-
await daemon1.api.swarm.connect(daemon2.api.peerId.addresses[0])
62-
await daemon2.api.swarm.connect(daemon1.api.peerId.addresses[0])
73+
await daemon1.api.swarm.connect(daemon2.peer.addresses[0])
74+
await daemon2.api.swarm.connect(daemon1.peer.addresses[0])
6375

6476
const peers = await Promise.all([
6577
daemon1.api.swarm.peers(),
6678
daemon2.api.swarm.peers()
6779
])
6880

69-
expect(peers[0].map((p) => p.peer.toString())).to.include(daemon2.api.peerId.id)
70-
expect(peers[1].map((p) => p.peer.toString())).to.include(daemon1.api.peerId.id)
81+
expect(peers[0].map((p) => p.peer.toString())).to.include(daemon2.peer.id.toString())
82+
expect(peers[1].map((p) => p.peer.toString())).to.include(daemon1.peer.id.toString())
7183
})
7284

7385
after(() => factory.clean())
@@ -76,103 +88,167 @@ describe('pubsub', function () {
7688
const data = uint8ArrayFromString('hello world')
7789
const topic = 'pubsub-ascii'
7890

79-
const subscriber = () => new Promise((resolve) => {
91+
/** @type {() => Promise<void>} */
92+
const subscriber = () => new Promise((resolve, reject) => {
8093
daemon2.api.pubsub.subscribe(topic, (msg) => {
81-
expect(uint8ArrayEquals(data, msg.data)).to.be.true()
82-
expect(msg).to.have.property('seqno')
83-
expect(msg.seqno).to.be.an.instanceof(Uint8Array)
84-
expect(msg).to.have.property('topicIDs').and.to.include(topic)
85-
expect(msg).to.have.property('from', daemon1.api.peerId.id)
86-
resolve()
94+
try {
95+
if (msg.type !== 'signed') {
96+
throw new Error('Message was unsigned')
97+
}
98+
99+
expect(uint8ArrayEquals(data, msg.data)).to.be.true()
100+
expect(msg).to.have.property('sequenceNumber')
101+
expect(msg.sequenceNumber).to.be.a('bigint')
102+
expect(msg).to.have.property('topic', topic)
103+
expect(isPeerId(msg.from)).to.be.true()
104+
expect(msg.from.toString()).to.equal(daemon1.peer.id.toString())
105+
resolve()
106+
} catch (err) {
107+
reject(err)
108+
}
87109
})
88110
})
89111

90112
const publisher = async () => {
91-
await waitForTopicPeer(topic, daemon2.api.peerId, daemon1)
113+
await waitForTopicPeer(topic, daemon2.peer, daemon1)
92114
await daemon1.api.pubsub.publish(topic, data)
93115
}
94116

95117
return Promise.all([
96-
subscriber(),
97-
publisher()
118+
pTimeout(subscriber(), {
119+
milliseconds: timeout,
120+
message: 'subscriber timed out'
121+
}),
122+
pTimeout(publisher(), {
123+
milliseconds: timeout,
124+
message: 'publisher timed out'
125+
})
98126
])
99127
})
100128

101129
it('should exchange non ascii data', function () {
102130
const data = uint8ArrayFromString('你好世界 zażółć gęślą jaźń')
103131
const topic = 'pubsub-non-ascii'
104132

105-
const subscriber = () => new Promise((resolve) => {
133+
/** @type {() => Promise<void>} */
134+
const subscriber = () => new Promise((resolve, reject) => {
106135
daemon2.api.pubsub.subscribe(topic, (msg) => {
107-
expect(uint8ArrayEquals(data, msg.data)).to.be.true()
108-
expect(msg).to.have.property('seqno')
109-
expect(msg.seqno).to.be.an.instanceof(Uint8Array)
110-
expect(msg).to.have.property('topicIDs').and.to.include(topic)
111-
expect(msg).to.have.property('from', daemon1.api.peerId.id)
112-
resolve()
136+
try {
137+
if (msg.type !== 'signed') {
138+
throw new Error('Message was unsigned')
139+
}
140+
141+
expect(uint8ArrayEquals(data, msg.data)).to.be.true()
142+
expect(msg).to.have.property('sequenceNumber')
143+
expect(msg.sequenceNumber).to.be.a('bigint')
144+
expect(msg).to.have.property('topic', topic)
145+
expect(isPeerId(msg.from)).to.be.true()
146+
expect(msg.from.toString()).to.equal(daemon1.peer.id.toString())
147+
resolve()
148+
} catch (err) {
149+
reject(err)
150+
}
113151
})
114152
})
115153

116154
const publisher = async () => {
117-
await waitForTopicPeer(topic, daemon2.api.peerId, daemon1)
155+
await waitForTopicPeer(topic, daemon2.peer, daemon1)
118156
await daemon1.api.pubsub.publish(topic, data)
119157
}
120158

121159
return Promise.all([
122-
subscriber(),
123-
publisher()
160+
pTimeout(subscriber(), {
161+
milliseconds: timeout,
162+
message: 'subscriber timed out'
163+
}),
164+
pTimeout(publisher(), {
165+
milliseconds: timeout,
166+
message: 'publisher timed out'
167+
})
124168
])
125169
})
126170

127171
it('should exchange binary data', function () {
128172
const data = uint8ArrayFromString('a36161636179656162830103056164a16466666666f400010203040506070809', 'base16')
129173
const topic = 'pubsub-binary'
130174

131-
const subscriber = () => new Promise((resolve) => {
175+
/** @type {() => Promise<void>} */
176+
const subscriber = () => new Promise((resolve, reject) => {
132177
daemon2.api.pubsub.subscribe(topic, (msg) => {
133-
expect(uint8ArrayEquals(data, msg.data)).to.be.true()
134-
expect(msg).to.have.property('seqno')
135-
expect(msg.seqno).to.be.an.instanceof(Uint8Array)
136-
expect(msg).to.have.property('topicIDs').and.to.include(topic)
137-
expect(msg).to.have.property('from', daemon1.api.peerId.id)
138-
resolve()
178+
try {
179+
if (msg.type !== 'signed') {
180+
throw new Error('Message was unsigned')
181+
}
182+
183+
expect(uint8ArrayEquals(data, msg.data)).to.be.true()
184+
expect(msg).to.have.property('sequenceNumber')
185+
expect(msg.sequenceNumber).to.be.a('bigint')
186+
expect(msg).to.have.property('topic', topic)
187+
expect(isPeerId(msg.from)).to.be.true()
188+
expect(msg.from.toString()).to.equal(daemon1.peer.id.toString())
189+
resolve()
190+
} catch (err) {
191+
reject(err)
192+
}
139193
})
140194
})
141195

142196
const publisher = async () => {
143-
await waitForTopicPeer(topic, daemon2.api.peerId, daemon1)
197+
await waitForTopicPeer(topic, daemon2.peer, daemon1)
144198
await daemon1.api.pubsub.publish(topic, data)
145199
}
146200

147201
return Promise.all([
148-
subscriber(),
149-
publisher()
202+
pTimeout(subscriber(), {
203+
milliseconds: timeout,
204+
message: 'subscriber timed out'
205+
}),
206+
pTimeout(publisher(), {
207+
milliseconds: timeout,
208+
message: 'publisher timed out'
209+
})
150210
])
151211
})
152212

153213
it('should exchange data over a topic with unicode and newlines', function () {
154214
const data = uint8ArrayFromString('你好世界\nzażółć\r\ngęślą\njaźń')
155215
const topic = 'pubsub\n你好世界\r\njaźń'
156216

157-
const subscriber = () => new Promise((resolve) => {
217+
/** @type {() => Promise<void>} */
218+
const subscriber = () => new Promise((resolve, reject) => {
158219
daemon2.api.pubsub.subscribe(topic, (msg) => {
159-
expect(uint8ArrayEquals(data, msg.data)).to.be.true()
160-
expect(msg).to.have.property('seqno')
161-
expect(msg.seqno).to.be.an.instanceof(Uint8Array)
162-
expect(msg).to.have.property('topicIDs').and.to.include(topic)
163-
expect(msg).to.have.property('from', daemon1.api.peerId.id)
164-
resolve()
220+
try {
221+
if (msg.type !== 'signed') {
222+
throw new Error('Message was unsigned')
223+
}
224+
225+
expect(uint8ArrayEquals(data, msg.data)).to.be.true()
226+
expect(msg).to.have.property('sequenceNumber')
227+
expect(msg.sequenceNumber).to.be.a('bigint')
228+
expect(msg).to.have.property('topic', topic)
229+
expect(isPeerId(msg.from)).to.be.true()
230+
expect(msg.from.toString()).to.equal(daemon1.peer.id.toString())
231+
resolve()
232+
} catch (err) {
233+
reject(err)
234+
}
165235
})
166236
})
167237

168238
const publisher = async () => {
169-
await waitForTopicPeer(topic, daemon2.api.peerId, daemon1)
239+
await waitForTopicPeer(topic, daemon2.peer, daemon1)
170240
await daemon1.api.pubsub.publish(topic, data)
171241
}
172242

173243
return Promise.all([
174-
subscriber(),
175-
publisher()
244+
pTimeout(subscriber(), {
245+
milliseconds: timeout,
246+
message: 'subscriber timed out'
247+
}),
248+
pTimeout(publisher(), {
249+
milliseconds: timeout,
250+
message: 'publisher timed out'
251+
})
176252
])
177253
})
178254
})

‎test/utils/circuit.js

+89-35
Original file line numberDiff line numberDiff line change
@@ -2,15 +2,24 @@
22
import delay from 'delay'
33
import randomBytes from 'iso-random-stream/src/random.js'
44
import concat from 'it-concat'
5-
import WS from 'libp2p-websockets'
5+
import { WebSockets } from '@libp2p/websockets'
66
import pRetry from 'p-retry'
7-
import filters from 'libp2p-websockets/src/filters.js'
8-
import { expect } from 'aegir/utils/chai.js'
7+
import * as filters from '@libp2p/websockets/filters'
8+
import { expect } from 'aegir/chai'
9+
import { Multiaddr } from '@multiformats/multiaddr'
910

10-
const transportKey = WS.prototype[Symbol.toStringTag]
11+
/**
12+
* @typedef {import('ipfsd-ctl').Controller} Controller
13+
* @typedef {import('ipfsd-ctl').Factory} Factory
14+
*/
1115

1216
export const randomWsAddr = '/ip4/127.0.0.1/tcp/0/ws'
1317

18+
/**
19+
* @param {string[]} addrs
20+
* @param {Factory} factory
21+
* @param {Controller} [relay]
22+
*/
1423
export function createProc (addrs, factory, relay) {
1524
if (relay) {
1625
throw new Error('createProc missing support for static relay v2')
@@ -33,21 +42,21 @@ export function createProc (addrs, factory, relay) {
3342
}
3443
},
3544
libp2p: {
36-
config: {
37-
transport: {
38-
[transportKey]: {
39-
filter: filters.all
40-
}
41-
},
42-
dht: {
43-
enabled: false
44-
}
45-
}
45+
transports: [
46+
new WebSockets({
47+
filter: filters.all
48+
})
49+
]
4650
}
4751
}
4852
})
4953
}
5054

55+
/**
56+
* @param {string[]} addrs
57+
* @param {Factory} factory
58+
* @param {Controller} [relay]
59+
*/
5160
export function createJs (addrs, factory, relay) {
5261
if (relay) {
5362
throw new Error('createJs missing support for static relay v2')
@@ -67,20 +76,19 @@ export function createJs (addrs, factory, relay) {
6776
hop: {
6877
enabled: true
6978
}
70-
},
71-
libp2p: {
72-
config: {
73-
dht: {
74-
enabled: false
75-
}
76-
}
7779
}
7880
}
7981
}
8082
})
8183
}
8284

83-
// creates "private" go-ipfs node which is uses static relay if specified
85+
/**
86+
* creates "private" go-ipfs node which is uses static relay if specified
87+
*
88+
* @param {string[]} addrs
89+
* @param {Factory} factory
90+
* @param {Controller} [relay]
91+
*/
8492
export async function createGo (addrs, factory, relay) {
8593
let StaticRelays
8694
if (relay) {
@@ -121,7 +129,12 @@ export async function createGo (addrs, factory, relay) {
121129
})
122130
}
123131

124-
// creates "publicly diallable" go-ipfs running a relay service
132+
/**
133+
* creates "publicly diallable" go-ipfs running a relay service
134+
*
135+
* @param {string[]} addrs
136+
* @param {Factory} factory
137+
*/
125138
export function createGoRelay (addrs, factory) {
126139
return factory.spawn({
127140
type: 'go',
@@ -157,19 +170,29 @@ export function createGoRelay (addrs, factory) {
157170
})
158171
}
159172

173+
/**
174+
* @param {Factory} factory
175+
*/
160176
export function clean (factory) {
161177
return factory.clean()
162178
}
163179

164180
const data = randomBytes(128)
165181

182+
/**
183+
* @param {Controller} nodeA
184+
* @param {Controller} nodeB
185+
*/
166186
export async function send (nodeA, nodeB) {
167187
const { cid } = await nodeA.api.add(data)
168188
const buffer = await concat(nodeB.api.cat(cid))
169189

170190
expect(buffer.slice()).to.deep.equal(data)
171191
}
172192

193+
/**
194+
* @param {Controller["api"]} api
195+
*/
173196
export async function getWsAddr (api) {
174197
return await pRetry(async () => {
175198
const id = await api.id()
@@ -184,10 +207,13 @@ export async function getWsAddr (api) {
184207
throw new Error(`No ws address found in ${id.addresses}`)
185208
}
186209

187-
return result
210+
return new Multiaddr(result)
188211
})
189212
}
190213

214+
/**
215+
* @param {Controller["api"]} api
216+
*/
191217
export async function getWsStarAddr (api) {
192218
return await pRetry(async () => {
193219
const id = await api.id()
@@ -200,10 +226,13 @@ export async function getWsStarAddr (api) {
200226
throw new Error(`No wsstar address found in ${id.addresses}`)
201227
}
202228

203-
return result
229+
return new Multiaddr(result)
204230
})
205231
}
206232

233+
/**
234+
* @param {Controller["api"]} api
235+
*/
207236
export async function getWrtcStarAddr (api) {
208237
return await pRetry(async () => {
209238
const id = await api.id()
@@ -216,10 +245,13 @@ export async function getWrtcStarAddr (api) {
216245
throw new Error(`No webrtcstar address found in ${id.addresses}`)
217246
}
218247

219-
return result
248+
return new Multiaddr(result)
220249
})
221250
}
222251

252+
/**
253+
* @param {Controller["api"]} api
254+
*/
223255
export async function getTcpAddr (api) {
224256
return await pRetry(async () => {
225257
const id = await api.id()
@@ -232,40 +264,62 @@ export async function getTcpAddr (api) {
232264
throw new Error(`No TCP address found in ${id.addresses}`)
233265
}
234266

235-
return result
267+
return new Multiaddr(result)
236268
})
237269
}
238270

271+
/**
272+
* @param {Controller} nodeA
273+
* @param {Controller} nodeB
274+
* @param {Controller} relay
275+
* @param {number} timeout
276+
*/
239277
export async function connect (nodeA, nodeB, relay, timeout = 1000) {
240278
const relayWsAddr = await getWsAddr(relay.api)
241-
const nodeAId = nodeA.api.peerId.id
242-
const nodeBId = nodeB.api.peerId.id
279+
const nodeAId = (await nodeA.api.id()).id
280+
const nodeBId = (await nodeB.api.id()).id
243281

244-
if (process.env.DEBUG) console.log(`connect A (${nodeAId}) to relay at`, relayWsAddr)
282+
if (process.env.DEBUG) console.log(`connect A (${nodeAId.toString()}) to relay at`, relayWsAddr.toString())
245283
await nodeA.api.swarm.connect(relayWsAddr)
246284

247-
if (process.env.DEBUG) console.log(`connect B (${nodeBId}) to relay at`, relayWsAddr)
285+
if (process.env.DEBUG) console.log(`connect B (${nodeBId.toString()}) to relay at`, relayWsAddr.toString())
248286
await nodeB.api.swarm.connect(relayWsAddr)
249287

250288
// TODO: needed until https://github.com/ipfs/interop/issues/17 is resolved
251289
await delay(timeout)
252-
const nodeBCircuitAddr = `${relayWsAddr}/p2p-circuit/p2p/${nodeB.api.peerId.id}`
253-
if (process.env.DEBUG) console.log('connect A to B over circuit', nodeBCircuitAddr)
290+
const nodeBCircuitAddr = new Multiaddr(`${relayWsAddr}/p2p-circuit/p2p/${nodeBId.toString()}`)
291+
if (process.env.DEBUG) console.log('connect A to B over circuit', nodeBCircuitAddr.toString())
254292
await nodeA.api.swarm.connect(nodeBCircuitAddr)
255293

256294
if (process.env.DEBUG) {
257295
console.log('done!')
296+
297+
/**
298+
* @param {string} name
299+
* @param {Controller} node
300+
*/
258301
const listConnections = async (name, node) => {
259302
const peers = await node.api.swarm.peers()
260-
console.log(`${name} has connections`, peers.map(p => `${p.addr.toString()}/p2p/${p.peer}`))
303+
console.log(`${name} has connections`, peers.map(p => p.addr.toString()))
261304
}
262305
await listConnections('nodeA', nodeA)
263306
await listConnections('nodeB', nodeB)
264307
}
265308
}
266309

310+
/**
311+
* @param {number} timeout
312+
* @returns
313+
*/
267314
export function connWithTimeout (timeout) {
268-
return (nodeA, nodeB, relay) => {
315+
/**
316+
* @param {Controller} nodeA
317+
* @param {Controller} nodeB
318+
* @param {Controller} relay
319+
*/
320+
const connectControllers = (nodeA, nodeB, relay) => {
269321
return connect(nodeA, nodeB, relay, timeout)
270322
}
323+
324+
return connectControllers
271325
}

‎test/utils/daemon-factory.js

+4-3
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,19 @@
1-
21
import { createFactory } from 'ipfsd-ctl'
3-
import isNode from 'detect-node'
2+
import { isNode, isElectronMain } from 'wherearewe'
43

54
export async function daemonFactory () {
65
let ipfsHttpModule
76
let ipfsModule
87

98
try {
9+
// @ts-expect-error env var could be undefined
1010
ipfsHttpModule = await import(process.env.IPFS_JS_HTTP_MODULE)
1111
} catch {
1212
ipfsHttpModule = await import('ipfs-http-client')
1313
}
1414

1515
try {
16+
// @ts-expect-error env var could be undefined
1617
ipfsModule = await import(process.env.IPFS_JS_MODULE)
1718
} catch {
1819
ipfsModule = await import('ipfs')
@@ -41,7 +42,7 @@ export async function daemonFactory () {
4142
* @param {{ path: () => string }} [module]
4243
*/
4344
async function findBin (envVar, moduleName, module) {
44-
if (!isNode) {
45+
if (!isNode && !isElectronMain) {
4546
return
4647
}
4748

‎test/utils/is-windows.browser.js

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
export default false

‎test/utils/is-windows.js

+3
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
import os from 'os'
2+
3+
export default os.platform() === 'win32'

‎test/utils/pin-utils.js

+4-1
Original file line numberDiff line numberDiff line change
@@ -4,11 +4,14 @@ import { nanoid } from 'nanoid'
44
import all from 'it-all'
55
import drain from 'it-drain'
66

7+
/**
8+
* @param {import('ipfsd-ctl').Controller} daemon
9+
*/
710
export async function removeAllPins (daemon) {
811
const pins = await all(daemon.api.pin.ls())
912
const rootPins = pins.filter(pin => pin.type === 'recursive' || pin.type === 'direct')
1013

11-
await drain(daemon.api.pin.rmAll(rootPins.map(pin => pin.cid)))
14+
await drain(daemon.api.pin.rmAll(rootPins.map(pin => ({ cid: pin.cid }))))
1215

1316
return daemon
1417
}

‎test/utils/relayd.js

+87-17
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,11 @@
1-
import isNode from 'detect-node'
1+
import { isNode, isElectronMain } from 'wherearewe'
22
import fs from 'fs'
33
import path from 'path'
4-
import { command } from 'execa'
4+
import { execaCommand } from 'execa'
5+
import pTimeout from 'p-timeout'
6+
import { download, LIBP2P_RELAY_DAEMON_VERSION } from '../../scripts/download-relay-daemon.js'
7+
import os from 'os'
8+
// @ts-expect-error no types
59
import goenv from 'go-platform'
610
const platform = process.env.TARGET_OS || goenv.GOOS
711

@@ -10,25 +14,91 @@ const platform = process.env.TARGET_OS || goenv.GOOS
1014
// per circuit relay version.
1115
const relays = new Map()
1216

13-
export async function getRelayV (version, factory) {
14-
if (!isNode) return
15-
if (relays.has(version)) return relays.get(version)
16-
if (process.env.DEBUG) console.log(`Starting relayd_v${version}..`) // eslint-disable-line no-console
17-
if (version < 1 || version > 2) throw new Error('Unsupported circuit relay version')
17+
const RELAY_STARTUP_TIMEOUT = Number(process.env.RELAY_STARTUP_TIMEOUT || 30000)
18+
19+
/**
20+
* @param {number} version
21+
*/
22+
export async function getRelayV (version) {
23+
if (!isNode && !isElectronMain) {
24+
return
25+
}
26+
27+
if (relays.has(version)) {
28+
return relays.get(version)
29+
}
30+
31+
if (process.env.DEBUG) {
32+
console.log(`Starting relayd_v${version}..`) // eslint-disable-line no-console
33+
}
34+
35+
if (version < 1 || version > 2) {
36+
throw new Error('Unsupported circuit relay version')
37+
}
38+
1839
const binaryPath = path.join('scripts', `libp2p-relay-daemon${platform === 'windows' ? '.exe' : ''}`)
1940
const configPath = path.join('scripts', `relayd_v${version}.config.json`)
20-
const identityPath = path.join('scripts', `relayd_v${version}.identity`)
21-
const relayd = command(`${binaryPath} -config ${configPath} -id ${identityPath}`)
22-
let id
23-
for await (const line of relayd.stdout) {
24-
const text = line.toString()
25-
if (process.env.DEBUG) console.log(text) // eslint-disable-line no-console
26-
if (text.includes(`RelayV${version} is running!`)) break
27-
if (text.includes('I am')) {
28-
id = text.split('I am')[1].split('\n')[0].trim()
41+
const identityPath = path.join(os.tmpdir(), `relayd_v${version}-${Math.random()}.identity`)
42+
43+
if (!fs.existsSync(binaryPath)) {
44+
console.info('libp2p-relay-daemon binary not found at', binaryPath) // eslint-disable-line no-console
45+
await download({
46+
version: LIBP2P_RELAY_DAEMON_VERSION,
47+
platform: process.env.TARGET_OS || goenv.GOOS || os.platform(),
48+
arch: process.env.TARGET_ARCH || goenv.GOARCH || os.arch(),
49+
distUrl: process.env.GO_IPFS_DIST_URL || 'https://dist.ipfs.io',
50+
installPath: path.resolve('scripts')
51+
})
52+
}
53+
54+
if (process.env.DEBUG) {
55+
console.info(`${binaryPath} -config ${configPath} -id ${identityPath}`) // eslint-disable-line no-console
56+
}
57+
58+
const relayd = execaCommand(`${binaryPath} -config ${configPath} -id ${identityPath}`, {
59+
all: true
60+
})
61+
62+
const all = relayd.all
63+
64+
if (all == null) {
65+
throw new Error('No stdout/stderr on execa return value')
66+
}
67+
68+
const waitForStartup = async () => {
69+
let id = ''
70+
71+
for await (const line of all) {
72+
const text = line.toString()
73+
74+
if (process.env.DEBUG) {
75+
console.log(text) // eslint-disable-line no-console
76+
}
77+
78+
if (text.includes(`RelayV${version} is running!`)) {
79+
return id
80+
}
81+
82+
if (text.includes('I am')) {
83+
id = text.split('I am')[1].split('\n')[0].trim()
84+
}
2985
}
3086
}
31-
const config = JSON.parse(fs.readFileSync(configPath))
87+
88+
const promise = waitForStartup()
89+
// @ts-ignore
90+
promise.cancel = () => {
91+
console.error(`Timed out waiting for ${binaryPath} to start after ${RELAY_STARTUP_TIMEOUT}ms, killing process`) // eslint-disable-line no-console
92+
relayd.kill()
93+
}
94+
95+
const id = await pTimeout(promise, {
96+
milliseconds: RELAY_STARTUP_TIMEOUT
97+
})
98+
99+
const config = JSON.parse(fs.readFileSync(configPath, {
100+
encoding: 'utf-8'
101+
}))
32102
const result = {
33103
relayd,
34104
// Mock: make it look like other things returned by ipfsd-ctl to reuse existing code.

‎test/utils/wait-for.js

+6-2
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,11 @@
11
import delay from 'delay'
22

3-
/*
4-
* Wait for a condition to become true.
3+
/**
4+
* Wait for a condition to become true
5+
*
6+
* @param {() => boolean} predicate
7+
* @param {number} ttl
8+
* @param {number} checkInterval
59
*/
610
export async function waitFor (predicate, ttl = 10e3, checkInterval = 50) {
711
const timeout = Date.now() + ttl

‎tsconfig.json

+3-1
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,12 @@
11
{
22
"extends": "aegir/src/config/tsconfig.aegir.json",
33
"compilerOptions": {
4-
"outDir": "types"
4+
"outDir": "dist",
5+
"emitDeclarationOnly": true
56
},
67
"include": [
78
"src",
9+
"scripts",
810
"test"
911
]
1012
}

0 commit comments

Comments
 (0)
This repository has been archived.