
Commit ed4a64a (authored Oct 28, 2016)

Merge pull request #74 from ipfs/awesome-ipld

Awesome IPLD endeavour

2 parents: 30f214f + 89c3602

15 files changed: +465 -346 lines

Diff for: README.md (+8)

@@ -25,6 +25,14 @@ IPFS unixFS Engine
 - [Contribute](#contribute)
 - [License](#license)
 
+## BEWARE BEWARE BEWARE there might be 🐉
+
+This module has passed through several iterations and still is far from a nice and easy understandable codebase. Currently missing features:
+
+- tar importer
+- trickle dag exporter
+- sharding
+
 ## Install
 
 With [npm](https://npmjs.org/) installed, run

Diff for: package.json (+11 -9)

@@ -34,31 +34,33 @@
   },
   "homepage": "https://github.com/ipfs/js-ipfs-unixfs-engineg#readme",
   "devDependencies": {
-    "aegir": "^8.0.1",
+    "aegir": "^8.1.2",
     "buffer-loader": "0.0.1",
     "chai": "^3.5.0",
-    "fs-pull-blob-store": "^0.3.0",
+    "fs-pull-blob-store": "^0.4.1",
     "idb-pull-blob-store": "^0.5.1",
-    "ipfs-block-service": "^0.5.0",
-    "ipfs-repo": "^0.9.0",
+    "ipfs-block-service": "^0.6.0",
+    "ipfs-repo": "^0.10.0",
     "ncp": "^2.0.0",
     "pre-commit": "^1.1.3",
-    "pull-zip": "^2.0.0",
+    "pull-zip": "^2.0.1",
     "raw-loader": "^0.5.1",
     "rimraf": "^2.5.4",
     "run-series": "^1.1.4"
   },
   "dependencies": {
-    "ipfs-merkle-dag": "^0.7.0",
+    "cids": "^0.2.0",
     "ipfs-unixfs": "^0.1.4",
-    "is-ipfs": "^0.2.0",
+    "ipld-dag-pb": "^0.1.3",
+    "ipld-resolver": "^0.1.1",
+    "is-ipfs": "^0.2.1",
     "multihashes": "^0.2.2",
     "pull-block": "^1.0.2",
-    "pull-paramap": "^1.1.6",
+    "pull-paramap": "^1.2.0",
     "pull-pushable": "^2.0.1",
     "pull-stream": "^3.4.5",
     "pull-traverse": "^1.0.3",
-    "pull-write": "^1.1.0",
+    "pull-write": "^1.1.1",
     "run-parallel": "^1.1.6"
   },
   "contributors": [

Diff for: src/chunker-fixed-size.js (-7)

This file was deleted.

Diff for: src/chunker/fixed-size.js (+7)

@@ -0,0 +1,7 @@
+'use strict'
+
+const pullBlock = require('pull-block')
+
+module.exports = (size) => {
+  return pullBlock(size, { zeroPadding: false })
+}
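The chunker is a thin wrapper around pull-block, emitting fixed-size blocks with the last block left unpadded. A small usage sketch; the 512-byte block size, the sample buffer, and the require path from the repository root are illustrative only:

'use strict'

const pull = require('pull-stream')
const fixedSizeChunker = require('./src/chunker/fixed-size')

pull(
  // any pull-stream of buffers can be chunked
  pull.values([Buffer.alloc(1300, 'a')]),
  fixedSizeChunker(512), // emits 512-byte blocks, final block unpadded
  pull.collect((err, chunks) => {
    if (err) { throw err }
    console.log(chunks.map((c) => c.length)) // [ 512, 512, 276 ]
  })
)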

Diff for: src/exporters/dir.js renamed to src/exporter/dir.js (+4 -3)

@@ -3,12 +3,13 @@
 const path = require('path')
 const pull = require('pull-stream')
 const paramap = require('pull-paramap')
+const CID = require('cids')
 
 const fileExporter = require('./file')
 const switchType = require('../util').switchType
 
 // Logic to export a unixfs directory.
-module.exports = (node, name, dagService) => {
+module.exports = (node, name, ipldResolver) => {
   // The algorithm below is as follows
   //
   // 1. Take all links from a given directory node
@@ -25,15 +26,15 @@ module.exports = (node, name, dagService) => {
       path: path.join(name, link.name),
       hash: link.hash
     })),
-    paramap((item, cb) => dagService.get(item.hash, (err, n) => {
+    paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => {
       if (err) {
         return cb(err)
       }
 
       cb(null, switchType(
         n,
         () => pull.values([item]),
-        () => fileExporter(n, item.path, dagService)
+        () => fileExporter(n, item.path, ipldResolver)
       ))
     })),
     pull.flatten()

Diff for: src/exporters/file.js renamed to src/exporter/file.js (+3 -2)

@@ -2,11 +2,12 @@
 
 const traverse = require('pull-traverse')
 const UnixFS = require('ipfs-unixfs')
+const CID = require('cids')
 const pull = require('pull-stream')
 const paramap = require('pull-paramap')
 
 // Logic to export a single (possibly chunked) unixfs file.
-module.exports = (node, name, ds) => {
+module.exports = (node, name, ipldResolver) => {
   function getData (node) {
     try {
       const file = UnixFS.unmarshal(node.data)
@@ -19,7 +20,7 @@ module.exports = (node, name, ds) => {
   function visitor (node) {
     return pull(
       pull.values(node.links),
-      paramap((link, cb) => ds.get(link.hash, cb))
+      paramap((link, cb) => ipldResolver.get(new CID(link.hash), cb))
     )
   }
 

Diff for: src/exporter.js renamed to src/exporter/index.js (+10 -9)

@@ -2,37 +2,38 @@
 
 const traverse = require('pull-traverse')
 const pull = require('pull-stream')
+const CID = require('cids')
 
-const util = require('./util')
+const util = require('./../util')
 const switchType = util.switchType
 const cleanMultihash = util.cleanMultihash
 
-const dirExporter = require('./exporters/dir')
-const fileExporter = require('./exporters/file')
+const dirExporter = require('./dir')
+const fileExporter = require('./file')
 
-module.exports = (hash, dagService, options) => {
+module.exports = (hash, ipldResolver, options) => {
   hash = cleanMultihash(hash)
   options = options || {}
 
   function visitor (item) {
     return pull(
-      dagService.getStream(item.hash),
+      ipldResolver.getStream(new CID(item.hash)),
       pull.map((node) => switchType(
         node,
-        () => dirExporter(node, item.path, dagService),
-        () => fileExporter(node, item.path, dagService)
+        () => dirExporter(node, item.path, ipldResolver),
+        () => fileExporter(node, item.path, ipldResolver)
       )),
       pull.flatten()
    )
  }
 
  // Traverse the DAG
  return pull(
-    dagService.getStream(hash),
+    ipldResolver.getStream(new CID(hash)),
    pull.map((node) => switchType(
      node,
      () => traverse.widthFirst({path: hash, hash}, visitor),
-      () => fileExporter(node, hash, dagService)
+      () => fileExporter(node, hash, ipldResolver)
    )),
    pull.flatten()
  )
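With this rename the exporter is called with an IPLD resolver instead of a DAGService and returns a pull-stream of exported entries. A minimal, hedged usage sketch; the require path, the rootHash placeholder and the exportAll helper are illustrative, and repo is assumed to be an initialised ipfs-repo instance:

'use strict'

const pull = require('pull-stream')
const BlockService = require('ipfs-block-service')
const IPLDResolver = require('ipld-resolver')
const exporter = require('./src/exporter')

// Sketch only: `repo` is an initialised ipfs-repo instance and `rootHash`
// is the multihash of a previously imported root node.
function exportAll (repo, rootHash, done) {
  const ipldResolver = new IPLDResolver(new BlockService(repo))

  pull(
    exporter(rootHash, ipldResolver),
    pull.collect((err, files) => {
      if (err) { return done(err) }
      // each entry has a `path` and, for files, a `content` pull-stream
      done(null, files.map((f) => f.path))
    })
  )
}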

Diff for: src/importer.js (-155)

This file was deleted.

Diff for: src/importer/flush-tree.js (+167)

@@ -0,0 +1,167 @@
+'use strict'
+
+const mh = require('multihashes')
+const UnixFS = require('ipfs-unixfs')
+const CID = require('cids')
+const dagPB = require('ipld-dag-pb')
+const mapValues = require('async/mapValues')
+const parallel = require('async/parallel')
+
+const DAGLink = dagPB.DAGLink
+const DAGNode = dagPB.DAGNode
+
+module.exports = (files, ipldResolver, source, callback) => {
+  // 1) convert files to a tree
+  const fileTree = createTree(files)
+
+  if (Object.keys(fileTree).length === 0) {
+    return callback()// no dirs to be created
+  }
+
+  // 2) create sizeIndex
+  const sizeIndex = createSizeIndex(files)
+
+  // 3) bottom up flushing
+  traverse(fileTree, sizeIndex, null, ipldResolver, source, callback)
+}
+
+/*
+ * createTree
+ *
+ * received an array of files with the format:
+ * {
+ *   path: // full path
+ *   multihash: // multihash of the dagNode
+ *   size: // cumulative size
+ * }
+ *
+ * returns a JSON object that represents a tree where branches are the paths
+ * and the leaves are objects with file names and respective multihashes, such
+ * as:
+ * {
+ *   foo: {
+ *     bar: {
+ *       baz.txt: <multihash>
+ *     }
+ *   }
+ * }
+ */
+function createTree (files) {
+  const fileTree = {}
+
+  files.forEach((file) => {
+    let splitted = file.path.split('/')
+    if (splitted.length === 1) {
+      return // adding just one file
+    }
+    if (splitted[0] === '') {
+      splitted = splitted.slice(1)
+    }
+    var tmpTree = fileTree
+
+    for (var i = 0; i < splitted.length; i++) {
+      if (!tmpTree[splitted[i]]) {
+        tmpTree[splitted[i]] = {}
+      }
+      if (i === splitted.length - 1) {
+        tmpTree[splitted[i]] = file.multihash
+      } else {
+        tmpTree = tmpTree[splitted[i]]
+      }
+    }
+  })
+
+  return fileTree
+}
+
+/*
+ * create a size index that goes like:
+ * { <multihash>: <size> }
+ */
+function createSizeIndex (files) {
+  const sizeIndex = {}
+
+  files.forEach((file) => {
+    sizeIndex[mh.toB58String(file.multihash)] = file.size
+  })
+
+  return sizeIndex
+}
+
+/*
+ * expand the branches recursively (depth first), flush them first
+ * and then traverse through the bottoum up, flushing everynode
+ *
+ * Algorithm tl;dr;
+ *   create a dirNode
+ *   Object.keys
+ *     If the value is an Object
+ *       create a dir Node
+ *       Object.keys
+ *       Once finished, add the result as a link to the dir node
+ *     If the value is not an object
+ *       add as a link to the dirNode
+ */
+function traverse (tree, sizeIndex, path, ipldResolver, source, done) {
+  mapValues(tree, (node, key, cb) => {
+    if (isLeaf(node)) {
+      return cb(null, node)
+    }
+
+    traverse(node, sizeIndex, path ? `${path}/${key}` : key, ipldResolver, source, cb)
+  }, (err, tree) => {
+    if (err) {
+      return done(err)
+    }
+
+    // at this stage, all keys are multihashes
+    // create a dir node
+    // add all the multihashes as links
+    // return this new node multihash
+
+    const keys = Object.keys(tree)
+
+    const ufsDir = new UnixFS('directory')
+    const node = new DAGNode(ufsDir.marshal())
+
+    keys.forEach((key) => {
+      const b58mh = mh.toB58String(tree[key])
+      const link = new DAGLink(key, sizeIndex[b58mh], tree[key])
+      node.addRawLink(link)
+    })
+
+    parallel([
+      (cb) => node.multihash(cb),
+      (cb) => node.size(cb)
+    ], (err, res) => {
+      if (err) {
+        return done(err)
+      }
+
+      const multihash = res[0]
+      const size = res[1]
+
+      sizeIndex[mh.toB58String(multihash)] = size
+      ipldResolver.put({
+        node: node,
+        cid: new CID(multihash)
+      }, (err) => {
+        if (err) {
+          source.push(new Error('failed to store dirNode'))
+        } else if (path) {
+          source.push({
+            path: path,
+            multihash: multihash,
+            size: size
+          })
+        }
+
+        done(null, multihash)
+      })
+    })
+  })
+}
+
+function isLeaf (value) {
+  return !(typeof value === 'object' && !Buffer.isBuffer(value))
+}
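To make the intermediate structures concrete, here is a small illustrative example of the shapes createTree and createSizeIndex produce for two files; the paths, multihash placeholders and sizes are fabricated for the sketch:

'use strict'

// Placeholders standing in for the multihash Buffers the importer produces.
const mhBaz = '<multihash of baz.txt>'
const mhQuux = '<multihash of quux.txt>'

const files = [
  { path: 'foo/bar/baz.txt', multihash: mhBaz, size: 12 },
  { path: 'foo/quux.txt', multihash: mhQuux, size: 4 }
]

// createTree(files) builds a nested object keyed by path segments:
// {
//   foo: {
//     bar: { 'baz.txt': mhBaz },
//     'quux.txt': mhQuux
//   }
// }
//
// createSizeIndex(files) maps base58-encoded multihashes to cumulative sizes:
// { 'Qm...baz': 12, 'Qm...quux': 4 }
//
// traverse() then flushes the tree bottom-up, creating one dag-pb directory
// node per branch and pushing { path, multihash, size } for each onto `source`.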

Diff for: src/importer/index.js (+217)

@@ -0,0 +1,217 @@
+'use strict'
+
+const UnixFS = require('ipfs-unixfs')
+const assert = require('assert')
+const pull = require('pull-stream')
+const pullPushable = require('pull-pushable')
+const pullWrite = require('pull-write')
+const parallel = require('run-parallel')
+const dagPB = require('ipld-dag-pb')
+const CID = require('cids')
+
+const fsc = require('./../chunker/fixed-size')
+const createAndStoreTree = require('./flush-tree')
+
+const DAGNode = dagPB.DAGNode
+const DAGLink = dagPB.DAGLink
+
+const CHUNK_SIZE = 262144
+
+module.exports = (ipldResolver, options) => {
+  assert(ipldResolver, 'Missing IPLD Resolver')
+
+  const files = []
+
+  const source = pullPushable()
+
+  const sink = pullWrite(
+    makeWriter(source, files, ipldResolver),
+    null,
+    100,
+    (err) => {
+      if (err) {
+        return source.end(err)
+      }
+
+      createAndStoreTree(files, ipldResolver, source, () => {
+        source.end()
+      })
+    }
+  )
+
+  return {
+    source: source,
+    sink: sink
+  }
+}
+
+function makeWriter (source, files, ipldResolver) {
+  return (items, cb) => {
+    parallel(items.map((item) => (cb) => {
+      if (!item.content) {
+        return createAndStoreDir(item, ipldResolver, (err, node) => {
+          if (err) {
+            return cb(err)
+          }
+          source.push(node)
+          files.push(node)
+          cb()
+        })
+      }
+
+      createAndStoreFile(item, ipldResolver, (err, node) => {
+        if (err) {
+          return cb(err)
+        }
+        source.push(node)
+        files.push(node)
+        cb()
+      })
+    }), cb)
+  }
+}
+
+function createAndStoreDir (item, ipldResolver, cb) {
+  // 1. create the empty dir dag node
+  // 2. write it to the dag store
+
+  const d = new UnixFS('directory')
+  const n = new DAGNode()
+  n.data = d.marshal()
+
+  n.multihash((err, multihash) => {
+    if (err) {
+      return cb(err)
+    }
+
+    ipldResolver.put({
+      node: n,
+      cid: new CID(multihash)
+    }, (err) => {
+      if (err) {
+        return cb(err)
+      }
+
+      n.size((err, size) => {
+        if (err) {
+          return cb(err)
+        }
+
+        cb(null, {
+          path: item.path,
+          multihash: multihash,
+          size: size
+        })
+      })
+    })
+  })
+}
+
+function createAndStoreFile (file, ipldResolver, cb) {
+  if (Buffer.isBuffer(file.content)) {
+    file.content = pull.values([file.content])
+  }
+
+  if (typeof file.content !== 'function') {
+    return cb(new Error('invalid content'))
+  }
+
+  // 1. create the unixfs merkledag node
+  // 2. add its hash and size to the leafs array
+
+  // TODO - Support really large files
+  // a) check if we already reach max chunks if yes
+  // a.1) create a parent node for all of the current leaves
+  // b.2) clean up the leaves array and add just the parent node
+
+  pull(
+    file.content,
+    fsc(CHUNK_SIZE),
+    pull.asyncMap((chunk, cb) => {
+      const l = new UnixFS('file', Buffer(chunk))
+      const n = new DAGNode(l.marshal())
+
+      n.multihash((err, multihash) => {
+        if (err) {
+          return cb(err)
+        }
+
+        ipldResolver.put({
+          node: n,
+          cid: new CID(multihash)
+        }, (err) => {
+          if (err) {
+            return cb(new Error('Failed to store chunk'))
+          }
+
+          n.size((err, size) => {
+            if (err) {
+              return cb(err)
+            }
+
+            cb(null, {
+              Hash: multihash,
+              Size: size,
+              leafSize: l.fileSize(),
+              Name: ''
+            })
+          })
+        })
+      })
+    }),
+    pull.collect((err, leaves) => {
+      if (err) {
+        return cb(err)
+      }
+
+      if (leaves.length === 1) {
+        return cb(null, {
+          path: file.path,
+          multihash: leaves[0].Hash,
+          size: leaves[0].Size
+        })
+      }
+
+      // create a parent node and add all the leafs
+
+      const f = new UnixFS('file')
+      const n = new DAGNode()
+
+      for (let leaf of leaves) {
+        f.addBlockSize(leaf.leafSize)
+        n.addRawLink(
+          new DAGLink(leaf.Name, leaf.Size, leaf.Hash)
+        )
+      }
+
+      n.data = f.marshal()
+
+      n.multihash((err, multihash) => {
+        if (err) {
+          return cb(err)
+        }
+
+        ipldResolver.put({
+          node: n,
+          cid: new CID(multihash)
+        }, (err) => {
+          if (err) {
+            return cb(err)
+          }
+
+          n.size((err, size) => {
+            if (err) {
+              return cb(err)
+            }
+
+            cb(null, {
+              path: file.path,
+              multihash: multihash,
+              size: size
+            })
+          })
+        })
+      })
+    })
+  )
+}
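The importer exposes a pull-stream duplex: the sink consumes { path, content } entries and the source emits { path, multihash, size } records for every file and directory stored. A minimal usage sketch mirroring the 'import and export' test below; the file name, content and require path are illustrative, and repo is assumed to be an initialised ipfs-repo instance:

'use strict'

const pull = require('pull-stream')
const BlockService = require('ipfs-block-service')
const IPLDResolver = require('ipld-resolver')
const importer = require('./src/importer')

// Sketch only: `repo` is an initialised ipfs-repo instance.
function importOne (repo, done) {
  const ipldResolver = new IPLDResolver(new BlockService(repo))

  pull(
    pull.values([{
      path: 'hello.txt',
      content: pull.values([Buffer.from('hello world')])
    }]),
    importer(ipldResolver), // duplex: sink in, source out
    pull.collect((err, files) => {
      if (err) { return done(err) }
      done(null, files) // [{ path, multihash, size }, ...]
    })
  )
}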

Diff for: src/index.js (+2 -2)

@@ -1,4 +1,4 @@
 'use strict'
 
-exports.importer = exports.Importer = require('./importer.js')
-exports.exporter = exports.Exporter = require('./exporter.js')
+exports.importer = exports.Importer = require('./importer')
+exports.exporter = exports.Exporter = require('./exporter')

Diff for: src/tree.js (-130)

This file was deleted.

Diff for: test/test-exporter.js (+23 -16)

@@ -3,38 +3,43 @@
 
 const expect = require('chai').expect
 const BlockService = require('ipfs-block-service')
-const DAGService = require('ipfs-merkle-dag').DAGService
+const IPLDResolver = require('ipld-resolver')
 const UnixFS = require('ipfs-unixfs')
 const fs = require('fs')
 const path = require('path')
 const bs58 = require('bs58')
 const pull = require('pull-stream')
 const zip = require('pull-zip')
+const CID = require('cids')
 
 const unixFSEngine = require('./../src')
 const exporter = unixFSEngine.exporter
 
 module.exports = (repo) => {
   describe('exporter', () => {
-    let ds
+    let ipldResolver
 
     const bigFile = fs.readFileSync(path.join(__dirname, '/test-data/1.2MiB.txt'))
+
     before(() => {
       const bs = new BlockService(repo)
-      ds = new DAGService(bs)
+      ipldResolver = new IPLDResolver(bs)
     })
 
     it('import and export', (done) => {
       pull(
         pull.values([{
           path: '1.2MiB.txt',
-          content: pull.values([bigFile, Buffer('hello world')])
+          content: pull.values([
+            bigFile,
+            Buffer('hello world')
+          ])
         }]),
-        unixFSEngine.importer(ds),
+        unixFSEngine.importer(ipldResolver),
         pull.map((file) => {
           expect(file.path).to.be.eql('1.2MiB.txt')
 
-          return exporter(file.multihash, ds)
+          return exporter(file.multihash, ipldResolver)
         }),
         pull.flatten(),
         pull.collect((err, files) => {
@@ -50,15 +55,15 @@ module.exports = (repo) => {
       const mhBuf = new Buffer(bs58.decode(hash))
 
       pull(
-        ds.getStream(hash),
+        ipldResolver.getStream(new CID(hash)),
         pull.map((node) => UnixFS.unmarshal(node.data)),
         pull.collect((err, nodes) => {
           expect(err).to.not.exist
 
           const unmarsh = nodes[0]
 
           pull(
-            exporter(mhBuf, ds),
+            exporter(mhBuf, ipldResolver),
             pull.collect(onFiles)
           )
 
@@ -79,10 +84,10 @@ module.exports = (repo) => {
       pull(
         zip(
           pull(
-            ds.getStream(hash),
+            ipldResolver.getStream(new CID(hash)),
             pull.map((node) => UnixFS.unmarshal(node.data))
           ),
-          exporter(hash, ds)
+          exporter(hash, ipldResolver)
         ),
         pull.collect((err, values) => {
           expect(err).to.not.exist
@@ -97,7 +102,7 @@ module.exports = (repo) => {
     it('export a small file with links', (done) => {
       const hash = 'QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q'
       pull(
-        exporter(hash, ds),
+        exporter(hash, ipldResolver),
         pull.collect((err, files) => {
           expect(err).to.not.exist
 
@@ -109,7 +114,7 @@ module.exports = (repo) => {
     it('export a large file > 5mb', (done) => {
      const hash = 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE'
      pull(
-        exporter(hash, ds),
+        exporter(hash, ipldResolver),
        pull.collect((err, files) => {
          expect(err).to.not.exist
 
@@ -123,7 +128,7 @@ module.exports = (repo) => {
       const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN'
 
       pull(
-        exporter(hash, ds),
+        exporter(hash, ipldResolver),
         pull.collect((err, files) => {
           expect(err).to.not.exist
 
@@ -162,7 +167,7 @@ module.exports = (repo) => {
       const hash = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
 
       pull(
-        exporter(hash, ds),
+        exporter(hash, ipldResolver),
         pull.collect((err, files) => {
           expect(err).to.not.exist
           expect(files[0].content).to.not.exist
@@ -176,7 +181,7 @@ module.exports = (repo) => {
       const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKj3'
 
       pull(
-        exporter(hash, ds),
+        exporter(hash, ipldResolver),
         pull.collect((err, files) => {
           expect(err).to.exist
           done()
@@ -190,7 +195,9 @@ function fileEql (f1, f2, done) {
   pull(
     f1.content,
     pull.collect((err, data) => {
-      if (err) return done(err)
+      if (err) {
+        return done(err)
+      }
 
       try {
         if (f2) {

Diff for: test/test-fixed-size-chunker.js (+1 -1)

@@ -1,7 +1,7 @@
 /* eslint-env mocha */
 'use strict'
 
-const chunker = require('./../src/chunker-fixed-size')
+const chunker = require('./../src/chunker/fixed-size')
 const fs = require('fs')
 const expect = require('chai').expect
 const path = require('path')

Diff for: test/test-importer.js (+12 -12)

@@ -4,11 +4,11 @@
 const importer = require('./../src').importer
 const expect = require('chai').expect
 const BlockService = require('ipfs-block-service')
-const DAGService = require('ipfs-merkle-dag').DAGService
 const fs = require('fs')
 const path = require('path')
 const pull = require('pull-stream')
 const mh = require('multihashes')
+const IPLDResolver = require('ipld-resolver')
 
 function stringifyMh (files) {
   return files.map((file) => {
@@ -19,7 +19,7 @@ function stringifyMh (files) {
 
 module.exports = function (repo) {
   describe('importer', function () {
-    let ds
+    let ipldResolver
 
     const bigFile = fs.readFileSync(path.join(__dirname, '/test-data/1.2MiB.txt'))
     const smallFile = fs.readFileSync(path.join(__dirname, '/test-data/200Bytes.txt'))
@@ -30,7 +30,7 @@ module.exports = function (repo) {
 
     before(() => {
       const bs = new BlockService(repo)
-      ds = new DAGService(bs)
+      ipldResolver = new IPLDResolver(bs)
     })
 
     it('bad input', (done) => {
@@ -39,7 +39,7 @@ module.exports = function (repo) {
           path: '200Bytes.txt',
           content: 'banana'
         }]),
-        importer(ds),
+        importer(ipldResolver),
        pull.onEnd((err) => {
          expect(err).to.exist
          done()
@@ -53,7 +53,7 @@ module.exports = function (repo) {
           path: '200Bytes.txt',
           content: pull.values([smallFile])
         }]),
-        importer(ds),
+        importer(ipldResolver),
        pull.collect((err, files) => {
          expect(err).to.not.exist
          expect(stringifyMh(files)).to.be.eql([{
@@ -72,7 +72,7 @@ module.exports = function (repo) {
           path: '200Bytes.txt',
           content: smallFile
         }]),
-        importer(ds),
+        importer(ipldResolver),
        pull.collect((err, files) => {
          expect(err).to.not.exist
          expect(stringifyMh(files)).to.be.eql([{
@@ -91,7 +91,7 @@ module.exports = function (repo) {
           path: 'foo/bar/200Bytes.txt',
           content: pull.values([smallFile])
         }]),
-        importer(ds),
+        importer(ipldResolver),
        pull.collect((err, files) => {
          expect(err).to.not.exist
          expect(files.length).to.equal(3)
@@ -129,7 +129,7 @@ module.exports = function (repo) {
           path: '1.2MiB.txt',
           content: pull.values([bigFile])
         }]),
-        importer(ds),
+        importer(ipldResolver),
        pull.collect((err, files) => {
          expect(err).to.not.exist
          expect(stringifyMh(files)).to.be.eql([{
@@ -148,7 +148,7 @@ module.exports = function (repo) {
           path: 'foo-big/1.2MiB.txt',
           content: pull.values([bigFile])
         }]),
-        importer(ds),
+        importer(ipldResolver),
        pull.collect((err, files) => {
          expect(err).to.not.exist
 
@@ -176,7 +176,7 @@ module.exports = function (repo) {
        pull.values([{
          path: 'empty-dir'
        }]),
-        importer(ds),
+        importer(ipldResolver),
        pull.collect((err, files) => {
          expect(err).to.not.exist
 
@@ -200,7 +200,7 @@ module.exports = function (repo) {
           path: 'pim/1.2MiB.txt',
           content: pull.values([bigFile])
         }]),
-        importer(ds),
+        importer(ipldResolver),
        pull.collect((err, files) => {
          expect(err).to.not.exist
 
@@ -235,7 +235,7 @@ module.exports = function (repo) {
           path: 'pam/1.2MiB.txt',
           content: pull.values([bigFile])
         }]),
-        importer(ds),
+        importer(ipldResolver),
        pull.collect((err, files) => {
          expect(err).to.not.exist
 
