// Source: builder.js (137 lines, 3.9 KB) from a repository archived
// (read-only) by its owner on Aug 12, 2020.
/* eslint-env mocha */
'use strict'
const chai = require('chai')
chai.use(require('dirty-chai'))
const expect = chai.expect
const BlockService = require('ipfs-block-service')
const pull = require('pull-stream')
const mh = require('multihashes')
const Ipld = require('ipld')
const eachSeries = require('async').eachSeries
const CID = require('cids')
const UnixFS = require('ipfs-unixfs')
const createBuilder = require('../src/builder')
const FixedSizeChunker = require('../src/chunker/fixed-size')
module.exports = (repo) => {
describe('builder', () => {
let ipld
const testMultihashes = Object.keys(mh.names).slice(1, 40)
before(() => {
const bs = new BlockService(repo)
ipld = new Ipld({blockService: bs})
})
it('allows multihash hash algorithm to be specified', (done) => {
eachSeries(testMultihashes, (hashAlg, cb) => {
const options = { hashAlg, strategy: 'flat' }
const content = String(Math.random() + Date.now())
const inputFile = {
path: content + '.txt',
content: Buffer.from(content)
}
const onCollected = (err, nodes) => {
if (err) return cb(err)
const node = nodes[0]
expect(node).to.exist()
const cid = new CID(node.multihash)
// Verify multihash has been encoded using hashAlg
expect(mh.decode(cid.multihash).name).to.equal(hashAlg)
// Fetch using hashAlg encoded multihash
ipld.get(cid, (err, res) => {
if (err) return cb(err)
const content = UnixFS.unmarshal(res.value.data).data
expect(content.equals(inputFile.content)).to.be.true()
cb()
})
}
pull(
pull.values([Object.assign({}, inputFile)]),
createBuilder(FixedSizeChunker, ipld, options),
pull.collect(onCollected)
)
}, done)
})
it('allows multihash hash algorithm to be specified for big file', function (done) {
this.timeout(30000)
eachSeries(testMultihashes, (hashAlg, cb) => {
const options = { hashAlg, strategy: 'flat' }
const content = String(Math.random() + Date.now())
const inputFile = {
path: content + '.txt',
// Bigger than maxChunkSize
content: Buffer.alloc(262144 + 5).fill(1)
}
const onCollected = (err, nodes) => {
if (err) return cb(err)
const node = nodes[0]
try {
expect(node).to.exist()
const cid = new CID(node.multihash)
expect(mh.decode(cid.multihash).name).to.equal(hashAlg)
} catch (err) {
return cb(err)
}
cb()
}
pull(
pull.values([Object.assign({}, inputFile)]),
createBuilder(FixedSizeChunker, ipld, options),
pull.collect(onCollected)
)
}, done)
})
it('allows multihash hash algorithm to be specified for a directory', (done) => {
eachSeries(testMultihashes, (hashAlg, cb) => {
const options = { hashAlg, strategy: 'flat' }
const inputFile = {
path: `${String(Math.random() + Date.now())}-dir`,
content: null
}
const onCollected = (err, nodes) => {
if (err) return cb(err)
const node = nodes[0]
expect(node).to.exist()
const cid = new CID(node.multihash)
expect(mh.decode(cid.multihash).name).to.equal(hashAlg)
// Fetch using hashAlg encoded multihash
ipld.get(cid, (err, res) => {
if (err) return cb(err)
const meta = UnixFS.unmarshal(res.value.data)
expect(meta.type).to.equal('directory')
cb()
})
}
pull(
pull.values([Object.assign({}, inputFile)]),
createBuilder(FixedSizeChunker, ipld, options),
pull.collect(onCollected)
)
}, done)
})
})
}