This repository was archived by the owner on Apr 29, 2020. It is now read-only.

Commit 8dc4211

perf: deep require pull stream modules
To produce a minimal bundle, require pull-stream modules directly: https://www.npmjs.com/package/pull-stream#minimal-bundle
1 parent f3f6d32 commit 8dc4211
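The same pattern is applied in every file below: instead of requiring the whole pull-stream package and calling its namespaced helpers (pull.values, pull.map, ...), each helper is required from its own module path so a bundler can include only the modules that are actually used. A rough standalone sketch of the before/after (the collect sink is not touched by this commit and is shown only to complete the pipeline; its require path is assumed to follow pull-stream's documented sources/throughs/sinks layout):

// Before: one require pulls the entire pull-stream package into the bundle.
// const pull = require('pull-stream')
// pull(pull.values([1, 2, 3]), pull.map((n) => n * 2), pull.collect(cb))

// After: deep-require only what is used, so bundlers can drop the rest.
const pull = require('pull-stream/pull')
const values = require('pull-stream/sources/values')
const map = require('pull-stream/throughs/map')
const collect = require('pull-stream/sinks/collect')

pull(
  values([1, 2, 3]),
  map((n) => n * 2),
  collect((err, doubled) => {
    if (err) throw err
    console.log(doubled) // [2, 4, 6]
  })
)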

7 files changed (+87 -62 lines)


src/dir-flat.js (+9 -6)

@@ -1,6 +1,9 @@
 'use strict'
 
-const pull = require('pull-stream')
+const pull = require('pull-stream/pull')
+const values = require('pull-stream/sources/values')
+const filter = require('pull-stream/throughs/filter')
+const map = require('pull-stream/throughs/map')
 const cat = require('pull-cat')
 
 // Logic to export a unixfs directory.
@@ -20,14 +23,14 @@ function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, paren
 
   // we are at the max depth so no need to descend into children
   if (options.maxDepth && options.maxDepth <= depth) {
-    return pull.values([dir])
+    return values([dir])
   }
 
   const streams = [
     pull(
-      pull.values(node.links),
-      pull.filter((item) => accepts === undefined || item.name === accepts),
-      pull.map((link) => ({
+      values(node.links),
+      filter((item) => accepts === undefined || item.name === accepts),
+      map((link) => ({
         depth: depth + 1,
         size: link.size,
         name: link.name,
@@ -43,7 +46,7 @@ function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, paren
 
   // place dir before if not specifying subtree
   if (!pathRest.length || options.fullPath) {
-    streams.unshift(pull.values([dir]))
+    streams.unshift(values([dir]))
   }
 
   return cat(streams)

src/dir-hamt-sharded.js (+9 -6)

@@ -1,6 +1,9 @@
 'use strict'
 
-const pull = require('pull-stream')
+const pull = require('pull-stream/pull')
+const values = require('pull-stream/sources/values')
+const filter = require('pull-stream/throughs/filter')
+const map = require('pull-stream/throughs/map')
 const cat = require('pull-cat')
 
 // Logic to export a unixfs directory.
@@ -21,13 +24,13 @@ function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag
 
   // we are at the max depth so no need to descend into children
   if (options.maxDepth && options.maxDepth <= depth) {
-    return pull.values([dir])
+    return values([dir])
   }
 
   const streams = [
     pull(
-      pull.values(node.links),
-      pull.map((link) => {
+      values(node.links),
+      map((link) => {
         // remove the link prefix (2 chars for the bucket index)
         const p = link.name.substring(2)
         const pp = p ? path + '/' + p : path
@@ -50,14 +53,14 @@ function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag
           return ''
         }
       }),
-      pull.filter(Boolean),
+      filter(Boolean),
       resolve
     )
   ]
 
   // place dir before if not specifying subtree
   if (!pathRest.length || options.fullPath) {
-    streams.unshift(pull.values([dir]))
+    streams.unshift(values([dir]))
   }
 
   return cat(streams)

src/file.js (+30 -23)

@@ -2,7 +2,14 @@
 
 const traverse = require('pull-traverse')
 const UnixFS = require('ipfs-unixfs')
-const pull = require('pull-stream')
+const pull = require('pull-stream/pull')
+const values = require('pull-stream/sources/values')
+const error = require('pull-stream/sources/error')
+const once = require('pull-stream/sources/once')
+const empty = require('pull-stream/sources/empty')
+const filter = require('pull-stream/throughs/filter')
+const flatten = require('pull-stream/throughs/flatten')
+const map = require('pull-stream/throughs/map')
 const paramap = require('pull-paramap')
 const extractDataFromBlock = require('./extract-data-from-block')
 
@@ -11,15 +18,15 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
   const accepts = pathRest[0]
 
   if (accepts !== undefined && accepts !== path) {
-    return pull.empty()
+    return empty()
   }
 
   let file
 
   try {
     file = UnixFS.unmarshal(node.data)
-  } catch (error) {
-    return pull.error(error)
+  } catch (err) {
+    return error(err)
   }
 
   const fileSize = size || file.fileSize()
@@ -28,21 +35,21 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
   let length = options.length
 
   if (offset < 0) {
-    return pull.error(new Error('Offset must be greater than or equal to 0'))
+    return error(new Error('Offset must be greater than or equal to 0'))
   }
 
   if (offset > fileSize) {
-    return pull.error(new Error('Offset must be less than the file size'))
+    return error(new Error('Offset must be less than the file size'))
   }
 
   if (length < 0) {
-    return pull.error(new Error('Length must be greater than or equal to 0'))
+    return error(new Error('Length must be greater than or equal to 0'))
   }
 
   if (length === 0) {
-    return pull.once({
+    return once({
       depth: depth,
-      content: pull.once(Buffer.alloc(0)),
+      content: once(Buffer.alloc(0)),
       name: name,
       path: path,
       multihash: cid.buffer,
@@ -61,7 +68,7 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
 
   const content = streamBytes(dag, node, fileSize, offset, length)
 
-  return pull.values([{
+  return values([{
     depth: depth,
     content: content,
     name: name,
@@ -74,7 +81,7 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
 
 function streamBytes (dag, node, fileSize, offset, length) {
   if (offset === fileSize || length === 0) {
-    return pull.once(Buffer.alloc(0))
+    return once(Buffer.alloc(0))
   }
 
   const end = offset + length
@@ -85,8 +92,8 @@ function streamBytes (dag, node, fileSize, offset, length) {
       start: 0,
       end: fileSize
     }, getChildren(dag, offset, end)),
-    pull.map(extractData(offset, end)),
-    pull.filter(Boolean)
+    map(extractData(offset, end)),
+    filter(Boolean)
   )
 }
 
@@ -98,15 +105,15 @@ function getChildren (dag, offset, end) {
   return function visitor ({ node }) {
     if (Buffer.isBuffer(node)) {
       // this is a leaf node, can't traverse any further
-      return pull.empty()
+      return empty()
     }
 
     let file
 
     try {
       file = UnixFS.unmarshal(node.data)
-    } catch (error) {
-      return pull.error(error)
+    } catch (err) {
+      return error(err)
     }
 
     const nodeHasData = Boolean(file.data && file.data.length)
@@ -142,11 +149,11 @@ function getChildren (dag, offset, end) {
     }
 
     return pull(
-      pull.once(filteredLinks),
+      once(filteredLinks),
       paramap((children, cb) => {
-        dag.getMany(children.map(child => child.link.cid), (error, results) => {
-          if (error) {
-            return cb(error)
+        dag.getMany(children.map(child => child.link.cid), (err, results) => {
+          if (err) {
+            return cb(err)
           }
 
           cb(null, results.map((result, index) => {
@@ -161,7 +168,7 @@ function getChildren (dag, offset, end) {
           }))
         })
       }),
-      pull.flatten()
+      flatten()
     )
   }
 }
@@ -187,8 +194,8 @@ function extractData (requestedStart, requestedEnd) {
        }
 
        block = file.data
-      } catch (error) {
-        throw new Error(`Failed to unmarshal node - ${error.message}`)
+      } catch (err) {
+        throw new Error(`Failed to unmarshal node - ${err.message}`)
      }
    }
 

src/index.js (+9 -5)

@@ -1,6 +1,10 @@
 'use strict'
 
-const pull = require('pull-stream')
+const pull = require('pull-stream/pull')
+const values = require('pull-stream/sources/values')
+const error = require('pull-stream/sources/error')
+const filter = require('pull-stream/throughs/filter')
+const map = require('pull-stream/throughs/map')
 const CID = require('cids')
 
 const createResolver = require('./resolve').createResolver
@@ -49,7 +53,7 @@ module.exports = (path, dag, options) => {
   try {
     dPath = pathBaseAndRest(path)
   } catch (err) {
-    return pull.error(err)
+    return error(err)
   }
 
   const pathLengthToCut = join(
@@ -58,16 +62,16 @@ module.exports = (path, dag, options) => {
   const cid = new CID(dPath.base)
 
   return pull(
-    pull.values([{
+    values([{
       multihash: cid.buffer,
       name: dPath.base,
       path: dPath.base,
       pathRest: dPath.rest,
       depth: 0
     }]),
     createResolver(dag, options),
-    pull.filter(Boolean),
-    pull.map((node) => {
+    filter(Boolean),
+    map((node) => {
       return {
         depth: node.depth,
         name: node.name,

src/object.js (+6 -4)

@@ -1,7 +1,9 @@
 'use strict'
 
 const CID = require('cids')
-const pull = require('pull-stream')
+const pull = require('pull-stream/pull')
+const values = require('pull-stream/sources/values')
+const error = require('pull-stream/sources/error')
 
 module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth) => {
   let newNode
@@ -10,13 +12,13 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
     newNode = node[pathElem]
     const newName = path + '/' + pathElem
     if (!newNode) {
-      return pull.error(new Error(`not found`))
+      return error(new Error(`not found`))
     }
 
     const isCID = CID.isCID(newNode)
 
     return pull(
-      pull.values([{
+      values([{
         depth: depth,
         name: pathElem,
         path: newName,
@@ -27,6 +29,6 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
       }]),
       resolve)
   } else {
-    return pull.error(new Error('invalid node type'))
+    return error(new Error('invalid node type'))
   }
 }

src/raw.js (+11 -9)

@@ -1,14 +1,16 @@
 'use strict'
 
-const pull = require('pull-stream')
+const error = require('pull-stream/sources/error')
+const once = require('pull-stream/sources/once')
+const empty = require('pull-stream/sources/empty')
 const extractDataFromBlock = require('./extract-data-from-block')
 
 // Logic to export a single raw block
 module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) => {
   const accepts = pathRest[0]
 
   if (accepts !== undefined && accepts !== path) {
-    return pull.empty()
+    return empty()
   }
 
   size = size || node.length
@@ -17,21 +19,21 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
   let length = options.length
 
   if (offset < 0) {
-    return pull.error(new Error('Offset must be greater than or equal to 0'))
+    return error(new Error('Offset must be greater than or equal to 0'))
   }
 
   if (offset > size) {
-    return pull.error(new Error('Offset must be less than the file size'))
+    return error(new Error('Offset must be less than the file size'))
   }
 
   if (length < 0) {
-    return pull.error(new Error('Length must be greater than or equal to 0'))
+    return error(new Error('Length must be greater than or equal to 0'))
   }
 
   if (length === 0) {
-    return pull.once({
+    return once({
       depth,
-      content: pull.once(Buffer.alloc(0)),
+      content: once(Buffer.alloc(0)),
       hash: cid,
       name,
       path,
@@ -48,9 +50,9 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
     length = size - offset
   }
 
-  return pull.once({
+  return once({
     depth,
-    content: pull.once(extractDataFromBlock(node, 0, offset, offset + length)),
+    content: once(extractDataFromBlock(node, 0, offset, offset + length)),
     hash: cid,
     name,
     path,
