This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit 7a9a7b3

Merge pull request #49 from ipfs/clean-up

Clean up

2 parents 04e7483 + cacb55c

4 files changed: +98 −117 lines

Diff for: README.md

+33-36
@@ -1,4 +1,5 @@
-# IPFS unixFS Engine
+IPFS unixFS Engine
+==================
 
 [![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io)
 [![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/)
@@ -48,19 +49,19 @@ And write the importing logic:
 ```js
 // Dependencies to create a DAG Service (where the dir will be imported into)
 const memStore = require('abstract-blob-store')
-const ipfsRepo = require('ipfs-repo')
-const ipfsBlock = require('ipfs-block')
-const ipfsBlockService = require('ipfs-block-service')
-const ipfsMerkleDag = require('ipfs-merkle-dag')
+const Repo = require('ipfs-repo')
+const Block = require('ipfs-block')
+const BlockService = require('ipfs-block-service')
+const MerkleDag = require('ipfs-merkle-dag')
 const fs = require('fs')
 
-const repo = new ipfsRepo('', { stores: memStore })
-const blocks = new ipfsBlockService(repo)
-const dag = new ipfsMerkleDag.DAGService(blocks)
+const repo = new Repo('', { stores: memStore })
+const blockService = new BlockService(repo)
+const dagService = new ipfsMerkleDag.DAGService(blocks)
 
 
-const Importer = require('ipfs-unixfs-engine').importer
-const add = new Importer(dag)
+const Importer = require('ipfs-unixfs-engine').Importer
+const filesAddStream = new Importer(dagService)
 
 // An array to hold the return of nested file/dir info from the importer
 // A root DAG Node is received upon completion
@@ -76,26 +77,24 @@ const input2 = {path: /tmp/foo/quxx, content: rs2}
 
 // Listen for the data event from the importer stream
 
-add.on('data', (info) => {
+filesAddStream.on('data', (info) => {
   res.push(info)
 })
 
 // The end event of the stream signals that the importer is done
 
-add.on('end', () => {
-  console.log('Finished adding files!')
-  return
+filesAddStream.on('end', () => {
+  console.log('Finished filesAddStreaming files!')
 })
 
-// Calling write on the importer to add the file/object tuples
+// Calling write on the importer to filesAddStream the file/object tuples
 
-add.write(input)
-add.write(input2)
-add.end()
+filesAddStream.write(input)
+filesAddStream.write(input2)
+filesAddStream.end()
 ```
 
 When run, the stat of DAG Node is outputted for each file on data event until the root:
-
 ```
 { multihash: <Buffer 12 20 bd e2 2b 57 3f 6f bd 7c cc 5a 11 7f 28 6c a2 9a 9f c0 90 e1 d4 16 d0 5f 42 81 ec 0c 2a 7f 7f 93>,
   size: 39243,
@@ -143,38 +142,37 @@ Nodes.
 ### Example Exporter
 
 ```
-const ipfsRepo = require('ipfs-repo')
-const ipfsBlock = require('ipfs-block')
-const ipfsBlockService = require('ipfs-block-service')
-const ipfsMerkleDag = require('ipfs-merkle-dag')
+const Repo = require('ipfs-repo')
+const Block = require('ipfs-block')
+const BlockService = require('ipfs-block-service')
+const MerkleDAG = require('ipfs-merkle-dag')
 
-const repo = new ipfsRepo('', { stores: memStore })
-const blocks = new ipfsBlockService(repo)
-const dag = new ipfsMerkleDag.DAGService(blocks)
+const repo = new Repo('', { stores: memStore })
+const blockService = new BlockService(repo)
+const dagService = new MerkleDag.DAGService(blockService)
 
 // Create an export readable object stream with the hash you want to export and a dag service
-const exportEvent = Exporter(hash, dag)
+const filesStream = Exporter(<multihash>, dag)
 
 // Pipe the return stream to console
 
-exportEvent.on('data', (result) => {
-  result.stream.pipe(process.stdout)
+filesStream.on('data', (file) => {
+  file.content.pipe(process.stdout)
 }
 ```
 
 ### Exporter: API
+
 ```js
-const Exporter = require('ipfs-unixfs-engine').exporter
+const Exporter = require('ipfs-unixfs-engine').Exporter
 ```
 
 ### new Exporter(hash, dagService)
 
-Uses the given [DAG Service][] to fetch an IPFS [UnixFS][] object(s) by their
-multiaddress.
+Uses the given [DAG Service][] to fetch an IPFS [UnixFS][] object(s) by their multiaddress.
 
-Creates a new readable stream in object mode that outputs objects of the
-form
+Creates a new readable stream in object mode that outputs objects of the form
 
 ```js
 {
@@ -183,8 +181,7 @@ form
 }
 ```
 
-Errors are received as with a normal stream, by listening on the `'error'` event
-to be emitted.
+Errors are received as with a normal stream, by listening on the `'error'` event to be emitted.
 
 
 [DAG Service]: https://github.com/vijayee/js-ipfs-merkle-dag/
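
For context, the renamed importer API above can be exercised end to end. The sketch below is illustrative, not part of the commit: it assumes the modules behave as the README describes, and it uses `MerkleDag`/`blockService` consistently where the `+` side of the diff still references the pre-rename `ipfsMerkleDag` and `blocks` identifiers.

```js
// Illustrative sketch (assumed, not from the commit): importing a file
// with the renamed Importer API, using the names from the README diff.
const memStore = require('abstract-blob-store')
const Repo = require('ipfs-repo')
const BlockService = require('ipfs-block-service')
const MerkleDag = require('ipfs-merkle-dag')
const fs = require('fs')

const repo = new Repo('', { stores: memStore })
const blockService = new BlockService(repo)
// The diff writes `new ipfsMerkleDag.DAGService(blocks)` here, which looks
// like a leftover of the rename; the consistent form would be:
const dagService = new MerkleDag.DAGService(blockService)

const Importer = require('ipfs-unixfs-engine').Importer
const filesAddStream = new Importer(dagService)

const res = []

// Each emitted object describes one imported file or directory;
// the root DAG node arrives last, then 'end' fires.
filesAddStream.on('data', (info) => res.push(info))
filesAddStream.on('end', () => console.log('done', res))

filesAddStream.write({
  path: '/tmp/foo/bar',
  content: fs.createReadStream('/tmp/foo/bar')
})
filesAddStream.end()
```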

Diff for: src/exporter.js

+43-59
@@ -1,10 +1,9 @@
 'use strict'
 
 const debug = require('debug')
-const log = debug('exporter')
-log.err = debug('exporter:error')
+const log = debug('unixfs')
+log.err = debug('unixfs:error')
 const isIPFS = require('is-ipfs')
-const bs58 = require('bs58')
 const UnixFS = require('ipfs-unixfs')
 const series = require('run-series')
 const Readable = require('readable-stream').Readable
@@ -21,13 +20,10 @@ function Exporter (hash, dagService, options) {
     return new Exporter(hash, dagService, options)
   }
 
-  // Sanitize hash.
+  // Sanitize hash
   if (!isIPFS.multihash(hash)) {
     throw new Error('not valid multihash')
   }
-  if (Buffer.isBuffer(hash)) {
-    hash = bs58.encode(hash)
-  }
 
   Readable.call(this, { objectMode: true })
 
@@ -36,61 +32,52 @@ function Exporter (hash, dagService, options) {
   this._read = (n) => {}
 
   let fileExporter = (node, name, done) => {
-    let init = false
+    if (!done) {
+      throw new Error('done must be set')
+    }
 
-    if (!done) throw new Error('done must be set')
+    const contentRS = new Readable()
+    contentRS._read = () => {}
 
     // Logic to export a single (possibly chunked) unixfs file.
-    var rs = new Readable()
     if (node.links.length === 0) {
       const unmarshaledData = UnixFS.unmarshal(node.data)
-      rs._read = () => {
-        if (init) {
-          return
-        }
-        init = true
-        rs.push(unmarshaledData.data)
-        rs.push(null)
-      }
-      this.push({ content: rs, path: name })
+      contentRS.push(unmarshaledData.data)
+      contentRS.push(null)
+      this.push({ content: contentRS, path: name })
       done()
     } else {
-      rs._read = () => {
-        if (init) {
-          return
+      const array = node.links.map((link) => {
+        return (cb) => {
+          dagService.get(link.hash, (err, res) => {
+            if (err) {
+              return cb(err)
+            }
+            var unmarshaledData = UnixFS.unmarshal(res.data)
+            contentRS.push(unmarshaledData.data)
+            cb()
+          })
         }
-        init = true
-
-        const array = node.links.map((link) => {
-          return (cb) => {
-            dagService.get(link.hash, (err, res) => {
-              if (err) {
-                return cb(err)
-              }
-              var unmarshaledData = UnixFS.unmarshal(res.data)
-              rs.push(unmarshaledData.data)
-              cb()
-            })
-          }
-        })
-        series(array, (err, res) => {
-          if (err) {
-            rs.emit('error', err)
-            return
-          }
-          rs.push(null)
-          return
-        })
-      }
-      this.push({ content: rs, path: name })
+      })
+      series(array, (err) => {
+        if (err) {
+          return contentRS.emit('error', err)
+        }
+        contentRS.push(null)
+      })
+      this.push({ content: contentRS, path: name })
       done()
     }
   }
 
   // Logic to export a unixfs directory.
   let dirExporter = (node, name, add, done) => {
-    if (!add) throw new Error('add must be set')
-    if (!done) throw new Error('done must be set')
+    if (!add) {
+      throw new Error('add must be set')
+    }
+    if (!done) {
+      throw new Error('done must be set')
+    }
 
     this.push({content: null, path: name})
 
@@ -104,32 +91,29 @@ function Exporter (hash, dagService, options) {
   }
 
   // Traverse the DAG asynchronously
-  var self = this
-  fieldtrip([{ path: hash, hash: hash }], visit, (err) => {
+  fieldtrip([{path: hash, hash: hash}], visit.bind(this), (err) => {
     if (err) {
-      self.emit('error', err)
-      return
+      return this.emit('error', err)
     }
-    self.push(null)
+    this.push(null)
   })
 
   // Visit function: called once per node in the exported graph
   function visit (item, add, done) {
-    dagService.get(item.hash, (err, fetchedNode) => {
+    dagService.get(item.hash, (err, node) => {
       if (err) {
-        self.emit('error', err)
-        return
+        return this.emit('error', err)
      }
 
-      const data = UnixFS.unmarshal(fetchedNode.data)
+      const data = UnixFS.unmarshal(node.data)
       const type = data.type
 
      if (type === 'directory') {
-        dirExporter(fetchedNode, item.path, add, done)
+        dirExporter(node, item.path, add, done)
      }
 
      if (type === 'file') {
-        fileExporter(fetchedNode, item.path, done)
+        fileExporter(node, item.path, done)
      }
    })
  }
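
The substantive change in `fileExporter` is the move from a lazy, guarded `_read` to eagerly building the content stream: each DAG link becomes a task, `run-series` fetches the linked blocks in order, and the unmarshaled chunks are pushed into one readable. A condensed sketch of that pattern follows; the function name `fileContentStream` is hypothetical, and `node`/`dagService` are assumed to look like the objects the exporter works with.

```js
// Condensed sketch of the refactored pattern in src/exporter.js.
const series = require('run-series')
const Readable = require('readable-stream').Readable
const UnixFS = require('ipfs-unixfs')

function fileContentStream (node, dagService) {
  const contentRS = new Readable()
  contentRS._read = () => {} // data is pushed eagerly below

  // One task per link; each task fetches a block and pushes its file data.
  const tasks = node.links.map((link) => (cb) => {
    dagService.get(link.hash, (err, res) => {
      if (err) {
        return cb(err)
      }
      contentRS.push(UnixFS.unmarshal(res.data).data)
      cb()
    })
  })

  // run-series runs the tasks one after another, preserving link order,
  // so chunks arrive in file order.
  series(tasks, (err) => {
    if (err) {
      return contentRS.emit('error', err)
    }
    contentRS.push(null) // end of file
  })

  return contentRS
}
```

One consequence of dropping the `init` guard is that chunks are now fetched as soon as the exporter visits the node rather than on the first read of the content stream, trading a little eagerness for considerably simpler control flow.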

Diff for: src/importer.js

+2-2
@@ -1,8 +1,8 @@
 'use strict'
 
 const debug = require('debug')
-const log = debug('importer')
-log.err = debug('importer:error')
+const log = debug('unixfs')
+log.err = debug('unixfs:error')
 const fsc = require('./chunker-fixed-size')
 const through2 = require('through2')
 const merkleDAG = require('ipfs-merkle-dag')
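
With this change the importer and the exporter both log under a shared `unixfs` namespace instead of per-module ones. With the `debug` module, namespaces are selected at runtime via the `DEBUG` environment variable; a minimal sketch:

```js
// Sketch: debug() returns a logger that prints only when the DEBUG
// environment variable matches its namespace.
const debug = require('debug')
const log = debug('unixfs')
log.err = debug('unixfs:error')

log('chunking %s', 'foo.txt')     // visible with DEBUG=unixfs (or DEBUG=unixfs*)
log.err('could not fetch block')  // visible with DEBUG=unixfs:error (or DEBUG=unixfs*)
```

Running with `DEBUG=unixfs*` now surfaces importer and exporter logs together, which appears to be the point of unifying the namespaces.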

Diff for: test/test-exporter.js

+20-20
@@ -88,52 +88,52 @@ module.exports = function (repo) {
     })
     testExport.pipe(concat((files) => {
       expect(files[0].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN')
+      expect(files[0].content).to.not.exist
+
       expect(files[1].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/200Bytes.txt')
+      expect(files[1].content).to.exist
+
       expect(files[2].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/dir-another')
+      expect(files[2].content).to.not.exist
+
       expect(files[3].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1')
+      expect(files[3].content).to.not.exist
+
       expect(files[4].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt')
+      expect(files[4].content).to.exist
+
       expect(files[5].path).to.equal('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/level-2')
+      expect(files[5].content).to.not.exist
+
       done()
     }))
   })
 
   it('returns a null stream for dir', (done) => {
-    const hash = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' // This hash doesn't exist in the repo
+    const hash = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
     const bs = new BlockService(repo)
     const ds = new DAGService(bs)
     const testExport = exporter(hash, ds)
+
     testExport.on('error', (err) => {
       expect(err).to.not.exist
     })
-    testExport.on('data', (dir) => {
-      expect(dir.content).to.equal(null)
+
+    testExport.on('data', (file) => {
+      expect(file.content).to.not.exist
       done()
    })
  })
 
  it('fails on non existent hash', (done) => {
-    const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKj3' // This hash doesn't exist in the repo
+    // This hash doesn't exist in the repo
+    const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKj3'
    const bs = new BlockService(repo)
    const ds = new DAGService(bs)
    const testExport = exporter(hash, ds)
    testExport.on('error', (err) => {
-      const error = err.toString()
      expect(err).to.exist
-      const browser = error.includes('Error: key not found:')
-      const node = error.includes('no such file or directory')
-      // the browser and node js return different errors
-      if (browser) {
-        expect(error).to.contain('Error: key not found:')
-        done()
-      }
-      if (node) {
-        expect(error).to.contain('no such file or directory')
-        done()
-      }
-      if (!node && !browser) {
-        expect(node).to.equal(true)
-        done()
-      }
+      done()
    })
  })
 })
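
The reworked assertions pin down the exporter's output convention: directories are emitted with no `content` stream, files with a readable one, and a missing hash surfaces as a plain `'error'` event. A small consumer sketch under that convention; `hash` and `dagService` are assumed to be set up as in the tests above.

```js
// Sketch: consuming the exporter under the convention the tests assert.
const Exporter = require('ipfs-unixfs-engine').Exporter

// The constructor guard in src/exporter.js makes `new` optional.
const files = Exporter(hash, dagService)

files.on('data', (file) => {
  if (!file.content) {
    console.log('dir: ', file.path)    // directories carry no content stream
  } else {
    console.log('file:', file.path)
    file.content.pipe(process.stdout)  // stream the file's bytes
  }
})

files.on('error', (err) => {
  console.error('export failed:', err)
})
```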
