Skip to content
This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit afa7688

Browse files
committed
structure and cleaning
1 parent 3751be4 commit afa7688

File tree

6 files changed

+390
-375
lines changed

6 files changed

+390
-375
lines changed

src/exporter.js

+124
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,124 @@
1+
'use strict'
2+
3+
const debug = require('debug')
4+
const log = debug('exporter')
5+
log.err = debug('exporter:error')
6+
const UnixFS = require('ipfs-unixfs')
7+
const async = require('async')
8+
const events = require('events')
9+
const Readable = require('stream').Readable
10+
const pathj = require('path')
11+
12+
exports = module.exports = exporter
13+
14+
/**
 * Export the DAG node at `hash` from `dagService` as a stream of files.
 *
 * Emits a `'file'` event ({ stream, path[, dir] }) on the returned
 * EventEmitter for every file (and for empty directories) found under
 * `hash`. `options` is currently unused but kept for API compatibility.
 *
 * @param {string|Buffer} hash - multihash of the root node to export
 * @param {Object} dagService - DAG service with an async `get(hash, cb)`
 * @param {Object} [options]
 * @param {function(Error=)} [callback] - called on completion or first error
 * @returns {events.EventEmitter}
 */
function exporter (hash, dagService, options, callback) {
  if (typeof options === 'function') {
    callback = options
    options = {}
  }

  const ee = new events.EventEmitter()

  dagService.get(hash, (err, fetchedNode) => {
    if (err) {
      if (callback) {
        return callback(err)
      }
      return
    }
    const data = UnixFS.unmarshal(fetchedNode.data)
    const type = data.type

    if (type === 'directory') {
      return dirExporter(fetchedNode, hash, callback)
    }
    if (type === 'file') {
      return fileExporter(fetchedNode, hash, false, callback)
    }
    // BUG FIX: previously an unrecognized node type fell through without
    // ever invoking the callback, hanging the caller forever.
    if (callback) {
      callback(new Error('invalid node type: ' + type))
    }
  })

  return ee

  /**
   * Emit a single file. Leaf nodes carry their data inline; multi-link
   * nodes are streamed lazily, fetching each child on first read.
   */
  function fileExporter (node, name, dir, callback) {
    if (typeof dir === 'function') { callback = dir; dir = {} }
    const rs = new Readable()

    if (node.links.length === 0) {
      // Leaf node: the whole file content is embedded in this node.
      const unmarshaledData = UnixFS.unmarshal(node.data)
      ee.emit('file', { stream: rs, path: name, dir: dir })
      rs.push(unmarshaledData.data)
      rs.push(null)
      if (callback) {
        callback()
      }
      return
    }

    ee.emit('file', { stream: rs, path: name, dir: dir })
    let started = false
    rs._read = () => {
      // Only kick off the fetch chain once; subsequent _read calls are
      // satisfied by pushes from the in-flight series below.
      if (started) {
        return
      }
      started = true
      // NOTE: the iteration callback is named `next` (the original shadowed
      // the outer `callback`, which masked the missing-return bug below).
      async.forEachSeries(node.links, (link, next) => {
        dagService.get(link.hash, (err, res) => {
          if (err) {
            // BUG FIX: was `callback(err)` with no return — execution fell
            // through, dereferenced the undefined `res` and invoked the
            // iteration callback a second time.
            return next(err)
          }
          const unmarshaledData = UnixFS.unmarshal(res.data)
          rs.push(unmarshaledData.data)
          next()
        })
      }, (err) => {
        if (err) {
          if (callback) {
            return callback(err)
          }
          return
        }
        rs.push(null)
        if (callback) {
          callback()
        }
      })
    }
  }

  /**
   * Emit a directory: empty directories emit their raw node data; otherwise
   * recurse into each link as a file or subdirectory.
   */
  function dirExporter (node, name, callback) {
    const rs = new Readable()

    if (node.links.length === 0) {
      rs.push(node.data)
      rs.push(null)
      ee.emit('file', {stream: rs, path: name})
      if (callback) {
        callback()
      }
      return
    }

    async.forEachSeries(node.links, (link, next) => {
      dagService.get(link.hash, (err, res) => {
        if (err) {
          // BUG FIX: missing `return` caused a crash on undefined `res`
          // plus a double invocation of the iteration callback.
          return next(err)
        }
        const unmarshaledData = UnixFS.unmarshal(res.data)
        if (unmarshaledData.type === 'file') {
          return fileExporter(res, pathj.join(name, link.name), next)
        }
        if (unmarshaledData.type === 'directory') {
          return dirExporter(res, pathj.join(name, link.name), next)
        }
        next()
      })
    }, (err) => {
      if (err) {
        if (callback) {
          return callback(err)
        }
        return
      }
      if (callback) {
        callback()
      }
    })
  }
}

src/importer.js

+249
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,249 @@
1+
'use strict'
2+
3+
const debug = require('debug')
4+
const log = debug('importer')
5+
log.err = debug('importer:error')
6+
const fs = require('fs')
7+
const mDAG = require('ipfs-merkle-dag')
8+
const FixedSizeChunker = require('./chunker-fixed-size')
9+
const through2 = require('through2')
10+
const UnixFS = require('ipfs-unixfs')
11+
const async = require('async')
12+
13+
exports = module.exports
14+
15+
const CHUNK_SIZE = 262144
16+
17+
// Use a layout + chunkers to convert a directory (or file) to the layout format
18+
exports.import = (target, dagService, options, callback) => {
19+
if (typeof options === 'function') { callback = options; options = {} }
20+
21+
if (!target) { return callback(new Error('must specify target')) }
22+
if (!dagService) { return callback(new Error('must specify dag service')) }
23+
24+
// options.recursive : follow dirs
25+
// options.chunkers : obj with chunkers to each type of data, { default: dumb-chunker }
26+
27+
options = options || {}
28+
29+
if (Buffer.isBuffer(target)) {
30+
bufferImporter(target, callback)
31+
} else if (typeof target.on === 'function') {
32+
// TODO Create Stream Importer
33+
// streamImporter(options.stream, callback)
34+
return callback(new Error('stream importer has not been built yet'))
35+
} else if (typeof target === 'string') {
36+
var stats
37+
try {
38+
stats = fs.statSync(target)
39+
} catch (e) {
40+
return callback(e)
41+
}
42+
if (stats.isFile()) {
43+
fileImporter(target, callback)
44+
} else if (stats.isDirectory() && options.recursive) {
45+
dirImporter(target, callback)
46+
} else {
47+
return callback(new Error('recursive must be true to add a directory'))
48+
}
49+
}
50+
51+
function fileImporter (path, callback) {
52+
const stats = fs.statSync(path)
53+
if (stats.size > CHUNK_SIZE) {
54+
const links = [] // { Hash: , Size: , Name: }
55+
fs.createReadStream(path)
56+
.pipe(new FixedSizeChunker(CHUNK_SIZE))
57+
.pipe(through2((chunk, enc, cb) => {
58+
// TODO: check if this is right (I believe it should be type 'raw'
59+
// https://github.com/ipfs/go-ipfs/issues/2331
60+
const raw = new UnixFS('file', chunk)
61+
62+
const node = new mDAG.DAGNode(raw.marshal())
63+
64+
dagService.add(node, function (err) {
65+
if (err) {
66+
return log.err(err)
67+
}
68+
links.push({
69+
Hash: node.multihash(),
70+
Size: node.size(),
71+
leafSize: raw.fileSize(),
72+
Name: ''
73+
})
74+
cb()
75+
})
76+
}, (cb) => {
77+
const file = new UnixFS('file')
78+
const parentNode = new mDAG.DAGNode()
79+
links.forEach((l) => {
80+
file.addBlockSize(l.leafSize)
81+
const link = new mDAG.DAGLink(l.Name, l.Size, l.Hash)
82+
parentNode.addRawLink(link)
83+
})
84+
85+
parentNode.data = file.marshal()
86+
dagService.add(parentNode, (err) => {
87+
if (err) {
88+
return log.err(err)
89+
}
90+
91+
const pathSplit = path.split('/')
92+
const fileName = pathSplit[pathSplit.length - 1]
93+
94+
callback(null, {
95+
Hash: parentNode.multihash(),
96+
Size: parentNode.size(),
97+
Name: fileName
98+
}) && cb()
99+
})
100+
}))
101+
} else {
102+
// create just one file node with the data directly
103+
var buf = fs.readFileSync(path)
104+
const fileUnixFS = new UnixFS('file', buf)
105+
const fileNode = new mDAG.DAGNode(fileUnixFS.marshal())
106+
107+
dagService.add(fileNode, (err) => {
108+
if (err) {
109+
return log.err(err)
110+
}
111+
112+
const split = path.split('/')
113+
const fileName = split[split.length - 1]
114+
115+
callback(null, {
116+
Hash: fileNode.multihash(),
117+
Size: fileNode.size(),
118+
Name: fileName
119+
})
120+
})
121+
}
122+
}
123+
124+
function dirImporter (path, callback) {
125+
const files = fs.readdirSync(path)
126+
const dirUnixFS = new UnixFS('directory')
127+
const dirNode = new mDAG.DAGNode()
128+
129+
if (files.length === 0) {
130+
dirNode.data = dirUnixFS.marshal()
131+
dagService.add(dirNode, (err) => {
132+
if (err) {
133+
return callback(err)
134+
}
135+
136+
const split = path.split('/')
137+
const dirName = split[split.length - 1]
138+
139+
callback(null, {
140+
Hash: dirNode.multihash(),
141+
Size: dirNode.size(),
142+
Name: dirName
143+
})
144+
})
145+
return
146+
}
147+
148+
async.map(
149+
files,
150+
(file, cb) => {
151+
const filePath = path + '/' + file
152+
const stats = fs.statSync(filePath)
153+
if (stats.isFile()) {
154+
return fileImporter(filePath, cb)
155+
} if (stats.isDirectory()) {
156+
return dirImporter(filePath, cb)
157+
} else {
158+
return cb(new Error('Found a weird file' + path + file))
159+
}
160+
},
161+
(err, results) => {
162+
if (err) {
163+
return callback(err)
164+
}
165+
results.forEach((result) => {
166+
dirNode.addRawLink(new mDAG.DAGLink(result.Name, result.Size, result.Hash))
167+
})
168+
169+
dirNode.data = dirUnixFS.marshal()
170+
171+
dagService.add(dirNode, (err) => {
172+
if (err) {
173+
return callback(err)
174+
}
175+
176+
const split = path.split('/')
177+
const dirName = split[split.length - 1]
178+
179+
callback(null, {
180+
Hash: dirNode.multihash(),
181+
Size: dirNode.size(),
182+
Name: dirName
183+
})
184+
})
185+
})
186+
}
187+
function bufferImporter (buffer, callback) {
188+
const links = [] // { Hash: , Size: , Name: }
189+
if (buffer.length > CHUNK_SIZE) {
190+
var fsc = new FixedSizeChunker(CHUNK_SIZE)
191+
fsc.write(buffer)
192+
fsc.end()
193+
fsc.pipe(through2((chunk, enc, cb) => {
194+
// TODO: check if this is right (I believe it should be type 'raw'
195+
// https://github.com/ipfs/go-ipfs/issues/2331
196+
const raw = new UnixFS('file', chunk)
197+
const node = new mDAG.DAGNode(raw.marshal())
198+
199+
dagService.add(node, function (err) {
200+
if (err) {
201+
return log.err(err)
202+
}
203+
links.push({
204+
Hash: node.multihash(),
205+
Size: node.size(),
206+
leafSize: raw.fileSize(),
207+
Name: ''
208+
})
209+
cb()
210+
})
211+
}, (cb) => {
212+
const file = new UnixFS('file')
213+
const parentNode = new mDAG.DAGNode()
214+
links.forEach((l) => {
215+
file.addBlockSize(l.leafSize)
216+
const link = new mDAG.DAGLink(l.Name, l.Size, l.Hash)
217+
parentNode.addRawLink(link)
218+
})
219+
parentNode.data = file.marshal()
220+
dagService.add(parentNode, (err) => {
221+
if (err) {
222+
return log.err(err)
223+
}
224+
225+
callback(null, {
226+
Hash: parentNode.multihash(),
227+
Size: parentNode.size()
228+
}) && cb()
229+
})
230+
}))
231+
} else {
232+
// create just one file node with the data directly
233+
const fileUnixFS = new UnixFS('file', buffer)
234+
const fileNode = new mDAG.DAGNode(fileUnixFS.marshal())
235+
236+
dagService.add(fileNode, (err) => {
237+
if (err) {
238+
return log.err(err)
239+
}
240+
241+
callback(null, {
242+
Hash: fileNode.multihash(),
243+
Size: fileNode.size()
244+
})
245+
})
246+
}
247+
}
248+
// function streamImporter (stream, callback) {}
249+
}

0 commit comments

Comments
 (0)