
Commit f4082d5

wip: migrate importer to async IPLD format interface

Committed Oct 26, 2016 · 1 parent 8778c81

14 files changed: +286 −190 lines
 

README.md (+8)

```diff
@@ -25,6 +25,14 @@ IPFS unixFS Engine
 - [Contribute](#contribute)
 - [License](#license)
 
+## BEWARE BEWARE BEWARE there might be 🐉
+
+This module has passed through several iterations and is still far from a nice, easily understandable codebase. Currently missing features:
+
+- tar importer
+- trickle dag exporter
+- sharding
+
 ## Install
 
 With [npm](https://npmjs.org/) installed, run
```

package.json (+9 −7)

```diff
@@ -2,7 +2,7 @@
   "name": "ipfs-unixfs-engine",
   "version": "0.11.4",
   "description": "JavaScript implementation of the unixfs Engine used by IPFS",
-  "main": "src/index.js",
+  "main": "lib/index.js",
   "jsnext:main": "src/index.js",
   "scripts": {
     "lint": "aegir-lint",
@@ -34,13 +34,13 @@
   },
   "homepage": "https://github.com/ipfs/js-ipfs-unixfs-engine#readme",
   "devDependencies": {
-    "aegir": "^8.0.1",
+    "aegir": "^8.1.2",
     "buffer-loader": "0.0.1",
     "chai": "^3.5.0",
-    "fs-pull-blob-store": "^0.3.0",
-    "idb-pull-blob-store": "^0.4.0",
-    "ipfs-block-service": "^0.5.0",
-    "ipfs-repo": "^0.9.0",
+    "fs-pull-blob-store": "^0.4.1",
+    "idb-pull-blob-store": "^0.5.1",
+    "ipfs-block-service": "^0.6.0",
+    "ipfs-repo": "^0.10.0",
     "ncp": "^2.0.0",
     "pre-commit": "^1.1.3",
     "pull-zip": "^2.0.0",
@@ -49,8 +49,10 @@
     "run-series": "^1.1.4"
   },
   "dependencies": {
-    "ipld-dag-pb": "^0.0.1",
+    "cids": "^0.2.0",
     "ipfs-unixfs": "^0.1.4",
+    "ipld-dag-pb": "^0.1.2",
+    "ipld-resolver": "^0.1.1",
     "is-ipfs": "^0.2.0",
     "multihashes": "^0.2.2",
     "pull-block": "^1.0.2",
```

src/chunker-fixed-size.js (−7)

This file was deleted.

src/chunker/fixed-size.js (+7)

```diff
@@ -0,0 +1,7 @@
+'use strict'
+
+const pullBlock = require('pull-block')
+
+module.exports = (size) => {
+  return pullBlock(size, { zeroPadding: false })
+}
```
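
The chunker is a pull-stream *through* stream built on pull-block, so it can be wired between any pull source and sink. A minimal usage sketch (the buffer contents and block size here are illustrative, not from this commit):

```js
// Sketch: split a single 10-byte buffer into fixed-size blocks.
// pull-stream and pull-block are both in this repo's dependency tree.
const pull = require('pull-stream')
const fixedSizeChunker = require('./src/chunker/fixed-size')

pull(
  pull.values([Buffer.alloc(10, 'a')]), // source: one 10-byte buffer
  fixedSizeChunker(4),                  // through: 4-byte blocks
  pull.collect((err, chunks) => {
    if (err) throw err
    // zeroPadding is disabled, so the last block keeps its real length
    console.log(chunks.map((c) => c.length)) // [ 4, 4, 2 ]
  })
)
```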
2 files renamed without changes.

src/exporter.js → src/exporter/index.js (+3 −3)

```diff
@@ -4,12 +4,12 @@ const traverse = require('pull-traverse')
 const pull = require('pull-stream')
 const CID = require('cids')
 
-const util = require('./util')
+const util = require('./../util')
 const switchType = util.switchType
 const cleanMultihash = util.cleanMultihash
 
-const dirExporter = require('./exporters/dir')
-const fileExporter = require('./exporters/file')
+const dirExporter = require('./dir')
+const fileExporter = require('./file')
 
 module.exports = (hash, ipldResolver, options) => {
   hash = cleanMultihash(hash)
```
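
With the `(hash, ipldResolver, options)` signature, callers now hand the exporter an IPLD resolver rather than a DAG service. A hedged sketch of a caller after this migration — the repo instance and the hash are placeholders, and the constructor shapes are assumed from the devDependency versions in this commit:

```js
const IpldResolver = require('ipld-resolver')
const BlockService = require('ipfs-block-service')
const pull = require('pull-stream')
const exporter = require('./src/exporter')

// `repo` is assumed to be an already-initialized ipfs-repo instance
const ipldResolver = new IpldResolver(new BlockService(repo))

pull(
  exporter('QmSomeMultihash', ipldResolver), // placeholder hash
  pull.collect((err, entries) => {
    if (err) throw err
    entries.forEach((entry) => console.log(entry.path))
  })
)
```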

src/importer/flush-tree.js (+182)

```diff
@@ -0,0 +1,182 @@
+'use strict'
+
+const mh = require('multihashes')
+const UnixFS = require('ipfs-unixfs')
+const CID = require('cids')
+const dagPB = require('ipld-dag-pb')
+const asyncEach = require('async/each')
+
+const DAGLink = dagPB.DAGLink
+const DAGNode = dagPB.DAGNode
+
+module.exports = (files, ipldResolver, source, callback) => {
+  // 1) convert files to a tree
+  const fileTree = createTree(files)
+
+  if (Object.keys(fileTree).length === 0) {
+    return callback() // no dirs to be created
+  }
+
+  // 2) create sizeIndex
+  const sizeIndex = createSizeIndex(files)
+
+  // 3) bottom up flushing
+  traverse(fileTree, sizeIndex, null, ipldResolver, source, callback)
+}
+
+/*
+ * createTree
+ *
+ * receives an array of files with the format:
+ * {
+ *   path: // full path
+ *   multihash: // multihash of the dagNode
+ *   size: // cumulative size
+ * }
+ *
+ * returns a JSON object that represents a tree where branches are the paths
+ * and the leaves are objects with file names and respective multihashes, such
+ * as:
+ * {
+ *   foo: {
+ *     bar: {
+ *       baz.txt: <multihash>
+ *     }
+ *   }
+ * }
+ */
+function createTree (files) {
+  const fileTree = {}
+
+  files.forEach((file) => {
+    let splitted = file.path.split('/')
+    if (splitted.length === 1) {
+      return // adding just one file
+    }
+    if (splitted[0] === '') {
+      splitted = splitted.slice(1)
+    }
+    var tmpTree = fileTree
+
+    for (var i = 0; i < splitted.length; i++) {
+      if (!tmpTree[splitted[i]]) {
+        tmpTree[splitted[i]] = {}
+      }
+      if (i === splitted.length - 1) {
+        tmpTree[splitted[i]] = file.multihash
+      } else {
+        tmpTree = tmpTree[splitted[i]]
+      }
+    }
+  })
+
+  return fileTree
+}
+
+/*
+ * create a size index that goes like:
+ * { <multihash>: <size> }
+ */
+function createSizeIndex (files) {
+  const sizeIndex = {}
+
+  files.forEach((file) => {
+    sizeIndex[mh.toB58String(file.multihash)] = file.size
+  })
+
+  return sizeIndex
+}
+
+/*
+ * expand the branches recursively (depth first), flush them first
+ * and then traverse from the bottom up, flushing every node
+ *
+ * Algorithm tl;dr:
+ *   create a dirNode
+ *   Object.keys
+ *   If the value is an Object
+ *     create a dir Node
+ *     Object.keys
+ *     Once finished, add the result as a link to the dir node
+ *   If the value is not an object
+ *     add as a link to the dirNode
+ */
+function traverse (tree, sizeIndex, path, ipldResolver, source, done) {
+  const keys = Object.keys(tree)
+
+  let tmp = tree
+
+  asyncEach(keys, (key, cb) => {
+    if (isLeaf(tmp[key])) {
+      cb()
+    } else {
+      path = path ? path + '/' + key : key
+      console.log('->', path)
+
+      traverse(tmp[key], sizeIndex, path, ipldResolver, source, (err, multihash) => {
+        if (err) {
+          return done(err)
+        }
+        tmp[key] = multihash
+        cb()
+      })
+    }
+  }, () => {
+    next(tmp, done)
+  })
+
+  function next (tree, cb) {
+    // at this stage, all keys are multihashes
+    // create a dir node
+    // add all the multihashes as links
+    // return this new node multihash
+
+    const keys = Object.keys(tree)
+
+    const ufsDir = new UnixFS('directory')
+    const node = new DAGNode(ufsDir.marshal())
+
+    keys.forEach((key) => {
+      const b58mh = mh.toB58String(tree[key])
+      const link = new DAGLink(key, sizeIndex[b58mh], tree[key])
+      node.addRawLink(link)
+    })
+
+    console.log('0---->', path)
+    node.multihash((err, multihash) => {
+      if (err) {
+        return cb(err)
+      }
+      node.size((err, size) => {
+        if (err) {
+          return cb(err)
+        }
+
+        sizeIndex[mh.toB58String(multihash)] = size
+        console.log('1---->', path)
+
+        ipldResolver.put({
+          node: node,
+          cid: new CID(multihash)
+        }, (err) => {
+          if (err) {
+            source.push(new Error('failed to store dirNode'))
+          } else if (path) {
+            console.log('2---->', path)
+            source.push({
+              path: path,
+              multihash: multihash,
+              size: size
+            })
+          }
+
+          cb(null, multihash)
+        })
+      })
+    })
+  }
+}
+
+function isLeaf (value) {
+  return !(typeof value === 'object' && !Buffer.isBuffer(value))
+}
```
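
To make the bottom-up flush concrete, here is a hypothetical trace of what `createTree` and `traverse` do for two files; `mh1` and `mh2` are placeholder multihash buffers, not real hashes:

```js
// Input to flush-tree: leaves already imported, with their multihashes.
const files = [
  { path: '/foo/bar/baz.txt', multihash: mh1, size: 12 },
  { path: '/foo/qux.txt', multihash: mh2, size: 4 }
]

// createTree(files) nests the paths; leaves hold the multihash buffers:
// { foo: { bar: { 'baz.txt': mh1 }, 'qux.txt': mh2 } }

// createSizeIndex(files) maps base58 multihashes to cumulative sizes:
// { <b58(mh1)>: 12, <b58(mh2)>: 4 }

// traverse() then recurses depth-first: it flushes `foo/bar` first (a
// 'directory' UnixFS DAGNode linking to baz.txt), replaces the `bar`
// subtree with that node's multihash, then flushes `foo`, putting each
// node through ipldResolver.put() and pushing each flushed directory as
// { path, multihash, size } into `source`.
```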
