Skip to content
This repository was archived by the owner on Feb 12, 2024. It is now read-only.

Commit 53158a5

Browse files
committed
add, cat, get core + cli offline
1 parent 036eaf6 commit 53158a5

File tree

10 files changed

+353
-38
lines changed

10 files changed

+353
-38
lines changed

package.json

+13-12
Original file line numberDiff line numberDiff line change
@@ -37,46 +37,47 @@
3737
},
3838
"homepage": "https://github.com/ipfs/js-ipfs#readme",
3939
"devDependencies": {
40-
"aegir": "^3.0.0",
40+
"aegir": "^3.0.1",
4141
"async": "^2.0.0-rc.3",
4242
"buffer-loader": "0.0.1",
4343
"chai": "^3.5.0",
4444
"expose-loader": "^0.7.1",
4545
"form-data": "^1.0.0-rc3",
4646
"idb-plus-blob-store": "^1.1.2",
47-
"lodash": "^4.11.1",
48-
"mocha": "^2.3.4",
47+
"lodash": "^4.11.2",
48+
"mocha": "^2.4.5",
4949
"ncp": "^2.0.0",
5050
"nexpect": "^0.5.0",
5151
"pre-commit": "^1.1.2",
52-
"rimraf": "^2.4.4",
52+
"rimraf": "^2.5.2",
5353
"stream-to-promise": "^1.1.0",
5454
"transform-loader": "^0.2.3"
5555
},
5656
"dependencies": {
5757
"babel-runtime": "^6.6.1",
5858
"bl": "^1.1.2",
59-
"boom": "^3.1.1",
59+
"boom": "^3.1.2",
6060
"bs58": "^3.0.0",
6161
"debug": "^2.2.0",
6262
"fs-blob-store": "^5.2.1",
6363
"glob": "^7.0.3",
6464
"hapi": "^13.3.0",
65-
"ipfs-api": "^3.0.1",
65+
"ipfs-api": "^3.0.2",
6666
"ipfs-block": "^0.3.0",
6767
"ipfs-block-service": "^0.3.0",
68-
"ipfs-data-importing": "^0.3.3",
6968
"ipfs-merkle-dag": "^0.5.0",
7069
"ipfs-multipart": "^0.1.0",
71-
"ipfs-repo": "^0.7.1",
72-
"joi": "^8.0.2",
73-
"libp2p-ipfs": "^0.3.3",
70+
"ipfs-repo": "^0.8.0",
71+
"ipfs-unixfs-engine": "^0.6.0",
72+
"joi": "^8.0.5",
73+
"libp2p-ipfs": "^0.3.5",
7474
"lodash.get": "^4.2.1",
75-
"lodash.set": "^4.0.0",
76-
"multiaddr": "^1.3.0",
75+
"lodash.set": "^4.1.0",
76+
"multiaddr": "^1.4.1",
7777
"peer-book": "0.1.0",
7878
"peer-id": "^0.6.6",
7979
"peer-info": "^0.6.2",
80+
"readable-stream": "^1.1.13",
8081
"ronin": "^0.3.11",
8182
"temp": "^0.8.3"
8283
},

src/cli/commands/files/add.js

+70-8
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,16 @@
11
'use strict'
22

33
const Command = require('ronin').Command
4-
const IPFS = require('../../../core')
4+
const utils = require('../../utils')
55
const debug = require('debug')
66
const log = debug('cli:version')
77
log.error = debug('cli:version:error')
88
const bs58 = require('bs58')
9+
const Readable = require('stream').Readable
10+
const fs = require('fs')
11+
const async = require('async')
12+
const pathj = require('path')
13+
const glob = require('glob')
914

1015
module.exports = Command.extend({
1116
desc: 'Add a file to IPFS using the UnixFS data format',
@@ -19,15 +24,72 @@ module.exports = Command.extend({
1924
},
2025

2126
run: (recursive, path) => {
22-
var node = new IPFS()
23-
path = process.cwd() + '/' + path
24-
node.files.add(path, {
25-
recursive: recursive
26-
}, (err, stats) => {
27+
let s
28+
let rs
29+
30+
if (!path) {
31+
throw new Error('Error: Argument \'path\' is required')
32+
}
33+
34+
s = fs.statSync(path)
35+
36+
if (s.isDirectory() && recursive === false) {
37+
throw new Error('Error: ' + process.cwd() + ' is a directory, use the \'-r\' flag to specify directories')
38+
}
39+
if (path === '.' && recursive === true) {
40+
path = process.cwd()
41+
s = fs.statSync(process.cwd())
42+
} else if (path === '.' && recursive === false) {
43+
s = fs.statSync(process.cwd())
44+
if (s.isDirectory()) {
45+
throw new Error('Error: ' + process.cwd() + ' is a directory, use the \'-r\' flag to specify directories')
46+
}
47+
}
48+
49+
glob(pathj.join(path, '/**/*'), (err, res) => {
2750
if (err) {
28-
return console.log(err)
51+
throw err
2952
}
30-
console.log('added', bs58.encode(stats.Hash).toString(), stats.Name)
53+
utils.getIPFS((err, ipfs) => {
54+
if (err) {
55+
throw err
56+
}
57+
const i = ipfs.files.add()
58+
i.on('data', (file) => {
59+
console.log('added', bs58.encode(file.multihash).toString(), file.path)
60+
})
61+
if (res.length !== 0) {
62+
const index = path.lastIndexOf('/')
63+
async.eachLimit(res, 10, (element, callback) => {
64+
rs = new Readable()
65+
const addPath = element.substring(index + 1, element.length)
66+
if (fs.statSync(element).isDirectory()) {
67+
callback()
68+
} else {
69+
const buffered = fs.readFileSync(element)
70+
rs.push(buffered)
71+
rs.push(null)
72+
const filePair = {path: addPath, stream: rs}
73+
i.write(filePair)
74+
callback()
75+
}
76+
}, (err) => {
77+
if (err) {
78+
throw err
79+
}
80+
i.end()
81+
})
82+
} else {
83+
rs = new Readable()
84+
const buffered = fs.readFileSync(path)
85+
path = path.substring(path.lastIndexOf('/') + 1, path.length)
86+
rs.push(buffered)
87+
rs.push(null)
88+
const filePair = {path: path, stream: rs}
89+
i.write(filePair)
90+
i.end()
91+
}
92+
})
3193
})
3294
}
3395
})

src/cli/commands/files/cat.js

+37
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
'use strict'
2+
3+
const Command = require('ronin').Command
4+
const debug = require('debug')
5+
const utils = require('../../utils')
6+
const log = debug('cli:files')
7+
log.error = debug('cli:files:error')
8+
9+
module.exports = Command.extend({
10+
desc: 'Download IPFS objects',
11+
12+
options: {},
13+
14+
run: (path, options) => {
15+
if (!path) {
16+
throw new Error("Argument 'path' is required")
17+
}
18+
if (!options) {
19+
options = {}
20+
}
21+
utils.getIPFS((err, ipfs) => {
22+
if (err) {
23+
throw err
24+
}
25+
ipfs.files.cat(path, (err, res) => {
26+
if (err) {
27+
throw (err)
28+
}
29+
if (res) {
30+
res.on('file', (data) => {
31+
data.stream.pipe(process.stdout)
32+
})
33+
}
34+
})
35+
})
36+
}
37+
})

src/cli/commands/files/get.js

+69
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,69 @@
1+
'use strict'
2+
3+
const Command = require('ronin').Command
4+
const debug = require('debug')
5+
const utils = require('../../utils')
6+
const log = debug('cli:files')
7+
log.error = debug('cli:files:error')
8+
var fs = require('fs')
9+
const pathj = require('path')
10+
11+
module.exports = Command.extend({
12+
desc: 'Download IPFS objects',
13+
14+
options: {},
15+
16+
run: (path, options) => {
17+
let dir
18+
let filepath
19+
let ws
20+
21+
if (!path) {
22+
throw new Error("Argument 'path' is required")
23+
}
24+
if (!options) {
25+
options = {}
26+
dir = process.cwd()
27+
} else {
28+
if (options.slice(-1) !== '/') {
29+
options += '/'
30+
}
31+
dir = options
32+
}
33+
34+
utils.getIPFS((err, ipfs) => {
35+
if (err) {
36+
throw err
37+
}
38+
ipfs.files.get(path, (err, data) => {
39+
if (err) {
40+
throw err
41+
}
42+
data.on('file', (data) => {
43+
if (data.path.lastIndexOf('/') === -1) {
44+
filepath = data.path
45+
if (data.dir === false) {
46+
ws = fs.createWriteStream(pathj.join(dir, data.path))
47+
data.stream.pipe(ws)
48+
} else {
49+
try {
50+
fs.mkdirSync(pathj.join(dir, data.path))
51+
} catch (err) {
52+
throw err
53+
}
54+
}
55+
} else {
56+
filepath = data.path.substring(0, data.path.lastIndexOf('/') + 1)
57+
try {
58+
fs.mkdirSync(pathj.join(dir, filepath))
59+
} catch (err) {
60+
throw err
61+
}
62+
ws = fs.createWriteStream(pathj.join(dir, data.path))
63+
data.stream.pipe(ws)
64+
}
65+
})
66+
})
67+
})
68+
}
69+
})

src/core/index.js

+49-5
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,13 @@ const DAGService = mDAG.DAGService
99
const peerId = require('peer-id')
1010
const PeerInfo = require('peer-info')
1111
const multiaddr = require('multiaddr')
12-
const importer = require('ipfs-data-importing').import
12+
const Importer = require('ipfs-unixfs-engine').importer
13+
const Exporter = require('ipfs-unixfs-engine').exporter
1314
const libp2p = require('libp2p-ipfs')
1415
const init = require('./init')
1516
const IPFSRepo = require('ipfs-repo')
1617
const PeerBook = require('peer-book')
18+
const UnixFS = require('ipfs-unixfs')
1719

1820
exports = module.exports = IPFS
1921

@@ -393,10 +395,52 @@ function IPFS (repo) {
393395
}
394396

395397
this.files = {
396-
add: (path, options, callback) => {
397-
options.path = path
398-
options.dagService = dagS
399-
importer(options, callback)
398+
add: (arr, callback) => {
399+
if (typeof arr === 'function') {
400+
callback = arr
401+
arr = undefined
402+
}
403+
if (callback === undefined) {
404+
callback = function noop () {}
405+
}
406+
if (arr === undefined) {
407+
return new Importer(dagS)
408+
}
409+
410+
const i = new Importer(dagS)
411+
const res = []
412+
413+
i.on('data', (info) => {
414+
res.push(info)
415+
})
416+
417+
i.on('end', () => {
418+
callback(null, res)
419+
})
420+
421+
arr.forEach((tuple) => {
422+
i.write(tuple)
423+
})
424+
425+
i.end()
426+
},
427+
cat: (hash, callback) => {
428+
dagS.get(hash, (err, fetchedNode) => {
429+
if (err) {
430+
return callback(err, null)
431+
}
432+
const data = UnixFS.unmarshal(fetchedNode.data)
433+
if (data.type === 'directory') {
434+
callback('This dag node is a directory', null)
435+
} else {
436+
const exportEvent = Exporter(hash, dagS)
437+
callback(null, exportEvent)
438+
}
439+
})
440+
},
441+
get: (hash, callback) => {
442+
var exportFile = Exporter(hash, dagS)
443+
callback(null, exportFile)
400444
}
401445
}
402446
}

0 commit comments

Comments (0)