diff --git a/package.json b/package.json
index 0722f826ec..425c94b262 100644
--- a/package.json
+++ b/package.json
@@ -132,6 +132,7 @@
     "multihashes": "~0.4.5",
     "once": "^1.4.0",
     "path-exists": "^3.0.0",
+    "pipe-args": "^1.3.0",
     "peer-book": "^0.4.0",
     "peer-id": "^0.8.7",
     "peer-info": "^0.9.2",
diff --git a/src/cli/bin.js b/src/cli/bin.js
index cd506046dc..dd7eb398b0 100755
--- a/src/cli/bin.js
+++ b/src/cli/bin.js
@@ -2,11 +2,19 @@
 'use strict'
 
+const pipe = require('pipe-args')
 const yargs = require('yargs')
 const updateNotifier = require('update-notifier')
 const readPkgUp = require('read-pkg-up')
 const utils = require('./utils')
 
+const enableStdin = [
+  'data', 'path', 'object data', 'ref', 'key', 'ipfs-path', 'add', 'get', 'cat',
+  'name', 'address', 'files', 'peer', 'recursive', 'default-config', 'peer ID'
+]
+
+const hasPipedArgs = pipe.load({ commands: enableStdin })
+
 const pkg = readPkgUp.sync({cwd: __dirname}).pkg
 
 updateNotifier({
   pkg,
@@ -50,14 +58,15 @@
 utils.getIPFS((err, ipfs, cleanup) => {
   if (err) {
     throw err
   }
-
+  // finalize cli setup
   cli // eslint-disable-line
     .help()
     .strict(false)
     .completion()
     .parse(args, {
-      ipfs: ipfs
+      ipfs: ipfs,
+      hasPipedArgs: hasPipedArgs
     }, (err, argv, output) => {
       if (output) {
         console.log(output)
diff --git a/src/cli/commands/files/add.js b/src/cli/commands/files/add.js
index 9d8b7beb7d..0ba208e39d 100644
--- a/src/cli/commands/files/add.js
+++ b/src/cli/commands/files/add.js
@@ -2,6 +2,7 @@
 
 const fs = require('fs')
 const path = require('path')
+const stream = require('stream')
 const glob = require('glob')
 const sortBy = require('lodash.sortby')
 const pull = require('pull-stream')
@@ -36,6 +37,19 @@ function checkPath (inPath, recursive) {
   return inPath
 }
 
+function printResult (added) {
+  sortBy(added, 'path')
+    .reverse()
+    .map((file) => {
+      const log = [ 'added', file.hash ]
+
+      if (file.path.length > 0) log.push(file.path)
+
+      return log.join(' ')
+    })
+    .forEach((msg) => console.log(msg))
+}
+
 module.exports = {
   command: 'add <file>',
 
@@ -69,8 +83,7 @@ module.exports = {
   },
 
   handler (argv) {
-    const inPath = checkPath(argv.file, argv.recursive)
-    const index = inPath.lastIndexOf('/') + 1
+    const hasPipedArgs = argv.hasPipedArgs
     const options = {
       strategy: argv.trickle ? 'trickle' : 'balanced',
       shardSplitThreshold: argv.enableShardingExperiment ? argv.shardSplitThreshold : Infinity
@@ -94,26 +107,62 @@ module.exports = {
       }
     }
 
-    createAddStream((err, addStream) => {
-      if (err) {
+    // if there are piped arguments, input is data to publish instead of a file
+    // path
+    if(hasPipedArgs) {
+      const data = argv.file
+      const dataStream = new stream.Readable()
+      dataStream.push(Buffer.from(data, 'utf8'))
+      dataStream.push(null)
+      createAddStream((err, addStream) => {
+        if(err)
         throw err
-      }
+
+        addDataPipeline([dataStream], addStream)
+      })
+    }
+    else {
+      const inPath = checkPath(argv.file, argv.recursive)
+      const index = inPath.lastIndexOf('/') + 1
 
-      glob(path.join(inPath, '/**/*'), (err, list) => {
+      createAddStream((err, addStream) => {
         if (err) {
           throw err
         }
-        if (list.length === 0) {
-          list = [inPath]
-        }
+
+        glob(path.join(inPath, '/**/*'), (err, list) => {
+          if (err) {
+            throw err
+          }
+          if (list.length === 0) {
+            list = [inPath]
+          }
+
+          addFilePipeline(index, addStream, list, argv.wrapWithDirectory)
+        })
+      })
+    }
+  }
+}
 
-        addPipeline(index, addStream, list, argv.wrapWithDirectory)
-      })
+function addDataPipeline (dataStream, addStream) {
+  pull(
+    pull.values(dataStream),
+    pull.map(dataStream => {
+      return { path: '', content: dataStream }
+    }),
+    addStream,
+    pull.collect((err, added) => {
+      if(err) {
+        throw err
+      }
+      printResult(added)
     })
-  }
+  )
 }
 
-function addPipeline (index, addStream, list, wrapWithDirectory) {
+
+function addFilePipeline (index, addStream, list, wrapWithDirectory) {
   pull(
     zip(
       pull.values(list),
@@ -122,16 +171,19 @@ function addPipeline (index, addStream, list, wrapWithDirectory) {
         paramap(fs.stat.bind(fs), 50)
      )
     ),
-    pull.map((pair) => ({
-      path: pair[0],
-      isDirectory: pair[1].isDirectory()
-    })),
-    pull.filter((file) => !file.isDirectory),
+    pull.map((pair) => {
+      return ({
+        path: pair[0],
+        isDirectory: pair[1].isDirectory()
+      })}),
+    pull.filter((file) => {
+      return !file.isDirectory
+    }),
     pull.map((file) => ({
       path: file.path.substring(index, file.path.length),
       originalPath: file.path
     })),
-    pull.map((file) => ({
+    pull.map((file) => ({
       path: wrapWithDirectory ? path.join(WRAPPER, file.path) : file.path,
       content: fs.createReadStream(file.originalPath)
     })),
@@ -144,17 +196,7 @@ function addPipeline (index, addStream, list, wrapWithDirectory) {
       if (err) {
         throw err
       }
-
-      sortBy(added, 'path')
-        .reverse()
-        .map((file) => {
-          const log = [ 'added', file.hash ]
-
-          if (file.path.length > 0) log.push(file.path)
-
-          return log.join(' ')
-        })
-        .forEach((msg) => console.log(msg))
+      printResult(added)
     })
   )
 }
diff --git a/test/cli/files.js b/test/cli/files.js
index 7dc01599c6..9b6a4af096 100644
--- a/test/cli/files.js
+++ b/test/cli/files.js
@@ -127,6 +127,15 @@ describe('files', () => runOnAndOff((thing) => {
     })
   })
 
+  it('add with piped argument', () => {
+    // echo 'src/init-files/init-docs/readme' | jsipfs files add
+    return ipfs('files add', { piped: 'readme' })
+      .then((out) => {
+        expect(out)
+          .to.eql('added QmR9th2YSQrZsbENPmjeQ8JKor9noz9aoKfAsZw5SE3d6K QmR9th2YSQrZsbENPmjeQ8JKor9noz9aoKfAsZw5SE3d6K\n')
+      })
+  })
+
   it('add and wrap with a directory', () => {
     return ipfs('add -w src/init-files/init-docs/readme').then((out) => {
       expect(out).to.be.eql([
diff --git a/test/utils/ipfs-exec.js b/test/utils/ipfs-exec.js
index 2d7f3746e5..a9701abbee 100644
--- a/test/utils/ipfs-exec.js
+++ b/test/utils/ipfs-exec.js
@@ -33,11 +33,24 @@ module.exports = (repoPath, opts) => {
 
   function ipfs () {
     let args = Array.from(arguments)
-    if (args.length === 1) {
+    let pipedArgs
+
+    _.map(args, arg => arg.piped ? (pipedArgs = arg.piped) : '')
+
+    if (args.length === 1 || args.length === 2 && pipedArgs) {
       args = args[0].split(' ')
     }
 
     const cp = exec(args)
+
+    // Passes content of pipedArgs to childProcess as if jsipfs had been called
+    // with piped arguments
+    if (pipedArgs) {
+      cp.stdin.setEncoding('utf-8')
+      cp.stdin.write(`${pipedArgs}\n`)
+      cp.stdin.end()
+    }
+
     const res = cp.then((res) => {
      // We can't escape the os.tmpdir warning due to:
      // https://github.com/shelljs/shelljs/blob/master/src/tempdir.js#L43
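
Usage sketch (not part of the diff): assuming this branch is linked locally so the CLI is available as `jsipfs` and a repo has already been initialized, the new test above corresponds roughly to the following shell invocation, with the output taken from the test's expectation:

    $ echo 'readme' | jsipfs files add
    added QmR9th2YSQrZsbENPmjeQ8JKor9noz9aoKfAsZw5SE3d6K QmR9th2YSQrZsbENPmjeQ8JKor9noz9aoKfAsZw5SE3d6K

Because pipe-args injects the piped stdin into the argument list (which is why `argv.file` holds the piped data in the new handler branch), the piped string itself is added as content with an empty path, rather than being resolved as a filesystem path.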