Skip to content
This repository was archived by the owner on Feb 12, 2024. It is now read-only.

fix: CLI should accept content from stdin (#474) #785

Closed
wants to merge 4 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,7 @@
"multihashes": "~0.4.5",
"once": "^1.4.0",
"path-exists": "^3.0.0",
"pipe-args": "^1.3.0",
"peer-book": "^0.4.0",
"peer-id": "^0.8.7",
"peer-info": "^0.9.2",
Expand Down
13 changes: 11 additions & 2 deletions src/cli/bin.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,19 @@

'use strict'

const pipe = require('pipe-args')
const yargs = require('yargs')
const updateNotifier = require('update-notifier')
const readPkgUp = require('read-pkg-up')
const utils = require('./utils')

const enableStdin = [
'data', 'path', 'object data', 'ref', 'key', 'ipfs-path', 'add', 'get', 'cat',
'name', 'address', 'files', 'peer', 'recursive', 'default-config', 'peer ID'
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Seems to be missing a few commands. `config` and `pubsub` are two I remember off the top of my head, but I guess there are more.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Here is a Github search that shows which go-ipfs commands accepts stdin: https://github.com/ipfs/go-ipfs/search?utf8=%E2%9C%93&q=EnableStdin&type=

]

const hasPipedArgs = pipe.load({ commands: enableStdin })

const pkg = readPkgUp.sync({cwd: __dirname}).pkg
updateNotifier({
pkg,
Expand Down Expand Up @@ -50,14 +58,15 @@ utils.getIPFS((err, ipfs, cleanup) => {
if (err) {
throw err
}

// finalize cli setup
cli // eslint-disable-line
.help()
.strict(false)
.completion()
.parse(args, {
ipfs: ipfs
ipfs: ipfs,
hasPipedArgs: hasPipedArgs
}, (err, argv, output) => {
if (output) {
console.log(output)
Expand Down
102 changes: 72 additions & 30 deletions src/cli/commands/files/add.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

const fs = require('fs')
const path = require('path')
const stream = require('stream')
const glob = require('glob')
const sortBy = require('lodash.sortby')
const pull = require('pull-stream')
Expand Down Expand Up @@ -36,6 +37,19 @@ function checkPath (inPath, recursive) {
return inPath
}

// Print one line per added file, deepest paths first, in the form:
//   added <hash> [<path>]
// The path is omitted when it is empty (e.g. content piped from stdin).
function printResult (added) {
  const ordered = sortBy(added, 'path').reverse()

  for (const file of ordered) {
    const parts = ['added', file.hash]

    if (file.path.length > 0) {
      parts.push(file.path)
    }

    console.log(parts.join(' '))
  }
}

module.exports = {
command: 'add <file>',

Expand Down Expand Up @@ -69,8 +83,7 @@ module.exports = {
},

handler (argv) {
const inPath = checkPath(argv.file, argv.recursive)
const index = inPath.lastIndexOf('/') + 1
const hasPipedArgs = argv.hasPipedArgs
const options = {
strategy: argv.trickle ? 'trickle' : 'balanced',
shardSplitThreshold: argv.enableShardingExperiment ? argv.shardSplitThreshold : Infinity
Expand All @@ -94,26 +107,62 @@ module.exports = {
}
}

createAddStream((err, addStream) => {
if (err) {
// if there are piped arguments, the input is the content to add,
// rather than a file path
if(hasPipedArgs) {
const data = argv.file
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If I do cat someFile.txt | ipfs add does pipe-args buffer the whole file first? Well, then it is missing the point of pipes, right?

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, the whole file is going to be buffered before the command is triggered. I assumed that since this is for a CLI interface, it would make sense to do so. So it's better to buffer the file using streams and make it async?

const dataStream = new stream.Readable()
dataStream.push(Buffer.from(data, 'utf8'))
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It might not be utf8 — leave that option out; the default doesn't really matter here.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yep, it can use the default. I will remove that option parameter.

dataStream.push(null)
createAddStream((err, addStream) => {
if(err)
throw err
}

addDataPipeline([dataStream], addStream)
})
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Something doesn't look right here:

createAddStream is going to glob a dir and return the stream which has all the files resulting from that glob.

If you want to add one stream, you should not need to call that.

Copy link
Author

@gpestana gpestana Jul 14, 2017

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I see. So the right solution would be to do something like this?

if (hasStdin) {
  ipfs.files.add({path:'', content: process.stdin}, (err, files)
} else { 
// as it is now
}

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yeah, see: #785 (review)

}
else {
const inPath = checkPath(argv.file, argv.recursive)
const index = inPath.lastIndexOf('/') + 1

glob(path.join(inPath, '/**/*'), (err, list) => {
createAddStream((err, addStream) => {
if (err) {
throw err
}
if (list.length === 0) {
list = [inPath]
}

glob(path.join(inPath, '/**/*'), (err, list) => {
if (err) {
throw err
}
if (list.length === 0) {
list = [inPath]
}

addFilePipeline(index, addStream, list, argv.wrapWithDirectory)
})
})
}
}
}

addPipeline(index, addStream, list, argv.wrapWithDirectory)
})
// Push raw data (e.g. content piped in on stdin) through the IPFS add
// stream and print the resulting hashes.
//
// dataStream - array of readable streams holding the content to add
// addStream  - the add stream obtained from createAddStream
//
// Note: the original had its closing `}` and `)` swapped (the function
// brace closed before the `pull(` call), which is a syntax error.
function addDataPipeline (dataStream, addStream) {
  pull(
    pull.values(dataStream),
    // Wrap each stream as an IPFS file entry with an empty path;
    // avoid shadowing the `dataStream` parameter in the callback.
    pull.map((content) => ({ path: '', content: content })),
    addStream,
    pull.collect((err, added) => {
      if (err) {
        throw err
      }
      printResult(added)
    })
  )
}

function addPipeline (index, addStream, list, wrapWithDirectory) {

function addFilePipeline (index, addStream, list, wrapWithDirectory) {
pull(
zip(
pull.values(list),
Expand All @@ -122,16 +171,19 @@ function addPipeline (index, addStream, list, wrapWithDirectory) {
paramap(fs.stat.bind(fs), 50)
)
),
pull.map((pair) => ({
path: pair[0],
isDirectory: pair[1].isDirectory()
})),
pull.filter((file) => !file.isDirectory),
pull.map((pair) => {
return ({
path: pair[0],
isDirectory: pair[1].isDirectory()
})}),
pull.filter((file) => {
return !file.isDirectory
}),
pull.map((file) => ({
path: file.path.substring(index, file.path.length),
originalPath: file.path
})),
pull.map((file) => ({
pull.map((file) => ({
path: wrapWithDirectory ? path.join(WRAPPER, file.path) : file.path,
content: fs.createReadStream(file.originalPath)
})),
Expand All @@ -144,17 +196,7 @@ function addPipeline (index, addStream, list, wrapWithDirectory) {
if (err) {
throw err
}

sortBy(added, 'path')
.reverse()
.map((file) => {
const log = [ 'added', file.hash ]

if (file.path.length > 0) log.push(file.path)

return log.join(' ')
})
.forEach((msg) => console.log(msg))
printResult(added)
})
)
}
9 changes: 9 additions & 0 deletions test/cli/files.js
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,15 @@ describe('files', () => runOnAndOff((thing) => {
})
})

it('add with piped argument', () => {
// equivalent to: echo 'readme' | jsipfs files add
// (the `piped` option writes the literal string to the child process' stdin)
return ipfs('files add', { piped: 'readme' })
.then((out) => {
expect(out)
.to.eql('added QmR9th2YSQrZsbENPmjeQ8JKor9noz9aoKfAsZw5SE3d6K QmR9th2YSQrZsbENPmjeQ8JKor9noz9aoKfAsZw5SE3d6K\n')
})
})

it('add and wrap with a directory', () => {
return ipfs('add -w src/init-files/init-docs/readme').then((out) => {
expect(out).to.be.eql([
Expand Down
15 changes: 14 additions & 1 deletion test/utils/ipfs-exec.js
Original file line number Diff line number Diff line change
Expand Up @@ -33,11 +33,24 @@ module.exports = (repoPath, opts) => {

function ipfs () {
let args = Array.from(arguments)
if (args.length === 1) {
let pipedArgs

_.map(args, arg => arg.piped ? (pipedArgs = arg.piped) : '')

if (args.length === 1 || args.length === 2 && pipedArgs) {
args = args[0].split(' ')
}

const cp = exec(args)

// Passes content of pipedArgs to childProcess as if jsipfs had been called
// with piped arguments
if (pipedArgs) {
cp.stdin.setEncoding('utf-8')
cp.stdin.write(`${pipedArgs}\n`)
cp.stdin.end()
}

const res = cp.then((res) => {
// We can't escape the os.tmpdir warning due to:
// https://github.com/shelljs/shelljs/blob/master/src/tempdir.js#L43
Expand Down