fix(unpack): fix hang on large file on open() fail
The fs-minipass WriteStream always returns false from a write() call if
it failed to open its file descriptor. As a result, the Parse/Unpack
stream gets backed up whenever the file is large enough that the parser
has not already moved on to the next entry in the archive.

Fix: #264
isaacs committed Aug 4, 2021
1 parent 97c46fc commit 84acbd3
Showing 3 changed files with 56 additions and 6 deletions.
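
Editor's note on the mechanism, before the diffs: a Minipass-style parser pauses whenever a destination's write() returns false and resumes only when that destination emits 'drain'. A WriteStream whose open() failed returns false from every write() and never emits 'drain', so any entry with more data still queued behind it stalls forever. The following dependency-free sketch uses invented names (node-tar's parser and fs-minipass are more involved), but the control flow is the same:

const brokenFileStream = {
  // stands in for an fs-minipass WriteStream whose open() failed
  write (chunk) {
    return false // fd never opened: permanent backpressure
  },
  once (event, cb) {}, // 'drain' handlers can be registered, but never fire
}

let parserPaused = false
const feed = chunk => {
  if (!brokenFileStream.write(chunk)) {
    parserPaused = true // the parser now waits for 'drain'...
    brokenFileStream.once('drain', () => {
      parserPaused = false // ...which never comes
    })
  }
}

feed(Buffer.alloc(512))
console.log('parser paused forever:', parserPaused) // => true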
lib/unpack.js (5 changes: 4 additions & 1 deletion)
@@ -338,6 +338,10 @@ class Unpack extends Parser {
     stream.on('error', er => {
       if (stream.fd)
         fs.close(stream.fd, () => {})
+      // flush all the data out so that we aren't left hanging
+      // if the error wasn't actually fatal. otherwise the parse
+      // is blocked, and we never proceed.
+      stream.write = () => true
       this[ONERROR](er, entry)
       fullyDone()
     })
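
Read against the sketch above, the added stream.write = () => true line is the whole fix: the error has already been surfaced through [ONERROR], so the rest of the entry's bytes only need to be discarded, and a write() that always reports success lets the paused parser flush them and move on. In the sketch's (invented) terms:

// The same stub the patch installs, applied to the sketch's broken stream:
brokenFileStream.write = () => true // accept and silently discard everything
console.log(brokenFileStream.write(Buffer.alloc(512))) // => true: no more
// backpressure, so the real parser would flush the entry and move on.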
@@ -510,7 +514,6 @@
         done()
       } else if (er || this[ISREUSABLE](entry, st))
         this[MAKEFS](null, entry, done)
-
       else if (st.isDirectory()) {
         if (entry.type === 'Directory') {
           if (!this.noChmod && (!entry.mode || (st.mode & 0o7777) === entry.mode))
test/make-tar.js (18 changes: 13 additions & 5 deletions)
@@ -4,14 +4,22 @@ if (module === require.main)
   return require('tap').pass('this is fine')
 
 const Header = require('../lib/header.js')
-module.exports = chunks =>
-  Buffer.concat(chunks.map(chunk => {
-    if (Buffer.isBuffer(chunk))
+module.exports = chunks => {
+  let dataLen = 0
+  return Buffer.concat(chunks.map(chunk => {
+    if (Buffer.isBuffer(chunk)) {
+      dataLen += chunk.length
       return chunk
-    const buf = Buffer.alloc(512)
+    }
+    const size = Math.max(typeof chunk === 'string'
+      ? 512 * Math.ceil(chunk.length / 512)
+      : 512)
+    dataLen += size
+    const buf = Buffer.alloc(size)
     if (typeof chunk === 'string')
       buf.write(chunk)
     else
       new Header(chunk).encode(buf, 0)
     return buf
-  }), chunks.length * 512)
+  }), dataLen)
+}
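
Background for the sizing change: tar payloads are stored in 512-byte blocks, and the old helper allotted exactly one block per chunk, truncating longer string bodies via buf.write and summing chunks.length * 512. The new version rounds string chunks up to a whole number of blocks and tracks the actual total. A quick worked example using the size from the new test below:

// Worked example of the new sizing arithmetic (same math as the patch):
const body = 'x'.repeat(31745)               // the test's file body
const blocks = Math.ceil(body.length / 512)  // => 63 blocks
console.log(blocks * 512)                    // => 32256 bytes in the archive
// The old code would have squeezed this into a single 512-byte buffer and
// counted it as 512 bytes, producing a corrupt archive for the test.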
test/unpack.js (39 changes: 39 additions & 0 deletions)
@@ -2880,3 +2880,42 @@ t.test('close fd when error setting mtime', t => {
   })
   unpack.end(data)
 })
+
+t.test('do not hang on large files that fail to open()', t => {
+  const data = makeTar([
+    {
+      type: 'Directory',
+      path: 'x',
+    },
+    {
+      type: 'File',
+      size: 31745,
+      path: 'x/y',
+    },
+    'x'.repeat(31745),
+    '',
+    '',
+  ])
+  t.teardown(mutateFS.fail('open', new Error('nope')))
+  const dir = path.resolve(unpackdir, 'no-hang-for-large-file-failures')
+  mkdirp.sync(dir)
+  const WARNINGS = []
+  const unpack = new Unpack({
+    cwd: dir,
+    onwarn: (code, msg) => WARNINGS.push([code, msg]),
+  })
+  unpack.on('end', () => {
+    t.strictSame(WARNINGS, [['TAR_ENTRY_ERROR', 'nope']])
+    t.end()
+  })
+  unpack.write(data.slice(0, 2048))
+  setTimeout(() => {
+    unpack.write(data.slice(2048, 4096))
+    setTimeout(() => {
+      unpack.write(data.slice(4096))
+      setTimeout(() => {
+        unpack.end()
+      })
+    })
+  })
+})
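
The nested setTimeout writes are the point of the test: feeding the archive in three separate macrotasks guarantees the parser is still mid-entry, with data queued behind the failed WriteStream, when open() rejects, which is exactly the condition that used to hang. mutateFS.fail comes from the mutate-fs test helper; conceptually it does something like the following sketch (invented name failOpen; the helper's real implementation may differ):

// Hedged sketch of what mutateFS.fail('open', er) provides here:
// patch fs.open so every call fails asynchronously, and return a
// function that restores the original (suitable for t.teardown).
const fs = require('fs')

const failOpen = er => {
  const originalOpen = fs.open
  fs.open = (...args) => {
    const cb = args.pop()
    process.nextTick(() => cb(er)) // every open() reports the error
  }
  return () => { fs.open = originalOpen }
}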
