feat: add initial support for brotli #391

Merged · 3 commits · Sep 5, 2023
23 changes: 17 additions & 6 deletions lib/pack.js
@@ -79,14 +79,25 @@ const Pack = warner(class Pack extends Minipass {
 
     this.portable = !!opt.portable
     this.zip = null
-    if (opt.gzip) {
-      if (typeof opt.gzip !== 'object') {
-        opt.gzip = {}
+    if (opt.gzip || opt.brotli) {
+      if (opt.gzip && opt.brotli) {
+        throw new TypeError('gzip and brotli are mutually exclusive')
       }
-      if (this.portable) {
-        opt.gzip.portable = true
+      if (opt.gzip) {
+        if (typeof opt.gzip !== 'object') {
+          opt.gzip = {}
+        }
+        if (this.portable) {
+          opt.gzip.portable = true
+        }
+        this.zip = new zlib.Gzip(opt.gzip)
+      }
+      if (opt.brotli) {
+        if (typeof opt.brotli !== 'object') {
+          opt.brotli = {}
+        }
+        this.zip = new zlib.BrotliCompress(opt.brotli)
       }
-      this.zip = new zlib.Gzip(opt.gzip)
       this.zip.on('data', chunk => super.write(chunk))
       this.zip.on('end', _ => super.end())
       this.zip.on('drain', _ => this[ONDRAIN]())
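Note: the Pack change mirrors the existing gzip path — a truthy `brotli` option is normalized to an options object and handed to `zlib.BrotliCompress`, and it is mutually exclusive with `gzip`. A minimal sketch of creating a brotli-compressed archive with the `Pack` class directly, the same surface the new tests below exercise; the `'dir'` entry and output filename are illustrative:

```js
const { Pack } = require('tar')
const fs = require('fs')

// A truthy `brotli` option selects zlib.BrotliCompress instead of zlib.Gzip.
// An empty object means default brotli settings; the object is passed through.
const pack = new Pack({ brotli: {} })
pack.pipe(fs.createWriteStream('archive.tbr')) // .tbr keeps an extension extract can infer from
pack.add('dir') // illustrative entry, resolved against cwd
pack.end()
```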
8 changes: 6 additions & 2 deletions lib/parse.js
@@ -97,6 +97,10 @@ module.exports = warner(class Parser extends EE {
     this.strict = !!opt.strict
     this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
     this.filter = typeof opt.filter === 'function' ? opt.filter : noop
+    // Unlike gzip, brotli doesn't have any magic bytes to identify it
+    // Users need to explicitly tell us they're extracting a brotli file
+    // Or we infer from the file extension
+    this.brotli = opt.brotli || (opt.file && (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr')))
 
     // have to set this so that streams are ok piping into it
     this.writable = true
@@ -361,10 +365,10 @@ module.exports = warner(class Parser extends EE {
           this[UNZIP] = false
         }
       }
-      if (this[UNZIP] === null) {
+      if (this[UNZIP] === null || (this[UNZIP] === false && this.brotli)) {
        const ended = this[ENDED]
        this[ENDED] = false
-        this[UNZIP] = new zlib.Unzip()
+        this[UNZIP] = this.brotli ? new zlib.BrotliDecompress() : new zlib.Unzip()
        this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
        this[UNZIP].on('error', er => this.abort(er))
        this[UNZIP].on('end', _ => {
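Because brotli has no magic bytes, Parse only switches to `zlib.BrotliDecompress` when told to: either through an explicit `brotli` option, or — when a `file` name is available — through the `.tar.br`/`.tbr` extension. A hedged sketch of both call styles via the high-level extract entry point, matching what the new extract tests do (paths are illustrative):

```js
const tar = require('tar')

// Inferred from the extension: .tbr (or .tar.br) enables brotli decompression.
tar.x({ file: 'archive.tbr', C: 'dest' })

// No recognizable extension, so the caller must opt in explicitly.
tar.x({ file: 'archive.bin', C: 'dest', brotli: true })
```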
2 changes: 1 addition & 1 deletion lib/replace.js
@@ -23,7 +23,7 @@ module.exports = (opt_, files, cb) => {
     throw new TypeError('file is required')
   }
 
-  if (opt.gzip) {
+  if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
     throw new TypeError('cannot append to compressed archives')
   }
 
2 changes: 1 addition & 1 deletion lib/update.js
@@ -13,7 +13,7 @@ module.exports = (opt_, files, cb) => {
     throw new TypeError('file is required')
   }
 
-  if (opt.gzip) {
+  if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
     throw new TypeError('cannot append to compressed archives')
   }
 
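replace and update already refused to append to gzipped archives; the guard now also trips on a truthy `brotli` option or a `.br`/`.tbr` filename. A small sketch of the failure mode, in line with the new replace test further down (filenames are illustrative):

```js
const tar = require('tar')

try {
  // Detected by the filename extension before any filesystem work happens.
  tar.r({ file: 'archive.tbr', sync: true }, ['extra.txt'])
} catch (er) {
  console.error(er.message) // 'cannot append to compressed archives'
}

// An explicit brotli option triggers the same TypeError:
// tar.u({ file: 'archive.tar', brotli: true, sync: true }, ['extra.txt'])
```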
39 changes: 39 additions & 0 deletions test/extract.js
@@ -310,3 +310,42 @@ t.test('sync gzip error edge case test', async t => {

t.end()
})

t.test('brotli', async t => {
const file = path.resolve(__dirname, 'fixtures/example.tbr')
const dir = path.resolve(__dirname, 'brotli')

t.beforeEach(async () => {
await mkdirp(dir)
})

t.afterEach(async () => {
await rimraf(dir)
})

t.test('fails if unknown file extension', async t => {
const filename = path.resolve(__dirname, 'brotli/example.unknown')
const f = fs.openSync(filename, 'a')
fs.closeSync(f)

const expect = new Error('TAR_BAD_ARCHIVE: Unrecognized archive format')

t.throws(_ => x({ sync: true, file: filename }), expect)
})

t.test('succeeds based on file extension', t => {
x({ sync: true, file: file, C: dir })

t.same(fs.readdirSync(dir + '/x').sort(),
['1', '10', '2', '3', '4', '5', '6', '7', '8', '9'])
t.end()
})

t.test('succeeds when passed explicit option', t => {
x({ sync: true, file: file, C: dir, brotli: true })

t.same(fs.readdirSync(dir + '/x').sort(),
['1', '10', '2', '3', '4', '5', '6', '7', '8', '9'])
t.end()
})
})
Binary file added test/fixtures/example.tbr
169 changes: 169 additions & 0 deletions test/pack.js
@@ -375,6 +375,19 @@ t.test('if gzip is truthy, make it an object', t => {
t.end()
})

t.test('if brotli is truthy, make it an object', t => {
const opt = { brotli: true }
new Pack(opt)
t.type(opt.brotli, 'object')
t.end()
})

t.test('throws if both gzip and brotli are truthy', t => {
const opt = { gzip: true, brotli: true }
t.throws(_ => new Pack(opt), new TypeError('gzip and brotli are mutually exclusive'))
t.end()
})

t.test('gzip, also a very deep path', t => {
const out = []

@@ -454,6 +467,84 @@
})
})

t.test('brotli, also a very deep path', t => {
const out = []

new Pack({
cwd: files,
brotli: { flush: 1 },
})
.add('dir')
.add('long-path')
.on('data', c => out.push(c))
.end()
.on('end', _ => {
const zipped = Buffer.concat(out)
const data = zlib.brotliDecompressSync(zipped)
const entries = []
for (var i = 0; i < data.length; i += 512) {
const slice = data.slice(i, i + 512)
const h = new Header(slice)
if (h.nullBlock) {
entries.push('null block')
} else if (h.cksumValid) {
entries.push([h.type, h.path])
} else if (entries[entries.length - 1][0] === 'File') {
entries[entries.length - 1].push(slice.toString().replace(/\0.*$/, ''))
}
}

const expect = [
['Directory', 'dir/'],
['Directory', 'long-path/'],
['File', 'dir/x'],
['Directory', 'long-path/r/'],
['Directory', 'long-path/r/e/'],
['Directory', 'long-path/r/e/a/'],
['Directory', 'long-path/r/e/a/l/'],
['Directory', 'long-path/r/e/a/l/l/'],
['Directory', 'long-path/r/e/a/l/l/y/'],
['Directory', 'long-path/r/e/a/l/l/y/-/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/'],
['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', 'short\n'],
['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'],
['ExtendedHeader', 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'],
['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222'],
['ExtendedHeader', 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc'],
['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'],
['ExtendedHeader', 'PaxHeader/Ω.txt'],
['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', 'Ω'],
'null block',
'null block',
]

let ok = true
entries.forEach((entry, i) => {
ok = ok &&
t.equal(entry[0], expect[i][0]) &&
t.equal(entry[1], expect[i][1]) &&
(!entry[2] || t.equal(entry[2], expect[i][2]))
})

t.end()
})
})

t.test('very deep gzip path, sync', t => {
const pack = new PackSync({
cwd: files,
@@ -533,6 +624,84 @@
t.end()
})

t.test('very deep brotli path, sync', t => {
const pack = new PackSync({
cwd: files,
brotli: true,
}).add('dir')
.add('long-path')
.end()

// these do nothing!
pack.pause()
pack.resume()

const zipped = pack.read()
t.type(zipped, Buffer)
const data = zlib.brotliDecompressSync(zipped)
const entries = []
for (var i = 0; i < data.length; i += 512) {
const slice = data.slice(i, i + 512)
const h = new Header(slice)
if (h.nullBlock) {
entries.push('null block')
} else if (h.cksumValid) {
entries.push([h.type, h.path])
} else if (entries[entries.length - 1][0] === 'File') {
entries[entries.length - 1].push(slice.toString().replace(/\0.*$/, ''))
}
}

const expect = [
['Directory', 'dir/'],
['File', 'dir/x'],
['Directory', 'long-path/'],
['Directory', 'long-path/r/'],
['Directory', 'long-path/r/e/'],
['Directory', 'long-path/r/e/a/'],
['Directory', 'long-path/r/e/a/l/'],
['Directory', 'long-path/r/e/a/l/l/'],
['Directory', 'long-path/r/e/a/l/l/y/'],
['Directory', 'long-path/r/e/a/l/l/y/-/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/'],
['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/'],
['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', 'short\n'],
['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'],
['ExtendedHeader', 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'],
['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222'],
['ExtendedHeader', 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc'],
['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'],
['ExtendedHeader', 'PaxHeader/Ω.txt'],
['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', 'Ω'],
'null block',
'null block',
]

let ok = true
entries.forEach((entry, i) => {
ok = ok &&
t.equal(entry[0], expect[i][0]) &&
t.equal(entry[1], expect[i][1]) &&
(!entry[2] || t.equal(entry[2], expect[i][2]))
})

t.end()
})

t.test('write after end', t => {
const p = new Pack()
p.end()
46 changes: 46 additions & 0 deletions test/parse.js
@@ -125,6 +125,52 @@ t.test('fixture tests', t => {
bs.end(zlib.gzipSync(tardata))
})

t.test('compress with brotli based on filename .tar.br', t => {
const p = new Parse({
maxMetaEntrySize: maxMeta,
filter: filter ? (path, entry) => entry.size % 2 !== 0 : null,
strict: strict,
file: 'example.tar.br',
})
trackEvents(t, expect, p)
p.end(zlib.brotliCompressSync(tardata))
})

t.test('compress with brotli based on filename .tbr', t => {
const p = new Parse({
maxMetaEntrySize: maxMeta,
filter: filter ? (path, entry) => entry.size % 2 !== 0 : null,
strict: strict,
file: 'example.tbr',
})
trackEvents(t, expect, p)
p.end(zlib.brotliCompressSync(tardata))
})

t.test('compress with brotli all at once', t => {
const p = new Parse({
maxMetaEntrySize: maxMeta,
filter: filter ? (path, entry) => entry.size % 2 !== 0 : null,
strict: strict,
brotli: {},
})
trackEvents(t, expect, p)
p.end(zlib.brotliCompressSync(tardata))
})

t.test('compress with brotli byte at a time', t => {
const bs = new ByteStream()
const bp = new Parse({
maxMetaEntrySize: maxMeta,
filter: filter ? (path, entry) => entry.size % 2 !== 0 : null,
strict: strict,
brotli: {},
})
trackEvents(t, expect, bp)
bs.pipe(bp)
bs.end(zlib.brotliCompressSync(tardata))
})

t.test('async chunks', t => {
const p = new Parse({
maxMetaEntrySize: maxMeta,
25 changes: 25 additions & 0 deletions test/replace.js
@@ -23,6 +23,7 @@ const fixtureDef = {
'zero.tar': Buffer.from(''),
'empty.tar': Buffer.alloc(512),
'compressed.tgz': zlib.gzipSync(data),
'compressed.tbr': zlib.brotliCompressSync(data),
}

t.test('basic file add to archive (good or truncated)', t => {
@@ -211,6 +212,30 @@ t.test('cannot append to gzipped archives', async t => {
}, [path.basename(__filename)], er => t.match(er, expect))
})

t.test('cannot append to brotli compressed archives', async t => {
const dir = t.testdir({
'compressed.tbr': fixtureDef['compressed.tbr'],
})
const file = resolve(dir, 'compressed.tbr')

const expect = new Error('cannot append to compressed archives')
const expectT = new TypeError('cannot append to compressed archives')

t.throws(_ => r({
file,
cwd: __dirname,
brotli: true,
}, [path.basename(__filename)]), expectT)

t.throws(_ => r({
file,
cwd: __dirname,
sync: true,
}, [path.basename(__filename)]), expect)

t.end()
})

t.test('other throws', t => {
t.throws(_ => r({}, ['asdf']), new TypeError('file is required'))
t.throws(_ => r({ file: 'asdf' }, []),