diff --git a/.github/workflows/nodejs.yml b/.github/workflows/nodejs.yml index acb9a176..8981bb2d 100644 --- a/.github/workflows/nodejs.yml +++ b/.github/workflows/nodejs.yml @@ -17,19 +17,19 @@ jobs: node-version: [14.x, 16.x, 18.x, 20.x] steps: - - uses: actions/checkout@v4.2.1 - - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v4.0.4 - with: - node-version: ${{ matrix.node-version }} - - name: npm install and test - run: | - npm ci - npm test - env: - CI: true - - name: Archive production artifacts - uses: actions/upload-artifact@v4 - with: - name: tmp-zip-node-v${{ matrix.node-version }} - path: tmp/*.zip + - uses: actions/checkout@v4.2.1 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v4.0.4 + with: + node-version: ${{ matrix.node-version }} + - name: npm install and test + run: | + npm ci + npm test + env: + CI: true + - name: Archive production artifacts + uses: actions/upload-artifact@v4 + with: + name: tmp-zip-node-v${{ matrix.node-version }} + path: tmp/*.zip diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1fa92d43..b13ddccd 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,13 +2,13 @@ #### Code Style Guide -* code should be indented with 2 spaces -* single quotes should be used where feasible -* commas should be followed by a single space (function params, etc) -* variable declaration should include `var`, [no multiple declarations](http://benalman.com/news/2012/05/multiple-var-statements-javascript/) +- code should be indented with 2 spaces +- single quotes should be used where feasible +- commas should be followed by a single space (function params, etc) +- variable declaration should include `var`, [no multiple declarations](http://benalman.com/news/2012/05/multiple-var-statements-javascript/) #### Tests -* tests should be added to the nodeunit configs in `test/` -* tests can be run with `npm test` -* see existing tests for guidance \ No newline at end of file +- tests should be added to the nodeunit configs in `test/` +- tests can be run with `npm test` +- see existing tests for guidance diff --git a/README.md b/README.md index ff85b9ba..eaf57b36 100644 --- a/README.md +++ b/README.md @@ -21,19 +21,19 @@ This module is meant to be wrapped internally by other modules and therefore lac If you want a module that handles entry queueing and much more, you should check out [archiver](https://npmjs.org/package/archiver) which uses this module internally. 
```js -const Packer = require('zip-stream'); +const Packer = require("zip-stream"); const archive = new Packer(); // OR new Packer(options) -archive.on('error', function(err) { +archive.on("error", function (err) { throw err; }); // pipe archive where you want it (ie fs, http, etc) // listen to the destination's end, close, or finish event -archive.entry('string contents', { name: 'string.txt' }, function(err, entry) { +archive.entry("string contents", { name: "string.txt" }, function (err, entry) { if (err) throw err; - archive.entry(null, { name: 'directory/' }, function(err, entry) { + archive.entry(null, { name: "directory/" }, function (err, entry) { if (err) throw err; archive.finish(); }); diff --git a/index.js b/index.js index 2c692376..1949ddbb 100644 --- a/index.js +++ b/index.js @@ -1,3 +1,7 @@ +import { inherits as inherits$0 } from "util"; +import { ZipArchiveOutputStream as ZipArchiveOutputStream$0 } from "compress-commons"; +import { ZipArchiveEntry as ZipArchiveEntry$0 } from "compress-commons"; +import util from "archiver-utils"; /** * ZipStream * @@ -5,13 +9,11 @@ * @license [MIT]{@link https://github.com/archiverjs/node-zip-stream/blob/master/LICENSE} * @copyright (c) 2014 Chris Talkington, contributors. */ -var inherits = require('util').inherits; - -var ZipArchiveOutputStream = require('compress-commons').ZipArchiveOutputStream; -var ZipArchiveEntry = require('compress-commons').ZipArchiveEntry; - -var util = require('archiver-utils'); - +var inherits = { inherits: inherits$0 }.inherits; +var ZipArchiveOutputStream = { + ZipArchiveOutputStream: ZipArchiveOutputStream$0, +}.ZipArchiveOutputStream; +var ZipArchiveEntry = { ZipArchiveEntry: ZipArchiveEntry$0 }.ZipArchiveEntry; /** * @constructor * @extends external:ZipArchiveOutputStream @@ -23,34 +25,30 @@ var util = require('archiver-utils'); * @param {Object} [options.zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} * to control compression. */ -var ZipStream = module.exports = function(options) { +var ZipStream = function (options) { if (!(this instanceof ZipStream)) { return new ZipStream(options); } - options = this.options = options || {}; options.zlib = options.zlib || {}; - ZipArchiveOutputStream.call(this, options); - - if (typeof options.level === 'number' && options.level >= 0) { + if (typeof options.level === "number" && options.level >= 0) { options.zlib.level = options.level; delete options.level; } - - if (!options.forceZip64 && typeof options.zlib.level === 'number' && options.zlib.level === 0) { + if ( + !options.forceZip64 && + typeof options.zlib.level === "number" && + options.zlib.level === 0 + ) { options.store = true; } - options.namePrependSlash = options.namePrependSlash || false; - if (options.comment && options.comment.length > 0) { this.setComment(options.comment); } }; - inherits(ZipStream, ZipArchiveOutputStream); - /** * Normalizes entry data with fallbacks for key properties. 
* @@ -58,41 +56,34 @@ inherits(ZipStream, ZipArchiveOutputStream); * @param {Object} data * @return {Object} */ -ZipStream.prototype._normalizeFileData = function(data) { +ZipStream.prototype._normalizeFileData = function (data) { data = util.defaults(data, { - type: 'file', + type: "file", name: null, namePrependSlash: this.options.namePrependSlash, linkname: null, date: null, mode: null, store: this.options.store, - comment: '' + comment: "", }); - - var isDir = data.type === 'directory'; - var isSymlink = data.type === 'symlink'; - + var isDir = data.type === "directory"; + var isSymlink = data.type === "symlink"; if (data.name) { data.name = util.sanitizePath(data.name); - - if (!isSymlink && data.name.slice(-1) === '/') { + if (!isSymlink && data.name.slice(-1) === "/") { isDir = true; - data.type = 'directory'; + data.type = "directory"; } else if (isDir) { - data.name += '/'; + data.name += "/"; } } - if (isDir || isSymlink) { data.store = true; } - data.date = util.dateify(data.date); - return data; }; - /** * Appends an entry given an input source (text string, buffer, or stream). * @@ -108,80 +99,68 @@ ZipStream.prototype._normalizeFileData = function(data) { * @param {Function} callback * @return this */ -ZipStream.prototype.entry = function(source, data, callback) { - if (typeof callback !== 'function') { +ZipStream.prototype.entry = function (source, data, callback) { + if (typeof callback !== "function") { callback = this._emitErrorCallback.bind(this); } - data = this._normalizeFileData(data); - - if (data.type !== 'file' && data.type !== 'directory' && data.type !== 'symlink') { - callback(new Error(data.type + ' entries not currently supported')); + if ( + data.type !== "file" && + data.type !== "directory" && + data.type !== "symlink" + ) { + callback(new Error(data.type + " entries not currently supported")); return; } - - if (typeof data.name !== 'string' || data.name.length === 0) { - callback(new Error('entry name must be a non-empty string value')); + if (typeof data.name !== "string" || data.name.length === 0) { + callback(new Error("entry name must be a non-empty string value")); return; } - - if (data.type === 'symlink' && typeof data.linkname !== 'string') { - callback(new Error('entry linkname must be a non-empty string value when type equals symlink')); + if (data.type === "symlink" && typeof data.linkname !== "string") { + callback( + new Error( + "entry linkname must be a non-empty string value when type equals symlink", + ), + ); return; } - var entry = new ZipArchiveEntry(data.name); entry.setTime(data.date, this.options.forceLocalTime); - if (data.namePrependSlash) { entry.setName(data.name, true); } - if (data.store) { entry.setMethod(0); } - if (data.comment.length > 0) { entry.setComment(data.comment); } - - if (data.type === 'symlink' && typeof data.mode !== 'number') { + if (data.type === "symlink" && typeof data.mode !== "number") { data.mode = 40960; // 0120000 } - - if (typeof data.mode === 'number') { - if (data.type === 'symlink') { + if (typeof data.mode === "number") { + if (data.type === "symlink") { data.mode |= 40960; } - entry.setUnixMode(data.mode); } - - if (data.type === 'symlink' && typeof data.linkname === 'string') { + if (data.type === "symlink" && typeof data.linkname === "string") { source = Buffer.from(data.linkname); } - - return ZipArchiveOutputStream.prototype.entry.call(this, entry, source, callback); + return ZipArchiveOutputStream.prototype.entry.call( + this, + entry, + source, + callback, + ); }; - /** * Finalizes the 
instance and prevents further appending to the archive * structure (queue will continue til drained). * * @return void */ -ZipStream.prototype.finalize = function() { +ZipStream.prototype.finalize = function () { this.finish(); }; - -/** - * Returns the current number of bytes written to this stream. - * @function ZipStream#getBytesWritten - * @returns {Number} - */ - -/** - * Compress Commons ZipArchiveOutputStream - * @external ZipArchiveOutputStream - * @see {@link https://github.com/archiverjs/node-compress-commons} - */ +export default ZipStream; diff --git a/package-lock.json b/package-lock.json index 932702c8..68830d5f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -20,6 +20,7 @@ "minami": "1.2.3", "mkdirp": "3.0.1", "mocha": "10.7.3", + "prettier": "3.3.3", "rimraf": "5.0.10" }, "engines": { @@ -1406,6 +1407,21 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/prettier": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", + "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==", + "dev": true, + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, "node_modules/process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", @@ -2928,6 +2944,12 @@ "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true }, + "prettier": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", + "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==", + "dev": true + }, "process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", diff --git a/package.json b/package.json index a105dd07..8f66ced8 100644 --- a/package.json +++ b/package.json @@ -38,6 +38,7 @@ "minami": "1.2.3", "mkdirp": "3.0.1", "mocha": "10.7.3", + "prettier": "3.3.3", "rimraf": "5.0.10" }, "keywords": [ diff --git a/test/helpers/index.js b/test/helpers/index.js index 07034530..c6e0f72f 100644 --- a/test/helpers/index.js +++ b/test/helpers/index.js @@ -1,101 +1,73 @@ -var crypto = require('crypto'); -var fs = require('fs'); -var inherits = require('util').inherits; - -var Stream = require('stream').Stream; -var Readable = require('readable-stream').Readable; -var Writable = require('readable-stream').Writable; - +import crypto from "crypto"; +import fs from "fs"; +import { inherits as inherits$0 } from "util"; +import stream from "stream"; +import { Readable as Readable$0 } from "readable-stream"; +import { Writable as Writable$0 } from "readable-stream"; +var inherits = { inherits: inherits$0 }.inherits; +var Stream = stream.Stream; +var Readable = { Readable: Readable$0 }.Readable; +var Writable = { Writable: Writable$0 }.Writable; function adjustDateByOffset(d, offset) { - d = (d instanceof Date) ? d : new Date(); - + d = d instanceof Date ? 
d : new Date(); if (offset >= 1) { d.setMinutes(d.getMinutes() - offset); } else { d.setMinutes(d.getMinutes() + Math.abs(offset)); } - return d; } - -module.exports.adjustDateByOffset = adjustDateByOffset; - function binaryBuffer(n) { var buffer = Buffer.alloc(n); - for (var i = 0; i < n; i++) { - buffer.writeUInt8(i&255, i); + buffer.writeUInt8(i & 255, i); } - return buffer; } - -module.exports.binaryBuffer = binaryBuffer; - function BinaryStream(size, options) { Readable.call(this, options); - var buf = Buffer.alloc(size); - for (var i = 0; i < size; i++) { - buf.writeUInt8(i&255, i); + buf.writeUInt8(i & 255, i); } - this.push(buf); this.push(null); } - inherits(BinaryStream, Readable); - -BinaryStream.prototype._read = function(size) {}; - -module.exports.BinaryStream = BinaryStream; - +BinaryStream.prototype._read = function (size) {}; function DeadEndStream(options) { Writable.call(this, options); } - inherits(DeadEndStream, Writable); - -DeadEndStream.prototype._write = function(chuck, encoding, callback) { +DeadEndStream.prototype._write = function (chuck, encoding, callback) { callback(); }; - -module.exports.DeadEndStream = DeadEndStream; - function fileBuffer(filepath) { return fs.readFileSync(filepath); } - -module.exports.fileBuffer = fileBuffer; - function UnBufferedStream() { this.readable = true; } - inherits(UnBufferedStream, Stream); - -module.exports.UnBufferedStream = UnBufferedStream; - function WriteHashStream(path, options) { fs.WriteStream.call(this, path, options); - - this.hash = crypto.createHash('sha1'); + this.hash = crypto.createHash("sha1"); this.digest = null; - - this.on('close', function() { - this.digest = this.hash.digest('hex'); + this.on("close", function () { + this.digest = this.hash.digest("hex"); }); } - inherits(WriteHashStream, fs.WriteStream); - -WriteHashStream.prototype.write = function(chunk) { +WriteHashStream.prototype.write = function (chunk) { if (chunk) { this.hash.update(chunk); } - return fs.WriteStream.prototype.write.call(this, chunk); }; - -module.exports.WriteHashStream = WriteHashStream; +export { adjustDateByOffset }; +export { binaryBuffer }; +export { BinaryStream }; +export { DeadEndStream }; +export { fileBuffer }; +export { UnBufferedStream }; +export { WriteHashStream }; diff --git a/test/pack.js b/test/pack.js index ded0e5b6..1d263b26 100644 --- a/test/pack.js +++ b/test/pack.js @@ -1,374 +1,385 @@ -/*global before,describe,it */ -var fs = require('fs'); - -var assert = require('chai').assert; -var mkdir = require('mkdirp'); -var Readable = require('readable-stream').Readable; - -var helpers = require('./helpers'); +import fs from "fs"; +import { assert as assert$0 } from "chai"; +import * as mkdir from "mkdirp"; +import { Readable as Readable$0 } from "readable-stream"; +import * as helpers from "./helpers/index.js"; +import Packer from "../index.js"; +var assert = { assert: assert$0 }.assert; +var Readable = { Readable: Readable$0 }.Readable; var binaryBuffer = helpers.binaryBuffer; var fileBuffer = helpers.fileBuffer; - -var Packer = require('../index.js'); - var testBuffer = binaryBuffer(1024 * 16); - -var testDate = new Date('Jan 03 2013 14:26:38 GMT'); -var testDate2 = new Date('Feb 10 2013 10:24:42 GMT'); - -var testDateOverflow = new Date('Jan 1 2044 00:00:00 GMT'); -var testDateUnderflow = new Date('Dec 30 1979 23:59:58 GMT'); - -describe('pack', function() { - before(function() { - mkdir.sync('tmp'); +var testDate = new Date("Jan 03 2013 14:26:38 GMT"); +var testDate2 = new Date("Feb 10 2013 10:24:42 GMT"); 
+var testDateOverflow = new Date("Jan 1 2044 00:00:00 GMT"); +var testDateUnderflow = new Date("Dec 30 1979 23:59:58 GMT"); +describe("pack", function () { + before(function () { + mkdir.sync("tmp"); }); - - describe('#entry', function() { - - it('should append Buffer sources', function(done) { + describe("#entry", function () { + it("should append Buffer sources", function (done) { var archive = new Packer(); - - var testStream = fs.createWriteStream('tmp/buffer.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/buffer.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry(testBuffer, { name: 'buffer.txt', date: testDate }); + archive.entry(testBuffer, { name: "buffer.txt", date: testDate }); archive.finalize(); }); - - it('should append Stream sources', function(done) { + it("should append Stream sources", function (done) { var archive = new Packer(); - - var testStream = fs.createWriteStream('tmp/stream.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/stream.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate }); + archive.entry(fs.createReadStream("test/fixtures/test.txt"), { + name: "stream.txt", + date: testDate, + }); archive.finalize(); }); - - it('should append Stream-like sources', function(done) { + it("should append Stream-like sources", function (done) { var archive = new Packer(); - var testStream = fs.createWriteStream('tmp/stream-like.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/stream-like.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry(Readable.from(['test']), { name: 'stream-like.txt', date: testDate }); + archive.entry(Readable.from(["test"]), { + name: "stream-like.txt", + date: testDate, + }); archive.finalize(); }); - - it('should append multiple sources', function(done) { + it("should append multiple sources", function (done) { var archive = new Packer(); - - var testStream = fs.createWriteStream('tmp/multiple.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/multiple.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry('string', { name: 'string.txt', date: testDate }, function(err) { - if (err) throw err; - archive.entry(testBuffer, { name: 'buffer.txt', date: testDate2 }, function(err) { + archive.entry( + "string", + { name: "string.txt", date: testDate }, + function (err) { if (err) throw err; - archive.entry(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate2 }, function(err) { - if (err) throw err; - archive.entry(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream-store.txt', date: testDate, store: true }, function(err) { + archive.entry( + testBuffer, + { name: "buffer.txt", date: testDate2 }, + function (err) { if (err) throw err; - archive.finalize(); - }); - }); - }); - }); + archive.entry( + fs.createReadStream("test/fixtures/test.txt"), + { name: "stream.txt", date: testDate2 }, + function (err) { + if (err) throw err; + archive.entry( + fs.createReadStream("test/fixtures/test.txt"), + { name: "stream-store.txt", date: testDate, store: true }, + function (err) { + if (err) throw err; + archive.finalize(); + }, + ); + }, + ); + }, + ); + }, + ); }); - - 
it('should support STORE for Buffer sources', function(done) { + it("should support STORE for Buffer sources", function (done) { var archive = new Packer(); - - var testStream = fs.createWriteStream('tmp/buffer-store.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/buffer-store.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry(testBuffer, { name: 'buffer.txt', date: testDate, store: true }); + archive.entry(testBuffer, { + name: "buffer.txt", + date: testDate, + store: true, + }); archive.finalize(); }); - - it('should support STORE for Stream sources', function(done) { + it("should support STORE for Stream sources", function (done) { var archive = new Packer(); - - var testStream = fs.createWriteStream('tmp/stream-store.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/stream-store.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate, store: true }); + archive.entry(fs.createReadStream("test/fixtures/test.txt"), { + name: "stream.txt", + date: testDate, + store: true, + }); archive.finalize(); }); - - it('should support archive and file comments', function(done) { + it("should support archive and file comments", function (done) { var archive = new Packer({ - comment: 'this is a zip comment', - forceUTC: true + comment: "this is a zip comment", + forceUTC: true, }); - - var testStream = fs.createWriteStream('tmp/comments.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/comments.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry(testBuffer, { name: 'buffer.txt', date: testDate, comment: 'this is a file comment' }); + archive.entry(testBuffer, { + name: "buffer.txt", + date: testDate, + comment: "this is a file comment", + }); archive.finalize(); }); - - it('should STORE files when compression level is zero', function(done) { + it("should STORE files when compression level is zero", function (done) { var archive = new Packer({ forceUTC: true, - level: 0 + level: 0, }); - - var testStream = fs.createWriteStream('tmp/store-level0.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/store-level0.zip"); + testStream.on("close", function () { //assert.equal(testStream.digest, '70b50994c971dbb0e457781cf6d23ca82e5ccbc0'); done(); }); - archive.pipe(testStream); - - archive.entry(testBuffer, { name: 'buffer.txt', date: testDate }); + archive.entry(testBuffer, { name: "buffer.txt", date: testDate }); archive.finalize(); }); - - it('should properly handle utf8 encoded characters in file names and comments', function(done) { + it("should properly handle utf8 encoded characters in file names and comments", function (done) { var archive = new Packer(); - - var testStream = fs.createWriteStream('tmp/accentedchars-filenames.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/accentedchars-filenames.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry(testBuffer, { name: 'àáâãäçèéêëìíîïñòóôõöùúûüýÿ.txt', date: testDate, comment: 'àáâãäçèéêëìíîïñòóôõöùúûüýÿ' }, function(err) { - if (err) throw err; - archive.entry(testBuffer, { name: 'ÀÁÂÃÄÇÈÉÊËÌÍÎÏÑÒÓÔÕÖÙÚÛÜÝ.txt', date: testDate2, comment: 
'ÀÁÂÃÄÇÈÉÊËÌÍÎÏÑÒÓÔÕÖÙÚÛÜÝ' }, function(err) { + archive.entry( + testBuffer, + { + name: "àáâãäçèéêëìíîïñòóôõöùúûüýÿ.txt", + date: testDate, + comment: "àáâãäçèéêëìíîïñòóôõöùúûüýÿ", + }, + function (err) { if (err) throw err; - archive.finalize(); - }); - }); + archive.entry( + testBuffer, + { + name: "ÀÁÂÃÄÇÈÉÊËÌÍÎÏÑÒÓÔÕÖÙÚÛÜÝ.txt", + date: testDate2, + comment: "ÀÁÂÃÄÇÈÉÊËÌÍÎÏÑÒÓÔÕÖÙÚÛÜÝ", + }, + function (err) { + if (err) throw err; + archive.finalize(); + }, + ); + }, + ); }); - - it('should append zero length sources', function(done) { + it("should append zero length sources", function (done) { var archive = new Packer(); - - var testStream = fs.createWriteStream('tmp/zerolength.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/zerolength.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry('', { name: 'string.txt', date: testDate }, function(err) { + archive.entry("", { name: "string.txt", date: testDate }, function (err) { if (err) throw err; - archive.entry(Buffer.alloc(0), { name: 'buffer.txt', date: testDate }, function(err) { - if (err) throw err; - archive.entry(fs.createReadStream('test/fixtures/empty.txt'), { name: 'stream.txt', date: testDate }, function(err) { + archive.entry( + Buffer.alloc(0), + { name: "buffer.txt", date: testDate }, + function (err) { if (err) throw err; - archive.finalize(); - }); - }); + archive.entry( + fs.createReadStream("test/fixtures/empty.txt"), + { name: "stream.txt", date: testDate }, + function (err) { + if (err) throw err; + archive.finalize(); + }, + ); + }, + ); }); }); - - it('should support setting file mode (permissions)', function(done) { + it("should support setting file mode (permissions)", function (done) { var archive = new Packer(); - - var testStream = fs.createWriteStream('tmp/filemode.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/filemode.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry(testBuffer, { name: 'buffer.txt', date: testDate, mode: 0644 }); + archive.entry(testBuffer, { + name: "buffer.txt", + date: testDate, + mode: 0644, + }); archive.finalize(); }); - - it('should support creating an empty zip', function(done) { + it("should support creating an empty zip", function (done) { var archive = new Packer(); - - var testStream = fs.createWriteStream('tmp/empty.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/empty.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - archive.finalize(); }); - - it('should support compressing images for Buffer sources', function(done) { + it("should support compressing images for Buffer sources", function (done) { var archive = new Packer(); - - var testStream = fs.createWriteStream('tmp/buffer-image.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/buffer-image.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry(fileBuffer('test/fixtures/image.png'), { name: 'image.png', date: testDate }); + archive.entry(fileBuffer("test/fixtures/image.png"), { + name: "image.png", + date: testDate, + }); archive.finalize(); }); - - it('should support compressing images for Stream sources', function(done) { + it("should support compressing images for Stream sources", function (done) { var archive = new Packer(); - - var testStream = 
fs.createWriteStream('tmp/stream-image.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/stream-image.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry(fs.createReadStream('test/fixtures/image.png'), { name: 'image.png', date: testDate }); + archive.entry(fs.createReadStream("test/fixtures/image.png"), { + name: "image.png", + date: testDate, + }); archive.finalize(); }); - - it('should prevent UInt32 under/overflow of dates', function(done) { + it("should prevent UInt32 under/overflow of dates", function (done) { var archive = new Packer(); - - var testStream = fs.createWriteStream('tmp/date-boundaries.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/date-boundaries.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry(testBuffer, { name: 'date-underflow.txt', date: testDateUnderflow }, function(err) { - if (err) throw err; - archive.entry(testBuffer, { name: 'date-overflow.txt', date: testDateOverflow }, function(err) { + archive.entry( + testBuffer, + { name: "date-underflow.txt", date: testDateUnderflow }, + function (err) { if (err) throw err; - archive.finalize(); - }); - }); + archive.entry( + testBuffer, + { name: "date-overflow.txt", date: testDateOverflow }, + function (err) { + if (err) throw err; + archive.finalize(); + }, + ); + }, + ); }); - - it('should handle data that exceeds its internal buffer size', function(done) { + it("should handle data that exceeds its internal buffer size", function (done) { var archive = new Packer({ highWaterMark: 1024 * 4, - forceUTC: true + forceUTC: true, }); - - var testStream = fs.createWriteStream('tmp/buffer-overflow.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/buffer-overflow.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry(binaryBuffer(1024 * 512), { name: 'buffer-overflow.txt', date: testDate }, function(err) { - if (err) throw err; - archive.entry(binaryBuffer(1024 * 1024), { name: 'buffer-overflow-store.txt', date: testDate, store: true }, function(err) { + archive.entry( + binaryBuffer(1024 * 512), + { name: "buffer-overflow.txt", date: testDate }, + function (err) { if (err) throw err; - archive.finalize(); - }); - }); + archive.entry( + binaryBuffer(1024 * 1024), + { name: "buffer-overflow-store.txt", date: testDate, store: true }, + function (err) { + if (err) throw err; + archive.finalize(); + }, + ); + }, + ); }); - - it('should support directory entries', function(done) { + it("should support directory entries", function (done) { var archive = new Packer(); - - var testStream = fs.createWriteStream('tmp/type-directory.zip'); - - testStream.on('close', function() { + var testStream = fs.createWriteStream("tmp/type-directory.zip"); + testStream.on("close", function () { done(); }); - archive.pipe(testStream); - - archive.entry(null, { name: 'directory/', date: testDate }); - + archive.entry(null, { name: "directory/", date: testDate }); archive.finalize(); }); - - it('should support symlink entries', function(done) { - var archive = new Packer(); - var testStream = fs.createWriteStream('tmp/type-symlink.zip'); - - testStream.on('close', function() { - done(); - }); - - archive.pipe(testStream); - - archive.entry('some text', { name: 'file', date: testDate }, function(err) { - if (err) throw err; - archive.entry(null, { type: 'symlink', name: 
'file-link', linkname: 'file', date: testDate }, function(err) { - if (err) throw err; - archive.entry(null, { type: 'symlink', name: 'file-link-2', linkname: 'file', date: testDate, mode: 0644 }, function(err) { - if (err) throw err; - archive.finalize(); - }); - }); - }); + it("should support symlink entries", function (done) { + var archive = new Packer(); + var testStream = fs.createWriteStream("tmp/type-symlink.zip"); + testStream.on("close", function () { + done(); + }); + archive.pipe(testStream); + archive.entry( + "some text", + { name: "file", date: testDate }, + function (err) { + if (err) throw err; + archive.entry( + null, + { + type: "symlink", + name: "file-link", + linkname: "file", + date: testDate, + }, + function (err) { + if (err) throw err; + archive.entry( + null, + { + type: "symlink", + name: "file-link-2", + linkname: "file", + date: testDate, + mode: 0644, + }, + function (err) { + if (err) throw err; + archive.finalize(); + }, + ); + }, + ); + }, + ); }); - - - it('should support appending forward slash to entry names', function(done) { - var archive = new Packer({ - namePrependSlash: true, - }); - var testStream = fs.createWriteStream('tmp/name-prepend-slash.zip'); - - testStream.on('close', function() { - done(); - }); - - archive.pipe(testStream); - - archive.entry('some text', { name: 'file', namePrependSlash: false, date: testDate }, function(err) { - if (err) throw err; - archive.entry('more text', { type: 'file', name: 'file-with-prefix', date: testDate }, function(err) { - if (err) throw err; - archive.finalize(); - }); - }); + it("should support appending forward slash to entry names", function (done) { + var archive = new Packer({ + namePrependSlash: true, + }); + var testStream = fs.createWriteStream("tmp/name-prepend-slash.zip"); + testStream.on("close", function () { + done(); + }); + archive.pipe(testStream); + archive.entry( + "some text", + { name: "file", namePrependSlash: false, date: testDate }, + function (err) { + if (err) throw err; + archive.entry( + "more text", + { type: "file", name: "file-with-prefix", date: testDate }, + function (err) { + if (err) throw err; + archive.finalize(); + }, + ); + }, + ); }); - - }); - });
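Setting the Prettier reformatting aside, the substantive change in this diff is the CommonJS-to-ESM conversion of `index.js` and the test files: `module.exports = ZipStream` becomes `export default ZipStream`, and the internal `require()` calls become `import` statements. Below is a minimal consumption sketch under that assumption; it adapts the README example above to `import` syntax. The destination path is illustrative, and the diff does not show a matching `"type": "module"` or `exports` change in `package.json`, so how the `zip-stream` specifier resolves here is an assumption rather than something this change guarantees.

```js
// Hedged sketch: assumes the package resolves as an ES module so that the
// new `export default ZipStream` in index.js is reachable via a bare import.
import fs from "fs";
import Packer from "zip-stream"; // previously: const Packer = require("zip-stream");

const archive = new Packer(); // OR new Packer(options)

archive.on("error", function (err) {
  throw err;
});

// pipe archive where you want it (ie fs, http, etc)
// listen to the destination's end, close, or finish event
archive.pipe(fs.createWriteStream("tmp/example.zip")); // illustrative path

archive.entry("string contents", { name: "string.txt" }, function (err) {
  if (err) throw err;
  archive.entry(null, { name: "directory/" }, function (err) {
    if (err) throw err;
    archive.finish();
  });
});
```

Consumers still on CommonJS would need Node's ESM interop (for example a dynamic `import()`), since `require()` of an ES module throws `ERR_REQUIRE_ESM` on every Node version in the CI matrix above (14.x through 20.x).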
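On the tooling side, Prettier 3.3.3 is added to `devDependencies` in `package.json` and `package-lock.json`, which accounts for the quote, spacing, and indentation changes throughout the diff (including the re-indented workflow steps). No `format` npm script or CI formatting check is added, so the files were presumably reformatted with a one-off invocation such as `npx prettier --write .` (an assumption; the exact command is not part of this change).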