diff --git a/.gitignore b/.gitignore
index f6c0377bd29d3f..72ccaddaceb4f5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -69,11 +69,9 @@ ipch/
 /config_fips.gypi
 *-nodegyp*
 /gyp-mac-tool
-/dist-osx
 /npm.wxs
 /tools/msvs/npm.wixobj
 /tools/msvs/genfiles/
-/tools/osx-pkg.pmdoc/index.xml
 /test/addons/??_*/
 email.md
 deps/v8-*
@@ -101,6 +99,7 @@ deps/npm/node_modules/.bin/
 # build/release artifacts
 /*.tar.*
+/*.pkg
 /SHASUMS*.txt*
 # test artifacts
diff --git a/BUILDING.md b/BUILDING.md
index a32b30bb62745c..6443e52b2277d5 100644
--- a/BUILDING.md
+++ b/BUILDING.md
@@ -90,7 +90,7 @@ On macOS you will need to install the `Xcode Command Line Tools` by running
 installed, you can find them under the menu `Xcode -> Open Developer Tool ->
 More Developer Tools...`. This step will install `clang`, `clang++`, and
 `make`.
-* You may want to setup [firewall rules](tools/macosx-firewall.sh)
+* After building, you may want to setup [firewall rules](tools/macosx-firewall.sh)
 to avoid popups asking to accept incoming network connections when running
 tests:
 If the path to your build directory contains a space, the build will likely fail.
@@ -128,6 +128,9 @@ To run the tests:
 $ make test
 ```
+At this point you are ready to make code changes and re-run the tests!
+Optionally, continue below.
+
 To run the tests and generate code coverage reports:
 ```console
@@ -144,7 +147,7 @@ and overwrites the `lib/` directory.
 To clean up after generating the coverage reports:
 ```console
-make coverage-clean
+$ make coverage-clean
 ```
 To build the documentation:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index fe69f4fffd3272..c3b39fa78ab0d6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -27,7 +27,8 @@ release.
-Check out the docs,
+Check out the docs.
 You can use the npm help command to read any of them.
 If you're a developer, and you want to use npm to publish your program,
-you should read this
+you should read this.
 When you find issues, please report them:
npm config set <key> <value> [-g|--global]
npm config get <key>
npm config delete <key>
-npm config list [-l]
+npm config list [-l] [--json]
npm config edit
npm get <key>
npm set <key> <value> [-g|--global]
@@ -39,7 +39,8 @@ get
 Echo the config value to stdout.
 npm config list
-Show all the config settings. Use -l
-to also show defaults.
+Show all the config settings. Use -l
+to also show defaults. Use --json
+to show the settings in json format.
 npm config delete key
 Deletes the key from all configuration files.
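A quick illustration of the new flag (a hypothetical run; the exact keys and values depend on your configuration):

```console
$ npm config list --json
{
  "json": true,
  "registry": "https://registry.npmjs.org/"
}
```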
@@ -67,5 +68,5 @@ npm ls promzard
in npm's source tree will show:
-npm@5.3.0 /path/to/npm
+npm@5.4.2 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
It will print out extraneous, missing, and invalid packages.
@@ -110,5 +110,5 @@ SEE ALSO
-npm@5.3.0
+npm@5.4.2
diff --git a/deps/npm/html/doc/cli/npm-outdated.html b/deps/npm/html/doc/cli/npm-outdated.html
index b25aaec3d3af19..1f1eb8f9c7619c 100644
--- a/deps/npm/html/doc/cli/npm-outdated.html
+++ b/deps/npm/html/doc/cli/npm-outdated.html
@@ -116,5 +116,5 @@ SEE ALSO
-npm@5.3.0
+npm@5.4.2
diff --git a/deps/npm/html/doc/cli/npm-owner.html b/deps/npm/html/doc/cli/npm-owner.html
index 7b9980c67c50b4..5bdca245c8d4a6 100644
--- a/deps/npm/html/doc/cli/npm-owner.html
+++ b/deps/npm/html/doc/cli/npm-owner.html
@@ -51,5 +51,5 @@ SEE ALSO
-npm@5.3.0
+npm@5.4.2
diff --git a/deps/npm/html/doc/cli/npm-pack.html b/deps/npm/html/doc/cli/npm-pack.html
index 2d66916d8b96df..3482078c397f5d 100644
--- a/deps/npm/html/doc/cli/npm-pack.html
+++ b/deps/npm/html/doc/cli/npm-pack.html
@@ -41,5 +41,5 @@ SEE ALSO
-npm@5.3.0
+npm@5.4.2
diff --git a/deps/npm/html/doc/cli/npm-ping.html b/deps/npm/html/doc/cli/npm-ping.html
index 68a5e8574983f4..9eea492d372800 100644
--- a/deps/npm/html/doc/cli/npm-ping.html
+++ b/deps/npm/html/doc/cli/npm-ping.html
@@ -13,8 +13,12 @@ npm-ping
Ping npm registry
SYNOPSIS
npm ping [--registry <registry>]
DESCRIPTION
-Ping the configured or given npm registry and verify authentication.
-SEE ALSO
+Ping the configured or given npm registry and verify authentication.
+If it works it will output something like:
+Ping success: {*Details about registry*}
+
+otherwise you will get:
+Ping error: {*Detail about error}
+
SEE ALSO
- npm-config(1)
- npm-config(7)
@@ -32,5 +36,5 @@ SEE ALSO
-npm@5.3.0
+npm@5.4.2
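For reference, a hypothetical session showing both branches of the new output (exact payloads vary by registry and failure mode):

```console
$ npm ping
Ping success: {}
$ npm ping --registry https://example.invalid/
Ping error: Error: getaddrinfo ENOTFOUND example.invalid
```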
diff --git a/deps/npm/html/doc/cli/npm-prefix.html b/deps/npm/html/doc/cli/npm-prefix.html
index 2560340d5e16f7..fb92fd80d316f9 100644
--- a/deps/npm/html/doc/cli/npm-prefix.html
+++ b/deps/npm/html/doc/cli/npm-prefix.html
@@ -38,5 +38,5 @@ SEE ALSO
-npm@5.3.0
+npm@5.4.2
diff --git a/deps/npm/html/doc/cli/npm-prune.html b/deps/npm/html/doc/cli/npm-prune.html
index a7073a9e022a9a..ad1872cfd531f1 100644
--- a/deps/npm/html/doc/cli/npm-prune.html
+++ b/deps/npm/html/doc/cli/npm-prune.html
@@ -40,5 +40,5 @@ SEE ALSO
-npm@5.3.0
+npm@5.4.2
diff --git a/deps/npm/html/doc/cli/npm-publish.html b/deps/npm/html/doc/cli/npm-publish.html
index 2d3b1d1db8bc55..b58f0fc207e650 100644
--- a/deps/npm/html/doc/cli/npm-publish.html
+++ b/deps/npm/html/doc/cli/npm-publish.html
@@ -79,5 +79,5 @@ SEE ALSO
-npm@5.3.0
+npm@5.4.2
diff --git a/deps/npm/html/doc/cli/npm-rebuild.html b/deps/npm/html/doc/cli/npm-rebuild.html
index bc8ee7877132e3..f415fbbc822826 100644
--- a/deps/npm/html/doc/cli/npm-rebuild.html
+++ b/deps/npm/html/doc/cli/npm-rebuild.html
@@ -35,5 +35,5 @@ SEE ALSO
-npm@5.3.0
+npm@5.4.2
diff --git a/deps/npm/html/doc/cli/npm-repo.html b/deps/npm/html/doc/cli/npm-repo.html
index e2ef9ee958a673..182965fb704702 100644
--- a/deps/npm/html/doc/cli/npm-repo.html
+++ b/deps/npm/html/doc/cli/npm-repo.html
@@ -41,5 +41,5 @@ SEE ALSO
-npm@5.3.0
+npm@5.4.2
diff --git a/deps/npm/html/doc/cli/npm-restart.html b/deps/npm/html/doc/cli/npm-restart.html
index a6648c5716ac74..36a9e0b9b52ed1 100644
--- a/deps/npm/html/doc/cli/npm-restart.html
+++ b/deps/npm/html/doc/cli/npm-restart.html
@@ -53,5 +53,5 @@ SEE ALSO
-npm@5.3.0
+npm@5.4.2
diff --git a/deps/npm/html/doc/cli/npm-root.html b/deps/npm/html/doc/cli/npm-root.html
index 94b59687abf668..49ac12783222a6 100644
--- a/deps/npm/html/doc/cli/npm-root.html
+++ b/deps/npm/html/doc/cli/npm-root.html
@@ -35,5 +35,5 @@ SEE ALSO
-npm@5.3.0
+npm@5.4.2
diff --git a/deps/npm/html/doc/cli/npm-run-script.html b/deps/npm/html/doc/cli/npm-run-script.html
index 037ad1a64c233d..5960d30434b2e4 100644
--- a/deps/npm/html/doc/cli/npm-run-script.html
+++ b/deps/npm/html/doc/cli/npm-run-script.html
@@ -29,7 +29,7 @@ SYNOPSIS
and not to any pre or post script.
 The env script is a special built-in command that can be used to list
 environment variables that will be available to the script at runtime. If an
-"env" command is defined in your package it will take precedence over the
+"env" command is defined in your package, it will take precedence over the
 built-in.
 In addition to the shell's pre-existing PATH, npm run adds
 node_modules/.bin to the PATH provided to scripts. Any binaries provided by
@@ -37,7 +37,14 @@ SYNOPSIS
 prefix. For example, if there is a devDependency on tap in your package,
 you should write:
 "scripts": {"test": "tap test/\*.js"}
-instead of "scripts": {"test": "node_modules/.bin/tap test/\*.js"}
-to run your tests.
+instead of
+"scripts": {"test": "node_modules/.bin/tap test/\*.js"}
+to run your tests.
+Scripts are run from the root of the module, regardless of what your current
+working directory is when you call npm run. If you want your script to
+use different behavior based on what subdirectory you're in, you can use the
+INIT_CWD environment variable, which holds the full path you were in when
+you ran npm run.
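A minimal sketch of the INIT_CWD behavior just described (hypothetical package; assumes a POSIX shell for the variable expansion):

```json
{
  "scripts": {
    "lint-here": "eslint \"$INIT_CWD\""
  }
}
```

Running npm run lint-here from a subdirectory lints that subdirectory, even though the script itself executes from the package root.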
 npm run sets the NODE environment variable to the node executable with
 which npm is executed. Also, if the --scripts-prepend-node-path is passed,
 the directory within which node resides is added to the
@@ -67,5 +74,5 @@
 Commit and tag the version change.
+Run git commit hooks when committing the version change.
 javascript package manager
 npm <command> [args]
-5.3.0
+5.4.2
 npm is the package manager for the Node JavaScript platform. It puts
 modules in place so that node can find them, and manages dependency
@@ -126,7 +126,7 @@
 Isaac Z. Schlueter :: isaacs :: @izs ::
-i@izs.me
+i@izs.me
 Both email and url are optional either way.
npm also sets a top-level "maintainers" field with your npm user info.
The "files" field is an array of files to include in your project. If -you name a folder in the array, then it will also include the files -inside that folder. (Unless they would be ignored by another rule.)
-You can also provide a ".npmignore" file in the root of your package or
-in subdirectories, which will keep files from being included, even
-if they would be picked up by the files array. The .npmignore
file
-works just like a .gitignore
.
The optional "files" field is an array of file patterns that describes +the entries to be included when your package is installed as a +dependency. If the files array is omitted, everything except +automatically-excluded files will be included in your publish. If you +name a folder in the array, then it will also include the files inside +that folder (unless they would be ignored by another rule in this +section.).
+You can also provide a .npmignore
file in the root of your package or
+in subdirectories, which will keep files from being included. At the
+root of your package it will not override the "files" field, but in
+subdirectories it will. The .npmignore
file works just like a
+.gitignore
. If there is a .gitignore
file, and .npmignore
is
+missing, .gitignore
's contents will be used instead.
Files included with the "package.json#files" field cannot be excluded
+through .npmignore
or .gitignore
.
Certain files are always included, regardless of settings:
package.json
 This file describes an exact, and more importantly reproducible
-node_modules tree. Once it's present, and future installation will base its
+node_modules tree. Once it's present, any future installation will base its
 work off this file, instead of recalculating dependency versions off
 package.json(5).
 The presence of a package lock changes the installation behavior such that:
Notice that you need to use underscores instead of dashes, so --allow-same-version
+would become npm_config_allow_same_version=true.
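For example, a hypothetical invocation of the flag named above in its environment-variable form:

```console
$ npm_config_allow_same_version=true npm version 1.0.0
```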
The four relevant files are:
 The Certificate Authority signing certificate that is trusted for SSL
-connections to the registry. Values should be in PEM format with newlines
+connections to the registry. Values should be in PEM format (Windows calls
+it "Base-64 encoded X.509 (.CER)") with newlines
 replaced by the string "\n". For example:
ca="-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----"
Set to null
to only allow "known" registrars, or to a specific CA cert
@@ -221,7 +223,7 @@
 A client certificate to pass when accessing the registry. Values should be in
-PEM format with newlines replaced by the string "\n". For example:
+PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines
+replaced by the string "\n". For example:
 cert="-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----"
It is not the path to a certificate file (and there is no "certfile" option).
 Tag the commit when using the npm version command.
+commit-hooks
+Default: true
+Run git commit hooks when using the npm version command.
 What level of logs to report. On failure, all logs are written to
 npm-debug.log in the current working directory.
-Any logs of a higher level than the setting are shown.
-The default is "warn", which shows warn and error output.
+Any logs of a higher level than the setting are shown. The default is "notice".
 When set to true, npm will display a progress bar during time intensive
 operations, if process.stderr is a TTY.
 Set to false to suppress the progress bar.
-Whether or not to include proprietary extended attributes in the
-tarballs created by npm.
-Unless you are expecting to unpack package tarballs with something other
-than npm -- particularly a very outdated tar implementation -- leave
-this as true.
 CHANGELOG (and its variants)
 LICENSE / LICENCE
+If, given the structure of your project, you find .npmignore to be a
+maintenance headache, you might instead try populating the files
+property of package.json, which is an array of file or directory names
+that should be included in your package. Sometimes a whitelist is easier
+to manage than a blacklist.
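A hypothetical whitelist, for illustration:

```json
{
  "name": "my-package",
  "files": ["lib/", "index.js"]
}
```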
+Testing whether your .npmignore or files config works
+If you want to double check that your package will include only the files
+you intend it to when published, you can run the npm pack command locally
+which will generate a tarball in the working directory, the same way it
+does for publishing.
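For example (package name and version are hypothetical):

```console
$ npm pack
my-package-1.0.0.tgz
```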
 npm link is designed to install a development package and see the
 changes in real time without having to keep re-installing it. (You do
@@ -194,5 +204,5 @@
Handling Module
npm owner ls <pkgname>
Don't squat on package names. Publish code or move out of the way.
@@ -55,12 +55,12 @@ add alice foo
 to add Alice as an owner of the foo package.
-If you see bad behavior like this, please report it to abuse@npmjs.com right
+If you see bad behavior like this, please report it to abuse@npmjs.com right
 away. You are never expected to resolve abusive behavior on your own. We are
 here to help.
 If you think another npm publisher is infringing your trademark, such as by
-using a confusingly similar package name, email abuse@npmjs.com with a link to
+using a confusingly similar package name, email abuse@npmjs.com with a link to
 the package or user account on https://npmjs.com. Attach a copy of your
 trademark registration certificate.
 If we see that the package's publisher is intentionally misleading others by
@@ -134,5 +134,5 @@
 How npm handles the "scripts" field
-npm supports the "scripts" property of the package.json script, for the
+npm supports the "scripts" property of the package.json file, for the
 following scripts:
 The semantic versioner for npm
+npm install --save semver
 $ npm install semver
-$ node
-var semver = require('semver')
+As a node module:
+const semver = require('semver')
semver.valid('1.2.3') // '1.2.3'
semver.valid('a.b.c') // null
@@ -21,10 +24,11 @@ Usage
semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
semver.gt('1.2.3', '9.8.7') // false
semver.lt('1.2.3', '9.8.7') // true
-
As a command-line utility:
+
+As a command-line utility:
$ semver -h
-SemVer 5.1.0
+SemVer 5.3.0
A JavaScript implementation of the http://semver.org/ specification
Copyright Isaac Z. Schlueter
@@ -113,15 +117,15 @@ Prerelease Tags
Prerelease Identifiers
 The method .inc takes an additional identifier string argument that
 will append the value of the string as a prerelease identifier:
-> semver.inc('1.2.3', 'prerelease', 'beta')
-'1.2.4-beta.0'
+semver.inc('1.2.3', 'prerelease', 'beta')
+// '1.2.4-beta.0'
command-line example:
-$ semver 1.2.3 -i prerelease --preid beta
+$ semver 1.2.3 -i prerelease --preid beta
1.2.4-beta.0
Which then can be used to increment further:
-$ semver 1.2.4-beta.0 -i prerelease
+$ semver 1.2.4-beta.0 -i prerelease
1.2.4-beta.1
Advanced Range Syntax
@@ -263,6 +267,8 @@ Functions
major(v)
: Return the major version number.
minor(v)
: Return the minor version number.
patch(v)
: Return the patch version number.
+intersects(r1, r2, loose): Return true if the two supplied ranges
+or comparators intersect.
Comparison
@@ -286,6 +292,10 @@ Comparison
 (major, premajor, minor, preminor, patch, prepatch, or prerelease),
 or null if the versions are the same.
+Comparators
+
+intersects(comparator): Return true if the comparators intersect
Ranges
 validRange(range): Return the valid range or null if it's not valid
@@ -303,6 +313,7 @@ Ranges
 the bounds of the range in either the high or low direction. The
 hilo argument must be either the string '>' or '<'. (This is
 the function called by gtr and ltr.)
+intersects(range): Return true if any of the ranges comparators intersect
Note that, since ranges may be non-contiguous, a version might not be
greater than a range, less than a range, or satisfy a range! For
@@ -325,5 +336,5 @@
Ranges
-npm@5.3.0
+npm@5.4.2
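A short sketch of the newly documented intersects API, based on the signatures above:

```js
const semver = require('semver')

semver.intersects('>=1.2.0 <2.0.0', '1.4.x') // true: 1.4.x lies inside the first range
semver.intersects('^1.0.0', '^2.0.0')        // false: the ranges are disjoint
```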
diff --git a/deps/npm/lib/build.js b/deps/npm/lib/build.js
index 44ac40a00708b5..6a788bc8575c2b 100644
--- a/deps/npm/lib/build.js
+++ b/deps/npm/lib/build.js
@@ -18,6 +18,8 @@ var link = require('./utils/link.js')
var linkIfExists = link.ifExists
var cmdShim = require('cmd-shim')
var cmdShimIfExists = cmdShim.ifExists
+var isHashbangFile = require('./utils/is-hashbang-file.js')
+var dos2Unix = require('./utils/convert-line-endings.js').dos2Unix
var asyncMap = require('slide').asyncMap
var ini = require('ini')
var writeFile = require('write-file-atomic')
@@ -187,13 +189,18 @@ function linkBins (pkg, folder, parent, gtop, cb) {
if (er && er.code === 'ENOENT' && npm.config.get('ignore-scripts')) {
return cb()
}
- if (er || !gtop) return cb(er)
- var dest = path.resolve(binRoot, b)
- var out = npm.config.get('parseable')
- ? dest + '::' + src + ':BINFILE'
- : dest + ' -> ' + src
- if (!npm.config.get('json') && !npm.config.get('parseable')) output(out)
- cb()
+ if (er) return cb(er)
+ isHashbangFile(src).then((isHashbang) => {
+ if (isHashbang) return dos2Unix(src)
+ }).then(() => {
+ if (!gtop) return cb()
+ var dest = path.resolve(binRoot, b)
+ var out = npm.config.get('parseable')
+ ? dest + '::' + src + ':BINFILE'
+ : dest + ' -> ' + src
+ if (!npm.config.get('json') && !npm.config.get('parseable')) output(out)
+ cb()
+ }).catch(cb)
})
}
)
diff --git a/deps/npm/lib/config.js b/deps/npm/lib/config.js
index 0426546274e950..d260c04a54ce65 100644
--- a/deps/npm/lib/config.js
+++ b/deps/npm/lib/config.js
@@ -19,7 +19,7 @@ config.usage = usage(
'npm config set ' +
'\nnpm config get []' +
'\nnpm config delete ' +
- '\nnpm config list' +
+ '\nnpm config list [--json]' +
'\nnpm config edit' +
'\nnpm set ' +
'\nnpm get []'
@@ -45,9 +45,11 @@ config.completion = function (opts, cb) {
case 'rm':
return cb(null, Object.keys(types))
case 'edit':
- case 'list': case 'ls':
+ case 'list':
+ case 'ls':
+ return cb(null, [])
+ default:
return cb(null, [])
- default: return cb(null, [])
}
}
@@ -57,12 +59,21 @@ config.completion = function (opts, cb) {
function config (args, cb) {
var action = args.shift()
switch (action) {
- case 'set': return set(args[0], args[1], cb)
- case 'get': return get(args[0], cb)
- case 'delete': case 'rm': case 'del': return del(args[0], cb)
- case 'list': case 'ls': return list(cb)
- case 'edit': return edit(cb)
- default: return unknown(action, cb)
+ case 'set':
+ return set(args[0], args[1], cb)
+ case 'get':
+ return get(args[0], cb)
+ case 'delete':
+ case 'rm':
+ case 'del':
+ return del(args[0], cb)
+ case 'list':
+ case 'ls':
+ return npm.config.get('json') ? listJson(cb) : list(cb)
+ case 'edit':
+ return edit(cb)
+ default:
+ return unknown(action, cb)
}
}
@@ -159,15 +170,49 @@ function sort (a, b) {
}
function publicVar (k) {
- return !(k.charAt(0) === '_' ||
- k.indexOf(':_') !== -1 ||
- types[k] !== types[k])
+ return !(k.charAt(0) === '_' || k.indexOf(':_') !== -1)
}
function getKeys (data) {
return Object.keys(data).filter(publicVar).sort(sort)
}
+function listJson (cb) {
+ const publicConf = npm.config.keys.reduce((publicConf, k) => {
+ var value = npm.config.get(k)
+
+ if (publicVar(k) &&
+ // argv is not really config, it's command config
+ k !== 'argv' &&
+ // logstream is a Stream, and would otherwise produce circular refs
+ k !== 'logstream') publicConf[k] = value
+
+ return publicConf
+ }, {})
+
+ output(JSON.stringify(publicConf, null, 2))
+ return cb()
+}
+
+function listFromSource (title, conf, long) {
+ var confKeys = getKeys(conf)
+ var msg = ''
+
+ if (confKeys.length) {
+ msg += '; ' + title + '\n'
+ confKeys.forEach(function (k) {
+ var val = JSON.stringify(conf[k])
+ if (conf[k] !== npm.config.get(k)) {
+ if (!long) return
+ msg += '; ' + k + ' = ' + val + ' (overridden)\n'
+ } else msg += k + ' = ' + val + '\n'
+ })
+ msg += '\n'
+ }
+
+ return msg
+}
+
function list (cb) {
var msg = ''
var long = npm.config.get('long')
@@ -185,92 +230,22 @@ function list (cb) {
}
// env configs
- var env = npm.config.sources.env.data
- var envKeys = getKeys(env)
- if (envKeys.length) {
- msg += '; environment configs\n'
- envKeys.forEach(function (k) {
- if (env[k] !== npm.config.get(k)) {
- if (!long) return
- msg += '; ' + k + ' = ' +
- JSON.stringify(env[k]) + ' (overridden)\n'
- } else msg += k + ' = ' + JSON.stringify(env[k]) + '\n'
- })
- msg += '\n'
- }
+ msg += listFromSource('environment configs', npm.config.sources.env.data, long)
// project config file
var project = npm.config.sources.project
- var pconf = project.data
- var ppath = project.path
- var pconfKeys = getKeys(pconf)
- if (pconfKeys.length) {
- msg += '; project config ' + ppath + '\n'
- pconfKeys.forEach(function (k) {
- var val = (k.charAt(0) === '_')
- ? '---sekretz---'
- : JSON.stringify(pconf[k])
- if (pconf[k] !== npm.config.get(k)) {
- if (!long) return
- msg += '; ' + k + ' = ' + val + ' (overridden)\n'
- } else msg += k + ' = ' + val + '\n'
- })
- msg += '\n'
- }
+ msg += listFromSource('project config ' + project.path, project.data, long)
// user config file
- var uconf = npm.config.sources.user.data
- var uconfKeys = getKeys(uconf)
- if (uconfKeys.length) {
- msg += '; userconfig ' + npm.config.get('userconfig') + '\n'
- uconfKeys.forEach(function (k) {
- var val = (k.charAt(0) === '_')
- ? '---sekretz---'
- : JSON.stringify(uconf[k])
- if (uconf[k] !== npm.config.get(k)) {
- if (!long) return
- msg += '; ' + k + ' = ' + val + ' (overridden)\n'
- } else msg += k + ' = ' + val + '\n'
- })
- msg += '\n'
- }
+ msg += listFromSource('userconfig ' + npm.config.get('userconfig'), npm.config.sources.user.data, long)
// global config file
- var gconf = npm.config.sources.global.data
- var gconfKeys = getKeys(gconf)
- if (gconfKeys.length) {
- msg += '; globalconfig ' + npm.config.get('globalconfig') + '\n'
- gconfKeys.forEach(function (k) {
- var val = (k.charAt(0) === '_')
- ? '---sekretz---'
- : JSON.stringify(gconf[k])
- if (gconf[k] !== npm.config.get(k)) {
- if (!long) return
- msg += '; ' + k + ' = ' + val + ' (overridden)\n'
- } else msg += k + ' = ' + val + '\n'
- })
- msg += '\n'
- }
+ msg += listFromSource('globalconfig ' + npm.config.get('globalconfig'), npm.config.sources.global.data, long)
// builtin config file
var builtin = npm.config.sources.builtin || {}
if (builtin && builtin.data) {
- var bconf = builtin.data
- var bpath = builtin.path
- var bconfKeys = getKeys(bconf)
- if (bconfKeys.length) {
- msg += '; builtin config ' + bpath + '\n'
- bconfKeys.forEach(function (k) {
- var val = (k.charAt(0) === '_')
- ? '---sekretz---'
- : JSON.stringify(bconf[k])
- if (bconf[k] !== npm.config.get(k)) {
- if (!long) return
- msg += '; ' + k + ' = ' + val + ' (overridden)\n'
- } else msg += k + ' = ' + val + '\n'
- })
- msg += '\n'
- }
+ msg += listFromSource('builtin config ' + builtin.path, builtin.data, long)
}
// only show defaults if --long
diff --git a/deps/npm/lib/config/defaults.js b/deps/npm/lib/config/defaults.js
index 93bac84a6108f9..3a566ee0feeec8 100644
--- a/deps/npm/lib/config/defaults.js
+++ b/deps/npm/lib/config/defaults.js
@@ -144,6 +144,7 @@ Object.defineProperty(exports, 'defaults', {get: function () {
git: 'git',
'git-tag-version': true,
+ 'commit-hooks': true,
global: false,
globalconfig: path.resolve(globalPrefix, 'etc', 'npmrc'),
@@ -185,7 +186,6 @@ Object.defineProperty(exports, 'defaults', {get: function () {
prefix: globalPrefix,
production: process.env.NODE_ENV === 'production',
'progress': !process.env.TRAVIS && !process.env.CI,
- 'proprietary-attribs': true,
proxy: null,
'https-proxy': null,
'user-agent': 'npm/{npm-version} ' +
@@ -271,6 +271,7 @@ exports.types = {
'fetch-retry-maxtimeout': Number,
git: String,
'git-tag-version': Boolean,
+ 'commit-hooks': Boolean,
global: Boolean,
globalconfig: path,
'global-style': Boolean,
@@ -314,7 +315,6 @@ exports.types = {
prefix: path,
production: Boolean,
progress: Boolean,
- 'proprietary-attribs': Boolean,
proxy: [null, false, url], // allow proxy to be disabled explicitly
'rebuild-bundle': Boolean,
registry: [null, url],
diff --git a/deps/npm/lib/config/lifecycle.js b/deps/npm/lib/config/lifecycle.js
new file mode 100644
index 00000000000000..5fca93939db5fe
--- /dev/null
+++ b/deps/npm/lib/config/lifecycle.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const npm = require('../npm.js')
+const log = require('npmlog')
+
+module.exports = lifecycleOpts
+
+let opts
+
+function lifecycleOpts (moreOpts) {
+ if (!opts) {
+ opts = {
+ config: npm.config.snapshot,
+ dir: npm.dir,
+ failOk: false,
+ force: npm.config.get('force'),
+ group: npm.config.get('group'),
+ ignorePrepublish: npm.config.get('ignore-prepublish'),
+ ignoreScripts: npm.config.get('ignore-scripts'),
+ log: log,
+ production: npm.config.get('production'),
+ scriptShell: npm.config.get('script-shell'),
+ scriptsPrependNodePath: npm.config.get('scripts-prepend-node-path'),
+ unsafePerm: npm.config.get('unsafe-perm'),
+ user: npm.config.get('user')
+ }
+ }
+
+ return moreOpts ? Object.assign({}, opts, moreOpts) : opts
+}
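A sketch of how this module is consumed (the new utils/lifecycle.js below requires it alongside npm-lifecycle; the override shown here is hypothetical):

```js
const lifecycleOpts = require('./config/lifecycle')

const base = lifecycleOpts()                 // computed once from npm's config, then cached
const opts = lifecycleOpts({ failOk: true }) // per-call overrides merged into a fresh object
```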
diff --git a/deps/npm/lib/help.js b/deps/npm/lib/help.js
index 9763d5fccdc1d9..64c80f78745647 100644
--- a/deps/npm/lib/help.js
+++ b/deps/npm/lib/help.js
@@ -12,6 +12,7 @@ var npm = require('./npm.js')
var log = require('npmlog')
var opener = require('opener')
var glob = require('glob')
+var didYouMean = require('./utils/did-you-mean')
var cmdList = require('./config/cmd-list').cmdList
var shorthands = require('./config/cmd-list').shorthands
var commands = cmdList.concat(Object.keys(shorthands))
@@ -181,6 +182,11 @@ function npmUsage (valid, cb) {
'',
'npm@' + npm.version + ' ' + path.dirname(__dirname)
].join('\n'))
+
+ if (npm.argv.length > 1) {
+ didYouMean(npm.argv[1], commands)
+ }
+
cb(valid)
}
diff --git a/deps/npm/lib/install/action/extract.js b/deps/npm/lib/install/action/extract.js
index 5534e8b28a3fcd..8e80d4adda383f 100644
--- a/deps/npm/lib/install/action/extract.js
+++ b/deps/npm/lib/install/action/extract.js
@@ -20,16 +20,34 @@ const workerFarm = require('worker-farm')
const WORKER_PATH = require.resolve('./extract-worker.js')
let workers
+// NOTE: temporarily disabled on non-OSX due to ongoing issues:
+//
+// * Seems to make Windows antivirus issues much more common
+// * Messes with Docker (I think)
+//
+// There are other issues that should be fixed that affect OSX too:
+//
+// * Logging is messed up right now because pacote does its own thing
+// * Global deduplication in pacote breaks due to multiple procs
+//
+// As these get fixed, we can start experimenting with re-enabling it
+// at least on some platforms.
+const ENABLE_WORKERS = process.platform === 'darwin'
+
extract.init = () => {
- workers = workerFarm({
- maxConcurrentCallsPerWorker: npm.limit.fetch,
- maxRetries: 1
- }, WORKER_PATH)
+ if (ENABLE_WORKERS) {
+ workers = workerFarm({
+ maxConcurrentCallsPerWorker: npm.limit.fetch,
+ maxRetries: 1
+ }, WORKER_PATH)
+ }
return BB.resolve()
}
extract.teardown = () => {
- workerFarm.end(workers)
- workers = null
+ if (ENABLE_WORKERS) {
+ workerFarm.end(workers)
+ workers = null
+ }
return BB.resolve()
}
module.exports = extract
@@ -54,7 +72,7 @@ function extract (staging, pkg, log) {
let msg = args
const spec = typeof args[0] === 'string' ? npa(args[0]) : args[0]
args[0] = spec.raw
- if (spec.registry || spec.type === 'remote') {
+ if (ENABLE_WORKERS && (spec.registry || spec.type === 'remote')) {
// We can't serialize these options
opts.loglevel = opts.log.level
opts.log = null
diff --git a/deps/npm/lib/install/action/install.js b/deps/npm/lib/install/action/install.js
index 754bff43ffca99..a5cf63b7396a03 100644
--- a/deps/npm/lib/install/action/install.js
+++ b/deps/npm/lib/install/action/install.js
@@ -4,5 +4,5 @@ var packageId = require('../../utils/package-id.js')
module.exports = function (staging, pkg, log, next) {
log.silly('install', packageId(pkg))
- lifecycle(pkg.package, 'install', pkg.path, false, false, next)
+ lifecycle(pkg.package, 'install', pkg.path, next)
}
diff --git a/deps/npm/lib/install/action/move.js b/deps/npm/lib/install/action/move.js
index bc9bf6a883edea..00d58a15923176 100644
--- a/deps/npm/lib/install/action/move.js
+++ b/deps/npm/lib/install/action/move.js
@@ -7,7 +7,6 @@ var rimraf = require('rimraf')
var mkdirp = require('mkdirp')
var rmStuff = require('../../unbuild.js').rmStuff
var lifecycle = require('../../utils/lifecycle.js')
-var updatePackageJson = require('../update-package-json.js')
var move = require('../../utils/move.js')
/*
@@ -19,14 +18,13 @@ var move = require('../../utils/move.js')
module.exports = function (staging, pkg, log, next) {
log.silly('move', pkg.fromPath, pkg.path)
chain([
- [lifecycle, pkg.package, 'preuninstall', pkg.fromPath, false, true],
- [lifecycle, pkg.package, 'uninstall', pkg.fromPath, false, true],
+ [lifecycle, pkg.package, 'preuninstall', pkg.fromPath, { failOk: true }],
+ [lifecycle, pkg.package, 'uninstall', pkg.fromPath, { failOk: true }],
[rmStuff, pkg.package, pkg.fromPath],
- [lifecycle, pkg.package, 'postuninstall', pkg.fromPath, false, true],
+ [lifecycle, pkg.package, 'postuninstall', pkg.fromPath, { failOk: true }],
[moveModuleOnly, pkg.fromPath, pkg.path, log],
- [lifecycle, pkg.package, 'preinstall', pkg.path, false, true],
- [removeEmptyParents, path.resolve(pkg.fromPath, '..')],
- [updatePackageJson, pkg, pkg.path]
+ [lifecycle, pkg.package, 'preinstall', pkg.path, { failOk: true }],
+ [removeEmptyParents, path.resolve(pkg.fromPath, '..')]
], next)
}
diff --git a/deps/npm/lib/install/action/postinstall.js b/deps/npm/lib/install/action/postinstall.js
index 197dc1e6f9ec93..01accb2a471657 100644
--- a/deps/npm/lib/install/action/postinstall.js
+++ b/deps/npm/lib/install/action/postinstall.js
@@ -4,5 +4,5 @@ var packageId = require('../../utils/package-id.js')
module.exports = function (staging, pkg, log, next) {
log.silly('postinstall', packageId(pkg))
- lifecycle(pkg.package, 'postinstall', pkg.path, false, false, next)
+ lifecycle(pkg.package, 'postinstall', pkg.path, next)
}
diff --git a/deps/npm/lib/install/action/preinstall.js b/deps/npm/lib/install/action/preinstall.js
index a16082ef7303da..374ff563326c01 100644
--- a/deps/npm/lib/install/action/preinstall.js
+++ b/deps/npm/lib/install/action/preinstall.js
@@ -4,5 +4,5 @@ var packageId = require('../../utils/package-id.js')
module.exports = function (staging, pkg, log, next) {
log.silly('preinstall', packageId(pkg))
- lifecycle(pkg.package, 'preinstall', pkg.path, false, false, next)
+ lifecycle(pkg.package, 'preinstall', pkg.path, next)
}
diff --git a/deps/npm/lib/install/action/prepare.js b/deps/npm/lib/install/action/prepare.js
index 5e4333a5b51c6f..d48c8e7e86249a 100644
--- a/deps/npm/lib/install/action/prepare.js
+++ b/deps/npm/lib/install/action/prepare.js
@@ -19,8 +19,8 @@ module.exports = function (staging, pkg, log, next) {
var buildpath = moduleStagingPath(staging, pkg)
chain(
[
- [lifecycle, pkg.package, 'prepublish', buildpath, false, false],
- [lifecycle, pkg.package, 'prepare', buildpath, false, false]
+ [lifecycle, pkg.package, 'prepublish', buildpath],
+ [lifecycle, pkg.package, 'prepare', buildpath]
],
next
)
diff --git a/deps/npm/lib/install/action/unbuild.js b/deps/npm/lib/install/action/unbuild.js
index ce20df75d39d70..dbfbd9c4b12cc8 100644
--- a/deps/npm/lib/install/action/unbuild.js
+++ b/deps/npm/lib/install/action/unbuild.js
@@ -6,11 +6,11 @@ var rmStuff = Bluebird.promisify(require('../../unbuild.js').rmStuff)
module.exports = function (staging, pkg, log) {
log.silly('unbuild', packageId(pkg))
- return lifecycle(pkg.package, 'preuninstall', pkg.path, false, true).then(() => {
- return lifecycle(pkg.package, 'uninstall', pkg.path, false, true)
+ return lifecycle(pkg.package, 'preuninstall', pkg.path, { failOk: true }).then(() => {
+ return lifecycle(pkg.package, 'uninstall', pkg.path, { failOk: true })
}).then(() => {
return rmStuff(pkg.package, pkg.path)
}).then(() => {
- return lifecycle(pkg.package, 'postuninstall', pkg.path, false, true)
+ return lifecycle(pkg.package, 'postuninstall', pkg.path, { failOk: true })
})
}
diff --git a/deps/npm/lib/install/actions.js b/deps/npm/lib/install/actions.js
index 028d932373f1f6..9f0dcfa5dc2996 100644
--- a/deps/npm/lib/install/actions.js
+++ b/deps/npm/lib/install/actions.js
@@ -80,6 +80,7 @@ function runAction (action, staging, pkg, log) {
}
function markAsFailed (pkg) {
+ if (pkg.failed) return
pkg.failed = true
pkg.requires.forEach((req) => {
req.requiredBy = req.requiredBy.filter((reqReqBy) => {
diff --git a/deps/npm/lib/install/deps.js b/deps/npm/lib/install/deps.js
index d7a2c27c1cfb1c..c93907a416337a 100644
--- a/deps/npm/lib/install/deps.js
+++ b/deps/npm/lib/install/deps.js
@@ -62,8 +62,9 @@ function doesChildVersionMatch (child, requested, requestor) {
// In those cases _from, will be preserved and we can compare that to ensure that they
// really came from the same sources.
// You'll see this scenario happen with at least tags and git dependencies.
+ // Some buggy clients will write spaces into the module name part of a _from.
if (child.package._from) {
- var fromReq = npa.resolve(moduleName(child), child.package._from.replace(new RegExp('^' + moduleName(child) + '@'), ''))
+      var fromReq = npa.resolve(moduleName(child), child.package._from.replace(new RegExp('^\\s*' + moduleName(child) + '\\s*@'), ''))
if (fromReq.rawSpec === requested.rawSpec) return true
if (fromReq.type === requested.type && fromReq.saveSpec && fromReq.saveSpec === requested.saveSpec) return true
}
@@ -197,18 +198,31 @@ function matchingDep (tree, name) {
exports.getAllMetadata = function (args, tree, where, next) {
asyncMap(args, function (arg, done) {
- var spec = npa(arg)
+ let spec
+ try {
+ spec = npa(arg)
+ } catch (e) {
+ return done(e)
+ }
if (spec.type !== 'file' && spec.type !== 'directory' && (spec.name == null || spec.rawSpec === '')) {
return fs.stat(path.join(arg, 'package.json'), (err) => {
if (err) {
var version = matchingDep(tree, spec.name)
if (version) {
- return fetchPackageMetadata(npa.resolve(spec.name, version), where, done)
+ try {
+ return fetchPackageMetadata(npa.resolve(spec.name, version), where, done)
+ } catch (e) {
+ return done(e)
+ }
} else {
return fetchPackageMetadata(spec, where, done)
}
} else {
- return fetchPackageMetadata(npa('file:' + arg), where, done)
+ try {
+ return fetchPackageMetadata(npa('file:' + arg), where, done)
+ } catch (e) {
+ return done(e)
+ }
}
})
} else {
diff --git a/deps/npm/lib/install/update-package-json.js b/deps/npm/lib/install/update-package-json.js
index 14339d0012397d..afffaf78002061 100644
--- a/deps/npm/lib/install/update-package-json.js
+++ b/deps/npm/lib/install/update-package-json.js
@@ -4,6 +4,7 @@ var writeFileAtomic = require('write-file-atomic')
var moduleName = require('../utils/module-name.js')
var deepSortObject = require('../utils/deep-sort-object.js')
var sortedObject = require('sorted-object')
+var isWindows = require('../utils/is-windows.js')
var sortKeys = [
'dependencies', 'devDependencies', 'bundleDependencies',
@@ -47,7 +48,9 @@ module.exports = function (mod, buildpath, next) {
var data = JSON.stringify(sortedObject(pkg), null, 2) + '\n'
writeFileAtomic(path.resolve(buildpath, 'package.json'), data, {
- // We really don't need this guarantee, and fsyncing here is super slow.
- fsync: false
+ // We really don't need this guarantee, and fsyncing here is super slow. Except on
+ // Windows where there isn't a big performance difference and it prevents errors when
+ // rolling back optional packages (#17671)
+ fsync: isWindows
}, next)
}
diff --git a/deps/npm/lib/install/validate-tree.js b/deps/npm/lib/install/validate-tree.js
index ccd4e2e310c1b1..24a140171d45c1 100644
--- a/deps/npm/lib/install/validate-tree.js
+++ b/deps/npm/lib/install/validate-tree.js
@@ -40,7 +40,7 @@ function thenValidateAllPeerDeps (idealTree, next) {
validate('OF', arguments)
validateAllPeerDeps(idealTree, function (tree, pkgname, version) {
var warn = new Error(packageId(tree) + ' requires a peer of ' + pkgname + '@' +
- version + ' but none was installed.')
+ version + ' but none is installed. You must install peer dependencies yourself.')
warn.code = 'EPEERINVALID'
idealTree.warnings.push(warn)
})
diff --git a/deps/npm/lib/ls.js b/deps/npm/lib/ls.js
index 2e3db79c3be7d0..7c0ea71e773f98 100644
--- a/deps/npm/lib/ls.js
+++ b/deps/npm/lib/ls.js
@@ -135,7 +135,7 @@ function filterByEnv (data) {
var devKeys = Object.keys(data.devDependencies || [])
var prodKeys = Object.keys(data._dependencies || [])
Object.keys(data.dependencies).forEach(function (name) {
- if (!dev && inList(devKeys, name) && data.dependencies[name].missing) {
+ if (!dev && inList(devKeys, name) && !inList(prodKeys, name) && data.dependencies[name].missing) {
return
}
diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js
index 990d8c51098593..3a84947f79fcd3 100644
--- a/deps/npm/lib/npm.js
+++ b/deps/npm/lib/npm.js
@@ -25,7 +25,6 @@
var npmconf = require('./config/core.js')
var log = require('npmlog')
- var tty = require('tty')
var path = require('path')
var abbrev = require('abbrev')
var which = require('which')
@@ -285,20 +284,20 @@
switch (color) {
case 'always':
- log.enableColor()
npm.color = true
break
case false:
- log.disableColor()
npm.color = false
break
default:
- if (process.stdout.isTTY) npm.color = true
- else if (!tty.isatty) npm.color = true
- else if (tty.isatty(1)) npm.color = true
- else npm.color = false
+ npm.color = process.stdout.isTTY && process.env['TERM'] !== 'dumb'
break
}
+ if (npm.color) {
+ log.enableColor()
+ } else {
+ log.disableColor()
+ }
if (config.get('unicode')) {
log.enableUnicode()
@@ -306,7 +305,7 @@
log.disableUnicode()
}
- if (config.get('progress') && (process.stderr.isTTY || (tty.isatty && tty.isatty(2)))) {
+ if (config.get('progress') && process.stderr.isTTY && process.env['TERM'] !== 'dumb') {
log.enableProgress()
} else {
log.disableProgress()
diff --git a/deps/npm/lib/outdated.js b/deps/npm/lib/outdated.js
index f2fb2df79a3cca..a38137b66c88c5 100644
--- a/deps/npm/lib/outdated.js
+++ b/deps/npm/lib/outdated.js
@@ -32,7 +32,6 @@ var table = require('text-table')
var semver = require('semver')
var npa = require('npm-package-arg')
var mutateIntoLogicalTree = require('./install/mutate-into-logical-tree.js')
-var cache = require('./cache.js')
var npm = require('./npm.js')
var long = npm.config.get('long')
var mapToRegistry = require('./utils/map-to-registry.js')
@@ -42,6 +41,7 @@ var computeVersionSpec = require('./install/deps.js').computeVersionSpec
var moduleName = require('./utils/module-name.js')
var output = require('./utils/output.js')
var ansiTrim = require('./utils/ansi-trim')
+var fetchPackageMetadata = require('./fetch-package-metadata.js')
function uniq (list) {
// we maintain the array because we need an array, not iterator, return
@@ -387,8 +387,12 @@ function shouldUpdate (args, tree, dep, has, req, depth, pkgpath, cb, type) {
}
}
- // We didn't find the version in the doc. See if cache can find it.
- cache.add(dep, req, null, false, onCacheAdd)
+ // We didn't find the version in the doc. See if we can find it in metadata.
+ var spec = dep
+ if (req) {
+ spec = dep + '@' + req
+ }
+ fetchPackageMetadata(spec, '', onCacheAdd)
function onCacheAdd (er, d) {
// if this fails, then it means we can't update this thing.
diff --git a/deps/npm/lib/pack.js b/deps/npm/lib/pack.js
index ae3bb260bad02a..c428482035c98b 100644
--- a/deps/npm/lib/pack.js
+++ b/deps/npm/lib/pack.js
@@ -26,8 +26,9 @@ const pipe = BB.promisify(require('mississippi').pipe)
const prepublishWarning = require('./utils/warn-deprecated')('prepublish-on-install')
const pinflight = require('promise-inflight')
const readJson = BB.promisify(require('read-package-json'))
-const tarPack = BB.promisify(require('./utils/tar').pack)
const writeStreamAtomic = require('fs-write-stream-atomic')
+const tar = require('tar')
+const packlist = require('npm-packlist')
pack.usage = 'npm pack [[<@scope>/]...]'
@@ -118,11 +119,20 @@ function packDirectory (mani, dir, target) {
}).then((pkg) => {
return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'packing'}, (tmp) => {
const tmpTarget = path.join(tmp, path.basename(target))
- return tarPack(tmpTarget, dir, pkg).then(() => {
- return move(tmpTarget, target, {Promise: BB, fs})
- }).then(() => {
- return lifecycle(pkg, 'postpack', dir)
- }).then(() => target)
+
+ const tarOpt = {
+ file: tmpTarget,
+ cwd: dir,
+ prefix: 'package/',
+ portable: true,
+ gzip: true
+ }
+
+ return packlist({ path: dir })
+ .then((files) => tar.create(tarOpt, files))
+ .then(() => move(tmpTarget, target, {Promise: BB, fs}))
+ .then(() => lifecycle(pkg, 'postpack', dir))
+ .then(() => target)
})
})
}
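The same packlist-then-tar flow as a standalone sketch (assumes the npm-packlist and tar packages; the file name is hypothetical):

```js
const packlist = require('npm-packlist')
const tar = require('tar')

// gather the files npm would publish, then archive them under the package/ prefix
packlist({ path: '.' })
  .then((files) => tar.create({
    file: 'my-package.tgz',
    cwd: '.',
    prefix: 'package/',
    portable: true,
    gzip: true
  }, files))
```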
diff --git a/deps/npm/lib/ping.js b/deps/npm/lib/ping.js
index e06be9a47168e6..13f390397ce18c 100644
--- a/deps/npm/lib/ping.js
+++ b/deps/npm/lib/ping.js
@@ -15,7 +15,13 @@ function ping (args, silent, cb) {
var auth = npm.config.getCredentialsByURI(registry)
npm.registry.ping(registry, {auth: auth}, function (er, pong, data, res) {
- if (!silent) output(JSON.stringify(pong))
+ if (!silent) {
+ if (er) {
+ output('Ping error: ' + er)
+ } else {
+ output('Ping success: ' + JSON.stringify(pong))
+ }
+ }
cb(er, er ? null : pong, data, res)
})
}
diff --git a/deps/npm/lib/restart.js b/deps/npm/lib/restart.js
index 601249fd6b36b0..41f9c3a7568622 100644
--- a/deps/npm/lib/restart.js
+++ b/deps/npm/lib/restart.js
@@ -1 +1 @@
-module.exports = require('./utils/lifecycle.js').cmd('restart')
+module.exports = require('./utils/lifecycle-cmd.js')('restart')
diff --git a/deps/npm/lib/run-script.js b/deps/npm/lib/run-script.js
index 05bc1fe98b7763..fb7781f55179b4 100644
--- a/deps/npm/lib/run-script.js
+++ b/deps/npm/lib/run-script.js
@@ -166,7 +166,7 @@ function run (pkg, wd, cmd, args, cb) {
}
// when running scripts explicitly, assume that they're trusted.
- return [lifecycle, pkg, c, wd, true]
+ return [lifecycle, pkg, c, wd, { unsafePerm: true }]
}), cb)
}
diff --git a/deps/npm/lib/shrinkwrap.js b/deps/npm/lib/shrinkwrap.js
index a541d868fc2d02..956a6936468916 100644
--- a/deps/npm/lib/shrinkwrap.js
+++ b/deps/npm/lib/shrinkwrap.js
@@ -101,7 +101,7 @@ function shrinkwrapDeps (deps, top, tree, seen) {
if (!seen) seen = new Set()
if (seen.has(tree)) return
seen.add(tree)
- tree.children.sort(function (aa, bb) { return moduleName(aa).localeCompare(moduleName(bb)) }).forEach(function (child) {
+ sortModules(tree.children).forEach(function (child) {
if (child.fakeChild) {
deps[moduleName(child)] = child.fakeChild
return
@@ -130,7 +130,7 @@ function shrinkwrapDeps (deps, top, tree, seen) {
if (isOnlyOptional(child)) pkginfo.optional = true
if (child.requires.length) {
pkginfo.requires = {}
- child.requires.sort((a, b) => moduleName(a).localeCompare(moduleName(b))).forEach((required) => {
+ sortModules(child.requires).forEach((required) => {
var requested = required.package._requested || getRequested(required) || {}
pkginfo.requires[moduleName(required)] = childVersion(top, required, requested)
})
@@ -142,6 +142,14 @@ function shrinkwrapDeps (deps, top, tree, seen) {
})
}
+function sortModules (modules) {
+ // sort modules with the locale-agnostic Unicode sort
+ var sortedModuleNames = modules.map(moduleName).sort()
+ return modules.sort((a, b) => (
+ sortedModuleNames.indexOf(moduleName(a)) - sortedModuleNames.indexOf(moduleName(b))
+ ))
+}
+
function childVersion (top, child, req) {
if (req.type === 'directory' || req.type === 'file') {
return 'file:' + unixFormatPath(path.relative(top.path, child.package._resolved || req.fetchSpec))
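A quick illustration of why the sort was swapped: localeCompare ordering can vary with the environment's locale, while plain code-point order is stable everywhere, which keeps shrinkwrap output reproducible across machines:

```js
['Z', 'a'].sort((x, y) => x.localeCompare(y)) // often ['a', 'Z'], locale-dependent
['Z', 'a'].sort()                             // always ['Z', 'a'] (Unicode code-point order)
```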
diff --git a/deps/npm/lib/start.js b/deps/npm/lib/start.js
index 85d61e78d0a43a..e9785365007771 100644
--- a/deps/npm/lib/start.js
+++ b/deps/npm/lib/start.js
@@ -1 +1 @@
-module.exports = require('./utils/lifecycle.js').cmd('start')
+module.exports = require('./utils/lifecycle-cmd.js')('start')
diff --git a/deps/npm/lib/stop.js b/deps/npm/lib/stop.js
index e4d02ff28165b9..fd43d08fc12edf 100644
--- a/deps/npm/lib/stop.js
+++ b/deps/npm/lib/stop.js
@@ -1 +1 @@
-module.exports = require('./utils/lifecycle.js').cmd('stop')
+module.exports = require('./utils/lifecycle-cmd.js')('stop')
diff --git a/deps/npm/lib/test.js b/deps/npm/lib/test.js
index 4ef025c4bac50c..06138ac00a3be7 100644
--- a/deps/npm/lib/test.js
+++ b/deps/npm/lib/test.js
@@ -1,12 +1,8 @@
module.exports = test
-const testCmd = require('./utils/lifecycle.js').cmd('test')
-const usage = require('./utils/usage')
+const testCmd = require('./utils/lifecycle-cmd.js')('test')
-test.usage = usage(
- 'test',
- 'npm test [-- ]'
-)
+test.usage = testCmd.usage
function test (args, cb) {
testCmd(args, function (er) {
diff --git a/deps/npm/lib/unbuild.js b/deps/npm/lib/unbuild.js
index 9ba5972d8a3666..78293c9ca269b6 100644
--- a/deps/npm/lib/unbuild.js
+++ b/deps/npm/lib/unbuild.js
@@ -38,14 +38,14 @@ function unbuild_ (silent) {
if (er) return gentlyRm(folder, false, base, cb)
chain(
[
- [lifecycle, pkg, 'preuninstall', folder, false, true],
- [lifecycle, pkg, 'uninstall', folder, false, true],
+ [lifecycle, pkg, 'preuninstall', folder, { failOk: true }],
+ [lifecycle, pkg, 'uninstall', folder, { failOk: true }],
!silent && function (cb) {
output('unbuild ' + pkg._id)
cb()
},
[rmStuff, pkg, folder],
- [lifecycle, pkg, 'postuninstall', folder, false, true],
+ [lifecycle, pkg, 'postuninstall', folder, { failOk: true }],
[gentlyRm, folder, false, base]
],
cb
@@ -60,7 +60,9 @@ function rmStuff (pkg, folder, cb) {
// otherwise, then bins are in folder/../.bin
var parent = pkg.name[0] === '@' ? path.dirname(path.dirname(folder)) : path.dirname(folder)
var gnm = npm.dir
- var top = gnm === parent
+ // gnm might be an absolute path, parent might be relative
+ // this checks they're the same directory regardless
+ var top = path.relative(gnm, parent) === ''
log.verbose('unbuild rmStuff', pkg._id, 'from', gnm)
if (!top) log.verbose('unbuild rmStuff', 'in', parent)
diff --git a/deps/npm/lib/utils/convert-line-endings.js b/deps/npm/lib/utils/convert-line-endings.js
new file mode 100644
index 00000000000000..b05d328aacdcb6
--- /dev/null
+++ b/deps/npm/lib/utils/convert-line-endings.js
@@ -0,0 +1,49 @@
+'use strict'
+
+const Transform = require('stream').Transform
+const Bluebird = require('bluebird')
+const fs = require('graceful-fs')
+const stat = Bluebird.promisify(fs.stat)
+const chmod = Bluebird.promisify(fs.chmod)
+const fsWriteStreamAtomic = require('fs-write-stream-atomic')
+
+module.exports.dos2Unix = dos2Unix
+
+function dos2Unix (file) {
+ return stat(file).then((stats) => {
+ let previousChunkEndedInCR = false
+ return new Bluebird((resolve, reject) => {
+ fs.createReadStream(file)
+ .on('error', reject)
+ .pipe(new Transform({
+ transform: function (chunk, encoding, done) {
+ let data = chunk.toString()
+ if (previousChunkEndedInCR) {
+ data = '\r' + data
+ }
+ if (data[data.length - 1] === '\r') {
+ data = data.slice(0, -1)
+ previousChunkEndedInCR = true
+ } else {
+ previousChunkEndedInCR = false
+ }
+ done(null, data.replace(/\r\n/g, '\n'))
+ },
+ flush: function (done) {
+ if (previousChunkEndedInCR) {
+ this.push('\r')
+ }
+ done()
+ }
+ }))
+ .on('error', reject)
+ .pipe(fsWriteStreamAtomic(file))
+ .on('error', reject)
+ .on('finish', function () {
+ resolve(chmod(file, stats.mode))
+ })
+ })
+ })
+}
+
+// could add unix2Dos and legacy Mac functions if need be
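A hypothetical caller, to show the contract (returns a promise and preserves the file's original mode):

```js
const dos2Unix = require('./utils/convert-line-endings.js').dos2Unix

dos2Unix('node_modules/.bin/some-script')
  .then(() => console.log('line endings normalized'))
  .catch((err) => console.error(err))
```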
diff --git a/deps/npm/lib/utils/did-you-mean.js b/deps/npm/lib/utils/did-you-mean.js
new file mode 100644
index 00000000000000..8e72dde5fa0132
--- /dev/null
+++ b/deps/npm/lib/utils/did-you-mean.js
@@ -0,0 +1,20 @@
+var meant = require('meant')
+var output = require('./output.js')
+
+function didYouMean (scmd, commands) {
+ var bestSimilarity = meant(scmd, commands).map(function (str) {
+ return ' ' + str
+ })
+
+ if (bestSimilarity.length === 0) return
+ if (bestSimilarity.length === 1) {
+ output('\nDid you mean this?\n' + bestSimilarity[0])
+ } else {
+ output(
+ ['\nDid you mean one of these?']
+ .concat(bestSimilarity.slice(0, 3)).join('\n')
+ )
+ }
+}
+
+module.exports = didYouMean
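For instance, with a mistyped command (hypothetical input; the output shape follows the code above):

```js
didYouMean('instal', ['install', 'init', 'info'])
// Did you mean this?
//     install
```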
diff --git a/deps/npm/lib/utils/error-handler.js b/deps/npm/lib/utils/error-handler.js
index 52a675bea640ad..b2fd45a5f3e5fb 100644
--- a/deps/npm/lib/utils/error-handler.js
+++ b/deps/npm/lib/utils/error-handler.js
@@ -170,20 +170,12 @@ function errorHandler (er) {
;[
'type',
- 'fstream_path',
- 'fstream_unc_path',
- 'fstream_type',
- 'fstream_class',
- 'fstream_finish_call',
- 'fstream_linkpath',
'stack',
- 'fstream_stack',
'statusCode',
'pkgid'
].forEach(function (k) {
var v = er[k]
if (!v) return
- if (k === 'fstream_stack') v = v.join('\n')
log.verbose(k, v)
})
diff --git a/deps/npm/lib/utils/is-hashbang-file.js b/deps/npm/lib/utils/is-hashbang-file.js
new file mode 100644
index 00000000000000..f1677381fa129c
--- /dev/null
+++ b/deps/npm/lib/utils/is-hashbang-file.js
@@ -0,0 +1,19 @@
+'use strict'
+const Bluebird = require('bluebird')
+const fs = require('graceful-fs')
+const open = Bluebird.promisify(fs.open)
+const close = Bluebird.promisify(fs.close)
+
+module.exports = isHashbangFile
+
+function isHashbangFile (file) {
+ return open(file, 'r').then((fileHandle) => {
+ return new Bluebird((resolve, reject) => {
+ fs.read(fileHandle, new Buffer(new Array(2)), 0, 2, 0, function (err, bytesRead, buffer) {
+ close(fileHandle).then(() => {
+ resolve(!err && buffer.toString() === '#!')
+ }).catch(reject)
+ })
+ })
+ })
+}
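This predicate pairs with dos2Unix above; build.js earlier in this diff uses exactly this pattern to normalize line endings only for scripts that begin with #!:

```js
isHashbangFile(src).then((isHashbang) => {
  if (isHashbang) return dos2Unix(src)
})
```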
diff --git a/deps/npm/lib/utils/lifecycle-cmd.js b/deps/npm/lib/utils/lifecycle-cmd.js
new file mode 100644
index 00000000000000..bb802f45ee08c3
--- /dev/null
+++ b/deps/npm/lib/utils/lifecycle-cmd.js
@@ -0,0 +1,18 @@
+exports = module.exports = cmd
+
+var npm = require('../npm.js')
+var usage = require('./usage.js')
+
+function cmd (stage) {
+ function CMD (args, cb) {
+ npm.commands['run-script']([stage].concat(args), cb)
+ }
+ CMD.usage = usage(stage, 'npm ' + stage + ' [-- ]')
+ var installedShallow = require('./completion/installed-shallow.js')
+ CMD.completion = function (opts, cb) {
+ installedShallow(opts, function (d) {
+ return d.scripts && d.scripts[stage]
+ }, cb)
+ }
+ return CMD
+}
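As the start/stop/restart/test diffs above show, each command stub becomes a one-liner, and extra arguments still flow through run-script:

```js
// lib/start.js
module.exports = require('./utils/lifecycle-cmd.js')('start')
```

so `npm start -- --port=3000` runs the package's start script with the extra arguments (the port flag is hypothetical).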
diff --git a/deps/npm/lib/utils/lifecycle.js b/deps/npm/lib/utils/lifecycle.js
index 412c1c69448a9a..2d3265e0eb2b3e 100644
--- a/deps/npm/lib/utils/lifecycle.js
+++ b/deps/npm/lib/utils/lifecycle.js
@@ -1,458 +1,14 @@
-exports = module.exports = lifecycle
-exports.cmd = cmd
-exports.makeEnv = makeEnv
-exports._incorrectWorkingDirectory = _incorrectWorkingDirectory
+exports = module.exports = runLifecycle
-var log = require('npmlog')
-var spawn = require('./spawn')
-var npm = require('../npm.js')
-var path = require('path')
-var fs = require('graceful-fs')
-var chain = require('slide').chain
-var Stream = require('stream').Stream
-var PATH = 'PATH'
-var uidNumber = require('uid-number')
-var umask = require('./umask')
-var usage = require('./usage')
-var output = require('./output.js')
-var which = require('which')
+const lifecycleOpts = require('../config/lifecycle')
+const lifecycle = require('npm-lifecycle')
-// windows calls it's path 'Path' usually, but this is not guaranteed.
-if (process.platform === 'win32') {
- PATH = 'Path'
- Object.keys(process.env).forEach(function (e) {
- if (e.match(/^PATH$/i)) {
- PATH = e
- }
- })
-}
-
-function logid (pkg, stage) {
- return pkg._id + '~' + stage + ':'
-}
-
-function lifecycle (pkg, stage, wd, unsafe, failOk, cb) {
- if (typeof cb !== 'function') {
- cb = failOk
- failOk = false
- }
- if (typeof cb !== 'function') {
- cb = unsafe
- unsafe = false
- }
- if (typeof cb !== 'function') {
- cb = wd
- wd = null
- }
-
- while (pkg && pkg._data) pkg = pkg._data
- if (!pkg) return cb(new Error('Invalid package data'))
-
- log.info('lifecycle', logid(pkg, stage), pkg._id)
- if (!pkg.scripts) pkg.scripts = {}
-
- if (npm.config.get('ignore-scripts')) {
- log.info('lifecycle', logid(pkg, stage), 'ignored because ignore-scripts is set to true', pkg._id)
- pkg.scripts = {}
- }
- if (stage === 'prepublish' && npm.config.get('ignore-prepublish')) {
- log.info('lifecycle', logid(pkg, stage), 'ignored because ignore-prepublish is set to true', pkg._id)
- delete pkg.scripts.prepublish
- }
-
- if (!pkg.scripts[stage]) return cb()
-
- validWd(wd || path.resolve(npm.dir, pkg.name), function (er, wd) {
- if (er) return cb(er)
-
- unsafe = unsafe || npm.config.get('unsafe-perm')
-
- if ((wd.indexOf(npm.dir) !== 0 || _incorrectWorkingDirectory(wd, pkg)) &&
- !unsafe && pkg.scripts[stage]) {
- log.warn('lifecycle', logid(pkg, stage), 'cannot run in wd',
- '%s %s (wd=%s)', pkg._id, pkg.scripts[stage], wd
- )
- return cb()
- }
-
- // set the env variables, then run scripts as a child process.
- var env = makeEnv(pkg)
- env.npm_lifecycle_event = stage
- env.npm_node_execpath = env.NODE = env.NODE || process.execPath
- env.npm_execpath = require.main.filename
-
- // 'nobody' typically doesn't have permission to write to /tmp
- // even if it's never used, sh freaks out.
- if (!npm.config.get('unsafe-perm')) env.TMPDIR = wd
-
- lifecycle_(pkg, stage, wd, env, unsafe, failOk, cb)
- })
-}
-
-function _incorrectWorkingDirectory (wd, pkg) {
- return wd.lastIndexOf(pkg.name) !== wd.length - pkg.name.length
-}
-
-function lifecycle_ (pkg, stage, wd, env, unsafe, failOk, cb) {
- var pathArr = []
- var p = wd.split(/[\\\/]node_modules[\\\/]/)
- var acc = path.resolve(p.shift())
-
- p.forEach(function (pp) {
- pathArr.unshift(path.join(acc, 'node_modules', '.bin'))
- acc = path.join(acc, 'node_modules', pp)
- })
- pathArr.unshift(path.join(acc, 'node_modules', '.bin'))
-
- // we also unshift the bundled node-gyp-bin folder so that
- // the bundled one will be used for installing things.
- pathArr.unshift(path.join(__dirname, '..', '..', 'bin', 'node-gyp-bin'))
-
- if (shouldPrependCurrentNodeDirToPATH()) {
- // prefer current node interpreter in child scripts
- pathArr.push(path.dirname(process.execPath))
- }
-
- if (env[PATH]) pathArr.push(env[PATH])
- env[PATH] = pathArr.join(process.platform === 'win32' ? ';' : ':')
-
- var packageLifecycle = pkg.scripts && pkg.scripts.hasOwnProperty(stage)
-
- if (packageLifecycle) {
- // define this here so it's available to all scripts.
- env.npm_lifecycle_script = pkg.scripts[stage]
- } else {
- log.silly('lifecycle', logid(pkg, stage), 'no script for ' + stage + ', continuing')
- }
-
- function done (er) {
- if (er) {
- if (npm.config.get('force')) {
- log.info('lifecycle', logid(pkg, stage), 'forced, continuing', er)
- er = null
- } else if (failOk) {
- log.warn('lifecycle', logid(pkg, stage), 'continuing anyway', er.message)
- er = null
- }
- }
- cb(er)
- }
-
- chain(
- [
- packageLifecycle && [runPackageLifecycle, pkg, env, wd, unsafe],
- [runHookLifecycle, pkg, env, wd, unsafe]
- ],
- done
- )
-}
-
-function shouldPrependCurrentNodeDirToPATH () {
- var cfgsetting = npm.config.get('scripts-prepend-node-path')
- if (cfgsetting === false) return false
- if (cfgsetting === true) return true
-
- var isDifferentNodeInPath
-
- var isWindows = process.platform === 'win32'
- var foundExecPath
- try {
- foundExecPath = which.sync(path.basename(process.execPath), {pathExt: isWindows ? ';' : ':'})
- // Apply `fs.realpath()` here to avoid false positives when `node` is a symlinked executable.
- isDifferentNodeInPath = fs.realpathSync(process.execPath).toUpperCase() !==
- fs.realpathSync(foundExecPath).toUpperCase()
- } catch (e) {
- isDifferentNodeInPath = true
- }
-
- if (cfgsetting === 'warn-only') {
- if (isDifferentNodeInPath && !shouldPrependCurrentNodeDirToPATH.hasWarned) {
- if (foundExecPath) {
- log.warn('lifecycle', 'The node binary used for scripts is', foundExecPath, 'but npm is using', process.execPath, 'itself. Use the `--scripts-prepend-node-path` option to include the path for the node binary npm was executed with.')
- } else {
- log.warn('lifecycle', 'npm is using', process.execPath, 'but there is no node binary in the current PATH. Use the `--scripts-prepend-node-path` option to include the path for the node binary npm was executed with.')
- }
- shouldPrependCurrentNodeDirToPATH.hasWarned = true
- }
-
- return false
- }
-
- return isDifferentNodeInPath
-}
-
-function validWd (d, cb) {
- fs.stat(d, function (er, st) {
- if (er || !st.isDirectory()) {
- var p = path.dirname(d)
- if (p === d) {
- return cb(new Error('Could not find suitable wd'))
- }
- return validWd(p, cb)
- }
- return cb(null, d)
- })
-}
-
-function runPackageLifecycle (pkg, env, wd, unsafe, cb) {
- // run package lifecycle scripts in the package root, or the nearest parent.
- var stage = env.npm_lifecycle_event
- var cmd = env.npm_lifecycle_script
-
- var note = '\n> ' + pkg._id + ' ' + stage + ' ' + wd +
- '\n> ' + cmd + '\n'
- runCmd(note, cmd, pkg, env, stage, wd, unsafe, cb)
-}
-
-var running = false
-var queue = []
-function dequeue () {
- running = false
- if (queue.length) {
- var r = queue.shift()
- runCmd.apply(null, r)
- }
-}
-
-function runCmd (note, cmd, pkg, env, stage, wd, unsafe, cb) {
- if (running) {
- queue.push([note, cmd, pkg, env, stage, wd, unsafe, cb])
- return
- }
-
- running = true
- log.pause()
- var user = unsafe ? null : npm.config.get('user')
- var group = unsafe ? null : npm.config.get('group')
-
- if (log.level !== 'silent') {
- output(note)
- }
- log.verbose('lifecycle', logid(pkg, stage), 'unsafe-perm in lifecycle', unsafe)
-
- if (process.platform === 'win32') {
- unsafe = true
- }
-
- if (unsafe) {
- runCmd_(cmd, pkg, env, wd, stage, unsafe, 0, 0, cb)
- } else {
- uidNumber(user, group, function (er, uid, gid) {
- runCmd_(cmd, pkg, env, wd, stage, unsafe, uid, gid, cb)
- })
- }
-}
-
-function runCmd_ (cmd, pkg, env, wd, stage, unsafe, uid, gid, cb_) {
- function cb (er) {
- cb_.apply(null, arguments)
- log.resume()
- process.nextTick(dequeue)
- }
-
- var conf = {
- cwd: wd,
- env: env,
- stdio: [ 0, 1, 2 ]
- }
-
- if (!unsafe) {
- conf.uid = uid ^ 0
- conf.gid = gid ^ 0
- }
-
- var sh = 'sh'
- var shFlag = '-c'
-
- var customShell = npm.config.get('script-shell')
-
- if (customShell) {
- sh = customShell
- } else if (process.platform === 'win32') {
- sh = process.env.comspec || 'cmd'
- shFlag = '/d /s /c'
- conf.windowsVerbatimArguments = true
- }
-
- log.verbose('lifecycle', logid(pkg, stage), 'PATH:', env[PATH])
- log.verbose('lifecycle', logid(pkg, stage), 'CWD:', wd)
- log.silly('lifecycle', logid(pkg, stage), 'Args:', [shFlag, cmd])
-
- var proc = spawn(sh, [shFlag, cmd], conf)
-
- proc.on('error', procError)
- proc.on('close', function (code, signal) {
- log.silly('lifecycle', logid(pkg, stage), 'Returned: code:', code, ' signal:', signal)
- if (signal) {
- process.kill(process.pid, signal)
- } else if (code) {
- var er = new Error('Exit status ' + code)
- er.errno = code
- }
- procError(er)
- })
- process.once('SIGTERM', procKill)
- process.once('SIGINT', procInterupt)
-
- function procError (er) {
- if (er) {
- log.info('lifecycle', logid(pkg, stage), 'Failed to exec ' + stage + ' script')
- er.message = pkg._id + ' ' + stage + ': `' + cmd + '`\n' +
- er.message
- if (er.code !== 'EPERM') {
- er.code = 'ELIFECYCLE'
- }
- fs.stat(npm.dir, function (statError, d) {
- if (statError && statError.code === 'ENOENT' && npm.dir.split(path.sep).slice(-1)[0] === 'node_modules') {
- log.warn('', 'Local package.json exists, but node_modules missing, did you mean to install?')
- }
- })
- er.pkgid = pkg._id
- er.stage = stage
- er.script = cmd
- er.pkgname = pkg.name
- }
- process.removeListener('SIGTERM', procKill)
- process.removeListener('SIGTERM', procInterupt)
- process.removeListener('SIGINT', procKill)
- return cb(er)
- }
- function procKill () {
- proc.kill()
- }
- function procInterupt () {
- proc.kill('SIGINT')
- proc.on('exit', function () {
- process.exit()
- })
- process.once('SIGINT', procKill)
- }
-}
-
-function runHookLifecycle (pkg, env, wd, unsafe, cb) {
- // check for a hook script, run if present.
- var stage = env.npm_lifecycle_event
- var hook = path.join(npm.dir, '.hooks', stage)
- var cmd = hook
-
- fs.stat(hook, function (er) {
- if (er) return cb()
- var note = '\n> ' + pkg._id + ' ' + stage + ' ' + wd +
- '\n> ' + cmd
- runCmd(note, hook, pkg, env, stage, wd, unsafe, cb)
- })
-}
-
-function makeEnv (data, prefix, env) {
- prefix = prefix || 'npm_package_'
- if (!env) {
- env = {}
- for (var i in process.env) {
- if (!i.match(/^npm_/)) {
- env[i] = process.env[i]
- }
- }
-
- // express and others respect the NODE_ENV value.
- if (npm.config.get('production')) env.NODE_ENV = 'production'
- } else if (!data.hasOwnProperty('_lifecycleEnv')) {
- Object.defineProperty(data, '_lifecycleEnv',
- {
- value: env,
- enumerable: false
- }
- )
- }
-
- for (i in data) {
- if (i.charAt(0) !== '_') {
- var envKey = (prefix + i).replace(/[^a-zA-Z0-9_]/g, '_')
- if (i === 'readme') {
- continue
- }
- if (data[i] && typeof data[i] === 'object') {
- try {
- // quick and dirty detection for cyclical structures
- JSON.stringify(data[i])
- makeEnv(data[i], envKey + '_', env)
- } catch (ex) {
- // usually these are package objects.
- // just get the path and basic details.
- var d = data[i]
- makeEnv(
- { name: d.name, version: d.version, path: d.path },
- envKey + '_',
- env
- )
- }
- } else {
- env[envKey] = String(data[i])
- env[envKey] = env[envKey].indexOf('\n') !== -1
- ? JSON.stringify(env[envKey])
- : env[envKey]
- }
- }
+function runLifecycle (pkg, stage, wd, moreOpts, cb) {
+ if (typeof moreOpts === 'function') {
+ cb = moreOpts
+ moreOpts = null
}
- if (prefix !== 'npm_package_') return env
-
- prefix = 'npm_config_'
- var pkgConfig = {}
- var keys = npm.config.keys
- var pkgVerConfig = {}
- var namePref = data.name + ':'
- var verPref = data.name + '@' + data.version + ':'
-
- keys.forEach(function (i) {
- // in some rare cases (e.g. working with nerf darts), there are segmented
- // "private" (underscore-prefixed) config names -- don't export
- if (i.charAt(0) === '_' && i.indexOf('_' + namePref) !== 0 || i.match(/:_/)) {
- return
- }
- var value = npm.config.get(i)
- if (value instanceof Stream || Array.isArray(value)) return
- if (i.match(/umask/)) value = umask.toString(value)
- if (!value) value = ''
- else if (typeof value === 'number') value = '' + value
- else if (typeof value !== 'string') value = JSON.stringify(value)
-
- value = value.indexOf('\n') !== -1
- ? JSON.stringify(value)
- : value
- i = i.replace(/^_+/, '')
- var k
- if (i.indexOf(namePref) === 0) {
- k = i.substr(namePref.length).replace(/[^a-zA-Z0-9_]/g, '_')
- pkgConfig[k] = value
- } else if (i.indexOf(verPref) === 0) {
- k = i.substr(verPref.length).replace(/[^a-zA-Z0-9_]/g, '_')
- pkgVerConfig[k] = value
- }
- var envKey = (prefix + i).replace(/[^a-zA-Z0-9_]/g, '_')
- env[envKey] = value
- })
-
- prefix = 'npm_package_config_'
- ;[pkgConfig, pkgVerConfig].forEach(function (conf) {
- for (var i in conf) {
- var envKey = (prefix + i)
- env[envKey] = conf[i]
- }
- })
-
- return env
-}
-
-function cmd (stage) {
- function CMD (args, cb) {
- npm.commands['run-script']([stage].concat(args), cb)
- }
- CMD.usage = usage(stage, 'npm ' + stage + ' [-- ]')
- var installedShallow = require('./completion/installed-shallow.js')
- CMD.completion = function (opts, cb) {
- installedShallow(opts, function (d) {
- return d.scripts && d.scripts[stage]
- }, cb)
- }
- return CMD
+ const opts = lifecycleOpts(moreOpts)
+ lifecycle(pkg, stage, wd, opts).then(cb, cb)
}
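The rewritten runLifecycle above is now a thin callback shim over a
promise-returning lifecycle implementation. A minimal usage sketch,
assuming the module keeps exporting this entry point and that options
such as `failOk` (visible in the deleted signature) remain supported:

    var runLifecycle = require('./utils/lifecycle.js')
    // Illustrative manifest; any parsed package.json object works here.
    var pkg = {
      name: 'example',
      version: '1.0.0',
      scripts: { postinstall: 'node -e "console.log(42)"' }
    }

    // With an options object and a node-style callback:
    runLifecycle(pkg, 'postinstall', '/path/to/pkg', { failOk: true }, function (er) {
      if (er) return console.error('postinstall failed:', er.message)
      console.log('postinstall ran')
    })

    // moreOpts is optional: a function in its place becomes the callback,
    // and both promise outcomes are funneled into it via .then(cb, cb).
    runLifecycle(pkg, 'postinstall', '/path/to/pkg', function (er) {})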
diff --git a/deps/npm/lib/utils/module-name.js b/deps/npm/lib/utils/module-name.js
index 43e0f5fb128e53..89957b181fd053 100644
--- a/deps/npm/lib/utils/module-name.js
+++ b/deps/npm/lib/utils/module-name.js
@@ -11,7 +11,7 @@ function pathToPackageName (dir) {
var name = path.relative(path.resolve(dir, '..'), dir)
var scoped = path.relative(path.resolve(dir, '../..'), dir)
if (scoped[0] === '@') return scoped.replace(/\\/g, '/')
- return name
+ return name.trim()
}
module.exports.test.isNotEmpty = isNotEmpty
@@ -22,7 +22,7 @@ function isNotEmpty (str) {
var unknown = 0
function moduleName (tree) {
var pkg = tree.package || tree
- if (isNotEmpty(pkg.name)) return pkg.name
+ if (isNotEmpty(pkg.name) && typeof pkg.name === 'string') return pkg.name.trim()
var pkgName = pathToPackageName(tree.path)
if (pkgName !== '') return pkgName
if (tree._invalidName != null) return tree._invalidName
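The added .trim() calls (together with the typeof check) harden module
name resolution against malformed manifests. A short sketch of the
failure modes they cover, assuming moduleName is the module's export:

    var moduleName = require('./utils/module-name.js')

    // A "name" with a stray trailing newline no longer leaks whitespace:
    moduleName({ package: { name: 'some-pkg\n' }, path: '/x/node_modules/some-pkg' })
    // => 'some-pkg'

    // A non-string "name" now fails the typeof check and falls back to
    // the path-derived name instead of returning the raw value:
    moduleName({ package: { name: 42 }, path: '/x/node_modules/some-pkg' })
    // => 'some-pkg'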
diff --git a/deps/npm/lib/utils/tar.js b/deps/npm/lib/utils/tar.js
deleted file mode 100644
index 12719e37e290ab..00000000000000
--- a/deps/npm/lib/utils/tar.js
+++ /dev/null
@@ -1,454 +0,0 @@
-'use strict'
-
-// commands for packing and unpacking tarballs
-// this file is used by lib/cache.js
-
-var fs = require('graceful-fs')
-var path = require('path')
-var writeFileAtomic = require('write-file-atomic')
-var writeStreamAtomic = require('fs-write-stream-atomic')
-var log = require('npmlog')
-var uidNumber = require('uid-number')
-var readJson = require('read-package-json')
-var tar = require('tar')
-var zlib = require('zlib')
-var fstream = require('fstream')
-var Packer = require('fstream-npm')
-var iferr = require('iferr')
-var inherits = require('inherits')
-var npm = require('../npm.js')
-var rm = require('./gently-rm.js')
-var myUid = process.getuid && process.getuid()
-var myGid = process.getgid && process.getgid()
-var readPackageTree = require('read-package-tree')
-var union = require('lodash.union')
-var moduleName = require('./module-name.js')
-var packageId = require('./package-id.js')
-var pulseTillDone = require('../utils/pulse-till-done.js')
-
-if (process.env.SUDO_UID && myUid === 0) {
- if (!isNaN(process.env.SUDO_UID)) myUid = +process.env.SUDO_UID
- if (!isNaN(process.env.SUDO_GID)) myGid = +process.env.SUDO_GID
-}
-
-exports.pack = pack
-exports.unpack = unpack
-
-function pack (tarball, folder, pkg, cb) {
- log.verbose('tar pack', [tarball, folder])
-
- log.verbose('tarball', tarball)
- log.verbose('folder', folder)
-
- readJson(path.join(folder, 'package.json'), function (er, pkg) {
- if (er || !pkg.bundleDependencies) {
- pack_(tarball, folder, null, pkg, cb)
- } else {
- // we require this at runtime due to load-order issues, because recursive
- // requires fail if you replace the exports object, and we do, not in deps, but
- // in a dep of it.
- var computeMetadata = require('../install/deps.js').computeMetadata
-
- readPackageTree(folder, pulseTillDone('pack:readTree:' + packageId(pkg), iferr(cb, function (tree) {
- computeMetadata(tree)
- pack_(tarball, folder, tree, pkg, pulseTillDone('pack:' + packageId(pkg), cb))
- })))
- }
- })
-}
-
-function BundledPacker (props) {
- Packer.call(this, props)
-}
-inherits(BundledPacker, Packer)
-
-BundledPacker.prototype.applyIgnores = function (entry, partial, entryObj) {
- if (!entryObj || entryObj.type !== 'Directory') {
- // package.json files can never be ignored.
- if (entry === 'package.json') return true
-
- // readme files should never be ignored.
- if (entry.match(/^readme(\.[^\.]*)$/i)) return true
-
- // license files should never be ignored.
- if (entry.match(/^(license|licence)(\.[^\.]*)?$/i)) return true
-
- // copyright notice files should never be ignored.
- if (entry.match(/^(notice)(\.[^\.]*)?$/i)) return true
-
- // changelogs should never be ignored.
- if (entry.match(/^(changes|changelog|history)(\.[^\.]*)?$/i)) return true
- }
-
- // special rules. see below.
- if (entry === 'node_modules' && this.packageRoot) return true
-
- // package.json main file should never be ignored.
- var mainFile = this.package && this.package.main
- if (mainFile && path.resolve(this.path, entry) === path.resolve(this.path, mainFile)) return true
-
- // some files are *never* allowed under any circumstances
- // (VCS folders, native build cruft, npm cruft, regular cruft)
- if (entry === '.git' ||
- entry === 'CVS' ||
- entry === '.svn' ||
- entry === '.hg' ||
- entry === '.lock-wscript' ||
- entry.match(/^\.wafpickle-[0-9]+$/) ||
- (this.parent && this.parent.packageRoot && this.basename === 'build' &&
- entry === 'config.gypi') ||
- entry === 'npm-debug.log' ||
- entry === '.npmrc' ||
- entry.match(/^\..*\.swp$/) ||
- entry === '.DS_Store' ||
- entry.match(/^\._/) ||
- entry.match(/^.*\.orig$/) ||
- // Package locks are never allowed in tarballs -- use shrinkwrap instead
- entry === 'package-lock.json'
- ) {
- return false
- }
-
- // in a node_modules folder, we only include bundled dependencies
- // also, prevent packages in node_modules from being affected
- // by rules set in the containing package, so that
- // bundles don't get busted.
- // Also, once in a bundle, everything is installed as-is
- // To prevent infinite cycles in the case of cyclic deps that are
- // linked with npm link, even in a bundle, deps are only bundled
- // if they're not already present at a higher level.
- if (this.bundleMagic) {
- // bubbling up. stop here and allow anything the bundled pkg allows
- if (entry.charAt(0) === '@') {
- var firstSlash = entry.indexOf('/')
- // continue to list the packages in this scope
- if (firstSlash === -1) return true
-
- // bubbling up. stop here and allow anything the bundled pkg allows
- if (entry.indexOf('/', firstSlash + 1) !== -1) return true
- // bubbling up. stop here and allow anything the bundled pkg allows
- } else if (entry.indexOf('/') !== -1) {
- return true
- }
-
- // never include the .bin. It's typically full of platform-specific
- // stuff like symlinks and .cmd files anyway.
- if (entry === '.bin') return false
-
- // the package root.
- var p = this.parent
- // the directory before this one.
- var pp = p && p.parent
- // the directory before that (if this is scoped)
- if (pp && pp.basename[0] === '@') pp = pp && pp.parent
-
- // if this entry has already been bundled, and is a symlink,
- // and it is the *same* symlink as this one, then exclude it.
- if (pp && pp.bundleLinks && this.bundleLinks &&
- pp.bundleLinks[entry] &&
- pp.bundleLinks[entry] === this.bundleLinks[entry]) {
- return false
- }
-
- // since it's *not* a symbolic link, if we're *already* in a bundle,
- // then we should include everything.
- if (pp && pp.package && pp.basename === 'node_modules') {
- return true
- }
-
- // only include it at this point if it's a bundleDependency
- return this.isBundled(entry)
- }
- // if (this.bundled) return true
-
- return Packer.prototype.applyIgnores.call(this, entry, partial, entryObj)
-}
-
-function nameMatch (name) { return function (other) { return name === moduleName(other) } }
-
-function pack_ (tarball, folder, tree, pkg, cb) {
- function InstancePacker (props) {
- BundledPacker.call(this, props)
- }
- inherits(InstancePacker, BundledPacker)
- InstancePacker.prototype.isBundled = function (name) {
- var bd = this.package && this.package.bundleDependencies
- if (!bd) return false
-
- if (!Array.isArray(bd)) {
- throw new Error(packageId(this) + '\'s `bundledDependencies` should ' +
- 'be an array')
- }
- if (!tree) return false
-
- if (bd.indexOf(name) !== -1) return true
- var pkg = tree.children.filter(nameMatch(name))[0]
- if (!pkg) return false
- var requiredBy = [].concat(pkg.requiredBy)
- var seen = new Set()
- while (requiredBy.length) {
- var reqPkg = requiredBy.shift()
- if (seen.has(reqPkg)) continue
- seen.add(reqPkg)
- if (!reqPkg) continue
- if (reqPkg.parent === tree && bd.indexOf(moduleName(reqPkg)) !== -1) {
- return true
- }
- requiredBy = union(requiredBy, reqPkg.requiredBy)
- }
- return false
- }
-
- new InstancePacker({ path: folder, type: 'Directory', isDirectory: true })
- .on('error', function (er) {
- if (er) log.error('tar pack', 'Error reading ' + folder)
- return cb(er)
- })
-
- // By default, npm includes some proprietary attributes in the
- // package tarball. This is sane, and allowed by the spec.
- // However, npm *itself* excludes these from its own package,
- // so that it can be more easily bootstrapped using old and
- // non-compliant tar implementations.
- .pipe(tar.Pack({ noProprietary: !npm.config.get('proprietary-attribs') }))
- .on('error', function (er) {
- if (er) log.error('tar.pack', 'tar creation error', tarball)
- cb(er)
- })
- .pipe(zlib.Gzip())
- .on('error', function (er) {
- if (er) log.error('tar.pack', 'gzip error ' + tarball)
- cb(er)
- })
- .pipe(writeStreamAtomic(tarball))
- .on('error', function (er) {
- if (er) log.error('tar.pack', 'Could not write ' + tarball)
- cb(er)
- })
- .on('close', cb)
-}
-
-function unpack (tarball, unpackTarget, dMode, fMode, uid, gid, cb) {
- log.verbose('tar', 'unpack', tarball)
- log.verbose('tar', 'unpacking to', unpackTarget)
- if (typeof cb !== 'function') {
- cb = gid
- gid = null
- }
- if (typeof cb !== 'function') {
- cb = uid
- uid = null
- }
- if (typeof cb !== 'function') {
- cb = fMode
- fMode = npm.modes.file
- }
- if (typeof cb !== 'function') {
- cb = dMode
- dMode = npm.modes.exec
- }
-
- uidNumber(uid, gid, function (er, uid, gid) {
- if (er) return cb(er)
- unpack_(tarball, unpackTarget, dMode, fMode, uid, gid, cb)
- })
-}
-
-function unpack_ (tarball, unpackTarget, dMode, fMode, uid, gid, cb) {
- rm(unpackTarget, function (er) {
- if (er) return cb(er)
- // gzip {tarball} --decompress --stdout \
- // | tar -mvxpf - --strip-components=1 -C {unpackTarget}
- gunzTarPerm(tarball, unpackTarget,
- dMode, fMode,
- uid, gid,
- function (er, folder) {
- if (er) return cb(er)
- readJson(path.resolve(folder, 'package.json'), cb)
- })
- })
-}
-
-function gunzTarPerm (tarball, target, dMode, fMode, uid, gid, cb_) {
- if (!dMode) dMode = npm.modes.exec
- if (!fMode) fMode = npm.modes.file
- log.silly('gunzTarPerm', 'modes', [dMode.toString(8), fMode.toString(8)])
-
- var cbCalled = false
- function cb (er) {
- if (cbCalled) return
- cbCalled = true
- cb_(er, target)
- }
-
- var fst = fs.createReadStream(tarball)
-
- fst.on('open', function (fd) {
- fs.fstat(fd, function (er, st) {
- if (er) return fst.emit('error', er)
- if (st.size === 0) {
- er = new Error('0-byte tarball\n' +
- 'Please run `npm cache clean`')
- fst.emit('error', er)
- }
- })
- })
-
- // figure out who we're supposed to be, if we're not pretending
- // to be a specific user.
- if (npm.config.get('unsafe-perm') && process.platform !== 'win32') {
- uid = myUid
- gid = myGid
- }
-
- function extractEntry (entry) {
- log.silly('gunzTarPerm', 'extractEntry', entry.path)
- // never create things that are user-unreadable,
- // or dirs that are user-un-listable. Only leads to headaches.
- var originalMode = entry.mode = entry.mode || entry.props.mode
- entry.mode = entry.mode | (entry.type === 'Directory' ? dMode : fMode)
- entry.mode = entry.mode & (~npm.modes.umask)
- entry.props.mode = entry.mode
- if (originalMode !== entry.mode) {
- log.silly('gunzTarPerm', 'modified mode',
- [entry.path, originalMode, entry.mode])
- }
-
- // if there's a specific owner uid/gid that we want, then set that
- if (process.platform !== 'win32' &&
- typeof uid === 'number' &&
- typeof gid === 'number') {
- entry.props.uid = entry.uid = uid
- entry.props.gid = entry.gid = gid
- }
- }
-
- var extractOpts = { type: 'Directory', path: target, strip: 1 }
-
- if (process.platform !== 'win32' &&
- typeof uid === 'number' &&
- typeof gid === 'number') {
- extractOpts.uid = uid
- extractOpts.gid = gid
- }
-
- var sawIgnores = {}
- extractOpts.filter = function () {
- // symbolic links are not allowed in packages.
- if (this.type.match(/^.*Link$/)) {
- log.warn('excluding symbolic link',
- this.path.substr(target.length + 1) +
- ' -> ' + this.linkpath)
- return false
- }
-
- // Note: This mirrors logic in the fs read operations that are
- // employed during tarball creation, in the fstream-npm module.
- // It is duplicated here to handle tarballs that are created
- // using other means, such as system tar or git archive.
- if (this.type === 'File') {
- var base = path.basename(this.path)
- if (base === '.npmignore') {
- sawIgnores[ this.path ] = true
- } else if (base === '.gitignore') {
- var npmignore = this.path.replace(/\.gitignore$/, '.npmignore')
- if (sawIgnores[npmignore]) {
- // Skip this one, already seen.
- return false
- } else {
- // Rename, may be clobbered later.
- this.path = npmignore
- this._path = npmignore
- }
- }
- }
-
- return true
- }
-
- fst
- .on('error', function (er) {
- if (er) log.error('tar.unpack', 'error reading ' + tarball)
- cb(er)
- })
- .on('data', function OD (c) {
- // detect what it is.
- // Then, depending on that, we'll figure out whether it's
- // a single-file module, gzipped tarball, or naked tarball.
- // gzipped files all start with 1f8b08
- if (c[0] === 0x1F &&
- c[1] === 0x8B &&
- c[2] === 0x08) {
- fst
- .pipe(zlib.Unzip())
- .on('error', function (er) {
- if (er) log.error('tar.unpack', 'unzip error ' + tarball)
- cb(er)
- })
- .pipe(tar.Extract(extractOpts))
- .on('entry', extractEntry)
- .on('error', function (er) {
- if (er) log.error('tar.unpack', 'untar error ' + tarball)
- cb(er)
- })
- .on('close', cb)
- } else if (hasTarHeader(c)) {
- // naked tar
- fst
- .pipe(tar.Extract(extractOpts))
- .on('entry', extractEntry)
- .on('error', function (er) {
- if (er) log.error('tar.unpack', 'untar error ' + tarball)
- cb(er)
- })
- .on('close', cb)
- } else {
- // naked js file
- var jsOpts = { path: path.resolve(target, 'index.js') }
-
- if (process.platform !== 'win32' &&
- typeof uid === 'number' &&
- typeof gid === 'number') {
- jsOpts.uid = uid
- jsOpts.gid = gid
- }
-
- fst
- .pipe(fstream.Writer(jsOpts))
- .on('error', function (er) {
- if (er) log.error('tar.unpack', 'copy error ' + tarball)
- cb(er)
- })
- .on('close', function () {
- var j = path.resolve(target, 'package.json')
- readJson(j, function (er, d) {
- if (er) {
- log.error('not a package', tarball)
- return cb(er)
- }
- writeFileAtomic(j, JSON.stringify(d) + '\n', cb)
- })
- })
- }
-
- // now un-hook, and re-emit the chunk
- fst.removeListener('data', OD)
- fst.emit('data', c)
- })
-}
-
-function hasTarHeader (c) {
- return c[257] === 0x75 && // tar archives have 7573746172 at position
- c[258] === 0x73 && // 257 and 003030 or 202000 at position 262
- c[259] === 0x74 &&
- c[260] === 0x61 &&
- c[261] === 0x72 &&
-
- ((c[262] === 0x00 &&
- c[263] === 0x30 &&
- c[264] === 0x30) ||
-
- (c[262] === 0x20 &&
- c[263] === 0x20 &&
- c[264] === 0x00))
-}
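For reference, the core of the deleted unpack path was a one-chunk
content sniff: gzip members begin with the bytes 1f 8b 08, POSIX tar
archives carry the "ustar" magic at offset 257, and anything else was
treated as a naked .js file. A standalone sketch of that detection
(simplified to the "ustar" magic alone, without the version-field
comparison the deleted hasTarHeader also performed):

    function sniff (chunk) {
      // gzip magic: 1f 8b, followed by the deflate method byte 08
      if (chunk[0] === 0x1F && chunk[1] === 0x8B && chunk[2] === 0x08) return 'gzip'
      // "ustar" magic at byte offset 257 of the first tar header block
      if (chunk.length > 261 &&
          chunk.slice(257, 262).toString('ascii') === 'ustar') return 'tar'
      return 'naked-js' // the old code wrote such input to index.js
    }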
diff --git a/deps/npm/lib/utils/unsupported.js b/deps/npm/lib/utils/unsupported.js
index 91f494f4be88f0..35a0d5b310be57 100644
--- a/deps/npm/lib/utils/unsupported.js
+++ b/deps/npm/lib/utils/unsupported.js
@@ -1,13 +1,19 @@
'use strict'
var semver = require('semver')
-var supportedNode = '>= 4'
-var knownBroken = '>=0.1 <=0.7'
+var supportedNode = [
+ {ver: '4', min: '4.7.0'},
+ {ver: '6', min: '6.0.0'},
+ {ver: '7', min: '7.0.0'},
+ {ver: '8', min: '8.0.0'}
+]
+var knownBroken = '<4.7.0'
var checkVersion = exports.checkVersion = function (version) {
var versionNoPrerelease = version.replace(/-.*$/, '')
return {
+ version: versionNoPrerelease,
broken: semver.satisfies(versionNoPrerelease, knownBroken),
- unsupported: !semver.satisfies(versionNoPrerelease, supportedNode)
+ unsupported: !semver.satisfies(versionNoPrerelease, supportedNode.map(function (n) { return '^' + n.min }).join('||'))
}
}
@@ -15,8 +21,18 @@ exports.checkForBrokenNode = function () {
var nodejs = checkVersion(process.version)
if (nodejs.broken) {
console.error('ERROR: npm is known not to run on Node.js ' + process.version)
+ supportedNode.forEach(function (rel) {
+ if (semver.satisfies(nodejs.version, rel.ver)) {
+ console.error('Node.js ' + rel.ver + " is supported but the specific version you're running has")
+      console.error('a bug known to break npm. Please update to at least ' + rel.min + ' to use this')

+ console.error('version of npm. You can find the latest release of Node.js at https://nodejs.org/')
+ process.exit(1)
+ }
+ })
+ var supportedMajors = supportedNode.map(function (n) { return n.ver }).join(', ')
console.error("You'll need to upgrade to a newer version in order to use this")
- console.error('version of npm. You can find the latest version at https://nodejs.org/')
+ console.error('version of npm. Supported versions are ' + supportedMajors + '. You can find the')
+ console.error('latest version at https://nodejs.org/')
process.exit(1)
}
}
@@ -25,9 +41,11 @@ exports.checkForUnsupportedNode = function () {
var nodejs = checkVersion(process.version)
if (nodejs.unsupported) {
var log = require('npmlog')
+ var supportedMajors = supportedNode.map(function (n) { return n.ver }).join(', ')
log.warn('npm', 'npm does not support Node.js ' + process.version)
log.warn('npm', 'You should probably upgrade to a newer version of node as we')
log.warn('npm', "can't make any promises that npm will work with this version.")
+ log.warn('npm', 'Supported releases of Node.js are the latest release of ' + supportedMajors + '.')
log.warn('npm', 'You can find the latest version at https://nodejs.org/')
}
}
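With the table above, checkVersion now reports the prerelease-stripped
version it evaluated, and the supported range is the union of the
per-major minimums (^4.7.0 || ^6.0.0 || ^7.0.0 || ^8.0.0). The expected
results follow directly from those ranges:

    checkVersion('v4.6.0')
    // => { version: 'v4.6.0', broken: true, unsupported: true }   (below 4.7.0)
    checkVersion('v4.8.4')
    // => { version: 'v4.8.4', broken: false, unsupported: false } (matches ^4.7.0)
    checkVersion('v9.0.0-pre')
    // => { version: 'v9.0.0', broken: false, unsupported: true }  (no matching major)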
diff --git a/deps/npm/lib/version.js b/deps/npm/lib/version.js
index c52b5158a03398..edcd664f2a7c4e 100644
--- a/deps/npm/lib/version.js
+++ b/deps/npm/lib/version.js
@@ -278,14 +278,22 @@ function checkGit (localData, cb) {
})
}
+module.exports.buildCommitArgs = buildCommitArgs
+function buildCommitArgs (args) {
+ args = args || [ 'commit' ]
+ if (!npm.config.get('commit-hooks')) args.push('-n')
+ return args
+}
+
function _commit (version, localData, cb) {
const options = { env: process.env }
const message = npm.config.get('message').replace(/%s/g, version)
const sign = npm.config.get('sign-git-tag')
+ const commitArgs = buildCommitArgs([ 'commit', '-m', message ])
const flagForTag = sign ? '-sm' : '-am'
stagePackageFiles(localData, options).then(() => {
- return git.exec([ 'commit', '-m', message ], options)
+ return git.exec(commitArgs, options)
}).then(() => {
if (!localData.existingTag) {
return git.exec([
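The new buildCommitArgs helper centralizes the commit-hooks toggle:
when the flag is false it appends git's -n (--no-verify) so the version
commit skips commit hooks. A sketch of the resulting argv under each
setting (the config values are the only assumption):

    // With commit-hooks=true (the default):
    buildCommitArgs(['commit', '-m', 'v1.2.3'])  // => ['commit', '-m', 'v1.2.3']

    // With commit-hooks=false:
    buildCommitArgs(['commit', '-m', 'v1.2.3'])  // => ['commit', '-m', 'v1.2.3', '-n']
    buildCommitArgs()                            // => ['commit', '-n']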
diff --git a/deps/npm/man/man1/npm-README.1 b/deps/npm/man/man1/npm-README.1
index 7d44d1c1510c70..2b7747bbd20ae1 100644
--- a/deps/npm/man/man1/npm-README.1
+++ b/deps/npm/man/man1/npm-README.1
@@ -1,4 +1,4 @@
-.TH "NPM" "1" "July 2017" "" ""
+.TH "NPM" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm\fR \- a JavaScript package manager
.P
@@ -143,12 +143,12 @@ this means that future npm installs will not remember the settings that
you have chosen\.
.SH More Docs
.P
-Check out the docs \fIhttps://docs\.npmjs\.com/\fR,
+Check out the docs \fIhttps://docs\.npmjs\.com/\fR\|\.
.P
You can use the \fBnpm help\fP command to read any of them\.
.P
If you're a developer, and you want to use npm to publish your program,
-you should read this \fIhttps://docs\.npmjs\.com/misc/developers\fR
+you should read this \fIhttps://docs\.npmjs\.com/misc/developers\fR\|\.
.SH BUGS
.P
When you find issues, please report them:
diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1
index 2f86c385448760..41f8a556a42a1d 100644
--- a/deps/npm/man/man1/npm-access.1
+++ b/deps/npm/man/man1/npm-access.1
@@ -1,4 +1,4 @@
-.TH "NPM\-ACCESS" "1" "July 2017" "" ""
+.TH "NPM\-ACCESS" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-access\fR \- Set access level on published packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1
index b90fd9ebd52583..2f17d1c5befe11 100644
--- a/deps/npm/man/man1/npm-adduser.1
+++ b/deps/npm/man/man1/npm-adduser.1
@@ -1,4 +1,4 @@
-.TH "NPM\-ADDUSER" "1" "July 2017" "" ""
+.TH "NPM\-ADDUSER" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-adduser\fR \- Add a registry user account
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-bin.1 b/deps/npm/man/man1/npm-bin.1
index 9571db127a6eb9..0d5c11d02d2291 100644
--- a/deps/npm/man/man1/npm-bin.1
+++ b/deps/npm/man/man1/npm-bin.1
@@ -1,4 +1,4 @@
-.TH "NPM\-BIN" "1" "July 2017" "" ""
+.TH "NPM\-BIN" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-bin\fR \- Display npm bin folder
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1
index 7126fda85fc2e9..ec8994b5f962e0 100644
--- a/deps/npm/man/man1/npm-bugs.1
+++ b/deps/npm/man/man1/npm-bugs.1
@@ -1,4 +1,4 @@
-.TH "NPM\-BUGS" "1" "July 2017" "" ""
+.TH "NPM\-BUGS" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-bugs\fR \- Bugs for a package in a web browser maybe
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-build.1 b/deps/npm/man/man1/npm-build.1
index 909ea651051a05..794010d6899945 100644
--- a/deps/npm/man/man1/npm-build.1
+++ b/deps/npm/man/man1/npm-build.1
@@ -1,4 +1,4 @@
-.TH "NPM\-BUILD" "1" "July 2017" "" ""
+.TH "NPM\-BUILD" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-build\fR \- Build a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-bundle.1 b/deps/npm/man/man1/npm-bundle.1
index 522dcc1a366353..21189657fc4aea 100644
--- a/deps/npm/man/man1/npm-bundle.1
+++ b/deps/npm/man/man1/npm-bundle.1
@@ -1,4 +1,4 @@
-.TH "NPM\-BUNDLE" "1" "July 2017" "" ""
+.TH "NPM\-BUNDLE" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-bundle\fR \- REMOVED
.SH DESCRIPTION
diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1
index 9309a00a61290d..7e29a1007ecd00 100644
--- a/deps/npm/man/man1/npm-cache.1
+++ b/deps/npm/man/man1/npm-cache.1
@@ -1,4 +1,4 @@
-.TH "NPM\-CACHE" "1" "July 2017" "" ""
+.TH "NPM\-CACHE" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-cache\fR \- Manipulates packages cache
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1
index cd3343d7f66e19..a1f93bde258a2d 100644
--- a/deps/npm/man/man1/npm-completion.1
+++ b/deps/npm/man/man1/npm-completion.1
@@ -1,4 +1,4 @@
-.TH "NPM\-COMPLETION" "1" "July 2017" "" ""
+.TH "NPM\-COMPLETION" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-completion\fR \- Tab Completion for npm
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1
index e98d3b52be613f..daa2f68c975958 100644
--- a/deps/npm/man/man1/npm-config.1
+++ b/deps/npm/man/man1/npm-config.1
@@ -1,4 +1,4 @@
-.TH "NPM\-CONFIG" "1" "July 2017" "" ""
+.TH "NPM\-CONFIG" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-config\fR \- Manage the npm configuration files
.SH SYNOPSIS
@@ -8,7 +8,7 @@
npm config set [\-g|\-\-global]
npm config get
npm config delete
-npm config list [\-l]
+npm config list [\-l] [\-\-json]
npm config edit
npm get
npm set [\-g|\-\-global]
@@ -59,7 +59,8 @@ npm config list
.fi
.RE
.P
-Show all the config settings\. Use \fB\-l\fP to also show defaults\.
+Show all the config settings\. Use \fB\-l\fP to also show defaults\. Use \fB\-\-json\fP
+to show the settings in json format\.
.SS delete
.P
.RS 2
diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1
index ffce7a43283bb0..26b35f96f0e225 100644
--- a/deps/npm/man/man1/npm-dedupe.1
+++ b/deps/npm/man/man1/npm-dedupe.1
@@ -1,4 +1,4 @@
-.TH "NPM\-DEDUPE" "1" "July 2017" "" ""
+.TH "NPM\-DEDUPE" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-dedupe\fR \- Reduce duplication
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1
index f3f103bdf17a20..e0ae111ccb3dab 100644
--- a/deps/npm/man/man1/npm-deprecate.1
+++ b/deps/npm/man/man1/npm-deprecate.1
@@ -1,4 +1,4 @@
-.TH "NPM\-DEPRECATE" "1" "July 2017" "" ""
+.TH "NPM\-DEPRECATE" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-deprecate\fR \- Deprecate a version of a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1
index 3167e8e6cd7f37..e505c436716006 100644
--- a/deps/npm/man/man1/npm-dist-tag.1
+++ b/deps/npm/man/man1/npm-dist-tag.1
@@ -1,4 +1,4 @@
-.TH "NPM\-DIST\-TAG" "1" "July 2017" "" ""
+.TH "NPM\-DIST\-TAG" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-dist-tag\fR \- Modify package distribution tags
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1
index 84881635a07bb4..a34cf45d980ac5 100644
--- a/deps/npm/man/man1/npm-docs.1
+++ b/deps/npm/man/man1/npm-docs.1
@@ -1,4 +1,4 @@
-.TH "NPM\-DOCS" "1" "July 2017" "" ""
+.TH "NPM\-DOCS" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-docs\fR \- Docs for a package in a web browser maybe
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-doctor.1 b/deps/npm/man/man1/npm-doctor.1
index 7a0b057b75f9a6..e1b2311da60bfa 100644
--- a/deps/npm/man/man1/npm-doctor.1
+++ b/deps/npm/man/man1/npm-doctor.1
@@ -1,4 +1,4 @@
-.TH "NPM\-DOCTOR" "1" "July 2017" "" ""
+.TH "NPM\-DOCTOR" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-doctor\fR \- Check your environments
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1
index 63cdeb77ca5294..51f6fef5fed785 100644
--- a/deps/npm/man/man1/npm-edit.1
+++ b/deps/npm/man/man1/npm-edit.1
@@ -1,4 +1,4 @@
-.TH "NPM\-EDIT" "1" "July 2017" "" ""
+.TH "NPM\-EDIT" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-edit\fR \- Edit an installed package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1
index ac993ff4dd6c3d..a44d80b690bbe4 100644
--- a/deps/npm/man/man1/npm-explore.1
+++ b/deps/npm/man/man1/npm-explore.1
@@ -1,4 +1,4 @@
-.TH "NPM\-EXPLORE" "1" "July 2017" "" ""
+.TH "NPM\-EXPLORE" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-explore\fR \- Browse an installed package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1
index 0d7971406a751e..87676d3e7c6aec 100644
--- a/deps/npm/man/man1/npm-help-search.1
+++ b/deps/npm/man/man1/npm-help-search.1
@@ -1,4 +1,4 @@
-.TH "NPM\-HELP\-SEARCH" "1" "July 2017" "" ""
+.TH "NPM\-HELP\-SEARCH" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-help-search\fR \- Search npm help documentation
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1
index 224d45a35d1faa..8b9ba1d8392091 100644
--- a/deps/npm/man/man1/npm-help.1
+++ b/deps/npm/man/man1/npm-help.1
@@ -1,4 +1,4 @@
-.TH "NPM\-HELP" "1" "July 2017" "" ""
+.TH "NPM\-HELP" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-help\fR \- Get help on npm
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1
index 38bf391be8e71a..ab25fc4b8b3880 100644
--- a/deps/npm/man/man1/npm-init.1
+++ b/deps/npm/man/man1/npm-init.1
@@ -1,4 +1,4 @@
-.TH "NPM\-INIT" "1" "July 2017" "" ""
+.TH "NPM\-INIT" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-init\fR \- Interactively create a package\.json file
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1
index af1356d6093f7c..41fb9d3e312e95 100644
--- a/deps/npm/man/man1/npm-install-test.1
+++ b/deps/npm/man/man1/npm-install-test.1
@@ -1,4 +1,4 @@
-.TH "NPM" "" "July 2017" "" ""
+.TH "NPM" "" "September 2017" "" ""
.SH "NAME"
\fBnpm\fR
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1
index b33c7e8f8ec40b..691b0bd36c640d 100644
--- a/deps/npm/man/man1/npm-install.1
+++ b/deps/npm/man/man1/npm-install.1
@@ -1,4 +1,4 @@
-.TH "NPM\-INSTALL" "1" "July 2017" "" ""
+.TH "NPM\-INSTALL" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-install\fR \- Install a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1
index a1fa966c0f61c2..8560a204e606da 100644
--- a/deps/npm/man/man1/npm-link.1
+++ b/deps/npm/man/man1/npm-link.1
@@ -1,4 +1,4 @@
-.TH "NPM\-LINK" "1" "July 2017" "" ""
+.TH "NPM\-LINK" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-link\fR \- Symlink a package folder
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1
index 74c22144648bfe..54e3d0841b6950 100644
--- a/deps/npm/man/man1/npm-logout.1
+++ b/deps/npm/man/man1/npm-logout.1
@@ -1,4 +1,4 @@
-.TH "NPM\-LOGOUT" "1" "July 2017" "" ""
+.TH "NPM\-LOGOUT" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-logout\fR \- Log out of the registry
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1
index 5c6744e450035d..1933770051ebd0 100644
--- a/deps/npm/man/man1/npm-ls.1
+++ b/deps/npm/man/man1/npm-ls.1
@@ -1,4 +1,4 @@
-.TH "NPM\-LS" "1" "July 2017" "" ""
+.TH "NPM\-LS" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-ls\fR \- List installed packages
.SH SYNOPSIS
@@ -22,7 +22,7 @@ For example, running \fBnpm ls promzard\fP in npm's source tree will show:
.P
.RS 2
.nf
-npm@5.3.0 /path/to/npm
+npm@5.4.2 /path/to/npm
└─┬ init\-package\-json@0\.0\.4
└── promzard@0\.1\.5
.fi
diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1
index ccd38acb170afa..d2f1655c5096b0 100644
--- a/deps/npm/man/man1/npm-outdated.1
+++ b/deps/npm/man/man1/npm-outdated.1
@@ -1,4 +1,4 @@
-.TH "NPM\-OUTDATED" "1" "July 2017" "" ""
+.TH "NPM\-OUTDATED" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-outdated\fR \- Check for outdated packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1
index 62be1bdcd03d61..d6974989f35a56 100644
--- a/deps/npm/man/man1/npm-owner.1
+++ b/deps/npm/man/man1/npm-owner.1
@@ -1,4 +1,4 @@
-.TH "NPM\-OWNER" "1" "July 2017" "" ""
+.TH "NPM\-OWNER" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-owner\fR \- Manage package owners
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1
index 2e70602351b2ee..c5392a3cd4466b 100644
--- a/deps/npm/man/man1/npm-pack.1
+++ b/deps/npm/man/man1/npm-pack.1
@@ -1,4 +1,4 @@
-.TH "NPM\-PACK" "1" "July 2017" "" ""
+.TH "NPM\-PACK" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-pack\fR \- Create a tarball from a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1
index 33693a1a5c3942..842f12524101ff 100644
--- a/deps/npm/man/man1/npm-ping.1
+++ b/deps/npm/man/man1/npm-ping.1
@@ -1,4 +1,4 @@
-.TH "NPM\-PING" "1" "July 2017" "" ""
+.TH "NPM\-PING" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-ping\fR \- Ping npm registry
.SH SYNOPSIS
@@ -11,6 +11,21 @@ npm ping [\-\-registry ]
.SH DESCRIPTION
.P
Ping the configured or given npm registry and verify authentication\.
+If it works, it will output something like:
+.P
+.RS 2
+.nf
+Ping success: {*Details about registry*}
+.fi
+.RE
+.P
+otherwise you will get:
+.P
+.RS 2
+.nf
+Ping error: {*Detail about error*}
+.fi
+.RE
.SH SEE ALSO
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1
index 27fb7e9ef49b7a..eef01099a15bf2 100644
--- a/deps/npm/man/man1/npm-prefix.1
+++ b/deps/npm/man/man1/npm-prefix.1
@@ -1,4 +1,4 @@
-.TH "NPM\-PREFIX" "1" "July 2017" "" ""
+.TH "NPM\-PREFIX" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-prefix\fR \- Display prefix
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1
index 3317b213c2a1a7..1b03557681d0db 100644
--- a/deps/npm/man/man1/npm-prune.1
+++ b/deps/npm/man/man1/npm-prune.1
@@ -1,4 +1,4 @@
-.TH "NPM\-PRUNE" "1" "July 2017" "" ""
+.TH "NPM\-PRUNE" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-prune\fR \- Remove extraneous packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1
index 1529f1b2f31f13..84de9078dd0fd7 100644
--- a/deps/npm/man/man1/npm-publish.1
+++ b/deps/npm/man/man1/npm-publish.1
@@ -1,4 +1,4 @@
-.TH "NPM\-PUBLISH" "1" "July 2017" "" ""
+.TH "NPM\-PUBLISH" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-publish\fR \- Publish a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1
index 15bcaf92d3469f..e1c87dae4b4bf8 100644
--- a/deps/npm/man/man1/npm-rebuild.1
+++ b/deps/npm/man/man1/npm-rebuild.1
@@ -1,4 +1,4 @@
-.TH "NPM\-REBUILD" "1" "July 2017" "" ""
+.TH "NPM\-REBUILD" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-rebuild\fR \- Rebuild a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1
index a1732297cecdde..6ba81342188fcc 100644
--- a/deps/npm/man/man1/npm-repo.1
+++ b/deps/npm/man/man1/npm-repo.1
@@ -1,4 +1,4 @@
-.TH "NPM\-REPO" "1" "July 2017" "" ""
+.TH "NPM\-REPO" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-repo\fR \- Open package repository page in the browser
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1
index bb78c0248283a8..3264fd68989af0 100644
--- a/deps/npm/man/man1/npm-restart.1
+++ b/deps/npm/man/man1/npm-restart.1
@@ -1,4 +1,4 @@
-.TH "NPM\-RESTART" "1" "July 2017" "" ""
+.TH "NPM\-RESTART" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-restart\fR \- Restart a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1
index bc704262e4e5c5..aa672c69c756c8 100644
--- a/deps/npm/man/man1/npm-root.1
+++ b/deps/npm/man/man1/npm-root.1
@@ -1,4 +1,4 @@
-.TH "NPM\-ROOT" "1" "July 2017" "" ""
+.TH "NPM\-ROOT" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-root\fR \- Display npm root
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1
index 6219b27a3c4f67..9de56ff7c5d920 100644
--- a/deps/npm/man/man1/npm-run-script.1
+++ b/deps/npm/man/man1/npm-run-script.1
@@ -1,4 +1,4 @@
-.TH "NPM\-RUN\-SCRIPT" "1" "July 2017" "" ""
+.TH "NPM\-RUN\-SCRIPT" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-run-script\fR \- Run arbitrary package scripts
.SH SYNOPSIS
@@ -34,7 +34,7 @@ and not to any pre or post script\.
.P
The \fBenv\fP script is a special built\-in command that can be used to list
environment variables that will be available to the script at runtime\. If an
-"env" command is defined in your package it will take precedence over the
+"env" command is defined in your package, it will take precedence over the
built\-in\.
.P
In addition to the shell's pre\-existing \fBPATH\fP, \fBnpm run\fP adds
@@ -49,7 +49,21 @@ you should write:
.fi
.RE
.P
-instead of \fB"scripts": {"test": "node_modules/\.bin/tap test/\\*\.js"}\fP to run your tests\.
+instead of
+.P
+.RS 2
+.nf
+"scripts": {"test": "node_modules/\.bin/tap test/\\*\.js"}
+.fi
+.RE
+.P
+to run your tests\.
+.P
+Scripts are run from the root of the module, regardless of what your current
+working directory is when you call \fBnpm run\fP\|\. If you want your script to
+use different behavior based on what subdirectory you're in, you can use the
+\fBINIT_CWD\fP environment variable, which holds the full path you were in when
+you ran \fBnpm run\fP\|\.
.P
\fBnpm run\fP sets the \fBNODE\fP environment variable to the \fBnode\fP executable with
which \fBnpm\fP is executed\. Also, if the \fB\-\-scripts\-prepend\-node\-path\fP is passed,
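The INIT_CWD passage added above is easiest to see from inside a
script. A hypothetical scripts/where.js (the file and script name are
illustrative, not part of this change):

    // npm run executes scripts with the package root as the working
    // directory; INIT_CWD preserves where the user actually invoked npm.
    console.log('package root:', process.cwd())
    console.log('invoked from:', process.env.INIT_CWD)

Invoked from a subdirectory via a matching "where" entry in "scripts",
the first line prints the package root and the second prints that
subdirectory.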
diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1
index 190b75a34e44e6..f6f1b944142dd0 100644
--- a/deps/npm/man/man1/npm-search.1
+++ b/deps/npm/man/man1/npm-search.1
@@ -1,4 +1,4 @@
-.TH "NPM\-SEARCH" "1" "July 2017" "" ""
+.TH "NPM\-SEARCH" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-search\fR \- Search for packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1
index ad4a78d3e7d5f7..770f8820a9cf9e 100644
--- a/deps/npm/man/man1/npm-shrinkwrap.1
+++ b/deps/npm/man/man1/npm-shrinkwrap.1
@@ -1,4 +1,4 @@
-.TH "NPM\-SHRINKWRAP" "1" "July 2017" "" ""
+.TH "NPM\-SHRINKWRAP" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-shrinkwrap\fR \- Lock down dependency versions for publication
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1
index b325ff0ea1e423..619817c80f1be8 100644
--- a/deps/npm/man/man1/npm-star.1
+++ b/deps/npm/man/man1/npm-star.1
@@ -1,4 +1,4 @@
-.TH "NPM\-STAR" "1" "July 2017" "" ""
+.TH "NPM\-STAR" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-star\fR \- Mark your favorite packages
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1
index 92e5bf7a4e6a20..3f7a1726475d4d 100644
--- a/deps/npm/man/man1/npm-stars.1
+++ b/deps/npm/man/man1/npm-stars.1
@@ -1,4 +1,4 @@
-.TH "NPM\-STARS" "1" "July 2017" "" ""
+.TH "NPM\-STARS" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-stars\fR \- View packages marked as favorites
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1
index df0e1cd65b5575..399400e50c9367 100644
--- a/deps/npm/man/man1/npm-start.1
+++ b/deps/npm/man/man1/npm-start.1
@@ -1,4 +1,4 @@
-.TH "NPM\-START" "1" "July 2017" "" ""
+.TH "NPM\-START" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-start\fR \- Start a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1
index 30f2b67fc124da..f9f551cfd7d270 100644
--- a/deps/npm/man/man1/npm-stop.1
+++ b/deps/npm/man/man1/npm-stop.1
@@ -1,4 +1,4 @@
-.TH "NPM\-STOP" "1" "July 2017" "" ""
+.TH "NPM\-STOP" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-stop\fR \- Stop a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-team.1 b/deps/npm/man/man1/npm-team.1
index 1d398c9fdc14d8..65c21840a2f5cb 100644
--- a/deps/npm/man/man1/npm-team.1
+++ b/deps/npm/man/man1/npm-team.1
@@ -1,4 +1,4 @@
-.TH "NPM\-TEAM" "1" "July 2017" "" ""
+.TH "NPM\-TEAM" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-team\fR \- Manage organization teams and team memberships
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1
index a17682b38ea2a7..105056b38f8357 100644
--- a/deps/npm/man/man1/npm-test.1
+++ b/deps/npm/man/man1/npm-test.1
@@ -1,4 +1,4 @@
-.TH "NPM\-TEST" "1" "July 2017" "" ""
+.TH "NPM\-TEST" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-test\fR \- Test a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1
index 0863df058d4f26..bb22a4059b3565 100644
--- a/deps/npm/man/man1/npm-uninstall.1
+++ b/deps/npm/man/man1/npm-uninstall.1
@@ -1,4 +1,4 @@
-.TH "NPM\-UNINSTALL" "1" "July 2017" "" ""
+.TH "NPM\-UNINSTALL" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-uninstall\fR \- Remove a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1
index ce7f2147679fe9..53e0bd8ca017d4 100644
--- a/deps/npm/man/man1/npm-unpublish.1
+++ b/deps/npm/man/man1/npm-unpublish.1
@@ -1,4 +1,4 @@
-.TH "NPM\-UNPUBLISH" "1" "July 2017" "" ""
+.TH "NPM\-UNPUBLISH" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-unpublish\fR \- Remove a package from the registry
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1
index 24b07a1fd0b68e..dfb3dd31aca42a 100644
--- a/deps/npm/man/man1/npm-update.1
+++ b/deps/npm/man/man1/npm-update.1
@@ -1,4 +1,4 @@
-.TH "NPM\-UPDATE" "1" "July 2017" "" ""
+.TH "NPM\-UPDATE" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-update\fR \- Update a package
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1
index de93869d33704c..bc3188b6369429 100644
--- a/deps/npm/man/man1/npm-version.1
+++ b/deps/npm/man/man1/npm-version.1
@@ -1,4 +1,4 @@
-.TH "NPM\-VERSION" "1" "July 2017" "" ""
+.TH "NPM\-VERSION" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-version\fR \- Bump a package version
.SH SYNOPSIS
@@ -121,6 +121,16 @@ Type: Boolean
.RE
.P
Commit and tag the version change\.
+.SS commit\-hooks
+.RS 0
+.IP \(bu 2
+Default: true
+.IP \(bu 2
+Type: Boolean
+
+.RE
+.P
+Run git commit hooks when committing the version change\.
.SS sign\-git\-tag
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1
index 503f71a9eceab7..6c5447dc6bc89d 100644
--- a/deps/npm/man/man1/npm-view.1
+++ b/deps/npm/man/man1/npm-view.1
@@ -1,4 +1,4 @@
-.TH "NPM\-VIEW" "1" "July 2017" "" ""
+.TH "NPM\-VIEW" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-view\fR \- View registry info
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1
index 180b949747a6db..598deebf15475a 100644
--- a/deps/npm/man/man1/npm-whoami.1
+++ b/deps/npm/man/man1/npm-whoami.1
@@ -1,4 +1,4 @@
-.TH "NPM\-WHOAMI" "1" "July 2017" "" ""
+.TH "NPM\-WHOAMI" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-whoami\fR \- Display npm username
.SH SYNOPSIS
diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1
index a626190edd9608..2c799994dd47f5 100644
--- a/deps/npm/man/man1/npm.1
+++ b/deps/npm/man/man1/npm.1
@@ -1,4 +1,4 @@
-.TH "NPM" "1" "July 2017" "" ""
+.TH "NPM" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm\fR \- javascript package manager
.SH SYNOPSIS
@@ -10,7 +10,7 @@ npm [args]
.RE
.SH VERSION
.P
-5.3.0
+5.4.2
.SH DESCRIPTION
.P
npm is the package manager for the Node JavaScript platform\. It puts
diff --git a/deps/npm/man/man1/npx.1 b/deps/npm/man/man1/npx.1
index f64614d36ec028..aeb74cf012e86d 100644
--- a/deps/npm/man/man1/npx.1
+++ b/deps/npm/man/man1/npx.1
@@ -1,4 +1,4 @@
-.TH "NPX" "1" "July 2017" "npx@9.1.0" "User Commands"
+.TH "NPX" "1" "August 2017" "npx@9.5.0" "User Commands"
.SH "NAME"
\fBnpx\fR \- execute npm package binaries
.SH SYNOPSIS
diff --git a/deps/npm/man/man5/npm-folders.5 b/deps/npm/man/man5/npm-folders.5
index 2442ef30567171..5b377f1c73f93b 100644
--- a/deps/npm/man/man5/npm-folders.5
+++ b/deps/npm/man/man5/npm-folders.5
@@ -1,4 +1,4 @@
-.TH "NPM\-FOLDERS" "5" "July 2017" "" ""
+.TH "NPM\-FOLDERS" "5" "September 2017" "" ""
.SH "NAME"
\fBnpm-folders\fR \- Folder Structures Used by npm
.SH DESCRIPTION
diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5
index 2442ef30567171..5b377f1c73f93b 100644
--- a/deps/npm/man/man5/npm-global.5
+++ b/deps/npm/man/man5/npm-global.5
@@ -1,4 +1,4 @@
-.TH "NPM\-FOLDERS" "5" "July 2017" "" ""
+.TH "NPM\-FOLDERS" "5" "September 2017" "" ""
.SH "NAME"
\fBnpm-folders\fR \- Folder Structures Used by npm
.SH DESCRIPTION
diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5
index df570a7e1bcee2..1aa78b2621a04a 100644
--- a/deps/npm/man/man5/npm-json.5
+++ b/deps/npm/man/man5/npm-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE\.JSON" "5" "July 2017" "" ""
+.TH "PACKAGE\.JSON" "5" "September 2017" "" ""
.SH "NAME"
\fBpackage.json\fR \- Specifics of npm's package\.json handling
.SH DESCRIPTION
@@ -207,14 +207,23 @@ Both email and url are optional either way\.
npm also sets a top\-level "maintainers" field with your npm user info\.
.SH files
.P
-The "files" field is an array of files to include in your project\. If
-you name a folder in the array, then it will also include the files
-inside that folder\. (Unless they would be ignored by another rule\.)
-.P
-You can also provide a "\.npmignore" file in the root of your package or
-in subdirectories, which will keep files from being included, even
-if they would be picked up by the files array\. The \fB\|\.npmignore\fP file
-works just like a \fB\|\.gitignore\fP\|\.
+The optional "files" field is an array of file patterns that describes
+the entries to be included when your package is installed as a
+dependency\. If the files array is omitted, everything except
+automatically\-excluded files will be included in your publish\. If you
+name a folder in the array, then it will also include the files inside
+that folder (unless they would be ignored by another rule in this
+section)\.
+.P
+You can also provide a \fB\|\.npmignore\fP file in the root of your package or
+in subdirectories, which will keep files from being included\. At the
+root of your package it will not override the "files" field, but in
+subdirectories it will\. The \fB\|\.npmignore\fP file works just like a
+\fB\|\.gitignore\fP\|\. If there is a \fB\|\.gitignore\fP file, and \fB\|\.npmignore\fP is
+missing, \fB\|\.gitignore\fP\|'s contents will be used instead\.
+.P
+Files included with the "package\.json#files" field \fIcannot\fR be excluded
+through \fB\|\.npmignore\fP or \fB\|\.gitignore\fP\|\.
.P
Certain files are always included, regardless of settings:
.RS 0
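The reworked "files" semantics above compose as follows (a sketch; the
package name and layout are illustrative):

    {
      "name": "example-pkg",
      "version": "1.0.0",
      "main": "lib/index.js",
      "files": ["lib/", "bin/cli.js"]
    }

Here lib/, bin/cli.js, and the always-included files (package.json,
README, LICENSE) ship in the tarball. A root-level .npmignore cannot
remove anything matched by "files", but a .npmignore inside lib/ can
still exclude files within that folder.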
diff --git a/deps/npm/man/man5/npm-package-locks.5 b/deps/npm/man/man5/npm-package-locks.5
index df808f2e98ab83..55cffce21c8517 100644
--- a/deps/npm/man/man5/npm-package-locks.5
+++ b/deps/npm/man/man5/npm-package-locks.5
@@ -1,4 +1,4 @@
-.TH "NPM\-PACKAGE\-LOCKS" "5" "July 2017" "" ""
+.TH "NPM\-PACKAGE\-LOCKS" "5" "September 2017" "" ""
.SH "NAME"
\fBnpm-package-locks\fR \- An explanation of npm lockfiles
.SH DESCRIPTION
@@ -119,7 +119,7 @@ which will look something like this:
.RE
.P
This file describes an \fIexact\fR, and more importantly \fIreproducible\fR
-\fBnode_modules\fP tree\. Once it's present, and future installation will base its
+\fBnode_modules\fP tree\. Once it's present, any future installation will base its
work off this file, instead of recalculating dependency versions off
npm help 5 package\.json\.
.P
diff --git a/deps/npm/man/man5/npm-shrinkwrap.json.5 b/deps/npm/man/man5/npm-shrinkwrap.json.5
index 9188009c10ed5a..ae8f7d7a45205d 100644
--- a/deps/npm/man/man5/npm-shrinkwrap.json.5
+++ b/deps/npm/man/man5/npm-shrinkwrap.json.5
@@ -1,4 +1,4 @@
-.TH "NPM\-SHRINKWRAP\.JSON" "5" "July 2017" "" ""
+.TH "NPM\-SHRINKWRAP\.JSON" "5" "September 2017" "" ""
.SH "NAME"
\fBnpm-shrinkwrap.json\fR \- A publishable lockfile
.SH DESCRIPTION
diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5
index 4a35447e23bfe7..2ea230c3cf94b0 100644
--- a/deps/npm/man/man5/npmrc.5
+++ b/deps/npm/man/man5/npmrc.5
@@ -1,4 +1,4 @@
-.TH "NPMRC" "5" "July 2017" "" ""
+.TH "NPMRC" "5" "September 2017" "" ""
.SH "NAME"
\fBnpmrc\fR \- The npm config files
.SH DESCRIPTION
diff --git a/deps/npm/man/man5/package-lock.json.5 b/deps/npm/man/man5/package-lock.json.5
index 4e1f7b880a33e4..c84afffea937e4 100644
--- a/deps/npm/man/man5/package-lock.json.5
+++ b/deps/npm/man/man5/package-lock.json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE\-LOCK\.JSON" "5" "July 2017" "" ""
+.TH "PACKAGE\-LOCK\.JSON" "5" "September 2017" "" ""
.SH "NAME"
\fBpackage-lock.json\fR \- A manifestation of the manifest
.SH DESCRIPTION
diff --git a/deps/npm/man/man5/package.json.5 b/deps/npm/man/man5/package.json.5
index df570a7e1bcee2..1aa78b2621a04a 100644
--- a/deps/npm/man/man5/package.json.5
+++ b/deps/npm/man/man5/package.json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE\.JSON" "5" "July 2017" "" ""
+.TH "PACKAGE\.JSON" "5" "September 2017" "" ""
.SH "NAME"
\fBpackage.json\fR \- Specifics of npm's package\.json handling
.SH DESCRIPTION
@@ -207,14 +207,23 @@ Both email and url are optional either way\.
npm also sets a top\-level "maintainers" field with your npm user info\.
.SH files
.P
-The "files" field is an array of files to include in your project\. If
-you name a folder in the array, then it will also include the files
-inside that folder\. (Unless they would be ignored by another rule\.)
-.P
-You can also provide a "\.npmignore" file in the root of your package or
-in subdirectories, which will keep files from being included, even
-if they would be picked up by the files array\. The \fB\|\.npmignore\fP file
-works just like a \fB\|\.gitignore\fP\|\.
+The optional "files" field is an array of file patterns that describes
+the entries to be included when your package is installed as a
+dependency\. If the files array is omitted, everything except
+automatically\-excluded files will be included in your publish\. If you
+name a folder in the array, then it will also include the files inside
+that folder (unless they would be ignored by another rule in this
+section)\.
+.P
+You can also provide a \fB\|\.npmignore\fP file in the root of your package or
+in subdirectories, which will keep files from being included\. At the
+root of your package it will not override the "files" field, but in
+subdirectories it will\. The \fB\|\.npmignore\fP file works just like a
+\fB\|\.gitignore\fP\|\. If there is a \fB\|\.gitignore\fP file, and \fB\|\.npmignore\fP is
+missing, \fB\|\.gitignore\fP\|'s contents will be used instead\.
+.P
+Files included with the "package\.json#files" field \fIcannot\fR be excluded
+through \fB\|\.npmignore\fP or \fB\|\.gitignore\fP\|\.
.P
Certain files are always included, regardless of settings:
.RS 0
diff --git a/deps/npm/man/man7/npm-coding-style.7 b/deps/npm/man/man7/npm-coding-style.7
index bedeace4f3a215..6a4a9dd56b6a2c 100644
--- a/deps/npm/man/man7/npm-coding-style.7
+++ b/deps/npm/man/man7/npm-coding-style.7
@@ -1,4 +1,4 @@
-.TH "NPM\-CODING\-STYLE" "7" "July 2017" "" ""
+.TH "NPM\-CODING\-STYLE" "7" "September 2017" "" ""
.SH "NAME"
\fBnpm-coding-style\fR \- npm's "funny" coding style
.SH DESCRIPTION
diff --git a/deps/npm/man/man7/npm-config.7 b/deps/npm/man/man7/npm-config.7
index 07ab671f40625d..74b613df86ba32 100644
--- a/deps/npm/man/man7/npm-config.7
+++ b/deps/npm/man/man7/npm-config.7
@@ -1,4 +1,4 @@
-.TH "NPM\-CONFIG" "7" "July 2017" "" ""
+.TH "NPM\-CONFIG" "7" "September 2017" "" ""
.SH "NAME"
\fBnpm-config\fR \- More than you probably want to know about npm configuration
.SH DESCRIPTION
@@ -28,6 +28,9 @@ same\. However, please note that inside npm\-scripts \fI/misc/scripts\fR
npm will set its own environment variables and Node will prefer
those lowercase versions over any uppercase ones that you might set\.
For details see this issue \fIhttps://github\.com/npm/npm/issues/14528\fR\|\.
+.P
+Notice that you need to use underscores instead of dashes, so \fB\-\-allow\-same\-version\fP
+would become \fBnpm_config_allow_same_version=true\fP\|\.
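
As a concrete illustration of that mapping, here is a minimal sketch of a script run by npm (the file name `check-config.js` is hypothetical):

```js
// check-config.js -- invoked through an npm script such as "npm run check".
// npm passes config flags to scripts as lowercase npm_config_* environment
// variables, with dashes turned into underscores.
const allowSame = process.env.npm_config_allow_same_version === 'true'
console.log('allow-same-version is', allowSame ? 'enabled' : 'disabled')
```
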
.SS npmrc Files
.P
The four relevant files are:
@@ -253,7 +256,7 @@ Type: String, Array or null
.RE
.P
The Certificate Authority signing certificate that is trusted for SSL
-connections to the registry\. Values should be in PEM format with newlines
+connections to the registry\. Values should be in PEM format (Windows calls it "Base\-64 encoded X\.509 (\.CER)") with newlines
replaced by the string "\\n"\. For example:
.P
.RS 2
@@ -361,7 +364,7 @@ Type: String
.RE
.P
A client certificate to pass when accessing the registry\. Values should be in
-PEM format with newlines replaced by the string "\\n"\. For example:
+PEM format (Windows calls it "Base\-64 encoded X\.509 (\.CER)") with newlines replaced by the string "\\n"\. For example:
.P
.RS 2
.nf
@@ -541,6 +544,16 @@ Type: Boolean
.RE
.P
Tag the commit when using the \fBnpm version\fP command\.
+.SS commit\-hooks
+.RS 0
+.IP \(bu 2
+Default: \fBtrue\fP
+.IP \(bu 2
+Type: Boolean
+
+.RE
+.P
+Run git commit hooks when using the \fBnpm version\fP command\.
.SS global
.RS 0
.IP \(bu 2
@@ -802,19 +815,19 @@ to the npm registry\. Must be IPv4 in versions of Node prior to 0\.12\.
.SS loglevel
.RS 0
.IP \(bu 2
-Default: "warn"
+Default: "notice"
.IP \(bu 2
Type: String
.IP \(bu 2
-Values: "silent", "error", "warn", "http", "info", "verbose", "silly"
+Values: "silent", "error", "warn", "notice", "http", "timing", "info",
+"verbose", "silly"
.RE
.P
What level of logs to report\. On failure, \fIall\fR logs are written to
\fBnpm\-debug\.log\fP in the current working directory\.
.P
-Any logs of a higher level than the setting are shown\.
-The default is "warn", which shows warn and error output\.
+Any logs of a higher level than the setting are shown\. The default is "notice"\.
.SS logstream
.RS 0
.IP \(bu 2
@@ -1041,21 +1054,6 @@ When set to \fBtrue\fP, npm will display a progress bar during time intensive
operations, if \fBprocess\.stderr\fP is a TTY\.
.P
Set to \fBfalse\fP to suppress the progress bar\.
-.SS proprietary\-attribs
-.RS 0
-.IP \(bu 2
-Default: true
-.IP \(bu 2
-Type: Boolean
-
-.RE
-.P
-Whether or not to include proprietary extended attributes in the
-tarballs created by npm\.
-.P
-Unless you are expecting to unpack package tarballs with something other
-than npm \-\- particularly a very outdated tar implementation \-\- leave
-this as true\.
.SS proxy
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man7/npm-developers.7 b/deps/npm/man/man7/npm-developers.7
index ba6a0811c98ed7..fc1bf83f0157a6 100644
--- a/deps/npm/man/man7/npm-developers.7
+++ b/deps/npm/man/man7/npm-developers.7
@@ -1,4 +1,4 @@
-.TH "NPM\-DEVELOPERS" "7" "July 2017" "" ""
+.TH "NPM\-DEVELOPERS" "7" "September 2017" "" ""
.SH "NAME"
\fBnpm-developers\fR \- Developer Guide
.SH DESCRIPTION
@@ -175,6 +175,18 @@ The following paths and files are never ignored, so adding them to
\fBLICENSE\fP / \fBLICENCE\fP
.RE
+.P
+If, given the structure of your project, you find \fB\|\.npmignore\fP to be a
+maintenance headache, you might instead try populating the \fBfiles\fP
+property of \fBpackage\.json\fP, which is an array of file or directory names
+that should be included in your package\. Sometimes a whitelist is easier
+to manage than a blacklist\.
+.SS Testing whether your \fB\|\.npmignore\fP or \fBfiles\fP config works
+.P
+If you want to double check that your package will include only the files
+you intend it to when published, you can run the \fBnpm pack\fP command locally,
+which will generate a tarball in the working directory, the same way it
+does for publishing\.
.SH Link Packages
.P
\fBnpm link\fP is designed to install a development package and see the
diff --git a/deps/npm/man/man7/npm-disputes.7 b/deps/npm/man/man7/npm-disputes.7
index b7767f3a4abc2f..fe31814423e05c 100644
--- a/deps/npm/man/man7/npm-disputes.7
+++ b/deps/npm/man/man7/npm-disputes.7
@@ -1,4 +1,4 @@
-.TH "NPM\-DISPUTES" "7" "July 2017" "" ""
+.TH "NPM\-DISPUTES" "7" "September 2017" "" ""
.SH "NAME"
\fBnpm-disputes\fR \- Handling Module Name Disputes
.P
diff --git a/deps/npm/man/man7/npm-index.7 b/deps/npm/man/man7/npm-index.7
index 7568e29882fd44..ca2a0618d8359b 100644
--- a/deps/npm/man/man7/npm-index.7
+++ b/deps/npm/man/man7/npm-index.7
@@ -1,4 +1,4 @@
-.TH "NPM\-INDEX" "7" "July 2017" "" ""
+.TH "NPM\-INDEX" "7" "September 2017" "" ""
.SH "NAME"
\fBnpm-index\fR \- Index of all npm documentation
.SS npm help README
diff --git a/deps/npm/man/man7/npm-orgs.7 b/deps/npm/man/man7/npm-orgs.7
index 8a25d47a33d7ad..469a4aa1730a4d 100644
--- a/deps/npm/man/man7/npm-orgs.7
+++ b/deps/npm/man/man7/npm-orgs.7
@@ -1,4 +1,4 @@
-.TH "NPM\-ORGS" "7" "July 2017" "" ""
+.TH "NPM\-ORGS" "7" "September 2017" "" ""
.SH "NAME"
\fBnpm-orgs\fR \- Working with Teams & Orgs
.SH DESCRIPTION
diff --git a/deps/npm/man/man7/npm-registry.7 b/deps/npm/man/man7/npm-registry.7
index abd1406097515f..9063886fa26e6d 100644
--- a/deps/npm/man/man7/npm-registry.7
+++ b/deps/npm/man/man7/npm-registry.7
@@ -1,4 +1,4 @@
-.TH "NPM\-REGISTRY" "7" "July 2017" "" ""
+.TH "NPM\-REGISTRY" "7" "September 2017" "" ""
.SH "NAME"
\fBnpm-registry\fR \- The JavaScript Package Registry
.SH DESCRIPTION
diff --git a/deps/npm/man/man7/npm-scope.7 b/deps/npm/man/man7/npm-scope.7
index fbb685c46035f0..72a9031521e646 100644
--- a/deps/npm/man/man7/npm-scope.7
+++ b/deps/npm/man/man7/npm-scope.7
@@ -1,4 +1,4 @@
-.TH "NPM\-SCOPE" "7" "July 2017" "" ""
+.TH "NPM\-SCOPE" "7" "September 2017" "" ""
.SH "NAME"
\fBnpm-scope\fR \- Scoped packages
.SH DESCRIPTION
diff --git a/deps/npm/man/man7/npm-scripts.7 b/deps/npm/man/man7/npm-scripts.7
index e93ad1e6509fa6..a8702cefbda4dd 100644
--- a/deps/npm/man/man7/npm-scripts.7
+++ b/deps/npm/man/man7/npm-scripts.7
@@ -1,9 +1,9 @@
-.TH "NPM\-SCRIPTS" "7" "July 2017" "" ""
+.TH "NPM\-SCRIPTS" "7" "September 2017" "" ""
.SH "NAME"
\fBnpm-scripts\fR \- How npm handles the "scripts" field
.SH DESCRIPTION
.P
-npm supports the "scripts" property of the package\.json script, for the
+npm supports the "scripts" property of the package\.json file for the
following scripts:
.RS 0
.IP \(bu 2
diff --git a/deps/npm/man/man7/removing-npm.7 b/deps/npm/man/man7/removing-npm.7
index 1ee678b59e9344..56d24a9117c2a6 100644
--- a/deps/npm/man/man7/removing-npm.7
+++ b/deps/npm/man/man7/removing-npm.7
@@ -1,4 +1,4 @@
-.TH "NPM\-REMOVAL" "1" "July 2017" "" ""
+.TH "NPM\-REMOVAL" "1" "September 2017" "" ""
.SH "NAME"
\fBnpm-removal\fR \- Cleaning the Slate
.SH SYNOPSIS
diff --git a/deps/npm/man/man7/semver.7 b/deps/npm/man/man7/semver.7
index f0b22279d07f58..575503d25ea4f3 100644
--- a/deps/npm/man/man7/semver.7
+++ b/deps/npm/man/man7/semver.7
@@ -1,13 +1,21 @@
-.TH "SEMVER" "7" "July 2017" "" ""
+.TH "SEMVER" "7" "September 2017" "" ""
.SH "NAME"
\fBsemver\fR \- The semantic versioner for npm
+.SH Install
+.P
+.RS 2
+.nf
+npm install \-\-save semver
+.fi
+.RE
.SH Usage
.P
+As a node module:
+.P
.RS 2
.nf
-$ npm install semver
-$ node
-var semver = require('semver')
+const semver = require('semver')
semver\.valid('1\.2\.3') // '1\.2\.3'
semver\.valid('a\.b\.c') // null
@@ -24,7 +32,7 @@ As a command\-line utility:
.nf
$ semver \-h
-SemVer 5\.1\.0
+SemVer 5\.3\.0
A JavaScript implementation of the http://semver\.org/ specification
Copyright Isaac Z\. Schlueter
@@ -138,8 +146,8 @@ will append the value of the string as a prerelease identifier:
.P
.RS 2
.nf
-> semver\.inc('1\.2\.3', 'prerelease', 'beta')
-\|'1\.2\.4\-beta\.0'
+semver\.inc('1\.2\.3', 'prerelease', 'beta')
+// '1\.2\.4\-beta\.0'
.fi
.RE
.P
@@ -364,6 +372,9 @@ if none exist\. Example: \fBprerelease('1\.2\.3\-alpha\.1') \-> ['alpha', 1]\fP
\fBminor(v)\fP: Return the minor version number\.
.IP \(bu 2
\fBpatch(v)\fP: Return the patch version number\.
+.IP \(bu 2
+\fBintersects(r1, r2, loose)\fP: Return true if the two supplied ranges
+or comparators intersect\.
.RE
.SS Comparison
@@ -398,6 +409,12 @@ in descending order when passed to \fBArray\.sort()\fP\|\.
(\fBmajor\fP, \fBpremajor\fP, \fBminor\fP, \fBpreminor\fP, \fBpatch\fP, \fBprepatch\fP, or \fBprerelease\fP),
or null if the versions are the same\.
+.RE
+.SS Comparators
+.RS 0
+.IP \(bu 2
+\fBintersects(comparator)\fP: Return true if the two comparators intersect
+
.RE
.SS Ranges
.RS 0
@@ -423,6 +440,8 @@ versions possible in the range\.
the bounds of the range in either the high or low direction\. The
\fBhilo\fP argument must be either the string \fB\|'>'\fP or \fB\|'<'\fP\|\. (This is
the function called by \fBgtr\fP and \fBltr\fP\|\.)
+.IP \(bu 2
+\fBintersects(range)\fP: Return true if any of the range's comparators intersect
.RE
.P
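
A brief sketch of the `intersects` additions documented in the hunks above, with illustrative ranges:

```js
const semver = require('semver')

// Two ranges intersect when at least one version could satisfy both.
semver.intersects('^1.2.0', '1.4.x')    // true  -- 1.4.0 lies in both
semver.intersects('^1.2.0', '>=2.0.0')  // false -- ^1.2.0 stays below 2.0.0

// The same check is exposed on Range objects.
const a = new semver.Range('>=1.0.0 <2.0.0')
const b = new semver.Range('1.5.x')
a.intersects(b) // true
```
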
diff --git a/deps/npm/node_modules/fstream-npm/.npmignore b/deps/npm/node_modules/fstream-npm/.npmignore
deleted file mode 100644
index c02f40cee6bdec..00000000000000
--- a/deps/npm/node_modules/fstream-npm/.npmignore
+++ /dev/null
@@ -1,3 +0,0 @@
-# ignore the output junk from the example scripts
-example/output
-node_modules/
diff --git a/deps/npm/node_modules/fstream-npm/.travis.yml b/deps/npm/node_modules/fstream-npm/.travis.yml
deleted file mode 100644
index 905e270bca64ab..00000000000000
--- a/deps/npm/node_modules/fstream-npm/.travis.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-language: node_js
-sudo: false
-node_js:
- - "6"
- - "4"
- - "7"
-before_install:
- - "npm config set spin false"
- - "npm install -g npm"
-script: "npm test"
-notifications:
- slack: npm-inc:kRqQjto7YbINqHPb1X6nS3g8
diff --git a/deps/npm/node_modules/fstream-npm/README.md b/deps/npm/node_modules/fstream-npm/README.md
deleted file mode 100644
index 2c01e907318722..00000000000000
--- a/deps/npm/node_modules/fstream-npm/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# fstream-npm
-
-This is an fstream DirReader class that will read a directory and filter
-things according to the semantics of what goes in an npm package.
-
-For example:
-
-```javascript
-// This will print out all the files that would be included
-// by 'npm publish' or 'npm install' of this directory.
-
-var FN = require("fstream-npm")
-FN({ path: "./" })
- .on("child", function (e) {
- console.error(e.path.substr(e.root.path.length + 1))
- })
-```
-
diff --git a/deps/npm/node_modules/fstream-npm/example/bundle.js b/deps/npm/node_modules/fstream-npm/example/bundle.js
deleted file mode 100644
index 159e4f7b65d5fe..00000000000000
--- a/deps/npm/node_modules/fstream-npm/example/bundle.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// this example will bundle every dependency
-var P = require("../")
-P({ path: "./" })
- .on("package", bundleIt)
- .on("entry", function (e) {
- console.error(e.constructor.name, e.path.substr(e.root.dirname.length + 1))
- e.on("package", bundleIt)
- })
-
-function bundleIt (p) {
- p.bundleDependencies = Object.keys(p.dependencies || {})
-}
-
diff --git a/deps/npm/node_modules/fstream-npm/example/dir-tar.js b/deps/npm/node_modules/fstream-npm/example/dir-tar.js
deleted file mode 100644
index 393c796b2e367f..00000000000000
--- a/deps/npm/node_modules/fstream-npm/example/dir-tar.js
+++ /dev/null
@@ -1,19 +0,0 @@
-// this will show what ends up in the fstream-npm package
-var P = require('fstream').DirReader
-var tar = require('tar')
-function f (entry) {
- return entry.basename !== '.git'
-}
-
-new P({ path: './', type: 'Directory', Directory: true, filter: f })
- .on('package', function (p) {
- console.error('package', p)
- })
- .on('ignoreFile', function (e) {
- console.error('ignoreFile', e)
- })
- .on('entry', function (e) {
- console.error(e.constructor.name, e.path.substr(e.root.path.length + 1))
- })
- .pipe(tar.Pack())
- .pipe(process.stdout)
diff --git a/deps/npm/node_modules/fstream-npm/example/dir.js b/deps/npm/node_modules/fstream-npm/example/dir.js
deleted file mode 100644
index e524a3bfdb8188..00000000000000
--- a/deps/npm/node_modules/fstream-npm/example/dir.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// this will show what ends up in the fstream-npm package
-var P = require('../')
-var DW = require('fstream').DirWriter
-
-var target = new DW({ Directory: true, type: 'Directory',
- path: __dirname + '/output'})
-
-function f (entry) {
- return entry.basename !== '.git'
-}
-
-P({ path: './', type: 'Directory', isDirectory: true, filter: f })
- .on('package', function (p) {
- console.error('package', p)
- })
- .on('ignoreFile', function (e) {
- console.error('ignoreFile', e)
- })
- .on('entry', function (e) {
- console.error(e.constructor.name, e.path)
- })
- .pipe(target)
- .on('end', function () {
- console.error('ended')
- })
diff --git a/deps/npm/node_modules/fstream-npm/example/example.js b/deps/npm/node_modules/fstream-npm/example/example.js
deleted file mode 100644
index 2c933c440480f2..00000000000000
--- a/deps/npm/node_modules/fstream-npm/example/example.js
+++ /dev/null
@@ -1,12 +0,0 @@
-// this will show what ends up in the fstream-npm package
-var P = require('../')
-P({ path: './' })
- .on('package', function (p) {
- console.error('package', p)
- })
- .on('ignoreFile', function (e) {
- console.error('ignoreFile', e)
- })
- .on('entry', function (e) {
- console.error(e.constructor.name, e.path.substr(e.root.dirname.length + 1))
- })
diff --git a/deps/npm/node_modules/fstream-npm/example/ig-tar.js b/deps/npm/node_modules/fstream-npm/example/ig-tar.js
deleted file mode 100644
index 7a5b61fba659ce..00000000000000
--- a/deps/npm/node_modules/fstream-npm/example/ig-tar.js
+++ /dev/null
@@ -1,19 +0,0 @@
-// this will show what ends up in the fstream-npm package
-var P = require('fstream-ignore')
-var tar = require('tar')
-function f (entry) {
- return entry.basename !== '.git'
-}
-
-new P({ path: './', type: 'Directory', Directory: true, filter: f })
- .on('package', function (p) {
- console.error('package', p)
- })
- .on('ignoreFile', function (e) {
- console.error('ignoreFile', e)
- })
- .on('entry', function (e) {
- console.error(e.constructor.name, e.path.substr(e.root.path.length + 1))
- })
- .pipe(tar.Pack())
- .pipe(process.stdout)
diff --git a/deps/npm/node_modules/fstream-npm/example/tar.js b/deps/npm/node_modules/fstream-npm/example/tar.js
deleted file mode 100644
index b7f7e5f04d9326..00000000000000
--- a/deps/npm/node_modules/fstream-npm/example/tar.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// this will show what ends up in the fstream-npm package
-var P = require('../')
-var tar = require('tar')
-function f () {
- return true
-}
-// function f (entry) {
-// return entry.basename !== ".git"
-// }
-
-new P({ path: './', type: 'Directory', isDirectory: true, filter: f })
- .on('package', function (p) {
- console.error('package', p)
- })
- .on('ignoreFile', function (e) {
- console.error('ignoreFile', e)
- })
- .on('entry', function (e) {
- console.error(e.constructor.name, e.path)
- })
- .on('end', function () {
- console.error('ended')
- })
- .pipe(tar.Pack())
- .pipe(process.stdout)
diff --git a/deps/npm/node_modules/fstream-npm/fstream-npm.js b/deps/npm/node_modules/fstream-npm/fstream-npm.js
deleted file mode 100644
index a2deda1736b192..00000000000000
--- a/deps/npm/node_modules/fstream-npm/fstream-npm.js
+++ /dev/null
@@ -1,380 +0,0 @@
-var Ignore = require('fstream-ignore')
-var inherits = require('inherits')
-var path = require('path')
-var fs = require('fs')
-
-module.exports = Packer
-
-inherits(Packer, Ignore)
-
-function Packer (props) {
- if (!(this instanceof Packer)) {
- return new Packer(props)
- }
-
- if (typeof props === 'string') {
- props = { path: props }
- }
-
- props.ignoreFiles = props.ignoreFiles || [ '.npmignore',
- '.gitignore',
- 'package.json' ]
-
- Ignore.call(this, props)
-
- this.bundled = props.bundled
- this.bundleLinks = props.bundleLinks
- this.package = props.package
-
- // only do the magic bundling stuff for the node_modules folder that
- // lives right next to a package.json file.
- this.bundleMagic = this.parent &&
- this.parent.packageRoot &&
- this.basename === 'node_modules'
-
- // in a node_modules folder, resolve symbolic links to
- // bundled dependencies when creating the package.
- props.follow = this.follow = this.bundleMagic
- // console.error("follow?", this.path, props.follow)
-
- if (this === this.root ||
- this.parent &&
- this.parent.bundleMagic &&
- this.basename.charAt(0) !== '.') {
- this.readBundledLinks()
- }
-
- this.on('entryStat', function (entry, props) {
- // files should *always* get into tarballs
- // in a user-writable state, even if they're
- // being installed from some wackey vm-mounted
- // read-only filesystem.
- entry.mode = props.mode = props.mode | parseInt('0200', 8)
- })
-}
-
-Packer.prototype.readBundledLinks = function () {
- if (this._paused) {
- this.once('resume', this.addIgnoreFiles)
- return
- }
-
- this.pause()
- fs.readdir(this.path + '/node_modules', function (er, list) {
- // no harm if there's no bundle
- var l = list && list.length
- if (er || l === 0) return this.resume()
-
- var errState = null
- var then = function then (er) {
- if (errState) return
- if (er) {
- errState = er
- return this.resume()
- }
- if (--l === 0) return this.resume()
- }.bind(this)
-
- list.forEach(function (pkg) {
- if (pkg.charAt(0) === '.') return then()
- var pd = this.path + '/node_modules/' + pkg
-
- // scoped packages
- if (pkg.charAt(0) === '@') {
- fs.readdir(pd, function (er, slist) {
- var sl = slist && slist.length
- if (er || sl === 0) return then(er)
-
- l += sl
- slist.forEach(function (spkg) {
- if (spkg.charAt(0) === '.') return then()
- var spd = pd + '/' + spkg
- fs.realpath(spd, function (er, rp) {
- if (er) return then()
- this.bundleLinks = this.bundleLinks || {}
- this.bundleLinks[pkg + '/' + spkg] = rp
- then()
- }.bind(this))
- }, this)
- then()
- }.bind(this))
- return
- }
-
- fs.realpath(pd, function (er, rp) {
- if (er) return then()
- this.bundleLinks = this.bundleLinks || {}
- this.bundleLinks[pkg] = rp
- then()
- }.bind(this))
- }, this)
- }.bind(this))
-}
-
-Packer.prototype.applyIgnores = function (entry, partial, entryObj) {
- if (!entryObj || entryObj.type !== 'Directory') {
- // package.json files can never be ignored.
- if (entry === 'package.json') return true
-
- // readme files should never be ignored.
- if (entry.match(/^readme(\.[^\.]*)$/i)) return true
-
- // license files should never be ignored.
- if (entry.match(/^(license|licence)(\.[^\.]*)?$/i)) return true
-
- // copyright notice files should never be ignored.
- if (entry.match(/^(notice)(\.[^\.]*)?$/i)) return true
-
- // changelogs should never be ignored.
- if (entry.match(/^(changes|changelog|history)(\.[^\.]*)?$/i)) return true
- }
-
- // special rules. see below.
- if (entry === 'node_modules' && this.packageRoot) return true
-
- // package.json main file should never be ignored.
- var mainFile = this.package && this.package.main
- if (mainFile && path.resolve(this.path, entry) === path.resolve(this.path, mainFile)) return true
-
- // some files are *never* allowed under any circumstances
- // (VCS folders, native build cruft, npm cruft, regular cruft)
- if (entry === '.git' ||
- entry === 'CVS' ||
- entry === '.svn' ||
- entry === '.hg' ||
- entry === '.lock-wscript' ||
- entry.match(/^\.wafpickle-[0-9]+$/) ||
- (this.parent && this.parent.packageRoot && this.basename === 'build' &&
- entry === 'config.gypi') ||
- entry === 'npm-debug.log' ||
- entry === '.npmrc' ||
- entry.match(/^\..*\.swp$/) ||
- entry === '.DS_Store' ||
- entry.match(/^\._/) ||
- entry.match(/^.*\.orig$/) ||
- // Package locks are never allowed in tarballs -- use shrinkwrap instead
- entry === 'package-lock.json'
- ) {
- return false
- }
-
- // in a node_modules folder, we only include bundled dependencies
- // also, prevent packages in node_modules from being affected
- // by rules set in the containing package, so that
- // bundles don't get busted.
- // Also, once in a bundle, everything is installed as-is
- // To prevent infinite cycles in the case of cyclic deps that are
- // linked with npm link, even in a bundle, deps are only bundled
- // if they're not already present at a higher level.
- if (this.bundleMagic) {
- if (entry.charAt(0) === '@') {
- var firstSlash = entry.indexOf('/')
- // continue to list the packages in this scope
- if (firstSlash === -1) return true
-
- // bubbling up. stop here and allow anything the bundled pkg allows
- if (entry.indexOf('/', firstSlash + 1) !== -1) return true
- }
- // bubbling up. stop here and allow anything the bundled pkg allows
- else if (entry.indexOf('/') !== -1) return true
-
- // never include the .bin. It's typically full of platform-specific
- // stuff like symlinks and .cmd files anyway.
- if (entry === '.bin') return false
-
- // the package root.
- var p = this.parent
- // the package before this one.
- var pp = p && p.parent
-
- // if this entry has already been bundled, and is a symlink,
- // and it is the *same* symlink as this one, then exclude it.
- if (pp && pp.bundleLinks && this.bundleLinks &&
- pp.bundleLinks[entry] &&
- pp.bundleLinks[entry] === this.bundleLinks[entry]) {
- return false
- }
-
- // since it's *not* a symbolic link, if we're *already* in a bundle,
- // then we should include everything.
- if (pp && pp.package && pp.basename === 'node_modules') {
- return true
- }
-
- // only include it at this point if it's a bundleDependency
- var bd = this.package && this.package.bundleDependencies
-
- if (bd && !Array.isArray(bd)) {
- throw new Error(this.package.name + '\'s `bundledDependencies` should ' +
- 'be an array')
- }
-
- var shouldBundle = bd && bd.indexOf(entry) !== -1
- // if we're not going to bundle it, then it doesn't count as a bundleLink
- // if (this.bundleLinks && !shouldBundle) delete this.bundleLinks[entry]
- return shouldBundle
- }
- // if (this.bundled) return true
-
- return Ignore.prototype.applyIgnores.call(this, entry, partial, entryObj)
-}
-
-Packer.prototype.addIgnoreFiles = function () {
- var entries = this.entries
- // if there's a .npmignore, then we do *not* want to
- // read the .gitignore.
- if (entries.indexOf('.npmignore') !== -1) {
- var i = entries.indexOf('.gitignore')
- if (i !== -1) {
- entries.splice(i, 1)
- }
- }
-
- this.entries = entries
-
- Ignore.prototype.addIgnoreFiles.call(this)
-}
-
-Packer.prototype.readRules = function (buf, e) {
- if (e !== 'package.json') {
- return Ignore.prototype.readRules.call(this, buf, e)
- }
-
- buf = buf.toString().trim()
-
- if (buf.length === 0) return []
-
- try {
- var p = this.package = JSON.parse(buf)
- } catch (er) {
- // just pretend it's a normal old file, not magic at all.
- return []
- }
-
- if (this === this.root) {
- this.bundleLinks = this.bundleLinks || {}
- this.bundleLinks[p.name] = this._path
- }
-
- this.packageRoot = true
- this.emit('package', p)
-
- // make bundle deps predictable
- if (p.bundledDependencies && !p.bundleDependencies) {
- p.bundleDependencies = p.bundledDependencies
- delete p.bundledDependencies
- }
-
- if (!p.files || !Array.isArray(p.files)) return []
-
- // ignore everything except what's in the files array.
- return ['*'].concat(p.files.map(function (f) {
- return '!' + f
- })).concat(p.files.map(function (f) {
- return '!' + f.replace(/\/+$/, '') + '/**'
- }))
-}
-
-Packer.prototype.getChildProps = function (stat) {
- var props = Ignore.prototype.getChildProps.call(this, stat)
-
- props.package = this.package
-
- props.bundled = this.bundled && this.bundled.slice(0)
- props.bundleLinks = this.bundleLinks &&
- Object.create(this.bundleLinks)
-
- // Directories have to be read as Packers
- // otherwise fstream.Reader will create a DirReader instead.
- if (stat.isDirectory()) {
- props.type = this.constructor
- }
-
- // only follow symbolic links directly in the node_modules folder.
- props.follow = false
- return props
-}
-
-var order = [
- 'package.json',
- '.npmignore',
- '.gitignore',
- /^README(\.md)?$/,
- 'LICENCE',
- 'LICENSE',
- /\.js$/
-]
-
-Packer.prototype.sort = function (a, b) {
- for (var i = 0, l = order.length; i < l; i++) {
- var o = order[i]
- if (typeof o === 'string') {
- if (a === o) return -1
- if (b === o) return 1
- } else {
- if (a.match(o)) return -1
- if (b.match(o)) return 1
- }
- }
-
- // deps go in the back
- if (a === 'node_modules') return 1
- if (b === 'node_modules') return -1
-
- return Ignore.prototype.sort.call(this, a, b)
-}
-
-Packer.prototype.emitEntry = function (entry) {
- if (this._paused) {
- this.once('resume', this.emitEntry.bind(this, entry))
- return
- }
-
- // if there is a .gitignore, then we're going to
- // rename it to .npmignore in the output.
- if (entry.basename === '.gitignore') {
- entry.basename = '.npmignore'
- entry.path = path.resolve(entry.dirname, entry.basename)
- }
-
- // all *.gyp files are renamed to binding.gyp for node-gyp
- // but only when they are in the same folder as a package.json file.
- if (entry.basename.match(/\.gyp$/) &&
- this.entries.indexOf('package.json') !== -1) {
- entry.basename = 'binding.gyp'
- entry.path = path.resolve(entry.dirname, entry.basename)
- }
-
- // skip over symbolic links
- if (entry.type === 'SymbolicLink') {
- entry.abort()
- return
- }
-
- if (entry.type !== 'Directory') {
- // make it so that the folder in the tarball is named "package"
- var h = path.dirname((entry.root || entry).path)
- var t = entry.path.substr(h.length + 1).replace(/^[^\/\\]+/, 'package')
- var p = h + '/' + t
-
- entry.path = p
- entry.dirname = path.dirname(p)
- return Ignore.prototype.emitEntry.call(this, entry)
- }
-
- // we don't want empty directories to show up in package
- // tarballs.
- // don't emit entry events for dirs, but still walk through
- // and read them. This means that we need to proxy up their
- // entry events so that those entries won't be missed, since
- // .pipe() doesn't do anythign special with "child" events, on
- // with "entry" events.
- var me = this
- entry.on('entry', function (e) {
- if (e.parent === entry) {
- e.parent = me
- me.emit('entry', e)
- }
- })
- entry.on('package', this.emit.bind(this, 'package'))
-}
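
For reference, the `readRules` override in the file deleted above implemented the package.json `files` whitelist by inverting it into ignore rules. A standalone sketch of that transformation, extracted from the removed code:

```js
// Ignore everything ('*'), then un-ignore each whitelisted entry both
// as written and with '/**' appended, so the contents of whitelisted
// folders survive the filter too.
function filesToIgnoreRules (files) {
  return ['*']
    .concat(files.map(function (f) { return '!' + f }))
    .concat(files.map(function (f) {
      return '!' + f.replace(/\/+$/, '') + '/**'
    }))
}

console.log(filesToIgnoreRules(['lib/', 'index.js']))
// => [ '*', '!lib/', '!index.js', '!lib/**', '!index.js/**' ]
```
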
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/README.md b/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/README.md
deleted file mode 100644
index 31170feae4e7d0..00000000000000
--- a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/README.md
+++ /dev/null
@@ -1,22 +0,0 @@
-# fstream-ignore
-
-A fstream DirReader that filters out files that match globs in `.ignore`
-files throughout the tree, like how git ignores files based on a
-`.gitignore` file.
-
-Here's an example:
-
-```javascript
-var Ignore = require("fstream-ignore")
-Ignore({ path: __dirname
- , ignoreFiles: [".ignore", ".gitignore"]
- })
- .on("child", function (c) {
- console.error(c.path.substr(c.root.path.length + 1))
- })
- .pipe(tar.Pack())
- .pipe(fs.createWriteStream("foo.tar"))
-```
-
-This will tar up the files in __dirname into `foo.tar`, ignoring
-anything matched by the globs in any .iginore or .gitignore file.
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/ignore.js b/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/ignore.js
deleted file mode 100644
index 212fc7bdea9835..00000000000000
--- a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/ignore.js
+++ /dev/null
@@ -1,275 +0,0 @@
-// Essentially, this is a fstream.DirReader class, but with a
-// bit of special logic to read the specified sort of ignore files,
-// and a filter that prevents it from picking up anything excluded
-// by those files.
-
-var Minimatch = require("minimatch").Minimatch
-, fstream = require("fstream")
-, DirReader = fstream.DirReader
-, inherits = require("inherits")
-, path = require("path")
-, fs = require("fs")
-
-module.exports = IgnoreReader
-
-inherits(IgnoreReader, DirReader)
-
-function IgnoreReader (props) {
- if (!(this instanceof IgnoreReader)) {
- return new IgnoreReader(props)
- }
-
- // must be a Directory type
- if (typeof props === "string") {
- props = { path: path.resolve(props) }
- }
-
- props.type = "Directory"
- props.Directory = true
-
- if (!props.ignoreFiles) props.ignoreFiles = [".ignore"]
- this.ignoreFiles = props.ignoreFiles
-
- this.ignoreRules = null
-
- // ensure that .ignore files always show up at the top of the list
- // that way, they can be read before proceeding to handle other
- // entries in that same folder
- if (props.sort) {
- this._sort = props.sort === "alpha" ? alphasort : props.sort
- props.sort = null
- }
-
- this.on("entries", function () {
- // if there are any ignore files in the list, then
- // pause and add them.
- // then, filter the list based on our ignoreRules
-
- var hasIg = this.entries.some(this.isIgnoreFile, this)
-
- if (!hasIg) return this.filterEntries()
-
- this.addIgnoreFiles()
- })
-
- // we filter entries before we know what they are.
- // however, directories have to be re-tested against
- // rules with a "/" appended, because "a/b/" will only
- // match if "a/b" is a dir, and not otherwise.
- this.on("_entryStat", function (entry, props) {
- var t = entry.basename
- if (!this.applyIgnores(entry.basename,
- entry.type === "Directory",
- entry)) {
- entry.abort()
- }
- }.bind(this))
-
- DirReader.call(this, props)
-}
-
-
-IgnoreReader.prototype.addIgnoreFiles = function () {
- if (this._paused) {
- this.once("resume", this.addIgnoreFiles)
- return
- }
- if (this._ignoreFilesAdded) return
- this._ignoreFilesAdded = true
-
- var newIg = this.entries.filter(this.isIgnoreFile, this)
- , count = newIg.length
- , errState = null
-
- if (!count) return
-
- this.pause()
-
- var then = function (er) {
- if (errState) return
- if (er) return this.emit("error", errState = er)
- if (-- count === 0) {
- this.filterEntries()
- this.resume()
- } else {
- this.addIgnoreFile(newIg[newIg.length - count], then)
- }
- }.bind(this)
-
- this.addIgnoreFile(newIg[0], then)
-}
-
-
-IgnoreReader.prototype.isIgnoreFile = function (e) {
- return e !== "." &&
- e !== ".." &&
- -1 !== this.ignoreFiles.indexOf(e)
-}
-
-
-IgnoreReader.prototype.getChildProps = function (stat) {
- var props = DirReader.prototype.getChildProps.call(this, stat)
- props.ignoreFiles = this.ignoreFiles
-
- // Directories have to be read as IgnoreReaders
- // otherwise fstream.Reader will create a DirReader instead.
- if (stat.isDirectory()) {
- props.type = this.constructor
- }
- return props
-}
-
-
-IgnoreReader.prototype.addIgnoreFile = function (e, cb) {
- // read the file, and then call addIgnoreRules
- // if there's an error, then tell the cb about it.
-
- var ig = path.resolve(this.path, e)
- fs.readFile(ig, function (er, data) {
- if (er) return cb(er)
-
- this.emit("ignoreFile", e, data)
- var rules = this.readRules(data, e)
- this.addIgnoreRules(rules, e)
- cb()
- }.bind(this))
-}
-
-
-IgnoreReader.prototype.readRules = function (buf, e) {
- return buf.toString().split(/\r?\n/)
-}
-
-
-// Override this to do fancier things, like read the
-// "files" array from a package.json file or something.
-IgnoreReader.prototype.addIgnoreRules = function (set, e) {
- // filter out anything obvious
- set = set.filter(function (s) {
- s = s.trim()
- return s && !s.match(/^#/)
- })
-
- // no rules to add!
- if (!set.length) return
-
- // now get a minimatch object for each one of these.
- // Note that we need to allow dot files by default, and
- // not switch the meaning of their exclusion
- var mmopt = { matchBase: true, dot: true, flipNegate: true }
- , mm = set.map(function (s) {
- var m = new Minimatch(s, mmopt)
- m.ignoreFile = e
- return m
- })
-
- if (!this.ignoreRules) this.ignoreRules = []
- this.ignoreRules.push.apply(this.ignoreRules, mm)
-}
-
-
-IgnoreReader.prototype.filterEntries = function () {
- // this exclusion is at the point where we know the list of
- // entries in the dir, but don't know what they are. since
- // some of them *might* be directories, we have to run the
- // match in dir-mode as well, so that we'll pick up partials
- // of files that will be included later. Anything included
- // at this point will be checked again later once we know
- // what it is.
- this.entries = this.entries.filter(function (entry) {
- // at this point, we don't know if it's a dir or not.
- return this.applyIgnores(entry) || this.applyIgnores(entry, true)
- }, this)
-}
-
-
-IgnoreReader.prototype.applyIgnores = function (entry, partial, obj) {
- var included = true
-
- // this = /a/b/c
- // entry = d
- // parent /a/b sees c/d
- if (this.parent && this.parent.applyIgnores) {
- var pt = this.basename + "/" + entry
- included = this.parent.applyIgnores(pt, partial)
- }
-
- // Negated Rules
- // Since we're *ignoring* things here, negating means that a file
- // is re-included, if it would have been excluded by a previous
- // rule. So, negated rules are only relevant if the file
- // has been excluded.
- //
- // Similarly, if a file has been excluded, then there's no point
- // trying it against rules that have already been applied
- //
- // We're using the "flipnegate" flag here, which tells minimatch
- // to set the "negate" for our information, but still report
- // whether the core pattern was a hit or a miss.
-
- if (!this.ignoreRules) {
- return included
- }
-
- this.ignoreRules.forEach(function (rule) {
- // negation means inclusion
- if (rule.negate && included ||
- !rule.negate && !included) {
- // unnecessary
- return
- }
-
- // first, match against /foo/bar
- var match = rule.match("/" + entry)
-
- if (!match) {
- // try with the leading / trimmed off the test
- // eg: foo/bar instead of /foo/bar
- match = rule.match(entry)
- }
-
- // if the entry is a directory, then it will match
- // with a trailing slash. eg: /foo/bar/ or foo/bar/
- if (!match && partial) {
- match = rule.match("/" + entry + "/") ||
- rule.match(entry + "/")
- }
-
- // When including a file with a negated rule, it's
- // relevant if a directory partially matches, since
- // it may then match a file within it.
- // Eg, if you ignore /a, but !/a/b/c
- if (!match && rule.negate && partial) {
- match = rule.match("/" + entry, true) ||
- rule.match(entry, true)
- }
-
- if (match) {
- included = rule.negate
- }
- }, this)
-
- return included
-}
-
-
-IgnoreReader.prototype.sort = function (a, b) {
- var aig = this.ignoreFiles.indexOf(a) !== -1
- , big = this.ignoreFiles.indexOf(b) !== -1
-
- if (aig && !big) return -1
- if (big && !aig) return 1
- return this._sort(a, b)
-}
-
-IgnoreReader.prototype._sort = function (a, b) {
- return 0
-}
-
-function alphasort (a, b) {
- return a === b ? 0
- : a.toLowerCase() > b.toLowerCase() ? 1
- : a.toLowerCase() < b.toLowerCase() ? -1
- : a > b ? 1
- : -1
-}
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/README.md b/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/README.md
deleted file mode 100644
index ed2ec1fdd78f56..00000000000000
--- a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/README.md
+++ /dev/null
@@ -1,123 +0,0 @@
-# brace-expansion
-
-[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
-as known from sh/bash, in JavaScript.
-
-[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion)
-[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion)
-[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/)
-
-[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion)
-
-## Example
-
-```js
-var expand = require('brace-expansion');
-
-expand('file-{a,b,c}.jpg')
-// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
-
-expand('-v{,,}')
-// => ['-v', '-v', '-v']
-
-expand('file{0..2}.jpg')
-// => ['file0.jpg', 'file1.jpg', 'file2.jpg']
-
-expand('file-{a..c}.jpg')
-// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
-
-expand('file{2..0}.jpg')
-// => ['file2.jpg', 'file1.jpg', 'file0.jpg']
-
-expand('file{0..4..2}.jpg')
-// => ['file0.jpg', 'file2.jpg', 'file4.jpg']
-
-expand('file-{a..e..2}.jpg')
-// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']
-
-expand('file{00..10..5}.jpg')
-// => ['file00.jpg', 'file05.jpg', 'file10.jpg']
-
-expand('{{A..C},{a..c}}')
-// => ['A', 'B', 'C', 'a', 'b', 'c']
-
-expand('ppp{,config,oe{,conf}}')
-// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
-```
-
-## API
-
-```js
-var expand = require('brace-expansion');
-```
-
-### var expanded = expand(str)
-
-Return an array of all possible and valid expansions of `str`. If none are
-found, `[str]` is returned.
-
-Valid expansions are:
-
-```js
-/^(.*,)+(.+)?$/
-// {a,b,...}
-```
-
-A comma seperated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.
-
-```js
-/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
-// {x..y[..incr]}
-```
-
-A numeric sequence from `x` to `y` inclusive, with optional increment.
-If `x` or `y` start with a leading `0`, all the numbers will be padded
-to have equal length. Negative numbers and backwards iteration work too.
-
-```js
-/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
-// {x..y[..incr]}
-```
-
-An alphabetic sequence from `x` to `y` inclusive, with optional increment.
-`x` and `y` must be exactly one character, and if given, `incr` must be a
-number.
-
-For compatibility reasons, the string `${` is not eligible for brace expansion.
-
-## Installation
-
-With [npm](https://npmjs.org) do:
-
-```bash
-npm install brace-expansion
-```
-
-## Contributors
-
-- [Julian Gruber](https://github.com/juliangruber)
-- [Isaac Z. Schlueter](https://github.com/isaacs)
-
-## License
-
-(MIT)
-
-Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/index.js b/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/index.js
deleted file mode 100644
index 0478be81eabc2b..00000000000000
--- a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/index.js
+++ /dev/null
@@ -1,201 +0,0 @@
-var concatMap = require('concat-map');
-var balanced = require('balanced-match');
-
-module.exports = expandTop;
-
-var escSlash = '\0SLASH'+Math.random()+'\0';
-var escOpen = '\0OPEN'+Math.random()+'\0';
-var escClose = '\0CLOSE'+Math.random()+'\0';
-var escComma = '\0COMMA'+Math.random()+'\0';
-var escPeriod = '\0PERIOD'+Math.random()+'\0';
-
-function numeric(str) {
- return parseInt(str, 10) == str
- ? parseInt(str, 10)
- : str.charCodeAt(0);
-}
-
-function escapeBraces(str) {
- return str.split('\\\\').join(escSlash)
- .split('\\{').join(escOpen)
- .split('\\}').join(escClose)
- .split('\\,').join(escComma)
- .split('\\.').join(escPeriod);
-}
-
-function unescapeBraces(str) {
- return str.split(escSlash).join('\\')
- .split(escOpen).join('{')
- .split(escClose).join('}')
- .split(escComma).join(',')
- .split(escPeriod).join('.');
-}
-
-
-// Basically just str.split(","), but handling cases
-// where we have nested braced sections, which should be
-// treated as individual members, like {a,{b,c},d}
-function parseCommaParts(str) {
- if (!str)
- return [''];
-
- var parts = [];
- var m = balanced('{', '}', str);
-
- if (!m)
- return str.split(',');
-
- var pre = m.pre;
- var body = m.body;
- var post = m.post;
- var p = pre.split(',');
-
- p[p.length-1] += '{' + body + '}';
- var postParts = parseCommaParts(post);
- if (post.length) {
- p[p.length-1] += postParts.shift();
- p.push.apply(p, postParts);
- }
-
- parts.push.apply(parts, p);
-
- return parts;
-}
-
-function expandTop(str) {
- if (!str)
- return [];
-
- // I don't know why Bash 4.3 does this, but it does.
- // Anything starting with {} will have the first two bytes preserved
- // but *only* at the top level, so {},a}b will not expand to anything,
- // but a{},b}c will be expanded to [a}c,abc].
- // One could argue that this is a bug in Bash, but since the goal of
- // this module is to match Bash's rules, we escape a leading {}
- if (str.substr(0, 2) === '{}') {
- str = '\\{\\}' + str.substr(2);
- }
-
- return expand(escapeBraces(str), true).map(unescapeBraces);
-}
-
-function identity(e) {
- return e;
-}
-
-function embrace(str) {
- return '{' + str + '}';
-}
-function isPadded(el) {
- return /^-?0\d/.test(el);
-}
-
-function lte(i, y) {
- return i <= y;
-}
-function gte(i, y) {
- return i >= y;
-}
-
-function expand(str, isTop) {
- var expansions = [];
-
- var m = balanced('{', '}', str);
- if (!m || /\$$/.test(m.pre)) return [str];
-
- var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
- var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
- var isSequence = isNumericSequence || isAlphaSequence;
- var isOptions = m.body.indexOf(',') >= 0;
- if (!isSequence && !isOptions) {
- // {a},b}
- if (m.post.match(/,.*\}/)) {
- str = m.pre + '{' + m.body + escClose + m.post;
- return expand(str);
- }
- return [str];
- }
-
- var n;
- if (isSequence) {
- n = m.body.split(/\.\./);
- } else {
- n = parseCommaParts(m.body);
- if (n.length === 1) {
- // x{{a,b}}y ==> x{a}y x{b}y
- n = expand(n[0], false).map(embrace);
- if (n.length === 1) {
- var post = m.post.length
- ? expand(m.post, false)
- : [''];
- return post.map(function(p) {
- return m.pre + n[0] + p;
- });
- }
- }
- }
-
- // at this point, n is the parts, and we know it's not a comma set
- // with a single entry.
-
- // no need to expand pre, since it is guaranteed to be free of brace-sets
- var pre = m.pre;
- var post = m.post.length
- ? expand(m.post, false)
- : [''];
-
- var N;
-
- if (isSequence) {
- var x = numeric(n[0]);
- var y = numeric(n[1]);
- var width = Math.max(n[0].length, n[1].length)
- var incr = n.length == 3
- ? Math.abs(numeric(n[2]))
- : 1;
- var test = lte;
- var reverse = y < x;
- if (reverse) {
- incr *= -1;
- test = gte;
- }
- var pad = n.some(isPadded);
-
- N = [];
-
- for (var i = x; test(i, y); i += incr) {
- var c;
- if (isAlphaSequence) {
- c = String.fromCharCode(i);
- if (c === '\\')
- c = '';
- } else {
- c = String(i);
- if (pad) {
- var need = width - c.length;
- if (need > 0) {
- var z = new Array(need + 1).join('0');
- if (i < 0)
- c = '-' + z + c.slice(1);
- else
- c = z + c;
- }
- }
- }
- N.push(c);
- }
- } else {
- N = concatMap(n, function(el) { return expand(el, false) });
- }
-
- for (var j = 0; j < N.length; j++) {
- for (var k = 0; k < post.length; k++) {
- var expansion = pre + N[j] + post[k];
- if (!isTop || isSequence || expansion)
- expansions.push(expansion);
- }
- }
-
- return expansions;
-}
-
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json b/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json
deleted file mode 100644
index 4702c7d9a4f0b7..00000000000000
--- a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json
+++ /dev/null
@@ -1,77 +0,0 @@
-{
- "_from": "balanced-match@^1.0.0",
- "_id": "balanced-match@1.0.0",
- "_inBundle": false,
- "_integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
- "_location": "/fstream-npm/fstream-ignore/minimatch/brace-expansion/balanced-match",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "balanced-match@^1.0.0",
- "name": "balanced-match",
- "escapedName": "balanced-match",
- "rawSpec": "^1.0.0",
- "saveSpec": null,
- "fetchSpec": "^1.0.0"
- },
- "_requiredBy": [
- "/fstream-npm/fstream-ignore/minimatch/brace-expansion"
- ],
- "_resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
- "_shasum": "89b4d199ab2bee49de164ea02b89ce462d71b767",
- "_spec": "balanced-match@^1.0.0",
- "_where": "/Users/rebecca/code/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion",
- "author": {
- "name": "Julian Gruber",
- "email": "mail@juliangruber.com",
- "url": "http://juliangruber.com"
- },
- "bugs": {
- "url": "https://github.com/juliangruber/balanced-match/issues"
- },
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "Match balanced character pairs, like \"{\" and \"}\"",
- "devDependencies": {
- "matcha": "^0.7.0",
- "tape": "^4.6.0"
- },
- "homepage": "https://github.com/juliangruber/balanced-match",
- "keywords": [
- "match",
- "regexp",
- "test",
- "balanced",
- "parse"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "balanced-match",
- "repository": {
- "type": "git",
- "url": "git://github.com/juliangruber/balanced-match.git"
- },
- "scripts": {
- "bench": "make bench",
- "test": "make test"
- },
- "testling": {
- "files": "test/*.js",
- "browsers": [
- "ie/8..latest",
- "firefox/20..latest",
- "firefox/nightly",
- "chrome/25..latest",
- "chrome/canary",
- "opera/12..latest",
- "opera/next",
- "safari/5.1..latest",
- "ipad/6.0..latest",
- "iphone/6.0..latest",
- "android-browser/4.2..latest"
- ]
- },
- "version": "1.0.0"
-}
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json b/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json
deleted file mode 100644
index 439d78a33b9872..00000000000000
--- a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json
+++ /dev/null
@@ -1,92 +0,0 @@
-{
- "_from": "concat-map@0.0.1",
- "_id": "concat-map@0.0.1",
- "_integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
- "_location": "/fstream-npm/fstream-ignore/minimatch/brace-expansion/concat-map",
- "_phantomChildren": {},
- "_requested": {
- "type": "version",
- "registry": true,
- "raw": "concat-map@0.0.1",
- "name": "concat-map",
- "escapedName": "concat-map",
- "rawSpec": "0.0.1",
- "saveSpec": null,
- "fetchSpec": "0.0.1"
- },
- "_requiredBy": [
- "/fstream-npm/fstream-ignore/minimatch/brace-expansion"
- ],
- "_resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
- "_shasum": "d8a96bd77fd68df7793a73036a3ba0d5405d477b",
- "_shrinkwrap": null,
- "_spec": "concat-map@0.0.1",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion",
- "author": {
- "name": "James Halliday",
- "email": "mail@substack.net",
- "url": "http://substack.net"
- },
- "bin": null,
- "bugs": {
- "url": "https://github.com/substack/node-concat-map/issues"
- },
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "concatenative mapdashery",
- "devDependencies": {
- "tape": "~2.4.0"
- },
- "directories": {
- "example": "example",
- "test": "test"
- },
- "homepage": "https://github.com/substack/node-concat-map#readme",
- "keywords": [
- "concat",
- "concatMap",
- "map",
- "functional",
- "higher-order"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "concat-map",
- "optionalDependencies": {},
- "peerDependencies": {},
- "repository": {
- "type": "git",
- "url": "git://github.com/substack/node-concat-map.git"
- },
- "scripts": {
- "test": "tape test/*.js"
- },
- "testling": {
- "files": "test/*.js",
- "browsers": {
- "ie": [
- 6,
- 7,
- 8,
- 9
- ],
- "ff": [
- 3.5,
- 10,
- 15
- ],
- "chrome": [
- 10,
- 22
- ],
- "safari": [
- 5.1
- ],
- "opera": [
- 12
- ]
- }
- },
- "version": "0.0.1"
-}
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/package.json b/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/package.json
deleted file mode 100644
index 7d38d063ea6021..00000000000000
--- a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/package.json
+++ /dev/null
@@ -1,75 +0,0 @@
-{
- "_from": "brace-expansion@^1.1.7",
- "_id": "brace-expansion@1.1.8",
- "_inBundle": false,
- "_integrity": "sha1-wHshHHyVLsH479Uad+8NHTmQopI=",
- "_location": "/fstream-npm/fstream-ignore/minimatch/brace-expansion",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "brace-expansion@^1.1.7",
- "name": "brace-expansion",
- "escapedName": "brace-expansion",
- "rawSpec": "^1.1.7",
- "saveSpec": null,
- "fetchSpec": "^1.1.7"
- },
- "_requiredBy": [
- "/fstream-npm/fstream-ignore/minimatch"
- ],
- "_resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz",
- "_shasum": "c07b211c7c952ec1f8efd51a77ef0d1d3990a292",
- "_spec": "brace-expansion@^1.1.7",
- "_where": "/Users/rebecca/code/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch",
- "author": {
- "name": "Julian Gruber",
- "email": "mail@juliangruber.com",
- "url": "http://juliangruber.com"
- },
- "bugs": {
- "url": "https://github.com/juliangruber/brace-expansion/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- },
- "deprecated": false,
- "description": "Brace expansion as known from sh/bash",
- "devDependencies": {
- "matcha": "^0.7.0",
- "tape": "^4.6.0"
- },
- "homepage": "https://github.com/juliangruber/brace-expansion",
- "keywords": [],
- "license": "MIT",
- "main": "index.js",
- "name": "brace-expansion",
- "repository": {
- "type": "git",
- "url": "git://github.com/juliangruber/brace-expansion.git"
- },
- "scripts": {
- "bench": "matcha test/perf/bench.js",
- "gentest": "bash test/generate.sh",
- "test": "tape test/*.js"
- },
- "testling": {
- "files": "test/*.js",
- "browsers": [
- "ie/8..latest",
- "firefox/20..latest",
- "firefox/nightly",
- "chrome/25..latest",
- "chrome/canary",
- "opera/12..latest",
- "opera/next",
- "safari/5.1..latest",
- "ipad/6.0..latest",
- "iphone/6.0..latest",
- "android-browser/4.2..latest"
- ]
- },
- "version": "1.1.8"
-}
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/package.json b/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/package.json
deleted file mode 100644
index e4bdaeffc12c7e..00000000000000
--- a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/package.json
+++ /dev/null
@@ -1,63 +0,0 @@
-{
- "_from": "minimatch@^3.0.0",
- "_id": "minimatch@3.0.4",
- "_inBundle": false,
- "_integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
- "_location": "/fstream-npm/fstream-ignore/minimatch",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "minimatch@^3.0.0",
- "name": "minimatch",
- "escapedName": "minimatch",
- "rawSpec": "^3.0.0",
- "saveSpec": null,
- "fetchSpec": "^3.0.0"
- },
- "_requiredBy": [
- "/fstream-npm/fstream-ignore"
- ],
- "_resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
- "_shasum": "5166e286457f03306064be5497e8dbb0c3d32083",
- "_spec": "minimatch@^3.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/fstream-npm/node_modules/fstream-ignore",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me"
- },
- "bugs": {
- "url": "https://github.com/isaacs/minimatch/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "deprecated": false,
- "description": "a glob matcher in javascript",
- "devDependencies": {
- "tap": "^10.3.2"
- },
- "engines": {
- "node": "*"
- },
- "files": [
- "minimatch.js"
- ],
- "homepage": "https://github.com/isaacs/minimatch#readme",
- "license": "ISC",
- "main": "minimatch.js",
- "name": "minimatch",
- "repository": {
- "type": "git",
- "url": "git://github.com/isaacs/minimatch.git"
- },
- "scripts": {
- "postpublish": "git push origin --all; git push origin --tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "test": "tap test/*.js --cov"
- },
- "version": "3.0.4"
-}
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/package.json b/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/package.json
deleted file mode 100644
index ee66985a5ebfa2..00000000000000
--- a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/package.json
+++ /dev/null
@@ -1,64 +0,0 @@
-{
- "_from": "fstream-ignore@^1.0.0",
- "_id": "fstream-ignore@1.0.5",
- "_integrity": "sha1-nDHa40dnAY/h0kmyTa2mfQktoQU=",
- "_location": "/fstream-npm/fstream-ignore",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "fstream-ignore@^1.0.0",
- "name": "fstream-ignore",
- "escapedName": "fstream-ignore",
- "rawSpec": "^1.0.0",
- "saveSpec": null,
- "fetchSpec": "^1.0.0"
- },
- "_requiredBy": [
- "/fstream-npm"
- ],
- "_resolved": "https://registry.npmjs.org/fstream-ignore/-/fstream-ignore-1.0.5.tgz",
- "_shasum": "9c31dae34767018fe1d249b24dada67d092da105",
- "_shrinkwrap": null,
- "_spec": "fstream-ignore@^1.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/fstream-npm",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bin": null,
- "bugs": {
- "url": "https://github.com/isaacs/fstream-ignore/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "fstream": "^1.0.0",
- "inherits": "2",
- "minimatch": "^3.0.0"
- },
- "deprecated": false,
- "description": "A thing for ignoring files based on globs",
- "devDependencies": {
- "mkdirp": "",
- "rimraf": "",
- "tap": "^5.7.1"
- },
- "files": [
- "ignore.js"
- ],
- "homepage": "https://github.com/isaacs/fstream-ignore#readme",
- "license": "ISC",
- "main": "ignore.js",
- "name": "fstream-ignore",
- "optionalDependencies": {},
- "peerDependencies": {},
- "repository": {
- "type": "git",
- "url": "git://github.com/isaacs/fstream-ignore.git"
- },
- "scripts": {
- "test": "tap test/*.js --cov"
- },
- "version": "1.0.5"
-}
diff --git a/deps/npm/node_modules/fstream-npm/package.json b/deps/npm/node_modules/fstream-npm/package.json
deleted file mode 100644
index 67169501835444..00000000000000
--- a/deps/npm/node_modules/fstream-npm/package.json
+++ /dev/null
@@ -1,63 +0,0 @@
-{
- "_from": "fstream-npm@~1.2.1",
- "_id": "fstream-npm@1.2.1",
- "_inBundle": false,
- "_integrity": "sha512-iBHpm/LmD1qw0TlHMAqVd9rwdU6M+EHRUnPkXpRi5G/Hf0FIFH+oZFryodAU2MFNfGRh/CzhUFlMKV3pdeOTDw==",
- "_location": "/fstream-npm",
- "_phantomChildren": {
- "fstream": "1.0.11",
- "inherits": "2.0.3"
- },
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "fstream-npm@~1.2.1",
- "name": "fstream-npm",
- "escapedName": "fstream-npm",
- "rawSpec": "~1.2.1",
- "saveSpec": null,
- "fetchSpec": "~1.2.1"
- },
- "_requiredBy": [
- "#USER",
- "/"
- ],
- "_resolved": "https://registry.npmjs.org/fstream-npm/-/fstream-npm-1.2.1.tgz",
- "_shasum": "08c4a452f789dcbac4c89a4563c902b2c862fd5b",
- "_spec": "fstream-npm@~1.2.1",
- "_where": "/Users/rebecca/code/npm",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/npm/fstream-npm/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "fstream-ignore": "^1.0.0",
- "inherits": "2"
- },
- "deprecated": false,
- "description": "fstream class for creating npm packages",
- "devDependencies": {
- "graceful-fs": "^4.1.2",
- "mkdirp": "^0.5.1",
- "rimraf": "^2.4.2",
- "standard": "^4.3.1",
- "tap": "^1.3.2"
- },
- "homepage": "https://github.com/npm/fstream-npm#readme",
- "license": "ISC",
- "main": "./fstream-npm.js",
- "name": "fstream-npm",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/fstream-npm.git"
- },
- "scripts": {
- "test": "standard && tap test/*.js"
- },
- "version": "1.2.1"
-}
diff --git a/deps/npm/node_modules/fstream-npm/test/ignores.js b/deps/npm/node_modules/fstream-npm/test/ignores.js
deleted file mode 100644
index ac94251f72caa2..00000000000000
--- a/deps/npm/node_modules/fstream-npm/test/ignores.js
+++ /dev/null
@@ -1,132 +0,0 @@
-var fs = require('graceful-fs')
-var join = require('path').join
-
-var mkdirp = require('mkdirp')
-var rimraf = require('rimraf')
-var test = require('tap').test
-
-var Packer = require('..')
-
-var pkg = join(__dirname, 'test-package')
-
-var elfJS = function () {/*
-module.exports = function () {
- console.log("i'm a elf")
-}
-*/}.toString().split('\n').slice(1, -1).join()
-
-var json = {
- 'name': 'test-package',
- 'version': '3.1.4',
- 'main': 'elf.js'
-}
-
-test('setup', function (t) {
- setup()
- t.end()
-})
-
-var included = [
- 'package.json',
- 'elf.js',
- join('deps', 'foo', 'config', 'config.gypi')
-]
-
-test('follows npm package ignoring rules', function (t) {
- var subject = new Packer({ path: pkg, type: 'Directory', isDirectory: true })
- var filenames = []
- subject.on('entry', function (entry) {
- t.equal(entry.type, 'File', 'only files in this package')
-
- // include relative path in filename
- var filename = entry._path.slice(entry.root._path.length + 1)
-
- filenames.push(filename)
- })
- // need to do this so fstream doesn't explode when files are removed from
- // under it
- subject.on('end', function () {
- // ensure we get *exactly* the results we expect by comparing in both
- // directions
- filenames.forEach(function (filename) {
- t.ok(
- included.indexOf(filename) > -1,
- filename + ' is included'
- )
- })
- included.forEach(function (filename) {
- t.ok(
- filenames.indexOf(filename) > -1,
- filename + ' is not included'
- )
- })
- t.end()
- })
-})
-
-test('cleanup', function (t) {
- // rimraf.sync chokes here for some reason
- rimraf(pkg, function () { t.end() })
-})
-
-function setup () {
- rimraf.sync(pkg)
- mkdirp.sync(pkg)
- fs.writeFileSync(
- join(pkg, 'package.json'),
- JSON.stringify(json, null, 2)
- )
-
- fs.writeFileSync(
- join(pkg, 'elf.js'),
- elfJS
- )
-
- fs.writeFileSync(
- join(pkg, '.npmrc'),
- 'packaged=false'
- )
-
- fs.writeFileSync(
- join(pkg, '.npmignore'),
- '.npmignore\ndummy\npackage.json'
- )
-
- fs.writeFileSync(
- join(pkg, 'dummy'),
- 'foo'
- )
-
- var buildDir = join(pkg, 'build')
- mkdirp.sync(buildDir)
- fs.writeFileSync(
- join(buildDir, 'config.gypi'),
- "i_wont_be_included_by_fstream='with any luck'"
- )
-
- var depscfg = join(pkg, 'deps', 'foo', 'config')
- mkdirp.sync(depscfg)
- fs.writeFileSync(
- join(depscfg, 'config.gypi'),
- "i_will_be_included_by_fstream='with any luck'"
- )
-
- fs.writeFileSync(
- join(buildDir, 'npm-debug.log'),
- '0 lol\n'
- )
-
- var gitDir = join(pkg, '.git')
- mkdirp.sync(gitDir)
- fs.writeFileSync(
- join(gitDir, 'gitstub'),
- "won't fool git, also won't be included by fstream"
- )
-
- var historyDir = join(pkg, 'node_modules/history')
- mkdirp.sync(historyDir)
- fs.writeFileSync(
- join(historyDir, 'README.md'),
- "please don't include me"
- )
-}
diff --git a/deps/npm/node_modules/fstream-npm/test/scoped.js b/deps/npm/node_modules/fstream-npm/test/scoped.js
deleted file mode 100644
index db0c3d4c062b5d..00000000000000
--- a/deps/npm/node_modules/fstream-npm/test/scoped.js
+++ /dev/null
@@ -1,99 +0,0 @@
-var fs = require('graceful-fs')
-var join = require('path').join
-
-var mkdirp = require('mkdirp')
-var rimraf = require('rimraf')
-var test = require('tap').test
-
-var Packer = require('..')
-
-var pkg = join(__dirname, 'test-package-scoped')
-
-var elfJS = function () {/*
-module.exports = function () {
- console.log("i'm a elf")
-}
-*/}.toString().split('\n').slice(1, -1).join()
-
-var json = {
- 'name': 'test-package-scoped',
- 'version': '3.1.4',
- 'main': 'elf.js',
- 'bundledDependencies': [
- '@npmwombat/scoped'
- ]
-}
-
-test('setup', function (t) {
- setup()
- t.end()
-})
-
-var expected = [
- 'package.json',
- 'elf.js',
- join('node_modules', '@npmwombat', 'scoped', 'index.js'),
- join('node_modules', '@npmwombat', 'scoped', 'node_modules', 'example', 'index.js')
-]
-
-test('includes bundledDependencies', function (t) {
- var subject = new Packer({ path: pkg, type: 'Directory', isDirectory: true })
- var actual = []
- subject.on('entry', function (entry) {
- t.equal(entry.type, 'File', 'only files in this package')
- // include relative path in filename
- var filename = entry._path.slice(entry.root._path.length + 1)
- actual.push(filename)
- })
- // need to do this so fstream doesn't explode when files are removed from
- // under it
- subject.on('end', function () {
- // ensure we get *exactly* the results we expect by comparing in both
- // directions
- actual.forEach(function (filename) {
- t.ok(
- expected.indexOf(filename) > -1,
- filename + ' is included'
- )
- })
- expected.forEach(function (filename) {
- t.ok(
- actual.indexOf(filename) > -1,
- filename + ' is not included'
- )
- })
- t.end()
- })
-})
-
-test('cleanup', function (t) {
- // rimraf.sync chokes here for some reason
- rimraf(pkg, function () { t.end() })
-})
-
-function setup () {
- rimraf.sync(pkg)
- mkdirp.sync(pkg)
- fs.writeFileSync(
- join(pkg, 'package.json'),
- JSON.stringify(json, null, 2)
- )
-
- fs.writeFileSync(
- join(pkg, 'elf.js'),
- elfJS
- )
-
- var scopedDir = join(pkg, 'node_modules', '@npmwombat', 'scoped')
- mkdirp.sync(scopedDir)
- fs.writeFileSync(
- join(scopedDir, 'index.js'),
- "console.log('hello wombat')"
- )
- var scopedContent = join(scopedDir, 'node_modules', 'example')
- mkdirp.sync(scopedContent)
- fs.writeFileSync(
- join(scopedContent, 'index.js'),
- "console.log('hello example')"
- )
-}
diff --git a/deps/npm/node_modules/fstream/package.json b/deps/npm/node_modules/fstream/package.json
deleted file mode 100644
index 9a08117fe6b67d..00000000000000
--- a/deps/npm/node_modules/fstream/package.json
+++ /dev/null
@@ -1,67 +0,0 @@
-{
- "_from": "fstream@~1.0.11",
- "_id": "fstream@1.0.11",
- "_integrity": "sha1-XB+x8RdHcRTwYyoOtLcbPLD9MXE=",
- "_location": "/fstream",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "fstream@~1.0.11",
- "name": "fstream",
- "escapedName": "fstream",
- "rawSpec": "~1.0.11",
- "saveSpec": null,
- "fetchSpec": "~1.0.11"
- },
- "_requiredBy": [
- "/",
- "/fstream-npm/fstream-ignore",
- "/node-gyp",
- "/tar"
- ],
- "_resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.11.tgz",
- "_shasum": "5c1fb1f117477114f0632a0eb4b71b3cb0fd3171",
- "_shrinkwrap": null,
- "_spec": "fstream@~1.0.11",
- "_where": "/Users/zkat/Documents/code/npm",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bin": null,
- "bugs": {
- "url": "https://github.com/npm/fstream/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "graceful-fs": "^4.1.2",
- "inherits": "~2.0.0",
- "mkdirp": ">=0.5 0",
- "rimraf": "2"
- },
- "deprecated": false,
- "description": "Advanced file system stream things",
- "devDependencies": {
- "standard": "^4.0.0",
- "tap": "^1.2.0"
- },
- "engines": {
- "node": ">=0.6"
- },
- "homepage": "https://github.com/npm/fstream#readme",
- "license": "ISC",
- "main": "fstream.js",
- "name": "fstream",
- "optionalDependencies": {},
- "peerDependencies": {},
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/fstream.git"
- },
- "scripts": {
- "test": "standard && tap examples/*.js"
- },
- "version": "1.0.11"
-}
diff --git a/deps/npm/node_modules/libnpx/CHANGELOG.md b/deps/npm/node_modules/libnpx/CHANGELOG.md
index 19a89f7c47f392..c5de9f16c547f5 100644
--- a/deps/npm/node_modules/libnpx/CHANGELOG.md
+++ b/deps/npm/node_modules/libnpx/CHANGELOG.md
@@ -2,6 +2,108 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+
+# [9.6.0](https://github.com/zkat/npx/compare/v9.5.0...v9.6.0) (2017-08-17)
+
+
+### Features
+
+* **i18n:** add Arabic translation (#111) ([3c5b99a](https://github.com/zkat/npx/commit/3c5b99a))
+* **i18n:** add Dutch (#108) ([ed116fd](https://github.com/zkat/npx/commit/ed116fd))
+
+
+
+
+# [9.5.0](https://github.com/zkat/npx/compare/v9.4.1...v9.5.0) (2017-07-28)
+
+
+### Features
+
+* **i18n:** add Polish translations (#99) ([8442f59](https://github.com/zkat/npx/commit/8442f59))
+
+
+
+
+## [9.4.1](https://github.com/zkat/npx/compare/v9.4.0...v9.4.1) (2017-07-21)
+
+
+### Bug Fixes
+
+* **i18n:** fix filename for uk.json locale ([2c770e4](https://github.com/zkat/npx/commit/2c770e4))
+
+
+
+
+# [9.4.0](https://github.com/zkat/npx/compare/v9.3.2...v9.4.0) (2017-07-21)
+
+
+### Bug Fixes
+
+* **i18n:** minor fixes to ru locale (#92) ([f4d5051](https://github.com/zkat/npx/commit/f4d5051)), closes [#92](https://github.com/zkat/npx/issues/92)
+
+
+### Features
+
+* **i18n:** `no` locale fallback for Norwegian bokmål ⚠️ In case of weird setups ⚠️ (#91) ([74f0e4c](https://github.com/zkat/npx/commit/74f0e4c))
+* **i18n:** add Bahasa Indonesia locale (#95) ([80dceeb](https://github.com/zkat/npx/commit/80dceeb))
+* **i18n:** add serbian translation (#96) ([040de7a](https://github.com/zkat/npx/commit/040de7a))
+* **i18n:** add Ukrainian locale (#93) ([9a3ef33](https://github.com/zkat/npx/commit/9a3ef33))
+* **i18n:** Added Norwegian (bokmål and nynorsk) translations (#90) ([6c5c733](https://github.com/zkat/npx/commit/6c5c733))
+
+
+
+
+## [9.3.2](https://github.com/zkat/npx/compare/v9.3.1...v9.3.2) (2017-07-17)
+
+
+### Bug Fixes
+
+* **exec:** detect a wider range of shebang lines for node scripts (#89) ([1841b6f](https://github.com/zkat/npx/commit/1841b6f))
+* **windows:** escape spawn args because windows is picky (#87) ([314e5eb](https://github.com/zkat/npx/commit/314e5eb))
+* **windows:** get magic shim detection working on Windows (#88) ([255aeeb](https://github.com/zkat/npx/commit/255aeeb))
+
+
+
+
+## [9.3.1](https://github.com/zkat/npx/compare/v9.3.0...v9.3.1) (2017-07-17)
+
+
+### Bug Fixes
+
+* **deps:** update to npm@5.3.0 ([2b14de2](https://github.com/zkat/npx/commit/2b14de2))
+
+
+
+
+# [9.3.0](https://github.com/zkat/npx/compare/v9.2.3...v9.3.0) (2017-07-17)
+
+
+### Features
+
+* **i18n:** add Korean locale (#86) ([3655314](https://github.com/zkat/npx/commit/3655314))
+
+
+
+
+## [9.2.3](https://github.com/zkat/npx/compare/v9.2.2...v9.2.3) (2017-07-17)
+
+
+### Bug Fixes
+
+* **paths:** support npm/npx paths with spaces in them ([8f3b829](https://github.com/zkat/npx/commit/8f3b829))
+
+
+
+
+## [9.2.2](https://github.com/zkat/npx/compare/v9.2.1...v9.2.2) (2017-07-15)
+
+
+### Bug Fixes
+
+* **npm:** escape path to npm, too ([333d2ff](https://github.com/zkat/npx/commit/333d2ff))
+
+
+
## [9.2.1](https://github.com/zkat/npx/compare/v9.2.0...v9.2.1) (2017-07-14)
diff --git a/deps/npm/node_modules/libnpx/index.js b/deps/npm/node_modules/libnpx/index.js
index d42172e500c744..097d67cd02bdde 100644
--- a/deps/npm/node_modules/libnpx/index.js
+++ b/deps/npm/node_modules/libnpx/index.js
@@ -9,8 +9,6 @@ const parseArgs = require('./parse-args.js')
const path = require('path')
const which = promisify(require('which'))
-const PATH_SEP = process.platform === 'win32' ? ';' : ':'
-
module.exports = npx
module.exports.parseArgs = parseArgs
function npx (argv) {
@@ -41,7 +39,7 @@ function npx (argv) {
return localBinPath(process.cwd()).then(local => {
if (local) {
// Local project paths take priority. Go ahead and prepend it.
- process.env.PATH = `${local}${PATH_SEP}${process.env.PATH}`
+ process.env.PATH = `${local}${path.delimiter}${process.env.PATH}`
}
return Promise.all([
// Figuring out if a command exists, early on, lets us maybe
@@ -85,6 +83,9 @@ function npx (argv) {
argv.package.length === 1
) {
return promisify(fs.readdir)(results.bin).then(bins => {
+ if (process.platform === 'win32') {
+ bins = bins.filter(b => b !== 'etc' && b !== 'node_modules')
+ }
const cmd = new RegExp(`^${argv.command}(?:\\.cmd)?$`, 'i')
const matching = bins.find(b => b.match(cmd))
return path.resolve(results.bin, bins[matching] || bins[0])
@@ -128,7 +129,7 @@ function getEnv (opts) {
const args = ['run', 'env', '--parseable']
return findNodeScript(opts.npm, {isLocal: true}).then(npmPath => {
if (npmPath) {
- args.unshift(opts.npm)
+ args.unshift(child.escapeArg(opts.npm))
return process.argv[0]
} else {
return opts.npm
@@ -155,7 +156,7 @@ function ensurePackages (specs, opts) {
// This will make temp bins _higher priority_ than even local bins.
// This is intentional, since npx assumes that if you went through
// the trouble of doing `-p`, you're rather have that one. Right? ;)
- process.env.PATH = `${bins}${PATH_SEP}${process.env.PATH}`
+ process.env.PATH = `${bins}${path.delimiter}${process.env.PATH}`
if (!info) { info = {} }
info.prefix = prefix
info.bin = bins
@@ -194,7 +195,8 @@ function getNpmCache (opts) {
}
return findNodeScript(opts.npm, {isLocal: true}).then(npmPath => {
if (npmPath) {
- args.unshift(opts.npm)
+ // This one is NOT escaped as a path because it's handed to Node.
+ args.unshift(child.escapeArg(opts.npm))
return process.argv[0]
} else {
return opts.npm
@@ -220,7 +222,11 @@ function installPackages (specs, prefix, opts) {
const args = buildArgs(specs, prefix, opts)
return findNodeScript(opts.npm, {isLocal: true}).then(npmPath => {
if (npmPath) {
- args.unshift(opts.npm)
+ args.unshift(
+ process.platform === 'win32'
+ ? child.escapeArg(opts.npm)
+ : opts.npm
+ )
return process.argv[0]
} else {
return opts.npm
@@ -319,8 +325,7 @@ function findNodeScript (existing, opts) {
throw new Error(Y()`command not found: ${existing}`)
}
} else if (process.platform !== 'win32') {
- const line = '#!/usr/bin/env node\n'
- const bytecount = line.length
+ const bytecount = 400
const buf = Buffer.alloc(bytecount)
return promisify(fs.open)(existing, 'r').then(fd => {
return promisify(fs.read)(fd, buf, 0, bytecount, 0).then(() => {
@@ -329,8 +334,26 @@ function findNodeScript (existing, opts) {
return promisify(fs.close)(fd).then(() => { throw err })
})
}).then(() => {
- return buf.toString('utf8') === line && existing
+ const re = /#!\s*(?:\/usr\/bin\/env\s*node|\/usr\/local\/bin\/node|\/usr\/bin\/node)\s*\r?\n/i
+ return buf.toString('utf8').match(re) && existing
})
+ } else if (process.platform === 'win32') {
+ const buf = Buffer.alloc(1000)
+ return promisify(fs.open)(existing, 'r').then(fd => {
+ return promisify(fs.read)(fd, buf, 0, 1000, 0).then(() => {
+ return promisify(fs.close)(fd)
+ }, err => {
+ return promisify(fs.close)(fd).then(() => { throw err })
+ })
+ }).then(() => {
+ return buf.toString('utf8').trim()
+ }).then(str => {
+ const cmd = /"%~dp0\\node\.exe"\s+"%~dp0\\(.*)"\s+%\*/
+ const mingw = /"\$basedir\/node"\s+"\$basedir\/(.*)"\s+"\$@"/i
+ return str.match(cmd) || str.match(mingw)
+ }).then(match => {
+ return match && path.join(path.dirname(existing), match[1])
+ })
}
})
}
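Two details in this `index.js` diff are worth calling out. First, the hand-rolled `PATH_SEP` constant is replaced by Node's built-in `path.delimiter` (`';'` on Windows, `':'` elsewhere). Second, `findNodeScript` no longer requires the file to start with the exact string `#!/usr/bin/env node\n`; it reads up to 400 bytes and matches a regex, and on Windows it additionally parses the generated `.cmd`/MinGW shims to find the underlying script. A minimal sketch, not part of the patch, exercising that regex:

```js
// The shebang regex from the hunk above; it accepts env shebangs,
// direct interpreter paths, extra whitespace, and CRLF line endings.
const re = /#!\s*(?:\/usr\/bin\/env\s*node|\/usr\/local\/bin\/node|\/usr\/bin\/node)\s*\r?\n/i

const samples = [
  '#!/usr/bin/env node\n',    // classic env shebang
  '#!/usr/bin/node\r\n',      // direct path, Windows line ending
  '#! /usr/local/bin/node\n', // whitespace after #!
  '#!/usr/bin/env python\n'   // not a node script
]

for (const line of samples) {
  console.log(JSON.stringify(line), '->', re.test(line)) // true, true, true, false
}
```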
diff --git a/deps/npm/node_modules/libnpx/libnpx.1 b/deps/npm/node_modules/libnpx/libnpx.1
index 7d9b273ef7631f..b8010b632a74de 100644
--- a/deps/npm/node_modules/libnpx/libnpx.1
+++ b/deps/npm/node_modules/libnpx/libnpx.1
@@ -1,4 +1,4 @@
-.TH "NPX" "1" "July 2017" "libnpx@9.2.0" "User Commands"
+.TH "NPX" "1" "August 2017" "libnpx@9.5.0" "User Commands"
.SH "NAME"
\fBnpx\fR \- execute npm package binaries
.SH SYNOPSIS
diff --git a/deps/npm/node_modules/libnpx/locales/ar.json b/deps/npm/node_modules/libnpx/locales/ar.json
new file mode 100644
index 00000000000000..8ca0fcc66e9bd5
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/locales/ar.json
@@ -0,0 +1,29 @@
+{
+ "Execute binaries from npm packages.\n%s": ".npm الثنائية من حزم node تنفيذ ملفات \n %s ",
+ "Package to be installed.": ".الحزمة التي سيتم تثبيتها",
+ "Location of the npm cache.": ".npm موقع الذاكرة المخبأة ل",
+ "Skip installation if a package is missing.": "تخطي التثبيت في حال وجود حزمة مفقودة.",
+ "Path to user npmrc.": "مسار المستخدم إلى npmrc.",
+ "Execute string as if inside `npm run-script`.": ".`npm run-script` نص الامر التنفيذي كما في",
+ "Shell to execute the command with, if any.": "واجهة تنفيذ الأوامر المرغوب استخدامها في حال وجودها.",
+ "Generate shell code to use npx as the \"command not found\" fallback.": ".npx الامرالمتولد لاستدعاء كحل بديل في حالة \"لم يتم العثور على الامر\" من قبل ",
+ "Ignores existing binaries in $PATH, or in the local project. This forces npx to do a temporary install and use the latest version.": " .npx سيتم قسرياً التثبيت بشكل مؤقت للاصدار الاحدث من قبل .$PATH تجاهل الملفات التنفيذية الثنائية الحالية في المسار ",
+ "npm binary to use for internal operations.": ".التنفيذي الثنائي الذي سيتم استعماله للعمليات الداخلية npm ملف",
+ "For the full documentation, see the manual page for npx(1).": ".npx(1) للوصول للمستندات المساعدة الكاملة, ابحث في صفحة الدليل عن",
+ "Unable to guess a binary name from %s. Please use --package.": ". --package الرجاء استعمال .%s لم يتمكن تخمين اسم التطبيق الثنائي من ",
+ "\nERROR: You must supply a command.\n": "\n.خطأ : يتوجب على المستخدم ادخال امر\n",
+ "Command failed: %s %s": "%s %s :فشل الامر ",
+ "Install for %s failed with code %s": "%s بالكود %s تم فشل تثبيت",
+ "%s not found. Trying with npx...": "...npx سيتم اعادة المحاولة باستعمال .%s لم يتم العثور على",
+ "command not found: %s": "%s :لم يتم العثور على الأمر",
+ "options": "خيارات",
+ "command": "أمر",
+ "version": "الإصدار",
+ "command-arg": "بارامتر الأمر",
+ "command-string": "نص الأمر",
+ "shell": "واجهة تنفيذ الأوامر",
+ "package": "الحزمة",
+ "npx: installed %s in %ss": "%ss في %s تم تثبيت ",
+ "Suppress output from npx itself. Subcommands will not be affected.": "الاوامر الفرعية لن تتاثر بالتعديل.npx إخفاءالخرج من",
+ "Extra node argument when calling a node binary.": ".node البارامتر الاضافي عند استدعاء"
+}
diff --git a/deps/npm/node_modules/libnpx/locales/id.json b/deps/npm/node_modules/libnpx/locales/id.json
new file mode 100644
index 00000000000000..1e3e29fa5b1476
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/locales/id.json
@@ -0,0 +1,29 @@
+{
+ "Execute binaries from npm packages.\n%s": "Jalankan berkas-berkas binari darpi paket-paket npm.\n%s",
+ "Package to be installed.": "Paket yang akan dipasang.",
+ "Location of the npm cache.": "Lokasi dari cache npm.",
+ "Skip installation if a package is missing.": "Lewati pemasangan jika paket tidak ditemukan.",
+ "Path to user npmrc.": "Jalur ke npmrc milik pengguna.",
+ "Execute string as if inside `npm run-script`.": "Jalankan string seperti ada di dalam `npm run-script`.",
+ "Shell to execute the command with, if any.": "Shell untuk menjalankan sebuah perintah, jika ada.",
+ "Generate shell code to use npx as the \"command not found\" fallback.": "Membangkitkan kode shell untuk menggunakan npx sebagai cadangan dari \"perintah tidak ditemukan\" .",
+ "Ignores existing binaries in $PATH, or in the local project. This forces npx to do a temporary install and use the latest version.": "Jangan hiraukan berkas-berkas binari yang ada di $PATH, atau yang ada di proyek lokal. Ini memaksa npx untuk melakukan pemasangan sementara dan menggunakan versi yang terakhir.",
+ "npm binary to use for internal operations.": "Binari npm yang digunakan untuk operasi internal.",
+ "For the full documentation, see the manual page for npx(1).": "Untuk dokumentasi lengkap, mohon baca laman manual untuk npx(1).",
+ "Unable to guess a binary name from %s. Please use --package.": "Tidak dapat menebak nama binari dari %s. Mohon gunakan --package.",
+ "\nERROR: You must supply a command.\n": "\nGALAT: Anda harus menyediakan sebuah perintah.\n",
+ "Command failed: %s %s": "Perintah tidak berhasil: %s %s",
+ "Install for %s failed with code %s": "Pemasangan untuk %s tidak berhasil dengan kode %s",
+ "%s not found. Trying with npx...": "%s tidak ditemukan. Mencoba dengan npx...",
+ "command not found: %s": "Perintah tidak ditemukan: %s",
+ "options": "opsi-opsi",
+ "command": "perintah",
+ "version": "versi",
+ "command-arg": "arg-perintah",
+ "command-string": "string-perintah",
+ "shell": "shell",
+ "package": "paket",
+ "npx: installed %s in %ss": "npx: %s terpasang di %ss",
+ "Suppress output from npx itself. Subcommands will not be affected.": "Hilangkan keluaran dari npx itu sendiri. Sub-sub perintah tidak akan terpengaruh.",
+ "Extra node argument when calling a node binary.": "Argumen node ekstra ketika memanggil binari node."
+}
\ No newline at end of file
diff --git a/deps/npm/node_modules/libnpx/locales/ko.json b/deps/npm/node_modules/libnpx/locales/ko.json
new file mode 100644
index 00000000000000..790537a29af2dc
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/locales/ko.json
@@ -0,0 +1,29 @@
+{
+ "Execute binaries from npm packages.\n%s": "npm 패키지에서 바이너리를 실행합니다.\n%s",
+ "Package to be installed.": "패키지가 설치되었습니다.",
+ "Location of the npm cache.": "npm 캐시의 위치.",
+ "Skip installation if a package is missing.": "패키지가 없으면 설치를 건너뜁니다.",
+ "Path to user npmrc.": "사용자 npmrc의 경로.",
+ "Execute string as if inside `npm run-script`.": "문자열이 `npm run-script`안에 있는 것처럼 실행합니다.",
+ "Shell to execute the command with, if any.": "명령을 실행할 쉘(존재하는 경우).",
+ "Generate shell code to use npx as the \"command not found\" fallback.": "\"명령을 찾을 수 없습니다\"의 대안으로 npx가 사용하도록 쉘 코드를 생성합니다.",
+ "Ignores existing binaries in $PATH, or in the local project. This forces npx to do a temporary install and use the latest version.": "$PATH나 로컬 프로젝트에 있는 바이너리를 무시합니다. 이는 npx가 최신 버전을 임시로 설치해서 사용하도록 강제합니다.",
+ "npm binary to use for internal operations.": "내부 작업에 사용할 npm 바이너리.",
+ "For the full documentation, see the manual page for npx(1).": "전체 문서는 npx(1) 매뉴얼 페이지를 보세요.",
+ "Unable to guess a binary name from %s. Please use --package.": "%s 에서 바이너리 이름을 추측할 수 없습니다. --package 를 사용해 주세요.",
+ "\nERROR: You must supply a command.\n": "\nERROR: 명령을 제공해야 합니다.\n",
+ "Command failed: %s %s": "명령이 실패했습니다: %s %s",
+ "Install for %s failed with code %s": "%s 설치가 %s 코드로 실패했습니다",
+ "%s not found. Trying with npx...": "%s 을 찾을 수 없습니다. npx로 시도해 보세요...",
+ "command not found: %s": "명령을 찾을 수 없습니다: %s",
+ "options": "옵션",
+ "command": "명령",
+ "version": "버전",
+ "command-arg": "명령-인자",
+ "command-string": "명령-문자열",
+ "shell": "쉘",
+ "package": "패키지",
+ "npx: installed %s in %ss": "%ss 에 %s 를 설치했습니다",
+ "Suppress output from npx itself. Subcommands will not be affected.": "npx의 출력을 감춥니다. 하위 명령은 영향을 받지 않습니다.",
+ "Extra node argument when calling a node binary.": "node 바이너리를 호출할 때 사용하는 추가 node 인자"
+}
diff --git a/deps/npm/node_modules/libnpx/locales/nb.json b/deps/npm/node_modules/libnpx/locales/nb.json
new file mode 100644
index 00000000000000..5b3fa42788d3bf
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/locales/nb.json
@@ -0,0 +1,29 @@
+{
+ "Execute binaries from npm packages.\n%s": "Kjør binærfiler fra npm-pakker.\n%s",
+ "Package to be installed.": "Pakken som skal installeres.",
+ "Location of the npm cache.": "Hvor npm-cachen er.",
+ "Skip installation if a package is missing.": "La være å installere dersom pakken mangler.",
+ "Path to user npmrc.": "Sti til brukerens npmrc.",
+ "Execute string as if inside `npm run-script`.": "Kjør streng som om den var inni `npm run-script`.",
+ "Shell to execute the command with, if any.": "Skall å kjøre kommandoen med, hvis noe.",
+ "Generate shell code to use npx as the \"command not found\" fallback.": "Generer skallkode for å bruke npx som \"kommandoen finnes ikke\" fallback.",
+ "Ignores existing binaries in $PATH, or in the local project. This forces npx to do a temporary install and use the latest version.": "Ignorerer eksisterende binærfiler i $PATH eller i det lokale prosjektet. Dette tvinger npx til å installere siste versjon av pakken midlertidig.",
+ "npm binary to use for internal operations.": "npm-binærfil som skal brukes for interne operasjoner.",
+ "For the full documentation, see the manual page for npx(1).": "For hele dokumentasjonen, se brukermanualen for npx(1).",
+ "Unable to guess a binary name from %s. Please use --package.": "Klarer ikke å gjette en binærfils navn ut fra %s. Vennligst bruk --package.",
+ "\nERROR: You must supply a command.\n": "\nFEIL: Du må legge ved en kommando.\n",
+ "Command failed: %s %s": "Kommando feilet: %s %s",
+ "Install for %s failed with code %s": "Installasjon for %s feilet med kode %s",
+ "%s not found. Trying with npx...": "Kunne ikke finne%s. Prøver med npx...",
+ "command not found: %s": "kommando ikke funnet: %s",
+ "options": "innstillinger",
+ "command": "kommando",
+ "version": "versjon",
+ "command-arg": "kommando-argument",
+ "command-string": "kommando-streng",
+ "shell": "skall",
+ "package": "pakke",
+ "npx: installed %s in %ss": "npx: installerte %s på %ss",
+ "Suppress output from npx itself. Subcommands will not be affected.": "Skjul kommandoer fra npx. Sub-kommandoer vil ikke berøres.",
+ "Extra node argument when calling a node binary.": "Ekstra node-argument når en node-binærfil blir kalt."
+}
diff --git a/deps/npm/node_modules/libnpx/locales/nl.json b/deps/npm/node_modules/libnpx/locales/nl.json
new file mode 100644
index 00000000000000..31fe4078566148
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/locales/nl.json
@@ -0,0 +1,29 @@
+{
+ "Execute binaries from npm packages.\n%s": "Voer een programma van een npm package uit.\n%s",
+ "Package to be installed.": "De te installeren package.",
+ "Location of the npm cache.": "Plaats van de npm cache.",
+ "Skip installation if a package is missing.": "Sla installatie over als een package nog niet geïnstalleerd is.",
+ "Path to user npmrc.": "Pad naar je npmrc.",
+ "Execute string as if inside `npm run-script`.": "Voer de tekst uit, alsof het `npm run-script` was.",
+ "Shell to execute the command with, if any.": "De shell te gebruiken met dit commando, indien nodig.",
+ "Generate shell code to use npx as the \"command not found\" fallback.": "Genereer shell-code als vervanging bij \"command not found\".",
+ "Ignores existing binaries in $PATH, or in the local project. This forces npx to do a temporary install and use the latest version.": "Negeer bestaande programmas in $PATH, of in het lokale prject. Dit forceert npx om een tijdelijke installatie te doen en de laatste versie te gebruiken.",
+ "npm binary to use for internal operations.": "npm programma te gebuiken voor interne operaties.",
+ "For the full documentation, see the manual page for npx(1).": "Voor de volledige documentatie, raadpleeg de man pagina van npx(1).",
+ "Unable to guess a binary name from %s. Please use --package.": "Vond geen programmanaam voor %S. Probeer opnieuw met --package.",
+ "\nERROR: You must supply a command.\n": "\nFOUT: Je moet een commando ingeven.\n",
+ "Command failed: %s %s": "Commando mislukt: %s %s",
+ "Install for %s failed with code %s": "De installatie van %s is mislukt met code %s",
+ "%s not found. Trying with npx...": "%s werd niet gevonden. Probeert nu met npx…",
+ "command not found: %s": "programma werd niet gevonden in %s",
+ "options": "opties",
+ "command": "commando",
+ "version": "versie",
+ "command-arg": "commandoargumenten",
+ "command-string": "commandotekst",
+ "shell": "shell",
+ "package": "package",
+ "npx: installed %s in %ss": "npx: heeft %s in %ss geïnstalleerd",
+ "Suppress output from npx itself. Subcommands will not be affected.": "Geef geen uitvoer voor npx zelf. Heeft geen invoed op subcommando's",
+ "Extra node argument when calling a node binary.": "Extra argumenten voor node, wanner een node-programma gebruikt wordt"
+}
diff --git a/deps/npm/node_modules/libnpx/locales/nn.json b/deps/npm/node_modules/libnpx/locales/nn.json
new file mode 100644
index 00000000000000..6eef4268fd2493
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/locales/nn.json
@@ -0,0 +1,29 @@
+{
+ "Execute binaries from npm packages.\n%s": "Køyr binærfiler frå npm-pakkar.\n%s",
+ "Package to be installed.": "Pakken som skal bli installert.",
+ "Location of the npm cache.": "Kvar npm-mellomlagringa ligg.",
+ "Skip installation if a package is missing.": "La være å installere viss pakken manglar.",
+ "Path to user npmrc.": "Sti til brukaren sin npmrc-fil.",
+ "Execute string as if inside `npm run-script`.": "Køyr streng som om den var inni `npm run-script`.",
+ "Shell to execute the command with, if any.": "Skall å køyre kommandoen med, viss noe.",
+ "Generate shell code to use npx as the \"command not found\" fallback.": "Generer skallkode for å bruke npx som \"kommandoen finst ikkje\" fallback.",
+ "Ignores existing binaries in $PATH, or in the local project. This forces npx to do a temporary install and use the latest version.": "Ignorerer eksisterende binærfilar i $PATH eller i det lokale prosjektet. Dette tvingar npx til å installere siste versjon av pakken midlertidig.",
+ "npm binary to use for internal operations.": "npm-binærfil som skal brukes for interne operasjonar.",
+ "For the full documentation, see the manual page for npx(1).": "For heile dokumentasjonen, sjå brukarmanualen for npx(1).",
+ "Unable to guess a binary name from %s. Please use --package.": "Klarar ikkje å gjette ein binærfil sitt namn ut frå %s. Vennligst bruk --package.",
+ "\nERROR: You must supply a command.\n": "\nFEIL: Du må legge ved ein kommando.\n",
+ "Command failed: %s %s": "Kommando feilte: %s %s",
+ "Install for %s failed with code %s": "Installasjon for %s feilte med kode %s",
+ "%s not found. Trying with npx...": "Kunne ikkje finne%s. Prøver med npx...",
+ "command not found: %s": "kommando ikkje funnet: %s",
+ "options": "innstillinger",
+ "command": "kommando",
+ "version": "versjon",
+ "command-arg": "kommando-argument",
+ "command-string": "kommando-streng",
+ "shell": "skall",
+ "package": "pakke",
+ "npx: installed %s in %ss": "npx: installerte %s på %ss",
+ "Suppress output from npx itself. Subcommands will not be affected.": "Skjul kommandoer frå npx. Sub-kommandoer vil ikkje rørast.",
+ "Extra node argument when calling a node binary.": "Ekstra node-argument når ein node-binærfil blir kalt."
+ }
diff --git a/deps/npm/node_modules/libnpx/locales/no.json b/deps/npm/node_modules/libnpx/locales/no.json
new file mode 100644
index 00000000000000..5b3fa42788d3bf
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/locales/no.json
@@ -0,0 +1,29 @@
+{
+ "Execute binaries from npm packages.\n%s": "Kjør binærfiler fra npm-pakker.\n%s",
+ "Package to be installed.": "Pakken som skal installeres.",
+ "Location of the npm cache.": "Hvor npm-cachen er.",
+ "Skip installation if a package is missing.": "La være å installere dersom pakken mangler.",
+ "Path to user npmrc.": "Sti til brukerens npmrc.",
+ "Execute string as if inside `npm run-script`.": "Kjør streng som om den var inni `npm run-script`.",
+ "Shell to execute the command with, if any.": "Skall å kjøre kommandoen med, hvis noe.",
+ "Generate shell code to use npx as the \"command not found\" fallback.": "Generer skallkode for å bruke npx som \"kommandoen finnes ikke\" fallback.",
+ "Ignores existing binaries in $PATH, or in the local project. This forces npx to do a temporary install and use the latest version.": "Ignorerer eksisterende binærfiler i $PATH eller i det lokale prosjektet. Dette tvinger npx til å installere siste versjon av pakken midlertidig.",
+ "npm binary to use for internal operations.": "npm-binærfil som skal brukes for interne operasjoner.",
+ "For the full documentation, see the manual page for npx(1).": "For hele dokumentasjonen, se brukermanualen for npx(1).",
+ "Unable to guess a binary name from %s. Please use --package.": "Klarer ikke å gjette en binærfils navn ut fra %s. Vennligst bruk --package.",
+ "\nERROR: You must supply a command.\n": "\nFEIL: Du må legge ved en kommando.\n",
+ "Command failed: %s %s": "Kommando feilet: %s %s",
+ "Install for %s failed with code %s": "Installasjon for %s feilet med kode %s",
+ "%s not found. Trying with npx...": "Kunne ikke finne%s. Prøver med npx...",
+ "command not found: %s": "kommando ikke funnet: %s",
+ "options": "innstillinger",
+ "command": "kommando",
+ "version": "versjon",
+ "command-arg": "kommando-argument",
+ "command-string": "kommando-streng",
+ "shell": "skall",
+ "package": "pakke",
+ "npx: installed %s in %ss": "npx: installerte %s på %ss",
+ "Suppress output from npx itself. Subcommands will not be affected.": "Skjul kommandoer fra npx. Sub-kommandoer vil ikke berøres.",
+ "Extra node argument when calling a node binary.": "Ekstra node-argument når en node-binærfil blir kalt."
+}
diff --git a/deps/npm/node_modules/libnpx/locales/pl.json b/deps/npm/node_modules/libnpx/locales/pl.json
new file mode 100644
index 00000000000000..6cf93db525d2f6
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/locales/pl.json
@@ -0,0 +1,29 @@
+{
+ "Execute binaries from npm packages.\n%s": "Uruchamia pliki wykonywalne (binarki) z paczek npm.\n%s",
+ "Package to be installed.": "Paczka do zainstalowania.",
+ "Location of the npm cache.": "Lokalizacja cache npm.",
+ "Skip installation if a package is missing.": "Pomiń instalacje w przypadku braku paczki.",
+ "Path to user npmrc.": "Ścieżka do pliku npmrc.",
+ "Execute string as if inside `npm run-script`.": "Wykonaj polecenie jak gdyby znajdowało się w `npm run-script`.",
+ "Shell to execute the command with, if any.": "Shell do wykonania polecenia, jeśli potrzeba.",
+ "Generate shell code to use npx as the \"command not found\" fallback.": "Wynegeruj kod dla shell-a aby używać npx zamiast \"nie znaleziono polecenia\".",
+ "Ignores existing binaries in $PATH, or in the local project. This forces npx to do a temporary install and use the latest version.": "Ignoruje istniejące paczki w $PATH lub lokalnym projekcie. Wymusza na npx tymczasową instalację i użycie najnowszej wersji.",
+ "npm binary to use for internal operations.": "Ścieżka do binarki npm używanej przez npx.",
+ "For the full documentation, see the manual page for npx(1).": "Dla pełnej dokumentacji zobacz manual dla npx(1).",
+ "Unable to guess a binary name from %s. Please use --package.": "Nie udało się ustalić binarki dla %s. Proszę użyć --package.",
+ "\nERROR: You must supply a command.\n": "\nERROR: Musisz podać polecenie.\n",
+ "Command failed: %s %s": "Polecenie się nie powiodło: %s %s",
+ "Install for %s failed with code %s": "Instalacja %s nie udała się, kod błędu: %s",
+ "%s not found. Trying with npx...": "%s: nie znaleziono polecenia. Próba uruchomienia przy użyciu npx...",
+ "command not found: %s": "nie znaleziono polecenia: %s",
+ "options": "opcje",
+ "command": "polecenie",
+ "version": "wersja",
+ "command-arg": "argument polecenia",
+ "command-string": "tekst polecenia",
+ "shell": "shell",
+ "package": "paczka",
+ "npx: installed %s in %ss": "npx: zainstalowano %s w %ss",
+ "Suppress output from npx itself. Subcommands will not be affected.": "Wycisza wyjście z npx. Nie dotyczy podprocesów.",
+ "Extra node argument when calling a node binary.": "Dodatkowe argumenty przekazywane do node."
+}
\ No newline at end of file
diff --git a/deps/npm/node_modules/libnpx/locales/ru.json b/deps/npm/node_modules/libnpx/locales/ru.json
index 3f431373c8b08d..801aeb58bb7e20 100644
--- a/deps/npm/node_modules/libnpx/locales/ru.json
+++ b/deps/npm/node_modules/libnpx/locales/ru.json
@@ -3,11 +3,11 @@
"Package to be installed.": "Пакет, который будет установлен.",
"Location of the npm cache.": "Расположение npm кеша.",
"Skip installation if a package is missing.": "Пропустить установку, если пакет отсутствует.",
- "Path to user npmrc.": "Передать пользователя в nprc.",
- "Execute string as if inside `npm run-script`.": "Выпонить скрипт как внутри `npm run-script`.",
+ "Path to user npmrc.": "Передать пользователя в npmrc.",
+ "Execute string as if inside `npm run-script`.": "Выполнить скрипт как внутри `npm run-script`.",
"Shell to execute the command with, if any.": "Shell, чтобы выполнить команду, если необходимо.",
"Generate shell code to use npx as the \"command not found\" fallback.": "Создает код для shell, чтобы использовать npx при \"command not found\".",
- "Ignores existing binaries in $PATH, or in the local project. This forces npx to do a temporary install and use the latest version.": "Игнорирует все существующие скрипты в $PATH или локальном проекте. Что заставляет npx использовать временную установку и использовать последние версии",
+ "Ignores existing binaries in $PATH, or in the local project. This forces npx to do a temporary install and use the latest version.": "Игнорирует все существующие скрипты в $PATH или локальном проекте. Что заставляет npx использовать временную установку и использовать последние версии.",
"npm binary to use for internal operations.": "npm файл для использования во внутренних операциях.",
"For the full documentation, see the manual page for npx(1).": "Полная документация доступна в мануале npx(1).",
"Unable to guess a binary name from %s. Please use --package.": "Невозможно угадать команду из %s. Пожалуйста, используйте --package.",
@@ -25,5 +25,4 @@
"package": "пакет",
"npx: installed %s in %ss": "npx: установлен %s в %ss",
"Suppress output from npx itself. Subcommands will not be affected.": "Отключить вывод npx. Поведение подкоманд не будет изменено."
-
}
diff --git a/deps/npm/node_modules/libnpx/locales/sr.json b/deps/npm/node_modules/libnpx/locales/sr.json
new file mode 100644
index 00000000000000..467e380df61ccf
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/locales/sr.json
@@ -0,0 +1,29 @@
+{
+ "Execute binaries from npm packages.\n%s": "Izvršavam binarne pakete iz npm packages.\n%s",
+ "Package to be installed.": "Paket će biti instaliran.",
+ "Location of the npm cache.": "Lokacija npm keša.",
+ "Skip installation if a package is missing.": "Preskoci instalaciju ako paket nedostaje.",
+ "Path to user npmrc.": "Putanja do korisnikovog npmrc.",
+ "Execute string as if inside `npm run-script`.": "Izvrši string kao da se nalazi unutar `npm run-script`.",
+ "Shell to execute the command with, if any.": "Skripta izvršava komandu sa, ako postoji.",
+ "Generate shell code to use npx as the \"command not found\" fallback.": "Napravi kod u Terminalu koji će koristiti npx kao zamenu za \"komanda nije pronađena\".",
+ "Ignores existing binaries in $PATH, or in the local project. This forces npx to do a temporary install and use the latest version.": "Ignoriše postojeće binarne pakete u $PATH, ili u lokalnom projektu. Ovo će naterati npx da izvrši privremenu instalaciju i koristi najnoviju verziju.",
+ "npm binary to use for internal operations.": "npm binarni paket za upotrebu u lokalnim operacijama.",
+ "For the full documentation, see the manual page for npx(1).": "Za kompletnu dokumentaciju, pogledajte stranicu sa uputstvima za npx(1).",
+ "Unable to guess a binary name from %s. Please use --package.": "Ne možemo da pogodimo ime binarnog paketa iz %s. Molimo koristite --package.",
+ "\nERROR: You must supply a command.\n": "\nERROR: Morate uneti komandu.\n",
+ "Command failed: %s %s": "Komanda nije uspela: %s %s",
+ "Install for %s failed with code %s": "Instalacija za %s nije uspela sa kodom %s",
+ "%s not found. Trying with npx...": "%s nije pronadjen. Pokušavam sa npx...",
+ "command not found: %s": "komanda nije pronadjena: %s",
+ "options": "opcije",
+ "command": "komanda",
+ "version": "verzija",
+ "command-arg": "command-arg",
+ "command-string": "command-string",
+ "shell": "shell",
+ "package": "paket",
+ "npx: installed %s in %ss": "npx: instaliran %s u %ss",
+ "Suppress output from npx itself. Subcommands will not be affected.": "Zabranjen izlaz iz samog npx. Na podkomande neće biti uticaja..",
+ "Extra node argument when calling a node binary.": "Dodatni node argument kada pozivate node binarni."
+}
\ No newline at end of file
diff --git a/deps/npm/node_modules/libnpx/locales/uk.json b/deps/npm/node_modules/libnpx/locales/uk.json
new file mode 100644
index 00000000000000..729a0d696daa10
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/locales/uk.json
@@ -0,0 +1,29 @@
+{
+ "Execute binaries from npm packages.\n%s": "Виконуйте скрипти з npm пакетів.\n%s",
+ "Package to be installed.": "Пакет, який буде встановлено.",
+ "Location of the npm cache.": "Розташування npm кешу.",
+ "Skip installation if a package is missing.": "Пропустити встановлення, якщо пакет відсутній.",
+ "Path to user npmrc.": "Шлях до npmrc файлу користувача.",
+ "Execute string as if inside `npm run-script`.": "Виконати рядок наче в `npm run-script`.",
+ "Shell to execute the command with, if any.": "Shell для виконання команди, якщо є.",
+ "Generate shell code to use npx as the \"command not found\" fallback.": "Генерує shell скрипт для використання npx як фолбеку для \"command not found\".",
+ "Ignores existing binaries in $PATH, or in the local project. This forces npx to do a temporary install and use the latest version.": "Ігнорує існуючі бінарники в $PATH, чи в локальному проекті. Це змусить npx виконати тимчасове встановлення з використанням останньої версії.",
+ "npm binary to use for internal operations.": "npm файл для використання у внутрішніх операціях.",
+ "For the full documentation, see the manual page for npx(1).": "Повну документацію дивіться в мануалі npx(1).",
+ "Unable to guess a binary name from %s. Please use --package.": "Неможливо визначити ім'я бінарника з %s. Будь ласка, використовуйте --package.",
+ "\nERROR: You must supply a command.\n": "\nПОМИЛКА: Ви повинні вказати команду.\n",
+ "Command failed: %s %s": "Невдала команда: %s %s",
+ "Install for %s failed with code %s": "Невдале встановлення %s з кодом помилки %s",
+ "%s not found. Trying with npx...": "%s не знайдено. Спробуємо з npx...",
+ "command not found: %s": "команда не знайдена: %s",
+ "options": "опції",
+ "command": "команда",
+ "version": "версія",
+ "command-arg": "команда-аргумент",
+ "command-string": "команда-рядок",
+ "shell": "shell",
+ "package": "пакет",
+ "npx: installed %s in %ss": "npx: встановлено %s за %sс",
+ "Suppress output from npx itself. Subcommands will not be affected.": "Вимкнути вивід npx. Поведінка підкоманд не буде змінена.",
+ "Extra node argument when calling a node binary.": "Додатковий node аргумент під час виклику node бібліотеки."
+}
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/index.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/index.js
index 0219cfa73fba3d..74ba8ee2de8bcc 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/index.js
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/index.js
@@ -9,12 +9,17 @@ const _getStream = require('get-stream');
const pFinally = require('p-finally');
const onExit = require('signal-exit');
const errname = require('./lib/errname');
+const stdio = require('./lib/stdio');
const TEN_MEGABYTES = 1000 * 1000 * 10;
function handleArgs(cmd, args, opts) {
let parsed;
+ if (opts && opts.env && opts.extendEnv !== false) {
+ opts.env = Object.assign({}, process.env, opts.env);
+ }
+
if (opts && opts.__winShell === true) {
delete opts.__winShell;
parsed = {
@@ -32,19 +37,23 @@ function handleArgs(cmd, args, opts) {
maxBuffer: TEN_MEGABYTES,
stripEof: true,
preferLocal: true,
+ localDir: parsed.options.cwd || process.cwd(),
encoding: 'utf8',
reject: true,
cleanup: true
}, parsed.options);
+ opts.stdio = stdio(opts);
+
if (opts.preferLocal) {
- opts.env = npmRunPath.env(opts);
+ opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir}));
}
return {
cmd: parsed.command,
args: parsed.args,
- opts
+ opts,
+ parsed
};
}
@@ -153,7 +162,7 @@ module.exports = (cmd, args, opts) => {
timeoutId = setTimeout(() => {
timeoutId = null;
timedOut = true;
- spawned.kill(parsed.killSignal);
+ spawned.kill(parsed.opts.killSignal);
}, parsed.opts.timeout);
}
@@ -167,6 +176,13 @@ module.exports = (cmd, args, opts) => {
cleanupTimeout();
resolve({err});
});
+
+ if (spawned.stdin) {
+ spawned.stdin.on('error', err => {
+ cleanupTimeout();
+ resolve({err});
+ });
+ }
});
function destroy() {
@@ -198,7 +214,21 @@ module.exports = (cmd, args, opts) => {
if (err || code !== 0 || signal !== null) {
if (!err) {
- err = new Error(`Command failed: ${joinedCmd}\n${stderr}${stdout}`);
+ let output = '';
+
+ if (Array.isArray(parsed.opts.stdio)) {
+ if (parsed.opts.stdio[2] !== 'inherit') {
+ output += output.length > 0 ? stderr : `\n${stderr}`;
+ }
+
+ if (parsed.opts.stdio[1] !== 'inherit') {
+ output += `\n${stdout}`;
+ }
+ } else if (parsed.opts.stdio !== 'inherit') {
+ output = `\n${stderr}${stdout}`;
+ }
+
+ err = new Error(`Command failed: ${joinedCmd}${output}`);
err.code = code < 0 ? errname(code) : code;
}
@@ -233,7 +263,7 @@ module.exports = (cmd, args, opts) => {
};
}), destroy);
- crossSpawn._enoent.hookChildProcess(spawned, parsed);
+ crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed);
handleInput(spawned, parsed.opts);
@@ -264,6 +294,10 @@ module.exports.sync = (cmd, args, opts) => {
const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts);
+ if (result.error || result.status !== 0) {
+ throw (result.error || new Error(result.stderr === '' ? result.stdout : result.stderr));
+ }
+
result.stdout = handleOutput(parsed.opts, result.stdout);
result.stderr = handleOutput(parsed.opts, result.stderr);
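Easy to miss in the `handleArgs` hunk: with the new `extendEnv` handling, a caller-supplied `env` is merged over `process.env` instead of replacing it, unless `extendEnv: false` is passed explicitly. A standalone sketch of just that merging step (illustration only; execa does this internally):

```js
// Mirrors the env handling added at the top of handleArgs().
function mergeEnv (opts) {
  if (opts && opts.env && opts.extendEnv !== false) {
    return Object.assign({}, process.env, opts.env)
  }
  return opts && opts.env
}

// On a typical system PATH is inherited from process.env:
console.log('PATH' in mergeEnv({env: {FOO: '1'}}))         // true
console.log(mergeEnv({env: {FOO: '1'}, extendEnv: false})) // { FOO: '1' } only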
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/lib/errname.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/lib/errname.js
index a99d7500c258b5..328f3e35da0dc0 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/lib/errname.js
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/lib/errname.js
@@ -12,7 +12,7 @@ try {
uv = process.binding('uv');
if (typeof uv.errname !== 'function') {
- throw new Error('uv.errname is not a function');
+ throw new TypeError('uv.errname is not a function');
}
} catch (err) {
console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err);
@@ -33,5 +33,5 @@ function errname(uv, code) {
module.exports = code => errname(uv, code);
-// used for testing the fallback behavior
+// Used for testing the fallback behavior
module.exports.__test__ = errname;
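For context, this module translates numeric libuv error codes into names via `process.binding('uv').errname`, falling back gracefully when the binding is unavailable. On newer Node (>= 9.7) the same mapping is exposed publicly; shown here for reference only, not part of the patch:

```js
const util = require('util')

// Public equivalent of the uv.errname lookup used above:
console.log(util.getSystemErrorName(-2)) // 'ENOENT' on POSIX systems
```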
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/lib/stdio.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/lib/stdio.js
new file mode 100644
index 00000000000000..a82d46838ac9b9
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/lib/stdio.js
@@ -0,0 +1,41 @@
+'use strict';
+const alias = ['stdin', 'stdout', 'stderr'];
+
+const hasAlias = opts => alias.some(x => Boolean(opts[x]));
+
+module.exports = opts => {
+ if (!opts) {
+ return null;
+ }
+
+ if (opts.stdio && hasAlias(opts)) {
+ throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`);
+ }
+
+ if (typeof opts.stdio === 'string') {
+ return opts.stdio;
+ }
+
+ const stdio = opts.stdio || [];
+
+ if (!Array.isArray(stdio)) {
+ throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``);
+ }
+
+ const result = [];
+ const len = Math.max(stdio.length, alias.length);
+
+ for (let i = 0; i < len; i++) {
+ let value = null;
+
+ if (stdio[i] !== undefined) {
+ value = stdio[i];
+ } else if (opts[alias[i]] !== undefined) {
+ value = opts[alias[i]];
+ }
+
+ result[i] = value;
+ }
+
+ return result;
+};
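The new `lib/stdio.js` normalizes execa's `stdin`/`stdout`/`stderr` convenience aliases into the positional `stdio` value that `child_process` expects. A short usage sketch, assuming the file above is saved as `lib/stdio.js`:

```js
const stdio = require('./lib/stdio')

console.log(stdio({stdout: 'inherit'})) // [null, 'inherit', null]
console.log(stdio({stdio: 'inherit'}))  // 'inherit' (strings pass through)

// Mixing the stdio option with an alias is rejected:
try {
  stdio({stdio: ['pipe'], stderr: 'inherit'})
} catch (err) {
  console.log(err.message) // "It's not possible to provide `stdio` in combination with ..."
}
```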
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/license b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/license
index 654d0bfe943437..e7af2f77107d73 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/license
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/license
@@ -1,21 +1,9 @@
-The MIT License (MIT)
+MIT License
Copyright (c) Sindre Sorhus (sindresorhus.com)
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/CHANGELOG.md b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/CHANGELOG.md
new file mode 100644
index 00000000000000..f1298a82f7bc69
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/CHANGELOG.md
@@ -0,0 +1,6 @@
+## 5.0.0 - 2016-10-30
+
+- Add support for `options.shell`
+- Improve parsing of shebangs by using [`shebang-command`](https://github.com/kevva/shebang-command) module
+- Refactor some code to make it more clear
+- Update README caveats
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/README.md b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/README.md
index 18cc2b8bc8c8b9..dde730df1b13b1 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/README.md
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/README.md
@@ -32,8 +32,8 @@ Node has issues when using spawn on Windows:
- It ignores [PATHEXT](https://github.com/joyent/node/issues/2318)
- It does not support [shebangs](http://pt.wikipedia.org/wiki/Shebang)
+- No `options.shell` support on node < v6
- It does not allow you to run `del` or `dir`
-- It does not properly escape arguments with spaces or special characters
All these issues are handled correctly by `cross-spawn`.
There are some known modules, such as [win-spawn](https://github.com/ForbesLindesay/win-spawn), that try to solve this but they are either broken or provide faulty escaping of shell arguments.
@@ -43,7 +43,8 @@ There are some known modules, such as [win-spawn](https://github.com/ForbesLinde
Exactly the same way as node's [`spawn`](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options) or [`spawnSync`](https://nodejs.org/api/child_process.html#child_process_child_process_spawnsync_command_args_options), so it's a drop in replacement.
-```javascript
+
+```js
var spawn = require('cross-spawn');
// Spawn NPM asynchronously
@@ -53,12 +54,25 @@ var child = spawn('npm', ['list', '-g', '-depth', '0'], { stdio: 'inherit' });
var results = spawn.sync('npm', ['list', '-g', '-depth', '0'], { stdio: 'inherit' });
```
-## Caveat
-On Windows, cross-spawn will only spawn `cmd.exe` if necessary. If the extension
-of the executable is `.exe` or `.com`, it will spawn it directly. If you wish
-to override this behavior and *always* spawn a shell, pass the `{shell: true}`
-option.
+## Caveats
+
+#### `options.shell` as an alternative to `cross-spawn`
+
+Starting from node v6, `spawn` has a `shell` option that allows you to run commands from within a shell. This new option solves most of the problems that `cross-spawn` attempts to solve, but:
+
+- It's not supported in node < v6
+- It has no support for shebangs on Windows
+- You must manually escape the command and arguments, which is very error-prone, especially when passing user input
+
+If you are using the `shell` option to spawn a command in a cross-platform way, consider using `cross-spawn` instead. You have been warned.
+
+
+#### Shebangs
+
+While `cross-spawn` handles shebangs on Windows, its support is limited: for example, it doesn't handle arguments after the interpreter path, as in `#!/bin/bash -e`.
+
+Remember to always test your code on Windows!
## Tests
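The caveat added above is the core of the trade-off: node's native `shell` option hands you a single command string that you must escape yourself, while `cross-spawn` keeps the argument array and handles Windows escaping and shebangs for you. A hedged, POSIX-only illustration (`ls` does not exist on Windows):

```js
const cp = require('child_process')
const spawn = require('cross-spawn')

const userInput = 'my file.txt' // imagine this came from the user

// Native shell option (node >= 6): quoting is the caller's problem.
cp.spawn(`ls "${userInput}"`, {shell: true, stdio: 'inherit'})

// cross-spawn: arguments stay separate, no manual quoting needed.
spawn('ls', [userInput], {stdio: 'inherit'})
```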
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/enoent.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/enoent.js
index 74ff06e495950a..d0a193aecd92b7 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/enoent.js
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/enoent.js
@@ -1,7 +1,7 @@
'use strict';
var isWin = process.platform === 'win32';
-var resolveCommand = require('./resolveCommand');
+var resolveCommand = require('./util/resolveCommand');
var isNode10 = process.version.indexOf('v0.10.') === 0;
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/hasBrokenSpawn.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/hasBrokenSpawn.js
deleted file mode 100644
index e73f906b617792..00000000000000
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/hasBrokenSpawn.js
+++ /dev/null
@@ -1,11 +0,0 @@
-'use strict';
-
-module.exports = (function () {
- if (process.platform !== 'win32') {
- return false;
- }
- var nodeVer = process.version.substr(1).split('.').map(function (num) {
- return parseInt(num, 10);
- });
- return (nodeVer[0] === 0 && nodeVer[1] < 12);
-})();
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/parse.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/parse.js
index 77cbb83d2db792..10a013625be10a 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/parse.js
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/parse.js
@@ -1,90 +1,92 @@
'use strict';
-var fs = require('fs');
-var LRU = require('lru-cache');
-var resolveCommand = require('./resolveCommand');
-var hasBrokenSpawn = require('./hasBrokenSpawn');
+var resolveCommand = require('./util/resolveCommand');
+var hasEmptyArgumentBug = require('./util/hasEmptyArgumentBug');
+var escapeArgument = require('./util/escapeArgument');
+var escapeCommand = require('./util/escapeCommand');
+var readShebang = require('./util/readShebang');
var isWin = process.platform === 'win32';
-var shebangCache = new LRU({ max: 50, maxAge: 30 * 1000 }); // Cache just for 30sec
+var skipShellRegExp = /\.(?:com|exe)$/i;
-function readShebang(command) {
- var buffer;
- var fd;
- var match;
+// The `shell` option is supported in Node >= 6, and in the 4.x line starting from 4.8
+var supportsShellOption = parseInt(process.version.substr(1).split('.')[0], 10) >= 6 ||
+ parseInt(process.version.substr(1).split('.')[0], 10) === 4 && parseInt(process.version.substr(1).split('.')[1], 10) >= 8;
+
+function parseNonShell(parsed) {
var shebang;
+ var needsShell;
+ var applyQuotes;
- // Check if it is in the cache first
- if (shebangCache.has(command)) {
- return shebangCache.get(command);
+ if (!isWin) {
+ return parsed;
}
- // Read the first 150 bytes from the file
- buffer = new Buffer(150);
-
- try {
- fd = fs.openSync(command, 'r');
- fs.readSync(fd, buffer, 0, 150, 0);
- fs.closeSync(fd);
- } catch (e) { /* empty */ }
-
- // Check if it is a shebang
- match = buffer.toString().trim().match(/#!(.+)/i);
+ // Detect & add support for shebangs
+ parsed.file = resolveCommand(parsed.command);
+ parsed.file = parsed.file || resolveCommand(parsed.command, true);
+ shebang = parsed.file && readShebang(parsed.file);
- if (match) {
- shebang = match[1].replace(/\/usr\/bin\/env\s+/i, ''); // Remove /usr/bin/env
+ if (shebang) {
+ parsed.args.unshift(parsed.file);
+ parsed.command = shebang;
+ needsShell = hasEmptyArgumentBug || !skipShellRegExp.test(resolveCommand(shebang) || resolveCommand(shebang, true));
+ } else {
+ needsShell = hasEmptyArgumentBug || !skipShellRegExp.test(parsed.file);
}
- // Store the shebang in the cache
- shebangCache.set(command, shebang);
+ // If a shell is required, use cmd.exe and take care of escaping everything correctly
+ if (needsShell) {
+ // Escape command & arguments
+ applyQuotes = (parsed.command !== 'echo'); // Do not quote arguments for the special "echo" command
+ parsed.command = escapeCommand(parsed.command);
+ parsed.args = parsed.args.map(function (arg) {
+ return escapeArgument(arg, applyQuotes);
+ });
+
+ // Make use of cmd.exe
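+        // ("/d" disables AutoRun commands, "/s" preserves quoted strings, "/c" runs the command and exits)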
+ parsed.args = ['/d', '/s', '/c', '"' + parsed.command + (parsed.args.length ? ' ' + parsed.args.join(' ') : '') + '"'];
+ parsed.command = process.env.comspec || 'cmd.exe';
+ parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
+ }
- return shebang;
+ return parsed;
}
-function escapeArg(arg, quote) {
- // Convert to string
- arg = '' + arg;
+function parseShell(parsed) {
+ var shellCommand;
- // If we are not going to quote the argument,
- // escape shell metacharacters, including double and single quotes:
- if (!quote) {
- arg = arg.replace(/([\(\)%!\^<>&|;,"'\s])/g, '^$1');
- } else {
- // Sequence of backslashes followed by a double quote:
- // double up all the backslashes and escape the double quote
- arg = arg.replace(/(\\*)"/g, '$1$1\\"');
+ // If node supports the shell option, there's no need to mimic its behavior
+ if (supportsShellOption) {
+ return parsed;
+ }
- // Sequence of backslashes followed by the end of the string
- // (which will become a double quote later):
- // double up all the backslashes
- arg = arg.replace(/(\\*)$/, '$1$1');
+ // Mimic node shell option, see: https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335
+ shellCommand = [parsed.command].concat(parsed.args).join(' ');
- // All other backslashes occur literally
+ if (isWin) {
+ parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe';
+ parsed.args = ['/d', '/s', '/c', '"' + shellCommand + '"'];
+ parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
+ } else {
+ if (typeof parsed.options.shell === 'string') {
+ parsed.command = parsed.options.shell;
+ } else if (process.platform === 'android') {
+ parsed.command = '/system/bin/sh';
+ } else {
+ parsed.command = '/bin/sh';
+ }
- // Quote the whole thing:
- arg = '"' + arg + '"';
+ parsed.args = ['-c', shellCommand];
}
- return arg;
+ return parsed;
}
-function escapeCommand(command) {
- // Do not escape if this command is not dangerous..
- // We do this so that commands like "echo" or "ifconfig" work
- // Quoting them, will make them unaccessible
- return /^[a-z0-9_-]+$/i.test(command) ? command : escapeArg(command, true);
-}
-
-function requiresShell(command) {
- return !/\.(?:com|exe)$/i.test(command);
-}
+// ------------------------------------------------
function parse(command, args, options) {
- var shebang;
- var applyQuotes;
- var file;
- var original;
- var shell;
+ var parsed;
// Normalize arguments, similar to nodejs
if (args && !Array.isArray(args)) {
@@ -94,47 +96,18 @@ function parse(command, args, options) {
args = args ? args.slice(0) : []; // Clone array to avoid changing the original
options = options || {};
- original = command;
-
- if (isWin) {
- // Detect & add support for shebangs
- file = resolveCommand(command);
- file = file || resolveCommand(command, true);
- shebang = file && readShebang(file);
- shell = options.shell || hasBrokenSpawn;
-
- if (shebang) {
- args.unshift(file);
- command = shebang;
- shell = shell || requiresShell(resolveCommand(shebang) || resolveCommand(shebang, true));
- } else {
- shell = shell || requiresShell(file);
- }
- if (shell) {
- // Escape command & arguments
- applyQuotes = (command !== 'echo'); // Do not quote arguments for the special "echo" command
- command = escapeCommand(command);
- args = args.map(function (arg) {
- return escapeArg(arg, applyQuotes);
- });
-
- // Use cmd.exe
- args = ['/s', '/c', '"' + command + (args.length ? ' ' + args.join(' ') : '') + '"'];
- command = process.env.comspec || 'cmd.exe';
-
- // Tell node's spawn that the arguments are already escaped
- options.windowsVerbatimArguments = true;
- }
- }
-
- return {
+ // Build our parsed object
+ parsed = {
command: command,
args: args,
options: options,
- file: file,
- original: original,
+ file: undefined,
+ original: command,
};
+
+ // Delegate further parsing to shell or non-shell
+ return options.shell ? parseShell(parsed) : parseNonShell(parsed);
}
module.exports = parse;
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/util/escapeArgument.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/util/escapeArgument.js
new file mode 100644
index 00000000000000..367263f6699201
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/util/escapeArgument.js
@@ -0,0 +1,30 @@
+'use strict';
+
+function escapeArgument(arg, quote) {
+ // Convert to string
+ arg = '' + arg;
+
+ // If we are not going to quote the argument,
+ // escape shell metacharacters, including double and single quotes:
+ if (!quote) {
+ arg = arg.replace(/([()%!^<>&|;,"'\s])/g, '^$1');
+ } else {
+ // Sequence of backslashes followed by a double quote:
+ // double up all the backslashes and escape the double quote
+ arg = arg.replace(/(\\*)"/g, '$1$1\\"');
+
+ // Sequence of backslashes followed by the end of the string
+ // (which will become a double quote later):
+ // double up all the backslashes
+ arg = arg.replace(/(\\*)$/, '$1$1');
+
+ // All other backslashes occur literally
+
+ // Quote the whole thing:
+ arg = '"' + arg + '"';
+ }
+
+ return arg;
+}
+
+module.exports = escapeArgument;
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/util/escapeCommand.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/util/escapeCommand.js
new file mode 100644
index 00000000000000..d9c25b26566bc6
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/util/escapeCommand.js
@@ -0,0 +1,12 @@
+'use strict';
+
+var escapeArgument = require('./escapeArgument');
+
+function escapeCommand(command) {
+    // Do not escape if this command is not dangerous.
+    // We do this so that commands like "echo" or "ifconfig" work;
+    // quoting them would make them inaccessible
+ return /^[a-z0-9_-]+$/i.test(command) ? command : escapeArgument(command, true);
+}
+
+module.exports = escapeCommand;
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/util/hasEmptyArgumentBug.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/util/hasEmptyArgumentBug.js
new file mode 100644
index 00000000000000..9f2eba63555360
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/util/hasEmptyArgumentBug.js
@@ -0,0 +1,18 @@
+'use strict';
+
+// See: https://github.com/IndigoUnited/node-cross-spawn/pull/34#issuecomment-221623455
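+// Node < 0.12 on Windows mishandles empty string arguments passed to spawn,
+// so cross-spawn falls back to running such commands through cmd.exe there.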
+function hasEmptyArgumentBug() {
+ var nodeVer;
+
+ if (process.platform !== 'win32') {
+ return false;
+ }
+
+ nodeVer = process.version.substr(1).split('.').map(function (num) {
+ return parseInt(num, 10);
+ });
+
+ return (nodeVer[0] === 0 && nodeVer[1] < 12);
+}
+
+module.exports = hasEmptyArgumentBug();
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/util/readShebang.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/util/readShebang.js
new file mode 100644
index 00000000000000..2cf3541c99b88a
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/util/readShebang.js
@@ -0,0 +1,37 @@
+'use strict';
+
+var fs = require('fs');
+var LRU = require('lru-cache');
+var shebangCommand = require('shebang-command');
+
+var shebangCache = new LRU({ max: 50, maxAge: 30 * 1000 }); // Cache just for 30sec
+
+function readShebang(command) {
+ var buffer;
+ var fd;
+ var shebang;
+
+ // Check if it is in the cache first
+ if (shebangCache.has(command)) {
+ return shebangCache.get(command);
+ }
+
+ // Read the first 150 bytes from the file
+ buffer = new Buffer(150);
+
+ try {
+ fd = fs.openSync(command, 'r');
+ fs.readSync(fd, buffer, 0, 150, 0);
+ fs.closeSync(fd);
+ } catch (e) { /* empty */ }
+
+ // Attempt to extract shebang (null is returned if not a shebang)
+ shebang = shebangCommand(buffer.toString());
+
+ // Store the shebang in the cache
+ shebangCache.set(command, shebang);
+
+ return shebang;
+}
+
+module.exports = readShebang;
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/resolveCommand.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/util/resolveCommand.js
similarity index 100%
rename from deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/resolveCommand.js
rename to deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/lib/util/resolveCommand.js
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/index.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/index.js
new file mode 100644
index 00000000000000..2de70b0742665d
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/index.js
@@ -0,0 +1,19 @@
+'use strict';
+var shebangRegex = require('shebang-regex');
+
+module.exports = function (str) {
+ var match = str.match(shebangRegex);
+
+ if (!match) {
+ return null;
+ }
+
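+	// Strip the "#!" prefix and split the interpreter path from its first argument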
+ var arr = match[0].replace(/#! ?/, '').split(' ');
+ var bin = arr[0].split('/').pop();
+ var arg = arr[1];
+
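+	// For "#!/usr/bin/env node" return the argument ("node"); otherwise
+	// return the binary name plus its first argument, e.g. "bash -e"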
+ return (bin === 'env' ?
+ arg :
+ bin + (arg ? ' ' + arg : '')
+ );
+};
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/license b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/license
new file mode 100644
index 00000000000000..0f8cf79c3c93ad
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/license
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Kevin Martensson (github.com/kevva)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/node_modules/shebang-regex/index.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/node_modules/shebang-regex/index.js
new file mode 100644
index 00000000000000..d052d2e05e60c7
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/node_modules/shebang-regex/index.js
@@ -0,0 +1,2 @@
+'use strict';
+module.exports = /^#!.*/;
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/object-assign/license b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/node_modules/shebang-regex/license
similarity index 100%
rename from deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/object-assign/license
rename to deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/node_modules/shebang-regex/license
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/node_modules/shebang-regex/package.json b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/node_modules/shebang-regex/package.json
new file mode 100644
index 00000000000000..f462aa5893bf71
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/node_modules/shebang-regex/package.json
@@ -0,0 +1,64 @@
+{
+ "_from": "shebang-regex@^1.0.0",
+ "_id": "shebang-regex@1.0.0",
+ "_inBundle": false,
+ "_integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=",
+ "_location": "/libnpx/yargs/os-locale/execa/cross-spawn/shebang-command/shebang-regex",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "shebang-regex@^1.0.0",
+ "name": "shebang-regex",
+ "escapedName": "shebang-regex",
+ "rawSpec": "^1.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.0"
+ },
+ "_requiredBy": [
+ "/libnpx/yargs/os-locale/execa/cross-spawn/shebang-command"
+ ],
+ "_resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz",
+ "_shasum": "da42f49740c0b42db2ca9728571cb190c98efea3",
+ "_spec": "shebang-regex@^1.0.0",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "bugs": {
+ "url": "https://github.com/sindresorhus/shebang-regex/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "Regular expression for matching a shebang",
+ "devDependencies": {
+ "ava": "0.0.4"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/sindresorhus/shebang-regex#readme",
+ "keywords": [
+ "re",
+ "regex",
+ "regexp",
+ "shebang",
+ "match",
+ "test"
+ ],
+ "license": "MIT",
+ "name": "shebang-regex",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sindresorhus/shebang-regex.git"
+ },
+ "scripts": {
+ "test": "node test.js"
+ },
+ "version": "1.0.0"
+}
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/node_modules/shebang-regex/readme.md b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/node_modules/shebang-regex/readme.md
new file mode 100644
index 00000000000000..ef75e51b5bf0fe
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/node_modules/shebang-regex/readme.md
@@ -0,0 +1,29 @@
+# shebang-regex [![Build Status](https://travis-ci.org/sindresorhus/shebang-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/shebang-regex)
+
+> Regular expression for matching a [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix))
+
+
+## Install
+
+```
+$ npm install --save shebang-regex
+```
+
+
+## Usage
+
+```js
+var shebangRegex = require('shebang-regex');
+var str = '#!/usr/bin/env node\nconsole.log("unicorns");';
+
+shebangRegex.test(str);
+//=> true
+
+shebangRegex.exec(str)[0];
+//=> '#!/usr/bin/env node'
+```
+
+
+## License
+
+MIT © [Sindre Sorhus](http://sindresorhus.com)
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/package.json b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/package.json
new file mode 100644
index 00000000000000..ab0bf5ea1c2d31
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/package.json
@@ -0,0 +1,71 @@
+{
+ "_from": "shebang-command@^1.2.0",
+ "_id": "shebang-command@1.2.0",
+ "_inBundle": false,
+ "_integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=",
+ "_location": "/libnpx/yargs/os-locale/execa/cross-spawn/shebang-command",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "shebang-command@^1.2.0",
+ "name": "shebang-command",
+ "escapedName": "shebang-command",
+ "rawSpec": "^1.2.0",
+ "saveSpec": null,
+ "fetchSpec": "^1.2.0"
+ },
+ "_requiredBy": [
+ "/libnpx/yargs/os-locale/execa/cross-spawn"
+ ],
+ "_resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz",
+ "_shasum": "44aac65b695b03398968c39f363fee5deafdf1ea",
+ "_spec": "shebang-command@^1.2.0",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn",
+ "author": {
+ "name": "Kevin Martensson",
+ "email": "kevinmartensson@gmail.com",
+ "url": "github.com/kevva"
+ },
+ "bugs": {
+ "url": "https://github.com/kevva/shebang-command/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "shebang-regex": "^1.0.0"
+ },
+ "deprecated": false,
+ "description": "Get the command from a shebang",
+ "devDependencies": {
+ "ava": "*",
+ "xo": "*"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/kevva/shebang-command#readme",
+ "keywords": [
+ "cmd",
+ "command",
+ "parse",
+ "shebang"
+ ],
+ "license": "MIT",
+ "name": "shebang-command",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/kevva/shebang-command.git"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "version": "1.2.0",
+ "xo": {
+ "ignores": [
+ "test.js"
+ ]
+ }
+}
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/readme.md b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/readme.md
new file mode 100644
index 00000000000000..16b0be4d7d09fa
--- /dev/null
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/node_modules/shebang-command/readme.md
@@ -0,0 +1,39 @@
+# shebang-command [![Build Status](https://travis-ci.org/kevva/shebang-command.svg?branch=master)](https://travis-ci.org/kevva/shebang-command)
+
+> Get the command from a shebang
+
+
+## Install
+
+```
+$ npm install --save shebang-command
+```
+
+
+## Usage
+
+```js
+const shebangCommand = require('shebang-command');
+
+shebangCommand('#!/usr/bin/env node');
+//=> 'node'
+
+shebangCommand('#!/bin/bash');
+//=> 'bash'
+```
+
+
+## API
+
+### shebangCommand(string)
+
+#### string
+
+Type: `string`
+
+String containing a shebang.
+
+
+## License
+
+MIT © [Kevin Martensson](http://github.com/kevva)
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/package.json b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/package.json
index c269a6260b4568..77b39f96c2574b 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/package.json
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/cross-spawn/package.json
@@ -1,36 +1,43 @@
{
- "_from": "cross-spawn@^4.0.0",
- "_id": "cross-spawn@4.0.2",
+ "_from": "cross-spawn@^5.0.1",
+ "_id": "cross-spawn@5.1.0",
"_inBundle": false,
- "_integrity": "sha1-e5JHYhwjrf3ThWAEqCPL45dCTUE=",
+ "_integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=",
"_location": "/libnpx/yargs/os-locale/execa/cross-spawn",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
- "raw": "cross-spawn@^4.0.0",
+ "raw": "cross-spawn@^5.0.1",
"name": "cross-spawn",
"escapedName": "cross-spawn",
- "rawSpec": "^4.0.0",
+ "rawSpec": "^5.0.1",
"saveSpec": null,
- "fetchSpec": "^4.0.0"
+ "fetchSpec": "^5.0.1"
},
"_requiredBy": [
"/libnpx/yargs/os-locale/execa"
],
- "_resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-4.0.2.tgz",
- "_shasum": "7b9247621c23adfdd3856004a823cbe397424d41",
- "_shrinkwrap": null,
- "_spec": "cross-spawn@^4.0.0",
+ "_resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz",
+ "_shasum": "e8bd0efee58fcff6f8f94510a0a554bbfa235449",
+ "_spec": "cross-spawn@^5.0.1",
"_where": "/Users/zkat/Documents/code/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa",
- "bin": null,
+ "author": {
+ "name": "IndigoUnited",
+ "email": "hello@indigounited.com",
+ "url": "http://indigounited.com"
+ },
+ "bugs": {
+ "url": "https://github.com/IndigoUnited/node-cross-spawn/issues/"
+ },
"bundleDependencies": false,
"dependencies": {
"lru-cache": "^4.0.1",
+ "shebang-command": "^1.2.0",
"which": "^1.2.9"
},
"deprecated": false,
- "description": "[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Build Status][travis-image]][travis-url] [![Build status][appveyor-image]][appveyor-url] [![Dependency status][david-dm-image]][david-dm-url] [![Dev Dependency status][david-dm-dev-image]][david-dm-dev-url]",
+ "description": "Cross platform child_process#spawn and child_process#spawnSync",
"devDependencies": {
"@satazor/eslint-config": "^3.0.0",
"eslint": "^3.0.0",
@@ -38,11 +45,39 @@
"glob": "^7.0.0",
"mkdirp": "^0.5.1",
"mocha": "^3.0.2",
+ "once": "^1.4.0",
"rimraf": "^2.5.0"
},
+ "files": [
+ "index.js",
+ "lib"
+ ],
+ "homepage": "https://github.com/IndigoUnited/node-cross-spawn#readme",
+ "keywords": [
+ "spawn",
+ "spawnSync",
+ "windows",
+ "cross",
+ "platform",
+ "path",
+ "ext",
+ "path-ext",
+ "path_ext",
+ "shebang",
+ "hashbang",
+ "cmd",
+ "execute"
+ ],
+ "license": "MIT",
+ "main": "index.js",
"name": "cross-spawn",
- "optionalDependencies": {},
- "peerDependencies": {},
- "scripts": {},
- "version": "4.0.2"
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/IndigoUnited/node-cross-spawn.git"
+ },
+ "scripts": {
+ "lint": "eslint '{*.js,lib/**/*.js,test/**/*.js}'",
+ "test": "node test/prepare && mocha --bail test/test"
+ },
+ "version": "5.1.0"
}
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/buffer-stream.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/buffer-stream.js
index cc834c4dc84877..ae45d3d9e74179 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/buffer-stream.js
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/buffer-stream.js
@@ -1,14 +1,13 @@
-var PassThrough = require('stream').PassThrough;
-var objectAssign = require('object-assign');
+'use strict';
+const PassThrough = require('stream').PassThrough;
-module.exports = function (opts) {
- opts = objectAssign({}, opts);
+module.exports = opts => {
+ opts = Object.assign({}, opts);
- var array = opts.array;
- var encoding = opts.encoding;
-
- var buffer = encoding === 'buffer';
- var objectMode = false;
+ const array = opts.array;
+ let encoding = opts.encoding;
+ const buffer = encoding === 'buffer';
+ let objectMode = false;
if (array) {
objectMode = !(encoding || buffer);
@@ -20,16 +19,15 @@ module.exports = function (opts) {
encoding = null;
}
- var len = 0;
- var ret = [];
-
- var stream = new PassThrough({objectMode: objectMode});
+ let len = 0;
+ const ret = [];
+ const stream = new PassThrough({objectMode});
if (encoding) {
stream.setEncoding(encoding);
}
- stream.on('data', function (chunk) {
+ stream.on('data', chunk => {
ret.push(chunk);
if (objectMode) {
@@ -39,16 +37,15 @@ module.exports = function (opts) {
}
});
- stream.getBufferedValue = function () {
+ stream.getBufferedValue = () => {
if (array) {
return ret;
}
+
return buffer ? Buffer.concat(ret, len) : ret.join('');
};
- stream.getBufferedLength = function () {
- return len;
- };
+ stream.getBufferedLength = () => len;
return stream;
};
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/index.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/index.js
index aa60cf038f34f9..2dc5ee96af2d95 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/index.js
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/index.js
@@ -1,24 +1,31 @@
'use strict';
-var Promise = require('pinkie-promise');
-var objectAssign = require('object-assign');
-var bufferStream = require('./buffer-stream');
+const bufferStream = require('./buffer-stream');
function getStream(inputStream, opts) {
if (!inputStream) {
return Promise.reject(new Error('Expected a stream'));
}
- opts = objectAssign({maxBuffer: Infinity}, opts);
- var maxBuffer = opts.maxBuffer;
- var stream;
- var clean;
+ opts = Object.assign({maxBuffer: Infinity}, opts);
+
+ const maxBuffer = opts.maxBuffer;
+ let stream;
+ let clean;
+
+ const p = new Promise((resolve, reject) => {
+ const error = err => {
+ if (err) { // null check
+ err.bufferedData = stream.getBufferedValue();
+ }
+
+ reject(err);
+ };
- var p = new Promise(function (resolve, reject) {
stream = bufferStream(opts);
inputStream.once('error', error);
inputStream.pipe(stream);
- stream.on('data', function () {
+ stream.on('data', () => {
if (stream.getBufferedLength() > maxBuffer) {
reject(new Error('maxBuffer exceeded'));
}
@@ -26,34 +33,19 @@ function getStream(inputStream, opts) {
stream.once('error', error);
stream.on('end', resolve);
- clean = function () {
- // some streams doesn't implement the stream.Readable interface correctly
+ clean = () => {
+		// some streams don't implement the `stream.Readable` interface correctly
if (inputStream.unpipe) {
inputStream.unpipe(stream);
}
};
-
- function error(err) {
- if (err) { // null check
- err.bufferedData = stream.getBufferedValue();
- }
- reject(err);
- }
});
p.then(clean, clean);
- return p.then(function () {
- return stream.getBufferedValue();
- });
+ return p.then(() => stream.getBufferedValue());
}
module.exports = getStream;
-
-module.exports.buffer = function (stream, opts) {
- return getStream(stream, objectAssign({}, opts, {encoding: 'buffer'}));
-};
-
-module.exports.array = function (stream, opts) {
- return getStream(stream, objectAssign({}, opts, {array: true}));
-};
+module.exports.buffer = (stream, opts) => getStream(stream, Object.assign({}, opts, {encoding: 'buffer'}));
+module.exports.array = (stream, opts) => getStream(stream, Object.assign({}, opts, {array: true}));
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/object-assign/index.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/object-assign/index.js
deleted file mode 100644
index 0930cf8890b9af..00000000000000
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/object-assign/index.js
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
-object-assign
-(c) Sindre Sorhus
-@license MIT
-*/
-
-'use strict';
-/* eslint-disable no-unused-vars */
-var getOwnPropertySymbols = Object.getOwnPropertySymbols;
-var hasOwnProperty = Object.prototype.hasOwnProperty;
-var propIsEnumerable = Object.prototype.propertyIsEnumerable;
-
-function toObject(val) {
- if (val === null || val === undefined) {
- throw new TypeError('Object.assign cannot be called with null or undefined');
- }
-
- return Object(val);
-}
-
-function shouldUseNative() {
- try {
- if (!Object.assign) {
- return false;
- }
-
- // Detect buggy property enumeration order in older V8 versions.
-
- // https://bugs.chromium.org/p/v8/issues/detail?id=4118
- var test1 = new String('abc'); // eslint-disable-line no-new-wrappers
- test1[5] = 'de';
- if (Object.getOwnPropertyNames(test1)[0] === '5') {
- return false;
- }
-
- // https://bugs.chromium.org/p/v8/issues/detail?id=3056
- var test2 = {};
- for (var i = 0; i < 10; i++) {
- test2['_' + String.fromCharCode(i)] = i;
- }
- var order2 = Object.getOwnPropertyNames(test2).map(function (n) {
- return test2[n];
- });
- if (order2.join('') !== '0123456789') {
- return false;
- }
-
- // https://bugs.chromium.org/p/v8/issues/detail?id=3056
- var test3 = {};
- 'abcdefghijklmnopqrst'.split('').forEach(function (letter) {
- test3[letter] = letter;
- });
- if (Object.keys(Object.assign({}, test3)).join('') !==
- 'abcdefghijklmnopqrst') {
- return false;
- }
-
- return true;
- } catch (err) {
- // We don't expect any of the above to throw, but better to be safe.
- return false;
- }
-}
-
-module.exports = shouldUseNative() ? Object.assign : function (target, source) {
- var from;
- var to = toObject(target);
- var symbols;
-
- for (var s = 1; s < arguments.length; s++) {
- from = Object(arguments[s]);
-
- for (var key in from) {
- if (hasOwnProperty.call(from, key)) {
- to[key] = from[key];
- }
- }
-
- if (getOwnPropertySymbols) {
- symbols = getOwnPropertySymbols(from);
- for (var i = 0; i < symbols.length; i++) {
- if (propIsEnumerable.call(from, symbols[i])) {
- to[symbols[i]] = from[symbols[i]];
- }
- }
- }
- }
-
- return to;
-};
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/object-assign/package.json b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/object-assign/package.json
deleted file mode 100644
index cb1283bacebae9..00000000000000
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/object-assign/package.json
+++ /dev/null
@@ -1,45 +0,0 @@
-{
- "_from": "object-assign@^4.0.1",
- "_id": "object-assign@4.1.1",
- "_inBundle": false,
- "_integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=",
- "_location": "/libnpx/yargs/os-locale/execa/get-stream/object-assign",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "object-assign@^4.0.1",
- "name": "object-assign",
- "escapedName": "object-assign",
- "rawSpec": "^4.0.1",
- "saveSpec": null,
- "fetchSpec": "^4.0.1"
- },
- "_requiredBy": [
- "/libnpx/yargs/os-locale/execa/get-stream"
- ],
- "_resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
- "_shasum": "2109adc7965887cfc05cbbd442cac8bfbb360863",
- "_shrinkwrap": null,
- "_spec": "object-assign@^4.0.1",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream",
- "bin": null,
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "> ES2015 [`Object.assign()`](http://www.2ality.com/2014/01/object-assign.html) [ponyfill](https://ponyfill.com)",
- "devDependencies": {
- "ava": "^0.16.0",
- "lodash": "^4.16.4",
- "matcha": "^0.7.0",
- "xo": "^0.16.0"
- },
- "engines": {
- "node": ">=0.10.0"
- },
- "name": "object-assign",
- "optionalDependencies": {},
- "peerDependencies": {},
- "scripts": {},
- "version": "4.1.1"
-}
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/object-assign/readme.md b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/object-assign/readme.md
deleted file mode 100644
index 1be09d35c776cc..00000000000000
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/object-assign/readme.md
+++ /dev/null
@@ -1,61 +0,0 @@
-# object-assign [![Build Status](https://travis-ci.org/sindresorhus/object-assign.svg?branch=master)](https://travis-ci.org/sindresorhus/object-assign)
-
-> ES2015 [`Object.assign()`](http://www.2ality.com/2014/01/object-assign.html) [ponyfill](https://ponyfill.com)
-
-
-## Use the built-in
-
-Node.js 4 and up, as well as every evergreen browser (Chrome, Edge, Firefox, Opera, Safari),
-support `Object.assign()` :tada:. If you target only those environments, then by all
-means, use `Object.assign()` instead of this package.
-
-
-## Install
-
-```
-$ npm install --save object-assign
-```
-
-
-## Usage
-
-```js
-const objectAssign = require('object-assign');
-
-objectAssign({foo: 0}, {bar: 1});
-//=> {foo: 0, bar: 1}
-
-// multiple sources
-objectAssign({foo: 0}, {bar: 1}, {baz: 2});
-//=> {foo: 0, bar: 1, baz: 2}
-
-// overwrites equal keys
-objectAssign({foo: 0}, {foo: 1}, {foo: 2});
-//=> {foo: 2}
-
-// ignores null and undefined sources
-objectAssign({foo: 0}, null, {bar: 1}, undefined);
-//=> {foo: 0, bar: 1}
-```
-
-
-## API
-
-### objectAssign(target, [source, ...])
-
-Assigns enumerable own properties of `source` objects to the `target` object and returns the `target` object. Additional `source` objects will overwrite previous ones.
-
-
-## Resources
-
-- [ES2015 spec - Object.assign](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-object.assign)
-
-
-## Related
-
-- [deep-assign](https://github.com/sindresorhus/deep-assign) - Recursive `Object.assign()`
-
-
-## License
-
-MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/index.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/index.js
deleted file mode 100644
index 777377a1f777b1..00000000000000
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/index.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict';
-
-module.exports = typeof Promise === 'function' ? Promise : require('pinkie');
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/license b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/license
deleted file mode 100644
index 1aeb74fd25e171..00000000000000
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/license
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Vsevolod Strukchinsky (github.com/floatdrop)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/node_modules/pinkie/index.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/node_modules/pinkie/index.js
deleted file mode 100644
index 14ce1bfe3d4918..00000000000000
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/node_modules/pinkie/index.js
+++ /dev/null
@@ -1,292 +0,0 @@
-'use strict';
-
-var PENDING = 'pending';
-var SETTLED = 'settled';
-var FULFILLED = 'fulfilled';
-var REJECTED = 'rejected';
-var NOOP = function () {};
-var isNode = typeof global !== 'undefined' && typeof global.process !== 'undefined' && typeof global.process.emit === 'function';
-
-var asyncSetTimer = typeof setImmediate === 'undefined' ? setTimeout : setImmediate;
-var asyncQueue = [];
-var asyncTimer;
-
-function asyncFlush() {
- // run promise callbacks
- for (var i = 0; i < asyncQueue.length; i++) {
- asyncQueue[i][0](asyncQueue[i][1]);
- }
-
- // reset async asyncQueue
- asyncQueue = [];
- asyncTimer = false;
-}
-
-function asyncCall(callback, arg) {
- asyncQueue.push([callback, arg]);
-
- if (!asyncTimer) {
- asyncTimer = true;
- asyncSetTimer(asyncFlush, 0);
- }
-}
-
-function invokeResolver(resolver, promise) {
- function resolvePromise(value) {
- resolve(promise, value);
- }
-
- function rejectPromise(reason) {
- reject(promise, reason);
- }
-
- try {
- resolver(resolvePromise, rejectPromise);
- } catch (e) {
- rejectPromise(e);
- }
-}
-
-function invokeCallback(subscriber) {
- var owner = subscriber.owner;
- var settled = owner._state;
- var value = owner._data;
- var callback = subscriber[settled];
- var promise = subscriber.then;
-
- if (typeof callback === 'function') {
- settled = FULFILLED;
- try {
- value = callback(value);
- } catch (e) {
- reject(promise, e);
- }
- }
-
- if (!handleThenable(promise, value)) {
- if (settled === FULFILLED) {
- resolve(promise, value);
- }
-
- if (settled === REJECTED) {
- reject(promise, value);
- }
- }
-}
-
-function handleThenable(promise, value) {
- var resolved;
-
- try {
- if (promise === value) {
- throw new TypeError('A promises callback cannot return that same promise.');
- }
-
- if (value && (typeof value === 'function' || typeof value === 'object')) {
- // then should be retrieved only once
- var then = value.then;
-
- if (typeof then === 'function') {
- then.call(value, function (val) {
- if (!resolved) {
- resolved = true;
-
- if (value === val) {
- fulfill(promise, val);
- } else {
- resolve(promise, val);
- }
- }
- }, function (reason) {
- if (!resolved) {
- resolved = true;
-
- reject(promise, reason);
- }
- });
-
- return true;
- }
- }
- } catch (e) {
- if (!resolved) {
- reject(promise, e);
- }
-
- return true;
- }
-
- return false;
-}
-
-function resolve(promise, value) {
- if (promise === value || !handleThenable(promise, value)) {
- fulfill(promise, value);
- }
-}
-
-function fulfill(promise, value) {
- if (promise._state === PENDING) {
- promise._state = SETTLED;
- promise._data = value;
-
- asyncCall(publishFulfillment, promise);
- }
-}
-
-function reject(promise, reason) {
- if (promise._state === PENDING) {
- promise._state = SETTLED;
- promise._data = reason;
-
- asyncCall(publishRejection, promise);
- }
-}
-
-function publish(promise) {
- promise._then = promise._then.forEach(invokeCallback);
-}
-
-function publishFulfillment(promise) {
- promise._state = FULFILLED;
- publish(promise);
-}
-
-function publishRejection(promise) {
- promise._state = REJECTED;
- publish(promise);
- if (!promise._handled && isNode) {
- global.process.emit('unhandledRejection', promise._data, promise);
- }
-}
-
-function notifyRejectionHandled(promise) {
- global.process.emit('rejectionHandled', promise);
-}
-
-/**
- * @class
- */
-function Promise(resolver) {
- if (typeof resolver !== 'function') {
- throw new TypeError('Promise resolver ' + resolver + ' is not a function');
- }
-
- if (this instanceof Promise === false) {
- throw new TypeError('Failed to construct \'Promise\': Please use the \'new\' operator, this object constructor cannot be called as a function.');
- }
-
- this._then = [];
-
- invokeResolver(resolver, this);
-}
-
-Promise.prototype = {
- constructor: Promise,
-
- _state: PENDING,
- _then: null,
- _data: undefined,
- _handled: false,
-
- then: function (onFulfillment, onRejection) {
- var subscriber = {
- owner: this,
- then: new this.constructor(NOOP),
- fulfilled: onFulfillment,
- rejected: onRejection
- };
-
- if ((onRejection || onFulfillment) && !this._handled) {
- this._handled = true;
- if (this._state === REJECTED && isNode) {
- asyncCall(notifyRejectionHandled, this);
- }
- }
-
- if (this._state === FULFILLED || this._state === REJECTED) {
- // already resolved, call callback async
- asyncCall(invokeCallback, subscriber);
- } else {
- // subscribe
- this._then.push(subscriber);
- }
-
- return subscriber.then;
- },
-
- catch: function (onRejection) {
- return this.then(null, onRejection);
- }
-};
-
-Promise.all = function (promises) {
- if (!Array.isArray(promises)) {
- throw new TypeError('You must pass an array to Promise.all().');
- }
-
- return new Promise(function (resolve, reject) {
- var results = [];
- var remaining = 0;
-
- function resolver(index) {
- remaining++;
- return function (value) {
- results[index] = value;
- if (!--remaining) {
- resolve(results);
- }
- };
- }
-
- for (var i = 0, promise; i < promises.length; i++) {
- promise = promises[i];
-
- if (promise && typeof promise.then === 'function') {
- promise.then(resolver(i), reject);
- } else {
- results[i] = promise;
- }
- }
-
- if (!remaining) {
- resolve(results);
- }
- });
-};
-
-Promise.race = function (promises) {
- if (!Array.isArray(promises)) {
- throw new TypeError('You must pass an array to Promise.race().');
- }
-
- return new Promise(function (resolve, reject) {
- for (var i = 0, promise; i < promises.length; i++) {
- promise = promises[i];
-
- if (promise && typeof promise.then === 'function') {
- promise.then(resolve, reject);
- } else {
- resolve(promise);
- }
- }
- });
-};
-
-Promise.resolve = function (value) {
- if (value && typeof value === 'object' && value.constructor === Promise) {
- return value;
- }
-
- return new Promise(function (resolve) {
- resolve(value);
- });
-};
-
-Promise.reject = function (reason) {
- return new Promise(function (resolve, reject) {
- reject(reason);
- });
-};
-
-module.exports = Promise;
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/node_modules/pinkie/license b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/node_modules/pinkie/license
deleted file mode 100644
index 1aeb74fd25e171..00000000000000
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/node_modules/pinkie/license
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Vsevolod Strukchinsky (github.com/floatdrop)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/node_modules/pinkie/package.json b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/node_modules/pinkie/package.json
deleted file mode 100644
index d94124cdf51ab4..00000000000000
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/node_modules/pinkie/package.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
- "_from": "pinkie@^2.0.0",
- "_id": "pinkie@2.0.4",
- "_inBundle": false,
- "_integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA=",
- "_location": "/libnpx/yargs/os-locale/execa/get-stream/pinkie-promise/pinkie",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "pinkie@^2.0.0",
- "name": "pinkie",
- "escapedName": "pinkie",
- "rawSpec": "^2.0.0",
- "saveSpec": null,
- "fetchSpec": "^2.0.0"
- },
- "_requiredBy": [
- "/libnpx/yargs/os-locale/execa/get-stream/pinkie-promise"
- ],
- "_resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz",
- "_shasum": "72556b80cfa0d48a974e80e77248e80ed4f7f870",
- "_shrinkwrap": null,
- "_spec": "pinkie@^2.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise",
- "bin": null,
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
-  "description": "Itty bitty little widdle twinkie pinkie ES2015 Promise implementation",
- "devDependencies": {
- "core-assert": "^0.1.1",
- "coveralls": "^2.11.4",
- "mocha": "*",
- "nyc": "^3.2.2",
- "promises-aplus-tests": "*",
- "xo": "^0.10.1"
- },
- "engines": {
- "node": ">=0.10.0"
- },
- "name": "pinkie",
- "optionalDependencies": {},
- "peerDependencies": {},
- "scripts": {},
- "version": "2.0.4"
-}
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/node_modules/pinkie/readme.md b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/node_modules/pinkie/readme.md
deleted file mode 100644
index 54747f60cc538b..00000000000000
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/node_modules/pinkie/readme.md
+++ /dev/null
@@ -1,83 +0,0 @@
-
-
-
-
-
-
-
-> Itty bitty little widdle twinkie pinkie [ES2015 Promise](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-promise-objects) implementation
-
-[![Build Status](https://travis-ci.org/floatdrop/pinkie.svg?branch=master)](https://travis-ci.org/floatdrop/pinkie) [![Coverage Status](https://coveralls.io/repos/floatdrop/pinkie/badge.svg?branch=master&service=github)](https://coveralls.io/github/floatdrop/pinkie?branch=master)
-
-There are [tons of Promise implementations](https://github.com/promises-aplus/promises-spec/blob/master/implementations.md#standalone) out there, but all of them focus on browser compatibility and are often bloated with functionality.
-
-This module is an exact Promise specification polyfill (like [native-promise-only](https://github.com/getify/native-promise-only)), but in Node.js land (it should be browserify-able though).
-
-
-## Install
-
-```
-$ npm install --save pinkie
-```
-
-
-## Usage
-
-```js
-var fs = require('fs');
-var Promise = require('pinkie');
-
-new Promise(function (resolve, reject) {
- fs.readFile('foo.json', 'utf8', function (err, data) {
- if (err) {
- reject(err);
- return;
- }
-
- resolve(data);
- });
-});
-//=> Promise
-```
-
-
-### API
-
-`pinkie` exports bare [ES2015 Promise](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-promise-objects) implementation and polyfills [Node.js rejection events](https://nodejs.org/api/process.html#process_event_unhandledrejection). In case you forgot:
-
-#### new Promise(executor)
-
-Returns new instance of `Promise`.
-
-##### executor
-
-*Required*
-Type: `function`
-
-Function with two arguments `resolve` and `reject`. The first argument fulfills the promise, the second argument rejects it.
-
-#### pinkie.all(promises)
-
-Returns a promise that resolves when all of the promises in the `promises` Array argument have resolved.
-
-#### pinkie.race(promises)
-
-Returns a promise that resolves or rejects as soon as one of the promises in the `promises` Array resolves or rejects, with the value or reason from that promise.
-
-#### pinkie.reject(reason)
-
-Returns a Promise object that is rejected with the given `reason`.
-
-#### pinkie.resolve(value)
-
-Returns a Promise object that is resolved with the given `value`. If the `value` is a thenable (i.e. has a then method), the returned promise will "follow" that thenable, adopting its eventual state; otherwise the returned promise will be fulfilled with the `value`.
-
-
-## Related
-
-- [pinkie-promise](https://github.com/floatdrop/pinkie-promise) - Returns the native Promise or this module
-
-
-## License
-
-MIT © [Vsevolod Strukchinsky](http://github.com/floatdrop)
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/package.json b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/package.json
deleted file mode 100644
index 96227946d83286..00000000000000
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/package.json
+++ /dev/null
@@ -1,44 +0,0 @@
-{
- "_from": "pinkie-promise@^2.0.0",
- "_id": "pinkie-promise@2.0.1",
- "_inBundle": false,
- "_integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=",
- "_location": "/libnpx/yargs/os-locale/execa/get-stream/pinkie-promise",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "pinkie-promise@^2.0.0",
- "name": "pinkie-promise",
- "escapedName": "pinkie-promise",
- "rawSpec": "^2.0.0",
- "saveSpec": null,
- "fetchSpec": "^2.0.0"
- },
- "_requiredBy": [
- "/libnpx/yargs/os-locale/execa/get-stream"
- ],
- "_resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz",
- "_shasum": "2135d6dfa7a358c069ac9b178776288228450ffa",
- "_shrinkwrap": null,
- "_spec": "pinkie-promise@^2.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream",
- "bin": null,
- "bundleDependencies": false,
- "dependencies": {
- "pinkie": "^2.0.0"
- },
- "deprecated": false,
- "description": "> [ES2015 Promise](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-promise-objects) ponyfill",
- "devDependencies": {
- "mocha": "*"
- },
- "engines": {
- "node": ">=0.10.0"
- },
- "name": "pinkie-promise",
- "optionalDependencies": {},
- "peerDependencies": {},
- "scripts": {},
- "version": "2.0.1"
-}
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/readme.md b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/readme.md
deleted file mode 100644
index 78477f4297d677..00000000000000
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/node_modules/pinkie-promise/readme.md
+++ /dev/null
@@ -1,28 +0,0 @@
-# pinkie-promise [![Build Status](https://travis-ci.org/floatdrop/pinkie-promise.svg?branch=master)](https://travis-ci.org/floatdrop/pinkie-promise)
-
-> [ES2015 Promise](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-promise-objects) ponyfill
-
-Module exports global Promise object (if available) or [`pinkie`](http://github.com/floatdrop/pinkie) Promise polyfill.
-
-## Install
-
-```
-$ npm install --save pinkie-promise
-```
-
-## Usage
-
-```js
-var Promise = require('pinkie-promise');
-
-new Promise(function (resolve) { resolve('unicorns'); });
-//=> Promise { 'unicorns' }
-```
-
-## Related
-
-- [pify](https://github.com/sindresorhus/pify) - Promisify a callback-style function
-
-## License
-
-MIT © [Vsevolod Strukchinsky](http://github.com/floatdrop)
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/package.json b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/package.json
index 90ff4f20986e00..1b817c9ee66a9c 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/package.json
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/package.json
@@ -1,48 +1,80 @@
{
- "_from": "get-stream@^2.2.0",
- "_id": "get-stream@2.3.1",
+ "_from": "get-stream@^3.0.0",
+ "_id": "get-stream@3.0.0",
"_inBundle": false,
- "_integrity": "sha1-Xzj5PzRgCWZu4BUKBUFn+Rvdld4=",
+ "_integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=",
"_location": "/libnpx/yargs/os-locale/execa/get-stream",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
- "raw": "get-stream@^2.2.0",
+ "raw": "get-stream@^3.0.0",
"name": "get-stream",
"escapedName": "get-stream",
- "rawSpec": "^2.2.0",
+ "rawSpec": "^3.0.0",
"saveSpec": null,
- "fetchSpec": "^2.2.0"
+ "fetchSpec": "^3.0.0"
},
"_requiredBy": [
"/libnpx/yargs/os-locale/execa"
],
- "_resolved": "https://registry.npmjs.org/get-stream/-/get-stream-2.3.1.tgz",
- "_shasum": "5f38f93f346009666ee0150a054167f91bdd95de",
- "_shrinkwrap": null,
- "_spec": "get-stream@^2.2.0",
+ "_resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz",
+ "_shasum": "8e943d1358dc37555054ecbe2edb05aa174ede14",
+ "_spec": "get-stream@^3.0.0",
"_where": "/Users/zkat/Documents/code/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa",
- "bin": null,
- "bundleDependencies": false,
- "dependencies": {
- "object-assign": "^4.0.1",
- "pinkie-promise": "^2.0.0"
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "bugs": {
+ "url": "https://github.com/sindresorhus/get-stream/issues"
},
+ "bundleDependencies": false,
"deprecated": false,
- "description": "> Get a stream as a string, buffer, or array",
+ "description": "Get a stream as a string, buffer, or array",
"devDependencies": {
"ava": "*",
- "buffer-equals": "^1.0.3",
- "into-stream": "^2.0.1",
+ "into-stream": "^3.0.0",
"xo": "*"
},
"engines": {
- "node": ">=0.10.0"
+ "node": ">=4"
},
+ "files": [
+ "index.js",
+ "buffer-stream.js"
+ ],
+ "homepage": "https://github.com/sindresorhus/get-stream#readme",
+ "keywords": [
+ "get",
+ "stream",
+ "promise",
+ "concat",
+ "string",
+ "str",
+ "text",
+ "buffer",
+ "read",
+ "data",
+ "consume",
+ "readable",
+ "readablestream",
+ "array",
+ "object",
+ "obj"
+ ],
+ "license": "MIT",
"name": "get-stream",
- "optionalDependencies": {},
- "peerDependencies": {},
- "scripts": {},
- "version": "2.3.1"
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sindresorhus/get-stream.git"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "version": "3.0.0",
+ "xo": {
+ "esnext": true
+ }
}
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/readme.md b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/readme.md
index a74866bb299646..73b188fb420f2a 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/readme.md
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/node_modules/get-stream/readme.md
@@ -46,7 +46,7 @@ getStream(stream).then(str => {
## API
-The methods returns a promise that is resolved when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode.
+The methods return a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode.
### getStream(stream, [options])
@@ -93,8 +93,10 @@ If the input stream emits an `error` event, the promise will be rejected with th
```js
getStream(streamThatErrorsAtTheEnd('unicorn'))
- .catch(err => console.log(err.bufferedData));
-// unicorn
+ .catch(err => {
+ console.log(err.bufferedData);
+ //=> 'unicorn'
+ });
```
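
As a quick orientation to the API this hunk documents: `getStream()` resolves with the fully read contents and, per the example above, keeps any partially read data on the rejection error. A minimal usage sketch, assuming get-stream 3.x's documented `maxBuffer` option (the file name is illustrative):

```js
const fs = require('fs');
const getStream = require('get-stream');

// Reject once more than 1024 bytes have been buffered; whatever was
// read before the failure stays available on `err.bufferedData`.
getStream(fs.createReadStream('unicorn.txt'), {maxBuffer: 1024})
	.then(contents => console.log(contents))
	.catch(err => console.error('partial:', err.bufferedData));
```
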
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/package.json b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/package.json
index 1bceaf633b00de..e448f7b6ac37f0 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/package.json
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/package.json
@@ -1,8 +1,8 @@
{
- "_from": "execa@^0.5.0",
- "_id": "execa@0.5.1",
+ "_from": "execa@^0.7.0",
+ "_id": "execa@0.7.0",
"_inBundle": false,
- "_integrity": "sha1-3j+4XLjW6RyFvLzrFkWBeFy1ezY=",
+ "_integrity": "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=",
"_location": "/libnpx/yargs/os-locale/execa",
"_phantomChildren": {
"lru-cache": "4.1.1",
@@ -11,26 +11,32 @@
"_requested": {
"type": "range",
"registry": true,
- "raw": "execa@^0.5.0",
+ "raw": "execa@^0.7.0",
"name": "execa",
"escapedName": "execa",
- "rawSpec": "^0.5.0",
+ "rawSpec": "^0.7.0",
"saveSpec": null,
- "fetchSpec": "^0.5.0"
+ "fetchSpec": "^0.7.0"
},
"_requiredBy": [
"/libnpx/yargs/os-locale"
],
- "_resolved": "https://registry.npmjs.org/execa/-/execa-0.5.1.tgz",
- "_shasum": "de3fb85cb8d6e91c85bcbceb164581785cb57b36",
- "_shrinkwrap": null,
- "_spec": "execa@^0.5.0",
+ "_resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz",
+ "_shasum": "944becd34cc41ee32a63a9faf27ad5a65fc59777",
+ "_spec": "execa@^0.7.0",
"_where": "/Users/zkat/Documents/code/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale",
- "bin": null,
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "bugs": {
+ "url": "https://github.com/sindresorhus/execa/issues"
+ },
"bundleDependencies": false,
"dependencies": {
- "cross-spawn": "^4.0.0",
- "get-stream": "^2.2.0",
+ "cross-spawn": "^5.0.1",
+ "get-stream": "^3.0.0",
"is-stream": "^1.1.0",
"npm-run-path": "^2.0.0",
"p-finally": "^1.0.0",
@@ -38,22 +44,68 @@
"strip-eof": "^1.0.0"
},
"deprecated": false,
- "description": "> A better [`child_process`](https://nodejs.org/api/child_process.html)",
+ "description": "A better `child_process`",
"devDependencies": {
"ava": "*",
"cat-names": "^1.0.2",
"coveralls": "^2.11.9",
- "delay": "^1.3.1",
+ "delay": "^2.0.0",
"is-running": "^2.0.0",
- "nyc": "^8.3.0",
+ "nyc": "^11.0.2",
+ "tempfile": "^2.0.0",
"xo": "*"
},
"engines": {
"node": ">=4"
},
+ "files": [
+ "index.js",
+ "lib"
+ ],
+ "homepage": "https://github.com/sindresorhus/execa#readme",
+ "keywords": [
+ "exec",
+ "child",
+ "process",
+ "execute",
+ "fork",
+ "execfile",
+ "spawn",
+ "file",
+ "shell",
+ "bin",
+ "binary",
+ "binaries",
+ "npm",
+ "path",
+ "local"
+ ],
+ "license": "MIT",
+ "maintainers": [
+ {
+ "name": "James Talmage",
+ "email": "james@talmage.io",
+ "url": "github.com/jamestalmage"
+ }
+ ],
"name": "execa",
- "optionalDependencies": {},
- "peerDependencies": {},
- "scripts": {},
- "version": "0.5.1"
+ "nyc": {
+ "reporter": [
+ "text",
+ "lcov"
+ ],
+ "exclude": [
+ "**/fixtures/**",
+ "**/test.js",
+ "**/test/**"
+ ]
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sindresorhus/execa.git"
+ },
+ "scripts": {
+ "test": "xo && nyc ava"
+ },
+ "version": "0.7.0"
}
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/readme.md b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/readme.md
index 16189acb27fb3e..18c808aa6902c0 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/readme.md
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/node_modules/execa/readme.md
@@ -64,8 +64,6 @@ execa.shell('exit 3').catch(error => {
Execute a file.
-Same options as [`child_process.execFile`](https://nodejs.org/api/child_process.html#child_process_child_process_execfile_file_args_options_callback).
-
Think of this as a mix of `child_process.execFile` and `child_process.spawn`.
Returns a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties.
@@ -82,8 +80,6 @@ Same as `execa()`, but returns only `stderr`.
Execute a command through the system shell. Prefer `execa()` whenever possible, as it's both faster and safer.
-Same options as [`child_process.exec`](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback).
-
Returns a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess).
The `child_process` instance is enhanced to also be a promise for a result object with `stdout` and `stderr` properties.
@@ -92,21 +88,78 @@ The `child_process` instance is enhanced to also be promise for a result object
Execute a file synchronously.
-Same options as [`child_process.execFileSync`](https://nodejs.org/api/child_process.html#child_process_child_process_execfilesync_file_args_options), except the default encoding is `utf8` instead of `buffer`.
-
Returns the same result object as [`child_process.spawnSync`](https://nodejs.org/api/child_process.html#child_process_child_process_spawnsync_command_args_options).
+This method throws an `Error` if the command fails.
+
### execa.shellSync(file, [options])
Execute a command synchronously through the system shell.
-Same options as [`child_process.execSync`](https://nodejs.org/api/child_process.html#child_process_child_process_execsync_command_options), except the default encoding is `utf8` instead of `buffer`.
-
Returns the same result object as [`child_process.spawnSync`](https://nodejs.org/api/child_process.html#child_process_child_process_spawnsync_command_args_options).
### options
-Additional options:
+Type: `Object`
+
+#### cwd
+
+Type: `string`
+Default: `process.cwd()`
+
+Current working directory of the child process.
+
+#### env
+
+Type: `Object`
+Default: `process.env`
+
+Environment key-value pairs. Extends automatically from `process.env`. Set `extendEnv` to `false` if you don't want this.
+
+#### extendEnv
+
+Type: `boolean`
+Default: `true`
+
+Set to `false` if you don't want to extend the environment variables when providing the `env` property.
+
+#### argv0
+
+Type: `string`
+
+Explicitly set the value of `argv[0]` sent to the child process. This will be set to `command` or `file` if not specified.
+
+#### stdio
+
+Type: `Array` `string`
+Default: `pipe`
+
+Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_stdio) configuration.
+
+#### detached
+
+Type: `boolean`
+
+Prepare child to run independently of its parent process. Specific behavior [depends on the platform](https://nodejs.org/api/child_process.html#child_process_options_detached).
+
+#### uid
+
+Type: `number`
+
+Sets the user identity of the process.
+
+#### gid
+
+Type: `number`
+
+Sets the group identity of the process.
+
+#### shell
+
+Type: `boolean` `string`
+Default: `false`
+
+If `true`, runs `command` inside of a shell. Uses `/bin/sh` on UNIX and `cmd.exe` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX or `/d /s /c` on Windows.
#### stripEof
@@ -123,9 +176,16 @@ Default: `true`
Prefer locally installed binaries when looking for a binary to execute.
If you `$ npm install foo`, you can then `execa('foo')`.
+#### localDir
+
+Type: `string`
+Default: `process.cwd()`
+
+Preferred path to find locally installed binaries in (use with `preferLocal`).
+
#### input
-Type: `string` `Buffer` `ReadableStream`
+Type: `string` `Buffer` `stream.Readable`
Write some input to the `stdin` of your binary.
Streams are not allowed when using the synchronous methods.
@@ -144,6 +204,75 @@ Default: `true`
Keep track of the spawned process and `kill` it when the parent process exits.
+#### encoding
+
+Type: `string`
+Default: `utf8`
+
+Specify the character encoding used to decode the `stdout` and `stderr` output.
+
+#### timeout
+
+Type: `number`
+Default: `0`
+
+If timeout is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `SIGTERM`) if the child runs longer than timeout milliseconds.
+
+#### maxBuffer
+
+Type: `number`
+Default: `10000000` (10MB)
+
+Largest amount of data in bytes allowed on `stdout` or `stderr`.
+
+#### killSignal
+
+Type: `string` `number`
+Default: `SIGTERM`
+
+Signal value to be used when the spawned process will be killed.
+
+#### stdin
+
+Type: `string` `number` `Stream` `undefined` `null`
+Default: `pipe`
+
+Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio).
+
+#### stdout
+
+Type: `string` `number` `Stream` `undefined` `null`
+Default: `pipe`
+
+Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio).
+
+#### stderr
+
+Type: `string` `number` `Stream` `undefined` `null`
+Default: `pipe`
+
+Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio).
+
+
+## Tips
+
+### Save and pipe output from a child process
+
+Let's say you want to show the output of a child process in real-time while also saving it to a variable.
+
+```js
+const execa = require('execa');
+const getStream = require('get-stream');
+
+const stream = execa('echo', ['foo']).stdout;
+
+stream.pipe(process.stdout);
+
+getStream(stream).then(value => {
+ console.log('child output:', value);
+});
+```
+
## License
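
Taken together, the options documented above compose naturally. A minimal sketch, assuming execa 0.7 as updated by this patch; the command and values are illustrative:

```js
const execa = require('execa');

execa('cat', [], {
	input: 'hello',      // written to the child's stdin
	timeout: 5000,       // send `killSignal` (default SIGTERM) after 5s
	maxBuffer: 10000000  // cap on stdout/stderr bytes (the default)
}).then(result => {
	console.log(result.stdout);
	//=> 'hello'
});
```
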
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/package.json b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/package.json
index b52d010e41e6d5..bda5c72d19cc12 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/package.json
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/package.json
@@ -1,8 +1,8 @@
{
"_from": "os-locale@^2.0.0",
- "_id": "os-locale@2.0.0",
+ "_id": "os-locale@2.1.0",
"_inBundle": false,
- "_integrity": "sha1-FZGN7VEFIrge565aMJ1U9jn8OaQ=",
+ "_integrity": "sha512-3sslG3zJbEYcaC4YVAvDorjGxc7tv6KVATnLPZONiljsUncvihe9BQoVCEs0RZ1kmf4Hk9OBqlZfJZWI4GanKA==",
"_location": "/libnpx/yargs/os-locale",
"_phantomChildren": {
"lru-cache": "4.1.1",
@@ -21,31 +21,60 @@
"_requiredBy": [
"/libnpx/yargs"
],
- "_resolved": "https://registry.npmjs.org/os-locale/-/os-locale-2.0.0.tgz",
- "_shasum": "15918ded510522b81ee7ae5a309d54f639fc39a4",
- "_shrinkwrap": null,
+ "_resolved": "https://registry.npmjs.org/os-locale/-/os-locale-2.1.0.tgz",
+ "_shasum": "42bc2900a6b5b8bd17376c8e882b65afccf24bf2",
"_spec": "os-locale@^2.0.0",
"_where": "/Users/zkat/Documents/code/npm/node_modules/libnpx/node_modules/yargs",
- "bin": null,
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "bugs": {
+ "url": "https://github.com/sindresorhus/os-locale/issues"
+ },
"bundleDependencies": false,
"dependencies": {
- "execa": "^0.5.0",
+ "execa": "^0.7.0",
"lcid": "^1.0.0",
"mem": "^1.1.0"
},
"deprecated": false,
- "description": "> Get the system [locale](https://en.wikipedia.org/wiki/Locale_(computer_software))",
+ "description": "Get the system locale",
"devDependencies": {
"ava": "*",
- "require-uncached": "^1.0.2",
+ "import-fresh": "^2.0.0",
"xo": "*"
},
"engines": {
"node": ">=4"
},
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/sindresorhus/os-locale#readme",
+ "keywords": [
+ "locale",
+ "lang",
+ "language",
+ "system",
+ "os",
+ "string",
+ "str",
+ "user",
+ "country",
+ "id",
+ "identifier",
+ "region"
+ ],
+ "license": "MIT",
"name": "os-locale",
- "optionalDependencies": {},
- "peerDependencies": {},
- "scripts": {},
- "version": "2.0.0"
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sindresorhus/os-locale.git"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "version": "2.1.0"
}
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/readme.md b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/readme.md
index b867c55ee697f5..7c80d33589e4bb 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/readme.md
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/os-locale/readme.md
@@ -19,7 +19,7 @@ $ npm install --save os-locale
```js
const osLocale = require('os-locale');
-osLocale.then(locale => {
+osLocale().then(locale => {
console.log(locale);
//=> 'en_US'
});
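
The fix above reflects that os-locale 2.x exports a function rather than a promise. The function also accepts an options object; assuming the 2.x `spawn` option, an environment-only lookup looks like this:

```js
const osLocale = require('os-locale');

// Skip spawning subprocesses and consult environment variables only.
osLocale({spawn: false}).then(locale => {
	console.log(locale);
	//=> 'en_US'
});
```
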
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/string-width/index.js b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/string-width/index.js
index 1f8a1f113427b0..bbc49d29b156c3 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/string-width/index.js
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/string-width/index.js
@@ -7,10 +7,10 @@ module.exports = str => {
return 0;
}
- let width = 0;
-
str = stripAnsi(str);
+ let width = 0;
+
for (let i = 0; i < str.length; i++) {
const code = str.codePointAt(i);
@@ -19,16 +19,17 @@ module.exports = str => {
continue;
}
+ // Ignore combining characters
+ if (code >= 0x300 && code <= 0x36F) {
+ continue;
+ }
+
// Surrogates
- if (code >= 0x10000) {
+ if (code > 0xFFFF) {
i++;
}
- if (isFullwidthCodePoint(code)) {
- width += 2;
- } else {
- width++;
- }
+ width += isFullwidthCodePoint(code) ? 2 : 1;
}
return width;
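
The rewritten loop now skips the combining-character block (U+0300–U+036F) and steps past astral code points once, while still counting fullwidth characters as two columns. Illustrative expectations (the inputs are made up):

```js
const stringWidth = require('string-width');

stringWidth('abc');     //=> 3  (halfwidth characters count as 1 column)
stringWidth('古池や');  //=> 6  (fullwidth CJK counts as 2 columns each)
stringWidth('a\u0300'); //=> 1  (the combining grave accent is ignored)
stringWidth('\u001B[31mhi\u001B[39m'); //=> 2  (ANSI codes are stripped first)
```
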
diff --git a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/string-width/package.json b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/string-width/package.json
index c2f5b958421ab9..3bc2c04b625786 100644
--- a/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/string-width/package.json
+++ b/deps/npm/node_modules/libnpx/node_modules/yargs/node_modules/string-width/package.json
@@ -1,8 +1,8 @@
{
"_from": "string-width@^2.0.0",
- "_id": "string-width@2.1.0",
+ "_id": "string-width@2.1.1",
"_inBundle": false,
- "_integrity": "sha1-AwZkVh/BRslCPsfZeP4kV0N/5tA=",
+ "_integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==",
"_location": "/libnpx/yargs/string-width",
"_phantomChildren": {},
"_requested": {
@@ -18,19 +18,25 @@
"_requiredBy": [
"/libnpx/yargs"
],
- "_resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.0.tgz",
- "_shasum": "030664561fc146c9423ec7d978fe2457437fe6d0",
- "_shrinkwrap": null,
+ "_resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz",
+ "_shasum": "ab93f27a8dc13d28cac815c462143a6d9012ae9e",
"_spec": "string-width@^2.0.0",
"_where": "/Users/zkat/Documents/code/npm/node_modules/libnpx/node_modules/yargs",
- "bin": null,
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "bugs": {
+ "url": "https://github.com/sindresorhus/string-width/issues"
+ },
"bundleDependencies": false,
"dependencies": {
"is-fullwidth-code-point": "^2.0.0",
"strip-ansi": "^4.0.0"
},
"deprecated": false,
- "description": "> Get the visual width of a string - the number of columns required to display it",
+ "description": "Get the visual width of a string - the number of columns required to display it",
"devDependencies": {
"ava": "*",
"xo": "*"
@@ -38,9 +44,44 @@
"engines": {
"node": ">=4"
},
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/sindresorhus/string-width#readme",
+ "keywords": [
+ "string",
+ "str",
+ "character",
+ "char",
+ "unicode",
+ "width",
+ "visual",
+ "column",
+ "columns",
+ "fullwidth",
+ "full-width",
+ "full",
+ "ansi",
+ "escape",
+ "codes",
+ "cli",
+ "command-line",
+ "terminal",
+ "console",
+ "cjk",
+ "chinese",
+ "japanese",
+ "korean",
+ "fixed-width"
+ ],
+ "license": "MIT",
"name": "string-width",
- "optionalDependencies": {},
- "peerDependencies": {},
- "scripts": {},
- "version": "2.1.0"
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sindresorhus/string-width.git"
+ },
+ "scripts": {
+ "test": "xo && ava"
+ },
+ "version": "2.1.1"
}
diff --git a/deps/npm/node_modules/libnpx/package.json b/deps/npm/node_modules/libnpx/package.json
index a7a6da98918bbd..57fdb89e237264 100644
--- a/deps/npm/node_modules/libnpx/package.json
+++ b/deps/npm/node_modules/libnpx/package.json
@@ -1,34 +1,34 @@
{
- "_from": "libnpx@9.2.1",
- "_id": "libnpx@9.2.1",
+ "_from": "libnpx@9.6.0",
+ "_id": "libnpx@9.6.0",
"_inBundle": false,
- "_integrity": "sha512-8xbZZ4+jn48kyyqgeIKKRxTie3wz/8HDP8eo7yA/bpPPmXtdSbudSc2BStSR6qCYt9Y5Jzf/h/gyxDsMmcCFGQ==",
+ "_integrity": "sha512-XyfUHtcxQPuQ/KEjdDzA8IHu/l26u5HuaZa41Y8zvk8IjT7ZkpGE2jFXQ5ozpOPFpUoip7lE9iJJzJpITS+zkQ==",
"_location": "/libnpx",
"_phantomChildren": {
"graceful-fs": "4.1.11",
"lru-cache": "4.1.1",
"normalize-package-data": "2.4.0",
"strip-ansi": "4.0.0",
- "which": "1.2.14"
+ "which": "1.3.0"
},
"_requested": {
"type": "version",
"registry": true,
- "raw": "libnpx@9.2.1",
+ "raw": "libnpx@9.6.0",
"name": "libnpx",
"escapedName": "libnpx",
- "rawSpec": "9.2.1",
+ "rawSpec": "9.6.0",
"saveSpec": null,
- "fetchSpec": "9.2.1"
+ "fetchSpec": "9.6.0"
},
"_requiredBy": [
"#USER",
"/"
],
- "_resolved": "https://registry.npmjs.org/libnpx/-/libnpx-9.2.1.tgz",
- "_shasum": "cef11bfa2e5ac68521a7c1b82f48ee8ba19884ae",
- "_spec": "libnpx@9.2.1",
- "_where": "/Users/zkat/Documents/code/release-checkouts/node/deps/npm",
+ "_resolved": "https://registry.npmjs.org/libnpx/-/libnpx-9.6.0.tgz",
+ "_shasum": "c441ddd698b043bd8e8dc78384fa8eb7d77991e5",
+ "_spec": "libnpx@9.6.0",
+ "_where": "/Users/rebecca/code/npm",
"author": {
"name": "Kat Marchán",
"email": "kzm@sykosomatic.org"
@@ -62,7 +62,7 @@
"json": "^9.0.6",
"marked-man": "^0.2.1",
"mkdirp": "^0.5.1",
- "npm": "^5.2.0",
+ "npm": "^5.3.0",
"nyc": "^11.0.2",
"require-inject": "^1.4.0",
"standard": "^10.0.2",
@@ -109,5 +109,5 @@
"update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
"update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
},
- "version": "9.2.1"
+ "version": "9.6.0"
}
diff --git a/deps/npm/node_modules/meant/.npmignore b/deps/npm/node_modules/meant/.npmignore
new file mode 100644
index 00000000000000..5148e527a7e286
--- /dev/null
+++ b/deps/npm/node_modules/meant/.npmignore
@@ -0,0 +1,37 @@
+# Logs
+logs
+*.log
+npm-debug.log*
+
+# Runtime data
+pids
+*.pid
+*.seed
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (http://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules
+jspm_packages
+
+# Optional npm cache directory
+.npm
+
+# Optional REPL history
+.node_repl_history
diff --git a/deps/npm/node_modules/meant/.travis.yml b/deps/npm/node_modules/meant/.travis.yml
new file mode 100644
index 00000000000000..413d5de7f1a649
--- /dev/null
+++ b/deps/npm/node_modules/meant/.travis.yml
@@ -0,0 +1,16 @@
+script:
+ - "npm test"
+
+language: node_js
+
+node_js:
+ - "5"
+ - "4"
+ - iojs
+ - "0.12"
+
+sudo: false
+
+cache:
+ directories:
+ - node_modules
diff --git a/deps/npm/node_modules/meant/CHANGELOG.md b/deps/npm/node_modules/meant/CHANGELOG.md
new file mode 100644
index 00000000000000..0117b2e1053f36
--- /dev/null
+++ b/deps/npm/node_modules/meant/CHANGELOG.md
@@ -0,0 +1,19 @@
+# Change Log
+
+All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+
+
+# 1.0.0 (2016-09-08)
+
+
+### Bug Fixes
+
+* **deps:** install devDeps and update tests ([d766d6f](https://github.com/watilde/meant/commit/d766d6f))
+* **run-script:** add npm run release command ([9387904](https://github.com/watilde/meant/commit/9387904))
+* **test:** add test.js ([65b6e99](https://github.com/watilde/meant/commit/65b6e99))
+* **travis:** add .travis.yml ([24d918c](https://github.com/watilde/meant/commit/24d918c))
+
+
+### Features
+
+* **new-meant:** add index.js ([7289b99](https://github.com/watilde/meant/commit/7289b99))
diff --git a/deps/npm/node_modules/meant/LICENSE b/deps/npm/node_modules/meant/LICENSE
new file mode 100644
index 00000000000000..4205f889a4b31e
--- /dev/null
+++ b/deps/npm/node_modules/meant/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2016 Daijirō Wachi
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/deps/npm/node_modules/meant/README.md b/deps/npm/node_modules/meant/README.md
new file mode 100644
index 00000000000000..6c541ce9cbf0cd
--- /dev/null
+++ b/deps/npm/node_modules/meant/README.md
@@ -0,0 +1,56 @@
+# meant [![Build Status](https://travis-ci.org/watilde/meant.png?branch=master)](https://travis-ci.org/watilde/meant)
+
+Like the `Did you mean?` in git for npm
+
+## Installation
+
+Download node at [nodejs.org](http://nodejs.org) and install it, if you haven't already.
+
+```sh
+npm install meant --save
+```
+
+
+## Tests
+
+```sh
+npm install
+npm test
+```
+```
+
+> meant@1.0.0 test /Users/watilde/Development/meant
+> standard && tap test.js
+TAP version 13
+# Subtest: test.js
+ # Subtest: test vs ['tast', 'tbst', 'tcst', 'foo']
+ ok 1 - list has tast
+ ok 2 - list has tbst
+ ok 3 - list has tcst
+ ok 4 - list doesn't have foo
+ 1..4
+ ok 1 - test vs ['tast', 'tbst', 'tcst', 'foo'] # time=11.816ms
+ 1..1
+ # time=44.006ms
+ok 1 - test.js # time=249.154ms
+1..1
+# time=267.371ms
+
+```
+
+## Dependencies
+
+None
+
+## Dev Dependencies
+
+- [standard](https://github.com/feross/standard): JavaScript Standard Style
+- [standard-version](https://github.com/conventional-changelog/standard-version): replacement for `npm version` with automatic CHANGELOG generation
+- [tap](https://github.com/tapjs/node-tap): A Test-Anything-Protocol library
+
+
+## License
+
+MIT
+
+_Generated by [package-json-to-readme](https://github.com/zeke/package-json-to-readme)_
diff --git a/deps/npm/node_modules/meant/index.js b/deps/npm/node_modules/meant/index.js
new file mode 100644
index 00000000000000..647ba912d42ec2
--- /dev/null
+++ b/deps/npm/node_modules/meant/index.js
@@ -0,0 +1,49 @@
+function levenshteinD (s1, s2) {
+ var d = []
+ var i = 0
+
+ for (i = 0; i <= s1.length; i++) d[i] = [i]
+ for (i = 0; i <= s2.length; i++) d[0][i] = i
+
+ s2.split('').forEach(function (c2, j) {
+ s1.split('').forEach(function (c1, i) {
+ if (c1 === c2) {
+ d[i + 1][j + 1] = d[i][j]
+ return
+ }
+ d[i + 1][j + 1] = Math.min(
+ d[i][j + 1] + 1,
+ d[i + 1][j] + 1,
+ d[i][j] + 1
+ )
+ })
+ })
+
+ return d[s1.length][s2.length]
+}
+
+function meant (scmd, commands) {
+ var d = []
+ var bestSimilarity = []
+
+ commands.forEach(function (cmd, i) {
+ var item = {}
+ item[levenshteinD(scmd, cmd)] = i
+ d.push(item)
+ })
+
+ d.sort(function (a, b) {
+ return Number(Object.keys(a)[0]) - Number(Object.keys(b)[0])
+ })
+
+ d.forEach(function (item) {
+ var key = Number(Object.keys(item)[0])
+ if (scmd.length / 2 >= key) {
+ bestSimilarity.push(commands[item[key]])
+ }
+ })
+
+ return bestSimilarity
+}
+
+module.exports = meant
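
`meant()` ranks candidate commands by Levenshtein distance and keeps only those within half the mistyped command's length. A usage sketch in the package's own style (the command names are illustrative):

```js
var meant = require('meant')

// 'install' is distance 1 from 'instal' and within the threshold of 3,
// so it is suggested; 'publish' is too far away and is filtered out.
console.log(meant('instal', ['install', 'publish']))
//=> [ 'install' ]
```
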
diff --git a/deps/npm/node_modules/meant/package.json b/deps/npm/node_modules/meant/package.json
new file mode 100644
index 00000000000000..1111b1f89a7611
--- /dev/null
+++ b/deps/npm/node_modules/meant/package.json
@@ -0,0 +1,56 @@
+{
+ "_from": "meant@~1.0.0",
+ "_id": "meant@1.0.0",
+ "_inBundle": false,
+ "_integrity": "sha1-y2KG47evkxXxYRj9wiRybtOAdLs=",
+ "_location": "/meant",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "meant@~1.0.0",
+ "name": "meant",
+ "escapedName": "meant",
+ "rawSpec": "~1.0.0",
+ "saveSpec": null,
+ "fetchSpec": "~1.0.0"
+ },
+ "_requiredBy": [
+ "#USER",
+ "/"
+ ],
+ "_resolved": "https://registry.npmjs.org/meant/-/meant-1.0.0.tgz",
+ "_shasum": "cb6286e3b7af9315f16118fdc224726ed38074bb",
+ "_spec": "meant@~1.0.0",
+ "_where": "/Users/rebecca/code/npm",
+ "author": {
+ "name": "Daijiro Wachi"
+ },
+ "bugs": {
+ "url": "https://github.com/watilde/meant/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "Like the `Did you mean?` in git for npm",
+ "devDependencies": {
+ "standard": "^8.0.0",
+ "standard-version": "^2.4.0",
+ "tap": "^7.1.1"
+ },
+ "homepage": "https://github.com/watilde/meant#readme",
+ "keywords": [
+ "meant"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "meant",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/watilde/meant.git"
+ },
+ "scripts": {
+ "release": "standard-version",
+ "test": "standard && tap test.js"
+ },
+ "version": "1.0.0"
+}
diff --git a/deps/npm/node_modules/meant/test.js b/deps/npm/node_modules/meant/test.js
new file mode 100644
index 00000000000000..bc7ba564be9dd4
--- /dev/null
+++ b/deps/npm/node_modules/meant/test.js
@@ -0,0 +1,11 @@
+var test = require('tap').test
+var meant = require('./')
+
+test('test vs [\'tast\', \'tbst\', \'tcst\', \'foo\']', function (t) {
+ var list = meant('test', ['tast', 'tbst', 'tcst', 'foo'])
+ t.notEqual(list.indexOf('tast'), -1, 'list has tast')
+ t.notEqual(list.indexOf('tbst'), -1, 'list has tbst')
+ t.notEqual(list.indexOf('tcst'), -1, 'list has tcst')
+ t.equal(list.indexOf('foo'), -1, 'list doesn\'t have foo')
+ t.end()
+})
diff --git a/deps/npm/node_modules/fstream/.npmignore b/deps/npm/node_modules/node-gyp/node_modules/fstream/.npmignore
similarity index 100%
rename from deps/npm/node_modules/fstream/.npmignore
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/.npmignore
diff --git a/deps/npm/node_modules/fstream/.travis.yml b/deps/npm/node_modules/node-gyp/node_modules/fstream/.travis.yml
similarity index 100%
rename from deps/npm/node_modules/fstream/.travis.yml
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/.travis.yml
diff --git a/deps/npm/node_modules/fstream-npm/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/fstream/LICENSE
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/LICENSE
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/LICENSE
diff --git a/deps/npm/node_modules/fstream/README.md b/deps/npm/node_modules/node-gyp/node_modules/fstream/README.md
similarity index 100%
rename from deps/npm/node_modules/fstream/README.md
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/README.md
diff --git a/deps/npm/node_modules/fstream/examples/filter-pipe.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/filter-pipe.js
similarity index 100%
rename from deps/npm/node_modules/fstream/examples/filter-pipe.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/examples/filter-pipe.js
diff --git a/deps/npm/node_modules/fstream/examples/pipe.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/pipe.js
similarity index 100%
rename from deps/npm/node_modules/fstream/examples/pipe.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/examples/pipe.js
diff --git a/deps/npm/node_modules/fstream/examples/reader.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/reader.js
similarity index 100%
rename from deps/npm/node_modules/fstream/examples/reader.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/examples/reader.js
diff --git a/deps/npm/node_modules/fstream/examples/symlink-write.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/symlink-write.js
similarity index 100%
rename from deps/npm/node_modules/fstream/examples/symlink-write.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/examples/symlink-write.js
diff --git a/deps/npm/node_modules/fstream/fstream.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/fstream.js
similarity index 100%
rename from deps/npm/node_modules/fstream/fstream.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/fstream.js
diff --git a/deps/npm/node_modules/fstream/lib/abstract.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/abstract.js
similarity index 100%
rename from deps/npm/node_modules/fstream/lib/abstract.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/lib/abstract.js
diff --git a/deps/npm/node_modules/fstream/lib/collect.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/collect.js
similarity index 100%
rename from deps/npm/node_modules/fstream/lib/collect.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/lib/collect.js
diff --git a/deps/npm/node_modules/fstream/lib/dir-reader.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/dir-reader.js
similarity index 100%
rename from deps/npm/node_modules/fstream/lib/dir-reader.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/lib/dir-reader.js
diff --git a/deps/npm/node_modules/fstream/lib/dir-writer.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/dir-writer.js
similarity index 100%
rename from deps/npm/node_modules/fstream/lib/dir-writer.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/lib/dir-writer.js
diff --git a/deps/npm/node_modules/fstream/lib/file-reader.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/file-reader.js
similarity index 100%
rename from deps/npm/node_modules/fstream/lib/file-reader.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/lib/file-reader.js
diff --git a/deps/npm/node_modules/fstream/lib/file-writer.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/file-writer.js
similarity index 100%
rename from deps/npm/node_modules/fstream/lib/file-writer.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/lib/file-writer.js
diff --git a/deps/npm/node_modules/fstream/lib/get-type.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/get-type.js
similarity index 100%
rename from deps/npm/node_modules/fstream/lib/get-type.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/lib/get-type.js
diff --git a/deps/npm/node_modules/fstream/lib/link-reader.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/link-reader.js
similarity index 100%
rename from deps/npm/node_modules/fstream/lib/link-reader.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/lib/link-reader.js
diff --git a/deps/npm/node_modules/fstream/lib/link-writer.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/link-writer.js
similarity index 100%
rename from deps/npm/node_modules/fstream/lib/link-writer.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/lib/link-writer.js
diff --git a/deps/npm/node_modules/fstream/lib/proxy-reader.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/proxy-reader.js
similarity index 100%
rename from deps/npm/node_modules/fstream/lib/proxy-reader.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/lib/proxy-reader.js
diff --git a/deps/npm/node_modules/fstream/lib/proxy-writer.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/proxy-writer.js
similarity index 100%
rename from deps/npm/node_modules/fstream/lib/proxy-writer.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/lib/proxy-writer.js
diff --git a/deps/npm/node_modules/fstream/lib/reader.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/reader.js
similarity index 100%
rename from deps/npm/node_modules/fstream/lib/reader.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/lib/reader.js
diff --git a/deps/npm/node_modules/fstream/lib/socket-reader.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/socket-reader.js
similarity index 100%
rename from deps/npm/node_modules/fstream/lib/socket-reader.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/lib/socket-reader.js
diff --git a/deps/npm/node_modules/fstream/lib/writer.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/writer.js
similarity index 100%
rename from deps/npm/node_modules/fstream/lib/writer.js
rename to deps/npm/node_modules/node-gyp/node_modules/fstream/lib/writer.js
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/package.json b/deps/npm/node_modules/node-gyp/node_modules/fstream/package.json
new file mode 100644
index 00000000000000..5ac16f546b93f7
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/package.json
@@ -0,0 +1,62 @@
+{
+ "_from": "fstream@^1.0.0",
+ "_id": "fstream@1.0.11",
+ "_inBundle": false,
+ "_integrity": "sha1-XB+x8RdHcRTwYyoOtLcbPLD9MXE=",
+ "_location": "/node-gyp/fstream",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "fstream@^1.0.0",
+ "name": "fstream",
+ "escapedName": "fstream",
+ "rawSpec": "^1.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.0"
+ },
+ "_requiredBy": [
+ "/node-gyp",
+ "/node-gyp/tar"
+ ],
+ "_resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.11.tgz",
+ "_shasum": "5c1fb1f117477114f0632a0eb4b71b3cb0fd3171",
+ "_spec": "fstream@^1.0.0",
+ "_where": "/Users/rebecca/code/npm/node_modules/node-gyp",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/fstream/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "graceful-fs": "^4.1.2",
+ "inherits": "~2.0.0",
+ "mkdirp": ">=0.5 0",
+ "rimraf": "2"
+ },
+ "deprecated": false,
+ "description": "Advanced file system stream things",
+ "devDependencies": {
+ "standard": "^4.0.0",
+ "tap": "^1.2.0"
+ },
+ "engines": {
+ "node": ">=0.6"
+ },
+ "homepage": "https://github.com/npm/fstream#readme",
+ "license": "ISC",
+ "main": "fstream.js",
+ "name": "fstream",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/fstream.git"
+ },
+ "scripts": {
+ "test": "standard && tap examples/*.js"
+ },
+ "version": "1.0.11"
+}
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/semver/LICENSE
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/LICENSE
rename to deps/npm/node_modules/node-gyp/node_modules/semver/LICENSE
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/README.md b/deps/npm/node_modules/node-gyp/node_modules/semver/README.md
new file mode 100644
index 00000000000000..cbd956549dbb01
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/README.md
@@ -0,0 +1,350 @@
+semver(1) -- The semantic versioner for npm
+===========================================
+
+## Usage
+
+ $ npm install semver
+ $ node
+ var semver = require('semver')
+
+ semver.valid('1.2.3') // '1.2.3'
+ semver.valid('a.b.c') // null
+ semver.clean(' =v1.2.3 ') // '1.2.3'
+ semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
+ semver.gt('1.2.3', '9.8.7') // false
+ semver.lt('1.2.3', '9.8.7') // true
+
+As a command-line utility:
+
+ $ semver -h
+
+ SemVer 5.1.0
+
+ A JavaScript implementation of the http://semver.org/ specification
+ Copyright Isaac Z. Schlueter
+
+    Usage: semver [options] <version> [<version> [...]]
+ Prints valid versions sorted by SemVer precedence
+
+ Options:
+    -r --range <range>
+ Print versions that match the specified range.
+
+    -i --increment [<level>]
+ Increment a version by the specified level. Level can
+ be one of: major, minor, patch, premajor, preminor,
+ prepatch, or prerelease. Default level is 'patch'.
+ Only one version may be specified.
+
+    --preid <identifier>
+ Identifier to be used to prefix premajor, preminor,
+ prepatch or prerelease version increments.
+
+ -l --loose
+ Interpret versions and ranges loosely
+
+ Program exits successfully if any valid version satisfies
+ all supplied ranges, and prints all satisfying versions.
+
+ If no satisfying versions are found, then exits failure.
+
+ Versions are printed in ascending order, so supplying
+ multiple versions to the utility will just sort them.
+
+## Versions
+
+A "version" is described by the `v2.0.0` specification found at
+<http://semver.org/>.
+
+A leading `"="` or `"v"` character is stripped off and ignored.
+
+## Ranges
+
+A `version range` is a set of `comparators` which specify versions
+that satisfy the range.
+
+A `comparator` is composed of an `operator` and a `version`. The set
+of primitive `operators` is:
+
+* `<` Less than
+* `<=` Less than or equal to
+* `>` Greater than
+* `>=` Greater than or equal to
+* `=` Equal. If no operator is specified, then equality is assumed,
+ so this operator is optional, but MAY be included.
+
+For example, the comparator `>=1.2.7` would match the versions
+`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`
+or `1.1.0`.
+
+Comparators can be joined by whitespace to form a `comparator set`,
+which is satisfied by the **intersection** of all of the comparators
+it includes.
+
+A range is composed of one or more comparator sets, joined by `||`. A
+version matches a range if and only if every comparator in at least
+one of the `||`-separated comparator sets is satisfied by the version.
+
+For example, the range `>=1.2.7 <1.3.0` would match the versions
+`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`,
+or `1.1.0`.
+
+The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`,
+`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`.
+
+### Prerelease Tags
+
+If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then
+it will only be allowed to satisfy comparator sets if at least one
+comparator with the same `[major, minor, patch]` tuple also has a
+prerelease tag.
+
+For example, the range `>1.2.3-alpha.3` would be allowed to match the
+version `1.2.3-alpha.7`, but it would *not* be satisfied by
+`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater
+than" `1.2.3-alpha.3` according to the SemVer sort rules. The version
+range only accepts prerelease tags on the `1.2.3` version. The
+version `3.4.5` *would* satisfy the range, because it does not have a
+prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`.
+
+The purpose for this behavior is twofold. First, prerelease versions
+frequently are updated very quickly, and contain many breaking changes
+that are (by the author's design) not yet fit for public consumption.
+Therefore, by default, they are excluded from range matching
+semantics.
+
+Second, a user who has opted into using a prerelease version has
+clearly indicated the intent to use *that specific* set of
+alpha/beta/rc versions. By including a prerelease tag in the range,
+the user is indicating that they are aware of the risk. However, it
+is still not appropriate to assume that they have opted into taking a
+similar risk on the *next* set of prerelease versions.
+
+#### Prerelease Identifiers
+
+The method `.inc` takes an additional `identifier` string argument that
+will append the value of the string as a prerelease identifier:
+
+```javascript
+> semver.inc('1.2.3', 'prerelease', 'beta')
+'1.2.4-beta.0'
+```
+
+command-line example:
+
+```shell
+$ semver 1.2.3 -i prerelease --preid beta
+1.2.4-beta.0
+```
+
+Which then can be used to increment further:
+
+```shell
+$ semver 1.2.4-beta.0 -i prerelease
+1.2.4-beta.1
+```
+
+### Advanced Range Syntax
+
+Advanced range syntax desugars to primitive comparators in
+deterministic ways.
+
+Advanced ranges may be combined in the same way as primitive
+comparators using white space or `||`.
+
+#### Hyphen Ranges `X.Y.Z - A.B.C`
+
+Specifies an inclusive set.
+
+* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
+
+If a partial version is provided as the first version in the inclusive
+range, then the missing pieces are replaced with zeroes.
+
+* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4`
+
+If a partial version is provided as the second version in the
+inclusive range, then all versions that start with the supplied parts
+of the tuple are accepted, but nothing that would be greater than the
+provided tuple parts.
+
+* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0`
+* `1.2.3 - 2` := `>=1.2.3 <3.0.0`
+
+#### X-Ranges `1.2.x` `1.X` `1.2.*` `*`
+
+Any of `X`, `x`, or `*` may be used to "stand in" for one of the
+numeric values in the `[major, minor, patch]` tuple.
+
+* `*` := `>=0.0.0` (Any version satisfies)
+* `1.x` := `>=1.0.0 <2.0.0` (Matching major version)
+* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions)
+
+A partial version range is treated as an X-Range, so the special
+character is in fact optional.
+
+* `""` (empty string) := `*` := `>=0.0.0`
+* `1` := `1.x.x` := `>=1.0.0 <2.0.0`
+* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0`
+
+#### Tilde Ranges `~1.2.3` `~1.2` `~1`
+
+Allows patch-level changes if a minor version is specified on the
+comparator. Allows minor-level changes if not.
+
+* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0`
+* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`)
+* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`)
+* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0`
+* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`)
+* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`)
+* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in
+ the `1.2.3` version will be allowed, if they are greater than or
+ equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
+ `1.2.4-beta.2` would not, because it is a prerelease of a
+ different `[major, minor, patch]` tuple.
+
+#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4`
+
+Allows changes that do not modify the left-most non-zero digit in the
+`[major, minor, patch]` tuple. In other words, this allows patch and
+minor updates for versions `1.0.0` and above, patch updates for
+versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`.
+
+Many authors treat a `0.x` version as if the `x` were the major
+"breaking-change" indicator.
+
+Caret ranges are ideal when an author may make breaking changes
+between `0.2.4` and `0.3.0` releases, which is a common practice.
+However, it presumes that there will *not* be breaking changes between
+`0.2.4` and `0.2.5`. It allows for changes that are presumed to be
+additive (but non-breaking), according to commonly observed practices.
+
+* `^1.2.3` := `>=1.2.3 <2.0.0`
+* `^0.2.3` := `>=0.2.3 <0.3.0`
+* `^0.0.3` := `>=0.0.3 <0.0.4`
+* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in
+ the `1.2.3` version will be allowed, if they are greater than or
+ equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
+ `1.2.4-beta.2` would not, because it is a prerelease of a
+ different `[major, minor, patch]` tuple.
+* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the
+ `0.0.3` version *only* will be allowed, if they are greater than or
+ equal to `beta`. So, `0.0.3-pr.2` would be allowed.
+
+When parsing caret ranges, a missing `patch` value desugars to the
+number `0`, but will allow flexibility within that value, even if the
+major and minor versions are both `0`.
+
+* `^1.2.x` := `>=1.2.0 <2.0.0`
+* `^0.0.x` := `>=0.0.0 <0.1.0`
+* `^0.0` := `>=0.0.0 <0.1.0`
+
+Missing `minor` and `patch` values will desugar to zero, but also
+allow flexibility within those values, even if the major version is
+zero.
+
+* `^1.x` := `>=1.0.0 <2.0.0`
+* `^0.x` := `>=0.0.0 <1.0.0`
+
+### Range Grammar
+
+Putting all this together, here is a Backus-Naur grammar for ranges,
+for the benefit of parser authors:
+
+```bnf
+range-set ::= range ( logical-or range ) *
+logical-or ::= ( ' ' ) * '||' ( ' ' ) *
+range ::= hyphen | simple ( ' ' simple ) * | ''
+hyphen ::= partial ' - ' partial
+simple ::= primitive | partial | tilde | caret
+primitive ::= ( '<' | '>' | '>=' | '<=' | '=' | ) partial
+partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
+xr ::= 'x' | 'X' | '*' | nr
+nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) *
+tilde ::= '~' partial
+caret ::= '^' partial
+qualifier ::= ( '-' pre )? ( '+' build )?
+pre ::= parts
+build ::= parts
+parts ::= part ( '.' part ) *
+part ::= nr | [-0-9A-Za-z]+
+```
+
+## Functions
+
+All methods and classes take a final `loose` boolean argument that, if
+true, will be more forgiving about not-quite-valid semver strings.
+The resulting output will always be 100% strict, of course.
+
+Strict-mode Comparators and Ranges will be strict about the SemVer
+strings that they parse.
+
+* `valid(v)`: Return the parsed version, or null if it's not valid.
+* `inc(v, release)`: Return the version incremented by the release
+ type (`major`, `premajor`, `minor`, `preminor`, `patch`,
+ `prepatch`, or `prerelease`), or null if it's not valid
+ * `premajor` in one call will bump the version up to the next major
+ version and down to a prerelease of that major version.
+  `preminor` and `prepatch` work the same way.
+ * If called from a non-prerelease version, the `prerelease` will work the
+ same as `prepatch`. It increments the patch version, then makes a
+ prerelease. If the input version is already a prerelease it simply
+ increments it.
+* `prerelease(v)`: Returns an array of prerelease components, or null
+ if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]`
+* `major(v)`: Return the major version number.
+* `minor(v)`: Return the minor version number.
+* `patch(v)`: Return the patch version number.
+
+### Comparison
+
+* `gt(v1, v2)`: `v1 > v2`
+* `gte(v1, v2)`: `v1 >= v2`
+* `lt(v1, v2)`: `v1 < v2`
+* `lte(v1, v2)`: `v1 <= v2`
+* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent,
+ even if they're not the exact same string. You already know how to
+ compare strings.
+* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`.
+* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call
+ the corresponding function above. `"==="` and `"!=="` do simple
+ string comparison, but are included for completeness. Throws if an
+ invalid comparison string is provided.
+* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if
+ `v2` is greater. Sorts in ascending order if passed to `Array.sort()`.
+* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions
+ in descending order when passed to `Array.sort()`.
+* `diff(v1, v2)`: Returns difference between two versions by the release type
+ (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),
+ or null if the versions are the same.
+
+
+### Ranges
+
+* `validRange(range)`: Return the valid range or null if it's not valid
+* `satisfies(version, range)`: Return true if the version satisfies the
+ range.
+* `maxSatisfying(versions, range)`: Return the highest version in the list
+ that satisfies the range, or `null` if none of them do.
+* `minSatisfying(versions, range)`: Return the lowest version in the list
+ that satisfies the range, or `null` if none of them do.
+* `gtr(version, range)`: Return `true` if version is greater than all the
+ versions possible in the range.
+* `ltr(version, range)`: Return `true` if version is less than all the
+ versions possible in the range.
+* `outside(version, range, hilo)`: Return true if the version is outside
+ the bounds of the range in either the high or low direction. The
+ `hilo` argument must be either the string `'>'` or `'<'`. (This is
+ the function called by `gtr` and `ltr`.)
+
+Note that, since ranges may be non-contiguous, a version might not be
+greater than a range, less than a range, *or* satisfy a range! For
+example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`
+until `2.0.0`, so the version `1.2.10` would not be greater than the
+range (because `2.0.1` satisfies, which is higher), nor less than the
+range (since `1.2.8` satisfies, which is lower), and it also does not
+satisfy the range.
+
+If you want to know if a version satisfies or does not satisfy a
+range, use the `satisfies(version, range)` function.
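
A few of the functions above, exercised; the results follow the semantics documented in this README:

```javascript
var semver = require('semver')

semver.satisfies('1.2.10', '~1.2.3')                // true (>=1.2.3 <1.3.0)
semver.maxSatisfying(['1.2.3', '1.2.9'], '~1.2.0')  // '1.2.9'
semver.gtr('2.0.1', '1.2.3 - 1.9.9')                // true: above the whole range
semver.inc('1.2.3', 'prerelease', 'beta')           // '1.2.4-beta.0'
```
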
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/bin/semver b/deps/npm/node_modules/node-gyp/node_modules/semver/bin/semver
new file mode 100755
index 00000000000000..c5f2e857e82790
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/bin/semver
@@ -0,0 +1,133 @@
+#!/usr/bin/env node
+// Standalone semver comparison program.
+// Exits successfully and prints matching version(s) if
+// any supplied version is valid and passes all tests.
+
+var argv = process.argv.slice(2)
+ , versions = []
+ , range = []
+ , gt = []
+ , lt = []
+ , eq = []
+ , inc = null
+ , version = require("../package.json").version
+ , loose = false
+ , identifier = undefined
+ , semver = require("../semver")
+ , reverse = false
+
+main()
+
+function main () {
+ if (!argv.length) return help()
+ while (argv.length) {
+ var a = argv.shift()
+ var i = a.indexOf('=')
+ if (i !== -1) {
+ a = a.slice(0, i)
+ argv.unshift(a.slice(i + 1))
+ }
+ switch (a) {
+ case "-rv": case "-rev": case "--rev": case "--reverse":
+ reverse = true
+ break
+ case "-l": case "--loose":
+ loose = true
+ break
+ case "-v": case "--version":
+ versions.push(argv.shift())
+ break
+ case "-i": case "--inc": case "--increment":
+ switch (argv[0]) {
+ case "major": case "minor": case "patch": case "prerelease":
+ case "premajor": case "preminor": case "prepatch":
+ inc = argv.shift()
+ break
+ default:
+ inc = "patch"
+ break
+ }
+ break
+ case "--preid":
+ identifier = argv.shift()
+ break
+ case "-r": case "--range":
+ range.push(argv.shift())
+ break
+ case "-h": case "--help": case "-?":
+ return help()
+ default:
+ versions.push(a)
+ break
+ }
+ }
+
+ versions = versions.filter(function (v) {
+ return semver.valid(v, loose)
+ })
+ if (!versions.length) return fail()
+ if (inc && (versions.length !== 1 || range.length))
+ return failInc()
+
+ for (var i = 0, l = range.length; i < l ; i ++) {
+ versions = versions.filter(function (v) {
+ return semver.satisfies(v, range[i], loose)
+ })
+ if (!versions.length) return fail()
+ }
+ return success(versions)
+}
+
+function failInc () {
+ console.error("--inc can only be used on a single version with no range")
+ fail()
+}
+
+function fail () { process.exit(1) }
+
+function success () {
+ var compare = reverse ? "rcompare" : "compare"
+ versions.sort(function (a, b) {
+ return semver[compare](a, b, loose)
+ }).map(function (v) {
+ return semver.clean(v, loose)
+ }).map(function (v) {
+ return inc ? semver.inc(v, inc, loose, identifier) : v
+ }).forEach(function (v,i,_) { console.log(v) })
+}
+
+function help () {
+ console.log(["SemVer " + version
+ ,""
+ ,"A JavaScript implementation of the http://semver.org/ specification"
+ ,"Copyright Isaac Z. Schlueter"
+ ,""
+ ,"Usage: semver [options] [ [...]]"
+ ,"Prints valid versions sorted by SemVer precedence"
+ ,""
+ ,"Options:"
+ ,"-r --range "
+ ," Print versions that match the specified range."
+ ,""
+ ,"-i --increment []"
+ ," Increment a version by the specified level. Level can"
+ ," be one of: major, minor, patch, premajor, preminor,"
+ ," prepatch, or prerelease. Default level is 'patch'."
+ ," Only one version may be specified."
+ ,""
+ ,"--preid "
+ ," Identifier to be used to prefix premajor, preminor,"
+ ," prepatch or prerelease version increments."
+ ,""
+ ,"-l --loose"
+ ," Interpret versions and ranges loosely"
+ ,""
+ ,"Program exits successfully if any valid version satisfies"
+ ,"all supplied ranges, and prints all satisfying versions."
+ ,""
+ ,"If no satisfying versions are found, then exits failure."
+ ,""
+ ,"Versions are printed in ascending order, so supplying"
+ ,"multiple versions to the utility will just sort them."
+ ].join("\n"))
+}
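
The help text above maps directly onto the CLI's behavior; an illustrative session (the version numbers are made up):

```shell
$ semver 1.2.3 0.9.1 -r ">=1.0.0"
1.2.3
$ semver 1.2.4-beta.0 -i prerelease
1.2.4-beta.1
```
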
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/package.json b/deps/npm/node_modules/node-gyp/node_modules/semver/package.json
new file mode 100644
index 00000000000000..80f00fe5756d0a
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/package.json
@@ -0,0 +1,54 @@
+{
+ "_from": "semver@~5.3.0",
+ "_id": "semver@5.3.0",
+ "_inBundle": false,
+ "_integrity": "sha1-myzl094C0XxgEq0yaqa00M9U+U8=",
+ "_location": "/node-gyp/semver",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "semver@~5.3.0",
+ "name": "semver",
+ "escapedName": "semver",
+ "rawSpec": "~5.3.0",
+ "saveSpec": null,
+ "fetchSpec": "~5.3.0"
+ },
+ "_requiredBy": [
+ "/node-gyp"
+ ],
+ "_resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz",
+ "_shasum": "9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f",
+ "_spec": "semver@~5.3.0",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/node-gyp",
+ "bin": {
+ "semver": "./bin/semver"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/node-semver/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "The semantic version parser used by npm.",
+ "devDependencies": {
+ "tap": "^2.0.0"
+ },
+ "files": [
+ "bin",
+ "range.bnf",
+ "semver.js"
+ ],
+ "homepage": "https://github.com/npm/node-semver#readme",
+ "license": "ISC",
+ "main": "semver.js",
+ "name": "semver",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/node-semver.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "version": "5.3.0"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/range.bnf b/deps/npm/node_modules/node-gyp/node_modules/semver/range.bnf
new file mode 100644
index 00000000000000..25ebd5c8325488
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/range.bnf
@@ -0,0 +1,16 @@
+range-set ::= range ( logical-or range ) *
+logical-or ::= ( ' ' ) * '||' ( ' ' ) *
+range ::= hyphen | simple ( ' ' simple ) * | ''
+hyphen ::= partial ' - ' partial
+simple ::= primitive | partial | tilde | caret
+primitive ::= ( '<' | '>' | '>=' | '<=' | '=' | ) partial
+partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
+xr ::= 'x' | 'X' | '*' | nr
+nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) *
+tilde ::= '~' partial
+caret ::= '^' partial
+qualifier ::= ( '-' pre )? ( '+' build )?
+pre ::= parts
+build ::= parts
+parts ::= part ( '.' part ) *
+part ::= nr | [-0-9A-Za-z]+
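
To make the grammar concrete: the parser in `semver.js` below desugars hyphen, tilde, caret, and x-ranges into primitive comparators, and `validRange()` returns the desugared form. A small sketch (outputs shown in comments):

```js
var semver = require('semver')

semver.validRange('1.2.3 - 2.3')   // '>=1.2.3 <2.4.0'          hyphen range
semver.validRange('~1.2.3')        // '>=1.2.3 <1.3.0'          tilde
semver.validRange('^0.2.3')        // '>=0.2.3 <0.3.0'          caret (0.x majors are special-cased)
semver.validRange('1.x || >=2.5')  // '>=1.0.0 <2.0.0||>=2.5.0' x-range with logical-or
```
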
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/semver.js b/deps/npm/node_modules/node-gyp/node_modules/semver/semver.js
new file mode 100644
index 00000000000000..5f1a3c5c9e5dc9
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/semver.js
@@ -0,0 +1,1203 @@
+exports = module.exports = SemVer;
+
+// The debug function is excluded entirely from the minified version.
+/* nomin */ var debug;
+/* nomin */ if (typeof process === 'object' &&
+ /* nomin */ process.env &&
+ /* nomin */ process.env.NODE_DEBUG &&
+ /* nomin */ /\bsemver\b/i.test(process.env.NODE_DEBUG))
+ /* nomin */ debug = function() {
+ /* nomin */ var args = Array.prototype.slice.call(arguments, 0);
+ /* nomin */ args.unshift('SEMVER');
+ /* nomin */ console.log.apply(console, args);
+ /* nomin */ };
+/* nomin */ else
+ /* nomin */ debug = function() {};
+
+// Note: this is the semver.org version of the spec that it implements
+// Not necessarily the package version of this code.
+exports.SEMVER_SPEC_VERSION = '2.0.0';
+
+var MAX_LENGTH = 256;
+var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991;
+
+// The actual regexps go on exports.re
+var re = exports.re = [];
+var src = exports.src = [];
+var R = 0;
+
+// The following Regular Expressions can be used for tokenizing,
+// validating, and parsing SemVer version strings.
+
+// ## Numeric Identifier
+// A single `0`, or a non-zero digit followed by zero or more digits.
+
+var NUMERICIDENTIFIER = R++;
+src[NUMERICIDENTIFIER] = '0|[1-9]\\d*';
+var NUMERICIDENTIFIERLOOSE = R++;
+src[NUMERICIDENTIFIERLOOSE] = '[0-9]+';
+
+
+// ## Non-numeric Identifier
+// Zero or more digits, followed by a letter or hyphen, and then zero or
+// more letters, digits, or hyphens.
+
+var NONNUMERICIDENTIFIER = R++;
+src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*';
+
+
+// ## Main Version
+// Three dot-separated numeric identifiers.
+
+var MAINVERSION = R++;
+src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIER] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIER] + ')';
+
+var MAINVERSIONLOOSE = R++;
+src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIERLOOSE] + ')';
+
+// ## Pre-release Version Identifier
+// A numeric identifier, or a non-numeric identifier.
+
+var PRERELEASEIDENTIFIER = R++;
+src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
+ '|' + src[NONNUMERICIDENTIFIER] + ')';
+
+var PRERELEASEIDENTIFIERLOOSE = R++;
+src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
+ '|' + src[NONNUMERICIDENTIFIER] + ')';
+
+
+// ## Pre-release Version
+// Hyphen, followed by one or more dot-separated pre-release version
+// identifiers.
+
+var PRERELEASE = R++;
+src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
+ '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))';
+
+var PRERELEASELOOSE = R++;
+src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
+ '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))';
+
+// ## Build Metadata Identifier
+// Any combination of digits, letters, or hyphens.
+
+var BUILDIDENTIFIER = R++;
+src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+';
+
+// ## Build Metadata
+// Plus sign, followed by one or more period-separated build metadata
+// identifiers.
+
+var BUILD = R++;
+src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
+ '(?:\\.' + src[BUILDIDENTIFIER] + ')*))';
+
+
+// ## Full Version String
+// A main version, followed optionally by a pre-release version and
+// build metadata.
+
+// Note that only the major, minor, patch, and pre-release sections of
+// the version string are capturing groups. The build metadata is not a
+// capturing group, because it should not ever be used in version
+// comparison.
+
+var FULL = R++;
+var FULLPLAIN = 'v?' + src[MAINVERSION] +
+ src[PRERELEASE] + '?' +
+ src[BUILD] + '?';
+
+src[FULL] = '^' + FULLPLAIN + '$';
+
+// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
+// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
+// common in the npm registry.
+var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
+ src[PRERELEASELOOSE] + '?' +
+ src[BUILD] + '?';
+
+var LOOSE = R++;
+src[LOOSE] = '^' + LOOSEPLAIN + '$';
+
+var GTLT = R++;
+src[GTLT] = '((?:<|>)?=?)';
+
+// Something like "2.*" or "1.2.x".
+// Note that "x.x" is a valid xRange identifer, meaning "any version"
+// Only the first item is strictly required.
+var XRANGEIDENTIFIERLOOSE = R++;
+src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*';
+var XRANGEIDENTIFIER = R++;
+src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*';
+
+var XRANGEPLAIN = R++;
+src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:' + src[PRERELEASE] + ')?' +
+ src[BUILD] + '?' +
+ ')?)?';
+
+var XRANGEPLAINLOOSE = R++;
+src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:' + src[PRERELEASELOOSE] + ')?' +
+ src[BUILD] + '?' +
+ ')?)?';
+
+var XRANGE = R++;
+src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$';
+var XRANGELOOSE = R++;
+src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$';
+
+// Tilde ranges.
+// Meaning is "reasonably at or greater than"
+var LONETILDE = R++;
+src[LONETILDE] = '(?:~>?)';
+
+var TILDETRIM = R++;
+src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+';
+re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g');
+var tildeTrimReplace = '$1~';
+
+var TILDE = R++;
+src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$';
+var TILDELOOSE = R++;
+src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$';
+
+// Caret ranges.
+// Meaning is "at least and backwards compatible with"
+var LONECARET = R++;
+src[LONECARET] = '(?:\\^)';
+
+var CARETTRIM = R++;
+src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+';
+re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g');
+var caretTrimReplace = '$1^';
+
+var CARET = R++;
+src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$';
+var CARETLOOSE = R++;
+src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$';
+
+// A simple gt/lt/eq thing, or just "" to indicate "any version"
+var COMPARATORLOOSE = R++;
+src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$';
+var COMPARATOR = R++;
+src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$';
+
+
+// An expression to strip any whitespace between the gtlt and the thing
+// it modifies, so that `> 1.2.3` ==> `>1.2.3`
+var COMPARATORTRIM = R++;
+src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
+ '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')';
+
+// this one has to use the /g flag
+re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g');
+var comparatorTrimReplace = '$1$2$3';
+
+
+// Something like `1.2.3 - 1.2.4`
+// Note that these all use the loose form, because they'll be
+// checked against either the strict or loose comparator form
+// later.
+var HYPHENRANGE = R++;
+src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
+ '\\s+-\\s+' +
+ '(' + src[XRANGEPLAIN] + ')' +
+ '\\s*$';
+
+var HYPHENRANGELOOSE = R++;
+src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
+ '\\s+-\\s+' +
+ '(' + src[XRANGEPLAINLOOSE] + ')' +
+ '\\s*$';
+
+// Star ranges basically just allow anything at all.
+var STAR = R++;
+src[STAR] = '(<|>)?=?\\s*\\*';
+
+// Compile to actual regexp objects.
+// All are flag-free, unless they were created above with a flag.
+for (var i = 0; i < R; i++) {
+ debug(i, src[i]);
+ if (!re[i])
+ re[i] = new RegExp(src[i]);
+}
+
+exports.parse = parse;
+function parse(version, loose) {
+ if (version instanceof SemVer)
+ return version;
+
+ if (typeof version !== 'string')
+ return null;
+
+ if (version.length > MAX_LENGTH)
+ return null;
+
+ var r = loose ? re[LOOSE] : re[FULL];
+ if (!r.test(version))
+ return null;
+
+ try {
+ return new SemVer(version, loose);
+ } catch (er) {
+ return null;
+ }
+}
+
+exports.valid = valid;
+function valid(version, loose) {
+ var v = parse(version, loose);
+ return v ? v.version : null;
+}
+
+
+exports.clean = clean;
+function clean(version, loose) {
+ var s = parse(version.trim().replace(/^[=v]+/, ''), loose);
+ return s ? s.version : null;
+}
+
+exports.SemVer = SemVer;
+
+function SemVer(version, loose) {
+ if (version instanceof SemVer) {
+ if (version.loose === loose)
+ return version;
+ else
+ version = version.version;
+ } else if (typeof version !== 'string') {
+ throw new TypeError('Invalid Version: ' + version);
+ }
+
+ if (version.length > MAX_LENGTH)
+ throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
+
+ if (!(this instanceof SemVer))
+ return new SemVer(version, loose);
+
+ debug('SemVer', version, loose);
+ this.loose = loose;
+ var m = version.trim().match(loose ? re[LOOSE] : re[FULL]);
+
+ if (!m)
+ throw new TypeError('Invalid Version: ' + version);
+
+ this.raw = version;
+
+ // these are actually numbers
+ this.major = +m[1];
+ this.minor = +m[2];
+ this.patch = +m[3];
+
+ if (this.major > MAX_SAFE_INTEGER || this.major < 0)
+ throw new TypeError('Invalid major version')
+
+ if (this.minor > MAX_SAFE_INTEGER || this.minor < 0)
+ throw new TypeError('Invalid minor version')
+
+ if (this.patch > MAX_SAFE_INTEGER || this.patch < 0)
+ throw new TypeError('Invalid patch version')
+
+ // numberify any prerelease numeric ids
+ if (!m[4])
+ this.prerelease = [];
+ else
+ this.prerelease = m[4].split('.').map(function(id) {
+ if (/^[0-9]+$/.test(id)) {
+ var num = +id;
+ if (num >= 0 && num < MAX_SAFE_INTEGER)
+ return num;
+ }
+ return id;
+ });
+
+ this.build = m[5] ? m[5].split('.') : [];
+ this.format();
+}
+
+SemVer.prototype.format = function() {
+ this.version = this.major + '.' + this.minor + '.' + this.patch;
+ if (this.prerelease.length)
+ this.version += '-' + this.prerelease.join('.');
+ return this.version;
+};
+
+SemVer.prototype.toString = function() {
+ return this.version;
+};
+
+SemVer.prototype.compare = function(other) {
+ debug('SemVer.compare', this.version, this.loose, other);
+ if (!(other instanceof SemVer))
+ other = new SemVer(other, this.loose);
+
+ return this.compareMain(other) || this.comparePre(other);
+};
+
+SemVer.prototype.compareMain = function(other) {
+ if (!(other instanceof SemVer))
+ other = new SemVer(other, this.loose);
+
+ return compareIdentifiers(this.major, other.major) ||
+ compareIdentifiers(this.minor, other.minor) ||
+ compareIdentifiers(this.patch, other.patch);
+};
+
+SemVer.prototype.comparePre = function(other) {
+ if (!(other instanceof SemVer))
+ other = new SemVer(other, this.loose);
+
+ // NOT having a prerelease is > having one
+ if (this.prerelease.length && !other.prerelease.length)
+ return -1;
+ else if (!this.prerelease.length && other.prerelease.length)
+ return 1;
+ else if (!this.prerelease.length && !other.prerelease.length)
+ return 0;
+
+ var i = 0;
+ do {
+ var a = this.prerelease[i];
+ var b = other.prerelease[i];
+ debug('prerelease compare', i, a, b);
+ if (a === undefined && b === undefined)
+ return 0;
+ else if (b === undefined)
+ return 1;
+ else if (a === undefined)
+ return -1;
+ else if (a === b)
+ continue;
+ else
+ return compareIdentifiers(a, b);
+ } while (++i);
+};
+
+// preminor will bump the version up to the next minor release, and immediately
+// down to pre-release. premajor and prepatch work the same way.
+SemVer.prototype.inc = function(release, identifier) {
+ switch (release) {
+ case 'premajor':
+ this.prerelease.length = 0;
+ this.patch = 0;
+ this.minor = 0;
+ this.major++;
+ this.inc('pre', identifier);
+ break;
+ case 'preminor':
+ this.prerelease.length = 0;
+ this.patch = 0;
+ this.minor++;
+ this.inc('pre', identifier);
+ break;
+ case 'prepatch':
+ // If this is already a prerelease, it will bump to the next version
+ // drop any prereleases that might already exist, since they are not
+ // relevant at this point.
+ this.prerelease.length = 0;
+ this.inc('patch', identifier);
+ this.inc('pre', identifier);
+ break;
+ // If the input is a non-prerelease version, this acts the same as
+ // prepatch.
+ case 'prerelease':
+ if (this.prerelease.length === 0)
+ this.inc('patch', identifier);
+ this.inc('pre', identifier);
+ break;
+
+ case 'major':
+ // If this is a pre-major version, bump up to the same major version.
+ // Otherwise increment major.
+ // 1.0.0-5 bumps to 1.0.0
+ // 1.1.0 bumps to 2.0.0
+ if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0)
+ this.major++;
+ this.minor = 0;
+ this.patch = 0;
+ this.prerelease = [];
+ break;
+ case 'minor':
+ // If this is a pre-minor version, bump up to the same minor version.
+ // Otherwise increment minor.
+ // 1.2.0-5 bumps to 1.2.0
+ // 1.2.1 bumps to 1.3.0
+ if (this.patch !== 0 || this.prerelease.length === 0)
+ this.minor++;
+ this.patch = 0;
+ this.prerelease = [];
+ break;
+ case 'patch':
+ // If this is not a pre-release version, it will increment the patch.
+ // If it is a pre-release it will bump up to the same patch version.
+ // 1.2.0-5 patches to 1.2.0
+ // 1.2.0 patches to 1.2.1
+ if (this.prerelease.length === 0)
+ this.patch++;
+ this.prerelease = [];
+ break;
+ // This probably shouldn't be used publicly.
+ // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
+ case 'pre':
+ if (this.prerelease.length === 0)
+ this.prerelease = [0];
+ else {
+ var i = this.prerelease.length;
+ while (--i >= 0) {
+ if (typeof this.prerelease[i] === 'number') {
+ this.prerelease[i]++;
+ i = -2;
+ }
+ }
+ if (i === -1) // didn't increment anything
+ this.prerelease.push(0);
+ }
+ if (identifier) {
+ // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
+ // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
+ if (this.prerelease[0] === identifier) {
+ if (isNaN(this.prerelease[1]))
+ this.prerelease = [identifier, 0];
+ } else
+ this.prerelease = [identifier, 0];
+ }
+ break;
+
+ default:
+ throw new Error('invalid increment argument: ' + release);
+ }
+ this.format();
+ this.raw = this.version;
+ return this;
+};
+
+exports.inc = inc;
+function inc(version, release, loose, identifier) {
+ if (typeof(loose) === 'string') {
+ identifier = loose;
+ loose = undefined;
+ }
+
+ try {
+ return new SemVer(version, loose).inc(release, identifier).version;
+ } catch (er) {
+ return null;
+ }
+}
+
+exports.diff = diff;
+function diff(version1, version2) {
+ if (eq(version1, version2)) {
+ return null;
+ } else {
+ var v1 = parse(version1);
+ var v2 = parse(version2);
+ if (v1.prerelease.length || v2.prerelease.length) {
+ for (var key in v1) {
+ if (key === 'major' || key === 'minor' || key === 'patch') {
+ if (v1[key] !== v2[key]) {
+ return 'pre'+key;
+ }
+ }
+ }
+ return 'prerelease';
+ }
+ for (var key in v1) {
+ if (key === 'major' || key === 'minor' || key === 'patch') {
+ if (v1[key] !== v2[key]) {
+ return key;
+ }
+ }
+ }
+ }
+}
+
+exports.compareIdentifiers = compareIdentifiers;
+
+var numeric = /^[0-9]+$/;
+function compareIdentifiers(a, b) {
+ var anum = numeric.test(a);
+ var bnum = numeric.test(b);
+
+ if (anum && bnum) {
+ a = +a;
+ b = +b;
+ }
+
+ return (anum && !bnum) ? -1 :
+ (bnum && !anum) ? 1 :
+ a < b ? -1 :
+ a > b ? 1 :
+ 0;
+}
+
+exports.rcompareIdentifiers = rcompareIdentifiers;
+function rcompareIdentifiers(a, b) {
+ return compareIdentifiers(b, a);
+}
+
+exports.major = major;
+function major(a, loose) {
+ return new SemVer(a, loose).major;
+}
+
+exports.minor = minor;
+function minor(a, loose) {
+ return new SemVer(a, loose).minor;
+}
+
+exports.patch = patch;
+function patch(a, loose) {
+ return new SemVer(a, loose).patch;
+}
+
+exports.compare = compare;
+function compare(a, b, loose) {
+ return new SemVer(a, loose).compare(b);
+}
+
+exports.compareLoose = compareLoose;
+function compareLoose(a, b) {
+ return compare(a, b, true);
+}
+
+exports.rcompare = rcompare;
+function rcompare(a, b, loose) {
+ return compare(b, a, loose);
+}
+
+exports.sort = sort;
+function sort(list, loose) {
+ return list.sort(function(a, b) {
+ return exports.compare(a, b, loose);
+ });
+}
+
+exports.rsort = rsort;
+function rsort(list, loose) {
+ return list.sort(function(a, b) {
+ return exports.rcompare(a, b, loose);
+ });
+}
+
+exports.gt = gt;
+function gt(a, b, loose) {
+ return compare(a, b, loose) > 0;
+}
+
+exports.lt = lt;
+function lt(a, b, loose) {
+ return compare(a, b, loose) < 0;
+}
+
+exports.eq = eq;
+function eq(a, b, loose) {
+ return compare(a, b, loose) === 0;
+}
+
+exports.neq = neq;
+function neq(a, b, loose) {
+ return compare(a, b, loose) !== 0;
+}
+
+exports.gte = gte;
+function gte(a, b, loose) {
+ return compare(a, b, loose) >= 0;
+}
+
+exports.lte = lte;
+function lte(a, b, loose) {
+ return compare(a, b, loose) <= 0;
+}
+
+exports.cmp = cmp;
+function cmp(a, op, b, loose) {
+ var ret;
+ switch (op) {
+ case '===':
+ if (typeof a === 'object') a = a.version;
+ if (typeof b === 'object') b = b.version;
+ ret = a === b;
+ break;
+ case '!==':
+ if (typeof a === 'object') a = a.version;
+ if (typeof b === 'object') b = b.version;
+ ret = a !== b;
+ break;
+ case '': case '=': case '==': ret = eq(a, b, loose); break;
+ case '!=': ret = neq(a, b, loose); break;
+ case '>': ret = gt(a, b, loose); break;
+ case '>=': ret = gte(a, b, loose); break;
+ case '<': ret = lt(a, b, loose); break;
+ case '<=': ret = lte(a, b, loose); break;
+ default: throw new TypeError('Invalid operator: ' + op);
+ }
+ return ret;
+}
+
+exports.Comparator = Comparator;
+function Comparator(comp, loose) {
+ if (comp instanceof Comparator) {
+ if (comp.loose === loose)
+ return comp;
+ else
+ comp = comp.value;
+ }
+
+ if (!(this instanceof Comparator))
+ return new Comparator(comp, loose);
+
+ debug('comparator', comp, loose);
+ this.loose = loose;
+ this.parse(comp);
+
+ if (this.semver === ANY)
+ this.value = '';
+ else
+ this.value = this.operator + this.semver.version;
+
+ debug('comp', this);
+}
+
+var ANY = {};
+Comparator.prototype.parse = function(comp) {
+ var r = this.loose ? re[COMPARATORLOOSE] : re[COMPARATOR];
+ var m = comp.match(r);
+
+ if (!m)
+ throw new TypeError('Invalid comparator: ' + comp);
+
+ this.operator = m[1];
+ if (this.operator === '=')
+ this.operator = '';
+
+ // if it literally is just '>' or '' then allow anything.
+ if (!m[2])
+ this.semver = ANY;
+ else
+ this.semver = new SemVer(m[2], this.loose);
+};
+
+Comparator.prototype.toString = function() {
+ return this.value;
+};
+
+Comparator.prototype.test = function(version) {
+ debug('Comparator.test', version, this.loose);
+
+ if (this.semver === ANY)
+ return true;
+
+ if (typeof version === 'string')
+ version = new SemVer(version, this.loose);
+
+ return cmp(version, this.operator, this.semver, this.loose);
+};
+
+
+exports.Range = Range;
+function Range(range, loose) {
+ if ((range instanceof Range) && range.loose === loose)
+ return range;
+
+ if (!(this instanceof Range))
+ return new Range(range, loose);
+
+ this.loose = loose;
+
+ // First, split based on boolean or ||
+ this.raw = range;
+ this.set = range.split(/\s*\|\|\s*/).map(function(range) {
+ return this.parseRange(range.trim());
+ }, this).filter(function(c) {
+ // throw out any that are not relevant for whatever reason
+ return c.length;
+ });
+
+ if (!this.set.length) {
+ throw new TypeError('Invalid SemVer Range: ' + range);
+ }
+
+ this.format();
+}
+
+Range.prototype.format = function() {
+ this.range = this.set.map(function(comps) {
+ return comps.join(' ').trim();
+ }).join('||').trim();
+ return this.range;
+};
+
+Range.prototype.toString = function() {
+ return this.range;
+};
+
+Range.prototype.parseRange = function(range) {
+ var loose = this.loose;
+ range = range.trim();
+ debug('range', range, loose);
+ // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
+ var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE];
+ range = range.replace(hr, hyphenReplace);
+ debug('hyphen replace', range);
+ // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
+ range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace);
+ debug('comparator trim', range, re[COMPARATORTRIM]);
+
+ // `~ 1.2.3` => `~1.2.3`
+ range = range.replace(re[TILDETRIM], tildeTrimReplace);
+
+ // `^ 1.2.3` => `^1.2.3`
+ range = range.replace(re[CARETTRIM], caretTrimReplace);
+
+ // normalize spaces
+ range = range.split(/\s+/).join(' ');
+
+ // At this point, the range is completely trimmed and
+ // ready to be split into comparators.
+
+ var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR];
+ var set = range.split(' ').map(function(comp) {
+ return parseComparator(comp, loose);
+ }).join(' ').split(/\s+/);
+ if (this.loose) {
+ // in loose mode, throw out any that are not valid comparators
+ set = set.filter(function(comp) {
+ return !!comp.match(compRe);
+ });
+ }
+ set = set.map(function(comp) {
+ return new Comparator(comp, loose);
+ });
+
+ return set;
+};
+
+// Mostly just for testing and legacy API reasons
+exports.toComparators = toComparators;
+function toComparators(range, loose) {
+ return new Range(range, loose).set.map(function(comp) {
+ return comp.map(function(c) {
+ return c.value;
+ }).join(' ').trim().split(' ');
+ });
+}
+
+// comprised of xranges, tildes, stars, and gtlt's at this point.
+// already replaced the hyphen ranges
+// turn into a set of JUST comparators.
+function parseComparator(comp, loose) {
+ debug('comp', comp);
+ comp = replaceCarets(comp, loose);
+ debug('caret', comp);
+ comp = replaceTildes(comp, loose);
+ debug('tildes', comp);
+ comp = replaceXRanges(comp, loose);
+ debug('xrange', comp);
+ comp = replaceStars(comp, loose);
+ debug('stars', comp);
+ return comp;
+}
+
+function isX(id) {
+ return !id || id.toLowerCase() === 'x' || id === '*';
+}
+
+// ~, ~> --> * (any, kinda silly)
+// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
+// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
+// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
+// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
+// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
+function replaceTildes(comp, loose) {
+ return comp.trim().split(/\s+/).map(function(comp) {
+ return replaceTilde(comp, loose);
+ }).join(' ');
+}
+
+function replaceTilde(comp, loose) {
+ var r = loose ? re[TILDELOOSE] : re[TILDE];
+ return comp.replace(r, function(_, M, m, p, pr) {
+ debug('tilde', comp, _, M, m, p, pr);
+ var ret;
+
+ if (isX(M))
+ ret = '';
+ else if (isX(m))
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
+ else if (isX(p))
+ // ~1.2 == >=1.2.0 <1.3.0
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
+ else if (pr) {
+ debug('replaceTilde pr', pr);
+ if (pr.charAt(0) !== '-')
+ pr = '-' + pr;
+ ret = '>=' + M + '.' + m + '.' + p + pr +
+ ' <' + M + '.' + (+m + 1) + '.0';
+ } else
+ // ~1.2.3 == >=1.2.3 <1.3.0
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + (+m + 1) + '.0';
+
+ debug('tilde return', ret);
+ return ret;
+ });
+}
+
+// ^ --> * (any, kinda silly)
+// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
+// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
+// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
+// ^1.2.3 --> >=1.2.3 <2.0.0
+// ^1.2.0 --> >=1.2.0 <2.0.0
+function replaceCarets(comp, loose) {
+ return comp.trim().split(/\s+/).map(function(comp) {
+ return replaceCaret(comp, loose);
+ }).join(' ');
+}
+
+function replaceCaret(comp, loose) {
+ debug('caret', comp, loose);
+ var r = loose ? re[CARETLOOSE] : re[CARET];
+ return comp.replace(r, function(_, M, m, p, pr) {
+ debug('caret', comp, _, M, m, p, pr);
+ var ret;
+
+ if (isX(M))
+ ret = '';
+ else if (isX(m))
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
+ else if (isX(p)) {
+ if (M === '0')
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
+ else
+ ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0';
+ } else if (pr) {
+ debug('replaceCaret pr', pr);
+ if (pr.charAt(0) !== '-')
+ pr = '-' + pr;
+ if (M === '0') {
+ if (m === '0')
+ ret = '>=' + M + '.' + m + '.' + p + pr +
+ ' <' + M + '.' + m + '.' + (+p + 1);
+ else
+ ret = '>=' + M + '.' + m + '.' + p + pr +
+ ' <' + M + '.' + (+m + 1) + '.0';
+ } else
+ ret = '>=' + M + '.' + m + '.' + p + pr +
+ ' <' + (+M + 1) + '.0.0';
+ } else {
+ debug('no pr');
+ if (M === '0') {
+ if (m === '0')
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + m + '.' + (+p + 1);
+ else
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + (+m + 1) + '.0';
+ } else
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + (+M + 1) + '.0.0';
+ }
+
+ debug('caret return', ret);
+ return ret;
+ });
+}
+
+function replaceXRanges(comp, loose) {
+ debug('replaceXRanges', comp, loose);
+ return comp.split(/\s+/).map(function(comp) {
+ return replaceXRange(comp, loose);
+ }).join(' ');
+}
+
+function replaceXRange(comp, loose) {
+ comp = comp.trim();
+ var r = loose ? re[XRANGELOOSE] : re[XRANGE];
+ return comp.replace(r, function(ret, gtlt, M, m, p, pr) {
+ debug('xRange', comp, ret, gtlt, M, m, p, pr);
+ var xM = isX(M);
+ var xm = xM || isX(m);
+ var xp = xm || isX(p);
+ var anyX = xp;
+
+ if (gtlt === '=' && anyX)
+ gtlt = '';
+
+ if (xM) {
+ if (gtlt === '>' || gtlt === '<') {
+ // nothing is allowed
+ ret = '<0.0.0';
+ } else {
+ // nothing is forbidden
+ ret = '*';
+ }
+ } else if (gtlt && anyX) {
+ // replace X with 0
+ if (xm)
+ m = 0;
+ if (xp)
+ p = 0;
+
+ if (gtlt === '>') {
+ // >1 => >=2.0.0
+ // >1.2 => >=1.3.0
+ // >1.2.3 => >= 1.2.4
+ gtlt = '>=';
+ if (xm) {
+ M = +M + 1;
+ m = 0;
+ p = 0;
+ } else if (xp) {
+ m = +m + 1;
+ p = 0;
+ }
+ } else if (gtlt === '<=') {
+ // <=0.7.x is actually <0.8.0, since any 0.7.x should
+ // pass. Similarly, <=7.x is actually <8.0.0, etc.
+ gtlt = '<';
+ if (xm)
+ M = +M + 1;
+ else
+ m = +m + 1;
+ }
+
+ ret = gtlt + M + '.' + m + '.' + p;
+ } else if (xm) {
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
+ } else if (xp) {
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
+ }
+
+ debug('xRange return', ret);
+
+ return ret;
+ });
+}
+
+// Because * is AND-ed with everything else in the comparator,
+// and '' means "any version", just remove the *s entirely.
+function replaceStars(comp, loose) {
+ debug('replaceStars', comp, loose);
+ // Looseness is ignored here. star is always as loose as it gets!
+ return comp.trim().replace(re[STAR], '');
+}
+
+// This function is passed to string.replace(re[HYPHENRANGE])
+// M, m, patch, prerelease, build
+// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
+// 1.2.3 - 3.4 => >=1.2.3 <3.5.0 Any 3.4.x will do
+// 1.2 - 3.4 => >=1.2.0 <3.5.0
+function hyphenReplace($0,
+ from, fM, fm, fp, fpr, fb,
+ to, tM, tm, tp, tpr, tb) {
+
+ if (isX(fM))
+ from = '';
+ else if (isX(fm))
+ from = '>=' + fM + '.0.0';
+ else if (isX(fp))
+ from = '>=' + fM + '.' + fm + '.0';
+ else
+ from = '>=' + from;
+
+ if (isX(tM))
+ to = '';
+ else if (isX(tm))
+ to = '<' + (+tM + 1) + '.0.0';
+ else if (isX(tp))
+ to = '<' + tM + '.' + (+tm + 1) + '.0';
+ else if (tpr)
+ to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr;
+ else
+ to = '<=' + to;
+
+ return (from + ' ' + to).trim();
+}
+
+
+// if ANY of the sets match ALL of its comparators, then pass
+Range.prototype.test = function(version) {
+ if (!version)
+ return false;
+
+ if (typeof version === 'string')
+ version = new SemVer(version, this.loose);
+
+ for (var i = 0; i < this.set.length; i++) {
+ if (testSet(this.set[i], version))
+ return true;
+ }
+ return false;
+};
+
+function testSet(set, version) {
+ for (var i = 0; i < set.length; i++) {
+ if (!set[i].test(version))
+ return false;
+ }
+
+ if (version.prerelease.length) {
+ // Find the set of versions that are allowed to have prereleases
+ // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
+ // That should allow `1.2.3-pr.2` to pass.
+ // However, `1.2.4-alpha.notready` should NOT be allowed,
+ // even though it's within the range set by the comparators.
+ for (var i = 0; i < set.length; i++) {
+ debug(set[i].semver);
+ if (set[i].semver === ANY)
+ continue;
+
+ if (set[i].semver.prerelease.length > 0) {
+ var allowed = set[i].semver;
+ if (allowed.major === version.major &&
+ allowed.minor === version.minor &&
+ allowed.patch === version.patch)
+ return true;
+ }
+ }
+
+ // Version has a -pre, but it's not one of the ones we like.
+ return false;
+ }
+
+ return true;
+}
+
+exports.satisfies = satisfies;
+function satisfies(version, range, loose) {
+ try {
+ range = new Range(range, loose);
+ } catch (er) {
+ return false;
+ }
+ return range.test(version);
+}
+
+exports.maxSatisfying = maxSatisfying;
+function maxSatisfying(versions, range, loose) {
+ return versions.filter(function(version) {
+ return satisfies(version, range, loose);
+ }).sort(function(a, b) {
+ return rcompare(a, b, loose);
+ })[0] || null;
+}
+
+exports.minSatisfying = minSatisfying;
+function minSatisfying(versions, range, loose) {
+ return versions.filter(function(version) {
+ return satisfies(version, range, loose);
+ }).sort(function(a, b) {
+ return compare(a, b, loose);
+ })[0] || null;
+}
+
+exports.validRange = validRange;
+function validRange(range, loose) {
+ try {
+ // Return '*' instead of '' so that truthiness works.
+ // This will throw if it's invalid anyway
+ return new Range(range, loose).range || '*';
+ } catch (er) {
+ return null;
+ }
+}
+
+// Determine if version is less than all the versions possible in the range
+exports.ltr = ltr;
+function ltr(version, range, loose) {
+ return outside(version, range, '<', loose);
+}
+
+// Determine if version is greater than all the versions possible in the range.
+exports.gtr = gtr;
+function gtr(version, range, loose) {
+ return outside(version, range, '>', loose);
+}
+
+exports.outside = outside;
+function outside(version, range, hilo, loose) {
+ version = new SemVer(version, loose);
+ range = new Range(range, loose);
+
+ var gtfn, ltefn, ltfn, comp, ecomp;
+ switch (hilo) {
+ case '>':
+ gtfn = gt;
+ ltefn = lte;
+ ltfn = lt;
+ comp = '>';
+ ecomp = '>=';
+ break;
+ case '<':
+ gtfn = lt;
+ ltefn = gte;
+ ltfn = gt;
+ comp = '<';
+ ecomp = '<=';
+ break;
+ default:
+ throw new TypeError('Must provide a hilo val of "<" or ">"');
+ }
+
+ // If it satisfies the range, it is not outside
+ if (satisfies(version, range, loose)) {
+ return false;
+ }
+
+ // From now on, variable terms are as if we're in "gtr" mode,
+ // but note that everything is flipped for the "ltr" function.
+
+ for (var i = 0; i < range.set.length; ++i) {
+ var comparators = range.set[i];
+
+ var high = null;
+ var low = null;
+
+ comparators.forEach(function(comparator) {
+ if (comparator.semver === ANY) {
+ comparator = new Comparator('>=0.0.0')
+ }
+ high = high || comparator;
+ low = low || comparator;
+ if (gtfn(comparator.semver, high.semver, loose)) {
+ high = comparator;
+ } else if (ltfn(comparator.semver, low.semver, loose)) {
+ low = comparator;
+ }
+ });
+
+ // If the edge version comparator has an operator then our version
+ // isn't outside it
+ if (high.operator === comp || high.operator === ecomp) {
+ return false;
+ }
+
+ // If the lowest version comparator has an operator and our version
+ // is less than it, then it isn't higher than the range
+ if ((!low.operator || low.operator === comp) &&
+ ltefn(version, low.semver)) {
+ return false;
+ } else if (low.operator === ecomp && ltfn(version, low.semver)) {
+ return false;
+ }
+ }
+ return true;
+}
+
+exports.prerelease = prerelease;
+function prerelease(version, loose) {
+ var parsed = parse(version, loose);
+ return (parsed && parsed.prerelease.length) ? parsed.prerelease : null;
+}
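
One subtlety in the file above deserves a worked example: as the comments in `testSet()` explain, a prerelease version only satisfies a range whose comparators name a prerelease on the same `[major, minor, patch]` tuple. A minimal sketch:

```js
var semver = require('semver')

// ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
semver.satisfies('1.2.3-pr.2', '^1.2.3-pr.1')   // true:  prerelease on the same 1.2.3 tuple
semver.satisfies('1.2.4-alpha', '^1.2.3-pr.1')  // false: numerically in range, but a 1.2.4 prerelease
semver.satisfies('1.2.4', '^1.2.3-pr.1')        // true:  non-prerelease versions are unaffected

semver.maxSatisfying(['1.2.3', '1.2.4', '2.0.0'], '~1.2.0')  // '1.2.4'
```
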
diff --git a/deps/npm/node_modules/tar/.npmignore b/deps/npm/node_modules/node-gyp/node_modules/tar/.npmignore
similarity index 100%
rename from deps/npm/node_modules/tar/.npmignore
rename to deps/npm/node_modules/node-gyp/node_modules/tar/.npmignore
diff --git a/deps/npm/node_modules/tar/.travis.yml b/deps/npm/node_modules/node-gyp/node_modules/tar/.travis.yml
similarity index 100%
rename from deps/npm/node_modules/tar/.travis.yml
rename to deps/npm/node_modules/node-gyp/node_modules/tar/.travis.yml
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/tar/LICENSE
new file mode 100644
index 00000000000000..019b7e40ea0568
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/LICENSE
@@ -0,0 +1,12 @@
+The ISC License
+Copyright (c) Isaac Z. Schlueter and Contributors
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/README.md b/deps/npm/node_modules/node-gyp/node_modules/tar/README.md
new file mode 100644
index 00000000000000..cfda2ac180611c
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/README.md
@@ -0,0 +1,50 @@
+# node-tar
+
+Tar for Node.js.
+
+[![NPM](https://nodei.co/npm/tar.png)](https://nodei.co/npm/tar/)
+
+## API
+
+See `examples/` for usage examples.
+
+### var tar = require('tar')
+
+Returns an object with `.Pack`, `.Extract` and `.Parse` methods.
+
+### tar.Pack([properties])
+
+Returns a through stream. Use
+[fstream](https://npmjs.org/package/fstream) to write files into the
+pack stream and you will receive tar archive data from the pack
+stream.
+
+This only works with directories; it does not work with individual files.
+
+The optional `properties` object is used to set properties in the tar
+'Global Extended Header'. If the `fromBase` property is set to true,
+the tarball will contain entries relative to the path passed in, rather
+than including that path itself.
+
+### tar.Extract([options])
+
+Returns a through stream. Write tar data to the stream and the files
+in the tarball will be extracted onto the filesystem.
+
+`options` can be:
+
+```js
+{
+ path: '/path/to/extract/tar/into',
+ strip: 0, // how many path segments to strip from the root when extracting
+}
+```
+
+`options` also get passed to the `fstream.Writer` instance that `tar`
+uses internally.
+
+### tar.Parse()
+
+Returns a writable stream. Write tar data to it and it will emit
+`entry` events for each entry parsed from the tarball. This is used by
+`tar.Extract`.
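
A sketch of the flow the README describes, pairing `fstream` with `tar.Pack` and `tar.Extract` (the paths are placeholders):

```js
var tar = require('tar')
var fstream = require('fstream')
var fs = require('fs')

// pack: pipe a directory reader into Pack, then the tarball bytes into a file
fstream.Reader({ path: 'dir-to-pack', type: 'Directory' })
  .pipe(tar.Pack({ noProprietary: true }))
  .pipe(fs.createWriteStream('out.tar'))

// extract: pipe tarball bytes into Extract, which writes files under `path`
fs.createReadStream('out.tar')
  .pipe(tar.Extract({ path: 'dest', strip: 0 }))
```
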
diff --git a/deps/npm/node_modules/tar/examples/extracter.js b/deps/npm/node_modules/node-gyp/node_modules/tar/examples/extracter.js
similarity index 100%
rename from deps/npm/node_modules/tar/examples/extracter.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/examples/extracter.js
diff --git a/deps/npm/node_modules/tar/examples/packer.js b/deps/npm/node_modules/node-gyp/node_modules/tar/examples/packer.js
similarity index 100%
rename from deps/npm/node_modules/tar/examples/packer.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/examples/packer.js
diff --git a/deps/npm/node_modules/tar/examples/reader.js b/deps/npm/node_modules/node-gyp/node_modules/tar/examples/reader.js
similarity index 99%
rename from deps/npm/node_modules/tar/examples/reader.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/examples/reader.js
index 39f3f0888a2cfd..8d113ad30d05e9 100644
--- a/deps/npm/node_modules/tar/examples/reader.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/examples/reader.js
@@ -33,4 +33,3 @@ fs.createReadStream(__dirname + "/../test/fixtures/c.tar")
console.error(" << 1) {
+ type = tar.types[obj.type]
+ if (!type) type = "0"
+ }
+ writeText(block, off, end, type)
+ break
+
+ case "path":
+ // uses the "prefix" field if > 100 bytes, but <= 255
+ var pathLen = Buffer.byteLength(obj.path)
+ , pathFSize = fieldSize[fields.path]
+ , prefFSize = fieldSize[fields.prefix]
+
+ // paths between 100 and 255 chars should use the prefix field;
+ // paths longer than 255 need an extended header (handled below)
+ if (pathLen > pathFSize &&
+ pathLen <= pathFSize + prefFSize) {
+ // need to find a slash somewhere in the middle so that
+ // path and prefix both fit in their respective fields
+ var searchStart = pathLen - 1 - pathFSize
+ , searchEnd = prefFSize
+ , found = false
+ , pathBuf = new Buffer(obj.path)
+
+ for ( var s = searchStart
+ ; (s <= searchEnd)
+ ; s ++ ) {
+ if (pathBuf[s] === slash || pathBuf[s] === bslash) {
+ found = s
+ break
+ }
+ }
+
+ if (found !== false) {
+ prefix = pathBuf.slice(0, found).toString("utf8")
+ path = pathBuf.slice(found + 1).toString("utf8")
+
+ ret = writeText(block, off, end, path)
+ off = fieldOffs[fields.prefix]
+ end = fieldEnds[fields.prefix]
+ // console.error("%% header writing prefix", off, end, prefix)
+ ret = writeText(block, off, end, prefix) || ret
+ break
+ }
+ }
+
+ // paths less than 100 chars don't need a prefix
+ // and paths longer than 255 need an extended header and will fail
+ // on old implementations no matter what we do here.
+ // Null out the prefix, and fallthrough to default.
+ // console.error("%% header writing no prefix")
+ var poff = fieldOffs[fields.prefix]
+ , pend = fieldEnds[fields.prefix]
+ writeText(block, poff, pend, "")
+ // fallthrough
+
+ // all other fields are numeric or text
+ default:
+ ret = numeric[field]
+ ? writeNumeric(block, off, end, obj[field])
+ : writeText(block, off, end, obj[field] || "")
+ break
+ }
+ obj.needExtended = obj.needExtended || ret
+ }
+
+ var off = fieldOffs[fields.cksum]
+ , end = fieldEnds[fields.cksum]
+
+ writeNumeric(block, off, end, calcSum.call(this, block))
+
+ return block
+}
+
+// if it's a negative number, or greater than will fit,
+// then use write256.
+var MAXNUM = { 12: 077777777777
+ , 11: 07777777777
+ , 8 : 07777777
+ , 7 : 0777777 }
+function writeNumeric (block, off, end, num) {
+ var writeLen = end - off
+ , maxNum = MAXNUM[writeLen] || 0
+
+ num = num || 0
+ // console.error(" numeric", num)
+
+ if (num instanceof Date ||
+ Object.prototype.toString.call(num) === "[object Date]") {
+ num = num.getTime() / 1000
+ }
+
+ if (num > maxNum || num < 0) {
+ write256(block, off, end, num)
+ // need an extended header if negative or too big.
+ return true
+ }
+
+ // god, tar is so annoying
+ // if the string is small enough, you should put a space
+ // between the octal string and the \0, but if it doesn't
+ // fit, then don't.
+ var numStr = Math.floor(num).toString(8)
+ if (num < MAXNUM[writeLen - 1]) numStr += " "
+
+ // pad with "0" chars
+ if (numStr.length < writeLen) {
+ numStr = (new Array(writeLen - numStr.length).join("0")) + numStr
+ }
+
+ if (numStr.length !== writeLen - 1) {
+ throw new Error("invalid length: " + JSON.stringify(numStr) + "\n" +
+ "expected: "+writeLen)
+ }
+ block.write(numStr, off, writeLen, "utf8")
+ block[end - 1] = 0
+}
+
+function write256 (block, off, end, num) {
+ var buf = block.slice(off, end)
+ var positive = num >= 0
+ buf[0] = positive ? 0x80 : 0xFF
+
+ // get the number as a base-256 tuple
+ if (!positive) num *= -1
+ var tuple = []
+ do {
+ var n = num % 256
+ tuple.push(n)
+ num = (num - n) / 256
+ } while (num)
+
+ var bytes = tuple.length
+
+ var fill = buf.length - bytes
+ for (var i = 1; i < fill; i ++) {
+ buf[i] = positive ? 0 : 0xFF
+ }
+
+ // tuple is a base256 number, with [0] as the *least* significant byte
+ // if it's negative, then we need to flip all the bits once we hit the
+ // first non-zero bit. The 2's-complement is (0x100 - n), and the 1's-
+ // complement is (0xFF - n).
+ var zero = true
+ for (i = bytes; i > 0; i --) {
+ var byte = tuple[bytes - i]
+ if (positive) buf[fill + i] = byte
+ else if (zero && byte === 0) buf[fill + i] = 0
+ else if (zero) {
+ zero = false
+ buf[fill + i] = 0x100 - byte
+ } else buf[fill + i] = 0xFF - byte
+ }
+}
+
+function writeText (block, off, end, str) {
+ // strings are written as utf8, then padded with \0
+ var strLen = Buffer.byteLength(str)
+ , writeLen = Math.min(strLen, end - off)
+ // non-ascii fields need extended headers
+ // long fields get truncated
+ , needExtended = strLen !== str.length || strLen > writeLen
+
+ // write the string, and null-pad
+ if (writeLen > 0) block.write(str, off, writeLen, "utf8")
+ for (var i = off + writeLen; i < end; i ++) block[i] = 0
+
+ return needExtended
+}
+
+function calcSum (block) {
+ block = block || this.block
+ assert(Buffer.isBuffer(block) && block.length === 512)
+
+ if (!block) throw new Error("Need block to checksum")
+
+ // now figure out what it would be if the cksum was " "
+ var sum = 0
+ , start = fieldOffs[fields.cksum]
+ , end = fieldEnds[fields.cksum]
+
+ for (var i = 0; i < fieldOffs[fields.cksum]; i ++) {
+ sum += block[i]
+ }
+
+ for (var i = start; i < end; i ++) {
+ sum += space
+ }
+
+ for (var i = end; i < 512; i ++) {
+ sum += block[i]
+ }
+
+ return sum
+}
+
+
+function checkSum (block) {
+ var sum = calcSum.call(this, block)
+ block = block || this.block
+
+ var cksum = block.slice(fieldOffs[fields.cksum], fieldEnds[fields.cksum])
+ cksum = parseNumeric(cksum)
+
+ return cksum === sum
+}
+
+function decode (block) {
+ block = block || this.block
+ assert(Buffer.isBuffer(block) && block.length === 512)
+
+ this.block = block
+ this.cksumValid = this.checkSum()
+
+ var prefix = null
+
+ // slice off each field.
+ for (var f = 0; fields[f] !== null; f ++) {
+ var field = fields[f]
+ , val = block.slice(fieldOffs[f], fieldEnds[f])
+
+ switch (field) {
+ case "ustar":
+ // if not ustar, then everything after that is just padding.
+ if (val.toString() !== "ustar\0") {
+ this.ustar = false
+ return
+ } else {
+ // console.error("ustar:", val, val.toString())
+ this.ustar = val.toString()
+ }
+ break
+
+ // prefix is special, since it might signal the xstar header
+ case "prefix":
+ var atime = parseNumeric(val.slice(131, 131 + 12))
+ , ctime = parseNumeric(val.slice(131 + 12, 131 + 12 + 12))
+ if ((val[130] === 0 || val[130] === space) &&
+ typeof atime === "number" &&
+ typeof ctime === "number" &&
+ val[131 + 12] === space &&
+ val[131 + 12 + 12] === space) {
+ this.atime = atime
+ this.ctime = ctime
+ val = val.slice(0, 130)
+ }
+ prefix = val.toString("utf8").replace(/\0+$/, "")
+ // console.error("%% header reading prefix", prefix)
+ break
+
+ // all other fields are null-padding text
+ // or a number.
+ default:
+ if (numeric[field]) {
+ this[field] = parseNumeric(val)
+ } else {
+ this[field] = val.toString("utf8").replace(/\0+$/, "")
+ }
+ break
+ }
+ }
+
+ // if we got a prefix, then prepend it to the path.
+ if (prefix) {
+ this.path = prefix + "/" + this.path
+ // console.error("%% header got a prefix", this.path)
+ }
+}
+
+function parse256 (buf) {
+ // first byte MUST be either 80 or FF
+ // 80 for positive, FF for 2's comp
+ var positive
+ if (buf[0] === 0x80) positive = true
+ else if (buf[0] === 0xFF) positive = false
+ else return null
+
+ // build up a base-256 tuple from the least sig to the highest
+ var zero = false
+ , tuple = []
+ for (var i = buf.length - 1; i > 0; i --) {
+ var byte = buf[i]
+ if (positive) tuple.push(byte)
+ else if (zero && byte === 0) tuple.push(0)
+ else if (zero) {
+ zero = false
+ tuple.push(0x100 - byte)
+ } else tuple.push(0xFF - byte)
+ }
+
+ for (var sum = 0, i = 0, l = tuple.length; i < l; i ++) {
+ sum += tuple[i] * Math.pow(256, i)
+ }
+
+ return positive ? sum : -1 * sum
+}
+
+function parseNumeric (f) {
+ if (f[0] & 0x80) return parse256(f)
+
+ var str = f.toString("utf8").split("\0")[0].trim()
+ , res = parseInt(str, 8)
+
+ return isNaN(res) ? null : res
+}
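
To make the two numeric encodings above concrete, a small worked example (values chosen for illustration): ordinary header fields hold NUL/space-terminated octal text, while negative or oversized values fall back to the base-256 form flagged by `0x80`/`0xFF` in the first byte, which `parseNumeric()` detects via `f[0] & 0x80`:

```js
// A 12-byte field holds up to 077777777777 octal (MAXNUM[12] above), so a
// typical mtime fits as plain octal and writeNumeric() takes the octal path:
var mtime = 1500000000
mtime.toString(8)  // '13132027400', 11 octal digits

// A base-256 buffer: first byte 0x80 marks a positive value, the rest is
// the number with the least significant byte last.
var buf = Buffer.from([0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0])
// parse256(buf) === 256   (tuple: 0*256^0 + 1*256^1)
```
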
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/lib/pack.js b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/pack.js
new file mode 100644
index 00000000000000..5a3bb95a121bdb
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/pack.js
@@ -0,0 +1,236 @@
+// pipe in an fstream, and it'll make a tarball.
+// key-value pair argument is global extended header props.
+
+module.exports = Pack
+
+var EntryWriter = require("./entry-writer.js")
+ , Stream = require("stream").Stream
+ , path = require("path")
+ , inherits = require("inherits")
+ , GlobalHeaderWriter = require("./global-header-writer.js")
+ , collect = require("fstream").collect
+ , eof = new Buffer(512)
+
+for (var i = 0; i < 512; i ++) eof[i] = 0
+
+inherits(Pack, Stream)
+
+function Pack (props) {
+ // console.error("-- p ctor")
+ var me = this
+ if (!(me instanceof Pack)) return new Pack(props)
+
+ if (props) me._noProprietary = props.noProprietary
+ else me._noProprietary = false
+
+ me._global = props
+
+ me.readable = true
+ me.writable = true
+ me._buffer = []
+ // console.error("-- -- set current to null in ctor")
+ me._currentEntry = null
+ me._processing = false
+
+ me._pipeRoot = null
+ me.on("pipe", function (src) {
+ if (src.root === me._pipeRoot) return
+ me._pipeRoot = src
+ src.on("end", function () {
+ me._pipeRoot = null
+ })
+ me.add(src)
+ })
+}
+
+Pack.prototype.addGlobal = function (props) {
+ // console.error("-- p addGlobal")
+ if (this._didGlobal) return
+ this._didGlobal = true
+
+ var me = this
+ GlobalHeaderWriter(props)
+ .on("data", function (c) {
+ me.emit("data", c)
+ })
+ .end()
+}
+
+Pack.prototype.add = function (stream) {
+ if (this._global && !this._didGlobal) this.addGlobal(this._global)
+
+ if (this._ended) return this.emit("error", new Error("add after end"))
+
+ collect(stream)
+ this._buffer.push(stream)
+ this._process()
+ this._needDrain = this._buffer.length > 0
+ return !this._needDrain
+}
+
+Pack.prototype.pause = function () {
+ this._paused = true
+ if (this._currentEntry) this._currentEntry.pause()
+ this.emit("pause")
+}
+
+Pack.prototype.resume = function () {
+ this._paused = false
+ if (this._currentEntry) this._currentEntry.resume()
+ this.emit("resume")
+ this._process()
+}
+
+Pack.prototype.end = function () {
+ this._ended = true
+ this._buffer.push(eof)
+ this._process()
+}
+
+Pack.prototype._process = function () {
+ var me = this
+ if (me._paused || me._processing) {
+ return
+ }
+
+ var entry = me._buffer.shift()
+
+ if (!entry) {
+ if (me._needDrain) {
+ me.emit("drain")
+ }
+ return
+ }
+
+ if (entry.ready === false) {
+ // console.error("-- entry is not ready", entry)
+ me._buffer.unshift(entry)
+ entry.on("ready", function () {
+ // console.error("-- -- ready!", entry)
+ me._process()
+ })
+ return
+ }
+
+ me._processing = true
+
+ if (entry === eof) {
+ // need 2 ending null blocks.
+ me.emit("data", eof)
+ me.emit("data", eof)
+ me.emit("end")
+ me.emit("close")
+ return
+ }
+
+ // Change the path to be relative to the root dir that was
+ // added to the tarball.
+ //
+ // XXX This should be more like how -C works, so you can
+ // explicitly set a root dir, and also explicitly set a pathname
+ // in the tarball to use. That way we can skip a lot of extra
+ // work when resolving symlinks for bundled dependencies in npm.
+
+ var root = path.dirname((entry.root || entry).path);
+ if (me._global && me._global.fromBase && entry.root && entry.root.path) {
+ // user set 'fromBase: true' indicating tar root should be directory itself
+ root = entry.root.path;
+ }
+
+ var wprops = {}
+
+ Object.keys(entry.props || {}).forEach(function (k) {
+ wprops[k] = entry.props[k]
+ })
+
+ if (me._noProprietary) wprops.noProprietary = true
+
+ wprops.path = path.relative(root, entry.path || '')
+
+ // actually not a matter of opinion or taste.
+ if (process.platform === "win32") {
+ wprops.path = wprops.path.replace(/\\/g, "/")
+ }
+
+ if (!wprops.type)
+ wprops.type = 'Directory'
+
+ switch (wprops.type) {
+ // sockets not supported
+ case "Socket":
+ return
+
+ case "Directory":
+ wprops.path += "/"
+ wprops.size = 0
+ break
+
+ case "Link":
+ var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
+ wprops.linkpath = path.relative(root, lp) || "."
+ wprops.size = 0
+ break
+
+ case "SymbolicLink":
+ var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
+ wprops.linkpath = path.relative(path.dirname(entry.path), lp) || "."
+ wprops.size = 0
+ break
+ }
+
+ // console.error("-- new writer", wprops)
+ // if (!wprops.type) {
+ // // console.error("-- no type?", entry.constructor.name, entry)
+ // }
+
+ // console.error("-- -- set current to new writer", wprops.path)
+ var writer = me._currentEntry = EntryWriter(wprops)
+
+ writer.parent = me
+
+ // writer.on("end", function () {
+ // // console.error("-- -- writer end", writer.path)
+ // })
+
+ writer.on("data", function (c) {
+ me.emit("data", c)
+ })
+
+ writer.on("header", function () {
+ Buffer.prototype.toJSON = function () {
+ return this.toString().split(/\0/).join(".")
+ }
+ // console.error("-- -- writer header %j", writer.props)
+ if (writer.props.size === 0) nextEntry()
+ })
+ writer.on("close", nextEntry)
+
+ var ended = false
+ function nextEntry () {
+ if (ended) return
+ ended = true
+
+ // console.error("-- -- writer close", writer.path)
+ // console.error("-- -- set current to null", wprops.path)
+ me._currentEntry = null
+ me._processing = false
+ me._process()
+ }
+
+ writer.on("error", function (er) {
+ // console.error("-- -- writer error", writer.path)
+ me.emit("error", er)
+ })
+
+ // if it's the root, then there's no need to add its entries,
+ // or data, since they'll be added directly.
+ if (entry === me._pipeRoot) {
+ // console.error("-- is the root, don't auto-add")
+ writer.add = null
+ }
+
+ entry.pipe(writer)
+}
+
+Pack.prototype.destroy = function () {}
+Pack.prototype.write = function () {}
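
The `fromBase` branch in `_process()` above controls what entry paths are relative to. A hedged sketch (placeholder paths):

```js
var tar = require('tar')
var fstream = require('fstream')
var fs = require('fs')

// Default: paths are relative to the *parent* of the piped directory,
// so entries look like 'project/lib/a.js'. With fromBase: true they are
// relative to the directory itself: 'lib/a.js'.
fstream.Reader({ path: '/tmp/project', type: 'Directory' })
  .pipe(tar.Pack({ fromBase: true }))
  .pipe(fs.createWriteStream('/tmp/project.tar'))
```
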
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/lib/parse.js b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/parse.js
new file mode 100644
index 00000000000000..600ad782f0f61d
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/parse.js
@@ -0,0 +1,275 @@
+
+// A writable stream.
+// It emits "entry" events, which provide a readable stream that has
+// header info attached.
+
+module.exports = Parse.create = Parse
+
+var stream = require("stream")
+ , Stream = stream.Stream
+ , BlockStream = require("block-stream")
+ , tar = require("../tar.js")
+ , TarHeader = require("./header.js")
+ , Entry = require("./entry.js")
+ , BufferEntry = require("./buffer-entry.js")
+ , ExtendedHeader = require("./extended-header.js")
+ , assert = require("assert").ok
+ , inherits = require("inherits")
+ , fstream = require("fstream")
+
+// reading a tar is a lot like reading a directory
+// However, we're actually not going to run the ctor,
+// since it does a stat and various other stuff.
+// This inheritance gives us the pause/resume/pipe
+// behavior that is desired.
+inherits(Parse, fstream.Reader)
+
+function Parse () {
+ var me = this
+ if (!(me instanceof Parse)) return new Parse()
+
+ // doesn't apply fstream.Reader ctor?
+ // no, because we don't want to stat/etc, we just
+ // want to get the entry/add logic from .pipe()
+ Stream.apply(me)
+
+ me.writable = true
+ me.readable = true
+ me._stream = new BlockStream(512)
+ me.position = 0
+ me._ended = false
+
+ me._stream.on("error", function (e) {
+ me.emit("error", e)
+ })
+
+ me._stream.on("data", function (c) {
+ me._process(c)
+ })
+
+ me._stream.on("end", function () {
+ me._streamEnd()
+ })
+
+ me._stream.on("drain", function () {
+ me.emit("drain")
+ })
+}
+
+// overridden in Extract class, since it needs to
+// wait for its DirWriter part to finish before
+// emitting "end"
+Parse.prototype._streamEnd = function () {
+ var me = this
+ if (!me._ended || me._entry) me.error("unexpected eof")
+ me.emit("end")
+}
+
+// a tar reader is actually a filter, not just a readable stream.
+// So, you should pipe a tarball stream into it, and it needs these
+// write/end methods to do that.
+Parse.prototype.write = function (c) {
+ if (this._ended) {
+ // gnutar puts a LOT of nulls at the end.
+ // you can keep writing these things forever.
+ // Just ignore them.
+ for (var i = 0, l = c.length; i < l; i ++) {
+ if (c[i] !== 0) return this.error("write() after end()")
+ }
+ return
+ }
+ return this._stream.write(c)
+}
+
+Parse.prototype.end = function (c) {
+ this._ended = true
+ return this._stream.end(c)
+}
+
+// don't need to do anything, since we're just
+// proxying the data up from the _stream.
+// Just need to override the parent's "Not Implemented"
+// error-thrower.
+Parse.prototype._read = function () {}
+
+Parse.prototype._process = function (c) {
+ assert(c && c.length === 512, "block size should be 512")
+
+ // one of three cases.
+ // 1. A new header
+ // 2. A part of a file/extended header
+ // 3. One of two or more EOF null blocks
+
+ if (this._entry) {
+ var entry = this._entry
+ if(!entry._abort) entry.write(c)
+ else {
+ entry._remaining -= c.length
+ if(entry._remaining < 0) entry._remaining = 0
+ }
+ if (entry._remaining === 0) {
+ entry.end()
+ this._entry = null
+ }
+ } else {
+ // either zeroes or a header
+ var zero = true
+ for (var i = 0; i < 512 && zero; i ++) {
+ zero = c[i] === 0
+ }
+
+ // eof is *at least* 2 blocks of nulls, and then the end of the
+ // file. you can put blocks of nulls between entries anywhere,
+ // so appending one tarball to another is technically valid.
+ // ending without the eof null blocks is not allowed, however.
+ if (zero) {
+ if (this._eofStarted)
+ this._ended = true
+ this._eofStarted = true
+ } else {
+ this._eofStarted = false
+ this._startEntry(c)
+ }
+ }
+
+ this.position += 512
+}
+
+// take a header chunk, start the right kind of entry.
+Parse.prototype._startEntry = function (c) {
+ var header = new TarHeader(c)
+ , self = this
+ , entry
+ , ev
+ , EntryType
+ , onend
+ , meta = false
+
+ if (null === header.size || !header.cksumValid) {
+ var e = new Error("invalid tar file")
+ e.header = header
+ e.tar_file_offset = this.position
+ e.tar_block = this.position / 512
+ return this.emit("error", e)
+ }
+
+ switch (tar.types[header.type]) {
+ case "File":
+ case "OldFile":
+ case "Link":
+ case "SymbolicLink":
+ case "CharacterDevice":
+ case "BlockDevice":
+ case "Directory":
+ case "FIFO":
+ case "ContiguousFile":
+ case "GNUDumpDir":
+ // start a file.
+ // pass in any extended headers
+ // These ones consumers are typically most interested in.
+ EntryType = Entry
+ ev = "entry"
+ break
+
+ case "GlobalExtendedHeader":
+ // extended headers that apply to the rest of the tarball
+ EntryType = ExtendedHeader
+ onend = function () {
+ self._global = self._global || {}
+ Object.keys(entry.fields).forEach(function (k) {
+ self._global[k] = entry.fields[k]
+ })
+ }
+ ev = "globalExtendedHeader"
+ meta = true
+ break
+
+ case "ExtendedHeader":
+ case "OldExtendedHeader":
+ // extended headers that apply to the next entry
+ EntryType = ExtendedHeader
+ onend = function () {
+ self._extended = entry.fields
+ }
+ ev = "extendedHeader"
+ meta = true
+ break
+
+ case "NextFileHasLongLinkpath":
+ // set linkpath= in extended header
+ EntryType = BufferEntry
+ onend = function () {
+ self._extended = self._extended || {}
+ self._extended.linkpath = entry.body
+ }
+ ev = "longLinkpath"
+ meta = true
+ break
+
+ case "NextFileHasLongPath":
+ case "OldGnuLongPath":
+ // set path= in file-extended header
+ EntryType = BufferEntry
+ onend = function () {
+ self._extended = self._extended || {}
+ self._extended.path = entry.body
+ }
+ ev = "longPath"
+ meta = true
+ break
+
+ default:
+ // all the rest we skip, but still set the _entry
+ // member, so that we can skip over their data appropriately.
+ // emit an event to say that this is an ignored entry type?
+ EntryType = Entry
+ ev = "ignoredEntry"
+ break
+ }
+
+ var global, extended
+ if (meta) {
+ global = extended = null
+ } else {
+    global = this._global
+    extended = this._extended
+
+ // extendedHeader only applies to one entry, so once we start
+ // an entry, it's over.
+ this._extended = null
+ }
+ entry = new EntryType(header, extended, global)
+ entry.meta = meta
+
+  var me = this
+
+  // only proxy data events of normal files.
+  if (!meta) {
+    entry.on("data", function (c) {
+      me.emit("data", c)
+    })
+  }
+
+  if (onend) entry.on("end", onend)
+
+  this._entry = entry
+
+ entry.on("pause", function () {
+ me.pause()
+ })
+
+ entry.on("resume", function () {
+ me.resume()
+ })
+
+ if (this.listeners("*").length) {
+ this.emit("*", ev, entry)
+ }
+
+ this.emit(ev, entry)
+
+ // Zero-byte entry. End immediately.
+ if (entry.props.size === 0) {
+ entry.end()
+ this._entry = null
+ }
+}
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/LICENCE b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENCE
similarity index 100%
rename from deps/npm/node_modules/tar/node_modules/block-stream/LICENCE
rename to deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENCE
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENSE
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/LICENSE
rename to deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENSE
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/README.md b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/README.md
similarity index 100%
rename from deps/npm/node_modules/tar/node_modules/block-stream/README.md
rename to deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/README.md
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/block-stream.js b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/block-stream.js
similarity index 100%
rename from deps/npm/node_modules/tar/node_modules/block-stream/block-stream.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/block-stream.js
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json
new file mode 100644
index 00000000000000..bf449e9633b26f
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json
@@ -0,0 +1,63 @@
+{
+ "_args": [
+ [
+ "block-stream@0.0.9",
+ "/Users/rebecca/code/npm"
+ ]
+ ],
+ "_from": "block-stream@0.0.9",
+ "_id": "block-stream@0.0.9",
+ "_inBundle": false,
+ "_integrity": "sha1-E+v+d4oDIFz+A3UUgeu0szAMEmo=",
+ "_location": "/node-gyp/tar/block-stream",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "version",
+ "registry": true,
+ "raw": "block-stream@0.0.9",
+ "name": "block-stream",
+ "escapedName": "block-stream",
+ "rawSpec": "0.0.9",
+ "saveSpec": null,
+ "fetchSpec": "0.0.9"
+ },
+ "_requiredBy": [
+ "/node-gyp/tar"
+ ],
+ "_resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz",
+ "_spec": "0.0.9",
+ "_where": "/Users/rebecca/code/npm",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/block-stream/issues"
+ },
+ "dependencies": {
+ "inherits": "~2.0.0"
+ },
+ "description": "a stream of blocks",
+ "devDependencies": {
+ "tap": "^5.7.1"
+ },
+ "engines": {
+ "node": "0.4 || >=0.5.8"
+ },
+ "files": [
+ "block-stream.js"
+ ],
+ "homepage": "https://github.com/isaacs/block-stream#readme",
+ "license": "ISC",
+ "main": "block-stream.js",
+ "name": "block-stream",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/block-stream.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js --cov"
+ },
+ "version": "0.0.9"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/package.json b/deps/npm/node_modules/node-gyp/node_modules/tar/package.json
new file mode 100644
index 00000000000000..0cb9624034ecf8
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/package.json
@@ -0,0 +1,64 @@
+{
+ "_args": [
+ [
+ "tar@2.2.1",
+ "/Users/rebecca/code/npm"
+ ]
+ ],
+ "_from": "tar@2.2.1",
+ "_id": "tar@2.2.1",
+ "_inBundle": false,
+ "_integrity": "sha1-jk0qJWwOIYXGsYrWlK7JaLg8sdE=",
+ "_location": "/node-gyp/tar",
+ "_phantomChildren": {
+ "inherits": "2.0.3"
+ },
+ "_requested": {
+ "type": "version",
+ "registry": true,
+ "raw": "tar@2.2.1",
+ "name": "tar",
+ "escapedName": "tar",
+ "rawSpec": "2.2.1",
+ "saveSpec": null,
+ "fetchSpec": "2.2.1"
+ },
+ "_requiredBy": [
+ "/node-gyp"
+ ],
+ "_resolved": "https://registry.npmjs.org/tar/-/tar-2.2.1.tgz",
+ "_spec": "2.2.1",
+ "_where": "/Users/rebecca/code/npm",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/node-tar/issues"
+ },
+ "dependencies": {
+ "block-stream": "*",
+ "fstream": "^1.0.2",
+ "inherits": "2"
+ },
+ "description": "tar for node",
+ "devDependencies": {
+ "graceful-fs": "^4.1.2",
+ "mkdirp": "^0.5.0",
+ "rimraf": "1.x",
+ "tap": "0.x"
+ },
+ "homepage": "https://github.com/isaacs/node-tar#readme",
+ "license": "ISC",
+ "main": "tar.js",
+ "name": "tar",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-tar.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "version": "2.2.1"
+}
diff --git a/deps/npm/node_modules/tar/tar.js b/deps/npm/node_modules/node-gyp/node_modules/tar/tar.js
similarity index 100%
rename from deps/npm/node_modules/tar/tar.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/tar.js
diff --git a/deps/npm/node_modules/tar/test/00-setup-fixtures.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js
similarity index 100%
rename from deps/npm/node_modules/tar/test/00-setup-fixtures.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js
diff --git a/deps/npm/node_modules/tar/test/cb-never-called-1.0.1.tgz b/deps/npm/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz
similarity index 100%
rename from deps/npm/node_modules/tar/test/cb-never-called-1.0.1.tgz
rename to deps/npm/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz
diff --git a/deps/npm/node_modules/tar/test/dir-normalization.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js
similarity index 100%
rename from deps/npm/node_modules/tar/test/dir-normalization.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js
diff --git a/deps/npm/node_modules/tar/test/dir-normalization.tar b/deps/npm/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar
similarity index 100%
rename from deps/npm/node_modules/tar/test/dir-normalization.tar
rename to deps/npm/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar
diff --git a/deps/npm/node_modules/tar/test/error-on-broken.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js
similarity index 100%
rename from deps/npm/node_modules/tar/test/error-on-broken.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js
diff --git a/deps/npm/node_modules/tar/test/extract-move.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/extract-move.js
similarity index 100%
rename from deps/npm/node_modules/tar/test/extract-move.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/test/extract-move.js
diff --git a/deps/npm/node_modules/tar/test/extract.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/extract.js
similarity index 100%
rename from deps/npm/node_modules/tar/test/extract.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/test/extract.js
diff --git a/deps/npm/node_modules/tar/test/fixtures.tgz b/deps/npm/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz
similarity index 100%
rename from deps/npm/node_modules/tar/test/fixtures.tgz
rename to deps/npm/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz
diff --git a/deps/npm/node_modules/tar/test/header.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/header.js
similarity index 100%
rename from deps/npm/node_modules/tar/test/header.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/test/header.js
diff --git a/deps/npm/node_modules/tar/test/pack-no-proprietary.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js
similarity index 100%
rename from deps/npm/node_modules/tar/test/pack-no-proprietary.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js
diff --git a/deps/npm/node_modules/tar/test/pack.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/pack.js
similarity index 100%
rename from deps/npm/node_modules/tar/test/pack.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/test/pack.js
diff --git a/deps/npm/node_modules/tar/test/parse-discard.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/parse-discard.js
similarity index 92%
rename from deps/npm/node_modules/tar/test/parse-discard.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/test/parse-discard.js
index da01a65ccc7d7d..79408c274b96e9 100644
--- a/deps/npm/node_modules/tar/test/parse-discard.js
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/parse-discard.js
@@ -20,10 +20,10 @@ tap.test("parser test", function (t) {
.pipe(parser)
.on('entry',function(entry){
if(entry.path === 'c.txt') entry.abort()
-
+
total += entry.size;
entry.on('data',function(data){
- dataTotal += data.length
+ dataTotal += data.length
})
})
})
diff --git a/deps/npm/node_modules/tar/test/parse.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/parse.js
similarity index 100%
rename from deps/npm/node_modules/tar/test/parse.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/test/parse.js
diff --git a/deps/npm/node_modules/tar/test/zz-cleanup.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js
similarity index 100%
rename from deps/npm/node_modules/tar/test/zz-cleanup.js
rename to deps/npm/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js
diff --git a/deps/npm/node_modules/npm-lifecycle/CHANGELOG.md b/deps/npm/node_modules/npm-lifecycle/CHANGELOG.md
new file mode 100644
index 00000000000000..c3cde3dbc8f758
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/CHANGELOG.md
@@ -0,0 +1,36 @@
+# Change Log
+
+All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+
+
+## [1.0.2](https://github.com/npm/lifecycle/compare/v1.0.1...v1.0.2) (2017-08-17)
+
+
+
+
+## [1.0.1](https://github.com/npm/lifecycle/compare/v1.0.0...v1.0.1) (2017-08-16)
+
+
+### Bug Fixes
+
+* **license:** fix up license documentation ([a784ca0](https://github.com/npm/lifecycle/commit/a784ca0))
+
+
+
+
+# 1.0.0 (2017-08-16)
+
+
+### Bug Fixes
+
+* **misc:** use strict to fix node[@4](https://github.com/4) ([#2](https://github.com/npm/lifecycle/issues/2)) ([961ceb9](https://github.com/npm/lifecycle/commit/961ceb9))
+
+
+### Features
+
+* **api:** Extract from npm proper ([#1](https://github.com/npm/lifecycle/issues/1)) ([27d9930](https://github.com/npm/lifecycle/commit/27d9930))
+
+
+### BREAKING CHANGES
+
+* **api:** this is the initial implementation
diff --git a/deps/npm/node_modules/npm-lifecycle/LICENSE b/deps/npm/node_modules/npm-lifecycle/LICENSE
new file mode 100644
index 00000000000000..0b6c2287459632
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/LICENSE
@@ -0,0 +1,235 @@
+The npm application
+Copyright (c) npm, Inc. and Contributors
+Licensed on the terms of The Artistic License 2.0
+
+Node package dependencies of the npm application
+Copyright (c) their respective copyright owners
+Licensed on their respective license terms
+
+The npm public registry at https://registry.npmjs.org
+and the npm website at https://www.npmjs.com
+Operated by npm, Inc.
+Use governed by terms published on https://www.npmjs.com
+
+"Node.js"
+Trademark Joyent, Inc., https://joyent.com
+Neither npm nor npm, Inc. are affiliated with Joyent, Inc.
+
+The Node.js application
+Project of Node Foundation, https://nodejs.org
+
+The npm Logo
+Copyright (c) Mathias Pettersson and Brian Hammond
+
+"Gubblebum Blocky" typeface
+Copyright (c) Tjarda Koster, https://jelloween.deviantart.com
+Used with permission
+
+
+--------
+
+
+The Artistic License 2.0
+
+Copyright (c) 2000-2006, The Perl Foundation.
+
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+Preamble
+
+This license establishes the terms under which a given free software
+Package may be copied, modified, distributed, and/or redistributed.
+The intent is that the Copyright Holder maintains some artistic
+control over the development of that Package while still keeping the
+Package available as open source and free software.
+
+You are always permitted to make arrangements wholly outside of this
+license directly with the Copyright Holder of a given Package. If the
+terms of this license do not permit the full use that you propose to
+make of the Package, you should contact the Copyright Holder and seek
+a different licensing arrangement.
+
+Definitions
+
+ "Copyright Holder" means the individual(s) or organization(s)
+ named in the copyright notice for the entire Package.
+
+ "Contributor" means any party that has contributed code or other
+ material to the Package, in accordance with the Copyright Holder's
+ procedures.
+
+ "You" and "your" means any person who would like to copy,
+ distribute, or modify the Package.
+
+ "Package" means the collection of files distributed by the
+ Copyright Holder, and derivatives of that collection and/or of
+ those files. A given Package may consist of either the Standard
+ Version, or a Modified Version.
+
+ "Distribute" means providing a copy of the Package or making it
+ accessible to anyone else, or in the case of a company or
+ organization, to others outside of your company or organization.
+
+ "Distributor Fee" means any fee that you charge for Distributing
+ this Package or providing support for this Package to another
+ party. It does not mean licensing fees.
+
+ "Standard Version" refers to the Package if it has not been
+ modified, or has been modified only in ways explicitly requested
+ by the Copyright Holder.
+
+ "Modified Version" means the Package, if it has been changed, and
+ such changes were not explicitly requested by the Copyright
+ Holder.
+
+ "Original License" means this Artistic License as Distributed with
+ the Standard Version of the Package, in its current version or as
+ it may be modified by The Perl Foundation in the future.
+
+ "Source" form means the source code, documentation source, and
+ configuration files for the Package.
+
+ "Compiled" form means the compiled bytecode, object code, binary,
+ or any other form resulting from mechanical transformation or
+ translation of the Source form.
+
+
+Permission for Use and Modification Without Distribution
+
+(1) You are permitted to use the Standard Version and create and use
+Modified Versions for any purpose without restriction, provided that
+you do not Distribute the Modified Version.
+
+
+Permissions for Redistribution of the Standard Version
+
+(2) You may Distribute verbatim copies of the Source form of the
+Standard Version of this Package in any medium without restriction,
+either gratis or for a Distributor Fee, provided that you duplicate
+all of the original copyright notices and associated disclaimers. At
+your discretion, such verbatim copies may or may not include a
+Compiled form of the Package.
+
+(3) You may apply any bug fixes, portability changes, and other
+modifications made available from the Copyright Holder. The resulting
+Package will still be considered the Standard Version, and as such
+will be subject to the Original License.
+
+
+Distribution of Modified Versions of the Package as Source
+
+(4) You may Distribute your Modified Version as Source (either gratis
+or for a Distributor Fee, and with or without a Compiled form of the
+Modified Version) provided that you clearly document how it differs
+from the Standard Version, including, but not limited to, documenting
+any non-standard features, executables, or modules, and provided that
+you do at least ONE of the following:
+
+ (a) make the Modified Version available to the Copyright Holder
+ of the Standard Version, under the Original License, so that the
+ Copyright Holder may include your modifications in the Standard
+ Version.
+
+ (b) ensure that installation of your Modified Version does not
+ prevent the user installing or running the Standard Version. In
+ addition, the Modified Version must bear a name that is different
+ from the name of the Standard Version.
+
+ (c) allow anyone who receives a copy of the Modified Version to
+ make the Source form of the Modified Version available to others
+ under
+
+ (i) the Original License or
+
+ (ii) a license that permits the licensee to freely copy,
+ modify and redistribute the Modified Version using the same
+ licensing terms that apply to the copy that the licensee
+ received, and requires that the Source form of the Modified
+ Version, and of any works derived from it, be made freely
+ available in that license fees are prohibited but Distributor
+ Fees are allowed.
+
+
+Distribution of Compiled Forms of the Standard Version
+or Modified Versions without the Source
+
+(5) You may Distribute Compiled forms of the Standard Version without
+the Source, provided that you include complete instructions on how to
+get the Source of the Standard Version. Such instructions must be
+valid at the time of your distribution. If these instructions, at any
+time while you are carrying out such distribution, become invalid, you
+must provide new instructions on demand or cease further distribution.
+If you provide valid instructions or cease distribution within thirty
+days after you become aware that the instructions are invalid, then
+you do not forfeit any of your rights under this license.
+
+(6) You may Distribute a Modified Version in Compiled form without
+the Source, provided that you comply with Section 4 with respect to
+the Source of the Modified Version.
+
+
+Aggregating or Linking the Package
+
+(7) You may aggregate the Package (either the Standard Version or
+Modified Version) with other packages and Distribute the resulting
+aggregation provided that you do not charge a licensing fee for the
+Package. Distributor Fees are permitted, and licensing fees for other
+components in the aggregation are permitted. The terms of this license
+apply to the use and Distribution of the Standard or Modified Versions
+as included in the aggregation.
+
+(8) You are permitted to link Modified and Standard Versions with
+other works, to embed the Package in a larger work of your own, or to
+build stand-alone binary or bytecode versions of applications that
+include the Package, and Distribute the result without restriction,
+provided the result does not expose a direct interface to the Package.
+
+
+Items That are Not Considered Part of a Modified Version
+
+(9) Works (including, but not limited to, modules and scripts) that
+merely extend or make use of the Package, do not, by themselves, cause
+the Package to be a Modified Version. In addition, such works are not
+considered parts of the Package itself, and are not subject to the
+terms of this license.
+
+
+General Provisions
+
+(10) Any use, modification, and distribution of the Standard or
+Modified Versions is governed by this Artistic License. By using,
+modifying or distributing the Package, you accept this license. Do not
+use, modify, or distribute the Package, if you do not accept this
+license.
+
+(11) If your Modified Version has been derived from a Modified
+Version made by someone other than you, you are nevertheless required
+to ensure that your Modified Version complies with the requirements of
+this license.
+
+(12) This license does not grant you the right to use any trademark,
+service mark, tradename, or logo of the Copyright Holder.
+
+(13) This license includes the non-exclusive, worldwide,
+free-of-charge patent license to make, have made, use, offer to sell,
+sell, import and otherwise transfer the Package with respect to any
+patent claims licensable by the Copyright Holder that are necessarily
+infringed by the Package. If you institute patent litigation
+(including a cross-claim or counterclaim) against any party alleging
+that the Package constitutes direct or contributory patent
+infringement, then this Artistic License to you shall terminate on the
+date that such litigation is filed.
+
+(14) Disclaimer of Warranty:
+THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS
+IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED
+WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR
+NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL
+LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF
+ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+--------
diff --git a/deps/npm/node_modules/npm-lifecycle/README.md b/deps/npm/node_modules/npm-lifecycle/README.md
new file mode 100644
index 00000000000000..e864593bffed7d
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/README.md
@@ -0,0 +1,45 @@
+# npm-lifecycle [![npm version](https://img.shields.io/npm/v/npm-lifecycle.svg)](https://npm.im/npm-lifecycle) [![license](https://img.shields.io/npm/l/npm-lifecycle.svg)](https://npm.im/npm-lifecycle) [![Travis](https://img.shields.io/travis/npm/lifecycle.svg)](https://travis-ci.org/npm/lifecycle) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/lifecycle?svg=true)](https://ci.appveyor.com/project/npm/lifecycle) [![Coverage Status](https://coveralls.io/repos/github/npm/lifecycle/badge.svg?branch=latest)](https://coveralls.io/github/npm/lifecycle?branch=latest)
+
+[`npm-lifecycle`](https://github.com/npm/lifecycle) is a standalone library for
+executing packages' lifecycle scripts. It is extracted from npm itself and
+intended to be fully compatible with the way npm executes individual scripts.
+
+## Install
+
+`$ npm install npm-lifecycle`
+
+## Table of Contents
+
+* [Example](#example)
+* [Features](#features)
+* [Contributing](#contributing)
+* [API](#api)
+ * [`lifecycle`](#lifecycle)
+
+### Example
+
+```javascript
+// idk yet
+```
+
+### Features
+
+* something cool
+
+### Contributing
+
+The npm team enthusiastically welcomes contributions and project participation!
+There's a bunch of things you can do if you want to contribute! The [Contributor
+Guide](CONTRIBUTING.md) has all the information you need for everything from
+reporting bugs to contributing entire new features. Please don't hesitate to
+jump in if you'd like to, or even ask us questions if something isn't clear.
+
+### API
+
+#### `> lifecycle(pkg, stage, wd, opts) -> Promise`
+
+##### Example
+
+```javascript
+lifecycle()
+```
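+
+Until the example above is filled in, here is a minimal sketch based on the
+implementation in `index.js` (the paths and package data are hypothetical,
+and [`npmlog`](https://npm.im/npmlog) is just one logger that provides the
+`info`/`warn`/`pause`/`resume`/progress interface the code expects):
+
+```javascript
+const lifecycle = require('npm-lifecycle')
+const log = require('npmlog')
+
+// hypothetical package data; `_id` is the "name@version" id npm fills in
+const pkg = {
+  _id: 'example-pkg@1.0.0',
+  name: 'example-pkg',
+  version: '1.0.0',
+  scripts: { postinstall: 'node ./postinstall.js' }
+}
+
+lifecycle(pkg, 'postinstall', '/path/to/node_modules/example-pkg', {
+  log: log, // all lifecycle logging goes through this
+  dir: '/path/to/node_modules', // scripts must run at or below this dir
+  config: {}, // npm config, exported as npm_config_* environment vars
+  unsafePerm: true // skip the uid/gid switch (forced on win32 anyway)
+}).then(() => log.info('example', 'postinstall finished'))
+```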
diff --git a/deps/npm/node_modules/npm-lifecycle/index.js b/deps/npm/node_modules/npm-lifecycle/index.js
new file mode 100644
index 00000000000000..1ed5d3f642e54b
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/index.js
@@ -0,0 +1,435 @@
+'use strict'
+
+exports = module.exports = lifecycle
+exports.makeEnv = makeEnv
+exports._incorrectWorkingDirectory = _incorrectWorkingDirectory
+
+const spawn = require('./lib/spawn')
+const path = require('path')
+const Stream = require('stream').Stream
+const fs = require('graceful-fs')
+const chain = require('slide').chain
+const uidNumber = require('uid-number')
+const umask = require('umask')
+const which = require('which')
+
+let PATH = 'PATH'
+
+// windows calls its path 'Path' usually, but this is not guaranteed.
+if (process.platform === 'win32') {
+ PATH = 'Path'
+ Object.keys(process.env).forEach(function (e) {
+ if (e.match(/^PATH$/i)) {
+ PATH = e
+ }
+ })
+}
+
+function logid (pkg, stage) {
+ return pkg._id + '~' + stage + ':'
+}
+
+function lifecycle (pkg, stage, wd, opts) {
+ return new Promise((resolve, reject) => {
+ while (pkg && pkg._data) pkg = pkg._data
+ if (!pkg) return reject(new Error('Invalid package data'))
+
+ opts.log.info('lifecycle', logid(pkg, stage), pkg._id)
+ if (!pkg.scripts) pkg.scripts = {}
+
+ if (stage === 'prepublish' && opts.ignorePrepublish) {
+ opts.log.info('lifecycle', logid(pkg, stage), 'ignored because ignore-prepublish is set to true', pkg._id)
+ delete pkg.scripts.prepublish
+ }
+
+ if (!pkg.scripts[stage]) return resolve()
+
+ validWd(wd || path.resolve(opts.dir, pkg.name), function (er, wd) {
+ if (er) return reject(er)
+
+ if ((wd.indexOf(opts.dir) !== 0 || _incorrectWorkingDirectory(wd, pkg)) &&
+ !opts.unsafePerm && pkg.scripts[stage]) {
+ opts.log.warn('lifecycle', logid(pkg, stage), 'cannot run in wd',
+ '%s %s (wd=%s)', pkg._id, pkg.scripts[stage], wd
+ )
+ return resolve()
+ }
+
+ // set the env variables, then run scripts as a child process.
+ var env = makeEnv(pkg, opts)
+ env.npm_lifecycle_event = stage
+ env.npm_node_execpath = env.NODE = env.NODE || process.execPath
+ env.npm_execpath = require.main.filename
+ env.INIT_CWD = process.cwd()
+
+      // 'nobody' typically doesn't have permission to write to /tmp;
+      // sh freaks out if TMPDIR is unwritable, even when it's never used.
+ if (!opts.unsafePerm) env.TMPDIR = wd
+
+ lifecycle_(pkg, stage, wd, opts, env, (er) => {
+ if (er) return reject(er)
+ return resolve()
+ })
+ })
+ })
+}
+
+function _incorrectWorkingDirectory (wd, pkg) {
+ return wd.lastIndexOf(pkg.name) !== wd.length - pkg.name.length
+}
+
+function lifecycle_ (pkg, stage, wd, opts, env, cb) {
+ var pathArr = []
+ var p = wd.split(/[\\/]node_modules[\\/]/)
+ var acc = path.resolve(p.shift())
+
+ p.forEach(function (pp) {
+ pathArr.unshift(path.join(acc, 'node_modules', '.bin'))
+ acc = path.join(acc, 'node_modules', pp)
+ })
+ pathArr.unshift(path.join(acc, 'node_modules', '.bin'))
+
+ // we also unshift the bundled node-gyp-bin folder so that
+ // the bundled one will be used for installing things.
+ pathArr.unshift(path.join(__dirname, '..', '..', 'bin', 'node-gyp-bin'))
+
+ if (shouldPrependCurrentNodeDirToPATH(opts)) {
+ // prefer current node interpreter in child scripts
+ pathArr.push(path.dirname(process.execPath))
+ }
+
+ if (env[PATH]) pathArr.push(env[PATH])
+ env[PATH] = pathArr.join(process.platform === 'win32' ? ';' : ':')
+
+ var packageLifecycle = pkg.scripts && pkg.scripts.hasOwnProperty(stage)
+
+ if (opts.ignoreScripts) {
+ opts.log.info('lifecycle', logid(pkg, stage), 'ignored because ignore-scripts is set to true', pkg._id)
+ packageLifecycle = false
+ } else if (packageLifecycle) {
+ // define this here so it's available to all scripts.
+ env.npm_lifecycle_script = pkg.scripts[stage]
+ } else {
+ opts.log.silly('lifecycle', logid(pkg, stage), 'no script for ' + stage + ', continuing')
+ }
+
+ function done (er) {
+ if (er) {
+ if (opts.force) {
+ opts.log.info('lifecycle', logid(pkg, stage), 'forced, continuing', er)
+ er = null
+ } else if (opts.failOk) {
+ opts.log.warn('lifecycle', logid(pkg, stage), 'continuing anyway', er.message)
+ er = null
+ }
+ }
+ cb(er)
+ }
+
+ chain(
+ [
+ packageLifecycle && [runPackageLifecycle, pkg, env, wd, opts],
+ [runHookLifecycle, pkg, env, wd, opts]
+ ],
+ done
+ )
+}
+
+function shouldPrependCurrentNodeDirToPATH (opts) {
+ const cfgsetting = opts.scriptsPrependNodePath
+ if (cfgsetting === false) return false
+ if (cfgsetting === true) return true
+
+ var isDifferentNodeInPath
+
+ var isWindows = process.platform === 'win32'
+ var foundExecPath
+ try {
+ foundExecPath = which.sync(path.basename(process.execPath), {pathExt: isWindows ? ';' : ':'})
+ // Apply `fs.realpath()` here to avoid false positives when `node` is a symlinked executable.
+ isDifferentNodeInPath = fs.realpathSync(process.execPath).toUpperCase() !==
+ fs.realpathSync(foundExecPath).toUpperCase()
+ } catch (e) {
+ isDifferentNodeInPath = true
+ }
+
+ if (cfgsetting === 'warn-only') {
+ if (isDifferentNodeInPath && !shouldPrependCurrentNodeDirToPATH.hasWarned) {
+ if (foundExecPath) {
+ opts.log.warn('lifecycle', 'The node binary used for scripts is', foundExecPath, 'but npm is using', process.execPath, 'itself. Use the `--scripts-prepend-node-path` option to include the path for the node binary npm was executed with.')
+ } else {
+ opts.log.warn('lifecycle', 'npm is using', process.execPath, 'but there is no node binary in the current PATH. Use the `--scripts-prepend-node-path` option to include the path for the node binary npm was executed with.')
+ }
+ shouldPrependCurrentNodeDirToPATH.hasWarned = true
+ }
+
+ return false
+ }
+
+ return isDifferentNodeInPath
+}
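+
+// Summary of the branches above:
+//   scriptsPrependNodePath: true        -> always prepend the node dir
+//   scriptsPrependNodePath: false       -> never prepend
+//   scriptsPrependNodePath: 'warn-only' -> never prepend, but warn once when
+//                                          the node found in PATH differs
+//                                          from process.execPath
+//   anything else -> prepend only when PATH resolves to a different node
+//                    binary (or to none at all)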
+
+function validWd (d, cb) {
+ fs.stat(d, function (er, st) {
+ if (er || !st.isDirectory()) {
+ var p = path.dirname(d)
+ if (p === d) {
+ return cb(new Error('Could not find suitable wd'))
+ }
+ return validWd(p, cb)
+ }
+ return cb(null, d)
+ })
+}
+
+function runPackageLifecycle (pkg, env, wd, opts, cb) {
+ // run package lifecycle scripts in the package root, or the nearest parent.
+ var stage = env.npm_lifecycle_event
+ var cmd = env.npm_lifecycle_script
+
+ var note = '\n> ' + pkg._id + ' ' + stage + ' ' + wd +
+ '\n> ' + cmd + '\n'
+ runCmd(note, cmd, pkg, env, stage, wd, opts, cb)
+}
+
+var running = false
+var queue = []
+function dequeue () {
+ running = false
+ if (queue.length) {
+ var r = queue.shift()
+ runCmd.apply(null, r)
+ }
+}
+
+function runCmd (note, cmd, pkg, env, stage, wd, opts, cb) {
+ if (running) {
+    queue.push([note, cmd, pkg, env, stage, wd, opts, cb])
+ return
+ }
+
+ running = true
+ opts.log.pause()
+ var unsafe = opts.unsafePerm
+ var user = unsafe ? null : opts.user
+ var group = unsafe ? null : opts.group
+
+ if (opts.log.level !== 'silent') {
+ opts.log.clearProgress()
+ console.log(note)
+ opts.log.showProgress()
+ }
+ opts.log.verbose('lifecycle', logid(pkg, stage), 'unsafe-perm in lifecycle', unsafe)
+
+ if (process.platform === 'win32') {
+ unsafe = true
+ }
+
+ if (unsafe) {
+ runCmd_(cmd, pkg, env, wd, opts, stage, unsafe, 0, 0, cb)
+ } else {
+ uidNumber(user, group, function (er, uid, gid) {
+ runCmd_(cmd, pkg, env, wd, opts, stage, unsafe, uid, gid, cb)
+ })
+ }
+}
+
+function runCmd_ (cmd, pkg, env, wd, opts, stage, unsafe, uid, gid, cb_) {
+ function cb (er) {
+ cb_.apply(null, arguments)
+ opts.log.resume()
+ process.nextTick(dequeue)
+ }
+
+ var conf = {
+ cwd: wd,
+ env: env,
+ stdio: [ 0, 1, 2 ]
+ }
+
+ if (!unsafe) {
+ conf.uid = uid ^ 0
+ conf.gid = gid ^ 0
+ }
+
+ var sh = 'sh'
+ var shFlag = '-c'
+
+ var customShell = opts.scriptShell
+
+ if (customShell) {
+ sh = customShell
+ } else if (process.platform === 'win32') {
+ sh = process.env.comspec || 'cmd'
+ shFlag = '/d /s /c'
+ conf.windowsVerbatimArguments = true
+ }
+
+ opts.log.verbose('lifecycle', logid(pkg, stage), 'PATH:', env[PATH])
+ opts.log.verbose('lifecycle', logid(pkg, stage), 'CWD:', wd)
+ opts.log.silly('lifecycle', logid(pkg, stage), 'Args:', [shFlag, cmd])
+
+ var proc = spawn(sh, [shFlag, cmd], conf, opts.log)
+
+ proc.on('error', procError)
+ proc.on('close', function (code, signal) {
+ opts.log.silly('lifecycle', logid(pkg, stage), 'Returned: code:', code, ' signal:', signal)
+    var er
+    if (signal) {
+      process.kill(process.pid, signal)
+    } else if (code) {
+      er = new Error('Exit status ' + code)
+      er.errno = code
+    }
+ procError(er)
+ })
+ process.once('SIGTERM', procKill)
+ process.once('SIGINT', procInterupt)
+
+ function procError (er) {
+ if (er) {
+ opts.log.info('lifecycle', logid(pkg, stage), 'Failed to exec ' + stage + ' script')
+ er.message = pkg._id + ' ' + stage + ': `' + cmd + '`\n' +
+ er.message
+ if (er.code !== 'EPERM') {
+ er.code = 'ELIFECYCLE'
+ }
+ fs.stat(opts.dir, function (statError, d) {
+ if (statError && statError.code === 'ENOENT' && opts.dir.split(path.sep).slice(-1)[0] === 'node_modules') {
+ opts.log.warn('', 'Local package.json exists, but node_modules missing, did you mean to install?')
+ }
+ })
+ er.pkgid = pkg._id
+ er.stage = stage
+ er.script = cmd
+ er.pkgname = pkg.name
+ }
+    process.removeListener('SIGTERM', procKill)
+    process.removeListener('SIGINT', procInterupt)
+    process.removeListener('SIGINT', procKill)
+ return cb(er)
+ }
+ function procKill () {
+ proc.kill()
+ }
+ function procInterupt () {
+ proc.kill('SIGINT')
+ proc.on('exit', function () {
+ process.exit()
+ })
+ process.once('SIGINT', procKill)
+ }
+}
+
+function runHookLifecycle (pkg, env, wd, opts, cb) {
+ // check for a hook script, run if present.
+ var stage = env.npm_lifecycle_event
+ var hook = path.join(opts.dir, '.hooks', stage)
+ var cmd = hook
+
+ fs.stat(hook, function (er) {
+ if (er) return cb()
+ var note = '\n> ' + pkg._id + ' ' + stage + ' ' + wd +
+ '\n> ' + cmd
+ runCmd(note, hook, pkg, env, stage, wd, opts, cb)
+ })
+}
+
+function makeEnv (data, opts, prefix, env) {
+ prefix = prefix || 'npm_package_'
+ if (!env) {
+ env = {}
+ for (var i in process.env) {
+ if (!i.match(/^npm_/)) {
+ env[i] = process.env[i]
+ }
+ }
+
+ // express and others respect the NODE_ENV value.
+ if (opts.production) env.NODE_ENV = 'production'
+ } else if (!data.hasOwnProperty('_lifecycleEnv')) {
+ Object.defineProperty(data, '_lifecycleEnv',
+ {
+ value: env,
+ enumerable: false
+ }
+ )
+ }
+
+ for (i in data) {
+ if (i.charAt(0) !== '_') {
+ var envKey = (prefix + i).replace(/[^a-zA-Z0-9_]/g, '_')
+ if (i === 'readme') {
+ continue
+ }
+ if (data[i] && typeof data[i] === 'object') {
+ try {
+ // quick and dirty detection for cyclical structures
+ JSON.stringify(data[i])
+ makeEnv(data[i], opts, envKey + '_', env)
+ } catch (ex) {
+ // usually these are package objects.
+ // just get the path and basic details.
+ var d = data[i]
+ makeEnv(
+ { name: d.name, version: d.version, path: d.path },
+ opts,
+ envKey + '_',
+ env
+ )
+ }
+ } else {
+ env[envKey] = String(data[i])
+ env[envKey] = env[envKey].indexOf('\n') !== -1
+ ? JSON.stringify(env[envKey])
+ : env[envKey]
+ }
+ }
+ }
+
+ if (prefix !== 'npm_package_') return env
+
+ prefix = 'npm_config_'
+ var pkgConfig = {}
+ var pkgVerConfig = {}
+ var namePref = data.name + ':'
+ var verPref = data.name + '@' + data.version + ':'
+
+ Object.keys(opts.config).forEach(function (i) {
+ // in some rare cases (e.g. working with nerf darts), there are segmented
+ // "private" (underscore-prefixed) config names -- don't export
+ if ((i.charAt(0) === '_' && i.indexOf('_' + namePref) !== 0) || i.match(/:_/)) {
+ return
+ }
+ var value = opts.config[i]
+ if (value instanceof Stream || Array.isArray(value)) return
+ if (i.match(/umask/)) value = umask.toString(value)
+ if (!value) value = ''
+ else if (typeof value === 'number') value = '' + value
+ else if (typeof value !== 'string') value = JSON.stringify(value)
+
+ value = value.indexOf('\n') !== -1
+ ? JSON.stringify(value)
+ : value
+ i = i.replace(/^_+/, '')
+ var k
+ if (i.indexOf(namePref) === 0) {
+ k = i.substr(namePref.length).replace(/[^a-zA-Z0-9_]/g, '_')
+ pkgConfig[k] = value
+ } else if (i.indexOf(verPref) === 0) {
+ k = i.substr(verPref.length).replace(/[^a-zA-Z0-9_]/g, '_')
+ pkgVerConfig[k] = value
+ }
+ var envKey = (prefix + i).replace(/[^a-zA-Z0-9_]/g, '_')
+ env[envKey] = value
+ })
+
+ prefix = 'npm_package_config_'
+ ;[pkgConfig, pkgVerConfig].forEach(function (conf) {
+ for (var i in conf) {
+ var envKey = (prefix + i)
+ env[envKey] = conf[i]
+ }
+ })
+
+ return env
+}
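+
+// Rough illustration of the mapping above (hypothetical values): given
+//   data = { name: 'foo', version: '1.2.3', config: { port: '8080' } }
+//   opts.config = { 'foo:port': '80' }
+// makeEnv() produces, among others:
+//   npm_package_name = 'foo'
+//   npm_package_version = '1.2.3'
+//   npm_config_foo_port = '80'      (':' is not [a-zA-Z0-9_], so it maps to '_')
+//   npm_package_config_port = '80'  (the 'foo:port' npm config wins over '8080')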
diff --git a/deps/npm/node_modules/npm-lifecycle/lib/spawn.js b/deps/npm/node_modules/npm-lifecycle/lib/spawn.js
new file mode 100644
index 00000000000000..30e5b81846977a
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/lib/spawn.js
@@ -0,0 +1,65 @@
+'use strict'
+
+module.exports = spawn
+
+const _spawn = require('child_process').spawn
+const EventEmitter = require('events').EventEmitter
+
+let progressEnabled
+let running = 0
+
+function startRunning (log) {
+ if (progressEnabled == null) progressEnabled = log.progressEnabled
+ if (progressEnabled) log.disableProgress()
+ ++running
+}
+
+function stopRunning (log) {
+ --running
+ if (progressEnabled && running === 0) log.enableProgress()
+}
+
+function willCmdOutput (stdio) {
+ if (stdio === 'inherit') return true
+ if (!Array.isArray(stdio)) return false
+ for (let fh = 1; fh <= 2; ++fh) {
+ if (stdio[fh] === 'inherit') return true
+ if (stdio[fh] === 1 || stdio[fh] === 2) return true
+ }
+ return false
+}
+
+function spawn (cmd, args, options, log) {
+ const cmdWillOutput = willCmdOutput(options && options.stdio)
+
+ if (cmdWillOutput) startRunning(log)
+ const raw = _spawn(cmd, args, options)
+ const cooked = new EventEmitter()
+
+ raw.on('error', function (er) {
+ if (cmdWillOutput) stopRunning(log)
+ er.file = cmd
+ cooked.emit('error', er)
+ }).on('close', function (code, signal) {
+ if (cmdWillOutput) stopRunning(log)
+ // Create ENOENT error because Node.js v8.0 will not emit
+ // an `error` event if the command could not be found.
+ if (code === 127) {
+ const er = new Error('spawn ENOENT')
+ er.code = 'ENOENT'
+ er.errno = 'ENOENT'
+ er.syscall = 'spawn'
+ er.file = cmd
+ cooked.emit('error', er)
+ } else {
+ cooked.emit('close', code, signal)
+ }
+ })
+
+ cooked.stdin = raw.stdin
+ cooked.stdout = raw.stdout
+ cooked.stderr = raw.stderr
+ cooked.kill = function (sig) { return raw.kill(sig) }
+
+ return cooked
+}
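+
+// Usage sketch (hypothetical command; `log` needs the npmlog-style progress
+// API used above: `progressEnabled`, `disableProgress()`, `enableProgress()`):
+//
+//   const proc = spawn('sh', ['-c', 'echo hello'], { stdio: 'inherit' }, log)
+//   proc.on('close', (code, signal) => { /* normal exit path */ })
+//   proc.on('error', (er) => { /* spawn failures, incl. code 127 as ENOENT */ })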
diff --git a/deps/npm/node_modules/npm-lifecycle/package.json b/deps/npm/node_modules/npm-lifecycle/package.json
new file mode 100644
index 00000000000000..5f766aee40aec6
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/package.json
@@ -0,0 +1,78 @@
+{
+ "_from": "npm-lifecycle@1.0.2",
+ "_id": "npm-lifecycle@1.0.2",
+ "_inBundle": false,
+ "_integrity": "sha512-tuSfwKmTiJMVeQdvAfYJ1I+sC6tTpN2W6UWuHPkCy4FtM29Oc+DsuMDdTSaJTmbTairCtx9gXPI8DqZL42GF5g==",
+ "_location": "/npm-lifecycle",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "version",
+ "registry": true,
+ "raw": "npm-lifecycle@1.0.2",
+ "name": "npm-lifecycle",
+ "escapedName": "npm-lifecycle",
+ "rawSpec": "1.0.2",
+ "saveSpec": null,
+ "fetchSpec": "1.0.2"
+ },
+ "_requiredBy": [
+ "#USER",
+ "/"
+ ],
+ "_resolved": "https://registry.npmjs.org/npm-lifecycle/-/npm-lifecycle-1.0.2.tgz",
+ "_shasum": "1ff54561a7cec3813e89c22a362de4a1ed7e2ae5",
+ "_spec": "npm-lifecycle@1.0.2",
+ "_where": "/Users/rebecca/code/npm",
+ "author": {
+ "name": "Mike Sherov"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/lifecycle/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "graceful-fs": "^4.1.11",
+ "slide": "^1.1.6",
+ "uid-number": "0.0.6",
+ "umask": "^1.1.0",
+ "which": "^1.3.0"
+ },
+ "deprecated": false,
+ "description": "JavaScript package lifecycle hook runner",
+ "devDependencies": {
+ "nyc": "^11.1.0",
+ "standard": "^10.0.3",
+ "standard-version": "^4.2.0",
+ "tap": "^10.7.2",
+ "weallbehave": "^1.2.0",
+ "weallcontribute": "^1.0.8"
+ },
+ "files": [
+ "index.js",
+ "lib/spawn.js"
+ ],
+ "homepage": "https://github.com/npm/lifecycle#readme",
+ "keywords": [
+ "npm",
+ "lifecycle",
+ "hook",
+ "runner"
+ ],
+ "license": "Artistic-2.0",
+ "main": "index.js",
+ "name": "npm-lifecycle",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/npm/lifecycle.git"
+ },
+ "scripts": {
+ "postrelease": "npm publish && git push --follow-tags",
+ "prerelease": "npm t",
+ "pretest": "standard",
+ "release": "standard-version -s",
+ "test": "tap -J --coverage test/*.js",
+ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
+ "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
+ },
+ "version": "1.0.2"
+}
diff --git a/deps/npm/node_modules/fstream/LICENSE b/deps/npm/node_modules/npm-packlist/LICENSE
similarity index 100%
rename from deps/npm/node_modules/fstream/LICENSE
rename to deps/npm/node_modules/npm-packlist/LICENSE
diff --git a/deps/npm/node_modules/npm-packlist/README.md b/deps/npm/node_modules/npm-packlist/README.md
new file mode 100644
index 00000000000000..9efcc2c69cc376
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/README.md
@@ -0,0 +1,69 @@
+# npm-packlist
+
+[![Build Status](https://travis-ci.com/npm/npm-packlist.svg?token=hHeDp9pQmz9kvsgRNVHy&branch=master)](https://travis-ci.com/npm/npm-packlist)
+
+Get a list of the files to add from a folder into an npm package
+
+These can be handed to [tar](http://npm.im/tar) like so to make an npm
+package tarball:
+
+```js
+const packlist = require('npm-packlist')
+const tar = require('tar')
+const packageDir = '/path/to/package'
+const packageTarball = '/path/to/package.tgz'
+
+packlist({ path: packageDir })
+ .then(files => tar.create({
+ prefix: 'package/',
+ cwd: packageDir,
+ file: packageTarball,
+ gzip: true
+ }, files))
+ .then(_ => {
+ // tarball has been created, continue with your day
+ })
+```
+
+This uses the following rules:
+
+1. If a `package.json` file is found, and it has a `files` list,
+ then ignore everything that isn't in `files`. Always include the
+ readme, license, notice, changes, changelog, and history files, if
+ they exist, and the package.json file itself.
+2. If there's no `package.json` file (or it has no `files` list), and
+ there is a `.npmignore` file, then ignore all the files in the
+ `.npmignore` file.
+3. If there's no `package.json` with a `files` list, and there's no
+ `.npmignore` file, but there is a `.gitignore` file, then ignore
+ all the files in the `.gitignore` file.
+4. Everything in the root `node_modules` is ignored, unless it's a
+ bundled dependency. If it IS a bundled dependency, and it's a
+ symbolic link, then the target of the link is included, not the
+ symlink itself.
+5. Unless they're explicitly included (by being in a `files` list, or
+ a `!negated` rule in a relevant `.npmignore` or `.gitignore`),
+ always ignore certain common cruft files:
+
+ 1. .npmignore and .gitignore files (their effect is in the package
+ already, there's no need to include them in the package)
+ 2. editor junk like `.*.swp`, `._*` and `.*.orig` files
+ 3. A `/test/` or `/tests/` folder at the root
+ 4. `.npmrc` files (these may contain private configs)
+ 5. The `node_modules/.bin` folder
+ 6. Waf and gyp cruft like `/build/config.gypi` and `.lock-wscript`
+ 7. Darwin's `.DS_Store` files because wtf are those even
+ 8. `npm-debug.log` files at the root of a project
+
+ You can explicitly re-include any of these with a `files` list in
+ `package.json` or a negated ignore file rule.
+
+## API
+
+Same API as [ignore-walk](http://npm.im/ignore-walk), just hard-coded
+file list and rule sets.
+
+The `Walker` and `WalkerSync` classes take a `bundled` argument, which
+is a list of package names to include from node_modules. When calling
+the top-level `packlist()` and `packlist.sync()` functions, this
+module calls into `npm-bundled` directly.
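+
+A synchronous variant of the example above, under the same hypothetical
+paths:
+
+```js
+const packlist = require('npm-packlist')
+const files = packlist.sync({ path: '/path/to/package' })
+// `files` is a sorted array of relative paths, package.json first
+console.log(files)
+```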
diff --git a/deps/npm/node_modules/npm-packlist/index.js b/deps/npm/node_modules/npm-packlist/index.js
new file mode 100644
index 00000000000000..b53391d8a1f333
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/index.js
@@ -0,0 +1,214 @@
+'use strict'
+
+// Do a two-pass walk, first to get the list of packages that need to be
+// bundled, then again to get the actual files and folders.
+// Keep a cache of node_modules content and package.json data, so that the
+// second walk doesn't have to re-do all the same work.
+
+const bundleWalk = require('npm-bundled')
+const BundleWalker = bundleWalk.BundleWalker
+const BundleWalkerSync = bundleWalk.BundleWalkerSync
+
+const ignoreWalk = require('ignore-walk')
+const IgnoreWalker = ignoreWalk.Walker
+const IgnoreWalkerSync = ignoreWalk.WalkerSync
+
+const rootBuiltinRules = Symbol('root-builtin-rules')
+const packageNecessaryRules = Symbol('package-necessary-rules')
+const path = require('path')
+
+const defaultRules = [
+ '.npmignore',
+ '.gitignore',
+ '**/.git/',
+ '**/.svn/',
+ '**/.hg/',
+ '**/CVS/',
+ '/.lock-wscript',
+ '/.wafpickle-*',
+ '/build/config.gypi',
+ 'npm-debug.log',
+ '**/.npmrc',
+ '.*.swp',
+ '.DS_Store',
+ '._*',
+ '*.orig',
+ '/test{,s}/',
+ 'package-lock.json'
+]
+
+// a decorator that applies our custom rules to an ignore walker
+const npmWalker = Class => class Walker extends Class {
+ constructor (opt) {
+ opt = opt || {}
+
+ // the order in which rules are applied.
+ opt.ignoreFiles = [
+ rootBuiltinRules,
+ 'package.json',
+ '.npmignore',
+ '.gitignore',
+ packageNecessaryRules
+ ]
+
+ opt.includeEmpty = false
+ opt.path = opt.path || process.cwd()
+ opt.follow = path.basename(opt.path) === 'node_modules'
+ super(opt)
+
+ // ignore a bunch of things by default at the root level.
+ // also ignore anything in node_modules, except bundled dependencies
+ if (!this.parent) {
+ this.bundled = opt.bundled || []
+ this.bundledScopes = Array.from(new Set(
+ this.bundled.filter(f => /^@/.test(f))
+ .map(f => f.split('/')[0])))
+ const rules = defaultRules.join('\n') + '\n'
+ this.packageJsonCache = opt.packageJsonCache || new Map()
+ super.onReadIgnoreFile(rootBuiltinRules, rules, _=>_)
+ } else {
+ this.bundled = []
+ this.bundledScopes = []
+ this.packageJsonCache = this.parent.packageJsonCache
+ }
+ }
+
+ filterEntry (entry, partial) {
+ // get the partial path from the root of the walk
+ const p = this.path.substr(this.root.length + 1)
+ const pkgre = /^node_modules\/(@[^\/]+\/?[^\/]+|[^\/]+)(\/.*)?$/
+ const pkg = pkgre.test(entry) ? entry.replace(pkgre, '$1') : null
+
+ return (
+ // if we're in a bundled package, check with the parent.
+ /^node_modules($|\/)/i.test(p) ? this.parent.filterEntry(
+ this.basename + '/' + entry, partial)
+
+ // if package is bundled, all files included
+ // also include @scope dirs for bundled scoped deps
+ // they'll be ignored if no files end up in them.
+ : pkg ? -1 !== this.bundled.indexOf(pkg) ||
+ -1 !== this.bundledScopes.indexOf(pkg)
+
+ // only walk top node_modules if we want to bundle something
+ : entry === 'node_modules' && !this.parent ? !!this.bundled.length
+
+ // always include package.json at the root.
+ : entry === 'package.json' && !this.parent ? true
+
+ // otherwise, follow ignore-walk's logic
+ : super.filterEntry(entry, partial)
+ )
+ }
+
+ filterEntries () {
+ if (this.ignoreRules['package.json'])
+ this.ignoreRules['.gitignore'] = this.ignoreRules['.npmignore'] = null
+ else if (this.ignoreRules['.npmignore'])
+ this.ignoreRules['.gitignore'] = null
+ this.filterEntries = super.filterEntries
+ super.filterEntries()
+ }
+
+ addIgnoreFile (file, then) {
+ const ig = path.resolve(this.path, file)
+ if (this.packageJsonCache.has(ig))
+ this.onPackageJson(ig, this.packageJsonCache.get(ig), then)
+ else
+ super.addIgnoreFile(file, then)
+ }
+
+ onPackageJson (ig, pkg, then) {
+ this.packageJsonCache.set(ig, pkg)
+
+ // if there's a browser or main, make sure we don't ignore it
+ const rules = [
+ pkg.browser ? '!' + pkg.browser : '',
+ pkg.main ? '!' + pkg.main : '',
+ '!@(readme|license|licence|notice|changes|changelog|history){,.*}'
+ ].filter(f => f).join('\n') + '\n'
+ super.onReadIgnoreFile(packageNecessaryRules, rules, _=>_)
+
+ if (Array.isArray(pkg.files))
+ super.onReadIgnoreFile('package.json', '*\n' + pkg.files.map(
+ f => '!' + f + '\n!' + f.replace(/\/+$/, '') + '/**'
+ ).join('\n') + '\n', then)
+ else
+ then()
+ }
+
+ // override parent onstat function to nix all symlinks
+ onstat (st, entry, file, dir, then) {
+ if (st.isSymbolicLink())
+ then()
+ else
+ super.onstat(st, entry, file, dir, then)
+ }
+
+ onReadIgnoreFile (file, data, then) {
+ if (file === 'package.json')
+ try {
+ this.onPackageJson(file, JSON.parse(data), then)
+ } catch (er) {
+ // ignore package.json files that are not json
+ then()
+ }
+ else
+ super.onReadIgnoreFile(file, data, then)
+ }
+
+ sort (a, b) {
+ return sort(a, b)
+ }
+}
+
+class Walker extends npmWalker(IgnoreWalker) {
+ walker (entry, then) {
+ new Walker(this.walkerOpt(entry)).on('done', then).start()
+ }
+}
+
+class WalkerSync extends npmWalker(IgnoreWalkerSync) {
+ walker (entry, then) {
+ new WalkerSync(this.walkerOpt(entry)).start()
+ then()
+ }
+}
+
+const walk = (options, callback) => {
+ options = options || {}
+ const p = new Promise((resolve, reject) => {
+ const bw = new BundleWalker(options).start()
+ bw.on('done', bundled => {
+ options.bundled = bundled
+ options.packageJsonCache = bw.packageJsonCache
+ new Walker(options).on('done', resolve).on('error', reject).start()
+ })
+ })
+ return callback ? p.then(res => callback(null, res), callback) : p
+}
+
+const walkSync = options => {
+ options = options || {}
+ const bw = new BundleWalkerSync(options).start()
+ options.bundled = bw.result
+ options.packageJsonCache = bw.packageJsonCache
+ const walker = new WalkerSync(options)
+ walker.start()
+ return walker.result
+}
+
+// package.json first, node_modules last, files before folders, alphasort
+const sort = (a, b) =>
+ a === 'package.json' ? -1
+ : b === 'package.json' ? 1
+ : /^node_modules/.test(a) && !/^node_modules/.test(b) ? 1
+ : /^node_modules/.test(b) && !/^node_modules/.test(a) ? -1
+ : path.dirname(a) === '.' && path.dirname(b) !== '.' ? -1
+ : path.dirname(b) === '.' && path.dirname(a) !== '.' ? 1
+ : a.localeCompare(b)
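+
+// e.g. (illustrative) ['node_modules/a.js', 'lib/b.js', 'README.md',
+// 'package.json'] sorts to ['package.json', 'README.md', 'lib/b.js',
+// 'node_modules/a.js']: package.json first, node_modules last, root-level
+// files before nested ones, alphabetical otherwise.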
+
+module.exports = walk
+walk.sync = walkSync
+walk.Walker = Walker
+walk.WalkerSync = WalkerSync
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/LICENSE b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/LICENSE
similarity index 100%
rename from deps/npm/node_modules/tar/node_modules/block-stream/LICENSE
rename to deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/LICENSE
diff --git a/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/README.md b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/README.md
new file mode 100644
index 00000000000000..66b69e894b2dd1
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/README.md
@@ -0,0 +1,60 @@
+# ignore-walk
+
+[![Build
+Status](https://travis-ci.org/isaacs/ignore-walk.svg?branch=master)](https://travis-ci.org/isaacs/ignore-walk)
+
+Nested/recursive `.gitignore`/`.npmignore` parsing and filtering.
+
+Walk a directory creating a list of entries, parsing any `.ignore`
+files met along the way to exclude files.
+
+## USAGE
+
+```javascript
+const walk = require('ignore-walk')
+
+// All options are optional, defaults provided.
+
+// this function returns a promise, but you can also pass a cb
+// if you like that approach better.
+walk({
+ path: '...', // root dir to start in. defaults to process.cwd()
+ ignoreFiles: [ '.gitignore' ], // list of filenames. defaults to ['.ignore']
+ includeEmpty: true|false, // true to include empty dirs, default false
+ follow: true|false // true to follow symlink dirs, default false
+}, callback)
+
+// to walk synchronously, do it this way:
+const result = walk.sync({ path: '/wow/such/filepath' })
+```
+
+If you want to get at the underlying classes, they're at `walk.Walker`
+and `walk.WalkerSync`.
+
+## OPTIONS
+
+* `path` The path to start in. Defaults to `process.cwd()`
+
+* `ignoreFiles` Filenames to treat as ignore files. The default is
+ `['.ignore']`. (This is where you'd put `.gitignore` or
+ `.npmignore` or whatever.) If multiple ignore files are in a
+ directory, then rules from each are applied in the order that the
+ files are listed.
+
+* `includeEmpty` Set to `true` to include empty directories, assuming
+ they are not excluded by any of the ignore rules. If not set, then
+ this follows the standard `git` behavior of not including
+ directories that are empty.
+
+ Note: this will cause an empty directory to be included if it
+ would contain an included entry, even if it would have otherwise
+ been excluded itself.
+
+ For example, given the rules `*` (ignore everything) and `!/a/b/c`
+ (re-include the entry at `/a/b/c`), the directory `/a/b` will be
+ included if it is empty.
+
+* `follow` Set to `true` to treat symbolically linked directories as
+ directories, recursing into them. There is no handling for nested
+ symlinks, so `ELOOP` errors can occur in some cases when using this
+ option. Defaults to `false`.
diff --git a/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/index.js b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/index.js
new file mode 100644
index 00000000000000..22517fb0e68ebe
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/index.js
@@ -0,0 +1,266 @@
+'use strict'
+
+const fs = require('fs')
+const path = require('path')
+const EE = require('events').EventEmitter
+const Minimatch = require('minimatch').Minimatch
+
+class Walker extends EE {
+ constructor (opts) {
+ opts = opts || {}
+ super(opts)
+ this.path = opts.path || process.cwd()
+ this.basename = path.basename(this.path)
+ this.ignoreFiles = opts.ignoreFiles || [ '.ignore' ]
+ this.ignoreRules = {}
+ this.parent = opts.parent || null
+ this.includeEmpty = !!opts.includeEmpty
+ this.root = this.parent ? this.parent.root : this.path
+ this.follow = !!opts.follow
+ this.result = this.parent ? this.parent.result : []
+ this.entries = null
+ this.sawError = false
+ }
+
+ sort (a, b) {
+ return a.localeCompare(b)
+ }
+
+ emit (ev, data) {
+ let ret = false
+ if (!(this.sawError && ev === 'error')) {
+ if (ev === 'error')
+ this.sawError = true
+ else if (ev === 'done' && !this.parent)
+ data = data.sort(this.sort)
+ if (ev === 'error' && this.parent)
+ ret = this.parent.emit('error', data)
+ else
+ ret = super.emit(ev, data)
+ }
+ return ret
+ }
+
+ start () {
+ fs.readdir(this.path, (er, entries) =>
+ er ? this.emit('error', er) : this.onReaddir(entries))
+ return this
+ }
+
+ isIgnoreFile (e) {
+ return e !== "." &&
+ e !== ".." &&
+ -1 !== this.ignoreFiles.indexOf(e)
+ }
+
+ onReaddir (entries) {
+ this.entries = entries
+ if (entries.length === 0) {
+ if (this.includeEmpty)
+ this.result.push(this.path.substr(this.root.length + 1))
+ this.emit('done', this.result)
+ } else {
+ const hasIg = this.entries.some(e =>
+ this.isIgnoreFile(e))
+
+ if (hasIg)
+ this.addIgnoreFiles()
+ else
+ this.filterEntries()
+ }
+ }
+
+ addIgnoreFiles () {
+ const newIg = this.entries
+ .filter(e => this.isIgnoreFile(e))
+
+ let igCount = newIg.length
+ const then = _ => {
+ if (--igCount === 0)
+ this.filterEntries()
+ }
+
+ newIg.forEach(e => this.addIgnoreFile(e, then))
+ }
+
+ addIgnoreFile (file, then) {
+ const ig = path.resolve(this.path, file)
+ fs.readFile(ig, 'utf8', (er, data) =>
+ er ? this.emit('error', er) : this.onReadIgnoreFile(file, data, then))
+ }
+
+ onReadIgnoreFile (file, data, then) {
+ const mmopt = {
+ matchBase: true,
+ dot: true,
+ flipNegate: true,
+ nocase: true
+ }
+ const rules = data.split(/\r?\n/)
+ .filter(line => !/^#|^$/.test(line.trim()))
+ .map(r => new Minimatch(r, mmopt))
+
+ if (rules.length)
+ this.ignoreRules[file] = rules
+
+ then()
+ }
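+
+  // e.g. (illustrative) an ignore file containing "# comment\n\n*.tmp\n!keep.tmp\n"
+  // yields two Minimatch rules: '*.tmp' (negate: false) and 'keep.tmp'
+  // (negate: true; flipNegate makes match() report raw hits either way).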
+
+ filterEntries () {
+ // at this point we either have ignore rules, or just inheriting
+ // this exclusion is at the point where we know the list of
+ // entries in the dir, but don't know what they are. since
+ // some of them *might* be directories, we have to run the
+ // match in dir-mode as well, so that we'll pick up partials
+ // of files that will be included later. Anything included
+ // at this point will be checked again later once we know
+ // what it is.
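+    //
+    // For example (illustrative), given the rules `*` and `!/a/b/c`,
+    // the entry `a` fails the file-mode match but passes the dir-mode
+    // (partial) match, so the walk descends into it and can eventually
+    // re-include /a/b/c.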
+ const filtered = this.entries.map(entry => {
+ // at this point, we don't know if it's a dir or not.
+ const passFile = this.filterEntry(entry)
+ const passDir = this.filterEntry(entry, true)
+ return (passFile || passDir) ? [entry, passFile, passDir] : false
+ }).filter(e => e)
+
+ // now we stat them all
+ // if it's a dir, and passes as a dir, then recurse
+ // if it's not a dir, but passes as a file, add to set
+ let entryCount = filtered.length
+ if (entryCount === 0) {
+ this.emit('done', this.result)
+ } else {
+ const then = _ => {
+        if (--entryCount === 0)
+ this.emit('done', this.result)
+ }
+ filtered.forEach(filt => {
+ const entry = filt[0]
+ const file = filt[1]
+ const dir = filt[2]
+ this.stat(entry, file, dir, then)
+ })
+ }
+ }
+
+ onstat (st, entry, file, dir, then) {
+ const abs = this.path + '/' + entry
+ if (!st.isDirectory()) {
+ if (file)
+ this.result.push(abs.substr(this.root.length + 1))
+ then()
+ } else {
+ // is a directory
+ if (dir)
+ this.walker(entry, then)
+ else
+ then()
+ }
+ }
+
+ stat (entry, file, dir, then) {
+ const abs = this.path + '/' + entry
+ fs[this.follow ? 'stat' : 'lstat'](abs, (er, st) => {
+ if (er)
+ this.emit('error', er)
+ else
+ this.onstat(st, entry, file, dir, then)
+ })
+ }
+
+ walkerOpt (entry) {
+ return {
+ path: this.path + '/' + entry,
+ parent: this,
+ ignoreFiles: this.ignoreFiles,
+ follow: this.follow,
+ includeEmpty: this.includeEmpty
+ }
+ }
+
+ walker (entry, then) {
+ new Walker(this.walkerOpt(entry)).on('done', then).start()
+ }
+
+ filterEntry (entry, partial) {
+ let included = true
+
+ // this = /a/b/c
+ // entry = d
+ // parent /a/b sees c/d
+ if (this.parent && this.parent.filterEntry) {
+      const pt = this.basename + '/' + entry
+ included = this.parent.filterEntry(pt, partial)
+ }
+
+ this.ignoreFiles.forEach(f => {
+ if (this.ignoreRules[f]) {
+ this.ignoreRules[f].forEach(rule => {
+ // negation means inclusion
+ // so if it's negated, and already included, no need to check
+ // likewise if it's neither negated nor included
+ if (rule.negate !== included) {
+ // first, match against /foo/bar
+ // then, against foo/bar
+ // then, in the case of partials, match with a /
+ const match = rule.match('/' + entry) ||
+ rule.match(entry) ||
+ (!!partial && (
+ rule.match('/' + entry + '/') ||
+ rule.match(entry + '/'))) ||
+ (!!partial && rule.negate && (
+ rule.match('/' + entry, true) ||
+ rule.match(entry, true)))
+
+ if (match)
+ included = rule.negate
+ }
+ })
+ }
+ })
+
+ return included
+ }
+}
+
+class WalkerSync extends Walker {
+ constructor (opt) {
+ super(opt)
+ }
+
+ start () {
+ this.onReaddir(fs.readdirSync(this.path))
+ return this
+ }
+
+ addIgnoreFile (file, then) {
+ const ig = path.resolve(this.path, file)
+ this.onReadIgnoreFile(file, fs.readFileSync(ig, 'utf8'), then)
+ }
+
+ stat (entry, file, dir, then) {
+ const abs = this.path + '/' + entry
+ const st = fs[this.follow ? 'statSync' : 'lstatSync'](abs)
+ this.onstat(st, entry, file, dir, then)
+ }
+
+ walker (entry, then) {
+ new WalkerSync(this.walkerOpt(entry)).start()
+ then()
+ }
+}
+
+const walk = (options, callback) => {
+ const p = new Promise((resolve, reject) => {
+ new Walker(options).on('done', resolve).on('error', reject).start()
+ })
+ return callback ? p.then(res => callback(null, res), callback) : p
+}
+
+const walkSync = options => {
+ return new WalkerSync(options).start().result
+}
+
+module.exports = walk
+walk.sync = walkSync
+walk.Walker = Walker
+walk.WalkerSync = WalkerSync
diff --git a/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/LICENSE b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/LICENSE
new file mode 100644
index 00000000000000..19129e315fe593
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/README.md b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/README.md
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/README.md
rename to deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/README.md
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/minimatch.js b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/minimatch.js
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/minimatch.js
rename to deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/minimatch.js
diff --git a/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/README.md b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/README.md
new file mode 100644
index 00000000000000..778a1c3c1dc0d3
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/README.md
@@ -0,0 +1,123 @@
+# brace-expansion
+
+[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
+as known from sh/bash, in JavaScript.
+
+[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion)
+[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion)
+[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/)
+
+[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion)
+
+## Example
+
+```js
+var expand = require('brace-expansion');
+
+expand('file-{a,b,c}.jpg')
+// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
+
+expand('-v{,,}')
+// => ['-v', '-v', '-v']
+
+expand('file{0..2}.jpg')
+// => ['file0.jpg', 'file1.jpg', 'file2.jpg']
+
+expand('file-{a..c}.jpg')
+// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
+
+expand('file{2..0}.jpg')
+// => ['file2.jpg', 'file1.jpg', 'file0.jpg']
+
+expand('file{0..4..2}.jpg')
+// => ['file0.jpg', 'file2.jpg', 'file4.jpg']
+
+expand('file-{a..e..2}.jpg')
+// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']
+
+expand('file{00..10..5}.jpg')
+// => ['file00.jpg', 'file05.jpg', 'file10.jpg']
+
+expand('{{A..C},{a..c}}')
+// => ['A', 'B', 'C', 'a', 'b', 'c']
+
+expand('ppp{,config,oe{,conf}}')
+// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
+```
+
+## API
+
+```js
+var expand = require('brace-expansion');
+```
+
+### var expanded = expand(str)
+
+Return an array of all possible and valid expansions of `str`. If none are
+found, `[str]` is returned.
+
+Valid expansions are:
+
+```js
+/^(.*,)+(.+)?$/
+// {a,b,...}
+```
+
+A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.
+
+```js
+/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
+// {x..y[..incr]}
+```
+
+A numeric sequence from `x` to `y` inclusive, with optional increment.
+If `x` or `y` start with a leading `0`, all the numbers will be padded
+to have equal length. Negative numbers and backwards iteration work too.
+
+```js
+/^[a-zA-Z]\.\.[a-zA-Z](\.\.-?\d+)?$/
+// {x..y[..incr]}
+```
+
+An alphabetic sequence from `x` to `y` inclusive, with optional increment.
+`x` and `y` must be exactly one character, and if given, `incr` must be a
+number.
+
+For compatibility reasons, the string `${` is not eligible for brace expansion.
+
+## Installation
+
+With [npm](https://npmjs.org) do:
+
+```bash
+npm install brace-expansion
+```
+
+## Contributors
+
+- [Julian Gruber](https://github.com/juliangruber)
+- [Isaac Z. Schlueter](https://github.com/isaacs)
+
+## License
+
+(MIT)
+
+Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/index.js b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/index.js
new file mode 100644
index 00000000000000..2b6f4f85c951fc
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/index.js
@@ -0,0 +1,200 @@
+var concatMap = require('concat-map');
+var balanced = require('balanced-match');
+
+module.exports = expandTop;
+
+var escSlash = '\0SLASH'+Math.random()+'\0';
+var escOpen = '\0OPEN'+Math.random()+'\0';
+var escClose = '\0CLOSE'+Math.random()+'\0';
+var escComma = '\0COMMA'+Math.random()+'\0';
+var escPeriod = '\0PERIOD'+Math.random()+'\0';
+
+function numeric(str) {
+ return parseInt(str, 10) == str
+ ? parseInt(str, 10)
+ : str.charCodeAt(0);
+}
+
+function escapeBraces(str) {
+ return str.split('\\\\').join(escSlash)
+ .split('\\{').join(escOpen)
+ .split('\\}').join(escClose)
+ .split('\\,').join(escComma)
+ .split('\\.').join(escPeriod);
+}
+
+function unescapeBraces(str) {
+ return str.split(escSlash).join('\\')
+ .split(escOpen).join('{')
+ .split(escClose).join('}')
+ .split(escComma).join(',')
+ .split(escPeriod).join('.');
+}
+
+
+// Basically just str.split(","), but handling cases
+// where we have nested braced sections, which should be
+// treated as individual members, like {a,{b,c},d}
+function parseCommaParts(str) {
+ if (!str)
+ return [''];
+
+ var parts = [];
+ var m = balanced('{', '}', str);
+
+ if (!m)
+ return str.split(',');
+
+ var pre = m.pre;
+ var body = m.body;
+ var post = m.post;
+ var p = pre.split(',');
+
+ p[p.length-1] += '{' + body + '}';
+ var postParts = parseCommaParts(post);
+ if (post.length) {
+ p[p.length-1] += postParts.shift();
+ p.push.apply(p, postParts);
+ }
+
+ parts.push.apply(parts, p);
+
+ return parts;
+}
+
+function expandTop(str) {
+ if (!str)
+ return [];
+
+ // I don't know why Bash 4.3 does this, but it does.
+ // Anything starting with {} will have the first two bytes preserved
+ // but *only* at the top level, so {},a}b will not expand to anything,
+ // but a{},b}c will be expanded to [a}c,abc].
+ // One could argue that this is a bug in Bash, but since the goal of
+ // this module is to match Bash's rules, we escape a leading {}
+ if (str.substr(0, 2) === '{}') {
+ str = '\\{\\}' + str.substr(2);
+ }
+
+ return expand(escapeBraces(str), true).map(unescapeBraces);
+}
+
+function identity(e) {
+ return e;
+}
+
+function embrace(str) {
+ return '{' + str + '}';
+}
+function isPadded(el) {
+ return /^-?0\d/.test(el);
+}
+
+function lte(i, y) {
+ return i <= y;
+}
+function gte(i, y) {
+ return i >= y;
+}
+
+function expand(str, isTop) {
+ var expansions = [];
+
+ var m = balanced('{', '}', str);
+ if (!m || /\$$/.test(m.pre)) return [str];
+
+ var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
+ var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
+ var isSequence = isNumericSequence || isAlphaSequence;
+ var isOptions = m.body.indexOf(',') >= 0;
+ if (!isSequence && !isOptions) {
+ // {a},b}
+ if (m.post.match(/,.*\}/)) {
+ str = m.pre + '{' + m.body + escClose + m.post;
+ return expand(str);
+ }
+ return [str];
+ }
+
+ var n;
+ if (isSequence) {
+ n = m.body.split(/\.\./);
+ } else {
+ n = parseCommaParts(m.body);
+ if (n.length === 1) {
+ // x{{a,b}}y ==> x{a}y x{b}y
+ n = expand(n[0], false).map(embrace);
+ if (n.length === 1) {
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+ return post.map(function(p) {
+ return m.pre + n[0] + p;
+ });
+ }
+ }
+ }
+
+ // at this point, n is the parts, and we know it's not a comma set
+ // with a single entry.
+
+ // no need to expand pre, since it is guaranteed to be free of brace-sets
+ var pre = m.pre;
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+
+ var N;
+
+ if (isSequence) {
+ var x = numeric(n[0]);
+ var y = numeric(n[1]);
+    var width = Math.max(n[0].length, n[1].length);
+ var incr = n.length == 3
+ ? Math.abs(numeric(n[2]))
+ : 1;
+ var test = lte;
+ var reverse = y < x;
+ if (reverse) {
+ incr *= -1;
+ test = gte;
+ }
+ var pad = n.some(isPadded);
+
+ N = [];
+
+ for (var i = x; test(i, y); i += incr) {
+ var c;
+ if (isAlphaSequence) {
+ c = String.fromCharCode(i);
+ if (c === '\\')
+ c = '';
+ } else {
+ c = String(i);
+ if (pad) {
+ var need = width - c.length;
+ if (need > 0) {
+ var z = new Array(need + 1).join('0');
+ if (i < 0)
+ c = '-' + z + c.slice(1);
+ else
+ c = z + c;
+ }
+ }
+ }
+ N.push(c);
+ }
+ } else {
+ N = concatMap(n, function(el) { return expand(el, false) });
+ }
+
+ for (var j = 0; j < N.length; j++) {
+ for (var k = 0; k < post.length; k++) {
+ var expansion = pre + N[j] + post[k];
+ if (!isTop || isSequence || expansion)
+ expansions.push(expansion);
+ }
+ }
+
+ return expansions;
+}
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/.npmignore b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/.npmignore
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/.npmignore
rename to deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/.npmignore
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/LICENSE.md b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/LICENSE.md
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/LICENSE.md
rename to deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/LICENSE.md
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/README.md b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/README.md
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/README.md
rename to deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/README.md
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/index.js b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/index.js
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/index.js
rename to deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/index.js
diff --git a/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json
new file mode 100644
index 00000000000000..e20302146cfb1f
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json
@@ -0,0 +1,77 @@
+{
+ "_from": "balanced-match@^1.0.0",
+ "_id": "balanced-match@1.0.0",
+ "_inBundle": false,
+ "_integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
+ "_location": "/npm-packlist/ignore-walk/minimatch/brace-expansion/balanced-match",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "balanced-match@^1.0.0",
+ "name": "balanced-match",
+ "escapedName": "balanced-match",
+ "rawSpec": "^1.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.0"
+ },
+ "_requiredBy": [
+ "/npm-packlist/ignore-walk/minimatch/brace-expansion"
+ ],
+ "_resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
+ "_shasum": "89b4d199ab2bee49de164ea02b89ce462d71b767",
+ "_spec": "balanced-match@^1.0.0",
+ "_where": "/Users/rebecca/code/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion",
+ "author": {
+ "name": "Julian Gruber",
+ "email": "mail@juliangruber.com",
+ "url": "http://juliangruber.com"
+ },
+ "bugs": {
+ "url": "https://github.com/juliangruber/balanced-match/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {},
+ "deprecated": false,
+ "description": "Match balanced character pairs, like \"{\" and \"}\"",
+ "devDependencies": {
+ "matcha": "^0.7.0",
+ "tape": "^4.6.0"
+ },
+ "homepage": "https://github.com/juliangruber/balanced-match",
+ "keywords": [
+ "match",
+ "regexp",
+ "test",
+ "balanced",
+ "parse"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "balanced-match",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/juliangruber/balanced-match.git"
+ },
+ "scripts": {
+ "bench": "make bench",
+ "test": "make test"
+ },
+ "testling": {
+ "files": "test/*.js",
+ "browsers": [
+ "ie/8..latest",
+ "firefox/20..latest",
+ "firefox/nightly",
+ "chrome/25..latest",
+ "chrome/canary",
+ "opera/12..latest",
+ "opera/next",
+ "safari/5.1..latest",
+ "ipad/6.0..latest",
+ "iphone/6.0..latest",
+ "android-browser/4.2..latest"
+ ]
+ },
+ "version": "1.0.0"
+}
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/.travis.yml b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/.travis.yml
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/.travis.yml
rename to deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/.travis.yml
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/LICENSE b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/LICENSE
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/LICENSE
rename to deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/LICENSE
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/README.markdown b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/README.markdown
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/README.markdown
rename to deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/README.markdown
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/example/map.js b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/example/map.js
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/example/map.js
rename to deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/example/map.js
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/index.js b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/index.js
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/index.js
rename to deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/index.js
diff --git a/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json
new file mode 100644
index 00000000000000..b164c408c090ba
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json
@@ -0,0 +1,88 @@
+{
+ "_from": "concat-map@0.0.1",
+ "_id": "concat-map@0.0.1",
+ "_inBundle": false,
+ "_integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
+ "_location": "/npm-packlist/ignore-walk/minimatch/brace-expansion/concat-map",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "version",
+ "registry": true,
+ "raw": "concat-map@0.0.1",
+ "name": "concat-map",
+ "escapedName": "concat-map",
+ "rawSpec": "0.0.1",
+ "saveSpec": null,
+ "fetchSpec": "0.0.1"
+ },
+ "_requiredBy": [
+ "/npm-packlist/ignore-walk/minimatch/brace-expansion"
+ ],
+ "_resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+ "_shasum": "d8a96bd77fd68df7793a73036a3ba0d5405d477b",
+ "_spec": "concat-map@0.0.1",
+ "_where": "/Users/rebecca/code/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion",
+ "author": {
+ "name": "James Halliday",
+ "email": "mail@substack.net",
+ "url": "http://substack.net"
+ },
+ "bugs": {
+ "url": "https://github.com/substack/node-concat-map/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "concatenative mapdashery",
+ "devDependencies": {
+ "tape": "~2.4.0"
+ },
+ "directories": {
+ "example": "example",
+ "test": "test"
+ },
+ "homepage": "https://github.com/substack/node-concat-map#readme",
+ "keywords": [
+ "concat",
+ "concatMap",
+ "map",
+ "functional",
+ "higher-order"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "concat-map",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/substack/node-concat-map.git"
+ },
+ "scripts": {
+ "test": "tape test/*.js"
+ },
+ "testling": {
+ "files": "test/*.js",
+ "browsers": {
+ "ie": [
+ 6,
+ 7,
+ 8,
+ 9
+ ],
+ "ff": [
+ 3.5,
+ 10,
+ 15
+ ],
+ "chrome": [
+ 10,
+ 22
+ ],
+ "safari": [
+ 5.1
+ ],
+ "opera": [
+ 12
+ ]
+ }
+ },
+ "version": "0.0.1"
+}
diff --git a/deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/test/map.js b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/test/map.js
similarity index 100%
rename from deps/npm/node_modules/fstream-npm/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/test/map.js
rename to deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/test/map.js
diff --git a/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/package.json b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/package.json
new file mode 100644
index 00000000000000..7720642bd6ba41
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/node_modules/brace-expansion/package.json
@@ -0,0 +1,75 @@
+{
+ "_from": "brace-expansion@^1.1.7",
+ "_id": "brace-expansion@1.1.8",
+ "_inBundle": false,
+ "_integrity": "sha1-wHshHHyVLsH479Uad+8NHTmQopI=",
+ "_location": "/npm-packlist/ignore-walk/minimatch/brace-expansion",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "brace-expansion@^1.1.7",
+ "name": "brace-expansion",
+ "escapedName": "brace-expansion",
+ "rawSpec": "^1.1.7",
+ "saveSpec": null,
+ "fetchSpec": "^1.1.7"
+ },
+ "_requiredBy": [
+ "/npm-packlist/ignore-walk/minimatch"
+ ],
+ "_resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz",
+ "_shasum": "c07b211c7c952ec1f8efd51a77ef0d1d3990a292",
+ "_spec": "brace-expansion@^1.1.7",
+ "_where": "/Users/rebecca/code/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch",
+ "author": {
+ "name": "Julian Gruber",
+ "email": "mail@juliangruber.com",
+ "url": "http://juliangruber.com"
+ },
+ "bugs": {
+ "url": "https://github.com/juliangruber/brace-expansion/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ },
+ "deprecated": false,
+ "description": "Brace expansion as known from sh/bash",
+ "devDependencies": {
+ "matcha": "^0.7.0",
+ "tape": "^4.6.0"
+ },
+ "homepage": "https://github.com/juliangruber/brace-expansion",
+ "keywords": [],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "brace-expansion",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/juliangruber/brace-expansion.git"
+ },
+ "scripts": {
+ "bench": "matcha test/perf/bench.js",
+ "gentest": "bash test/generate.sh",
+ "test": "tape test/*.js"
+ },
+ "testling": {
+ "files": "test/*.js",
+ "browsers": [
+ "ie/8..latest",
+ "firefox/20..latest",
+ "firefox/nightly",
+ "chrome/25..latest",
+ "chrome/canary",
+ "opera/12..latest",
+ "opera/next",
+ "safari/5.1..latest",
+ "ipad/6.0..latest",
+ "iphone/6.0..latest",
+ "android-browser/4.2..latest"
+ ]
+ },
+ "version": "1.1.8"
+}
diff --git a/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/package.json b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/package.json
new file mode 100644
index 00000000000000..ca45830ea0bf9b
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/node_modules/minimatch/package.json
@@ -0,0 +1,63 @@
+{
+ "_from": "minimatch@^3.0.4",
+ "_id": "minimatch@3.0.4",
+ "_inBundle": false,
+ "_integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
+ "_location": "/npm-packlist/ignore-walk/minimatch",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "minimatch@^3.0.4",
+ "name": "minimatch",
+ "escapedName": "minimatch",
+ "rawSpec": "^3.0.4",
+ "saveSpec": null,
+ "fetchSpec": "^3.0.4"
+ },
+ "_requiredBy": [
+ "/npm-packlist/ignore-walk"
+ ],
+ "_resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
+ "_shasum": "5166e286457f03306064be5497e8dbb0c3d32083",
+ "_spec": "minimatch@^3.0.4",
+ "_where": "/Users/rebecca/code/npm/node_modules/npm-packlist/node_modules/ignore-walk",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/minimatch/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "brace-expansion": "^1.1.7"
+ },
+ "deprecated": false,
+ "description": "a glob matcher in javascript",
+ "devDependencies": {
+ "tap": "^10.3.2"
+ },
+ "engines": {
+ "node": "*"
+ },
+ "files": [
+ "minimatch.js"
+ ],
+ "homepage": "https://github.com/isaacs/minimatch#readme",
+ "license": "ISC",
+ "main": "minimatch.js",
+ "name": "minimatch",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/minimatch.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --cov"
+ },
+ "version": "3.0.4"
+}
diff --git a/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/package.json b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/package.json
new file mode 100644
index 00000000000000..2411f58f59f265
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/node_modules/ignore-walk/package.json
@@ -0,0 +1,71 @@
+{
+ "_from": "ignore-walk@^3.0.0",
+ "_id": "ignore-walk@3.0.0",
+ "_inBundle": false,
+ "_integrity": "sha512-tKHrQ70YReq6IFyAs/XAQy91mgLVpLExNh3HrjExr6vqg8FLq/vd27D4eAN0K2PodhLjiQu5Xc2Q+AkW/T7hKQ==",
+ "_location": "/npm-packlist/ignore-walk",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "ignore-walk@^3.0.0",
+ "name": "ignore-walk",
+ "escapedName": "ignore-walk",
+ "rawSpec": "^3.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^3.0.0"
+ },
+ "_requiredBy": [
+ "/npm-packlist"
+ ],
+ "_resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.0.tgz",
+ "_shasum": "e407919edee5c47c63473b319bfe3ea4a771a57e",
+ "_spec": "ignore-walk@^3.0.0",
+ "_where": "/Users/rebecca/code/npm/node_modules/npm-packlist",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/ignore-walk/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "minimatch": "^3.0.4"
+ },
+ "deprecated": false,
+ "description": "Nested/recursive `.gitignore`/`.npmignore` parsing and filtering.",
+ "devDependencies": {
+ "mkdirp": "^0.5.1",
+ "mutate-fs": "^1.1.0",
+ "rimraf": "^2.6.1",
+ "tap": "^10.3.2"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/isaacs/ignore-walk#readme",
+ "keywords": [
+ "ignorefile",
+ "ignore",
+ "file",
+ ".gitignore",
+ ".npmignore",
+ "glob"
+ ],
+ "license": "ISC",
+ "main": "index.js",
+ "name": "ignore-walk",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/ignore-walk.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100"
+ },
+ "version": "3.0.0"
+}
diff --git a/deps/npm/node_modules/npm-packlist/node_modules/npm-bundled/README.md b/deps/npm/node_modules/npm-packlist/node_modules/npm-bundled/README.md
new file mode 100644
index 00000000000000..2974afa0e0db88
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/node_modules/npm-bundled/README.md
@@ -0,0 +1,46 @@
+# npm-bundled
+
+Run this in a node package, and it'll tell you which things in
+node_modules are bundledDependencies, or transitive dependencies of
+bundled dependencies.
+
+## USAGE
+
+To get the list of deps at the top level that are bundled (or
+transitive deps of a bundled dep) run this:
+
+```js
+const bundled = require('npm-bundled')
+
+// async version
+bundled({ path: '/path/to/pkg/defaults/to/cwd'}, (er, list) => {
+ // er means it had an error, which is _hella_ weird
+ // list is a list of package names, like `fooblz` or `@corp/blerg`
+  // they might not all be deps of the top level, because of transitive deps
+})
+
+// async promise version
+bundled({ path: '/path/to/pkg/defaults/to/cwd'}).then(list => {
+ // so promisey!
+ // actually the callback version returns a promise, too, it just
+ // attaches the supplied callback to the promise
+})
+
+// sync version, throws if there's an error
+const list = bundled({ path: '/path/to/pkg/defaults/to/cwd'})
+```
+
+That's basically all you need to know. If you care to dig into it,
+you can also use the `bundled.Walker` and `bundled.WalkerSync`
+classes to get fancy.
+
+This library does not write anything to the filesystem, but it _may_
+have undefined behavior if the structure of `node_modules` changes
+while it's reading deps.
+
+All symlinks are followed. This can lead to surprising results if a
+symlinked bundled dependency has a missing dependency that is satisfied
+at the top level. Since package creation resolves symlinks as well, this
+is an edge case where package creation and the development environment
+are not aligned, and it is best avoided.
diff --git a/deps/npm/node_modules/npm-packlist/node_modules/npm-bundled/index.js b/deps/npm/node_modules/npm-packlist/node_modules/npm-bundled/index.js
new file mode 100644
index 00000000000000..dadd8473498b01
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/node_modules/npm-bundled/index.js
@@ -0,0 +1,227 @@
+'use strict'
+
+// Walk the tree of deps starting from the top level list of bundled deps.
+// Any deps at the top level that are depended on by a bundled dep that
+// does not have that dep in its own node_modules folder are considered
+// bundled deps as well. This list of names can be passed to npm-packlist
+// as the "bundled" argument. Additionally, packageJsonCache is shared so
+// packlist doesn't have to re-read files already consumed in this pass.
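+//
+// An illustrative sketch of that hand-off (`dir` is a hypothetical path;
+// npm-packlist's `bundled` option is as described above):
+//
+//   const bundled = require('npm-bundled')
+//   const packlist = require('npm-packlist')
+//   bundled({ path: dir }).then(names =>
+//     packlist({ path: dir, bundled: names }))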
+
+const fs = require('fs')
+const path = require('path')
+const EE = require('events').EventEmitter
+
+class BundleWalker extends EE {
+ constructor (opt) {
+ opt = opt || {}
+ super(opt)
+ this.path = path.resolve(opt.path || process.cwd())
+
+ this.parent = opt.parent || null
+ if (this.parent) {
+ this.result = this.parent.result
+ // only collect results in node_modules folders at the top level
+ // since the node_modules in a bundled dep is included always
+ if (!this.parent.parent) {
+ const base = path.basename(this.path)
+ const scope = path.basename(path.dirname(this.path))
+ this.result.add(/^@/.test(scope) ? scope + '/' + base : base)
+ }
+ this.root = this.parent.root
+ this.packageJsonCache = this.parent.packageJsonCache
+ } else {
+ this.result = new Set()
+ this.root = this.path
+ this.packageJsonCache = opt.packageJsonCache || new Map()
+ }
+
+ this.didDone = false
+ this.children = 0
+ this.node_modules = []
+ this.package = null
+ this.bundle = null
+ }
+
+ done () {
+ if (!this.didDone) {
+ this.didDone = true
+ if (!this.parent) {
+ const res = Array.from(this.result)
+ this.result = res
+ this.emit('done', res)
+ } else {
+ this.emit('done')
+ }
+ }
+ }
+
+ start () {
+ const pj = this.path + '/package.json'
+ if (this.packageJsonCache.has(pj))
+ this.onPackage(this.packageJsonCache.get(pj))
+ else
+ this.readPackageJson(pj)
+ return this
+ }
+
+ readPackageJson (pj) {
+ fs.readFile(pj, (er, data) =>
+ er ? this.done() : this.onPackageJson(pj, data))
+ }
+
+ onPackageJson (pj, data) {
+ try {
+ this.package = JSON.parse(data + '')
+ } catch (er) {
+ return this.done()
+ }
+ this.packageJsonCache.set(pj, this.package)
+ this.onPackage(this.package)
+ }
+
+ onPackage (pkg) {
+ // all deps are bundled if we got here as a child.
+ // otherwise, only bundle bundledDeps
+ // Get a unique-ified array with a short-lived Set
+ const bdRaw = this.parent
+ ? Object.keys(pkg.dependencies || {}).concat(
+ Object.keys(pkg.optionalDependencies || {}))
+ : pkg.bundleDependencies || pkg.bundledDependencies || []
+
+ const bd = Array.from(new Set(
+ Array.isArray(bdRaw) ? bdRaw : Object.keys(bdRaw)))
+
+ if (!bd.length)
+ return this.done()
+
+ this.bundle = bd
+ this.readModules()
+ }
+
+ readModules () {
+ readdirNodeModules(this.path + '/node_modules', (er, nm) =>
+ er ? this.onReaddir([]) : this.onReaddir(nm))
+ }
+
+ onReaddir (nm) {
+ // keep track of what we have, in case children need it
+ this.node_modules = nm
+
+ this.bundle.forEach(dep => this.childDep(dep))
+ if (this.children === 0)
+ this.done()
+ }
+
+ childDep (dep) {
+ if (this.node_modules.indexOf(dep) !== -1) {
+ this.child(dep)
+ } else if (this.parent) {
+ this.parent.childDep(dep)
+ }
+ }
+
+ child (dep) {
+ const p = this.path + '/node_modules/' + dep
+ this.children += 1
+ const child = new BundleWalker({
+ path: p,
+ parent: this
+ })
+ child.on('done', _ => {
+ if (--this.children === 0)
+ this.done()
+ })
+ child.start()
+ }
+}
+
+class BundleWalkerSync extends BundleWalker {
+ constructor (opt) {
+ super(opt)
+ }
+
+ start () {
+ super.start()
+ this.done()
+ return this
+ }
+
+ readPackageJson (pj) {
+ try {
+ this.onPackageJson(pj, fs.readFileSync(pj))
+ } catch (er) {}
+ return this
+ }
+
+ readModules () {
+ try {
+ this.onReaddir(readdirNodeModulesSync(this.path + '/node_modules'))
+ } catch (er) {
+ this.onReaddir([])
+ }
+ }
+
+ child (dep) {
+ new BundleWalkerSync({
+ path: this.path + '/node_modules/' + dep,
+ parent: this
+ }).start()
+ }
+}
+
+const readdirNodeModules = (nm, cb) => {
+ fs.readdir(nm, (er, set) => {
+ if (er)
+ cb(er)
+ else {
+ const scopes = set.filter(f => /^@/.test(f))
+ if (!scopes.length)
+ cb(null, set)
+ else {
+ const unscoped = set.filter(f => !/^@/.test(f))
+ let count = scopes.length
+ scopes.forEach(scope => {
+ fs.readdir(nm + '/' + scope, (er, pkgs) => {
+ if (er || !pkgs.length)
+ unscoped.push(scope)
+ else
+ unscoped.push.apply(unscoped, pkgs.map(p => scope + '/' + p))
+ if (--count === 0)
+ cb(null, unscoped)
+ })
+ })
+ }
+ }
+ })
+}
+
+const readdirNodeModulesSync = nm => {
+ const set = fs.readdirSync(nm)
+ const unscoped = set.filter(f => !/^@/.test(f))
+ const scopes = set.filter(f => /^@/.test(f)).map(scope => {
+ try {
+ const pkgs = fs.readdirSync(nm + '/' + scope)
+ return pkgs.length ? pkgs.map(p => scope + '/' + p) : [scope]
+ } catch (er) {
+ return [scope]
+ }
+ }).reduce((a, b) => a.concat(b), [])
+ return unscoped.concat(scopes)
+}
+
+const walk = (options, callback) => {
+ const p = new Promise((resolve, reject) => {
+ new BundleWalker(options).on('done', resolve).on('error', reject).start()
+ })
+ return callback ? p.then(res => callback(null, res), callback) : p
+}
+
+const walkSync = options => {
+ return new BundleWalkerSync(options).start().result
+}
+
+module.exports = walk
+walk.sync = walkSync
+walk.BundleWalker = BundleWalker
+walk.BundleWalkerSync = BundleWalkerSync
diff --git a/deps/npm/node_modules/npm-packlist/node_modules/npm-bundled/package.json b/deps/npm/node_modules/npm-packlist/node_modules/npm-bundled/package.json
new file mode 100644
index 00000000000000..36a9adf793ec28
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/node_modules/npm-bundled/package.json
@@ -0,0 +1,60 @@
+{
+ "_from": "npm-bundled@^1.0.1",
+ "_id": "npm-bundled@1.0.3",
+ "_inBundle": false,
+ "_integrity": "sha512-ByQ3oJ/5ETLyglU2+8dBObvhfWXX8dtPZDMePCahptliFX2iIuhyEszyFk401PZUNQH20vvdW5MLjJxkwU80Ow==",
+ "_location": "/npm-packlist/npm-bundled",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "npm-bundled@^1.0.1",
+ "name": "npm-bundled",
+ "escapedName": "npm-bundled",
+ "rawSpec": "^1.0.1",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.1"
+ },
+ "_requiredBy": [
+ "/npm-packlist"
+ ],
+ "_resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.0.3.tgz",
+ "_shasum": "7e71703d973af3370a9591bafe3a63aca0be2308",
+ "_spec": "npm-bundled@^1.0.1",
+ "_where": "/Users/rebecca/code/npm/node_modules/npm-packlist",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/npm-bundled/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "list things in node_modules that are bundledDependencies, or transitive dependencies thereof",
+ "devDependencies": {
+ "mkdirp": "^0.5.1",
+ "mutate-fs": "^1.1.0",
+ "rimraf": "^2.6.1",
+ "tap": "^10.3.2"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/npm/npm-bundled#readme",
+ "license": "ISC",
+ "main": "index.js",
+ "name": "npm-bundled",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/npm-bundled.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js -J --100"
+ },
+ "version": "1.0.3"
+}
diff --git a/deps/npm/node_modules/npm-packlist/package.json b/deps/npm/node_modules/npm-packlist/package.json
new file mode 100644
index 00000000000000..dcb979191cbfd8
--- /dev/null
+++ b/deps/npm/node_modules/npm-packlist/package.json
@@ -0,0 +1,67 @@
+{
+ "_from": "npm-packlist@latest",
+ "_id": "npm-packlist@1.1.8",
+ "_inBundle": false,
+ "_integrity": "sha512-ZF/1c80o8g+rbJoe898m3p/gpquP/UK92vuTIw4wIVmoBZhRPGCPu8p+DJFV5MOa+HUT7CKVp+g9Hz+ayGW/+A==",
+ "_location": "/npm-packlist",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "tag",
+ "registry": true,
+ "raw": "npm-packlist@latest",
+ "name": "npm-packlist",
+ "escapedName": "npm-packlist",
+ "rawSpec": "latest",
+ "saveSpec": null,
+ "fetchSpec": "latest"
+ },
+ "_requiredBy": [
+ "#USER",
+ "/"
+ ],
+ "_resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.1.8.tgz",
+ "_shasum": "aa7e56734ed038aba50724d79d0bc9c2acad372a",
+ "_spec": "npm-packlist@latest",
+ "_where": "/Users/rebecca/code/npm",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/npm-packlist/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "ignore-walk": "^3.0.0",
+ "npm-bundled": "^1.0.1"
+ },
+ "deprecated": false,
+ "description": "Get a list of the files to add from a folder into an npm package",
+ "devDependencies": {
+ "mkdirp": "^0.5.1",
+ "rimraf": "^2.6.1",
+ "tap": "^10.3.2"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://www.npmjs.com/package/npm-packlist",
+ "license": "ISC",
+ "main": "index.js",
+ "name": "npm-packlist",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/npm-packlist.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100 -J"
+ },
+ "version": "1.1.8"
+}
diff --git a/deps/npm/node_modules/pacote/CHANGELOG.md b/deps/npm/node_modules/pacote/CHANGELOG.md
index f16a2ddac17187..3d9ac9dc213e4e 100644
--- a/deps/npm/node_modules/pacote/CHANGELOG.md
+++ b/deps/npm/node_modules/pacote/CHANGELOG.md
@@ -2,6 +2,113 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+
+## [6.0.2](https://github.com/zkat/pacote/compare/v6.0.1...v6.0.2) (2017-09-06)
+
+
+### Bug Fixes
+
+* **extract:** preserve executable perms on extracted files ([19b3dfd](https://github.com/zkat/pacote/commit/19b3dfd))
+
+
+### Performance Improvements
+
+* replace some calls to .match() with .starts/endsWith() (#115) ([192a02f](https://github.com/zkat/pacote/commit/192a02f))
+
+
+
+
+## [6.0.1](https://github.com/zkat/pacote/compare/v6.0.0...v6.0.1) (2017-08-22)
+
+
+### Bug Fixes
+
+* **finalize:** insist on getting a package.json ([f72ee91](https://github.com/zkat/pacote/commit/f72ee91))
+
+
+
+
+# [6.0.0](https://github.com/zkat/pacote/compare/v5.0.1...v6.0.0) (2017-08-19)
+
+
+### Bug Fixes
+
+* **tar:** bring back the .gitignore -> .npmignore logic (#113) ([0dd518e](https://github.com/zkat/pacote/commit/0dd518e))
+
+
+### BREAKING CHANGES
+
+* **tar:** this reverts a previous change to disable this feature.
+
+
+
+
+## [5.0.1](https://github.com/zkat/pacote/compare/v5.0.0...v5.0.1) (2017-08-17)
+
+
+### Bug Fixes
+
+* **tar:** chown directories on extract as well ([2fa4598](https://github.com/zkat/pacote/commit/2fa4598))
+
+
+
+
+# [5.0.0](https://github.com/zkat/pacote/compare/v4.0.0...v5.0.0) (2017-08-16)
+
+
+### Bug Fixes
+
+* **registry:** Pass maxSockets options down (#110) ([3f05b79](https://github.com/zkat/pacote/commit/3f05b79))
+
+
+### Features
+
+* **deps:** replace tar-fs/tar-stream with tar[@3](https://github.com/3) ([28c80a9](https://github.com/zkat/pacote/commit/28c80a9))
+* **tar:** switch to tarv3 ([53899c7](https://github.com/zkat/pacote/commit/53899c7))
+
+
+### BREAKING CHANGES
+
+* **tar:** this changes the underlying tar library, and thus may introduce some subtle low-level incompatibility. Also:
+
+* The tarball packer built into pacote works much closer to how the one npm injects does.
+* Special characters on Windows will now be escaped the way tar(1) usually does: by replacing them with the `0xf000` masked character on the way out.
+* Directories won't be chowned.
+
+
+
+
+# [4.0.0](https://github.com/zkat/pacote/compare/v3.0.0...v4.0.0) (2017-06-29)
+
+
+### Bug Fixes
+
+* **extract:** revert uid/gid change ([41852e0](https://github.com/zkat/pacote/commit/41852e0))
+
+
+### BREAKING CHANGES
+
+* **extract:** behavior for setting uid/gid on extracted contents was restored to what it was in pacote@2
+
+
+
+
+# [3.0.0](https://github.com/zkat/pacote/compare/v2.7.38...v3.0.0) (2017-06-29)
+
+
+### Bug Fixes
+
+* **extract:** always extract as current user gid/uid ([6fc01a5](https://github.com/zkat/pacote/commit/6fc01a5))
+
+
+### BREAKING CHANGES
+
+* **extract:** pacote will no longer set ownership of extracted
+contents -- uid/gid will *only* be used for the cache and other internal
+details.
+
+
+
## [2.7.38](https://github.com/zkat/pacote/compare/v2.7.37...v2.7.38) (2017-06-29)
diff --git a/deps/npm/node_modules/pacote/README.md b/deps/npm/node_modules/pacote/README.md
index e931dd8099e5be..339777624927f7 100644
--- a/deps/npm/node_modules/pacote/README.md
+++ b/deps/npm/node_modules/pacote/README.md
@@ -22,7 +22,7 @@ needed to reduce excess operations, using [`cacache`](https://npm.im/cacache).
* [`extract`](#extract)
* [`prefetch`](#prefetch)
* [`options`](#options)
- * [`clearMemoized`](#clear-memoized)
+ * [`clearMemoized`](#clearMemoized)
### Example
@@ -151,6 +151,7 @@ directly for matching contents before performing any other operations.
##### `opts.@somescope:registry`
##### `opts.auth`
##### `opts.log`
+##### `opts.maxSockets`
Default: `silentNpmLog`
diff --git a/deps/npm/node_modules/pacote/extract.js b/deps/npm/node_modules/pacote/extract.js
index 01036519a978ce..4312f1a9aa28e5 100644
--- a/deps/npm/node_modules/pacote/extract.js
+++ b/deps/npm/node_modules/pacote/extract.js
@@ -4,8 +4,8 @@ const BB = require('bluebird')
const cacache = require('cacache')
const extractStream = require('./lib/extract-stream')
+const mkdirp = BB.promisify(require('mkdirp'))
const npa = require('npm-package-arg')
-const pipe = BB.promisify(require('mississippi').pipe)
const optCheck = require('./lib/util/opt-check')
const retry = require('promise-retry')
const rimraf = BB.promisify(require('rimraf'))
@@ -59,21 +59,34 @@ function extract (spec, dest, opts) {
}
function extractByDigest (start, spec, dest, opts) {
- const xtractor = extractStream(dest, opts)
- const cached = cacache.get.stream.byDigest(opts.cache, opts.integrity, opts)
- return pipe(cached, xtractor).then(() => {
+ return mkdirp(dest).then(() => {
+ const xtractor = extractStream(dest, opts)
+ const cached = cacache.get.stream.byDigest(opts.cache, opts.integrity, opts)
+ cached.pipe(xtractor)
+ return new BB((resolve, reject) => {
+ cached.on('error', reject)
+ xtractor.on('error', reject)
+ xtractor.on('close', resolve)
+ })
+ }).then(() => {
opts.log.silly('pacote', `${spec} extracted to ${dest} by content address ${Date.now() - start}ms`)
})
}
let fetch
function extractByManifest (start, spec, dest, opts) {
- const xtractor = extractStream(dest, opts)
- return BB.resolve(null).then(() => {
+ return mkdirp(dest).then(() => {
+ const xtractor = extractStream(dest, opts)
if (!fetch) {
fetch = require('./lib/fetch')
}
- return pipe(fetch.tarball(spec, opts), xtractor)
+ const tardata = fetch.tarball(spec, opts)
+ tardata.pipe(xtractor)
+ return new BB((resolve, reject) => {
+ tardata.on('error', reject)
+ xtractor.on('error', reject)
+ xtractor.on('close', resolve)
+ })
}).then(() => {
opts.log.silly('pacote', `${spec} extracted in ${Date.now() - start}ms`)
})
diff --git a/deps/npm/node_modules/pacote/lib/extract-stream.js b/deps/npm/node_modules/pacote/lib/extract-stream.js
index f6f68bc1ef5524..b3c720b07f39e7 100644
--- a/deps/npm/node_modules/pacote/lib/extract-stream.js
+++ b/deps/npm/node_modules/pacote/lib/extract-stream.js
@@ -1,61 +1,50 @@
'use strict'
-const gunzip = require('./util/gunzip-maybe')
const path = require('path')
-const pipeline = require('mississippi').pipeline
-const tar = require('tar-fs')
+const tar = require('tar')
module.exports = extractStream
+module.exports._computeMode = computeMode
+
+function computeMode (fileMode, optMode, umask) {
+ return (fileMode | optMode) & ~(umask || 0)
+}
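+
+// For example (illustrative): computeMode(0o644, 0o666, 0o022) === 0o644;
+// the optMode bits are OR'd in, then the umask bits are cleared.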
+
function extractStream (dest, opts) {
opts = opts || {}
- const sawIgnores = {}
- return pipeline(gunzip(), tar.extract(dest, {
- map: (header) => {
- if (process.platform !== 'win32') {
- header.uid = opts.uid == null ? header.uid : opts.uid
- header.gid = opts.gid == null ? header.gid : opts.gid
+ const sawIgnores = new Set()
+ return tar.x({
+ cwd: dest,
+ filter: (name, entry) => !entry.header.type.match(/^.*link$/i),
+ strip: 1,
+ onwarn: msg => opts.log && opts.log.warn('tar', msg),
+ uid: opts.uid,
+ gid: opts.gid,
+ onentry (entry) {
+ if (entry.type.toLowerCase() === 'file') {
+ entry.mode = computeMode(entry.mode, opts.fmode, opts.umask)
+ } else if (entry.type.toLowerCase() === 'directory') {
+ entry.mode = computeMode(entry.mode, opts.dmode, opts.umask)
+ } else {
+ entry.mode = computeMode(entry.mode, 0, opts.umask)
}
+
// Note: This mirrors logic in the fs read operations that are
// employed during tarball creation, in the fstream-npm module.
// It is duplicated here to handle tarballs that are created
// using other means, such as system tar or git archive.
- if (header.type === 'file') {
- const base = path.basename(header.name)
+ if (entry.type.toLowerCase() === 'file') {
+ const base = path.basename(entry.path)
if (base === '.npmignore') {
- sawIgnores[header.name] = true
+ sawIgnores.add(entry.path)
} else if (base === '.gitignore') {
- const npmignore = header.name.replace(/\.gitignore$/, '.npmignore')
- if (!sawIgnores[npmignore]) {
+ const npmignore = entry.path.replace(/\.gitignore$/, '.npmignore')
+ if (!sawIgnores.has(npmignore)) {
// Rename, may be clobbered later.
- header.name = npmignore
+ entry.path = npmignore
}
}
}
- return header
- },
- ignore: makeIgnore(opts.log),
- dmode: opts.dmode,
- fmode: opts.fmode,
- umask: opts.umask,
- strip: 1
- }))
-}
-
-function makeIgnore (log) {
- const sawIgnores = {}
- return (name, header) => _ignore(name, header, sawIgnores, log)
-}
-
-function _ignore (name, header, sawIgnores, logger) {
- if (header.type.match(/^.*link$/)) {
- if (logger) {
- logger.warn(
- 'extract-stream',
- 'excluding symbolic link',
- header.name, '->', header.linkname)
}
- return true
- }
-
- return false
+ })
}
diff --git a/deps/npm/node_modules/pacote/lib/fetchers/registry/fetch.js b/deps/npm/node_modules/pacote/lib/fetchers/registry/fetch.js
index 1c6c8e8d6e2ca9..a947ccea55a43c 100644
--- a/deps/npm/node_modules/pacote/lib/fetchers/registry/fetch.js
+++ b/deps/npm/node_modules/pacote/lib/fetchers/registry/fetch.js
@@ -22,6 +22,7 @@ function regFetch (uri, registry, opts) {
integrity: opts.integrity,
key: opts.key,
localAddress: opts.localAddress,
+ maxSockets: opts.maxSockets,
memoize: opts.memoize,
noProxy: opts.noProxy,
Promise: BB,
diff --git a/deps/npm/node_modules/pacote/lib/finalize-manifest.js b/deps/npm/node_modules/pacote/lib/finalize-manifest.js
index cd303a9fa3ba44..321b37cdcaeb1d 100644
--- a/deps/npm/node_modules/pacote/lib/finalize-manifest.js
+++ b/deps/npm/node_modules/pacote/lib/finalize-manifest.js
@@ -6,18 +6,17 @@ const cacache = require('cacache')
const cacheKey = require('./util/cache-key')
const fetchFromManifest = require('./fetch').fromManifest
const finished = BB.promisify(require('mississippi').finished)
-const gunzip = require('./util/gunzip-maybe')
const minimatch = require('minimatch')
const normalize = require('normalize-package-data')
const optCheck = require('./util/opt-check')
const path = require('path')
const pipe = BB.promisify(require('mississippi').pipe)
const ssri = require('ssri')
-const tar = require('tar-stream')
+const tar = require('tar')
// `finalizeManifest` takes as input the various kinds of manifests that
-// manifest handlers ('lib/handlers/*/manifest.js') return, and makes sure they
-// are:
+// manifest handlers ('lib/fetchers/*.js#manifest()') return, and makes sure
+// they are:
//
// * filled out with any required data that the handler couldn't fill in
// * formatted consistently
@@ -149,23 +148,23 @@ function tarballedProps (pkg, spec, opts) {
} else {
opts = optCheck(opts)
const tarStream = fetchFromManifest(pkg, spec, opts)
- const extracted = needsExtract && tar.extract()
- extracted && extracted.on('entry', (h, str, next) => {
- // Drain it
- str.on('data', () => {}).on('end', next).on('error', next)
- })
+ const extracted = needsExtract && new tar.Parse()
return BB.join(
needsShrinkwrap && jsonFromStream('npm-shrinkwrap.json', extracted),
needsManifest && jsonFromStream('package.json', extracted),
needsBin && getPaths(extracted),
needsHash && ssri.fromStream(tarStream, { algorithms: ['sha1'] }),
- needsExtract && pipe(tarStream, gunzip(), extracted),
+ needsExtract && pipe(tarStream, extracted),
(sr, mani, paths, hash) => {
+ if (needsManifest && !mani) {
+ const err = new Error(`Non-registry package missing package.json: ${spec}.`)
+ err.code = 'ENOPACKAGEJSON'
+ throw err
+ }
const extraProps = mani || {}
delete extraProps._resolved
// drain out the rest of the tarball
- tarStream.unpipe()
- tarStream.on('data', () => {})
+ tarStream.resume()
// if we have directories.bin, we need to collect any matching files
// to add to bin
if (paths && paths.length) {
@@ -199,25 +198,22 @@ function tarballedProps (pkg, spec, opts) {
function jsonFromStream (filename, dataStream) {
return BB.fromNode(cb => {
dataStream.on('error', cb)
- dataStream.on('finish', cb)
- dataStream.on('entry', function handler (header, stream, next) {
- const filePath = header.name.replace(/[^/]+\//, '')
+ dataStream.on('close', cb)
+ dataStream.on('entry', entry => {
+ const filePath = entry.header.path.replace(/[^/]+\//, '')
if (filePath !== filename) {
- next()
+ entry.resume()
} else {
let data = ''
- stream.on('data', d => { data += d })
- stream.on('error', cb)
- finished(stream).then(() => {
- dataStream.removeListener('entry', handler)
+ entry.on('data', d => { data += d })
+ entry.on('error', cb)
+ finished(entry).then(() => {
try {
cb(null, JSON.parse(data))
- next()
} catch (err) {
cb(err)
}
}, err => {
- dataStream.removeListener('entry', handler)
cb(err)
})
}
@@ -229,12 +225,11 @@ function getPaths (dataStream) {
return BB.fromNode(cb => {
let paths = []
dataStream.on('error', cb)
- dataStream.on('finish', () => cb(null, paths))
- dataStream.on('entry', function handler (header, stream, next) {
- const filePath = header.name.replace(/[^/]+\//, '')
- stream.on('data', () => {})
+ dataStream.on('close', () => cb(null, paths))
+ dataStream.on('entry', function handler (entry) {
+ const filePath = entry.header.path.replace(/[^/]+\//, '')
+ entry.resume()
paths.push(filePath)
- next()
})
})
}
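
For orientation, here is a minimal standalone sketch (not the npm code itself) of the `tar.Parse`-based extraction that `jsonFromStream` now uses. Note that `tar`'s parser detects gzipped input on its own, which is also why the `gunzip-maybe` step above could be dropped; the path-stripping regex here is simplified.

```javascript
// Sketch: pull package.json out of a registry-style tarball with tar.Parse.
// Assumes node-tar@4; tar.Parse transparently handles gzipped input.
const fs = require('fs')
const tar = require('tar')

function readPackageJson (tarballPath) {
  return new Promise((resolve, reject) => {
    const parser = new tar.Parse()
    parser.on('entry', entry => {
      // registry tarballs prefix every path with 'package/' (or similar)
      const filePath = entry.header.path.replace(/^[^/]+\//, '')
      if (filePath !== 'package.json') {
        entry.resume() // drain entries we don't need
        return
      }
      let data = ''
      entry.on('data', d => { data += d })
      entry.on('end', () => {
        try { resolve(JSON.parse(data)) } catch (err) { reject(err) }
      })
    })
    parser.on('error', reject)
    fs.createReadStream(tarballPath).pipe(parser)
  })
}
```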
diff --git a/deps/npm/node_modules/pacote/lib/util/git.js b/deps/npm/node_modules/pacote/lib/util/git.js
index ed1b49d56ac4ec..a24edccdd05dea 100644
--- a/deps/npm/node_modules/pacote/lib/util/git.js
+++ b/deps/npm/node_modules/pacote/lib/util/git.js
@@ -25,6 +25,7 @@ const GOOD_ENV_VARS = new Set([
'GIT_SSL_NO_VERIFY'
])
+const GIT_ = 'GIT_'
let GITENV
function gitEnv () {
if (GITENV) { return GITENV }
@@ -35,7 +36,7 @@ function gitEnv () {
GIT_TEMPLATE_DIR: tmpName
}
Object.keys(process.env).forEach(k => {
- if (GOOD_ENV_VARS.has(k) || !k.match(/^GIT_/)) {
+ if (GOOD_ENV_VARS.has(k) || !k.startsWith(GIT_)) {
GITENV[k] = process.env[k]
}
})
@@ -93,6 +94,7 @@ function headSha (repo, opts) {
})
}
+const CARET_BRACES = '^{}'
const REVS = new LRU({
max: 100,
maxAge: 5 * 60 * 1000
@@ -122,7 +124,7 @@ function revs (repo, opts) {
const sha = split[0].trim()
const ref = split[1].trim().match(/(?:refs\/[^/]+\/)?(.*)/)[1]
if (!ref) { return revs } // ???
- if (ref.match(/\^\{\}$/)) { return revs } // refs/tags/x^{} crap
+ if (ref.endsWith(CARET_BRACES)) { return revs } // refs/tags/x^{} crap
const type = refType(line)
const doc = {sha, ref, type}
@@ -202,12 +204,15 @@ function checkGit () {
}
}
+const REFS_TAGS = '/refs/tags/'
+const REFS_HEADS = '/refs/heads/'
+const HEAD = 'HEAD'
function refType (ref) {
- return ref.match(/refs\/tags\/.*$/)
+ return ref.indexOf(REFS_TAGS) !== -1
? 'tag'
- : ref.match(/refs\/heads\/.*$/)
+ : ref.indexOf(REFS_HEADS) !== -1
? 'branch'
- : ref.match(/HEAD$/)
+ : ref.endsWith(HEAD)
? 'head'
: 'other'
}
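
A simplified standalone sketch of the substring-based ref classification introduced above; the constants here are illustrative rather than the exact ones from the diff:

```javascript
// Classify a git ref by substring checks instead of regex matching,
// mirroring the refType() change above.
function refType (ref) {
  if (ref.includes('refs/tags/')) return 'tag'
  if (ref.includes('refs/heads/')) return 'branch'
  if (ref.endsWith('HEAD')) return 'head'
  return 'other'
}

console.log(refType('refs/tags/v1.0.0'))   // 'tag'
console.log(refType('refs/heads/master'))  // 'branch'
console.log(refType('HEAD'))               // 'head'
console.log(refType('refs/pull/42/merge')) // 'other'
```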
diff --git a/deps/npm/node_modules/pacote/lib/util/pack-dir.js b/deps/npm/node_modules/pacote/lib/util/pack-dir.js
index 54a94e5862c60a..7625f4faf8e4a9 100644
--- a/deps/npm/node_modules/pacote/lib/util/pack-dir.js
+++ b/deps/npm/node_modules/pacote/lib/util/pack-dir.js
@@ -5,39 +5,40 @@ const BB = require('bluebird')
const cacache = require('cacache')
const cacheKey = require('./cache-key')
const optCheck = require('./opt-check')
+const packlist = require('npm-packlist')
const pipe = BB.promisify(require('mississippi').pipe)
-const tar = require('tar-fs')
+const tar = require('tar')
module.exports = packDir
function packDir (manifest, label, dir, target, opts) {
opts = optCheck(opts)
const packer = opts.dirPacker
- ? opts.dirPacker(manifest, dir)
- : tar.pack(dir, {
- map: header => {
- header.name = 'package/' + header.name
- header.mtime = 0 // make tarballs idempotent
- return header
- },
- ignore: (name) => {
- return name.match(/\.git/)
- }
- })
+ ? BB.resolve(opts.dirPacker(manifest, dir))
+ : mkPacker(dir)
if (!opts.cache) {
- return pipe(packer, target).catch(err => {
- throw err
- })
+ return packer.then(packer => pipe(packer, target))
} else {
const cacher = cacache.put.stream(
opts.cache, cacheKey('packed-dir', label), opts
).on('integrity', i => {
target.emit('integrity', i)
})
- return BB.all([
+ return packer.then(packer => BB.all([
pipe(packer, cacher),
pipe(packer, target)
- ])
+ ]))
}
}
+
+function mkPacker (dir) {
+ return packlist({path: dir}).then(files => {
+ return tar.c({
+ cwd: dir,
+ gzip: true,
+ portable: true,
+ prefix: 'package/'
+ }, files)
+ })
+}
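
Roughly, `mkPacker` works like the following hedged sketch: `npm-packlist` computes the file list (honoring `files`, `.npmignore`, and friends), and `tar.c` streams those files gzipped under a `package/` prefix. The paths below are hypothetical.

```javascript
const fs = require('fs')
const packlist = require('npm-packlist')
const tar = require('tar')

packlist({ path: '/path/to/pkg' }).then(files => {
  const stream = tar.c({
    cwd: '/path/to/pkg',
    gzip: true,
    portable: true,   // normalize platform-specific header fields
    prefix: 'package/'
  }, files)
  stream.pipe(fs.createWriteStream('/tmp/pkg.tgz'))
})
```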
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/CHANGELOG.md b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/CHANGELOG.md
index a8ed4ca56f98bc..59698d1c275748 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/CHANGELOG.md
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/CHANGELOG.md
@@ -2,6 +2,21 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+
+# [2.5.0](https://github.com/zkat/make-fetch-happen/compare/v2.4.13...v2.5.0) (2017-08-24)
+
+
+### Bug Fixes
+
+* **agent:** support timeout durations greater than 30 seconds ([04875ae](https://github.com/zkat/make-fetch-happen/commit/04875ae)), closes [#35](https://github.com/zkat/make-fetch-happen/issues/35)
+
+
+### Features
+
+* **cache:** export cache deletion functionality (#40) ([3da4250](https://github.com/zkat/make-fetch-happen/commit/3da4250))
+
+
+
## [2.4.13](https://github.com/zkat/make-fetch-happen/compare/v2.4.12...v2.4.13) (2017-06-29)
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/agent.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/agent.js
index a3c910eb9340cc..69bfab66476bbf 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/agent.js
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/agent.js
@@ -44,16 +44,26 @@ function getAgent (uri, opts) {
} else if (!isHttps && !HttpAgent) {
HttpAgent = require('agentkeepalive')
}
+
+ // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout
+ // of zero disables the timeout behavior (OS limits still apply). Else, if
+ // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that
+ // the node-fetch-npm timeout will always fire first, giving us more
+ // consistent errors.
+ const agentTimeout = opts.timeout === 0 ? 0 : opts.timeout + 1
+
const agent = isHttps ? new HttpsAgent({
maxSockets: opts.maxSockets || 15,
ca: opts.ca,
cert: opts.cert,
key: opts.key,
localAddress: opts.localAddress,
- rejectUnauthorized: opts.strictSSL
+ rejectUnauthorized: opts.strictSSL,
+ timeout: agentTimeout
}) : new HttpAgent({
maxSockets: opts.maxSockets || 15,
- localAddress: opts.localAddress
+ localAddress: opts.localAddress,
+ timeout: agentTimeout
})
AGENT_CACHE.set(key, agent)
return agent
@@ -130,6 +140,7 @@ function getProxy (proxyUrl, opts, isHttps) {
ca: opts.ca,
cert: opts.cert,
key: opts.key,
+ timeout: opts.timeout === 0 ? 0 : opts.timeout + 1,
localAddress: opts.localAddress,
maxSockets: opts.maxSockets || 15,
rejectUnauthorized: opts.strictSSL
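
The timeout handoff above, isolated as a sketch: a value of 0 disables the socket timeout entirely, while any other value is bumped by one millisecond so the node-fetch-npm timeout always fires first.

```javascript
// Mirrors the agentTimeout computation above (standalone for illustration).
function agentTimeout (timeout) {
  return timeout === 0 ? 0 : timeout + 1
}

console.log(agentTimeout(0))     // 0 -> timeout disabled (OS limits still apply)
console.log(agentTimeout(30000)) // 30001 -> fires just after the fetch timeout
```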
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/index.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/index.js
index 18a2893538ad73..d82811b63db01b 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/index.js
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/index.js
@@ -42,9 +42,22 @@ cachingFetch.defaults = function (_uri, _opts) {
}
defaultedFetch.defaults = fetch.defaults
+ defaultedFetch.delete = fetch.delete
return defaultedFetch
}
+cachingFetch.delete = cacheDelete
+function cacheDelete (uri, opts) {
+ opts = configureOptions(opts)
+ if (opts.cacheManager) {
+ const req = new fetch.Request(uri, {
+ method: opts.method,
+ headers: opts.headers
+ })
+ return opts.cacheManager.delete(req, opts)
+ }
+}
+
function initializeCache (opts) {
if (typeof opts.cacheManager === 'string') {
if (!Cache) {
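
A hedged usage sketch of the new cache-deletion export; the cache path is hypothetical, and without a `cacheManager` the call is a no-op:

```javascript
const fetch = require('make-fetch-happen')

// Evict a single cached response by URL from the given cache directory.
fetch.delete('https://registry.npmjs.org/pacote', {
  cacheManager: '/tmp/my-fetch-cache'
})
```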
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/agentkeepalive/node_modules/humanize-ms/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/agentkeepalive/node_modules/humanize-ms/package.json
index 14ea394c4b2d93..b00c740c85a444 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/agentkeepalive/node_modules/humanize-ms/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/agentkeepalive/node_modules/humanize-ms/package.json
@@ -1,5 +1,5 @@
{
- "_from": "humanize-ms@^1.2.0",
+ "_from": "humanize-ms@^1.2.1",
"_id": "humanize-ms@1.2.1",
"_inBundle": false,
"_integrity": "sha1-xG4xWaKT9riW2ikxbYtv6Lt5u+0=",
@@ -8,19 +8,19 @@
"_requested": {
"type": "range",
"registry": true,
- "raw": "humanize-ms@^1.2.0",
+ "raw": "humanize-ms@^1.2.1",
"name": "humanize-ms",
"escapedName": "humanize-ms",
- "rawSpec": "^1.2.0",
+ "rawSpec": "^1.2.1",
"saveSpec": null,
- "fetchSpec": "^1.2.0"
+ "fetchSpec": "^1.2.1"
},
"_requiredBy": [
"/pacote/make-fetch-happen/agentkeepalive"
],
"_resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz",
"_shasum": "c46e3159a293f6b896da29316d8b6fe8bb79bbed",
- "_spec": "humanize-ms@^1.2.0",
+ "_spec": "humanize-ms@^1.2.1",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/agentkeepalive",
"author": {
"name": "dead-horse",
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/agentkeepalive/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/agentkeepalive/package.json
index 2468ecf355df77..a9f0b55de031aa 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/agentkeepalive/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/agentkeepalive/package.json
@@ -1,5 +1,5 @@
{
- "_from": "agentkeepalive@^3.1.0",
+ "_from": "agentkeepalive@^3.3.0",
"_id": "agentkeepalive@3.3.0",
"_inBundle": false,
"_integrity": "sha512-9yhcpXti2ZQE7bxuCsjjWNIZoQOd9sZ1ZBovHG0YeCRohFv73SLvcm73PC9T3olM4GyozaQb+4MGdQpcD8m7NQ==",
@@ -8,20 +8,20 @@
"_requested": {
"type": "range",
"registry": true,
- "raw": "agentkeepalive@^3.1.0",
+ "raw": "agentkeepalive@^3.3.0",
"name": "agentkeepalive",
"escapedName": "agentkeepalive",
- "rawSpec": "^3.1.0",
+ "rawSpec": "^3.3.0",
"saveSpec": null,
- "fetchSpec": "^3.1.0"
+ "fetchSpec": "^3.3.0"
},
"_requiredBy": [
"/pacote/make-fetch-happen"
],
"_resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-3.3.0.tgz",
"_shasum": "6d5de5829afd3be2712201a39275fd11c651857c",
- "_spec": "agentkeepalive@^3.1.0",
- "_where": "/Users/rebecca/code/npm/node_modules/pacote/node_modules/make-fetch-happen",
+ "_spec": "agentkeepalive@^3.3.0",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen",
"author": {
"name": "fengmk2",
"email": "fengmk2@gmail.com",
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-cache-semantics/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-cache-semantics/package.json
index 8da8d6d7508ae7..fa8235f1ed9dfe 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-cache-semantics/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-cache-semantics/package.json
@@ -1,6 +1,7 @@
{
"_from": "http-cache-semantics@^3.7.3",
"_id": "http-cache-semantics@3.7.3",
+ "_inBundle": false,
"_integrity": "sha1-LzXFMuzSnx5UE7mvgztySjxvf3I=",
"_location": "/pacote/make-fetch-happen/http-cache-semantics",
"_phantomChildren": {},
@@ -19,7 +20,6 @@
],
"_resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-3.7.3.tgz",
"_shasum": "2f35c532ecd29f1e5413b9af833b724a3c6f7f72",
- "_shrinkwrap": null,
"_spec": "http-cache-semantics@^3.7.3",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen",
"author": {
@@ -27,12 +27,10 @@
"email": "kornel@geekhood.net",
"url": "https://kornel.ski/"
},
- "bin": null,
"bugs": {
"url": "https://github.com/pornel/http-cache-semantics/issues"
},
"bundleDependencies": false,
- "dependencies": {},
"deprecated": false,
"description": "Parses Cache-Control and other headers. Helps building correct HTTP caches and proxies",
"devDependencies": {
@@ -49,8 +47,6 @@
"license": "BSD-2-Clause",
"main": "node4/index.js",
"name": "http-cache-semantics",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
"url": "git+https://github.com/pornel/http-cache-semantics.git"
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/History.md b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/History.md
index b79a539629609a..a81fb17a97367e 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/History.md
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/History.md
@@ -1,4 +1,9 @@
+4.1.1 / 2017-07-20
+==================
+
+ * Correct `https.request()` with a String (#9)
+
4.1.0 / 2017-06-26
==================
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/package.json
index b123eaf34e9d26..4c28ce76e26a42 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/package.json
@@ -1,8 +1,8 @@
{
"_from": "agent-base@4",
- "_id": "agent-base@4.1.0",
+ "_id": "agent-base@4.1.1",
"_inBundle": false,
- "_integrity": "sha1-IOF0Ac1Js8B2v1akvGxbQ2/6jVU=",
+ "_integrity": "sha512-yWGUUmCZD/33IRjG2It94PzixT8lX+47Uq8fjmd0cgQWITCMrJuXFaVIMnGDmDnZGGKAGdwTx8UGeU8lMR2urA==",
"_location": "/pacote/make-fetch-happen/http-proxy-agent/agent-base",
"_phantomChildren": {},
"_requested": {
@@ -18,8 +18,8 @@
"_requiredBy": [
"/pacote/make-fetch-happen/http-proxy-agent"
],
- "_resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.1.0.tgz",
- "_shasum": "20e17401cd49b3c076bf56a4bc6c5b436ffa8d55",
+ "_resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.1.1.tgz",
+ "_shasum": "92d8a4fc2524a3b09b3666a33b6c97960f23d6a4",
"_spec": "agent-base@4",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent",
"author": {
@@ -61,5 +61,5 @@
"scripts": {
"test": "mocha --reporter spec"
},
- "version": "4.1.0"
+ "version": "4.1.1"
}
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/patch-core.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/patch-core.js
index 05cbaa1e70a56d..a3f7bc6160c156 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/patch-core.js
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/patch-core.js
@@ -11,8 +11,8 @@ const https = require('https');
*/
https.request = (function(request) {
return function(_options, cb) {
- let options
- if (typeof options === 'string') {
+ let options;
+ if (typeof _options === 'string') {
options = url.parse(_options);
} else {
options = Object.assign({}, _options);
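
The one-character fix above matters: the old code tested the freshly declared (and still undefined) local `options` instead of the incoming `_options`, so string URLs were never parsed. A minimal illustration of the corrected shape:

```javascript
// Before the fix, `typeof options === 'string'` could never be true, so a
// string URL fell through to Object.assign({}, 'https://...'), producing a
// broken options object.
function normalize (_options) {
  let options;
  if (typeof _options === 'string') { // fixed: test the argument, not the local
    options = require('url').parse(_options);
  } else {
    options = Object.assign({}, _options);
  }
  return options;
}

console.log(normalize('https://example.com/').hostname); // 'example.com'
```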
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/test/test.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/test/test.js
index 43217d4273a73d..23814e2c326962 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/test/test.js
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/http-proxy-agent/node_modules/agent-base/test/test.js
@@ -443,6 +443,17 @@ describe('"https" module', function() {
done();
});
+ it('should work with a String URL', function(done) {
+ var endpoint = 'https://127.0.0.1:' + port;
+ var req = https.get(endpoint);
+
+ // it's gonna error out since `rejectUnauthorized` is not being passed in
+ req.on('error', function(err) {
+ assert.equal(err.code, 'DEPTH_ZERO_SELF_SIGNED_CERT');
+ done();
+ });
+ });
+
it('should work for basic HTTPS requests', function(done) {
var called = false;
var agent = new Agent(function(req, opts, fn) {
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/History.md b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/History.md
index 8f96c3949078a1..c0b5cef88b11c5 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/History.md
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/History.md
@@ -1,4 +1,12 @@
+2.1.0 / 2017-08-08
+==================
+
+ * only include the port number in the Host header when non-default port (#22)
+ * set ALPN to "http 1.1" by default when using tlsproxy (#25)
+ * only set `ALPNProtocols` when the property does not already exist
+ * support SNI (#14)
+
2.0.0 / 2017-06-26
==================
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/index.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/index.js
index 33207c1454c26e..699857804e0c2e 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/index.js
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/index.js
@@ -1,4 +1,3 @@
-
/**
* Module dependencies.
*/
@@ -23,10 +22,13 @@ module.exports = HttpsProxyAgent;
* @api public
*/
-function HttpsProxyAgent (opts) {
+function HttpsProxyAgent(opts) {
if (!(this instanceof HttpsProxyAgent)) return new HttpsProxyAgent(opts);
if ('string' == typeof opts) opts = url.parse(opts);
- if (!opts) throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!');
+ if (!opts)
+ throw new Error(
+ 'an HTTP(S) proxy server `host` and `port` must be specified!'
+ );
debug('creating new HttpsProxyAgent instance: %o', opts);
Agent.call(this, opts);
@@ -39,6 +41,12 @@ function HttpsProxyAgent (opts) {
proxy.host = proxy.hostname || proxy.host;
proxy.port = +proxy.port || (this.secureProxy ? 443 : 80);
+ // ALPN is supported by Node.js >= v5.
+ // attempt to negotiate http/1.1 for proxy servers that support http/2
+ if (this.secureProxy && !('ALPNProtocols' in proxy)) {
+ proxy.ALPNProtocols = ['http 1.1']
+ }
+
if (proxy.host && proxy.path) {
// if both a `host` and `path` are specified then it's most likely the
// result of a `url.parse()` call... we need to remove the `path` portion so
@@ -57,7 +65,7 @@ inherits(HttpsProxyAgent, Agent);
* @api public
*/
-HttpsProxyAgent.prototype.callback = function connect (req, opts, fn) {
+HttpsProxyAgent.prototype.callback = function connect(req, opts, fn) {
var proxy = this.proxy;
// create a socket connection to the proxy server
@@ -75,13 +83,13 @@ HttpsProxyAgent.prototype.callback = function connect (req, opts, fn) {
var buffers = [];
var buffersLength = 0;
- function read () {
+ function read() {
var b = socket.read();
if (b) ondata(b);
else socket.once('readable', read);
}
- function cleanup () {
+ function cleanup() {
socket.removeListener('data', ondata);
socket.removeListener('end', onend);
socket.removeListener('error', onerror);
@@ -89,20 +97,20 @@ HttpsProxyAgent.prototype.callback = function connect (req, opts, fn) {
socket.removeListener('readable', read);
}
- function onclose (err) {
+ function onclose(err) {
debug('onclose had error %o', err);
}
- function onend () {
+ function onend() {
debug('onend');
}
- function onerror (err) {
+ function onerror(err) {
cleanup();
fn(err);
}
- function ondata (b) {
+ function ondata(b) {
buffers.push(b);
buffersLength += b.length;
var buffered = Buffer.concat(buffers, buffersLength);
@@ -133,9 +141,12 @@ HttpsProxyAgent.prototype.callback = function connect (req, opts, fn) {
if (opts.secureEndpoint) {
// since the proxy is connecting to an SSL server, we have
// to upgrade this socket connection to an SSL connection
- debug('upgrading proxy-connected socket to TLS connection: %o', opts.host);
+ debug(
+ 'upgrading proxy-connected socket to TLS connection: %o',
+ opts.host
+ );
opts.socket = socket;
- opts.servername = opts.host;
+ opts.servername = opts.servername || opts.host;
opts.host = null;
opts.hostname = null;
opts.port = null;
@@ -159,7 +170,7 @@ HttpsProxyAgent.prototype.callback = function connect (req, opts, fn) {
}
}
- function onsocket (socket) {
+ function onsocket(socket) {
// replay the "buffers" Buffer onto the `socket`, since at this point
// the HTTP module machinery has been hooked up for the user
if ('function' == typeof socket.ondata) {
@@ -192,13 +203,26 @@ HttpsProxyAgent.prototype.callback = function connect (req, opts, fn) {
var headers = Object.assign({}, proxy.headers);
if (proxy.auth) {
- headers['Proxy-Authorization'] = 'Basic ' + new Buffer(proxy.auth).toString('base64');
+ headers['Proxy-Authorization'] =
+ 'Basic ' + new Buffer(proxy.auth).toString('base64');
+ }
+
+ // the Host header should only include the port
+ // number when it is a non-standard port
+ var host = opts.host;
+ if (!isDefaultPort(opts.port, opts.secureEndpoint)) {
+ host += ':' + opts.port;
}
- headers['Host'] = hostname;
+ headers['Host'] = host;
+
headers['Connection'] = 'close';
- Object.keys(headers).forEach(function (name) {
+ Object.keys(headers).forEach(function(name) {
msg += name + ': ' + headers[name] + '\r\n';
});
socket.write(msg + '\r\n');
};
+
+function isDefaultPort(port, secure) {
+ return Boolean((!secure && port === 80) || (secure && port === 443));
+}
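
The new Host-header rule in isolation, as a sketch: the port is appended only when it is non-default for the scheme.

```javascript
function isDefaultPort (port, secure) {
  return Boolean((!secure && port === 80) || (secure && port === 443));
}

function hostHeader (host, port, secure) {
  return isDefaultPort(port, secure) ? host : host + ':' + port;
}

console.log(hostHeader('example.com', 443, true));  // 'example.com'
console.log(hostHeader('example.com', 8443, true)); // 'example.com:8443'
```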
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/History.md b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/History.md
index b79a539629609a..a81fb17a97367e 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/History.md
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/History.md
@@ -1,4 +1,9 @@
+4.1.1 / 2017-07-20
+==================
+
+ * Correct `https.request()` with a String (#9)
+
4.1.0 / 2017-06-26
==================
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/package.json
index 4b0d6c182cdd93..2de34cbacff785 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/package.json
@@ -1,8 +1,8 @@
{
"_from": "agent-base@^4.1.0",
- "_id": "agent-base@4.1.0",
+ "_id": "agent-base@4.1.1",
"_inBundle": false,
- "_integrity": "sha1-IOF0Ac1Js8B2v1akvGxbQ2/6jVU=",
+ "_integrity": "sha512-yWGUUmCZD/33IRjG2It94PzixT8lX+47Uq8fjmd0cgQWITCMrJuXFaVIMnGDmDnZGGKAGdwTx8UGeU8lMR2urA==",
"_location": "/pacote/make-fetch-happen/https-proxy-agent/agent-base",
"_phantomChildren": {},
"_requested": {
@@ -18,8 +18,8 @@
"_requiredBy": [
"/pacote/make-fetch-happen/https-proxy-agent"
],
- "_resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.1.0.tgz",
- "_shasum": "20e17401cd49b3c076bf56a4bc6c5b436ffa8d55",
+ "_resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.1.1.tgz",
+ "_shasum": "92d8a4fc2524a3b09b3666a33b6c97960f23d6a4",
"_spec": "agent-base@^4.1.0",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent",
"author": {
@@ -61,5 +61,5 @@
"scripts": {
"test": "mocha --reporter spec"
},
- "version": "4.1.0"
+ "version": "4.1.1"
}
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/patch-core.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/patch-core.js
index 05cbaa1e70a56d..a3f7bc6160c156 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/patch-core.js
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/patch-core.js
@@ -11,8 +11,8 @@ const https = require('https');
*/
https.request = (function(request) {
return function(_options, cb) {
- let options
- if (typeof options === 'string') {
+ let options;
+ if (typeof _options === 'string') {
options = url.parse(_options);
} else {
options = Object.assign({}, _options);
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/test/test.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/test/test.js
index 43217d4273a73d..23814e2c326962 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/test/test.js
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/agent-base/test/test.js
@@ -443,6 +443,17 @@ describe('"https" module', function() {
done();
});
+ it('should work with a String URL', function(done) {
+ var endpoint = 'https://127.0.0.1:' + port;
+ var req = https.get(endpoint);
+
+ // it's gonna error out since `rejectUnauthorized` is not being passed in
+ req.on('error', function(err) {
+ assert.equal(err.code, 'DEPTH_ZERO_SELF_SIGNED_CERT');
+ done();
+ });
+ });
+
it('should work for basic HTTPS requests', function(done) {
var called = false;
var agent = new Agent(function(req, opts, fn) {
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/debug/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/debug/package.json
index ac0f1e8a9bd224..ade1aa0a01e2b6 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/debug/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/node_modules/debug/package.json
@@ -1,5 +1,5 @@
{
- "_from": "debug@2",
+ "_from": "debug@^2.4.1",
"_id": "debug@2.6.8",
"_inBundle": false,
"_integrity": "sha1-5zFTHKLt4n0YgiJCfaF4IdaP9Pw=",
@@ -8,19 +8,19 @@
"_requested": {
"type": "range",
"registry": true,
- "raw": "debug@2",
+ "raw": "debug@^2.4.1",
"name": "debug",
"escapedName": "debug",
- "rawSpec": "2",
+ "rawSpec": "^2.4.1",
"saveSpec": null,
- "fetchSpec": "2"
+ "fetchSpec": "^2.4.1"
},
"_requiredBy": [
"/pacote/make-fetch-happen/https-proxy-agent"
],
"_resolved": "https://registry.npmjs.org/debug/-/debug-2.6.8.tgz",
"_shasum": "e731531ca2ede27d188222427da17821d68ff4fc",
- "_spec": "debug@2",
+ "_spec": "debug@^2.4.1",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent",
"author": {
"name": "TJ Holowaychuk",
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/package.json
index 1e84640b0a0280..85a8f5b0e58d8c 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/https-proxy-agent/package.json
@@ -1,8 +1,8 @@
{
"_from": "https-proxy-agent@^2.0.0",
- "_id": "https-proxy-agent@2.0.0",
+ "_id": "https-proxy-agent@2.1.0",
"_inBundle": false,
- "_integrity": "sha1-/6pLb69YasNAwYoUBDHna31/KUQ=",
+ "_integrity": "sha512-/DTVSUCbRc6AiyOV4DBRvPDpKKCJh4qQJNaCgypX0T41quD9hp/PB5iUyx/60XobuMPQa9ce1jNV9UOUq6PnTg==",
"_location": "/pacote/make-fetch-happen/https-proxy-agent",
"_phantomChildren": {},
"_requested": {
@@ -18,8 +18,8 @@
"_requiredBy": [
"/pacote/make-fetch-happen"
],
- "_resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.0.0.tgz",
- "_shasum": "ffaa4b6faf586ac340c18a140431e76b7d7f2944",
+ "_resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.1.0.tgz",
+ "_shasum": "1391bee7fd66aeabc0df2a1fa90f58954f43e443",
"_spec": "https-proxy-agent@^2.0.0",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen",
"author": {
@@ -58,5 +58,5 @@
"scripts": {
"test": "mocha --reporter spec"
},
- "version": "2.0.0"
+ "version": "2.1.0"
}
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/CHANGELOG.md b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/CHANGELOG.md
index e007b99929229b..a0cfe7f73e495c 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/CHANGELOG.md
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/CHANGELOG.md
@@ -2,6 +2,16 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+
+## [2.0.2](https://github.com/npm/node-fetch-npm/compare/v2.0.1...v2.0.2) (2017-08-15)
+
+
+### Bug Fixes
+
+* **json:** replace jju with a custom syntax error reporter ([#6](https://github.com/npm/node-fetch-npm/issues/6)) ([84d169c](https://github.com/npm/node-fetch-npm/commit/84d169c))
+
+
+
## [2.0.1](https://github.com/npm/node-fetch-npm/compare/v2.0.0...v2.0.1) (2017-05-24)
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/encoding/node_modules/iconv-lite/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/encoding/node_modules/iconv-lite/package.json
index 3ec29a66e9929e..75a538098c44e2 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/encoding/node_modules/iconv-lite/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/encoding/node_modules/iconv-lite/package.json
@@ -21,7 +21,7 @@
"_resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.18.tgz",
"_shasum": "23d8656b16aae6742ac29732ea8f0336a4789cf2",
"_spec": "iconv-lite@~0.4.13",
- "_where": "/Users/rebecca/code/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/encoding",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/encoding",
"author": {
"name": "Alexander Shtuchkin",
"email": "ashtuchkin@gmail.com"
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/encoding/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/encoding/package.json
index a3a41f628e036c..eff80f339706bf 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/encoding/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/encoding/package.json
@@ -1,6 +1,7 @@
{
"_from": "encoding@^0.1.11",
"_id": "encoding@0.1.12",
+ "_inBundle": false,
"_integrity": "sha1-U4tm8+5izRq1HsMjgp0flIDHS+s=",
"_location": "/pacote/make-fetch-happen/node-fetch-npm/encoding",
"_phantomChildren": {},
@@ -19,13 +20,11 @@
],
"_resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.12.tgz",
"_shasum": "538b66f3ee62cd1ab51ec323829d1f9480c74beb",
- "_shrinkwrap": null,
"_spec": "encoding@^0.1.11",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm",
"author": {
"name": "Andris Reinman"
},
- "bin": null,
"bugs": {
"url": "https://github.com/andris9/encoding/issues"
},
@@ -43,8 +42,6 @@
"license": "MIT",
"main": "lib/encoding.js",
"name": "encoding",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
"url": "git+https://github.com/andris9/encoding.git"
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/CHANGELOG.md b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/CHANGELOG.md
new file mode 100644
index 00000000000000..843a0bcb941887
--- /dev/null
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/CHANGELOG.md
@@ -0,0 +1,36 @@
+# Change Log
+
+All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+
+
+## [1.0.1](https://github.com/zkat/json-parse-better-errors/compare/v1.0.0...v1.0.1) (2017-08-16)
+
+
+### Bug Fixes
+
+* **license:** oops. Forgot to update license.md ([efe2958](https://github.com/zkat/json-parse-better-errors/commit/efe2958))
+
+
+
+
+# 1.0.0 (2017-08-15)
+
+
+### Features
+
+* **init:** Initial Commit ([562c977](https://github.com/zkat/json-parse-better-errors/commit/562c977))
+
+
+### BREAKING CHANGES
+
+* **init:** This is the first commit!
+
+
+
+
+# 0.1.0 (2017-08-15)
+
+
+### Features
+
+* **init:** Initial Commit ([9dd1a19](https://github.com/zkat/json-parse-better-errors/commit/9dd1a19))
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/LICENSE.md b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/LICENSE.md
new file mode 100644
index 00000000000000..c51842cc4ab3c2
--- /dev/null
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/LICENSE.md
@@ -0,0 +1,7 @@
+Copyright 2017 Kat Marchán
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/README.md b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/README.md
new file mode 100644
index 00000000000000..667323c775a99e
--- /dev/null
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/README.md
@@ -0,0 +1,53 @@
+# json-parse-better-errors [![npm version](https://img.shields.io/npm/v/json-parse-better-errors.svg)](https://npm.im/json-parse-better-errors) [![license](https://img.shields.io/npm/l/json-parse-better-errors.svg)](https://npm.im/json-parse-better-errors) [![Travis](https://img.shields.io/travis/zkat/json-parse-better-errors.svg)](https://travis-ci.org/zkat/json-parse-better-errors) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/json-parse-better-errors?svg=true)](https://ci.appveyor.com/project/zkat/json-parse-better-errors) [![Coverage Status](https://coveralls.io/repos/github/zkat/json-parse-better-errors/badge.svg?branch=latest)](https://coveralls.io/github/zkat/json-parse-better-errors?branch=latest)
+
+[`json-parse-better-errors`](https://github.com/zkat/json-parse-better-errors) is a Node.js library that works
+just like `JSON.parse`, but produces far more helpful error messages when
+parsing fails. The error message includes context from the input around the
+position where the syntax error occurred.
+
+It was originally written to be used in [npm](https://npm.im), but it can
+just as easily be used on its own.
+
+See the [API](#api) section below for details.
+
+## Install
+
+`$ npm install --save json-parse-better-errors`
+
+## Table of Contents
+
+* [Example](#example)
+* [Features](#features)
+* [Contributing](#contributing)
+* [API](#api)
+ * [`parse`](#parse)
+
+### Example
+
+```javascript
+const parseJson = require('json-parse-better-errors')
+
+parseJson('"foo"')
+parseJson('garbage') // more useful error message
+```
+
+### Features
+
+* Like JSON.parse, but the errors are better.
+
+### Contributing
+
+The json-parse-better-errors team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear.
+
+All participants and maintainers in this project are expected to follow the [Code of Conduct](CODE_OF_CONDUCT.md), and to just generally be excellent to each other.
+
+Please refer to the [Changelog](CHANGELOG.md) for project history details, too.
+
+Happy hacking!
+
+### API
+
+#### `> parse(txt, ?reviver, ?context=20)`
+
+Works just like `JSON.parse`, but will include a bit more information when an
+error happens.
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/index.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/index.js
new file mode 100644
index 00000000000000..32c36358661a29
--- /dev/null
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/index.js
@@ -0,0 +1,32 @@
+'use strict'
+
+module.exports = parseJson
+function parseJson (txt, reviver, context) {
+ context = context || 20
+ try {
+ return JSON.parse(txt, reviver)
+ } catch (e) {
+ const syntaxErr = e.message.match(/^Unexpected token.*position\s+(\d+)/i)
+ const errIdx = syntaxErr
+ ? +syntaxErr[1]
+ : e.message.match(/^Unexpected end of JSON.*/i)
+ ? txt.length - 1
+ : null
+ if (errIdx != null) {
+ const start = errIdx <= context
+ ? 0
+ : errIdx - context
+ const end = errIdx + context >= txt.length
+ ? txt.length
+ : errIdx + context
+ e.message += ` while parsing near '${
+ start === 0 ? '' : '...'
+ }${txt.slice(start, end)}${
+ end === txt.length ? '' : '...'
+ }'`
+ } else {
+ e.message += ` while parsing '${txt.slice(0, context * 2)}'`
+ }
+ throw e
+ }
+}
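
A quick usage sketch for the parser above; the exact appended text depends on the V8 error message, so the output shown in the comment is approximate.

```javascript
const parseJson = require('json-parse-better-errors')

try {
  // context = 10: echo up to 10 characters on each side of the error position
  parseJson('{"foo": "bar", "broken": }', null, 10)
} catch (err) {
  // err.message ends with something like:
  //   ... while parsing near '..."broken": }'
  console.error(err.message)
}
```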
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/package.json
new file mode 100644
index 00000000000000..c088bb6a98184d
--- /dev/null
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-better-errors/package.json
@@ -0,0 +1,76 @@
+{
+ "_from": "json-parse-better-errors@^1.0.0",
+ "_id": "json-parse-better-errors@1.0.1",
+ "_inBundle": false,
+ "_integrity": "sha512-xyQpxeWWMKyJps9CuGJYeng6ssI5bpqS9ltQpdVQ90t4ql6NdnxFKh95JcRt2cun/DjMVNrdjniLPuMA69xmCw==",
+ "_location": "/pacote/make-fetch-happen/node-fetch-npm/json-parse-better-errors",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "json-parse-better-errors@^1.0.0",
+ "name": "json-parse-better-errors",
+ "escapedName": "json-parse-better-errors",
+ "rawSpec": "^1.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.0"
+ },
+ "_requiredBy": [
+ "/pacote/make-fetch-happen/node-fetch-npm"
+ ],
+ "_resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.1.tgz",
+ "_shasum": "50183cd1b2d25275de069e9e71b467ac9eab973a",
+ "_spec": "json-parse-better-errors@^1.0.0",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm",
+ "author": {
+ "name": "Kat Marchán",
+ "email": "kzm@sykosomatic.org"
+ },
+ "bugs": {
+ "url": "https://github.com/zkat/json-parse-better-errors/issues"
+ },
+ "bundleDependencies": false,
+ "config": {
+ "nyc": {
+ "exclude": [
+ "node_modules/**",
+ "test/**"
+ ]
+ }
+ },
+ "deprecated": false,
+ "description": "JSON.parse with context information on error",
+ "devDependencies": {
+ "nyc": "^10.3.2",
+ "standard": "^9.0.2",
+ "standard-version": "^4.1.0",
+ "tap": "^10.3.3",
+ "weallbehave": "^1.2.0",
+ "weallcontribute": "^1.0.8"
+ },
+ "files": [
+ "*.js"
+ ],
+ "homepage": "https://github.com/zkat/json-parse-better-errors#readme",
+ "keywords": [
+ "JSON",
+ "parser"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "json-parse-better-errors",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/zkat/json-parse-better-errors.git"
+ },
+ "scripts": {
+ "postrelease": "npm publish && git push --follow-tags",
+ "prerelease": "npm t",
+ "pretest": "standard",
+ "release": "standard-version -s",
+ "test": "tap -J --coverage test/*.js",
+ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
+ "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
+ },
+ "version": "1.0.1"
+}
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/.editorconfig b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/.editorconfig
deleted file mode 100644
index fb7f73a832a4af..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/.editorconfig
+++ /dev/null
@@ -1,14 +0,0 @@
-root = true
-
-[*]
-end_of_line = lf
-insert_final_newline = true
-trim_trailing_whitespace = true
-
-[*.js, **/*.js]
-indent_size = 4
-indent_style = space
-
-[{package.json,.travis.yml}]
-indent_size = 2
-indent_style = space
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/.npmignore b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/.npmignore
deleted file mode 100644
index 59d842baa84c8b..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/.npmignore
+++ /dev/null
@@ -1,28 +0,0 @@
-# Logs
-logs
-*.log
-
-# Runtime data
-pids
-*.pid
-*.seed
-
-# Directory for instrumented libs generated by jscoverage/JSCover
-lib-cov
-
-# Coverage directory used by tools like istanbul
-coverage
-
-# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
-.grunt
-
-# Compiled binary addons (http://nodejs.org/api/addons.html)
-build/Release
-
-# Dependency directory
-# Commenting this out is preferred by some people, see
-# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git-
-node_modules
-
-# Users Environment Variables
-.lock-wscript
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/LICENSE b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/LICENSE
deleted file mode 100644
index c3d2eb3550079b..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2015 Sam Mikes
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/README.md b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/README.md
deleted file mode 100644
index ffad93584b19d4..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/README.md
+++ /dev/null
@@ -1,29 +0,0 @@
-# json-parse-helpfulerror
-
-A drop-in replacement for `JSON.parse` that uses
-[`jju`](https://www.npmjs.org/package/jju) to provide more useful error
-messages in the event of a parse error.
-
-# Example
-
-## Installation
-
-```
-npm i -S json-parse-helpfulerror
-```
-
-## Use
-
-```js
-var jph = require('json-parse-helpfulerror');
-
-var notJSON = "{'foo': 3}"; // keys must be double-quoted in JSON
-
-JSON.parse(notJSON); // throws unhelpful error
-
-jph.parse("{'foo': 3}") // throws more helpful error: "Unexpected token '\''..."
-```
-
-# License
-
-MIT
\ No newline at end of file
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/index.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/index.js
deleted file mode 100644
index 15648b017b3db5..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/index.js
+++ /dev/null
@@ -1,21 +0,0 @@
-'use strict';
-
-var jju = require('jju');
-
-function parse(text, reviver) {
- try {
- return JSON.parse(text, reviver);
- } catch (err) {
- // we expect this to throw with a more informative message
- jju.parse(text, {
- mode: 'json',
- reviver: reviver
- });
-
- // backup if jju is not as strict as JSON.parse; re-throw error
- // data-dependent code path, I do not know how to cover it
- throw err;
- }
-}
-
-exports.parse = parse;
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/.npmignore b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/.npmignore
deleted file mode 100644
index 5ae40150eea106..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/.npmignore
+++ /dev/null
@@ -1,9 +0,0 @@
-package.json
-node_modules
-test
-benchmark
-docs
-examples
-/.editorconfig
-/.eslint*
-/.travis.yml
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/LICENSE b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/LICENSE
deleted file mode 100644
index 5c93f456546877..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
- DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
- Version 2, December 2004
-
- Copyright (C) 2004 Sam Hocevar
-
- Everyone is permitted to copy and distribute verbatim or modified
- copies of this license document, and changing it is allowed as long
- as the name is changed.
-
- DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
- TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
-
- 0. You just DO WHAT THE FUCK YOU WANT TO.
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/README.md b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/README.md
deleted file mode 100644
index 3d61083fb04dd0..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/README.md
+++ /dev/null
@@ -1,242 +0,0 @@
-`jju` - a set of utilities to work with JSON / JSON5 documents
-
-[![npm version badge](https://img.shields.io/npm/v/jju.svg)](https://www.npmjs.org/package/jju)
-[![travis badge](http://img.shields.io/travis/rlidwka/jju.svg)](https://travis-ci.org/rlidwka/jju)
-[![downloads badge](http://img.shields.io/npm/dm/jju.svg)](https://www.npmjs.org/package/jju)
-
-## Installation
-
-```
-npm install jju
-```
-
-## Usage
-
-This module provides the following functions:
-
-1. [jju.parse()](#jjuparse-function) parses json/json5 text and returns a javascript value it corresponds to
-2. [jju.stringify()](#jjustringify-function) converts javascript value to an appropriate json/json5 text
-3. [jju.tokenize()](#jjutokenize-function) parses json/json5 text and returns an array of tokens it consists of ([see demo](http://rlidwka.github.io/jju/tokenizer.html))
-4. [jju.analyze()](#jjuanalyze-function) parses json/json5 text and tries to guess indentation, quoting style, etc.
-5. [jju.update()](#jjuupdate-function) changes json/json5 text, preserving original formatting as much as possible ([see demo](http://rlidwka.github.io/jju/editor.html))
-
-All functions are able to work with standard JSON documents. `jju.parse()` and `jju.stringify()` are better in some cases, but slower than the native `JSON.parse()` and `JSON.stringify()` versions. See the detailed descriptions below.
-
-### jju.parse() function
-
-```javascript
-/*
- * Main syntax:
- *
- * `text` - text to parse, type: String
- * `options` - parser options, type: Object
- */
-jju.parse(text[, options])
-
-// compatibility syntax
-jju.parse(text[, reviver])
-```
-
-Options:
-
- - reserved\_keys - what to do with reserved keys (String, default="ignore")
- - "ignore" - ignore reserved keys
- - "throw" - throw SyntaxError in case of reserved keys
- - "replace" - replace reserved keys, this is the default JSON.parse behaviour, unsafe
-
- Reserved keys are keys that exist in an empty object (`hasOwnProperty`, `__proto__`, etc.).
-
-```javascript
-// 'ignore' will cause reserved keys to be ignored:
-parse('{hasOwnProperty: 1}', {reserved_keys: 'ignore'}) == {}
-parse('{hasOwnProperty: 1, x: 2}', {reserved_keys: 'ignore'}).hasOwnProperty('x') == true
-
-// 'throw' will cause SyntaxError in these cases:
-parse('{hasOwnProperty: 1}', {reserved_keys: 'throw'}) == SyntaxError
-
-// 'replace' will replace reserved keys with new ones:
-parse('{hasOwnProperty: 1}', {reserved_keys: 'replace'}) == {hasOwnProperty: 1}
-parse('{hasOwnProperty: 1, x: 2}', {reserved_keys: 'replace'}).hasOwnProperty('x') == TypeError
-```
-
-
- - null\_prototype - create object as Object.create(null) instead of '{}' (Boolean)
-
- if `reserved_keys != 'replace'`, default is **false**
-
- if `reserved_keys == 'replace'`, default is **true**
-
- It is usually unsafe and not recommended to change this option to false in the latter case.
-
- - reviver - reviver function - Function
-
- This function should follow the JSON specification
-
- - mode - operation mode, set it to 'json' if you want to throw on non-strict json files (String)
-
-### jju.stringify() function
-
-```javascript
-/*
- * Main syntax:
- *
- * `value` - value to serialize, type: *
- * `options` - serializer options, type: Object
- */
-jju.stringify(value[, options])
-
-// compatibility syntax
-jju.stringify(value[, replacer[, indent]])
-```
-
-Options:
-
- - ascii - output ascii only (Boolean, default=false)
- If this option is enabled, the output will not contain any characters outside the 0x20-0x7f range.
-
- - indent - indentation (String, Number or Boolean, default='\t')
- This option follows the JSON specification.
-
- - quote - enquoting char (String, "'" or '"', default="'")
- - quote\_keys - whether key quoting in objects is required or not (Boolean, default=false)
- If you want `{"q": 1}` instead of `{q: 1}`, set it to true.
-
- - sort\_keys - sort all keys while stringifying (Boolean or Function, default=false)
- By default, sort order depends on the implementation; with V8 it's insertion order. If set to `true`, all keys (but not arrays) will be sorted alphabetically. You can provide your own sorting function as well.
-
- - replacer - replacer function or array (Function or Array)
- This option follows the JSON specification.
-
- - no\_trailing\_comma - don't output a trailing comma (Boolean, default=false)
- If this option is set, arrays like this `[1,2,3,]` will never be generated. Otherwise they may be generated for pretty printing.
-
- - mode - operation mode, set it to 'json' if you want correct json in the output (String)
-
- Currently it's either 'json' or something else. If it is 'json', the following options are implied:
-
- - options.quote = '"'
- - options.no\_trailing\_comma = true
- - options.quote\_keys = true
- - '\x' literals are not used
-
-### jju.tokenize() function
-
-```javascript
-/*
- * Main syntax:
- *
- * `text` - text to tokenize, type: String
- * `options` - parser options, type: Object
- */
-jju.tokenize(text[, options])
-```
-
-Options are the same as for the `jju.parse` function.
-
-The return value is an array of tokens, where each token is an object:
-
- - raw (String) - raw text of this token; if you join all the raw values, you get the original document
- - type (String) - type of the token; can be `whitespace`, `comment`, `key`, `literal`, `separator` or `newline`
- - stack (Array) - path to the current token in the syntax tree
- - value - value of the token if token is a `key` or `literal`
-
-You can check tokenizer for yourself using [this demo](http://rlidwka.github.io/jju/tokenizer.html).
-
-### jju.analyze() function
-
-```javascript
-/*
- * Main syntax:
- *
- * `text` - text to analyze, type: String
- * `options` - parser options, type: Object
- */
-jju.analyze(text[, options])
-```
-
-Options are the same as for the `jju.parse` function.
-
-The return value is an object describing the formatting style in which the document was written.
-
- - indent (String) - preferred indentation
- - newline (String) - preferred newline
- - quote (String) - `"` or `'` depending on which quote is preferred
- - quote\_keys (Boolean) - `false` if unquoted keys were used at least once
- - has\_whitespace (Boolean) - `true` if input has a whitespace token
- - has\_comments (Boolean) - `true` if input has a comment token
- - has\_newlines (Boolean) - `true` if input has a newline token
- - has\_trailing\_comma (Boolean) - `true` if input has at least one trailing comma
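-
-A rough sketch of what this returns in practice (the exact values depend on
-the input, of course):
-
-```javascript
-jju.analyze('{\n  "foo": 1, // hi\n}')
-// might return something like:
-// { indent: '  ', newline: '\n', quote: '"', quote_keys: true,
-//   has_whitespace: true, has_comments: true,
-//   has_newlines: true, has_trailing_comma: true }
-```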
-
-### jju.update() function
-
-```javascript
-/*
- * Main syntax:
- *
- * `text` - original text, type: String
- * `new_value` - new value you want to set
- * `options` - parser or stringifier options, type: Object
- */
-jju.update(text, new_value[, options])
-```
-
-If you want to update a JSON document, here is the general approach:
-
-```javascript
-// here is your original JSON document:
-var input = '{"foo": "bar", "baz": 123}'
-
-// you need to parse it first:
-var json = jju.parse(input, {mode: 'json'})
-// json is { foo: 'bar', baz: 123 }
-
-// then you can change it as you like:
-json.foo = 'quux'
-json.hello = 'world'
-
-// then you run an update function to change the original json:
-var output = jju.update(input, json, {mode: 'json'})
-// output is '{"foo": "quux", "baz": 123, "hello": "world"}'
-```
-
-Try [this demo](http://rlidwka.github.io/jju/editor.html) to test it on various kinds of JSON.
-
-## Advantages over existing JSON libraries
-
-In a few cases it makes sense to use this module instead of built-in JSON methods.
-
-Parser:
- - better error reporting with source code and line numbers
-
-In case of a syntax error, JSON.parse does not give the user any useful information. This module does:
-
-```
-$ node -e 'require("jju").parse("[1,1,1,1,invalid]")'
-
-SyntaxError: Unexpected token 'i' at 0:9
-[1,1,1,1,invalid]
- ^
-```
-
-This module is about 5 times slower, so if user experience matters to you more than performance, use this module. If you're working with a lot of machine-generated data, use JSON.parse instead.
-
-Stringifier:
- - util.inspect-like pretty printing
-
-This module is smarter about objects and arrays, and does not always print newlines in them:
-
-```
-$ node -e 'console.log(require("./").stringify([[,,,],,,[,,,,]], {mode:"json"}))'
-[
- [null, null, null],
- null,
- null,
- [null, null, null, null]
-]
-```
-
-JSON.stringify would split this into 15 lines, which is much harder to read.
-
-Yet again, this feature comes with a performance hit, so if user experience matters to you more than performance, use this module. If your JSON will be consumed by machines, use JSON.stringify instead.
-
-As a rule of thumb: if you use the "space" argument to indent your JSON, you're probably better off using this module instead.
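-
-For illustration, a sketch of the difference on a small nested structure
-(exact line breaks depend on jju's line-length heuristic):
-
-```javascript
-var obj = {list: [1, 2, 3], flag: true}
-
-JSON.stringify(obj, null, 2)
-// 8 lines: every element of "list" ends up on its own line
-
-jju.stringify(obj, {mode: 'json', indent: 2})
-// fits on one line, so jju keeps it short, roughly:
-// {"list": [1, 2, 3], "flag": true}
-```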
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/index.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/index.js
deleted file mode 100644
index 50f16249634fb6..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/index.js
+++ /dev/null
@@ -1,32 +0,0 @@
-
-module.exports.__defineGetter__('parse', function() {
- return require('./lib/parse').parse
-})
-
-module.exports.__defineGetter__('stringify', function() {
- return require('./lib/stringify').stringify
-})
-
-module.exports.__defineGetter__('tokenize', function() {
- return require('./lib/parse').tokenize
-})
-
-module.exports.__defineGetter__('update', function() {
- return require('./lib/document').update
-})
-
-module.exports.__defineGetter__('analyze', function() {
- return require('./lib/analyze').analyze
-})
-
-module.exports.__defineGetter__('utils', function() {
- return require('./lib/utils')
-})
-
-/**package
-{ "name": "jju",
- "version": "0.0.0",
- "dependencies": {"js-yaml": "*"},
- "scripts": {"postinstall": "js-yaml package.yaml > package.json ; npm install"}
-}
-**/
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/analyze.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/analyze.js
deleted file mode 100644
index 39303b0969081c..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/analyze.js
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Author: Alex Kocharin
- * GIT: https://github.com/rlidwka/jju
- * License: WTFPL, grab your copy here: http://www.wtfpl.net/txt/copying/
- */
-
-var tokenize = require('./parse').tokenize
-
-module.exports.analyze = function analyzeJSON(input, options) {
- if (options == null) options = {}
-
- if (!Array.isArray(input)) {
- input = tokenize(input, options)
- }
-
- var result = {
- has_whitespace: false,
- has_comments: false,
- has_newlines: false,
- has_trailing_comma: false,
- indent: '',
- newline: '\n',
- quote: '"',
- quote_keys: true,
- }
-
- var stats = {
- indent: {},
- newline: {},
- quote: {},
- }
-
- for (var i=0; i stats[k][b] ? a : b
- })
- }
- }
-
- return result
-}
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/document.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/document.js
deleted file mode 100644
index af1a01a03d062b..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/document.js
+++ /dev/null
@@ -1,484 +0,0 @@
-/*
- * Author: Alex Kocharin
- * GIT: https://github.com/rlidwka/jju
- * License: WTFPL, grab your copy here: http://www.wtfpl.net/txt/copying/
- */
-
-var assert = require('assert')
-var tokenize = require('./parse').tokenize
-var stringify = require('./stringify').stringify
-var analyze = require('./analyze').analyze
-
-function isObject(x) {
- return typeof(x) === 'object' && x !== null
-}
-
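-// serialize `value` into a token list (with stack annotations) so it can be
-// spliced into an existing token stream; `is_key` serializes it as an object key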
-function value_to_tokenlist(value, stack, options, is_key, indent) {
- options = Object.create(options)
- options._stringify_key = !!is_key
-
- if (indent) {
- options._prefix = indent.prefix.map(function(x) {
- return x.raw
- }).join('')
- }
-
- if (options._splitMin == null) options._splitMin = 0
- if (options._splitMax == null) options._splitMax = 0
-
- var stringified = stringify(value, options)
-
- if (is_key) {
- return [ { raw: stringified, type: 'key', stack: stack, value: value } ]
- }
-
- options._addstack = stack
- var result = tokenize(stringified, {
- _addstack: stack,
- })
- result.data = null
- return result
-}
-
-// '1.2.3' -> ['1','2','3']
-function arg_to_path(path) {
- // array indexes
- if (typeof(path) === 'number') path = String(path)
-
- if (path === '') path = []
- if (typeof(path) === 'string') path = path.split('.')
-
- if (!Array.isArray(path)) throw Error('Invalid path type, string or array expected')
- return path
-}
-
-// returns new [begin, end] or false if not found
-//
-// {x:3, xxx: 111, y: [111, {q: 1, e: 2} ,333] }
-// f('y',0) returns this B^^^^^^^^^^^^^^^^^^^^^^^^E
-// then f('1',1) would reduce it to B^^^^^^^^^^E
-function find_element_in_tokenlist(element, lvl, tokens, begin, end) {
- while(tokens[begin].stack[lvl] != element) {
- if (begin++ >= end) return false
- }
- while(tokens[end].stack[lvl] != element) {
- if (end-- < begin) return false
- }
- return [begin, end]
-}
-
-function is_whitespace(token_type) {
- return token_type === 'whitespace'
- || token_type === 'newline'
- || token_type === 'comment'
-}
-
-function find_first_non_ws_token(tokens, begin, end) {
- while(is_whitespace(tokens[begin].type)) {
- if (begin++ >= end) return false
- }
- return begin
-}
-
-function find_last_non_ws_token(tokens, begin, end) {
- while(is_whitespace(tokens[end].type)) {
- if (end-- < begin) return false
- }
- return end
-}
-
-/*
- * when appending a new element of an object/array, we are trying to
- * figure out the style used on the previous element
- *
- * return {prefix, sep1, sep2, suffix}
- *
- * ' "key" : "element" \r\n'
- * prefix^^^^ sep1^ ^^sep2 ^^^^^^^^suffix
- *
- * begin - the beginning of the object/array
- * end - last token of the last element (value or comma usually)
- */
-function detect_indent_style(tokens, is_array, begin, end, level) {
- var result = {
- sep1: [],
- sep2: [],
- suffix: [],
- prefix: [],
- newline: [],
- }
-
- if (tokens[end].type === 'separator' && tokens[end].stack.length !== level+1 && tokens[end].raw !== ',') {
- // either a beginning of the array (no last element) or other weird situation
- //
- // just return defaults
- return result
- }
-
- // ' "key" : "value" ,'
- // skipping last separator, we're now here ^^
- if (tokens[end].type === 'separator')
- end = find_last_non_ws_token(tokens, begin, end - 1)
- if (end === false) return result
-
- // ' "key" : "value" ,'
- // skipping value ^^^^^^^
- while(tokens[end].stack.length > level) end--
-
- if (!is_array) {
- while(is_whitespace(tokens[end].type)) {
- if (end < begin) return result
- if (tokens[end].type === 'whitespace') {
- result.sep2.unshift(tokens[end])
- } else {
- // newline, comment or other unrecognized codestyle
- return result
- }
- end--
- }
-
- // ' "key" : "value" ,'
- // skipping separator ^
- assert.equal(tokens[end].type, 'separator')
- assert.equal(tokens[end].raw, ':')
- while(is_whitespace(tokens[--end].type)) {
- if (end < begin) return result
- if (tokens[end].type === 'whitespace') {
- result.sep1.unshift(tokens[end])
- } else {
- // newline, comment or other unrecognized codestyle
- return result
- }
- }
-
- assert.equal(tokens[end].type, 'key')
- end--
- }
-
- // ' "key" : "value" ,'
- // skipping key ^^^^^
- while(is_whitespace(tokens[end].type)) {
- if (end < begin) return result
- if (tokens[end].type === 'whitespace') {
- result.prefix.unshift(tokens[end])
- } else if (tokens[end].type === 'newline') {
- result.newline.unshift(tokens[end])
- return result
- } else {
- // comment or other unrecognized codestyle
- return result
- }
- end--
- }
-
- return result
-}
-
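-// parse `text` into a Document object that keeps both the parsed data and the
-// raw token stream; the input's code style (indent, quotes, trailing commas)
-// is inherited unless overridden in `options`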
-function Document(text, options) {
- var self = Object.create(Document.prototype)
-
- if (options == null) options = {}
- //options._structure = true
- var tokens = self._tokens = tokenize(text, options)
- self._data = tokens.data
- tokens.data = null
- self._options = options
-
- var stats = analyze(text, options)
- if (options.indent == null) {
- options.indent = stats.indent
- }
- if (options.quote == null) {
- options.quote = stats.quote
- }
- if (options.quote_keys == null) {
- options.quote_keys = stats.quote_keys
- }
- if (options.no_trailing_comma == null) {
- options.no_trailing_comma = !stats.has_trailing_comma
- }
- return self
-}
-
-// return true if it's a proper object
-// throw otherwise
-function check_if_can_be_placed(key, object, is_unset) {
- //if (object == null) return false
- function error(add) {
- return Error("You can't " + (is_unset ? 'unset' : 'set') + " key '" + key + "'" + add)
- }
-
- if (!isObject(object)) {
-    throw error(' of a non-object')
- }
- if (Array.isArray(object)) {
- // array, check boundary
- if (String(key).match(/^\d+$/)) {
- key = Number(String(key))
- if (object.length < key || (is_unset && object.length === key)) {
- throw error(', out of bounds')
- } else if (is_unset && object.length !== key+1) {
- throw error(' in the middle of an array')
- } else {
- return true
- }
- } else {
- throw error(' of an array')
- }
- } else {
- // object
- return true
- }
-}
-
-// usage: document.set('path.to.something', 'value')
-// or: document.set(['path','to','something'], 'value')
-Document.prototype.set = function(path, value) {
- path = arg_to_path(path)
-
- // updating this._data and check for errors
- if (path.length === 0) {
- if (value === undefined) throw Error("can't remove root document")
- this._data = value
- var new_key = false
-
- } else {
- var data = this._data
-
- for (var i=0; i {x:1}`
- // removing sep, literal and optional sep
- // ':'
- var pos2 = find_last_non_ws_token(this._tokens, pos_old[0], position[0] - 1)
- assert.equal(this._tokens[pos2].type, 'separator')
- assert.equal(this._tokens[pos2].raw, ':')
- position[0] = pos2
-
- // key
- var pos2 = find_last_non_ws_token(this._tokens, pos_old[0], position[0] - 1)
- assert.equal(this._tokens[pos2].type, 'key')
- assert.equal(this._tokens[pos2].value, path[path.length-1])
- position[0] = pos2
- }
-
- // removing comma in arrays and objects
- var pos2 = find_last_non_ws_token(this._tokens, pos_old[0], position[0] - 1)
- assert.equal(this._tokens[pos2].type, 'separator')
- if (this._tokens[pos2].raw === ',') {
- position[0] = pos2
- } else {
- // beginning of the array/object, so we should remove trailing comma instead
- pos2 = find_first_non_ws_token(this._tokens, position[1] + 1, pos_old[1])
- assert.equal(this._tokens[pos2].type, 'separator')
- if (this._tokens[pos2].raw === ',') {
- position[1] = pos2
- }
- }
-
- } else {
- var indent = pos2 !== false
- ? detect_indent_style(this._tokens, Array.isArray(data), pos_old[0], position[1] - 1, i)
- : {}
- var newtokens = value_to_tokenlist(value, path, this._options, false, indent)
- }
-
- } else {
- // insert new key, that's tricky
- var path_1 = path.slice(0, i)
-
- // find a last separator after which we're inserting it
- var pos2 = find_last_non_ws_token(this._tokens, position[0] + 1, position[1] - 1)
- assert(pos2 !== false)
-
- var indent = pos2 !== false
- ? detect_indent_style(this._tokens, Array.isArray(data), position[0] + 1, pos2, i)
- : {}
-
- var newtokens = value_to_tokenlist(value, path, this._options, false, indent)
-
- // adding leading whitespaces according to detected codestyle
- var prefix = []
- if (indent.newline && indent.newline.length)
- prefix = prefix.concat(indent.newline)
- if (indent.prefix && indent.prefix.length)
- prefix = prefix.concat(indent.prefix)
-
- // adding '"key":' (as in "key":"value") to object values
- if (!Array.isArray(data)) {
- prefix = prefix.concat(value_to_tokenlist(path[path.length-1], path_1, this._options, true))
- if (indent.sep1 && indent.sep1.length)
- prefix = prefix.concat(indent.sep1)
- prefix.push({raw: ':', type: 'separator', stack: path_1})
- if (indent.sep2 && indent.sep2.length)
- prefix = prefix.concat(indent.sep2)
- }
-
- newtokens.unshift.apply(newtokens, prefix)
-
- // check if prev token is a separator AND they're at the same level
- if (this._tokens[pos2].type === 'separator' && this._tokens[pos2].stack.length === path.length-1) {
- // previous token is either , or [ or {
- if (this._tokens[pos2].raw === ',') {
- // restore ending comma
- newtokens.push({raw: ',', type: 'separator', stack: path_1})
- }
- } else {
- // previous token isn't a separator, so need to insert one
- newtokens.unshift({raw: ',', type: 'separator', stack: path_1})
- }
-
- if (indent.suffix && indent.suffix.length)
- newtokens.push.apply(newtokens, indent.suffix)
-
- assert.equal(this._tokens[position[1]].type, 'separator')
- position[0] = pos2+1
- position[1] = pos2
- }
-
- newtokens.unshift(position[1] - position[0] + 1)
- newtokens.unshift(position[0])
- this._tokens.splice.apply(this._tokens, newtokens)
-
- return this
-}
-
-// convenience method
-Document.prototype.unset = function(path) {
- return this.set(path, undefined)
-}
-
-Document.prototype.get = function(path) {
- path = arg_to_path(path)
-
- var data = this._data
- for (var i=0; i old_data.length) {
- // adding new elements, so going forward
- for (var i=0; i=0; i--) {
- path.push(String(i))
- change(path, old_data[i], new_data[i])
- path.pop()
- }
- }
-
- } else {
- // both values are objects here
- for (var i in new_data) {
- path.push(String(i))
- change(path, old_data[i], new_data[i])
- path.pop()
- }
-
- for (var i in old_data) {
- if (i in new_data) continue
- path.push(String(i))
- change(path, old_data[i], new_data[i])
- path.pop()
- }
- }
- }
-}
-
-Document.prototype.toString = function() {
- return this._tokens.map(function(x) {
- return x.raw
- }).join('')
-}
-
-module.exports.Document = Document
-
-module.exports.update = function updateJSON(source, new_value, options) {
- return Document(source, options).update(new_value).toString()
-}
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/parse.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/parse.js
deleted file mode 100644
index 025007f63b7060..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/parse.js
+++ /dev/null
@@ -1,764 +0,0 @@
-/*
- * Author: Alex Kocharin
- * GIT: https://github.com/rlidwka/jju
- * License: WTFPL, grab your copy here: http://www.wtfpl.net/txt/copying/
- */
-
-// RTFM: http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-262.pdf
-
-var Uni = require('./unicode')
-
-function isHexDigit(x) {
- return (x >= '0' && x <= '9')
- || (x >= 'A' && x <= 'F')
- || (x >= 'a' && x <= 'f')
-}
-
-function isOctDigit(x) {
- return x >= '0' && x <= '7'
-}
-
-function isDecDigit(x) {
- return x >= '0' && x <= '9'
-}
-
-var unescapeMap = {
- '\'': '\'',
- '"' : '"',
- '\\': '\\',
- 'b' : '\b',
- 'f' : '\f',
- 'n' : '\n',
- 'r' : '\r',
- 't' : '\t',
- 'v' : '\v',
- '/' : '/',
-}
-
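-// build a detailed error message that includes the offending source line and
-// a '^' marker pointing at the error position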
-function formatError(input, msg, position, lineno, column, json5) {
- var result = msg + ' at ' + (lineno + 1) + ':' + (column + 1)
- , tmppos = position - column - 1
- , srcline = ''
- , underline = ''
-
- var isLineTerminator = json5 ? Uni.isLineTerminator : Uni.isLineTerminatorJSON
-
- // output no more than 70 characters before the wrong ones
- if (tmppos < position - 70) {
- tmppos = position - 70
- }
-
- while (1) {
- var chr = input[++tmppos]
-
- if (isLineTerminator(chr) || tmppos === input.length) {
- if (position >= tmppos) {
- // ending line error, so show it after the last char
- underline += '^'
- }
- break
- }
- srcline += chr
-
- if (position === tmppos) {
- underline += '^'
- } else if (position > tmppos) {
- underline += input[tmppos] === '\t' ? '\t' : ' '
- }
-
- // output no more than 78 characters on the string
- if (srcline.length > 78) break
- }
-
- return result + '\n' + srcline + '\n' + underline
-}
-
-function parse(input, options) {
- // parse as a standard JSON mode
- var json5 = false;
- var cjson = false;
-
- if (options.legacy || options.mode === 'json') {
- // use json
- } else if (options.mode === 'cjson') {
- cjson = true;
- } else if (options.mode === 'json5') {
- json5 = true;
- } else {
- // use it by default
- json5 = true;
- }
-
- var isLineTerminator = json5 ? Uni.isLineTerminator : Uni.isLineTerminatorJSON
- var isWhiteSpace = json5 ? Uni.isWhiteSpace : Uni.isWhiteSpaceJSON
-
- var length = input.length
- , lineno = 0
- , linestart = 0
- , position = 0
- , stack = []
-
- var tokenStart = function() {}
- var tokenEnd = function(v) {return v}
-
- /* tokenize({
- raw: '...',
- type: 'whitespace'|'comment'|'key'|'literal'|'separator'|'newline',
- value: 'number'|'string'|'whatever',
- path: [...],
- })
- */
- if (options._tokenize) {
- ;(function() {
- var start = null
- tokenStart = function() {
- if (start !== null) throw Error('internal error, token overlap')
- start = position
- }
-
- tokenEnd = function(v, type) {
- if (start != position) {
- var hash = {
- raw: input.substr(start, position-start),
- type: type,
- stack: stack.slice(0),
- }
- if (v !== undefined) hash.value = v
- options._tokenize.call(null, hash)
- }
- start = null
- return v
- }
- })()
- }
-
- function fail(msg) {
- var column = position - linestart
-
- if (!msg) {
- if (position < length) {
- var token = '\'' +
- JSON
- .stringify(input[position])
- .replace(/^"|"$/g, '')
- .replace(/'/g, "\\'")
- .replace(/\\"/g, '"')
- + '\''
-
- if (!msg) msg = 'Unexpected token ' + token
- } else {
- if (!msg) msg = 'Unexpected end of input'
- }
- }
-
- var error = SyntaxError(formatError(input, msg, position, lineno, column, json5))
- error.row = lineno + 1
- error.column = column + 1
- throw error
- }
-
- function newline(chr) {
-    // account for <cr><lf>
- if (chr === '\r' && input[position] === '\n') position++
- linestart = position
- lineno++
- }
-
- function parseGeneric() {
- var result
-
- while (position < length) {
- tokenStart()
- var chr = input[position++]
-
- if (chr === '"' || (chr === '\'' && json5)) {
- return tokenEnd(parseString(chr), 'literal')
-
- } else if (chr === '{') {
- tokenEnd(undefined, 'separator')
- return parseObject()
-
- } else if (chr === '[') {
- tokenEnd(undefined, 'separator')
- return parseArray()
-
- } else if (chr === '-'
- || chr === '.'
- || isDecDigit(chr)
- // + number Infinity NaN
- || (json5 && (chr === '+' || chr === 'I' || chr === 'N'))
- ) {
- return tokenEnd(parseNumber(), 'literal')
-
- } else if (chr === 'n') {
- parseKeyword('null')
- return tokenEnd(null, 'literal')
-
- } else if (chr === 't') {
- parseKeyword('true')
- return tokenEnd(true, 'literal')
-
- } else if (chr === 'f') {
- parseKeyword('false')
- return tokenEnd(false, 'literal')
-
- } else {
- position--
- return tokenEnd(undefined)
- }
- }
- }
-
- function parseKey() {
- var result
-
- while (position < length) {
- tokenStart()
- var chr = input[position++]
-
- if (chr === '"' || (chr === '\'' && json5)) {
- return tokenEnd(parseString(chr), 'key')
-
- } else if (chr === '{') {
- tokenEnd(undefined, 'separator')
- return parseObject()
-
- } else if (chr === '[') {
- tokenEnd(undefined, 'separator')
- return parseArray()
-
- } else if (chr === '.'
- || isDecDigit(chr)
- ) {
- return tokenEnd(parseNumber(true), 'key')
-
- } else if (json5
- && Uni.isIdentifierStart(chr) || (chr === '\\' && input[position] === 'u')) {
- // unicode char or a unicode sequence
- var rollback = position - 1
- var result = parseIdentifier()
-
- if (result === undefined) {
- position = rollback
- return tokenEnd(undefined)
- } else {
- return tokenEnd(result, 'key')
- }
-
- } else {
- position--
- return tokenEnd(undefined)
- }
- }
- }
-
- function skipWhiteSpace() {
- tokenStart()
- while (position < length) {
- var chr = input[position++]
-
- if (isLineTerminator(chr)) {
- position--
- tokenEnd(undefined, 'whitespace')
- tokenStart()
- position++
- newline(chr)
- tokenEnd(undefined, 'newline')
- tokenStart()
-
- } else if (isWhiteSpace(chr)) {
- // nothing
-
- } else if (chr === '/'
- && (json5 || cjson)
- && (input[position] === '/' || input[position] === '*')
- ) {
- position--
- tokenEnd(undefined, 'whitespace')
- tokenStart()
- position++
- skipComment(input[position++] === '*')
- tokenEnd(undefined, 'comment')
- tokenStart()
-
- } else {
- position--
- break
- }
- }
- return tokenEnd(undefined, 'whitespace')
- }
-
- function skipComment(multi) {
- while (position < length) {
- var chr = input[position++]
-
- if (isLineTerminator(chr)) {
- // LineTerminator is an end of singleline comment
- if (!multi) {
- // let parent function deal with newline
- position--
- return
- }
-
- newline(chr)
-
- } else if (chr === '*' && multi) {
- // end of multiline comment
- if (input[position] === '/') {
- position++
- return
- }
-
- } else {
- // nothing
- }
- }
-
- if (multi) {
- fail('Unclosed multiline comment')
- }
- }
-
- function parseKeyword(keyword) {
-    // keyword[0] is not checked because it should've been checked earlier
- var _pos = position
- var len = keyword.length
-    for (var i=1; i<len; i++) {
-      if (position >= length || keyword[i] != input[position]) {
- position = _pos-1
- fail()
- }
- position++
- }
- }
-
- function parseObject() {
- var result = options.null_prototype ? Object.create(null) : {}
- , empty_object = {}
- , is_non_empty = false
-
- while (position < length) {
- skipWhiteSpace()
- var item1 = parseKey()
- skipWhiteSpace()
- tokenStart()
- var chr = input[position++]
- tokenEnd(undefined, 'separator')
-
- if (chr === '}' && item1 === undefined) {
- if (!json5 && is_non_empty) {
- position--
- fail('Trailing comma in object')
- }
- return result
-
- } else if (chr === ':' && item1 !== undefined) {
- skipWhiteSpace()
- stack.push(item1)
- var item2 = parseGeneric()
- stack.pop()
-
- if (item2 === undefined) fail('No value found for key ' + item1)
- if (typeof(item1) !== 'string') {
- if (!json5 || typeof(item1) !== 'number') {
- fail('Wrong key type: ' + item1)
- }
- }
-
- if ((item1 in empty_object || empty_object[item1] != null) && options.reserved_keys !== 'replace') {
- if (options.reserved_keys === 'throw') {
- fail('Reserved key: ' + item1)
- } else {
- // silently ignore it
- }
- } else {
- if (typeof(options.reviver) === 'function') {
- item2 = options.reviver.call(null, item1, item2)
- }
-
- if (item2 !== undefined) {
- is_non_empty = true
- Object.defineProperty(result, item1, {
- value: item2,
- enumerable: true,
- configurable: true,
- writable: true,
- })
- }
- }
-
- skipWhiteSpace()
-
- tokenStart()
- var chr = input[position++]
- tokenEnd(undefined, 'separator')
-
- if (chr === ',') {
- continue
-
- } else if (chr === '}') {
- return result
-
- } else {
- fail()
- }
-
- } else {
- position--
- fail()
- }
- }
-
- fail()
- }
-
- function parseArray() {
- var result = []
-
- while (position < length) {
- skipWhiteSpace()
- stack.push(result.length)
- var item = parseGeneric()
- stack.pop()
- skipWhiteSpace()
- tokenStart()
- var chr = input[position++]
- tokenEnd(undefined, 'separator')
-
- if (item !== undefined) {
- if (typeof(options.reviver) === 'function') {
- item = options.reviver.call(null, String(result.length), item)
- }
- if (item === undefined) {
- result.length++
- item = true // hack for check below, not included into result
- } else {
- result.push(item)
- }
- }
-
- if (chr === ',') {
- if (item === undefined) {
- fail('Elisions are not supported')
- }
-
- } else if (chr === ']') {
- if (!json5 && item === undefined && result.length) {
- position--
- fail('Trailing comma in array')
- }
- return result
-
- } else {
- position--
- fail()
- }
- }
- }
-
- function parseNumber() {
- // rewind because we don't know first char
- position--
-
- var start = position
- , chr = input[position++]
- , t
-
- var to_num = function(is_octal) {
- var str = input.substr(start, position - start)
-
- if (is_octal) {
- var result = parseInt(str.replace(/^0o?/, ''), 8)
- } else {
- var result = Number(str)
- }
-
- if (Number.isNaN(result)) {
- position--
- fail('Bad numeric literal - "' + input.substr(start, position - start + 1) + '"')
- } else if (!json5 && !str.match(/^-?(0|[1-9][0-9]*)(\.[0-9]+)?(e[+-]?[0-9]+)?$/i)) {
- // additional restrictions imposed by json
- position--
- fail('Non-json numeric literal - "' + input.substr(start, position - start + 1) + '"')
- } else {
- return result
- }
- }
-
- // ex: -5982475.249875e+29384
- // ^ skipping this
- if (chr === '-' || (chr === '+' && json5)) chr = input[position++]
-
- if (chr === 'N' && json5) {
- parseKeyword('NaN')
- return NaN
- }
-
- if (chr === 'I' && json5) {
- parseKeyword('Infinity')
-
- // returning +inf or -inf
- return to_num()
- }
-
- if (chr >= '1' && chr <= '9') {
- // ex: -5982475.249875e+29384
- // ^^^ skipping these
- while (position < length && isDecDigit(input[position])) position++
- chr = input[position++]
- }
-
- // special case for leading zero: 0.123456
- if (chr === '0') {
- chr = input[position++]
-
- // new syntax, "0o777" old syntax, "0777"
- var is_octal = chr === 'o' || chr === 'O' || isOctDigit(chr)
- var is_hex = chr === 'x' || chr === 'X'
-
- if (json5 && (is_octal || is_hex)) {
- while (position < length
- && (is_hex ? isHexDigit : isOctDigit)( input[position] )
- ) position++
-
- var sign = 1
- if (input[start] === '-') {
- sign = -1
- start++
- } else if (input[start] === '+') {
- start++
- }
-
- return sign * to_num(is_octal)
- }
- }
-
- if (chr === '.') {
- // ex: -5982475.249875e+29384
- // ^^^ skipping these
- while (position < length && isDecDigit(input[position])) position++
- chr = input[position++]
- }
-
- if (chr === 'e' || chr === 'E') {
- chr = input[position++]
- if (chr === '-' || chr === '+') position++
- // ex: -5982475.249875e+29384
- // ^^^ skipping these
- while (position < length && isDecDigit(input[position])) position++
- chr = input[position++]
- }
-
- // we have char in the buffer, so count for it
- position--
- return to_num()
- }
-
- function parseIdentifier() {
- // rewind because we don't know first char
- position--
-
- var result = ''
-
- while (position < length) {
- var chr = input[position++]
-
- if (chr === '\\'
- && input[position] === 'u'
- && isHexDigit(input[position+1])
- && isHexDigit(input[position+2])
- && isHexDigit(input[position+3])
- && isHexDigit(input[position+4])
- ) {
- // UnicodeEscapeSequence
- chr = String.fromCharCode(parseInt(input.substr(position+1, 4), 16))
- position += 5
- }
-
- if (result.length) {
- // identifier started
- if (Uni.isIdentifierPart(chr)) {
- result += chr
- } else {
- position--
- return result
- }
-
- } else {
- if (Uni.isIdentifierStart(chr)) {
- result += chr
- } else {
- return undefined
- }
- }
- }
-
- fail()
- }
-
- function parseString(endChar) {
- // 7.8.4 of ES262 spec
- var result = ''
-
- while (position < length) {
- var chr = input[position++]
-
- if (chr === endChar) {
- return result
-
- } else if (chr === '\\') {
- if (position >= length) fail()
- chr = input[position++]
-
- if (unescapeMap[chr] && (json5 || (chr != 'v' && chr != "'"))) {
- result += unescapeMap[chr]
-
- } else if (json5 && isLineTerminator(chr)) {
- // line continuation
- newline(chr)
-
- } else if (chr === 'u' || (chr === 'x' && json5)) {
- // unicode/character escape sequence
- var off = chr === 'u' ? 4 : 2
-
- // validation for \uXXXX
-          for (var i=0; i<off; i++) {
-            if (position >= length) fail()
- if (!isHexDigit(input[position])) fail('Bad escape sequence')
- position++
- }
-
- result += String.fromCharCode(parseInt(input.substr(position-off, off), 16))
- } else if (json5 && isOctDigit(chr)) {
- if (chr < '4' && isOctDigit(input[position]) && isOctDigit(input[position+1])) {
- // three-digit octal
- var digits = 3
- } else if (isOctDigit(input[position])) {
- // two-digit octal
- var digits = 2
- } else {
- var digits = 1
- }
- position += digits - 1
- result += String.fromCharCode(parseInt(input.substr(position-digits, digits), 8))
- /*if (!isOctDigit(input[position])) {
- // \0 is allowed still
- result += '\0'
- } else {
- fail('Octal literals are not supported')
- }*/
-
- } else if (json5) {
- // \X -> x
- result += chr
-
- } else {
- position--
- fail()
- }
-
- } else if (isLineTerminator(chr)) {
- fail()
-
- } else {
- if (!json5 && chr.charCodeAt(0) < 32) {
- position--
- fail('Unexpected control character')
- }
-
- // SourceCharacter but not one of " or \ or LineTerminator
- result += chr
- }
- }
-
- fail()
- }
-
- skipWhiteSpace()
- var return_value = parseGeneric()
- if (return_value !== undefined || position < length) {
- skipWhiteSpace()
-
- if (position >= length) {
- if (typeof(options.reviver) === 'function') {
- return_value = options.reviver.call(null, '', return_value)
- }
- return return_value
- } else {
- fail()
- }
-
- } else {
- if (position) {
- fail('No data, only a whitespace')
- } else {
- fail('No data, empty input')
- }
- }
-}
-
-/*
- * parse(text, options)
- * or
- * parse(text, reviver)
- *
- * where:
- * text - string
- * options - object
- * reviver - function
- */
-module.exports.parse = function parseJSON(input, options) {
- // support legacy functions
- if (typeof(options) === 'function') {
- options = {
- reviver: options
- }
- }
-
- if (input === undefined) {
- // parse(stringify(x)) should be equal x
- // with JSON functions it is not 'cause of undefined
- // so we're fixing it
- return undefined
- }
-
- // JSON.parse compat
- if (typeof(input) !== 'string') input = String(input)
- if (options == null) options = {}
- if (options.reserved_keys == null) options.reserved_keys = 'ignore'
-
- if (options.reserved_keys === 'throw' || options.reserved_keys === 'ignore') {
- if (options.null_prototype == null) {
- options.null_prototype = true
- }
- }
-
- try {
- return parse(input, options)
- } catch(err) {
- // jju is a recursive parser, so JSON.parse("{{{{{{{") could blow up the stack
- //
- // this catch is used to skip all those internal calls
- if (err instanceof SyntaxError && err.row != null && err.column != null) {
- var old_err = err
- err = SyntaxError(old_err.message)
- err.column = old_err.column
- err.row = old_err.row
- }
- throw err
- }
-}
-
-module.exports.tokenize = function tokenizeJSON(input, options) {
- if (options == null) options = {}
-
- options._tokenize = function(smth) {
- if (options._addstack) smth.stack.unshift.apply(smth.stack, options._addstack)
- tokens.push(smth)
- }
-
- var tokens = []
- tokens.data = module.exports.parse(input, options)
- return tokens
-}
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/stringify.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/stringify.js
deleted file mode 100644
index e76af2efe8e06a..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/stringify.js
+++ /dev/null
@@ -1,382 +0,0 @@
-/*
- * Author: Alex Kocharin
- * GIT: https://github.com/rlidwka/jju
- * License: WTFPL, grab your copy here: http://www.wtfpl.net/txt/copying/
- */
-
-var Uni = require('./unicode')
-
-// Fix Function#name on browsers that do not support it (IE)
-// http://stackoverflow.com/questions/6903762/function-name-not-supported-in-ie
-if (!(function f(){}).name) {
- Object.defineProperty((function(){}).constructor.prototype, 'name', {
- get: function() {
- var name = this.toString().match(/^\s*function\s*(\S*)\s*\(/)[1]
- // For better performance only parse once, and then cache the
- // result through a new accessor for repeated access.
- Object.defineProperty(this, 'name', { value: name })
- return name
- }
- })
-}
-
-var special_chars = {
- 0: '\\0', // this is not an octal literal
- 8: '\\b',
- 9: '\\t',
- 10: '\\n',
- 11: '\\v',
- 12: '\\f',
- 13: '\\r',
- 92: '\\\\',
-}
-
-// for oddballs
-var hasOwnProperty = Object.prototype.hasOwnProperty
-
-// some people escape those, so I'd copy this to be safe
-var escapable = /[\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/
-
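-// recursive serializer behind stringify(); `recursiveLvl` tracks indentation
-// depth and `currentKey` is passed to replacer/toJSON, as in JSON.stringify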
-function _stringify(object, options, recursiveLvl, currentKey) {
- var json5 = (options.mode === 'json5' || !options.mode)
- /*
- * Opinionated decision warning:
- *
- * Objects are serialized in the following form:
- * { type: 'Class', data: DATA }
- *
- * Class is supposed to be a function, and new Class(DATA) is
- * supposed to be equivalent to the original value
- */
- /*function custom_type() {
- return stringify({
- type: object.constructor.name,
- data: object.toString()
- })
- }*/
-
-  // if add, it's an internal indentation, so we add 1 level and an eol
- // if !add, it's an ending indentation, so we just indent
- function indent(str, add) {
- var prefix = options._prefix ? options._prefix : ''
- if (!options.indent) return prefix + str
- var result = ''
- var count = recursiveLvl + (add || 0)
- for (var i=0; i 0) {
- if (!Uni.isIdentifierPart(key[i]))
- return _stringify_str(key)
-
- } else {
- if (!Uni.isIdentifierStart(key[i]))
- return _stringify_str(key)
- }
-
- var chr = key.charCodeAt(i)
-
- if (options.ascii) {
- if (chr < 0x80) {
- result += key[i]
-
- } else {
- result += '\\u' + ('0000' + chr.toString(16)).slice(-4)
- }
-
- } else {
- if (escapable.exec(key[i])) {
- result += '\\u' + ('0000' + chr.toString(16)).slice(-4)
-
- } else {
- result += key[i]
- }
- }
- }
-
- return result
- }
-
- function _stringify_str(key) {
- var quote = options.quote
- var quoteChr = quote.charCodeAt(0)
-
- var result = ''
- for (var i=0; i= 8 && chr <= 13 && (json5 || chr !== 11)) {
- result += special_chars[chr]
- } else if (json5) {
- result += '\\x0' + chr.toString(16)
- } else {
- result += '\\u000' + chr.toString(16)
- }
-
- } else if (chr < 0x20) {
- if (json5) {
- result += '\\x' + chr.toString(16)
- } else {
- result += '\\u00' + chr.toString(16)
- }
-
- } else if (chr >= 0x20 && chr < 0x80) {
- // ascii range
- if (chr === 47 && i && key[i-1] === '<') {
-          // escaping slashes in </script>
- result += '\\' + key[i]
-
- } else if (chr === 92) {
- result += '\\\\'
-
- } else if (chr === quoteChr) {
- result += '\\' + quote
-
- } else {
- result += key[i]
- }
-
- } else if (options.ascii || Uni.isLineTerminator(key[i]) || escapable.exec(key[i])) {
- if (chr < 0x100) {
- if (json5) {
- result += '\\x' + chr.toString(16)
- } else {
- result += '\\u00' + chr.toString(16)
- }
-
- } else if (chr < 0x1000) {
- result += '\\u0' + chr.toString(16)
-
- } else if (chr < 0x10000) {
- result += '\\u' + chr.toString(16)
-
- } else {
- throw Error('weird codepoint')
- }
- } else {
- result += key[i]
- }
- }
- return quote + result + quote
- }
-
- function _stringify_object() {
- if (object === null) return 'null'
- var result = []
- , len = 0
- , braces
-
- if (Array.isArray(object)) {
- braces = '[]'
- for (var i=0; i options._splitMax - recursiveLvl * options.indent.length || len > options._splitMin) ) {
- // remove trailing comma in multiline if asked to
- if (options.no_trailing_comma && result.length) {
- result[result.length-1] = result[result.length-1].substring(0, result[result.length-1].length-1)
- }
-
- var innerStuff = result.map(function(x) {return indent(x, 1)}).join('')
- return braces[0]
- + (options.indent ? '\n' : '')
- + innerStuff
- + indent(braces[1])
- } else {
- // always remove trailing comma in one-lined arrays
- if (result.length) {
- result[result.length-1] = result[result.length-1].substring(0, result[result.length-1].length-1)
- }
-
- var innerStuff = result.join(options.indent ? ' ' : '')
- return braces[0]
- + innerStuff
- + braces[1]
- }
- }
-
- function _stringify_nonobject(object) {
- if (typeof(options.replacer) === 'function') {
- object = options.replacer.call(null, currentKey, object)
- }
-
- switch(typeof(object)) {
- case 'string':
- return _stringify_str(object)
-
- case 'number':
- if (object === 0 && 1/object < 0) {
- // Opinionated decision warning:
- //
- // I want cross-platform negative zero in all js engines
- // I know they're equal, but why lose that tiny bit of
- // information needlessly?
- return '-0'
- }
- if (!json5 && !Number.isFinite(object)) {
- // json don't support infinity (= sucks)
- return 'null'
- }
- return object.toString()
-
- case 'boolean':
- return object.toString()
-
- case 'undefined':
- return undefined
-
- case 'function':
-// return custom_type()
-
- default:
- // fallback for something weird
- return JSON.stringify(object)
- }
- }
-
- if (options._stringify_key) {
- return _stringify_key(object)
- }
-
- if (typeof(object) === 'object') {
- if (object === null) return 'null'
-
- var str
- if (typeof(str = object.toJSON5) === 'function' && options.mode !== 'json') {
- object = str.call(object, currentKey)
-
- } else if (typeof(str = object.toJSON) === 'function') {
- object = str.call(object, currentKey)
- }
-
- if (object === null) return 'null'
- if (typeof(object) !== 'object') return _stringify_nonobject(object)
-
- if (object.constructor === Number || object.constructor === Boolean || object.constructor === String) {
- object = object.valueOf()
- return _stringify_nonobject(object)
-
- } else if (object.constructor === Date) {
- // only until we can't do better
- return _stringify_nonobject(object.toISOString())
-
- } else {
- if (typeof(options.replacer) === 'function') {
- object = options.replacer.call(null, currentKey, object)
- if (typeof(object) !== 'object') return _stringify_nonobject(object)
- }
-
- return _stringify_object(object)
- }
- } else {
- return _stringify_nonobject(object)
- }
-}
-
-/*
- * stringify(value, options)
- * or
- * stringify(value, replacer, space)
- *
- * where:
- * value - anything
- * options - object
- * replacer - function or array
- * space - boolean or number or string
- */
-module.exports.stringify = function stringifyJSON(object, options, _space) {
- // support legacy syntax
- if (typeof(options) === 'function' || Array.isArray(options)) {
- options = {
- replacer: options
- }
- } else if (typeof(options) === 'object' && options !== null) {
- // nothing to do
- } else {
- options = {}
- }
- if (_space != null) options.indent = _space
-
- if (options.indent == null) options.indent = '\t'
- if (options.quote == null) options.quote = "'"
- if (options.ascii == null) options.ascii = false
- if (options.mode == null) options.mode = 'json5'
-
- if (options.mode === 'json' || options.mode === 'cjson') {
- // json only supports double quotes (= sucks)
- options.quote = '"'
-
- // json don't support trailing commas (= sucks)
- options.no_trailing_comma = true
-
- // json don't support unquoted property names (= sucks)
- options.quote_keys = true
- }
-
- // why would anyone use such objects?
- if (typeof(options.indent) === 'object') {
- if (options.indent.constructor === Number
- || options.indent.constructor === Boolean
- || options.indent.constructor === String)
- options.indent = options.indent.valueOf()
- }
-
- // gap is capped at 10 characters
- if (typeof(options.indent) === 'number') {
- if (options.indent >= 0) {
- options.indent = Array(Math.min(~~options.indent, 10) + 1).join(' ')
- } else {
- options.indent = false
- }
- } else if (typeof(options.indent) === 'string') {
- options.indent = options.indent.substr(0, 10)
- }
-
- if (options._splitMin == null) options._splitMin = 50
- if (options._splitMax == null) options._splitMax = 70
-
- return _stringify(object, options, 0, '')
-}
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/unicode.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/unicode.js
deleted file mode 100644
index 1a29143c2d6b1c..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/unicode.js
+++ /dev/null
@@ -1,71 +0,0 @@
-
-// This is autogenerated with esprima tools, see:
-// https://github.com/ariya/esprima/blob/master/esprima.js
-//
-// PS: oh God, I hate Unicode
-
-// ECMAScript 5.1/Unicode v6.3.0 NonAsciiIdentifierStart:
-
-var Uni = module.exports
-
-module.exports.isWhiteSpace = function isWhiteSpace(x) {
- // section 7.2, table 2
- return x === '\u0020'
- || x === '\u00A0'
- || x === '\uFEFF' // <-- this is not a Unicode WS, only a JS one
- || (x >= '\u0009' && x <= '\u000D') // 9 A B C D
-
- // + whitespace characters from unicode, category Zs
- || x === '\u1680'
- || x === '\u180E'
- || (x >= '\u2000' && x <= '\u200A') // 0 1 2 3 4 5 6 7 8 9 A
- || x === '\u2028'
- || x === '\u2029'
- || x === '\u202F'
- || x === '\u205F'
- || x === '\u3000'
-}
-
-module.exports.isWhiteSpaceJSON = function isWhiteSpaceJSON(x) {
- return x === '\u0020'
- || x === '\u0009'
- || x === '\u000A'
- || x === '\u000D'
-}
-
-module.exports.isLineTerminator = function isLineTerminator(x) {
- // ok, here is the part when JSON is wrong
- // section 7.3, table 3
- return x === '\u000A'
- || x === '\u000D'
- || x === '\u2028'
- || x === '\u2029'
-}
-
-module.exports.isLineTerminatorJSON = function isLineTerminatorJSON(x) {
- return x === '\u000A'
- || x === '\u000D'
-}
-
-module.exports.isIdentifierStart = function isIdentifierStart(x) {
- return x === '$'
- || x === '_'
- || (x >= 'A' && x <= 'Z')
- || (x >= 'a' && x <= 'z')
- || (x >= '\u0080' && Uni.NonAsciiIdentifierStart.test(x))
-}
-
-module.exports.isIdentifierPart = function isIdentifierPart(x) {
- return x === '$'
- || x === '_'
- || (x >= 'A' && x <= 'Z')
- || (x >= 'a' && x <= 'z')
- || (x >= '0' && x <= '9') // <-- addition to Start
- || (x >= '\u0080' && Uni.NonAsciiIdentifierPart.test(x))
-}
-
-module.exports.NonAsciiIdentifierStart = /[\xAA\xB5\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u048A-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0\u08A2-\u08AC\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0977\u0979-\u097F\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D\u0C58\u0C59\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D\u0D4E\u0D60\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F0\u1700-\u170C\u170E-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191C\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19C1-\u19C7\u1A00-\u1A16\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u2E2F\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303C\u3041-\u3096\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B\uA640-\uA66E\uA67F-\uA697\uA6A0-\uA6EF\uA717-\uA71F\uA722-\uA788\uA78B-\uA78E\uA790-\uA793\uA7A0-\uA7AA\uA7F8-\uA801\uA803-\uA805\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uAA00-\uAA28\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA80-\uAAAF\uAAB1\uAAB5\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E\uABC0-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC]/
-
-// ECMAScript 5.1/Unicode v6.3.0 NonAsciiIdentifierPart:
-
-module.exports.NonAsciiIdentifierPart = /[\xAA\xB5\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0300-\u0374\u0376\u0377\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u0483-\u0487\u048A-\u0527\u0531-\u0556\u0559\u0561-\u0587\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u05D0-\u05EA\u05F0-\u05F2\u0610-\u061A\u0620-\u0669\u066E-\u06D3\u06D5-\u06DC\u06DF-\u06E8\u06EA-\u06FC\u06FF\u0710-\u074A\u074D-\u07B1\u07C0-\u07F5\u07FA\u0800-\u082D\u0840-\u085B\u08A0\u08A2-\u08AC\u08E4-\u08FE\u0900-\u0963\u0966-\u096F\u0971-\u0977\u0979-\u097F\u0981-\u0983\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BC-\u09C4\u09C7\u09C8\u09CB-\u09CE\u09D7\u09DC\u09DD\u09DF-\u09E3\u09E6-\u09F1\u0A01-\u0A03\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A59-\u0A5C\u0A5E\u0A66-\u0A75\u0A81-\u0A83\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABC-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AD0\u0AE0-\u0AE3\u0AE6-\u0AEF\u0B01-\u0B03\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3C-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B5C\u0B5D\u0B5F-\u0B63\u0B66-\u0B6F\u0B71\u0B82\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD0\u0BD7\u0BE6-\u0BEF\u0C01-\u0C03\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C58\u0C59\u0C60-\u0C63\u0C66-\u0C6F\u0C82\u0C83\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBC-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CDE\u0CE0-\u0CE3\u0CE6-\u0CEF\u0CF1\u0CF2\u0D02\u0D03\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D-\u0D44\u0D46-\u0D48\u0D4A-\u0D4E\u0D57\u0D60-\u0D63\u0D66-\u0D6F\u0D7A-\u0D7F\u0D82\u0D83\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E01-\u0E3A\u0E40-\u0E4E\u0E50-\u0E59\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB9\u0EBB-\u0EBD\u0EC0-\u0EC4\u0EC6\u0EC8-\u0ECD\u0ED0-\u0ED9\u0EDC-\u0EDF\u0F00\u0F18\u0F19\u0F20-\u0F29\u0F35\u0F37\u0F39\u0F3E-\u0F47\u0F49-\u0F6C\u0F71-\u0F84\u0F86-\u0F97\u0F99-\u0FBC\u0FC6\u1000-\u1049\u1050-\u109D\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u135D-\u135F\u1380-\u138F\u13A0-\u13F4\u1401-\u166C\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F0\u1700-\u170C\u170E-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176C\u176E-\u1770\u1772\u1773\u1780-\u17D3\u17D7\u17DC\u17DD\u17E0-\u17E9\u180B-\u180D\u1810-\u1819\u1820-\u1877\u1880-\u18AA\u18B0-\u18F5\u1900-\u191C\u1920-\u192B\u1930-\u193B\u1946-\u196D\u1970-\u1974\u1980-\u19AB\u19B0-\u19C9\u19D0-\u19D9\u1A00-\u1A1B\u1A20-\u1A5E\u1A60-\u1A7C\u1A7F-\u1A89\u1A90-\u1A99\u1AA7\u1B00-\u1B4B\u1B50-\u1B59\u1B6B-\u1B73\u1B80-\u1BF3\u1C00-\u1C37\u1C40-\u1C49\u1C4D-\u1C7D\u1CD0-\u1CD2\u1CD4-\u1CF6\u1D00-\u1DE6\u1DFC-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u200C\u200D\u203F\u2040\u2054\u2071\u207F\u2090-\u209C\u20D0-\u20DC\u20E1\u20E5-\u20F0\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D7F-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u2DE0-\u2DFF\u2E2F\u3005-\u3007\u3021-\u302F\u3031-\u3035\u3038-\u303C\u3041-\u3096\u3099\u309A\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA62B\uA640-\uA66F\uA674-\uA67D\uA67F-\uA697\uA69F-\uA6F1\uA717-\uA71F\uA722-\uA788\uA78B-\uA78E\uA790-\uA793\uA7A0-\uA7AA\uA7F8-\uA827\uA840-\uA873\uA880-\uA8C4\uA8D0-\uA8D9\uA8E0-\uA8F7\uA8FB\uA900-\uA92D\uA930-\uA953\uA960-\uA97C\uA980-\uA9C0\uA9CF-\uA9D9\uAA00-\uAA36\uAA40-\uAA4D\uAA50-\uAA59\uAA60-\uAA76\uAA7A\uAA7B\uAA80-\uAAC2\uAADB-\uAADD\uAAE0-\uAAEF\uAAF2-\uAAF6\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E\uABC0-\uABEA\uABEC\uABED\uABF0-\uABF9\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE00-\uFE0F\uFE20-\uFE26\uFE33\uFE34\uFE4D-\uFE4F\uFE70-\uFE74\uFE76-\uFEFC\uFF10-\uFF19\uFF21-\uFF3A\uFF3F\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC]/
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/utils.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/utils.js
deleted file mode 100644
index dd4752c73a4078..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/lib/utils.js
+++ /dev/null
@@ -1,45 +0,0 @@
-var FS = require('fs')
-var jju = require('../')
-
-// this function registers json5 extension, so you
-// can do `require("./config.json5")` kind of thing
-module.exports.register = function() {
- var r = require, e = 'extensions'
- r[e]['.json5'] = function(m, f) {
- /*eslint no-sync:0*/
- m.exports = jju.parse(FS.readFileSync(f, 'utf8'))
- }
-}
-
-// this function monkey-patches JSON.parse, so it
-// will return an exact position of error in case
-// of parse failure
-module.exports.patch_JSON_parse = function() {
- var _parse = JSON.parse
- JSON.parse = function(text, rev) {
- try {
- return _parse(text, rev)
- } catch(err) {
- // this call should always throw
- require('jju').parse(text, {
- mode: 'json',
- legacy: true,
- reviver: rev,
- reserved_keys: 'replace',
- null_prototype: false,
- })
-
- // if it didn't throw, but original parser did,
- // this is an error in this library and should be reported
- throw err
- }
- }
-}
-
-// this function is an express/connect middleware
-// that accepts uploads in application/json5 format
-module.exports.middleware = function() {
- return function(req, res, next) {
- throw Error('this function is removed, use express-json5 instead')
- }
-}
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/package.json
deleted file mode 100644
index 8b01adc877b3fb..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/package.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
- "_from": "jju@^1.1.0",
- "_id": "jju@1.3.0",
- "_inBundle": false,
- "_integrity": "sha1-2t2e8BkkvHKLA/L3l5vb1i96Kqo=",
- "_location": "/pacote/make-fetch-happen/node-fetch-npm/json-parse-helpfulerror/jju",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "jju@^1.1.0",
- "name": "jju",
- "escapedName": "jju",
- "rawSpec": "^1.1.0",
- "saveSpec": null,
- "fetchSpec": "^1.1.0"
- },
- "_requiredBy": [
- "/pacote/make-fetch-happen/node-fetch-npm/json-parse-helpfulerror"
- ],
- "_resolved": "https://registry.npmjs.org/jju/-/jju-1.3.0.tgz",
- "_shasum": "dadd9ef01924bc728b03f2f7979bdbd62f7a2aaa",
- "_spec": "jju@^1.1.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror",
- "author": {
- "name": "Alex Kocharin",
- "email": "alex@kocharin.ru"
- },
- "bugs": {
- "url": "https://github.com/rlidwka/jju/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "a set of utilities to work with JSON / JSON5 documents",
- "devDependencies": {
- "eslint": "~0.4.2",
- "js-yaml": ">=3.1.0",
- "mocha": ">=1.21.0"
- },
- "homepage": "http://rlidwka.github.io/jju/",
- "keywords": [
- "json",
- "json5",
- "parser",
- "serializer",
- "data"
- ],
- "license": {
- "type": "WTFPL",
- "url": "http://www.wtfpl.net/txt/copying/"
- },
- "name": "jju",
- "publishConfig": {
- "registry": "https://registry.npmjs.org/"
- },
- "repository": {
- "type": "git",
- "url": "git://github.com/rlidwka/jju.git"
- },
- "scripts": {
- "lint": "eslint -c ./.eslint.yaml ./lib",
- "test": "mocha test/*.js"
- },
- "version": "1.3.0"
-}
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/package.yaml b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/package.yaml
deleted file mode 100644
index 828163ddc4524c..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/node_modules/jju/package.yaml
+++ /dev/null
@@ -1,45 +0,0 @@
-# use "yapm install ." if you're installing this from git repository
-
-# "jju" stands for "json/json5 utils"
-name: jju
-
-version: 1.3.0
-description: a set of utilities to work with JSON / JSON5 documents
-
-author:
- name: Alex Kocharin
- email: alex@kocharin.ru
-
-repository:
- type: git
- url: git://github.com/rlidwka/jju
-
-bugs:
- url: https://github.com/rlidwka/jju/issues
-
-homepage: http://rlidwka.github.io/jju/
-
-devDependencies:
- mocha: '>=1.21.0'
- js-yaml: '>=3.1.0'
-
- # linting tools
- eslint: '~0.4.2'
-
-scripts:
- test: 'mocha test/*.js'
- lint: 'eslint -c ./.eslint.yaml ./lib'
-
-keywords:
- - json
- - json5
- - parser
- - serializer
- - data
-
-publishConfig:
- registry: https://registry.npmjs.org/
-
-license:
- type: WTFPL
- url: http://www.wtfpl.net/txt/copying/
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/package.json
deleted file mode 100644
index 6c723ae8efa256..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/package.json
+++ /dev/null
@@ -1,63 +0,0 @@
-{
- "_from": "json-parse-helpfulerror@^1.0.3",
- "_id": "json-parse-helpfulerror@1.0.3",
- "_inBundle": false,
- "_integrity": "sha1-E/FM4C7tTpgSl7ZOueO5MuLdE9w=",
- "_location": "/pacote/make-fetch-happen/node-fetch-npm/json-parse-helpfulerror",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "json-parse-helpfulerror@^1.0.3",
- "name": "json-parse-helpfulerror",
- "escapedName": "json-parse-helpfulerror",
- "rawSpec": "^1.0.3",
- "saveSpec": null,
- "fetchSpec": "^1.0.3"
- },
- "_requiredBy": [
- "/pacote/make-fetch-happen/node-fetch-npm"
- ],
- "_resolved": "https://registry.npmjs.org/json-parse-helpfulerror/-/json-parse-helpfulerror-1.0.3.tgz",
- "_shasum": "13f14ce02eed4e981297b64eb9e3b932e2dd13dc",
- "_spec": "json-parse-helpfulerror@^1.0.3",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm",
- "author": {
- "name": "Sam Mikes",
- "email": "smikes@cubane.com"
- },
- "bugs": {
- "url": "https://github.com/smikes/json-parse-helpfulerror/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "jju": "^1.1.0"
- },
- "deprecated": false,
- "description": "A drop-in replacement for JSON.parse that uses `jju` to give helpful errors",
- "devDependencies": {
- "code": "^1.2.1",
- "jslint": "^0.7.1",
- "lab": "^5.1.1"
- },
- "homepage": "https://github.com/smikes/json-parse-helpfulerror",
- "keywords": [
- "json",
- "parse",
- "line",
- "doublequote",
- "error"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "json-parse-helpfulerror",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/smikes/json-parse-helpfulerror.git"
- },
- "scripts": {
- "lint": "jslint --edition=latest --terse *.js",
- "test": "lab -c"
- },
- "version": "1.0.3"
-}
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/test/test.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/test/test.js
deleted file mode 100644
index fca458ac080f60..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/node_modules/json-parse-helpfulerror/test/test.js
+++ /dev/null
@@ -1,32 +0,0 @@
-var Code = require('code'),
- Lab = require('lab'),
- lab = Lab.script(),
- jph = require('..'); // 'json-parse-helpfulerror'
-
-exports.lab = lab;
-
-lab.test('can parse', function (done) {
- var o = jph.parse('{"foo": "bar"}');
-
- Code.expect(o.foo).to.equal('bar');
- done();
-});
-
-lab.test('helpful error for bad JSON', function (done) {
-
- var bad = "{'foo': 'bar'}";
-
- Code.expect(function () { JSON.parse(bad) }).to.throw();
-
- Code.expect(function () { jph.parse(bad) }).to.throw(SyntaxError, "Unexpected token '\\'' at 1:2\n" + bad + '\n ^');
-
- done();
-});
-
-lab.test('fails if reviver throws', function (done) {
- function badReviver() { throw new ReferenceError('silly'); }
-
- Code.expect(function () { jph.parse('3', badReviver) }).to.throw(ReferenceError, 'silly');
-
- done();
-});
\ No newline at end of file
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/package.json
index b98cbe1997616a..fb0b79a0d42888 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/package.json
@@ -1,26 +1,26 @@
{
- "_from": "node-fetch-npm@^2.0.0",
- "_id": "node-fetch-npm@2.0.1",
+ "_from": "node-fetch-npm@^2.0.1",
+ "_id": "node-fetch-npm@2.0.2",
"_inBundle": false,
- "_integrity": "sha512-W3onhopST5tqpX0/MGSL47pDQLLKobNR83AvkiOWQKaw54h+uYUfzeLAxCiyhWlUOiuI+GIb4O9ojLaAFlhCCA==",
+ "_integrity": "sha512-nJIxm1QmAj4v3nfCvEeCrYSoVwXyxLnaPBK5W1W5DGEJwjlKuC2VEUycGw5oxk+4zZahRrB84PUJJgEmhFTDFw==",
"_location": "/pacote/make-fetch-happen/node-fetch-npm",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
- "raw": "node-fetch-npm@^2.0.0",
+ "raw": "node-fetch-npm@^2.0.1",
"name": "node-fetch-npm",
"escapedName": "node-fetch-npm",
- "rawSpec": "^2.0.0",
+ "rawSpec": "^2.0.1",
"saveSpec": null,
- "fetchSpec": "^2.0.0"
+ "fetchSpec": "^2.0.1"
},
"_requiredBy": [
"/pacote/make-fetch-happen"
],
- "_resolved": "https://registry.npmjs.org/node-fetch-npm/-/node-fetch-npm-2.0.1.tgz",
- "_shasum": "4dd3355ce526c01bc5ab29ccdf48352dc8a79465",
- "_spec": "node-fetch-npm@^2.0.0",
+ "_resolved": "https://registry.npmjs.org/node-fetch-npm/-/node-fetch-npm-2.0.2.tgz",
+ "_shasum": "7258c9046182dca345b4208eda918daf33697ff7",
+ "_spec": "node-fetch-npm@^2.0.1",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen",
"author": {
"name": "David Frank"
@@ -41,8 +41,8 @@
],
"dependencies": {
"encoding": "^0.1.11",
- "json-parse-helpfulerror": "^1.0.3",
- "safe-buffer": "^5.0.1"
+ "json-parse-better-errors": "^1.0.0",
+ "safe-buffer": "^5.1.1"
},
"deprecated": false,
"description": "An npm cli-oriented fork of the excellent node-fetch",
@@ -50,21 +50,21 @@
"chai": "^3.5.0",
"chai-as-promised": "^6.0.0",
"chai-iterator": "^1.1.1",
- "chai-string": "^1.3.0",
+ "chai-string": "^1.4.0",
"codecov": "^1.0.1",
"cross-env": "^3.1.4",
- "form-data": ">=1.0.0",
+ "form-data": "^2.2.0",
"is-builtin-module": "^1.0.0",
- "mocha": "^3.1.2",
+ "mocha": "^3.5.0",
"nyc": "^10.3.2",
"parted": "^0.1.1",
- "promise": "^7.1.1",
+ "promise": "^7.3.1",
"resumer": "0.0.0",
- "standard": "^10.0.2",
- "standard-version": "^4.0.0",
- "weallbehave": "^1.0.3",
+ "standard": "^10.0.3",
+ "standard-version": "^4.2.0",
+ "weallbehave": "^1.2.0",
"weallcontribute": "^1.0.8",
- "whatwg-url": "^4.0.0"
+ "whatwg-url": "^4.8.0"
},
"engines": {
"node": ">=4"
@@ -96,5 +96,5 @@
"update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
"update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
},
- "version": "2.0.1"
+ "version": "2.0.2"
}
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/src/body.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/src/body.js
index 2b009b7cfc0293..9e7481857aea24 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/src/body.js
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/node-fetch-npm/src/body.js
@@ -11,7 +11,7 @@ const Buffer = require('safe-buffer').Buffer
const Blob = require('./blob.js')
const BUFFER = Blob.BUFFER
const convert = require('encoding').convert
-const parseJson = require('json-parse-helpfulerror').parse
+const parseJson = require('json-parse-better-errors')
const FetchError = require('./fetch-error.js')
const Stream = require('stream')
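The dependency swap above is the substantive change in this file: `json-parse-helpfulerror` exposed its parser as a `.parse` property, whereas `json-parse-better-errors` exports the parse function directly, which is why the `require` call loses the `.parse`. A sketch of the call-site difference (illustrative only):

```js
// Old: the parser hangs off a .parse property
const parseJsonOld = require('json-parse-helpfulerror').parse

// New: the module itself is the parse function
const parseJson = require('json-parse-better-errors')

// Both behave like JSON.parse, but throw SyntaxErrors that point at the
// offending position in the input on failure
parseJson('{"ok": true}')
```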
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/History.md b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/History.md
index b79a539629609a..a81fb17a97367e 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/History.md
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/History.md
@@ -1,4 +1,9 @@
+4.1.1 / 2017-07-20
+==================
+
+ * Correct `https.request()` with a String (#9)
+
4.1.0 / 2017-06-26
==================
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/package.json
index c0b4c066a2e166..561a1a84e01705 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/package.json
@@ -1,8 +1,8 @@
{
"_from": "agent-base@^4.0.1",
- "_id": "agent-base@4.1.0",
+ "_id": "agent-base@4.1.1",
"_inBundle": false,
- "_integrity": "sha1-IOF0Ac1Js8B2v1akvGxbQ2/6jVU=",
+ "_integrity": "sha512-yWGUUmCZD/33IRjG2It94PzixT8lX+47Uq8fjmd0cgQWITCMrJuXFaVIMnGDmDnZGGKAGdwTx8UGeU8lMR2urA==",
"_location": "/pacote/make-fetch-happen/socks-proxy-agent/agent-base",
"_phantomChildren": {},
"_requested": {
@@ -18,8 +18,8 @@
"_requiredBy": [
"/pacote/make-fetch-happen/socks-proxy-agent"
],
- "_resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.1.0.tgz",
- "_shasum": "20e17401cd49b3c076bf56a4bc6c5b436ffa8d55",
+ "_resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.1.1.tgz",
+ "_shasum": "92d8a4fc2524a3b09b3666a33b6c97960f23d6a4",
"_spec": "agent-base@^4.0.1",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent",
"author": {
@@ -61,5 +61,5 @@
"scripts": {
"test": "mocha --reporter spec"
},
- "version": "4.1.0"
+ "version": "4.1.1"
}
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/patch-core.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/patch-core.js
index 05cbaa1e70a56d..a3f7bc6160c156 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/patch-core.js
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/patch-core.js
@@ -11,8 +11,8 @@ const https = require('https');
*/
https.request = (function(request) {
return function(_options, cb) {
- let options
- if (typeof options === 'string') {
+ let options;
+ if (typeof _options === 'string') {
options = url.parse(_options);
} else {
options = Object.assign({}, _options);
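This is the `https.request()`-with-a-String fix noted in the History.md entry above: the old code tested `typeof options` immediately after declaring `options`, so the check was always `'undefined'` and a string URL was never run through `url.parse()`. A standalone illustration of the failure mode (not part of the patch):

```js
// The pre-4.1.1 bug, reduced to its essentials:
let options
console.log(typeof options === 'string') // always false: options is undefined

// A string URL therefore fell through to the else branch, and
// Object.assign({}, 'https://example.com/') copies indexed characters
// ({0: 'h', 1: 't', ...}) instead of producing host/path fields.
const broken = Object.assign({}, 'https://example.com/')
console.log(broken[0]) // 'h'
```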
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/test/test.js b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/test/test.js
index 43217d4273a73d..23814e2c326962 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/test/test.js
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/agent-base/test/test.js
@@ -443,6 +443,17 @@ describe('"https" module', function() {
done();
});
+ it('should work with a String URL', function(done) {
+ var endpoint = 'https://127.0.0.1:' + port;
+ var req = https.get(endpoint);
+
+ // it's gonna error out since `rejectUnauthorized` is not being passed in
+ req.on('error', function(err) {
+ assert.equal(err.code, 'DEPTH_ZERO_SELF_SIGNED_CERT');
+ done();
+ });
+ });
+
it('should work for basic HTTPS requests', function(done) {
var called = false;
var agent = new Agent(function(req, opts, fn) {
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/socks/node_modules/ip/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/socks/node_modules/ip/package.json
index 851da5fadc1f45..4ea7f796283592 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/socks/node_modules/ip/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/socks/node_modules/ip/package.json
@@ -1,6 +1,7 @@
{
"_from": "ip@^1.1.4",
"_id": "ip@1.1.5",
+ "_inBundle": false,
"_integrity": "sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=",
"_location": "/pacote/make-fetch-happen/socks-proxy-agent/socks/ip",
"_phantomChildren": {},
@@ -19,19 +20,16 @@
],
"_resolved": "https://registry.npmjs.org/ip/-/ip-1.1.5.tgz",
"_shasum": "bdded70114290828c0a039e72ef25f5aaec4354a",
- "_shrinkwrap": null,
"_spec": "ip@^1.1.4",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/socks",
"author": {
"name": "Fedor Indutny",
"email": "fedor@indutny.com"
},
- "bin": null,
"bugs": {
"url": "https://github.com/indutny/node-ip/issues"
},
"bundleDependencies": false,
- "dependencies": {},
"deprecated": false,
"description": "[![](https://badge.fury.io/js/ip.svg)](https://www.npmjs.com/package/ip)",
"devDependencies": {
@@ -43,8 +41,6 @@
"license": "MIT",
"main": "lib/ip",
"name": "ip",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
"url": "git+ssh://git@github.com/indutny/node-ip.git"
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/socks/node_modules/smart-buffer/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/socks/node_modules/smart-buffer/package.json
index dd6cec46984ace..82fc8ae532de4c 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/socks/node_modules/smart-buffer/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/socks/node_modules/smart-buffer/package.json
@@ -1,6 +1,7 @@
{
"_from": "smart-buffer@^1.0.13",
"_id": "smart-buffer@1.1.15",
+ "_inBundle": false,
"_integrity": "sha1-fxFLW2X6s+KjWqd1uxLw0cZJvxY=",
"_location": "/pacote/make-fetch-happen/socks-proxy-agent/socks/smart-buffer",
"_phantomChildren": {},
@@ -19,13 +20,11 @@
],
"_resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-1.1.15.tgz",
"_shasum": "7f114b5b65fab3e2a35aa775bb12f0d1c649bf16",
- "_shrinkwrap": null,
"_spec": "smart-buffer@^1.0.13",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/socks",
"author": {
"name": "Josh Glazebrook"
},
- "bin": null,
"bugs": {
"url": "https://github.com/JoshGlazebrook/smart-buffer/issues"
},
@@ -57,8 +56,6 @@
"license": "MIT",
"main": "lib/smart-buffer.js",
"name": "smart-buffer",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
"url": "git+https://github.com/JoshGlazebrook/smart-buffer.git"
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/socks/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/socks/package.json
index a4ccc5633377cc..8d639ef6b4d94b 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/socks/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent/node_modules/socks/package.json
@@ -1,31 +1,30 @@
{
- "_from": "socks@~1.1.5",
+ "_from": "socks@^1.1.10",
"_id": "socks@1.1.10",
+ "_inBundle": false,
"_integrity": "sha1-W4t/x8jzQcU+0FbpKbe/Tei6e1o=",
"_location": "/pacote/make-fetch-happen/socks-proxy-agent/socks",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
- "raw": "socks@~1.1.5",
+ "raw": "socks@^1.1.10",
"name": "socks",
"escapedName": "socks",
- "rawSpec": "~1.1.5",
+ "rawSpec": "^1.1.10",
"saveSpec": null,
- "fetchSpec": "~1.1.5"
+ "fetchSpec": "^1.1.10"
},
"_requiredBy": [
"/pacote/make-fetch-happen/socks-proxy-agent"
],
"_resolved": "https://registry.npmjs.org/socks/-/socks-1.1.10.tgz",
"_shasum": "5b8b7fc7c8f341c53ed056e929b7bf4de8ba7b5a",
- "_shrinkwrap": null,
- "_spec": "socks@~1.1.5",
+ "_spec": "socks@^1.1.10",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/make-fetch-happen/node_modules/socks-proxy-agent",
"author": {
"name": "Josh Glazebrook"
},
- "bin": null,
"bugs": {
"url": "https://github.com/JoshGlazebrook/socks/issues"
},
@@ -41,7 +40,6 @@
},
"deprecated": false,
"description": "A SOCKS proxy client supporting SOCKS 4, 4a, and 5. (also supports BIND/Associate)",
- "devDependencies": {},
"engines": {
"node": ">= 0.10.0",
"npm": ">= 1.3.5"
@@ -62,8 +60,6 @@
"license": "MIT",
"main": "index.js",
"name": "socks",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
"url": "git+https://github.com/JoshGlazebrook/socks.git"
diff --git a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/package.json
index fe7a3f374305bf..f900c0d5218053 100644
--- a/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/make-fetch-happen/package.json
@@ -1,8 +1,8 @@
{
"_from": "make-fetch-happen@^2.4.13",
- "_id": "make-fetch-happen@2.4.13",
+ "_id": "make-fetch-happen@2.5.0",
"_inBundle": false,
- "_integrity": "sha512-73CsTlMRSLdGr7VvOE8iYl/ejOSIxyfRYg7jZhepGGEqIlgdq6FLe2DEAI5bo813Jdg5fS/Ku62SRQ/UpT6NJA==",
+ "_integrity": "sha512-JPD5R43T02wIkcxjcmZuR7D06nB20fMR8aC9VEyYsSBXvJa5hOR/QhCxKY+5SXhy3uU5OUY/r+A6r+fJ2mFndA==",
"_location": "/pacote/make-fetch-happen",
"_phantomChildren": {
"safe-buffer": "5.1.1"
@@ -20,10 +20,10 @@
"_requiredBy": [
"/pacote"
],
- "_resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-2.4.13.tgz",
- "_shasum": "3139ba2f4230a8384e7ba394534816c872ecbf4b",
+ "_resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-2.5.0.tgz",
+ "_shasum": "08c22d499f4f30111addba79fe87c98cf01b6bc8",
"_spec": "make-fetch-happen@^2.4.13",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote",
+ "_where": "/Users/rebecca/code/npm/node_modules/pacote",
"author": {
"name": "Kat Marchán",
"email": "kzm@sykosomatic.org"
@@ -50,9 +50,10 @@
"devDependencies": {
"bluebird": "^3.5.0",
"mkdirp": "^0.5.1",
- "nock": "^9.0.6",
+ "nock": "^9.0.14",
"npmlog": "^4.1.2",
"nyc": "^11.0.3",
+ "require-inject": "^1.4.2",
"rimraf": "^2.5.4",
"safe-buffer": "^5.1.1",
"standard": "^10.0.1",
@@ -88,9 +89,9 @@
"prerelease": "npm t",
"pretest": "standard lib test *.js",
"release": "standard-version -s",
- "test": "nyc --all -- tap -J test/*.js",
+ "test": "nyc --all -- tap --timeout=35 -J test/*.js",
"update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
"update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
},
- "version": "2.4.13"
+ "version": "2.5.0"
}
diff --git a/deps/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json b/deps/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json
index afa90581dbdeb0..b350ab14e8d6d6 100644
--- a/deps/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json
@@ -21,7 +21,7 @@
"_resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
"_shasum": "89b4d199ab2bee49de164ea02b89ce462d71b767",
"_spec": "balanced-match@^1.0.0",
- "_where": "/Users/rebecca/code/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion",
"author": {
"name": "Julian Gruber",
"email": "mail@juliangruber.com",
diff --git a/deps/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json b/deps/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json
index cea52d0f6b91de..8730fadd8374b2 100644
--- a/deps/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json
@@ -1,6 +1,7 @@
{
"_from": "concat-map@0.0.1",
"_id": "concat-map@0.0.1",
+ "_inBundle": false,
"_integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
"_location": "/pacote/minimatch/brace-expansion/concat-map",
"_phantomChildren": {},
@@ -19,7 +20,6 @@
],
"_resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"_shasum": "d8a96bd77fd68df7793a73036a3ba0d5405d477b",
- "_shrinkwrap": null,
"_spec": "concat-map@0.0.1",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion",
"author": {
@@ -27,12 +27,10 @@
"email": "mail@substack.net",
"url": "http://substack.net"
},
- "bin": null,
"bugs": {
"url": "https://github.com/substack/node-concat-map/issues"
},
"bundleDependencies": false,
- "dependencies": {},
"deprecated": false,
"description": "concatenative mapdashery",
"devDependencies": {
@@ -53,8 +51,6 @@
"license": "MIT",
"main": "index.js",
"name": "concat-map",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
"url": "git://github.com/substack/node-concat-map.git"
diff --git a/deps/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion/package.json b/deps/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion/package.json
index c245d54c932f61..0ba0e63cb84aea 100644
--- a/deps/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/minimatch/node_modules/brace-expansion/package.json
@@ -21,7 +21,7 @@
"_resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz",
"_shasum": "c07b211c7c952ec1f8efd51a77ef0d1d3990a292",
"_spec": "brace-expansion@^1.1.7",
- "_where": "/Users/rebecca/code/npm/node_modules/pacote/node_modules/minimatch",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/minimatch",
"author": {
"name": "Julian Gruber",
"email": "mail@juliangruber.com",
diff --git a/deps/npm/node_modules/pacote/node_modules/minimatch/package.json b/deps/npm/node_modules/pacote/node_modules/minimatch/package.json
index aec0ec6236c559..b3694531957c01 100644
--- a/deps/npm/node_modules/pacote/node_modules/minimatch/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/minimatch/package.json
@@ -1,33 +1,32 @@
{
- "_from": "minimatch@^3.0.3",
+ "_from": "minimatch@^3.0.4",
"_id": "minimatch@3.0.4",
+ "_inBundle": false,
"_integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"_location": "/pacote/minimatch",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
- "raw": "minimatch@^3.0.3",
+ "raw": "minimatch@^3.0.4",
"name": "minimatch",
"escapedName": "minimatch",
- "rawSpec": "^3.0.3",
+ "rawSpec": "^3.0.4",
"saveSpec": null,
- "fetchSpec": "^3.0.3"
+ "fetchSpec": "^3.0.4"
},
"_requiredBy": [
"/pacote"
],
"_resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"_shasum": "5166e286457f03306064be5497e8dbb0c3d32083",
- "_shrinkwrap": null,
- "_spec": "minimatch@^3.0.3",
+ "_spec": "minimatch@^3.0.4",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me"
},
- "bin": null,
"bugs": {
"url": "https://github.com/isaacs/minimatch/issues"
},
@@ -50,8 +49,6 @@
"license": "ISC",
"main": "minimatch.js",
"name": "minimatch",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
"url": "git://github.com/isaacs/minimatch.git"
diff --git a/deps/npm/node_modules/pacote/node_modules/promise-retry/node_modules/err-code/package.json b/deps/npm/node_modules/pacote/node_modules/promise-retry/node_modules/err-code/package.json
index 8d2cd5b86eaa54..559e5bfb66ba90 100644
--- a/deps/npm/node_modules/pacote/node_modules/promise-retry/node_modules/err-code/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/promise-retry/node_modules/err-code/package.json
@@ -1,6 +1,7 @@
{
"_from": "err-code@^1.0.0",
"_id": "err-code@1.1.2",
+ "_inBundle": false,
"_integrity": "sha1-BuARbTAo9q70gGhJ6w6mp0iuaWA=",
"_location": "/pacote/promise-retry/err-code",
"_phantomChildren": {},
@@ -19,7 +20,6 @@
],
"_resolved": "https://registry.npmjs.org/err-code/-/err-code-1.1.2.tgz",
"_shasum": "06e0116d3028f6aef4806849eb0ea6a748ae6960",
- "_shrinkwrap": null,
"_spec": "err-code@^1.0.0",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/promise-retry",
"author": {
@@ -27,12 +27,10 @@
"email": "hello@indigounited.com",
"url": "http://indigounited.com"
},
- "bin": null,
"bugs": {
"url": "https://github.com/IndigoUnited/js-err-code/issues/"
},
"bundleDependencies": false,
- "dependencies": {},
"deprecated": false,
"description": "Create an error with a code",
"devDependencies": {
@@ -53,8 +51,6 @@
"license": "MIT",
"main": "index.js",
"name": "err-code",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
"url": "git://github.com/IndigoUnited/js-err-code.git"
diff --git a/deps/npm/node_modules/pacote/node_modules/promise-retry/package.json b/deps/npm/node_modules/pacote/node_modules/promise-retry/package.json
index 6e32d3a89b6f29..a7efb9ffb7df8f 100644
--- a/deps/npm/node_modules/pacote/node_modules/promise-retry/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/promise-retry/package.json
@@ -1,6 +1,7 @@
{
"_from": "promise-retry@^1.1.1",
"_id": "promise-retry@1.1.1",
+ "_inBundle": false,
"_integrity": "sha1-ZznpaOMFHaIM5kl/srUPaRHfPW0=",
"_location": "/pacote/promise-retry",
"_phantomChildren": {},
@@ -20,7 +21,6 @@
],
"_resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-1.1.1.tgz",
"_shasum": "6739e968e3051da20ce6497fb2b50f6911df3d6d",
- "_shrinkwrap": null,
"_spec": "promise-retry@^1.1.1",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote",
"author": {
@@ -28,7 +28,6 @@
"email": "hello@indigounited.com",
"url": "http://indigounited.com"
},
- "bin": null,
"bugs": {
"url": "https://github.com/IndigoUnited/node-promise-retry/issues/"
},
@@ -58,8 +57,6 @@
"license": "MIT",
"main": "index.js",
"name": "promise-retry",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
"url": "git://github.com/IndigoUnited/node-promise-retry.git"
diff --git a/deps/npm/node_modules/pacote/node_modules/protoduck/node_modules/genfun/package.json b/deps/npm/node_modules/pacote/node_modules/protoduck/node_modules/genfun/package.json
index 78327af43ac783..b1b7bc8f69e605 100644
--- a/deps/npm/node_modules/pacote/node_modules/protoduck/node_modules/genfun/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/protoduck/node_modules/genfun/package.json
@@ -1,6 +1,7 @@
{
"_from": "genfun@^4.0.1",
"_id": "genfun@4.0.1",
+ "_inBundle": false,
"_integrity": "sha1-7RAEHy5KfxsKOEZtF6XD4n3x38E=",
"_location": "/pacote/protoduck/genfun",
"_phantomChildren": {},
@@ -19,14 +20,12 @@
],
"_resolved": "https://registry.npmjs.org/genfun/-/genfun-4.0.1.tgz",
"_shasum": "ed10041f2e4a7f1b0a38466d17a5c3e27df1dfc1",
- "_shrinkwrap": null,
"_spec": "genfun@^4.0.1",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/protoduck",
"author": {
"name": "Kat Marchán",
"email": "kzm@sykosomatic.org"
},
- "bin": null,
"bugs": {
"url": "https://github.com/zkat/genfun/issues"
},
@@ -63,8 +62,6 @@
"license": "CC0-1.0",
"main": "lib/genfun.js",
"name": "genfun",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
"url": "git://github.com/zkat/genfun.git"
diff --git a/deps/npm/node_modules/pacote/node_modules/protoduck/package.json b/deps/npm/node_modules/pacote/node_modules/protoduck/package.json
index 9b6a04a69968d7..b436c22e87fa3c 100644
--- a/deps/npm/node_modules/pacote/node_modules/protoduck/package.json
+++ b/deps/npm/node_modules/pacote/node_modules/protoduck/package.json
@@ -1,6 +1,7 @@
{
"_from": "protoduck@^4.0.0",
"_id": "protoduck@4.0.0",
+ "_inBundle": false,
"_integrity": "sha1-/kh02MeRM2bP2erRJFOiLNNlf44=",
"_location": "/pacote/protoduck",
"_phantomChildren": {},
@@ -19,14 +20,12 @@
],
"_resolved": "https://registry.npmjs.org/protoduck/-/protoduck-4.0.0.tgz",
"_shasum": "fe4874d8c7913366cfd9ead12453a22cd3657f8e",
- "_shrinkwrap": null,
"_spec": "protoduck@^4.0.0",
"_where": "/Users/zkat/Documents/code/npm/node_modules/pacote",
"author": {
"name": "Kat Marchán",
"email": "kzm@sykosomatic.org"
},
- "bin": null,
"bugs": {
"url": "https://github.com/zkat/protoduck/issues"
},
@@ -72,8 +71,6 @@
"license": "CC0-1.0",
"main": "index.js",
"name": "protoduck",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
"url": "git+https://github.com/zkat/protoduck.git"
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/.npmignore b/deps/npm/node_modules/pacote/node_modules/tar-fs/.npmignore
deleted file mode 100644
index 118a1375d0e473..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/.npmignore
+++ /dev/null
@@ -1,2 +0,0 @@
-node_modules
-test/fixtures/copy
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/.travis.yml b/deps/npm/node_modules/pacote/node_modules/tar-fs/.travis.yml
deleted file mode 100644
index 6e5919de39a312..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/.travis.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-language: node_js
-node_js:
- - "0.10"
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/LICENSE b/deps/npm/node_modules/pacote/node_modules/tar-fs/LICENSE
deleted file mode 100644
index 757562ec59276b..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014 Mathias Buus
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
\ No newline at end of file
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/README.md b/deps/npm/node_modules/pacote/node_modules/tar-fs/README.md
deleted file mode 100644
index 6cc3b077a51432..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/README.md
+++ /dev/null
@@ -1,143 +0,0 @@
-# tar-fs
-
-filesystem bindings for [tar-stream](https://github.com/mafintosh/tar-stream).
-
-```
-npm install tar-fs
-```
-
-[![build status](https://secure.travis-ci.org/mafintosh/tar-fs.png)](http://travis-ci.org/mafintosh/tar-fs)
-
-## Usage
-
-tar-fs allows you to pack directories into tarballs and extract tarballs into directories.
-
-It doesn't gunzip for you, so if you want to extract a `.tar.gz` you'll need to use something like [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) in addition.
-
-``` js
-var tar = require('tar-fs')
-var fs = require('fs')
-
-// packing a directory
-tar.pack('./my-directory').pipe(fs.createWriteStream('my-tarball.tar'))
-
-// extracting a directory
-fs.createReadStream('my-other-tarball.tar').pipe(tar.extract('./my-other-directory'))
-```
-
-To ignore various files when packing or extracting, add an ignore function to the options. `ignore`
-is also an alias for `filter`. Additionally, you get `header` if you use `ignore` while extracting,
-so you can also filter by metadata.
-
-``` js
-var pack = tar.pack('./my-directory', {
- ignore: function(name) {
- return path.extname(name) === '.bin' // ignore .bin files when packing
- }
-})
-
-var extract = tar.extract('./my-other-directory', {
- ignore: function(name) {
- return path.extname(name) === '.bin' // ignore .bin files inside the tarball when extracting
- }
-})
-
-var extractFilesDirs = tar.extract('./my-other-other-directory', {
- ignore: function(_, header) {
- // pass files & directories, ignore e.g. symlinks
- return header.type !== 'file' && header.type !== 'directory'
- }
-})
-```
-
-You can also specify which entries to pack using the `entries` option
-
-```js
-var pack = tar.pack('./my-directory', {
- entries: ['file1', 'subdir/file2'] // only the specific entries will be packed
-})
-```
-
-If you want to modify the headers when packing/extracting, add a map function to the options
-
-``` js
-var pack = tar.pack('./my-directory', {
- map: function(header) {
- header.name = 'prefixed/'+header.name
- return header
- }
-})
-
-var extract = tar.extract('./my-directory', {
- map: function(header) {
- header.name = 'another-prefix/'+header.name
- return header
- }
-})
-```
-
-Similarly, you can use `mapStream` in case you want to modify the input/output file streams
-
-``` js
-var pack = tar.pack('./my-directory', {
- mapStream: function(fileStream, header) {
- if (path.extname(header.name) === '.js') {
- return fileStream.pipe(someTransform)
- }
- return fileStream;
- }
-})
-
-var extract = tar.extract('./my-directory', {
- mapStream: function(fileStream, header) {
- if (path.extname(header.name) === '.js') {
- return fileStream.pipe(someTransform)
- }
- return fileStream;
- }
-})
-```
-
-Set `options.fmode` and `options.dmode` to ensure that files/directories extracted have the corresponding modes
-
-``` js
-var extract = tar.extract('./my-directory', {
- dmode: parseInt(555, 8), // all dirs should be readable
- fmode: parseInt(444, 8) // all files should be readable
-})
-```
-
-It can be useful to use `dmode` and `fmode` if you are packing/unpacking tarballs between *nix/windows to ensure that all files/directories unpacked are readable.
-
-Alternatively you can set `options.readable` and/or `options.writable` to set the dmode and fmode to readable/writable.
-
-``` js
-var extract = tar.extract('./my-directory', {
- readable: true, // all dirs and files should be readable
- writable: true, // all dirs and files should be writable
-})
-```
-
-Set `options.strict` to `false` if you want to ignore errors due to unsupported entry types (like device files)
-
-To dereference symlinks (pack the contents of the symlink instead of the link itself) set `options.dereference` to `true`.
-
-## Copy a directory
-
-Copying a directory with permissions and mtime intact is as simple as
-
-``` js
-tar.pack('source-directory').pipe(tar.extract('dest-directory'))
-```
-
-## Performance
-
-Packing and extracting a 6.1 GB directory with 2496 directories and 2398 files yields the following results on my MacBook Air.
-[See the benchmark here](https://gist.github.com/mafintosh/8102201)
-
-* tar-fs: 34.261 ms
-* [node-tar](https://github.com/isaacs/node-tar): 366.123 ms (or 10x slower)
-
-## License
-
-MIT
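Of the options this removed README documents, only `strict` and `dereference` were described without code. A minimal sketch of both against the same API (illustrative, not from the original file):

```js
var tar = require('tar-fs')

// tolerate unsupported entry types (device files etc.) instead of erroring
var extract = tar.extract('./my-directory', {
  strict: false
})

// pack the contents of symlink targets rather than the links themselves
var pack = tar.pack('./my-directory', {
  dereference: true
})
```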
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/index.js b/deps/npm/node_modules/pacote/node_modules/tar-fs/index.js
deleted file mode 100644
index 4b345b7d02fff8..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/index.js
+++ /dev/null
@@ -1,323 +0,0 @@
-var chownr = require('chownr')
-var tar = require('tar-stream')
-var pump = require('pump')
-var mkdirp = require('mkdirp')
-var fs = require('fs')
-var path = require('path')
-var os = require('os')
-
-var win32 = os.platform() === 'win32'
-
-var noop = function () {}
-
-var echo = function (name) {
- return name
-}
-
-var normalize = !win32 ? echo : function (name) {
- return name.replace(/\\/g, '/').replace(/[:?<>|]/g, '_')
-}
-
-var statAll = function (fs, stat, cwd, ignore, entries, sort) {
- var queue = entries || ['.']
-
- return function loop (callback) {
- if (!queue.length) return callback()
- var next = queue.shift()
- var nextAbs = path.join(cwd, next)
-
- stat(nextAbs, function (err, stat) {
- if (err) return callback(err)
-
- if (!stat.isDirectory()) return callback(null, next, stat)
-
- fs.readdir(nextAbs, function (err, files) {
- if (err) return callback(err)
-
- if (sort) files.sort()
- for (var i = 0; i < files.length; i++) {
- if (!ignore(path.join(cwd, next, files[i]))) queue.push(path.join(next, files[i]))
- }
-
- callback(null, next, stat)
- })
- })
- }
-}
-
-var strip = function (map, level) {
- return function (header) {
- header.name = header.name.split('/').slice(level).join('/')
-
- var linkname = header.linkname
- if (linkname && (header.type === 'link' || path.isAbsolute(linkname))) {
- header.linkname = linkname.split('/').slice(level).join('/')
- }
-
- return map(header)
- }
-}
-
-exports.pack = function (cwd, opts) {
- if (!cwd) cwd = '.'
- if (!opts) opts = {}
-
- var xfs = opts.fs || fs
- var ignore = opts.ignore || opts.filter || noop
- var map = opts.map || noop
- var mapStream = opts.mapStream || echo
- var statNext = statAll(xfs, opts.dereference ? xfs.stat : xfs.lstat, cwd, ignore, opts.entries, opts.sort)
- var strict = opts.strict !== false
- var umask = typeof opts.umask === 'number' ? ~opts.umask : ~processUmask()
- var dmode = typeof opts.dmode === 'number' ? opts.dmode : 0
- var fmode = typeof opts.fmode === 'number' ? opts.fmode : 0
- var pack = opts.pack || tar.pack()
-
- if (opts.strip) map = strip(map, opts.strip)
-
- if (opts.readable) {
- dmode |= parseInt(555, 8)
- fmode |= parseInt(444, 8)
- }
- if (opts.writable) {
- dmode |= parseInt(333, 8)
- fmode |= parseInt(222, 8)
- }
-
- var onsymlink = function (filename, header) {
- xfs.readlink(path.join(cwd, filename), function (err, linkname) {
- if (err) return pack.destroy(err)
- header.linkname = normalize(linkname)
- pack.entry(header, onnextentry)
- })
- }
-
- var onstat = function (err, filename, stat) {
- if (err) return pack.destroy(err)
- if (!filename) return pack.finalize()
-
- if (stat.isSocket()) return onnextentry() // tar does not support sockets...
-
- var header = {
- name: normalize(filename),
- mode: (stat.mode | (stat.isDirectory() ? dmode : fmode)) & umask,
- mtime: stat.mtime,
- size: stat.size,
- type: 'file',
- uid: stat.uid,
- gid: stat.gid
- }
-
- if (stat.isDirectory()) {
- header.size = 0
- header.type = 'directory'
- header = map(header) || header
- return pack.entry(header, onnextentry)
- }
-
- if (stat.isSymbolicLink()) {
- header.size = 0
- header.type = 'symlink'
- header = map(header) || header
- return onsymlink(filename, header)
- }
-
- // TODO: add fifo etc...
-
- header = map(header) || header
-
- if (!stat.isFile()) {
- if (strict) return pack.destroy(new Error('unsupported type for ' + filename))
- return onnextentry()
- }
-
- var entry = pack.entry(header, onnextentry)
- if (!entry) return
-
- var rs = mapStream(xfs.createReadStream(path.join(cwd, filename)), header)
-
- rs.on('error', function (err) { // always forward errors on destroy
- entry.destroy(err)
- })
-
- pump(rs, entry)
- }
-
- var onnextentry = function (err) {
- if (err) return pack.destroy(err)
- statNext(onstat)
- }
-
- onnextentry()
-
- return pack
-}
-
-var head = function (list) {
- return list.length ? list[list.length - 1] : null
-}
-
-var processGetuid = function () {
- return process.getuid ? process.getuid() : -1
-}
-
-var processUmask = function () {
- return process.umask ? process.umask() : 0
-}
-
-exports.extract = function (cwd, opts) {
- if (!cwd) cwd = '.'
- if (!opts) opts = {}
-
- var xfs = opts.fs || fs
- var ignore = opts.ignore || opts.filter || noop
- var map = opts.map || noop
- var mapStream = opts.mapStream || echo
- var own = opts.chown !== false && !win32 && processGetuid() === 0
- var extract = opts.extract || tar.extract()
- var stack = []
- var now = new Date()
- var umask = typeof opts.umask === 'number' ? ~opts.umask : ~processUmask()
- var dmode = typeof opts.dmode === 'number' ? opts.dmode : 0
- var fmode = typeof opts.fmode === 'number' ? opts.fmode : 0
- var strict = opts.strict !== false
-
- if (opts.strip) map = strip(map, opts.strip)
-
- if (opts.readable) {
- dmode |= parseInt(555, 8)
- fmode |= parseInt(444, 8)
- }
- if (opts.writable) {
- dmode |= parseInt(333, 8)
- fmode |= parseInt(222, 8)
- }
-
- var utimesParent = function (name, cb) { // we just set the mtime on the parent dir again every time we write an entry
- var top
- while ((top = head(stack)) && name.slice(0, top[0].length) !== top[0]) stack.pop()
- if (!top) return cb()
- xfs.utimes(top[0], now, top[1], cb)
- }
-
- var utimes = function (name, header, cb) {
- if (opts.utimes === false) return cb()
-
- if (header.type === 'directory') return xfs.utimes(name, now, header.mtime, cb)
- if (header.type === 'symlink') return utimesParent(name, cb) // TODO: how to set mtime on link?
-
- xfs.utimes(name, now, header.mtime, function (err) {
- if (err) return cb(err)
- utimesParent(name, cb)
- })
- }
-
- var chperm = function (name, header, cb) {
- var link = header.type === 'symlink'
- var chmod = link ? xfs.lchmod : xfs.chmod
- var chown = link ? xfs.lchown : xfs.chown
-
- if (!chmod) return cb()
-
- var mode = (header.mode | (header.type === 'directory' ? dmode : fmode)) & umask
- chmod(name, mode, function (err) {
- if (err) return cb(err)
- if (!own) return cb()
- if (!chown) return cb()
- chown(name, header.uid, header.gid, cb)
- })
- }
-
- extract.on('entry', function (header, stream, next) {
- header = map(header) || header
- header.name = normalize(header.name)
- var name = path.join(cwd, path.join('/', header.name))
-
- if (ignore(name, header)) {
- stream.resume()
- return next()
- }
-
- var stat = function (err) {
- if (err) return next(err)
- utimes(name, header, function (err) {
- if (err) return next(err)
- if (win32) return next()
- chperm(name, header, next)
- })
- }
-
- var onsymlink = function () {
- if (win32) return next() // skip symlinks on win for now before it can be tested
- xfs.unlink(name, function () {
- xfs.symlink(header.linkname, name, stat)
- })
- }
-
- var onlink = function () {
- if (win32) return next() // skip links on win for now before it can be tested
- xfs.unlink(name, function () {
- var srcpath = path.resolve(cwd, header.linkname)
-
- xfs.link(srcpath, name, function (err) {
- if (err && err.code === 'EPERM' && opts.hardlinkAsFilesFallback) {
- stream = xfs.createReadStream(srcpath)
- return onfile()
- }
-
- stat(err)
- })
- })
- }
-
- var onfile = function () {
- var ws = xfs.createWriteStream(name)
- var rs = mapStream(stream, header)
-
- ws.on('error', function (err) { // always forward errors on destroy
- rs.destroy(err)
- })
-
- pump(rs, ws, function (err) {
- if (err) return next(err)
- ws.on('close', stat)
- })
- }
-
- if (header.type === 'directory') {
- stack.push([name, header.mtime])
- return mkdirfix(name, {
- fs: xfs, own: own, uid: header.uid, gid: header.gid
- }, stat)
- }
-
- mkdirfix(path.dirname(name), {
- fs: xfs, own: own, uid: header.uid, gid: header.gid
- }, function (err) {
- if (err) return next(err)
-
- switch (header.type) {
- case 'file': return onfile()
- case 'link': return onlink()
- case 'symlink': return onsymlink()
- }
-
- if (strict) return next(new Error('unsupported type for ' + name + ' (' + header.type + ')'))
-
- stream.resume()
- next()
- })
- })
-
- return extract
-}
-
-function mkdirfix (name, opts, cb) {
- mkdirp(name, {fs: opts.xfs}, function (err, made) {
- if (!err && made && opts.own) {
- chownr(made, opts.uid, opts.gid, cb)
- } else {
- cb(err)
- }
- })
-}
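The `strip` helper in the file above implements an `opts.strip` option the README never documents: it drops the given number of leading path components from each entry's name (and from link targets where appropriate). A sketch of how it would be used (hypothetical paths):

```js
var tar = require('tar-fs')

// With strip: 1, an entry named 'package/lib/index.js' inside the tarball
// is written to './out/lib/index.js'.
var extract = tar.extract('./out', {
  strip: 1
})
```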
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/.npmignore b/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/.npmignore
deleted file mode 100644
index 3c3629e647f5dd..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/.npmignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/.travis.yml b/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/.travis.yml
deleted file mode 100644
index 17f94330e70bc8..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/.travis.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-language: node_js
-node_js:
- - "0.10"
-
-script: "npm test"
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/LICENSE b/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/LICENSE
deleted file mode 100644
index 757562ec59276b..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014 Mathias Buus
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
\ No newline at end of file
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/README.md b/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/README.md
deleted file mode 100644
index 5029b27d6817e5..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/README.md
+++ /dev/null
@@ -1,56 +0,0 @@
-# pump
-
-pump is a small node module that pipes streams together and destroys all of them if one of them closes.
-
-```
-npm install pump
-```
-
-[![build status](http://img.shields.io/travis/mafintosh/pump.svg?style=flat)](http://travis-ci.org/mafintosh/pump)
-
-## What problem does it solve?
-
-When using standard `source.pipe(dest)`, source will _not_ be destroyed if dest emits close or an error.
-You are also not able to provide a callback to tell when the pipe has finished.
-
-pump does these two things for you
-
-## Usage
-
-Simply pass the streams you want to pipe together to pump and add an optional callback
-
-``` js
-var pump = require('pump')
-var fs = require('fs')
-
-var source = fs.createReadStream('/dev/random')
-var dest = fs.createWriteStream('/dev/null')
-
-pump(source, dest, function(err) {
- console.log('pipe finished', err)
-})
-
-setTimeout(function() {
- dest.destroy() // when dest is closed pump will destroy source
-}, 1000)
-```
-
-You can use pump to pipe more than two streams together as well
-
-``` js
-var transform = someTransformStream()
-
-pump(source, transform, anotherTransform, dest, function(err) {
- console.log('pipe finished', err)
-})
-```
-
-If `source`, `transform`, `anotherTransform` or `dest` closes all of them will be destroyed.
-
-## License
-
-MIT
-
-## Related
-
-`pump` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one.
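For contrast with the examples above, this is roughly the wiring `pump` replaces when you use bare `.pipe()` in the two-stream case (a sketch; real code would also need to handle errors on `source`):

```js
var fs = require('fs')

var source = fs.createReadStream('/dev/random')
var dest = fs.createWriteStream('/dev/null')

// Bare .pipe() leaves cleanup to you: neither stream destroys the other,
// and there is no single completion callback.
source.pipe(dest)
dest.on('error', function (err) {
  source.destroy()
  console.log('pipe finished', err)
})
dest.on('close', function () {
  source.destroy()
})
```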
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/index.js b/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/index.js
deleted file mode 100644
index 060ce5f4fd3662..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/index.js
+++ /dev/null
@@ -1,80 +0,0 @@
-var once = require('once')
-var eos = require('end-of-stream')
-var fs = require('fs') // we only need fs to get the ReadStream and WriteStream prototypes
-
-var noop = function () {}
-
-var isFn = function (fn) {
- return typeof fn === 'function'
-}
-
-var isFS = function (stream) {
- if (!fs) return false // browser
- return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close)
-}
-
-var isRequest = function (stream) {
- return stream.setHeader && isFn(stream.abort)
-}
-
-var destroyer = function (stream, reading, writing, callback) {
- callback = once(callback)
-
- var closed = false
- stream.on('close', function () {
- closed = true
- })
-
- eos(stream, {readable: reading, writable: writing}, function (err) {
- if (err) return callback(err)
- closed = true
- callback()
- })
-
- var destroyed = false
- return function (err) {
- if (closed) return
- if (destroyed) return
- destroyed = true
-
- if (isFS(stream)) return stream.close() // use close for fs streams to avoid fd leaks
- if (isRequest(stream)) return stream.abort() // request.destroy just does .end() - .abort() is what we want
-
- if (isFn(stream.destroy)) return stream.destroy()
-
- callback(err || new Error('stream was destroyed'))
- }
-}
-
-var call = function (fn) {
- fn()
-}
-
-var pipe = function (from, to) {
- return from.pipe(to)
-}
-
-var pump = function () {
- var streams = Array.prototype.slice.call(arguments)
- var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop
-
- if (Array.isArray(streams[0])) streams = streams[0]
- if (streams.length < 2) throw new Error('pump requires two streams per minimum')
-
- var error
- var destroys = streams.map(function (stream, i) {
- var reading = i < streams.length - 1
- var writing = i > 0
- return destroyer(stream, reading, writing, function (err) {
- if (!error) error = err
- if (err) destroys.forEach(call)
- if (reading) return
- destroys.forEach(call)
- callback(error)
- })
- })
-
- return streams.reduce(pipe)
-}
-
-module.exports = pump
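One detail of the implementation above that the README never mentions: the `Array.isArray(streams[0])` branch means `pump` also accepts its streams as a single array, which is convenient when a pipeline is assembled dynamically. A sketch (with `source`, `transform`, `dest` and `done` as placeholders):

```js
var pump = require('pump')

// equivalent calls; `source`, `transform`, `dest` are streams and
// `done` is the optional completion callback
pump(source, transform, dest, done)
pump([source, transform, dest], done)
```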
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/LICENSE b/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/LICENSE
deleted file mode 100644
index 757562ec59276b..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014 Mathias Buus
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
\ No newline at end of file
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/README.md b/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/README.md
deleted file mode 100644
index f2560c939d960e..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/README.md
+++ /dev/null
@@ -1,52 +0,0 @@
-# end-of-stream
-
-A node module that calls a callback when a readable/writable/duplex stream has completed or failed.
-
- npm install end-of-stream
-
-## Usage
-
-Simply pass a stream and a callback to `eos`.
-Legacy streams, streams2 and streams3 are all supported.
-
-``` js
-var eos = require('end-of-stream');
-
-eos(readableStream, function(err) {
- // this will be set to the stream instance
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has ended', this === readableStream);
-});
-
-eos(writableStream, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has finished', this === writableStream);
-});
-
-eos(duplexStream, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has ended and finished', this === duplexStream);
-});
-
-eos(duplexStream, {readable:false}, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has finished but might still be readable');
-});
-
-eos(duplexStream, {writable:false}, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has ended but might still be writable');
-});
-
-eos(readableStream, {error:false}, function(err) {
- // do not treat emit('error', err) as an end-of-stream
-});
-```
-
-## License
-
-MIT
-
-## Related
-
-`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one.
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/index.js b/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/index.js
deleted file mode 100644
index b3a90686346cfb..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/index.js
+++ /dev/null
@@ -1,83 +0,0 @@
-var once = require('once');
-
-var noop = function() {};
-
-var isRequest = function(stream) {
- return stream.setHeader && typeof stream.abort === 'function';
-};
-
-var isChildProcess = function(stream) {
- return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
-};
-
-var eos = function(stream, opts, callback) {
- if (typeof opts === 'function') return eos(stream, null, opts);
- if (!opts) opts = {};
-
- callback = once(callback || noop);
-
- var ws = stream._writableState;
- var rs = stream._readableState;
- var readable = opts.readable || (opts.readable !== false && stream.readable);
- var writable = opts.writable || (opts.writable !== false && stream.writable);
-
- var onlegacyfinish = function() {
- if (!stream.writable) onfinish();
- };
-
- var onfinish = function() {
- writable = false;
- if (!readable) callback.call(stream);
- };
-
- var onend = function() {
- readable = false;
- if (!writable) callback.call(stream);
- };
-
- var onexit = function(exitCode) {
- callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
- };
-
- var onclose = function() {
- if (readable && !(rs && rs.ended)) return callback.call(stream, new Error('premature close'));
- if (writable && !(ws && ws.ended)) return callback.call(stream, new Error('premature close'));
- };
-
- var onrequest = function() {
- stream.req.on('finish', onfinish);
- };
-
- if (isRequest(stream)) {
- stream.on('complete', onfinish);
- stream.on('abort', onclose);
- if (stream.req) onrequest();
- else stream.on('request', onrequest);
- } else if (writable && !ws) { // legacy streams
- stream.on('end', onlegacyfinish);
- stream.on('close', onlegacyfinish);
- }
-
- if (isChildProcess(stream)) stream.on('exit', onexit);
-
- stream.on('end', onend);
- stream.on('finish', onfinish);
- if (opts.error !== false) stream.on('error', callback);
- stream.on('close', onclose);
-
- return function() {
- stream.removeListener('complete', onfinish);
- stream.removeListener('abort', onclose);
- stream.removeListener('request', onrequest);
- if (stream.req) stream.req.removeListener('finish', onfinish);
- stream.removeListener('end', onlegacyfinish);
- stream.removeListener('close', onlegacyfinish);
- stream.removeListener('finish', onfinish);
- stream.removeListener('exit', onexit);
- stream.removeListener('end', onend);
- stream.removeListener('error', callback);
- stream.removeListener('close', onclose);
- };
-};
-
-module.exports = eos;
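
One detail the README above does not demonstrate, but the implementation makes clear: `eos()` returns a function that detaches every listener it registered. A minimal sketch, assuming any ordinary readable stream (the file path is hypothetical):

```js
var fs = require('fs')
var eos = require('end-of-stream')

var stream = fs.createReadStream('some-file.txt') // hypothetical path

// eos attaches end/finish/error/close listeners and returns a detacher
var detach = eos(stream, function (err) {
  if (err) return console.log('stream had an error or closed early')
  console.log('stream has ended')
})

// if we later stop caring about this stream's outcome,
// calling the returned function removes all the listeners
detach()
```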
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/package.json b/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/package.json
deleted file mode 100644
index 28eeea06e29e07..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/package.json
+++ /dev/null
@@ -1,66 +0,0 @@
-{
- "_from": "end-of-stream@^1.1.0",
- "_id": "end-of-stream@1.4.0",
- "_integrity": "sha1-epDYM+/abPpurA9JSduw+tOmMgY=",
- "_location": "/pacote/tar-fs/pump/end-of-stream",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "end-of-stream@^1.1.0",
- "name": "end-of-stream",
- "escapedName": "end-of-stream",
- "rawSpec": "^1.1.0",
- "saveSpec": null,
- "fetchSpec": "^1.1.0"
- },
- "_requiredBy": [
- "/pacote/tar-fs/pump"
- ],
- "_resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.0.tgz",
- "_shasum": "7a90d833efda6cfa6eac0f4949dbb0fad3a63206",
- "_shrinkwrap": null,
- "_spec": "end-of-stream@^1.1.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump",
- "author": {
- "name": "Mathias Buus",
- "email": "mathiasbuus@gmail.com"
- },
- "bin": null,
- "bugs": {
- "url": "https://github.com/mafintosh/end-of-stream/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "once": "^1.4.0"
- },
- "deprecated": false,
- "description": "Call a callback when a readable/writable/duplex stream has completed or failed.",
- "devDependencies": {},
- "files": [
- "index.js"
- ],
- "homepage": "https://github.com/mafintosh/end-of-stream",
- "keywords": [
- "stream",
- "streams",
- "callback",
- "finish",
- "close",
- "end",
- "wait"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "end-of-stream",
- "optionalDependencies": {},
- "peerDependencies": {},
- "repository": {
- "type": "git",
- "url": "git://github.com/mafintosh/end-of-stream.git"
- },
- "scripts": {
- "test": "node test.js"
- },
- "version": "1.4.0"
-}
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/package.json b/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/package.json
deleted file mode 100644
index 2af973326b767f..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/package.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
- "_from": "pump@^1.0.0",
- "_id": "pump@1.0.2",
- "_integrity": "sha1-Oz7mUS+U8OV1U4wXmV+fFpkKXVE=",
- "_location": "/pacote/tar-fs/pump",
- "_phantomChildren": {
- "once": "1.4.0"
- },
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "pump@^1.0.0",
- "name": "pump",
- "escapedName": "pump",
- "rawSpec": "^1.0.0",
- "saveSpec": null,
- "fetchSpec": "^1.0.0"
- },
- "_requiredBy": [
- "/pacote/tar-fs"
- ],
- "_resolved": "https://registry.npmjs.org/pump/-/pump-1.0.2.tgz",
- "_shasum": "3b3ee6512f94f0e575538c17995f9f16990a5d51",
- "_shrinkwrap": null,
- "_spec": "pump@^1.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/tar-fs",
- "author": {
- "name": "Mathias Buus Madsen",
- "email": "mathiasbuus@gmail.com"
- },
- "bin": null,
- "browser": {
- "fs": false
- },
- "bugs": {
- "url": "https://github.com/mafintosh/pump/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "end-of-stream": "^1.1.0",
- "once": "^1.3.1"
- },
- "deprecated": false,
- "description": "pipe streams together and close all of them if one of them closes",
- "devDependencies": {},
- "homepage": "https://github.com/mafintosh/pump#readme",
- "keywords": [
- "streams",
- "pipe",
- "destroy",
- "callback"
- ],
- "license": "MIT",
- "name": "pump",
- "optionalDependencies": {},
- "peerDependencies": {},
- "repository": {
- "type": "git",
- "url": "git://github.com/mafintosh/pump.git"
- },
- "scripts": {
- "test": "node test.js"
- },
- "version": "1.0.2"
-}
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/test-browser.js b/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/test-browser.js
deleted file mode 100644
index 80e852c7dcb9d8..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/test-browser.js
+++ /dev/null
@@ -1,58 +0,0 @@
-var stream = require('stream')
-var pump = require('./index')
-
-var rs = new stream.Readable()
-var ws = new stream.Writable()
-
-rs._read = function (size) {
- this.push(Buffer(size).fill('abc'))
-}
-
-ws._write = function (chunk, encoding, cb) {
- setTimeout(function () {
- cb()
- }, 100)
-}
-
-var toHex = function () {
- var reverse = new (require('stream').Transform)()
-
- reverse._transform = function (chunk, enc, callback) {
- reverse.push(chunk.toString('hex'))
- callback()
- }
-
- return reverse
-}
-
-var wsClosed = false
-var rsClosed = false
-var callbackCalled = false
-
-var check = function () {
- if (wsClosed && rsClosed && callbackCalled) console.log('done')
-}
-
-ws.on('finish', function () {
- wsClosed = true
- check()
-})
-
-rs.on('end', function () {
- rsClosed = true
- check()
-})
-
-pump(rs, toHex(), toHex(), toHex(), ws, function () {
- callbackCalled = true
- check()
-})
-
-setTimeout(function () {
- rs.push(null)
- rs.emit('close')
-}, 1000)
-
-setTimeout(function () {
- if (!check()) throw new Error('timeout')
-}, 5000)
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/test.js b/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/test.js
deleted file mode 100644
index 64e772ca5bc69c..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump/test.js
+++ /dev/null
@@ -1,46 +0,0 @@
-var pump = require('./index')
-
-var rs = require('fs').createReadStream('/dev/random')
-var ws = require('fs').createWriteStream('/dev/null')
-
-var toHex = function () {
- var reverse = new (require('stream').Transform)()
-
- reverse._transform = function (chunk, enc, callback) {
- reverse.push(chunk.toString('hex'))
- callback()
- }
-
- return reverse
-}
-
-var wsClosed = false
-var rsClosed = false
-var callbackCalled = false
-
-var check = function () {
- if (wsClosed && rsClosed && callbackCalled) process.exit(0)
-}
-
-ws.on('close', function () {
- wsClosed = true
- check()
-})
-
-rs.on('close', function () {
- rsClosed = true
- check()
-})
-
-pump(rs, toHex(), toHex(), toHex(), ws, function () {
- callbackCalled = true
- check()
-})
-
-setTimeout(function () {
- rs.destroy()
-}, 1000)
-
-setTimeout(function () {
- throw new Error('timeout')
-}, 5000)
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/package.json b/deps/npm/node_modules/pacote/node_modules/tar-fs/package.json
deleted file mode 100644
index 2832072b548330..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/package.json
+++ /dev/null
@@ -1,70 +0,0 @@
-{
- "_from": "tar-fs@^1.15.1",
- "_id": "tar-fs@1.15.3",
- "_inBundle": false,
- "_integrity": "sha1-7M+TXpQUk9gVECjmNuUc5MPKfyA=",
- "_location": "/pacote/tar-fs",
- "_phantomChildren": {
- "once": "1.4.0"
- },
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "tar-fs@^1.15.1",
- "name": "tar-fs",
- "escapedName": "tar-fs",
- "rawSpec": "^1.15.1",
- "saveSpec": null,
- "fetchSpec": "^1.15.1"
- },
- "_requiredBy": [
- "/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-1.15.3.tgz",
- "_shasum": "eccf935e941493d8151028e636e51ce4c3ca7f20",
- "_spec": "tar-fs@^1.15.1",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote",
- "author": {
- "name": "Mathias Buus"
- },
- "bugs": {
- "url": "https://github.com/mafintosh/tar-fs/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "chownr": "^1.0.1",
- "mkdirp": "^0.5.1",
- "pump": "^1.0.0",
- "tar-stream": "^1.1.2"
- },
- "deprecated": false,
- "description": "filesystem bindings for tar-stream",
- "devDependencies": {
- "rimraf": "^2.2.8",
- "standard": "^4.5.4",
- "tape": "^3.0.0"
- },
- "directories": {
- "test": "test"
- },
- "homepage": "https://github.com/mafintosh/tar-fs",
- "keywords": [
- "tar",
- "fs",
- "file",
- "tarball",
- "directory",
- "stream"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "tar-fs",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/mafintosh/tar-fs.git"
- },
- "scripts": {
- "test": "standard && tape test/index.js"
- },
- "version": "1.15.3"
-}
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/a/hello.txt b/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/a/hello.txt
deleted file mode 100644
index 3b18e512dba79e..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/a/hello.txt
+++ /dev/null
@@ -1 +0,0 @@
-hello world
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/b/a/test.txt b/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/b/a/test.txt
deleted file mode 100644
index 9daeafb9864cf4..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/b/a/test.txt
+++ /dev/null
@@ -1 +0,0 @@
-test
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/c/.npmignore b/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/c/.npmignore
deleted file mode 100644
index 2b2328d77c795e..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/c/.npmignore
+++ /dev/null
@@ -1 +0,0 @@
-link
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/file2 b/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/file2
deleted file mode 100644
index e69de29bb2d1d6..00000000000000
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-dir/file5 b/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-dir/file5
deleted file mode 100644
index e69de29bb2d1d6..00000000000000
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-files/file3 b/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-files/file3
deleted file mode 100644
index e69de29bb2d1d6..00000000000000
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-files/file4 b/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-files/file4
deleted file mode 100644
index e69de29bb2d1d6..00000000000000
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/e/directory/.ignore b/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/e/directory/.ignore
deleted file mode 100644
index e69de29bb2d1d6..00000000000000
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/e/file b/deps/npm/node_modules/pacote/node_modules/tar-fs/test/fixtures/e/file
deleted file mode 100644
index e69de29bb2d1d6..00000000000000
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-fs/test/index.js b/deps/npm/node_modules/pacote/node_modules/tar-fs/test/index.js
deleted file mode 100644
index 01ca87f2a8ef5a..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-fs/test/index.js
+++ /dev/null
@@ -1,227 +0,0 @@
-var test = require('tape')
-var rimraf = require('rimraf')
-var tar = require('../index')
-var path = require('path')
-var fs = require('fs')
-var os = require('os')
-
-var win32 = os.platform() === 'win32'
-
-var mtime = function (st) {
- return Math.floor(st.mtime.getTime() / 1000)
-}
-
-test('copy a -> copy/a', function (t) {
- t.plan(5)
-
- var a = path.join(__dirname, 'fixtures', 'a')
- var b = path.join(__dirname, 'fixtures', 'copy', 'a')
-
- rimraf.sync(b)
- tar.pack(a)
- .pipe(tar.extract(b))
- .on('finish', function () {
- var files = fs.readdirSync(b)
- t.same(files.length, 1)
- t.same(files[0], 'hello.txt')
- var fileB = path.join(b, files[0])
- var fileA = path.join(a, files[0])
- t.same(fs.readFileSync(fileB, 'utf-8'), fs.readFileSync(fileA, 'utf-8'))
- t.same(fs.statSync(fileB).mode, fs.statSync(fileA).mode)
- t.same(mtime(fs.statSync(fileB)), mtime(fs.statSync(fileA)))
- })
-})
-
-test('copy b -> copy/b', function (t) {
- t.plan(8)
-
- var a = path.join(__dirname, 'fixtures', 'b')
- var b = path.join(__dirname, 'fixtures', 'copy', 'b')
-
- rimraf.sync(b)
- tar.pack(a)
- .pipe(tar.extract(b))
- .on('finish', function () {
- var files = fs.readdirSync(b)
- t.same(files.length, 1)
- t.same(files[0], 'a')
- var dirB = path.join(b, files[0])
- var dirA = path.join(a, files[0])
- t.same(fs.statSync(dirB).mode, fs.statSync(dirA).mode)
- t.same(mtime(fs.statSync(dirB)), mtime(fs.statSync(dirA)))
- t.ok(fs.statSync(dirB).isDirectory())
- var fileB = path.join(dirB, 'test.txt')
- var fileA = path.join(dirA, 'test.txt')
- t.same(fs.readFileSync(fileB, 'utf-8'), fs.readFileSync(fileA, 'utf-8'))
- t.same(fs.statSync(fileB).mode, fs.statSync(fileA).mode)
- t.same(mtime(fs.statSync(fileB)), mtime(fs.statSync(fileA)))
- })
-})
-
-test('symlink', function (t) {
- if (win32) { // no symlink support on win32 currently. TODO: test if this can be enabled somehow
- t.plan(1)
- t.ok(true)
- return
- }
-
- t.plan(5)
-
- var a = path.join(__dirname, 'fixtures', 'c')
-
- rimraf.sync(path.join(a, 'link'))
- fs.symlinkSync('.gitignore', path.join(a, 'link'))
-
- var b = path.join(__dirname, 'fixtures', 'copy', 'c')
-
- rimraf.sync(b)
- tar.pack(a)
- .pipe(tar.extract(b))
- .on('finish', function () {
- var files = fs.readdirSync(b).sort()
- t.same(files.length, 2)
- t.same(files[0], '.gitignore')
- t.same(files[1], 'link')
-
- var linkA = path.join(a, 'link')
- var linkB = path.join(b, 'link')
-
- t.same(mtime(fs.lstatSync(linkB)), mtime(fs.lstatSync(linkA)))
- t.same(fs.readlinkSync(linkB), fs.readlinkSync(linkA))
- })
-})
-
-test('follow symlinks', function (t) {
- if (win32) { // no symlink support on win32 currently. TODO: test if this can be enabled somehow
- t.plan(1)
- t.ok(true)
- return
- }
-
- t.plan(5)
-
- var a = path.join(__dirname, 'fixtures', 'c')
-
- rimraf.sync(path.join(a, 'link'))
- fs.symlinkSync('.gitignore', path.join(a, 'link'))
-
- var b = path.join(__dirname, 'fixtures', 'copy', 'c-dereference')
-
- rimraf.sync(b)
- tar.pack(a, {dereference: true})
- .pipe(tar.extract(b))
- .on('finish', function () {
- var files = fs.readdirSync(b).sort()
- t.same(files.length, 2)
- t.same(files[0], '.gitignore')
- t.same(files[1], 'link')
-
- var file1 = path.join(b, '.gitignore')
- var file2 = path.join(b, 'link')
-
- t.same(mtime(fs.lstatSync(file1)), mtime(fs.lstatSync(file2)))
- t.same(fs.readFileSync(file1), fs.readFileSync(file2))
- })
-})
-
-test('strip', function (t) {
- t.plan(2)
-
- var a = path.join(__dirname, 'fixtures', 'b')
- var b = path.join(__dirname, 'fixtures', 'copy', 'b-strip')
-
- rimraf.sync(b)
-
- tar.pack(a)
- .pipe(tar.extract(b, {strip: 1}))
- .on('finish', function () {
- var files = fs.readdirSync(b).sort()
- t.same(files.length, 1)
- t.same(files[0], 'test.txt')
- })
-})
-
-test('strip + map', function (t) {
- t.plan(2)
-
- var a = path.join(__dirname, 'fixtures', 'b')
- var b = path.join(__dirname, 'fixtures', 'copy', 'b-strip')
-
- rimraf.sync(b)
-
- var uppercase = function (header) {
- header.name = header.name.toUpperCase()
- return header
- }
-
- tar.pack(a)
- .pipe(tar.extract(b, {strip: 1, map: uppercase}))
- .on('finish', function () {
- var files = fs.readdirSync(b).sort()
- t.same(files.length, 1)
- t.same(files[0], 'TEST.TXT')
- })
-})
-
-test('map + dir + permissions', function (t) {
- t.plan(win32 ? 1 : 2) // skip chmod test, it's not working like unix
-
- var a = path.join(__dirname, 'fixtures', 'b')
- var b = path.join(__dirname, 'fixtures', 'copy', 'a-perms')
-
- rimraf.sync(b)
-
- var aWithMode = function (header) {
- if (header.name === 'a') {
- header.mode = parseInt(700, 8)
- }
- return header
- }
-
- tar.pack(a)
- .pipe(tar.extract(b, {map: aWithMode}))
- .on('finish', function () {
- var files = fs.readdirSync(b).sort()
- var stat = fs.statSync(path.join(b, 'a'))
- t.same(files.length, 1)
- if (!win32) {
- t.same(stat.mode & parseInt(777, 8), parseInt(700, 8))
- }
- })
-})
-
-test('specific entries', function (t) {
- t.plan(6)
-
- var a = path.join(__dirname, 'fixtures', 'd')
- var b = path.join(__dirname, 'fixtures', 'copy', 'd-entries')
-
- var entries = [ 'file1', 'sub-files/file3', 'sub-dir' ]
-
- rimraf.sync(b)
- tar.pack(a, {entries: entries})
- .pipe(tar.extract(b))
- .on('finish', function () {
- var files = fs.readdirSync(b)
- t.same(files.length, 3)
- t.notSame(files.indexOf('file1'), -1)
- t.notSame(files.indexOf('sub-files'), -1)
- t.notSame(files.indexOf('sub-dir'), -1)
- var subFiles = fs.readdirSync(path.join(b, 'sub-files'))
- t.same(subFiles, ['file3'])
- var subDir = fs.readdirSync(path.join(b, 'sub-dir'))
- t.same(subDir, ['file5'])
- })
-})
-
-test('check type while mapping header on packing', function (t) {
- t.plan(3)
-
- var e = path.join(__dirname, 'fixtures', 'e')
-
- var checkHeaderType = function (header) {
- if (header.name.indexOf('.') === -1) t.same(header.type, header.name)
- }
-
- tar.pack(e, { map: checkHeaderType })
-})
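
The tests above exercise the whole tar-fs surface: `tar.pack(dir, [opts])` returns a readable stream of tar bytes and `tar.extract(dir, [opts])` a writable one, with `strip`, `map`, `entries`, and `dereference` options layered on top. A minimal round-trip sketch (directory and file names are hypothetical):

```js
var fs = require('fs')
var tar = require('tar-fs')

// pack ./my-dir into a tarball on disk
tar.pack('./my-dir').pipe(fs.createWriteStream('my-dir.tar'))

// later: extract the tarball into a new directory,
// uppercasing entry names via the same map hook the tests use
fs.createReadStream('my-dir.tar')
  .pipe(tar.extract('./my-dir-copy', {
    map: function (header) {
      header.name = header.name.toUpperCase()
      return header
    }
  }))
```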
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/LICENSE b/deps/npm/node_modules/pacote/node_modules/tar-stream/LICENSE
deleted file mode 100644
index 757562ec59276b..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014 Mathias Buus
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
\ No newline at end of file
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/README.md b/deps/npm/node_modules/pacote/node_modules/tar-stream/README.md
deleted file mode 100644
index 96abbca1b841e2..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/README.md
+++ /dev/null
@@ -1,168 +0,0 @@
-# tar-stream
-
-tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.
-
-Note that you still need to gunzip your data if you have a `.tar.gz`. We recommend using [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) in conjunction with this.
-
-```
-npm install tar-stream
-```
-
-[![build status](https://secure.travis-ci.org/mafintosh/tar-stream.png)](http://travis-ci.org/mafintosh/tar-stream)
-[![License](https://img.shields.io/badge/license-MIT-blue.svg)](http://opensource.org/licenses/MIT)
-
-## Usage
-
-tar-stream exposes two streams, [pack](https://github.com/mafintosh/tar-stream#packing) which creates tarballs and [extract](https://github.com/mafintosh/tar-stream#extracting) which extracts tarballs. To [modify an existing tarball](https://github.com/mafintosh/tar-stream#modifying-existing-tarballs) use both.
-
-
-It implements USTAR with additional support for pax extended headers. It should be compatible with all popular tar distributions out there (gnutar, bsdtar, etc.).
-
-## Related
-
-If you want to pack/unpack directories on the file system check out [tar-fs](https://github.com/mafintosh/tar-fs) which provides file system bindings to this module.
-
-## Packing
-
-To create a pack stream use `tar.pack()` and call `pack.entry(header, [callback])` to add tar entries.
-
-``` js
-var tar = require('tar-stream')
-var pack = tar.pack() // pack is a streams2 stream
-
-// add a file called my-test.txt with the content "Hello World!"
-pack.entry({ name: 'my-test.txt' }, 'Hello World!')
-
-// add a file called my-stream-test.txt from a stream
-var entry = pack.entry({ name: 'my-stream-test.txt', size: 11 }, function(err) {
- // the stream was added
- // no more entries
- pack.finalize()
-})
-
-entry.write('hello')
-entry.write(' ')
-entry.write('world')
-entry.end()
-
-// pipe the pack stream somewhere
-pack.pipe(process.stdout)
-```
-
-## Extracting
-
-To extract a stream use `tar.extract()` and listen for `extract.on('entry', (header, stream, next))`
-
-``` js
-var extract = tar.extract()
-
-extract.on('entry', function(header, stream, next) {
- // header is the tar header
- // stream is the content body (might be an empty stream)
- // call next when you are done with this entry
-
- stream.on('end', function() {
- next() // ready for next entry
- })
-
- stream.resume() // just auto drain the stream
-})
-
-extract.on('finish', function() {
- // all entries read
-})
-
-pack.pipe(extract)
-```
-
-The tar archive is streamed sequentially, meaning you **must** drain each entry's stream as you get it, or else the main extract stream will receive backpressure and stop reading.
-
-## Headers
-
-The header object used in `entry` should contain the following properties.
-Most of these values can be found by stat'ing a file.
-
-``` js
-{
- name: 'path/to/this/entry.txt',
- size: 1314, // entry size. defaults to 0
- mode: 0644, // entry mode. defaults to 0755 for dirs and 0644 otherwise
- mtime: new Date(), // last modified date for entry. defaults to now.
- type: 'file', // type of entry. defaults to file. can be:
- // file | link | symlink | directory | block-device
- // character-device | fifo | contiguous-file
- linkname: 'path', // linked file name
- uid: 0, // uid of entry owner. defaults to 0
- gid: 0, // gid of entry owner. defaults to 0
- uname: 'maf', // uname of entry owner. defaults to null
- gname: 'staff', // gname of entry owner. defaults to null
- devmajor: 0, // device major version. defaults to 0
- devminor: 0 // device minor version. defaults to 0
-}
-```
-
-## Modifying existing tarballs
-
-Using tar-stream it is easy to rewrite paths, change modes, etc., in an existing tarball.
-
-``` js
-var extract = tar.extract()
-var pack = tar.pack()
-var path = require('path')
-
-extract.on('entry', function(header, stream, callback) {
- // let's prefix all names with 'tmp'
- header.name = path.join('tmp', header.name)
- // write the new entry to the pack stream
- stream.pipe(pack.entry(header, callback))
-})
-
-extract.on('finish', function() {
- // all entries done - lets finalize it
- pack.finalize()
-})
-
-// pipe the old tarball to the extractor
-oldTarballStream.pipe(extract)
-
-// pipe the new tarball to another stream
-pack.pipe(newTarballStream)
-```
-
-## Saving tarball to fs
-
-
-``` js
-var fs = require('fs')
-var tar = require('tar-stream')
-
-var pack = tar.pack() // pack is a streams2 stream
-var path = 'YourTarBall.tar'
-var yourTarball = fs.createWriteStream(path)
-
-// add a file called YourFile.txt with the content "Hello World!"
-pack.entry({name: 'YourFile.txt'}, 'Hello World!', function (err) {
- if (err) throw err
- pack.finalize()
-})
-
-// pipe the pack stream to your file
-pack.pipe(yourTarball)
-
-yourTarball.on('close', function () {
- console.log(path + ' has been written')
- fs.stat(path, function(err, stats) {
- if (err) throw err
- console.log(stats)
- console.log('Got file info successfully!')
- })
-})
-```
-
-## Performance
-
-[See tar-fs for a performance comparison with node-tar](https://github.com/mafintosh/tar-fs/blob/master/README.md#performance)
-
-# License
-
-MIT
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/extract.js b/deps/npm/node_modules/pacote/node_modules/tar-stream/extract.js
deleted file mode 100644
index 8be2a472c65ad5..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/extract.js
+++ /dev/null
@@ -1,246 +0,0 @@
-var util = require('util')
-var bl = require('bl')
-var xtend = require('xtend')
-var headers = require('./headers')
-
-var Writable = require('readable-stream').Writable
-var PassThrough = require('readable-stream').PassThrough
-
-var noop = function () {}
-
-var overflow = function (size) {
- size &= 511
- return size && 512 - size
-}
-
-var emptyStream = function (self, offset) {
- var s = new Source(self, offset)
- s.end()
- return s
-}
-
-var mixinPax = function (header, pax) {
- if (pax.path) header.name = pax.path
- if (pax.linkpath) header.linkname = pax.linkpath
- header.pax = pax
- return header
-}
-
-var Source = function (self, offset) {
- this._parent = self
- this.offset = offset
- PassThrough.call(this)
-}
-
-util.inherits(Source, PassThrough)
-
-Source.prototype.destroy = function (err) {
- this._parent.destroy(err)
-}
-
-var Extract = function (opts) {
- if (!(this instanceof Extract)) return new Extract(opts)
- Writable.call(this, opts)
-
- this._offset = 0
- this._buffer = bl()
- this._missing = 0
- this._onparse = noop
- this._header = null
- this._stream = null
- this._overflow = null
- this._cb = null
- this._locked = false
- this._destroyed = false
- this._pax = null
- this._paxGlobal = null
- this._gnuLongPath = null
- this._gnuLongLinkPath = null
-
- var self = this
- var b = self._buffer
-
- var oncontinue = function () {
- self._continue()
- }
-
- var onunlock = function (err) {
- self._locked = false
- if (err) return self.destroy(err)
- if (!self._stream) oncontinue()
- }
-
- var onstreamend = function () {
- self._stream = null
- var drain = overflow(self._header.size)
- if (drain) self._parse(drain, ondrain)
- else self._parse(512, onheader)
- if (!self._locked) oncontinue()
- }
-
- var ondrain = function () {
- self._buffer.consume(overflow(self._header.size))
- self._parse(512, onheader)
- oncontinue()
- }
-
- var onpaxglobalheader = function () {
- var size = self._header.size
- self._paxGlobal = headers.decodePax(b.slice(0, size))
- b.consume(size)
- onstreamend()
- }
-
- var onpaxheader = function () {
- var size = self._header.size
- self._pax = headers.decodePax(b.slice(0, size))
- if (self._paxGlobal) self._pax = xtend(self._paxGlobal, self._pax)
- b.consume(size)
- onstreamend()
- }
-
- var ongnulongpath = function () {
- var size = self._header.size
- this._gnuLongPath = headers.decodeLongPath(b.slice(0, size))
- b.consume(size)
- onstreamend()
- }
-
- var ongnulonglinkpath = function () {
- var size = self._header.size
- this._gnuLongLinkPath = headers.decodeLongPath(b.slice(0, size))
- b.consume(size)
- onstreamend()
- }
-
- var onheader = function () {
- var offset = self._offset
- var header
- try {
- header = self._header = headers.decode(b.slice(0, 512))
- } catch (err) {
- self.emit('error', err)
- }
- b.consume(512)
-
- if (!header) {
- self._parse(512, onheader)
- oncontinue()
- return
- }
- if (header.type === 'gnu-long-path') {
- self._parse(header.size, ongnulongpath)
- oncontinue()
- return
- }
- if (header.type === 'gnu-long-link-path') {
- self._parse(header.size, ongnulonglinkpath)
- oncontinue()
- return
- }
- if (header.type === 'pax-global-header') {
- self._parse(header.size, onpaxglobalheader)
- oncontinue()
- return
- }
- if (header.type === 'pax-header') {
- self._parse(header.size, onpaxheader)
- oncontinue()
- return
- }
-
- if (self._gnuLongPath) {
- header.name = self._gnuLongPath
- self._gnuLongPath = null
- }
-
- if (self._gnuLongLinkPath) {
- header.linkname = self._gnuLongLinkPath
- self._gnuLongLinkPath = null
- }
-
- if (self._pax) {
- self._header = header = mixinPax(header, self._pax)
- self._pax = null
- }
-
- self._locked = true
-
- if (!header.size || header.type === 'directory') {
- self._parse(512, onheader)
- self.emit('entry', header, emptyStream(self, offset), onunlock)
- return
- }
-
- self._stream = new Source(self, offset)
-
- self.emit('entry', header, self._stream, onunlock)
- self._parse(header.size, onstreamend)
- oncontinue()
- }
-
- this._parse(512, onheader)
-}
-
-util.inherits(Extract, Writable)
-
-Extract.prototype.destroy = function (err) {
- if (this._destroyed) return
- this._destroyed = true
-
- if (err) this.emit('error', err)
- this.emit('close')
- if (this._stream) this._stream.emit('close')
-}
-
-Extract.prototype._parse = function (size, onparse) {
- if (this._destroyed) return
- this._offset += size
- this._missing = size
- this._onparse = onparse
-}
-
-Extract.prototype._continue = function () {
- if (this._destroyed) return
- var cb = this._cb
- this._cb = noop
- if (this._overflow) this._write(this._overflow, undefined, cb)
- else cb()
-}
-
-Extract.prototype._write = function (data, enc, cb) {
- if (this._destroyed) return
-
- var s = this._stream
- var b = this._buffer
- var missing = this._missing
-
- // we do not reach end-of-chunk now. just forward it
-
- if (data.length < missing) {
- this._missing -= data.length
- this._overflow = null
- if (s) return s.write(data, cb)
- b.append(data)
- return cb()
- }
-
- // end-of-chunk. the parser should call cb.
-
- this._cb = cb
- this._missing = 0
-
- var overflow = null
- if (data.length > missing) {
- overflow = data.slice(missing)
- data = data.slice(0, missing)
- }
-
- if (s) s.end(data)
- else b.append(data)
-
- this._overflow = overflow
- this._onparse()
-}
-
-module.exports = Extract
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/headers.js b/deps/npm/node_modules/pacote/node_modules/tar-stream/headers.js
deleted file mode 100644
index 8aab8b56180b4e..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/headers.js
+++ /dev/null
@@ -1,286 +0,0 @@
-var ZEROS = '0000000000000000000'
-var SEVENS = '7777777777777777777'
-var ZERO_OFFSET = '0'.charCodeAt(0)
-var USTAR = 'ustar\x0000'
-var MASK = parseInt('7777', 8)
-
-var clamp = function (index, len, defaultValue) {
- if (typeof index !== 'number') return defaultValue
- index = ~~index // Coerce to integer.
- if (index >= len) return len
- if (index >= 0) return index
- index += len
- if (index >= 0) return index
- return 0
-}
-
-var toType = function (flag) {
- switch (flag) {
- case 0:
- return 'file'
- case 1:
- return 'link'
- case 2:
- return 'symlink'
- case 3:
- return 'character-device'
- case 4:
- return 'block-device'
- case 5:
- return 'directory'
- case 6:
- return 'fifo'
- case 7:
- return 'contiguous-file'
- case 72:
- return 'pax-header'
- case 55:
- return 'pax-global-header'
- case 27:
- return 'gnu-long-link-path'
- case 28:
- case 30:
- return 'gnu-long-path'
- }
-
- return null
-}
-
-var toTypeflag = function (flag) {
- switch (flag) {
- case 'file':
- return 0
- case 'link':
- return 1
- case 'symlink':
- return 2
- case 'character-device':
- return 3
- case 'block-device':
- return 4
- case 'directory':
- return 5
- case 'fifo':
- return 6
- case 'contiguous-file':
- return 7
- case 'pax-header':
- return 72
- }
-
- return 0
-}
-
-var alloc = function (size) {
- var buf = new Buffer(size)
- buf.fill(0)
- return buf
-}
-
-var indexOf = function (block, num, offset, end) {
- for (; offset < end; offset++) {
- if (block[offset] === num) return offset
- }
- return end
-}
-
-var cksum = function (block) {
- var sum = 8 * 32
- for (var i = 0; i < 148; i++) sum += block[i]
- for (var j = 156; j < 512; j++) sum += block[j]
- return sum
-}
-
-var encodeOct = function (val, n) {
- val = val.toString(8)
- if (val.length > n) return SEVENS.slice(0, n) + ' '
- else return ZEROS.slice(0, n - val.length) + val + ' '
-}
-
-/* Copied from the node-tar repo and modified to meet
- * the tar-stream coding standard.
- *
- * Source: https://github.com/npm/node-tar/blob/51b6627a1f357d2eb433e7378e5f05e83b7aa6cd/lib/header.js#L349
- */
-function parse256 (buf) {
- // first byte MUST be either 80 or FF
- // 80 for positive, FF for 2's comp
- var positive
- if (buf[0] === 0x80) positive = true
- else if (buf[0] === 0xFF) positive = false
- else return null
-
- // build up a base-256 tuple from the least sig to the highest
- var zero = false
- var tuple = []
- for (var i = buf.length - 1; i > 0; i--) {
- var byte = buf[i]
- if (positive) tuple.push(byte)
- else if (zero && byte === 0) tuple.push(0)
- else if (zero) {
- zero = false
- tuple.push(0x100 - byte)
- } else tuple.push(0xFF - byte)
- }
-
- var sum = 0
- var l = tuple.length
- for (i = 0; i < l; i++) {
- sum += tuple[i] * Math.pow(256, i)
- }
-
- return positive ? sum : -1 * sum
-}
-
-var decodeOct = function (val, offset, length) {
- val = val.slice(offset, offset + length)
- offset = 0
-
- // If prefixed with 0x80 then parse as a base-256 integer
- if (val[offset] & 0x80) {
- return parse256(val)
- } else {
- // Older versions of tar can prefix with spaces
- while (offset < val.length && val[offset] === 32) offset++
- var end = clamp(indexOf(val, 32, offset, val.length), val.length, val.length)
- while (offset < end && val[offset] === 0) offset++
- if (end === offset) return 0
- return parseInt(val.slice(offset, end).toString(), 8)
- }
-}
-
-var decodeStr = function (val, offset, length) {
- return val.slice(offset, indexOf(val, 0, offset, offset + length)).toString()
-}
-
-var addLength = function (str) {
- var len = Buffer.byteLength(str)
- var digits = Math.floor(Math.log(len) / Math.log(10)) + 1
- if (len + digits > Math.pow(10, digits)) digits++
-
- return (len + digits) + str
-}
-
-exports.decodeLongPath = function (buf) {
- return decodeStr(buf, 0, buf.length)
-}
-
-exports.encodePax = function (opts) { // TODO: encode more stuff in pax
- var result = ''
- if (opts.name) result += addLength(' path=' + opts.name + '\n')
- if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n')
- var pax = opts.pax
- if (pax) {
- for (var key in pax) {
- result += addLength(' ' + key + '=' + pax[key] + '\n')
- }
- }
- return new Buffer(result)
-}
-
-exports.decodePax = function (buf) {
- var result = {}
-
- while (buf.length) {
- var i = 0
- while (i < buf.length && buf[i] !== 32) i++
- var len = parseInt(buf.slice(0, i).toString(), 10)
- if (!len) return result
-
- var b = buf.slice(i + 1, len - 1).toString()
- var keyIndex = b.indexOf('=')
- if (keyIndex === -1) return result
- result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1)
-
- buf = buf.slice(len)
- }
-
- return result
-}
-
-exports.encode = function (opts) {
- var buf = alloc(512)
- var name = opts.name
- var prefix = ''
-
- if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/'
- if (Buffer.byteLength(name) !== name.length) return null // utf-8
-
- while (Buffer.byteLength(name) > 100) {
- var i = name.indexOf('/')
- if (i === -1) return null
- prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i)
- name = name.slice(i + 1)
- }
-
- if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null
- if (opts.linkname && Buffer.byteLength(opts.linkname) > 100) return null
-
- buf.write(name)
- buf.write(encodeOct(opts.mode & MASK, 6), 100)
- buf.write(encodeOct(opts.uid, 6), 108)
- buf.write(encodeOct(opts.gid, 6), 116)
- buf.write(encodeOct(opts.size, 11), 124)
- buf.write(encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136)
-
- buf[156] = ZERO_OFFSET + toTypeflag(opts.type)
-
- if (opts.linkname) buf.write(opts.linkname, 157)
-
- buf.write(USTAR, 257)
- if (opts.uname) buf.write(opts.uname, 265)
- if (opts.gname) buf.write(opts.gname, 297)
- buf.write(encodeOct(opts.devmajor || 0, 6), 329)
- buf.write(encodeOct(opts.devminor || 0, 6), 337)
-
- if (prefix) buf.write(prefix, 345)
-
- buf.write(encodeOct(cksum(buf), 6), 148)
-
- return buf
-}
-
-exports.decode = function (buf) {
- var typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
-
- var name = decodeStr(buf, 0, 100)
- var mode = decodeOct(buf, 100, 8)
- var uid = decodeOct(buf, 108, 8)
- var gid = decodeOct(buf, 116, 8)
- var size = decodeOct(buf, 124, 12)
- var mtime = decodeOct(buf, 136, 12)
- var type = toType(typeflag)
- var linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100)
- var uname = decodeStr(buf, 265, 32)
- var gname = decodeStr(buf, 297, 32)
- var devmajor = decodeOct(buf, 329, 8)
- var devminor = decodeOct(buf, 337, 8)
-
- if (buf[345]) name = decodeStr(buf, 345, 155) + '/' + name
-
- // to support old tar versions that use trailing / to indicate dirs
- if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5
-
- var c = cksum(buf)
-
- // checksum is still initial value if header was null.
- if (c === 8 * 32) return null
-
- // valid checksum
- if (c !== decodeOct(buf, 148, 8)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?')
-
- return {
- name: name,
- mode: mode,
- uid: uid,
- gid: gid,
- size: size,
- mtime: new Date(1000 * mtime),
- type: type,
- linkname: linkname,
- uname: uname,
- gname: gname,
- devmajor: devmajor,
- devminor: devminor
- }
-}
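
Given the pair of functions above, a header should survive an encode/decode round trip through a single 512-byte USTAR block. A minimal sketch (not part of the module's own tests; the field values are illustrative):

```js
var headers = require('./headers')

// encode a plain file header into a 512-byte block
var block = headers.encode({
  name: 'hello.txt',
  mode: parseInt('644', 8),
  uid: 0,
  gid: 0,
  size: 11,
  mtime: new Date(),
  type: 'file',
  uname: 'maf',
  gname: 'staff'
})

console.log(block.length)              // 512
var header = headers.decode(block)     // throws on a bad checksum
console.log(header.name, header.size)  // 'hello.txt' 11
```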
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/index.js b/deps/npm/node_modules/pacote/node_modules/tar-stream/index.js
deleted file mode 100644
index 6481704827ea8f..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/index.js
+++ /dev/null
@@ -1,2 +0,0 @@
-exports.extract = require('./extract')
-exports.pack = require('./pack')
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/.npmignore b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/.npmignore
deleted file mode 100644
index 40b878db5b1c97..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/.npmignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/
\ No newline at end of file
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/.travis.yml b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/.travis.yml
deleted file mode 100644
index 8c6fc4810390d3..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/.travis.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-sudo: false
-language: node_js
-node_js:
- - '0.10'
- - '0.12'
- - '4'
- - '6'
- - '7'
-branches:
- only:
- - master
-notifications:
- email:
- - rod@vagg.org
- - matteo.collina@gmail.com
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/LICENSE.md b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/LICENSE.md
deleted file mode 100644
index ff35a347282dd8..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/LICENSE.md
+++ /dev/null
@@ -1,13 +0,0 @@
-The MIT License (MIT)
-=====================
-
-Copyright (c) 2013-2016 bl contributors
-----------------------------------
-
-*bl contributors listed at <https://github.com/rvagg/bl#contributors>*
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/README.md b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/README.md
deleted file mode 100644
index da0c18338e7a30..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/README.md
+++ /dev/null
@@ -1,208 +0,0 @@
-# bl *(BufferList)*
-
-[![Build Status](https://travis-ci.org/rvagg/bl.svg?branch=master)](https://travis-ci.org/rvagg/bl)
-
-**A Node.js Buffer list collector, reader and streamer thingy.**
-
-[![NPM](https://nodei.co/npm/bl.png?downloads=true&downloadRank=true)](https://nodei.co/npm/bl/)
-[![NPM](https://nodei.co/npm-dl/bl.png?months=6&height=3)](https://nodei.co/npm/bl/)
-
-**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them!
-
-The original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently.
-
-```js
-const BufferList = require('bl')
-
-var bl = new BufferList()
-bl.append(new Buffer('abcd'))
-bl.append(new Buffer('efg'))
-bl.append('hi') // bl will also accept & convert Strings
-bl.append(new Buffer('j'))
-bl.append(new Buffer([ 0x3, 0x4 ]))
-
-console.log(bl.length) // 12
-
-console.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij'
-console.log(bl.slice(3, 10).toString('ascii')) // 'defghij'
-console.log(bl.slice(3, 6).toString('ascii')) // 'def'
-console.log(bl.slice(3, 8).toString('ascii')) // 'defgh'
-console.log(bl.slice(5, 10).toString('ascii')) // 'fghij'
-
-// or just use toString!
-console.log(bl.toString()) // 'abcdefghij\u0003\u0004'
-console.log(bl.toString('ascii', 3, 8)) // 'defgh'
-console.log(bl.toString('ascii', 5, 10)) // 'fghij'
-
-// other standard Buffer readables
-console.log(bl.readUInt16BE(10)) // 0x0304
-console.log(bl.readUInt16LE(10)) // 0x0403
-```
-
-Give it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**:
-
-```js
-const bl = require('bl')
- , fs = require('fs')
-
-fs.createReadStream('README.md')
- .pipe(bl(function (err, data) { // note 'new' isn't strictly required
- // `data` is a complete Buffer object containing the full data
- console.log(data.toString())
- }))
-```
-
-Note that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream.
-
-Or to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!):
-```js
-const hyperquest = require('hyperquest')
- , bl = require('bl')
- , url = 'https://raw.github.com/rvagg/bl/master/README.md'
-
-hyperquest(url).pipe(bl(function (err, data) {
- console.log(data.toString())
-}))
-```
-
-Or, use it as a readable stream to recompose a list of Buffers to an output source:
-
-```js
-const BufferList = require('bl')
- , fs = require('fs')
-
-var bl = new BufferList()
-bl.append(new Buffer('abcd'))
-bl.append(new Buffer('efg'))
-bl.append(new Buffer('hi'))
-bl.append(new Buffer('j'))
-
-bl.pipe(fs.createWriteStream('gibberish.txt'))
-```
-
-## API
-
- * new BufferList([ callback ])
- * bl.length
- * bl.append(buffer)
- * bl.get(index)
- * bl.slice([ start[, end ] ])
- * bl.shallowSlice([ start[, end ] ])
- * bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])
- * bl.duplicate()
- * bl.consume(bytes)
- * bl.toString([encoding, [ start, [ end ]]])
- * bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8()
- * Streams
-
---------------------------------------------------------
-
-### new BufferList([ callback | Buffer | Buffer array | BufferList | BufferList array | String ])
-The constructor takes an optional callback; if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance when `bl.end()` is called (i.e. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream.
-
-Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` objects.
-
-`new` is not strictly required; if you don't instantiate a new object, it will be done automatically for you, so you can create a new instance simply with:
-
-```js
-var bl = require('bl')
-var myinstance = bl()
-
-// equivalent to:
-
-var BufferList = require('bl')
-var myinstance = new BufferList()
-```
-
---------------------------------------------------------
-
-### bl.length
-Get the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. Should accurately represent the total number of bytes that can be read from the list.
-
---------------------------------------------------------
-
-### bl.append(Buffer | Buffer array | BufferList | BufferList array | String)
-`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained.
-
---------------------------------------------------------
-
-### bl.get(index)
-`get()` will return the byte at the specified index.
-
---------------------------------------------------------
-
-### bl.slice([ start, [ end ] ])
-`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
-
-If the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer.
-
---------------------------------------------------------
-
-### bl.shallowSlice([ start, [ end ] ])
-`shallowSlice()` returns a new `BufferList` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
-
-No copies will be performed. All buffers in the result share memory with the original list.
-
---------------------------------------------------------
-
-### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])
-`copy()` copies the content of the list into the `dest` buffer, starting from `destStart` and containing the bytes within the range specified with `srcStart` to `srcEnd`. `destStart`, `srcStart` and `srcEnd` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively.
-
---------------------------------------------------------
-
-### bl.duplicate()
-`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remain the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list. Example:
-
-```js
-var bl = new BufferList()
-
-bl.append('hello')
-bl.append(' world')
-bl.append('\n')
-
-bl.duplicate().pipe(process.stdout, { end: false })
-
-console.log(bl.toString())
-```
-
---------------------------------------------------------
-
-### bl.consume(bytes)
-`consume()` will shift bytes *off the start of the list*. The number of bytes consumed doesn't need to line up with the sizes of the internal Buffers; initial offsets will be calculated accordingly in order to give you a consistent view of the data.
-
---------------------------------------------------------
-
-### bl.toString([encoding, [ start, [ end ]]])
-`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information.
-
---------------------------------------------------------
-
-### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8()
-
-All of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently.
-
-See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
-
---------------------------------------------------------
-
-### Streams
-**bl** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **bl** instance.
-
---------------------------------------------------------
-
-## Contributors
-
-**bl** is brought to you by the following hackers:
-
- * [Rod Vagg](https://github.com/rvagg)
- * [Matteo Collina](https://github.com/mcollina)
- * [Jarett Cruger](https://github.com/jcrugzz)
-
-
-## License & copyright
-
-Copyright (c) 2013-2016 bl contributors (listed above).
-
-bl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details.
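
Two of the API entries above, `shallowSlice()` and `consume()`, have no inline example in the README; a minimal sketch of both:

```js
var BufferList = require('bl')

var bl = new BufferList()
bl.append(new Buffer('abc'))
bl.append(new Buffer('defg'))

// shallowSlice returns a BufferList sharing memory with the original
console.log(bl.shallowSlice(1, 5).toString()) // 'bcde'

// consume shifts bytes off the start, across internal buffer boundaries
bl.consume(4)
console.log(bl.toString()) // 'efg'
console.log(bl.length)     // 3
```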
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/bl.js b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/bl.js
deleted file mode 100644
index 98983316c48c41..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/bl.js
+++ /dev/null
@@ -1,280 +0,0 @@
-var DuplexStream = require('readable-stream/duplex')
- , util = require('util')
-
-
-function BufferList (callback) {
- if (!(this instanceof BufferList))
- return new BufferList(callback)
-
- this._bufs = []
- this.length = 0
-
- if (typeof callback == 'function') {
- this._callback = callback
-
- var piper = function piper (err) {
- if (this._callback) {
- this._callback(err)
- this._callback = null
- }
- }.bind(this)
-
- this.on('pipe', function onPipe (src) {
- src.on('error', piper)
- })
- this.on('unpipe', function onUnpipe (src) {
- src.removeListener('error', piper)
- })
- } else {
- this.append(callback)
- }
-
- DuplexStream.call(this)
-}
-
-
-util.inherits(BufferList, DuplexStream)
-
-
-BufferList.prototype._offset = function _offset (offset) {
- var tot = 0, i = 0, _t
- if (offset === 0) return [ 0, 0 ]
- for (; i < this._bufs.length; i++) {
- _t = tot + this._bufs[i].length
- if (offset < _t || i == this._bufs.length - 1)
- return [ i, offset - tot ]
- tot = _t
- }
-}
-
-
-BufferList.prototype.append = function append (buf) {
- var i = 0
-
- if (Buffer.isBuffer(buf)) {
- this._appendBuffer(buf);
- } else if (Array.isArray(buf)) {
- for (; i < buf.length; i++)
- this.append(buf[i])
- } else if (buf instanceof BufferList) {
- // unwrap argument into individual BufferLists
- for (; i < buf._bufs.length; i++)
- this.append(buf._bufs[i])
- } else if (buf != null) {
- // coerce number arguments to strings, since Buffer(number) does
- // uninitialized memory allocation
- if (typeof buf == 'number')
- buf = buf.toString()
-
- this._appendBuffer(new Buffer(buf));
- }
-
- return this
-}
-
-
-BufferList.prototype._appendBuffer = function appendBuffer (buf) {
- this._bufs.push(buf)
- this.length += buf.length
-}
-
-
-BufferList.prototype._write = function _write (buf, encoding, callback) {
- this._appendBuffer(buf)
-
- if (typeof callback == 'function')
- callback()
-}
-
-
-BufferList.prototype._read = function _read (size) {
- if (!this.length)
- return this.push(null)
-
- size = Math.min(size, this.length)
- this.push(this.slice(0, size))
- this.consume(size)
-}
-
-
-BufferList.prototype.end = function end (chunk) {
- DuplexStream.prototype.end.call(this, chunk)
-
- if (this._callback) {
- this._callback(null, this.slice())
- this._callback = null
- }
-}
-
-
-BufferList.prototype.get = function get (index) {
- return this.slice(index, index + 1)[0]
-}
-
-
-BufferList.prototype.slice = function slice (start, end) {
- if (typeof start == 'number' && start < 0)
- start += this.length
- if (typeof end == 'number' && end < 0)
- end += this.length
- return this.copy(null, 0, start, end)
-}
-
-
-BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
- if (typeof srcStart != 'number' || srcStart < 0)
- srcStart = 0
- if (typeof srcEnd != 'number' || srcEnd > this.length)
- srcEnd = this.length
- if (srcStart >= this.length)
- return dst || new Buffer(0)
- if (srcEnd <= 0)
- return dst || new Buffer(0)
-
- var copy = !!dst
- , off = this._offset(srcStart)
- , len = srcEnd - srcStart
- , bytes = len
- , bufoff = (copy && dstStart) || 0
- , start = off[1]
- , l
- , i
-
- // copy/slice everything
- if (srcStart === 0 && srcEnd == this.length) {
- if (!copy) { // slice, but full concat if multiple buffers
- return this._bufs.length === 1
- ? this._bufs[0]
- : Buffer.concat(this._bufs, this.length)
- }
-
- // copy, need to copy individual buffers
- for (i = 0; i < this._bufs.length; i++) {
- this._bufs[i].copy(dst, bufoff)
- bufoff += this._bufs[i].length
- }
-
- return dst
- }
-
- // easy, cheap case where it's a subset of one of the buffers
- if (bytes <= this._bufs[off[0]].length - start) {
- return copy
- ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
- : this._bufs[off[0]].slice(start, start + bytes)
- }
-
- if (!copy) // a slice, we need something to copy in to
- dst = new Buffer(len)
-
- for (i = off[0]; i < this._bufs.length; i++) {
- l = this._bufs[i].length - start
-
- if (bytes > l) {
- this._bufs[i].copy(dst, bufoff, start)
- } else {
- this._bufs[i].copy(dst, bufoff, start, start + bytes)
- break
- }
-
- bufoff += l
- bytes -= l
-
- if (start)
- start = 0
- }
-
- return dst
-}
-
-BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
- start = start || 0
- end = end || this.length
-
- if (start < 0)
- start += this.length
- if (end < 0)
- end += this.length
-
- var startOffset = this._offset(start)
- , endOffset = this._offset(end)
- , buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)
-
- if (endOffset[1] == 0)
- buffers.pop()
- else
- buffers[buffers.length-1] = buffers[buffers.length-1].slice(0, endOffset[1])
-
- if (startOffset[1] != 0)
- buffers[0] = buffers[0].slice(startOffset[1])
-
- return new BufferList(buffers)
-}
-
-BufferList.prototype.toString = function toString (encoding, start, end) {
- return this.slice(start, end).toString(encoding)
-}
-
-BufferList.prototype.consume = function consume (bytes) {
- while (this._bufs.length) {
- if (bytes >= this._bufs[0].length) {
- bytes -= this._bufs[0].length
- this.length -= this._bufs[0].length
- this._bufs.shift()
- } else {
- this._bufs[0] = this._bufs[0].slice(bytes)
- this.length -= bytes
- break
- }
- }
- return this
-}
-
-
-BufferList.prototype.duplicate = function duplicate () {
- var i = 0
- , copy = new BufferList()
-
- for (; i < this._bufs.length; i++)
- copy.append(this._bufs[i])
-
- return copy
-}
-
-
-BufferList.prototype.destroy = function destroy () {
- this._bufs.length = 0
- this.length = 0
- this.push(null)
-}
-
-
-;(function () {
- var methods = {
- 'readDoubleBE' : 8
- , 'readDoubleLE' : 8
- , 'readFloatBE' : 4
- , 'readFloatLE' : 4
- , 'readInt32BE' : 4
- , 'readInt32LE' : 4
- , 'readUInt32BE' : 4
- , 'readUInt32LE' : 4
- , 'readInt16BE' : 2
- , 'readInt16LE' : 2
- , 'readUInt16BE' : 2
- , 'readUInt16LE' : 2
- , 'readInt8' : 1
- , 'readUInt8' : 1
- }
-
- for (var m in methods) {
- (function (m) {
- BufferList.prototype[m] = function (offset) {
- return this.slice(offset, offset + methods[m])[m](0)
- }
- }(m))
- }
-}())
-
-
-module.exports = BufferList
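Editor's note: for orientation, here is a minimal sketch of the API the deleted `bl.js` above implemented (assuming `bl@1.x` is installed from npm; every method used below is defined in the removed file):

```js
var BufferList = require('bl')

var bl = new BufferList()
bl.append(new Buffer('abcd'))    // append() takes Buffers, strings,
bl.append('efgh')                // arrays, and nested BufferLists
bl.append([ new Buffer('ij') ])

console.log(bl.length)                  // 10
console.log(bl.slice(2, 6).toString())  // 'cdef' -- copies across chunk boundaries
console.log(bl.readUInt16BE(0))         // 24930 (0x6162: 'a','b' as big-endian uint16)

bl.consume(4)               // drop the first 4 bytes without copying the rest
console.log(bl.toString())  // 'efghij'
```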
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/package.json b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/package.json
deleted file mode 100644
index 89a0dbcd04c86a..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/package.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
- "_from": "bl@^1.0.0",
- "_id": "bl@1.2.1",
- "_integrity": "sha1-ysMo977kVzDUBLaSID/LWQ4XLV4=",
- "_location": "/pacote/tar-stream/bl",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "bl@^1.0.0",
- "name": "bl",
- "escapedName": "bl",
- "rawSpec": "^1.0.0",
- "saveSpec": null,
- "fetchSpec": "^1.0.0"
- },
- "_requiredBy": [
- "/pacote/tar-stream"
- ],
- "_resolved": "https://registry.npmjs.org/bl/-/bl-1.2.1.tgz",
- "_shasum": "cac328f7bee45730d404b692203fcb590e172d5e",
- "_shrinkwrap": null,
- "_spec": "bl@^1.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/tar-stream",
- "authors": [
- "Rod Vagg (https://github.com/rvagg)",
- "Matteo Collina (https://github.com/mcollina)",
- "Jarett Cruger (https://github.com/jcrugzz)"
- ],
- "bin": null,
- "bugs": {
- "url": "https://github.com/rvagg/bl/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "readable-stream": "^2.0.5"
- },
- "deprecated": false,
- "description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!",
- "devDependencies": {
- "faucet": "0.0.1",
- "hash_file": "~0.1.1",
- "tape": "~4.6.0"
- },
- "homepage": "https://github.com/rvagg/bl",
- "keywords": [
- "buffer",
- "buffers",
- "stream",
- "awesomesauce"
- ],
- "license": "MIT",
- "main": "bl.js",
- "name": "bl",
- "optionalDependencies": {},
- "peerDependencies": {},
- "repository": {
- "type": "git",
- "url": "git+https://github.com/rvagg/bl.git"
- },
- "scripts": {
- "test": "node test/test.js | faucet"
- },
- "version": "1.2.1"
-}
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/test/test.js b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/test/test.js
deleted file mode 100644
index 396974ec162d07..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/bl/test/test.js
+++ /dev/null
@@ -1,701 +0,0 @@
-var tape = require('tape')
- , crypto = require('crypto')
- , fs = require('fs')
- , hash = require('hash_file')
- , BufferList = require('../')
-
- , encodings =
- ('hex utf8 utf-8 ascii binary base64'
- + (process.browser ? '' : ' ucs2 ucs-2 utf16le utf-16le')).split(' ')
-
-tape('single bytes from single buffer', function (t) {
- var bl = new BufferList()
- bl.append(new Buffer('abcd'))
-
- t.equal(bl.length, 4)
-
- t.equal(bl.get(0), 97)
- t.equal(bl.get(1), 98)
- t.equal(bl.get(2), 99)
- t.equal(bl.get(3), 100)
-
- t.end()
-})
-
-tape('single bytes from multiple buffers', function (t) {
- var bl = new BufferList()
- bl.append(new Buffer('abcd'))
- bl.append(new Buffer('efg'))
- bl.append(new Buffer('hi'))
- bl.append(new Buffer('j'))
-
- t.equal(bl.length, 10)
-
- t.equal(bl.get(0), 97)
- t.equal(bl.get(1), 98)
- t.equal(bl.get(2), 99)
- t.equal(bl.get(3), 100)
- t.equal(bl.get(4), 101)
- t.equal(bl.get(5), 102)
- t.equal(bl.get(6), 103)
- t.equal(bl.get(7), 104)
- t.equal(bl.get(8), 105)
- t.equal(bl.get(9), 106)
- t.end()
-})
-
-tape('multi bytes from single buffer', function (t) {
- var bl = new BufferList()
- bl.append(new Buffer('abcd'))
-
- t.equal(bl.length, 4)
-
- t.equal(bl.slice(0, 4).toString('ascii'), 'abcd')
- t.equal(bl.slice(0, 3).toString('ascii'), 'abc')
- t.equal(bl.slice(1, 4).toString('ascii'), 'bcd')
- t.equal(bl.slice(-4, -1).toString('ascii'), 'abc')
-
- t.end()
-})
-
-tape('multi bytes from single buffer (negative indexes)', function (t) {
- var bl = new BufferList()
- bl.append(new Buffer('buffer'))
-
- t.equal(bl.length, 6)
-
- t.equal(bl.slice(-6, -1).toString('ascii'), 'buffe')
- t.equal(bl.slice(-6, -2).toString('ascii'), 'buff')
- t.equal(bl.slice(-5, -2).toString('ascii'), 'uff')
-
- t.end()
-})
-
-tape('multiple bytes from multiple buffers', function (t) {
- var bl = new BufferList()
-
- bl.append(new Buffer('abcd'))
- bl.append(new Buffer('efg'))
- bl.append(new Buffer('hi'))
- bl.append(new Buffer('j'))
-
- t.equal(bl.length, 10)
-
- t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
- t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
- t.equal(bl.slice(3, 6).toString('ascii'), 'def')
- t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
- t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
- t.equal(bl.slice(-7, -4).toString('ascii'), 'def')
-
- t.end()
-})
-
-tape('multiple bytes from multiple buffer lists', function (t) {
- var bl = new BufferList()
-
- bl.append(new BufferList([ new Buffer('abcd'), new Buffer('efg') ]))
- bl.append(new BufferList([ new Buffer('hi'), new Buffer('j') ]))
-
- t.equal(bl.length, 10)
-
- t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
-
- t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
- t.equal(bl.slice(3, 6).toString('ascii'), 'def')
- t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
- t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
-
- t.end()
-})
-
-// same data as previous test, just using nested constructors
-tape('multiple bytes from crazy nested buffer lists', function (t) {
- var bl = new BufferList()
-
- bl.append(new BufferList([
- new BufferList([
- new BufferList(new Buffer('abc'))
- , new Buffer('d')
- , new BufferList(new Buffer('efg'))
- ])
- , new BufferList([ new Buffer('hi') ])
- , new BufferList(new Buffer('j'))
- ]))
-
- t.equal(bl.length, 10)
-
- t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
-
- t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
- t.equal(bl.slice(3, 6).toString('ascii'), 'def')
- t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
- t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
-
- t.end()
-})
-
-tape('append accepts arrays of Buffers', function (t) {
- var bl = new BufferList()
- bl.append(new Buffer('abc'))
- bl.append([ new Buffer('def') ])
- bl.append([ new Buffer('ghi'), new Buffer('jkl') ])
- bl.append([ new Buffer('mnop'), new Buffer('qrstu'), new Buffer('vwxyz') ])
- t.equal(bl.length, 26)
- t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
- t.end()
-})
-
-tape('append accepts arrays of BufferLists', function (t) {
- var bl = new BufferList()
- bl.append(new Buffer('abc'))
- bl.append([ new BufferList('def') ])
- bl.append(new BufferList([ new Buffer('ghi'), new BufferList('jkl') ]))
- bl.append([ new Buffer('mnop'), new BufferList([ new Buffer('qrstu'), new Buffer('vwxyz') ]) ])
- t.equal(bl.length, 26)
- t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
- t.end()
-})
-
-tape('append chainable', function (t) {
- var bl = new BufferList()
- t.ok(bl.append(new Buffer('abcd')) === bl)
- t.ok(bl.append([ new Buffer('abcd') ]) === bl)
- t.ok(bl.append(new BufferList(new Buffer('abcd'))) === bl)
- t.ok(bl.append([ new BufferList(new Buffer('abcd')) ]) === bl)
- t.end()
-})
-
-tape('append chainable (test results)', function (t) {
- var bl = new BufferList('abc')
- .append([ new BufferList('def') ])
- .append(new BufferList([ new Buffer('ghi'), new BufferList('jkl') ]))
- .append([ new Buffer('mnop'), new BufferList([ new Buffer('qrstu'), new Buffer('vwxyz') ]) ])
-
- t.equal(bl.length, 26)
- t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
- t.end()
-})
-
-tape('consuming from multiple buffers', function (t) {
- var bl = new BufferList()
-
- bl.append(new Buffer('abcd'))
- bl.append(new Buffer('efg'))
- bl.append(new Buffer('hi'))
- bl.append(new Buffer('j'))
-
- t.equal(bl.length, 10)
-
- t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
-
- bl.consume(3)
- t.equal(bl.length, 7)
- t.equal(bl.slice(0, 7).toString('ascii'), 'defghij')
-
- bl.consume(2)
- t.equal(bl.length, 5)
- t.equal(bl.slice(0, 5).toString('ascii'), 'fghij')
-
- bl.consume(1)
- t.equal(bl.length, 4)
- t.equal(bl.slice(0, 4).toString('ascii'), 'ghij')
-
- bl.consume(1)
- t.equal(bl.length, 3)
- t.equal(bl.slice(0, 3).toString('ascii'), 'hij')
-
- bl.consume(2)
- t.equal(bl.length, 1)
- t.equal(bl.slice(0, 1).toString('ascii'), 'j')
-
- t.end()
-})
-
-tape('complete consumption', function (t) {
- var bl = new BufferList()
-
- bl.append(new Buffer('a'))
- bl.append(new Buffer('b'))
-
- bl.consume(2)
-
- t.equal(bl.length, 0)
- t.equal(bl._bufs.length, 0)
-
- t.end()
-})
-
-tape('test readUInt8 / readInt8', function (t) {
- var buf1 = new Buffer(1)
- , buf2 = new Buffer(3)
- , buf3 = new Buffer(3)
- , bl = new BufferList()
-
- buf2[1] = 0x3
- buf2[2] = 0x4
- buf3[0] = 0x23
- buf3[1] = 0x42
-
- bl.append(buf1)
- bl.append(buf2)
- bl.append(buf3)
-
- t.equal(bl.readUInt8(2), 0x3)
- t.equal(bl.readInt8(2), 0x3)
- t.equal(bl.readUInt8(3), 0x4)
- t.equal(bl.readInt8(3), 0x4)
- t.equal(bl.readUInt8(4), 0x23)
- t.equal(bl.readInt8(4), 0x23)
- t.equal(bl.readUInt8(5), 0x42)
- t.equal(bl.readInt8(5), 0x42)
- t.end()
-})
-
-tape('test readUInt16LE / readUInt16BE / readInt16LE / readInt16BE', function (t) {
- var buf1 = new Buffer(1)
- , buf2 = new Buffer(3)
- , buf3 = new Buffer(3)
- , bl = new BufferList()
-
- buf2[1] = 0x3
- buf2[2] = 0x4
- buf3[0] = 0x23
- buf3[1] = 0x42
-
- bl.append(buf1)
- bl.append(buf2)
- bl.append(buf3)
-
- t.equal(bl.readUInt16BE(2), 0x0304)
- t.equal(bl.readUInt16LE(2), 0x0403)
- t.equal(bl.readInt16BE(2), 0x0304)
- t.equal(bl.readInt16LE(2), 0x0403)
- t.equal(bl.readUInt16BE(3), 0x0423)
- t.equal(bl.readUInt16LE(3), 0x2304)
- t.equal(bl.readInt16BE(3), 0x0423)
- t.equal(bl.readInt16LE(3), 0x2304)
- t.equal(bl.readUInt16BE(4), 0x2342)
- t.equal(bl.readUInt16LE(4), 0x4223)
- t.equal(bl.readInt16BE(4), 0x2342)
- t.equal(bl.readInt16LE(4), 0x4223)
- t.end()
-})
-
-tape('test readUInt32LE / readUInt32BE / readInt32LE / readInt32BE', function (t) {
- var buf1 = new Buffer(1)
- , buf2 = new Buffer(3)
- , buf3 = new Buffer(3)
- , bl = new BufferList()
-
- buf2[1] = 0x3
- buf2[2] = 0x4
- buf3[0] = 0x23
- buf3[1] = 0x42
-
- bl.append(buf1)
- bl.append(buf2)
- bl.append(buf3)
-
- t.equal(bl.readUInt32BE(2), 0x03042342)
- t.equal(bl.readUInt32LE(2), 0x42230403)
- t.equal(bl.readInt32BE(2), 0x03042342)
- t.equal(bl.readInt32LE(2), 0x42230403)
- t.end()
-})
-
-tape('test readFloatLE / readFloatBE', function (t) {
- var buf1 = new Buffer(1)
- , buf2 = new Buffer(3)
- , buf3 = new Buffer(3)
- , bl = new BufferList()
-
- buf2[1] = 0x00
- buf2[2] = 0x00
- buf3[0] = 0x80
- buf3[1] = 0x3f
-
- bl.append(buf1)
- bl.append(buf2)
- bl.append(buf3)
-
- t.equal(bl.readFloatLE(2), 0x01)
- t.end()
-})
-
-tape('test readDoubleLE / readDoubleBE', function (t) {
- var buf1 = new Buffer(1)
- , buf2 = new Buffer(3)
- , buf3 = new Buffer(10)
- , bl = new BufferList()
-
- buf2[1] = 0x55
- buf2[2] = 0x55
- buf3[0] = 0x55
- buf3[1] = 0x55
- buf3[2] = 0x55
- buf3[3] = 0x55
- buf3[4] = 0xd5
- buf3[5] = 0x3f
-
- bl.append(buf1)
- bl.append(buf2)
- bl.append(buf3)
-
- t.equal(bl.readDoubleLE(2), 0.3333333333333333)
- t.end()
-})
-
-tape('test toString', function (t) {
- var bl = new BufferList()
-
- bl.append(new Buffer('abcd'))
- bl.append(new Buffer('efg'))
- bl.append(new Buffer('hi'))
- bl.append(new Buffer('j'))
-
- t.equal(bl.toString('ascii', 0, 10), 'abcdefghij')
- t.equal(bl.toString('ascii', 3, 10), 'defghij')
- t.equal(bl.toString('ascii', 3, 6), 'def')
- t.equal(bl.toString('ascii', 3, 8), 'defgh')
- t.equal(bl.toString('ascii', 5, 10), 'fghij')
-
- t.end()
-})
-
-tape('test toString encoding', function (t) {
- var bl = new BufferList()
- , b = new Buffer('abcdefghij\xff\x00')
-
- bl.append(new Buffer('abcd'))
- bl.append(new Buffer('efg'))
- bl.append(new Buffer('hi'))
- bl.append(new Buffer('j'))
- bl.append(new Buffer('\xff\x00'))
-
- encodings.forEach(function (enc) {
- t.equal(bl.toString(enc), b.toString(enc), enc)
- })
-
- t.end()
-})
-
-!process.browser && tape('test stream', function (t) {
- var random = crypto.randomBytes(65534)
- , rndhash = hash(random, 'md5')
- , md5sum = crypto.createHash('md5')
- , bl = new BufferList(function (err, buf) {
- t.ok(Buffer.isBuffer(buf))
- t.ok(err === null)
- t.equal(rndhash, hash(bl.slice(), 'md5'))
- t.equal(rndhash, hash(buf, 'md5'))
-
- bl.pipe(fs.createWriteStream('/tmp/bl_test_rnd_out.dat'))
- .on('close', function () {
- var s = fs.createReadStream('/tmp/bl_test_rnd_out.dat')
- s.on('data', md5sum.update.bind(md5sum))
- s.on('end', function() {
- t.equal(rndhash, md5sum.digest('hex'), 'woohoo! correct hash!')
- t.end()
- })
- })
-
- })
-
- fs.writeFileSync('/tmp/bl_test_rnd.dat', random)
- fs.createReadStream('/tmp/bl_test_rnd.dat').pipe(bl)
-})
-
-tape('instantiation with Buffer', function (t) {
- var buf = crypto.randomBytes(1024)
- , buf2 = crypto.randomBytes(1024)
- , b = BufferList(buf)
-
- t.equal(buf.toString('hex'), b.slice().toString('hex'), 'same buffer')
- b = BufferList([ buf, buf2 ])
- t.equal(b.slice().toString('hex'), Buffer.concat([ buf, buf2 ]).toString('hex'), 'same buffer')
- t.end()
-})
-
-tape('test String appendage', function (t) {
- var bl = new BufferList()
- , b = new Buffer('abcdefghij\xff\x00')
-
- bl.append('abcd')
- bl.append('efg')
- bl.append('hi')
- bl.append('j')
- bl.append('\xff\x00')
-
- encodings.forEach(function (enc) {
- t.equal(bl.toString(enc), b.toString(enc))
- })
-
- t.end()
-})
-
-tape('test Number appendage', function (t) {
- var bl = new BufferList()
- , b = new Buffer('1234567890')
-
- bl.append(1234)
- bl.append(567)
- bl.append(89)
- bl.append(0)
-
- encodings.forEach(function (enc) {
- t.equal(bl.toString(enc), b.toString(enc))
- })
-
- t.end()
-})
-
-tape('write nothing, should get empty buffer', function (t) {
- t.plan(3)
- BufferList(function (err, data) {
- t.notOk(err, 'no error')
- t.ok(Buffer.isBuffer(data), 'got a buffer')
- t.equal(0, data.length, 'got a zero-length buffer')
- t.end()
- }).end()
-})
-
-tape('unicode string', function (t) {
- t.plan(2)
- var inp1 = '\u2600'
- , inp2 = '\u2603'
- , exp = inp1 + ' and ' + inp2
- , bl = BufferList()
- bl.write(inp1)
- bl.write(' and ')
- bl.write(inp2)
- t.equal(exp, bl.toString())
- t.equal(new Buffer(exp).toString('hex'), bl.toString('hex'))
-})
-
-tape('should emit finish', function (t) {
- var source = BufferList()
- , dest = BufferList()
-
- source.write('hello')
- source.pipe(dest)
-
- dest.on('finish', function () {
- t.equal(dest.toString('utf8'), 'hello')
- t.end()
- })
-})
-
-tape('basic copy', function (t) {
- var buf = crypto.randomBytes(1024)
- , buf2 = new Buffer(1024)
- , b = BufferList(buf)
-
- b.copy(buf2)
- t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer')
- t.end()
-})
-
-tape('copy after many appends', function (t) {
- var buf = crypto.randomBytes(512)
- , buf2 = new Buffer(1024)
- , b = BufferList(buf)
-
- b.append(buf)
- b.copy(buf2)
- t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer')
- t.end()
-})
-
-tape('copy at a precise position', function (t) {
- var buf = crypto.randomBytes(1004)
- , buf2 = new Buffer(1024)
- , b = BufferList(buf)
-
- b.copy(buf2, 20)
- t.equal(b.slice().toString('hex'), buf2.slice(20).toString('hex'), 'same buffer')
- t.end()
-})
-
-tape('copy starting from a precise location', function (t) {
- var buf = crypto.randomBytes(10)
- , buf2 = new Buffer(5)
- , b = BufferList(buf)
-
- b.copy(buf2, 0, 5)
- t.equal(b.slice(5).toString('hex'), buf2.toString('hex'), 'same buffer')
- t.end()
-})
-
-tape('copy in an interval', function (t) {
- var rnd = crypto.randomBytes(10)
- , b = BufferList(rnd) // put the random bytes there
- , actual = new Buffer(3)
- , expected = new Buffer(3)
-
- rnd.copy(expected, 0, 5, 8)
- b.copy(actual, 0, 5, 8)
-
- t.equal(actual.toString('hex'), expected.toString('hex'), 'same buffer')
- t.end()
-})
-
-tape('copy an interval between two buffers', function (t) {
- var buf = crypto.randomBytes(10)
- , buf2 = new Buffer(10)
- , b = BufferList(buf)
-
- b.append(buf)
- b.copy(buf2, 0, 5, 15)
-
- t.equal(b.slice(5, 15).toString('hex'), buf2.toString('hex'), 'same buffer')
- t.end()
-})
-
-tape('shallow slice across buffer boundaries', function (t) {
- var bl = new BufferList(['First', 'Second', 'Third'])
-
- t.equal(bl.shallowSlice(3, 13).toString(), 'stSecondTh')
- t.end()
-})
-
-tape('shallow slice within single buffer', function (t) {
- t.plan(2)
- var bl = new BufferList(['First', 'Second', 'Third'])
-
- t.equal(bl.shallowSlice(5, 10).toString(), 'Secon')
- t.equal(bl.shallowSlice(7, 10).toString(), 'con')
- t.end()
-})
-
-tape('shallow slice single buffer', function (t) {
- t.plan(3)
- var bl = new BufferList(['First', 'Second', 'Third'])
-
- t.equal(bl.shallowSlice(0, 5).toString(), 'First')
- t.equal(bl.shallowSlice(5, 11).toString(), 'Second')
- t.equal(bl.shallowSlice(11, 16).toString(), 'Third')
-})
-
-tape('shallow slice with negative or omitted indices', function (t) {
- t.plan(4)
- var bl = new BufferList(['First', 'Second', 'Third'])
-
- t.equal(bl.shallowSlice().toString(), 'FirstSecondThird')
- t.equal(bl.shallowSlice(5).toString(), 'SecondThird')
- t.equal(bl.shallowSlice(5, -3).toString(), 'SecondTh')
- t.equal(bl.shallowSlice(-8).toString(), 'ondThird')
-})
-
-tape('shallow slice does not make a copy', function (t) {
- t.plan(1)
- var buffers = [new Buffer('First'), new Buffer('Second'), new Buffer('Third')]
- var bl = (new BufferList(buffers)).shallowSlice(5, -3)
-
- buffers[1].fill('h')
- buffers[2].fill('h')
-
- t.equal(bl.toString(), 'hhhhhhhh')
-})
-
-tape('duplicate', function (t) {
- t.plan(2)
-
- var bl = new BufferList('abcdefghij\xff\x00')
- , dup = bl.duplicate()
-
- t.equal(bl.prototype, dup.prototype)
- t.equal(bl.toString('hex'), dup.toString('hex'))
-})
-
-tape('destroy no pipe', function (t) {
- t.plan(2)
-
- var bl = new BufferList('alsdkfja;lsdkfja;lsdk')
- bl.destroy()
-
- t.equal(bl._bufs.length, 0)
- t.equal(bl.length, 0)
-})
-
-!process.browser && tape('destroy with pipe before read end', function (t) {
- t.plan(2)
-
- var bl = new BufferList()
- fs.createReadStream(__dirname + '/test.js')
- .pipe(bl)
-
- bl.destroy()
-
- t.equal(bl._bufs.length, 0)
- t.equal(bl.length, 0)
-
-})
-
-!process.browser && tape('destroy with pipe before read end with race', function (t) {
- t.plan(2)
-
- var bl = new BufferList()
- fs.createReadStream(__dirname + '/test.js')
- .pipe(bl)
-
- setTimeout(function () {
- bl.destroy()
- setTimeout(function () {
- t.equal(bl._bufs.length, 0)
- t.equal(bl.length, 0)
- }, 500)
- }, 500)
-})
-
-!process.browser && tape('destroy with pipe after read end', function (t) {
- t.plan(2)
-
- var bl = new BufferList()
- fs.createReadStream(__dirname + '/test.js')
- .on('end', onEnd)
- .pipe(bl)
-
- function onEnd () {
- bl.destroy()
-
- t.equal(bl._bufs.length, 0)
- t.equal(bl.length, 0)
- }
-})
-
-!process.browser && tape('destroy with pipe while writing to a destination', function (t) {
- t.plan(4)
-
- var bl = new BufferList()
- , ds = new BufferList()
-
- fs.createReadStream(__dirname + '/test.js')
- .on('end', onEnd)
- .pipe(bl)
-
- function onEnd () {
- bl.pipe(ds)
-
- setTimeout(function () {
- bl.destroy()
-
- t.equals(bl._bufs.length, 0)
- t.equals(bl.length, 0)
-
- ds.destroy()
-
- t.equals(bl._bufs.length, 0)
- t.equals(bl.length, 0)
-
- }, 100)
- }
-})
-
-!process.browser && tape('handle error', function (t) {
- t.plan(2)
- fs.createReadStream('/does/not/exist').pipe(BufferList(function (err, data) {
- t.ok(err instanceof Error, 'has error')
- t.notOk(data, 'no data')
- }))
-})
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/LICENSE b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/LICENSE
deleted file mode 100644
index 757562ec59276b..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014 Mathias Buus
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
\ No newline at end of file
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/README.md b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/README.md
deleted file mode 100644
index f2560c939d960e..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/README.md
+++ /dev/null
@@ -1,52 +0,0 @@
-# end-of-stream
-
-A node module that calls a callback when a readable/writable/duplex stream has completed or failed.
-
- npm install end-of-stream
-
-## Usage
-
-Simply pass a stream and a callback to `eos`.
-Legacy streams, streams2, and streams3 are all supported.
-
-``` js
-var eos = require('end-of-stream');
-
-eos(readableStream, function(err) {
- // this will be set to the stream instance
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has ended', this === readableStream);
-});
-
-eos(writableStream, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has finished', this === writableStream);
-});
-
-eos(duplexStream, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has ended and finished', this === duplexStream);
-});
-
-eos(duplexStream, {readable:false}, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has finished but might still be readable');
-});
-
-eos(duplexStream, {writable:false}, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has ended but might still be writable');
-});
-
-eos(readableStream, {error:false}, function(err) {
- // do not treat emit('error', err) as a end-of-stream
-});
-```
-
-## License
-
-MIT
-
-## Related
-
-`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one.
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/index.js b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/index.js
deleted file mode 100644
index b3a90686346cfb..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/index.js
+++ /dev/null
@@ -1,83 +0,0 @@
-var once = require('once');
-
-var noop = function() {};
-
-var isRequest = function(stream) {
- return stream.setHeader && typeof stream.abort === 'function';
-};
-
-var isChildProcess = function(stream) {
- return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
-};
-
-var eos = function(stream, opts, callback) {
- if (typeof opts === 'function') return eos(stream, null, opts);
- if (!opts) opts = {};
-
- callback = once(callback || noop);
-
- var ws = stream._writableState;
- var rs = stream._readableState;
- var readable = opts.readable || (opts.readable !== false && stream.readable);
- var writable = opts.writable || (opts.writable !== false && stream.writable);
-
- var onlegacyfinish = function() {
- if (!stream.writable) onfinish();
- };
-
- var onfinish = function() {
- writable = false;
- if (!readable) callback.call(stream);
- };
-
- var onend = function() {
- readable = false;
- if (!writable) callback.call(stream);
- };
-
- var onexit = function(exitCode) {
- callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
- };
-
- var onclose = function() {
- if (readable && !(rs && rs.ended)) return callback.call(stream, new Error('premature close'));
- if (writable && !(ws && ws.ended)) return callback.call(stream, new Error('premature close'));
- };
-
- var onrequest = function() {
- stream.req.on('finish', onfinish);
- };
-
- if (isRequest(stream)) {
- stream.on('complete', onfinish);
- stream.on('abort', onclose);
- if (stream.req) onrequest();
- else stream.on('request', onrequest);
- } else if (writable && !ws) { // legacy streams
- stream.on('end', onlegacyfinish);
- stream.on('close', onlegacyfinish);
- }
-
- if (isChildProcess(stream)) stream.on('exit', onexit);
-
- stream.on('end', onend);
- stream.on('finish', onfinish);
- if (opts.error !== false) stream.on('error', callback);
- stream.on('close', onclose);
-
- return function() {
- stream.removeListener('complete', onfinish);
- stream.removeListener('abort', onclose);
- stream.removeListener('request', onrequest);
- if (stream.req) stream.req.removeListener('finish', onfinish);
- stream.removeListener('end', onlegacyfinish);
- stream.removeListener('close', onlegacyfinish);
- stream.removeListener('finish', onfinish);
- stream.removeListener('exit', onexit);
- stream.removeListener('end', onend);
- stream.removeListener('error', callback);
- stream.removeListener('close', onclose);
- };
-};
-
-module.exports = eos;
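Editor's note: one behavior the deleted README does not document, but which is visible at the end of `index.js` above, is that `eos()` returns a function that removes every listener it attached, letting a caller stop waiting on a stream. A small sketch:

```js
var fs = require('fs')
var eos = require('end-of-stream')

var stream = fs.createReadStream(__filename)
var detach = eos(stream, function (err) {
  if (err) return console.log('stream had an error or closed early')
  console.log('stream has ended')
})

// later, if the outcome no longer matters:
detach() // unhooks the 'end'/'finish'/'error'/'close' handlers eos installed
```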
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/package.json b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/package.json
deleted file mode 100644
index 66ece806237f32..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/package.json
+++ /dev/null
@@ -1,66 +0,0 @@
-{
- "_from": "end-of-stream@^1.0.0",
- "_id": "end-of-stream@1.4.0",
- "_integrity": "sha1-epDYM+/abPpurA9JSduw+tOmMgY=",
- "_location": "/pacote/tar-stream/end-of-stream",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "end-of-stream@^1.0.0",
- "name": "end-of-stream",
- "escapedName": "end-of-stream",
- "rawSpec": "^1.0.0",
- "saveSpec": null,
- "fetchSpec": "^1.0.0"
- },
- "_requiredBy": [
- "/pacote/tar-stream"
- ],
- "_resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.0.tgz",
- "_shasum": "7a90d833efda6cfa6eac0f4949dbb0fad3a63206",
- "_shrinkwrap": null,
- "_spec": "end-of-stream@^1.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/tar-stream",
- "author": {
- "name": "Mathias Buus",
- "email": "mathiasbuus@gmail.com"
- },
- "bin": null,
- "bugs": {
- "url": "https://github.com/mafintosh/end-of-stream/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "once": "^1.4.0"
- },
- "deprecated": false,
- "description": "Call a callback when a readable/writable/duplex stream has completed or failed.",
- "devDependencies": {},
- "files": [
- "index.js"
- ],
- "homepage": "https://github.com/mafintosh/end-of-stream",
- "keywords": [
- "stream",
- "streams",
- "callback",
- "finish",
- "close",
- "end",
- "wait"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "end-of-stream",
- "optionalDependencies": {},
- "peerDependencies": {},
- "repository": {
- "type": "git",
- "url": "git://github.com/mafintosh/end-of-stream.git"
- },
- "scripts": {
- "test": "node test.js"
- },
- "version": "1.4.0"
-}
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/.npmignore b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/.npmignore
deleted file mode 100644
index 3c3629e647f5dd..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/.npmignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/LICENCE b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/LICENCE
deleted file mode 100644
index 1a14b437e87a8f..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/LICENCE
+++ /dev/null
@@ -1,19 +0,0 @@
-Copyright (c) 2012-2014 Raynos.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/Makefile b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/Makefile
deleted file mode 100644
index d583fcf49dc1a3..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/Makefile
+++ /dev/null
@@ -1,4 +0,0 @@
-browser:
- node ./support/compile
-
-.PHONY: browser
\ No newline at end of file
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/README.md b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/README.md
deleted file mode 100644
index 093cb2978e4af0..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/README.md
+++ /dev/null
@@ -1,32 +0,0 @@
-# xtend
-
-[![browser support][3]][4]
-
-[![locked](http://badges.github.io/stability-badges/dist/locked.svg)](http://github.com/badges/stability-badges)
-
-Extend like a boss
-
-xtend is a basic utility library which allows you to extend an object by appending all of the properties from each object in a list. When there are identical properties, the right-most property takes precedence.
-
-## Examples
-
-```js
-var extend = require("xtend")
-
-// extend returns a new object. Does not mutate arguments
-var combination = extend({
- a: "a",
- b: 'c'
-}, {
- b: "b"
-})
-// { a: "a", b: "b" }
-```
-
-## Stability status: Locked
-
-## MIT Licensed
-
-
- [3]: http://ci.testling.com/Raynos/xtend.png
- [4]: http://ci.testling.com/Raynos/xtend
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/immutable.js b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/immutable.js
deleted file mode 100644
index 94889c9de11a18..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/immutable.js
+++ /dev/null
@@ -1,19 +0,0 @@
-module.exports = extend
-
-var hasOwnProperty = Object.prototype.hasOwnProperty;
-
-function extend() {
- var target = {}
-
- for (var i = 0; i < arguments.length; i++) {
- var source = arguments[i]
-
- for (var key in source) {
- if (hasOwnProperty.call(source, key)) {
- target[key] = source[key]
- }
- }
- }
-
- return target
-}
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/mutable.js b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/mutable.js
deleted file mode 100644
index 72debede6ca585..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/mutable.js
+++ /dev/null
@@ -1,17 +0,0 @@
-module.exports = extend
-
-var hasOwnProperty = Object.prototype.hasOwnProperty;
-
-function extend(target) {
- for (var i = 1; i < arguments.length; i++) {
- var source = arguments[i]
-
- for (var key in source) {
- if (hasOwnProperty.call(source, key)) {
- target[key] = source[key]
- }
- }
- }
-
- return target
-}
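Editor's note: the two deleted entry points differ only in whether the properties land in a fresh object or in the first argument. A short sketch of both (the package's `main` is `immutable`, per the package.json below):

```js
var extend = require('xtend')                 // immutable.js
var mutableExtend = require('xtend/mutable')  // mutable.js

var defaults = { retries: 3, verbose: false }

var opts = extend(defaults, { verbose: true })
// opts     => { retries: 3, verbose: true }
// defaults => untouched: extend() always writes into a new object

mutableExtend(defaults, { verbose: true })
// defaults => { retries: 3, verbose: true } -- mutated in place
```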
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/package.json b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/package.json
deleted file mode 100644
index 10dca33b501848..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/package.json
+++ /dev/null
@@ -1,89 +0,0 @@
-{
- "_from": "xtend@^4.0.0",
- "_id": "xtend@4.0.1",
- "_integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=",
- "_location": "/pacote/tar-stream/xtend",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "xtend@^4.0.0",
- "name": "xtend",
- "escapedName": "xtend",
- "rawSpec": "^4.0.0",
- "saveSpec": null,
- "fetchSpec": "^4.0.0"
- },
- "_requiredBy": [
- "/pacote/tar-stream"
- ],
- "_resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz",
- "_shasum": "a5c6d532be656e23db820efb943a1f04998d63af",
- "_shrinkwrap": null,
- "_spec": "xtend@^4.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/tar-stream",
- "author": {
- "name": "Raynos",
- "email": "raynos2@gmail.com"
- },
- "bin": null,
- "bugs": {
- "url": "https://github.com/Raynos/xtend/issues",
- "email": "raynos2@gmail.com"
- },
- "bundleDependencies": false,
- "contributors": [
- {
- "name": "Jake Verbaten"
- },
- {
- "name": "Matt Esch"
- }
- ],
- "dependencies": {},
- "deprecated": false,
- "description": "extend like a boss",
- "devDependencies": {
- "tape": "~1.1.0"
- },
- "engines": {
- "node": ">=0.4"
- },
- "homepage": "https://github.com/Raynos/xtend",
- "keywords": [
- "extend",
- "merge",
- "options",
- "opts",
- "object",
- "array"
- ],
- "license": "MIT",
- "main": "immutable",
- "name": "xtend",
- "optionalDependencies": {},
- "peerDependencies": {},
- "repository": {
- "type": "git",
- "url": "git://github.com/Raynos/xtend.git"
- },
- "scripts": {
- "test": "node test"
- },
- "testling": {
- "files": "test.js",
- "browsers": [
- "ie/7..latest",
- "firefox/16..latest",
- "firefox/nightly",
- "chrome/22..latest",
- "chrome/canary",
- "opera/12..latest",
- "opera/next",
- "safari/5.1..latest",
- "ipad/6.0..latest",
- "iphone/6.0..latest"
- ]
- },
- "version": "4.0.1"
-}
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/test.js b/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/test.js
deleted file mode 100644
index 093a2b061e81ae..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/test.js
+++ /dev/null
@@ -1,83 +0,0 @@
-var test = require("tape")
-var extend = require("./")
-var mutableExtend = require("./mutable")
-
-test("merge", function(assert) {
- var a = { a: "foo" }
- var b = { b: "bar" }
-
- assert.deepEqual(extend(a, b), { a: "foo", b: "bar" })
- assert.end()
-})
-
-test("replace", function(assert) {
- var a = { a: "foo" }
- var b = { a: "bar" }
-
- assert.deepEqual(extend(a, b), { a: "bar" })
- assert.end()
-})
-
-test("undefined", function(assert) {
- var a = { a: undefined }
- var b = { b: "foo" }
-
- assert.deepEqual(extend(a, b), { a: undefined, b: "foo" })
- assert.deepEqual(extend(b, a), { a: undefined, b: "foo" })
- assert.end()
-})
-
-test("handle 0", function(assert) {
- var a = { a: "default" }
- var b = { a: 0 }
-
- assert.deepEqual(extend(a, b), { a: 0 })
- assert.deepEqual(extend(b, a), { a: "default" })
- assert.end()
-})
-
-test("is immutable", function (assert) {
- var record = {}
-
- extend(record, { foo: "bar" })
- assert.equal(record.foo, undefined)
- assert.end()
-})
-
-test("null as argument", function (assert) {
- var a = { foo: "bar" }
- var b = null
- var c = void 0
-
- assert.deepEqual(extend(b, a, c), { foo: "bar" })
- assert.end()
-})
-
-test("mutable", function (assert) {
- var a = { foo: "bar" }
-
- mutableExtend(a, { bar: "baz" })
-
- assert.equal(a.bar, "baz")
- assert.end()
-})
-
-test("null prototype", function(assert) {
- var a = { a: "foo" }
- var b = Object.create(null)
- b.b = "bar";
-
- assert.deepEqual(extend(a, b), { a: "foo", b: "bar" })
- assert.end()
-})
-
-test("null prototype mutable", function (assert) {
- var a = { foo: "bar" }
- var b = Object.create(null)
- b.bar = "baz";
-
- mutableExtend(a, b)
-
- assert.equal(a.bar, "baz")
- assert.end()
-})
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/pack.js b/deps/npm/node_modules/pacote/node_modules/tar-stream/pack.js
deleted file mode 100644
index 025f007132ff47..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/pack.js
+++ /dev/null
@@ -1,254 +0,0 @@
-var constants = require('constants')
-var eos = require('end-of-stream')
-var util = require('util')
-
-var Readable = require('readable-stream').Readable
-var Writable = require('readable-stream').Writable
-var StringDecoder = require('string_decoder').StringDecoder
-
-var headers = require('./headers')
-
-var DMODE = parseInt('755', 8)
-var FMODE = parseInt('644', 8)
-
-var END_OF_TAR = new Buffer(1024)
-END_OF_TAR.fill(0)
-
-var noop = function () {}
-
-var overflow = function (self, size) {
- size &= 511
- if (size) self.push(END_OF_TAR.slice(0, 512 - size))
-}
-
-function modeToType (mode) {
- switch (mode & constants.S_IFMT) {
- case constants.S_IFBLK: return 'block-device'
- case constants.S_IFCHR: return 'character-device'
- case constants.S_IFDIR: return 'directory'
- case constants.S_IFIFO: return 'fifo'
- case constants.S_IFLNK: return 'symlink'
- }
-
- return 'file'
-}
-
-var Sink = function (to) {
- Writable.call(this)
- this.written = 0
- this._to = to
- this._destroyed = false
-}
-
-util.inherits(Sink, Writable)
-
-Sink.prototype._write = function (data, enc, cb) {
- this.written += data.length
- if (this._to.push(data)) return cb()
- this._to._drain = cb
-}
-
-Sink.prototype.destroy = function () {
- if (this._destroyed) return
- this._destroyed = true
- this.emit('close')
-}
-
-var LinkSink = function () {
- Writable.call(this)
- this.linkname = ''
- this._decoder = new StringDecoder('utf-8')
- this._destroyed = false
-}
-
-util.inherits(LinkSink, Writable)
-
-LinkSink.prototype._write = function (data, enc, cb) {
- this.linkname += this._decoder.write(data)
- cb()
-}
-
-LinkSink.prototype.destroy = function () {
- if (this._destroyed) return
- this._destroyed = true
- this.emit('close')
-}
-
-var Void = function () {
- Writable.call(this)
- this._destroyed = false
-}
-
-util.inherits(Void, Writable)
-
-Void.prototype._write = function (data, enc, cb) {
- cb(new Error('No body allowed for this entry'))
-}
-
-Void.prototype.destroy = function () {
- if (this._destroyed) return
- this._destroyed = true
- this.emit('close')
-}
-
-var Pack = function (opts) {
- if (!(this instanceof Pack)) return new Pack(opts)
- Readable.call(this, opts)
-
- this._drain = noop
- this._finalized = false
- this._finalizing = false
- this._destroyed = false
- this._stream = null
-}
-
-util.inherits(Pack, Readable)
-
-Pack.prototype.entry = function (header, buffer, callback) {
- if (this._stream) throw new Error('already piping an entry')
- if (this._finalized || this._destroyed) return
-
- if (typeof buffer === 'function') {
- callback = buffer
- buffer = null
- }
-
- if (!callback) callback = noop
-
- var self = this
-
- if (!header.size || header.type === 'symlink') header.size = 0
- if (!header.type) header.type = modeToType(header.mode)
- if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE
- if (!header.uid) header.uid = 0
- if (!header.gid) header.gid = 0
- if (!header.mtime) header.mtime = new Date()
-
- if (typeof buffer === 'string') buffer = new Buffer(buffer)
- if (Buffer.isBuffer(buffer)) {
- header.size = buffer.length
- this._encode(header)
- this.push(buffer)
- overflow(self, header.size)
- process.nextTick(callback)
- return new Void()
- }
-
- if (header.type === 'symlink' && !header.linkname) {
- var linkSink = new LinkSink()
- eos(linkSink, function (err) {
- if (err) { // stream was closed
- self.destroy()
- return callback(err)
- }
-
- header.linkname = linkSink.linkname
- self._encode(header)
- callback()
- })
-
- return linkSink
- }
-
- this._encode(header)
-
- if (header.type !== 'file' && header.type !== 'contiguous-file') {
- process.nextTick(callback)
- return new Void()
- }
-
- var sink = new Sink(this)
-
- this._stream = sink
-
- eos(sink, function (err) {
- self._stream = null
-
- if (err) { // stream was closed
- self.destroy()
- return callback(err)
- }
-
- if (sink.written !== header.size) { // corrupting tar
- self.destroy()
- return callback(new Error('size mismatch'))
- }
-
- overflow(self, header.size)
- if (self._finalizing) self.finalize()
- callback()
- })
-
- return sink
-}
-
-Pack.prototype.finalize = function () {
- if (this._stream) {
- this._finalizing = true
- return
- }
-
- if (this._finalized) return
- this._finalized = true
- this.push(END_OF_TAR)
- this.push(null)
-}
-
-Pack.prototype.destroy = function (err) {
- if (this._destroyed) return
- this._destroyed = true
-
- if (err) this.emit('error', err)
- this.emit('close')
- if (this._stream && this._stream.destroy) this._stream.destroy()
-}
-
-Pack.prototype._encode = function (header) {
- if (!header.pax) {
- var buf = headers.encode(header)
- if (buf) {
- this.push(buf)
- return
- }
- }
- this._encodePax(header)
-}
-
-Pack.prototype._encodePax = function (header) {
- var paxHeader = headers.encodePax({
- name: header.name,
- linkname: header.linkname,
- pax: header.pax
- })
-
- var newHeader = {
- name: 'PaxHeader',
- mode: header.mode,
- uid: header.uid,
- gid: header.gid,
- size: paxHeader.length,
- mtime: header.mtime,
- type: 'pax-header',
- linkname: header.linkname && 'PaxHeader',
- uname: header.uname,
- gname: header.gname,
- devmajor: header.devmajor,
- devminor: header.devminor
- }
-
- this.push(headers.encode(newHeader))
- this.push(paxHeader)
- overflow(this, paxHeader.length)
-
- newHeader.size = header.size
- newHeader.type = header.type
- this.push(headers.encode(newHeader))
-}
-
-Pack.prototype._read = function (n) {
- var drain = this._drain
- this._drain = noop
- drain()
-}
-
-module.exports = Pack
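Editor's note: for context on what this removed generator did: `Pack` is a Readable whose `entry()` returns a writable sink for the entry body, and `finalize()` emits the trailing zero blocks. A usage sketch, assuming `tar-stream@1.x` and its usual `pack` export:

```js
var tar = require('tar-stream')
var fs = require('fs')

var pack = tar.pack() // a Readable stream of tar bytes

// Buffer/string bodies are written immediately; size is inferred
pack.entry({ name: 'hello.txt' }, 'Hello, world!\n')

// streamed bodies must declare size up front -- see the
// 'size mismatch' check in the Sink eos handler above
var entry = pack.entry({ name: 'data.txt', size: 11 }, function (err) {
  if (err) throw err
  pack.finalize() // no more entries
})
entry.write('hello ')
entry.end('world')

pack.pipe(fs.createWriteStream('out.tar'))
```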
diff --git a/deps/npm/node_modules/pacote/node_modules/tar-stream/package.json b/deps/npm/node_modules/pacote/node_modules/tar-stream/package.json
deleted file mode 100644
index fca15656fda2e4..00000000000000
--- a/deps/npm/node_modules/pacote/node_modules/tar-stream/package.json
+++ /dev/null
@@ -1,88 +0,0 @@
-{
- "_from": "tar-stream@^1.5.2",
- "_id": "tar-stream@1.5.4",
- "_inBundle": false,
- "_integrity": "sha1-NlSc8E7RrumyowwBQyUiONr5QBY=",
- "_location": "/pacote/tar-stream",
- "_phantomChildren": {
- "once": "1.4.0",
- "readable-stream": "2.2.9"
- },
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "tar-stream@^1.5.2",
- "name": "tar-stream",
- "escapedName": "tar-stream",
- "rawSpec": "^1.5.2",
- "saveSpec": null,
- "fetchSpec": "^1.5.2"
- },
- "_requiredBy": [
- "/pacote",
- "/pacote/tar-fs"
- ],
- "_resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.5.4.tgz",
- "_shasum": "36549cf04ed1aee9b2a30c0143252238daf94016",
- "_spec": "tar-stream@^1.5.2",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote",
- "author": {
- "name": "Mathias Buus",
- "email": "mathiasbuus@gmail.com"
- },
- "bugs": {
- "url": "https://github.com/mafintosh/tar-stream/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "bl": "^1.0.0",
- "end-of-stream": "^1.0.0",
- "readable-stream": "^2.0.0",
- "xtend": "^4.0.0"
- },
- "deprecated": false,
- "description": "tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.",
- "devDependencies": {
- "concat-stream": "^1.4.6",
- "standard": "^5.3.1",
- "tape": "^3.0.3"
- },
- "directories": {
- "test": "test"
- },
- "engines": {
- "node": ">= 0.8.0"
- },
- "files": [
- "*.js",
- "LICENSE"
- ],
- "homepage": "https://github.com/mafintosh/tar-stream",
- "keywords": [
- "tar",
- "tarball",
- "parse",
- "parser",
- "generate",
- "generator",
- "stream",
- "stream2",
- "streams",
- "streams2",
- "streaming",
- "pack",
- "extract",
- "modify"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "tar-stream",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/mafintosh/tar-stream.git"
- },
- "scripts": {
- "test": "standard && tape test/*.js"
- },
- "version": "1.5.4"
-}
diff --git a/deps/npm/node_modules/pacote/package.json b/deps/npm/node_modules/pacote/package.json
index ebdc1f412a7fe4..6050cc6241c940 100644
--- a/deps/npm/node_modules/pacote/package.json
+++ b/deps/npm/node_modules/pacote/package.json
@@ -1,41 +1,37 @@
{
- "_from": "pacote@2.7.38",
- "_id": "pacote@2.7.38",
+ "_from": "pacote@latest",
+ "_id": "pacote@6.0.2",
"_inBundle": false,
- "_integrity": "sha512-XxHUyHQB7QCVBxoXeVu0yKxT+2PvJucsc0+1E+6f95lMUxEAYERgSAc71ckYXrYr35Ew3xFU/LrhdIK21GQFFA==",
+ "_integrity": "sha512-PsnzsjS/rNjY0fojY8QNy6Zq1XvTA3X0vhtfNtExAf1h/746f4MaSatHEU6jiAb9yqvzdThnFGAy+t5VtKwgjg==",
"_location": "/pacote",
"_phantomChildren": {
"cacache": "9.2.9",
- "chownr": "1.0.1",
"lru-cache": "4.1.1",
"mississippi": "1.3.0",
- "mkdirp": "0.5.1",
"npm-package-arg": "5.1.2",
- "once": "1.4.0",
- "readable-stream": "2.3.2",
"retry": "0.10.1",
"safe-buffer": "5.1.1",
- "semver": "5.3.0",
+ "semver": "5.4.1",
"ssri": "4.1.6"
},
"_requested": {
- "type": "version",
+ "type": "tag",
"registry": true,
- "raw": "pacote@2.7.38",
+ "raw": "pacote@latest",
"name": "pacote",
"escapedName": "pacote",
- "rawSpec": "2.7.38",
+ "rawSpec": "latest",
"saveSpec": null,
- "fetchSpec": "2.7.38"
+ "fetchSpec": "latest"
},
"_requiredBy": [
"#USER",
"/"
],
- "_resolved": "https://registry.npmjs.org/pacote/-/pacote-2.7.38.tgz",
- "_shasum": "5091f8774298c26c3eca24606037f1bb73db74c1",
- "_spec": "pacote@2.7.38",
- "_where": "/Users/zkat/Documents/code/npm",
+ "_resolved": "https://registry.npmjs.org/pacote/-/pacote-6.0.2.tgz",
+ "_shasum": "c618a3c08493aeb390e79aa73f95af331ffc6171",
+ "_spec": "pacote@latest",
+ "_where": "/Users/rebecca/code/npm",
"author": {
"name": "Kat Marchán",
"email": "kzm@sykosomatic.org"
@@ -64,32 +60,33 @@
"mississippi": "^1.2.0",
"normalize-package-data": "^2.4.0",
"npm-package-arg": "^5.1.2",
+ "npm-packlist": "^1.1.6",
"npm-pick-manifest": "^1.0.4",
"osenv": "^0.1.4",
"promise-inflight": "^1.0.1",
"promise-retry": "^1.1.1",
"protoduck": "^4.0.0",
"safe-buffer": "^5.1.1",
- "semver": "^5.3.0",
+ "semver": "^5.4.1",
"ssri": "^4.1.6",
- "tar-fs": "^1.15.3",
- "tar-stream": "^1.5.4",
+ "tar": "^4.0.0",
"unique-filename": "^1.1.0",
- "which": "^1.2.12"
+ "which": "^1.3.0"
},
"deprecated": false,
"description": "JavaScript package downloader",
"devDependencies": {
"mkdirp": "^0.5.1",
- "nock": "^9.0.13",
+ "nock": "^9.0.14",
"npmlog": "^4.1.2",
- "nyc": "^11.0.3",
+ "nyc": "^11.1.0",
"require-inject": "^1.4.2",
"rimraf": "^2.5.4",
- "standard": "^10.0.1",
+ "standard": "^10.0.3",
"standard-version": "^4.2.0",
"tacks": "^1.2.6",
- "tap": "^10.7.0",
+ "tap": "^10.7.2",
+ "tar-stream": "^1.5.4",
"weallbehave": "^1.2.0",
"weallcontribute": "^1.0.7"
},
@@ -120,5 +117,5 @@
"update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
"update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
},
- "version": "2.7.38"
+ "version": "6.0.2"
}
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/CHANGELOG.md b/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/CHANGELOG.md
new file mode 100644
index 00000000000000..843a0bcb941887
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/CHANGELOG.md
@@ -0,0 +1,36 @@
+# Change Log
+
+All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+
+
+## [1.0.1](https://github.com/zkat/json-parse-better-errors/compare/v1.0.0...v1.0.1) (2017-08-16)
+
+
+### Bug Fixes
+
+* **license:** oops. Forgot to update license.md ([efe2958](https://github.com/zkat/json-parse-better-errors/commit/efe2958))
+
+
+
+
+# 1.0.0 (2017-08-15)
+
+
+### Features
+
+* **init:** Initial Commit ([562c977](https://github.com/zkat/json-parse-better-errors/commit/562c977))
+
+
+### BREAKING CHANGES
+
+* **init:** This is the first commit!
+
+
+
+
+# 0.1.0 (2017-08-15)
+
+
+### Features
+
+* **init:** Initial Commit ([9dd1a19](https://github.com/zkat/json-parse-better-errors/commit/9dd1a19))
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/LICENSE.md b/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/LICENSE.md
new file mode 100644
index 00000000000000..c51842cc4ab3c2
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/LICENSE.md
@@ -0,0 +1,7 @@
+Copyright 2017 Kat Marchán
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/README.md b/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/README.md
new file mode 100644
index 00000000000000..667323c775a99e
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/README.md
@@ -0,0 +1,53 @@
+# json-parse-better-errors [![npm version](https://img.shields.io/npm/v/json-parse-better-errors.svg)](https://npm.im/json-parse-better-errors) [![license](https://img.shields.io/npm/l/json-parse-better-errors.svg)](https://npm.im/json-parse-better-errors) [![Travis](https://img.shields.io/travis/zkat/json-parse-better-errors.svg)](https://travis-ci.org/zkat/json-parse-better-errors) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/json-parse-better-errors?svg=true)](https://ci.appveyor.com/project/zkat/json-parse-better-errors) [![Coverage Status](https://coveralls.io/repos/github/zkat/json-parse-better-errors/badge.svg?branch=latest)](https://coveralls.io/github/zkat/json-parse-better-errors?branch=latest)
+
+[`json-parse-better-errors`](https://github.com/zkat/json-parse-better-errors) is a Node.js library that
+works just like `JSON.parse`, but throws far more informative errors when
+parsing fails, including a snippet of the text around the position where
+parsing broke down.
+
+## Install
+
+`$ npm install --save json-parse-better-errors`
+
+## Table of Contents
+
+* [Example](#example)
+* [Features](#features)
+* [Contributing](#contributing)
+* [API](#api)
+ * [`parse`](#parse)
+
+### Example
+
+```javascript
+const parseJson = require('json-parse-better-errors')
+
+parseJson('"foo"')
+parseJson('garbage') // more useful error message
+```
+
+### Features
+
+* Like JSON.parse, but the errors are better.
+
+### Contributing
+
+The json-parse-better-errors team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear.
+
+All participants and maintainers in this project are expected to follow the [Code of Conduct](CODE_OF_CONDUCT.md), and to just generally be excellent to each other.
+
+Please refer to the [Changelog](CHANGELOG.md) for project history details, too.
+
+Happy hacking!
+
+### API
+
+#### `> parse(txt, ?reviver, ?context=20)`
+
+Works just like `JSON.parse`, but will include a bit more information when an
+error happens.
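+
+For example, a failed parse might surface like this (a sketch; the first part
+of the message comes from the engine's own `JSON.parse` error, so the exact
+wording can vary):
+
+```javascript
+const parseJson = require('json-parse-better-errors')
+
+try {
+  // the third argument controls how many characters of context are echoed back
+  parseJson('{"foo": 123, "bar": nope}', null, 10)
+} catch (e) {
+  console.error(e.message)
+  // e.g. Unexpected token n in JSON at position 20 while parsing near '...3, "bar": nope}'
+}
+```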
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/index.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/index.js
new file mode 100644
index 00000000000000..32c36358661a29
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/index.js
@@ -0,0 +1,32 @@
+'use strict'
+
+module.exports = parseJson
+function parseJson (txt, reviver, context) {
+ context = context || 20
+ try {
+ return JSON.parse(txt, reviver)
+ } catch (e) {
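+ // Recover the failure offset from the engine's message text, if possible:
+ // "Unexpected token ... at position N" carries an exact index, while
+ // "Unexpected end of JSON..." points at the end of the input.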
+ const syntaxErr = e.message.match(/^Unexpected token.*position\s+(\d+)/i)
+ const errIdx = syntaxErr
+ ? +syntaxErr[1]
+ : e.message.match(/^Unexpected end of JSON.*/i)
+ ? txt.length - 1
+ : null
+ if (errIdx != null) {
+ const start = errIdx <= context
+ ? 0
+ : errIdx - context
+ const end = errIdx + context >= txt.length
+ ? txt.length
+ : errIdx + context
+ e.message += ` while parsing near '${
+ start === 0 ? '' : '...'
+ }${txt.slice(start, end)}${
+ end === txt.length ? '' : '...'
+ }'`
+ } else {
+ e.message += ` while parsing '${txt.slice(0, context * 2)}'`
+ }
+ throw e
+ }
+}
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/package.json b/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/package.json
new file mode 100644
index 00000000000000..f1dd5890c78bb3
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-better-errors/package.json
@@ -0,0 +1,76 @@
+{
+ "_from": "json-parse-better-errors@^1.0.0",
+ "_id": "json-parse-better-errors@1.0.1",
+ "_inBundle": false,
+ "_integrity": "sha512-xyQpxeWWMKyJps9CuGJYeng6ssI5bpqS9ltQpdVQ90t4ql6NdnxFKh95JcRt2cun/DjMVNrdjniLPuMA69xmCw==",
+ "_location": "/read-package-json/json-parse-better-errors",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "json-parse-better-errors@^1.0.0",
+ "name": "json-parse-better-errors",
+ "escapedName": "json-parse-better-errors",
+ "rawSpec": "^1.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.0"
+ },
+ "_requiredBy": [
+ "/read-package-json"
+ ],
+ "_resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.1.tgz",
+ "_shasum": "50183cd1b2d25275de069e9e71b467ac9eab973a",
+ "_spec": "json-parse-better-errors@^1.0.0",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/read-package-json",
+ "author": {
+ "name": "Kat Marchán",
+ "email": "kzm@sykosomatic.org"
+ },
+ "bugs": {
+ "url": "https://github.com/zkat/json-parse-better-errors/issues"
+ },
+ "bundleDependencies": false,
+ "config": {
+ "nyc": {
+ "exclude": [
+ "node_modules/**",
+ "test/**"
+ ]
+ }
+ },
+ "deprecated": false,
+ "description": "JSON.parse with context information on error",
+ "devDependencies": {
+ "nyc": "^10.3.2",
+ "standard": "^9.0.2",
+ "standard-version": "^4.1.0",
+ "tap": "^10.3.3",
+ "weallbehave": "^1.2.0",
+ "weallcontribute": "^1.0.8"
+ },
+ "files": [
+ "*.js"
+ ],
+ "homepage": "https://github.com/zkat/json-parse-better-errors#readme",
+ "keywords": [
+ "JSON",
+ "parser"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "json-parse-better-errors",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/zkat/json-parse-better-errors.git"
+ },
+ "scripts": {
+ "postrelease": "npm publish && git push --follow-tags",
+ "prerelease": "npm t",
+ "pretest": "standard",
+ "release": "standard-version -s",
+ "test": "tap -J --coverage test/*.js",
+ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
+ "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
+ },
+ "version": "1.0.1"
+}
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/.editorconfig b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/.editorconfig
deleted file mode 100644
index fb7f73a832a4af..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/.editorconfig
+++ /dev/null
@@ -1,14 +0,0 @@
-root = true
-
-[*]
-end_of_line = lf
-insert_final_newline = true
-trim_trailing_whitespace = true
-
-[*.js, **/*.js]
-indent_size = 4
-indent_style = space
-
-[{package.json,.travis.yml}]
-indent_size = 2
-indent_style = space
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/.npmignore b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/.npmignore
deleted file mode 100644
index 59d842baa84c8b..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/.npmignore
+++ /dev/null
@@ -1,28 +0,0 @@
-# Logs
-logs
-*.log
-
-# Runtime data
-pids
-*.pid
-*.seed
-
-# Directory for instrumented libs generated by jscoverage/JSCover
-lib-cov
-
-# Coverage directory used by tools like istanbul
-coverage
-
-# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
-.grunt
-
-# Compiled binary addons (http://nodejs.org/api/addons.html)
-build/Release
-
-# Dependency directory
-# Commenting this out is preferred by some people, see
-# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git-
-node_modules
-
-# Users Environment Variables
-.lock-wscript
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/LICENSE b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/LICENSE
deleted file mode 100644
index e637724b3bc595..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/LICENSE
+++ /dev/null
@@ -1,22 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2015 Sam Mikes
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/README.md b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/README.md
deleted file mode 100644
index ffad93584b19d4..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/README.md
+++ /dev/null
@@ -1,29 +0,0 @@
-# json-parse-helpfulerror
-
-A drop-in replacement for `JSON.parse` that uses
-<https://npmjs.org/package/jju> to provide more useful error messages in the
-event of a parse error.
-
-# Example
-
-## Installation
-
-```
-npm i -S json-parse-helpfulerror
-```
-
-## Use
-
-```js
-var jph = require('json-parse-helpfulerror');
-
-var notJSON = "{'foo': 3}"; // keys must be double-quoted in JSON
-
-JSON.parse(notJSON); // throws unhelpful error
-
-jph.parse("{'foo': 3}") // throws more helpful error: "Unexpected token '\''..."
-```
-
-# License
-
-MIT
\ No newline at end of file
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/index.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/index.js
deleted file mode 100644
index 15648b017b3db5..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/index.js
+++ /dev/null
@@ -1,21 +0,0 @@
-'use strict';
-
-var jju = require('jju');
-
-function parse(text, reviver) {
- try {
- return JSON.parse(text, reviver);
- } catch (err) {
- // we expect this to throw with a more informative message
- jju.parse(text, {
- mode: 'json',
- reviver: reviver
- });
-
- // backup if jju is not as strict as JSON.parse; re-throw error
- // data-dependent code path, I do not know how to cover it
- throw err;
- }
-}
-
-exports.parse = parse;
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/.npmignore b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/.npmignore
deleted file mode 100644
index 5ae40150eea106..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/.npmignore
+++ /dev/null
@@ -1,9 +0,0 @@
-package.json
-node_modules
-test
-benchmark
-docs
-examples
-/.editorconfig
-/.eslint*
-/.travis.yml
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/LICENSE b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/LICENSE
deleted file mode 100644
index 5c93f456546877..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
- DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
- Version 2, December 2004
-
- Copyright (C) 2004 Sam Hocevar
-
- Everyone is permitted to copy and distribute verbatim or modified
- copies of this license document, and changing it is allowed as long
- as the name is changed.
-
- DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
- TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
-
- 0. You just DO WHAT THE FUCK YOU WANT TO.
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/README.md b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/README.md
deleted file mode 100644
index 85d52a2dcea030..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/README.md
+++ /dev/null
@@ -1,243 +0,0 @@
-`jju` - a set of utilities to work with JSON / JSON5 documents
-
-[![npm version badge](https://img.shields.io/npm/v/jju.svg)](https://www.npmjs.org/package/jju)
-[![travis badge](http://img.shields.io/travis/rlidwka/jju.svg)](https://travis-ci.org/rlidwka/jju)
-[![downloads badge](http://img.shields.io/npm/dm/jju.svg)](https://www.npmjs.org/package/jju)
-
-## Installation
-
-```
-npm install jju
-```
-
-## Usage
-
-This module provides the following functions:
-
-1. [jju.parse()](#jjuparse-function) parses json/json5 text and returns a javascript value it corresponds to
-2. [jju.stringify()](#jjustringify-function) converts javascript value to an appropriate json/json5 text
-3. [jju.tokenize()](#jjutokenize-function) parses json/json5 text and returns an array of tokens it consists of ([see demo](http://rlidwka.github.io/jju/tokenizer.html))
-4. [jju.analyze()](#jjuanalyze-function) parses json/json5 text and tries to guess indentation, quoting style, etc.
-5. [jju.update()](#jjuupdate-function) changes json/json5 text, preserving original formatting as much as possible ([see demo](http://rlidwka.github.io/jju/editor.html))
-
-All functions are able to work with standard JSON documents. `jju.parse()` and `jju.stringify()` are better in some cases, but slower than the native `JSON.parse()` and `JSON.stringify()`. A detailed description of each function follows below.
-
-### jju.parse() function
-
-```javascript
-/*
- * Main syntax:
- *
- * `text` - text to parse, type: String
- * `options` - parser options, type: Object
- */
-jju.parse(text[, options])
-
-// compatibility syntax
-jju.parse(text[, reviver])
-```
-
-Options:
-
- - reserved\_keys - what to do with reserved keys (String, default="ignore")
- - "ignore" - ignore reserved keys
- - "throw" - throw SyntaxError in case of reserved keys
- - "replace" - replace reserved keys, this is the default JSON.parse behaviour, unsafe
-
- Reserved keys are keys that exist in an empty object (`hasOwnProperty`, `__proto__`, etc.).
-
-```javascript
-// 'ignore' will cause reserved keys to be ignored:
-parse('{hasOwnProperty: 1}', {reserved_keys: 'ignore'}) == {}
-parse('{hasOwnProperty: 1, x: 2}', {reserved_keys: 'ignore'}).hasOwnProperty('x') == true
-
-// 'throw' will cause SyntaxError in these cases:
-parse('{hasOwnProperty: 1}', {reserved_keys: 'throw'}) == SyntaxError
-
-// 'replace' will replace reserved keys with new ones:
-parse('{hasOwnProperty: 1}', {reserved_keys: 'replace'}) == {hasOwnProperty: 1}
-parse('{hasOwnProperty: 1, x: 2}', {reserved_keys: 'replace'}).hasOwnProperty('x') == TypeError
-```
-
-
- - null\_prototype - create object as Object.create(null) instead of '{}' (Boolean)
-
- if `reserved_keys != 'replace'`, default is **false**
-
- if `reserved_keys == 'replace'`, default is **true**
-
- It is usually unsafe and not recommended to change this option to false in the last case.
-
- - reviver - reviver function - Function
-
- This function should follow the JSON specification
-
- - mode - operation mode, set it to 'json' if you want to throw on non-strict json files (String)
-
-### jju.stringify() function
-
-```javascript
-/*
- * Main syntax:
- *
- * `value` - value to serialize, type: *
- * `options` - serializer options, type: Object
- */
-jju.stringify(value[, options])
-
-// compatibility syntax
-jju.stringify(value[, replacer[, indent]])
-```
-
-Options:
-
- - ascii - output ascii only (Boolean, default=false)
- If this option is enabled, the output will not contain any characters outside of the 0x20-0x7f range.
-
- - indent - indentation (String, Number or Boolean, default='\t')
- This option follows JSON specification.
-
- - quote - enquoting char (String, "'" or '"', default="'")
- - quote\_keys - whether object keys must be quoted (Boolean, default=false)
- If you want `{"q": 1}` instead of `{q: 1}`, set it to true.
-
- - sort\_keys - sort all keys while stringifying (Boolean or Function, default=false)
- By default, sort order depends on the implementation; with V8 it's insertion order. If set to `true`, all keys (but not arrays) will be sorted alphabetically. You can provide your own sorting function as well.
-
- - replacer - replacer function or array (Function or Array)
- This option follows JSON specification.
-
- - no\_trailing\_comma - don't output trailing comma (Boolean, default=false)
- If this option is set, arrays like this `[1,2,3,]` will never be generated. Otherwise they may be generated for pretty printing.
-
- - mode - operation mode, set it to 'json' if you want correct json in the output (String)
-
- Currently it's either 'json' or something else. If it is 'json', the following options are implied:
-
- - options.quote = '"'
- - options.no\_trailing\_comma = true
- - options.quote\_keys = true
- - '\x' literals are not used
-
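-Putting a few of these options together (a sketch; the exact spacing and
-trailing commas depend on the implementation):
-
-```javascript
-var jju = require('jju')
-
-jju.stringify({q: 1, quoted: "it's"}, {
-  mode: 'json5', // default mode, allows unquoted keys
-  indent: 2,
-  quote: "'"
-})
-// something like:
-// {
-//   q: 1,
-//   quoted: 'it\'s',
-// }
-```
-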
-### jju.tokenize() function
-
-```javascript
-/*
- * Main syntax:
- *
- * `text` - text to tokenize, type: String
- * `options` - parser options, type: Object
- */
-jju.tokenize(text[, options])
-```
-
-Options are the same as for the `jju.parse` function.
-
-Return value is an array of tokens, where each token is an object:
-
- - raw (String) - raw text of this token; if you join all the `raw` values, you get the original document
- - type (String) - type of the token, can be `whitespace`, `comment`, `key`, `literal`, `separator` or `newline`
- - stack (Array) - path to the current token in the syntax tree
- - value - value of the token if token is a `key` or `literal`
-
-You can check tokenizer for yourself using [this demo](http://rlidwka.github.io/jju/tokenizer.html).
-
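-For instance (a sketch based on the fields above; the exact token stream
-depends on the input's whitespace):
-
-```javascript
-var jju = require('jju')
-
-jju.tokenize('{"a": 1}')
-// => [ { raw: '{', type: 'separator', stack: [] },
-//      { raw: '"a"', type: 'key', stack: [], value: 'a' },
-//      { raw: ':', type: 'separator', stack: [] },
-//      { raw: ' ', type: 'whitespace', stack: ['a'] },
-//      { raw: '1', type: 'literal', stack: ['a'], value: 1 },
-//      { raw: '}', type: 'separator', stack: [] } ]
-```
-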
-### jju.analyze() function
-
-```javascript
-/*
- * Main syntax:
- *
- * `text` - text to analyze, type: String
- * `options` - parser options, type: Object
- */
-jju.analyze(text[, options])
-```
-
-Options are the same as for the `jju.parse` function.
-
-Return value is an object defining a programming style in which the document was written.
-
- - indent (String) - preferred indentation
- - newline (String) - preferred newline
- - quote (String) - `"` or `'` depending on which quote is preferred
- - quote\_keys (Boolean) - `true` if unquoted keys were used at least once
- - has\_whitespace (Boolean) - `true` if input has a whitespace token
- - has\_comments (Boolean) - `true` if input has a comment token
- - has\_newlines (Boolean) - `true` if input has a newline token
- - has\_trailing\_comma (Boolean) - `true` if input has at least one trailing comma
-
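-For instance (a sketch based on the fields above):
-
-```javascript
-var jju = require('jju')
-
-jju.analyze('{\n  "a": 1,\n}')
-// => { indent: '  ', newline: '\n', quote: '"', quote_keys: true,
-//      has_whitespace: true, has_comments: false,
-//      has_newlines: true, has_trailing_comma: true }
-```
-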
-### jju.update() function
-
-```javascript
-/*
- * Main syntax:
- *
- * `text` - original text, type: String
- * `new_value` - new value you want to set
- * `options` - parser or stringifier options, type: Object
- */
-jju.update(text, new_value[, options])
-```
-
-If you want to update a JSON document, here is the general approach:
-
-```javascript
-// here is your original JSON document:
-var input = '{"foo": "bar", "baz": 123}'
-
-// you need to parse it first:
-var json = jju.parse(input, {mode: 'json'})
-// json is { foo: 'bar', baz: 123 }
-
-// then you can change it as you like:
-json.foo = 'quux'
-json.hello = 'world'
-
-// then you run an update function to change the original json:
-var output = jju.update(input, json, {mode: 'json'})
-// output is '{"foo": "quux", "baz": 123, "hello": "world"}'
-```
-
-Look at [this demo](http://rlidwka.github.io/jju/editor.html) to test various types of json.
-
-## Advantages over existing JSON libraries
-
-In a few cases it makes sense to use this module instead of built-in JSON methods.
-
-Parser:
- - better error reporting with source code and line numbers
-
-In case of syntax error, JSON.parse does not return any good information to the user. This module does:
-
-```
-$ node -e 'require("jju").parse("[1,1,1,1,invalid]")'
-
-SyntaxError: Unexpected token 'i' at 0:9
-[1,1,1,1,invalid]
- ^
-```
-
-This module is about 5 times slower, so if user experience matters to you more than performance, use this module. If you're working with a lot of machine-generated data, use JSON.parse instead.
-
-Stringifier:
- - util.inspect-like pretty printing
-
-This module behaves more smartly when dealing with objects and arrays, and does not always print newlines in them:
-
-```
-$ node -e 'console.log(require("./").stringify([[,,,],,,[,,,,]], {mode:"json"}))'
-[
- [null, null, null],
- null,
- null,
- [null, null, null, null]
-]
-```
-
-JSON.stringify will split this into 15 lines, and it's hard to read.
-
-Yet again, this feature comes with a performance hit, so if user experience matters to you more than performance, use this module. If your JSON will be consumed by machines, use JSON.stringify instead.
-
-As a rule of thumb, if you use the "space" argument to indent your JSON, you're better off using this module instead.
-
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/index.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/index.js
deleted file mode 100644
index 50f16249634fb6..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/index.js
+++ /dev/null
@@ -1,32 +0,0 @@
-
-module.exports.__defineGetter__('parse', function() {
- return require('./lib/parse').parse
-})
-
-module.exports.__defineGetter__('stringify', function() {
- return require('./lib/stringify').stringify
-})
-
-module.exports.__defineGetter__('tokenize', function() {
- return require('./lib/parse').tokenize
-})
-
-module.exports.__defineGetter__('update', function() {
- return require('./lib/document').update
-})
-
-module.exports.__defineGetter__('analyze', function() {
- return require('./lib/analyze').analyze
-})
-
-module.exports.__defineGetter__('utils', function() {
- return require('./lib/utils')
-})
-
-/**package
-{ "name": "jju",
- "version": "0.0.0",
- "dependencies": {"js-yaml": "*"},
- "scripts": {"postinstall": "js-yaml package.yaml > package.json ; npm install"}
-}
-**/
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/analyze.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/analyze.js
deleted file mode 100644
index 9b0f9af01cd9e8..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/analyze.js
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Author: Alex Kocharin
- * GIT: https://github.com/rlidwka/jju
- * License: WTFPL, grab your copy here: http://www.wtfpl.net/txt/copying/
- */
-
-var tokenize = require('./parse').tokenize
-
-module.exports.analyze = function analyzeJSON(input, options) {
- if (options == null) options = {}
-
- if (!Array.isArray(input)) {
- input = tokenize(input, options)
- }
-
- var result = {
- has_whitespace: false,
- has_comments: false,
- has_newlines: false,
- has_trailing_comma: false,
- indent: '',
- newline: '\n',
- quote: '"',
- quote_keys: true,
- }
-
- var stats = {
- indent: {},
- newline: {},
- quote: {},
- }
-
- for (var i=0; i<input.length; i++) {
- // (per-token statistics gathering elided)
- }
-
- for (var k in stats) {
- if (Object.keys(stats[k]).length) {
- result[k] = Object.keys(stats[k]).reduce(function(a, b) {
- return stats[k][a] > stats[k][b] ? a : b
- })
- }
- }
-
- return result
-}
-
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/document.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/document.js
deleted file mode 100644
index cfab8691fc9aba..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/document.js
+++ /dev/null
@@ -1,485 +0,0 @@
-/*
- * Author: Alex Kocharin
- * GIT: https://github.com/rlidwka/jju
- * License: WTFPL, grab your copy here: http://www.wtfpl.net/txt/copying/
- */
-
-var assert = require('assert')
-var tokenize = require('./parse').tokenize
-var stringify = require('./stringify').stringify
-var analyze = require('./analyze').analyze
-
-function isObject(x) {
- return typeof(x) === 'object' && x !== null
-}
-
-function value_to_tokenlist(value, stack, options, is_key, indent) {
- options = Object.create(options)
- options._stringify_key = !!is_key
-
- if (indent) {
- options._prefix = indent.prefix.map(function(x) {
- return x.raw
- }).join('')
- }
-
- if (options._splitMin == null) options._splitMin = 0
- if (options._splitMax == null) options._splitMax = 0
-
- var stringified = stringify(value, options)
-
- if (is_key) {
- return [ { raw: stringified, type: 'key', stack: stack, value: value } ]
- }
-
- options._addstack = stack
- var result = tokenize(stringified, {
- _addstack: stack,
- })
- result.data = null
- return result
-}
-
-// '1.2.3' -> ['1','2','3']
-function arg_to_path(path) {
- // array indexes
- if (typeof(path) === 'number') path = String(path)
-
- if (path === '') path = []
- if (typeof(path) === 'string') path = path.split('.')
-
- if (!Array.isArray(path)) throw Error('Invalid path type, string or array expected')
- return path
-}
-
-// returns new [begin, end] or false if not found
-//
-// {x:3, xxx: 111, y: [111, {q: 1, e: 2} ,333] }
-// f('y',0) returns this B^^^^^^^^^^^^^^^^^^^^^^^^E
-// then f('1',1) would reduce it to B^^^^^^^^^^E
-function find_element_in_tokenlist(element, lvl, tokens, begin, end) {
- while(tokens[begin].stack[lvl] != element) {
- if (begin++ >= end) return false
- }
- while(tokens[end].stack[lvl] != element) {
- if (end-- < begin) return false
- }
- return [begin, end]
-}
-
-function is_whitespace(token_type) {
- return token_type === 'whitespace'
- || token_type === 'newline'
- || token_type === 'comment'
-}
-
-function find_first_non_ws_token(tokens, begin, end) {
- while(is_whitespace(tokens[begin].type)) {
- if (begin++ >= end) return false
- }
- return begin
-}
-
-function find_last_non_ws_token(tokens, begin, end) {
- while(is_whitespace(tokens[end].type)) {
- if (end-- < begin) return false
- }
- return end
-}
-
-/*
- * when appending a new element of an object/array, we are trying to
- * figure out the style used on the previous element
- *
- * return {prefix, sep1, sep2, suffix}
- *
- * ' "key" : "element" \r\n'
- * prefix^^^^ sep1^ ^^sep2 ^^^^^^^^suffix
- *
- * begin - the beginning of the object/array
- * end - last token of the last element (value or comma usually)
- */
-function detect_indent_style(tokens, is_array, begin, end, level) {
- var result = {
- sep1: [],
- sep2: [],
- suffix: [],
- prefix: [],
- newline: [],
- }
-
- if (tokens[end].type === 'separator' && tokens[end].stack.length !== level+1 && tokens[end].raw !== ',') {
- // either a beginning of the array (no last element) or other weird situation
- //
- // just return defaults
- return result
- }
-
- // ' "key" : "value" ,'
- // skipping last separator, we're now here ^^
- if (tokens[end].type === 'separator')
- end = find_last_non_ws_token(tokens, begin, end - 1)
- if (end === false) return result
-
- // ' "key" : "value" ,'
- // skipping value ^^^^^^^
- while(tokens[end].stack.length > level) end--
-
- if (!is_array) {
- while(is_whitespace(tokens[end].type)) {
- if (end < begin) return result
- if (tokens[end].type === 'whitespace') {
- result.sep2.unshift(tokens[end])
- } else {
- // newline, comment or other unrecognized codestyle
- return result
- }
- end--
- }
-
- // ' "key" : "value" ,'
- // skipping separator ^
- assert.equal(tokens[end].type, 'separator')
- assert.equal(tokens[end].raw, ':')
- while(is_whitespace(tokens[--end].type)) {
- if (end < begin) return result
- if (tokens[end].type === 'whitespace') {
- result.sep1.unshift(tokens[end])
- } else {
- // newline, comment or other unrecognized codestyle
- return result
- }
- }
-
- assert.equal(tokens[end].type, 'key')
- end--
- }
-
- // ' "key" : "value" ,'
- // skipping key ^^^^^
- while(is_whitespace(tokens[end].type)) {
- if (end < begin) return result
- if (tokens[end].type === 'whitespace') {
- result.prefix.unshift(tokens[end])
- } else if (tokens[end].type === 'newline') {
- result.newline.unshift(tokens[end])
- return result
- } else {
- // comment or other unrecognized codestyle
- return result
- }
- end--
- }
-
- return result
-}
-
-function Document(text, options) {
- var self = Object.create(Document.prototype)
-
- if (options == null) options = {}
- //options._structure = true
- var tokens = self._tokens = tokenize(text, options)
- self._data = tokens.data
- tokens.data = null
- self._options = options
-
- var stats = analyze(text, options)
- if (options.indent == null) {
- options.indent = stats.indent
- }
- if (options.quote == null) {
- options.quote = stats.quote
- }
- if (options.quote_keys == null) {
- options.quote_keys = stats.quote_keys
- }
- if (options.no_trailing_comma == null) {
- options.no_trailing_comma = !stats.has_trailing_comma
- }
- return self
-}
-
-// return true if it's a proper object
-// throw otherwise
-function check_if_can_be_placed(key, object, is_unset) {
- //if (object == null) return false
- function error(add) {
- return Error("You can't " + (is_unset ? 'unset' : 'set') + " key '" + key + "'" + add)
- }
-
- if (!isObject(object)) {
- throw error(' of an non-object')
- }
- if (Array.isArray(object)) {
- // array, check boundary
- if (String(key).match(/^\d+$/)) {
- key = Number(String(key))
- if (object.length < key || (is_unset && object.length === key)) {
- throw error(', out of bounds')
- } else if (is_unset && object.length !== key+1) {
- throw error(' in the middle of an array')
- } else {
- return true
- }
- } else {
- throw error(' of an array')
- }
- } else {
- // object
- return true
- }
-}
-
-// usage: document.set('path.to.something', 'value')
-// or: document.set(['path','to','something'], 'value')
-Document.prototype.set = function(path, value) {
- path = arg_to_path(path)
-
- // updating this._data and check for errors
- if (path.length === 0) {
- if (value === undefined) throw Error("can't remove root document")
- this._data = value
- var new_key = false
-
- } else {
- var data = this._data
-
- for (var i=0; i<path.length-1; i++) {
- data = data[path[i]]
- }
-
- // (bulk of Document.prototype.set elided: updating this._data, then
- // locating the affected element's token range)
-
- // deleting an element, e.g. `{x:1, y:2} -> {x:1}`
- // removing sep, literal and optional sep
- // ':'
- var pos2 = find_last_non_ws_token(this._tokens, pos_old[0], position[0] - 1)
- assert.equal(this._tokens[pos2].type, 'separator')
- assert.equal(this._tokens[pos2].raw, ':')
- position[0] = pos2
-
- // key
- var pos2 = find_last_non_ws_token(this._tokens, pos_old[0], position[0] - 1)
- assert.equal(this._tokens[pos2].type, 'key')
- assert.equal(this._tokens[pos2].value, path[path.length-1])
- position[0] = pos2
- }
-
- // removing comma in arrays and objects
- var pos2 = find_last_non_ws_token(this._tokens, pos_old[0], position[0] - 1)
- assert.equal(this._tokens[pos2].type, 'separator')
- if (this._tokens[pos2].raw === ',') {
- position[0] = pos2
- } else {
- // beginning of the array/object, so we should remove trailing comma instead
- pos2 = find_first_non_ws_token(this._tokens, position[1] + 1, pos_old[1])
- assert.equal(this._tokens[pos2].type, 'separator')
- if (this._tokens[pos2].raw === ',') {
- position[1] = pos2
- }
- }
-
- } else {
- var indent = pos2 !== false
- ? detect_indent_style(this._tokens, Array.isArray(data), pos_old[0], position[1] - 1, i)
- : {}
- var newtokens = value_to_tokenlist(value, path, this._options, false, indent)
- }
-
- } else {
- // insert new key, that's tricky
- var path_1 = path.slice(0, i)
-
- // find a last separator after which we're inserting it
- var pos2 = find_last_non_ws_token(this._tokens, position[0] + 1, position[1] - 1)
- assert(pos2 !== false)
-
- var indent = pos2 !== false
- ? detect_indent_style(this._tokens, Array.isArray(data), position[0] + 1, pos2, i)
- : {}
-
- var newtokens = value_to_tokenlist(value, path, this._options, false, indent)
-
- // adding leading whitespaces according to detected codestyle
- var prefix = []
- if (indent.newline && indent.newline.length)
- prefix = prefix.concat(indent.newline)
- if (indent.prefix && indent.prefix.length)
- prefix = prefix.concat(indent.prefix)
-
- // adding '"key":' (as in "key":"value") to object values
- if (!Array.isArray(data)) {
- prefix = prefix.concat(value_to_tokenlist(path[path.length-1], path_1, this._options, true))
- if (indent.sep1 && indent.sep1.length)
- prefix = prefix.concat(indent.sep1)
- prefix.push({raw: ':', type: 'separator', stack: path_1})
- if (indent.sep2 && indent.sep2.length)
- prefix = prefix.concat(indent.sep2)
- }
-
- newtokens.unshift.apply(newtokens, prefix)
-
- // check if prev token is a separator AND they're at the same level
- if (this._tokens[pos2].type === 'separator' && this._tokens[pos2].stack.length === path.length-1) {
- // previous token is either , or [ or {
- if (this._tokens[pos2].raw === ',') {
- // restore ending comma
- newtokens.push({raw: ',', type: 'separator', stack: path_1})
- }
- } else {
- // previous token isn't a separator, so need to insert one
- newtokens.unshift({raw: ',', type: 'separator', stack: path_1})
- }
-
- if (indent.suffix && indent.suffix.length)
- newtokens.push.apply(newtokens, indent.suffix)
-
- assert.equal(this._tokens[position[1]].type, 'separator')
- position[0] = pos2+1
- position[1] = pos2
- }
-
- newtokens.unshift(position[1] - position[0] + 1)
- newtokens.unshift(position[0])
- this._tokens.splice.apply(this._tokens, newtokens)
-
- return this
-}
-
-// convenience method
-Document.prototype.unset = function(path) {
- return this.set(path, undefined)
-}
-
-Document.prototype.get = function(path) {
- path = arg_to_path(path)
-
- var data = this._data
- for (var i=0; i<path.length; i++) {
- if (!isObject(data)) return undefined
- data = data[path[i]]
- }
- return data
-}
-
-// (Document.prototype.update and the start of its diffing helper elided;
-// the array branch of the helper is shown below)
-
- if (new_data.length > old_data.length) {
- // adding new elements, so going forward
- for (var i=0; i<new_data.length; i++) {
- path.push(String(i))
- change(path, old_data[i], new_data[i])
- path.pop()
- }
- } else {
- // removing elements, so going backward
- for (var i=old_data.length-1; i>=0; i--) {
- path.push(String(i))
- change(path, old_data[i], new_data[i])
- path.pop()
- }
- }
-
- } else {
- // both values are objects here
- for (var i in new_data) {
- path.push(String(i))
- change(path, old_data[i], new_data[i])
- path.pop()
- }
-
- for (var i in old_data) {
- if (i in new_data) continue
- path.push(String(i))
- change(path, old_data[i], new_data[i])
- path.pop()
- }
- }
- }
-}
-
-Document.prototype.toString = function() {
- return this._tokens.map(function(x) {
- return x.raw
- }).join('')
-}
-
-module.exports.Document = Document
-
-module.exports.update = function updateJSON(source, new_value, options) {
- return Document(source, options).update(new_value).toString()
-}
-
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/parse.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/parse.js
deleted file mode 100644
index 0c9fbe68809cd0..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/parse.js
+++ /dev/null
@@ -1,765 +0,0 @@
-/*
- * Author: Alex Kocharin
- * GIT: https://github.com/rlidwka/jju
- * License: WTFPL, grab your copy here: http://www.wtfpl.net/txt/copying/
- */
-
-// RTFM: http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-262.pdf
-
-var Uni = require('./unicode')
-
-function isHexDigit(x) {
- return (x >= '0' && x <= '9')
- || (x >= 'A' && x <= 'F')
- || (x >= 'a' && x <= 'f')
-}
-
-function isOctDigit(x) {
- return x >= '0' && x <= '7'
-}
-
-function isDecDigit(x) {
- return x >= '0' && x <= '9'
-}
-
-var unescapeMap = {
- '\'': '\'',
- '"' : '"',
- '\\': '\\',
- 'b' : '\b',
- 'f' : '\f',
- 'n' : '\n',
- 'r' : '\r',
- 't' : '\t',
- 'v' : '\v',
- '/' : '/',
-}
-
-function formatError(input, msg, position, lineno, column, json5) {
- var result = msg + ' at ' + (lineno + 1) + ':' + (column + 1)
- , tmppos = position - column - 1
- , srcline = ''
- , underline = ''
-
- var isLineTerminator = json5 ? Uni.isLineTerminator : Uni.isLineTerminatorJSON
-
- // output no more than 70 characters before the wrong ones
- if (tmppos < position - 70) {
- tmppos = position - 70
- }
-
- while (1) {
- var chr = input[++tmppos]
-
- if (isLineTerminator(chr) || tmppos === input.length) {
- if (position >= tmppos) {
- // ending line error, so show it after the last char
- underline += '^'
- }
- break
- }
- srcline += chr
-
- if (position === tmppos) {
- underline += '^'
- } else if (position > tmppos) {
- underline += input[tmppos] === '\t' ? '\t' : ' '
- }
-
- // output no more than 78 characters on the string
- if (srcline.length > 78) break
- }
-
- return result + '\n' + srcline + '\n' + underline
-}
-
-function parse(input, options) {
- // parse as a standard JSON mode
- var json5 = false;
- var cjson = false;
-
- if (options.legacy || options.mode === 'json') {
- // use json
- } else if (options.mode === 'cjson') {
- cjson = true;
- } else if (options.mode === 'json5') {
- json5 = true;
- } else {
- // use it by default
- json5 = true;
- }
-
- var isLineTerminator = json5 ? Uni.isLineTerminator : Uni.isLineTerminatorJSON
- var isWhiteSpace = json5 ? Uni.isWhiteSpace : Uni.isWhiteSpaceJSON
-
- var length = input.length
- , lineno = 0
- , linestart = 0
- , position = 0
- , stack = []
-
- var tokenStart = function() {}
- var tokenEnd = function(v) {return v}
-
- /* tokenize({
- raw: '...',
- type: 'whitespace'|'comment'|'key'|'literal'|'separator'|'newline',
- value: 'number'|'string'|'whatever',
- path: [...],
- })
- */
- if (options._tokenize) {
- ;(function() {
- var start = null
- tokenStart = function() {
- if (start !== null) throw Error('internal error, token overlap')
- start = position
- }
-
- tokenEnd = function(v, type) {
- if (start != position) {
- var hash = {
- raw: input.substr(start, position-start),
- type: type,
- stack: stack.slice(0),
- }
- if (v !== undefined) hash.value = v
- options._tokenize.call(null, hash)
- }
- start = null
- return v
- }
- })()
- }
-
- function fail(msg) {
- var column = position - linestart
-
- if (!msg) {
- if (position < length) {
- var token = '\'' +
- JSON
- .stringify(input[position])
- .replace(/^"|"$/g, '')
- .replace(/'/g, "\\'")
- .replace(/\\"/g, '"')
- + '\''
-
- if (!msg) msg = 'Unexpected token ' + token
- } else {
- if (!msg) msg = 'Unexpected end of input'
- }
- }
-
- var error = SyntaxError(formatError(input, msg, position, lineno, column, json5))
- error.row = lineno + 1
- error.column = column + 1
- throw error
- }
-
- function newline(chr) {
- // account for <cr><lf>
- if (chr === '\r' && input[position] === '\n') position++
- linestart = position
- lineno++
- }
-
- function parseGeneric() {
- var result
-
- while (position < length) {
- tokenStart()
- var chr = input[position++]
-
- if (chr === '"' || (chr === '\'' && json5)) {
- return tokenEnd(parseString(chr), 'literal')
-
- } else if (chr === '{') {
- tokenEnd(undefined, 'separator')
- return parseObject()
-
- } else if (chr === '[') {
- tokenEnd(undefined, 'separator')
- return parseArray()
-
- } else if (chr === '-'
- || chr === '.'
- || isDecDigit(chr)
- // + number Infinity NaN
- || (json5 && (chr === '+' || chr === 'I' || chr === 'N'))
- ) {
- return tokenEnd(parseNumber(), 'literal')
-
- } else if (chr === 'n') {
- parseKeyword('null')
- return tokenEnd(null, 'literal')
-
- } else if (chr === 't') {
- parseKeyword('true')
- return tokenEnd(true, 'literal')
-
- } else if (chr === 'f') {
- parseKeyword('false')
- return tokenEnd(false, 'literal')
-
- } else {
- position--
- return tokenEnd(undefined)
- }
- }
- }
-
- function parseKey() {
- var result
-
- while (position < length) {
- tokenStart()
- var chr = input[position++]
-
- if (chr === '"' || (chr === '\'' && json5)) {
- return tokenEnd(parseString(chr), 'key')
-
- } else if (chr === '{') {
- tokenEnd(undefined, 'separator')
- return parseObject()
-
- } else if (chr === '[') {
- tokenEnd(undefined, 'separator')
- return parseArray()
-
- } else if (chr === '.'
- || isDecDigit(chr)
- ) {
- return tokenEnd(parseNumber(true), 'key')
-
- } else if (json5
- && Uni.isIdentifierStart(chr) || (chr === '\\' && input[position] === 'u')) {
- // unicode char or a unicode sequence
- var rollback = position - 1
- var result = parseIdentifier()
-
- if (result === undefined) {
- position = rollback
- return tokenEnd(undefined)
- } else {
- return tokenEnd(result, 'key')
- }
-
- } else {
- position--
- return tokenEnd(undefined)
- }
- }
- }
-
- function skipWhiteSpace() {
- tokenStart()
- while (position < length) {
- var chr = input[position++]
-
- if (isLineTerminator(chr)) {
- position--
- tokenEnd(undefined, 'whitespace')
- tokenStart()
- position++
- newline(chr)
- tokenEnd(undefined, 'newline')
- tokenStart()
-
- } else if (isWhiteSpace(chr)) {
- // nothing
-
- } else if (chr === '/'
- && (json5 || cjson)
- && (input[position] === '/' || input[position] === '*')
- ) {
- position--
- tokenEnd(undefined, 'whitespace')
- tokenStart()
- position++
- skipComment(input[position++] === '*')
- tokenEnd(undefined, 'comment')
- tokenStart()
-
- } else {
- position--
- break
- }
- }
- return tokenEnd(undefined, 'whitespace')
- }
-
- function skipComment(multi) {
- while (position < length) {
- var chr = input[position++]
-
- if (isLineTerminator(chr)) {
- // LineTerminator is an end of singleline comment
- if (!multi) {
- // let parent function deal with newline
- position--
- return
- }
-
- newline(chr)
-
- } else if (chr === '*' && multi) {
- // end of multiline comment
- if (input[position] === '/') {
- position++
- return
- }
-
- } else {
- // nothing
- }
- }
-
- if (multi) {
- fail('Unclosed multiline comment')
- }
- }
-
- function parseKeyword(keyword) {
- // keyword[0] is not checked because it should've checked earlier
- var _pos = position
- var len = keyword.length
- for (var i=1; i<len; i++) {
- if (position >= length || keyword[i] != input[position]) {
- position = _pos-1
- fail()
- }
- position++
- }
- }
-
- function parseObject() {
- var result = options.null_prototype ? Object.create(null) : {}
- , empty_object = {}
- , is_non_empty = false
-
- while (position < length) {
- skipWhiteSpace()
- var item1 = parseKey()
- skipWhiteSpace()
- tokenStart()
- var chr = input[position++]
- tokenEnd(undefined, 'separator')
-
- if (chr === '}' && item1 === undefined) {
- if (!json5 && is_non_empty) {
- position--
- fail('Trailing comma in object')
- }
- return result
-
- } else if (chr === ':' && item1 !== undefined) {
- skipWhiteSpace()
- stack.push(item1)
- var item2 = parseGeneric()
- stack.pop()
-
- if (item2 === undefined) fail('No value found for key ' + item1)
- if (typeof(item1) !== 'string') {
- if (!json5 || typeof(item1) !== 'number') {
- fail('Wrong key type: ' + item1)
- }
- }
-
- if ((item1 in empty_object || empty_object[item1] != null) && options.reserved_keys !== 'replace') {
- if (options.reserved_keys === 'throw') {
- fail('Reserved key: ' + item1)
- } else {
- // silently ignore it
- }
- } else {
- if (typeof(options.reviver) === 'function') {
- item2 = options.reviver.call(null, item1, item2)
- }
-
- if (item2 !== undefined) {
- is_non_empty = true
- Object.defineProperty(result, item1, {
- value: item2,
- enumerable: true,
- configurable: true,
- writable: true,
- })
- }
- }
-
- skipWhiteSpace()
-
- tokenStart()
- var chr = input[position++]
- tokenEnd(undefined, 'separator')
-
- if (chr === ',') {
- continue
-
- } else if (chr === '}') {
- return result
-
- } else {
- fail()
- }
-
- } else {
- position--
- fail()
- }
- }
-
- fail()
- }
-
- function parseArray() {
- var result = []
-
- while (position < length) {
- skipWhiteSpace()
- stack.push(result.length)
- var item = parseGeneric()
- stack.pop()
- skipWhiteSpace()
- tokenStart()
- var chr = input[position++]
- tokenEnd(undefined, 'separator')
-
- if (item !== undefined) {
- if (typeof(options.reviver) === 'function') {
- item = options.reviver.call(null, String(result.length), item)
- }
- if (item === undefined) {
- result.length++
- item = true // hack for check below, not included into result
- } else {
- result.push(item)
- }
- }
-
- if (chr === ',') {
- if (item === undefined) {
- fail('Elisions are not supported')
- }
-
- } else if (chr === ']') {
- if (!json5 && item === undefined && result.length) {
- position--
- fail('Trailing comma in array')
- }
- return result
-
- } else {
- position--
- fail()
- }
- }
- }
-
- function parseNumber() {
- // rewind because we don't know first char
- position--
-
- var start = position
- , chr = input[position++]
- , t
-
- var to_num = function(is_octal) {
- var str = input.substr(start, position - start)
-
- if (is_octal) {
- var result = parseInt(str.replace(/^0o?/, ''), 8)
- } else {
- var result = Number(str)
- }
-
- if (Number.isNaN(result)) {
- position--
- fail('Bad numeric literal - "' + input.substr(start, position - start + 1) + '"')
- } else if (!json5 && !str.match(/^-?(0|[1-9][0-9]*)(\.[0-9]+)?(e[+-]?[0-9]+)?$/i)) {
- // additional restrictions imposed by json
- position--
- fail('Non-json numeric literal - "' + input.substr(start, position - start + 1) + '"')
- } else {
- return result
- }
- }
-
- // ex: -5982475.249875e+29384
- // ^ skipping this
- if (chr === '-' || (chr === '+' && json5)) chr = input[position++]
-
- if (chr === 'N' && json5) {
- parseKeyword('NaN')
- return NaN
- }
-
- if (chr === 'I' && json5) {
- parseKeyword('Infinity')
-
- // returning +inf or -inf
- return to_num()
- }
-
- if (chr >= '1' && chr <= '9') {
- // ex: -5982475.249875e+29384
- // ^^^ skipping these
- while (position < length && isDecDigit(input[position])) position++
- chr = input[position++]
- }
-
- // special case for leading zero: 0.123456
- if (chr === '0') {
- chr = input[position++]
-
- // new syntax, "0o777" old syntax, "0777"
- var is_octal = chr === 'o' || chr === 'O' || isOctDigit(chr)
- var is_hex = chr === 'x' || chr === 'X'
-
- if (json5 && (is_octal || is_hex)) {
- while (position < length
- && (is_hex ? isHexDigit : isOctDigit)( input[position] )
- ) position++
-
- var sign = 1
- if (input[start] === '-') {
- sign = -1
- start++
- } else if (input[start] === '+') {
- start++
- }
-
- return sign * to_num(is_octal)
- }
- }
-
- if (chr === '.') {
- // ex: -5982475.249875e+29384
- // ^^^ skipping these
- while (position < length && isDecDigit(input[position])) position++
- chr = input[position++]
- }
-
- if (chr === 'e' || chr === 'E') {
- chr = input[position++]
- if (chr === '-' || chr === '+') position++
- // ex: -5982475.249875e+29384
- // ^^^ skipping these
- while (position < length && isDecDigit(input[position])) position++
- chr = input[position++]
- }
-
- // we have char in the buffer, so count for it
- position--
- return to_num()
- }
-
- function parseIdentifier() {
- // rewind because we don't know first char
- position--
-
- var result = ''
-
- while (position < length) {
- var chr = input[position++]
-
- if (chr === '\\'
- && input[position] === 'u'
- && isHexDigit(input[position+1])
- && isHexDigit(input[position+2])
- && isHexDigit(input[position+3])
- && isHexDigit(input[position+4])
- ) {
- // UnicodeEscapeSequence
- chr = String.fromCharCode(parseInt(input.substr(position+1, 4), 16))
- position += 5
- }
-
- if (result.length) {
- // identifier started
- if (Uni.isIdentifierPart(chr)) {
- result += chr
- } else {
- position--
- return result
- }
-
- } else {
- if (Uni.isIdentifierStart(chr)) {
- result += chr
- } else {
- return undefined
- }
- }
- }
-
- fail()
- }
-
- function parseString(endChar) {
- // 7.8.4 of ES262 spec
- var result = ''
-
- while (position < length) {
- var chr = input[position++]
-
- if (chr === endChar) {
- return result
-
- } else if (chr === '\\') {
- if (position >= length) fail()
- chr = input[position++]
-
- if (unescapeMap[chr] && (json5 || (chr != 'v' && chr != "'"))) {
- result += unescapeMap[chr]
-
- } else if (json5 && isLineTerminator(chr)) {
- // line continuation
- newline(chr)
-
- } else if (chr === 'u' || (chr === 'x' && json5)) {
- // unicode/character escape sequence
- var off = chr === 'u' ? 4 : 2
-
- // validation for \uXXXX
- for (var i=0; i<off; i++) {
- if (position >= length) fail()
- if (!isHexDigit(input[position])) fail('Bad escape sequence')
- position++
- }
-
- result += String.fromCharCode(parseInt(input.substr(position-off, off), 16))
- } else if (json5 && isOctDigit(chr)) {
- if (chr < '4' && isOctDigit(input[position]) && isOctDigit(input[position+1])) {
- // three-digit octal
- var digits = 3
- } else if (isOctDigit(input[position])) {
- // two-digit octal
- var digits = 2
- } else {
- var digits = 1
- }
- position += digits - 1
- result += String.fromCharCode(parseInt(input.substr(position-digits, digits), 8))
- /*if (!isOctDigit(input[position])) {
- // \0 is allowed still
- result += '\0'
- } else {
- fail('Octal literals are not supported')
- }*/
-
- } else if (json5) {
- // \X -> x
- result += chr
-
- } else {
- position--
- fail()
- }
-
- } else if (isLineTerminator(chr)) {
- fail()
-
- } else {
- if (!json5 && chr.charCodeAt(0) < 32) {
- position--
- fail('Unexpected control character')
- }
-
- // SourceCharacter but not one of " or \ or LineTerminator
- result += chr
- }
- }
-
- fail()
- }
-
- skipWhiteSpace()
- var return_value = parseGeneric()
- if (return_value !== undefined || position < length) {
- skipWhiteSpace()
-
- if (position >= length) {
- if (typeof(options.reviver) === 'function') {
- return_value = options.reviver.call(null, '', return_value)
- }
- return return_value
- } else {
- fail()
- }
-
- } else {
- if (position) {
- fail('No data, only a whitespace')
- } else {
- fail('No data, empty input')
- }
- }
-}
-
-/*
- * parse(text, options)
- * or
- * parse(text, reviver)
- *
- * where:
- * text - string
- * options - object
- * reviver - function
- */
-module.exports.parse = function parseJSON(input, options) {
- // support legacy functions
- if (typeof(options) === 'function') {
- options = {
- reviver: options
- }
- }
-
- if (input === undefined) {
- // parse(stringify(x)) should be equal x
- // with JSON functions it is not 'cause of undefined
- // so we're fixing it
- return undefined
- }
-
- // JSON.parse compat
- if (typeof(input) !== 'string') input = String(input)
- if (options == null) options = {}
- if (options.reserved_keys == null) options.reserved_keys = 'ignore'
-
- if (options.reserved_keys === 'throw' || options.reserved_keys === 'ignore') {
- if (options.null_prototype == null) {
- options.null_prototype = true
- }
- }
-
- try {
- return parse(input, options)
- } catch(err) {
- // jju is a recursive parser, so JSON.parse("{{{{{{{") could blow up the stack
- //
- // this catch is used to skip all those internal calls
- if (err instanceof SyntaxError && err.row != null && err.column != null) {
- var old_err = err
- err = SyntaxError(old_err.message)
- err.column = old_err.column
- err.row = old_err.row
- }
- throw err
- }
-}
-
-module.exports.tokenize = function tokenizeJSON(input, options) {
- if (options == null) options = {}
-
- options._tokenize = function(smth) {
- if (options._addstack) smth.stack.unshift.apply(smth.stack, options._addstack)
- tokens.push(smth)
- }
-
- var tokens = []
- tokens.data = module.exports.parse(input, options)
- return tokens
-}
-
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/stringify.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/stringify.js
deleted file mode 100644
index 232229ecc8af20..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/stringify.js
+++ /dev/null
@@ -1,383 +0,0 @@
-/*
- * Author: Alex Kocharin
- * GIT: https://github.com/rlidwka/jju
- * License: WTFPL, grab your copy here: http://www.wtfpl.net/txt/copying/
- */
-
-var Uni = require('./unicode')
-
-// Fix Function#name on browsers that do not support it (IE)
-// http://stackoverflow.com/questions/6903762/function-name-not-supported-in-ie
-if (!(function f(){}).name) {
- Object.defineProperty((function(){}).constructor.prototype, 'name', {
- get: function() {
- var name = this.toString().match(/^\s*function\s*(\S*)\s*\(/)[1]
- // For better performance only parse once, and then cache the
- // result through a new accessor for repeated access.
- Object.defineProperty(this, 'name', { value: name })
- return name
- }
- })
-}
-
-var special_chars = {
- 0: '\\0', // this is not an octal literal
- 8: '\\b',
- 9: '\\t',
- 10: '\\n',
- 11: '\\v',
- 12: '\\f',
- 13: '\\r',
- 92: '\\\\',
-}
-
-// for oddballs
-var hasOwnProperty = Object.prototype.hasOwnProperty
-
-// some people escape those, so I'd copy this to be safe
-var escapable = /[\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/
-
-function _stringify(object, options, recursiveLvl, currentKey) {
- var json5 = (options.mode === 'json5' || !options.mode)
- /*
- * Opinionated decision warning:
- *
- * Objects are serialized in the following form:
- * { type: 'Class', data: DATA }
- *
- * Class is supposed to be a function, and new Class(DATA) is
- * supposed to be equivalent to the original value
- */
- /*function custom_type() {
- return stringify({
- type: object.constructor.name,
- data: object.toString()
- })
- }*/
-
- // if add, it's an internal indentation, so we add 1 level and a eol
- // if !add, it's an ending indentation, so we just indent
- function indent(str, add) {
- var prefix = options._prefix ? options._prefix : ''
- if (!options.indent) return prefix + str
- var result = ''
- var count = recursiveLvl + (add || 0)
- for (var i=0; i<count; i++) result += options.indent
- return prefix + result + str + (add ? '\n' : '')
- }
-
- function _stringify_key(key) {
- // (quote_keys and literal-key checks elided)
- var result = ''
- for (var i=0; i<key.length; i++) {
- if (i > 0) {
- if (!Uni.isIdentifierPart(key[i]))
- return _stringify_str(key)
-
- } else {
- if (!Uni.isIdentifierStart(key[i]))
- return _stringify_str(key)
- }
-
- var chr = key.charCodeAt(i)
-
- if (options.ascii) {
- if (chr < 0x80) {
- result += key[i]
-
- } else {
- result += '\\u' + ('0000' + chr.toString(16)).slice(-4)
- }
-
- } else {
- if (escapable.exec(key[i])) {
- result += '\\u' + ('0000' + chr.toString(16)).slice(-4)
-
- } else {
- result += key[i]
- }
- }
- }
-
- return result
- }
-
- function _stringify_str(key) {
- var quote = options.quote
- var quoteChr = quote.charCodeAt(0)
-
- var result = ''
- for (var i=0; i<key.length; i++) {
- var chr = key.charCodeAt(i)
-
- if (chr < 0x10) {
- if (chr === 0 && json5) {
- result += '\\0'
- } else if (chr >= 8 && chr <= 13 && (json5 || chr !== 11)) {
- result += special_chars[chr]
- } else if (json5) {
- result += '\\x0' + chr.toString(16)
- } else {
- result += '\\u000' + chr.toString(16)
- }
-
- } else if (chr < 0x20) {
- if (json5) {
- result += '\\x' + chr.toString(16)
- } else {
- result += '\\u00' + chr.toString(16)
- }
-
- } else if (chr >= 0x20 && chr < 0x80) {
- // ascii range
- if (chr === 47 && i && key[i-1] === '<') {
- // escaping slashes in </script>
- result += '\\' + key[i]
-
- } else if (chr === 92) {
- result += '\\\\'
-
- } else if (chr === quoteChr) {
- result += '\\' + quote
-
- } else {
- result += key[i]
- }
-
- } else if (options.ascii || Uni.isLineTerminator(key[i]) || escapable.exec(key[i])) {
- if (chr < 0x100) {
- if (json5) {
- result += '\\x' + chr.toString(16)
- } else {
- result += '\\u00' + chr.toString(16)
- }
-
- } else if (chr < 0x1000) {
- result += '\\u0' + chr.toString(16)
-
- } else if (chr < 0x10000) {
- result += '\\u' + chr.toString(16)
-
- } else {
- throw Error('weird codepoint')
- }
- } else {
- result += key[i]
- }
- }
- return quote + result + quote
- }
-
- function _stringify_object() {
- if (object === null) return 'null'
- var result = []
- , len = 0
- , braces
-
- if (Array.isArray(object)) {
- braces = '[]'
- for (var i=0; i<object.length; i++) {
- var s = _stringify(object[i], options, recursiveLvl+1, String(i))
- if (s === undefined) s = 'null'
- len += s.length
- result.push(s + ',')
- }
- } else {
- braces = '{}'
- // (object-key serialization loop elided)
- }
-
- if (options.indent && (len > options._splitMax - recursiveLvl * options.indent.length || len > options._splitMin) ) {
- // remove trailing comma in multiline if asked to
- if (options.no_trailing_comma && result.length) {
- result[result.length-1] = result[result.length-1].substring(0, result[result.length-1].length-1)
- }
-
- var innerStuff = result.map(function(x) {return indent(x, 1)}).join('')
- return braces[0]
- + (options.indent ? '\n' : '')
- + innerStuff
- + indent(braces[1])
- } else {
- // always remove trailing comma in one-lined arrays
- if (result.length) {
- result[result.length-1] = result[result.length-1].substring(0, result[result.length-1].length-1)
- }
-
- var innerStuff = result.join(options.indent ? ' ' : '')
- return braces[0]
- + innerStuff
- + braces[1]
- }
- }
-
- function _stringify_nonobject(object) {
- if (typeof(options.replacer) === 'function') {
- object = options.replacer.call(null, currentKey, object)
- }
-
- switch(typeof(object)) {
- case 'string':
- return _stringify_str(object)
-
- case 'number':
- if (object === 0 && 1/object < 0) {
- // Opinionated decision warning:
- //
- // I want cross-platform negative zero in all js engines
- // I know they're equal, but why lose that tiny bit of
- // information needlessly?
- return '-0'
- }
- if (!json5 && !Number.isFinite(object)) {
- // json don't support infinity (= sucks)
- return 'null'
- }
- return object.toString()
-
- case 'boolean':
- return object.toString()
-
- case 'undefined':
- return undefined
-
- case 'function':
-// return custom_type()
-
- default:
- // fallback for something weird
- return JSON.stringify(object)
- }
- }
-
- if (options._stringify_key) {
- return _stringify_key(object)
- }
-
- if (typeof(object) === 'object') {
- if (object === null) return 'null'
-
- var str
- if (typeof(str = object.toJSON5) === 'function' && options.mode !== 'json') {
- object = str.call(object, currentKey)
-
- } else if (typeof(str = object.toJSON) === 'function') {
- object = str.call(object, currentKey)
- }
-
- if (object === null) return 'null'
- if (typeof(object) !== 'object') return _stringify_nonobject(object)
-
- if (object.constructor === Number || object.constructor === Boolean || object.constructor === String) {
- object = object.valueOf()
- return _stringify_nonobject(object)
-
- } else if (object.constructor === Date) {
- // only until we can't do better
- return _stringify_nonobject(object.toISOString())
-
- } else {
- if (typeof(options.replacer) === 'function') {
- object = options.replacer.call(null, currentKey, object)
- if (typeof(object) !== 'object') return _stringify_nonobject(object)
- }
-
- return _stringify_object(object)
- }
- } else {
- return _stringify_nonobject(object)
- }
-}
-
-/*
- * stringify(value, options)
- * or
- * stringify(value, replacer, space)
- *
- * where:
- * value - anything
- * options - object
- * replacer - function or array
- * space - boolean or number or string
- */
-module.exports.stringify = function stringifyJSON(object, options, _space) {
- // support legacy syntax
- if (typeof(options) === 'function' || Array.isArray(options)) {
- options = {
- replacer: options
- }
- } else if (typeof(options) === 'object' && options !== null) {
- // nothing to do
- } else {
- options = {}
- }
- if (_space != null) options.indent = _space
-
- if (options.indent == null) options.indent = '\t'
- if (options.quote == null) options.quote = "'"
- if (options.ascii == null) options.ascii = false
- if (options.mode == null) options.mode = 'json5'
-
- if (options.mode === 'json' || options.mode === 'cjson') {
- // json only supports double quotes (= sucks)
- options.quote = '"'
-
- // json don't support trailing commas (= sucks)
- options.no_trailing_comma = true
-
- // json don't support unquoted property names (= sucks)
- options.quote_keys = true
- }
-
- // why would anyone use such objects?
- if (typeof(options.indent) === 'object') {
- if (options.indent.constructor === Number
- || options.indent.constructor === Boolean
- || options.indent.constructor === String)
- options.indent = options.indent.valueOf()
- }
-
- // gap is capped at 10 characters
- if (typeof(options.indent) === 'number') {
- if (options.indent >= 0) {
- options.indent = Array(Math.min(~~options.indent, 10) + 1).join(' ')
- } else {
- options.indent = false
- }
- } else if (typeof(options.indent) === 'string') {
- options.indent = options.indent.substr(0, 10)
- }
-
- if (options._splitMin == null) options._splitMin = 50
- if (options._splitMax == null) options._splitMax = 70
-
- return _stringify(object, options, 0, '')
-}
-
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/unicode.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/unicode.js
deleted file mode 100644
index 1a29143c2d6b1c..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/unicode.js
+++ /dev/null
@@ -1,71 +0,0 @@
-
-// This is autogenerated with esprima tools, see:
-// https://github.com/ariya/esprima/blob/master/esprima.js
-//
-// PS: oh God, I hate Unicode
-
-// ECMAScript 5.1/Unicode v6.3.0 NonAsciiIdentifierStart:
-
-var Uni = module.exports
-
-module.exports.isWhiteSpace = function isWhiteSpace(x) {
- // section 7.2, table 2
- return x === '\u0020'
- || x === '\u00A0'
- || x === '\uFEFF' // <-- this is not a Unicode WS, only a JS one
- || (x >= '\u0009' && x <= '\u000D') // 9 A B C D
-
- // + whitespace characters from unicode, category Zs
- || x === '\u1680'
- || x === '\u180E'
- || (x >= '\u2000' && x <= '\u200A') // 0 1 2 3 4 5 6 7 8 9 A
- || x === '\u2028'
- || x === '\u2029'
- || x === '\u202F'
- || x === '\u205F'
- || x === '\u3000'
-}
-
-module.exports.isWhiteSpaceJSON = function isWhiteSpaceJSON(x) {
- return x === '\u0020'
- || x === '\u0009'
- || x === '\u000A'
- || x === '\u000D'
-}
-
-module.exports.isLineTerminator = function isLineTerminator(x) {
- // ok, here is the part when JSON is wrong
- // section 7.3, table 3
- return x === '\u000A'
- || x === '\u000D'
- || x === '\u2028'
- || x === '\u2029'
-}
-
-module.exports.isLineTerminatorJSON = function isLineTerminatorJSON(x) {
- return x === '\u000A'
- || x === '\u000D'
-}
-
-module.exports.isIdentifierStart = function isIdentifierStart(x) {
- return x === '$'
- || x === '_'
- || (x >= 'A' && x <= 'Z')
- || (x >= 'a' && x <= 'z')
- || (x >= '\u0080' && Uni.NonAsciiIdentifierStart.test(x))
-}
-
-module.exports.isIdentifierPart = function isIdentifierPart(x) {
- return x === '$'
- || x === '_'
- || (x >= 'A' && x <= 'Z')
- || (x >= 'a' && x <= 'z')
- || (x >= '0' && x <= '9') // <-- addition to Start
- || (x >= '\u0080' && Uni.NonAsciiIdentifierPart.test(x))
-}
-
-module.exports.NonAsciiIdentifierStart = /[\xAA\xB5\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u048A-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0\u08A2-\u08AC\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0977\u0979-\u097F\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D\u0C58\u0C59\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D\u0D4E\u0D60\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F0\u1700-\u170C\u170E-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191C\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19C1-\u19C7\u1A00-\u1A16\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u2E2F\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303C\u3041-\u3096\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B\uA640-\uA66E\uA67F-\uA697\uA6A0-\uA6EF\uA717-\uA71F\uA722-\uA788\uA78B-\uA78E\uA790-\uA793\uA7A0-\uA7AA\uA7F8-\uA801\uA803-\uA805\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uAA00-\uAA28\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA80-\uAAAF\uAAB1\uAAB5\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E\uABC0-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC]/
-
-// ECMAScript 5.1/Unicode v6.3.0 NonAsciiIdentifierPart:
-
-module.exports.NonAsciiIdentifierPart = /[\xAA\xB5\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0300-\u0374\u0376\u0377\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u0483-\u0487\u048A-\u0527\u0531-\u0556\u0559\u0561-\u0587\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u05D0-\u05EA\u05F0-\u05F2\u0610-\u061A\u0620-\u0669\u066E-\u06D3\u06D5-\u06DC\u06DF-\u06E8\u06EA-\u06FC\u06FF\u0710-\u074A\u074D-\u07B1\u07C0-\u07F5\u07FA\u0800-\u082D\u0840-\u085B\u08A0\u08A2-\u08AC\u08E4-\u08FE\u0900-\u0963\u0966-\u096F\u0971-\u0977\u0979-\u097F\u0981-\u0983\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BC-\u09C4\u09C7\u09C8\u09CB-\u09CE\u09D7\u09DC\u09DD\u09DF-\u09E3\u09E6-\u09F1\u0A01-\u0A03\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A59-\u0A5C\u0A5E\u0A66-\u0A75\u0A81-\u0A83\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABC-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AD0\u0AE0-\u0AE3\u0AE6-\u0AEF\u0B01-\u0B03\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3C-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B5C\u0B5D\u0B5F-\u0B63\u0B66-\u0B6F\u0B71\u0B82\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD0\u0BD7\u0BE6-\u0BEF\u0C01-\u0C03\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C58\u0C59\u0C60-\u0C63\u0C66-\u0C6F\u0C82\u0C83\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBC-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CDE\u0CE0-\u0CE3\u0CE6-\u0CEF\u0CF1\u0CF2\u0D02\u0D03\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D-\u0D44\u0D46-\u0D48\u0D4A-\u0D4E\u0D57\u0D60-\u0D63\u0D66-\u0D6F\u0D7A-\u0D7F\u0D82\u0D83\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E01-\u0E3A\u0E40-\u0E4E\u0E50-\u0E59\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB9\u0EBB-\u0EBD\u0EC0-\u0EC4\u0EC6\u0EC8-\u0ECD\u0ED0-\u0ED9\u0EDC-\u0EDF\u0F00\u0F18\u0F19\u0F20-\u0F29\u0F35\u0F37\u0F39\u0F3E-\u0F47\u0F49-\u0F6C\u0F71-\u0F84\u0F86-\u0F97\u0F99-\u0FBC\u0FC6\u1000-\u1049\u1050-\u109D\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u135D-\u135F\u1380-\u138F\u13A0-\u13F4\u1401-\u166C\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F0\u1700-\u170C\u170E-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176C\u176E-\u1770\u1772\u1773\u1780-\u17D3\u17D7\u17DC\u17DD\u17E0-\u17E9\u180B-\u180D\u1810-\u1819\u1820-\u1877\u1880-\u18AA\u18B0-\u18F5\u1900-\u191C\u1920-\u192B\u1930-\u193B\u1946-\u196D\u1970-\u1974\u1980-\u19AB\u19B0-\u19C9\u19D0-\u19D9\u1A00-\u1A1B\u1A20-\u1A5E\u1A60-\u1A7C\u1A7F-\u1A89\u1A90-\u1A99\u1AA7\u1B00-\u1B4B\u1B50-\u1B59\u1B6B-\u1B73\u1B80-\u1BF3\u1C00-\u1C37\u1C40-\u1C49\u1C4D-\u1C7D\u1CD0-\u1CD2\u1CD4-\u1CF6\u1D00-\u1DE6\u1DFC-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u200C\u200D\u203F\u2040\u2054\u2071\u207F\u2090-\u209C\u20D0-\u20DC\u20E1\u20E5-\u20F0\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D7F-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u2DE0-\u2DFF\u2E2F\u3005-\u3007\u3021-\u302F\u3031-\u3035\u3038-\u303C\u3041-\u3096\u3099\u309A\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA62B\uA640-\uA66F\uA674-\uA67D\uA67F-\uA697\uA69F-\uA6F1\uA717-\uA71F\uA722-\uA788\uA78B-\uA78E\uA790-\uA793\uA7A0-\uA7AA\uA7F8-\uA827\uA840-\uA873\uA880-\uA8C4\uA8D0-\uA8D9\uA8E0-\uA8F7\uA8FB\uA900-\uA92D\uA930-\uA953\uA960-\uA97C\uA980-\uA9C0\uA9CF-\uA9D9\uAA00-\uAA36\uAA40-\uAA4D\uAA50-\uAA59\uAA60-\uAA76\uAA7A\uAA7B\uAA80-\uAAC2\uAADB-\uAADD\uAAE0-\uAAEF\uAAF2-\uAAF6\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E\uABC0-\uABEA\uABEC\uABED\uABF0-\uABF9\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE00-\uFE0F\uFE20-\uFE26\uFE33\uFE34\uFE4D-\uFE4F\uFE70-\uFE74\uFE76-\uFEFC\uFF10-\uFF19\uFF21-\uFF3A\uFF3F\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC]/
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/utils.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/utils.js
deleted file mode 100644
index a8476b6c4630e1..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/utils.js
+++ /dev/null
@@ -1,46 +0,0 @@
-var FS = require('fs')
-var jju = require('../')
-
-// this function registers json5 extension, so you
-// can do `require("./config.json5")` kind of thing
-module.exports.register = function() {
- var r = require, e = 'extensions'
- r[e]['.json5'] = function(m, f) {
- /*eslint no-sync:0*/
- m.exports = jju.parse(FS.readFileSync(f, 'utf8'))
- }
-}
-
-// this function monkey-patches JSON.parse, so it
-// will return an exact position of error in case
-// of parse failure
-module.exports.patch_JSON_parse = function() {
- var _parse = JSON.parse
- JSON.parse = function(text, rev) {
- try {
- return _parse(text, rev)
- } catch(err) {
- // this call should always throw
- require('jju').parse(text, {
- mode: 'json',
- legacy: true,
- reviver: rev,
- reserved_keys: 'replace',
- null_prototype: false,
- })
-
- // if it didn't throw, but original parser did,
- // this is an error in this library and should be reported
- throw err
- }
- }
-}
-
-// this function is an express/connect middleware
-// that accepts uploads in application/json5 format
-module.exports.middleware = function() {
- return function(req, res, next) {
- throw Error('this function is removed, use express-json5 instead')
- }
-}
-
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.json b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.json
deleted file mode 100644
index 8eb5b70e76052f..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.json
+++ /dev/null
@@ -1,69 +0,0 @@
-{
- "_from": "jju@^1.1.0",
- "_id": "jju@1.3.0",
- "_integrity": "sha1-2t2e8BkkvHKLA/L3l5vb1i96Kqo=",
- "_location": "/read-package-json/json-parse-helpfulerror/jju",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "jju@^1.1.0",
- "name": "jju",
- "escapedName": "jju",
- "rawSpec": "^1.1.0",
- "saveSpec": null,
- "fetchSpec": "^1.1.0"
- },
- "_requiredBy": [
- "/read-package-json/json-parse-helpfulerror"
- ],
- "_resolved": "https://registry.npmjs.org/jju/-/jju-1.3.0.tgz",
- "_shasum": "dadd9ef01924bc728b03f2f7979bdbd62f7a2aaa",
- "_shrinkwrap": null,
- "_spec": "jju@^1.1.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror",
- "author": {
- "name": "Alex Kocharin",
- "email": "alex@kocharin.ru"
- },
- "bin": null,
- "bugs": {
- "url": "https://github.com/rlidwka/jju/issues"
- },
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "a set of utilities to work with JSON / JSON5 documents",
- "devDependencies": {
- "eslint": "~0.4.2",
- "js-yaml": ">=3.1.0",
- "mocha": ">=1.21.0"
- },
- "homepage": "http://rlidwka.github.io/jju/",
- "keywords": [
- "json",
- "json5",
- "parser",
- "serializer",
- "data"
- ],
- "license": {
- "type": "WTFPL",
- "url": "http://www.wtfpl.net/txt/copying/"
- },
- "name": "jju",
- "optionalDependencies": {},
- "peerDependencies": {},
- "publishConfig": {
- "registry": "https://registry.npmjs.org/"
- },
- "repository": {
- "type": "git",
- "url": "git://github.com/rlidwka/jju.git"
- },
- "scripts": {
- "lint": "eslint -c ./.eslint.yaml ./lib",
- "test": "mocha test/*.js"
- },
- "version": "1.3.0"
-}
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.yaml b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.yaml
deleted file mode 100644
index fdbb5372d4bcde..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.yaml
+++ /dev/null
@@ -1,46 +0,0 @@
-# use "yapm install ." if you're installing this from git repository
-
-# "jju" stands for "json/json5 utils"
-name: jju
-
-version: 1.3.0
-description: a set of utilities to work with JSON / JSON5 documents
-
-author:
- name: Alex Kocharin
- email: alex@kocharin.ru
-
-repository:
- type: git
- url: git://github.com/rlidwka/jju
-
-bugs:
- url: https://github.com/rlidwka/jju/issues
-
-homepage: http://rlidwka.github.io/jju/
-
-devDependencies:
- mocha: '>=1.21.0'
- js-yaml: '>=3.1.0'
-
- # linting tools
- eslint: '~0.4.2'
-
-scripts:
- test: 'mocha test/*.js'
- lint: 'eslint -c ./.eslint.yaml ./lib'
-
-keywords:
- - json
- - json5
- - parser
- - serializer
- - data
-
-publishConfig:
- registry: https://registry.npmjs.org/
-
-license:
- type: WTFPL
- url: http://www.wtfpl.net/txt/copying/
-
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/package.json b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/package.json
deleted file mode 100644
index 85d9f7e1275c77..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/package.json
+++ /dev/null
@@ -1,66 +0,0 @@
-{
- "_from": "json-parse-helpfulerror@^1.0.2",
- "_id": "json-parse-helpfulerror@1.0.3",
- "_integrity": "sha1-E/FM4C7tTpgSl7ZOueO5MuLdE9w=",
- "_location": "/read-package-json/json-parse-helpfulerror",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "json-parse-helpfulerror@^1.0.2",
- "name": "json-parse-helpfulerror",
- "escapedName": "json-parse-helpfulerror",
- "rawSpec": "^1.0.2",
- "saveSpec": null,
- "fetchSpec": "^1.0.2"
- },
- "_requiredBy": [
- "/read-package-json"
- ],
- "_resolved": "https://registry.npmjs.org/json-parse-helpfulerror/-/json-parse-helpfulerror-1.0.3.tgz",
- "_shasum": "13f14ce02eed4e981297b64eb9e3b932e2dd13dc",
- "_shrinkwrap": null,
- "_spec": "json-parse-helpfulerror@^1.0.2",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/read-package-json",
- "author": {
- "name": "Sam Mikes",
- "email": "smikes@cubane.com"
- },
- "bin": null,
- "bugs": {
- "url": "https://github.com/smikes/json-parse-helpfulerror/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "jju": "^1.1.0"
- },
- "deprecated": false,
- "description": "A drop-in replacement for JSON.parse that uses `jju` to give helpful errors",
- "devDependencies": {
- "code": "^1.2.1",
- "jslint": "^0.7.1",
- "lab": "^5.1.1"
- },
- "homepage": "https://github.com/smikes/json-parse-helpfulerror",
- "keywords": [
- "json",
- "parse",
- "line",
- "doublequote",
- "error"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "json-parse-helpfulerror",
- "optionalDependencies": {},
- "peerDependencies": {},
- "repository": {
- "type": "git",
- "url": "git+https://github.com/smikes/json-parse-helpfulerror.git"
- },
- "scripts": {
- "lint": "jslint --edition=latest --terse *.js",
- "test": "lab -c"
- },
- "version": "1.0.3"
-}
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/test/test.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/test/test.js
deleted file mode 100644
index fca458ac080f60..00000000000000
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/test/test.js
+++ /dev/null
@@ -1,32 +0,0 @@
-var Code = require('code'),
- Lab = require('lab'),
- lab = Lab.script(),
- jph = require('..'); // 'json-parse-helpfulerror'
-
-exports.lab = lab;
-
-lab.test('can parse', function (done) {
- var o = jph.parse('{"foo": "bar"}');
-
- Code.expect(o.foo).to.equal('bar');
- done();
-});
-
-lab.test('helpful error for bad JSON', function (done) {
-
- var bad = "{'foo': 'bar'}";
-
- Code.expect(function () { JSON.parse(bad) }).to.throw();
-
- Code.expect(function () { jph.parse(bad) }).to.throw(SyntaxError, "Unexpected token '\\'' at 1:2\n" + bad + '\n ^');
-
- done();
-});
-
-lab.test('fails if reviver throws', function (done) {
- function badReviver() { throw new ReferenceError('silly'); }
-
- Code.expect(function () { jph.parse('3', badReviver) }).to.throw(ReferenceError, 'silly');
-
- done();
-});
\ No newline at end of file
diff --git a/deps/npm/node_modules/read-package-json/node_modules/slash/index.js b/deps/npm/node_modules/read-package-json/node_modules/slash/index.js
new file mode 100644
index 00000000000000..b946a0841a01f4
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/slash/index.js
@@ -0,0 +1,11 @@
+'use strict';
+module.exports = function (str) {
+ var isExtendedLengthPath = /^\\\\\?\\/.test(str);
+ var hasNonAscii = /[^\x00-\x80]+/.test(str);
+
+ if (isExtendedLengthPath || hasNonAscii) {
+ return str;
+ }
+
+ return str.replace(/\\/g, '/');
+};
diff --git a/deps/npm/node_modules/read-package-json/node_modules/slash/package.json b/deps/npm/node_modules/read-package-json/node_modules/slash/package.json
new file mode 100644
index 00000000000000..847f1844de86d5
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/slash/package.json
@@ -0,0 +1,65 @@
+{
+ "_from": "slash@^1.0.0",
+ "_id": "slash@1.0.0",
+ "_inBundle": false,
+ "_integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU=",
+ "_location": "/read-package-json/slash",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "slash@^1.0.0",
+ "name": "slash",
+ "escapedName": "slash",
+ "rawSpec": "^1.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.0"
+ },
+ "_requiredBy": [
+ "/read-package-json"
+ ],
+ "_resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz",
+ "_shasum": "c41f2f6c39fc16d1cd17ad4b5d896114ae470d55",
+ "_spec": "slash@^1.0.0",
+ "_where": "/Users/rebecca/code/npm/node_modules/read-package-json",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "http://sindresorhus.com"
+ },
+ "bugs": {
+ "url": "https://github.com/sindresorhus/slash/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "Convert Windows backslash paths to slash paths",
+ "devDependencies": {
+ "mocha": "*"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/sindresorhus/slash#readme",
+ "keywords": [
+ "path",
+ "seperator",
+ "sep",
+ "slash",
+ "backslash",
+ "windows",
+ "win"
+ ],
+ "license": "MIT",
+ "name": "slash",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sindresorhus/slash.git"
+ },
+ "scripts": {
+ "test": "mocha"
+ },
+ "version": "1.0.0"
+}
diff --git a/deps/npm/node_modules/read-package-json/node_modules/slash/readme.md b/deps/npm/node_modules/read-package-json/node_modules/slash/readme.md
new file mode 100644
index 00000000000000..15672f010e182f
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/slash/readme.md
@@ -0,0 +1,44 @@
+# slash [![Build Status](https://travis-ci.org/sindresorhus/slash.svg?branch=master)](https://travis-ci.org/sindresorhus/slash)
+
+> Convert Windows backslash paths to slash paths: `foo\\bar` ➔ `foo/bar`
+
+[Forward-slash paths can be used in Windows](http://superuser.com/a/176395/6877) as long as they're not extended-length paths and don't contain any non-ascii characters.
+
+This was created since the `path` methods in Node output `\\` paths on Windows.
+
+
+## Install
+
+```sh
+$ npm install --save slash
+```
+
+
+## Usage
+
+```js
+var path = require('path');
+var slash = require('slash');
+
+var str = path.join('foo', 'bar');
+// Unix => foo/bar
+// Windows => foo\\bar
+
+slash(str);
+// Unix => foo/bar
+// Windows => foo/bar
+```
+
+
+## API
+
+### slash(path)
+
+Type: `string`
+
+Accepts a Windows backslash path and returns a slash path.
+
+
+## License
+
+MIT © [Sindre Sorhus](http://sindresorhus.com)
diff --git a/deps/npm/node_modules/read-package-json/package.json b/deps/npm/node_modules/read-package-json/package.json
index dc9f06e324578a..0df406db67c520 100644
--- a/deps/npm/node_modules/read-package-json/package.json
+++ b/deps/npm/node_modules/read-package-json/package.json
@@ -1,19 +1,19 @@
{
- "_from": "read-package-json@2.0.10",
- "_id": "read-package-json@2.0.10",
+ "_from": "read-package-json@2.0.12",
+ "_id": "read-package-json@2.0.12",
"_inBundle": false,
- "_integrity": "sha512-iNWaEs9hW9nviu5rHADmkm/Ob5dvah5zajtTS1XbyERSzkWgSwWZ6Z12bION7bEAzVc2YRFWnAz8k/tAr+5/eg==",
+ "_integrity": "sha512-m7/I0+tP6D34EVvSlzCtuVA4D/dHL6OpLcn2e4XVP5X57pCKGUy1JjRSBVKHWpB+vUU91sL85h84qX0MdXzBSw==",
"_location": "/read-package-json",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
- "raw": "read-package-json@2.0.10",
+ "raw": "read-package-json@2.0.12",
"name": "read-package-json",
"escapedName": "read-package-json",
- "rawSpec": "2.0.10",
+ "rawSpec": "2.0.12",
"saveSpec": null,
- "fetchSpec": "2.0.10"
+ "fetchSpec": "2.0.12"
},
"_requiredBy": [
"#USER",
@@ -22,9 +22,9 @@
"/read-installed",
"/read-package-tree"
],
- "_resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-2.0.10.tgz",
- "_shasum": "dc0229f6dde6b4b705b39e25b2d970ebe95685ae",
- "_spec": "read-package-json@2.0.10",
+ "_resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-2.0.12.tgz",
+ "_shasum": "68ea45f98b3741cb6e10ae3bbd42a605026a6951",
+ "_spec": "read-package-json@2.0.12",
"_where": "/Users/rebecca/code/npm",
"author": {
"name": "Isaac Z. Schlueter",
@@ -38,8 +38,9 @@
"dependencies": {
"glob": "^7.1.1",
"graceful-fs": "^4.1.2",
- "json-parse-helpfulerror": "^1.0.2",
- "normalize-package-data": "^2.0.0"
+ "json-parse-better-errors": "^1.0.0",
+ "normalize-package-data": "^2.0.0",
+ "slash": "^1.0.0"
},
"deprecated": false,
"description": "The thing npm uses to read package.json files with semantics and defaults and validation",
@@ -64,5 +65,5 @@
"scripts": {
"test": "standard && tap -J test/*.js"
},
- "version": "2.0.10"
+ "version": "2.0.12"
}
diff --git a/deps/npm/node_modules/read-package-json/read-json.js b/deps/npm/node_modules/read-package-json/read-json.js
index 7114df481b5547..969bcc030b158d 100644
--- a/deps/npm/node_modules/read-package-json/read-json.js
+++ b/deps/npm/node_modules/read-package-json/read-json.js
@@ -9,8 +9,9 @@ var path = require('path')
var glob = require('glob')
var normalizeData = require('normalize-package-data')
-var safeJSON = require('json-parse-helpfulerror')
+var safeJSON = require('json-parse-better-errors')
var util = require('util')
+var slash = require('slash')
module.exports = readJson
@@ -97,7 +98,7 @@ function parseJson (file, er, d, log, strict, cb) {
var data
try {
- data = safeJSON.parse(stripBOM(d))
+ data = safeJSON(stripBOM(d))
} catch (er) {
data = parseIndex(d)
if (!data) return cb(parseError(er, file))
@@ -316,7 +317,7 @@ function bins_ (file, data, bins, cb) {
data.bin = bins.reduce(function (acc, mf) {
if (mf && mf.charAt(0) !== '.') {
var f = path.basename(mf)
- acc[f] = path.join(m, mf)
+ acc[f] = slash(path.join(m, mf))
}
return acc
}, {})
@@ -425,7 +426,7 @@ function parseIndex (data) {
data = data.replace(/^\s*\*/mg, '')
try {
- return safeJSON.parse(data)
+ return safeJSON(data)
} catch (er) {
return null
}
diff --git a/deps/npm/node_modules/request/node_modules/form-data/Readme.md b/deps/npm/node_modules/request/node_modules/form-data/README.md
similarity index 100%
rename from deps/npm/node_modules/request/node_modules/form-data/Readme.md
rename to deps/npm/node_modules/request/node_modules/form-data/README.md
diff --git a/deps/npm/node_modules/semver/README.md b/deps/npm/node_modules/semver/README.md
index cbd956549dbb01..fd5151ab3769f8 100644
--- a/deps/npm/node_modules/semver/README.md
+++ b/deps/npm/node_modules/semver/README.md
@@ -1,55 +1,65 @@
semver(1) -- The semantic versioner for npm
===========================================
+## Install
+
+```bash
+npm install --save semver
+```
+
## Usage
- $ npm install semver
- $ node
- var semver = require('semver')
+As a node module:
- semver.valid('1.2.3') // '1.2.3'
- semver.valid('a.b.c') // null
- semver.clean(' =v1.2.3 ') // '1.2.3'
- semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
- semver.gt('1.2.3', '9.8.7') // false
- semver.lt('1.2.3', '9.8.7') // true
+```js
+const semver = require('semver')
+
+semver.valid('1.2.3') // '1.2.3'
+semver.valid('a.b.c') // null
+semver.clean(' =v1.2.3 ') // '1.2.3'
+semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
+semver.gt('1.2.3', '9.8.7') // false
+semver.lt('1.2.3', '9.8.7') // true
+```
As a command-line utility:
- $ semver -h
+```
+$ semver -h
- SemVer 5.1.0
+SemVer 5.3.0
- A JavaScript implementation of the http://semver.org/ specification
- Copyright Isaac Z. Schlueter
+A JavaScript implementation of the http://semver.org/ specification
+Copyright Isaac Z. Schlueter
- Usage: semver [options] <version> [<version> [...]]
- Prints valid versions sorted by SemVer precedence
+Usage: semver [options] <version> [<version> [...]]
+Prints valid versions sorted by SemVer precedence
- Options:
- -r --range <range>
- Print versions that match the specified range.
+Options:
+-r --range <range>
+ Print versions that match the specified range.
- -i --increment [<level>]
- Increment a version by the specified level. Level can
- be one of: major, minor, patch, premajor, preminor,
- prepatch, or prerelease. Default level is 'patch'.
- Only one version may be specified.
+-i --increment [<level>]
+ Increment a version by the specified level. Level can
+ be one of: major, minor, patch, premajor, preminor,
+ prepatch, or prerelease. Default level is 'patch'.
+ Only one version may be specified.
- --preid <identifier>
- Identifier to be used to prefix premajor, preminor,
- prepatch or prerelease version increments.
+--preid <identifier>
+ Identifier to be used to prefix premajor, preminor,
+ prepatch or prerelease version increments.
- -l --loose
- Interpret versions and ranges loosely
+-l --loose
+ Interpret versions and ranges loosely
- Program exits successfully if any valid version satisfies
- all supplied ranges, and prints all satisfying versions.
+Program exits successfully if any valid version satisfies
+all supplied ranges, and prints all satisfying versions.
- If no satisfying versions are found, then exits failure.
+If no satisfying versions are found, then exits failure.
- Versions are printed in ascending order, so supplying
- multiple versions to the utility will just sort them.
+Versions are printed in ascending order, so supplying
+multiple versions to the utility will just sort them.
+```
## Versions
@@ -126,20 +136,20 @@ The method `.inc` takes an additional `identifier` string argument that
will append the value of the string as a prerelease identifier:
```javascript
-> semver.inc('1.2.3', 'prerelease', 'beta')
-'1.2.4-beta.0'
+semver.inc('1.2.3', 'prerelease', 'beta')
+// '1.2.4-beta.0'
```
command-line example:
-```shell
+```bash
$ semver 1.2.3 -i prerelease --preid beta
1.2.4-beta.0
```
Which then can be used to increment further:
-```shell
+```bash
$ semver 1.2.4-beta.0 -i prerelease
1.2.4-beta.1
```
@@ -296,6 +306,8 @@ strings that they parse.
* `major(v)`: Return the major version number.
* `minor(v)`: Return the minor version number.
* `patch(v)`: Return the patch version number.
+* `intersects(r1, r2, loose)`: Return true if the two supplied ranges
+ or comparators intersect.
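+
+A quick illustration of `intersects` (example ranges of our own, not
+from the upstream docs):
+
+```js
+const semver = require('semver')
+
+// '1.x' (>=1.0.0 <2.0.0) overlaps '>=1.4.0 <3.0.0' on 1.4.0 and up
+semver.intersects('1.x', '>=1.4.0 <3.0.0') // true
+
+// '^1.0.0' and '^2.0.0' can never be satisfied by the same version
+semver.intersects('^1.0.0', '^2.0.0') // false
+```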
### Comparison
@@ -319,6 +331,9 @@ strings that they parse.
(`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),
or null if the versions are the same.
+### Comparators
+
+* `intersects(comparator)`: Return true if the comparators intersect
### Ranges
@@ -337,6 +352,7 @@ strings that they parse.
the bounds of the range in either the high or low direction. The
`hilo` argument must be either the string `'>'` or `'<'`. (This is
the function called by `gtr` and `ltr`.)
+* `intersects(range)`: Return true if any of the range's comparators intersect
Note that, since ranges may be non-contiguous, a version might not be
greater than a range, less than a range, *or* satisfy a range! For
diff --git a/deps/npm/node_modules/semver/package.json b/deps/npm/node_modules/semver/package.json
index 001b33b5f7a0f6..cf8088662e1557 100644
--- a/deps/npm/node_modules/semver/package.json
+++ b/deps/npm/node_modules/semver/package.json
@@ -1,23 +1,24 @@
{
- "_from": "semver@~5.3.0",
- "_id": "semver@5.3.0",
- "_integrity": "sha1-myzl094C0XxgEq0yaqa00M9U+U8=",
+ "_from": "semver@latest",
+ "_id": "semver@5.4.1",
+ "_inBundle": false,
+ "_integrity": "sha512-WfG/X9+oATh81XtllIo/I8gOiY9EXRdv1cQdyykeXK17YcUW3EXUAi2To4pcH6nZtJPr7ZOpM5OMyWJZm+8Rsg==",
"_location": "/semver",
"_phantomChildren": {},
"_requested": {
- "type": "range",
+ "type": "tag",
"registry": true,
- "raw": "semver@~5.3.0",
+ "raw": "semver@latest",
"name": "semver",
"escapedName": "semver",
- "rawSpec": "~5.3.0",
+ "rawSpec": "latest",
"saveSpec": null,
- "fetchSpec": "~5.3.0"
+ "fetchSpec": "latest"
},
"_requiredBy": [
+ "#USER",
"/",
"/init-package-json",
- "/node-gyp",
"/normalize-package-data",
"/npm-install-checks",
"/npm-package-arg",
@@ -28,10 +29,9 @@
"/update-notifier/latest-version/package-json",
"/update-notifier/semver-diff"
],
- "_resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz",
- "_shasum": "9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f",
- "_shrinkwrap": null,
- "_spec": "semver@~5.3.0",
+ "_resolved": "https://registry.npmjs.org/semver/-/semver-5.4.1.tgz",
+ "_shasum": "e059c09d8571f0540823733433505d3a2f00b18e",
+ "_spec": "semver@latest",
"_where": "/Users/zkat/Documents/code/npm",
"bin": {
"semver": "./bin/semver"
@@ -40,11 +40,10 @@
"url": "https://github.com/npm/node-semver/issues"
},
"bundleDependencies": false,
- "dependencies": {},
"deprecated": false,
"description": "The semantic version parser used by npm.",
"devDependencies": {
- "tap": "^2.0.0"
+ "tap": "^10.7.0"
},
"files": [
"bin",
@@ -55,14 +54,12 @@
"license": "ISC",
"main": "semver.js",
"name": "semver",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
"url": "git+https://github.com/npm/node-semver.git"
},
"scripts": {
- "test": "tap test/*.js"
+ "test": "tap test/*.js --cov -J"
},
- "version": "5.3.0"
+ "version": "5.4.1"
}
diff --git a/deps/npm/node_modules/semver/semver.js b/deps/npm/node_modules/semver/semver.js
index 5f1a3c5c9e5dc9..389cb4467684ae 100644
--- a/deps/npm/node_modules/semver/semver.js
+++ b/deps/npm/node_modules/semver/semver.js
@@ -563,7 +563,7 @@ function patch(a, loose) {
exports.compare = compare;
function compare(a, b, loose) {
- return new SemVer(a, loose).compare(b);
+ return new SemVer(a, loose).compare(new SemVer(b, loose));
}
exports.compareLoose = compareLoose;
@@ -704,11 +704,59 @@ Comparator.prototype.test = function(version) {
return cmp(version, this.operator, this.semver, this.loose);
};
+Comparator.prototype.intersects = function(comp, loose) {
+ if (!(comp instanceof Comparator)) {
+ throw new TypeError('a Comparator is required');
+ }
+
+ var rangeTmp;
+
+ if (this.operator === '') {
+ rangeTmp = new Range(comp.value, loose);
+ return satisfies(this.value, rangeTmp, loose);
+ } else if (comp.operator === '') {
+ rangeTmp = new Range(this.value, loose);
+ return satisfies(comp.semver, rangeTmp, loose);
+ }
+
+ var sameDirectionIncreasing =
+ (this.operator === '>=' || this.operator === '>') &&
+ (comp.operator === '>=' || comp.operator === '>');
+ var sameDirectionDecreasing =
+ (this.operator === '<=' || this.operator === '<') &&
+ (comp.operator === '<=' || comp.operator === '<');
+ var sameSemVer = this.semver.version === comp.semver.version;
+ var differentDirectionsInclusive =
+ (this.operator === '>=' || this.operator === '<=') &&
+ (comp.operator === '>=' || comp.operator === '<=');
+ var oppositeDirectionsLessThan =
+ cmp(this.semver, '<', comp.semver, loose) &&
+ ((this.operator === '>=' || this.operator === '>') &&
+ (comp.operator === '<=' || comp.operator === '<'));
+ var oppositeDirectionsGreaterThan =
+ cmp(this.semver, '>', comp.semver, loose) &&
+ ((this.operator === '<=' || this.operator === '<') &&
+ (comp.operator === '>=' || comp.operator === '>'));
+
+ return sameDirectionIncreasing || sameDirectionDecreasing ||
+ (sameSemVer && differentDirectionsInclusive) ||
+ oppositeDirectionsLessThan || oppositeDirectionsGreaterThan;
+};
+
exports.Range = Range;
function Range(range, loose) {
- if ((range instanceof Range) && range.loose === loose)
- return range;
+ if (range instanceof Range) {
+ if (range.loose === loose) {
+ return range;
+ } else {
+ return new Range(range.raw, loose);
+ }
+ }
+
+ if (range instanceof Comparator) {
+ return new Range(range.value, loose);
+ }
if (!(this instanceof Range))
return new Range(range, loose);
@@ -783,6 +831,22 @@ Range.prototype.parseRange = function(range) {
return set;
};
+Range.prototype.intersects = function(range, loose) {
+ if (!(range instanceof Range)) {
+ throw new TypeError('a Range is required');
+ }
+
+ return this.set.some(function(thisComparators) {
+ return thisComparators.every(function(thisComparator) {
+ return range.set.some(function(rangeComparators) {
+ return rangeComparators.every(function(rangeComparator) {
+ return thisComparator.intersects(rangeComparator, loose);
+ });
+ });
+ });
+ });
+};
+
// Mostly just for testing and legacy API reasons
exports.toComparators = toComparators;
function toComparators(range, loose) {
@@ -1087,20 +1151,42 @@ function satisfies(version, range, loose) {
exports.maxSatisfying = maxSatisfying;
function maxSatisfying(versions, range, loose) {
- return versions.filter(function(version) {
- return satisfies(version, range, loose);
- }).sort(function(a, b) {
- return rcompare(a, b, loose);
- })[0] || null;
+ var max = null;
+ var maxSV = null;
+ try {
+ var rangeObj = new Range(range, loose);
+ } catch (er) {
+ return null;
+ }
+ versions.forEach(function (v) {
+ if (rangeObj.test(v)) { // satisfies(v, range, loose)
+ if (!max || maxSV.compare(v) === -1) { // compare(max, v, true)
+ max = v;
+ maxSV = new SemVer(max, loose);
+ }
+ }
+ })
+ return max;
}
exports.minSatisfying = minSatisfying;
function minSatisfying(versions, range, loose) {
- return versions.filter(function(version) {
- return satisfies(version, range, loose);
- }).sort(function(a, b) {
- return compare(a, b, loose);
- })[0] || null;
+ var min = null;
+ var minSV = null;
+ try {
+ var rangeObj = new Range(range, loose);
+ } catch (er) {
+ return null;
+ }
+ versions.forEach(function (v) {
+ if (rangeObj.test(v)) { // satisfies(v, range, loose)
+ if (!min || minSV.compare(v) === 1) { // compare(min, v, true)
+ min = v;
+ minSV = new SemVer(min, loose);
+ }
+ }
+ })
+ return min;
}
exports.validRange = validRange;
@@ -1201,3 +1287,10 @@ function prerelease(version, loose) {
var parsed = parse(version, loose);
return (parsed && parsed.prerelease.length) ? parsed.prerelease : null;
}
+
+exports.intersects = intersects;
+function intersects(r1, r2, loose) {
+ r1 = new Range(r1, loose)
+ r2 = new Range(r2, loose)
+ return r1.intersects(r2)
+}
diff --git a/deps/npm/node_modules/tar/LICENSE b/deps/npm/node_modules/tar/LICENSE
index 019b7e40ea0568..19129e315fe593 100644
--- a/deps/npm/node_modules/tar/LICENSE
+++ b/deps/npm/node_modules/tar/LICENSE
@@ -1,8 +1,11 @@
The ISC License
+
Copyright (c) Isaac Z. Schlueter and Contributors
+
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
+
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
diff --git a/deps/npm/node_modules/tar/README.md b/deps/npm/node_modules/tar/README.md
index cfda2ac180611c..a356a78da20dc9 100644
--- a/deps/npm/node_modules/tar/README.md
+++ b/deps/npm/node_modules/tar/README.md
@@ -1,50 +1,883 @@
# node-tar
-Tar for Node.js.
+[![Build Status](https://travis-ci.org/npm/node-tar.svg?branch=master)](https://travis-ci.org/npm/node-tar)
-[![NPM](https://nodei.co/npm/tar.png)](https://nodei.co/npm/tar/)
+[Fast](./benchmarks) and full-featured Tar for Node.js
-## API
+The API is designed to mimic the behavior of `tar(1)` on unix systems.
+If you are familiar with how tar works, most of this will hopefully be
+straightforward for you. If not, then hopefully this module can teach
+you useful unix skills that may come in handy someday :)
-See `examples/` for usage examples.
+## Background
-### var tar = require('tar')
+A "tar file" or "tarball" is an archive of file system entries
+(directories, files, links, etc.). The name comes from "tape archive".
+If you run `man tar` on almost any Unix command line, you'll learn
+quite a bit about what it can do, and its history.
-Returns an object with `.Pack`, `.Extract` and `.Parse` methods.
+Tar has 5 main top-level commands:
-### tar.Pack([properties])
+* `c` Create an archive
+* `r` Replace entries within an archive
+* `u` Update entries within an archive (ie, replace if they're newer)
+* `t` List out the contents of an archive
+* `x` Extract an archive to disk
-Returns a through stream. Use
-[fstream](https://npmjs.org/package/fstream) to write files into the
-pack stream and you will receive tar archive data from the pack
-stream.
+The other flags and options modify how this top level function works.
-This only works with directories, it does not work with individual files.
+## High-Level API
-The optional `properties` object are used to set properties in the tar
-'Global Extended Header'. If the `fromBase` property is set to true,
-the tar will contain files relative to the path passed, and not with
-the path included.
+These 5 functions are the high-level API. All of them have a
+single-character name (for unix nerds familiar with `tar(1)`) as well
+as a long name (for everyone else).
-### tar.Extract([options])
+All the high-level functions take the following arguments, all three
+of which are optional and may be omitted.
-Returns a through stream. Write tar data to the stream and the files
-in the tarball will be extracted onto the filesystem.
+1. `options` - An optional object specifying various options
+2. `paths` - An array of paths to add or extract
+3. `callback` - Called when the command is completed, if async. (If
+ sync or no file specified, providing a callback throws a
+ `TypeError`.)
-`options` can be:
+If the command is sync (ie, if `options.sync=true`), then the
+callback is not allowed, since the action will be completed immediately.
+
+If a `file` argument is specified, and the command is async, then a
+`Promise` is returned. In this case, if async, a callback may be
+provided which is called when the command is completed.
+
+If a `file` option is not specified, then a stream is returned. For
+`create`, this is a readable stream of the generated archive. For
+`list` and `extract` this is a writable stream that an archive should
+be written into. If a file is not specified, then a callback is not
+allowed, because you're already getting a stream to work with.
+
+`replace` and `update` only work on existing archives, and so require
+a `file` argument.
+
+Sync commands without a file argument return a stream that acts on its
+input immediately in the same tick. For readable streams, this means
+that all of the data is immediately available by calling
+`stream.read()`. For writable streams, it will be acted upon as soon
+as it is provided, but this can be at any time.
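+
+For instance, a sync create without a `file` option buffers the whole
+archive up front (a small sketch; `some-dir` is a placeholder path):
+
+```js
+const tar = require('tar')
+// sync stream: the archive data can be read in the same tick
+const data = tar.c({ sync: true }, ['some-dir']).read()
+```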
+
+### Warnings
+
+Some things cause tar to emit a warning, but should usually not cause
+the entire operation to fail. There are three ways to handle
+warnings:
+
+1. **Ignore them** (default) Invalid entries won't be put in the
+ archive, and invalid entries won't be unpacked. This is usually
+ fine, but can hide failures that you might care about.
+2. **Notice them** Add an `onwarn` function to the options, or listen
+ to the `'warn'` event on any tar stream. The function will get
+ called as `onwarn(message, data)`. Handle as appropriate (see the
+ sketch after this list).
+3. **Explode them.** Set `strict: true` in the options object, and
+ `warn` messages will be emitted as `'error'` events instead. If
+ there's no `error` handler, this causes the program to crash. If
+ used with a promise-returning/callback-taking method, then it'll
+ send the error to the promise/callback.
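+
+For example, option 2 might look like this (a minimal sketch;
+`archive.tgz` is a placeholder filename):
+
+```js
+tar.x({
+  file: 'archive.tgz',
+  onwarn: (message, data) => console.error('tar warning:', message)
+})
+```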
+
+### Examples
+
+The API mimics the `tar(1)` command line functionality, with aliases
+for more human-readable option and function names. The goal is that
+if you know how to use `tar(1)` in Unix, then you know how to use
+`require('tar')` in JavaScript.
+
+To replicate `tar czf my-tarball.tgz files and folders`, you'd do:
+
+```js
+tar.c(
+ {
+ gzip: <true|gzip options>,
+ file: 'my-tarball.tgz'
+ },
+ ['some', 'files', 'and', 'folders']
+).then(_ => { .. tarball has been created .. })
+```
+
+To replicate `tar cz files and folders > my-tarball.tgz`, you'd do:
+
+```js
+tar.c( // or tar.create
+ {
+ gzip: <true|gzip options>
+ },
+ ['some', 'files', 'and', 'folders']
+).pipe(fs.createWriteStream('my-tarball.tgz'))
+```
+
+To replicate `tar xf my-tarball.tgz` you'd do:
+
+```js
+tar.x( // or tar.extract(
+ {
+ file: 'my-tarball.tgz'
+ }
+).then(_=> { .. tarball has been dumped in cwd .. })
+```
+
+To replicate `cat my-tarball.tgz | tar x -C some-dir --strip=1`:
```js
-{
- path: '/path/to/extract/tar/into',
- strip: 0, // how many path segments to strip from the root when extracting
-}
+fs.createReadStream('my-tarball.tgz').pipe(
+ tar.x({
+ strip: 1,
+ C: 'some-dir' // alias for cwd:'some-dir', also ok
+ })
+)
```
-`options` also get passed to the `fstream.Writer` instance that `tar`
-uses internally.
+To replicate `tar tf my-tarball.tgz`, do this:
+
+```js
+tar.t({
+ file: 'my-tarball.tgz',
+ onentry: entry => { .. do whatever with it .. }
+})
+```
+
+To replicate `cat my-tarball.tgz | tar t` do:
+
+```js
+fs.createReadStream('my-tarball.tgz')
+ .pipe(tar.t())
+ .on('entry', entry => { .. do whatever with it .. })
+```
+
+To do anything synchronous, add `sync: true` to the options. Note
+that sync functions don't take a callback and don't return a promise.
+When the function returns, it's already done. Sync methods without a
+file argument return a sync stream, which flushes immediately. But,
+of course, it still won't be done until you `.end()` it.
+
+To filter entries, add `filter: <function>` to the options.
+Tar-creating methods call the filter with `filter(path, stat)`.
+Tar-reading methods (including extraction) call the filter with
+`filter(path, entry)`. The filter is called in the `this`-context of
+the `Pack` or `Unpack` stream object.
+
+The arguments list to `tar t` and `tar x` specifies a list of filenames
+to extract or list, so they're equivalent to a filter that tests if
+the file is in the list.
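+
+A small filter sketch (the `.js` test is only an example):
+
+```js
+// only unpack JavaScript files from the archive
+tar.x({
+  file: 'my-tarball.tgz',
+  filter: (path, entry) => path.endsWith('.js')
+})
+```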
+
+For those who _aren't_ fans of tar's single-character command names:
+
+```
+tar.c === tar.create
+tar.r === tar.replace (appends to archive, file is required)
+tar.u === tar.update (appends if newer, file is required)
+tar.x === tar.extract
+tar.t === tar.list
+```
+
+Keep reading for all the command descriptions and options, as well as
+the low-level API that they are built on.
+
+### tar.c(options, fileList, callback) [alias: tar.create]
+
+Create a tarball archive.
+
+The `fileList` is an array of paths to add to the tarball. Adding a
+directory also adds its children recursively.
+
+An entry in `fileList` that starts with an `@` symbol is a tar archive
+whose entries will be added. To add a file that starts with `@`,
+prepend it with `./`.
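+
+For instance (illustrative paths):
+
+```js
+// adds ./@weird.txt as a plain file, plus every entry from old.tar
+tar.c({ file: 'out.tar' }, ['./@weird.txt', '@old.tar'])
+```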
+
+The following options are supported:
+
+- `file` Write the tarball archive to the specified filename. If this
+ is specified, then the callback will be fired when the file has been
+ written, and a promise will be returned that resolves when the file
+ is written. If a filename is not specified, then a Readable Stream
+ will be returned which will emit the file data. [Alias: `f`]
+- `sync` Act synchronously. If this is set, then any provided file
+ will be fully written after the call to `tar.c`. If this is set,
+ and a file is not provided, then the resulting stream will already
+ have the data ready to `read` or `emit('data')` as soon as you
+ request it.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`. [Alias: `C`]
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+ object with settings for `zlib.Gzip()` [Alias: `z`]
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths. [Alias: `P`]
+- `mode` The mode to set on the created file archive
+- `noDirRecurse` Do not recursively archive the contents of
+ directories. [Alias: `n`]
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such. [Alias: `L`, `h`]
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+
+The following options are mostly internal, but can be modified in some
+advanced use cases, such as re-using caches between runs.
+
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `readdirCache` A Map object that caches calls to `readdir`.
+- `jobs` A number specifying how many concurrent jobs to run.
+ Defaults to 4.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+
+### tar.x(options, fileList, callback) [alias: tar.extract]
+
+Extract a tarball archive.
+
+The `fileList` is an array of paths to extract from the tarball. If
+no paths are provided, then all the entries are extracted.
+
+If the archive is gzipped, then tar will detect this and unzip it.
+
+Note that all directories that are created will be forced to be
+writable, readable, and listable by their owner, to avoid cases where
+a directory prevents extraction of child entries by virtue of its
+mode.
+
+Most extraction errors will cause a `warn` event to be emitted. If
+the `cwd` is missing, or not a directory, then the extraction will
+fail completely.
+
+The following options are supported:
+
+- `cwd` Extract files relative to the specified directory. Defaults
+ to `process.cwd()`. If provided, this must exist and must be a
+ directory. [Alias: `C`]
+- `file` The archive file to extract. If not specified, then a
+ Writable stream is returned where the archive data should be
+ written. [Alias: `f`]
+- `sync` Create files and directories synchronously.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being unpacked. Return `true` to unpack the entry from the
+ archive, or `false` to skip it.
+- `newer` Set to true to keep the existing file on disk if it's newer
+ than the file in the archive. [Alias: `keep-newer`,
+ `keep-newer-files`]
+- `keep` Do not overwrite existing files. In particular, if a file
+ appears more than once in an archive, later copies will not
+ overwrite earlier copies. [Alias: `k`, `keep-existing`]
+- `preservePaths` Allow absolute paths, paths containing `..`, and
+ extracting through symbolic links. By default, `/` is stripped from
+ absolute paths, `..` paths are not extracted, and any file whose
+ location would be modified by a symbolic link is not extracted.
+ [Alias: `P`]
+- `unlink` Unlink files before creating them. Without this option,
+ tar overwrites existing files, which preserves existing hardlinks.
+ With this option, existing hardlinks will be broken, as will any
+ symlink that would affect the location of an extracted file. [Alias:
+ `U`]
+- `strip` Remove the specified number of leading path elements.
+ Pathnames with fewer elements will be silently skipped. Note that
+ the pathname is edited after applying the filter, but before
+ security checks. [Alias: `strip-components`, `stripComponents`]
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `preserveOwner` If true, tar will set the `uid` and `gid` of
+ extracted entries to the `uid` and `gid` fields in the archive.
+ This defaults to true when run as root, and false otherwise. If
+ false, then files and directories will be set with the owner and
+ group of the user running the process. This is similar to `-p` in
+ `tar(1)`, but ACLs and other system-specific data is never unpacked
+ in this implementation, and modes are set by default already.
+ [Alias: `p`]
+- `uid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified user id, regardless of the `uid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `gid` option.
+- `gid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified group id, regardless of the `gid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `uid` option.
+
+The following options are mostly internal, but can be modified in some
+advanced use cases, such as re-using caches between runs.
+
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `umask` Filter the modes of entries like `process.umask()`.
+- `dmode` Default mode for directories
+- `fmode` Default mode for files
+- `dirCache` A Map object of which directories exist.
+- `maxMetaEntrySize` The maximum size of meta entries that is
+ supported. Defaults to 1 MB.
+
+### tar.t(options, fileList, callback) [alias: tar.list]
+
+List the contents of a tarball archive.
+
+The `fileList` is an array of paths to list from the tarball. If
+no paths are provided, then all the entries are listed.
+
+If the archive is gzipped, then tar will detect this and unzip it.
+
+Returns an event emitter that emits `entry` events with
+`tar.ReadEntry` objects. However, they don't emit `'data'` or `'end'`
+events. (If you want to get actual readable entries, use the
+`tar.Parse` class instead.)
+
+The following options are supported:
+
+- `cwd` Extract files relative to the specified directory. Defaults
+ to `process.cwd()`. [Alias: `C`]
+- `file` The archive file to list. If not specified, then a
+ Writable stream is returned where the archive data should be
+ written. [Alias: `f`]
+- `sync` Read the specified file synchronously. (This has no effect
+ when a file option isn't specified, because entries are emitted as
+ fast as they are parsed from the stream anyway.)
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being listed. Return `true` to emit the entry from the
+ archive, or `false` to skip it.
+- `onentry` A function that gets called with `(entry)` for each entry
+  that passes the filter. This is important when both `file` and
+  `sync` are set, because it will be called synchronously.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noResume` By default, `entry` streams are resumed immediately after
+ the call to `onentry`. Set `noResume: true` to suppress this
+ behavior. Note that by opting into this, the stream will never
+ complete until the entry data is consumed.
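+
+A minimal sketch of listing an archive's paths (the filename is
+illustrative):
+
+```js
+const tar = require('tar')
+
+// print every path in the archive without buffering entry data
+tar.t({
+  file: 'my-tarball.tgz',
+  onentry: entry => console.log(entry.path)
+})
+```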
+
+### tar.u(options, fileList, callback) [alias: tar.update]
+
+Add files to an archive if they are newer than the entry already in
+the tarball archive.
+
+The `fileList` is an array of paths to add to the tarball. Adding a
+directory also adds its children recursively.
+
+An entry in `fileList` that starts with an `@` symbol is a tar archive
+whose entries will be added. To add a file that starts with `@`,
+prepend it with `./`.
+
+The following options are supported:
+
+- `file` Required. Write the tarball archive to the specified
+ filename. [Alias: `f`]
+- `sync` Act synchronously. If this is set, then any provided file
+  will be fully written after the call to `tar.u`.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for adding entries to the
+ archive. Defaults to `process.cwd()`. [Alias: `C`]
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+  object with settings for `zlib.Gzip()`. [Alias: `z`]
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+  `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+  that `mtime` is still included, because this is necessary for other
+  time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths. [Alias: `P`]
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noDirRecurse` Do not recursively archive the contents of
+ directories. [Alias: `n`]
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such. [Alias: `L`, `h`]
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
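+
+A minimal sketch of updating an archive (assuming an uncompressed
+`archive.tar` already exists):
+
+```js
+const tar = require('tar')
+
+// rewrite entries that are older than the files on disk;
+// resolves once the file has been fully written
+tar.u({ file: 'archive.tar' }, ['src/index.js'])
+  .then(_ => console.log('updated'))
+```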
+
+### tar.r(options, fileList, callback) [alias: tar.replace]
+
+Add files to an existing archive. Because later entries override
+earlier entries, this effectively replaces any existing entries.
+
+The `fileList` is an array of paths to add to the tarball. Adding a
+directory also adds its children recursively.
+
+An entry in `fileList` that starts with an `@` symbol is a tar archive
+whose entries will be added. To add a file that starts with `@`,
+prepend it with `./`.
+
+The following options are supported:
+
+- `file` Required. Write the tarball archive to the specified
+ filename. [Alias: `f`]
+- `sync` Act synchronously. If this is set, then any provided file
+  will be fully written after the call to `tar.r`.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for adding entries to the
+ archive. Defaults to `process.cwd()`. [Alias: `C`]
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+  object with settings for `zlib.Gzip()`. [Alias: `z`]
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+  `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+  that `mtime` is still included, because this is necessary for other
+  time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths. [Alias: `P`]
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noDirRecurse` Do not recursively archive the contents of
+ directories. [Alias: `n`]
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such. [Alias: `L`, `h`]
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
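+
+A minimal sketch of replacing entries (again assuming an uncompressed
+`archive.tar` exists):
+
+```js
+const tar = require('tar')
+
+// append src/index.js; since later entries override earlier ones,
+// this effectively replaces any existing entry at that path
+tar.r({ file: 'archive.tar' }, ['src/index.js'])
+  .then(_ => console.log('replaced'))
+```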
+
+## Low-Level API
+
+### class tar.Pack
+
+A readable tar stream.
+
+Has all the standard readable stream interface stuff. `'data'` and
+`'end'` events, `read()` method, `pause()` and `resume()`, etc.
+
+#### constructor(options)
+
+The following options are supported:
+
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`.
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+  object with settings for `zlib.Gzip()`.
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+  `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+  that `mtime` is still included, because this is necessary for other
+  time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `readdirCache` A Map object that caches calls to `readdir`.
+- `jobs` A number specifying how many concurrent jobs to run.
+ Defaults to 4.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noDirRecurse` Do not recursively archive the contents of
+ directories.
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such.
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+
+#### add(path)
+
+Adds an entry to the archive. Returns the Pack stream.
+
+#### write(path)
+
+Adds an entry to the archive. Returns true if flushed.
+
+#### end()
+
+Finishes the archive.
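+
+A minimal sketch of packing by hand (the paths here are
+illustrative):
+
+```js
+const tar = require('tar')
+const fs = require('fs')
+
+// Pack is a readable stream, so pipe it somewhere and add entries
+const pack = new tar.Pack({ cwd: process.cwd() })
+pack.pipe(fs.createWriteStream('out.tar'))
+pack.add('index.js')
+pack.end()
+```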
+
+### class tar.Pack.Sync
+
+Synchronous version of `tar.Pack`.
+
+### class tar.Unpack
+
+A writable stream that unpacks a tar archive onto the file system.
+
+All the normal writable stream stuff is supported. `write()` and
+`end()` methods, `'drain'` events, etc.
+
+Note that all directories that are created will be forced to be
+writable, readable, and listable by their owner, to avoid cases where
+a directory prevents extraction of child entries by virtue of its
+mode.
+
+`'close'` is emitted when it's done writing stuff to the file system.
+
+Most unpack errors will cause a `warn` event to be emitted. If the
+`cwd` is missing, or not a directory, then an error will be emitted.
+
+#### constructor(options)
+
+- `cwd` Extract files relative to the specified directory. Defaults
+ to `process.cwd()`. If provided, this must exist and must be a
+ directory.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being unpacked. Return `true` to unpack the entry from the
+ archive, or `false` to skip it.
+- `newer` Set to true to keep the existing file on disk if it's newer
+ than the file in the archive.
+- `keep` Do not overwrite existing files. In particular, if a file
+ appears more than once in an archive, later copies will not
+ overwrite earlier copies.
+- `preservePaths` Allow absolute paths, paths containing `..`, and
+ extracting through symbolic links. By default, `/` is stripped from
+ absolute paths, `..` paths are not extracted, and any file whose
+ location would be modified by a symbolic link is not extracted.
+- `unlink` Unlink files before creating them. Without this option,
+ tar overwrites existing files, which preserves existing hardlinks.
+ With this option, existing hardlinks will be broken, as will any
+ symlink that would affect the location of an extracted file.
+- `strip` Remove the specified number of leading path elements.
+ Pathnames with fewer elements will be silently skipped. Note that
+ the pathname is edited after applying the filter, but before
+ security checks.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `umask` Filter the modes of entries like `process.umask()`.
+- `dmode` Default mode for directories.
+- `fmode` Default mode for files.
+- `dirCache` A Map object tracking which directories exist.
+- `maxMetaEntrySize` The maximum size of meta entries that is
+ supported. Defaults to 1 MB.
+- `preserveOwner` If true, tar will set the `uid` and `gid` of
+  extracted entries to the `uid` and `gid` fields in the archive.
+  This defaults to true when run as root, and false otherwise. If
+  false, then files and directories will be set with the owner and
+  group of the user running the process. This is similar to `-p` in
+  `tar(1)`, but ACLs and other system-specific data are never unpacked
+  in this implementation, and modes are set by default already.
+- `win32` True if on a windows platform. Causes behavior where
+ filenames containing `<|>?` chars are converted to
+ windows-compatible values while being unpacked.
+- `uid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified user id, regardless of the `uid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `gid` option.
+- `gid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified group id, regardless of the `gid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `uid` option.
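+
+A minimal sketch of unpacking a stream (assuming `dest` already
+exists, since the `cwd` must be an existing directory):
+
+```js
+const tar = require('tar')
+const fs = require('fs')
+
+// Unpack is a writable stream; 'close' fires when writing is done
+const unpack = new tar.Unpack({ cwd: 'dest' })
+unpack.on('close', _ => console.log('done writing'))
+fs.createReadStream('archive.tar').pipe(unpack)
+```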
+
+### class tar.Unpack.Sync
+
+Synchronous version of `tar.Unpack`.
+
+### class tar.Parse
+
+A writable stream that parses a tar archive stream. All the standard
+writable stream stuff is supported.
+
+If the archive is gzipped, then tar will detect this and unzip it.
+
+Emits `'entry'` events with `tar.ReadEntry` objects, which are
+themselves readable streams that you can pipe wherever.
+
+Each `entry` will not emit until the one before it is flushed through,
+so make sure to either consume the data (with `on('data', ...)` or
+`.pipe(...)`) or throw it away with `.resume()` to keep the stream
+flowing.
+
+#### constructor(options)
+
+Returns an event emitter that emits `entry` events with
+`tar.ReadEntry` objects.
+
+The following options are supported:
+
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being listed. Return `true` to emit the entry from the
+ archive, or `false` to skip it.
+- `onentry` A function that gets called with `(entry)` for each entry
+ that passes the filter.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
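+
+A minimal sketch of parsing a stream, taking care to resume each
+entry so the next one can be emitted (filename is illustrative):
+
+```js
+const tar = require('tar')
+const fs = require('fs')
+
+const parser = new tar.Parse({
+  filter: path => path.endsWith('.js')
+})
+parser.on('entry', entry => {
+  console.log(entry.path)
+  entry.resume() // throw the body away to keep the stream flowing
+})
+fs.createReadStream('my-tarball.tgz').pipe(parser)
+```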
+
+#### abort(message, error)
+
+Stop all parsing activities. This is called when there are zlib
+errors. It also emits a warning with the message and error provided.
+
+### class tar.ReadEntry extends [MiniPass](http://npm.im/minipass)
+
+A representation of an entry that is being read out of a tar archive.
+
+It has the following fields:
+
+- `extended` The extended metadata object provided to the constructor.
+- `globalExtended` The global extended metadata object provided to the
+ constructor.
+- `remain` The number of bytes remaining to be written into the
+ stream.
+- `blockRemain` The number of 512-byte blocks remaining to be written
+ into the stream.
+- `ignore` Whether this entry should be ignored.
+- `meta` True if this represents metadata about the next entry, false
+ if it represents a filesystem object.
+- All the fields from the header, extended header, and global extended
+  header are added to the ReadEntry object. So it has `path`, `type`,
+  `size`, `mode`, and so on.
+
+#### constructor(header, extended, globalExtended)
+
+Create a new ReadEntry object with the specified header, extended
+header, and global extended header values.
+
+### class tar.WriteEntry extends [MiniPass](http://npm.im/minipass)
+
+A representation of an entry that is being written from the file
+system into a tar archive.
+
+Emits data for the Header, and for the Pax Extended Header if one is
+required, as well as any body data.
+
+Creating a WriteEntry for a directory does not also create
+WriteEntry objects for all of the directory contents.
+
+It has the following fields:
+
+- `path` The path field that will be written to the archive. By
+ default, this is also the path from the cwd to the file system
+ object.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+  `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+  that `mtime` is still included, because this is necessary for other
+  time-based operations.
+- `myuid` If supported, the uid of the user running the current
+ process.
+- `myuser` The `env.USER` string if set, or `''`. Set as the entry
+ `uname` field if the file's `uid` matches `this.myuid`.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 1 MB.
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`.
+- `absolute` The absolute path to the entry on the filesystem. By
+ default, this is `path.resolve(this.cwd, this.path)`, but it can be
+ overridden explicitly.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `win32` True if on a windows platform. Causes behavior where paths
+ replace `\` with `/` and filenames containing the windows-compatible
+ forms of `<|>?:` characters are converted to actual `<|>?:` characters
+ in the archive.
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+
+#### constructor(path, options)
+
+`path` is the path of the entry as it is written in the archive.
+
+The following options are supported:
+
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+  `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+  that `mtime` is still included, because this is necessary for other
+  time-based operations.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 1 MB.
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`.
+- `absolute` The absolute path to the entry on the filesystem. By
+ default, this is `path.resolve(this.cwd, this.path)`, but it can be
+ overridden explicitly.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `win32` True if on a windows platform. Causes behavior where paths
+ replace `\` with `/`.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+
+#### warn(message, data)
+
+If strict, emit an error with the provided message.
+
+Otherwise, emit a `'warn'` event with the provided message and data.
+
+### class tar.WriteEntry.Sync
+
+Synchronous version of `tar.WriteEntry`.
+
+### class tar.WriteEntry.Tar
+
+A version of `tar.WriteEntry` that gets its data from a `tar.ReadEntry`
+instead of from the filesystem.
+
+#### constructor(readEntry, options)
+
+`readEntry` is the entry being read out of another archive.
+
+The following options are supported:
+
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+  `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+  that `mtime` is still included, because this is necessary for other
+  time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+
+### class tar.Header
+
+A class for reading and writing header blocks.
+
+It has the following fields:
+
+- `nullBlock` True if decoding a block which is entirely composed of
+ `0x00` null bytes. (Useful because tar files are terminated by
+ at least 2 null blocks.)
+- `cksumValid` True if the checksum in the header is valid, false
+ otherwise.
+- `needPax` True if the values, as encoded, will require a Pax
+ extended header.
+- `path` The path of the entry.
+- `mode` The 4 lowest-order octal digits of the file mode. That is,
+ read/write/execute permissions for world, group, and owner, and the
+ setuid, setgid, and sticky bits.
+- `uid` Numeric user id of the file owner
+- `gid` Numeric group id of the file owner
+- `size` Size of the file in bytes
+- `mtime` Modified time of the file
+- `cksum` The checksum of the header. This is generated by adding all
+  the bytes of the header block, treating the checksum field itself as
+  all ASCII space characters (that is, `0x20`).
+- `type` The human-readable name of the type of entry this represents,
+ or the alphanumeric key if unknown.
+- `typeKey` The alphanumeric key for the type of entry this header
+ represents.
+- `linkpath` The target of Link and SymbolicLink entries.
+- `uname` Human-readable user name of the file owner
+- `gname` Human-readable group name of the file owner
+- `devmaj` The major portion of the device number. Always `0` for
+ files, directories, and links.
+- `devmin` The minor portion of the device number. Always `0` for
+ files, directories, and links.
+- `atime` File access time.
+- `ctime` File change time.
+
+#### constructor(data, [offset=0])
+
+`data` is optional. It is either a Buffer that should be interpreted
+as a tar Header starting at the specified offset and continuing for
+512 bytes, or a data object of keys and values to set on the header
+object, and eventually encode as a tar Header.
+
+#### decode(block, offset)
+
+Decode the provided buffer starting at the specified offset.
+
+The buffer must contain at least 512 bytes starting at the specified
+offset.
+
+#### set(data)
+
+Set the fields in the data object.
+
+#### encode(buffer, offset)
+
+Encode the header fields into the buffer at the specified offset.
+
+Returns `this.needPax` to indicate whether a Pax Extended Header is
+required to properly encode the specified data.
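+
+A minimal round-trip sketch (the field values are illustrative):
+
+```js
+const Header = require('tar').Header
+
+// encode into a fresh 512-byte block, then decode it back
+const h = new Header({ path: 'hello.txt', type: 'File', size: 6 })
+h.encode() // with no buffer given, fills h.block
+const h2 = new Header(h.block)
+console.log(h2.path, h2.size, h2.cksumValid) // 'hello.txt' 6 true
+```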
+
+### class tar.Pax
+
+An object representing a set of key-value pairs in a Pax extended
+header entry.
+
+It has the following fields. Where a field shares a name with a
+`tar.Header` field, it has the same semantics.
+
+- `global` True if this represents a global extended header, or false
+ if it is for a single entry.
+- `atime`
+- `charset`
+- `comment`
+- `ctime`
+- `gid`
+- `gname`
+- `linkpath`
+- `mtime`
+- `path`
+- `size`
+- `uid`
+- `uname`
+- `dev`
+- `ino`
+- `nlink`
+
+#### constructor(object, global)
+
+Set the fields given in the object. `global` is a boolean that
+defaults to false.
+
+#### encode()
+
+Return a Buffer containing the header and body for the Pax extended
+header entry, or `null` if there is nothing to encode.
+
+#### encodeBody()
+
+Return a string representing the body of the pax extended header
+entry.
+
+#### encodeField(fieldName)
+
+Return a string representing the key/value encoding for the specified
+fieldName, or `''` if the field is unset.
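+
+A minimal sketch (the over-long path is illustrative):
+
+```js
+const Pax = require('tar').Pax
+
+// a pax extended header carrying a path too long for a plain header
+const pax = new Pax({ path: 'some/extremely/long/path/file.txt' })
+const buf = pax.encode() // header block plus body, or null
+console.log(pax.encodeField('path')) // a '<len> path=<value>\n' record
+```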
+
+### tar.Pax.parse(string, extended, global)
+
+Return a new Pax object created by parsing the contents of the string
+provided.
+
+If the `extended` object is set, then also add the fields from that
+object. (This is necessary because multiple metadata entries can
+occur in sequence.)
+
+### tar.types
+
+A translation table for the `type` field in tar headers.
+
+#### tar.types.name.get(code)
+
+Get the human-readable name for a given alphanumeric code.
-### tar.Parse()
+#### tar.types.code.get(name)
-Returns a writable stream. Write tar data to it and it will emit
-`entry` events for each entry parsed from the tarball. This is used by
-`tar.Extract`.
+Get the alphanumeric code for a given human-readable name.
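+
+For instance, with the standard ustar type codes:
+
+```js
+const types = require('tar').types
+
+types.name.get('0')         // 'File'
+types.code.get('Directory') // '5'
+```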
diff --git a/deps/npm/node_modules/tar/index.js b/deps/npm/node_modules/tar/index.js
new file mode 100644
index 00000000000000..c9ae06e7906c4e
--- /dev/null
+++ b/deps/npm/node_modules/tar/index.js
@@ -0,0 +1,18 @@
+'use strict'
+
+// high-level commands
+exports.c = exports.create = require('./lib/create.js')
+exports.r = exports.replace = require('./lib/replace.js')
+exports.t = exports.list = require('./lib/list.js')
+exports.u = exports.update = require('./lib/update.js')
+exports.x = exports.extract = require('./lib/extract.js')
+
+// classes
+exports.Pack = require('./lib/pack.js')
+exports.Unpack = require('./lib/unpack.js')
+exports.Parse = require('./lib/parse.js')
+exports.ReadEntry = require('./lib/read-entry.js')
+exports.WriteEntry = require('./lib/write-entry.js')
+exports.Header = require('./lib/header.js')
+exports.Pax = require('./lib/pax.js')
+exports.types = require('./lib/types.js')
diff --git a/deps/npm/node_modules/tar/lib/create.js b/deps/npm/node_modules/tar/lib/create.js
new file mode 100644
index 00000000000000..5d46b3ba70621e
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/create.js
@@ -0,0 +1,110 @@
+'use strict'
+
+// tar -c
+const hlo = require('./high-level-opt.js')
+
+const Pack = require('./pack.js')
+const fs = require('fs')
+const t = require('./list.js')
+const path = require('path')
+
+const c = module.exports = (opt_, files, cb) => {
+ if (typeof files === 'function')
+ cb = files
+
+ if (Array.isArray(opt_))
+ files = opt_, opt_ = {}
+
+ if (!files || !Array.isArray(files) || !files.length)
+ throw new TypeError('no files or directories specified')
+
+ files = Array.from(files)
+
+ const opt = hlo(opt_)
+
+ if (opt.sync && typeof cb === 'function')
+ throw new TypeError('callback not supported for sync tar functions')
+
+ if (!opt.file && typeof cb === 'function')
+ throw new TypeError('callback only supported with file option')
+
+ return opt.file && opt.sync ? createFileSync(opt, files)
+ : opt.file ? createFile(opt, files, cb)
+ : opt.sync ? createSync(opt, files)
+ : create(opt, files)
+}
+
+const createFileSync = (opt, files) => {
+ const p = new Pack.Sync(opt)
+
+ let threw = true
+ let fd
+ try {
+ fd = fs.openSync(opt.file, 'w', opt.mode || 0o666)
+ p.on('data', chunk => fs.writeSync(fd, chunk, 0, chunk.length))
+ p.on('end', _ => fs.closeSync(fd))
+ addFilesSync(p, files)
+ threw = false
+ } finally {
+ if (threw)
+ try { fs.closeSync(fd) } catch (er) {}
+ }
+}
+
+const createFile = (opt, files, cb) => {
+ const p = new Pack(opt)
+ const stream = fs.createWriteStream(opt.file, { mode: opt.mode || 0o666 })
+ p.pipe(stream)
+
+ const promise = new Promise((res, rej) => {
+ stream.on('error', rej)
+ stream.on('close', res)
+ p.on('error', rej)
+ })
+
+ addFilesAsync(p, files)
+
+ return cb ? promise.then(cb, cb) : promise
+}
+
+const addFilesSync = (p, files) => {
+ files.forEach(file => {
+ if (file.charAt(0) === '@')
+ t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ sync: true,
+ noResume: true,
+ onentry: entry => p.add(entry)
+ })
+ else
+ p.add(file)
+ })
+ p.end()
+}
+
+const addFilesAsync = (p, files) => {
+ while (files.length) {
+ const file = files.shift()
+ if (file.charAt(0) === '@')
+ return t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ noResume: true,
+ onentry: entry => p.add(entry)
+ }).then(_ => addFilesAsync(p, files))
+ else
+ p.add(file)
+ }
+ p.end()
+}
+
+const createSync = (opt, files) => {
+ const p = new Pack.Sync(opt)
+ addFilesSync(p, files)
+ return p
+}
+
+const create = (opt, files) => {
+ const p = new Pack(opt)
+ addFilesAsync(p, files)
+ return p
+}
diff --git a/deps/npm/node_modules/tar/lib/extract.js b/deps/npm/node_modules/tar/lib/extract.js
index fe1bb976eb0ce2..53ecf67894c830 100644
--- a/deps/npm/node_modules/tar/lib/extract.js
+++ b/deps/npm/node_modules/tar/lib/extract.js
@@ -1,94 +1,127 @@
-// give it a tarball and a path, and it'll dump the contents
+'use strict'
-module.exports = Extract
+// tar -x
+const hlo = require('./high-level-opt.js')
+const Unpack = require('./unpack.js')
+const fs = require('fs')
+const path = require('path')
-var tar = require("../tar.js")
- , fstream = require("fstream")
- , inherits = require("inherits")
- , path = require("path")
+const x = module.exports = (opt_, files, cb) => {
+ if (typeof opt_ === 'function')
+ cb = opt_, files = null, opt_ = {}
+ else if (Array.isArray(opt_))
+ files = opt_, opt_ = {}
-function Extract (opts) {
- if (!(this instanceof Extract)) return new Extract(opts)
- tar.Parse.apply(this)
+ if (typeof files === 'function')
+ cb = files, files = null
- if (typeof opts !== "object") {
- opts = { path: opts }
- }
+ if (!files)
+ files = []
+ else
+ files = Array.from(files)
- // better to drop in cwd? seems more standard.
- opts.path = opts.path || path.resolve("node-tar-extract")
- opts.type = "Directory"
- opts.Directory = true
-
- // similar to --strip or --strip-components
- opts.strip = +opts.strip
- if (!opts.strip || opts.strip <= 0) opts.strip = 0
-
- this._fst = fstream.Writer(opts)
-
- this.pause()
- var me = this
-
- // Hardlinks in tarballs are relative to the root
- // of the tarball. So, they need to be resolved against
- // the target directory in order to be created properly.
- me.on("entry", function (entry) {
- // if there's a "strip" argument, then strip off that many
- // path components.
- if (opts.strip) {
- var p = entry.path.split("/").slice(opts.strip).join("/")
- entry.path = entry.props.path = p
- if (entry.linkpath) {
- var lp = entry.linkpath.split("/").slice(opts.strip).join("/")
- entry.linkpath = entry.props.linkpath = lp
- }
- }
- if (entry.type === "Link") {
- entry.linkpath = entry.props.linkpath =
- path.join(opts.path, path.join("/", entry.props.linkpath))
- }
+ const opt = hlo(opt_)
- if (entry.type === "SymbolicLink") {
- var dn = path.dirname(entry.path) || ""
- var linkpath = entry.props.linkpath
- var target = path.resolve(opts.path, dn, linkpath)
- if (target.indexOf(opts.path) !== 0) {
- linkpath = path.join(opts.path, path.join("/", linkpath))
- }
- entry.linkpath = entry.props.linkpath = linkpath
- }
- })
+ if (opt.sync && typeof cb === 'function')
+ throw new TypeError('callback not supported for sync tar functions')
- this._fst.on("ready", function () {
- me.pipe(me._fst, { end: false })
- me.resume()
- })
+ if (!opt.file && typeof cb === 'function')
+ throw new TypeError('callback only supported with file option')
- this._fst.on('error', function(err) {
- me.emit('error', err)
- })
+ if (files.length)
+ filesFilter(opt, files)
- this._fst.on('drain', function() {
- me.emit('drain')
- })
+ return opt.file && opt.sync ? extractFileSync(opt)
+ : opt.file ? extractFile(opt, cb)
+ : opt.sync ? extractSync(opt)
+ : extract(opt)
+}
+
+// construct a filter that limits the file entries listed
+// include child entries if a dir is included
+const filesFilter = (opt, files) => {
+ const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
+ const filter = opt.filter
- // this._fst.on("end", function () {
- // console.error("\nEEEE Extract End", me._fst.path)
- // })
+ const mapHas = (file, r) => {
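+    // a path matches if it, or any parent directory, was listed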
+ const root = r || path.parse(file).root || '.'
+ const ret = file === root ? false
+ : map.has(file) ? map.get(file)
+ : mapHas(path.dirname(file), root)
- this._fst.on("close", function () {
- // console.error("\nEEEE Extract End", me._fst.path)
- me.emit("finish")
- me.emit("end")
- me.emit("close")
+ map.set(file, ret)
+ return ret
+ }
+
+ opt.filter = filter
+ ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
+ : file => mapHas(file.replace(/\/+$/, ''))
+}
+
+const extractFileSync = opt => {
+ const u = new Unpack.Sync(opt)
+
+ const file = opt.file
+ let threw = true
+ let fd
+ try {
+ const stat = fs.statSync(file)
+ const readSize = opt.maxReadSize || 16*1024*1024
+ if (stat.size < readSize)
+ u.end(fs.readFileSync(file))
+ else {
+ let pos = 0
+ const buf = Buffer.allocUnsafe(readSize)
+ fd = fs.openSync(file, 'r')
+ while (pos < stat.size) {
+ let bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
+ pos += bytesRead
+ u.write(buf.slice(0, bytesRead))
+ }
+ u.end()
+ fs.closeSync(fd)
+ }
+ threw = false
+ } finally {
+ if (threw && fd)
+ try { fs.closeSync(fd) } catch (er) {}
+ }
+}
+
+const extractFile = (opt, cb) => {
+ const u = new Unpack(opt)
+ const readSize = opt.maxReadSize || 16*1024*1024
+
+ const file = opt.file
+ const p = new Promise((resolve, reject) => {
+ u.on('error', reject)
+ u.on('close', resolve)
+
+ fs.stat(file, (er, stat) => {
+ if (er)
+ reject(er)
+ else if (stat.size < readSize)
+ fs.readFile(file, (er, data) => {
+ if (er)
+ return reject(er)
+ u.end(data)
+ })
+ else {
+ const stream = fs.createReadStream(file, {
+ highWaterMark: readSize
+ })
+ stream.on('error', reject)
+ stream.pipe(u)
+ }
+ })
})
+ return cb ? p.then(cb, cb) : p
}
-inherits(Extract, tar.Parse)
+const extractSync = opt => {
+ return new Unpack.Sync(opt)
+}
-Extract.prototype._streamEnd = function () {
- var me = this
- if (!me._ended || me._entry) me.error("unexpected eof")
- me._fst.end()
- // my .end() is coming later.
+const extract = opt => {
+ return new Unpack(opt)
}
diff --git a/deps/npm/node_modules/tar/lib/header.js b/deps/npm/node_modules/tar/lib/header.js
index 05b237c0c7b32a..db002e8c188c85 100644
--- a/deps/npm/node_modules/tar/lib/header.js
+++ b/deps/npm/node_modules/tar/lib/header.js
@@ -1,385 +1,272 @@
+'use strict'
// parse a 512-byte header block to a data object, or vice-versa
-// If the data won't fit nicely in a simple header, then generate
-// the appropriate extended header file, and return that.
-
-module.exports = TarHeader
-
-var tar = require("../tar.js")
- , fields = tar.fields
- , fieldOffs = tar.fieldOffs
- , fieldEnds = tar.fieldEnds
- , fieldSize = tar.fieldSize
- , numeric = tar.numeric
- , assert = require("assert").ok
- , space = " ".charCodeAt(0)
- , slash = "/".charCodeAt(0)
- , bslash = process.platform === "win32" ? "\\".charCodeAt(0) : null
-
-function TarHeader (block) {
- if (!(this instanceof TarHeader)) return new TarHeader(block)
- if (block) this.decode(block)
-}
-
-TarHeader.prototype =
- { decode : decode
- , encode: encode
- , calcSum: calcSum
- , checkSum: checkSum
+// encode returns `true` if a pax extended header is needed, because
+// the data could not be faithfully encoded in a simple header.
+// (Also, check header.needPax to see if it needs a pax header.)
+
+const types = require('./types.js')
+const pathModule = require('path')
+const large = require('./large-numbers.js')
+
+const TYPE = Symbol('type')
+
+class Header {
+ constructor (data, off) {
+ this.cksumValid = false
+ this.needPax = false
+ this.nullBlock = false
+
+ this.block = null
+ this.path = null
+ this.mode = null
+ this.uid = null
+ this.gid = null
+ this.size = null
+ this.mtime = null
+ this.cksum = null
+ this[TYPE] = '0'
+ this.linkpath = null
+ this.uname = null
+ this.gname = null
+ this.devmaj = 0
+ this.devmin = 0
+ this.atime = null
+ this.ctime = null
+
+ if (Buffer.isBuffer(data)) {
+ this.decode(data, off || 0)
+ } else if (data)
+ this.set(data)
}
-TarHeader.parseNumeric = parseNumeric
-TarHeader.encode = encode
-TarHeader.decode = decode
-
-// note that this will only do the normal ustar header, not any kind
-// of extended posix header file. If something doesn't fit comfortably,
-// then it will set obj.needExtended = true, and set the block to
-// the closest approximation.
-function encode (obj) {
- if (!obj && !(this instanceof TarHeader)) throw new Error(
- "encode must be called on a TarHeader, or supplied an object")
-
- obj = obj || this
- var block = obj.block = new Buffer(512)
-
- // if the object has a "prefix", then that's actually an extension of
- // the path field.
- if (obj.prefix) {
- // console.error("%% header encoding, got a prefix", obj.prefix)
- obj.path = obj.prefix + "/" + obj.path
- // console.error("%% header encoding, prefixed path", obj.path)
- obj.prefix = ""
- }
-
- obj.needExtended = false
-
- if (obj.mode) {
- if (typeof obj.mode === "string") obj.mode = parseInt(obj.mode, 8)
- obj.mode = obj.mode & 0777
- }
+ decode (buf, off) {
+ if (!off)
+ off = 0
+
+ if (!buf || !(buf.length >= off + 512))
+ throw new Error('need 512 bytes for header')
+
+ this.path = decString(buf, off, 100)
+ this.mode = decNumber(buf, off + 100, 8)
+ this.uid = decNumber(buf, off + 108, 8)
+ this.gid = decNumber(buf, off + 116, 8)
+ this.size = decNumber(buf, off + 124, 12)
+ this.mtime = decDate(buf, off + 136, 12)
+ this.cksum = decNumber(buf, off + 148, 12)
+
+ // old tar versions marked dirs as a file with a trailing /
+ this[TYPE] = decString(buf, off + 156, 1)
+ if (this[TYPE] === '')
+ this[TYPE] = '0'
+ if (this[TYPE] === '0' && this.path.substr(-1) === '/')
+ this[TYPE] = '5'
+
+ // tar implementations sometimes incorrectly put the stat(dir).size
+ // as the size in the tarball, even though Directory entries are
+ // not able to have any body at all. In the very rare chance that
+ // it actually DOES have a body, we weren't going to do anything with
+ // it anyway, and it'll just be a warning about an invalid header.
+ if (this[TYPE] === '5')
+ this.size = 0
+
+ this.linkpath = decString(buf, off + 157, 100)
+ if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') {
+ this.uname = decString(buf, off + 265, 32)
+ this.gname = decString(buf, off + 297, 32)
+ this.devmaj = decNumber(buf, off + 329, 8)
+ this.devmin = decNumber(buf, off + 337, 8)
+ if (buf[off + 475] !== 0) {
+ // definitely a prefix, definitely >130 chars.
+ const prefix = decString(buf, off + 345, 155)
+ this.path = prefix + '/' + this.path
+ } else {
+ const prefix = decString(buf, off + 345, 130)
+ if (prefix)
+ this.path = prefix + '/' + this.path
+ this.atime = decDate(buf, off + 476, 12)
+ this.ctime = decDate(buf, off + 488, 12)
+ }
+ }
- for (var f = 0; fields[f] !== null; f ++) {
- var field = fields[f]
- , off = fieldOffs[f]
- , end = fieldEnds[f]
- , ret
-
- switch (field) {
- case "cksum":
- // special, done below, after all the others
- break
-
- case "prefix":
- // special, this is an extension of the "path" field.
- // console.error("%% header encoding, skip prefix later")
- break
-
- case "type":
- // convert from long name to a single char.
- var type = obj.type || "0"
- if (type.length > 1) {
- type = tar.types[obj.type]
- if (!type) type = "0"
- }
- writeText(block, off, end, type)
- break
-
- case "path":
- // uses the "prefix" field if > 100 bytes, but <= 255
- var pathLen = Buffer.byteLength(obj.path)
- , pathFSize = fieldSize[fields.path]
- , prefFSize = fieldSize[fields.prefix]
-
- // paths between 100 and 255 should use the prefix field.
- // longer than 255
- if (pathLen > pathFSize &&
- pathLen <= pathFSize + prefFSize) {
- // need to find a slash somewhere in the middle so that
- // path and prefix both fit in their respective fields
- var searchStart = pathLen - 1 - pathFSize
- , searchEnd = prefFSize
- , found = false
- , pathBuf = new Buffer(obj.path)
-
- for ( var s = searchStart
- ; (s <= searchEnd)
- ; s ++ ) {
- if (pathBuf[s] === slash || pathBuf[s] === bslash) {
- found = s
- break
- }
- }
-
- if (found !== false) {
- prefix = pathBuf.slice(0, found).toString("utf8")
- path = pathBuf.slice(found + 1).toString("utf8")
-
- ret = writeText(block, off, end, path)
- off = fieldOffs[fields.prefix]
- end = fieldEnds[fields.prefix]
- // console.error("%% header writing prefix", off, end, prefix)
- ret = writeText(block, off, end, prefix) || ret
- break
- }
- }
-
- // paths less than 100 chars don't need a prefix
- // and paths longer than 255 need an extended header and will fail
- // on old implementations no matter what we do here.
- // Null out the prefix, and fallthrough to default.
- // console.error("%% header writing no prefix")
- var poff = fieldOffs[fields.prefix]
- , pend = fieldEnds[fields.prefix]
- writeText(block, poff, pend, "")
- // fallthrough
-
- // all other fields are numeric or text
- default:
- ret = numeric[field]
- ? writeNumeric(block, off, end, obj[field])
- : writeText(block, off, end, obj[field] || "")
- break
+ let sum = 8 * 0x20
+ for (let i = off; i < off + 148; i++) {
+ sum += buf[i]
+ }
+ for (let i = off + 156; i < off + 512; i++) {
+ sum += buf[i]
}
- obj.needExtended = obj.needExtended || ret
+ this.cksumValid = sum === this.cksum
+ if (this.cksum === null && sum === 8 * 0x20)
+ this.nullBlock = true
}
- var off = fieldOffs[fields.cksum]
- , end = fieldEnds[fields.cksum]
-
- writeNumeric(block, off, end, calcSum.call(this, block))
+ encode (buf, off) {
+ if (!buf) {
+ buf = this.block = Buffer.alloc(512)
+ off = 0
+ }
- return block
-}
+ if (!off)
+ off = 0
+
+ if (!(buf.length >= off + 512))
+ throw new Error('need 512 bytes for header')
+
+ const prefixSize = this.ctime || this.atime ? 130 : 155
+ const split = splitPrefix(this.path || '', prefixSize)
+ const path = split[0]
+ const prefix = split[1]
+ this.needPax = split[2]
+
+ this.needPax = encString(buf, off, 100, path) || this.needPax
+ this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax
+ this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax
+ this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax
+ this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax
+ this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax
+ buf[off + 156] = this[TYPE].charCodeAt(0)
+ this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax
+ buf.write('ustar\u000000', off + 257, 8)
+ this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax
+ this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax
+ this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax
+ this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax
+ this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax
+ if (buf[off + 475] !== 0)
+ this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax
+ else {
+ this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax
+ this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax
+ this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax
+ }
-// if it's a negative number, or greater than will fit,
-// then use write256.
-var MAXNUM = { 12: 077777777777
- , 11: 07777777777
- , 8 : 07777777
- , 7 : 0777777 }
-function writeNumeric (block, off, end, num) {
- var writeLen = end - off
- , maxNum = MAXNUM[writeLen] || 0
-
- num = num || 0
- // console.error(" numeric", num)
-
- if (num instanceof Date ||
- Object.prototype.toString.call(num) === "[object Date]") {
- num = num.getTime() / 1000
- }
+ let sum = 8 * 0x20
+ for (let i = off; i < off + 148; i++) {
+ sum += buf[i]
+ }
+ for (let i = off + 156; i < off + 512; i++) {
+ sum += buf[i]
+ }
+ this.cksum = sum
+ encNumber(buf, off + 148, 8, this.cksum)
+ this.cksumValid = true
- if (num > maxNum || num < 0) {
- write256(block, off, end, num)
- // need an extended header if negative or too big.
- return true
+ return this.needPax
}
- // god, tar is so annoying
- // if the string is small enough, you should put a space
- // between the octal string and the \0, but if it doesn't
- // fit, then don't.
- var numStr = Math.floor(num).toString(8)
- if (num < MAXNUM[writeLen - 1]) numStr += " "
-
- // pad with "0" chars
- if (numStr.length < writeLen) {
- numStr = (new Array(writeLen - numStr.length).join("0")) + numStr
+ set (data) {
+ for (let i in data) {
+ if (data[i] !== null && data[i] !== undefined)
+ this[i] = data[i]
+ }
}
- if (numStr.length !== writeLen - 1) {
- throw new Error("invalid length: " + JSON.stringify(numStr) + "\n" +
- "expected: "+writeLen)
+ get type () {
+ return types.name.get(this[TYPE]) || this[TYPE]
}
- block.write(numStr, off, writeLen, "utf8")
- block[end - 1] = 0
-}
-function write256 (block, off, end, num) {
- var buf = block.slice(off, end)
- var positive = num >= 0
- buf[0] = positive ? 0x80 : 0xFF
-
- // get the number as a base-256 tuple
- if (!positive) num *= -1
- var tuple = []
- do {
- var n = num % 256
- tuple.push(n)
- num = (num - n) / 256
- } while (num)
-
- var bytes = tuple.length
-
- var fill = buf.length - bytes
- for (var i = 1; i < fill; i ++) {
- buf[i] = positive ? 0 : 0xFF
+ get typeKey () {
+ return this[TYPE]
}
- // tuple is a base256 number, with [0] as the *least* significant byte
- // if it's negative, then we need to flip all the bits once we hit the
- // first non-zero bit. The 2's-complement is (0x100 - n), and the 1's-
- // complement is (0xFF - n).
- var zero = true
- for (i = bytes; i > 0; i --) {
- var byte = tuple[bytes - i]
- if (positive) buf[fill + i] = byte
- else if (zero && byte === 0) buf[fill + i] = 0
- else if (zero) {
- zero = false
- buf[fill + i] = 0x100 - byte
- } else buf[fill + i] = 0xFF - byte
+ set type (type) {
+ if (types.code.has(type))
+ this[TYPE] = types.code.get(type)
+ else
+ this[TYPE] = type
}
}
-function writeText (block, off, end, str) {
- // strings are written as utf8, then padded with \0
- var strLen = Buffer.byteLength(str)
- , writeLen = Math.min(strLen, end - off)
- // non-ascii fields need extended headers
- // long fields get truncated
- , needExtended = strLen !== str.length || strLen > writeLen
-
- // write the string, and null-pad
- if (writeLen > 0) block.write(str, off, writeLen, "utf8")
- for (var i = off + writeLen; i < end; i ++) block[i] = 0
-
- return needExtended
-}
-
-function calcSum (block) {
- block = block || this.block
- assert(Buffer.isBuffer(block) && block.length === 512)
-
- if (!block) throw new Error("Need block to checksum")
-
- // now figure out what it would be if the cksum was " "
- var sum = 0
- , start = fieldOffs[fields.cksum]
- , end = fieldEnds[fields.cksum]
-
- for (var i = 0; i < fieldOffs[fields.cksum]; i ++) {
- sum += block[i]
+const splitPrefix = (p, prefixSize) => {
+ const pathSize = 100
+ let pp = p
+ let prefix = ''
+ let ret
+ const root = pathModule.parse(p).root || '.'
+
+ if (Buffer.byteLength(pp) < pathSize)
+ ret = [pp, prefix, false]
+ else {
+ // first set prefix to the dir, and path to the base
+ prefix = pathModule.dirname(pp)
+ pp = pathModule.basename(pp)
+
+ do {
+ // both fit!
+ if (Buffer.byteLength(pp) <= pathSize &&
+ Buffer.byteLength(prefix) <= prefixSize)
+ ret = [pp, prefix, false]
+
+ // prefix fits in prefix, but path doesn't fit in path
+ else if (Buffer.byteLength(pp) > pathSize &&
+ Buffer.byteLength(prefix) <= prefixSize)
+ ret = [pp.substr(0, pathSize - 1), prefix, true]
+
+ else {
+ // make path take a bit from prefix
+ pp = pathModule.join(pathModule.basename(prefix), pp)
+ prefix = pathModule.dirname(prefix)
+ }
+ } while (prefix !== root && !ret)
+
+ // at this point, found no resolution, just truncate
+ if (!ret)
+ ret = [p.substr(0, pathSize - 1), '', true]
}
-
- for (var i = start; i < end; i ++) {
- sum += space
- }
-
- for (var i = end; i < 512; i ++) {
- sum += block[i]
- }
-
- return sum
+ return ret
}
+const decString = (buf, off, size) =>
+ buf.slice(off, off + size).toString('utf8').replace(/\0.*/, '')
-function checkSum (block) {
- var sum = calcSum.call(this, block)
- block = block || this.block
+const decDate = (buf, off, size) =>
+ numToDate(decNumber(buf, off, size))
- var cksum = block.slice(fieldOffs[fields.cksum], fieldEnds[fields.cksum])
- cksum = parseNumeric(cksum)
+const numToDate = num => num === null ? null : new Date(num * 1000)
- return cksum === sum
-}
+const decNumber = (buf, off, size) =>
+ buf[off] & 0x80 ? large.parse(buf.slice(off, off + size))
+ : decSmallNumber(buf, off, size)
-function decode (block) {
- block = block || this.block
- assert(Buffer.isBuffer(block) && block.length === 512)
-
- this.block = block
- this.cksumValid = this.checkSum()
-
- var prefix = null
-
- // slice off each field.
- for (var f = 0; fields[f] !== null; f ++) {
- var field = fields[f]
- , val = block.slice(fieldOffs[f], fieldEnds[f])
-
- switch (field) {
- case "ustar":
- // if not ustar, then everything after that is just padding.
- if (val.toString() !== "ustar\0") {
- this.ustar = false
- return
- } else {
- // console.error("ustar:", val, val.toString())
- this.ustar = val.toString()
- }
- break
-
- // prefix is special, since it might signal the xstar header
- case "prefix":
- var atime = parseNumeric(val.slice(131, 131 + 12))
- , ctime = parseNumeric(val.slice(131 + 12, 131 + 12 + 12))
- if ((val[130] === 0 || val[130] === space) &&
- typeof atime === "number" &&
- typeof ctime === "number" &&
- val[131 + 12] === space &&
- val[131 + 12 + 12] === space) {
- this.atime = atime
- this.ctime = ctime
- val = val.slice(0, 130)
- }
- prefix = val.toString("utf8").replace(/\0+$/, "")
- // console.error("%% header reading prefix", prefix)
- break
-
- // all other fields are null-padding text
- // or a number.
- default:
- if (numeric[field]) {
- this[field] = parseNumeric(val)
- } else {
- this[field] = val.toString("utf8").replace(/\0+$/, "")
- }
- break
- }
- }
+const nanNull = value => isNaN(value) ? null : value
- // if we got a prefix, then prepend it to the path.
- if (prefix) {
- this.path = prefix + "/" + this.path
- // console.error("%% header got a prefix", this.path)
- }
+const decSmallNumber = (buf, off, size) =>
+ nanNull(parseInt(
+ buf.slice(off, off + size)
+ .toString('utf8').replace(/\0.*$/, '').trim(), 8))
+
+// the maximum encodable as a null-terminated octal, by field size
+const MAXNUM = {
+ 12: 0o77777777777,
+ 8 : 0o7777777
}
-function parse256 (buf) {
- // first byte MUST be either 80 or FF
- // 80 for positive, FF for 2's comp
- var positive
- if (buf[0] === 0x80) positive = true
- else if (buf[0] === 0xFF) positive = false
- else return null
-
- // build up a base-256 tuple from the least sig to the highest
- var zero = false
- , tuple = []
- for (var i = buf.length - 1; i > 0; i --) {
- var byte = buf[i]
- if (positive) tuple.push(byte)
- else if (zero && byte === 0) tuple.push(0)
- else if (zero) {
- zero = false
- tuple.push(0x100 - byte)
- } else tuple.push(0xFF - byte)
- }
+const encNumber = (buf, off, size, number) =>
+ number === null ? false :
+ number > MAXNUM[size] || number < 0
+ ? (large.encode(number, buf.slice(off, off + size)), true)
+ : (encSmallNumber(buf, off, size, number), false)
- for (var sum = 0, i = 0, l = tuple.length; i < l; i ++) {
- sum += tuple[i] * Math.pow(256, i)
- }
+const encSmallNumber = (buf, off, size, number) =>
+ buf.write(octalString(number, size), off, size, 'ascii')
- return positive ? sum : -1 * sum
-}
+const octalString = (number, size) =>
+ padOctal(Math.floor(number).toString(8), size)
-function parseNumeric (f) {
- if (f[0] & 0x80) return parse256(f)
+const padOctal = (string, size) =>
+ (string.length === size - 1 ? string
+ : new Array(size - string.length - 1).join('0') + string + ' ') + '\0'
- var str = f.toString("utf8").split("\0")[0].trim()
- , res = parseInt(str, 8)
+const encDate = (buf, off, size, date) =>
+ date === null ? false :
+ encNumber(buf, off, size, date.getTime() / 1000)
- return isNaN(res) ? null : res
-}
+// enough to fill the longest string we've got
+const NULLS = new Array(156).join('\0')
+// pad with nulls, return true if it's longer or non-ascii
+const encString = (buf, off, size, string) =>
+ string === null ? false :
+ (buf.write(string + NULLS, off, size, 'utf8'),
+ string.length !== Buffer.byteLength(string) || string.length > size)
+module.exports = Header
diff --git a/deps/npm/node_modules/tar/lib/high-level-opt.js b/deps/npm/node_modules/tar/lib/high-level-opt.js
new file mode 100644
index 00000000000000..7333db915c0bde
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/high-level-opt.js
@@ -0,0 +1,29 @@
+'use strict'
+
+// turn tar(1) style args like `C` into the more verbose things like `cwd`
+
+const argmap = new Map([
+ ['C', 'cwd'],
+ ['f', 'file'],
+ ['z', 'gzip'],
+ ['P', 'preservePaths'],
+ ['U', 'unlink'],
+ ['strip-components', 'strip'],
+ ['stripComponents', 'strip'],
+ ['keep-newer', 'newer'],
+ ['keepNewer', 'newer'],
+ ['keep-newer-files', 'newer'],
+ ['keepNewerFiles', 'newer'],
+ ['k', 'keep'],
+ ['keep-existing', 'keep'],
+ ['keepExisting', 'keep'],
+ ['m', 'noMtime'],
+ ['no-mtime', 'noMtime'],
+ ['p', 'preserveOwner'],
+ ['L', 'follow'],
+ ['h', 'follow']
+])
+
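+// expand each short key to its verbose name; unknown keys pass through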
+const parse = module.exports = opt => opt ? Object.keys(opt).map(k => [
+ argmap.has(k) ? argmap.get(k) : k, opt[k]
+]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {}
diff --git a/deps/npm/node_modules/tar/lib/large-numbers.js b/deps/npm/node_modules/tar/lib/large-numbers.js
new file mode 100644
index 00000000000000..ff49992630fbee
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/large-numbers.js
@@ -0,0 +1,92 @@
+'use strict'
+// Tar can encode large and negative numbers using a leading byte of
+// 0xff for negative, and 0x80 for positive. The trailing byte in the
+// section will always be 0x20, or in some implementations 0x00.
+// this module encodes and decodes these things.
+
+const encode = exports.encode = (num, buf) => {
+ buf[buf.length - 1] = 0x20
+ if (num < 0)
+ encodeNegative(num, buf)
+ else
+ encodePositive(num, buf)
+ return buf
+}
+
+const encodePositive = (num, buf) => {
+ buf[0] = 0x80
+ for (var i = buf.length - 2; i > 0; i--) {
+ if (num === 0)
+ buf[i] = 0
+ else {
+ buf[i] = num % 0x100
+ num = Math.floor(num / 0x100)
+ }
+ }
+}
+
+const encodeNegative = (num, buf) => {
+ buf[0] = 0xff
+ var flipped = false
+ num = num * -1
+ for (var i = buf.length - 2; i > 0; i--) {
+ var byte
+ if (num === 0)
+ byte = 0
+ else {
+ byte = num % 0x100
+ num = Math.floor(num / 0x100)
+ }
+ if (flipped)
+ buf[i] = onesComp(byte)
+ else if (byte === 0)
+ buf[i] = 0
+ else {
+ flipped = true
+ buf[i] = twosComp(byte)
+ }
+ }
+}
+
+const parse = exports.parse = (buf) => {
+ var post = buf[buf.length - 1]
+ var pre = buf[0]
+ return pre === 0x80 ? pos(buf.slice(1, buf.length - 1))
+ : twos(buf.slice(1, buf.length - 1))
+}
+
+const twos = (buf) => {
+ var len = buf.length
+ var sum = 0
+ var flipped = false
+ for (var i = len - 1; i > -1; i--) {
+ var byte = buf[i]
+ var f
+ if (flipped)
+ f = onesComp(byte)
+ else if (byte === 0)
+ f = byte
+ else {
+ flipped = true
+ f = twosComp(byte)
+ }
+ if (f !== 0)
+ sum += f * Math.pow(256, len - i - 1)
+ }
+ return sum * -1
+}
+
+const pos = (buf) => {
+ var len = buf.length
+ var sum = 0
+ for (var i = len - 1; i > -1; i--) {
+ var byte = buf[i]
+ if (byte !== 0)
+ sum += byte * Math.pow(256, len - i - 1)
+ }
+ return sum
+}
+
+const onesComp = byte => (0xff ^ byte) & 0xff
+
+const twosComp = byte => ((0xff ^ byte) + 1) & 0xff
diff --git a/deps/npm/node_modules/tar/lib/list.js b/deps/npm/node_modules/tar/lib/list.js
new file mode 100644
index 00000000000000..1f5e70bd360bc8
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/list.js
@@ -0,0 +1,132 @@
+'use strict'
+
+// XXX: This shares a lot in common with extract.js
+// maybe some DRY opportunity here?
+
+// tar -t
+const hlo = require('./high-level-opt.js')
+const Parser = require('./parse.js')
+const fs = require('fs')
+const path = require('path')
+
+const t = module.exports = (opt_, files, cb) => {
+ if (typeof opt_ === 'function')
+ cb = opt_, files = null, opt_ = {}
+ else if (Array.isArray(opt_))
+ files = opt_, opt_ = {}
+
+ if (typeof files === 'function')
+ cb = files, files = null
+
+ if (!files)
+ files = []
+ else
+ files = Array.from(files)
+
+ const opt = hlo(opt_)
+
+ if (opt.sync && typeof cb === 'function')
+ throw new TypeError('callback not supported for sync tar functions')
+
+ if (!opt.file && typeof cb === 'function')
+ throw new TypeError('callback only supported with file option')
+
+ if (files.length)
+ filesFilter(opt, files)
+
+ if (!opt.noResume)
+ onentryFunction(opt)
+
+ return opt.file && opt.sync ? listFileSync(opt)
+ : opt.file ? listFile(opt, cb)
+ : list(opt)
+}
+
+const onentryFunction = opt => {
+ const onentry = opt.onentry
+ opt.onentry = onentry ? e => {
+ onentry(e)
+ e.resume()
+ } : e => e.resume()
+}
+
+// construct a filter that limits the file entries listed
+// include child entries if a dir is included
+const filesFilter = (opt, files) => {
+ const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
+ const filter = opt.filter
+
+ const mapHas = (file, r) => {
+ const root = r || path.parse(file).root || '.'
+ const ret = file === root ? false
+ : map.has(file) ? map.get(file)
+ : mapHas(path.dirname(file), root)
+
+ map.set(file, ret)
+ return ret
+ }
+
+ opt.filter = filter
+ ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
+ : file => mapHas(file.replace(/\/+$/, ''))
+}
+
+const listFileSync = opt => {
+ const p = list(opt)
+ const file = opt.file
+ let threw = true
+ let fd
+ try {
+ const stat = fs.statSync(file)
+ const readSize = opt.maxReadSize || 16*1024*1024
+ if (stat.size < readSize) {
+ p.end(fs.readFileSync(file))
+ } else {
+ let pos = 0
+ const buf = Buffer.allocUnsafe(readSize)
+ fd = fs.openSync(file, 'r')
+ while (pos < stat.size) {
+ let bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
+ pos += bytesRead
+ p.write(buf.slice(0, bytesRead))
+ }
+ p.end()
+ }
+ threw = false
+ } finally {
+ if (threw && fd)
+ try { fs.closeSync(fd) } catch (er) {}
+ }
+}
+
+const listFile = (opt, cb) => {
+ const parse = new Parser(opt)
+ const readSize = opt.maxReadSize || 16*1024*1024
+
+ const file = opt.file
+ const p = new Promise((resolve, reject) => {
+ parse.on('error', reject)
+ parse.on('end', resolve)
+
+ fs.stat(file, (er, stat) => {
+ if (er)
+ reject(er)
+ else if (stat.size < readSize)
+ fs.readFile(file, (er, data) => {
+ if (er)
+ return reject(er)
+ parse.end(data)
+ })
+ else {
+ const stream = fs.createReadStream(file, {
+ highWaterMark: readSize
+ })
+ stream.on('error', reject)
+ stream.pipe(parse)
+ }
+ })
+ })
+ return cb ? p.then(cb, cb) : p
+}
+
+const list = opt => new Parser(opt)
diff --git a/deps/npm/node_modules/tar/lib/mkdir.js b/deps/npm/node_modules/tar/lib/mkdir.js
new file mode 100644
index 00000000000000..2a8f461afe4d25
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/mkdir.js
@@ -0,0 +1,207 @@
+'use strict'
+// wrapper around mkdirp for tar's needs.
+
+// TODO: This should probably be a class, not functionally
+// passing around state in a gazillion args.
+
+const mkdirp = require('mkdirp')
+const fs = require('fs')
+const path = require('path')
+const chownr = require('chownr')
+
+class SymlinkError extends Error {
+ constructor (symlink, path) {
+ super('Cannot extract through symbolic link')
+ this.path = path
+ this.symlink = symlink
+ }
+
+ get name () {
+    return 'SymlinkError'
+ }
+}
+
+class CwdError extends Error {
+ constructor (path, code) {
+ super(code + ': Cannot cd into \'' + path + '\'')
+ this.path = path
+ this.code = code
+ }
+
+ get name () {
+ return 'CwdError'
+ }
+}
+
+const mkdir = module.exports = (dir, opt, cb) => {
+ // if there's any overlap between mask and mode,
+ // then we'll need an explicit chmod
+ const umask = opt.umask
+ const mode = opt.mode | 0o0700
+ const needChmod = (mode & umask) !== 0
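+  // e.g. (illustrative) with umask 0o022 and mode 0o775,
+  // (0o775 & 0o022) === 0o020, so the group-write bit that mkdir's
+  // umask handling strips must be restored with an explicit chmod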
+
+ const uid = opt.uid
+ const gid = opt.gid
+ const doChown = typeof uid === 'number' &&
+ typeof gid === 'number' &&
+ ( uid !== opt.processUid || gid !== opt.processGid )
+
+ const preserve = opt.preserve
+ const unlink = opt.unlink
+ const cache = opt.cache
+ const cwd = opt.cwd
+
+ const done = (er, created) => {
+ if (er)
+ cb(er)
+ else {
+ cache.set(dir, true)
+ if (created && doChown)
+ chownr(created, uid, gid, er => done(er))
+ else if (needChmod)
+ fs.chmod(dir, mode, cb)
+ else
+ cb()
+ }
+ }
+
+ if (cache && cache.get(dir) === true)
+ return done()
+
+ if (dir === cwd)
+ return fs.lstat(dir, (er, st) => {
+ if (er || !st.isDirectory())
+ er = new CwdError(dir, er && er.code || 'ENOTDIR')
+ done(er)
+ })
+
+ if (preserve)
+ return mkdirp(dir, mode, done)
+
+ const sub = path.relative(cwd, dir)
+ const parts = sub.split(/\/|\\/)
+ mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
+}
+
+const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
+ if (!parts.length)
+ return cb(null, created)
+ const p = parts.shift()
+ const part = base + '/' + p
+ if (cache.get(part))
+ return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
+}
+
+const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
+ if (er) {
+ if (er.path && path.dirname(er.path) === cwd &&
+ (er.code === 'ENOTDIR' || er.code === 'ENOENT'))
+ return cb(new CwdError(cwd, er.code))
+
+ fs.lstat(part, (statEr, st) => {
+ if (statEr)
+ cb(statEr)
+ else if (st.isDirectory())
+ mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ else if (unlink)
+ fs.unlink(part, er => {
+ if (er)
+ return cb(er)
+ fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
+ })
+ else if (st.isSymbolicLink())
+ return cb(new SymlinkError(part, part + '/' + parts.join('/')))
+ else
+ cb(er)
+ })
+ } else {
+ created = created || part
+ mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ }
+}
+
+const mkdirSync = module.exports.sync = (dir, opt) => {
+ // if there's any overlap between mask and mode,
+ // then we'll need an explicit chmod
+ const umask = opt.umask
+ const mode = opt.mode | 0o0700
+ const needChmod = (mode & umask) !== 0
+
+ const uid = opt.uid
+ const gid = opt.gid
+ const doChown = typeof uid === 'number' &&
+ typeof gid === 'number' &&
+ ( uid !== opt.processUid || gid !== opt.processGid )
+
+ const preserve = opt.preserve
+ const unlink = opt.unlink
+ const cache = opt.cache
+ const cwd = opt.cwd
+
+ const done = (created) => {
+ cache.set(dir, true)
+ if (created && doChown)
+ chownr.sync(created, uid, gid)
+ if (needChmod)
+ fs.chmodSync(dir, mode)
+ cache.set(dir, true)
+ }
+
+ if (cache && cache.get(dir) === true)
+ return done()
+
+ if (dir === cwd) {
+ let ok = false
+ let code = 'ENOTDIR'
+ try {
+ ok = fs.lstatSync(dir).isDirectory()
+ } catch (er) {
+ code = er.code
+ } finally {
+ if (!ok)
+ throw new CwdError(dir, code)
+ }
+ done()
+ return
+ }
+
+ if (preserve)
+ return done(mkdirp.sync(dir, mode))
+
+ const sub = path.relative(cwd, dir)
+ const parts = sub.split(/\/|\\/)
+ let created = null
+ for (let p = parts.shift(), part = cwd;
+ p && (part += '/' + p);
+ p = parts.shift()) {
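+    // note: the loop condition does double duty, appending each path
+    // component to `part` and ending the walk when parts runs out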
+
+ if (cache.get(part))
+ continue
+
+ try {
+ fs.mkdirSync(part, mode)
+ created = created || part
+ cache.set(part, true)
+ } catch (er) {
+ if (er.path && path.dirname(er.path) === cwd &&
+ (er.code === 'ENOTDIR' || er.code === 'ENOENT'))
+ return new CwdError(cwd, er.code)
+
+ const st = fs.lstatSync(part)
+ if (st.isDirectory()) {
+ cache.set(part, true)
+ continue
+ } else if (unlink) {
+ fs.unlinkSync(part)
+ fs.mkdirSync(part, mode)
+ created = created || part
+ cache.set(part, true)
+ continue
+ } else if (st.isSymbolicLink())
+ return new SymlinkError(part, part + '/' + parts.join('/'))
+ }
+ }
+
+ return done(created)
+}
diff --git a/deps/npm/node_modules/tar/lib/pack.js b/deps/npm/node_modules/tar/lib/pack.js
index 5a3bb95a121bdb..09b6ac590b7c48 100644
--- a/deps/npm/node_modules/tar/lib/pack.js
+++ b/deps/npm/node_modules/tar/lib/pack.js
@@ -1,236 +1,399 @@
-// pipe in an fstream, and it'll make a tarball.
-// key-value pair argument is global extended header props.
-
-module.exports = Pack
-
-var EntryWriter = require("./entry-writer.js")
- , Stream = require("stream").Stream
- , path = require("path")
- , inherits = require("inherits")
- , GlobalHeaderWriter = require("./global-header-writer.js")
- , collect = require("fstream").collect
- , eof = new Buffer(512)
-
-for (var i = 0; i < 512; i ++) eof[i] = 0
-
-inherits(Pack, Stream)
-
-function Pack (props) {
- // console.error("-- p ctor")
- var me = this
- if (!(me instanceof Pack)) return new Pack(props)
-
- if (props) me._noProprietary = props.noProprietary
- else me._noProprietary = false
-
- me._global = props
-
- me.readable = true
- me.writable = true
- me._buffer = []
- // console.error("-- -- set current to null in ctor")
- me._currentEntry = null
- me._processing = false
-
- me._pipeRoot = null
- me.on("pipe", function (src) {
- if (src.root === me._pipeRoot) return
- me._pipeRoot = src
- src.on("end", function () {
- me._pipeRoot = null
- })
- me.add(src)
- })
+'use strict'
+
+// A readable tar stream creator
+// Technically, this is a transform stream that you write paths into,
+// and tar format comes out of.
+// The `add()` method is like `write()` but returns this,
+// and end() returns `this` as well, so you can
+// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
+// You could also do something like:
+// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
+
+class PackJob {
+ constructor (path, absolute) {
+ this.path = path || './'
+ this.absolute = absolute
+ this.entry = null
+ this.stat = null
+ this.readdir = null
+ this.pending = false
+ this.ignore = false
+ this.piped = false
+ }
}
-Pack.prototype.addGlobal = function (props) {
- // console.error("-- p addGlobal")
- if (this._didGlobal) return
- this._didGlobal = true
-
- var me = this
- GlobalHeaderWriter(props)
- .on("data", function (c) {
- me.emit("data", c)
- })
- .end()
-}
+const MiniPass = require('minipass')
+const zlib = require('minizlib')
+const ReadEntry = require('./read-entry.js')
+const WriteEntry = require('./write-entry.js')
+const WriteEntrySync = WriteEntry.Sync
+const WriteEntryTar = WriteEntry.Tar
+const Yallist = require('yallist')
+const EOF = Buffer.alloc(1024)
+const ONSTAT = Symbol('onStat')
+const ENDED = Symbol('ended')
+const QUEUE = Symbol('queue')
+const CURRENT = Symbol('current')
+const PROCESS = Symbol('process')
+const PROCESSING = Symbol('processing')
+const PROCESSJOB = Symbol('processJob')
+const JOBS = Symbol('jobs')
+const JOBDONE = Symbol('jobDone')
+const ADDFSENTRY = Symbol('addFSEntry')
+const ADDTARENTRY = Symbol('addTarEntry')
+const STAT = Symbol('stat')
+const READDIR = Symbol('readdir')
+const ONREADDIR = Symbol('onreaddir')
+const PIPE = Symbol('pipe')
+const ENTRY = Symbol('entry')
+const ENTRYOPT = Symbol('entryOpt')
+const WRITEENTRYCLASS = Symbol('writeEntryClass')
+const WRITE = Symbol('write')
+const ONDRAIN = Symbol('ondrain')
+
+const fs = require('fs')
+const path = require('path')
+const warner = require('./warn-mixin.js')
+
+const Pack = warner(class Pack extends MiniPass {
+ constructor (opt) {
+ super(opt)
+ opt = opt || Object.create(null)
+ this.opt = opt
+ this.cwd = opt.cwd || process.cwd()
+ this.maxReadSize = opt.maxReadSize
+ this.preservePaths = !!opt.preservePaths
+ this.strict = !!opt.strict
+ this.noPax = !!opt.noPax
+ this.prefix = (opt.prefix || '').replace(/(\\|\/)+$/, '')
+ this.linkCache = opt.linkCache || new Map()
+ this.statCache = opt.statCache || new Map()
+ this.readdirCache = opt.readdirCache || new Map()
+ this[WRITEENTRYCLASS] = WriteEntry
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+
+ this.zip = null
+ if (opt.gzip) {
+ if (typeof opt.gzip !== 'object')
+ opt.gzip = {}
+ this.zip = new zlib.Gzip(opt.gzip)
+ this.zip.on('data', chunk => super.write(chunk))
+ this.zip.on('end', _ => super.end())
+ this.zip.on('drain', _ => this[ONDRAIN]())
+ this.on('resume', _ => this.zip.resume())
+ } else
+ this.on('drain', this[ONDRAIN])
+
+ this.portable = !!opt.portable
+ this.noDirRecurse = !!opt.noDirRecurse
+ this.follow = !!opt.follow
+
+ this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
+
+    this[QUEUE] = new Yallist()
+ this[JOBS] = 0
+ this.jobs = +opt.jobs || 4
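+    // (illustrative note) at most `jobs` filesystem operations are kept
+    // in flight at once; [PROCESS]() below stops queueing at this limit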
+ this[PROCESSING] = false
+ this[ENDED] = false
+ }
-Pack.prototype.add = function (stream) {
- if (this._global && !this._didGlobal) this.addGlobal(this._global)
+ [WRITE] (chunk) {
+ return super.write(chunk)
+ }
- if (this._ended) return this.emit("error", new Error("add after end"))
+ add (path) {
+ this.write(path)
+ return this
+ }
- collect(stream)
- this._buffer.push(stream)
- this._process()
- this._needDrain = this._buffer.length > 0
- return !this._needDrain
-}
+ end (path) {
+ if (path)
+ this.write(path)
+ this[ENDED] = true
+ this[PROCESS]()
+ return this
+ }
-Pack.prototype.pause = function () {
- this._paused = true
- if (this._currentEntry) this._currentEntry.pause()
- this.emit("pause")
-}
+ write (path) {
+ if (this[ENDED])
+ throw new Error('write after end')
-Pack.prototype.resume = function () {
- this._paused = false
- if (this._currentEntry) this._currentEntry.resume()
- this.emit("resume")
- this._process()
-}
+ if (path instanceof ReadEntry)
+ this[ADDTARENTRY](path)
+ else
+ this[ADDFSENTRY](path)
+ return this.flowing
+ }
-Pack.prototype.end = function () {
- this._ended = true
- this._buffer.push(eof)
- this._process()
-}
+ [ADDTARENTRY] (p) {
+ const absolute = path.resolve(this.cwd, p.path)
+ if (this.prefix)
+ p.path = this.prefix + '/' + p.path.replace(/^\.(\/+|$)/, '')
+
+ // in this case, we don't have to wait for the stat
+ if (!this.filter(p.path, p))
+ p.resume()
+ else {
+ const job = new PackJob(p.path, absolute, false)
+ job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
+ job.entry.on('end', _ => this[JOBDONE](job))
+ this[JOBS] += 1
+ this[QUEUE].push(job)
+ }
-Pack.prototype._process = function () {
- var me = this
- if (me._paused || me._processing) {
- return
+ this[PROCESS]()
}
- var entry = me._buffer.shift()
+ [ADDFSENTRY] (p) {
+ const absolute = path.resolve(this.cwd, p)
+ if (this.prefix)
+ p = this.prefix + '/' + p.replace(/^\.(\/+|$)/, '')
- if (!entry) {
- if (me._needDrain) {
- me.emit("drain")
- }
- return
+ this[QUEUE].push(new PackJob(p, absolute))
+ this[PROCESS]()
}
- if (entry.ready === false) {
- // console.error("-- entry is not ready", entry)
- me._buffer.unshift(entry)
- entry.on("ready", function () {
- // console.error("-- -- ready!", entry)
- me._process()
+ [STAT] (job) {
+ job.pending = true
+ this[JOBS] += 1
+ const stat = this.follow ? 'stat' : 'lstat'
+ fs[stat](job.absolute, (er, stat) => {
+ job.pending = false
+ this[JOBS] -= 1
+ if (er)
+ this.emit('error', er)
+ else
+ this[ONSTAT](job, stat)
})
- return
}
- me._processing = true
+ [ONSTAT] (job, stat) {
+ this.statCache.set(job.absolute, stat)
+ job.stat = stat
+
+ // now we have the stat, we can filter it.
+ if (!this.filter(job.path, stat))
+ job.ignore = true
- if (entry === eof) {
- // need 2 ending null blocks.
- me.emit("data", eof)
- me.emit("data", eof)
- me.emit("end")
- me.emit("close")
- return
+ this[PROCESS]()
}
- // Change the path to be relative to the root dir that was
- // added to the tarball.
- //
- // XXX This should be more like how -C works, so you can
- // explicitly set a root dir, and also explicitly set a pathname
- // in the tarball to use. That way we can skip a lot of extra
- // work when resolving symlinks for bundled dependencies in npm.
+ [READDIR] (job) {
+ job.pending = true
+ this[JOBS] += 1
+ fs.readdir(job.absolute, (er, entries) => {
+ job.pending = false
+ this[JOBS] -= 1
+ if (er)
+ return this.emit('error', er)
+ this[ONREADDIR](job, entries)
+ })
+ }
- var root = path.dirname((entry.root || entry).path);
- if (me._global && me._global.fromBase && entry.root && entry.root.path) {
- // user set 'fromBase: true' indicating tar root should be directory itself
- root = entry.root.path;
+ [ONREADDIR] (job, entries) {
+ this.readdirCache.set(job.absolute, entries)
+ job.readdir = entries
+ this[PROCESS]()
}
- var wprops = {}
+ [PROCESS] () {
+ if (this[PROCESSING])
+ return
- Object.keys(entry.props || {}).forEach(function (k) {
- wprops[k] = entry.props[k]
- })
+ this[PROCESSING] = true
+ for (let w = this[QUEUE].head;
+ w !== null && this[JOBS] < this.jobs;
+ w = w.next) {
+ this[PROCESSJOB](w.value)
+ if (w.value.ignore) {
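+        // (note) removeNode nulls w.next, so save and restore it to
+        // keep the iteration going after dropping the ignored job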
+ const p = w.next
+ this[QUEUE].removeNode(w)
+ w.next = p
+ }
+ }
- if (me._noProprietary) wprops.noProprietary = true
+ this[PROCESSING] = false
- wprops.path = path.relative(root, entry.path || '')
+ if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
+ if (this.zip)
+ this.zip.end(EOF)
+ else {
+ super.write(EOF)
+ super.end()
+ }
+ }
+ }
- // actually not a matter of opinion or taste.
- if (process.platform === "win32") {
- wprops.path = wprops.path.replace(/\\/g, "/")
+ get [CURRENT] () {
+ return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
}
- if (!wprops.type)
- wprops.type = 'Directory'
+ [JOBDONE] (job) {
+ this[QUEUE].shift()
+ this[JOBS] -= 1
+ this[PROCESS]()
+ }
- switch (wprops.type) {
- // sockets not supported
- case "Socket":
+ [PROCESSJOB] (job) {
+ if (job.pending)
return
- case "Directory":
- wprops.path += "/"
- wprops.size = 0
- break
-
- case "Link":
- var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
- wprops.linkpath = path.relative(root, lp) || "."
- wprops.size = 0
- break
+ if (job.entry) {
+ if (job === this[CURRENT] && !job.piped)
+ this[PIPE](job)
+ return
+ }
- case "SymbolicLink":
- var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
- wprops.linkpath = path.relative(path.dirname(entry.path), lp) || "."
- wprops.size = 0
- break
- }
+ if (!job.stat) {
+ if (this.statCache.has(job.absolute))
+ this[ONSTAT](job, this.statCache.get(job.absolute))
+ else
+ this[STAT](job)
+ }
+ if (!job.stat)
+ return
- // console.error("-- new writer", wprops)
- // if (!wprops.type) {
- // // console.error("-- no type?", entry.constructor.name, entry)
- // }
+ // filtered out!
+ if (job.ignore)
+ return
- // console.error("-- -- set current to new writer", wprops.path)
- var writer = me._currentEntry = EntryWriter(wprops)
+ if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
+ if (this.readdirCache.has(job.absolute))
+ this[ONREADDIR](job, this.readdirCache.get(job.absolute))
+ else
+ this[READDIR](job)
+ if (!job.readdir)
+ return
+ }
- writer.parent = me
+ // we know it doesn't have an entry, because that got checked above
+ job.entry = this[ENTRY](job)
+ if (!job.entry) {
+ job.ignore = true
+ return
+ }
- // writer.on("end", function () {
- // // console.error("-- -- writer end", writer.path)
- // })
+ if (job === this[CURRENT] && !job.piped)
+ this[PIPE](job)
+ }
- writer.on("data", function (c) {
- me.emit("data", c)
- })
+ [ENTRYOPT] (job) {
+ return {
+ onwarn: (msg, data) => {
+ this.warn(msg, data)
+ },
+ noPax: this.noPax,
+ cwd: this.cwd,
+ absolute: job.absolute,
+ preservePaths: this.preservePaths,
+ maxReadSize: this.maxReadSize,
+ strict: this.strict,
+ portable: this.portable,
+ linkCache: this.linkCache,
+ statCache: this.statCache
+ }
+ }
- writer.on("header", function () {
- Buffer.prototype.toJSON = function () {
- return this.toString().split(/\0/).join(".")
+ [ENTRY] (job) {
+ this[JOBS] += 1
+ try {
+ return new this[WRITEENTRYCLASS](
+ job.path, this[ENTRYOPT](job)).on('end', _ => {
+ this[JOBDONE](job)
+ }).on('error', er => this.emit('error', er))
+ } catch (er) {
+ this.emit('error', er)
}
- // console.error("-- -- writer header %j", writer.props)
- if (writer.props.size === 0) nextEntry()
- })
- writer.on("close", nextEntry)
+ }
+
+ [ONDRAIN] () {
+ if (this[CURRENT] && this[CURRENT].entry)
+ this[CURRENT].entry.resume()
+ }
+
+ // like .pipe() but using super, because our write() is special
+ [PIPE] (job) {
+ job.piped = true
+
+ if (job.readdir)
+ job.readdir.forEach(entry => {
+ const p = this.prefix ?
+ job.path.slice(this.prefix.length + 1) || './'
+ : job.path
+
+ const base = p === './' ? '' : p.replace(/\/*$/, '/')
+ this[ADDFSENTRY](base + entry)
+ })
+
+ const source = job.entry
+ const zip = this.zip
+
+ if (zip)
+ source.on('data', chunk => {
+ if (!zip.write(chunk))
+ source.pause()
+ })
+ else
+ source.on('data', chunk => {
+ if (!super.write(chunk))
+ source.pause()
+ })
+ }
- var ended = false
- function nextEntry () {
- if (ended) return
- ended = true
+ pause () {
+ if (this.zip)
+ this.zip.pause()
+ return super.pause()
+ }
+})
- // console.error("-- -- writer close", writer.path)
- // console.error("-- -- set current to null", wprops.path)
- me._currentEntry = null
- me._processing = false
- me._process()
+class PackSync extends Pack {
+ constructor (opt) {
+ super(opt)
+ this[WRITEENTRYCLASS] = WriteEntrySync
}
- writer.on("error", function (er) {
- // console.error("-- -- writer error", writer.path)
- me.emit("error", er)
- })
+ // pause/resume are no-ops in sync streams.
+ pause () {}
+ resume () {}
+
+ [STAT] (job) {
+ const stat = this.follow ? 'statSync' : 'lstatSync'
+ this[ONSTAT](job, fs[stat](job.absolute))
+ }
- // if it's the root, then there's no need to add its entries,
- // or data, since they'll be added directly.
- if (entry === me._pipeRoot) {
- // console.error("-- is the root, don't auto-add")
- writer.add = null
+ [READDIR] (job, stat) {
+ this[ONREADDIR](job, fs.readdirSync(job.absolute))
}
- entry.pipe(writer)
+ // gotta get it all in this tick
+ [PIPE] (job) {
+ const source = job.entry
+ const zip = this.zip
+
+ if (job.readdir)
+ job.readdir.forEach(entry => {
+ const p = this.prefix ?
+ job.path.slice(this.prefix.length + 1) || './'
+ : job.path
+
+
+ const base = p === './' ? '' : p.replace(/\/*$/, '/')
+ this[ADDFSENTRY](base + entry)
+ })
+
+ if (zip)
+ source.on('data', chunk => {
+ zip.write(chunk)
+ })
+ else
+ source.on('data', chunk => {
+ super[WRITE](chunk)
+ })
+ }
}
-Pack.prototype.destroy = function () {}
-Pack.prototype.write = function () {}
+Pack.Sync = PackSync
+
+module.exports = Pack
diff --git a/deps/npm/node_modules/tar/lib/parse.js b/deps/npm/node_modules/tar/lib/parse.js
index 600ad782f0f61d..63c7ee9cefd6e5 100644
--- a/deps/npm/node_modules/tar/lib/parse.js
+++ b/deps/npm/node_modules/tar/lib/parse.js
@@ -1,275 +1,415 @@
+'use strict'
+
+// this[BUFFER] is the remainder of a chunk if we're waiting for
+// the full 512 bytes of a header to come in. We will Buffer.concat()
+// it to the next write(), which is a mem copy, but a small one.
+//
+// this[QUEUE] is a Yallist of entries that haven't been emitted
+// yet. This can only get filled up if the user keeps write()ing after
+// a write() returns false, or does a write() with more than one entry
+//
+// We don't buffer chunks, we always parse them and either create an
+// entry, or push it into the active entry. The ReadEntry class knows
+// to throw data away if .ignore=true
+//
+// Shift entry off the buffer when it emits 'end', and emit 'entry' for
+// the next one in the list.
+//
+// At any time, we're pushing body chunks into the entry at WRITEENTRY,
+// and waiting for 'end' on the entry at READENTRY
+//
+// ignored entries get .resume() called on them straight away
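+//
+// A minimal usage sketch (illustrative; someTarByteStream is any
+// readable stream of tar bytes, not a name from this module):
+//   const p = new Parser({ onentry: entry => entry.resume() })
+//   someTarByteStream.pipe(p)   // works because this.writable === true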
+
+const warner = require('./warn-mixin.js')
+const path = require('path')
+const Header = require('./header.js')
+const EE = require('events')
+const Yallist = require('yallist')
+const maxMetaEntrySize = 1024 * 1024
+const Entry = require('./read-entry.js')
+const Pax = require('./pax.js')
+const zlib = require('minizlib')
+
+const gzipHeader = new Buffer([0x1f, 0x8b])
+const STATE = Symbol('state')
+const WRITEENTRY = Symbol('writeEntry')
+const READENTRY = Symbol('readEntry')
+const NEXTENTRY = Symbol('nextEntry')
+const PROCESSENTRY = Symbol('processEntry')
+const EX = Symbol('extendedHeader')
+const GEX = Symbol('globalExtendedHeader')
+const META = Symbol('meta')
+const EMITMETA = Symbol('emitMeta')
+const BUFFER = Symbol('buffer')
+const QUEUE = Symbol('queue')
+const ENDED = Symbol('ended')
+const EMITTEDEND = Symbol('emittedEnd')
+const EMIT = Symbol('emit')
+const UNZIP = Symbol('unzip')
+const CONSUMECHUNK = Symbol('consumeChunk')
+const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
+const CONSUMEBODY = Symbol('consumeBody')
+const CONSUMEMETA = Symbol('consumeMeta')
+const CONSUMEHEADER = Symbol('consumeHeader')
+const CONSUMING = Symbol('consuming')
+const BUFFERCONCAT = Symbol('bufferConcat')
+const MAYBEEND = Symbol('maybeEnd')
+const WRITING = Symbol('writing')
+const ABORTED = Symbol('aborted')
+const DONE = Symbol('onDone')
+
+const noop = _ => true
+
+module.exports = warner(class Parser extends EE {
+ constructor (opt) {
+ opt = opt || {}
+ super(opt)
+
+ if (opt.ondone)
+ this.on(DONE, opt.ondone)
+ else
+ this.on(DONE, _ => {
+ this.emit('prefinish')
+ this.emit('finish')
+ this.emit('end')
+ this.emit('close')
+ })
+
+ this.strict = !!opt.strict
+ this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
+ this.filter = typeof opt.filter === 'function' ? opt.filter : noop
+
+ // have to set this so that streams are ok piping into it
+ this.writable = true
+ this.readable = false
+
+ this[QUEUE] = new Yallist()
+ this[BUFFER] = null
+ this[READENTRY] = null
+ this[WRITEENTRY] = null
+ this[STATE] = 'begin'
+ this[META] = ''
+ this[EX] = null
+ this[GEX] = null
+ this[ENDED] = false
+ this[UNZIP] = null
+ this[ABORTED] = false
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+ if (typeof opt.onentry === 'function')
+ this.on('entry', opt.onentry)
+ }
+
+ [CONSUMEHEADER] (chunk, position) {
+ const header = new Header(chunk, position)
-// A writable stream.
-// It emits "entry" events, which provide a readable stream that has
-// header info attached.
-
-module.exports = Parse.create = Parse
-
-var stream = require("stream")
- , Stream = stream.Stream
- , BlockStream = require("block-stream")
- , tar = require("../tar.js")
- , TarHeader = require("./header.js")
- , Entry = require("./entry.js")
- , BufferEntry = require("./buffer-entry.js")
- , ExtendedHeader = require("./extended-header.js")
- , assert = require("assert").ok
- , inherits = require("inherits")
- , fstream = require("fstream")
-
-// reading a tar is a lot like reading a directory
-// However, we're actually not going to run the ctor,
-// since it does a stat and various other stuff.
-// This inheritance gives us the pause/resume/pipe
-// behavior that is desired.
-inherits(Parse, fstream.Reader)
-
-function Parse () {
- var me = this
- if (!(me instanceof Parse)) return new Parse()
-
- // doesn't apply fstream.Reader ctor?
- // no, becasue we don't want to stat/etc, we just
- // want to get the entry/add logic from .pipe()
- Stream.apply(me)
-
- me.writable = true
- me.readable = true
- me._stream = new BlockStream(512)
- me.position = 0
- me._ended = false
-
- me._stream.on("error", function (e) {
- me.emit("error", e)
- })
-
- me._stream.on("data", function (c) {
- me._process(c)
- })
-
- me._stream.on("end", function () {
- me._streamEnd()
- })
-
- me._stream.on("drain", function () {
- me.emit("drain")
- })
-}
-
-// overridden in Extract class, since it needs to
-// wait for its DirWriter part to finish before
-// emitting "end"
-Parse.prototype._streamEnd = function () {
- var me = this
- if (!me._ended || me._entry) me.error("unexpected eof")
- me.emit("end")
-}
-
-// a tar reader is actually a filter, not just a readable stream.
-// So, you should pipe a tarball stream into it, and it needs these
-// write/end methods to do that.
-Parse.prototype.write = function (c) {
- if (this._ended) {
- // gnutar puts a LOT of nulls at the end.
- // you can keep writing these things forever.
- // Just ignore them.
- for (var i = 0, l = c.length; i > l; i ++) {
- if (c[i] !== 0) return this.error("write() after end()")
+ if (header.nullBlock)
+ this[EMIT]('nullBlock')
+ else if (!header.cksumValid)
+ this.warn('invalid entry', header)
+ else if (!header.path)
+ this.warn('invalid: path is required', header)
+ else {
+ const type = header.type
+ if (/^(Symbolic)?Link$/.test(type) && !header.linkpath)
+ this.warn('invalid: linkpath required', header)
+ else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath)
+ this.warn('invalid: linkpath forbidden', header)
+ else {
+ const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
+
+ if (entry.meta) {
+ if (entry.size > this.maxMetaEntrySize) {
+ entry.ignore = true
+ this[EMIT]('ignoredEntry', entry)
+ this[STATE] = 'ignore'
+ } else if (entry.size > 0) {
+ this[META] = ''
+ entry.on('data', c => this[META] += c)
+ this[STATE] = 'meta'
+ }
+ } else {
+
+ this[EX] = null
+ entry.ignore = entry.ignore || !this.filter(entry.path, entry)
+ if (entry.ignore) {
+ this[EMIT]('ignoredEntry', entry)
+ this[STATE] = entry.remain ? 'ignore' : 'begin'
+ } else {
+ if (entry.remain)
+ this[STATE] = 'body'
+ else {
+ this[STATE] = 'begin'
+ entry.end()
+ }
+
+ if (!this[READENTRY]) {
+ this[QUEUE].push(entry)
+ this[NEXTENTRY]()
+ } else
+ this[QUEUE].push(entry)
+ }
+ }
+ }
}
- return
}
- return this._stream.write(c)
-}
-
-Parse.prototype.end = function (c) {
- this._ended = true
- return this._stream.end(c)
-}
-
-// don't need to do anything, since we're just
-// proxying the data up from the _stream.
-// Just need to override the parent's "Not Implemented"
-// error-thrower.
-Parse.prototype._read = function () {}
-
-Parse.prototype._process = function (c) {
- assert(c && c.length === 512, "block size should be 512")
-
- // one of three cases.
- // 1. A new header
- // 2. A part of a file/extended header
- // 3. One of two or more EOF null blocks
-
- if (this._entry) {
- var entry = this._entry
- if(!entry._abort) entry.write(c)
+
+ [PROCESSENTRY] (entry) {
+ let go = true
+
+ if (!entry) {
+ this[READENTRY] = null
+ go = false
+ } else if (Array.isArray(entry))
+ this.emit.apply(this, entry)
else {
- entry._remaining -= c.length
- if(entry._remaining < 0) entry._remaining = 0
+ this[READENTRY] = entry
+ this.emit('entry', entry)
+ if (!entry.emittedEnd) {
+ entry.on('end', _ => this[NEXTENTRY]())
+ go = false
+ }
}
- if (entry._remaining === 0) {
+
+ return go
+ }
+
+ [NEXTENTRY] () {
+ do {} while (this[PROCESSENTRY](this[QUEUE].shift()))
+
+ if (!this[QUEUE].length) {
+ // At this point, there's nothing in the queue, but we may have an
+ // entry which is being consumed (readEntry).
+ // If we don't, then we definitely can handle more data.
+ // If we do, and either it's flowing, or it has never had any data
+ // written to it, then it needs more.
+ // The only other possibility is that it has returned false from a
+ // write() call, so we wait for the next drain to continue.
+ const re = this[READENTRY]
+ const drainNow = !re || re.flowing || re.size === re.remain
+ if (drainNow) {
+ if (!this[WRITING])
+ this.emit('drain')
+ } else
+ re.once('drain', _ => this.emit('drain'))
+ }
+ }
+
+ [CONSUMEBODY] (chunk, position) {
+ // write up to but no more than writeEntry.blockRemain
+ const entry = this[WRITEENTRY]
+ const br = entry.blockRemain
+ const c = (br >= chunk.length && position === 0) ? chunk
+ : chunk.slice(position, position + br)
+
+ entry.write(c)
+
+ if (!entry.blockRemain) {
+ this[STATE] = 'begin'
+ this[WRITEENTRY] = null
entry.end()
- this._entry = null
- }
- } else {
- // either zeroes or a header
- var zero = true
- for (var i = 0; i < 512 && zero; i ++) {
- zero = c[i] === 0
}
- // eof is *at least* 2 blocks of nulls, and then the end of the
- // file. you can put blocks of nulls between entries anywhere,
- // so appending one tarball to another is technically valid.
- // ending without the eof null blocks is not allowed, however.
- if (zero) {
- if (this._eofStarted)
- this._ended = true
- this._eofStarted = true
- } else {
- this._eofStarted = false
- this._startEntry(c)
+ return c.length
+ }
+
+ [CONSUMEMETA] (chunk, position) {
+ const entry = this[WRITEENTRY]
+ const ret = this[CONSUMEBODY](chunk, position)
+
+ // if we finished, then the entry is reset
+ if (!this[WRITEENTRY])
+ this[EMITMETA](entry)
+
+ return ret
+ }
+
+ [EMIT] (ev, data, extra) {
+ if (!this[QUEUE].length && !this[READENTRY])
+ this.emit(ev, data, extra)
+ else
+ this[QUEUE].push([ev, data, extra])
+ }
+
+ [EMITMETA] (entry) {
+ this[EMIT]('meta', this[META])
+ switch (entry.type) {
+ case 'ExtendedHeader':
+ case 'OldExtendedHeader':
+ this[EX] = Pax.parse(this[META], this[EX], false)
+ break
+
+ case 'GlobalExtendedHeader':
+ this[GEX] = Pax.parse(this[META], this[GEX], true)
+ break
+
+ case 'NextFileHasLongPath':
+ case 'OldGnuLongPath':
+ this[EX] = this[EX] || Object.create(null)
+ this[EX].path = this[META].replace(/\0.*/, '')
+ break
+
+ case 'NextFileHasLongLinkpath':
+ this[EX] = this[EX] || Object.create(null)
+ this[EX].linkpath = this[META].replace(/\0.*/, '')
+ break
+
+ /* istanbul ignore next */
+ default: throw new Error('unknown meta: ' + entry.type)
}
}
- this.position += 512
-}
-
-// take a header chunk, start the right kind of entry.
-Parse.prototype._startEntry = function (c) {
- var header = new TarHeader(c)
- , self = this
- , entry
- , ev
- , EntryType
- , onend
- , meta = false
-
- if (null === header.size || !header.cksumValid) {
- var e = new Error("invalid tar file")
- e.header = header
- e.tar_file_offset = this.position
- e.tar_block = this.position / 512
- return this.emit("error", e)
+ abort (msg, error) {
+ this[ABORTED] = true
+ this.warn(msg, error)
+ this.emit('abort')
}
- switch (tar.types[header.type]) {
- case "File":
- case "OldFile":
- case "Link":
- case "SymbolicLink":
- case "CharacterDevice":
- case "BlockDevice":
- case "Directory":
- case "FIFO":
- case "ContiguousFile":
- case "GNUDumpDir":
- // start a file.
- // pass in any extended headers
- // These ones consumers are typically most interested in.
- EntryType = Entry
- ev = "entry"
- break
-
- case "GlobalExtendedHeader":
- // extended headers that apply to the rest of the tarball
- EntryType = ExtendedHeader
- onend = function () {
- self._global = self._global || {}
- Object.keys(entry.fields).forEach(function (k) {
- self._global[k] = entry.fields[k]
- })
+ write (chunk) {
+ if (this[ABORTED])
+ return
+
+ // first write, might be gzipped
+ if (this[UNZIP] === null && chunk) {
+ if (this[BUFFER]) {
+ chunk = Buffer.concat([this[BUFFER], chunk])
+ this[BUFFER] = null
}
- ev = "globalExtendedHeader"
- meta = true
- break
-
- case "ExtendedHeader":
- case "OldExtendedHeader":
- // extended headers that apply to the next entry
- EntryType = ExtendedHeader
- onend = function () {
- self._extended = entry.fields
+ if (chunk.length < gzipHeader.length) {
+ this[BUFFER] = chunk
+ return true
}
- ev = "extendedHeader"
- meta = true
- break
-
- case "NextFileHasLongLinkpath":
- // set linkpath= in extended header
- EntryType = BufferEntry
- onend = function () {
- self._extended = self._extended || {}
- self._extended.linkpath = entry.body
+ for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
+ if (chunk[i] !== gzipHeader[i])
+ this[UNZIP] = false
}
- ev = "longLinkpath"
- meta = true
- break
-
- case "NextFileHasLongPath":
- case "OldGnuLongPath":
- // set path= in file-extended header
- EntryType = BufferEntry
- onend = function () {
- self._extended = self._extended || {}
- self._extended.path = entry.body
+ if (this[UNZIP] === null) {
+ const ended = this[ENDED]
+ this[ENDED] = false
+ this[UNZIP] = new zlib.Unzip()
+ this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
+ this[UNZIP].on('error', er =>
+ this.abort('zlib error: ' + er.message, er))
+ this[UNZIP].on('end', _ => {
+ this[ENDED] = true
+ this[CONSUMECHUNK]()
+ })
+ return ended ? this[UNZIP].end(chunk) : this[UNZIP].write(chunk)
}
- ev = "longPath"
- meta = true
- break
-
- default:
- // all the rest we skip, but still set the _entry
- // member, so that we can skip over their data appropriately.
- // emit an event to say that this is an ignored entry type?
- EntryType = Entry
- ev = "ignoredEntry"
- break
- }
+ }
- var global, extended
- if (meta) {
- global = extended = null
- } else {
- var global = this._global
- var extended = this._extended
+ this[WRITING] = true
+ if (this[UNZIP])
+ this[UNZIP].write(chunk)
+ else
+ this[CONSUMECHUNK](chunk)
+ this[WRITING] = false
- // extendedHeader only applies to one entry, so once we start
- // an entry, it's over.
- this._extended = null
- }
- entry = new EntryType(header, extended, global)
- entry.meta = meta
-
- // only proxy data events of normal files.
- if (!meta) {
- entry.on("data", function (c) {
- me.emit("data", c)
- })
+ // return false if there's a queue, or if the current entry isn't flowing
+ const ret =
+ this[QUEUE].length ? false :
+ this[READENTRY] ? this[READENTRY].flowing :
+ true
+
+ // if we have no queue, then that means a clogged READENTRY
+ if (!ret && !this[QUEUE].length)
+ this[READENTRY].once('drain', _ => this.emit('drain'))
+
+ return ret
}
- if (onend) entry.on("end", onend)
+ [BUFFERCONCAT] (c) {
+ if (c && !this[ABORTED])
+ this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
+ }
- this._entry = entry
- var me = this
+ [MAYBEEND] () {
+ if (this[ENDED] && !this[EMITTEDEND] && !this[ABORTED]) {
+ this[EMITTEDEND] = true
+ const entry = this[WRITEENTRY]
+ if (entry && entry.blockRemain) {
+ const have = this[BUFFER] ? this[BUFFER].length : 0
+ this.warn('Truncated input (needed ' + entry.blockRemain +
+ ' more bytes, only ' + have + ' available)', entry)
+ if (this[BUFFER])
+ entry.write(this[BUFFER])
+ entry.end()
+ }
+ this[EMIT](DONE)
+ }
+ }
- entry.on("pause", function () {
- me.pause()
- })
+ [CONSUMECHUNK] (chunk) {
+ if (this[CONSUMING]) {
+ this[BUFFERCONCAT](chunk)
+ } else if (!chunk && !this[BUFFER]) {
+ this[MAYBEEND]()
+ } else {
+ this[CONSUMING] = true
+ if (this[BUFFER]) {
+ this[BUFFERCONCAT](chunk)
+ const c = this[BUFFER]
+ this[BUFFER] = null
+ this[CONSUMECHUNKSUB](c)
+ } else {
+ this[CONSUMECHUNKSUB](chunk)
+ }
- entry.on("resume", function () {
- me.resume()
- })
+ while (this[BUFFER] && this[BUFFER].length >= 512 && !this[ABORTED]) {
+ const c = this[BUFFER]
+ this[BUFFER] = null
+ this[CONSUMECHUNKSUB](c)
+ }
+ this[CONSUMING] = false
+ }
- if (this.listeners("*").length) {
- this.emit("*", ev, entry)
+ if (!this[BUFFER] || this[ENDED])
+ this[MAYBEEND]()
}
- this.emit(ev, entry)
+ [CONSUMECHUNKSUB] (chunk) {
+ // we know that we are in CONSUMING mode, so anything written goes into
+ // the buffer. Advance the position and put any remainder in the buffer.
+ let position = 0
+ let length = chunk.length
+ while (position + 512 <= length && !this[ABORTED]) {
+ switch (this[STATE]) {
+ case 'begin':
+ this[CONSUMEHEADER](chunk, position)
+ position += 512
+ break
+
+ case 'ignore':
+ case 'body':
+ position += this[CONSUMEBODY](chunk, position)
+ break
+
+ case 'meta':
+ position += this[CONSUMEMETA](chunk, position)
+ break
+
+ /* istanbul ignore next */
+ default:
+ throw new Error('invalid state: ' + this[STATE])
+ }
+ }
- // Zero-byte entry. End immediately.
- if (entry.props.size === 0) {
- entry.end()
- this._entry = null
+ if (position < length) {
+ if (this[BUFFER])
+ this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
+ else
+ this[BUFFER] = chunk.slice(position)
+ }
+ }
+
+ end (chunk) {
+ if (!this[ABORTED]) {
+ if (this[UNZIP])
+ this[UNZIP].end(chunk)
+ else {
+ this[ENDED] = true
+ this.write(chunk)
+ }
+ }
}
-}
+})
diff --git a/deps/npm/node_modules/tar/lib/pax.js b/deps/npm/node_modules/tar/lib/pax.js
new file mode 100644
index 00000000000000..214a459f3bdde8
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/pax.js
@@ -0,0 +1,145 @@
+'use strict'
+const Header = require('./header.js')
+const path = require('path')
+
+class Pax {
+ constructor (obj, global) {
+ this.atime = obj.atime || null
+ this.charset = obj.charset || null
+ this.comment = obj.comment || null
+ this.ctime = obj.ctime || null
+ this.gid = obj.gid || null
+ this.gname = obj.gname || null
+ this.linkpath = obj.linkpath || null
+ this.mtime = obj.mtime || null
+ this.path = obj.path || null
+ this.size = obj.size || null
+ this.uid = obj.uid || null
+ this.uname = obj.uname || null
+ this.dev = obj.dev || null
+ this.ino = obj.ino || null
+ this.nlink = obj.nlink || null
+ this.global = global || false
+ }
+
+ encode () {
+ const body = this.encodeBody()
+ if (body === '')
+ return null
+
+ const bodyLen = Buffer.byteLength(body)
+ // round up to 512 bytes
+ // add 512 for header
+ const bufLen = 512 * Math.ceil(1 + bodyLen / 512)
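+    // e.g. (illustrative) a 100-byte body gives
+    // 512 * Math.ceil(1 + 100/512) === 1024: one 512-byte header block
+    // plus one 512-byte body block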
+ const buf = Buffer.allocUnsafe(bufLen)
+
+    // 0-fill the header section; the encode below might not hit every field
+ for (let i = 0; i < 512; i++) {
+ buf[i] = 0
+ }
+
+ new Header({
+      // XXX the path should really be split up:
+      // the dirname prepended, then 'PaxHeader/' + basename,
+      // with the whole thing kept under 99 chars
+ path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99),
+ mode: this.mode || 0o644,
+ uid: this.uid || null,
+ gid: this.gid || null,
+ size: bodyLen,
+ mtime: this.mtime || null,
+ type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
+ linkpath: '',
+ uname: this.uname || '',
+ gname: this.gname || '',
+ devmaj: 0,
+ devmin: 0,
+ atime: this.atime || null,
+ ctime: this.ctime || null
+ }).encode(buf)
+
+ buf.write(body, 512, bodyLen, 'utf8')
+
+ // null pad after the body
+ for (let i = bodyLen + 512; i < buf.length; i++) {
+ buf[i] = 0
+ }
+
+ return buf
+ }
+
+ encodeBody () {
+ return (
+ this.encodeField('path') +
+ this.encodeField('ctime') +
+ this.encodeField('atime') +
+ this.encodeField('dev') +
+ this.encodeField('ino') +
+ this.encodeField('nlink') +
+ this.encodeField('charset') +
+ this.encodeField('comment') +
+ this.encodeField('gid') +
+ this.encodeField('gname') +
+ this.encodeField('linkpath') +
+ this.encodeField('mtime') +
+ this.encodeField('size') +
+ this.encodeField('uid') +
+ this.encodeField('uname')
+ )
+ }
+
+ encodeField (field) {
+ if (this[field] === null || this[field] === undefined)
+ return ''
+ const v = this[field] instanceof Date ? this[field].getTime() / 1000
+ : this[field]
+ const s = ' ' +
+ (field === 'dev' || field === 'ino' || field === 'nlink'
+ ? 'SCHILY.' : '') +
+ field + '=' + v + '\n'
+ const byteLen = Buffer.byteLength(s)
+    // the length prefix counts its own ascii digits as part of the
+    // record: if the body is 9 chars, a 1-digit prefix would make 10
+    // chars, so a 2-digit prefix is needed, giving 11 chars in total
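+    // worked example (illustrative): ' path=foo\n' is 10 bytes, so
+    // digits === 2 and len === 12, producing '12 path=foo\n' (12 bytes)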
+ let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
+ if (byteLen + digits >= Math.pow(10, digits))
+ digits += 1
+ const len = digits + byteLen
+ return len + s
+ }
+}
+
+Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g)
+
+const merge = (a, b) =>
+ b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a
+
+const parseKV = string =>
+ string
+ .replace(/\n$/, '')
+ .split('\n')
+ .reduce(parseKVLine, Object.create(null))
+
+const parseKVLine = (set, line) => {
+ const n = parseInt(line, 10)
+
+ // XXX Values with \n in them will fail this.
+ // Refactor to not be a naive line-by-line parse.
+ if (n !== Buffer.byteLength(line) + 1)
+ return set
+
+ line = line.substr((n + ' ').length)
+ const kv = line.split('=')
+ const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1')
+ if (!k)
+ return set
+
+ const v = kv.join('=')
+ set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k)
+ ? new Date(v * 1000)
+ : /^[0-9]+$/.test(v) ? +v
+ : v
+ return set
+}
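+
+// e.g. (illustrative) the record '12 path=foo\n' arrives here as the
+// line '12 path=foo'; its prefix 12 === Buffer.byteLength(line) + 1,
+// so it parses to { path: 'foo' }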
+
+module.exports = Pax
diff --git a/deps/npm/node_modules/tar/lib/read-entry.js b/deps/npm/node_modules/tar/lib/read-entry.js
new file mode 100644
index 00000000000000..aa369c74f5b024
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/read-entry.js
@@ -0,0 +1,94 @@
+'use strict'
+const types = require('./types.js')
+const MiniPass = require('minipass')
+
+const SLURP = Symbol('slurp')
+module.exports = class ReadEntry extends MiniPass {
+ constructor (header, ex, gex) {
+ super()
+ this.extended = ex
+ this.globalExtended = gex
+ this.header = header
+ this.startBlockSize = 512 * Math.ceil(header.size / 512)
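+    // e.g. (illustrative) a 10-byte file still spans one full 512-byte block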
+ this.blockRemain = this.startBlockSize
+ this.remain = header.size
+ this.type = header.type
+ this.meta = false
+ this.ignore = false
+ switch (this.type) {
+ case 'File':
+ case 'OldFile':
+ case 'Link':
+ case 'SymbolicLink':
+ case 'CharacterDevice':
+ case 'BlockDevice':
+ case 'Directory':
+ case 'FIFO':
+ case 'ContiguousFile':
+ case 'GNUDumpDir':
+ break
+
+ case 'NextFileHasLongLinkpath':
+ case 'NextFileHasLongPath':
+ case 'OldGnuLongPath':
+ case 'GlobalExtendedHeader':
+ case 'ExtendedHeader':
+ case 'OldExtendedHeader':
+ this.meta = true
+ break
+
+ // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
+ // it may be worth doing the same, but with a warning.
+ default:
+ this.ignore = true
+ }
+
+ this.path = header.path
+ this.mode = header.mode
+ if (this.mode)
+ this.mode = this.mode & 0o7777
+ this.uid = header.uid
+ this.gid = header.gid
+ this.uname = header.uname
+ this.gname = header.gname
+ this.size = header.size
+ this.mtime = header.mtime
+ this.atime = header.atime
+ this.ctime = header.ctime
+ this.linkpath = header.linkpath
+ this.uname = header.uname
+ this.gname = header.gname
+
+ if (ex) this[SLURP](ex)
+ if (gex) this[SLURP](gex, true)
+ }
+
+ write (data) {
+ const writeLen = data.length
+ if (writeLen > this.blockRemain)
+ throw new Error('writing more to entry than is appropriate')
+
+ const r = this.remain
+ const br = this.blockRemain
+ this.remain = Math.max(0, r - writeLen)
+ this.blockRemain = Math.max(0, br - writeLen)
+ if (this.ignore)
+ return true
+
+ if (r >= writeLen)
+ return super.write(data)
+
+ // r < writeLen
+ return super.write(data.slice(0, r))
+ }
+
+ [SLURP] (ex, global) {
+ for (let k in ex) {
+ // we slurp in everything except for the path attribute in
+ // a global extended header, because that's weird.
+ if (ex[k] !== null && ex[k] !== undefined &&
+ !(global && k === 'path'))
+ this[k] = ex[k]
+ }
+ }
+}
diff --git a/deps/npm/node_modules/tar/lib/replace.js b/deps/npm/node_modules/tar/lib/replace.js
new file mode 100644
index 00000000000000..aac6b57fa88615
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/replace.js
@@ -0,0 +1,211 @@
+'use strict'
+
+// tar -r
+const hlo = require('./high-level-opt.js')
+const Pack = require('./pack.js')
+const Parse = require('./parse.js')
+const fs = require('fs')
+const t = require('./list.js')
+const path = require('path')
+
+// starting at the head of the file, read a Header
+// If the checksum is invalid, that's our position to start writing
+// If it is valid, jump forward by the specified size (rounded up to
+// a 512-byte block boundary) and try again.
+// Write the new Pack stream starting there.
+
+const Header = require('./header.js')
+
+const r = module.exports = (opt_, files, cb) => {
+ const opt = hlo(opt_)
+
+ if (!opt.file)
+ throw new TypeError('file is required')
+
+ if (opt.gzip)
+ throw new TypeError('cannot append to compressed archives')
+
+ if (!files || !Array.isArray(files) || !files.length)
+ throw new TypeError('no files or directories specified')
+
+ files = Array.from(files)
+
+ return opt.sync ? replaceSync(opt, files)
+ : replace(opt, files, cb)
+}
+
+const replaceSync = (opt, files) => {
+ const p = new Pack.Sync(opt)
+
+ let threw = true
+ let fd
+ try {
+ try {
+ fd = fs.openSync(opt.file, 'r+')
+ } catch (er) {
+ if (er.code === 'ENOENT')
+ fd = fs.openSync(opt.file, 'w+')
+ else
+ throw er
+ }
+
+ const st = fs.fstatSync(fd)
+ const headBuf = Buffer.alloc(512)
+ let position
+
+ POSITION: for (position = 0; position < st.size; position += 512) {
+ for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
+ bytes = fs.readSync(
+ fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
+ )
+
+ if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
+ throw new Error('cannot append to compressed archives')
+
+ if (!bytes)
+ break POSITION
+ }
+
+ let h = new Header(headBuf)
+ if (!h.cksumValid)
+ break
+ let entryBlockSize = 512 * Math.ceil(h.size / 512)
+ if (position + entryBlockSize + 512 > st.size)
+ break
+      // the for-loop increment adds the 512 for the header we just
+      // parsed; jump ahead over all the blocks for the body as well
+ position += entryBlockSize
+ if (opt.mtimeCache)
+ opt.mtimeCache.set(h.path, h.mtime)
+ }
+
+ p.on('data', c => {
+ fs.writeSync(fd, c, 0, c.length, position)
+ position += c.length
+ })
+ p.on('end', _ => fs.closeSync(fd))
+
+ addFilesSync(p, files)
+ threw = false
+ } finally {
+ if (threw)
+ try { fs.closeSync(fd) } catch (er) {}
+ }
+}
+
+const replace = (opt, files, cb) => {
+ files = Array.from(files)
+ const p = new Pack(opt)
+
+ const getPos = (fd, size, cb_) => {
+ const cb = (er, pos) => {
+ if (er)
+ fs.close(fd, _ => cb_(er))
+ else
+ cb_(null, pos)
+ }
+
+ let position = 0
+ if (size === 0)
+ return cb(null, 0)
+
+ let bufPos = 0
+ const headBuf = Buffer.alloc(512)
+ const onread = (er, bytes) => {
+ if (er)
+ return cb(er)
+ bufPos += bytes
+ if (bufPos < 512 && bytes)
+ return fs.read(
+ fd, headBuf, bufPos, headBuf.length - bufPos,
+ position + bufPos, onread
+ )
+
+ if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
+ return cb(new Error('cannot append to compressed archives'))
+
+ // truncated header
+ if (bufPos < 512)
+ return cb(null, position)
+
+ const h = new Header(headBuf)
+ if (!h.cksumValid)
+ return cb(null, position)
+
+ const entryBlockSize = 512 * Math.ceil(h.size / 512)
+ if (position + entryBlockSize + 512 > size)
+ return cb(null, position)
+
+ position += entryBlockSize + 512
+ if (position >= size)
+ return cb(null, position)
+
+ if (opt.mtimeCache)
+ opt.mtimeCache.set(h.path, h.mtime)
+ bufPos = 0
+ fs.read(fd, headBuf, 0, 512, position, onread)
+ }
+ fs.read(fd, headBuf, 0, 512, position, onread)
+ }
+
+ const promise = new Promise((resolve, reject) => {
+ p.on('error', reject)
+ const onopen = (er, fd) => {
+ if (er) {
+ if (er.code === 'ENOENT')
+ return fs.open(opt.file, 'w+', onopen)
+ return reject(er)
+ }
+ fs.fstat(fd, (er, st) => {
+ if (er)
+ return reject(er)
+ getPos(fd, st.size, (er, position) => {
+ if (er)
+ return reject(er)
+ const stream = fs.createWriteStream(opt.file, {
+ fd: fd,
+ flags: 'r+',
+ start: position
+ })
+ p.pipe(stream)
+ stream.on('error', reject)
+ stream.on('close', resolve)
+ addFilesAsync(p, files)
+ })
+ })
+ }
+ fs.open(opt.file, 'r+', onopen)
+ })
+
+ return cb ? promise.then(cb, cb) : promise
+}
+
+const addFilesSync = (p, files) => {
+ files.forEach(file => {
+ if (file.charAt(0) === '@')
+ t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ sync: true,
+ noResume: true,
+ onentry: entry => p.add(entry)
+ })
+ else
+ p.add(file)
+ })
+ p.end()
+}
+
+const addFilesAsync = (p, files) => {
+ while (files.length) {
+ const file = files.shift()
+ if (file.charAt(0) === '@')
+ return t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ noResume: true,
+ onentry: entry => p.add(entry)
+ }).then(_ => addFilesAsync(p, files))
+ else
+ p.add(file)
+ }
+ p.end()
+}
diff --git a/deps/npm/node_modules/tar/lib/types.js b/deps/npm/node_modules/tar/lib/types.js
new file mode 100644
index 00000000000000..df425652b51d2e
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/types.js
@@ -0,0 +1,44 @@
+'use strict'
+// map types from key to human-friendly name
+exports.name = new Map([
+ ['0', 'File'],
+ // same as File
+ ['', 'OldFile'],
+ ['1', 'Link'],
+ ['2', 'SymbolicLink'],
+ // Devices and FIFOs aren't fully supported
+ // they are parsed, but skipped when unpacking
+ ['3', 'CharacterDevice'],
+ ['4', 'BlockDevice'],
+ ['5', 'Directory'],
+ ['6', 'FIFO'],
+ // same as File
+ ['7', 'ContiguousFile'],
+ // pax headers
+ ['g', 'GlobalExtendedHeader'],
+ ['x', 'ExtendedHeader'],
+ // vendor-specific stuff
+ // skip
+ ['A', 'SolarisACL'],
+ // like 5, but with data, which should be skipped
+ ['D', 'GNUDumpDir'],
+ // metadata only, skip
+ ['I', 'Inode'],
+ // data = link path of next file
+ ['K', 'NextFileHasLongLinkpath'],
+ // data = path of next file
+ ['L', 'NextFileHasLongPath'],
+ // skip
+ ['M', 'ContinuationFile'],
+ // like L
+ ['N', 'OldGnuLongPath'],
+ // skip
+ ['S', 'SparseFile'],
+ // skip
+ ['V', 'TapeVolumeHeader'],
+ // like x
+ ['X', 'OldExtendedHeader']
+])
+
+// map the other direction
+exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]))
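+
+// e.g. (illustrative) exports.name.get('5') === 'Directory' and
+// exports.code.get('Directory') === '5'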
diff --git a/deps/npm/node_modules/tar/lib/unpack.js b/deps/npm/node_modules/tar/lib/unpack.js
new file mode 100644
index 00000000000000..e8c80c6fd5d6dd
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/unpack.js
@@ -0,0 +1,481 @@
+'use strict'
+
+const assert = require('assert')
+const EE = require('events').EventEmitter
+const Parser = require('./parse.js')
+const fs = require('fs')
+const path = require('path')
+const mkdir = require('./mkdir.js')
+const mkdirSync = mkdir.sync
+const wc = require('./winchars.js')
+
+const ONENTRY = Symbol('onEntry')
+const CHECKFS = Symbol('checkFs')
+const MAKEFS = Symbol('makeFs')
+const FILE = Symbol('file')
+const DIRECTORY = Symbol('directory')
+const LINK = Symbol('link')
+const SYMLINK = Symbol('symlink')
+const HARDLINK = Symbol('hardlink')
+const UNSUPPORTED = Symbol('unsupported')
+const UNKNOWN = Symbol('unknown')
+const CHECKPATH = Symbol('checkPath')
+const MKDIR = Symbol('mkdir')
+const ONERROR = Symbol('onError')
+const PENDING = Symbol('pending')
+const PEND = Symbol('pend')
+const UNPEND = Symbol('unpend')
+const ENDED = Symbol('ended')
+const MAYBECLOSE = Symbol('maybeClose')
+const SKIP = Symbol('skip')
+const DOCHOWN = Symbol('doChown')
+const UID = Symbol('uid')
+const GID = Symbol('gid')
+
+class Unpack extends Parser {
+ constructor (opt) {
+ if (!opt)
+ opt = {}
+
+ opt.ondone = _ => {
+ this[ENDED] = true
+ this[MAYBECLOSE]()
+ }
+
+ super(opt)
+
+ this.writable = true
+ this.readable = false
+
+ this[PENDING] = 0
+ this[ENDED] = false
+
+ this.dirCache = opt.dirCache || new Map()
+
+ if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
+ // need both or neither
+ if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number')
+ throw new TypeError('cannot set owner without number uid and gid')
+ if (opt.preserveOwner)
+ throw new TypeError(
+ 'cannot preserve owner in archive and also set owner explicitly')
+ this.uid = opt.uid
+ this.gid = opt.gid
+ this.setOwner = true
+ } else {
+ this.uid = null
+ this.gid = null
+ this.setOwner = false
+ }
+
+ // default true for root
+ if (opt.preserveOwner === undefined && typeof opt.uid !== 'number')
+ this.preserveOwner = process.getuid && process.getuid() === 0
+ else
+ this.preserveOwner = !!opt.preserveOwner
+
+ this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
+ process.getuid() : null
+ this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
+ process.getgid() : null
+
+ // turn >| in filenames into 0xf000-higher encoded forms
+ this.win32 = !!opt.win32 || process.platform === 'win32'
+
+ // do not unpack over files that are newer than what's in the archive
+ this.newer = !!opt.newer
+
+ // do not unpack over ANY files
+ this.keep = !!opt.keep
+
+ // do not set mtime/atime of extracted entries
+ this.noMtime = !!opt.noMtime
+
+    // allow .., absolute path entries, and unpacking through symlinks.
+    // without this: warn and skip .., relativize absolutes, and error
+    // on symlinks in the extraction path
+ this.preservePaths = !!opt.preservePaths
+
+ // unlink files and links before writing. This breaks existing hard
+ // links, and removes symlink directories rather than erroring
+ this.unlink = !!opt.unlink
+
+ this.cwd = path.resolve(opt.cwd || process.cwd())
+ this.strip = +opt.strip || 0
+ this.processUmask = process.umask()
+ this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
+ // default mode for dirs created as parents
+ this.dmode = opt.dmode || (0o0777 & (~this.umask))
+ this.fmode = opt.fmode || (0o0666 & (~this.umask))
+ this.on('entry', entry => this[ONENTRY](entry))
+ }
+
+ [MAYBECLOSE] () {
+ if (this[ENDED] && this[PENDING] === 0) {
+ this.emit('prefinish')
+ this.emit('finish')
+ this.emit('end')
+ this.emit('close')
+ }
+ }
+
+ [CHECKPATH] (entry) {
+ if (this.strip) {
+ const parts = entry.path.split(/\/|\\/)
+ if (parts.length < this.strip)
+ return false
+ entry.path = parts.slice(this.strip).join('/')
+ }
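+    // e.g. (illustrative) with strip === 1, an entry path of
+    // 'package/lib/index.js' becomes 'lib/index.js'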
+
+ if (!this.preservePaths) {
+ const p = entry.path
+ if (p.match(/(^|\/|\\)\.\.(\\|\/|$)/)) {
+ this.warn('path contains \'..\'', p)
+ return false
+ }
+
+ // absolutes on posix are also absolutes on win32
+ // so we only need to test this one to get both
+ if (path.win32.isAbsolute(p)) {
+ const parsed = path.win32.parse(p)
+ this.warn('stripping ' + parsed.root + ' from absolute path', p)
+ entry.path = p.substr(parsed.root.length)
+ }
+ }
+
+ // only encode : chars that aren't drive letter indicators
+ if (this.win32) {
+ const parsed = path.win32.parse(entry.path)
+ entry.path = parsed.root === '' ? wc.encode(entry.path)
+ : parsed.root + wc.encode(entry.path.substr(parsed.root.length))
+ }
+
+ if (path.isAbsolute(entry.path))
+ entry.absolute = entry.path
+ else
+ entry.absolute = path.resolve(this.cwd, entry.path)
+
+ return true
+ }
+
+ [ONENTRY] (entry) {
+ if (!this[CHECKPATH](entry))
+ return entry.resume()
+
+ assert.equal(typeof entry.absolute, 'string')
+
+ switch (entry.type) {
+ case 'Directory':
+ case 'GNUDumpDir':
+ if (entry.mode)
+ entry.mode = entry.mode | 0o700
+
+ case 'File':
+ case 'OldFile':
+ case 'ContiguousFile':
+ case 'Link':
+ case 'SymbolicLink':
+ return this[CHECKFS](entry)
+
+ case 'CharacterDevice':
+ case 'BlockDevice':
+ case 'FIFO':
+ return this[UNSUPPORTED](entry)
+ }
+ }
+
+ [ONERROR] (er, entry) {
+ // Cwd has to exist, or else nothing works. That's serious.
+ // Other errors are warnings, which raise the error in strict
+ // mode, but otherwise continue on.
+ if (er.name === 'CwdError')
+ this.emit('error', er)
+ else {
+ this.warn(er.message, er)
+ this[UNPEND]()
+ entry.resume()
+ }
+ }
+
+ [MKDIR] (dir, mode, cb) {
+ mkdir(dir, {
+ uid: this.uid,
+ gid: this.gid,
+ processUid: this.processUid,
+ processGid: this.processGid,
+ umask: this.processUmask,
+ preserve: this.preservePaths,
+ unlink: this.unlink,
+ cache: this.dirCache,
+ cwd: this.cwd,
+ mode: mode
+ }, cb)
+ }
+
+ [DOCHOWN] (entry) {
+ // in preserve owner mode, chown if the entry doesn't match process
+ // in set owner mode, chown if setting doesn't match process
+ return this.preserveOwner &&
+ ( typeof entry.uid === 'number' && entry.uid !== this.processUid ||
+ typeof entry.gid === 'number' && entry.gid !== this.processGid )
+ ||
+ ( typeof this.uid === 'number' && this.uid !== this.processUid ||
+ typeof this.gid === 'number' && this.gid !== this.processGid )
+ }
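+  // e.g. (illustrative) when extracting as root with preserveOwner on,
+  // an entry recorded with uid 1000 !== processUid 0 triggers a chown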
+
+ [UID] (entry) {
+ return typeof this.uid === 'number' ? this.uid
+ : typeof entry.uid === 'number' ? entry.uid
+ : this.processUid
+ }
+
+ [GID] (entry) {
+ return typeof this.gid === 'number' ? this.gid
+ : typeof entry.gid === 'number' ? entry.gid
+ : this.processGid
+ }
+
+ [FILE] (entry) {
+ const mode = entry.mode & 0o7777 || this.fmode
+ const stream = fs.createWriteStream(entry.absolute, { mode: mode })
+ stream.on('error', er => this[ONERROR](er, entry))
+
+ const queue = []
+ const processQueue = _ => {
+ const action = queue.shift()
+ if (action)
+ action(processQueue)
+ else
+ this[UNPEND]()
+ }
+
+ stream.on('close', _ => {
+ if (entry.mtime && !this.noMtime)
+ queue.push(cb =>
+ fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, cb))
+ if (this[DOCHOWN](entry))
+ queue.push(cb =>
+ fs.chown(entry.absolute, this[UID](entry), this[GID](entry), cb))
+ processQueue()
+ })
+ entry.pipe(stream)
+ }
+
+ [DIRECTORY] (entry) {
+ const mode = entry.mode & 0o7777 || this.dmode
+ this[MKDIR](entry.absolute, mode, er => {
+ if (er)
+ return this[ONERROR](er, entry)
+
+ const queue = []
+ const processQueue = _ => {
+ const action = queue.shift()
+ if (action)
+ action(processQueue)
+ else {
+ this[UNPEND]()
+ entry.resume()
+ }
+ }
+
+ if (entry.mtime && !this.noMtime)
+ queue.push(cb =>
+ fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, cb))
+ if (this[DOCHOWN](entry))
+ queue.push(cb =>
+ fs.chown(entry.absolute, this[UID](entry), this[GID](entry), cb))
+
+ processQueue()
+ })
+ }
+
+ [UNSUPPORTED] (entry) {
+ this.warn('unsupported entry type: ' + entry.type, entry)
+ entry.resume()
+ }
+
+ [SYMLINK] (entry) {
+ this[LINK](entry, entry.linkpath, 'symlink')
+ }
+
+ [HARDLINK] (entry) {
+ this[LINK](entry, path.resolve(this.cwd, entry.linkpath), 'link')
+ }
+
+ [PEND] () {
+ this[PENDING]++
+ }
+
+ [UNPEND] () {
+ this[PENDING]--
+ this[MAYBECLOSE]()
+ }
+
+ [SKIP] (entry) {
+ this[UNPEND]()
+ entry.resume()
+ }
+
+ // check if a thing is there, and if so, try to clobber it
+ [CHECKFS] (entry) {
+ this[PEND]()
+ this[MKDIR](path.dirname(entry.absolute), this.dmode, er => {
+ if (er)
+ return this[ONERROR](er, entry)
+ fs.lstat(entry.absolute, (er, st) => {
+ if (st && (this.keep || this.newer && st.mtime > entry.mtime))
+ this[SKIP](entry)
+ else if (er || (entry.type === 'File' && !this.unlink && st.isFile()))
+ this[MAKEFS](null, entry)
+ else if (st.isDirectory()) {
+ if (entry.type === 'Directory') {
+ if (!entry.mode || (st.mode & 0o7777) === entry.mode)
+ this[MAKEFS](null, entry)
+ else
+ fs.chmod(entry.absolute, entry.mode, er => this[MAKEFS](er, entry))
+ } else
+ fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry))
+ } else
+ fs.unlink(entry.absolute, er => this[MAKEFS](er, entry))
+ })
+ })
+ }
+
+ [MAKEFS] (er, entry) {
+ if (er)
+ return this[ONERROR](er, entry)
+
+ switch (entry.type) {
+ case 'File':
+ case 'OldFile':
+ case 'ContiguousFile':
+ return this[FILE](entry)
+
+ case 'Link':
+ return this[HARDLINK](entry)
+
+ case 'SymbolicLink':
+ return this[SYMLINK](entry)
+
+ case 'Directory':
+ case 'GNUDumpDir':
+ return this[DIRECTORY](entry)
+ }
+ }
+
+ [LINK] (entry, linkpath, link) {
+ // XXX: get the type ('file' or 'dir') for windows
+ fs[link](linkpath, entry.absolute, er => {
+ if (er)
+ return this[ONERROR](er, entry)
+ this[UNPEND]()
+ entry.resume()
+ })
+ }
+}
+
+class UnpackSync extends Unpack {
+ constructor (opt) {
+ super(opt)
+ }
+
+ [CHECKFS] (entry) {
+ const er = this[MKDIR](path.dirname(entry.absolute), this.dmode)
+ if (er)
+ return this[ONERROR](er, entry)
+ try {
+ const st = fs.lstatSync(entry.absolute)
+ if (this.keep || this.newer && st.mtime > entry.mtime)
+ return this[SKIP](entry)
+ else if (entry.type === 'File' && !this.unlink && st.isFile())
+ return this[MAKEFS](null, entry)
+ else {
+ try {
+ if (st.isDirectory()) {
+ if (entry.type === 'Directory') {
+ if (entry.mode && (st.mode & 0o7777) !== entry.mode)
+ fs.chmodSync(entry.absolute, entry.mode)
+ } else
+ fs.rmdirSync(entry.absolute)
+ } else
+ fs.unlinkSync(entry.absolute)
+ return this[MAKEFS](null, entry)
+ } catch (er) {
+ return this[ONERROR](er, entry)
+ }
+ }
+ } catch (er) {
+ return this[MAKEFS](null, entry)
+ }
+ }
+
+ [FILE] (entry) {
+ const mode = entry.mode & 0o7777 || this.fmode
+ try {
+ const fd = fs.openSync(entry.absolute, 'w', mode)
+ entry.on('data', buf => fs.writeSync(fd, buf, 0, buf.length, null))
+ entry.on('end', _ => {
+ if (entry.mtime && !this.noMtime) {
+ try {
+ fs.futimesSync(fd, entry.atime || new Date(), entry.mtime)
+ } catch (er) {}
+ }
+ if (this[DOCHOWN](entry)) {
+ try {
+ fs.fchownSync(fd, this[UID](entry), this[GID](entry))
+ } catch (er) {}
+ }
+ try { fs.closeSync(fd) } catch (er) { this[ONERROR](er, entry) }
+ })
+ } catch (er) { this[ONERROR](er, entry) }
+ }
+
+ [DIRECTORY] (entry) {
+ const mode = entry.mode & 0o7777 || this.dmode
+ const er = this[MKDIR](entry.absolute, mode)
+ if (er)
+ return this[ONERROR](er, entry)
+ if (entry.mtime && !this.noMtime) {
+ try {
+ fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime)
+ } catch (er) {}
+ }
+ if (this[DOCHOWN](entry)) {
+ try {
+ fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))
+ } catch (er) {}
+ }
+ entry.resume()
+ }
+
+ [MKDIR] (dir, mode) {
+ try {
+ return mkdir.sync(dir, {
+ uid: this.uid,
+ gid: this.gid,
+ processUid: this.processUid,
+ processGid: this.processGid,
+ umask: this.processUmask,
+ preserve: this.preservePaths,
+ unlink: this.unlink,
+ cache: this.dirCache,
+ cwd: this.cwd,
+ mode: mode
+ })
+ } catch (er) {
+ return er
+ }
+ }
+
+ [LINK] (entry, linkpath, link) {
+ try {
+ fs[link + 'Sync'](linkpath, entry.absolute)
+ entry.resume()
+ } catch (er) {
+ return this[ONERROR](er, entry)
+ }
+ }
+}
+
+Unpack.Sync = UnpackSync
+module.exports = Unpack
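
For orientation, a minimal sketch of how the `Unpack` stream above might be driven — it is a writable stream, so raw tar bytes can be piped straight into it. The archive name and destination directory here are hypothetical, and the deep require points at the file added in this diff (node-tar also re-exports it as `tar.Unpack`).

```js
const fs = require('fs')
const Unpack = require('tar/lib/unpack.js')

// Extract an uncompressed tarball into a directory that already exists.
// 'archive.tar' and '/tmp/extract' are hypothetical.
const unpack = new Unpack({
  cwd: '/tmp/extract',
  onwarn: (msg, data) => console.error('tar warning:', msg)
})
unpack.on('close', () => console.log('extraction finished'))
fs.createReadStream('archive.tar').pipe(unpack)
```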
diff --git a/deps/npm/node_modules/tar/lib/update.js b/deps/npm/node_modules/tar/lib/update.js
new file mode 100644
index 00000000000000..16c3e93ed5af8c
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/update.js
@@ -0,0 +1,36 @@
+'use strict'
+
+// tar -u
+
+const hlo = require('./high-level-opt.js')
+const r = require('./replace.js')
+// just call tar.r with the filter and mtimeCache
+
+const u = module.exports = (opt_, files, cb) => {
+ const opt = hlo(opt_)
+
+ if (!opt.file)
+ throw new TypeError('file is required')
+
+ if (opt.gzip)
+ throw new TypeError('cannot append to compressed archives')
+
+ if (!files || !Array.isArray(files) || !files.length)
+ throw new TypeError('no files or directories specified')
+
+ files = Array.from(files)
+
+ mtimeFilter(opt)
+ return r(opt, files, cb)
+}
+
+const mtimeFilter = opt => {
+ const filter = opt.filter
+
+ if (!opt.mtimeCache)
+ opt.mtimeCache = new Map()
+
+ opt.filter = filter ? (path, stat) =>
+ filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime)
+ : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime)
+}
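
Usage might look like the following sketch, assuming the package's usual `tar.u` alias for this module; the archive and file names are hypothetical.

```js
const tar = require('tar')

// Append 'bundle.js' only if it is newer than the copy already in the
// archive (classic `tar -u`). Per the checks above, gzipped archives
// and empty file lists are rejected.
tar.u({ file: 'my-archive.tar' }, ['bundle.js'], er => {
  if (er) throw er
  console.log('archive updated')
})
```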
diff --git a/deps/npm/node_modules/tar/lib/warn-mixin.js b/deps/npm/node_modules/tar/lib/warn-mixin.js
new file mode 100644
index 00000000000000..94a4b9b9908726
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/warn-mixin.js
@@ -0,0 +1,14 @@
+'use strict'
+module.exports = Base => class extends Base {
+ warn (msg, data) {
+ if (!this.strict)
+ this.emit('warn', msg, data)
+ else if (data instanceof Error)
+ this.emit('error', data)
+ else {
+ const er = new Error(msg)
+ er.data = data
+ this.emit('error', er)
+ }
+ }
+}
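
The contract is easiest to see by mixing `warn()` into a bare `EventEmitter`:

```js
const EE = require('events')
const warner = require('tar/lib/warn-mixin.js')
const w = new (warner(EE))()

w.strict = false
w.on('warn', (msg, data) => console.log('warn:', msg, data))
w.warn('odd entry', { path: 'foo' })   // non-strict: 'warn' event

w.strict = true
w.on('error', er => console.log('error:', er.message, er.data))
w.warn('bad entry', { path: 'bar' })   // strict: Error with .data attached
```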
diff --git a/deps/npm/node_modules/tar/lib/winchars.js b/deps/npm/node_modules/tar/lib/winchars.js
new file mode 100644
index 00000000000000..cf6ea06061c8e2
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/winchars.js
@@ -0,0 +1,23 @@
+'use strict'
+
+// When writing files on Windows, translate the characters to their
+// 0xf000 higher-encoded versions.
+
+const raw = [
+ '|',
+ '<',
+ '>',
+ '?',
+ ':'
+]
+
+const win = raw.map(char =>
+ String.fromCharCode(0xf000 + char.charCodeAt(0)))
+
+const toWin = new Map(raw.map((char, i) => [char, win[i]]))
+const toRaw = new Map(win.map((char, i) => [char, raw[i]]))
+
+module.exports = {
+ encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s),
+ decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s)
+}
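
A quick round trip shows the mapping:

```js
const winchars = require('tar/lib/winchars.js')

// Characters Windows forbids in file names survive a round trip.
const encoded = winchars.encode('logs|2017<v?>.txt')
console.log(encoded === 'logs|2017<v?>.txt')  // false: mapped into 0xf0xx
console.log(winchars.decode(encoded))         // 'logs|2017<v?>.txt'
```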
diff --git a/deps/npm/node_modules/tar/lib/write-entry.js b/deps/npm/node_modules/tar/lib/write-entry.js
new file mode 100644
index 00000000000000..f562bf138a5478
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/write-entry.js
@@ -0,0 +1,395 @@
+'use strict'
+const MiniPass = require('minipass')
+const Pax = require('./pax.js')
+const Header = require('./header.js')
+const ReadEntry = require('./read-entry.js')
+const fs = require('fs')
+const path = require('path')
+
+const types = require('./types.js')
+const maxReadSize = 16 * 1024 * 1024
+const PROCESS = Symbol('process')
+const FILE = Symbol('file')
+const DIRECTORY = Symbol('directory')
+const SYMLINK = Symbol('symlink')
+const HARDLINK = Symbol('hardlink')
+const HEADER = Symbol('header')
+const READ = Symbol('read')
+const LSTAT = Symbol('lstat')
+const ONLSTAT = Symbol('onlstat')
+const ONREAD = Symbol('onread')
+const ONREADLINK = Symbol('onreadlink')
+const OPENFILE = Symbol('openfile')
+const ONOPENFILE = Symbol('onopenfile')
+const CLOSE = Symbol('close')
+const warner = require('./warn-mixin.js')
+const winchars = require('./winchars.js')
+
+const WriteEntry = warner(class WriteEntry extends MiniPass {
+ constructor (p, opt) {
+ opt = opt || {}
+ super(opt)
+ if (typeof p !== 'string')
+ throw new TypeError('path is required')
+ this.path = p
+ // suppress atime, ctime, uid, gid, uname, gname
+ this.portable = !!opt.portable
+ // until node has builtin pwnam functions, this'll have to do
+ this.myuid = process.getuid && process.getuid()
+ this.myuser = process.env.USER || ''
+ this.maxReadSize = opt.maxReadSize || maxReadSize
+ this.linkCache = opt.linkCache || new Map()
+ this.statCache = opt.statCache || new Map()
+ this.preservePaths = !!opt.preservePaths
+ this.cwd = opt.cwd || process.cwd()
+ this.strict = !!opt.strict
+ this.noPax = !!opt.noPax
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+
+ if (!this.preservePaths && path.win32.isAbsolute(p)) {
+ // absolutes on posix are also absolutes on win32
+ // so we only need to test this one to get both
+ const parsed = path.win32.parse(p)
+ this.warn('stripping ' + parsed.root + ' from absolute path', p)
+ this.path = p.substr(parsed.root.length)
+ }
+
+ this.win32 = !!opt.win32 || process.platform === 'win32'
+ if (this.win32) {
+ this.path = winchars.decode(this.path.replace(/\\/g, '/'))
+ p = p.replace(/\\/g, '/')
+ }
+
+ this.absolute = opt.absolute || path.resolve(this.cwd, p)
+
+ if (this.path === '')
+ this.path = './'
+
+ if (this.statCache.has(this.absolute))
+ this[ONLSTAT](this.statCache.get(this.absolute))
+ else
+ this[LSTAT]()
+ }
+
+ [LSTAT] () {
+ fs.lstat(this.absolute, (er, stat) => {
+ if (er)
+ return this.emit('error', er)
+ this[ONLSTAT](stat)
+ })
+ }
+
+ [ONLSTAT] (stat) {
+ this.statCache.set(this.absolute, stat)
+ this.stat = stat
+ if (!stat.isFile())
+ stat.size = 0
+ this.type = getType(stat)
+ this.emit('stat', stat)
+ this[PROCESS]()
+ }
+
+ [PROCESS] () {
+ switch (this.type) {
+ case 'File': return this[FILE]()
+ case 'Directory': return this[DIRECTORY]()
+ case 'SymbolicLink': return this[SYMLINK]()
+ // unsupported types are ignored.
+ default: return this.end()
+ }
+ }
+
+ [HEADER] () {
+ this.header = new Header({
+ path: this.path,
+ linkpath: this.linkpath,
+ // only the permissions and setuid/setgid/sticky bitflags
+ // not the higher-order bits that specify file type
+ mode: this.stat.mode & 0o7777,
+ uid: this.portable ? null : this.stat.uid,
+ gid: this.portable ? null : this.stat.gid,
+ size: this.stat.size,
+ mtime: this.type === 'Directory' && this.portable
+ ? null : this.stat.mtime,
+ type: this.type,
+ uname: this.portable ? null :
+ this.stat.uid === this.myuid ? this.myuser : '',
+ atime: this.portable ? null : this.stat.atime,
+ ctime: this.portable ? null : this.stat.ctime
+ })
+
+ if (this.header.encode() && !this.noPax)
+ this.write(new Pax({
+ atime: this.portable ? null : this.header.atime,
+ ctime: this.portable ? null : this.header.ctime,
+ gid: this.portable ? null : this.header.gid,
+ mtime: this.header.mtime,
+ path: this.path,
+ linkpath: this.linkpath,
+ size: this.header.size,
+ uid: this.portable ? null : this.header.uid,
+ uname: this.portable ? null : this.header.uname,
+ dev: this.portable ? null : this.stat.dev,
+ ino: this.portable ? null : this.stat.ino,
+ nlink: this.portable ? null : this.stat.nlink
+ }).encode())
+ this.write(this.header.block)
+ }
+
+ [DIRECTORY] () {
+ if (this.path.substr(-1) !== '/')
+ this.path += '/'
+ this.stat.size = 0
+ this[HEADER]()
+ this.end()
+ }
+
+ [SYMLINK] () {
+ fs.readlink(this.absolute, (er, linkpath) => {
+ if (er)
+ return this.emit('error', er)
+ this[ONREADLINK](linkpath)
+ })
+ }
+
+ [ONREADLINK] (linkpath) {
+ this.linkpath = linkpath
+ this[HEADER]()
+ this.end()
+ }
+
+ [HARDLINK] (linkpath) {
+ this.type = 'Link'
+ this.linkpath = path.relative(this.cwd, linkpath)
+ this.stat.size = 0
+ this[HEADER]()
+ this.end()
+ }
+
+ [FILE] () {
+ if (this.stat.nlink > 1) {
+ const linkKey = this.stat.dev + ':' + this.stat.ino
+ if (this.linkCache.has(linkKey)) {
+ const linkpath = this.linkCache.get(linkKey)
+ if (linkpath.indexOf(this.cwd) === 0)
+ return this[HARDLINK](linkpath)
+ }
+ this.linkCache.set(linkKey, this.absolute)
+ }
+
+ this[HEADER]()
+ if (this.stat.size === 0)
+ return this.end()
+
+ this[OPENFILE]()
+ }
+
+ [OPENFILE] () {
+ fs.open(this.absolute, 'r', (er, fd) => {
+ if (er)
+ return this.emit('error', er)
+ this[ONOPENFILE](fd)
+ })
+ }
+
+ [ONOPENFILE] (fd) {
+ const blockLen = 512 * Math.ceil(this.stat.size / 512)
+ const bufLen = Math.min(blockLen, this.maxReadSize)
+ const buf = Buffer.allocUnsafe(bufLen)
+ this[READ](fd, buf, 0, buf.length, 0, this.stat.size, blockLen)
+ }
+
+ [READ] (fd, buf, offset, length, pos, remain, blockRemain) {
+ fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
+ if (er)
+ return this[CLOSE](fd, _ => this.emit('error', er))
+ this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead)
+ })
+ }
+
+ [CLOSE] (fd, cb) {
+ fs.close(fd, cb)
+ }
+
+ [ONREAD] (fd, buf, offset, length, pos, remain, blockRemain, bytesRead) {
+ if (bytesRead <= 0 && remain > 0) {
+ const er = new Error('unexpected EOF')
+ er.path = this.absolute
+ er.syscall = 'read'
+ er.code = 'EOF'
+ this.emit('error', er)
+ }
+
+ // null out the rest of the buffer, if we could fit the block padding
+ if (bytesRead === remain) {
+ for (let i = bytesRead; i < length && bytesRead < blockRemain; i++) {
+ buf[i + offset] = 0
+ bytesRead ++
+ remain ++
+ }
+ }
+
+ const writeBuf = offset === 0 && bytesRead === buf.length ?
+ buf : buf.slice(offset, offset + bytesRead)
+ remain -= bytesRead
+ blockRemain -= bytesRead
+ pos += bytesRead
+ offset += bytesRead
+
+ this.write(writeBuf)
+
+ if (!remain) {
+ if (blockRemain)
+ this.write(Buffer.alloc(blockRemain))
+ this.end()
+ this[CLOSE](fd, _ => _)
+ return
+ }
+
+ if (offset >= length) {
+ buf = Buffer.allocUnsafe(length)
+ offset = 0
+ }
+ length = buf.length - offset
+ this[READ](fd, buf, offset, length, pos, remain, blockRemain)
+ }
+})
+
+class WriteEntrySync extends WriteEntry {
+ constructor (path, opt) {
+ super(path, opt)
+ }
+
+ [LSTAT] () {
+ this[ONLSTAT](fs.lstatSync(this.absolute))
+ }
+
+ [SYMLINK] () {
+ this[ONREADLINK](fs.readlinkSync(this.absolute))
+ }
+
+ [OPENFILE] () {
+ this[ONOPENFILE](fs.openSync(this.absolute, 'r'))
+ }
+
+ [READ] (fd, buf, offset, length, pos, remain, blockRemain) {
+ let threw = true
+ try {
+ const bytesRead = fs.readSync(fd, buf, offset, length, pos)
+ this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead)
+ threw = false
+ } finally {
+ if (threw)
+ try { this[CLOSE](fd) } catch (er) {}
+ }
+ }
+
+ [CLOSE] (fd) {
+ fs.closeSync(fd)
+ }
+}
+
+const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
+ constructor (readEntry, opt) {
+ opt = opt || {}
+ super(opt)
+ this.readEntry = readEntry
+ this.type = readEntry.type
+ this.path = readEntry.path
+ this.mode = readEntry.mode
+ if (this.mode)
+ this.mode = this.mode & 0o7777
+ this.uid = readEntry.uid
+ this.gid = readEntry.gid
+ this.uname = readEntry.uname
+ this.gname = readEntry.gname
+ this.size = readEntry.size
+ this.mtime = readEntry.mtime
+ this.atime = readEntry.atime
+ this.ctime = readEntry.ctime
+ this.linkpath = readEntry.linkpath
+ this.uname = readEntry.uname
+ this.gname = readEntry.gname
+
+ this.preservePaths = !!opt.preservePaths
+ this.portable = !!opt.portable
+ this.strict = !!opt.strict
+ this.noPax = !!opt.noPax
+
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+
+ if (path.isAbsolute(this.path) && !this.preservePaths) {
+ const parsed = path.parse(this.path)
+ this.warn(
+ 'stripping ' + parsed.root + ' from absolute path',
+ this.path
+ )
+ this.path = this.path.substr(parsed.root.length)
+ }
+
+ this.remain = readEntry.size
+ this.blockRemain = readEntry.startBlockSize
+
+ this.header = new Header({
+ path: this.path,
+ linkpath: this.linkpath,
+ // only the permissions and setuid/setgid/sticky bitflags
+ // not the higher-order bits that specify file type
+ mode: this.mode,
+ uid: this.portable ? null : this.uid,
+ gid: this.portable ? null : this.gid,
+ size: this.size,
+ mtime: this.mtime,
+ type: this.type,
+ uname: this.portable ? null : this.uname,
+ atime: this.portable ? null : this.atime,
+ ctime: this.portable ? null : this.ctime
+ })
+
+ if (this.header.encode() && !this.noPax)
+ super.write(new Pax({
+ atime: this.portable ? null : this.atime,
+ ctime: this.portable ? null : this.ctime,
+ gid: this.portable ? null : this.gid,
+ mtime: this.mtime,
+ path: this.path,
+ linkpath: this.linkpath,
+ size: this.size,
+ uid: this.portable ? null : this.uid,
+ uname: this.portable ? null : this.uname,
+ dev: this.portable ? null : this.readEntry.dev,
+ ino: this.portable ? null : this.readEntry.ino,
+ nlink: this.portable ? null : this.readEntry.nlink
+ }).encode())
+
+ super.write(this.header.block)
+ readEntry.pipe(this)
+ }
+
+ write (data) {
+ const writeLen = data.length
+ if (writeLen > this.blockRemain)
+ throw new Error('writing more to entry than is appropriate')
+ this.blockRemain -= writeLen
+ return super.write(data)
+ }
+
+ end () {
+ if (this.blockRemain)
+ this.write(Buffer.alloc(this.blockRemain))
+ return super.end()
+ }
+})
+
+WriteEntry.Sync = WriteEntrySync
+WriteEntry.Tar = WriteEntryTar
+
+const getType = stat =>
+ stat.isFile() ? 'File'
+ : stat.isDirectory() ? 'Directory'
+ : stat.isSymbolicLink() ? 'SymbolicLink'
+ : 'Unsupported'
+
+module.exports = WriteEntry
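
Driven directly, a `WriteEntry` might be used like this sketch (the path is hypothetical and is resolved against `process.cwd()`):

```js
const WriteEntry = require('tar/lib/write-entry.js')

// Emits the 512-byte tar header, then the file body padded to a block
// boundary. 'package.json' is a hypothetical path under process.cwd().
const entry = new WriteEntry('package.json', { portable: true })
entry.on('data', chunk => process.stdout.write(chunk))
entry.on('end', () => console.error('entry complete'))
```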
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/package.json b/deps/npm/node_modules/tar/node_modules/block-stream/package.json
deleted file mode 100644
index 045ca8d2431ae8..00000000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/package.json
+++ /dev/null
@@ -1,63 +0,0 @@
-{
- "_from": "block-stream@*",
- "_id": "block-stream@0.0.9",
- "_integrity": "sha1-E+v+d4oDIFz+A3UUgeu0szAMEmo=",
- "_location": "/tar/block-stream",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "block-stream@*",
- "name": "block-stream",
- "escapedName": "block-stream",
- "rawSpec": "*",
- "saveSpec": null,
- "fetchSpec": "*"
- },
- "_requiredBy": [
- "/tar"
- ],
- "_resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz",
- "_shasum": "13ebfe778a03205cfe03751481ebb4b3300c126a",
- "_shrinkwrap": null,
- "_spec": "block-stream@*",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/tar",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bin": null,
- "bugs": {
- "url": "https://github.com/isaacs/block-stream/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "inherits": "~2.0.0"
- },
- "deprecated": false,
- "description": "a stream of blocks",
- "devDependencies": {
- "tap": "^5.7.1"
- },
- "engines": {
- "node": "0.4 || >=0.5.8"
- },
- "files": [
- "block-stream.js"
- ],
- "homepage": "https://github.com/isaacs/block-stream#readme",
- "license": "ISC",
- "main": "block-stream.js",
- "name": "block-stream",
- "optionalDependencies": {},
- "peerDependencies": {},
- "repository": {
- "type": "git",
- "url": "git://github.com/isaacs/block-stream.git"
- },
- "scripts": {
- "test": "tap test/*.js --cov"
- },
- "version": "0.0.9"
-}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/.npmignore b/deps/npm/node_modules/tar/node_modules/minipass/.npmignore
new file mode 100644
index 00000000000000..183822a7ffe489
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/.npmignore
@@ -0,0 +1,4 @@
+.*.swp
+node_modules
+.nyc_output/
+coverage/
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/.travis.yml b/deps/npm/node_modules/tar/node_modules/minipass/.travis.yml
new file mode 100644
index 00000000000000..59410a36d03976
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/.travis.yml
@@ -0,0 +1,7 @@
+language: node_js
+sudo: false
+node_js:
+ - 7
+cache:
+ directories:
+ - /Users/isaacs/.npm
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/README.md b/deps/npm/node_modules/tar/node_modules/minipass/README.md
new file mode 100644
index 00000000000000..97eadeaeaeb102
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/README.md
@@ -0,0 +1,46 @@
+# minipass
+
+A _very_ minimal implementation of a [PassThrough
+stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough)
+
+[It's very
+fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0)
+for objects, strings, and buffers.
+
+Supports pipe()ing (including multi-pipe() and backpressure
+transmission), buffering data until either a `data` event handler or
+`pipe()` is added (so you don't lose the first chunk), and most other
+cases where PassThrough is a good idea.
+
+There is a `read()` method, but it's much more efficient to consume
+data from this stream via `'data'` events or by calling `pipe()` into
+some other stream. Calling `read()` requires the buffer to be
+flattened in some cases, which requires copying memory. Also,
+`read()` returns Buffers by default, or strings when an `encoding`
+option is specified.
+
+There is also no `unpipe()` method. Once you start piping, there is
+no stopping it!
+
+If you set `objectMode: true` in the options, then whatever is written
+will be emitted. Otherwise, it'll do a minimal amount of Buffer
+copying to ensure proper Streams semantics when `read(n)` is called.
+
+This is not a `through` or `through2` stream. It doesn't transform
+the data, it just passes it right through. If you want to transform
+the data, extend the class, and override the `write()` method. Once
+you're done transforming the data however you want, call
+`super.write()` with the transform output.
+
+For an example of a stream that extends MiniPass to provide transform
+capabilities, check out [minizlib](http://npm.im/minizlib).
+
+## USAGE
+
+```js
+const MiniPass = require('minipass')
+const mp = new MiniPass(options) // optional: { encoding }
+mp.write('foo')
+mp.pipe(someOtherStream)
+mp.end('bar')
+```
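
A sketch of the extend-and-override transform pattern the README describes:

```js
const MiniPass = require('minipass')

// Transform by overriding write() and handing the result to super.write().
class Upper extends MiniPass {
  write (chunk, encoding, cb) {
    return super.write(chunk.toString().toUpperCase(), encoding, cb)
  }
}

const up = new Upper()
up.on('data', c => console.log(c.toString()))  // HELLO
up.end('hello')
```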
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/b.js b/deps/npm/node_modules/tar/node_modules/minipass/b.js
new file mode 100644
index 00000000000000..324c4190a0fe84
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/b.js
@@ -0,0 +1,12 @@
+const MiniPass = require('./')
+const butterfly = '🦋'
+var mp = new MiniPass({ encoding: 'utf8' })
+mp.on('data', chunk => {
+ console.error('data %s', chunk)
+})
+var butterbuf = new Buffer([0xf0, 0x9f, 0xa6, 0x8b])
+mp.write(butterbuf.slice(0, 1))
+mp.write(butterbuf.slice(1, 2))
+mp.write(butterbuf.slice(2, 3))
+mp.write(butterbuf.slice(3, 4))
+mp.end()
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-minipass.js b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-minipass.js
new file mode 100644
index 00000000000000..8e7841a87c7014
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-minipass.js
@@ -0,0 +1,11 @@
+'use strict'
+const MiniPass = require('../..')
+
+module.exports = class ExtendMiniPass extends MiniPass {
+ constructor (opts) {
+ super(opts)
+ }
+ write (data, encoding) {
+ return super.write(data, encoding)
+ }
+}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-through2.js b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-through2.js
new file mode 100644
index 00000000000000..6a021084c42fbc
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-through2.js
@@ -0,0 +1,12 @@
+'use strict'
+const through2 = require('through2')
+module.exports = function (opt) {
+ return opt.objectMode
+ ? through2.obj(func)
+ : through2(func)
+
+ function func (data, enc, done) {
+ this.push(data, enc)
+ done()
+ }
+}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-transform.js b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-transform.js
new file mode 100644
index 00000000000000..1d2d24026d5346
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-transform.js
@@ -0,0 +1,11 @@
+'use strict'
+const stream = require('stream')
+module.exports = class ExtendTransform extends stream.Transform {
+ constructor (opts) {
+ super(opts)
+ }
+ _transform (data, enc, done) {
+ this.push(data, enc)
+ done()
+ }
+}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/nullsink.js b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/nullsink.js
new file mode 100644
index 00000000000000..13f6e916b9ccff
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/nullsink.js
@@ -0,0 +1,12 @@
+'use strict'
+const EE = require('events').EventEmitter
+
+module.exports = class NullSink extends EE {
+ write (data, encoding, next) {
+ if (next) next()
+ return true
+ }
+ end () {
+ this.emit('finish')
+ }
+}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/numbers.js b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/numbers.js
new file mode 100644
index 00000000000000..bd1593299a636d
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/numbers.js
@@ -0,0 +1,41 @@
+'use strict'
+const stream = require('stream')
+
+const numbers = new Array(1000).join(',').split(',').map((v, k) => k)
+let acc = ''
+const strings = numbers.map(n => acc += n)
+const bufs = strings.map(s => new Buffer(s))
+const objs = strings.map(s => ({ str: s }))
+
+module.exports = class Numbers {
+ constructor (opt) {
+ this.objectMode = opt.objectMode
+ this.encoding = opt.encoding
+ this.ii = 0
+ this.done = false
+ }
+ pipe (dest) {
+ this.dest = dest
+ this.go()
+ return dest
+ }
+
+ go () {
+ let flowing = true
+ while (flowing) {
+ if (this.ii >= 1000) {
+ this.dest.end()
+ this.done = true
+ flowing = false
+ } else {
+ flowing = this.dest.write(
+ (this.objectMode ? objs
+ : this.encoding ? strings
+ : bufs)[this.ii++])
+ }
+ }
+
+ if (!this.done)
+ this.dest.once('drain', _ => this.go())
+ }
+}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/timer.js b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/timer.js
new file mode 100644
index 00000000000000..8d8fe3d80d9907
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/timer.js
@@ -0,0 +1,15 @@
+'use strict'
+module.exports = _ => {
+ const start = process.hrtime()
+ return _ => {
+ const end = process.hrtime(start)
+ const ms = Math.round(end[0]*1e6 + end[1]/1e3)/1e3
+ if (!process.env.isTTY)
+ console.log(ms)
+ else {
+ const s = Math.round(end[0]*10 + end[1]/1e8)/10
+ const ss = s <= 1 ? '' : ' (' + s + 's)'
+ console.log('%d%s', ms, ss)
+ }
+ }
+}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/bench/test.js b/deps/npm/node_modules/tar/node_modules/minipass/bench/test.js
new file mode 100644
index 00000000000000..29c9fd07d61403
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/bench/test.js
@@ -0,0 +1,160 @@
+'use strict'
+
+const iterations = +process.env.BENCH_TEST_ITERATION || 100
+const testCount = +process.env.BENCH_TEST_COUNT || 20
+
+const tests = [
+ 'baseline',
+ 'minipass',
+ 'extend-minipass',
+ 'through2',
+ 'extend-through2',
+ 'passthrough',
+ 'extend-transform'
+]
+
+const manyOpts = [ 'many', 'single' ]
+const typeOpts = [ 'buffer', 'string', 'object' ]
+
+const main = () => {
+ const spawn = require('child_process').spawn
+ const node = process.execPath
+
+ const results = {}
+
+ const testSet = []
+ tests.forEach(t =>
+ manyOpts.forEach(many =>
+ typeOpts.forEach(type =>
+ new Array(testCount).join(',').split(',').forEach(() =>
+ t !== 'baseline' || (many === 'single' && type === 'object')
+ ? testSet.push([t, many, type]) : null))))
+
+ let didFirst = false
+ const mainRunTest = t => {
+ if (!t)
+ return afterMain(results)
+
+ const k = t.join('\t')
+ if (!results[k]) {
+ results[k] = []
+ if (!didFirst)
+ didFirst = true
+ else
+ process.stderr.write('\n')
+
+ process.stderr.write(k + ' #')
+ } else {
+ process.stderr.write('#')
+ }
+
+ const c = spawn(node, [__filename].concat(t), {
+ stdio: [ 'ignore', 'pipe', 2 ]
+ })
+ let out = ''
+ c.stdout.on('data', c => out += c)
+ c.on('close', (code, signal) => {
+ if (code || signal)
+ throw new Error('failed: ' + code + ' ' + signal)
+ results[k].push(+out)
+ mainRunTest(testSet.shift())
+ })
+ }
+
+ mainRunTest(testSet.shift())
+}
+
+const afterMain = results => {
+ console.log('test\tmany\ttype\tops/s\tmean\tmedian\tmax\tmin' +
+ '\tstdev\trange\traw')
+ // get the mean, median, stddev, and range of each test
+ Object.keys(results).forEach(test => {
+ const k = results[test].sort((a, b) => a - b)
+ const min = k[0]
+ const max = k[ k.length - 1 ]
+ const range = max - min
+ const sum = k.reduce((a,b) => a + b, 0)
+ const mean = sum / k.length
+ const ops = iterations / mean * 1000
+ const devs = k.map(n => n - mean).map(n => n * n)
+ const avgdev = devs.reduce((a,b) => a + b, 0) / k.length
+ const stdev = Math.pow(avgdev, 0.5)
+ const median = k.length % 2 ? k[Math.floor(k.length / 2)] :
+ (k[k.length/2] + k[k.length/2+1])/2
+ console.log(
+ '%s\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%s', test, round(ops),
+ round(mean), round(median),
+ max, min, round(stdev), round(range),
+ k.join('\t'))
+ })
+}
+
+const round = num => Math.round(num * 1000)/1000
+
+const test = (testname, many, type) => {
+ const timer = require('./lib/timer.js')
+ const Class = getClass(testname)
+
+ const done = timer()
+ runTest(Class, many, type, iterations, done)
+}
+
+// don't blow up the stack! loop unless deferred
+const runTest = (Class, many, type, iterations, done) => {
+ const Nullsink = require('./lib/nullsink.js')
+ const Numbers = require('./lib/numbers.js')
+ const opt = {}
+ if (type === 'string')
+ opt.encoding = 'utf8'
+ else if (type === 'object')
+ opt.objectMode = true
+
+ while (iterations--) {
+ let finished = false
+ let inloop = true
+ const after = iterations === 0 ? done
+ : () => {
+ if (iterations === 0)
+ done()
+ else if (inloop)
+ finished = true
+ else
+ runTest(Class, many, type, iterations, done)
+ }
+
+ const out = new Nullsink().on('finish', after)
+ let sink = Class ? new Class(opt) : out
+
+ if (many && Class)
+ sink = sink
+ .pipe(new Class(opt))
+ .pipe(new Class(opt))
+ .pipe(new Class(opt))
+ .pipe(new Class(opt))
+
+ if (sink !== out)
+ sink.pipe(out)
+
+ new Numbers(opt).pipe(sink)
+
+ // keep tight-looping if the stream is done already
+ if (!finished) {
+ inloop = false
+ break
+ }
+ }
+}
+
+const getClass = testname =>
+ testname === 'through2' ? require('through2').obj
+ : testname === 'extend-through2' ? require('./lib/extend-through2.js')
+ : testname === 'minipass' ? require('../')
+ : testname === 'extend-minipass' ? require('./lib/extend-minipass.js')
+ : testname === 'passthrough' ? require('stream').PassThrough
+ : testname === 'extend-transform' ? require('./lib/extend-transform.js')
+ : null
+
+if (!process.argv[2])
+ main()
+else
+ test(process.argv[2], process.argv[3] === 'many', process.argv[4])
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/d.js b/deps/npm/node_modules/tar/node_modules/minipass/d.js
new file mode 100644
index 00000000000000..ceea51396015fa
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/d.js
@@ -0,0 +1,7 @@
+var MD = require('./')
+var d = new MD()
+console.log(d.write('hello'))
+console.log(d.write('goodbye'))
+d.pipe(process.stderr)
+console.log(d.write('the end'))
+console.log(d.end())
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/e.js b/deps/npm/node_modules/tar/node_modules/minipass/e.js
new file mode 100644
index 00000000000000..f1da6c74606336
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/e.js
@@ -0,0 +1,17 @@
+const MP = require('stream').PassThrough // require('./')
+const mp = new MP()
+const wait = (n) => new Promise(resolve => setTimeout(resolve, n))
+const t = require('tap')
+
+t.test('end ordering', async t => {
+ mp.on('end', _ => console.log('end'))
+ mp.end()
+ console.log('called end')
+ // mp.resume()
+ // console.log('called resume()')
+ // mp.read()
+ // console.log('called read')
+ mp.on('data', _=>_)
+ console.log('added data handler')
+ await wait(1)
+})
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/eos.js b/deps/npm/node_modules/tar/node_modules/minipass/eos.js
new file mode 100644
index 00000000000000..22507209404d9e
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/eos.js
@@ -0,0 +1,12 @@
+const EE = require('events').EventEmitter
+const eos = require('end-of-stream')
+const ee = new EE()
+ee.readable = ee.writable = true
+eos(ee, er => {
+ if (er)
+ throw er
+ console.log('stream ended')
+})
+ee.emit('finish')
+ee.emit('close')
+ee.emit('end')
diff --git a/deps/v8/test/fuzzer/wasm_asmjs/foo b/deps/npm/node_modules/tar/node_modules/minipass/foo
similarity index 100%
rename from deps/v8/test/fuzzer/wasm_asmjs/foo
rename to deps/npm/node_modules/tar/node_modules/minipass/foo
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/index.js b/deps/npm/node_modules/tar/node_modules/minipass/index.js
new file mode 100644
index 00000000000000..3a3ad412b51e39
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/index.js
@@ -0,0 +1,295 @@
+'use strict'
+const EE = require('events')
+const Yallist = require('yallist')
+const EOF = Symbol('EOF')
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
+const EMITTED_END = Symbol('emittedEnd')
+const CLOSED = Symbol('closed')
+const READ = Symbol('read')
+const FLUSH = Symbol('flush')
+const FLUSHCHUNK = Symbol('flushChunk')
+const SD = require('string_decoder').StringDecoder
+const ENCODING = Symbol('encoding')
+const DECODER = Symbol('decoder')
+const FLOWING = Symbol('flowing')
+const RESUME = Symbol('resume')
+const BUFFERLENGTH = Symbol('bufferLength')
+const BUFFERPUSH = Symbol('bufferPush')
+const BUFFERSHIFT = Symbol('bufferShift')
+const OBJECTMODE = Symbol('objectMode')
+
+class MiniPass extends EE {
+ constructor (options) {
+ super()
+ this[FLOWING] = false
+ this.pipes = new Yallist()
+ this.buffer = new Yallist()
+ this[OBJECTMODE] = options && options.objectMode || false
+ if (this[OBJECTMODE])
+ this[ENCODING] = null
+ else
+ this[ENCODING] = options && options.encoding || null
+ if (this[ENCODING] === 'buffer')
+ this[ENCODING] = null
+ this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
+ this[EOF] = false
+ this[EMITTED_END] = false
+ this[CLOSED] = false
+ this.writable = true
+ this.readable = true
+ this[BUFFERLENGTH] = 0
+ }
+
+ get bufferLength () { return this[BUFFERLENGTH] }
+
+ get encoding () { return this[ENCODING] }
+ set encoding (enc) {
+ if (this[OBJECTMODE])
+ throw new Error('cannot set encoding in objectMode')
+
+ if (this[ENCODING] && enc !== this[ENCODING] &&
+ (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
+ throw new Error('cannot change encoding')
+
+ if (this[ENCODING] !== enc) {
+ this[DECODER] = enc ? new SD(enc) : null
+ if (this.buffer.length)
+ this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
+ }
+
+ this[ENCODING] = enc
+ }
+
+ setEncoding (enc) {
+ this.encoding = enc
+ }
+
+ write (chunk, encoding, cb) {
+ if (this[EOF])
+ throw new Error('write after end')
+
+ if (typeof encoding === 'function')
+ cb = encoding, encoding = 'utf8'
+
+ if (!encoding)
+ encoding = 'utf8'
+
+ // fast-path writing strings of same encoding to a stream with
+ // an empty buffer, skipping the buffer/decoder dance
+ if (typeof chunk === 'string' && !this[OBJECTMODE] &&
+ // unless it is a string already ready for us to use
+ !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
+ chunk = new Buffer(chunk, encoding)
+ }
+
+ if (Buffer.isBuffer(chunk) && this[ENCODING])
+ chunk = this[DECODER].write(chunk)
+
+ try {
+ return this.flowing
+ ? (this.emit('data', chunk), this.flowing)
+ : (this[BUFFERPUSH](chunk), false)
+ } finally {
+ this.emit('readable')
+ if (cb)
+ cb()
+ }
+ }
+
+ read (n) {
+ try {
+ if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH])
+ return null
+
+ if (this[OBJECTMODE])
+ n = null
+
+ if (this.buffer.length > 1 && !this[OBJECTMODE]) {
+ if (this.encoding)
+ this.buffer = new Yallist([
+ Array.from(this.buffer).join('')
+ ])
+ else
+ this.buffer = new Yallist([
+ Buffer.concat(Array.from(this.buffer), this[BUFFERLENGTH])
+ ])
+ }
+
+ return this[READ](n || null, this.buffer.head.value)
+ } finally {
+ this[MAYBE_EMIT_END]()
+ }
+ }
+
+ [READ] (n, chunk) {
+ if (n === chunk.length || n === null)
+ this[BUFFERSHIFT]()
+ else {
+ this.buffer.head.value = chunk.slice(n)
+ chunk = chunk.slice(0, n)
+ this[BUFFERLENGTH] -= n
+ }
+
+ this.emit('data', chunk)
+
+ if (!this.buffer.length && !this[EOF])
+ this.emit('drain')
+
+ return chunk
+ }
+
+ end (chunk, encoding, cb) {
+ if (typeof chunk === 'function')
+ cb = chunk, chunk = null
+ if (typeof encoding === 'function')
+ cb = encoding, encoding = 'utf8'
+ if (chunk)
+ this.write(chunk, encoding)
+ if (cb)
+ this.once('end', cb)
+ this[EOF] = true
+ this.writable = false
+ if (this.flowing)
+ this[MAYBE_EMIT_END]()
+ }
+
+ // don't let the internal resume be overwritten
+ [RESUME] () {
+ this[FLOWING] = true
+ this.emit('resume')
+ if (this.buffer.length)
+ this[FLUSH]()
+ else if (this[EOF])
+ this[MAYBE_EMIT_END]()
+ else
+ this.emit('drain')
+ }
+
+ resume () {
+ return this[RESUME]()
+ }
+
+ pause () {
+ this[FLOWING] = false
+ }
+
+ get flowing () {
+ return this[FLOWING]
+ }
+
+ [BUFFERPUSH] (chunk) {
+ if (this[OBJECTMODE])
+ this[BUFFERLENGTH] += 1
+ else
+ this[BUFFERLENGTH] += chunk.length
+ return this.buffer.push(chunk)
+ }
+
+ [BUFFERSHIFT] () {
+ if (this.buffer.length) {
+ if (this[OBJECTMODE])
+ this[BUFFERLENGTH] -= 1
+ else
+ this[BUFFERLENGTH] -= this.buffer.head.value.length
+ }
+ return this.buffer.shift()
+ }
+
+ [FLUSH] () {
+ do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
+
+ if (!this.buffer.length && !this[EOF])
+ this.emit('drain')
+ }
+
+ [FLUSHCHUNK] (chunk) {
+ return chunk ? (this.emit('data', chunk), this.flowing) : false
+ }
+
+ pipe (dest, opts) {
+ if (dest === process.stdout || dest === process.stderr)
+ (opts = opts || {}).end = false
+ const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() }
+ this.pipes.push(p)
+
+ dest.on('drain', p.ondrain)
+ this[RESUME]()
+ return dest
+ }
+
+ addEventHandler (ev, fn) {
+ return this.on(ev, fn)
+ }
+
+ on (ev, fn) {
+ try {
+ return super.on(ev, fn)
+ } finally {
+ if (ev === 'data' && !this.pipes.length && !this.flowing) {
+ this[RESUME]()
+ }
+ }
+ }
+
+ get emittedEnd () {
+ return this[EMITTED_END]
+ }
+
+ [MAYBE_EMIT_END] () {
+ if (!this[EMITTED_END] && this.buffer.length === 0 && this[EOF]) {
+ this.emit('end')
+ this.emit('prefinish')
+ this.emit('finish')
+ if (this[CLOSED])
+ this.emit('close')
+ }
+ }
+
+ emit (ev, data) {
+ if (ev === 'data') {
+ if (!data)
+ return
+
+ if (this.pipes.length)
+ this.pipes.forEach(p => p.dest.write(data) || this.pause())
+ } else if (ev === 'end') {
+ if (this[DECODER]) {
+ data = this[DECODER].end()
+ if (data) {
+ this.pipes.forEach(p => p.dest.write(data))
+ super.emit('data', data)
+ }
+ }
+ this.pipes.forEach(p => {
+ p.dest.removeListener('drain', p.ondrain)
+ if (!p.opts || p.opts.end !== false)
+ p.dest.end()
+ })
+ this[EMITTED_END] = true
+ this.readable = false
+ } else if (ev === 'close') {
+ this[CLOSED] = true
+ // don't emit close before 'end' and 'finish'
+ if (!this[EMITTED_END])
+ return
+ }
+
+ const args = new Array(arguments.length)
+ args[0] = ev
+ args[1] = data
+ if (arguments.length > 2) {
+ for (let i = 2; i < arguments.length; i++) {
+ args[i] = arguments[i]
+ }
+ }
+
+ try {
+ return super.emit.apply(this, args)
+ } finally {
+ if (ev !== 'end')
+ this[MAYBE_EMIT_END]()
+ }
+ }
+}
+
+module.exports = MiniPass
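
A small illustration of the buffer-until-consumer behavior implemented above — nothing is emitted (and the first chunk is not lost) until a `'data'` handler or pipe attaches:

```js
const MiniPass = require('minipass')
const mp = new MiniPass({ encoding: 'utf8' })

console.log(mp.write('first '))  // false: no consumer yet, chunk is buffered
mp.end('chunk')
mp.on('data', c => process.stdout.write(c))  // resumes: prints 'first chunk'
```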
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/minipass-benchmarks.xlsx b/deps/npm/node_modules/tar/node_modules/minipass/minipass-benchmarks.xlsx
new file mode 100644
index 00000000000000..05e19a41b74cd5
Binary files /dev/null and b/deps/npm/node_modules/tar/node_modules/minipass/minipass-benchmarks.xlsx differ
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/package.json b/deps/npm/node_modules/tar/node_modules/minipass/package.json
new file mode 100644
index 00000000000000..52856521fcf8d7
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/package.json
@@ -0,0 +1,64 @@
+{
+ "_from": "minipass@^2.0.2",
+ "_id": "minipass@2.2.1",
+ "_inBundle": false,
+ "_integrity": "sha512-u1aUllxPJUI07cOqzR7reGmQxmCqlH88uIIsf6XZFEWgw7gXKpJdR+5R9Y3KEDmWYkdIz9wXZs3C0jOPxejk/Q==",
+ "_location": "/tar/minipass",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "minipass@^2.0.2",
+ "name": "minipass",
+ "escapedName": "minipass",
+ "rawSpec": "^2.0.2",
+ "saveSpec": null,
+ "fetchSpec": "^2.0.2"
+ },
+ "_requiredBy": [
+ "/tar",
+ "/tar/minizlib"
+ ],
+ "_resolved": "https://registry.npmjs.org/minipass/-/minipass-2.2.1.tgz",
+ "_shasum": "5ada97538b1027b4cf7213432428578cb564011f",
+ "_spec": "minipass@^2.0.2",
+ "_where": "/Users/rebecca/code/npm/node_modules/tar",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/minipass/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "yallist": "^3.0.0"
+ },
+ "deprecated": false,
+ "description": "minimal implementation of a PassThrough stream",
+ "devDependencies": {
+ "end-of-stream": "^1.4.0",
+ "tap": "^10.7.0",
+ "through2": "^2.0.3"
+ },
+ "homepage": "https://github.com/isaacs/minipass#readme",
+ "keywords": [
+ "passthrough",
+ "stream"
+ ],
+ "license": "ISC",
+ "main": "index.js",
+ "name": "minipass",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/minipass.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100"
+ },
+ "version": "2.2.1"
+}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/test/basic.js b/deps/npm/node_modules/tar/node_modules/minipass/test/basic.js
new file mode 100644
index 00000000000000..e3885c808bbb9f
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/test/basic.js
@@ -0,0 +1,438 @@
+const MiniPass = require('../')
+const t = require('tap')
+const EE = require('events').EventEmitter
+
+t.test('some basic piping and writing', async t => {
+ let mp = new MiniPass({ encoding: 'base64' })
+ t.notOk(mp.flowing)
+ mp.flowing = true
+ t.notOk(mp.flowing)
+ t.equal(mp.encoding, 'base64')
+ mp.encoding = null
+ t.equal(mp.encoding, null)
+ t.equal(mp.readable, true)
+ t.equal(mp.writable, true)
+ t.equal(mp.write('hello'), false)
+ let dest = new MiniPass()
+ let sawDestData = false
+ dest.once('data', chunk => {
+ sawDestData = true
+ t.isa(chunk, Buffer)
+ })
+ t.equal(mp.pipe(dest), dest, 'pipe returns dest')
+  t.ok(sawDestData, 'got data because pipe() flushes')
+ t.equal(mp.write('bye'), true, 'write() returns true when flowing')
+ dest.pause()
+ t.equal(mp.write('after pause'), false, 'false when dest is paused')
+ t.equal(mp.write('after false'), false, 'false when not flowing')
+ t.equal(dest.buffer.length, 1, '1 item is buffered in dest')
+ t.equal(mp.buffer.length, 1, '1 item buffered in src')
+ dest.resume()
+ t.equal(dest.buffer.length, 0, 'nothing is buffered in dest')
+ t.equal(mp.buffer.length, 0, 'nothing buffered in src')
+})
+
+t.test('unicode splitting', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ t.plan(2)
+ t.equal(mp.encoding, 'utf8')
+ mp.on('data', chunk => {
+ t.equal(chunk, butterfly)
+ })
+ const butterbuf = new Buffer([0xf0, 0x9f, 0xa6, 0x8b])
+ mp.write(butterbuf.slice(0, 1))
+ mp.write(butterbuf.slice(1, 2))
+ mp.write(butterbuf.slice(2, 3))
+ mp.write(butterbuf.slice(3, 4))
+ mp.end()
+})
+
+t.test('unicode splitting with setEncoding', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'hex' })
+ t.plan(4)
+ t.equal(mp.encoding, 'hex')
+ mp.setEncoding('hex')
+ t.equal(mp.encoding, 'hex')
+ mp.setEncoding('utf8')
+ t.equal(mp.encoding, 'utf8')
+ mp.on('data', chunk => {
+ t.equal(chunk, butterfly)
+ })
+ const butterbuf = new Buffer([0xf0, 0x9f, 0xa6, 0x8b])
+ mp.write(butterbuf.slice(0, 1))
+ mp.write(butterbuf.slice(1, 2))
+ mp.write(butterbuf.slice(2, 3))
+ mp.write(butterbuf.slice(3, 4))
+ mp.end()
+})
+
+t.test('base64 -> utf8 piping', t => {
+ t.plan(1)
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'base64' })
+ const dest = new MiniPass({ encoding: 'utf8' })
+ mp.pipe(dest)
+ let out = ''
+ dest.on('data', c => out += c)
+ dest.on('end', _ =>
+ t.equal(new Buffer(out, 'base64').toString('utf8'), butterfly))
+ mp.write(butterfly)
+ mp.end()
+})
+
+t.test('utf8 -> base64 piping', t => {
+ t.plan(1)
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ const dest = new MiniPass({ encoding: 'base64' })
+ mp.pipe(dest)
+ let out = ''
+ dest.on('data', c => out += c)
+ dest.on('end', _ =>
+ t.equal(new Buffer(out, 'base64').toString('utf8'), butterfly))
+ mp.write(butterfly)
+ mp.end()
+})
+
+t.test('read method', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ mp.on('data', c => t.equal(c, butterfly))
+ mp.pause()
+ mp.write(new Buffer(butterfly))
+ t.equal(mp.read(5), null)
+ t.equal(mp.read(0), null)
+ t.same(mp.read(2), butterfly)
+})
+
+t.test('read with no args', async t => {
+ t.test('buffer -> string', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ mp.on('data', c => t.equal(c, butterfly))
+ mp.pause()
+ const butterbuf = new Buffer(butterfly)
+ mp.write(butterbuf.slice(0, 2))
+ mp.write(butterbuf.slice(2))
+ t.same(mp.read(), butterfly)
+ t.equal(mp.read(), null)
+ })
+
+ t.test('buffer -> buffer', async t => {
+ const butterfly = new Buffer('🦋')
+ const mp = new MiniPass()
+ mp.on('data', c => t.same(c, butterfly))
+ mp.pause()
+ mp.write(butterfly.slice(0, 2))
+ mp.write(butterfly.slice(2))
+ t.same(mp.read(), butterfly)
+ t.equal(mp.read(), null)
+ })
+
+ t.test('string -> buffer', async t => {
+ const butterfly = '🦋'
+ const butterbuf = new Buffer(butterfly)
+ const mp = new MiniPass()
+ mp.on('data', c => t.same(c, butterbuf))
+ mp.pause()
+ mp.write(butterfly)
+ t.same(mp.read(), butterbuf)
+ t.equal(mp.read(), null)
+ })
+
+ t.test('string -> string', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ mp.on('data', c => t.equal(c, butterfly))
+ mp.pause()
+ mp.write(butterfly[0])
+ mp.write(butterfly[1])
+ t.same(mp.read(), butterfly)
+ t.equal(mp.read(), null)
+ })
+})
+
+t.test('partial read', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass()
+ const butterbuf = new Buffer(butterfly)
+ mp.write(butterbuf.slice(0, 1))
+ mp.write(butterbuf.slice(1, 2))
+ mp.write(butterbuf.slice(2, 3))
+ mp.write(butterbuf.slice(3, 4))
+ t.equal(mp.read(5), null)
+ t.equal(mp.read(0), null)
+ t.same(mp.read(2), butterbuf.slice(0, 2))
+ t.same(mp.read(2), butterbuf.slice(2, 4))
+})
+
+t.test('write after end', async t => {
+ const mp = new MiniPass()
+ let sawEnd = false
+ mp.on('end', _ => sawEnd = true)
+ mp.end()
+ t.throws(_ => mp.write('nope'))
+ t.notOk(sawEnd, 'should not get end event yet (not flowing)')
+ mp.resume()
+ t.ok(sawEnd, 'should get end event after resume()')
+})
+
+t.test('write cb', async t => {
+ const mp = new MiniPass()
+ let calledCb = false
+ mp.write('ok', () => calledCb = true)
+ t.ok(calledCb)
+})
+
+t.test('end with chunk', async t => {
+ let out = ''
+ const mp = new MiniPass({ encoding: 'utf8' })
+ let sawEnd = false
+ mp.on('end', _ => sawEnd = true)
+ mp.addEventHandler('data', c => out += c)
+ let endCb = false
+ mp.end('ok', _ => endCb = true)
+ t.equal(out, 'ok')
+ t.ok(sawEnd, 'should see end event')
+ t.ok(endCb, 'end cb should get called')
+})
+
+t.test('no drain if could not entirely drain on resume', async t => {
+ const mp = new MiniPass()
+ const dest = new MiniPass({ encoding: 'buffer' })
+ t.equal(mp.write('foo'), false)
+ t.equal(mp.write('bar'), false)
+ t.equal(mp.write('baz'), false)
+ t.equal(mp.write('qux'), false)
+ mp.on('drain', _ => t.fail('should not drain'))
+ mp.pipe(dest)
+})
+
+t.test('end with chunk pending', async t => {
+ const mp = new MiniPass()
+ t.equal(mp.write('foo'), false)
+ t.equal(mp.write('626172', 'hex'), false)
+ t.equal(mp.write('baz'), false)
+ t.equal(mp.write('qux'), false)
+ let sawEnd = false
+ mp.on('end', _ => sawEnd = true)
+ let endCb = false
+ mp.end(_ => endCb = true)
+ t.notOk(endCb, 'endcb should not happen yet')
+ t.notOk(sawEnd, 'should not see end yet')
+ let out = ''
+ mp.on('data', c => out += c)
+ t.ok(sawEnd, 'see end after flush')
+ t.ok(endCb, 'end cb after flush')
+ t.equal(out, 'foobarbazqux')
+})
+
+t.test('pipe to stderr does not throw', t => {
+ const spawn = require('child_process').spawn
+ const module = JSON.stringify(require.resolve('../'))
+ const fs = require('fs')
+ const file = __dirname + '/prog.js'
+ fs.writeFileSync(file, `
+ const MP = require(${module})
+ const mp = new MP()
+ mp.pipe(process.stderr)
+ mp.end("hello")
+ `)
+ let err = ''
+ return new Promise(res => {
+ const child = spawn(process.execPath, [file])
+ child.stderr.on('data', c => err += c)
+ child.on('close', (code, signal) => {
+ t.equal(code, 0)
+ t.equal(signal, null)
+ t.equal(err, 'hello')
+ fs.unlinkSync(file)
+ res()
+ })
+ })
+})
+
+t.test('emit works with many args', t => {
+ const mp = new MiniPass()
+ t.plan(2)
+ mp.on('foo', function (a, b, c, d, e, f, g) {
+ t.same([a,b,c,d,e,f,g], [1,2,3,4,5,6,7])
+ t.equal(arguments.length, 7)
+ })
+ mp.emit('foo', 1, 2, 3, 4, 5, 6, 7)
+})
+
+t.test('emit drain on resume, even if no flush', t => {
+ const mp = new MiniPass()
+ mp.encoding = 'utf8'
+
+ const chunks = []
+ class SlowStream extends EE {
+ write (chunk) {
+ chunks.push(chunk)
+ setTimeout(_ => this.emit('drain'))
+ return false
+ }
+ end () { return this.write() }
+ }
+
+ const ss = new SlowStream()
+
+ mp.pipe(ss)
+ t.ok(mp.flowing, 'flowing, because piped')
+ t.equal(mp.write('foo'), false, 'write() returns false, backpressure')
+ t.equal(mp.buffer.length, 0, 'buffer len is 0')
+ t.equal(mp.flowing, false, 'flowing false, awaiting drain')
+ t.same(chunks, ['foo'], 'chunk made it through')
+ mp.once('drain', _ => {
+ t.pass('received mp drain event')
+ t.end()
+ })
+})
+
+t.test('save close for end', t => {
+ const mp = new MiniPass()
+ let ended = false
+ mp.on('close', _ => {
+ t.equal(ended, true, 'end before close')
+ t.end()
+ })
+ mp.on('end', _ => {
+ t.equal(ended, false, 'only end once')
+ ended = true
+ })
+
+ mp.emit('close')
+ mp.end('foo')
+ t.equal(ended, false, 'no end until flushed')
+ mp.resume()
+})
+
+t.test('eos works', t => {
+ const eos = require('end-of-stream')
+ const mp = new MiniPass()
+
+ eos(mp, er => {
+ if (er)
+ throw er
+ t.end()
+ })
+
+ mp.emit('close')
+ mp.end('foo')
+ mp.resume()
+})
+
+t.test('bufferLength property', t => {
+ const eos = require('end-of-stream')
+ const mp = new MiniPass()
+ mp.write('a')
+ mp.write('a')
+ mp.write('a')
+ mp.write('a')
+ mp.write('a')
+ mp.write('a')
+
+ t.equal(mp.bufferLength, 6)
+ t.equal(mp.read(7), null)
+ t.equal(mp.read(3).toString(), 'aaa')
+ t.equal(mp.bufferLength, 3)
+ t.equal(mp.read().toString(), 'aaa')
+ t.equal(mp.bufferLength, 0)
+ t.end()
+})
+
+t.test('emit resume event on resume', t => {
+ const mp = new MiniPass()
+ t.plan(3)
+ mp.on('resume', _ => t.pass('got resume event'))
+ mp.end('asdf')
+ t.equal(mp.flowing, false, 'not flowing yet')
+ mp.resume()
+ t.equal(mp.flowing, true, 'flowing after resume')
+})
+
+t.test('objectMode', t => {
+ const mp = new MiniPass({ objectMode: true })
+ const a = { a: 1 }
+ const b = { b: 1 }
+ const out = []
+ mp.on('data', c => out.push(c))
+ mp.on('end', _ => {
+ t.equal(out.length, 2)
+ t.equal(out[0], a)
+ t.equal(out[1], b)
+ t.same(out, [ { a: 1 }, { b: 1 } ], 'objs not munged')
+ t.end()
+ })
+ t.ok(mp.write(a))
+ t.ok(mp.write(b))
+ mp.end()
+})
+
+t.test('objectMode no encoding', t => {
+ const mp = new MiniPass({
+ objectMode: true,
+ encoding: 'utf8'
+ })
+ t.equal(mp.encoding, null)
+ const a = { a: 1 }
+ const b = { b: 1 }
+ const out = []
+ mp.on('data', c => out.push(c))
+ mp.on('end', _ => {
+ t.equal(out.length, 2)
+ t.equal(out[0], a)
+ t.equal(out[1], b)
+ t.same(out, [ { a: 1 }, { b: 1 } ], 'objs not munged')
+ t.end()
+ })
+ t.ok(mp.write(a))
+ t.ok(mp.write(b))
+ mp.end()
+})
+
+t.test('objectMode read() and buffering', t => {
+ const mp = new MiniPass({ objectMode: true })
+ const a = { a: 1 }
+ const b = { b: 1 }
+ t.notOk(mp.write(a))
+ t.notOk(mp.write(b))
+ t.equal(mp.read(2), a)
+ t.equal(mp.read(), b)
+ t.end()
+})
+
+t.test('set encoding in object mode throws', async t =>
+ t.throws(_ => new MiniPass({ objectMode: true }).encoding = 'utf8',
+ new Error('cannot set encoding in objectMode')))
+
+t.test('set encoding again throws', async t =>
+ t.throws(_ => {
+ const mp = new MiniPass({ encoding: 'hex' })
+ mp.write('ok')
+ mp.encoding = 'utf8'
+ }, new Error('cannot change encoding')))
+
+t.test('set encoding with existing buffer', async t => {
+ const mp = new MiniPass()
+ const butterfly = '🦋'
+ const butterbuf = new Buffer(butterfly)
+ mp.write(butterbuf.slice(0, 1))
+ mp.write(butterbuf.slice(1, 2))
+ mp.setEncoding('utf8')
+ mp.write(butterbuf.slice(2))
+ t.equal(mp.read(), butterfly)
+})
+
+t.test('end:false', async t => {
+ t.plan(1)
+ const mp = new MiniPass({ encoding: 'utf8' })
+ const d = new MiniPass({ encoding: 'utf8' })
+ d.end = () => t.threw(new Error('no end no exit no way out'))
+ d.on('data', c => t.equal(c, 'this is fine'))
+ mp.pipe(d, { end: false })
+ mp.end('this is fine')
+})
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/test/empty-end.js b/deps/npm/node_modules/tar/node_modules/minipass/test/empty-end.js
new file mode 100644
index 00000000000000..42387d51af7732
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/test/empty-end.js
@@ -0,0 +1,38 @@
+const t = require('tap')
+const MP = require('../')
+
+t.test('emit end on resume', async t => {
+ const list = []
+ const mp = new MP()
+ mp.on('end', _ => list.push('end'))
+ mp.end()
+ t.notOk(mp.emittedEnd)
+ list.push('called end')
+ mp.resume()
+ t.ok(mp.emittedEnd)
+ list.push('called resume')
+ t.same(list, ['called end', 'end', 'called resume'])
+})
+
+t.test('emit end on read()', async t => {
+ const list = []
+ const mp = new MP()
+ mp.on('end', _ => list.push('end'))
+ mp.end()
+ list.push('called end')
+
+ mp.read()
+ list.push('called read()')
+ t.same(list, ['called end', 'end', 'called read()'])
+})
+
+t.test('emit end on data handler', async t => {
+ const list = []
+ const mp = new MP()
+ mp.on('end', _ => list.push('end'))
+ mp.end()
+ list.push('called end')
+ mp.on('data', _=>_)
+ list.push('added data handler')
+ t.same(list, ['called end', 'end', 'added data handler'])
+})
diff --git a/deps/npm/node_modules/tar/node_modules/minizlib/LICENSE b/deps/npm/node_modules/tar/node_modules/minizlib/LICENSE
new file mode 100644
index 00000000000000..ffce7383f53e7f
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minizlib/LICENSE
@@ -0,0 +1,26 @@
+Minizlib was created by Isaac Z. Schlueter.
+It is a derivative work of the Node.js project.
+
+"""
+Copyright Isaac Z. Schlueter and Contributors
+Copyright Node.js contributors. All rights reserved.
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""
diff --git a/deps/npm/node_modules/tar/node_modules/minizlib/README.md b/deps/npm/node_modules/tar/node_modules/minizlib/README.md
new file mode 100644
index 00000000000000..2b585545efe14b
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minizlib/README.md
@@ -0,0 +1,44 @@
+# minizlib
+
+A tiny fast zlib stream built on [minipass](http://npm.im/minipass)
+and Node.js's zlib binding.
+
+This module was created to serve the needs of
+[node-tar](http://npm.im/tar) v2. If your needs are different, then
+it may not be for you.
+
+## How does this differ from the streams in `require('zlib')`?
+
+First, there are no convenience methods to compress or decompress a
+buffer. If you want those, use the built-in `zlib` module. This is
+only streams.
+
+This module compresses and decompresses the data as fast as you feed
+it in. It is synchronous, and runs on the main process thread. Zlib
+operations can be CPU-intensive, but they're very fast, and doing it this
+way means much less bookkeeping and artificial deferral.
+
+Node's built in zlib streams are built on top of `stream.Transform`.
+They do the maximally safe thing with respect to consistent
+asynchrony, buffering, and backpressure.
+
+This module _does_ support backpressure, and will buffer output chunks
+that are not consumed, but is less of a mediator between the input and
+output. There are no high or low watermarks, no state objects, and no
+artificial async deferrals. It will not protect you from Zalgo.
+
+If you write, data will be emitted right away. If you write
+everything synchronously in one tick, and you are listening to the
+`data` event to consume it, then it'll all be emitted right away in
+that same tick. If you want data to be emitted in the next tick, then
+write it in the next tick.
+
+It is thus the responsibility of the reader and writer to manage their
+own consumption and process execution flow.
+
+The goal is to compress and decompress as fast as possible, even for
+files that are too large to store all in one buffer.
+
+The API is very similar to the built-in zlib module. There are
+classes that you instantiate with `new` and they are streams that can
+be piped together.
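+
+For example, a minimal round-trip sketch (using the classes exported
+by this module; note that, as described above, all of the output is
+emitted synchronously):
+
+```js
+const zlib = require('minizlib')
+
+const chunks = []
+const gzip = new zlib.Gzip()
+gzip.on('data', c => chunks.push(c))
+gzip.end('hello, world')
+// emission is synchronous, so the compressed output has
+// already been collected by this point
+
+const gunzip = new zlib.Gunzip({ encoding: 'utf8' })
+gunzip.on('data', c => console.log(c)) // prints 'hello, world'
+gunzip.end(Buffer.concat(chunks))
+```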
diff --git a/deps/npm/node_modules/tar/node_modules/minizlib/constants.js b/deps/npm/node_modules/tar/node_modules/minizlib/constants.js
new file mode 100644
index 00000000000000..4edffde86f8528
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minizlib/constants.js
@@ -0,0 +1,46 @@
+module.exports = Object.freeze({
+ Z_NO_FLUSH: 0,
+ Z_PARTIAL_FLUSH: 1,
+ Z_SYNC_FLUSH: 2,
+ Z_FULL_FLUSH: 3,
+ Z_FINISH: 4,
+ Z_BLOCK: 5,
+ Z_OK: 0,
+ Z_STREAM_END: 1,
+ Z_NEED_DICT: 2,
+ Z_ERRNO: -1,
+ Z_STREAM_ERROR: -2,
+ Z_DATA_ERROR: -3,
+ Z_MEM_ERROR: -4,
+ Z_BUF_ERROR: -5,
+ Z_VERSION_ERROR: -6,
+ Z_NO_COMPRESSION: 0,
+ Z_BEST_SPEED: 1,
+ Z_BEST_COMPRESSION: 9,
+ Z_DEFAULT_COMPRESSION: -1,
+ Z_FILTERED: 1,
+ Z_HUFFMAN_ONLY: 2,
+ Z_RLE: 3,
+ Z_FIXED: 4,
+ Z_DEFAULT_STRATEGY: 0,
+ ZLIB_VERNUM: 4736,
+ DEFLATE: 1,
+ INFLATE: 2,
+ GZIP: 3,
+ GUNZIP: 4,
+ DEFLATERAW: 5,
+ INFLATERAW: 6,
+ UNZIP: 7,
+ Z_MIN_WINDOWBITS: 8,
+ Z_MAX_WINDOWBITS: 15,
+ Z_DEFAULT_WINDOWBITS: 15,
+ Z_MIN_CHUNK: 64,
+ Z_MAX_CHUNK: Infinity,
+ Z_DEFAULT_CHUNK: 16384,
+ Z_MIN_MEMLEVEL: 1,
+ Z_MAX_MEMLEVEL: 9,
+ Z_DEFAULT_MEMLEVEL: 8,
+ Z_MIN_LEVEL: -1,
+ Z_MAX_LEVEL: 9,
+ Z_DEFAULT_LEVEL: -1
+})
diff --git a/deps/npm/node_modules/tar/node_modules/minizlib/index.js b/deps/npm/node_modules/tar/node_modules/minizlib/index.js
new file mode 100644
index 00000000000000..7d595dec4f505b
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minizlib/index.js
@@ -0,0 +1,333 @@
+'use strict'
+
+const assert = require('assert')
+const Buffer = require('buffer').Buffer
+const binding = process.binding('zlib')
+
+const constants = exports.constants = require('./constants.js')
+const MiniPass = require('minipass')
+
+// translation table for return codes.
+const codes = new Map([
+ [constants.Z_OK, 'Z_OK'],
+ [constants.Z_STREAM_END, 'Z_STREAM_END'],
+ [constants.Z_NEED_DICT, 'Z_NEED_DICT'],
+ [constants.Z_ERRNO, 'Z_ERRNO'],
+ [constants.Z_STREAM_ERROR, 'Z_STREAM_ERROR'],
+ [constants.Z_DATA_ERROR, 'Z_DATA_ERROR'],
+ [constants.Z_MEM_ERROR, 'Z_MEM_ERROR'],
+ [constants.Z_BUF_ERROR, 'Z_BUF_ERROR'],
+ [constants.Z_VERSION_ERROR, 'Z_VERSION_ERROR']
+])
+
+const validFlushFlags = new Set([
+ constants.Z_NO_FLUSH,
+ constants.Z_PARTIAL_FLUSH,
+ constants.Z_SYNC_FLUSH,
+ constants.Z_FULL_FLUSH,
+ constants.Z_FINISH,
+ constants.Z_BLOCK
+])
+
+const strategies = new Set([
+ constants.Z_FILTERED,
+ constants.Z_HUFFMAN_ONLY,
+ constants.Z_RLE,
+ constants.Z_FIXED,
+ constants.Z_DEFAULT_STRATEGY
+])
+
+// the Zlib class they all inherit from
+// This thing manages the queue of requests, and returns
+// true or false if there is anything in the queue when
+// you call the .write() method.
+const _opts = Symbol('opts')
+const _chunkSize = Symbol('chunkSize')
+const _flushFlag = Symbol('flushFlag')
+const _finishFlush = Symbol('finishFlush')
+const _handle = Symbol('handle')
+const _hadError = Symbol('hadError')
+const _buffer = Symbol('buffer')
+const _offset = Symbol('offset')
+const _level = Symbol('level')
+const _strategy = Symbol('strategy')
+const _ended = Symbol('ended')
+
+class Zlib extends MiniPass {
+ constructor (opts, mode) {
+ super(opts)
+ this[_ended] = false
+ this[_opts] = opts = opts || {}
+ this[_chunkSize] = opts.chunkSize || constants.Z_DEFAULT_CHUNK
+ if (opts.flush && !validFlushFlags.has(opts.flush)) {
+ throw new Error('Invalid flush flag: ' + opts.flush)
+ }
+ if (opts.finishFlush && !validFlushFlags.has(opts.finishFlush)) {
+ throw new Error('Invalid flush flag: ' + opts.finishFlush)
+ }
+
+ this[_flushFlag] = opts.flush || constants.Z_NO_FLUSH
+ this[_finishFlush] = typeof opts.finishFlush !== 'undefined' ?
+ opts.finishFlush : constants.Z_FINISH
+
+ if (opts.chunkSize) {
+ if (opts.chunkSize < constants.Z_MIN_CHUNK) {
+ throw new Error('Invalid chunk size: ' + opts.chunkSize)
+ }
+ }
+
+ if (opts.windowBits) {
+ if (opts.windowBits < constants.Z_MIN_WINDOWBITS ||
+ opts.windowBits > constants.Z_MAX_WINDOWBITS) {
+ throw new Error('Invalid windowBits: ' + opts.windowBits)
+ }
+ }
+
+ if (opts.level) {
+ if (opts.level < constants.Z_MIN_LEVEL ||
+ opts.level > constants.Z_MAX_LEVEL) {
+ throw new Error('Invalid compression level: ' + opts.level)
+ }
+ }
+
+ if (opts.memLevel) {
+ if (opts.memLevel < constants.Z_MIN_MEMLEVEL ||
+ opts.memLevel > constants.Z_MAX_MEMLEVEL) {
+ throw new Error('Invalid memLevel: ' + opts.memLevel)
+ }
+ }
+
+ if (opts.strategy && !(strategies.has(opts.strategy)))
+ throw new Error('Invalid strategy: ' + opts.strategy)
+
+ if (opts.dictionary) {
+ if (!(opts.dictionary instanceof Buffer)) {
+ throw new Error('Invalid dictionary: it should be a Buffer instance')
+ }
+ }
+
+ this[_handle] = new binding.Zlib(mode)
+
+ this[_hadError] = false
+ this[_handle].onerror = (message, errno) => {
+ // there is no way to cleanly recover.
+ // continuing only obscures problems.
+ this.close()
+ this[_hadError] = true
+
+ const error = new Error(message)
+ error.errno = errno
+ error.code = codes.get(errno)
+ this.emit('error', error)
+ }
+
+ const level = typeof opts.level === 'number' ? opts.level
+ : constants.Z_DEFAULT_COMPRESSION
+
+ var strategy = typeof opts.strategy === 'number' ? opts.strategy
+ : constants.Z_DEFAULT_STRATEGY
+
+ this[_handle].init(opts.windowBits || constants.Z_DEFAULT_WINDOWBITS,
+ level,
+ opts.memLevel || constants.Z_DEFAULT_MEMLEVEL,
+ strategy,
+ opts.dictionary)
+
+ this[_buffer] = Buffer.allocUnsafe(this[_chunkSize])
+ this[_offset] = 0
+ this[_level] = level
+ this[_strategy] = strategy
+
+ this.once('end', this.close)
+ }
+
+ close () {
+ if (this[_handle]) {
+ this[_handle].close()
+ this[_handle] = null
+ this.emit('close')
+ }
+ }
+
+ params (level, strategy) {
+ if (!this[_handle])
+ throw new Error('cannot switch params when binding is closed')
+
+ // no way to test this without also not supporting params at all
+ /* istanbul ignore if */
+ if (!this[_handle].params)
+ throw new Error('not supported in this implementation')
+
+ if (level < constants.Z_MIN_LEVEL ||
+ level > constants.Z_MAX_LEVEL) {
+ throw new RangeError('Invalid compression level: ' + level)
+ }
+
+ if (!(strategies.has(strategy)))
+ throw new TypeError('Invalid strategy: ' + strategy)
+
+ if (this[_level] !== level || this[_strategy] !== strategy) {
+ this.flush(constants.Z_SYNC_FLUSH)
+ assert(this[_handle], 'zlib binding closed')
+ this[_handle].params(level, strategy)
+ /* istanbul ignore else */
+ if (!this[_hadError]) {
+ this[_level] = level
+ this[_strategy] = strategy
+ }
+ }
+ }
+
+ reset () {
+ assert(this[_handle], 'zlib binding closed')
+ return this[_handle].reset()
+ }
+
+ flush (kind) {
+ if (kind === undefined)
+ kind = constants.Z_FULL_FLUSH
+
+ if (this.ended)
+ return
+
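+    // swap in the requested flush flag, push an empty chunk through
+    // the binding so it flushes, then restore the streaming flag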
+ const flushFlag = this[_flushFlag]
+ this[_flushFlag] = kind
+ this.write(Buffer.alloc(0))
+ this[_flushFlag] = flushFlag
+ }
+
+ end (chunk, encoding, cb) {
+ if (chunk)
+ this.write(chunk, encoding)
+ this.flush(this[_finishFlush])
+ this[_ended] = true
+ return super.end(null, null, cb)
+ }
+
+ get ended () {
+ return this[_ended]
+ }
+
+ write (chunk, encoding, cb) {
+ // process the chunk using the sync process
+ // then super.write() all the outputted chunks
+ if (typeof encoding === 'function')
+ cb = encoding, encoding = 'utf8'
+
+ if (typeof chunk === 'string')
+ chunk = new Buffer(chunk, encoding)
+
+ let availInBefore = chunk && chunk.length
+ let availOutBefore = this[_chunkSize] - this[_offset]
+ let inOff = 0 // the offset of the input buffer
+ const flushFlag = this[_flushFlag]
+ let writeReturn = true
+
+ assert(this[_handle], 'zlib binding closed')
+ do {
+ let res = this[_handle].writeSync(
+ flushFlag,
+ chunk, // in
+ inOff, // in_off
+ availInBefore, // in_len
+ this[_buffer], // out
+      this[_offset], // out_off
+ availOutBefore // out_len
+ )
+ if (this[_hadError])
+ break
+
+ let availInAfter = res[0]
+ let availOutAfter = res[1]
+
+ const have = availOutBefore - availOutAfter
+ assert(have >= 0, 'have should not go down')
+
+ if (have > 0) {
+ const out = this[_buffer].slice(
+ this[_offset], this[_offset] + have
+ )
+
+ this[_offset] += have
+ // serve some output to the consumer.
+ writeReturn = super.write(out) && writeReturn
+ }
+
+      // if we've exhausted the output buffer or filled the chunk, allocate a new one
+ if (availOutAfter === 0 || this[_offset] >= this[_chunkSize]) {
+ availOutBefore = this[_chunkSize]
+ this[_offset] = 0
+ this[_buffer] = Buffer.allocUnsafe(this[_chunkSize])
+ }
+
+ if (availOutAfter === 0) {
+ // Not actually done. Need to reprocess.
+ // Also, update the availInBefore to the availInAfter value,
+ // so that if we have to hit it a third (fourth, etc.) time,
+ // it'll have the correct byte counts.
+ inOff += (availInBefore - availInAfter)
+ availInBefore = availInAfter
+ continue
+ }
+ break
+ } while (!this[_hadError])
+
+ if (cb)
+ cb()
+ return writeReturn
+ }
+}
+
+// minimal 2-byte header
+class Deflate extends Zlib {
+ constructor (opts) {
+ super(opts, constants.DEFLATE)
+ }
+}
+
+class Inflate extends Zlib {
+ constructor (opts) {
+ super(opts, constants.INFLATE)
+ }
+}
+
+// gzip - bigger header, same deflate compression
+class Gzip extends Zlib {
+ constructor (opts) {
+ super(opts, constants.GZIP)
+ }
+}
+
+class Gunzip extends Zlib {
+ constructor (opts) {
+ super(opts, constants.GUNZIP)
+ }
+}
+
+// raw - no header
+class DeflateRaw extends Zlib {
+ constructor (opts) {
+ super(opts, constants.DEFLATERAW)
+ }
+}
+
+class InflateRaw extends Zlib {
+ constructor (opts) {
+ super(opts, constants.INFLATERAW)
+ }
+}
+
+// auto-detect header.
+class Unzip extends Zlib {
+ constructor (opts) {
+ super(opts, constants.UNZIP)
+ }
+}
+
+exports.Deflate = Deflate
+exports.Inflate = Inflate
+exports.Gzip = Gzip
+exports.Gunzip = Gunzip
+exports.DeflateRaw = DeflateRaw
+exports.InflateRaw = InflateRaw
+exports.Unzip = Unzip
diff --git a/deps/npm/node_modules/tar/node_modules/minizlib/package.json b/deps/npm/node_modules/tar/node_modules/minizlib/package.json
new file mode 100644
index 00000000000000..ae7fb898d4631c
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minizlib/package.json
@@ -0,0 +1,71 @@
+{
+ "_from": "minizlib@^1.0.3",
+ "_id": "minizlib@1.0.3",
+ "_inBundle": false,
+ "_integrity": "sha1-1cGr93vhVGGZUuJTM27Mq5sqMvU=",
+ "_location": "/tar/minizlib",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "minizlib@^1.0.3",
+ "name": "minizlib",
+ "escapedName": "minizlib",
+ "rawSpec": "^1.0.3",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.3"
+ },
+ "_requiredBy": [
+ "/tar"
+ ],
+ "_resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.0.3.tgz",
+ "_shasum": "d5c1abf77be154619952e253336eccab9b2a32f5",
+ "_spec": "minizlib@^1.0.3",
+ "_where": "/Users/rebecca/code/npm/node_modules/tar",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/minizlib/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "minipass": "^2.0.0"
+ },
+ "deprecated": false,
+ "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
+ "devDependencies": {
+ "tap": "^10.3.0"
+ },
+ "files": [
+ "index.js",
+ "constants.js"
+ ],
+ "homepage": "https://github.com/isaacs/minizlib#readme",
+ "keywords": [
+ "zlib",
+ "gzip",
+ "gunzip",
+ "deflate",
+ "inflate",
+ "compression",
+ "zip",
+ "unzip"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "minizlib",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/minizlib.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100 -J"
+ },
+ "version": "1.0.3"
+}
diff --git a/deps/npm/node_modules/tar/node_modules/yallist/LICENSE b/deps/npm/node_modules/tar/node_modules/yallist/LICENSE
new file mode 100644
index 00000000000000..19129e315fe593
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/yallist/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/tar/node_modules/yallist/README.md b/deps/npm/node_modules/tar/node_modules/yallist/README.md
new file mode 100644
index 00000000000000..f5861018696688
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/yallist/README.md
@@ -0,0 +1,204 @@
+# yallist
+
+Yet Another Linked List
+
+There are many doubly-linked list implementations like it, but this
+one is mine.
+
+For when an array would be too big, and a Map can't be iterated in
+reverse order.
+
+
+[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist)
+
+## basic usage
+
+```javascript
+var yallist = require('yallist')
+var myList = yallist.create([1, 2, 3])
+myList.push('foo')
+myList.unshift('bar')
+// of course pop() and shift() are there, too
+console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo']
+myList.forEach(function (k) {
+ // walk the list head to tail
+})
+myList.forEachReverse(function (k, index, list) {
+ // walk the list tail to head
+})
+var myDoubledList = myList.map(function (k) {
+ return k + k
+})
+// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo']
+// mapReverse is also a thing
+var myDoubledListReverse = myList.mapReverse(function (k) {
+ return k + k
+}) // ['foofoo', 6, 4, 2, 'barbar']
+
+var reduced = myList.reduce(function (set, entry) {
+ set += entry
+ return set
+}, 'start')
+console.log(reduced) // 'startfoo123bar'
+```
+
+## api
+
+The whole API is considered "public".
+
+Functions with the same name as an Array method work more or less the
+same way.
+
+There are reverse versions of most things because that's the point.
+
+### Yallist
+
+Default export, the class that holds and manages a list.
+
+Call it with either a forEach-able (like an array) or a set of
+arguments, to initialize the list.
+
+The Array-ish methods all act like you'd expect. No magic length,
+though, so if you change that it won't automatically prune or add
+empty spots.
+
+### Yallist.create(..)
+
+Alias for Yallist function. Some people like factories.
+
+#### yallist.head
+
+The first node in the list
+
+#### yallist.tail
+
+The last node in the list
+
+#### yallist.length
+
+The number of nodes in the list. (Change this at your peril. It is
+not magic like Array length.)
+
+#### yallist.toArray()
+
+Convert the list to an array.
+
+#### yallist.forEach(fn, [thisp])
+
+Call a function on each item in the list.
+
+#### yallist.forEachReverse(fn, [thisp])
+
+Call a function on each item in the list, in reverse order.
+
+#### yallist.get(n)
+
+Get the data at position `n` in the list. If you use this a lot,
+you're probably better off just using an Array.
+
+#### yallist.getReverse(n)
+
+Get the data at position `n`, counting from the tail.
+
+#### yallist.map(fn, thisp)
+
+Create a new Yallist with the result of calling the function on each
+item.
+
+#### yallist.mapReverse(fn, thisp)
+
+Same as `map`, but in reverse.
+
+#### yallist.pop()
+
+Get the data from the list tail, and remove the tail from the list.
+
+#### yallist.push(item, ...)
+
+Insert one or more items to the tail of the list.
+
+#### yallist.reduce(fn, initialValue)
+
+Like Array.reduce.
+
+#### yallist.reduceReverse
+
+Like Array.reduce, but in reverse.
+
+#### yallist.reverse
+
+Reverse the list in place.
+
+#### yallist.shift()
+
+Get the data from the list head, and remove the head from the list.
+
+#### yallist.slice([from], [to])
+
+Just like Array.slice, but returns a new Yallist.
+
+#### yallist.sliceReverse([from], [to])
+
+Just like yallist.slice, but the result is returned in reverse.
+
+#### yallist.toArray()
+
+Create an array representation of the list.
+
+#### yallist.toArrayReverse()
+
+Create a reversed array representation of the list.
+
+#### yallist.unshift(item, ...)
+
+Insert one or more items to the head of the list.
+
+#### yallist.unshiftNode(node)
+
+Move a Node object to the front of the list. (That is, pull it out of
+wherever it lives, and make it the new head.)
+
+If the node belongs to a different list, then that list will remove it
+first.
+
+#### yallist.pushNode(node)
+
+Move a Node object to the end of the list. (That is, pull it out of
+wherever it lives, and make it the new tail.)
+
+If the node belongs to a list already, then that list will remove it
+first.
+
+#### yallist.removeNode(node)
+
+Remove a node from the list, preserving referential integrity of head
+and tail and other nodes.
+
+Will throw an error if you try to have a list remove a node that
+doesn't belong to it.
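+
+For example, a small sketch of these node-level methods:
+
+```javascript
+var Yallist = require('yallist')
+var list = Yallist.create(['a', 'b', 'c'])
+// move the tail node to the head without allocating a new node
+list.unshiftNode(list.tail)
+console.log(list.toArray()) // ['c', 'a', 'b']
+```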
+
+### Yallist.Node
+
+The class that holds the data and whose links actually form the list.
+
+Call with `var n = new Node(value, previousNode, nextNode)`
+
+Note that if you do direct operations on Nodes themselves, it's very
+easy to get into weird states where the list is broken. Be careful :)
+
+#### node.next
+
+The next node in the list.
+
+#### node.prev
+
+The previous node in the list.
+
+#### node.value
+
+The data the node contains.
+
+#### node.list
+
+The list to which this node belongs. (Null if it does not belong to
+any list.)
diff --git a/deps/npm/node_modules/tar/node_modules/yallist/iterator.js b/deps/npm/node_modules/tar/node_modules/yallist/iterator.js
new file mode 100644
index 00000000000000..9149b364889d1e
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/yallist/iterator.js
@@ -0,0 +1,8 @@
+'use strict'
+var Yallist = require('./yallist.js')
+
+Yallist.prototype[Symbol.iterator] = function* () {
+ for (let walker = this.head; walker; walker = walker.next) {
+ yield walker.value
+ }
+}
diff --git a/deps/npm/node_modules/tar/node_modules/yallist/package.json b/deps/npm/node_modules/tar/node_modules/yallist/package.json
new file mode 100644
index 00000000000000..c2a8e0d3995a51
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/yallist/package.json
@@ -0,0 +1,63 @@
+{
+ "_from": "yallist@^3.0.2",
+ "_id": "yallist@3.0.2",
+ "_inBundle": false,
+ "_integrity": "sha1-hFK0u36Dx8GI2AQcGoN8dz1ti7k=",
+ "_location": "/tar/yallist",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "yallist@^3.0.2",
+ "name": "yallist",
+ "escapedName": "yallist",
+ "rawSpec": "^3.0.2",
+ "saveSpec": null,
+ "fetchSpec": "^3.0.2"
+ },
+ "_requiredBy": [
+ "/tar",
+ "/tar/minipass"
+ ],
+ "_resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.2.tgz",
+ "_shasum": "8452b4bb7e83c7c188d8041c1a837c773d6d8bb9",
+ "_spec": "yallist@^3.0.2",
+ "_where": "/Users/rebecca/code/npm/node_modules/tar",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/yallist/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {},
+ "deprecated": false,
+ "description": "Yet Another Linked List",
+ "devDependencies": {
+ "tap": "^10.3.0"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "files": [
+ "yallist.js",
+ "iterator.js"
+ ],
+ "homepage": "https://github.com/isaacs/yallist#readme",
+ "license": "ISC",
+ "main": "yallist.js",
+ "name": "yallist",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/yallist.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100"
+ },
+ "version": "3.0.2"
+}
diff --git a/deps/npm/node_modules/tar/node_modules/yallist/yallist.js b/deps/npm/node_modules/tar/node_modules/yallist/yallist.js
new file mode 100644
index 00000000000000..4805bc69fa760e
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/yallist/yallist.js
@@ -0,0 +1,376 @@
+'use strict'
+module.exports = Yallist
+
+Yallist.Node = Node
+Yallist.create = Yallist
+
+function Yallist (list) {
+ var self = this
+ if (!(self instanceof Yallist)) {
+ self = new Yallist()
+ }
+
+ self.tail = null
+ self.head = null
+ self.length = 0
+
+ if (list && typeof list.forEach === 'function') {
+ list.forEach(function (item) {
+ self.push(item)
+ })
+ } else if (arguments.length > 0) {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ self.push(arguments[i])
+ }
+ }
+
+ return self
+}
+
+Yallist.prototype.removeNode = function (node) {
+ if (node.list !== this) {
+ throw new Error('removing node which does not belong to this list')
+ }
+
+ var next = node.next
+ var prev = node.prev
+
+ if (next) {
+ next.prev = prev
+ }
+
+ if (prev) {
+ prev.next = next
+ }
+
+ if (node === this.head) {
+ this.head = next
+ }
+ if (node === this.tail) {
+ this.tail = prev
+ }
+
+ node.list.length--
+ node.next = null
+ node.prev = null
+ node.list = null
+}
+
+Yallist.prototype.unshiftNode = function (node) {
+ if (node === this.head) {
+ return
+ }
+
+ if (node.list) {
+ node.list.removeNode(node)
+ }
+
+ var head = this.head
+ node.list = this
+ node.next = head
+ if (head) {
+ head.prev = node
+ }
+
+ this.head = node
+ if (!this.tail) {
+ this.tail = node
+ }
+ this.length++
+}
+
+Yallist.prototype.pushNode = function (node) {
+ if (node === this.tail) {
+ return
+ }
+
+ if (node.list) {
+ node.list.removeNode(node)
+ }
+
+ var tail = this.tail
+ node.list = this
+ node.prev = tail
+ if (tail) {
+ tail.next = node
+ }
+
+ this.tail = node
+ if (!this.head) {
+ this.head = node
+ }
+ this.length++
+}
+
+Yallist.prototype.push = function () {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ push(this, arguments[i])
+ }
+ return this.length
+}
+
+Yallist.prototype.unshift = function () {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ unshift(this, arguments[i])
+ }
+ return this.length
+}
+
+Yallist.prototype.pop = function () {
+ if (!this.tail) {
+ return undefined
+ }
+
+ var res = this.tail.value
+ this.tail = this.tail.prev
+ if (this.tail) {
+ this.tail.next = null
+ } else {
+ this.head = null
+ }
+ this.length--
+ return res
+}
+
+Yallist.prototype.shift = function () {
+ if (!this.head) {
+ return undefined
+ }
+
+ var res = this.head.value
+ this.head = this.head.next
+ if (this.head) {
+ this.head.prev = null
+ } else {
+ this.tail = null
+ }
+ this.length--
+ return res
+}
+
+Yallist.prototype.forEach = function (fn, thisp) {
+ thisp = thisp || this
+ for (var walker = this.head, i = 0; walker !== null; i++) {
+ fn.call(thisp, walker.value, i, this)
+ walker = walker.next
+ }
+}
+
+Yallist.prototype.forEachReverse = function (fn, thisp) {
+ thisp = thisp || this
+ for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
+ fn.call(thisp, walker.value, i, this)
+ walker = walker.prev
+ }
+}
+
+Yallist.prototype.get = function (n) {
+ for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
+ // abort out of the list early if we hit a cycle
+ walker = walker.next
+ }
+ if (i === n && walker !== null) {
+ return walker.value
+ }
+}
+
+Yallist.prototype.getReverse = function (n) {
+ for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
+ // abort out of the list early if we hit a cycle
+ walker = walker.prev
+ }
+ if (i === n && walker !== null) {
+ return walker.value
+ }
+}
+
+Yallist.prototype.map = function (fn, thisp) {
+ thisp = thisp || this
+ var res = new Yallist()
+ for (var walker = this.head; walker !== null;) {
+ res.push(fn.call(thisp, walker.value, this))
+ walker = walker.next
+ }
+ return res
+}
+
+Yallist.prototype.mapReverse = function (fn, thisp) {
+ thisp = thisp || this
+ var res = new Yallist()
+ for (var walker = this.tail; walker !== null;) {
+ res.push(fn.call(thisp, walker.value, this))
+ walker = walker.prev
+ }
+ return res
+}
+
+Yallist.prototype.reduce = function (fn, initial) {
+ var acc
+ var walker = this.head
+ if (arguments.length > 1) {
+ acc = initial
+ } else if (this.head) {
+ walker = this.head.next
+ acc = this.head.value
+ } else {
+ throw new TypeError('Reduce of empty list with no initial value')
+ }
+
+ for (var i = 0; walker !== null; i++) {
+ acc = fn(acc, walker.value, i)
+ walker = walker.next
+ }
+
+ return acc
+}
+
+Yallist.prototype.reduceReverse = function (fn, initial) {
+ var acc
+ var walker = this.tail
+ if (arguments.length > 1) {
+ acc = initial
+ } else if (this.tail) {
+ walker = this.tail.prev
+ acc = this.tail.value
+ } else {
+ throw new TypeError('Reduce of empty list with no initial value')
+ }
+
+ for (var i = this.length - 1; walker !== null; i--) {
+ acc = fn(acc, walker.value, i)
+ walker = walker.prev
+ }
+
+ return acc
+}
+
+Yallist.prototype.toArray = function () {
+ var arr = new Array(this.length)
+ for (var i = 0, walker = this.head; walker !== null; i++) {
+ arr[i] = walker.value
+ walker = walker.next
+ }
+ return arr
+}
+
+Yallist.prototype.toArrayReverse = function () {
+ var arr = new Array(this.length)
+ for (var i = 0, walker = this.tail; walker !== null; i++) {
+ arr[i] = walker.value
+ walker = walker.prev
+ }
+ return arr
+}
+
+Yallist.prototype.slice = function (from, to) {
+ to = to || this.length
+ if (to < 0) {
+ to += this.length
+ }
+ from = from || 0
+ if (from < 0) {
+ from += this.length
+ }
+ var ret = new Yallist()
+ if (to < from || to < 0) {
+ return ret
+ }
+ if (from < 0) {
+ from = 0
+ }
+ if (to > this.length) {
+ to = this.length
+ }
+ for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
+ walker = walker.next
+ }
+ for (; walker !== null && i < to; i++, walker = walker.next) {
+ ret.push(walker.value)
+ }
+ return ret
+}
+
+Yallist.prototype.sliceReverse = function (from, to) {
+ to = to || this.length
+ if (to < 0) {
+ to += this.length
+ }
+ from = from || 0
+ if (from < 0) {
+ from += this.length
+ }
+ var ret = new Yallist()
+ if (to < from || to < 0) {
+ return ret
+ }
+ if (from < 0) {
+ from = 0
+ }
+ if (to > this.length) {
+ to = this.length
+ }
+ for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
+ walker = walker.prev
+ }
+ for (; walker !== null && i > from; i--, walker = walker.prev) {
+ ret.push(walker.value)
+ }
+ return ret
+}
+
+Yallist.prototype.reverse = function () {
+ var head = this.head
+ var tail = this.tail
+ for (var walker = head; walker !== null; walker = walker.prev) {
+ var p = walker.prev
+ walker.prev = walker.next
+ walker.next = p
+ }
+ this.head = tail
+ this.tail = head
+ return this
+}
+
+function push (self, item) {
+ self.tail = new Node(item, self.tail, null, self)
+ if (!self.head) {
+ self.head = self.tail
+ }
+ self.length++
+}
+
+function unshift (self, item) {
+ self.head = new Node(item, null, self.head, self)
+ if (!self.tail) {
+ self.tail = self.head
+ }
+ self.length++
+}
+
+function Node (value, prev, next, list) {
+ if (!(this instanceof Node)) {
+ return new Node(value, prev, next, list)
+ }
+
+ this.list = list
+ this.value = value
+
+ if (prev) {
+ prev.next = this
+ this.prev = prev
+ } else {
+ this.prev = null
+ }
+
+ if (next) {
+ next.prev = this
+ this.next = next
+ } else {
+ this.next = null
+ }
+}
+
+try {
+  // add iterator support if Symbol.iterator is present
+ require('./iterator.js')
+} catch (er) {}
diff --git a/deps/npm/node_modules/tar/package.json b/deps/npm/node_modules/tar/package.json
index 4dc0c151cbc1e6..4d6683463a6df1 100644
--- a/deps/npm/node_modules/tar/package.json
+++ b/deps/npm/node_modules/tar/package.json
@@ -1,65 +1,78 @@
{
- "_from": "tar@~2.2.1",
- "_id": "tar@2.2.1",
- "_integrity": "sha1-jk0qJWwOIYXGsYrWlK7JaLg8sdE=",
+ "_from": "tar@latest",
+ "_id": "tar@4.0.1",
+ "_inBundle": false,
+ "_integrity": "sha512-XBpU+/azPOMvE5m2Tn7Sl6U1ahpGfe77LkdrAlFilwrgHZsR+2iy0l8klQtfJNM+DACZO2Xrw10MTyQRB4du5A==",
"_location": "/tar",
- "_phantomChildren": {
- "inherits": "2.0.3"
- },
+ "_phantomChildren": {},
"_requested": {
- "type": "range",
+ "type": "tag",
"registry": true,
- "raw": "tar@~2.2.1",
+ "raw": "tar@latest",
"name": "tar",
"escapedName": "tar",
- "rawSpec": "~2.2.1",
+ "rawSpec": "latest",
"saveSpec": null,
- "fetchSpec": "~2.2.1"
+ "fetchSpec": "latest"
},
"_requiredBy": [
+ "#USER",
"/",
- "/node-gyp"
+ "/pacote"
],
- "_resolved": "https://registry.npmjs.org/tar/-/tar-2.2.1.tgz",
- "_shasum": "8e4d2a256c0e2185c6b18ad694aec968b83cb1d1",
- "_shrinkwrap": null,
- "_spec": "tar@~2.2.1",
- "_where": "/Users/zkat/Documents/code/npm",
+ "_resolved": "https://registry.npmjs.org/tar/-/tar-4.0.1.tgz",
+ "_shasum": "3f5b2e5289db30c2abe4c960f43d0d9fff96aaf0",
+ "_spec": "tar@latest",
+ "_where": "/Users/rebecca/code/npm",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
- "bin": null,
"bugs": {
- "url": "https://github.com/isaacs/node-tar/issues"
+ "url": "https://github.com/npm/node-tar/issues"
},
"bundleDependencies": false,
"dependencies": {
- "block-stream": "*",
- "fstream": "^1.0.2",
- "inherits": "2"
+ "chownr": "^1.0.1",
+ "minipass": "^2.0.2",
+ "minizlib": "^1.0.3",
+ "mkdirp": "^0.5.0",
+ "yallist": "^3.0.2"
},
"deprecated": false,
"description": "tar for node",
"devDependencies": {
- "graceful-fs": "^4.1.2",
- "mkdirp": "^0.5.0",
+ "chmodr": "^1.0.2",
+ "end-of-stream": "^1.4.0",
+ "events-to-array": "^1.1.2",
+ "mutate-fs": "^1.1.0",
"rimraf": "1.x",
- "tap": "0.x"
+ "tap": "^10.3.3",
+ "tar-fs": "^1.15.2",
+ "tar-stream": "^1.5.2"
},
- "homepage": "https://github.com/isaacs/node-tar#readme",
+ "engines": {
+ "node": ">=4.5"
+ },
+ "files": [
+ "index.js",
+ "lib/"
+ ],
+ "homepage": "https://github.com/npm/node-tar#readme",
"license": "ISC",
- "main": "tar.js",
"name": "tar",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
- "url": "git://github.com/isaacs/node-tar.git"
+ "url": "git+https://github.com/npm/node-tar.git"
},
"scripts": {
- "test": "tap test/*.js"
+ "bench": "for i in benchmarks/*/*.js; do echo $i; for j in {1..5}; do node $i || break; done; done",
+ "genparse": "node scripts/generate-parse-fixtures.js",
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100 -J --coverage-report=text"
},
- "version": "2.2.1"
+ "version": "4.0.1"
}
diff --git a/deps/npm/node_modules/which/CHANGELOG.md b/deps/npm/node_modules/which/CHANGELOG.md
index c44cfbec5b5a4b..367acb12a2aac3 100644
--- a/deps/npm/node_modules/which/CHANGELOG.md
+++ b/deps/npm/node_modules/which/CHANGELOG.md
@@ -1,6 +1,11 @@
# Changes
+## v1.3.0
+
+* Add nothrow option to which.sync
+* update tap
+
## v1.2.14
* appveyor: drop node 5 and 0.x
diff --git a/deps/npm/node_modules/which/README.md b/deps/npm/node_modules/which/README.md
index 7f679d595c28c2..8c0b0cbf7214f7 100644
--- a/deps/npm/node_modules/which/README.md
+++ b/deps/npm/node_modules/which/README.md
@@ -21,6 +21,9 @@ which('node', function (er, resolvedPath) {
// throws if not found
var resolved = which.sync('node')
+// if nothrow option is used, returns null if not found
+resolved = which.sync('node', {nothrow: true})
+
// Pass options to override the PATH and PATHEXT environment vars.
which('node', { path: someOtherPath }, function (er, resolved) {
if (er)
diff --git a/deps/npm/node_modules/which/package.json b/deps/npm/node_modules/which/package.json
index 83bcfabfe99d39..5c43403f9eaf29 100644
--- a/deps/npm/node_modules/which/package.json
+++ b/deps/npm/node_modules/which/package.json
@@ -1,31 +1,34 @@
{
- "_from": "which@~1.2.14",
- "_id": "which@1.2.14",
- "_integrity": "sha1-mofEN48D6CfOyvGs31bHNsAcFOU=",
+ "_from": "which@1.3.0",
+ "_id": "which@1.3.0",
+ "_inBundle": false,
+ "_integrity": "sha512-xcJpopdamTuY5duC/KnTTNBraPK54YwpenP4lzxU8H91GudWpFv38u0CKjclE1Wi2EH2EDz5LRcHcKbCIzqGyg==",
"_location": "/which",
"_phantomChildren": {},
"_requested": {
- "type": "range",
+ "type": "version",
"registry": true,
- "raw": "which@~1.2.14",
+ "raw": "which@1.3.0",
"name": "which",
"escapedName": "which",
- "rawSpec": "~1.2.14",
+ "rawSpec": "1.3.0",
"saveSpec": null,
- "fetchSpec": "~1.2.14"
+ "fetchSpec": "1.3.0"
},
"_requiredBy": [
+ "#USER",
"/",
+ "/libnpx",
+ "/libnpx/yargs/os-locale/execa/cross-spawn",
"/node-gyp",
"/pacote",
"/tap/foreground-child/cross-spawn",
"/update-notifier/boxen/term-size/execa/cross-spawn-async"
],
- "_resolved": "https://registry.npmjs.org/which/-/which-1.2.14.tgz",
- "_shasum": "9a87c4378f03e827cecaf1acdf56c736c01c14e5",
- "_shrinkwrap": null,
- "_spec": "which@~1.2.14",
- "_where": "/Users/zkat/Documents/code/npm",
+ "_resolved": "https://registry.npmjs.org/which/-/which-1.3.0.tgz",
+ "_shasum": "ff04bdfc010ee547d780bec38e1ac1c2777d253a",
+ "_spec": "which@1.3.0",
+ "_where": "/Users/rebecca/code/npm",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
@@ -46,7 +49,7 @@
"devDependencies": {
"mkdirp": "^0.5.0",
"rimraf": "^2.3.3",
- "tap": "^10.3.0"
+ "tap": "^10.7.0"
},
"files": [
"which.js",
@@ -56,8 +59,6 @@
"license": "ISC",
"main": "which.js",
"name": "which",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-which.git"
@@ -67,5 +68,5 @@
"postversion": "npm run changelog && git add CHANGELOG.md && git commit -m 'update changelog - '${npm_package_version}",
"test": "tap test/*.js --cov"
},
- "version": "1.2.14"
+ "version": "1.3.0"
}
diff --git a/deps/npm/node_modules/which/which.js b/deps/npm/node_modules/which/which.js
index 70d974c18bac9d..4347f91a1c3878 100644
--- a/deps/npm/node_modules/which/which.js
+++ b/deps/npm/node_modules/which/which.js
@@ -128,5 +128,8 @@ function whichSync (cmd, opt) {
if (opt.all && found.length)
return found
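+  // with nothrow set, report "not found" as null instead of throwing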
+ if (opt.nothrow)
+ return null
+
throw getNotFoundError(cmd)
}
diff --git a/deps/npm/node_modules/worker-farm/index.d.ts b/deps/npm/node_modules/worker-farm/index.d.ts
new file mode 100644
index 00000000000000..682c21f410c513
--- /dev/null
+++ b/deps/npm/node_modules/worker-farm/index.d.ts
@@ -0,0 +1,44 @@
+interface Workers {
+ (callback: WorkerCallback): void;
+ (arg1: any, callback: WorkerCallback): void;
+ (arg1: any, arg2: any, callback: WorkerCallback): void;
+ (arg1: any, arg2: any, arg3: any, callback: WorkerCallback): void;
+ (arg1: any, arg2: any, arg3: any, arg4: any, callback: WorkerCallback): void;
+}
+
+type WorkerCallback =
+ | WorkerCallback0
+ | WorkerCallback1
+ | WorkerCallback2
+ | WorkerCallback3
+ | WorkerCallback4;
+
+type WorkerCallback0 = () => void;
+type WorkerCallback1 = (arg1: any) => void;
+type WorkerCallback2 = (arg1: any, arg2: any) => void;
+type WorkerCallback3 = (arg1: any, arg2: any, arg3: any) => void;
+type WorkerCallback4 = (arg1: any, arg2: any, arg3: any, arg4: any) => void;
+
+interface FarmOptions {
+ maxCallsPerWorker?: number
+ maxConcurrentWorkers?: number
+ maxConcurrentCallsPerWorker?: number
+ maxConcurrentCalls?: number
+ maxCallTime?: number
+ maxRetries?: number
+ autoStart?: boolean
+}
+
+interface WorkerFarm {
+ (name: string): Workers;
+ (name: string, exportedMethods: string[]): Workers;
+ (options: FarmOptions, name: string): Workers;
+ (options: FarmOptions, name: string, exportedMethods: string[]): Workers;
+
+ end: (workers: Workers) => void;
+}
+
+declare module "worker-farm" {
+ const workerFarm: WorkerFarm;
+ export = workerFarm;
+}
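+
+// A usage sketch of the typings above (the './worker' module path is
+// hypothetical):
+//
+//   import workerFarm = require('worker-farm');
+//   const workers = workerFarm(require.resolve('./worker'));
+//   workers('input', (err: any, output: any) => {
+//     workerFarm.end(workers);
+//   });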
diff --git a/deps/npm/node_modules/worker-farm/lib/fork.js b/deps/npm/node_modules/worker-farm/lib/fork.js
index 030a2146007b6e..cbd4011c7357c2 100644
--- a/deps/npm/node_modules/worker-farm/lib/fork.js
+++ b/deps/npm/node_modules/worker-farm/lib/fork.js
@@ -9,8 +9,9 @@ function fork (forkModule) {
let filteredArgs = process.execArgv.filter(function (v) {
return !(/^--(debug|inspect)/).test(v)
})
- , child = childProcess.fork(childModule, { execArgv: filteredArgs }, {
- env: process.env
+ , child = childProcess.fork(childModule, process.argv, {
+ execArgv: filteredArgs
+ , env: process.env
, cwd: process.cwd()
})
diff --git a/deps/npm/node_modules/worker-farm/package.json b/deps/npm/node_modules/worker-farm/package.json
index 3b5b6950a321c8..d3ff94a6162f32 100644
--- a/deps/npm/node_modules/worker-farm/package.json
+++ b/deps/npm/node_modules/worker-farm/package.json
@@ -1,27 +1,27 @@
{
- "_from": "worker-farm@1.4.1",
- "_id": "worker-farm@1.4.1",
+ "_from": "worker-farm@1.5.0",
+ "_id": "worker-farm@1.5.0",
"_inBundle": false,
- "_integrity": "sha512-tgFAtgOYLPutkAyzgpS6VJFL5HY+0ui1Tvua+fITgz8ByaJTMFGtazR6xxQfwfiAcbwE+2fLG/K49wc2TfwCNw==",
+ "_integrity": "sha512-DHRiUggxtbruaTwnLDm2/BRDKZIoOYvrgYUj5Bam4fU6Gtvc0FaEyoswFPBjMXAweGW2H4BDNIpy//1yXXuaqQ==",
"_location": "/worker-farm",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
- "raw": "worker-farm@1.4.1",
+ "raw": "worker-farm@1.5.0",
"name": "worker-farm",
"escapedName": "worker-farm",
- "rawSpec": "1.4.1",
+ "rawSpec": "1.5.0",
"saveSpec": null,
- "fetchSpec": "1.4.1"
+ "fetchSpec": "1.5.0"
},
"_requiredBy": [
"#USER",
"/"
],
- "_resolved": "https://registry.npmjs.org/worker-farm/-/worker-farm-1.4.1.tgz",
- "_shasum": "a438bc993a7a7d133bcb6547c95eca7cff4897d8",
- "_spec": "worker-farm@1.4.1",
+ "_resolved": "https://registry.npmjs.org/worker-farm/-/worker-farm-1.5.0.tgz",
+ "_shasum": "adfdf0cd40581465ed0a1f648f9735722afd5c8d",
+ "_spec": "worker-farm@1.5.0",
"_where": "/Users/rebecca/code/npm",
"authors": [
"Rod Vagg @rvagg (https://github.com/rvagg)"
@@ -56,5 +56,6 @@
"scripts": {
"test": "node ./tests/"
},
- "version": "1.4.1"
+ "types": "./index.d.ts",
+ "version": "1.5.0"
}
diff --git a/deps/npm/node_modules/write-file-atomic/package.json b/deps/npm/node_modules/write-file-atomic/package.json
index 9f8043b0945c7b..7e4029a0ece3e1 100644
--- a/deps/npm/node_modules/write-file-atomic/package.json
+++ b/deps/npm/node_modules/write-file-atomic/package.json
@@ -1,28 +1,29 @@
{
- "_from": "write-file-atomic@latest",
+ "_from": "write-file-atomic@2.1.0",
"_id": "write-file-atomic@2.1.0",
"_inBundle": false,
"_integrity": "sha512-0TZ20a+xcIl4u0+Mj5xDH2yOWdmQiXlKf9Hm+TgDXjTMsEYb+gDrmb8e8UNAzMCitX8NBqG4Z/FUQIyzv/R1JQ==",
"_location": "/write-file-atomic",
"_phantomChildren": {},
"_requested": {
- "type": "tag",
+ "type": "version",
"registry": true,
- "raw": "write-file-atomic@latest",
+ "raw": "write-file-atomic@2.1.0",
"name": "write-file-atomic",
"escapedName": "write-file-atomic",
- "rawSpec": "latest",
+ "rawSpec": "2.1.0",
"saveSpec": null,
- "fetchSpec": "latest"
+ "fetchSpec": "2.1.0"
},
"_requiredBy": [
"#USER",
- "/"
+ "/",
+ "/update-notifier/configstore"
],
"_resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.1.0.tgz",
"_shasum": "1769f4b551eedce419f0505deae2e26763542d37",
- "_spec": "write-file-atomic@latest",
- "_where": "/Users/zkat/Documents/code/npm",
+ "_spec": "write-file-atomic@2.1.0",
+ "_where": "/Users/rebecca/code/npm",
"author": {
"name": "Rebecca Turner",
"email": "me@re-becca.org",
diff --git a/deps/npm/package.json b/deps/npm/package.json
index 5b0ab111e21282..c05b426a286244 100644
--- a/deps/npm/package.json
+++ b/deps/npm/package.json
@@ -1,5 +1,5 @@
{
- "version": "5.3.0",
+ "version": "5.4.2",
"name": "npm",
"description": "a package manager for JavaScript",
"keywords": [
@@ -52,8 +52,6 @@
"editor": "~1.0.0",
"fs-vacuum": "~1.2.10",
"fs-write-stream-atomic": "~1.0.10",
- "fstream": "~1.0.11",
- "fstream-npm": "~1.2.1",
"glob": "~7.1.2",
"graceful-fs": "~4.1.11",
"has-unicode": "~2.0.1",
@@ -64,7 +62,7 @@
"ini": "~1.3.4",
"init-package-json": "~1.10.1",
"lazy-property": "~1.0.0",
- "libnpx": "~9.2.1",
+ "libnpx": "~9.6.0",
"lockfile": "~1.0.3",
"lodash._baseuniq": "~4.6.0",
"lodash.clonedeep": "~4.5.0",
@@ -72,6 +70,7 @@
"lodash.uniq": "~4.5.0",
"lodash.without": "~4.4.0",
"lru-cache": "~4.1.1",
+ "meant": "~1.0.0",
"mississippi": "~1.3.0",
"mkdirp": "~0.5.1",
"move-concurrently": "~1.0.1",
@@ -80,34 +79,36 @@
"normalize-package-data": "~2.4.0",
"npm-cache-filename": "~1.0.2",
"npm-install-checks": "~3.0.0",
+ "npm-lifecycle": "~1.0.2",
"npm-package-arg": "~5.1.2",
+ "npm-packlist": "~1.1.8",
"npm-registry-client": "~8.4.0",
"npm-user-validate": "~1.0.0",
"npmlog": "~4.1.2",
"once": "~1.4.0",
"opener": "~1.4.3",
"osenv": "~0.1.4",
- "pacote": "~2.7.38",
+ "pacote": "~6.0.2",
"path-is-inside": "~1.0.2",
"promise-inflight": "~1.0.1",
"read": "~1.0.7",
"read-cmd-shim": "~1.0.1",
"read-installed": "~4.0.3",
- "read-package-json": "~2.0.10",
+ "read-package-json": "~2.0.12",
"read-package-tree": "~5.1.6",
"readable-stream": "~2.3.3",
"request": "~2.81.0",
"retry": "~0.10.1",
"rimraf": "~2.6.1",
"safe-buffer": "~5.1.1",
- "semver": "~5.3.0",
+ "semver": "~5.4.1",
"sha": "~2.0.1",
"slide": "~1.1.6",
"sorted-object": "~2.0.1",
"sorted-union-stream": "~2.1.3",
"ssri": "~4.1.6",
"strip-ansi": "~4.0.0",
- "tar": "~2.2.1",
+ "tar": "~4.0.1",
"text-table": "~0.2.0",
"uid-number": "0.0.6",
"umask": "~1.1.0",
@@ -116,8 +117,8 @@
"update-notifier": "~2.2.0",
"uuid": "~3.1.0",
"validate-npm-package-name": "~3.0.0",
- "which": "~1.2.14",
- "worker-farm": "~1.4.1",
+ "which": "~1.3.0",
+ "worker-farm": "~1.5.0",
"wrappy": "~1.0.2",
"write-file-atomic": "~2.1.0"
},
@@ -141,8 +142,6 @@
"editor",
"fs-vacuum",
"fs-write-stream-atomic",
- "fstream",
- "fstream-npm",
"glob",
"graceful-fs",
"has-unicode",
@@ -168,6 +167,7 @@
"lodash.uniq",
"lodash.without",
"lru-cache",
+ "meant",
"mkdirp",
"mississippi",
"move-concurrently",
@@ -175,6 +175,7 @@
"nopt",
"normalize-package-data",
"npm-cache-filename",
+ "npm-lifecycle",
"npm-install-checks",
"npm-package-arg",
"npm-registry-client",
@@ -218,19 +219,20 @@
"write-file-atomic",
"safe-buffer",
"worker-farm",
- "libnpx"
+ "libnpx",
+ "npm-packlist"
],
"devDependencies": {
"deep-equal": "~1.0.1",
"marked": "~0.3.6",
"marked-man": "~0.2.1",
- "npm-registry-couchapp": "~2.6.13",
+ "npm-registry-couchapp": "~2.7.0",
"npm-registry-mock": "~1.1.0",
"require-inject": "~1.4.2",
"sprintf-js": "~1.1.1",
"standard": "~6.0.8",
"tacks": "~1.2.6",
- "tap": "~10.7.0"
+ "tap": "~10.7.2"
},
"scripts": {
"dumpconf": "env | grep npm | sort | uniq",
diff --git a/deps/npm/scripts/doc-build.sh b/deps/npm/scripts/doc-build.sh
index b951eb7d36257e..a37a5e2618fa87 100755
--- a/deps/npm/scripts/doc-build.sh
+++ b/deps/npm/scripts/doc-build.sh
@@ -6,58 +6,6 @@ fi
set -o errexit
set -o pipefail
-if ! [ -x node_modules/.bin/marked-man ]; then
- ps=0
- if [ -f .building_marked-man ]; then
- pid=$(cat .building_marked-man)
- ps=$(ps -p $pid | grep $pid | wc -l) || true
- fi
-
- if [ -f .building_marked-man ] && [ $ps != 0 ]; then
- while [ -f .building_marked-man ]; do
- sleep 1
- done
- else
- # a race to see which make process will be the one to install marked-man
- echo $$ > .building_marked-man
- sleep 1
- if [ $(cat .building_marked-man) == $$ ]; then
- make node_modules/.bin/marked-man
- rm .building_marked-man
- else
- while [ -f .building_marked-man ]; do
- sleep 1
- done
- fi
- fi
-fi
-
-if ! [ -x node_modules/.bin/marked ]; then
- ps=0
- if [ -f .building_marked ]; then
- pid=$(cat .building_marked)
- ps=$(ps -p $pid | grep $pid | wc -l) || true
- fi
-
- if [ -f .building_marked ] && [ $ps != 0 ]; then
- while [ -f .building_marked ]; do
- sleep 1
- done
- else
- # a race to see which make process will be the one to install marked
- echo $$ > .building_marked
- sleep 1
- if [ $(cat .building_marked) == $$ ]; then
- make node_modules/.bin/marked
- rm .building_marked
- else
- while [ -f .building_marked ]; do
- sleep 1
- done
- fi
- fi
-fi
-
src=$1
dest=$2
name=$(basename ${src%.*})
diff --git a/deps/npm/scripts/install.sh b/deps/npm/scripts/install.sh
index 8ec16f95bb8b42..a5ce5311ff6ff0 100755
--- a/deps/npm/scripts/install.sh
+++ b/deps/npm/scripts/install.sh
@@ -53,6 +53,11 @@ export npm_config_loglevel
# make sure that node exists
node=`which node 2>&1`
ret=$?
+# if not found, try "nodejs" as it is the case on debian
+if [ $ret -ne 0 ]; then
+ node=`which nodejs 2>&1`
+ ret=$?
+fi
if [ $ret -eq 0 ] && [ -x "$node" ]; then
(exit 0)
else
diff --git a/deps/npm/test/fixtures/config/.npmrc b/deps/npm/test/fixtures/config/.npmrc
new file mode 100644
index 00000000000000..7d59bd8be1b82d
--- /dev/null
+++ b/deps/npm/test/fixtures/config/.npmrc
@@ -0,0 +1 @@
+just = testing
diff --git a/deps/npm/test/fixtures/config/userconfig b/deps/npm/test/fixtures/config/userconfig
index d600c0664e29f5..ecc2f043f7fbfd 100644
--- a/deps/npm/test/fixtures/config/userconfig
+++ b/deps/npm/test/fixtures/config/userconfig
@@ -4,7 +4,6 @@ init.author.name = Isaac Z. Schlueter
init.author.email = i@izs.me
init.author.url = http://blog.izs.me/
init.version = 1.2.3
-proprietary-attribs = false
npm:publishtest = true
_npmjs.org:couch = https://admin:password@localhost:5984/registry
npm-www:nocache = 1
diff --git a/deps/npm/test/need-npm5-update/lifecycle-signal.js b/deps/npm/test/need-npm5-update/lifecycle-signal.js
index 065e5a830a37ea..c0fdb766c0c182 100644
--- a/deps/npm/test/need-npm5-update/lifecycle-signal.js
+++ b/deps/npm/test/need-npm5-update/lifecycle-signal.js
@@ -19,6 +19,8 @@ asyncScript += '});setInterval(function(){},10);'
var zombieScript = 'console.error(process.pid);process.on(\'SIGINT\',function (){'
zombieScript += '});setInterval(function(){console.error(process.pid)},10);'
+var SIGSEGV = require('constants').SIGSEGV
+
var json = {
name: 'lifecycle-signal',
version: '1.2.5',
@@ -42,24 +44,25 @@ test('setup', function (t) {
t.end()
})
-test('lifecycle signal abort', function (t) {
- // windows does not use lifecycle signals, abort
- if (process.platform === 'win32' || process.env.TRAVIS) return t.end()
-
+test('lifecycle signal abort', {
+ skip: process.platform === 'win32' && 'windows does not use lifecycle signals'
+}, function (t) {
var child = spawn(node, [npm, 'install'], {
cwd: pkg
})
child.on('close', function (code, signal) {
- t.equal(code, null)
- t.equal(signal, 'SIGSEGV')
+ // The error may be forwarded by the shell as an exit code rather than
+ // the signal itself.
+ t.ok((code === 128 + SIGSEGV) || signal === 'SIGSEGV')
t.end()
})
})
-test('lifecycle propagate signal term to child', function (t) {
- // windows does not use lifecycle signals, abort
- if (process.platform === 'win32' || process.env.TRAVIS) return t.end()
-
+test('lifecycle propagate signal term to child', {
+ /* This feature is broken. npm runs its lifecycle processes in a shell, and at
+ * least `bash` doesn’t forward SIGTERM to its children. */
+ skip: process.platform !== 'darwin' && 'broken'
+}, function (t) {
var innerChildPid
var child = spawn(npm, ['run', 'forever'], {
cwd: pkg
@@ -81,10 +84,9 @@ test('lifecycle propagate signal term to child', function (t) {
})
})
-test('lifecycle wait for async child process exit', function (t) {
- // windows does not use lifecycle signals, abort
- if (process.platform === 'win32' || process.env.TRAVIS) return t.end()
-
+test('lifecycle wait for async child process exit', {
+ skip: process.platform !== 'darwin' && 'broken'
+}, function (t) {
var innerChildPid
var interupted
var child = spawn(npm, ['run', 'async'], {
@@ -104,10 +106,9 @@ test('lifecycle wait for async child process exit', function (t) {
})
})
-test('lifecycle force kill using multiple SIGINT signals', function (t) {
- // windows does not use lifecycle signals, abort
- if (process.platform === 'win32' || process.env.TRAVIS) return t.end()
-
+test('lifecycle force kill using multiple SIGINT signals', {
+ skip: process.platform !== 'darwin' && 'broken'
+}, function (t) {
var innerChildPid
var interupted
var child = spawn(npm, ['run', 'zombie'], {
diff --git a/deps/npm/test/tap/00-config-setup.js b/deps/npm/test/tap/00-config-setup.js
index 7303c8328f3226..448453eded4fd9 100644
--- a/deps/npm/test/tap/00-config-setup.js
+++ b/deps/npm/test/tap/00-config-setup.js
@@ -13,7 +13,6 @@ exports.ucData =
'init.author.email': 'i@izs.me',
'init.author.url': 'http://blog.izs.me/',
'init.version': '1.2.3',
- 'proprietary-attribs': false,
'npm:publishtest': true,
'_npmjs.org:couch': 'https://admin:password@localhost:5984/registry',
'npm-www:nocache': '1',
@@ -63,7 +62,6 @@ try {
// project conf not found, probably working with packed npm
fs.writeFileSync(projectConf, function () { /*
save-prefix = ~
-proprietary-attribs = false
legacy-bundling = true
*/ }.toString().split('\n').slice(1, -1).join('\n'))
}
diff --git a/deps/npm/test/tap/add-remote-git-submodule.js b/deps/npm/test/tap/add-remote-git-submodule.js
index 86fcaa0ee22028..6e26712c561778 100644
--- a/deps/npm/test/tap/add-remote-git-submodule.js
+++ b/deps/npm/test/tap/add-remote-git-submodule.js
@@ -77,6 +77,9 @@ function bootstrap (t) {
}
function setup (cb) {
+ rimraf.sync(pkg)
+ rimraf.sync(repos)
+
mkdirp.sync(topwt)
fs.writeFileSync(resolve(topwt, 'package.json'), pjChild)
mkdirp.sync(subwt)
@@ -120,12 +123,14 @@ function setup (cb) {
var reposopt = { cwd: repos, env: env }
common.makeGitRepo({
path: subwt,
+ message: 'subwt repo: ' + subwt,
added: ['foo.txt'],
commands: [
git.chainableExec(['clone', '--bare', subwt, 'sub.git'], reposopt),
startDaemon,
[common.makeGitRepo, {
path: topwt,
+ message: 'topwt repo: ' + topwt,
commands: [
git.chainableExec(['submodule', 'add', suburl, 'subpath'], topopt),
git.chainableExec(['commit', '-m', 'added submodule'], topopt),
diff --git a/deps/npm/test/tap/bundled-dependencies-nonarray.js b/deps/npm/test/tap/bundled-dependencies-nonarray.js
index 8f8eeb358ffd0b..924e1b27b60a3a 100644
--- a/deps/npm/test/tap/bundled-dependencies-nonarray.js
+++ b/deps/npm/test/tap/bundled-dependencies-nonarray.js
@@ -34,7 +34,7 @@ test('setup', function (t) {
t.end()
})
-test('errors on non-array bundleddependencies', function (t) {
+test('handles non-array bundleddependencies', function (t) {
return Bluebird.try(() => {
return common.npm(['pack', 'a-bundled-dep/'], {cwd: dir, stdio: [0, 1, 2]})
}).spread((code) => {
@@ -44,8 +44,8 @@ test('errors on non-array bundleddependencies', function (t) {
t.is(code, 0, 'prepared pkg-with-bundled')
return common.npm(['pack', 'pkg-with-bundled/'], {cwd: dir, stdio: [0, 1, 'pipe']})
}).spread((code, _, stderr) => {
- t.notEqual(code, 0, 'exited with a error code')
- t.like(stderr, /be an array/, 'nice error output')
+    t.equal(code, 0, 'exited without an error code')
+ t.equal(stderr, '')
})
})
diff --git a/deps/npm/test/tap/bundled-dependencies.js b/deps/npm/test/tap/bundled-dependencies.js
index a49610e666d2d5..c6d67e3c503a57 100644
--- a/deps/npm/test/tap/bundled-dependencies.js
+++ b/deps/npm/test/tap/bundled-dependencies.js
@@ -6,7 +6,6 @@ var rimraf = require('rimraf')
var mkdirp = require('mkdirp')
var fs = require('graceful-fs')
var tar = require('tar')
-var zlib = require('zlib')
var basepath = path.resolve(__dirname, path.basename(__filename, '.js'))
var fixturepath = path.resolve(basepath, 'npm-test-bundled-deps')
var targetpath = path.resolve(basepath, 'target')
@@ -103,10 +102,8 @@ function withFixture (t, fixture, tester) {
function extractTarball (cb) {
// Unpack to disk so case-insensitive filesystems are consistent
- fs.createReadStream(path.join(basepath, 'npm-test-files-1.2.5.tgz'))
- .pipe(zlib.Unzip())
- .on('error', cb)
- .pipe(tar.Extract(targetpath))
- .on('error', cb)
- .on('end', function () { cb() })
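+    // tar v4 detects gzip on its own; the returned promise maps onto the callback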
+ tar.extract({
+ file: path.join(basepath, 'npm-test-files-1.2.5.tgz'),
+ cwd: targetpath
+ }).then(cb, cb)
}
diff --git a/deps/npm/test/tap/bundled-transitive-deps.js b/deps/npm/test/tap/bundled-transitive-deps.js
index 9af12337d651f0..fe591490626365 100644
--- a/deps/npm/test/tap/bundled-transitive-deps.js
+++ b/deps/npm/test/tap/bundled-transitive-deps.js
@@ -7,8 +7,8 @@ var File = Tacks.File
var Dir = Tacks.Dir
var common = require('../common-tap.js')
var npm = require('../../lib/npm.js')
-var tar = require('../../lib/utils/tar.js')
-
+var tar = require('tar')
+var mkdirp = require('mkdirp')
var testdir = path.join(__dirname, path.basename(__filename, '.js'))
var packed = path.join(testdir, 'packed')
@@ -87,10 +87,13 @@ test('bundled-transitive-deps', function (t) {
var tarball = stdout.trim()
t.comment(stderr.trim())
t.is(code, 0, 'pack successful')
- tar.unpack(path.join(testdir, tarball), packed, thenCheckContents)
- }
- function thenCheckContents (err) {
- t.ifError(err, 'unpack successful')
+ mkdirp.sync(packed)
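+    // tar v4: unpack synchronously, stripping the top-level 'package/' directory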
+ tar.extract({
+ file: path.join(testdir, tarball),
+ cwd: packed,
+ strip: 1,
+ sync: true
+ })
var transitivePackedDep = path.join(packed, 'node_modules', 'b')
exists(t, transitivePackedDep)
var nestedScopedDep = path.join(packed, 'node_modules', '@c', 'd', 'node_modules', 'e')
diff --git a/deps/npm/test/tap/config-basic.js b/deps/npm/test/tap/config-basic.js
index cabfa2439680ad..b8102da90c9141 100644
--- a/deps/npm/test/tap/config-basic.js
+++ b/deps/npm/test/tap/config-basic.js
@@ -5,7 +5,6 @@ var path = require('path')
var projectData = {
'save-prefix': '~',
- 'proprietary-attribs': false,
'legacy-bundling': true
}
diff --git a/deps/npm/test/tap/config-builtin.js b/deps/npm/test/tap/config-builtin.js
index cb1e4eb20fbab0..53d00a31a79a95 100644
--- a/deps/npm/test/tap/config-builtin.js
+++ b/deps/npm/test/tap/config-builtin.js
@@ -16,7 +16,6 @@ var cli = { foo: 'bar', heading: 'foo', 'git-tag-version': false }
var projectData = {
'save-prefix': '~',
- 'proprietary-attribs': false,
'legacy-bundling': true
}
diff --git a/deps/npm/test/tap/config-list.js b/deps/npm/test/tap/config-list.js
index 1c42b64c2e52ac..b06dc154b2157b 100644
--- a/deps/npm/test/tap/config-list.js
+++ b/deps/npm/test/tap/config-list.js
@@ -8,28 +8,54 @@ var common = require('../common-tap.js')
var pkg = path.resolve(__dirname, 'config-list')
var opts = { cwd: pkg }
var npmrc = path.resolve(pkg, '.npmrc')
+var npmrcContents = `
+_private=private;
+registry/:_pwd=pwd;
+foo=1234
+`
test('setup', function (t) {
rimraf.sync(pkg)
mkdirp.sync(pkg)
- t.end()
-})
-test('config list includes project config', function (t) {
// Write per-project conf file
- fs.writeFileSync(npmrc, 'foo=1234', 'utf8')
+ fs.writeFileSync(npmrc, npmrcContents, 'utf8')
// Create empty package.json to indicate project root
fs.writeFileSync(path.resolve(pkg, 'package.json'), '{}', 'utf8')
+ t.end()
+})
+test('config list includes project config', function (t) {
common.npm(
['config', 'list'],
opts,
function (err, code, stdout, stderr) {
t.ifError(err)
t.equal(stderr, '', 'stderr is empty')
+
var expected = '; project config ' + npmrc + '\nfoo = "1234"'
t.match(stdout, expected, 'contains project config')
+ t.notMatch(stdout, '_private', 'excludes private config')
+ t.notMatch(stdout, '_pwd', 'excludes private segmented config')
+ t.end()
+ }
+ )
+})
+
+test('config list --json outputs json', function (t) {
+ common.npm(
+ ['config', 'list', '--json'],
+ opts,
+ function (err, code, stdout, stderr) {
+ t.ifError(err)
+ t.equal(stderr, '', 'stderr is empty')
+
+ var json = JSON.parse(stdout)
+ t.equal(json.foo, '1234', 'contains project config')
+ t.equal(json.argv, undefined, 'excludes argv')
+ t.equal(json._private, undefined, 'excludes private config')
+ t.equal(json['registry/:_pwd'], undefined, 'excludes private config')
t.end()
}
)
diff --git a/deps/npm/test/tap/config-save.js b/deps/npm/test/tap/config-save.js
index 903bac7ae3a4a2..a7f85d6015e4bb 100644
--- a/deps/npm/test/tap/config-save.js
+++ b/deps/npm/test/tap/config-save.js
@@ -12,7 +12,6 @@ var expectConf = [
'init.author.email = i@izs.me',
'init.author.url = http://blog.izs.me/',
'init.version = 1.2.3',
- 'proprietary-attribs = false',
'npm:publishtest = true',
'_npmjs.org:couch = https://admin:password@localhost:5984/registry',
'npm-www:nocache = 1',
@@ -38,7 +37,6 @@ var expectFile = [
'init.author.email = i@izs.me',
'init.author.url = http://blog.izs.me/',
'init.version = 1.2.3',
- 'proprietary-attribs = false',
'npm:publishtest = true',
'_npmjs.org:couch = https://admin:password@localhost:5984/registry',
'npm-www:nocache = 1',
diff --git a/deps/npm/test/tap/debug-logs.js b/deps/npm/test/tap/debug-logs.js
index 48845124295b6b..e2dfb6a8b099e7 100644
--- a/deps/npm/test/tap/debug-logs.js
+++ b/deps/npm/test/tap/debug-logs.js
@@ -99,3 +99,4 @@ test('cleanup', function (t) {
cleanup()
t.done()
})
+
diff --git a/deps/npm/test/tap/files-and-ignores.js b/deps/npm/test/tap/files-and-ignores.js
index d86b17acfa99a9..6d44f3ea35ffa5 100644
--- a/deps/npm/test/tap/files-and-ignores.js
+++ b/deps/npm/test/tap/files-and-ignores.js
@@ -6,7 +6,6 @@ var rimraf = require('rimraf')
var mkdirp = require('mkdirp')
var fs = require('graceful-fs')
var tar = require('tar')
-var zlib = require('zlib')
var basepath = path.resolve(__dirname, path.basename(__filename, '.js'))
var fixturepath = path.resolve(basepath, 'npm-test-files')
var targetpath = path.resolve(basepath, 'target')
@@ -210,14 +209,14 @@ test('.npmignore should always be overridden by files array', function (t) {
include: File(''),
ignore: File(''),
sub: Dir({
- include: File('')
+ included: File('')
})
})
)
withFixture(t, fixture, function (done) {
t.notOk(fileExists('ignore'), 'toplevel file excluded')
t.ok(fileExists('include'), 'unignored file included')
- t.ok(fileExists('sub/include'), 'nested file included')
+ t.ok(fileExists('sub/included'), 'nested file included')
done()
})
})
@@ -384,7 +383,55 @@ test('include main file', function (t) {
})
})
-test('certain files ignored unconditionally', function (t) {
+test('certain files ignored by default', function (t) {
+ var fixture = new Tacks(
+ Dir({
+ 'package.json': File({
+ name: 'npm-test-files',
+ version: '1.2.5'
+ }),
+ '.git': Dir({foo: File('')}),
+ '.svn': Dir({foo: File('')}),
+ 'CVS': Dir({foo: File('')}),
+ '.hg': Dir({foo: File('')}),
+ '.lock-wscript': File(''),
+ '.wafpickle-0': File(''),
+ '.wafpickle-5': File(''),
+ '.wafpickle-50': File(''),
+ 'build': Dir({'config.gypi': File('')}),
+ 'npm-debug.log': File(''),
+ '.npmrc': File(''),
+ '.foo.swp': File(''),
+ '.DS_Store': Dir({foo: File('')}),
+ '._ohno': File(''),
+ '._ohnoes': Dir({noes: File('')}),
+ 'foo.orig': File(''),
+ 'package-lock.json': File('')
+ })
+ )
+ withFixture(t, fixture, function (done) {
+ t.notOk(fileExists('.git'), '.git not included')
+ t.notOk(fileExists('.svn'), '.svn not included')
+ t.notOk(fileExists('CVS'), 'CVS not included')
+ t.notOk(fileExists('.hg'), '.hg not included')
+ t.notOk(fileExists('.lock-wscript'), '.lock-wscript not included')
+ t.notOk(fileExists('.wafpickle-0'), '.wafpickle-0 not included')
+ t.notOk(fileExists('.wafpickle-5'), '.wafpickle-5 not included')
+ t.notOk(fileExists('.wafpickle-50'), '.wafpickle-50 not included')
+ t.notOk(fileExists('build/config.gypi'), 'build/config.gypi not included')
+ t.notOk(fileExists('npm-debug.log'), 'npm-debug.log not included')
+ t.notOk(fileExists('.npmrc'), '.npmrc not included')
+ t.notOk(fileExists('.foo.swp'), '.foo.swp not included')
+ t.notOk(fileExists('.DS_Store'), '.DS_Store not included')
+ t.notOk(fileExists('._ohno'), '._ohno not included')
+ t.notOk(fileExists('._ohnoes'), '._ohnoes not included')
+ t.notOk(fileExists('foo.orig'), 'foo.orig not included')
+ t.notOk(fileExists('package-lock.json'), 'package-lock.json not included')
+ done()
+ })
+})
+
+test('default-ignored files can be explicitly included', function (t) {
var fixture = new Tacks(
Dir({
'package.json': File({
@@ -405,6 +452,7 @@ test('certain files ignored unconditionally', function (t) {
'.foo.swp',
'.DS_Store',
'._ohno',
+ '._ohnoes',
'foo.orig',
'package-lock.json'
]
@@ -429,23 +477,23 @@ test('certain files ignored unconditionally', function (t) {
})
)
withFixture(t, fixture, function (done) {
- t.notOk(fileExists('.git'), '.git not included')
- t.notOk(fileExists('.svn'), '.svn not included')
- t.notOk(fileExists('CVS'), 'CVS not included')
- t.notOk(fileExists('.hg'), '.hg not included')
- t.notOk(fileExists('.lock-wscript'), '.lock-wscript not included')
- t.notOk(fileExists('.wafpickle-0'), '.wafpickle-0 not included')
- t.notOk(fileExists('.wafpickle-5'), '.wafpickle-5 not included')
- t.notOk(fileExists('.wafpickle-50'), '.wafpickle-50 not included')
- t.notOk(fileExists('build/config.gypi'), 'build/config.gypi not included')
- t.notOk(fileExists('npm-debug.log'), 'npm-debug.log not included')
- t.notOk(fileExists('.npmrc'), '.npmrc not included')
- t.notOk(fileExists('.foo.swp'), '.foo.swp not included')
- t.notOk(fileExists('.DS_Store'), '.DS_Store not included')
- t.notOk(fileExists('._ohno'), '._ohno not included')
- t.notOk(fileExists('._ohnoes'), '._ohnoes not included')
- t.notOk(fileExists('foo.orig'), 'foo.orig not included')
- t.notOk(fileExists('package-lock.json'), 'package-lock.json not included')
+ t.ok(fileExists('.git'), '.git included')
+ t.ok(fileExists('.svn'), '.svn included')
+ t.ok(fileExists('CVS'), 'CVS included')
+ t.ok(fileExists('.hg'), '.hg included')
+ t.ok(fileExists('.lock-wscript'), '.lock-wscript included')
+ t.ok(fileExists('.wafpickle-0'), '.wafpickle-0 included')
+ t.ok(fileExists('.wafpickle-5'), '.wafpickle-5 included')
+ t.ok(fileExists('.wafpickle-50'), '.wafpickle-50 included')
+ t.ok(fileExists('build/config.gypi'), 'build/config.gypi included')
+ t.ok(fileExists('npm-debug.log'), 'npm-debug.log included')
+ t.ok(fileExists('.npmrc'), '.npmrc included')
+ t.ok(fileExists('.foo.swp'), '.foo.swp included')
+ t.ok(fileExists('.DS_Store'), '.DS_Store included')
+ t.ok(fileExists('._ohno'), '._ohno included')
+ t.ok(fileExists('._ohnoes'), '._ohnoes included')
+ t.ok(fileExists('foo.orig'), 'foo.orig included')
+ t.ok(fileExists('package-lock.json'), 'package-lock.json included')
done()
})
})
@@ -609,10 +657,11 @@ function withFixture (t, fixture, tester) {
function extractTarball (cb) {
// Unpack to disk so case-insensitive filesystems are consistent
- fs.createReadStream(path.join(basepath, 'npm-test-files-1.2.5.tgz'))
- .pipe(zlib.Unzip())
- .on('error', cb)
- .pipe(tar.Extract(targetpath))
- .on('error', cb)
- .on('end', function () { cb() })
+ tar.extract({
+ file: basepath + '/npm-test-files-1.2.5.tgz',
+ cwd: targetpath,
+ sync: true
+ })
+
+ cb()
}
diff --git a/deps/npm/test/tap/git-npmignore.js b/deps/npm/test/tap/git-npmignore.js
index 819e0a62852893..1fe2ca24e69aaf 100644
--- a/deps/npm/test/tap/git-npmignore.js
+++ b/deps/npm/test/tap/git-npmignore.js
@@ -50,8 +50,7 @@ var modules = resolve(testdir, 'node_modules')
var installed = resolve(modules, 'gitch')
var expected = [
'a.js',
- 'package.json',
- '.npmignore'
+ 'package.json'
].sort()
var NPM_OPTS = {
diff --git a/deps/npm/test/tap/help.js b/deps/npm/test/tap/help.js
new file mode 100644
index 00000000000000..0b8c22a7b8e4c7
--- /dev/null
+++ b/deps/npm/test/tap/help.js
@@ -0,0 +1,26 @@
+var test = require('tap').test
+var common = require('../common-tap')
+
+test('npm food', function (t) {
+ common.npm('food', {}, function (err, code, stdout, stderr) {
+ if (err) throw err
+ t.equal(code, 1, 'command ran with error')
+
+ t.has(stdout, 'Did you mean this?')
+
+ t.notOk(stderr, 'stderr should be empty')
+ t.end()
+ })
+})
+
+test('npm jet', function (t) {
+ common.npm('jet', {}, function (err, code, stdout, stderr) {
+ if (err) throw err
+ t.equal(code, 1, 'command ran with error')
+
+ t.has(stdout, 'Did you mean one of these?')
+
+ t.notOk(stderr, 'stderr should be empty')
+ t.end()
+ })
+})
diff --git a/deps/npm/test/tap/install-scoped-with-peer-dependency.js b/deps/npm/test/tap/install-scoped-with-peer-dependency.js
index 71584b115a9fe6..7f60c732210927 100644
--- a/deps/npm/test/tap/install-scoped-with-peer-dependency.js
+++ b/deps/npm/test/tap/install-scoped-with-peer-dependency.js
@@ -30,7 +30,7 @@ test('it should install peerDependencies in same tree level as the parent packag
common.npm(['install', '--loglevel=warn', './package'], EXEC_OPTS, function (err, code, stdout, stderr) {
t.ifError(err, 'install local package successful')
t.equal(code, 0, 'npm install exited with code')
- t.match(stderr, /npm WARN @scope[/]package@0[.]0[.]0 requires a peer of underscore@[*] but none was installed[.]\n/,
+ t.match(stderr, /npm WARN @scope[/]package@0[.]0[.]0 requires a peer of underscore@[*] but none is installed[.] You must install peer dependencies yourself[.]\n/,
'npm install warned about unresolved peer dep')
t.end()
diff --git a/deps/npm/test/tap/install-windows-newlines.js b/deps/npm/test/tap/install-windows-newlines.js
new file mode 100644
index 00000000000000..56ef172a297b14
--- /dev/null
+++ b/deps/npm/test/tap/install-windows-newlines.js
@@ -0,0 +1,90 @@
+var fs = require('graceful-fs')
+var path = require('path')
+var existsSync = fs.existsSync || path.existsSync
+
+var mkdirp = require('mkdirp')
+var rimraf = require('rimraf')
+var test = require('tap').test
+
+var common = require('../common-tap.js')
+
+var pkg = path.join(__dirname, 'install-windows-newlines')
+
+var EXEC_OPTS = { cwd: pkg, stdio: [0, 1, 2] }
+
+var json = {
+ name: 'install-windows-newlines',
+ description: 'fixture',
+ version: '0.0.0',
+ dependencies: {
+ 'cli-dependency': 'file:cli-dependency'
+ }
+}
+
+var dependency = {
+ name: 'cli-dependency',
+ description: 'fixture',
+ version: '0.0.0',
+ bin: {
+ hashbang: './hashbang.js',
+ nohashbang: './nohashbang.js'
+ }
+}
+
+test('setup', function (t) {
+ cleanup()
+ mkdirp.sync(path.join(pkg, 'cli-dependency'))
+ fs.writeFileSync(
+ path.join(pkg, 'cli-dependency', 'package.json'),
+ JSON.stringify(dependency, null, 2)
+ )
+ fs.writeFileSync(
+ path.join(pkg, 'cli-dependency', 'hashbang.js'),
+ '#!/usr/bin/env node\r\nconsole.log(\'Hello, world!\')\r\n'
+ )
+ fs.writeFileSync(
+ path.join(pkg, 'cli-dependency', 'nohashbang.js'),
+ '\'use strict\'\r\nconsole.log(\'Goodbye, world!\')\r\n'
+ )
+
+ mkdirp.sync(path.join(pkg, 'node_modules'))
+ fs.writeFileSync(
+ path.join(pkg, 'package.json'),
+ JSON.stringify(json, null, 2)
+ )
+
+ return common.npm(['install'], EXEC_OPTS).spread((code) => {
+ t.equal(code, 0, 'npm install did not raise error code')
+ t.ok(
+ existsSync(path.resolve(pkg, 'node_modules/.bin/hashbang')),
+ 'hashbang installed'
+ )
+ t.ok(
+ existsSync(path.resolve(pkg, 'node_modules/.bin/nohashbang')),
+ 'nohashbang installed'
+ )
+ t.notOk(
+ fs.readFileSync(
+ path.resolve(pkg, 'node_modules/cli-dependency/hashbang.js'),
+ 'utf8'
+ ).includes('\r\n'),
+ 'hashbang dependency cli newlines converted'
+ )
+ t.ok(
+ fs.readFileSync(
+ path.resolve(pkg, 'node_modules/cli-dependency/nohashbang.js'),
+ 'utf8'
+ ).includes('\r\n'),
+ 'nohashbang dependency cli newlines retained'
+ )
+ })
+})
+
+test('cleanup', function (t) {
+ cleanup()
+ t.end()
+})
+
+function cleanup () {
+ rimraf.sync(pkg)
+}
diff --git a/deps/npm/test/tap/lifecycle-INIT_CWD.js b/deps/npm/test/tap/lifecycle-INIT_CWD.js
new file mode 100644
index 00000000000000..eec5c266eec98d
--- /dev/null
+++ b/deps/npm/test/tap/lifecycle-INIT_CWD.js
@@ -0,0 +1,58 @@
+var fs = require('fs')
+var path = require('path')
+
+var mkdirp = require('mkdirp')
+var osenv = require('osenv')
+var rimraf = require('rimraf')
+var test = require('tap').test
+
+var common = require('../common-tap.js')
+
+var pkg = path.resolve(__dirname, 'lifecycle-initcwd')
+var subdir = path.resolve(pkg, 'subdir')
+
+var json = {
+ name: 'init-cwd',
+ version: '1.0.0',
+ scripts: {
+ initcwd: 'echo "$INIT_CWD"'
+ }
+}
+
+test('setup', function (t) {
+ cleanup()
+ mkdirp.sync(pkg)
+ mkdirp.sync(subdir)
+ fs.writeFileSync(
+ path.join(pkg, 'package.json'),
+ JSON.stringify(json, null, 2)
+ )
+
+ process.chdir(subdir)
+ t.end()
+})
+
+test('make sure the env.INIT_CWD is correct', function (t) {
+ common.npm(['run-script', 'initcwd'], {
+ cwd: subdir
+ }, function (er, code, stdout) {
+ if (er) throw er
+ t.equal(code, 0, 'exit code')
+ stdout = stdout.trim().split(/\r|\n/).pop()
+ var actual = stdout
+
+ t.equal(actual, subdir)
+ t.end()
+ })
+})
+
+test('cleanup', function (t) {
+ cleanup()
+ t.end()
+})
+
+function cleanup () {
+ process.chdir(osenv.tmpdir())
+ rimraf.sync(subdir)
+ rimraf.sync(pkg)
+}
diff --git a/deps/npm/test/tap/lifecycle.js b/deps/npm/test/tap/lifecycle.js
deleted file mode 100644
index 1a98bf3db75802..00000000000000
--- a/deps/npm/test/tap/lifecycle.js
+++ /dev/null
@@ -1,44 +0,0 @@
-var test = require('tap').test
-var npm = require('../../')
-var lifecycle = require('../../lib/utils/lifecycle')
-
-test('lifecycle: make env correctly', function (t) {
- npm.load({enteente: Infinity}, function () {
- var env = lifecycle.makeEnv({}, null, process.env)
-
- t.equal('Infinity', env.npm_config_enteente)
- t.end()
- })
-})
-
-test('lifecycle : accepts wd for package that matches project\'s name', function (t) {
- npm.load({}, function () {
- var wd = '/opt/my-time/node_modules/time'
- var pkg = {name: 'time'}
-
- t.equal(lifecycle._incorrectWorkingDirectory(wd, pkg), false)
- t.end()
- })
-})
-
-test('lifecycle : accepts wd for package that doesn\'t match project\'s name', function (t) {
- npm.load({}, function () {
- var wd = '/opt/my-project/node_modules/time'
- var pkg = {name: 'time'}
-
- t.equal(lifecycle._incorrectWorkingDirectory(wd, pkg), false)
- t.end()
- })
-})
-
-test('lifecycle : rejects wd for ', function (t) {
- npm.load({}, function () {
- var wd = '/opt/my-time/node_modules/time/invalid'
- var pkg = {
- name: 'time'
- }
-
- t.equal(lifecycle._incorrectWorkingDirectory(wd, pkg), true)
- t.end()
- })
-})
diff --git a/deps/npm/test/tap/no-global-warns.js b/deps/npm/test/tap/no-global-warns.js
index cae62fff99ed59..577b393122cd07 100644
--- a/deps/npm/test/tap/no-global-warns.js
+++ b/deps/npm/test/tap/no-global-warns.js
@@ -14,7 +14,16 @@ var toInstall = path.join(base, 'to-install')
var config = 'prefix = ' + base
var configPath = path.join(base, '_npmrc')
-var OPTS = { }
+// use a clean environment for this test
+// otherwise local dev-time npm settings can throw it off
+var OPTS = {
+ env: Object.keys(process.env).filter(function (k) {
+ return !/^npm_config_/i.test(k)
+ }).reduce(function (set, k) {
+ set[k] = process.env[k]
+ return set
+ }, {})
+}
var installJSON = {
name: 'to-install',
@@ -43,6 +52,9 @@ test('no-global-warns', function (t) {
OPTS,
function (err, code, stdout, stderr) {
t.ifError(err, 'installed w/o error')
+ const preWarn = 'npm WARN You are using a pre-release version ' +
+ 'of node and things may not work as expected'
+ stderr = stderr.trim().replace(preWarn, '')
t.is(stderr, '', 'no warnings printed to stderr')
t.end()
})
diff --git a/deps/npm/test/tap/optional-metadep-rollback-collision.js b/deps/npm/test/tap/optional-metadep-rollback-collision.js
index 862e6e8c72f894..ce62e005ba4ae0 100644
--- a/deps/npm/test/tap/optional-metadep-rollback-collision.js
+++ b/deps/npm/test/tap/optional-metadep-rollback-collision.js
@@ -1,3 +1,4 @@
+'use strict'
var fs = require('graceful-fs')
var path = require('path')
@@ -12,7 +13,9 @@ var pkg = path.resolve(__dirname, 'optional-metadep-rollback-collision')
var deps = path.resolve(pkg, 'deps')
var opdep = path.resolve(pkg, 'node_modules', 'opdep')
var cache = path.resolve(pkg, 'cache')
-var pidfile = path.resolve(pkg, 'child.pid')
+var createServer = require('http').createServer
+var mr = require('npm-registry-mock')
+var serverPort = 27991
var json = {
name: 'optional-metadep-rollback-collision',
@@ -31,7 +34,7 @@ var d1 = {
preinstall: 'sleep 1'
},
dependencies: {
- foo: 'http://localhost:8080/'
+ foo: 'http://localhost:' + serverPort + '/'
}
}
@@ -43,9 +46,9 @@ var d2 = {
postinstall: 'node blart.js'
},
dependencies: {
- 'graceful-fs': '^3.0.2',
- mkdirp: '^0.5.0',
- rimraf: '^2.2.8'
+ 'request': '^0.9.0',
+ mkdirp: '^0.3.5',
+ wordwrap: '^0.0.2'
}
}
@@ -54,52 +57,12 @@ var opdep_json = {
version: '1.0.0',
description: 'To explode, of course!',
main: 'index.js',
- scripts: {
- preinstall: 'node bad-server.js'
- },
dependencies: {
d1: 'file:../d1',
d2: 'file:../d2'
}
}
-var badServer = function () { /*
-var createServer = require('http').createServer
-var spawn = require('child_process').spawn
-var fs = require('fs')
-var path = require('path')
-var pidfile = path.resolve(__dirname, '..', '..', 'child.pid')
-
-if (process.argv[2]) {
- console.log('ok')
- createServer(function (req, res) {
- setTimeout(function () {
- res.writeHead(404)
- res.end()
- }, 1000)
- this.close()
- }).listen(8080)
-} else {
- var child = spawn(
- process.execPath,
- [__filename, 'whatever'],
- {
- stdio: [0, 1, 2],
- detached: true
- }
- )
- child.unref()
-
- // kill any prior children, if existing.
- try {
- var pid = +fs.readFileSync(pidfile)
- process.kill(pid, 'SIGKILL')
- } catch (er) {}
-
- fs.writeFileSync(pidfile, child.pid + '\n')
-}
-*/ }.toString().split('\n').slice(1, -1).join('\n')
-
var blart = function () { /*
var rando = require('crypto').randomBytes
var resolve = require('path').resolve
@@ -153,8 +116,17 @@ mkdirp(BASEDIR, function go () {
}, 3 * 1000)
})
*/ }.toString().split('\n').slice(1, -1).join('\n')
+
+let badServer
+let mockServer
test('setup', function (t) {
cleanup()
+ badServer = createServer(function (req, res) {
+ setTimeout(function () {
+ res.writeHead(404)
+ res.end()
+ }, 1000)
+ }).listen(serverPort)
mkdirp.sync(pkg)
fs.writeFileSync(
@@ -180,17 +152,19 @@ test('setup', function (t) {
path.join(deps, 'opdep', 'package.json'),
JSON.stringify(opdep_json, null, 2)
)
- fs.writeFileSync(path.join(deps, 'opdep', 'bad-server.js'), badServer)
-
- t.end()
+ mr({ port: common.port }, function (er, server) {
+ mockServer = server
+ t.end()
+ })
})
-
test('go go test racer', function (t) {
common.npm(
[
'--prefix', pkg,
'--fetch-retries', '0',
- '--loglevel', 'silent',
+ '--loglevel', 'error',
+ '--no-progress',
+ '--registry', common.registry,
'--parseable',
'--cache', cache,
'install'
@@ -200,12 +174,13 @@ test('go go test racer', function (t) {
env: {
PATH: process.env.PATH,
Path: process.env.Path
- }
+ },
+ stdio: [ 0, 'pipe', 2 ]
},
function (er, code, stdout, stderr) {
t.ifError(er, 'install ran to completion without error')
t.is(code, 0, 'npm install exited with code 0')
- t.comment(stdout.trim())
+ t.comment(stderr.trim())
// stdout should be empty, because we only have one, optional, dep and
// if it fails we shouldn't try installing anything
t.equal(stdout, '')
@@ -223,16 +198,14 @@ test('verify results', function (t) {
})
test('cleanup', function (t) {
+ mockServer.close()
+ badServer.close()
cleanup()
t.end()
})
function cleanup () {
process.chdir(osenv.tmpdir())
- try {
- var pid = +fs.readFileSync(pidfile)
- process.kill(pid, 'SIGKILL')
- } catch (er) {}
rimraf.sync(pkg)
}
diff --git a/deps/npm/test/tap/ping.js b/deps/npm/test/tap/ping.js
index 13fc4bec9c4588..76d115a482343d 100644
--- a/deps/npm/test/tap/ping.js
+++ b/deps/npm/test/tap/ping.js
@@ -47,7 +47,7 @@ test('npm ping', function (t) {
t.ifError(err, 'no error output')
t.notOk(code, 'exited OK')
- t.same(JSON.parse(stdout), pingResponse)
+ t.same(stdout, 'Ping success: ' + JSON.stringify(pingResponse) + '\n')
t.end()
})
})
diff --git a/deps/npm/test/tap/prune-dev-dep-with-bins.js b/deps/npm/test/tap/prune-dev-dep-with-bins.js
new file mode 100644
index 00000000000000..a89db389db731b
--- /dev/null
+++ b/deps/npm/test/tap/prune-dev-dep-with-bins.js
@@ -0,0 +1,105 @@
+'use strict'
+var fs = require('fs')
+var path = require('path')
+var test = require('tap').test
+var Tacks = require('tacks')
+var File = Tacks.File
+var Dir = Tacks.Dir
+var common = require('../common-tap.js')
+var testdir = path.join(__dirname, path.basename(__filename, '.js'))
+
+var fixture = new Tacks(
+ Dir({
+ node_modules: Dir({
+ 'yes': Dir({
+ 'package.json': File({
+ _requested: {
+ rawSpec: 'file:///mods/yes'
+ },
+ dependencies: {},
+ bin: {
+ 'yes': 'yes.js'
+ },
+ name: 'yes',
+ version: '1.0.0'
+ }),
+ 'yes.js': File('while (true) { console.log("y") }')
+ }),
+ '.bin': Dir({
+ // verbose, but needed for `read-cmd-shim` to properly identify which
+ // package this belongs to
+ 'yes': File(
+ '#!/bin/sh\n' +
+ 'basedir=$(dirname "$(echo "$0" | sed -e \'s,\\\\,/,g\')")\n' +
+ '\n' +
+ 'case `uname` in\n' +
+ ' *CYGWIN*) basedir=`cygpath -w "$basedir"`;;\n' +
+ 'esac\n' +
+ '\n' +
+ 'if [ -x "$basedir/node" ]; then\n' +
+ ' "$basedir/node" "$basedir/../yes/yes.js" "$@"\n' +
+ ' ret=$?\n' +
+ 'else\n' +
+ ' node "$basedir/../yes/yes.js" "$@"\n' +
+ ' ret=$?\n' +
+ 'fi\n' +
+ 'exit $ret\n'),
+ 'yes.cmd': File(
+          '@IF EXIST "%~dp0\\node.exe" (\n' +
+ '"%~dp0\\node.exe" "%~dp0\\..\\yes\\yes.js" %*\n' +
+ ') ELSE (\n' +
+ '@SETLOCAL\n' +
+ '@SET PATHEXT=%PATHEXT:;.JS;=;%\n' +
+          'node "%~dp0\\..\\yes\\yes.js" %*')
+ })
+ }),
+ 'package.json': File({
+ name: 'test',
+ version: '1.0.0',
+ devDependencies: {
+ 'yes': 'file:///mods/yes'
+ }
+ })
+ })
+)
+
+function setup () {
+ cleanup()
+ fixture.create(testdir)
+}
+
+function cleanup () {
+ fixture.remove(testdir)
+}
+
+test('setup', function (t) {
+ setup()
+ t.end()
+})
+
+function readdir (dir) {
+ try {
+ return fs.readdirSync(dir)
+ } catch (ex) {
+ if (ex.code === 'ENOENT') return []
+ throw ex
+ }
+}
+
+test('prune cycle in dev deps', function (t) {
+ common.npm(['prune', '--production', '--json'], {cwd: testdir}, function (err, code, stdout, stderr) {
+ if (err) throw err
+ t.is(code, 0, 'prune finished successfully')
+ t.like(JSON.parse(stdout), {removed: [{name: 'yes'}]}, 'removed the right modules')
+ var dirs = readdir(testdir + '/node_modules').sort()
+ // bindirs are never removed, it's ok for them to remain after prune
+ t.same(dirs, ['.bin'])
+ t.end()
+ })
+})
+
+test('cleanup', function (t) {
+ cleanup()
+ t.end()
+})
+
diff --git a/deps/npm/test/tap/shrinkwrap-lifecycle-cwd.js b/deps/npm/test/tap/shrinkwrap-lifecycle-cwd.js
index 8d5210c4048d5b..a968030e26ba88 100644
--- a/deps/npm/test/tap/shrinkwrap-lifecycle-cwd.js
+++ b/deps/npm/test/tap/shrinkwrap-lifecycle-cwd.js
@@ -87,3 +87,4 @@ test('cleanup', function (t) {
cleanup()
t.done()
})
+
diff --git a/deps/npm/test/tap/unit-deps-replaceModule.js b/deps/npm/test/tap/unit-deps-replaceModule.js
index d5f0fdbf89f268..15cce005d5dcff 100644
--- a/deps/npm/test/tap/unit-deps-replaceModule.js
+++ b/deps/npm/test/tap/unit-deps-replaceModule.js
@@ -10,7 +10,7 @@ test('replaceModuleByName', function (t) {
var replaceModuleByName = require('../../lib/install/deps')._replaceModuleByName
var mods = []
for (var ii = 0; ii < 10; ++ii) {
- mods.push({package: {name: ii}, path: '/path/to/' + ii})
+ mods.push({package: {name: String(ii)}, path: '/path/to/' + ii})
}
var test = {}
@@ -21,7 +21,7 @@ test('replaceModuleByName', function (t) {
t.isDeeply(test.A, mods.slice(0, 4).concat(mods[7]), 'replacing a new module appends')
test.B = mods.slice(0, 4)
- var replacement = {package: {name: 1}, isReplacement: true}
+ var replacement = {package: {name: '1'}, isReplacement: true}
replaceModuleByName(test, 'B', replacement)
t.isDeeply(test.B, [mods[0], replacement, mods[2], mods[3]], 'replacing existing module swaps out for the new version')
@@ -39,7 +39,7 @@ test('replaceModuleByPath', function (t) {
var replaceModuleByPath = require('../../lib/install/deps')._replaceModuleByPath
var mods = []
for (var ii = 0; ii < 10; ++ii) {
- mods.push({package: {name: ii}, path: '/path/to/' + ii})
+ mods.push({package: {name: String(ii)}, path: '/path/to/' + ii})
}
var test = {}
@@ -50,7 +50,7 @@ test('replaceModuleByPath', function (t) {
t.isDeeply(test.A, mods.slice(0, 4).concat(mods[7]), 'replacing a new module appends')
test.B = mods.slice(0, 4)
- var replacement = {package: {name: 1}, isReplacement: true, path: '/path/to/1'}
+ var replacement = {package: {name: '1'}, isReplacement: true, path: '/path/to/1'}
replaceModuleByPath(test, 'B', replacement)
t.isDeeply(test.B, [mods[0], replacement, mods[2], mods[3]], 'replacing existing module swaps out for the new version')
diff --git a/deps/npm/test/tap/unsupported.js b/deps/npm/test/tap/unsupported.js
index b0dd8448aa3766..1a4ef614386bd5 100644
--- a/deps/npm/test/tap/unsupported.js
+++ b/deps/npm/test/tap/unsupported.js
@@ -11,20 +11,22 @@ var versions = [
['v0.5.3', true, true],
['v0.6.17', true, true],
['v0.7.8', true, true],
- ['v0.8.28', false, true],
- ['v0.9.6', false, true],
- ['v0.10.48', false, true],
- ['v0.11.16', false, true],
- ['v0.12.9', false, true],
- ['v1.0.1', false, true],
- ['v1.6.0', false, true],
- ['v2.3.1', false, true],
- ['v3.0.0', false, true],
- ['v4.5.0', false, false],
- ['v5.7.1', false, false],
+ ['v0.8.28', true, true],
+ ['v0.9.6', true, true],
+ ['v0.10.48', true, true],
+ ['v0.11.16', true, true],
+ ['v0.12.9', true, true],
+ ['v1.0.1', true, true],
+ ['v1.6.0', true, true],
+ ['v2.3.1', true, true],
+ ['v3.0.0', true, true],
+ ['v4.5.0', true, true],
+ ['v4.8.4', false, false],
+ ['v5.7.1', false, true],
['v6.8.1', false, false],
['v7.0.0-beta23', false, false],
- ['v7.2.3', false, false]
+ ['v7.2.3', false, false],
+ ['v8.4.0', false, false]
]
test('versions', function (t) {
diff --git a/deps/npm/test/tap/verify-no-lifecycle-on-repo.js b/deps/npm/test/tap/verify-no-lifecycle-on-repo.js
index eedaa756b8f379..babdfb7dace238 100644
--- a/deps/npm/test/tap/verify-no-lifecycle-on-repo.js
+++ b/deps/npm/test/tap/verify-no-lifecycle-on-repo.js
@@ -23,7 +23,11 @@ var baseJSON = {
var lastOpened
var npm = requireInject.installGlobally('../../lib/npm.js', {
- '../../lib/utils/lifecycle.js': function (pkg, stage, wd, unsafe, failOk, cb) {
+ '../../lib/utils/lifecycle.js': function (pkg, stage, wd, moreOpts, cb) {
+ if (typeof moreOpts === 'function') {
+ cb = moreOpts
+ }
+
cb(new Error("Shouldn't be calling lifecycle scripts"))
},
opener: function (url, options, cb) {
diff --git a/deps/npm/test/tap/version-allow-same-version.js b/deps/npm/test/tap/version-allow-same-version.js
index 66f568dec97212..9900d28bac6c9d 100644
--- a/deps/npm/test/tap/version-allow-same-version.js
+++ b/deps/npm/test/tap/version-allow-same-version.js
@@ -63,3 +63,4 @@ function setup () {
fs.writeFileSync(npmrc, configContents, 'ascii')
process.chdir(pkg)
}
+
diff --git a/deps/npm/test/tap/version-commit-hooks-default.js b/deps/npm/test/tap/version-commit-hooks-default.js
new file mode 100644
index 00000000000000..890aa554101637
--- /dev/null
+++ b/deps/npm/test/tap/version-commit-hooks-default.js
@@ -0,0 +1,13 @@
+var test = require('tap').test
+var npm = require('../../')
+
+// This test has to be separate from `version-commit-hooks.js` because it is
+// mutually exclusive with the first test in that file: initial configuration
+// seems to work as expected for defaults only during the first `npm.load()`.
+
+test('npm config `commit-hooks` defaults to `true`', function (t) {
+ npm.load({}, function () {
+ t.same(npm.config.get('commit-hooks'), true)
+ t.end()
+ })
+})
diff --git a/deps/npm/test/tap/version-commit-hooks.js b/deps/npm/test/tap/version-commit-hooks.js
new file mode 100644
index 00000000000000..4791fc3f3c4d24
--- /dev/null
+++ b/deps/npm/test/tap/version-commit-hooks.js
@@ -0,0 +1,64 @@
+var fs = require('graceful-fs')
+var path = require('path')
+var osenv = require('osenv')
+var mkdirp = require('mkdirp')
+var rimraf = require('rimraf')
+var pkg = path.resolve(__dirname, 'version-commit-hooks')
+
+var test = require('tap').test
+var npm = require('../../')
+
+delete process.env['npm_config_commit_hooks']
+
+test('npm version with commit-hooks disabled in .npmrc', function (t) {
+ mkdirp.sync(pkg)
+ var npmrc = path.resolve(pkg, '.npmrc')
+ fs.writeFileSync(npmrc, 'commit-hooks=false\n', 'ascii')
+ process.chdir(pkg)
+
+ npm.load({ prefix: pkg, userconfig: npmrc }, function (err, conf) {
+ if (err) {
+ t.fail('error loading npm')
+ }
+ t.same(npm.config.get('commit-hooks'), false)
+ t.end()
+ })
+})
+
+test('npm version with commit-hooks disabled', function (t) {
+ npm.load({}, function () {
+ npm.config.set('commit-hooks', false)
+
+ var version = require('../../lib/version')
+ var args1 = version.buildCommitArgs()
+ var args2 = version.buildCommitArgs([ 'commit' ])
+ var args3 = version.buildCommitArgs([ 'commit', '-m', 'some commit message' ])
+
+ t.same(args1, [ 'commit', '-n' ])
+ t.same(args2, [ 'commit', '-n' ])
+ t.same(args3, [ 'commit', '-m', 'some commit message', '-n' ])
+ t.end()
+ })
+})
+
+test('npm version with commit-hooks enabled (default)', function (t) {
+ npm.load({}, function () {
+ npm.config.set('commit-hooks', true)
+
+ var version = require('../../lib/version')
+ var args1 = version.buildCommitArgs()
+ var args2 = version.buildCommitArgs([ 'commit' ])
+ var args3 = version.buildCommitArgs([ 'commit', '-m', 'some commit message' ])
+
+ t.same(args1, [ 'commit' ])
+ t.same(args2, [ 'commit' ])
+ t.same(args3, [ 'commit', '-m', 'some commit message' ])
+ t.end()
+ })
+})
+
+test('cleanup', function (t) {
+ process.chdir(osenv.tmpdir())
+ rimraf.sync(pkg)
+ t.end()
+})
diff --git a/deps/uv/.mailmap b/deps/uv/.mailmap
index 896d4065bc486e..500c2b5185c5e8 100644
--- a/deps/uv/.mailmap
+++ b/deps/uv/.mailmap
@@ -31,6 +31,7 @@ Rasmus Christian Pedersen
Robert Mustacchi
Ryan Dahl
Ryan Emery
+Sakthipriyan Vairamani
Sam Roberts
San-Tai Hsu
Santiago Gimeno
diff --git a/deps/uv/AUTHORS b/deps/uv/AUTHORS
index 3562bb81698b66..4747c06b30600f 100644
--- a/deps/uv/AUTHORS
+++ b/deps/uv/AUTHORS
@@ -308,3 +308,10 @@ Jacob Segal
Maciej Szeptuch (Neverous)
Joel Winarske
Gergely Nagy
+Kamil Rytarowski
+tux.uudiin <77389867@qq.com>
+Nick Logan
+darobs
+Zheng, Lei
+Carlo Marcelo Arenas Belón
+Scott Parker
diff --git a/deps/uv/ChangeLog b/deps/uv/ChangeLog
index d0b55750172152..64b18695e16847 100644
--- a/deps/uv/ChangeLog
+++ b/deps/uv/ChangeLog
@@ -1,3 +1,70 @@
+2017.10.03, Version 1.15.0 (Stable), 8b69ce1419d2958011d415a636810705c36c2cc2
+
+Changes since version 1.14.1:
+
+* unix: limit uv__has_forked_with_cfrunloop to macOS (Kamil Rytarowski)
+
+* win: fix buffer size in uv__getpwuid_r() (tux.uudiin)
+
+* win,tty: improve SIGWINCH support (Bartosz Sosnowski)
+
+* unix: use fchmod() in uv_fs_copyfile() (cjihrig)
+
+* unix: support copying empty files (cjihrig)
+
+* unix: truncate destination in uv_fs_copyfile() (Nick Logan)
+
+* win,build: keep cwd when setting build environment (darobs)
+
+* test: add NetBSD support to test-udp-ipv6.c (Kamil Rytarowski)
+
+* unix: add NetBSD support in core.c (Kamil Rytarowski)
+
+* linux: increase thread stack size with musl libc (Ben Noordhuis)
+
+* netbsd: correct uv_exepath() on NetBSD (Kamil Rytarowski)
+
+* test: clean up semaphore after use (jBarz)
+
+* win,build: bump vswhere_usability_wrapper to 2.0.0 (Refael Ackermann)
+
+* win: let UV_PROCESS_WINDOWS_HIDE hide consoles (cjihrig)
+
+* zos: lock protect global epoll list in epoll_ctl (jBarz)
+
+* zos: change platform name to match python (jBarz)
+
+* android: fix getifaddrs() (Zheng, Lei)
+
+* netbsd: implement uv__tty_is_slave() (Kamil Rytarowski)
+
+* zos: fix readlink for mounts with system variables (jBarz)
+
+* test: sort the tests alphabetically (Sakthipriyan Vairamani)
+
+* windows: fix compilation warnings (Carlo Marcelo Arenas Belón)
+
+* build: avoid -fstrict-aliasing compile option (jBarz)
+
+* win: remove unused variables (Carlo Marcelo Arenas Belón)
+
+* unix: remove unused variables (Sakthipriyan Vairamani)
+
+* netbsd: disable poll_bad_fdtype on NetBSD (Kamil Rytarowski)
+
+* netbsd: use uv__cloexec and uv__nonblock (Kamil Rytarowski)
+
+* test: fix udp_multicast_join6 on NetBSD (Kamil Rytarowski)
+
+* unix,win: add uv_mutex_init_recursive() (Scott Parker)
+
+* netbsd: do not exclude IPv6 functionality (Kamil Rytarowski)
+
+* fsevents: watch files with fsevents on macos 10.7+ (Ben Noordhuis)
+
+* unix: retry on ENOBUFS in sendmsg(2) (Kamil Rytarowski)
+
+
2017.09.07, Version 1.14.1 (Stable), b0f9fb2a07a5e638b1580fe9a42a356c3ab35f37
Changes since version 1.14.0:
diff --git a/deps/uv/appveyor.yml b/deps/uv/appveyor.yml
index 986c0d44030e5b..f519bc099b6220 100644
--- a/deps/uv/appveyor.yml
+++ b/deps/uv/appveyor.yml
@@ -1,4 +1,4 @@
-version: v1.14.1.build{build}
+version: v1.15.0.build{build}
init:
- git config --global core.autocrlf true
diff --git a/deps/uv/common.gypi b/deps/uv/common.gypi
index ec482340c27b47..816847bfc70690 100644
--- a/deps/uv/common.gypi
+++ b/deps/uv/common.gypi
@@ -32,10 +32,9 @@
},
'xcode_settings': {
'GCC_OPTIMIZATION_LEVEL': '0',
- 'OTHER_CFLAGS': [ '-Wno-strict-aliasing' ],
},
'conditions': [
- ['OS != "os390"', {
+ ['OS != "zos"', {
'cflags': [ '-O0', '-fwrapv' ]
}],
['OS == "android"', {
@@ -48,7 +47,6 @@
'defines': [ 'NDEBUG' ],
'cflags': [
'-O3',
- '-fstrict-aliasing',
],
'msvs_settings': {
'VCCLCompilerTool': {
@@ -80,7 +78,7 @@
},
},
'conditions': [
- ['OS != "os390"', {
+ ['OS != "zos"', {
'cflags': [
'-fomit-frame-pointer',
'-fdata-sections',
@@ -160,7 +158,7 @@
'cflags': [ '-pthreads' ],
'ldflags': [ '-pthreads' ],
}],
- [ 'OS not in "solaris android os390"', {
+ [ 'OS not in "solaris android zos"', {
'cflags': [ '-pthread' ],
'ldflags': [ '-pthread' ],
}],
@@ -178,9 +176,6 @@
'GCC_THREADSAFE_STATICS': 'NO', # -fno-threadsafe-statics
'PREBINDING': 'NO', # No -Wl,-prebind
'USE_HEADERMAP': 'NO',
- 'OTHER_CFLAGS': [
- '-fstrict-aliasing',
- ],
'WARNING_CFLAGS': [
'-Wall',
'-Wendif-labels',
diff --git a/deps/uv/configure.ac b/deps/uv/configure.ac
index 41349a092c85e6..ebf5bc3d8ef9e5 100644
--- a/deps/uv/configure.ac
+++ b/deps/uv/configure.ac
@@ -13,7 +13,7 @@
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
AC_PREREQ(2.57)
-AC_INIT([libuv], [1.14.1], [https://github.com/libuv/libuv/issues])
+AC_INIT([libuv], [1.15.0], [https://github.com/libuv/libuv/issues])
AC_CONFIG_MACRO_DIR([m4])
m4_include([m4/libuv-extra-automake-flags.m4])
m4_include([m4/as_case.m4])
diff --git a/deps/uv/docs/src/threading.rst b/deps/uv/docs/src/threading.rst
index e876dde1256d1a..bca8ba1d0636f4 100644
--- a/deps/uv/docs/src/threading.rst
+++ b/deps/uv/docs/src/threading.rst
@@ -91,6 +91,7 @@ Functions return 0 on success or an error code < 0 (unless the
return type is void, of course).
.. c:function:: int uv_mutex_init(uv_mutex_t* handle)
+.. c:function:: int uv_mutex_init_recursive(uv_mutex_t* handle)
.. c:function:: void uv_mutex_destroy(uv_mutex_t* handle)
.. c:function:: void uv_mutex_lock(uv_mutex_t* handle)
.. c:function:: int uv_mutex_trylock(uv_mutex_t* handle)
diff --git a/deps/uv/include/uv-version.h b/deps/uv/include/uv-version.h
index 9b891499eb5f4a..55d7c91055c3ea 100644
--- a/deps/uv/include/uv-version.h
+++ b/deps/uv/include/uv-version.h
@@ -31,8 +31,8 @@
*/
#define UV_VERSION_MAJOR 1
-#define UV_VERSION_MINOR 14
-#define UV_VERSION_PATCH 1
+#define UV_VERSION_MINOR 15
+#define UV_VERSION_PATCH 0
#define UV_VERSION_IS_RELEASE 1
#define UV_VERSION_SUFFIX ""
diff --git a/deps/uv/include/uv.h b/deps/uv/include/uv.h
index eac63dde445829..0e4151d1389582 100644
--- a/deps/uv/include/uv.h
+++ b/deps/uv/include/uv.h
@@ -1424,6 +1424,7 @@ UV_EXTERN int uv_dlsym(uv_lib_t* lib, const char* name, void** ptr);
UV_EXTERN const char* uv_dlerror(const uv_lib_t* lib);
UV_EXTERN int uv_mutex_init(uv_mutex_t* handle);
+UV_EXTERN int uv_mutex_init_recursive(uv_mutex_t* handle);
UV_EXTERN void uv_mutex_destroy(uv_mutex_t* handle);
UV_EXTERN void uv_mutex_lock(uv_mutex_t* handle);
UV_EXTERN int uv_mutex_trylock(uv_mutex_t* handle);
diff --git a/deps/uv/src/unix/android-ifaddrs.c b/deps/uv/src/unix/android-ifaddrs.c
index 1a842ced48f9b5..bf30b14179509d 100644
--- a/deps/uv/src/unix/android-ifaddrs.c
+++ b/deps/uv/src/unix/android-ifaddrs.c
@@ -457,7 +457,7 @@ static int interpretAddr(struct nlmsghdr *p_hdr, struct ifaddrs **p_resultList,
char *l_name;
char *l_addr;
- for(l_rta = IFLA_RTA(l_info); RTA_OK(l_rta, l_rtaSize); l_rta = RTA_NEXT(l_rta, l_rtaSize))
+ for(l_rta = IFA_RTA(l_info); RTA_OK(l_rta, l_rtaSize); l_rta = RTA_NEXT(l_rta, l_rtaSize))
{
size_t l_rtaDataSize = RTA_PAYLOAD(l_rta);
if(l_info->ifa_family == AF_PACKET)
@@ -479,7 +479,7 @@ static int interpretAddr(struct nlmsghdr *p_hdr, struct ifaddrs **p_resultList,
l_addrSize += NLMSG_ALIGN(calcAddrLen(l_info->ifa_family, l_rtaDataSize));
break;
case IFA_LABEL:
- l_nameSize += NLMSG_ALIGN(l_rtaSize + 1);
+ l_nameSize += NLMSG_ALIGN(l_rtaDataSize + 1);
break;
default:
break;
@@ -504,7 +504,7 @@ static int interpretAddr(struct nlmsghdr *p_hdr, struct ifaddrs **p_resultList,
}
l_rtaSize = NLMSG_PAYLOAD(p_hdr, sizeof(struct ifaddrmsg));
- for(l_rta = IFLA_RTA(l_info); RTA_OK(l_rta, l_rtaSize); l_rta = RTA_NEXT(l_rta, l_rtaSize))
+ for(l_rta = IFA_RTA(l_info); RTA_OK(l_rta, l_rtaSize); l_rta = RTA_NEXT(l_rta, l_rtaSize))
{
void *l_rtaData = RTA_DATA(l_rta);
size_t l_rtaDataSize = RTA_PAYLOAD(l_rta);
@@ -567,7 +567,7 @@ static int interpretAddr(struct nlmsghdr *p_hdr, struct ifaddrs **p_resultList,
{
unsigned l_maxPrefix = (l_entry->ifa_addr->sa_family == AF_INET ? 32 : 128);
unsigned l_prefix = (l_info->ifa_prefixlen > l_maxPrefix ? l_maxPrefix : l_info->ifa_prefixlen);
- char l_mask[16] = {0};
+ unsigned char l_mask[16] = {0};
unsigned i;
for(i=0; i<(l_prefix/8); ++i)
{
diff --git a/deps/uv/src/unix/bsd-ifaddrs.c b/deps/uv/src/unix/bsd-ifaddrs.c
index ffcf156440d559..2593b9ff330e84 100644
--- a/deps/uv/src/unix/bsd-ifaddrs.c
+++ b/deps/uv/src/unix/bsd-ifaddrs.c
@@ -50,7 +50,11 @@ static int uv__ifaddr_exclude(struct ifaddrs *ent, int exclude_type) {
*/
if (ent->ifa_addr->sa_family == AF_LINK)
return 1;
-#elif defined(__NetBSD__) || defined(__OpenBSD__)
+#elif defined(__NetBSD__)
+ if (ent->ifa_addr->sa_family != PF_INET &&
+ ent->ifa_addr->sa_family != PF_INET6)
+ return 1;
+#elif defined(__OpenBSD__)
if (ent->ifa_addr->sa_family != PF_INET)
return 1;
#endif
diff --git a/deps/uv/src/unix/core.c b/deps/uv/src/unix/core.c
index bee641cb440410..ef82ee27b8568f 100644
--- a/deps/uv/src/unix/core.c
+++ b/deps/uv/src/unix/core.c
@@ -58,13 +58,19 @@
#if defined(__DragonFly__) || \
defined(__FreeBSD__) || \
- defined(__FreeBSD_kernel__)
+ defined(__FreeBSD_kernel__) || \
+ defined(__NetBSD__)
# include <sys/sysctl.h>
# include <sys/filio.h>
# include <sys/wait.h>
# define UV__O_CLOEXEC O_CLOEXEC
# if defined(__FreeBSD__) && __FreeBSD__ >= 10
# define uv__accept4 accept4
+# endif
+# if defined(__NetBSD__)
+# define uv__accept4(a, b, c, d) paccept((a), (b), (c), NULL, (d))
+# endif
+# if (defined(__FreeBSD__) && __FreeBSD__ >= 10) || defined(__NetBSD__)
# define UV__SOCK_NONBLOCK SOCK_NONBLOCK
# define UV__SOCK_CLOEXEC SOCK_CLOEXEC
# endif
@@ -462,7 +468,9 @@ int uv__accept(int sockfd) {
assert(sockfd >= 0);
while (1) {
-#if defined(__linux__) || (defined(__FreeBSD__) && __FreeBSD__ >= 10)
+#if defined(__linux__) || \
+ (defined(__FreeBSD__) && __FreeBSD__ >= 10) || \
+ defined(__NetBSD__)
static int no_accept4;
if (no_accept4)
@@ -988,7 +996,7 @@ int uv__open_cloexec(const char* path, int flags) {
int uv__dup2_cloexec(int oldfd, int newfd) {
int r;
-#if defined(__FreeBSD__) && __FreeBSD__ >= 10
+#if (defined(__FreeBSD__) && __FreeBSD__ >= 10) || defined(__NetBSD__)
r = dup3(oldfd, newfd, O_CLOEXEC);
if (r == -1)
return -errno;
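
The core.c hunks above let NetBSD use the atomic `accept4()`/`dup3()` paths, mapping `accept4()` onto NetBSD's `paccept(2)`. A minimal standalone sketch of why the atomic variant matters (helper name is illustrative, not from the patch; builds on Linux and FreeBSD >= 10):

```c
#define _GNU_SOURCE  /* accept4() on glibc */
#include <errno.h>
#include <sys/socket.h>

/* Accept a connection with close-on-exec and non-blocking applied
 * atomically, avoiding the window a separate fcntl() call would leave
 * open across fork/exec. On NetBSD the patch gets the same effect from
 * paccept(fd, addr, len, NULL, SOCK_NONBLOCK | SOCK_CLOEXEC). */
int accept_cloexec_nonblock(int listenfd) {
  int fd;

  do
    fd = accept4(listenfd, NULL, NULL, SOCK_NONBLOCK | SOCK_CLOEXEC);
  while (fd == -1 && errno == EINTR);

  return fd;  /* -1 with errno set on failure */
}
```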
diff --git a/deps/uv/src/unix/fs.c b/deps/uv/src/unix/fs.c
index 5a172cc7805171..2684c814a2b1d8 100644
--- a/deps/uv/src/unix/fs.c
+++ b/deps/uv/src/unix/fs.c
@@ -438,7 +438,12 @@ static ssize_t uv__fs_readlink(uv_fs_t* req) {
return -1;
}
+#if defined(__MVS__)
+ len = os390_readlink(req->path, buf, len);
+#else
len = readlink(req->path, buf, len);
+#endif
+
if (len == -1) {
uv__free(buf);
@@ -795,6 +800,7 @@ static ssize_t uv__fs_copyfile(uv_fs_t* req) {
int64_t in_offset;
dstfd = -1;
+ err = 0;
/* Open the source file. */
srcfd = uv_fs_open(NULL, &fs_req, req->path, O_RDONLY, 0, NULL);
@@ -809,7 +815,7 @@ static ssize_t uv__fs_copyfile(uv_fs_t* req) {
goto out;
}
- dst_flags = O_WRONLY | O_CREAT;
+ dst_flags = O_WRONLY | O_CREAT | O_TRUNC;
if (req->flags & UV_FS_COPYFILE_EXCL)
dst_flags |= O_EXCL;
@@ -828,6 +834,11 @@ static ssize_t uv__fs_copyfile(uv_fs_t* req) {
goto out;
}
+ if (fchmod(dstfd, statsbuf.st_mode) == -1) {
+ err = -errno;
+ goto out;
+ }
+
bytes_to_send = statsbuf.st_size;
in_offset = 0;
while (bytes_to_send != 0) {
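
For context on the `uv__fs_copyfile()` changes above (`O_TRUNC` so a longer pre-existing destination is not left with stale trailing bytes, plus `fchmod()` to propagate the source mode), here is a minimal synchronous use of the public API; a sketch assuming libuv >= 1.14 and placeholder file names:

```c
#include <stdio.h>
#include <uv.h>

int main(void) {
  uv_fs_t req;
  /* Synchronous copy: NULL loop and NULL callback. With these hunks the
   * destination is opened O_WRONLY|O_CREAT|O_TRUNC and then fchmod()ed
   * to the source's mode. "src.txt"/"dst.txt" are placeholders. */
  int r = uv_fs_copyfile(NULL, &req, "src.txt", "dst.txt", 0, NULL);
  if (r < 0)
    fprintf(stderr, "copyfile: %s\n", uv_strerror(r));
  uv_fs_req_cleanup(&req);
  return r < 0;
}
```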
diff --git a/deps/uv/src/unix/fsevents.c b/deps/uv/src/unix/fsevents.c
index 643e233cfe9e94..38837406a66f68 100644
--- a/deps/uv/src/unix/fsevents.c
+++ b/deps/uv/src/unix/fsevents.c
@@ -230,6 +230,7 @@ static void uv__fsevents_event_cb(ConstFSEventStreamRef streamRef,
uv_loop_t* loop;
uv__cf_loop_state_t* state;
uv__fsevents_event_t* event;
+ FSEventStreamEventFlags flags;
QUEUE head;
loop = info;
@@ -245,8 +246,10 @@ static void uv__fsevents_event_cb(ConstFSEventStreamRef streamRef,
/* Process and filter out events */
for (i = 0; i < numEvents; i++) {
+ flags = eventFlags[i];
+
/* Ignore system events */
- if (eventFlags[i] & kFSEventsSystem)
+ if (flags & kFSEventsSystem)
continue;
path = paths[i];
@@ -271,6 +274,9 @@ static void uv__fsevents_event_cb(ConstFSEventStreamRef streamRef,
/* Ignore events with path equal to directory itself */
if (len == 0)
continue;
+#else
+ if (len == 0 && (flags & kFSEventStreamEventFlagItemIsDir))
+ continue;
#endif /* MAC_OS_X_VERSION_10_7 */
/* Do not emit events from subdirectories (without option set) */
@@ -291,12 +297,24 @@ static void uv__fsevents_event_cb(ConstFSEventStreamRef streamRef,
memset(event, 0, sizeof(*event));
memcpy(event->path, path, len + 1);
+ event->events = UV_RENAME;
- if ((eventFlags[i] & kFSEventsModified) != 0 &&
- (eventFlags[i] & kFSEventsRenamed) == 0)
+#ifdef MAC_OS_X_VERSION_10_7
+ if (0 != (flags & kFSEventsModified) &&
+ 0 == (flags & kFSEventsRenamed)) {
+ event->events = UV_CHANGE;
+ }
+#else
+ if (0 != (flags & kFSEventsModified) &&
+ 0 != (flags & kFSEventStreamEventFlagItemIsDir) &&
+ 0 == (flags & kFSEventStreamEventFlagItemRenamed)) {
event->events = UV_CHANGE;
- else
- event->events = UV_RENAME;
+ }
+ if (0 == (flags & kFSEventStreamEventFlagItemIsDir) &&
+ 0 == (flags & kFSEventStreamEventFlagItemRenamed)) {
+ event->events = UV_CHANGE;
+ }
+#endif /* MAC_OS_X_VERSION_10_7 */
QUEUE_INSERT_TAIL(&head, &event->member);
}
diff --git a/deps/uv/src/unix/internal.h b/deps/uv/src/unix/internal.h
index c0898d982e9815..3df5c4c3eb8619 100644
--- a/deps/uv/src/unix/internal.h
+++ b/deps/uv/src/unix/internal.h
@@ -175,7 +175,8 @@ struct uv__stream_queued_fds_s {
defined(__FreeBSD__) || \
defined(__FreeBSD_kernel__) || \
defined(__linux__) || \
- defined(__OpenBSD__)
+ defined(__OpenBSD__) || \
+ defined(__NetBSD__)
#define uv__cloexec uv__cloexec_ioctl
#define uv__nonblock uv__nonblock_ioctl
#else
diff --git a/deps/uv/src/unix/kqueue.c b/deps/uv/src/unix/kqueue.c
index 300bac07c3322f..c9adddbdb8722a 100644
--- a/deps/uv/src/unix/kqueue.c
+++ b/deps/uv/src/unix/kqueue.c
@@ -59,7 +59,9 @@ int uv__kqueue_init(uv_loop_t* loop) {
}
+#if defined(__APPLE__)
static int uv__has_forked_with_cfrunloop;
+#endif
int uv__io_fork(uv_loop_t* loop) {
int err;
diff --git a/deps/uv/src/unix/netbsd.c b/deps/uv/src/unix/netbsd.c
index c54c04df28c201..d9066349c1d623 100644
--- a/deps/uv/src/unix/netbsd.c
+++ b/deps/uv/src/unix/netbsd.c
@@ -66,22 +66,32 @@ void uv_loadavg(double avg[3]) {
int uv_exepath(char* buffer, size_t* size) {
+  /* Intermediate buffer: retrieving a partial path name does not work.
+   * As of NetBSD-8 (beta), the vnode->path translator does not handle
+   * files with names longer than 31 characters.
+   */
+ char int_buf[PATH_MAX];
+ size_t int_size;
int mib[4];
- size_t cb;
- pid_t mypid;
if (buffer == NULL || size == NULL || *size == 0)
return -EINVAL;
- mypid = getpid();
mib[0] = CTL_KERN;
mib[1] = KERN_PROC_ARGS;
- mib[2] = mypid;
- mib[3] = KERN_PROC_ARGV;
+ mib[2] = -1;
+ mib[3] = KERN_PROC_PATHNAME;
+ int_size = ARRAY_SIZE(int_buf);
- cb = *size;
- if (sysctl(mib, 4, buffer, &cb, NULL, 0))
+ if (sysctl(mib, 4, int_buf, &int_size, NULL, 0))
return -errno;
+
+  /* Copy the string from the intermediate buffer to the outer one,
+   * truncating to the caller-supplied size.
+   */
+ strlcpy(buffer, int_buf, *size);
+
+ /* Set new size. */
*size = strlen(buffer);
return 0;
diff --git a/deps/uv/src/unix/os390-syscalls.c b/deps/uv/src/unix/os390-syscalls.c
index 08623f4eafa137..ca539c26f7b447 100644
--- a/deps/uv/src/unix/os390-syscalls.c
+++ b/deps/uv/src/unix/os390-syscalls.c
@@ -130,17 +130,17 @@ static void epoll_init(void) {
uv__os390_epoll* epoll_create1(int flags) {
uv__os390_epoll* lst;
- uv_once(&once, epoll_init);
- uv_mutex_lock(&global_epoll_lock);
lst = uv__malloc(sizeof(*lst));
- if (lst == -1)
- return NULL;
- QUEUE_INSERT_TAIL(&global_epoll_queue, &lst->member);
- uv_mutex_unlock(&global_epoll_lock);
+ if (lst != NULL) {
+ /* initialize list */
+ lst->size = 0;
+ lst->items = NULL;
+ uv_once(&once, epoll_init);
+ uv_mutex_lock(&global_epoll_lock);
+ QUEUE_INSERT_TAIL(&global_epoll_queue, &lst->member);
+ uv_mutex_unlock(&global_epoll_lock);
+ }
- /* initialize list */
- lst->size = 0;
- lst->items = NULL;
return lst;
}
@@ -149,8 +149,11 @@ int epoll_ctl(uv__os390_epoll* lst,
int op,
int fd,
struct epoll_event *event) {
+ uv_mutex_lock(&global_epoll_lock);
+
if(op == EPOLL_CTL_DEL) {
if (fd >= lst->size || lst->items[fd].fd == -1) {
+ uv_mutex_unlock(&global_epoll_lock);
errno = ENOENT;
return -1;
}
@@ -158,6 +161,7 @@ int epoll_ctl(uv__os390_epoll* lst,
} else if(op == EPOLL_CTL_ADD) {
maybe_resize(lst, fd + 1);
if (lst->items[fd].fd != -1) {
+ uv_mutex_unlock(&global_epoll_lock);
errno = EEXIST;
return -1;
}
@@ -165,6 +169,7 @@ int epoll_ctl(uv__os390_epoll* lst,
lst->items[fd].events = event->events;
} else if(op == EPOLL_CTL_MOD) {
if (fd >= lst->size || lst->items[fd].fd == -1) {
+ uv_mutex_unlock(&global_epoll_lock);
errno = ENOENT;
return -1;
}
@@ -172,6 +177,7 @@ int epoll_ctl(uv__os390_epoll* lst,
} else
abort();
+ uv_mutex_unlock(&global_epoll_lock);
return 0;
}
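
The `epoll_ctl()` hunks above take the global lock for the whole operation and, crucially, release it on every early error return. A generic pthread sketch of that pattern (all names illustrative, not from the patch):

```c
#include <errno.h>
#include <pthread.h>

#define REGISTRY_SIZE 128

static pthread_mutex_t registry_lock = PTHREAD_MUTEX_INITIALIZER;
static int registry[REGISTRY_SIZE];

/* Delete an entry; every path out of the critical section, including
 * the error return, releases the lock before leaving. */
int registry_del(int fd) {
  pthread_mutex_lock(&registry_lock);

  if (fd < 0 || fd >= REGISTRY_SIZE || registry[fd] == 0) {
    pthread_mutex_unlock(&registry_lock);  /* unlock before bailing out */
    errno = ENOENT;
    return -1;
  }

  registry[fd] = 0;
  pthread_mutex_unlock(&registry_lock);
  return 0;
}
```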
@@ -321,3 +327,72 @@ char* mkdtemp(char* path) {
return path;
}
+
+
+ssize_t os390_readlink(const char* path, char* buf, size_t len) {
+ ssize_t rlen;
+ ssize_t vlen;
+ ssize_t plen;
+ char* delimiter;
+ char old_delim;
+ char* tmpbuf;
+ char realpathstr[PATH_MAX + 1];
+
+ tmpbuf = uv__malloc(len + 1);
+ if (tmpbuf == NULL) {
+ errno = ENOMEM;
+ return -1;
+ }
+
+ rlen = readlink(path, tmpbuf, len);
+ if (rlen < 0) {
+ uv__free(tmpbuf);
+ return rlen;
+ }
+
+ if (rlen < 3 || strncmp("/$", tmpbuf, 2) != 0) {
+ /* Straightforward readlink. */
+ memcpy(buf, tmpbuf, rlen);
+ uv__free(tmpbuf);
+ return rlen;
+ }
+
+ /*
+ * There is a parmlib variable at the beginning
+ * which needs interpretation.
+ */
+ tmpbuf[rlen] = '\0';
+ delimiter = strchr(tmpbuf + 2, '/');
+ if (delimiter == NULL)
+ /* No slash at the end */
+ delimiter = strchr(tmpbuf + 2, '\0');
+
+ /* Read real path of the variable. */
+ old_delim = *delimiter;
+ *delimiter = '\0';
+ if (realpath(tmpbuf, realpathstr) == NULL) {
+ uv__free(tmpbuf);
+ return -1;
+ }
+
+  /* realpathstr is not guaranteed to end with a null byte. */
+ realpathstr[PATH_MAX] = '\0';
+
+ /* Reset the delimiter and fill up the buffer. */
+ *delimiter = old_delim;
+ plen = strlen(delimiter);
+ vlen = strlen(realpathstr);
+ rlen = plen + vlen;
+ if (rlen > len) {
+ uv__free(tmpbuf);
+ errno = ENAMETOOLONG;
+ return -1;
+ }
+ memcpy(buf, realpathstr, vlen);
+ memcpy(buf + vlen, delimiter, plen);
+
+ /* Done using temporary buffer. */
+ uv__free(tmpbuf);
+
+ return rlen;
+}
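
`os390_readlink()` above expands a leading z/OS parmlib variable (a `/$VAR` prefix) via `realpath()` and splices the rest of the link target back on. A portable sketch of just the splice step, with `/tmp` standing in for the variable prefix (paths purely illustrative):

```c
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void) {
  const char* prefix = "/tmp";     /* stands in for a "/$VAR" component */
  const char* rest = "/sub/file";  /* remainder of the link target */
  char resolved[PATH_MAX + 1];
  char out[PATH_MAX + 1];

  /* Expand the leading component. */
  if (realpath(prefix, resolved) == NULL)
    return 1;
  resolved[PATH_MAX] = '\0';  /* mirror the defensive termination above */

  if (strlen(resolved) + strlen(rest) >= sizeof(out))
    return 1;  /* would overflow: the patch returns ENAMETOOLONG here */

  /* Re-attach the remainder to the resolved prefix. */
  strcpy(out, resolved);
  strcat(out, rest);
  printf("%s\n", out);
  return 0;
}
```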
diff --git a/deps/uv/src/unix/os390-syscalls.h b/deps/uv/src/unix/os390-syscalls.h
index 61a7cee8396587..7aba3d27da7f9b 100644
--- a/deps/uv/src/unix/os390-syscalls.h
+++ b/deps/uv/src/unix/os390-syscalls.h
@@ -65,5 +65,6 @@ int scandir(const char* maindir, struct dirent*** namelist,
int (*compar)(const struct dirent **,
const struct dirent **));
char *mkdtemp(char* path);
+ssize_t os390_readlink(const char* path, char* buf, size_t len);
#endif /* UV_OS390_SYSCALL_H_ */
diff --git a/deps/uv/src/unix/os390.c b/deps/uv/src/unix/os390.c
index 559970de2c3d59..127656db8789e6 100644
--- a/deps/uv/src/unix/os390.c
+++ b/deps/uv/src/unix/os390.c
@@ -117,7 +117,7 @@ void uv_loadavg(double avg[3]) {
int uv__platform_loop_init(uv_loop_t* loop) {
uv__os390_epoll* ep;
- ep = epoll_create1(UV__EPOLL_CLOEXEC);
+ ep = epoll_create1(0);
loop->ep = ep;
if (ep == NULL)
return -errno;
@@ -386,7 +386,6 @@ int uv_uptime(double* uptime) {
int uv_cpu_info(uv_cpu_info_t** cpu_infos, int* count) {
uv_cpu_info_t* cpu_info;
- int result;
int idx;
siv1v2 info;
data_area_ptr cvt = {0};
diff --git a/deps/uv/src/unix/stream.c b/deps/uv/src/unix/stream.c
index c502098dcf1b38..672a7e2d6c65f6 100644
--- a/deps/uv/src/unix/stream.c
+++ b/deps/uv/src/unix/stream.c
@@ -859,7 +859,7 @@ static void uv__write(uv_stream_t* stream) {
}
if (n < 0) {
- if (errno != EAGAIN && errno != EWOULDBLOCK) {
+ if (errno != EAGAIN && errno != EWOULDBLOCK && errno != ENOBUFS) {
err = -errno;
goto error;
} else if (stream->flags & UV_STREAM_BLOCKING) {
diff --git a/deps/uv/src/unix/sunos.c b/deps/uv/src/unix/sunos.c
index 49de5a7fcd5b1f..a72c26a01f0a65 100644
--- a/deps/uv/src/unix/sunos.c
+++ b/deps/uv/src/unix/sunos.c
@@ -757,7 +757,6 @@ int uv_interface_addresses(uv_interface_address_t** addresses, int* count) {
uv_interface_address_t* address;
struct ifaddrs* addrs;
struct ifaddrs* ent;
- int i;
if (getifaddrs(&addrs))
return -errno;
diff --git a/deps/uv/src/unix/thread.c b/deps/uv/src/unix/thread.c
index f8846225910a80..abaca295d247c3 100644
--- a/deps/uv/src/unix/thread.c
+++ b/deps/uv/src/unix/thread.c
@@ -145,36 +145,55 @@ int pthread_barrier_destroy(pthread_barrier_t* barrier) {
#endif
-int uv_thread_create(uv_thread_t *tid, void (*entry)(void *arg), void *arg) {
- int err;
- pthread_attr_t* attr;
-#if defined(__APPLE__)
- pthread_attr_t attr_storage;
+/* On macOS, threads other than the main thread are created with a reduced
+ * stack size by default. Adjust to RLIMIT_STACK, aligned to the page size.
+ *
+ * On Linux, threads created by musl have a much smaller stack than threads
+ * created by glibc (80 vs. 2048 or 4096 kB). Follow glibc for consistency.
+ */
+static size_t thread_stack_size(void) {
+#if defined(__APPLE__) || defined(__linux__)
struct rlimit lim;
-#endif
- /* On OSX threads other than the main thread are created with a reduced stack
- * size by default, adjust it to RLIMIT_STACK.
- */
-#if defined(__APPLE__)
if (getrlimit(RLIMIT_STACK, &lim))
abort();
- attr = &attr_storage;
- if (pthread_attr_init(attr))
- abort();
-
if (lim.rlim_cur != RLIM_INFINITY) {
/* pthread_attr_setstacksize() expects page-aligned values. */
lim.rlim_cur -= lim.rlim_cur % (rlim_t) getpagesize();
-
if (lim.rlim_cur >= PTHREAD_STACK_MIN)
- if (pthread_attr_setstacksize(attr, lim.rlim_cur))
- abort();
+ return lim.rlim_cur;
}
+#endif
+
+#if !defined(__linux__)
+ return 0;
+#elif defined(__PPC__) || defined(__ppc__) || defined(__powerpc__)
+ return 4 << 20; /* glibc default. */
#else
- attr = NULL;
+ return 2 << 20; /* glibc default. */
#endif
+}
+
+
+int uv_thread_create(uv_thread_t *tid, void (*entry)(void *arg), void *arg) {
+ int err;
+ size_t stack_size;
+ pthread_attr_t* attr;
+ pthread_attr_t attr_storage;
+
+ attr = NULL;
+ stack_size = thread_stack_size();
+
+ if (stack_size > 0) {
+ attr = &attr_storage;
+
+ if (pthread_attr_init(attr))
+ abort();
+
+ if (pthread_attr_setstacksize(attr, stack_size))
+ abort();
+ }
err = pthread_create(tid, attr, (void*(*)(void*)) entry, arg);
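
A standalone sketch of the stack-size computation introduced above: take the `RLIMIT_STACK` soft limit, round it down to a page boundary as `pthread_attr_setstacksize()` requires, and fall back to a glibc-style default when the limit is unusable (error handling simplified; not the patch's exact control flow):

```c
#include <limits.h>        /* PTHREAD_STACK_MIN */
#include <pthread.h>
#include <stdio.h>
#include <sys/resource.h>  /* getrlimit */
#include <unistd.h>        /* getpagesize */

int main(void) {
  struct rlimit lim;
  size_t size;

  if (getrlimit(RLIMIT_STACK, &lim) != 0)
    return 1;

  if (lim.rlim_cur == RLIM_INFINITY || lim.rlim_cur < PTHREAD_STACK_MIN) {
    size = 2 << 20;  /* fall back to a glibc-style 2 MB default */
  } else {
    /* pthread_attr_setstacksize() expects page-aligned values. */
    size = lim.rlim_cur - lim.rlim_cur % (rlim_t) getpagesize();
  }

  printf("worker thread stack size: %zu bytes\n", size);
  return 0;
}
```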
@@ -222,6 +241,25 @@ int uv_mutex_init(uv_mutex_t* mutex) {
}
+int uv_mutex_init_recursive(uv_mutex_t* mutex) {
+ pthread_mutexattr_t attr;
+ int err;
+
+ if (pthread_mutexattr_init(&attr))
+ abort();
+
+ if (pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE))
+ abort();
+
+ err = pthread_mutex_init(mutex, &attr);
+
+ if (pthread_mutexattr_destroy(&attr))
+ abort();
+
+ return -err;
+}
+
+
void uv_mutex_destroy(uv_mutex_t* mutex) {
if (pthread_mutex_destroy(mutex))
abort();
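
With the new `uv_mutex_init_recursive()` (declared in uv.h above), the same thread can re-acquire a mutex it already holds, e.g. when a locked section re-enters itself through a callback. A minimal sketch against libuv >= 1.15.0:

```c
#include <stdio.h>
#include <uv.h>

static uv_mutex_t lock;

static void inner(void) {
  uv_mutex_lock(&lock);  /* second acquisition by the same thread */
  puts("re-entered without deadlocking");
  uv_mutex_unlock(&lock);
}

int main(void) {
  if (uv_mutex_init_recursive(&lock) != 0)
    return 1;

  uv_mutex_lock(&lock);
  inner();  /* a plain uv_mutex_init() mutex would deadlock here */
  uv_mutex_unlock(&lock);

  uv_mutex_destroy(&lock);
  return 0;
}
```

On Windows (further below) the new function simply forwards to `uv_mutex_init()`, since critical sections are already recursive there.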
@@ -385,7 +423,6 @@ int uv_sem_trywait(uv_sem_t* sem) {
int uv_sem_init(uv_sem_t* sem, unsigned int value) {
uv_sem_t semid;
- struct sembuf buf;
int err;
union {
int val;
diff --git a/deps/uv/src/unix/tty.c b/deps/uv/src/unix/tty.c
index b2d37f4c2c1e5b..357f9748f65bf1 100644
--- a/deps/uv/src/unix/tty.c
+++ b/deps/uv/src/unix/tty.c
@@ -48,6 +48,42 @@ static int uv__tty_is_slave(const int fd) {
char dummy[256];
result = ioctl(fd, TIOCPTYGNAME, &dummy) != 0;
+#elif defined(__NetBSD__)
+ /*
+   * As an extension, NetBSD's ptsname(3) and ptsname_r(3) return the slave
+   * device name for both descriptors, the master and the slave.
+   *
+   * Instead, compare the descriptor's major device number against the pts
+   * driver's.
+   *
+   * The major numbers are machine-dependent; on NetBSD/amd64 they are
+ * respectively:
+ * - master tty: ptc - major 6
+ * - slave tty: pts - major 5
+ */
+
+ struct stat sb;
+ /* Lookup device's major for the pts driver and cache it. */
+ static devmajor_t pts = NODEVMAJOR;
+
+ if (pts == NODEVMAJOR) {
+ pts = getdevmajor("pts", S_IFCHR);
+ if (pts == NODEVMAJOR)
+ abort();
+ }
+
+ /* Lookup stat structure behind the file descriptor. */
+ if (fstat(fd, &sb) != 0)
+ abort();
+
+ /* Assert character device. */
+ if (!S_ISCHR(sb.st_mode))
+ abort();
+
+ /* Assert valid major. */
+ if (major(sb.st_rdev) == NODEVMAJOR)
+ abort();
+
+ result = (pts == major(sb.st_rdev));
#else
/* Fallback to ptsname
*/
diff --git a/deps/uv/src/unix/udp.c b/deps/uv/src/unix/udp.c
index c556325de018b3..a475bf5741634d 100644
--- a/deps/uv/src/unix/udp.c
+++ b/deps/uv/src/unix/udp.c
@@ -237,8 +237,10 @@ static void uv__udp_sendmsg(uv_udp_t* handle) {
size = sendmsg(handle->io_watcher.fd, &h, 0);
} while (size == -1 && errno == EINTR);
- if (size == -1 && (errno == EAGAIN || errno == EWOULDBLOCK))
- break;
+ if (size == -1) {
+ if (errno == EAGAIN || errno == EWOULDBLOCK || errno == ENOBUFS)
+ break;
+ }
req->status = (size == -1 ? -errno : size);
@@ -472,7 +474,7 @@ int uv__udp_try_send(uv_udp_t* handle,
} while (size == -1 && errno == EINTR);
if (size == -1) {
- if (errno == EAGAIN || errno == EWOULDBLOCK)
+ if (errno == EAGAIN || errno == EWOULDBLOCK || errno == ENOBUFS)
return -EAGAIN;
else
return -errno;
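
The stream.c and udp.c hunks above treat `ENOBUFS` like `EAGAIN`/`EWOULDBLOCK`: a transient condition to retry after polling, not a fatal error. A condensed sketch of that classification (helper name illustrative, not from the patch):

```c
#include <errno.h>
#include <sys/socket.h>

/* Classify a failed sendmsg() the way the hunks above do: EINTR restarts
 * immediately; EAGAIN/EWOULDBLOCK/ENOBUFS (the latter is how some BSDs
 * report transient kernel buffer exhaustion) mean "poll for writability
 * and retry later" rather than "fail the request". */
static ssize_t udp_try_send(int fd, const struct msghdr* h) {
  ssize_t size;

  do
    size = sendmsg(fd, h, 0);
  while (size == -1 && errno == EINTR);

  if (size == -1 &&
      (errno == EAGAIN || errno == EWOULDBLOCK || errno == ENOBUFS))
    return -EAGAIN;  /* transient: caller retries when fd is writable */

  return size == -1 ? -errno : size;
}
```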
diff --git a/deps/uv/src/win/error.c b/deps/uv/src/win/error.c
index 642d1112e11928..9b03bfef6b5d71 100644
--- a/deps/uv/src/win/error.c
+++ b/deps/uv/src/win/error.c
@@ -58,7 +58,7 @@ void uv_fatal_error(const int errorno, const char* syscall) {
LocalFree(buf);
}
- *((char*)NULL) = 0xff; /* Force debug break */
+ DebugBreak();
abort();
}
diff --git a/deps/uv/src/win/process.c b/deps/uv/src/win/process.c
index 97b67ca529582e..764250e138c48d 100644
--- a/deps/uv/src/win/process.c
+++ b/deps/uv/src/win/process.c
@@ -1058,15 +1058,18 @@ int uv_spawn(uv_loop_t* loop,
startup.hStdOutput = uv__stdio_handle(process->child_stdio_buffer, 1);
startup.hStdError = uv__stdio_handle(process->child_stdio_buffer, 2);
+ process_flags = CREATE_UNICODE_ENVIRONMENT;
+
if (options->flags & UV_PROCESS_WINDOWS_HIDE) {
/* Use SW_HIDE to avoid any potential process window. */
startup.wShowWindow = SW_HIDE;
+
+ /* Hide console windows. */
+ process_flags |= CREATE_NO_WINDOW;
} else {
startup.wShowWindow = SW_SHOWDEFAULT;
}
- process_flags = CREATE_UNICODE_ENVIRONMENT;
-
if (options->flags & UV_PROCESS_DETACHED) {
/* Note that we're not setting the CREATE_BREAKAWAY_FROM_JOB flag. That
* means that libuv might not let you create a fully daemonized process
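
`UV_PROCESS_WINDOWS_HIDE` now combines `SW_HIDE` with `CREATE_NO_WINDOW`, so console children do not flash a console window. A minimal Win32 sketch of the same flag combination (`child.exe` is a placeholder):

```c
#include <windows.h>

int spawn_hidden(void) {
  STARTUPINFOW si;
  PROCESS_INFORMATION pi;
  wchar_t cmd[] = L"child.exe";  /* CreateProcessW may modify this buffer */

  ZeroMemory(&si, sizeof(si));
  si.cb = sizeof(si);
  si.dwFlags = STARTF_USESHOWWINDOW;
  si.wShowWindow = SW_HIDE;  /* hide any GUI window */

  /* CREATE_NO_WINDOW keeps a console child from allocating a console. */
  if (!CreateProcessW(NULL, cmd, NULL, NULL, FALSE,
                      CREATE_UNICODE_ENVIRONMENT | CREATE_NO_WINDOW,
                      NULL, NULL, &si, &pi))
    return (int) GetLastError();

  CloseHandle(pi.hThread);
  CloseHandle(pi.hProcess);
  return 0;
}
```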
diff --git a/deps/uv/src/win/signal.c b/deps/uv/src/win/signal.c
index 7b42dd99280a00..a174da1f760d62 100644
--- a/deps/uv/src/win/signal.c
+++ b/deps/uv/src/win/signal.c
@@ -64,7 +64,7 @@ static int uv__signal_compare(uv_signal_t* w1, uv_signal_t* w2) {
}
-RB_GENERATE_STATIC(uv_signal_tree_s, uv_signal_s, tree_entry, uv__signal_compare);
+RB_GENERATE_STATIC(uv_signal_tree_s, uv_signal_s, tree_entry, uv__signal_compare)
/*
diff --git a/deps/uv/src/win/thread.c b/deps/uv/src/win/thread.c
index 91684e93875541..30b2d7793cf906 100644
--- a/deps/uv/src/win/thread.c
+++ b/deps/uv/src/win/thread.c
@@ -198,6 +198,11 @@ int uv_mutex_init(uv_mutex_t* mutex) {
}
+int uv_mutex_init_recursive(uv_mutex_t* mutex) {
+ return uv_mutex_init(mutex);
+}
+
+
void uv_mutex_destroy(uv_mutex_t* mutex) {
DeleteCriticalSection(mutex);
}
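
On Windows a `CRITICAL_SECTION` is recursive by design, so `uv_mutex_init_recursive()` can simply forward to `uv_mutex_init()`. The POSIX side needs an explicit attribute; a sketch of that counterpart (illustrative `mutex_init_recursive()` helper, not the exact libuv implementation):

```c
#include <pthread.h>

static int mutex_init_recursive(pthread_mutex_t* mutex) {
  pthread_mutexattr_t attr;
  int err;

  if (pthread_mutexattr_init(&attr))
    return -1;
  /* Allow the owning thread to lock the mutex more than once. */
  if (pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE))
    err = -1;
  else
    err = pthread_mutex_init(mutex, &attr) ? -1 : 0;
  pthread_mutexattr_destroy(&attr);
  return err;
}
```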
diff --git a/deps/uv/src/win/timer.c b/deps/uv/src/win/timer.c
index 27ca7716af6d8a..7e006fedfaf3ee 100644
--- a/deps/uv/src/win/timer.c
+++ b/deps/uv/src/win/timer.c
@@ -56,7 +56,7 @@ static int uv_timer_compare(uv_timer_t* a, uv_timer_t* b) {
}
-RB_GENERATE_STATIC(uv_timer_tree_s, uv_timer_s, tree_entry, uv_timer_compare);
+RB_GENERATE_STATIC(uv_timer_tree_s, uv_timer_s, tree_entry, uv_timer_compare)
int uv_timer_init(uv_loop_t* loop, uv_timer_t* handle) {
diff --git a/deps/uv/src/win/tty.c b/deps/uv/src/win/tty.c
index c4f99bdc7961b8..4cd1c72129ceb9 100644
--- a/deps/uv/src/win/tty.c
+++ b/deps/uv/src/win/tty.c
@@ -112,14 +112,30 @@ static int uv_tty_virtual_offset = -1;
static int uv_tty_virtual_height = -1;
static int uv_tty_virtual_width = -1;
+/* The console window size.
+ * We keep this separate from uv_tty_virtual_*; those values are only used
+ * for signalling SIGWINCH.
+ */
+
+static HANDLE uv__tty_console_handle = INVALID_HANDLE_VALUE;
+static int uv__tty_console_height = -1;
+static int uv__tty_console_width = -1;
+
+static DWORD WINAPI uv__tty_console_resize_message_loop_thread(void* param);
+static void CALLBACK uv__tty_console_resize_event(HWINEVENTHOOK hWinEventHook,
+ DWORD event,
+ HWND hwnd,
+ LONG idObject,
+ LONG idChild,
+ DWORD dwEventThread,
+ DWORD dwmsEventTime);
+
/* We use a semaphore rather than a mutex or critical section because in some
cases (uv__cancel_read_console) we need to take the lock in the main thread and
release it in another thread. Using a semaphore ensures that in such
scenario the main thread will still block when trying to acquire the lock. */
static uv_sem_t uv_tty_output_lock;
-static HANDLE uv_tty_output_handle = INVALID_HANDLE_VALUE;
-
static WORD uv_tty_default_text_attributes =
FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE;
@@ -141,6 +157,18 @@ static void uv__determine_vterm_state(HANDLE handle);
void uv_console_init(void) {
if (uv_sem_init(&uv_tty_output_lock, 1))
abort();
+ uv__tty_console_handle = CreateFileW(L"CONOUT$",
+ GENERIC_READ | GENERIC_WRITE,
+ FILE_SHARE_WRITE,
+ 0,
+ OPEN_EXISTING,
+ 0,
+ 0);
+ if (uv__tty_console_handle != NULL) {
+ QueueUserWorkItem(uv__tty_console_resize_message_loop_thread,
+ NULL,
+ WT_EXECUTELONGFUNCTION);
+ }
}
@@ -184,11 +212,6 @@ int uv_tty_init(uv_loop_t* loop, uv_tty_t* tty, uv_file fd, int readable) {
if (uv__vterm_state == UV_UNCHECKED)
uv__determine_vterm_state(handle);
- /* Store the global tty output handle. This handle is used by TTY read */
- /* streams to update the virtual window when a CONSOLE_BUFFER_SIZE_EVENT */
- /* is received. */
- uv_tty_output_handle = handle;
-
/* Remember the original console text attributes. */
uv_tty_capture_initial_style(&screen_buffer_info);
@@ -705,25 +728,7 @@ void uv_process_tty_read_raw_req(uv_loop_t* loop, uv_tty_t* handle,
}
records_left--;
- /* If the window was resized, recompute the virtual window size. This */
- /* will trigger a SIGWINCH signal if the window size changed in an */
- /* way that matters to libuv. */
- if (handle->tty.rd.last_input_record.EventType == WINDOW_BUFFER_SIZE_EVENT) {
- CONSOLE_SCREEN_BUFFER_INFO info;
-
- uv_sem_wait(&uv_tty_output_lock);
-
- if (uv_tty_output_handle != INVALID_HANDLE_VALUE &&
- GetConsoleScreenBufferInfo(uv_tty_output_handle, &info)) {
- uv_tty_update_virtual_window(&info);
- }
-
- uv_sem_post(&uv_tty_output_lock);
-
- continue;
- }
-
- /* Ignore other events that are not key or resize events. */
+ /* Ignore other events that are not key events. */
if (handle->tty.rd.last_input_record.EventType != KEY_EVENT) {
continue;
}
@@ -1103,9 +1108,6 @@ static int uv__cancel_read_console(uv_tty_t* handle) {
static void uv_tty_update_virtual_window(CONSOLE_SCREEN_BUFFER_INFO* info) {
- int old_virtual_width = uv_tty_virtual_width;
- int old_virtual_height = uv_tty_virtual_height;
-
uv_tty_virtual_width = info->dwSize.X;
uv_tty_virtual_height = info->srWindow.Bottom - info->srWindow.Top + 1;
@@ -1125,14 +1127,6 @@ static void uv_tty_update_virtual_window(CONSOLE_SCREEN_BUFFER_INFO* info) {
if (uv_tty_virtual_offset < 0) {
uv_tty_virtual_offset = 0;
}
-
- /* If the virtual window size changed, emit a SIGWINCH signal. Don't emit */
- /* if this was the first time the virtual window size was computed. */
- if (old_virtual_width != -1 && old_virtual_height != -1 &&
- (uv_tty_virtual_width != old_virtual_width ||
- uv_tty_virtual_height != old_virtual_height)) {
- uv__signal_dispatch(SIGWINCH);
- }
}
@@ -2280,3 +2274,52 @@ static void uv__determine_vterm_state(HANDLE handle) {
uv__vterm_state = UV_SUPPORTED;
}
+
+static DWORD WINAPI uv__tty_console_resize_message_loop_thread(void* param) {
+ CONSOLE_SCREEN_BUFFER_INFO sb_info;
+ MSG msg;
+
+ if (!GetConsoleScreenBufferInfo(uv__tty_console_handle, &sb_info))
+ return 0;
+
+ uv__tty_console_width = sb_info.dwSize.X;
+ uv__tty_console_height = sb_info.srWindow.Bottom - sb_info.srWindow.Top + 1;
+
+ if (!SetWinEventHook(EVENT_CONSOLE_LAYOUT,
+ EVENT_CONSOLE_LAYOUT,
+ NULL,
+ uv__tty_console_resize_event,
+ 0,
+ 0,
+ WINEVENT_OUTOFCONTEXT))
+ return 0;
+
+ while (GetMessage(&msg, NULL, 0, 0)) {
+ TranslateMessage(&msg);
+ DispatchMessage(&msg);
+ }
+ return 0;
+}
+
+static void CALLBACK uv__tty_console_resize_event(HWINEVENTHOOK hWinEventHook,
+ DWORD event,
+ HWND hwnd,
+ LONG idObject,
+ LONG idChild,
+ DWORD dwEventThread,
+ DWORD dwmsEventTime) {
+ CONSOLE_SCREEN_BUFFER_INFO sb_info;
+ int width, height;
+
+ if (!GetConsoleScreenBufferInfo(uv__tty_console_handle, &sb_info))
+ return;
+
+ width = sb_info.dwSize.X;
+ height = sb_info.srWindow.Bottom - sb_info.srWindow.Top + 1;
+
+ if (width != uv__tty_console_width || height != uv__tty_console_height) {
+ uv__tty_console_width = width;
+ uv__tty_console_height = height;
+ uv__signal_dispatch(SIGWINCH);
+ }
+}
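
Taken together, these tty.c hunks move resize detection off the read path: instead of recomputing the virtual window whenever a `WINDOW_BUFFER_SIZE_EVENT` happens to arrive on a read stream, a dedicated worker registers an out-of-context win-event hook for `EVENT_CONSOLE_LAYOUT`. With `WINEVENT_OUTOFCONTEXT` the callback is delivered through the registering thread's message queue, which is why the thread must pump `GetMessage()`. A minimal sketch of the size query itself (standalone program, simplified error handling):

```c
#include <windows.h>
#include <stdio.h>

int main(void) {
  HANDLE con;
  CONSOLE_SCREEN_BUFFER_INFO info;

  /* CONOUT$ names the active console screen buffer even when stdout
   * is redirected, which is why it is used instead of stdout. */
  con = CreateFileW(L"CONOUT$", GENERIC_READ | GENERIC_WRITE,
                    FILE_SHARE_WRITE, NULL, OPEN_EXISTING, 0, NULL);
  if (con == INVALID_HANDLE_VALUE)
    return 1;

  if (!GetConsoleScreenBufferInfo(con, &info))
    return 1;

  printf("width=%d height=%d\n",
         info.dwSize.X,
         info.srWindow.Bottom - info.srWindow.Top + 1);
  CloseHandle(con);
  return 0;
}
```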
diff --git a/deps/uv/src/win/util.c b/deps/uv/src/win/util.c
index d2e7f772ce2977..a2acda1152f663 100644
--- a/deps/uv/src/win/util.c
+++ b/deps/uv/src/win/util.c
@@ -1388,7 +1388,7 @@ int uv__getpwuid_r(uv_passwd_t* pwd) {
if (OpenProcessToken(GetCurrentProcess(), TOKEN_READ, &token) == 0)
return uv_translate_sys_error(GetLastError());
- bufsize = sizeof(path);
+ bufsize = ARRAY_SIZE(path);
if (!GetUserProfileDirectoryW(token, path, &bufsize)) {
r = GetLastError();
CloseHandle(token);
@@ -1403,7 +1403,7 @@ int uv__getpwuid_r(uv_passwd_t* pwd) {
CloseHandle(token);
/* Get the username using GetUserNameW() */
- bufsize = sizeof(username);
+ bufsize = ARRAY_SIZE(username);
if (!GetUserNameW(username, &bufsize)) {
r = GetLastError();
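
Both `GetUserProfileDirectoryW()` and `GetUserNameW()` take a buffer length in characters, but `sizeof` on a `WCHAR` array yields bytes, twice the element count, so the old code overstated the capacity and invited an overrun. A hedged illustration (hypothetical `get_user_name_checked()` helper; link against advapi32):

```c
#include <windows.h>

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

static int get_user_name_checked(void) {
  WCHAR username[256];
  DWORD bufsize = ARRAY_SIZE(username);  /* characters, not bytes */

  /* With sizeof(username) here, the API would be told the buffer holds
   * 512 WCHARs when it only holds 256. */
  return GetUserNameW(username, &bufsize) ? 0 : -1;
}
```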
diff --git a/deps/uv/test/runner.c b/deps/uv/test/runner.c
index 4f54f85e2305f1..5e44118775edbc 100644
--- a/deps/uv/test/runner.c
+++ b/deps/uv/test/runner.c
@@ -29,6 +29,13 @@
char executable_path[sizeof(executable_path)];
+static int compare_task(const void* va, const void* vb) {
+ const task_entry_t* a = va;
+ const task_entry_t* b = vb;
+ return strcmp(a->task_name, b->task_name);
+}
+
+
const char* fmt(double d) {
static char buf[1024];
static char* p;
@@ -67,6 +74,7 @@ const char* fmt(double d) {
int run_tests(int benchmark_output) {
+ int actual;
int total;
int passed;
int failed;
@@ -76,13 +84,16 @@ int run_tests(int benchmark_output) {
task_entry_t* task;
/* Count the number of tests. */
+ actual = 0;
total = 0;
- for (task = TASKS; task->main; task++) {
+ for (task = TASKS; task->main; task++, actual++) {
if (!task->is_helper) {
total++;
}
}
+ qsort(TASKS, actual, sizeof(TASKS[0]), compare_task);
+
fprintf(stderr, "1..%d\n", total);
fflush(stderr);
@@ -352,12 +363,6 @@ int run_test_part(const char* test, const char* part) {
}
-static int compare_task(const void* va, const void* vb) {
- const task_entry_t* a = va;
- const task_entry_t* b = vb;
- return strcmp(a->task_name, b->task_name);
-}
-
static int find_helpers(const task_entry_t* task,
const task_entry_t** helpers) {
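
Moving `compare_task()` above `run_tests()` lets the runner sort the task table by name before emitting the TAP plan, so tests execute in a deterministic alphabetical order regardless of their declaration order in test-list.h. A small usage sketch of the same comparator (standalone, with a made-up three-entry table):

```c
#include <stdlib.h>
#include <string.h>

typedef struct { const char* task_name; } task_entry_t;

static int compare_task(const void* va, const void* vb) {
  const task_entry_t* a = va;
  const task_entry_t* b = vb;
  return strcmp(a->task_name, b->task_name);
}

int main(void) {
  task_entry_t tasks[] = { { "udp" }, { "fs" }, { "tty" } };

  /* After qsort the order is: fs, tty, udp. */
  qsort(tasks, sizeof(tasks) / sizeof(tasks[0]), sizeof(tasks[0]),
        compare_task);
  return 0;
}
```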
diff --git a/deps/uv/test/test-fs-copyfile.c b/deps/uv/test/test-fs-copyfile.c
index 2d1f9079a5f915..460c1dc6ae0b9f 100644
--- a/deps/uv/test/test-fs-copyfile.c
+++ b/deps/uv/test/test-fs-copyfile.c
@@ -68,7 +68,8 @@ static void touch_file(const char* name, unsigned int size) {
int r;
unsigned int i;
- r = uv_fs_open(NULL, &req, name, O_WRONLY | O_CREAT, S_IWUSR | S_IRUSR, NULL);
+ r = uv_fs_open(NULL, &req, name, O_WRONLY | O_CREAT | O_TRUNC,
+ S_IWUSR | S_IRUSR, NULL);
uv_fs_req_cleanup(&req);
ASSERT(r >= 0);
file = r;
@@ -119,6 +120,13 @@ TEST_IMPL(fs_copyfile) {
ASSERT(r == 0);
handle_result(&req);
+ /* Copies a file of size zero. */
+ unlink(dst);
+ touch_file(src, 0);
+ r = uv_fs_copyfile(NULL, &req, src, dst, 0, NULL);
+ ASSERT(r == 0);
+ handle_result(&req);
+
/* Copies file synchronously. Overwrites existing file. */
r = uv_fs_copyfile(NULL, &req, fixture, dst, 0, NULL);
ASSERT(r == 0);
@@ -129,6 +137,12 @@ TEST_IMPL(fs_copyfile) {
ASSERT(r == UV_EEXIST);
uv_fs_req_cleanup(&req);
+ /* Truncates when an existing destination is larger than the source file. */
+ touch_file(src, 1);
+ r = uv_fs_copyfile(NULL, &req, src, dst, 0, NULL);
+ ASSERT(r == 0);
+ handle_result(&req);
+
/* Copies a larger file. */
unlink(dst);
touch_file(src, 4096 * 2);
@@ -141,9 +155,9 @@ TEST_IMPL(fs_copyfile) {
unlink(dst);
r = uv_fs_copyfile(loop, &req, fixture, dst, 0, handle_result);
ASSERT(r == 0);
- ASSERT(result_check_count == 3);
+ ASSERT(result_check_count == 5);
uv_run(loop, UV_RUN_DEFAULT);
- ASSERT(result_check_count == 4);
+ ASSERT(result_check_count == 6);
unlink(dst); /* Cleanup */
return 0;
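
The `O_TRUNC` change matters because the new test cases reuse `touch_file()` to shrink an existing file: with `O_CREAT` alone the old length is kept and stale bytes remain past the new size, which is exactly the truncation behavior the copyfile tests need to exercise. A standalone sketch of the corrected helper (hypothetical `touch_file_posix()`, plain POSIX I/O instead of `uv_fs_open()`):

```c
#include <fcntl.h>
#include <unistd.h>

static int touch_file_posix(const char* name, unsigned int size) {
  /* O_TRUNC discards any previous contents before writing. */
  int fd = open(name, O_WRONLY | O_CREAT | O_TRUNC, 0600);
  unsigned int i;

  if (fd < 0)
    return -1;
  for (i = 0; i < size; i++)  /* write exactly 'size' bytes */
    if (write(fd, "a", 1) != 1) {
      close(fd);
      return -1;
    }
  return close(fd);
}
```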
diff --git a/deps/uv/test/test-list.h b/deps/uv/test/test-list.h
index 6e84653e8b4f83..0dde57c2ed22ab 100644
--- a/deps/uv/test/test-list.h
+++ b/deps/uv/test/test-list.h
@@ -328,6 +328,7 @@ TEST_DECLARE (threadpool_cancel_single)
TEST_DECLARE (thread_local_storage)
TEST_DECLARE (thread_stack_size)
TEST_DECLARE (thread_mutex)
+TEST_DECLARE (thread_mutex_recursive)
TEST_DECLARE (thread_rwlock)
TEST_DECLARE (thread_rwlock_trylock)
TEST_DECLARE (thread_create)
@@ -840,6 +841,7 @@ TASK_LIST_START
TEST_ENTRY (thread_local_storage)
TEST_ENTRY (thread_stack_size)
TEST_ENTRY (thread_mutex)
+ TEST_ENTRY (thread_mutex_recursive)
TEST_ENTRY (thread_rwlock)
TEST_ENTRY (thread_rwlock_trylock)
TEST_ENTRY (thread_create)
diff --git a/deps/uv/test/test-mutexes.c b/deps/uv/test/test-mutexes.c
index af5e4e88a22f30..975222ca1229d7 100644
--- a/deps/uv/test/test-mutexes.c
+++ b/deps/uv/test/test-mutexes.c
@@ -50,6 +50,26 @@ TEST_IMPL(thread_mutex) {
}
+TEST_IMPL(thread_mutex_recursive) {
+ uv_mutex_t mutex;
+ int r;
+
+ r = uv_mutex_init_recursive(&mutex);
+ ASSERT(r == 0);
+
+ uv_mutex_lock(&mutex);
+ uv_mutex_lock(&mutex);
+ ASSERT(0 == uv_mutex_trylock(&mutex));
+
+ uv_mutex_unlock(&mutex);
+ uv_mutex_unlock(&mutex);
+ uv_mutex_unlock(&mutex);
+ uv_mutex_destroy(&mutex);
+
+ return 0;
+}
+
+
TEST_IMPL(thread_rwlock) {
uv_rwlock_t rwlock;
int r;
diff --git a/deps/uv/test/test-poll.c b/deps/uv/test/test-poll.c
index 7cfc159a2b3d03..e828addbb48225 100644
--- a/deps/uv/test/test-poll.c
+++ b/deps/uv/test/test-poll.c
@@ -601,7 +601,8 @@ TEST_IMPL(poll_unidirectional) {
TEST_IMPL(poll_bad_fdtype) {
#if !defined(__DragonFly__) && !defined(__FreeBSD__) && !defined(__sun) && \
!defined(_AIX) && !defined(__MVS__) && !defined(__FreeBSD_kernel__) && \
- !defined(__OpenBSD__) && !defined(__CYGWIN__) && !defined(__MSYS__)
+ !defined(__OpenBSD__) && !defined(__CYGWIN__) && !defined(__MSYS__) && \
+ !defined(__NetBSD__)
uv_poll_t poll_handle;
int fd;
diff --git a/deps/uv/test/test-signal-multiple-loops.c b/deps/uv/test/test-signal-multiple-loops.c
index 11193dcf50b227..1272d4576fd968 100644
--- a/deps/uv/test/test-signal-multiple-loops.c
+++ b/deps/uv/test/test-signal-multiple-loops.c
@@ -275,6 +275,7 @@ TEST_IMPL(signal_multiple_loops) {
ASSERT(r == 0);
}
+ uv_sem_destroy(&sem);
printf("signal1_cb calls: %d\n", signal1_cb_counter);
printf("signal2_cb calls: %d\n", signal2_cb_counter);
printf("loops created and destroyed: %d\n", loop_creation_counter);
diff --git a/deps/uv/test/test-thread.c b/deps/uv/test/test-thread.c
index 10bec3fe6c6587..b0e87e208155ea 100644
--- a/deps/uv/test/test-thread.c
+++ b/deps/uv/test/test-thread.c
@@ -211,22 +211,28 @@ TEST_IMPL(thread_local_storage) {
}
-#if defined(__APPLE__)
static void thread_check_stack(void* arg) {
- /* 512KB is the default stack size of threads other than the main thread
- * on OSX. */
+#if defined(__APPLE__)
+ /* 512 kB is the default stack size of threads other than the main thread
+   * on macOS. */
ASSERT(pthread_get_stacksize_np(pthread_self()) > 512*1024);
-}
+#elif defined(__linux__) && defined(__GLIBC__)
+ struct rlimit lim;
+ size_t stack_size;
+ pthread_attr_t attr;
+ ASSERT(0 == getrlimit(RLIMIT_STACK, &lim));
+ if (lim.rlim_cur == RLIM_INFINITY)
+ lim.rlim_cur = 2 << 20; /* glibc default. */
+ ASSERT(0 == pthread_getattr_np(pthread_self(), &attr));
+ ASSERT(0 == pthread_attr_getstacksize(&attr, &stack_size));
+ ASSERT(stack_size >= lim.rlim_cur);
#endif
+}
TEST_IMPL(thread_stack_size) {
-#if defined(__APPLE__)
uv_thread_t thread;
ASSERT(0 == uv_thread_create(&thread, thread_check_stack, NULL));
ASSERT(0 == uv_thread_join(&thread));
return 0;
-#else
- RETURN_SKIP("OSX only test");
-#endif
}
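
The rewrite generalizes a macOS-only test: on glibc/Linux the spawned thread's stack should be at least `RLIMIT_STACK` (or glibc's 2 MiB default when the limit is unlimited, hence `2 << 20`). A standalone sketch of the glibc query used above (`_GNU_SOURCE` is required for `pthread_getattr_np()`):

```c
#define _GNU_SOURCE
#include <pthread.h>
#include <stdio.h>

static void print_stack_size(void) {
  pthread_attr_t attr;
  size_t stack_size;

  if (pthread_getattr_np(pthread_self(), &attr) != 0)
    return;
  if (pthread_attr_getstacksize(&attr, &stack_size) == 0)
    printf("stack size: %zu bytes\n", stack_size);
  pthread_attr_destroy(&attr);
}
```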
diff --git a/deps/uv/test/test-udp-ipv6.c b/deps/uv/test/test-udp-ipv6.c
index 54b364da9ebc75..000079185557a4 100644
--- a/deps/uv/test/test-udp-ipv6.c
+++ b/deps/uv/test/test-udp-ipv6.c
@@ -26,7 +26,7 @@
#include
#include
-#if defined(__FreeBSD__) || defined(__FreeBSD_kernel__)
+#if defined(__FreeBSD__) || defined(__FreeBSD_kernel__) || defined(__NetBSD__)
#include
#endif
@@ -47,8 +47,8 @@ static int send_cb_called;
static int recv_cb_called;
static int close_cb_called;
-#if defined(__FreeBSD__) || defined(__FreeBSD_kernel__)
-static int can_ipv6_ipv4_dual() {
+#if defined(__FreeBSD__) || defined(__FreeBSD_kernel__) || defined(__NetBSD__)
+static int can_ipv6_ipv4_dual(void) {
int v6only;
size_t size = sizeof(int);
@@ -171,7 +171,7 @@ TEST_IMPL(udp_dual_stack) {
if (!can_ipv6())
RETURN_SKIP("IPv6 not supported");
-#if defined(__FreeBSD__) || defined(__FreeBSD_kernel__)
+#if defined(__FreeBSD__) || defined(__FreeBSD_kernel__) || defined(__NetBSD__)
if (!can_ipv6_ipv4_dual())
RETURN_SKIP("IPv6-IPv4 dual stack not supported");
#endif
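
`can_ipv6_ipv4_dual()` (now also compiled on NetBSD, and given a proper `(void)` prototype) decides whether an `AF_INET6` socket accepts IPv4-mapped traffic. On the BSDs that is governed by the `net.inet6.ip6.v6only` sysctl; a sketch of such a probe, assuming the test reads that sysctl (the function body is not shown in this hunk):

```c
#include <sys/types.h>
#include <sys/sysctl.h>

static int can_ipv6_ipv4_dual(void) {
  int v6only;
  size_t size = sizeof(v6only);

  /* v6only == 0 means new AF_INET6 sockets are dual-stack by default. */
  if (sysctlbyname("net.inet6.ip6.v6only", &v6only, &size, NULL, 0) != 0)
    return 0;
  return v6only == 0;
}
```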
diff --git a/deps/uv/test/test-udp-multicast-join6.c b/deps/uv/test/test-udp-multicast-join6.c
index 2eb9e920e7f2f5..8814b5ad13c6ff 100644
--- a/deps/uv/test/test-udp-multicast-join6.c
+++ b/deps/uv/test/test-udp-multicast-join6.c
@@ -122,7 +122,8 @@ TEST_IMPL(udp_multicast_join6) {
#if defined(__APPLE__) || \
defined(_AIX) || \
defined(__MVS__) || \
- defined(__FreeBSD_kernel__)
+ defined(__FreeBSD_kernel__) || \
+ defined(__NetBSD__)
r = uv_udp_set_membership(&client, "ff02::1", "::1%lo0", UV_JOIN_GROUP);
#else
r = uv_udp_set_membership(&client, "ff02::1", NULL, UV_JOIN_GROUP);
diff --git a/deps/uv/tools/vswhere_usability_wrapper.cmd b/deps/uv/tools/vswhere_usability_wrapper.cmd
index e4acf03e1ee952..ee0549c8034ff7 100644
--- a/deps/uv/tools/vswhere_usability_wrapper.cmd
+++ b/deps/uv/tools/vswhere_usability_wrapper.cmd
@@ -2,23 +2,23 @@
:: Distributed under MIT style license or the libuv license
:: See accompanying file LICENSE at https://github.com/node4good/windows-autoconf
:: or libuv LICENSE file at https://github.com/libuv/libuv
-:: version: 1.15.3
+:: version: 2.0.0
@if not defined DEBUG_HELPER @ECHO OFF
setlocal
+if "%~1"=="prerelease" set VSWHERE_WITH_PRERELEASE=1
set "InstallerPath=%ProgramFiles(x86)%\Microsoft Visual Studio\Installer"
if not exist "%InstallerPath%" set "InstallerPath=%ProgramFiles%\Microsoft Visual Studio\Installer"
-if not exist "%InstallerPath%" exit goto :no-vswhere
+if not exist "%InstallerPath%" goto :no-vswhere
:: Manipulate %Path% for easier " handling
-set Path=%Path%;%InstallerPath%
+set "Path=%Path%;%InstallerPath%"
where vswhere 2> nul > nul
if errorlevel 1 goto :no-vswhere
set VSWHERE_REQ=-requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64
set VSWHERE_PRP=-property installationPath
set VSWHERE_LMT=-version "[15.0,16.0)"
vswhere -prerelease > nul
-if "%~1"=="prerelase" set VSWHERE_WITH_PRERELASE=1
-if not errorlevel 1 if "%VSWHERE_WITH_PRERELASE%"=="1" set "VSWHERE_LMT=%VSWHERE_LMT% -prerelease"
+if not errorlevel 1 if "%VSWHERE_WITH_PRERELEASE%"=="1" set "VSWHERE_LMT=%VSWHERE_LMT% -prerelease"
SET VSWHERE_ARGS=-latest -products * %VSWHERE_REQ% %VSWHERE_PRP% %VSWHERE_LMT%
for /f "usebackq tokens=*" %%i in (`vswhere %VSWHERE_ARGS%`) do (
endlocal
diff --git a/deps/uv/uv.gyp b/deps/uv/uv.gyp
index cac6da819b2196..38765eefd129b2 100644
--- a/deps/uv/uv.gyp
+++ b/deps/uv/uv.gyp
@@ -10,10 +10,10 @@
['OS=="solaris"', {
'cflags': [ '-pthreads' ],
}],
- ['OS not in "solaris android os390"', {
+ ['OS not in "solaris android zos"', {
'cflags': [ '-pthread' ],
}],
- ['OS in "os390"', {
+ ['OS in "zos"', {
'defines': [
'_UNIX03_THREADS',
'_UNIX03_SOURCE',
@@ -172,10 +172,10 @@
['OS=="solaris"', {
'ldflags': [ '-pthreads' ],
}],
- [ 'OS=="os390" and uv_library=="shared_library"', {
+ [ 'OS=="zos" and uv_library=="shared_library"', {
'ldflags': [ '-Wl,DLL' ],
}],
- ['OS != "solaris" and OS != "android" and OS != "os390"', {
+ ['OS != "solaris" and OS != "android" and OS != "zos"', {
'ldflags': [ '-pthread' ],
}],
],
@@ -183,14 +183,14 @@
'conditions': [
['uv_library=="shared_library"', {
'conditions': [
- ['OS=="os390"', {
+ ['OS=="zos"', {
'cflags': [ '-qexportall' ],
}, {
'cflags': [ '-fPIC' ],
}],
],
}],
- ['uv_library=="shared_library" and OS!="mac" and OS!="os390"', {
+ ['uv_library=="shared_library" and OS!="mac" and OS!="zos"', {
# This will cause gyp to set soname
# Must correspond with UV_VERSION_MAJOR
# in include/uv-version.h
@@ -198,10 +198,10 @@
}],
],
}],
- [ 'OS in "linux mac ios android os390"', {
+ [ 'OS in "linux mac ios android zos"', {
'sources': [ 'src/unix/proctitle.c' ],
}],
- [ 'OS != "os390"', {
+ [ 'OS != "zos"', {
'cflags': [
'-fvisibility=hidden',
'-g',
@@ -224,11 +224,6 @@
'_DARWIN_UNLIMITED_SELECT=1',
]
}],
- [ 'OS!="mac" and OS!="os390"', {
- # Enable on all platforms except OS X. The antique gcc/clang that
- # ships with Xcode emits waaaay too many false positives.
- 'cflags': [ '-Wstrict-aliasing' ],
- }],
[ 'OS=="linux"', {
'defines': [ '_GNU_SOURCE' ],
'sources': [
@@ -317,7 +312,7 @@
['uv_library=="shared_library"', {
'defines': [ 'BUILDING_UV_SHARED=1' ]
}],
- ['OS=="os390"', {
+ ['OS=="zos"', {
'sources': [
'src/unix/pthread-fixes.c',
'src/unix/no-fsevents.c',
@@ -489,7 +484,7 @@
'test/runner-unix.h',
],
'conditions': [
- [ 'OS != "os390"', {
+ [ 'OS != "zos"', {
'defines': [ '_GNU_SOURCE' ],
'cflags': [ '-Wno-long-long' ],
'xcode_settings': {
@@ -518,7 +513,7 @@
['uv_library=="shared_library"', {
'defines': [ 'USING_UV_SHARED=1' ],
'conditions': [
- [ 'OS == "os390"', {
+ [ 'OS == "zos"', {
'cflags': [ '-Wc,DLL' ],
}],
],
@@ -579,7 +574,7 @@
['uv_library=="shared_library"', {
'defines': [ 'USING_UV_SHARED=1' ],
'conditions': [
- [ 'OS == "os390"', {
+ [ 'OS == "zos"', {
'cflags': [ '-Wc,DLL' ],
}],
],
diff --git a/deps/uv/vcbuild.bat b/deps/uv/vcbuild.bat
index 698044df490c02..46b3476107757a 100644
--- a/deps/uv/vcbuild.bat
+++ b/deps/uv/vcbuild.bat
@@ -55,6 +55,8 @@ set "VSINSTALLDIR="
call tools\vswhere_usability_wrapper.cmd
if "_%VCINSTALLDIR%_" == "__" goto vs-set-2015
@rem Need to clear VSINSTALLDIR for vcvarsall to work as expected.
+@rem Keep current working directory after call to vcvarsall
+set "VSCMD_START_DIR=%CD%"
set vcvars_call="%VCINSTALLDIR%\Auxiliary\Build\vcvarsall.bat" %vs_toolset%
echo calling: %vcvars_call%
call %vcvars_call%
diff --git a/deps/v8/.gitignore b/deps/v8/.gitignore
index bcec3768f398dc..b1f61ed6fcf908 100644
--- a/deps/v8/.gitignore
+++ b/deps/v8/.gitignore
@@ -35,6 +35,7 @@
/_*
/build
/buildtools
+/gypfiles/.gold_plugin
/gypfiles/win_toolchain.json
/hydrogen.cfg
/obj
@@ -47,8 +48,6 @@
/test/benchmarks/data
/test/fuzzer/wasm_corpus
/test/fuzzer/wasm_corpus.tar.gz
-/test/fuzzer/wasm_asmjs_corpus
-/test/fuzzer/wasm_asmjs_corpus.tar.gz
/test/mozilla/data
/test/promises-aplus/promises-tests
/test/promises-aplus/promises-tests.tar.gz
diff --git a/deps/v8/AUTHORS b/deps/v8/AUTHORS
index 07cf31914431e0..048702701c4211 100644
--- a/deps/v8/AUTHORS
+++ b/deps/v8/AUTHORS
@@ -20,7 +20,7 @@ Imagination Technologies, LLC <*@imgtec.com>
Loongson Technology Corporation Limited <*@loongson.cn>
Code Aurora Forum <*@codeaurora.org>
Home Jinni Inc. <*@homejinni.com>
-IBM Inc. <*@*.ibm.com>
+IBM Inc. <*@*ibm.com>
Samsung <*@*.samsung.com>
Joyent, Inc <*@joyent.com>
RT-RK Computer Based System <*@rt-rk.com>
@@ -126,6 +126,7 @@ Victor Costan
Vlad Burlik
Vladimir Krivosheev
Vladimir Shutoff
+Wiktor Garbacz
Yu Yin
Zac Hansen
Zhongping Wang
diff --git a/deps/v8/BUILD.gn b/deps/v8/BUILD.gn
index b60425df45b469..494ba22f2934d6 100644
--- a/deps/v8/BUILD.gn
+++ b/deps/v8/BUILD.gn
@@ -24,9 +24,6 @@ declare_args() {
# Sets -DV8_ENABLE_FUTURE.
v8_enable_future = false
- # Sets -DV8_DISABLE_TURBO.
- v8_disable_turbo = false
-
# Sets -DVERIFY_HEAP.
v8_enable_verify_heap = ""
@@ -82,6 +79,10 @@ declare_args() {
# Sets -dV8_CONCURRENT_MARKING
v8_enable_concurrent_marking = false
+ # Build the snapshot with unwinding information for perf.
+ # Sets -dV8_USE_SNAPSHOT_WITH_UNWINDING_INFO.
+ v8_perf_prof_unwinding_info = false
+
# With post mortem support enabled, metadata is embedded into libv8 that
# describes various parameters of the VM for use by debuggers. See
# tools/gen-postmortem-metadata.py for details.
@@ -111,9 +112,13 @@ declare_args() {
v8_experimental_extra_library_files =
[ "//test/cctest/test-experimental-extra.js" ]
- v8_enable_gdbjit = ((v8_current_cpu == "x86" || v8_current_cpu == "x64" ||
- v8_current_cpu == "x87") && (is_linux || is_mac)) ||
- (v8_current_cpu == "ppc64" && is_linux)
+ v8_enable_gdbjit =
+ ((v8_current_cpu == "x86" || v8_current_cpu == "x64") &&
+ (is_linux || is_mac)) || (v8_current_cpu == "ppc64" && is_linux)
+
+ # Temporary flag to allow embedders to update their microtasks scopes
+ # while rolling in a new version of V8.
+ v8_check_microtasks_scopes_consistency = ""
}
# Derived defaults.
@@ -132,6 +137,9 @@ if (v8_enable_trace_maps == "") {
if (v8_enable_v8_checks == "") {
v8_enable_v8_checks = is_debug
}
+if (v8_check_microtasks_scopes_consistency == "") {
+ v8_check_microtasks_scopes_consistency = is_debug || dcheck_always_on
+}
# Specifies if the target build is a simulator build. Comparing target cpu
# with v8 target cpu to not affect simulator builds for making cross-compile
@@ -219,9 +227,6 @@ config("features") {
if (v8_enable_future) {
defines += [ "V8_ENABLE_FUTURE" ]
}
- if (v8_disable_turbo) {
- defines += [ "V8_DISABLE_TURBO" ]
- }
if (v8_enable_gdbjit) {
defines += [ "ENABLE_GDB_JIT_INTERFACE" ]
}
@@ -263,6 +268,9 @@ config("features") {
}
if (v8_use_snapshot) {
defines += [ "V8_USE_SNAPSHOT" ]
+ if (v8_perf_prof_unwinding_info) {
+ defines += [ "V8_USE_SNAPSHOT_WITH_UNWINDING_INFO" ]
+ }
}
if (v8_use_external_startup_data) {
defines += [ "V8_USE_EXTERNAL_STARTUP_DATA" ]
@@ -270,6 +278,9 @@ config("features") {
if (v8_enable_concurrent_marking) {
defines += [ "V8_CONCURRENT_MARKING" ]
}
+ if (v8_check_microtasks_scopes_consistency) {
+ defines += [ "V8_CHECK_MICROTASKS_SCOPES_CONSISTENCY" ]
+ }
}
config("toolchain") {
@@ -320,8 +331,7 @@ config("toolchain") {
defines += [ "_MIPS_TARGET_SIMULATOR" ]
}
- # TODO(jochen): Add support for mips.
- if (v8_current_cpu == "mipsel") {
+ if (v8_current_cpu == "mipsel" || v8_current_cpu == "mips") {
defines += [ "V8_TARGET_ARCH_MIPS" ]
if (v8_can_use_fpu_instructions) {
defines += [ "CAN_USE_FPU_INSTRUCTIONS" ]
@@ -355,15 +365,17 @@ config("toolchain") {
# TODO(jochen): Add support for mips_arch_variant rx and loongson.
}
- # TODO(jochen): Add support for mips64.
- if (v8_current_cpu == "mips64el") {
+ if (v8_current_cpu == "mips64el" || v8_current_cpu == "mips64") {
defines += [ "V8_TARGET_ARCH_MIPS64" ]
if (v8_can_use_fpu_instructions) {
defines += [ "CAN_USE_FPU_INSTRUCTIONS" ]
}
- # TODO(jochen): Add support for big endian host byteorder.
- defines += [ "V8_TARGET_ARCH_MIPS64_LE" ]
+ if (host_byteorder == "little") {
+ defines += [ "V8_TARGET_ARCH_MIPS64_LE" ]
+ } else if (host_byteorder == "big") {
+ defines += [ "V8_TARGET_ARCH_MIPS64_BE" ]
+ }
if (v8_use_mips_abi_hardfloat) {
defines += [
"__mips_hard_float=1",
@@ -496,6 +508,19 @@ config("toolchain") {
}
}
+# Configs for code coverage with gcov. Separate configs for cflags and ldflags
+# to selectively include cflags in non-test targets only.
+config("v8_gcov_coverage_cflags") {
+ cflags = [
+ "-fprofile-arcs",
+ "-ftest-coverage",
+ ]
+}
+
+config("v8_gcov_coverage_ldflags") {
+ ldflags = [ "-fprofile-arcs" ]
+}
+
###############################################################################
# Actions
#
@@ -523,8 +548,6 @@ action("js2c") {
"src/js/typedarray.js",
"src/js/collection.js",
"src/js/weak-collection.js",
- "src/js/collection-iterator.js",
- "src/js/promise.js",
"src/js/messages.js",
"src/js/templates.js",
"src/js/spread.js",
@@ -703,6 +726,12 @@ action("postmortem-metadata") {
"src/objects-inl.h",
"src/objects/map.h",
"src/objects/map-inl.h",
+ "src/objects/script.h",
+ "src/objects/script-inl.h",
+ "src/objects/shared-function-info.h",
+ "src/objects/shared-function-info-inl.h",
+ "src/objects/string.h",
+ "src/objects/string-inl.h",
]
outputs = [
@@ -750,6 +779,10 @@ action("run_mksnapshot") {
]
}
+ if (v8_perf_prof_unwinding_info) {
+ args += [ "--perf-prof-unwinding-info" ]
+ }
+
if (v8_use_external_startup_data) {
outputs += [ "$root_out_dir/snapshot_blob.bin" ]
args += [
@@ -769,6 +802,7 @@ action("v8_dump_build_config") {
outputs = [
"$root_out_dir/v8_build_config.json",
]
+ is_gcov_coverage = v8_code_coverage && !is_clang
args = [
rebase_path("$root_out_dir/v8_build_config.json", root_build_dir),
"current_cpu=\"$current_cpu\"",
@@ -777,6 +811,7 @@ action("v8_dump_build_config") {
"is_cfi=$is_cfi",
"is_component_build=$is_component_build",
"is_debug=$is_debug",
+ "is_gcov_coverage=$is_gcov_coverage",
"is_msan=$is_msan",
"is_tsan=$is_tsan",
"target_cpu=\"$target_cpu\"",
@@ -907,12 +942,16 @@ v8_source_set("v8_builtins_generators") {
"src/builtins/builtins-async-iterator-gen.cc",
"src/builtins/builtins-boolean-gen.cc",
"src/builtins/builtins-call-gen.cc",
+ "src/builtins/builtins-call-gen.h",
+ "src/builtins/builtins-collections-gen.cc",
"src/builtins/builtins-console-gen.cc",
"src/builtins/builtins-constructor-gen.cc",
"src/builtins/builtins-constructor-gen.h",
"src/builtins/builtins-constructor.h",
"src/builtins/builtins-conversion-gen.cc",
+ "src/builtins/builtins-conversion-gen.h",
"src/builtins/builtins-date-gen.cc",
+ "src/builtins/builtins-debug-gen.cc",
"src/builtins/builtins-forin-gen.cc",
"src/builtins/builtins-forin-gen.h",
"src/builtins/builtins-function-gen.cc",
@@ -923,11 +962,14 @@ v8_source_set("v8_builtins_generators") {
"src/builtins/builtins-internal-gen.cc",
"src/builtins/builtins-interpreter-gen.cc",
"src/builtins/builtins-intl-gen.cc",
+ "src/builtins/builtins-iterator-gen.cc",
+ "src/builtins/builtins-iterator-gen.h",
"src/builtins/builtins-math-gen.cc",
"src/builtins/builtins-number-gen.cc",
"src/builtins/builtins-object-gen.cc",
"src/builtins/builtins-promise-gen.cc",
"src/builtins/builtins-promise-gen.h",
+ "src/builtins/builtins-proxy-gen.cc",
"src/builtins/builtins-regexp-gen.cc",
"src/builtins/builtins-regexp-gen.h",
"src/builtins/builtins-sharedarraybuffer-gen.cc",
@@ -994,11 +1036,6 @@ v8_source_set("v8_builtins_generators") {
### gcmole(arch:s390) ###
"src/builtins/s390/builtins-s390.cc",
]
- } else if (v8_current_cpu == "x87") {
- sources += [
- ### gcmole(arch:x87) ###
- "src/builtins/x87/builtins-x87.cc",
- ]
}
if (!v8_enable_i18n_support) {
@@ -1053,6 +1090,9 @@ v8_header_set("v8_headers") {
v8_source_set("v8_base") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
+ # Split static libraries on windows into two.
+ split_count = 2
+
sources = [
"//base/trace_event/common/trace_event_common.h",
@@ -1070,7 +1110,6 @@ v8_source_set("v8_base") {
"src/accessors.h",
"src/address-map.cc",
"src/address-map.h",
- "src/allocation-site-scopes.cc",
"src/allocation-site-scopes.h",
"src/allocation.cc",
"src/allocation.h",
@@ -1105,10 +1144,8 @@ v8_source_set("v8_base") {
"src/ast/ast-function-literal-id-reindexer.h",
"src/ast/ast-numbering.cc",
"src/ast/ast-numbering.h",
+ "src/ast/ast-source-ranges.h",
"src/ast/ast-traversal-visitor.h",
- "src/ast/ast-type-bounds.h",
- "src/ast/ast-types.cc",
- "src/ast/ast-types.h",
"src/ast/ast-value-factory.cc",
"src/ast/ast-value-factory.h",
"src/ast/ast.cc",
@@ -1145,11 +1182,11 @@ v8_source_set("v8_base") {
"src/builtins/builtins-boolean.cc",
"src/builtins/builtins-call.cc",
"src/builtins/builtins-callsite.cc",
+ "src/builtins/builtins-collections.cc",
"src/builtins/builtins-console.cc",
"src/builtins/builtins-constructor.h",
"src/builtins/builtins-dataview.cc",
"src/builtins/builtins-date.cc",
- "src/builtins/builtins-debug.cc",
"src/builtins/builtins-definitions.h",
"src/builtins/builtins-descriptors.h",
"src/builtins/builtins-error.cc",
@@ -1158,11 +1195,12 @@ v8_source_set("v8_base") {
"src/builtins/builtins-internal.cc",
"src/builtins/builtins-interpreter.cc",
"src/builtins/builtins-intl.cc",
+ "src/builtins/builtins-intl.h",
"src/builtins/builtins-json.cc",
"src/builtins/builtins-math.cc",
"src/builtins/builtins-number.cc",
"src/builtins/builtins-object.cc",
- "src/builtins/builtins-proxy.cc",
+ "src/builtins/builtins-promise.cc",
"src/builtins/builtins-reflect.cc",
"src/builtins/builtins-regexp.cc",
"src/builtins/builtins-sharedarraybuffer.cc",
@@ -1186,7 +1224,6 @@ v8_source_set("v8_base") {
"src/code-factory.h",
"src/code-stub-assembler.cc",
"src/code-stub-assembler.h",
- "src/code-stubs-hydrogen.cc",
"src/code-stubs-utils.h",
"src/code-stubs.cc",
"src/code-stubs.h",
@@ -1232,6 +1269,8 @@ v8_source_set("v8_base") {
"src/compiler/bytecode-liveness-map.cc",
"src/compiler/bytecode-liveness-map.h",
"src/compiler/c-linkage.cc",
+ "src/compiler/check-elimination.cc",
+ "src/compiler/check-elimination.h",
"src/compiler/checkpoint-elimination.cc",
"src/compiler/checkpoint-elimination.h",
"src/compiler/code-assembler.cc",
@@ -1324,8 +1363,6 @@ v8_source_set("v8_base") {
"src/compiler/linkage.h",
"src/compiler/live-range-separator.cc",
"src/compiler/live-range-separator.h",
- "src/compiler/liveness-analyzer.cc",
- "src/compiler/liveness-analyzer.h",
"src/compiler/load-elimination.cc",
"src/compiler/load-elimination.h",
"src/compiler/loop-analysis.cc",
@@ -1369,6 +1406,8 @@ v8_source_set("v8_base") {
"src/compiler/pipeline-statistics.h",
"src/compiler/pipeline.cc",
"src/compiler/pipeline.h",
+ "src/compiler/property-access-builder.cc",
+ "src/compiler/property-access-builder.h",
"src/compiler/raw-machine-assembler.cc",
"src/compiler/raw-machine-assembler.h",
"src/compiler/redundancy-elimination.cc",
@@ -1397,8 +1436,6 @@ v8_source_set("v8_base") {
"src/compiler/state-values-utils.h",
"src/compiler/store-store-elimination.cc",
"src/compiler/store-store-elimination.h",
- "src/compiler/tail-call-optimization.cc",
- "src/compiler/tail-call-optimization.h",
"src/compiler/type-cache.cc",
"src/compiler/type-cache.h",
"src/compiler/typed-optimization.cc",
@@ -1426,67 +1463,6 @@ v8_source_set("v8_base") {
"src/counters-inl.h",
"src/counters.cc",
"src/counters.h",
- "src/crankshaft/compilation-phase.cc",
- "src/crankshaft/compilation-phase.h",
- "src/crankshaft/hydrogen-alias-analysis.h",
- "src/crankshaft/hydrogen-bce.cc",
- "src/crankshaft/hydrogen-bce.h",
- "src/crankshaft/hydrogen-canonicalize.cc",
- "src/crankshaft/hydrogen-canonicalize.h",
- "src/crankshaft/hydrogen-check-elimination.cc",
- "src/crankshaft/hydrogen-check-elimination.h",
- "src/crankshaft/hydrogen-dce.cc",
- "src/crankshaft/hydrogen-dce.h",
- "src/crankshaft/hydrogen-dehoist.cc",
- "src/crankshaft/hydrogen-dehoist.h",
- "src/crankshaft/hydrogen-environment-liveness.cc",
- "src/crankshaft/hydrogen-environment-liveness.h",
- "src/crankshaft/hydrogen-escape-analysis.cc",
- "src/crankshaft/hydrogen-escape-analysis.h",
- "src/crankshaft/hydrogen-flow-engine.h",
- "src/crankshaft/hydrogen-gvn.cc",
- "src/crankshaft/hydrogen-gvn.h",
- "src/crankshaft/hydrogen-infer-representation.cc",
- "src/crankshaft/hydrogen-infer-representation.h",
- "src/crankshaft/hydrogen-infer-types.cc",
- "src/crankshaft/hydrogen-infer-types.h",
- "src/crankshaft/hydrogen-instructions.cc",
- "src/crankshaft/hydrogen-instructions.h",
- "src/crankshaft/hydrogen-load-elimination.cc",
- "src/crankshaft/hydrogen-load-elimination.h",
- "src/crankshaft/hydrogen-mark-unreachable.cc",
- "src/crankshaft/hydrogen-mark-unreachable.h",
- "src/crankshaft/hydrogen-osr.cc",
- "src/crankshaft/hydrogen-osr.h",
- "src/crankshaft/hydrogen-range-analysis.cc",
- "src/crankshaft/hydrogen-range-analysis.h",
- "src/crankshaft/hydrogen-redundant-phi.cc",
- "src/crankshaft/hydrogen-redundant-phi.h",
- "src/crankshaft/hydrogen-removable-simulates.cc",
- "src/crankshaft/hydrogen-removable-simulates.h",
- "src/crankshaft/hydrogen-representation-changes.cc",
- "src/crankshaft/hydrogen-representation-changes.h",
- "src/crankshaft/hydrogen-sce.cc",
- "src/crankshaft/hydrogen-sce.h",
- "src/crankshaft/hydrogen-store-elimination.cc",
- "src/crankshaft/hydrogen-store-elimination.h",
- "src/crankshaft/hydrogen-types.cc",
- "src/crankshaft/hydrogen-types.h",
- "src/crankshaft/hydrogen-uint32-analysis.cc",
- "src/crankshaft/hydrogen-uint32-analysis.h",
- "src/crankshaft/hydrogen.cc",
- "src/crankshaft/hydrogen.h",
- "src/crankshaft/lithium-allocator-inl.h",
- "src/crankshaft/lithium-allocator.cc",
- "src/crankshaft/lithium-allocator.h",
- "src/crankshaft/lithium-codegen.cc",
- "src/crankshaft/lithium-codegen.h",
- "src/crankshaft/lithium-inl.h",
- "src/crankshaft/lithium.cc",
- "src/crankshaft/lithium.h",
- "src/crankshaft/typing.cc",
- "src/crankshaft/typing.h",
- "src/crankshaft/unique.h",
"src/date.cc",
"src/date.h",
"src/dateparser-inl.h",
@@ -1518,7 +1494,6 @@ v8_source_set("v8_base") {
"src/double.h",
"src/dtoa.cc",
"src/dtoa.h",
- "src/effects.h",
"src/eh-frame.cc",
"src/eh-frame.h",
"src/elements-kind.cc",
@@ -1560,6 +1535,7 @@ v8_source_set("v8_base") {
"src/flag-definitions.h",
"src/flags.cc",
"src/flags.h",
+ "src/float.h",
"src/frames-inl.h",
"src/frames.cc",
"src/frames.h",
@@ -1581,7 +1557,6 @@ v8_source_set("v8_base") {
"src/heap/array-buffer-tracker.h",
"src/heap/code-stats.cc",
"src/heap/code-stats.h",
- "src/heap/concurrent-marking-deque.h",
"src/heap/concurrent-marking.cc",
"src/heap/concurrent-marking.h",
"src/heap/embedder-tracing.cc",
@@ -1599,9 +1574,11 @@ v8_source_set("v8_base") {
"src/heap/incremental-marking.cc",
"src/heap/incremental-marking.h",
"src/heap/item-parallel-job.h",
+ "src/heap/local-allocator.h",
"src/heap/mark-compact-inl.h",
"src/heap/mark-compact.cc",
"src/heap/mark-compact.h",
+ "src/heap/marking.cc",
"src/heap/marking.h",
"src/heap/memory-reducer.cc",
"src/heap/memory-reducer.h",
@@ -1610,7 +1587,6 @@ v8_source_set("v8_base") {
"src/heap/objects-visiting-inl.h",
"src/heap/objects-visiting.cc",
"src/heap/objects-visiting.h",
- "src/heap/page-parallel-job.h",
"src/heap/remembered-set.h",
"src/heap/scavenge-job.cc",
"src/heap/scavenge-job.h",
@@ -1625,7 +1601,7 @@ v8_source_set("v8_base") {
"src/heap/spaces.h",
"src/heap/store-buffer.cc",
"src/heap/store-buffer.h",
- "src/heap/workstealing-marking-deque.h",
+ "src/heap/worklist.h",
"src/ic/access-compiler-data.h",
"src/ic/access-compiler.cc",
"src/ic/access-compiler.h",
@@ -1650,6 +1626,7 @@ v8_source_set("v8_base") {
"src/identity-map.h",
"src/interface-descriptors.cc",
"src/interface-descriptors.h",
+ "src/interpreter/block-coverage-builder.h",
"src/interpreter/bytecode-array-accessor.cc",
"src/interpreter/bytecode-array-accessor.h",
"src/interpreter/bytecode-array-builder.cc",
@@ -1740,10 +1717,15 @@ v8_source_set("v8_base") {
"src/objects-printer.cc",
"src/objects.cc",
"src/objects.h",
+ "src/objects/arguments-inl.h",
+ "src/objects/arguments.h",
"src/objects/code-cache-inl.h",
"src/objects/code-cache.h",
"src/objects/compilation-cache-inl.h",
"src/objects/compilation-cache.h",
+ "src/objects/debug-objects-inl.h",
+ "src/objects/debug-objects.cc",
+ "src/objects/debug-objects.h",
"src/objects/descriptor-array.h",
"src/objects/dictionary.h",
"src/objects/frame-array-inl.h",
@@ -1757,12 +1739,20 @@ v8_source_set("v8_base") {
"src/objects/map-inl.h",
"src/objects/map.h",
"src/objects/module-info.h",
+ "src/objects/name-inl.h",
+ "src/objects/name.h",
"src/objects/object-macros-undef.h",
"src/objects/object-macros.h",
"src/objects/regexp-match-info.h",
"src/objects/scope-info.cc",
"src/objects/scope-info.h",
+ "src/objects/script-inl.h",
+ "src/objects/script.h",
+ "src/objects/shared-function-info-inl.h",
+ "src/objects/shared-function-info.h",
+ "src/objects/string-inl.h",
"src/objects/string-table.h",
+ "src/objects/string.h",
"src/ostreams.cc",
"src/ostreams.h",
"src/parsing/duplicate-finder.h",
@@ -1948,8 +1938,6 @@ v8_source_set("v8_base") {
"src/trap-handler/trap-handler.h",
"src/type-hints.cc",
"src/type-hints.h",
- "src/type-info.cc",
- "src/type-info.h",
"src/unicode-cache-inl.h",
"src/unicode-cache.h",
"src/unicode-decoder.cc",
@@ -1976,6 +1964,8 @@ v8_source_set("v8_base") {
"src/visitors.h",
"src/vm-state-inl.h",
"src/vm-state.h",
+ "src/wasm/compilation-manager.cc",
+ "src/wasm/compilation-manager.h",
"src/wasm/decoder.h",
"src/wasm/function-body-decoder-impl.h",
"src/wasm/function-body-decoder.cc",
@@ -1983,6 +1973,8 @@ v8_source_set("v8_base") {
"src/wasm/leb-helper.h",
"src/wasm/local-decl-encoder.cc",
"src/wasm/local-decl-encoder.h",
+ "src/wasm/module-compiler.cc",
+ "src/wasm/module-compiler.h",
"src/wasm/module-decoder.cc",
"src/wasm/module-decoder.h",
"src/wasm/signature-map.cc",
@@ -2011,6 +2003,7 @@ v8_source_set("v8_base") {
"src/wasm/wasm-result.h",
"src/wasm/wasm-text.cc",
"src/wasm/wasm-text.h",
+ "src/wasm/wasm-value.h",
"src/zone/accounting-allocator.cc",
"src/zone/accounting-allocator.h",
"src/zone/zone-allocator.h",
@@ -2030,12 +2023,6 @@ v8_source_set("v8_base") {
"src/compiler/ia32/instruction-codes-ia32.h",
"src/compiler/ia32/instruction-scheduler-ia32.cc",
"src/compiler/ia32/instruction-selector-ia32.cc",
- "src/crankshaft/ia32/lithium-codegen-ia32.cc",
- "src/crankshaft/ia32/lithium-codegen-ia32.h",
- "src/crankshaft/ia32/lithium-gap-resolver-ia32.cc",
- "src/crankshaft/ia32/lithium-gap-resolver-ia32.h",
- "src/crankshaft/ia32/lithium-ia32.cc",
- "src/crankshaft/ia32/lithium-ia32.h",
"src/debug/ia32/debug-ia32.cc",
"src/full-codegen/ia32/full-codegen-ia32.cc",
"src/ia32/assembler-ia32-inl.h",
@@ -2070,12 +2057,6 @@ v8_source_set("v8_base") {
"src/compiler/x64/instruction-selector-x64.cc",
"src/compiler/x64/unwinding-info-writer-x64.cc",
"src/compiler/x64/unwinding-info-writer-x64.h",
- "src/crankshaft/x64/lithium-codegen-x64.cc",
- "src/crankshaft/x64/lithium-codegen-x64.h",
- "src/crankshaft/x64/lithium-gap-resolver-x64.cc",
- "src/crankshaft/x64/lithium-gap-resolver-x64.h",
- "src/crankshaft/x64/lithium-x64.cc",
- "src/crankshaft/x64/lithium-x64.h",
"src/debug/x64/debug-x64.cc",
"src/full-codegen/x64/full-codegen-x64.cc",
"src/ic/x64/access-compiler-x64.cc",
@@ -2136,12 +2117,6 @@ v8_source_set("v8_base") {
"src/compiler/arm/instruction-selector-arm.cc",
"src/compiler/arm/unwinding-info-writer-arm.cc",
"src/compiler/arm/unwinding-info-writer-arm.h",
- "src/crankshaft/arm/lithium-arm.cc",
- "src/crankshaft/arm/lithium-arm.h",
- "src/crankshaft/arm/lithium-codegen-arm.cc",
- "src/crankshaft/arm/lithium-codegen-arm.h",
- "src/crankshaft/arm/lithium-gap-resolver-arm.cc",
- "src/crankshaft/arm/lithium-gap-resolver-arm.h",
"src/debug/arm/debug-arm.cc",
"src/full-codegen/arm/full-codegen-arm.cc",
"src/ic/arm/access-compiler-arm.cc",
@@ -2181,6 +2156,7 @@ v8_source_set("v8_base") {
"src/arm64/macro-assembler-arm64.h",
"src/arm64/simulator-arm64.cc",
"src/arm64/simulator-arm64.h",
+ "src/arm64/simulator-logic-arm64.cc",
"src/arm64/utils-arm64.cc",
"src/arm64/utils-arm64.h",
"src/compiler/arm64/code-generator-arm64.cc",
@@ -2189,15 +2165,6 @@ v8_source_set("v8_base") {
"src/compiler/arm64/instruction-selector-arm64.cc",
"src/compiler/arm64/unwinding-info-writer-arm64.cc",
"src/compiler/arm64/unwinding-info-writer-arm64.h",
- "src/crankshaft/arm64/delayed-masm-arm64-inl.h",
- "src/crankshaft/arm64/delayed-masm-arm64.cc",
- "src/crankshaft/arm64/delayed-masm-arm64.h",
- "src/crankshaft/arm64/lithium-arm64.cc",
- "src/crankshaft/arm64/lithium-arm64.h",
- "src/crankshaft/arm64/lithium-codegen-arm64.cc",
- "src/crankshaft/arm64/lithium-codegen-arm64.h",
- "src/crankshaft/arm64/lithium-gap-resolver-arm64.cc",
- "src/crankshaft/arm64/lithium-gap-resolver-arm64.h",
"src/debug/arm64/debug-arm64.cc",
"src/full-codegen/arm64/full-codegen-arm64.cc",
"src/ic/arm64/access-compiler-arm64.cc",
@@ -2212,12 +2179,6 @@ v8_source_set("v8_base") {
"src/compiler/mips/instruction-codes-mips.h",
"src/compiler/mips/instruction-scheduler-mips.cc",
"src/compiler/mips/instruction-selector-mips.cc",
- "src/crankshaft/mips/lithium-codegen-mips.cc",
- "src/crankshaft/mips/lithium-codegen-mips.h",
- "src/crankshaft/mips/lithium-gap-resolver-mips.cc",
- "src/crankshaft/mips/lithium-gap-resolver-mips.h",
- "src/crankshaft/mips/lithium-mips.cc",
- "src/crankshaft/mips/lithium-mips.h",
"src/debug/mips/debug-mips.cc",
"src/full-codegen/mips/full-codegen-mips.cc",
"src/ic/mips/access-compiler-mips.cc",
@@ -2251,12 +2212,6 @@ v8_source_set("v8_base") {
"src/compiler/mips64/instruction-codes-mips64.h",
"src/compiler/mips64/instruction-scheduler-mips64.cc",
"src/compiler/mips64/instruction-selector-mips64.cc",
- "src/crankshaft/mips64/lithium-codegen-mips64.cc",
- "src/crankshaft/mips64/lithium-codegen-mips64.h",
- "src/crankshaft/mips64/lithium-gap-resolver-mips64.cc",
- "src/crankshaft/mips64/lithium-gap-resolver-mips64.h",
- "src/crankshaft/mips64/lithium-mips64.cc",
- "src/crankshaft/mips64/lithium-mips64.h",
"src/debug/mips64/debug-mips64.cc",
"src/full-codegen/mips64/full-codegen-mips64.cc",
"src/ic/mips64/access-compiler-mips64.cc",
@@ -2290,12 +2245,6 @@ v8_source_set("v8_base") {
"src/compiler/ppc/instruction-codes-ppc.h",
"src/compiler/ppc/instruction-scheduler-ppc.cc",
"src/compiler/ppc/instruction-selector-ppc.cc",
- "src/crankshaft/ppc/lithium-codegen-ppc.cc",
- "src/crankshaft/ppc/lithium-codegen-ppc.h",
- "src/crankshaft/ppc/lithium-gap-resolver-ppc.cc",
- "src/crankshaft/ppc/lithium-gap-resolver-ppc.h",
- "src/crankshaft/ppc/lithium-ppc.cc",
- "src/crankshaft/ppc/lithium-ppc.h",
"src/debug/ppc/debug-ppc.cc",
"src/full-codegen/ppc/full-codegen-ppc.cc",
"src/ic/ppc/access-compiler-ppc.cc",
@@ -2329,12 +2278,6 @@ v8_source_set("v8_base") {
"src/compiler/s390/instruction-codes-s390.h",
"src/compiler/s390/instruction-scheduler-s390.cc",
"src/compiler/s390/instruction-selector-s390.cc",
- "src/crankshaft/s390/lithium-codegen-s390.cc",
- "src/crankshaft/s390/lithium-codegen-s390.h",
- "src/crankshaft/s390/lithium-gap-resolver-s390.cc",
- "src/crankshaft/s390/lithium-gap-resolver-s390.h",
- "src/crankshaft/s390/lithium-s390.cc",
- "src/crankshaft/s390/lithium-s390.h",
"src/debug/s390/debug-s390.cc",
"src/full-codegen/s390/full-codegen-s390.cc",
"src/ic/s390/access-compiler-s390.cc",
@@ -2362,43 +2305,6 @@ v8_source_set("v8_base") {
"src/s390/simulator-s390.cc",
"src/s390/simulator-s390.h",
]
- } else if (v8_current_cpu == "x87") {
- sources += [ ### gcmole(arch:x87) ###
- "src/compiler/x87/code-generator-x87.cc",
- "src/compiler/x87/instruction-codes-x87.h",
- "src/compiler/x87/instruction-scheduler-x87.cc",
- "src/compiler/x87/instruction-selector-x87.cc",
- "src/crankshaft/x87/lithium-codegen-x87.cc",
- "src/crankshaft/x87/lithium-codegen-x87.h",
- "src/crankshaft/x87/lithium-gap-resolver-x87.cc",
- "src/crankshaft/x87/lithium-gap-resolver-x87.h",
- "src/crankshaft/x87/lithium-x87.cc",
- "src/crankshaft/x87/lithium-x87.h",
- "src/debug/x87/debug-x87.cc",
- "src/full-codegen/x87/full-codegen-x87.cc",
- "src/ic/x87/access-compiler-x87.cc",
- "src/ic/x87/handler-compiler-x87.cc",
- "src/ic/x87/ic-x87.cc",
- "src/regexp/x87/regexp-macro-assembler-x87.cc",
- "src/regexp/x87/regexp-macro-assembler-x87.h",
- "src/x87/assembler-x87-inl.h",
- "src/x87/assembler-x87.cc",
- "src/x87/assembler-x87.h",
- "src/x87/code-stubs-x87.cc",
- "src/x87/code-stubs-x87.h",
- "src/x87/codegen-x87.cc",
- "src/x87/codegen-x87.h",
- "src/x87/cpu-x87.cc",
- "src/x87/deoptimizer-x87.cc",
- "src/x87/disasm-x87.cc",
- "src/x87/frames-x87.cc",
- "src/x87/frames-x87.h",
- "src/x87/interface-descriptors-x87.cc",
- "src/x87/macro-assembler-x87.cc",
- "src/x87/macro-assembler-x87.h",
- "src/x87/simulator-x87.cc",
- "src/x87/simulator-x87.h",
- ]
}
configs = [ ":internal_config" ]
@@ -2421,6 +2327,8 @@ v8_source_set("v8_base") {
} else {
sources -= [
"src/builtins/builtins-intl.cc",
+ "src/builtins/builtins-intl.h",
+ "src/char-predicates.cc",
"src/intl.cc",
"src/intl.h",
"src/objects/intl-objects.cc",
@@ -2473,6 +2381,7 @@ v8_component("v8_libbase") {
"src/base/macros.h",
"src/base/once.cc",
"src/base/once.h",
+ "src/base/optional.h",
"src/base/platform/condition-variable.cc",
"src/base/platform/condition-variable.h",
"src/base/platform/elapsed-timer.h",
@@ -2490,6 +2399,7 @@ v8_component("v8_libbase") {
"src/base/safe_math_impl.h",
"src/base/sys-info.cc",
"src/base/sys-info.h",
+ "src/base/template-utils.h",
"src/base/timezone-cache.h",
"src/base/utils/random-number-generator.cc",
"src/base/utils/random-number-generator.h",
@@ -2557,6 +2467,11 @@ v8_component("v8_libbase") {
"src/base/platform/platform-linux.cc",
]
}
+ } else if (is_fuchsia) {
+ sources += [
+ "src/base/debug/stack_trace_fuchsia.cc",
+ "src/base/platform/platform-fuchsia.cc",
+ ]
} else if (is_mac) {
sources += [
"src/base/debug/stack_trace_posix.cc",
@@ -2737,7 +2652,7 @@ group("v8_fuzzers") {
":v8_simple_json_fuzzer",
":v8_simple_parser_fuzzer",
":v8_simple_regexp_fuzzer",
- ":v8_simple_wasm_asmjs_fuzzer",
+ ":v8_simple_wasm_async_fuzzer",
":v8_simple_wasm_call_fuzzer",
":v8_simple_wasm_code_fuzzer",
":v8_simple_wasm_compile_fuzzer",
@@ -2758,10 +2673,6 @@ if (is_component_build) {
"src/v8dll-main.cc",
]
- deps = [
- ":v8_dump_build_config",
- ]
-
public_deps = [
":v8_base",
":v8_maybe_snapshot",
@@ -2779,10 +2690,6 @@ if (is_component_build) {
"src/v8dll-main.cc",
]
- deps = [
- ":v8_dump_build_config",
- ]
-
public_deps = [
":v8_base",
":v8_maybe_snapshot",
@@ -2798,10 +2705,6 @@ if (is_component_build) {
}
} else {
group("v8") {
- deps = [
- ":v8_dump_build_config",
- ]
-
public_deps = [
":v8_base",
":v8_maybe_snapshot",
@@ -2813,10 +2716,6 @@ if (is_component_build) {
group("v8_for_testing") {
testonly = true
- deps = [
- ":v8_dump_build_config",
- ]
-
public_deps = [
":v8_base",
":v8_maybe_snapshot",
@@ -3069,9 +2968,9 @@ v8_source_set("wasm_fuzzer") {
v8_fuzzer("wasm_fuzzer") {
}
-v8_source_set("wasm_asmjs_fuzzer") {
+v8_source_set("wasm_async_fuzzer") {
sources = [
- "test/fuzzer/wasm-asmjs.cc",
+ "test/fuzzer/wasm-async.cc",
]
deps = [
@@ -3086,7 +2985,7 @@ v8_source_set("wasm_asmjs_fuzzer") {
]
}
-v8_fuzzer("wasm_asmjs_fuzzer") {
+v8_fuzzer("wasm_async_fuzzer") {
}
v8_source_set("wasm_code_fuzzer") {
diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog
index 7ee1b37e798c5c..f3e2941fddd5aa 100644
--- a/deps/v8/ChangeLog
+++ b/deps/v8/ChangeLog
@@ -1,3 +1,2833 @@
+2017-07-18: Version 6.1.534
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-18: Version 6.1.533
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-18: Version 6.1.532
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-18: Version 6.1.531
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-18: Version 6.1.530
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-18: Version 6.1.529
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-18: Version 6.1.528
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-18: Version 6.1.527
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-18: Version 6.1.526
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-18: Version 6.1.525
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-18: Version 6.1.524
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-18: Version 6.1.523
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-18: Version 6.1.522
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.521
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.520
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.519
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.518
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.517
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.516
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.515
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.514
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.513
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.512
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.511
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.510
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.509
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.508
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.507
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.506
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.505
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.504
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.503
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.502
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.501
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.500
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.499
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-17: Version 6.1.498
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-16: Version 6.1.497
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.496
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.495
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.494
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.493
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.492
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.491
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.490
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.489
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.488
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.487
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.486
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.485
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.484
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.483
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.482
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.481
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.480
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.479
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-14: Version 6.1.478
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.477
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.476
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.475
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.474
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.473
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.472
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.471
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.470
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.469
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.468
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.467
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.466
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.465
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.464
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.463
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.462
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.461
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-13: Version 6.1.460
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.459
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.458
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.457
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.456
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.455
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.454
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.453
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.452
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.451
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.450
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.449
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.448
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.447
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.446
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.445
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.444
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.443
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.442
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.441
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.440
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.439
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.438
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-12: Version 6.1.437
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.436
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.435
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.434
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.433
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.432
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.431
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.430
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.429
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.428
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.427
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.426
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.425
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.424
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.423
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.422
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.421
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.420
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-11: Version 6.1.419
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.418
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.417
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.416
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.415
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.414
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.413
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.412
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.411
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.410
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.409
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.408
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.407
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.406
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.405
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.404
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.403
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.402
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.401
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.400
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.399
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.398
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.397
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.396
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-10: Version 6.1.395
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.394
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.393
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.392
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.391
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.390
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.389
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.388
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.387
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.386
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.385
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.384
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.383
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.382
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.381
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.380
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.379
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.378
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.377
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.376
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-06: Version 6.1.375
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-05: Version 6.1.374
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-05: Version 6.1.373
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-05: Version 6.1.372
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-05: Version 6.1.371
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-05: Version 6.1.370
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-05: Version 6.1.369
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-05: Version 6.1.368
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-05: Version 6.1.367
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-05: Version 6.1.366
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-05: Version 6.1.365
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-05: Version 6.1.364
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-05: Version 6.1.363
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-04: Version 6.1.362
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-04: Version 6.1.361
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-04: Version 6.1.360
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-04: Version 6.1.359
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-04: Version 6.1.358
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-04: Version 6.1.357
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-03: Version 6.1.356
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-03: Version 6.1.355
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-03: Version 6.1.354
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-03: Version 6.1.353
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-03: Version 6.1.352
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-03: Version 6.1.351
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-03: Version 6.1.350
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-03: Version 6.1.349
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-03: Version 6.1.348
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-03: Version 6.1.347
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-03: Version 6.1.346
+
+ Performance and stability improvements on all platforms.
+
+
+2017-07-03: Version 6.1.345
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.344
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.343
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.342
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.341
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.340
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.339
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.338
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.337
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.336
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.335
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.334
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.333
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.332
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.331
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.330
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-30: Version 6.1.329
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.328
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.327
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.326
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.325
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.324
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.323
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.322
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.321
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.320
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.319
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.318
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.317
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.316
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.315
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.314
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-29: Version 6.1.313
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-28: Version 6.1.312
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-28: Version 6.1.311
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-28: Version 6.1.310
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-28: Version 6.1.309
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-28: Version 6.1.308
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-28: Version 6.1.307
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-28: Version 6.1.306
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-28: Version 6.1.305
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-28: Version 6.1.304
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-28: Version 6.1.303
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-28: Version 6.1.302
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-28: Version 6.1.301
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-28: Version 6.1.300
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-27: Version 6.1.299
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-27: Version 6.1.298
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-27: Version 6.1.297
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-27: Version 6.1.296
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-27: Version 6.1.295
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-27: Version 6.1.294
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.293
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.292
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.291
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.290
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.289
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.288
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.287
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.286
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.285
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.284
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.283
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.282
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.281
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.280
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.279
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.278
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.277
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.276
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.275
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.274
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.273
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.272
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.271
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.270
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.269
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-26: Version 6.1.268
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-24: Version 6.1.267
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.266
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.265
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.264
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.263
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.262
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.261
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.260
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.259
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.258
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.257
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.256
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.255
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.254
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.253
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.252
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.251
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-23: Version 6.1.250
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.249
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.248
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.247
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.246
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.245
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.244
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.243
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.242
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.241
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.240
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.239
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.238
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.237
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.236
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.235
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.234
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.233
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.232
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.231
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.230
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.229
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.228
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.227
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.226
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.225
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.224
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-22: Version 6.1.223
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-21: Version 6.1.222
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-21: Version 6.1.221
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-21: Version 6.1.220
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-21: Version 6.1.219
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-21: Version 6.1.218
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-21: Version 6.1.217
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-21: Version 6.1.216
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-21: Version 6.1.215
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-20: Version 6.1.214
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-20: Version 6.1.213
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-20: Version 6.1.212
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-20: Version 6.1.211
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-20: Version 6.1.210
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-20: Version 6.1.209
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-20: Version 6.1.208
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-20: Version 6.1.207
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-20: Version 6.1.206
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-20: Version 6.1.205
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-20: Version 6.1.204
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-20: Version 6.1.203
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-20: Version 6.1.202
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-16: Version 6.1.201
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-16: Version 6.1.200
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-16: Version 6.1.199
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-16: Version 6.1.198
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-16: Version 6.1.197
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-16: Version 6.1.196
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-16: Version 6.1.195
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-16: Version 6.1.194
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-16: Version 6.1.193
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-16: Version 6.1.192
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-16: Version 6.1.191
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-16: Version 6.1.190
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-16: Version 6.1.189
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-16: Version 6.1.188
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-15: Version 6.1.187
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-15: Version 6.1.186
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-15: Version 6.1.185
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-15: Version 6.1.184
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-15: Version 6.1.183
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-15: Version 6.1.182
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-15: Version 6.1.181
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-15: Version 6.1.180
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-15: Version 6.1.179
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-15: Version 6.1.178
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-14: Version 6.1.177
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-14: Version 6.1.176
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-14: Version 6.1.175
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-14: Version 6.1.174
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-14: Version 6.1.173
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-14: Version 6.1.172
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-14: Version 6.1.171
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-14: Version 6.1.170
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-14: Version 6.1.169
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-14: Version 6.1.168
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-14: Version 6.1.167
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-14: Version 6.1.166
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-14: Version 6.1.165
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.164
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.163
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.162
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.161
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.160
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.159
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.158
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.157
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.156
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.155
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.154
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.153
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.152
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.151
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.150
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-13: Version 6.1.149
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.148
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.147
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.146
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.145
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.144
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.143
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.142
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.141
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.140
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.139
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.138
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.137
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.136
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.135
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.134
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.133
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.132
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.131
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.130
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.129
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.128
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.127
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.126
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.125
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-12: Version 6.1.124
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-11: Version 6.1.123
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-11: Version 6.1.122
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-11: Version 6.1.121
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-09: Version 6.1.120
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-09: Version 6.1.119
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-09: Version 6.1.118
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-09: Version 6.1.117
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-09: Version 6.1.116
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-09: Version 6.1.115
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-09: Version 6.1.114
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-09: Version 6.1.113
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-08: Version 6.1.112
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-08: Version 6.1.111
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-08: Version 6.1.110
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-08: Version 6.1.109
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-08: Version 6.1.108
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-08: Version 6.1.107
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-08: Version 6.1.106
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-08: Version 6.1.105
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-08: Version 6.1.104
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-08: Version 6.1.103
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-08: Version 6.1.102
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.101
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.100
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.99
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.98
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.97
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.96
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.95
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.94
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.93
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.92
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.91
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.90
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.89
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.88
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.87
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.86
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-07: Version 6.1.85
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-06: Version 6.1.84
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-06: Version 6.1.83
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-06: Version 6.1.82
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-06: Version 6.1.81
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-06: Version 6.1.80
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-06: Version 6.1.79
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-06: Version 6.1.78
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-06: Version 6.1.77
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-06: Version 6.1.76
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-06: Version 6.1.75
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-06: Version 6.1.74
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-06: Version 6.1.73
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-05: Version 6.1.72
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-05: Version 6.1.71
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-05: Version 6.1.70
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-05: Version 6.1.69
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-05: Version 6.1.68
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-05: Version 6.1.67
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-05: Version 6.1.66
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-04: Version 6.1.65
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-03: Version 6.1.64
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-02: Version 6.1.63
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-02: Version 6.1.62
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-02: Version 6.1.61
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-02: Version 6.1.60
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-02: Version 6.1.59
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-02: Version 6.1.58
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-01: Version 6.1.57
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-01: Version 6.1.56
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-01: Version 6.1.55
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-01: Version 6.1.54
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-01: Version 6.1.53
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-01: Version 6.1.52
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-01: Version 6.1.51
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-01: Version 6.1.50
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-01: Version 6.1.49
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-01: Version 6.1.48
+
+ Performance and stability improvements on all platforms.
+
+
+2017-06-01: Version 6.1.47
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-31: Version 6.1.46
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-31: Version 6.1.45
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-31: Version 6.1.44
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-31: Version 6.1.43
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-31: Version 6.1.42
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-31: Version 6.1.41
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-31: Version 6.1.40
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-31: Version 6.1.39
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-31: Version 6.1.38
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-31: Version 6.1.37
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-31: Version 6.1.36
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-31: Version 6.1.35
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-31: Version 6.1.34
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-30: Version 6.1.33
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-30: Version 6.1.32
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-30: Version 6.1.31
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-30: Version 6.1.30
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-30: Version 6.1.29
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-30: Version 6.1.28
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-30: Version 6.1.27
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-30: Version 6.1.26
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-30: Version 6.1.25
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-30: Version 6.1.24
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-30: Version 6.1.23
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-30: Version 6.1.22
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-30: Version 6.1.21
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.20
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.19
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.18
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.17
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.16
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.15
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.14
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.13
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.12
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.11
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.10
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.9
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.8
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.7
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.6
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.5
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.4
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.3
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.2
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-29: Version 6.1.1
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-24: Version 6.0.318
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-24: Version 6.0.317
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-23: Version 6.0.316
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-23: Version 6.0.315
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-23: Version 6.0.314
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-23: Version 6.0.313
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-23: Version 6.0.312
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-23: Version 6.0.311
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-23: Version 6.0.310
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-23: Version 6.0.309
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-23: Version 6.0.308
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-23: Version 6.0.307
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-23: Version 6.0.306
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-23: Version 6.0.305
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-23: Version 6.0.304
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.303
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.302
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.301
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.300
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.299
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.298
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.297
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.296
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.295
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.294
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.293
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.292
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.291
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.290
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.289
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-22: Version 6.0.288
+
+ Performance and stability improvements on all platforms.
+
+
+2017-05-21: Version 6.0.287
+
+ Performance and stability improvements on all platforms.
+
+
2017-05-20: Version 6.0.286

        Performance and stability improvements on all platforms.
diff --git a/deps/v8/DEPS b/deps/v8/DEPS
index 1a55e663c6b17a..7752da2f40f817 100644
--- a/deps/v8/DEPS
+++ b/deps/v8/DEPS
@@ -8,23 +8,23 @@ vars = {
deps = {
"v8/build":
- Var("chromium_url") + "/chromium/src/build.git" + "@" + "1caf3a69f3b0379c9fef2493aa1b3cda96e17d7b",
+ Var("chromium_url") + "/chromium/src/build.git" + "@" + "1808a907ce42f13b224c263e9843d718fc6d9c39",
"v8/tools/gyp":
Var("chromium_url") + "/external/gyp.git" + "@" + "eb296f67da078ec01f5e3a9ea9cdc6d26d680161",
"v8/third_party/icu":
- Var("chromium_url") + "/chromium/deps/icu.git" + "@" + "c844075aa0f1758d04f9192825f1b1e7e607992e",
+ Var("chromium_url") + "/chromium/deps/icu.git" + "@" + "dfa798fe694702b43a3debc3290761f22b1acaf8",
"v8/third_party/instrumented_libraries":
Var("chromium_url") + "/chromium/src/third_party/instrumented_libraries.git" + "@" + "644afd349826cb68204226a16c38bde13abe9c3c",
"v8/buildtools":
- Var("chromium_url") + "/chromium/buildtools.git" + "@" + "98f00fa10dbad2cdbb2e297a66c3d6d5bc3994f3",
+ Var("chromium_url") + "/chromium/buildtools.git" + "@" + "5ad14542a6a74dd914f067b948c5d3e8d170396b",
"v8/base/trace_event/common":
- Var("chromium_url") + "/chromium/src/base/trace_event/common.git" + "@" + "39a3450531fc73432e963db8668695d2e8f13053",
+ Var("chromium_url") + "/chromium/src/base/trace_event/common.git" + "@" + "65d1d42a5df6c0a563a6fdfa58a135679185e5d9",
"v8/third_party/jinja2":
Var("chromium_url") + "/chromium/src/third_party/jinja2.git" + "@" + "d34383206fa42d52faa10bb9931d6d538f3a57e0",
"v8/third_party/markupsafe":
Var("chromium_url") + "/chromium/src/third_party/markupsafe.git" + "@" + "8f45f5cfa0009d2a70589bcda0349b8cb2b72783",
"v8/tools/swarming_client":
- Var('chromium_url') + '/external/swarming.client.git' + '@' + "a941a089ff1000403078b74cb628eb430f07d271",
+ Var('chromium_url') + '/external/swarming.client.git' + '@' + "a56c2b39ca23bdf41458421a7f825ddbf3f43f28",
"v8/testing/gtest":
Var("chromium_url") + "/external/github.com/google/googletest.git" + "@" + "6f8a66431cb592dad629028a50b3dd418a408c87",
"v8/testing/gmock":
@@ -34,21 +34,21 @@ deps = {
"v8/test/mozilla/data":
Var("chromium_url") + "/v8/deps/third_party/mozilla-tests.git" + "@" + "f6c578a10ea707b1a8ab0b88943fe5115ce2b9be",
"v8/test/test262/data":
- Var("chromium_url") + "/external/github.com/tc39/test262.git" + "@" + "230f9fc5688ce76bfaa99aba5f680a159eaac9e2",
+ Var("chromium_url") + "/external/github.com/tc39/test262.git" + "@" + "1b911a8f8abf4cb63882cfbe72dcd4c82bb8ad91",
"v8/test/test262/harness":
Var("chromium_url") + "/external/github.com/test262-utils/test262-harness-py.git" + "@" + "0f2acdd882c84cff43b9d60df7574a1901e2cdcd",
"v8/tools/clang":
- Var("chromium_url") + "/chromium/src/tools/clang.git" + "@" + "05f306039aa5029fa88768690e5c512097419f9d",
+ Var("chromium_url") + "/chromium/src/tools/clang.git" + "@" + "844603c1fcd47f578931b3ccd583e19f816a3842",
"v8/test/wasm-js":
- Var("chromium_url") + "/external/github.com/WebAssembly/spec.git" + "@" + "07fd6430f879d36928d179a62d9bdeed82286065",
+ Var("chromium_url") + "/external/github.com/WebAssembly/spec.git" + "@" + "aadd3a340c78e53078a7bb6c17cc30f105c2960c",
}
deps_os = {
"android": {
"v8/third_party/android_tools":
- Var("chromium_url") + "/android_tools.git" + "@" + "cb6bc21107001e2f2eeee2707b482b2b755baf51",
+ Var("chromium_url") + "/android_tools.git" + "@" + "e9d4018e149d50172ed462a7c21137aa915940ec",
"v8/third_party/catapult":
- Var('chromium_url') + "/external/github.com/catapult-project/catapult.git" + "@" + "08a6e0ac161db7309d8f9cad0ccd38e0b1fd41e0",
+ Var('chromium_url') + "/external/github.com/catapult-project/catapult.git" + "@" + "44b022b2a09508ec025ae76a26308e89deb2cf69",
},
}
@@ -262,13 +262,6 @@ hooks = [
'v8/third_party/binutils/download.py',
],
},
- {
- # Pull gold plugin if needed or requested via GYP_DEFINES.
- # Note: This must run before the clang update.
- 'name': 'gold_plugin',
- 'pattern': '.',
- 'action': ['python', 'v8/gypfiles/download_gold_plugin.py'],
- },
{
# Pull clang if needed or requested via GYP_DEFINES.
# Note: On Win, this should run after win_toolchain, as it may use it.
diff --git a/deps/v8/Makefile b/deps/v8/Makefile
index ed5b3a7fabc325..b381918355602d 100644
--- a/deps/v8/Makefile
+++ b/deps/v8/Makefile
@@ -255,14 +255,13 @@ endif
# Architectures and modes to be compiled. Consider these to be internal
# variables, don't override them (use the targets instead).
-ARCHES = ia32 x64 arm arm64 mips mipsel mips64 mips64el x87 ppc ppc64 s390 \
- s390x
-ARCHES32 = ia32 arm mips mipsel x87 ppc s390
+ARCHES = ia32 x64 arm arm64 mips mipsel mips64 mips64el ppc ppc64 s390 s390x
+ARCHES32 = ia32 arm mips mipsel ppc s390
DEFAULT_ARCHES = ia32 x64 arm
MODES = release debug optdebug
DEFAULT_MODES = release debug
ANDROID_ARCHES = android_ia32 android_x64 android_arm android_arm64 \
- android_mipsel android_x87
+ android_mipsel
# List of files that trigger Makefile regeneration:
GYPFILES = third_party/icu/icu.gypi third_party/icu/icu.gyp \
@@ -272,9 +271,7 @@ GYPFILES = third_party/icu/icu.gypi third_party/icu/icu.gyp \
test/cctest/cctest.gyp test/fuzzer/fuzzer.gyp \
test/unittests/unittests.gyp src/v8.gyp \
tools/parser-shell.gyp testing/gmock.gyp testing/gtest.gyp \
- buildtools/third_party/libc++abi/libc++abi.gyp \
- buildtools/third_party/libc++/libc++.gyp samples/samples.gyp \
- src/third_party/vtune/v8vtune.gyp src/d8.gyp
+ samples/samples.gyp src/third_party/vtune/v8vtune.gyp src/d8.gyp
# If vtunejit=on, the v8vtune.gyp will be appended.
ifeq ($(vtunejit), on)
diff --git a/deps/v8/OWNERS b/deps/v8/OWNERS
index 4a2dcdf74cddb6..dd96fa6b5fe586 100644
--- a/deps/v8/OWNERS
+++ b/deps/v8/OWNERS
@@ -35,3 +35,6 @@ ulan@chromium.org
verwaest@chromium.org
vogelheim@chromium.org
yangguo@chromium.org
+
+# TEAM: v8-dev@googlegroups.com
+# COMPONENT: Blink>JavaScript
diff --git a/deps/v8/PRESUBMIT.py b/deps/v8/PRESUBMIT.py
index 7d7faec69614be..2d79ae682ce16f 100644
--- a/deps/v8/PRESUBMIT.py
+++ b/deps/v8/PRESUBMIT.py
@@ -31,6 +31,7 @@
for more details about the presubmit API built into gcl.
"""
+import json
import re
import sys
@@ -277,6 +278,7 @@ def _CommonChecks(input_api, output_api):
results.extend(
_CheckNoInlineHeaderIncludesInNormalHeaders(input_api, output_api))
results.extend(_CheckMissingFiles(input_api, output_api))
+ results.extend(_CheckJSONFiles(input_api, output_api))
return results
@@ -316,6 +318,25 @@ def _CheckCommitMessageBugEntry(input_api, output_api):
return [output_api.PresubmitError(r) for r in results]
+def _CheckJSONFiles(input_api, output_api):
+ def FilterFile(affected_file):
+ return input_api.FilterSourceFile(
+ affected_file,
+ white_list=(r'.+\.json',))
+
+ results = []
+ for f in input_api.AffectedFiles(
+ file_filter=FilterFile, include_deletes=False):
+ with open(f.LocalPath()) as j:
+ try:
+ json.load(j)
+ except Exception as e:
+ results.append(
+ 'JSON validation failed for %s. Error:\n%s' % (f.LocalPath(), e))
+
+ return [output_api.PresubmitError(r) for r in results]
+
+
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
@@ -332,3 +353,19 @@ def CheckChangeOnCommit(input_api, output_api):
input_api, output_api,
json_url='http://v8-status.appspot.com/current?format=json'))
return results
+
+def PostUploadHook(cl, change, output_api):
+ """git cl upload will call this hook after the issue is created/modified.
+
+ This hook adds a noi18n bot if the patch affects Intl.
+ """
+ def affects_intl(f):
+ return 'intl' in f.LocalPath() or 'test262' in f.LocalPath()
+ if not change.AffectedFiles(file_filter=affects_intl):
+ return []
+ return output_api.EnsureCQIncludeTrybotsAreAdded(
+ cl,
+ [
+ 'master.tryserver.v8:v8_linux_noi18n_rel_ng'
+ ],
+ 'Automatically added noi18n trybots to run tests on CQ.')
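
For context, the _CheckJSONFiles check added above does nothing more than attempt to parse each affected .json file with the standard-library json module and report any parse failure as a presubmit error. A minimal standalone sketch of the same idea, runnable outside the presubmit framework (the command-line usage and file names are assumptions for illustration, not part of the patch):

import json
import sys

def validate_json_files(paths):
    # Return one error string per file that fails to parse as JSON,
    # mirroring the message format used by the presubmit check.
    errors = []
    for path in paths:
        try:
            with open(path) as f:
                json.load(f)
        except Exception as e:
            errors.append('JSON validation failed for %s. Error:\n%s' % (path, e))
    return errors

if __name__ == '__main__':
    # Hypothetical usage: python check_json.py config.json data.json
    for error in validate_json_files(sys.argv[1:]):
        print(error)
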
diff --git a/deps/v8/base/trace_event/common/trace_event_common.h b/deps/v8/base/trace_event/common/trace_event_common.h
index 76d3039250ed89..bdc450d56824cd 100644
--- a/deps/v8/base/trace_event/common/trace_event_common.h
+++ b/deps/v8/base/trace_event/common/trace_event_common.h
@@ -359,6 +359,12 @@
TRACE_EVENT_PHASE_MARK, category_group, name, timestamp, \
TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val)
+#define TRACE_EVENT_MARK_WITH_TIMESTAMP2( \
+ category_group, name, timestamp, arg1_name, arg1_val, arg2_name, arg2_val) \
+ INTERNAL_TRACE_EVENT_ADD_WITH_TIMESTAMP( \
+ TRACE_EVENT_PHASE_MARK, category_group, name, timestamp, \
+ TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val, arg2_name, arg2_val)
+
#define TRACE_EVENT_COPY_MARK(category_group, name) \
INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_MARK, category_group, name, \
TRACE_EVENT_FLAG_COPY)
diff --git a/deps/v8/build_overrides/build.gni b/deps/v8/build_overrides/build.gni
index 8dcaf3a29d78f1..b656fce61a1aa2 100644
--- a/deps/v8/build_overrides/build.gni
+++ b/deps/v8/build_overrides/build.gni
@@ -2,9 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-mac_sdk_min_build_override = "10.10"
-mac_deployment_target_build_override = "10.7"
-
# Variable that can be used to support multiple build scenarios, like having
# Chromium specific targets in a client project's GN file etc.
build_with_chromium = false
diff --git a/deps/v8/codereview.settings b/deps/v8/codereview.settings
index bff4e38ba51885..b7a5a972b018bb 100644
--- a/deps/v8/codereview.settings
+++ b/deps/v8/codereview.settings
@@ -4,3 +4,4 @@ CODE_REVIEW_SERVER: https://codereview.chromium.org
CC_LIST: v8-reviews@googlegroups.com
VIEW_VC: https://chromium.googlesource.com/v8/v8/+/
STATUS: http://v8-status.appspot.com/status
+RUN_POST_UPLOAD_HOOK: True
diff --git a/deps/v8/gni/isolate.gni b/deps/v8/gni/isolate.gni
index 90bc8c5d7fa4b9..82dc8cf3fbc005 100644
--- a/deps/v8/gni/isolate.gni
+++ b/deps/v8/gni/isolate.gni
@@ -56,16 +56,16 @@ template("v8_isolate_run") {
}
# Translate gn to gyp variables.
+ if (v8_code_coverage) {
+ coverage = "1"
+ } else {
+ coverage = "0"
+ }
if (is_asan) {
asan = "1"
} else {
asan = "0"
}
- if (is_lsan) {
- lsan = "1"
- } else {
- lsan = "0"
- }
if (is_msan) {
msan = "1"
} else {
@@ -158,15 +158,13 @@ template("v8_isolate_run") {
"--config-variable",
"is_gn=1",
"--config-variable",
- "lsan=$lsan",
- "--config-variable",
"msan=$msan",
"--config-variable",
"tsan=$tsan",
"--config-variable",
- "coverage=0",
+ "coverage=$coverage",
"--config-variable",
- "sanitizer_coverage=0",
+ "sanitizer_coverage=$sanitizer_coverage_flags",
"--config-variable",
"component=$component",
"--config-variable",
diff --git a/deps/v8/gni/v8.gni b/deps/v8/gni/v8.gni
index 33f85f989b73e6..9a2bb3dff4ffa6 100644
--- a/deps/v8/gni/v8.gni
+++ b/deps/v8/gni/v8.gni
@@ -4,8 +4,13 @@
import("//build/config/sanitizers/sanitizers.gni")
import("//build/config/v8_target_cpu.gni")
+import("//build/split_static_library.gni")
declare_args() {
+ # Set flags for tracking code coverage. Uses gcov with gcc and sanitizer
+ # coverage with clang.
+ v8_code_coverage = false
+
# Includes files needed for correctness fuzzing.
v8_correctness_fuzzer = false
@@ -84,6 +89,13 @@ if (is_debug && !v8_optimized_debug) {
}
}
+if (v8_code_coverage && !is_clang) {
+ v8_add_configs += [
+ v8_path_prefix + ":v8_gcov_coverage_cflags",
+ v8_path_prefix + ":v8_gcov_coverage_ldflags",
+ ]
+}
+
if (is_posix && v8_enable_backtrace) {
v8_remove_configs += [ "//build/config/gcc:symbol_visibility_hidden" ]
v8_add_configs += [ "//build/config/gcc:symbol_visibility_default" ]
@@ -91,20 +103,19 @@ if (is_posix && v8_enable_backtrace) {
# All templates should be kept in sync.
template("v8_source_set") {
- if (defined(v8_static_library) && v8_static_library) {
- static_library(target_name) {
- forward_variables_from(invoker, "*", [ "configs" ])
- configs += invoker.configs
- configs -= v8_remove_configs
- configs += v8_add_configs
- }
+ if (defined(invoker.split_count) && invoker.split_count > 1 &&
+ defined(v8_static_library) && v8_static_library && is_win) {
+ link_target_type = "split_static_library"
+ } else if (defined(v8_static_library) && v8_static_library) {
+ link_target_type = "static_library"
} else {
- source_set(target_name) {
- forward_variables_from(invoker, "*", [ "configs" ])
- configs += invoker.configs
- configs -= v8_remove_configs
- configs += v8_add_configs
- }
+ link_target_type = "source_set"
+ }
+ target(link_target_type, target_name) {
+ forward_variables_from(invoker, "*", [ "configs" ])
+ configs += invoker.configs
+ configs -= v8_remove_configs
+ configs += v8_add_configs
}
}
@@ -135,6 +146,19 @@ template("v8_executable") {
# For enabling ASLR.
ldflags = [ "-pie" ]
}
+ if (defined(testonly) && testonly && v8_code_coverage) {
+ # Only add code coverage cflags for non-test files for performance
+ # reasons.
+ if (is_clang) {
+ configs -= [ "//build/config/sanitizers:default_sanitizer_flags" ]
+ configs += [ "//build/config/sanitizers:default_sanitizer_flags_but_coverage" ]
+ } else {
+ configs -= [ v8_path_prefix + ":v8_gcov_coverage_cflags" ]
+ }
+ }
+ deps += [
+ v8_path_prefix + ":v8_dump_build_config",
+ ]
}
}
diff --git a/deps/v8/gypfiles/download_gold_plugin.py b/deps/v8/gypfiles/download_gold_plugin.py
deleted file mode 100755
index b8131fd449d7e2..00000000000000
--- a/deps/v8/gypfiles/download_gold_plugin.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 the V8 project authors. All rights reserved.
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Script to download LLVM gold plugin from google storage."""
-
-import json
-import os
-import re
-import platform
-import shutil
-import subprocess
-import sys
-import zipfile
-
-# Bail out on windows and cygwin.
-if "win" in platform.system().lower():
- # Python 2.7.6 hangs at the second path.insert command on windows. Works
- # with python 2.7.8.
- print "Gold plugin download not supported on windows."
- sys.exit(0)
-
-SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
-CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
-sys.path.insert(0, os.path.join(CHROME_SRC, 'tools'))
-
-import find_depot_tools
-
-DEPOT_PATH = find_depot_tools.add_depot_tools_to_path()
-GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py')
-
-LLVM_BUILD_PATH = os.path.join(CHROME_SRC, 'third_party', 'llvm-build',
- 'Release+Asserts')
-CLANG_UPDATE_PY = os.path.join(CHROME_SRC, 'tools', 'clang', 'scripts',
- 'update.py')
-CLANG_REVISION = os.popen(CLANG_UPDATE_PY + ' --print-revision').read().rstrip()
-
-CLANG_BUCKET = 'gs://chromium-browser-clang/Linux_x64'
-
-GOLD_PLUGIN_PATH = os.path.join(LLVM_BUILD_PATH, 'lib', 'LLVMgold.so')
-
-sys.path.insert(0, os.path.join(CHROME_SRC, 'tools', 'clang', 'scripts'))
-
-import update
-
-def main():
- if not re.search(r'cfi_vptr=1', os.environ.get('GYP_DEFINES', '')):
- # Bailout if this is not a cfi build.
- print 'Skipping gold plugin download for non-cfi build.'
- return 0
- if (os.path.exists(GOLD_PLUGIN_PATH) and
- update.ReadStampFile().strip() == update.PACKAGE_VERSION):
- # Bailout if clang is up-to-date. This requires the script to be run before
- # the clang update step! I.e. afterwards clang would always be up-to-date.
- print 'Skipping gold plugin download. File present and clang up to date.'
- return 0
-
- # Make sure this works on empty checkouts (i.e. clang not downloaded yet).
- if not os.path.exists(LLVM_BUILD_PATH):
- os.makedirs(LLVM_BUILD_PATH)
-
- targz_name = 'llvmgold-%s.tgz' % CLANG_REVISION
- remote_path = '%s/%s' % (CLANG_BUCKET, targz_name)
-
- os.chdir(LLVM_BUILD_PATH)
-
- # TODO(pcc): Fix gsutil.py cp url file < /dev/null 2>&0
- # (currently aborts with exit code 1,
- # https://github.com/GoogleCloudPlatform/gsutil/issues/289) or change the
- # stdin->stderr redirect in update.py to do something else (crbug.com/494442).
- subprocess.check_call(['python', GSUTIL_PATH,
- 'cp', remote_path, targz_name],
- stderr=open('/dev/null', 'w'))
- subprocess.check_call(['tar', 'xzf', targz_name])
- os.remove(targz_name)
- return 0
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/deps/v8/gypfiles/features.gypi b/deps/v8/gypfiles/features.gypi
index b38735e162ec8e..0eeec2466ebc51 100644
--- a/deps/v8/gypfiles/features.gypi
+++ b/deps/v8/gypfiles/features.gypi
@@ -73,6 +73,9 @@
# Enable/disable JavaScript API accessors.
'v8_js_accessors%': 0,
+
+ # Temporary flag to allow embedders to update their microtasks scopes.
+ 'v8_check_microtasks_scopes_consistency%': 'false',
},
'target_defaults': {
'conditions': [
@@ -118,12 +121,15 @@
['dcheck_always_on!=0', {
'defines': ['DEBUG',],
}],
+ ['v8_check_microtasks_scopes_consistency=="true"', {
+ 'defines': ['V8_CHECK_MICROTASKS_SCOPES_CONSISTENCY',],
+ }],
], # conditions
'configurations': {
'DebugBaseCommon': {
'abstract': 1,
'variables': {
- 'v8_enable_handle_zapping%': 0,
+ 'v8_enable_handle_zapping%': 1,
},
'conditions': [
['v8_enable_handle_zapping==1', {
@@ -133,7 +139,7 @@
}, # Debug
'Release': {
'variables': {
- 'v8_enable_handle_zapping%': 1,
+ 'v8_enable_handle_zapping%': 0,
},
'conditions': [
['v8_enable_handle_zapping==1', {
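
For context on the `v8_check_microtasks_scopes_consistency` flag introduced above: when its define is set, V8 adds checks around the embedder's use of `v8::MicrotasksScope` while embedders migrate to scoped microtask execution. A hedged C++ sketch of the scope pattern involved; the function and its arguments are hypothetical, and the exact checks the define enables are not spelled out in this diff:

```cpp
#include <v8.h>

// With a scoped microtasks policy, a kRunMicrotasks scope defers microtask
// execution until the outermost such scope unwinds.
void CallWithMicrotasksScope(v8::Isolate* isolate,
                             v8::Local<v8::Context> context,
                             v8::Local<v8::Function> fn) {
  v8::MicrotasksScope scope(isolate, v8::MicrotasksScope::kRunMicrotasks);
  v8::Local<v8::Value> result;
  if (!fn->Call(context, context->Global(), 0, nullptr).ToLocal(&result)) {
    // The call threw; pending microtasks still run when `scope` unwinds.
  }
}
```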
diff --git a/deps/v8/gypfiles/isolate.gypi b/deps/v8/gypfiles/isolate.gypi
index af3b3ae5c8926e..11b05705307625 100644
--- a/deps/v8/gypfiles/isolate.gypi
+++ b/deps/v8/gypfiles/isolate.gypi
@@ -75,7 +75,6 @@
'--config-variable', 'has_valgrind=<(has_valgrind)',
'--config-variable', 'icu_use_data_file_flag=<(icu_use_data_file_flag)',
'--config-variable', 'is_gn=0',
- '--config-variable', 'lsan=<(lsan)',
'--config-variable', 'msan=<(msan)',
'--config-variable', 'tsan=<(tsan)',
'--config-variable', 'coverage=<(coverage)',
diff --git a/deps/v8/gypfiles/standalone.gypi b/deps/v8/gypfiles/standalone.gypi
index 4c805bf6432ff3..a30373be6129e1 100644
--- a/deps/v8/gypfiles/standalone.gypi
+++ b/deps/v8/gypfiles/standalone.gypi
@@ -43,6 +43,7 @@
'v8_enable_i18n_support%': 1,
'v8_deprecation_warnings': 1,
'v8_imminent_deprecation_warnings': 1,
+ 'v8_check_microtasks_scopes_consistency': 'true',
'msvs_multi_core_compile%': '1',
'mac_deployment_target%': '10.7',
'release_extra_cflags%': '',
@@ -135,8 +136,6 @@
'clang_dir%': '<(base_dir)/third_party/llvm-build/Release+Asserts',
'make_clang_dir%': '<(base_dir)/third_party/llvm-build/Release+Asserts',
- 'use_lto%': 0,
-
# Control Flow Integrity for virtual calls and casts.
# See http://clang.llvm.org/docs/ControlFlowIntegrity.html
'cfi_vptr%': 0,
@@ -201,7 +200,6 @@
'use_prebuilt_instrumented_libraries%': '<(use_prebuilt_instrumented_libraries)',
'use_custom_libcxx%': '<(use_custom_libcxx)',
'linux_use_bundled_gold%': '<(linux_use_bundled_gold)',
- 'use_lto%': '<(use_lto)',
'cfi_vptr%': '<(cfi_vptr)',
'cfi_diag%': '<(cfi_diag)',
'cfi_blacklist%': '<(cfi_blacklist)',
@@ -264,14 +262,14 @@
# goma doesn't support PDB yet.
'fastbuild%': 1,
}],
- ['((v8_target_arch=="ia32" or v8_target_arch=="x64" or v8_target_arch=="x87") and \
+ ['((v8_target_arch=="ia32" or v8_target_arch=="x64") and \
(OS=="linux" or OS=="mac")) or (v8_target_arch=="ppc64" and OS=="linux")', {
'v8_enable_gdbjit%': 1,
}, {
'v8_enable_gdbjit%': 0,
}],
['(OS=="linux" or OS=="mac") and (target_arch=="ia32" or target_arch=="x64") and \
- (v8_target_arch!="x87" and v8_target_arch!="x32")', {
+ v8_target_arch!="x32"', {
'clang%': 1,
}, {
'clang%': 0,
@@ -292,9 +290,6 @@
# the C++ standard library is used.
'use_custom_libcxx%': 1,
}],
- ['cfi_vptr==1', {
- 'use_lto%': 1,
- }],
['OS=="android"', {
# Location of Android NDK.
'variables': {
@@ -678,15 +673,11 @@
}],
],
}],
- ['linux_use_bundled_gold==1 and not (clang==0 and use_lto==1)', {
+ ['linux_use_bundled_gold==1', {
# Put our binutils, which contains gold in the search path. We pass
# the path to gold to the compiler. gyp leaves unspecified what the
# cwd is when running the compiler, so the normal gyp path-munging
# fails us. This hack gets the right path.
- #
- # Disabled when using GCC LTO because GCC also uses the -B search
- # path at link time to find "as", and our bundled "as" can only
- # target x86.
'ldflags': [
# Note, Chromium allows ia32 host arch as well, we limit this to
# x64 in v8.
@@ -696,12 +687,15 @@
['sysroot!="" and clang==1', {
'target_conditions': [
['_toolset=="target"', {
+ 'variables': {
+ 'ld_paths': ['JavaScript>API
diff --git a/deps/v8/include/PRESUBMIT.py b/deps/v8/include/PRESUBMIT.py
new file mode 100644
index 00000000000000..386f2e5006186f
--- /dev/null
+++ b/deps/v8/include/PRESUBMIT.py
@@ -0,0 +1,29 @@
+# Copyright 2017 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Presubmit script for //v8/include
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details about the presubmit API built into depot_tools.
+"""
+
+import os
+
+
+def PostUploadHook(cl, change, output_api):
+ """git cl upload will call this hook after the issue is created/modified.
+
+ This hook adds extra try bots to the CL description in order to run layout
+ tests in addition to CQ try bots.
+ """
+ def header_filter(f):
+ return '.h' in os.path.split(f.LocalPath())[1]
+ if not change.AffectedFiles(file_filter=header_filter):
+ return []
+ return output_api.EnsureCQIncludeTrybotsAreAdded(
+ cl,
+ [
+ 'master.tryserver.chromium.linux:linux_chromium_rel_ng'
+ ],
+ 'Automatically added layout test trybots to run tests on CQ.')
diff --git a/deps/v8/include/v8-version.h b/deps/v8/include/v8-version.h
index db9369f649a9e5..b2c1e5384eceea 100644
--- a/deps/v8/include/v8-version.h
+++ b/deps/v8/include/v8-version.h
@@ -9,9 +9,9 @@
// NOTE these macros are used by some of the tool scripts and the build
// system so their names cannot be changed without changing the scripts.
#define V8_MAJOR_VERSION 6
-#define V8_MINOR_VERSION 0
-#define V8_BUILD_NUMBER 287
-#define V8_PATCH_LEVEL 53
+#define V8_MINOR_VERSION 1
+#define V8_BUILD_NUMBER 534
+#define V8_PATCH_LEVEL 42
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h
index 1b3117ef34c67d..931dd9a6280c29 100644
--- a/deps/v8/include/v8.h
+++ b/deps/v8/include/v8.h
@@ -869,8 +869,6 @@ class V8_EXPORT HandleScope {
HandleScope(const HandleScope&) = delete;
void operator=(const HandleScope&) = delete;
- void* operator new(size_t size);
- void operator delete(void*, size_t);
protected:
V8_INLINE HandleScope() {}
@@ -881,6 +879,13 @@ class V8_EXPORT HandleScope {
internal::Object* value);
private:
+ // Declaring operator new and delete as deleted is not spec compliant.
+  // Therefore declare them private instead to disable dynamic allocation.
+ void* operator new(size_t size);
+ void* operator new[](size_t size);
+ void operator delete(void*, size_t);
+ void operator delete[](void*, size_t);
+
// Uses heap_object to obtain the current Isolate.
static internal::Object** CreateHandle(internal::HeapObject* heap_object,
internal::Object* value);
@@ -921,10 +926,15 @@ class V8_EXPORT EscapableHandleScope : public HandleScope {
EscapableHandleScope(const EscapableHandleScope&) = delete;
void operator=(const EscapableHandleScope&) = delete;
+
+ private:
+ // Declaring operator new and delete as deleted is not spec compliant.
+  // Therefore declare them private instead to disable dynamic allocation.
void* operator new(size_t size);
+ void* operator new[](size_t size);
void operator delete(void*, size_t);
+ void operator delete[](void*, size_t);
- private:
internal::Object** Escape(internal::Object** escape_value);
internal::Object** escape_slot_;
};
@@ -941,10 +951,15 @@ class V8_EXPORT SealHandleScope {
SealHandleScope(const SealHandleScope&) = delete;
void operator=(const SealHandleScope&) = delete;
+
+ private:
+ // Declaring operator new and delete as deleted is not spec compliant.
+  // Therefore declare them private instead to disable dynamic allocation.
void* operator new(size_t size);
+ void* operator new[](size_t size);
void operator delete(void*, size_t);
+ void operator delete[](void*, size_t);
- private:
internal::Isolate* const isolate_;
internal::Object** prev_limit_;
int prev_sealed_level_;
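
All three scope classes above now hide the array forms of `operator new` and `operator delete` as well. A stand-alone sketch of the private-and-undefined idiom with a hypothetical `StackOnly` class (not V8 code):

```cpp
#include <stddef.h>

// Instances can only live on the stack: the allocation functions are
// declared private and never defined, so `new StackOnly` is ill-formed
// outside the class and unlinkable inside it.
class StackOnly {
 public:
  StackOnly() {}

 private:
  void* operator new(size_t size);
  void* operator new[](size_t size);
  void operator delete(void*, size_t);
  void operator delete[](void*, size_t);
};

int main() {
  StackOnly ok;                       // automatic storage: fine
  // StackOnly* bad = new StackOnly;  // error: 'operator new' is private
  return 0;
}
```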
@@ -1016,9 +1031,6 @@ class ScriptOrigin {
   V8_INLINE Local<Value> ResourceName() const;
   V8_INLINE Local<Integer> ResourceLineOffset() const;
   V8_INLINE Local<Integer> ResourceColumnOffset() const;
- /**
- * Returns true for embedder's debugger scripts
- */
   V8_INLINE Local<Integer> ScriptID() const;
   V8_INLINE Local<Value> SourceMapUrl() const;
V8_INLINE ScriptOriginOptions Options() const { return options_; }
@@ -1032,7 +1044,6 @@ class ScriptOrigin {
   Local<Value> source_map_url_;
};
-
/**
* A compiled JavaScript script, not yet tied to a Context.
*/
@@ -1064,6 +1075,22 @@ class V8_EXPORT UnboundScript {
static const int kNoScriptId = 0;
};
+/**
+ * A location in JavaScript source.
+ */
+class V8_EXPORT Location {
+ public:
+ int GetLineNumber() { return line_number_; }
+ int GetColumnNumber() { return column_number_; }
+
+ Location(int line_number, int column_number)
+ : line_number_(line_number), column_number_(column_number) {}
+
+ private:
+ int line_number_;
+ int column_number_;
+};
+
/**
* This is an unfinished experimental feature, and is only exposed
* here for internal testing purposes. DO NOT USE.
@@ -1072,6 +1099,28 @@ class V8_EXPORT UnboundScript {
*/
class V8_EXPORT Module {
public:
+ /**
+ * The different states a module can be in.
+ */
+ enum Status {
+ kUninstantiated,
+ kInstantiating,
+ kInstantiated,
+ kEvaluating,
+ kEvaluated,
+ kErrored
+ };
+
+ /**
+ * Returns the module's current status.
+ */
+ Status GetStatus() const;
+
+ /**
+ * For a module in kErrored status, this returns the corresponding exception.
+ */
+  Local<Value> GetException() const;
+
/**
* Returns the number of modules requested by this module.
*/
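
A sketch of how an embedder might branch on the new `Module::Status` values; `module` is assumed to be any compiled `Local<Module>`, and only names visible in this hunk are relied on:

```cpp
#include <v8.h>

const char* DescribeModule(v8::Local<v8::Module> module) {
  switch (module->GetStatus()) {
    case v8::Module::kUninstantiated:
    case v8::Module::kInstantiating:
      return "not yet instantiated";
    case v8::Module::kInstantiated:
    case v8::Module::kEvaluating:
      return "instantiated, not yet evaluated";
    case v8::Module::kEvaluated:
      return "evaluated";
    case v8::Module::kErrored:
      // GetException() is only meaningful in this state.
      return "errored; GetException() holds the exception";
  }
  return "unknown";
}
```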
@@ -1083,6 +1132,12 @@ class V8_EXPORT Module {
*/
   Local<String> GetModuleRequest(int i) const;
+ /**
+ * Returns the source location (line number and column number) of the ith
+ * module specifier's first occurrence in this module.
+ */
+ Location GetModuleRequestLocation(int i) const;
+
/**
* Returns the identity hash for this object.
*/
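
Combining `GetModuleRequest`, the new `GetModuleRequestLocation`, and the `Location` class, a hedged sketch that lists a module's import specifiers with their source positions (`GetModuleRequestsLength` and the one-argument `Utf8Value` constructor are assumed from this V8 version):

```cpp
#include <stdio.h>
#include <v8.h>

void DumpModuleRequests(v8::Local<v8::Module> module) {
  for (int i = 0; i < module->GetModuleRequestsLength(); ++i) {
    // Specifier of the ith import and its first occurrence in the source.
    v8::String::Utf8Value specifier(module->GetModuleRequest(i));
    v8::Location loc = module->GetModuleRequestLocation(i);
    printf("import '%s' at %d:%d\n", *specifier, loc.GetLineNumber(),
           loc.GetColumnNumber());
  }
}
```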
@@ -1095,40 +1150,29 @@ class V8_EXPORT Module {
/**
* ModuleDeclarationInstantiation
*
- * Returns false if an exception occurred during instantiation. (In the case
- * where the callback throws an exception, that exception is propagated.)
+   * Returns an empty Maybe<bool> if an exception occurred during
+ * instantiation. (In the case where the callback throws an exception, that
+ * exception is propagated.)
*/
-  V8_WARN_UNUSED_RESULT bool Instantiate(Local<Context> context,
-                                         ResolveCallback callback);
+  V8_DEPRECATED("Use Maybe version",
+                bool Instantiate(Local<Context> context,
+                                 ResolveCallback callback));
+  V8_WARN_UNUSED_RESULT Maybe<bool> InstantiateModule(Local<Context> context,
+                                                      ResolveCallback callback);
/**
* ModuleEvaluation
*
* Returns the completion value.
+ * TODO(neis): Be more precise or say nothing.
*/
   V8_WARN_UNUSED_RESULT MaybeLocal<Value> Evaluate(Local<Context> context);
-};
-/**
- * This is an unfinished experimental feature, and is only exposed
- * here for internal testing purposes. DO NOT USE.
- *
- * A compiled JavaScript module.
- */
-class V8_EXPORT DynamicImportResult {
- public:
/**
- * Resolves the promise with the namespace object of the given
- * module.
+ * Returns the namespace object of this module. The module must have
+ * been successfully instantiated before and must not be errored.
*/
-  V8_WARN_UNUSED_RESULT bool FinishDynamicImportSuccess(Local<Context> context,
-                                                        Local<Module> module);
-
-  /**
-   * Rejects the promise with the given exception.
-   */
-  V8_WARN_UNUSED_RESULT bool FinishDynamicImportFailure(Local<Context> context,
-                                                        Local<Value> exception);
+  Local<Value> GetModuleNamespace();
};
/**
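
A sketch of the migrated flow, using the `Maybe`-returning `InstantiateModule` in place of the deprecated `Instantiate`; `ResolveModule` is a hypothetical resolver matching `Module::ResolveCallback`:

```cpp
#include <v8.h>

// Hypothetical resolver: a real embedder would look the specifier up in
// its module map instead of resolving nothing.
v8::MaybeLocal<v8::Module> ResolveModule(v8::Local<v8::Context> context,
                                         v8::Local<v8::String> specifier,
                                         v8::Local<v8::Module> referrer) {
  return v8::MaybeLocal<v8::Module>();
}

v8::MaybeLocal<v8::Value> RunModule(v8::Local<v8::Context> context,
                                    v8::Local<v8::Module> module) {
  // An empty Maybe means instantiation threw and the exception propagated.
  if (module->InstantiateModule(context, ResolveModule).IsNothing()) {
    return v8::MaybeLocal<v8::Value>();
  }
  v8::Local<v8::Value> completion;
  if (!module->Evaluate(context).ToLocal(&completion)) {
    return v8::MaybeLocal<v8::Value>();  // evaluation threw
  }
  // Valid only now: successfully instantiated and not errored.
  v8::Local<v8::Value> ns = module->GetModuleNamespace();
  (void)ns;
  return completion;
}
```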
@@ -4861,7 +4905,8 @@ class V8_EXPORT External : public Value {
F(ArrayProto_forEach, array_for_each_iterator) \
F(ArrayProto_keys, array_keys_iterator) \
F(ArrayProto_values, array_values_iterator) \
- F(IteratorPrototype, initial_iterator_prototype)
+ F(IteratorPrototype, initial_iterator_prototype) \
+ F(ErrorPrototype, initial_error_prototype) \
enum Intrinsic {
#define V8_DECL_INTRINSIC(name, iname) k##name,
@@ -6059,21 +6104,23 @@ typedef void (*DeprecatedCallCompletedCallback)();
/**
 * HostImportModuleDynamicallyCallback is called when we require the
* embedder to load a module. This is used as part of the dynamic
- * import syntax. The behavior of this callback is not specified in
- * EcmaScript.
+ * import syntax.
*
* The referrer is the name of the file which calls the dynamic
* import. The referrer can be used to resolve the module location.
*
* The specifier is the name of the module that should be imported.
*
- * The DynamicImportResult object is used to signal success or failure
- * by calling it's respective methods.
+ * The embedder must compile, instantiate, and evaluate the Module, and
+ * obtain its namespace object.
*
+ * The Promise returned from this function is forwarded to userland
+ * JavaScript. The embedder must resolve this promise with the module
+ * namespace object. In case of an exception, the embedder must reject
+ * this promise with the exception.
*/
-typedef void (*HostImportModuleDynamicallyCallback)(
-    Isolate* isolate, Local<String> referrer, Local<String> specifier,
-    Local<DynamicImportResult> result);
+typedef MaybeLocal<Promise> (*HostImportModuleDynamicallyCallback)(
+    Local<Context> context, Local<String> referrer, Local<String> specifier);
/**
* PromiseHook with type kInit is called when a new promise is
@@ -6196,11 +6243,32 @@ typedef void (*FailedAccessCheckCallback)(Local<Object> target,