diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 2c26f44657369..868a9608d1a84 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -422,7 +422,6 @@ graph LR;
   libnpmversion-->tap;
   make-fetch-happen-->cacache;
   make-fetch-happen-->http-cache-semantics;
-  make-fetch-happen-->is-lambda;
   make-fetch-happen-->minipass-fetch;
   make-fetch-happen-->minipass-flush;
   make-fetch-happen-->minipass-pipeline;
@@ -444,7 +443,6 @@ graph LR;
   minipass-sized-->minipass;
   minizlib-->minipass;
   minizlib-->rimraf;
-  minizlib-->yallist;
   node-gyp-->env-paths;
   node-gyp-->exponential-backoff;
   node-gyp-->glob;
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index f66c81e4a37dd..460c2d031d769 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -32,29 +32,6 @@
 !/@npmcli/query
 !/@npmcli/redact
 !/@npmcli/run-script
-!/@npmcli/run-script/node_modules/
-/@npmcli/run-script/node_modules/*
-!/@npmcli/run-script/node_modules/@npmcli/
-/@npmcli/run-script/node_modules/@npmcli/*
-!/@npmcli/run-script/node_modules/@npmcli/agent
-!/@npmcli/run-script/node_modules/@npmcli/fs
-!/@npmcli/run-script/node_modules/abbrev
-!/@npmcli/run-script/node_modules/cacache
-!/@npmcli/run-script/node_modules/isexe
-!/@npmcli/run-script/node_modules/make-fetch-happen
-!/@npmcli/run-script/node_modules/make-fetch-happen/node_modules/
-/@npmcli/run-script/node_modules/make-fetch-happen/node_modules/*
-!/@npmcli/run-script/node_modules/make-fetch-happen/node_modules/proc-log
-!/@npmcli/run-script/node_modules/minipass-fetch
-!/@npmcli/run-script/node_modules/node-gyp
-!/@npmcli/run-script/node_modules/node-gyp/node_modules/
-/@npmcli/run-script/node_modules/node-gyp/node_modules/*
-!/@npmcli/run-script/node_modules/node-gyp/node_modules/proc-log
-!/@npmcli/run-script/node_modules/node-gyp/node_modules/which
-!/@npmcli/run-script/node_modules/nopt
-!/@npmcli/run-script/node_modules/ssri
-!/@npmcli/run-script/node_modules/unique-filename
-!/@npmcli/run-script/node_modules/unique-slug
 !/@pkgjs/
 /@pkgjs/*
 !/@pkgjs/parseargs
@@ -127,7 +104,6 @@
 !/ip-regex
 !/is-cidr
 !/is-fullwidth-code-point
-!/is-lambda
 !/isexe
 !/jackspeak
 !/jsbn
@@ -167,7 +143,6 @@
 !/mkdirp
 !/ms
 !/mute-stream
-!/negotiator
 !/node-gyp
 !/node-gyp/node_modules/
 /node-gyp/node_modules/*
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/agents.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/agents.js
deleted file mode 100644
index c541b93001517..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/agents.js
+++ /dev/null
@@ -1,206 +0,0 @@
-'use strict'
-
-const net = require('net')
-const tls = require('tls')
-const { once } = require('events')
-const timers = require('timers/promises')
-const { normalizeOptions, cacheOptions } = require('./options')
-const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js')
-const Errors = require('./errors.js')
-const { Agent: AgentBase } = require('agent-base')
-
-module.exports = class Agent extends AgentBase {
-  #options
-  #timeouts
-  #proxy
-  #noProxy
-  #ProxyAgent
-
-  constructor (options = {}) {
-    const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options)
-
-    super(normalizedOptions)
-
-    this.#options = normalizedOptions
-    this.#timeouts = timeouts
-
-    if (proxy) {
-      this.#proxy = new URL(proxy)
-      this.#noProxy = noProxy
-      this.#ProxyAgent = getProxyAgent(proxy)
-    }
-  }
-
-  get proxy () {
-    return this.#proxy ?
{ url: this.#proxy } : {} - } - - #getProxy (options) { - if (!this.#proxy) { - return - } - - const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, { - proxy: this.#proxy, - noProxy: this.#noProxy, - }) - - if (!proxy) { - return - } - - const cacheKey = cacheOptions({ - ...options, - ...this.#options, - timeouts: this.#timeouts, - proxy, - }) - - if (proxyCache.has(cacheKey)) { - return proxyCache.get(cacheKey) - } - - let ProxyAgent = this.#ProxyAgent - if (Array.isArray(ProxyAgent)) { - ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0] - } - - const proxyAgent = new ProxyAgent(proxy, { - ...this.#options, - socketOptions: { family: this.#options.family }, - }) - proxyCache.set(cacheKey, proxyAgent) - - return proxyAgent - } - - // takes an array of promises and races them against the connection timeout - // which will throw the necessary error if it is hit. This will return the - // result of the promise race. - async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) { - if (timeout) { - const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal }) - .then(() => { - throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`) - }).catch((err) => { - if (err.name === 'AbortError') { - return - } - throw err - }) - promises.push(connectionTimeout) - } - - let result - try { - result = await Promise.race(promises) - ac.abort() - } catch (err) { - ac.abort() - throw err - } - return result - } - - async connect (request, options) { - // if the connection does not have its own lookup function - // set, then use the one from our options - options.lookup ??= this.#options.lookup - - let socket - let timeout = this.#timeouts.connection - const isSecureEndpoint = this.isSecureEndpoint(options) - - const proxy = this.#getProxy(options) - if (proxy) { - // some of the proxies will wait for the socket to fully connect before - // returning so we have to await this while also racing it against the - // connection timeout. - const start = Date.now() - socket = await this.#timeoutConnection({ - options, - timeout, - promises: [proxy.connect(request, options)], - }) - // see how much time proxy.connect took and subtract it from - // the timeout - if (timeout) { - timeout = timeout - (Date.now() - start) - } - } else { - socket = (isSecureEndpoint ? tls : net).connect(options) - } - - socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs) - socket.setNoDelay(this.keepAlive) - - const abortController = new AbortController() - const { signal } = abortController - - const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting'] - ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal }) - : Promise.resolve() - - await this.#timeoutConnection({ - options, - timeout, - promises: [ - connectPromise, - once(socket, 'error', { signal }).then((err) => { - throw err[0] - }), - ], - }, abortController) - - if (this.#timeouts.idle) { - socket.setTimeout(this.#timeouts.idle, () => { - socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`)) - }) - } - - return socket - } - - addRequest (request, options) { - const proxy = this.#getProxy(options) - // it would be better to call proxy.addRequest here but this causes the - // http-proxy-agent to call its super.addRequest which causes the request - // to be added to the agent twice. 
since we only support 3 agents - // currently (see the required agents in proxy.js) we have manually - // checked that the only public methods we need to call are called in the - // next block. this could change in the future and presumably we would get - // failing tests until we have properly called the necessary methods on - // each of our proxy agents - if (proxy?.setRequestProps) { - proxy.setRequestProps(request, options) - } - - request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close') - - if (this.#timeouts.response) { - let responseTimeout - request.once('finish', () => { - setTimeout(() => { - request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy)) - }, this.#timeouts.response) - }) - request.once('response', () => { - clearTimeout(responseTimeout) - }) - } - - if (this.#timeouts.transfer) { - let transferTimeout - request.once('response', (res) => { - setTimeout(() => { - res.destroy(new Errors.TransferTimeoutError(request, this.#proxy)) - }, this.#timeouts.transfer) - res.once('close', () => { - clearTimeout(transferTimeout) - }) - }) - } - - return super.addRequest(request, options) - } -} diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/dns.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/dns.js deleted file mode 100644 index 3c6946c566d73..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/dns.js +++ /dev/null @@ -1,53 +0,0 @@ -'use strict' - -const { LRUCache } = require('lru-cache') -const dns = require('dns') - -// this is a factory so that each request can have its own opts (i.e. ttl) -// while still sharing the cache across all requests -const cache = new LRUCache({ max: 50 }) - -const getOptions = ({ - family = 0, - hints = dns.ADDRCONFIG, - all = false, - verbatim = undefined, - ttl = 5 * 60 * 1000, - lookup = dns.lookup, -}) => ({ - // hints and lookup are returned since both are top level properties to (net|tls).connect - hints, - lookup: (hostname, ...args) => { - const callback = args.pop() // callback is always last arg - const lookupOptions = args[0] ?? {} - - const options = { - family, - hints, - all, - verbatim, - ...(typeof lookupOptions === 'number' ? 
{ family: lookupOptions } : lookupOptions), - } - - const key = JSON.stringify({ hostname, ...options }) - - if (cache.has(key)) { - const cached = cache.get(key) - return process.nextTick(callback, null, ...cached) - } - - lookup(hostname, options, (err, ...result) => { - if (err) { - return callback(err) - } - - cache.set(key, result, { ttl }) - return callback(null, ...result) - }) - }, -}) - -module.exports = { - cache, - getOptions, -} diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/errors.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/errors.js deleted file mode 100644 index 70475aec8eb35..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/errors.js +++ /dev/null @@ -1,61 +0,0 @@ -'use strict' - -class InvalidProxyProtocolError extends Error { - constructor (url) { - super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``) - this.code = 'EINVALIDPROXY' - this.proxy = url - } -} - -class ConnectionTimeoutError extends Error { - constructor (host) { - super(`Timeout connecting to host \`${host}\``) - this.code = 'ECONNECTIONTIMEOUT' - this.host = host - } -} - -class IdleTimeoutError extends Error { - constructor (host) { - super(`Idle timeout reached for host \`${host}\``) - this.code = 'EIDLETIMEOUT' - this.host = host - } -} - -class ResponseTimeoutError extends Error { - constructor (request, proxy) { - let msg = 'Response timeout ' - if (proxy) { - msg += `from proxy \`${proxy.host}\` ` - } - msg += `connecting to host \`${request.host}\`` - super(msg) - this.code = 'ERESPONSETIMEOUT' - this.proxy = proxy - this.request = request - } -} - -class TransferTimeoutError extends Error { - constructor (request, proxy) { - let msg = 'Transfer timeout ' - if (proxy) { - msg += `from proxy \`${proxy.host}\` ` - } - msg += `for \`${request.host}\`` - super(msg) - this.code = 'ETRANSFERTIMEOUT' - this.proxy = proxy - this.request = request - } -} - -module.exports = { - InvalidProxyProtocolError, - ConnectionTimeoutError, - IdleTimeoutError, - ResponseTimeoutError, - TransferTimeoutError, -} diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/index.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/index.js deleted file mode 100644 index b33d6eaef07a2..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/index.js +++ /dev/null @@ -1,56 +0,0 @@ -'use strict' - -const { LRUCache } = require('lru-cache') -const { normalizeOptions, cacheOptions } = require('./options') -const { getProxy, proxyCache } = require('./proxy.js') -const dns = require('./dns.js') -const Agent = require('./agents.js') - -const agentCache = new LRUCache({ max: 20 }) - -const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => { - // false has meaning so this can't be a simple truthiness check - if (agent != null) { - return agent - } - - url = new URL(url) - - const proxyForUrl = getProxy(url, { proxy, noProxy }) - const normalizedOptions = { - ...normalizeOptions(options), - proxy: proxyForUrl, - } - - const cacheKey = cacheOptions({ - ...normalizedOptions, - secureEndpoint: url.protocol === 'https:', - }) - - if (agentCache.has(cacheKey)) { - return agentCache.get(cacheKey) - } - - const newAgent = new Agent(normalizedOptions) - agentCache.set(cacheKey, newAgent) - - return newAgent -} - -module.exports = { - getAgent, - Agent, - // these are exported for backwards compatability - HttpAgent: Agent, - HttpsAgent: Agent, - cache: { - 
proxy: proxyCache, - agent: agentCache, - dns: dns.cache, - clear: () => { - proxyCache.clear() - agentCache.clear() - dns.cache.clear() - }, - }, -} diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/options.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/options.js deleted file mode 100644 index 0bf53f725f084..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/options.js +++ /dev/null @@ -1,86 +0,0 @@ -'use strict' - -const dns = require('./dns') - -const normalizeOptions = (opts) => { - const family = parseInt(opts.family ?? '0', 10) - const keepAlive = opts.keepAlive ?? true - - const normalized = { - // nodejs http agent options. these are all the defaults - // but kept here to increase the likelihood of cache hits - // https://nodejs.org/api/http.html#new-agentoptions - keepAliveMsecs: keepAlive ? 1000 : undefined, - maxSockets: opts.maxSockets ?? 15, - maxTotalSockets: Infinity, - maxFreeSockets: keepAlive ? 256 : undefined, - scheduling: 'fifo', - // then spread the rest of the options - ...opts, - // we already set these to their defaults that we want - family, - keepAlive, - // our custom timeout options - timeouts: { - // the standard timeout option is mapped to our idle timeout - // and then deleted below - idle: opts.timeout ?? 0, - connection: 0, - response: 0, - transfer: 0, - ...opts.timeouts, - }, - // get the dns options that go at the top level of socket connection - ...dns.getOptions({ family, ...opts.dns }), - } - - // remove timeout since we already used it to set our own idle timeout - delete normalized.timeout - - return normalized -} - -const createKey = (obj) => { - let key = '' - const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0]) - for (let [k, v] of sorted) { - if (v == null) { - v = 'null' - } else if (v instanceof URL) { - v = v.toString() - } else if (typeof v === 'object') { - v = createKey(v) - } - key += `${k}:${v}:` - } - return key -} - -const cacheOptions = ({ secureEndpoint, ...options }) => createKey({ - secureEndpoint: !!secureEndpoint, - // socket connect options - family: options.family, - hints: options.hints, - localAddress: options.localAddress, - // tls specific connect options - strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false, - ca: secureEndpoint ? options.ca : null, - cert: secureEndpoint ? options.cert : null, - key: secureEndpoint ? 
options.key : null, - // http agent options - keepAlive: options.keepAlive, - keepAliveMsecs: options.keepAliveMsecs, - maxSockets: options.maxSockets, - maxTotalSockets: options.maxTotalSockets, - maxFreeSockets: options.maxFreeSockets, - scheduling: options.scheduling, - // timeout options - timeouts: options.timeouts, - // proxy - proxy: options.proxy, -}) - -module.exports = { - normalizeOptions, - cacheOptions, -} diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/proxy.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/proxy.js deleted file mode 100644 index 6272e929e57bc..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/lib/proxy.js +++ /dev/null @@ -1,88 +0,0 @@ -'use strict' - -const { HttpProxyAgent } = require('http-proxy-agent') -const { HttpsProxyAgent } = require('https-proxy-agent') -const { SocksProxyAgent } = require('socks-proxy-agent') -const { LRUCache } = require('lru-cache') -const { InvalidProxyProtocolError } = require('./errors.js') - -const PROXY_CACHE = new LRUCache({ max: 20 }) - -const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols) - -const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy']) - -const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => { - key = key.toLowerCase() - if (PROXY_ENV_KEYS.has(key)) { - acc[key] = value - } - return acc -}, {}) - -const getProxyAgent = (url) => { - url = new URL(url) - - const protocol = url.protocol.slice(0, -1) - if (SOCKS_PROTOCOLS.has(protocol)) { - return SocksProxyAgent - } - if (protocol === 'https' || protocol === 'http') { - return [HttpProxyAgent, HttpsProxyAgent] - } - - throw new InvalidProxyProtocolError(url) -} - -const isNoProxy = (url, noProxy) => { - if (typeof noProxy === 'string') { - noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean) - } - - if (!noProxy || !noProxy.length) { - return false - } - - const hostSegments = url.hostname.split('.').reverse() - - return noProxy.some((no) => { - const noSegments = no.split('.').filter(Boolean).reverse() - if (!noSegments.length) { - return false - } - - for (let i = 0; i < noSegments.length; i++) { - if (hostSegments[i] !== noSegments[i]) { - return false - } - } - - return true - }) -} - -const getProxy = (url, { proxy, noProxy }) => { - url = new URL(url) - - if (!proxy) { - proxy = url.protocol === 'https:' - ? 
PROXY_ENV.https_proxy - : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy - } - - if (!noProxy) { - noProxy = PROXY_ENV.no_proxy - } - - if (!proxy || isNoProxy(url, noProxy)) { - return null - } - - return new URL(proxy) -} - -module.exports = { - getProxyAgent, - getProxy, - proxyCache: PROXY_CACHE, -} diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/package.json b/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/package.json deleted file mode 100644 index ef5b4e3228cc4..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/agent/package.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "name": "@npmcli/agent", - "version": "2.2.2", - "description": "the http/https agent used by the npm cli", - "main": "lib/index.js", - "scripts": { - "gencerts": "bash scripts/create-cert.sh", - "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap", - "posttest": "npm run lint" - }, - "author": "GitHub Inc.", - "license": "ISC", - "bugs": { - "url": "https://github.com/npm/agent/issues" - }, - "homepage": "https://github.com/npm/agent#readme", - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^16.14.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.3", - "publish": "true" - }, - "dependencies": { - "agent-base": "^7.1.0", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.1", - "lru-cache": "^10.0.1", - "socks-proxy-agent": "^8.0.3" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.3", - "minipass-fetch": "^3.0.3", - "nock": "^13.2.7", - "semver": "^7.5.4", - "simple-socks": "^3.1.0", - "tap": "^16.3.0" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/agent.git" - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/LICENSE.md b/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/LICENSE.md deleted file mode 100644 index 5fc208ff122e0..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/LICENSE.md +++ /dev/null @@ -1,20 +0,0 @@ - - -ISC License - -Copyright npm, Inc. - -Permission to use, copy, modify, and/or distribute this -software for any purpose with or without fee is hereby -granted, provided that the above copyright notice and this -permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL -WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO -EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. 
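The @npmcli/agent sources removed above are the same code that stays installed as a top-level dependency, so the package's public surface is unchanged. A minimal usage sketch, based on the getAgent, cache, and timeout options visible in the deleted lib/index.js and lib/options.js — the target URL and option values here are illustrative, not taken from this change:

'use strict'
const { getAgent, cache } = require('@npmcli/agent')
const https = require('https')

// getAgent() normalizes its options (family, timeouts, proxy/noProxy),
// builds a cache key from them, and returns a cached Agent when possible
const agent = getAgent('https://registry.npmjs.org', {
  family: 4,
  timeouts: { connection: 5000, idle: 10000 },
})

https.get('https://registry.npmjs.org/-/ping', { agent }, (res) => {
  console.log(res.statusCode)
  // the exported cache wraps the agent, proxy, and dns LRU caches
  cache.clear()
})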
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/common/get-options.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/common/get-options.js deleted file mode 100644 index cb5982f79077a..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/common/get-options.js +++ /dev/null @@ -1,20 +0,0 @@ -// given an input that may or may not be an object, return an object that has -// a copy of every defined property listed in 'copy'. if the input is not an -// object, assign it to the property named by 'wrap' -const getOptions = (input, { copy, wrap }) => { - const result = {} - - if (input && typeof input === 'object') { - for (const prop of copy) { - if (input[prop] !== undefined) { - result[prop] = input[prop] - } - } - } else { - result[wrap] = input - } - - return result -} - -module.exports = getOptions diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/common/node.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/common/node.js deleted file mode 100644 index 4d13bc037359d..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/common/node.js +++ /dev/null @@ -1,9 +0,0 @@ -const semver = require('semver') - -const satisfies = (range) => { - return semver.satisfies(process.version, range, { includePrerelease: true }) -} - -module.exports = { - satisfies, -} diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/cp/LICENSE b/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/cp/LICENSE deleted file mode 100644 index 93546dfb7655b..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/cp/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -(The MIT License) - -Copyright (c) 2011-2017 JP Richardson - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/cp/errors.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/cp/errors.js deleted file mode 100644 index 1cd1e05d0c533..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/cp/errors.js +++ /dev/null @@ -1,129 +0,0 @@ -'use strict' -const { inspect } = require('util') - -// adapted from node's internal/errors -// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js - -// close copy of node's internal SystemError class. 
-class SystemError { - constructor (code, prefix, context) { - // XXX context.code is undefined in all constructors used in cp/polyfill - // that may be a bug copied from node, maybe the constructor should use - // `code` not `errno`? nodejs/node#41104 - let message = `${prefix}: ${context.syscall} returned ` + - `${context.code} (${context.message})` - - if (context.path !== undefined) { - message += ` ${context.path}` - } - if (context.dest !== undefined) { - message += ` => ${context.dest}` - } - - this.code = code - Object.defineProperties(this, { - name: { - value: 'SystemError', - enumerable: false, - writable: true, - configurable: true, - }, - message: { - value: message, - enumerable: false, - writable: true, - configurable: true, - }, - info: { - value: context, - enumerable: true, - configurable: true, - writable: false, - }, - errno: { - get () { - return context.errno - }, - set (value) { - context.errno = value - }, - enumerable: true, - configurable: true, - }, - syscall: { - get () { - return context.syscall - }, - set (value) { - context.syscall = value - }, - enumerable: true, - configurable: true, - }, - }) - - if (context.path !== undefined) { - Object.defineProperty(this, 'path', { - get () { - return context.path - }, - set (value) { - context.path = value - }, - enumerable: true, - configurable: true, - }) - } - - if (context.dest !== undefined) { - Object.defineProperty(this, 'dest', { - get () { - return context.dest - }, - set (value) { - context.dest = value - }, - enumerable: true, - configurable: true, - }) - } - } - - toString () { - return `${this.name} [${this.code}]: ${this.message}` - } - - [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { - return inspect(this, { - ...ctx, - getters: true, - customInspect: false, - }) - } -} - -function E (code, message) { - module.exports[code] = class NodeError extends SystemError { - constructor (ctx) { - super(code, message, ctx) - } - } -} - -E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') -E('ERR_FS_CP_EEXIST', 'Target already exists') -E('ERR_FS_CP_EINVAL', 'Invalid src or dest') -E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') -E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') -E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') -E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') -E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') -E('ERR_FS_EISDIR', 'Path is a directory') - -module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { - constructor (name, expected, actual) { - super() - this.code = 'ERR_INVALID_ARG_TYPE' - this.message = `The ${name} argument must be ${expected}. 
Received ${typeof actual}` - } -} diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/cp/index.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/cp/index.js deleted file mode 100644 index 972ce7aa12abe..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/cp/index.js +++ /dev/null @@ -1,22 +0,0 @@ -const fs = require('fs/promises') -const getOptions = require('../common/get-options.js') -const node = require('../common/node.js') -const polyfill = require('./polyfill.js') - -// node 16.7.0 added fs.cp -const useNative = node.satisfies('>=16.7.0') - -const cp = async (src, dest, opts) => { - const options = getOptions(opts, { - copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], - }) - - // the polyfill is tested separately from this module, no need to hack - // process.version to try to trigger it just for coverage - // istanbul ignore next - return useNative - ? fs.cp(src, dest, options) - : polyfill(src, dest, options) -} - -module.exports = cp diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/cp/polyfill.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/cp/polyfill.js deleted file mode 100644 index 80eb10de97191..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/cp/polyfill.js +++ /dev/null @@ -1,428 +0,0 @@ -// this file is a modified version of the code in node 17.2.0 -// which is, in turn, a modified version of the fs-extra module on npm -// node core changes: -// - Use of the assert module has been replaced with core's error system. -// - All code related to the glob dependency has been removed. -// - Bring your own custom fs module is not currently supported. -// - Some basic code cleanup. -// changes here: -// - remove all callback related code -// - drop sync support -// - change assertions back to non-internal methods (see options.js) -// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows -'use strict' - -const { - ERR_FS_CP_DIR_TO_NON_DIR, - ERR_FS_CP_EEXIST, - ERR_FS_CP_EINVAL, - ERR_FS_CP_FIFO_PIPE, - ERR_FS_CP_NON_DIR_TO_DIR, - ERR_FS_CP_SOCKET, - ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, - ERR_FS_CP_UNKNOWN, - ERR_FS_EISDIR, - ERR_INVALID_ARG_TYPE, -} = require('./errors.js') -const { - constants: { - errno: { - EEXIST, - EISDIR, - EINVAL, - ENOTDIR, - }, - }, -} = require('os') -const { - chmod, - copyFile, - lstat, - mkdir, - readdir, - readlink, - stat, - symlink, - unlink, - utimes, -} = require('fs/promises') -const { - dirname, - isAbsolute, - join, - parse, - resolve, - sep, - toNamespacedPath, -} = require('path') -const { fileURLToPath } = require('url') - -const defaultOptions = { - dereference: false, - errorOnExist: false, - filter: undefined, - force: true, - preserveTimestamps: false, - recursive: false, -} - -async function cp (src, dest, opts) { - if (opts != null && typeof opts !== 'object') { - throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) - } - return cpFn( - toNamespacedPath(getValidatedPath(src)), - toNamespacedPath(getValidatedPath(dest)), - { ...defaultOptions, ...opts }) -} - -function getValidatedPath (fileURLOrPath) { - const path = fileURLOrPath != null && fileURLOrPath.href - && fileURLOrPath.origin - ? 
fileURLToPath(fileURLOrPath) - : fileURLOrPath - return path -} - -async function cpFn (src, dest, opts) { - // Warn about using preserveTimestamps on 32-bit node - // istanbul ignore next - if (opts.preserveTimestamps && process.arch === 'ia32') { - const warning = 'Using the preserveTimestamps option in 32-bit ' + - 'node is not recommended' - process.emitWarning(warning, 'TimestampPrecisionWarning') - } - const stats = await checkPaths(src, dest, opts) - const { srcStat, destStat } = stats - await checkParentPaths(src, srcStat, dest) - if (opts.filter) { - return handleFilter(checkParentDir, destStat, src, dest, opts) - } - return checkParentDir(destStat, src, dest, opts) -} - -async function checkPaths (src, dest, opts) { - const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) - if (destStat) { - if (areIdentical(srcStat, destStat)) { - throw new ERR_FS_CP_EINVAL({ - message: 'src and dest cannot be the same', - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - throw new ERR_FS_CP_DIR_TO_NON_DIR({ - message: `cannot overwrite directory ${src} ` + - `with non-directory ${dest}`, - path: dest, - syscall: 'cp', - errno: EISDIR, - }) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - throw new ERR_FS_CP_NON_DIR_TO_DIR({ - message: `cannot overwrite non-directory ${src} ` + - `with directory ${dest}`, - path: dest, - syscall: 'cp', - errno: ENOTDIR, - }) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${src} to a subdirectory of self ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return { srcStat, destStat } -} - -function areIdentical (srcStat, destStat) { - return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && - destStat.dev === srcStat.dev -} - -function getStats (src, dest, opts) { - const statFunc = opts.dereference ? - (file) => stat(file, { bigint: true }) : - (file) => lstat(file, { bigint: true }) - return Promise.all([ - statFunc(src), - statFunc(dest).catch((err) => { - // istanbul ignore next: unsure how to cover. - if (err.code === 'ENOENT') { - return null - } - // istanbul ignore next: unsure how to cover. - throw err - }), - ]) -} - -async function checkParentDir (destStat, src, dest, opts) { - const destParent = dirname(dest) - const dirExists = await pathExists(destParent) - if (dirExists) { - return getStatsForCopy(destStat, src, dest, opts) - } - await mkdir(destParent, { recursive: true }) - return getStatsForCopy(destStat, src, dest, opts) -} - -function pathExists (dest) { - return stat(dest).then( - () => true, - // istanbul ignore next: not sure when this would occur - (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) -} - -// Recursively check if dest parent is a subdirectory of src. -// It works for all file types including symlinks since it -// checks the src and dest inodes. It starts from the deepest -// parent and stops once it reaches the src parent or the root path. 
-async function checkParentPaths (src, srcStat, dest) { - const srcParent = resolve(dirname(src)) - const destParent = resolve(dirname(dest)) - if (destParent === srcParent || destParent === parse(destParent).root) { - return - } - let destStat - try { - destStat = await stat(destParent, { bigint: true }) - } catch (err) { - // istanbul ignore else: not sure when this would occur - if (err.code === 'ENOENT') { - return - } - // istanbul ignore next: not sure when this would occur - throw err - } - if (areIdentical(srcStat, destStat)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${src} to a subdirectory of self ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return checkParentPaths(src, srcStat, destParent) -} - -const normalizePathToArray = (path) => - resolve(path).split(sep).filter(Boolean) - -// Return true if dest is a subdir of src, otherwise false. -// It only checks the path strings. -function isSrcSubdir (src, dest) { - const srcArr = normalizePathToArray(src) - const destArr = normalizePathToArray(dest) - return srcArr.every((cur, i) => destArr[i] === cur) -} - -async function handleFilter (onInclude, destStat, src, dest, opts, cb) { - const include = await opts.filter(src, dest) - if (include) { - return onInclude(destStat, src, dest, opts, cb) - } -} - -function startCopy (destStat, src, dest, opts) { - if (opts.filter) { - return handleFilter(getStatsForCopy, destStat, src, dest, opts) - } - return getStatsForCopy(destStat, src, dest, opts) -} - -async function getStatsForCopy (destStat, src, dest, opts) { - const statFn = opts.dereference ? stat : lstat - const srcStat = await statFn(src) - // istanbul ignore else: can't portably test FIFO - if (srcStat.isDirectory() && opts.recursive) { - return onDir(srcStat, destStat, src, dest, opts) - } else if (srcStat.isDirectory()) { - throw new ERR_FS_EISDIR({ - message: `${src} is a directory (not copied)`, - path: src, - syscall: 'cp', - errno: EINVAL, - }) - } else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) { - return onFile(srcStat, destStat, src, dest, opts) - } else if (srcStat.isSymbolicLink()) { - return onLink(destStat, src, dest) - } else if (srcStat.isSocket()) { - throw new ERR_FS_CP_SOCKET({ - message: `cannot copy a socket file: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } else if (srcStat.isFIFO()) { - throw new ERR_FS_CP_FIFO_PIPE({ - message: `cannot copy a FIFO pipe: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - // istanbul ignore next: should be unreachable - throw new ERR_FS_CP_UNKNOWN({ - message: `cannot copy an unknown file type: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) -} - -function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) { - return _copyFile(srcStat, src, dest, opts) - } - return mayCopyFile(srcStat, src, dest, opts) -} - -async function mayCopyFile (srcStat, src, dest, opts) { - if (opts.force) { - await unlink(dest) - return _copyFile(srcStat, src, dest, opts) - } else if (opts.errorOnExist) { - throw new ERR_FS_CP_EEXIST({ - message: `${dest} already exists`, - path: dest, - syscall: 'cp', - errno: EEXIST, - }) - } -} - -async function _copyFile (srcStat, src, dest, opts) { - await copyFile(src, dest) - if (opts.preserveTimestamps) { - return handleTimestampsAndMode(srcStat.mode, src, dest) - } - return setDestMode(dest, srcStat.mode) -} - -async function handleTimestampsAndMode (srcMode, src, dest) { - // Make sure the file is writable before 
setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) { - await makeFileWritable(dest, srcMode) - return setDestTimestampsAndMode(srcMode, src, dest) - } - return setDestTimestampsAndMode(srcMode, src, dest) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return setDestMode(dest, srcMode | 0o200) -} - -async function setDestTimestampsAndMode (srcMode, src, dest) { - await setDestTimestamps(src, dest) - return setDestMode(dest, srcMode) -} - -function setDestMode (dest, srcMode) { - return chmod(dest, srcMode) -} - -async function setDestTimestamps (src, dest) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = await stat(src) - return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) -} - -function onDir (srcStat, destStat, src, dest, opts) { - if (!destStat) { - return mkDirAndCopy(srcStat.mode, src, dest, opts) - } - return copyDir(src, dest, opts) -} - -async function mkDirAndCopy (srcMode, src, dest, opts) { - await mkdir(dest) - await copyDir(src, dest, opts) - return setDestMode(dest, srcMode) -} - -async function copyDir (src, dest, opts) { - const dir = await readdir(src) - for (let i = 0; i < dir.length; i++) { - const item = dir[i] - const srcItem = join(src, item) - const destItem = join(dest, item) - const { destStat } = await checkPaths(srcItem, destItem, opts) - await startCopy(destStat, srcItem, destItem, opts) - } -} - -async function onLink (destStat, src, dest) { - let resolvedSrc = await readlink(src) - if (!isAbsolute(resolvedSrc)) { - resolvedSrc = resolve(dirname(src), resolvedSrc) - } - if (!destStat) { - return symlink(resolvedSrc, dest) - } - let resolvedDest - try { - resolvedDest = await readlink(dest) - } catch (err) { - // Dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. - // istanbul ignore next: can only test on windows - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { - return symlink(resolvedSrc, dest) - } - // istanbul ignore next: should not be possible - throw err - } - if (!isAbsolute(resolvedDest)) { - resolvedDest = resolve(dirname(dest), resolvedDest) - } - if (isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + - `${resolvedDest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - // Do not copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. 
- const srcStat = await stat(src) - if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ - message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return copyLink(resolvedSrc, dest) -} - -async function copyLink (resolvedSrc, dest) { - await unlink(dest) - return symlink(resolvedSrc, dest) -} - -module.exports = cp diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/index.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/index.js deleted file mode 100644 index 81c746304cc42..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/index.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -const cp = require('./cp/index.js') -const withTempDir = require('./with-temp-dir.js') -const readdirScoped = require('./readdir-scoped.js') -const moveFile = require('./move-file.js') - -module.exports = { - cp, - withTempDir, - readdirScoped, - moveFile, -} diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/move-file.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/move-file.js deleted file mode 100644 index d56e06d384659..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/move-file.js +++ /dev/null @@ -1,78 +0,0 @@ -const { dirname, join, resolve, relative, isAbsolute } = require('path') -const fs = require('fs/promises') - -const pathExists = async path => { - try { - await fs.access(path) - return true - } catch (er) { - return er.code !== 'ENOENT' - } -} - -const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { - if (!source || !destination) { - throw new TypeError('`source` and `destination` file required') - } - - options = { - overwrite: true, - ...options, - } - - if (!options.overwrite && await pathExists(destination)) { - throw new Error(`The destination file exists: ${destination}`) - } - - await fs.mkdir(dirname(destination), { recursive: true }) - - try { - await fs.rename(source, destination) - } catch (error) { - if (error.code === 'EXDEV' || error.code === 'EPERM') { - const sourceStat = await fs.lstat(source) - if (sourceStat.isDirectory()) { - const files = await fs.readdir(source) - await Promise.all(files.map((file) => - moveFile(join(source, file), join(destination, file), options, false, symlinks) - )) - } else if (sourceStat.isSymbolicLink()) { - symlinks.push({ source, destination }) - } else { - await fs.copyFile(source, destination) - } - } else { - throw error - } - } - - if (root) { - await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { - let target = await fs.readlink(symSource) - // junction symlinks in windows will be absolute paths, so we need to - // make sure they point to the symlink destination - if (isAbsolute(target)) { - target = resolve(symDestination, relative(symSource, target)) - } - // try to determine what the actual file is so we can create the correct - // type of symlink in windows - let targetStat = 'file' - try { - targetStat = await fs.stat(resolve(dirname(symSource), target)) - if (targetStat.isDirectory()) { - targetStat = 'junction' - } - } catch { - // targetStat remains 'file' - } - await fs.symlink( - target, - symDestination, - targetStat - ) - })) - await fs.rm(source, { recursive: true, force: true }) - } -} - -module.exports = moveFile diff --git 
a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/readdir-scoped.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/readdir-scoped.js deleted file mode 100644 index cd601dfbe7486..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/readdir-scoped.js +++ /dev/null @@ -1,20 +0,0 @@ -const { readdir } = require('fs/promises') -const { join } = require('path') - -const readdirScoped = async (dir) => { - const results = [] - - for (const item of await readdir(dir)) { - if (item.startsWith('@')) { - for (const scopedItem of await readdir(join(dir, item))) { - results.push(join(item, scopedItem)) - } - } else { - results.push(item) - } - } - - return results -} - -module.exports = readdirScoped diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/with-temp-dir.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/with-temp-dir.js deleted file mode 100644 index 0738ac4f29e1b..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/lib/with-temp-dir.js +++ /dev/null @@ -1,39 +0,0 @@ -const { join, sep } = require('path') - -const getOptions = require('./common/get-options.js') -const { mkdir, mkdtemp, rm } = require('fs/promises') - -// create a temp directory, ensure its permissions match its parent, then call -// the supplied function passing it the path to the directory. clean up after -// the function finishes, whether it throws or not -const withTempDir = async (root, fn, opts) => { - const options = getOptions(opts, { - copy: ['tmpPrefix'], - }) - // create the directory - await mkdir(root, { recursive: true }) - - const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || '')) - let err - let result - - try { - result = await fn(target) - } catch (_err) { - err = _err - } - - try { - await rm(target, { force: true, recursive: true }) - } catch { - // ignore errors - } - - if (err) { - throw err - } - - return result -} - -module.exports = withTempDir diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/package.json b/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/package.json deleted file mode 100644 index 5261a11b78000..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/@npmcli/fs/package.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "name": "@npmcli/fs", - "version": "3.1.1", - "description": "filesystem utilities for the npm cli", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "snap": "tap", - "test": "tap", - "npmclilint": "npmcli-lint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/fs.git" - }, - "keywords": [ - "npm", - "oss" - ], - "author": "GitHub Inc.", - "license": "ISC", - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "tap": "^16.0.1" - }, - "dependencies": { - "semver": "^7.3.5" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0" - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/@npmcli/run-script/node_modules/abbrev/LICENSE b/node_modules/@npmcli/run-script/node_modules/abbrev/LICENSE deleted file mode 100644 index 9bcfa9d7d8d26..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/abbrev/LICENSE +++ /dev/null @@ -1,46 +0,0 @@ -This software is dual-licensed under the ISC and MIT licenses. -You may use this software under EITHER of the following licenses. - ----------- - -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - ----------- - -Copyright Isaac Z. Schlueter and Contributors -All rights reserved. - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@npmcli/run-script/node_modules/abbrev/lib/index.js b/node_modules/@npmcli/run-script/node_modules/abbrev/lib/index.js deleted file mode 100644 index 9f48801f049c9..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/abbrev/lib/index.js +++ /dev/null @@ -1,50 +0,0 @@ -module.exports = abbrev - -function abbrev (...args) { - let list = args.length === 1 || Array.isArray(args[0]) ? args[0] : args - - for (let i = 0, l = list.length; i < l; i++) { - list[i] = typeof list[i] === 'string' ? 
list[i] : String(list[i]) - } - - // sort them lexicographically, so that they're next to their nearest kin - list = list.sort(lexSort) - - // walk through each, seeing how much it has in common with the next and previous - const abbrevs = {} - let prev = '' - for (let ii = 0, ll = list.length; ii < ll; ii++) { - const current = list[ii] - const next = list[ii + 1] || '' - let nextMatches = true - let prevMatches = true - if (current === next) { - continue - } - let j = 0 - const cl = current.length - for (; j < cl; j++) { - const curChar = current.charAt(j) - nextMatches = nextMatches && curChar === next.charAt(j) - prevMatches = prevMatches && curChar === prev.charAt(j) - if (!nextMatches && !prevMatches) { - j++ - break - } - } - prev = current - if (j === cl) { - abbrevs[current] = current - continue - } - for (let a = current.slice(0, j); j <= cl; j++) { - abbrevs[a] = current - a += current.charAt(j) - } - } - return abbrevs -} - -function lexSort (a, b) { - return a === b ? 0 : a > b ? 1 : -1 -} diff --git a/node_modules/@npmcli/run-script/node_modules/abbrev/package.json b/node_modules/@npmcli/run-script/node_modules/abbrev/package.json deleted file mode 100644 index e26400445631a..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/abbrev/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "abbrev", - "version": "2.0.0", - "description": "Like ruby's abbrev module, but in js", - "author": "GitHub Inc.", - "main": "lib/index.js", - "scripts": { - "test": "tap", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap", - "posttest": "npm run lint" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/abbrev-js.git" - }, - "license": "ISC", - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.8.0", - "tap": "^16.3.0" - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.8.0" - } -} diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/LICENSE.md b/node_modules/@npmcli/run-script/node_modules/cacache/LICENSE.md deleted file mode 100644 index 8d28acf866d93..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/LICENSE.md +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. 
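The abbrev implementation removed above (lib/index.js) maps every unambiguous prefix of each input word to that word. A small sketch of that behavior, with the output worked out from the deleted implementation:

'use strict'
const abbrev = require('abbrev')

// each prefix that identifies exactly one word maps to that word;
// prefixes shared by several words ('r', 'ru') are ambiguous and omitted
console.log(abbrev('ruby', 'rules', 'rust'))
// => { rub: 'ruby', ruby: 'ruby',
//      rul: 'rules', rule: 'rules', rules: 'rules',
//      rus: 'rust', rust: 'rust' }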
diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/lib/content/path.js b/node_modules/@npmcli/run-script/node_modules/cacache/lib/content/path.js deleted file mode 100644 index ad5a76a4f73f2..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/lib/content/path.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const contentVer = require('../../package.json')['cache-version'].content -const hashToSegments = require('../util/hash-to-segments') -const path = require('path') -const ssri = require('ssri') - -// Current format of content file path: -// -// sha512-BaSE64Hex= -> -// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee -// -module.exports = contentPath - -function contentPath (cache, integrity) { - const sri = ssri.parse(integrity, { single: true }) - // contentPath is the *strongest* algo given - return path.join( - contentDir(cache), - sri.algorithm, - ...hashToSegments(sri.hexDigest()) - ) -} - -module.exports.contentDir = contentDir - -function contentDir (cache) { - return path.join(cache, `content-v${contentVer}`) -} diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/lib/content/read.js b/node_modules/@npmcli/run-script/node_modules/cacache/lib/content/read.js deleted file mode 100644 index 5f6192c3cec56..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/lib/content/read.js +++ /dev/null @@ -1,165 +0,0 @@ -'use strict' - -const fs = require('fs/promises') -const fsm = require('fs-minipass') -const ssri = require('ssri') -const contentPath = require('./path') -const Pipeline = require('minipass-pipeline') - -module.exports = read - -const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 -async function read (cache, integrity, opts = {}) { - const { size } = opts - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // get size - const stat = size ? { size } : await fs.stat(cpath) - return { stat, cpath, sri } - }) - - if (stat.size > MAX_SINGLE_READ_SIZE) { - return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } - - const data = await fs.readFile(cpath, { encoding: null }) - - if (stat.size !== data.length) { - throw sizeError(stat.size, data.length) - } - - if (!ssri.checkData(data, sri)) { - throw integrityError(sri, cpath) - } - - return data -} - -const readPipeline = (cpath, size, sri, stream) => { - stream.push( - new fsm.ReadStream(cpath, { - size, - readSize: MAX_SINGLE_READ_SIZE, - }), - ssri.integrityStream({ - integrity: sri, - size, - }) - ) - return stream -} - -module.exports.stream = readStream -module.exports.readStream = readStream - -function readStream (cache, integrity, opts = {}) { - const { size } = opts - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // get size - const stat = size ? 
{ size } : await fs.stat(cpath) - return { stat, cpath, sri } - }) - - return readPipeline(cpath, stat.size, sri, stream) - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.copy = copy - -function copy (cache, integrity, dest) { - return withContentSri(cache, integrity, (cpath) => { - return fs.copyFile(cpath, dest) - }) -} - -module.exports.hasContent = hasContent - -async function hasContent (cache, integrity) { - if (!integrity) { - return false - } - - try { - return await withContentSri(cache, integrity, async (cpath, sri) => { - const stat = await fs.stat(cpath) - return { size: stat.size, sri, stat } - }) - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - } -} - -async function withContentSri (cache, integrity, fn) { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. - const algo = sri.pickAlgorithm() - const digests = sri[algo] - - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - // Can't use race here because a generic error can happen before - // a ENOENT error, and can happen before a valid result - const results = await Promise.all(digests.map(async (meta) => { - try { - return await withContentSri(cache, meta, fn) - } catch (err) { - if (err.code === 'ENOENT') { - return Object.assign( - new Error('No matching content found for ' + sri.toString()), - { code: 'ENOENT' } - ) - } - return err - } - })) - // Return the first non error if it is found - const result = results.find((r) => !(r instanceof Error)) - if (result) { - return result - } - - // Throw the No matching content found error - const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { - throw enoentError - } - - // Throw generic error - throw results.find((r) => r instanceof Error) - } -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function integrityError (sri, path) { - const err = new Error(`Integrity verification failed for ${sri} (${path})`) - err.code = 'EINTEGRITY' - err.sri = sri - err.path = path - return err -} diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/lib/content/rm.js b/node_modules/@npmcli/run-script/node_modules/cacache/lib/content/rm.js deleted file mode 100644 index ce58d679e4cb2..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/lib/content/rm.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict' - -const fs = require('fs/promises') -const contentPath = require('./path') -const { hasContent } = require('./read') - -module.exports = rm - -async function rm (cache, integrity) { - const content = await hasContent(cache, integrity) - // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { - await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) - return true - } else { - return false - } -} diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/lib/content/write.js b/node_modules/@npmcli/run-script/node_modules/cacache/lib/content/write.js deleted file mode 100644 index 
e7187abca8788..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/lib/content/write.js +++ /dev/null @@ -1,206 +0,0 @@ -'use strict' - -const events = require('events') - -const contentPath = require('./path') -const fs = require('fs/promises') -const { moveFile } = require('@npmcli/fs') -const { Minipass } = require('minipass') -const Pipeline = require('minipass-pipeline') -const Flush = require('minipass-flush') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') -const fsm = require('fs-minipass') - -module.exports = write - -// Cache of move operations in process so we don't duplicate -const moveOperations = new Map() - -async function write (cache, data, opts = {}) { - const { algorithms, size, integrity } = opts - - if (typeof size === 'number' && data.length !== size) { - throw sizeError(size, data.length) - } - - const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) - if (integrity && !ssri.checkData(data, integrity, opts)) { - throw checksumError(integrity, sri) - } - - for (const algo in sri) { - const tmp = await makeTmp(cache, opts) - const hash = sri[algo].toString() - try { - await fs.writeFile(tmp.target, data, { flag: 'wx' }) - await moveToDestination(tmp, cache, hash, opts) - } finally { - if (!tmp.moved) { - await fs.rm(tmp.target, { recursive: true, force: true }) - } - } - } - return { integrity: sri, size: data.length } -} - -module.exports.stream = writeStream - -// writes proxied to the 'inputStream' that is passed to the Promise -// 'end' is deferred until content is handled. -class CacacheWriteStream extends Flush { - constructor (cache, opts) { - super() - this.opts = opts - this.cache = cache - this.inputStream = new Minipass() - this.inputStream.on('error', er => this.emit('error', er)) - this.inputStream.on('drain', () => this.emit('drain')) - this.handleContentP = null - } - - write (chunk, encoding, cb) { - if (!this.handleContentP) { - this.handleContentP = handleContent( - this.inputStream, - this.cache, - this.opts - ) - this.handleContentP.catch(error => this.emit('error', error)) - } - return this.inputStream.write(chunk, encoding, cb) - } - - flush (cb) { - this.inputStream.end(() => { - if (!this.handleContentP) { - const e = new Error('Cache input stream was empty') - e.code = 'ENODATA' - // empty streams are probably emitting end right away. - // defer this one tick by rejecting a promise on it. 
- return Promise.reject(e).catch(cb) - } - // eslint-disable-next-line promise/catch-or-return - this.handleContentP.then( - (res) => { - res.integrity && this.emit('integrity', res.integrity) - // eslint-disable-next-line promise/always-return - res.size !== null && this.emit('size', res.size) - cb() - }, - (er) => cb(er) - ) - }) - } -} - -function writeStream (cache, opts = {}) { - return new CacacheWriteStream(cache, opts) -} - -async function handleContent (inputStream, cache, opts) { - const tmp = await makeTmp(cache, opts) - try { - const res = await pipeToTmp(inputStream, cache, tmp.target, opts) - await moveToDestination( - tmp, - cache, - res.integrity, - opts - ) - return res - } finally { - if (!tmp.moved) { - await fs.rm(tmp.target, { recursive: true, force: true }) - } - } -} - -async function pipeToTmp (inputStream, cache, tmpTarget, opts) { - const outStream = new fsm.WriteStream(tmpTarget, { - flags: 'wx', - }) - - if (opts.integrityEmitter) { - // we need to create these all simultaneously since they can fire in any order - const [integrity, size] = await Promise.all([ - events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), - events.once(opts.integrityEmitter, 'size').then(res => res[0]), - new Pipeline(inputStream, outStream).promise(), - ]) - return { integrity, size } - } - - let integrity - let size - const hashStream = ssri.integrityStream({ - integrity: opts.integrity, - algorithms: opts.algorithms, - size: opts.size, - }) - hashStream.on('integrity', i => { - integrity = i - }) - hashStream.on('size', s => { - size = s - }) - - const pipeline = new Pipeline(inputStream, hashStream, outStream) - await pipeline.promise() - return { integrity, size } -} - -async function makeTmp (cache, opts) { - const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) - return { - target: tmpTarget, - moved: false, - } -} - -async function moveToDestination (tmp, cache, sri) { - const destination = contentPath(cache, sri) - const destDir = path.dirname(destination) - if (moveOperations.has(destination)) { - return moveOperations.get(destination) - } - moveOperations.set( - destination, - fs.mkdir(destDir, { recursive: true }) - .then(async () => { - await moveFile(tmp.target, destination, { overwrite: false }) - tmp.moved = true - return tmp.moved - }) - .catch(err => { - if (!err.message.startsWith('The destination file exists')) { - throw Object.assign(err, { code: 'EEXIST' }) - } - }).finally(() => { - moveOperations.delete(destination) - }) - - ) - return moveOperations.get(destination) -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function checksumError (expected, found) { - const err = new Error(`Integrity check failed: - Wanted: ${expected} - Found: ${found}`) - err.code = 'EINTEGRITY' - err.expected = expected - err.found = found - return err -} diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/lib/entry-index.js b/node_modules/@npmcli/run-script/node_modules/cacache/lib/entry-index.js deleted file mode 100644 index 89c28f2f257d4..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/lib/entry-index.js +++ /dev/null @@ -1,336 +0,0 @@ -'use strict' - -const crypto = require('crypto') -const { - appendFile, - 
mkdir, - readFile, - readdir, - rm, - writeFile, -} = require('fs/promises') -const { Minipass } = require('minipass') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') - -const contentPath = require('./content/path') -const hashToSegments = require('./util/hash-to-segments') -const indexV = require('../package.json')['cache-version'].index -const { moveFile } = require('@npmcli/fs') - -const pMap = require('p-map') -const lsStreamConcurrency = 5 - -module.exports.NotFoundError = class NotFoundError extends Error { - constructor (cache, key) { - super(`No cache entry for ${key} found in ${cache}`) - this.code = 'ENOENT' - this.cache = cache - this.key = key - } -} - -module.exports.compact = compact - -async function compact (cache, key, matchFn, opts = {}) { - const bucket = bucketPath(cache, key) - const entries = await bucketEntries(bucket) - const newEntries = [] - // we loop backwards because the bottom-most result is the newest - // since we add new entries with appendFile - for (let i = entries.length - 1; i >= 0; --i) { - const entry = entries[i] - // a null integrity could mean either a delete was appended - // or the user has simply stored an index that does not map - // to any content. we determine if the user wants to keep the - // null integrity based on the validateEntry function passed in options. - // if the integrity is null and no validateEntry is provided, we break - // as we consider the null integrity to be a deletion of everything - // that came before it. - if (entry.integrity === null && !opts.validateEntry) { - break - } - - // if this entry is valid, and it is either the first entry or - // the newEntries array doesn't already include an entry that - // matches this one based on the provided matchFn, then we add - // it to the beginning of our list - if ((!opts.validateEntry || opts.validateEntry(entry) === true) && - (newEntries.length === 0 || - !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { - newEntries.unshift(entry) - } - } - - const newIndex = '\n' + newEntries.map((entry) => { - const stringified = JSON.stringify(entry) - const hash = hashEntry(stringified) - return `${hash}\t${stringified}` - }).join('\n') - - const setup = async () => { - const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await mkdir(path.dirname(target), { recursive: true }) - return { - target, - moved: false, - } - } - - const teardown = async (tmp) => { - if (!tmp.moved) { - return rm(tmp.target, { recursive: true, force: true }) - } - } - - const write = async (tmp) => { - await writeFile(tmp.target, newIndex, { flag: 'wx' }) - await mkdir(path.dirname(bucket), { recursive: true }) - // we use @npmcli/move-file directly here because we - // want to overwrite the existing file - await moveFile(tmp.target, bucket) - tmp.moved = true - } - - // write the file atomically - const tmp = await setup() - try { - await write(tmp) - } finally { - await teardown(tmp) - } - - // we reverse the list we generated such that the newest - // entries come first in order to make looping through them easier - // the true passed to formatEntry tells it to keep null - // integrity values, if they made it this far it's because - // validateEntry returned true, and as such we should return it - return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) -} - -module.exports.insert = insert - -async function insert (cache, key, integrity, opts = {}) { - const { metadata, size, time } = opts - 
const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && ssri.stringify(integrity), - time: time || Date.now(), - size, - metadata, - } - try { - await mkdir(path.dirname(bucket), { recursive: true }) - const stringified = JSON.stringify(entry) - // NOTE - Cleverness ahoy! - // - // This works because it's tremendously unlikely for an entry to corrupt - // another while still preserving the string length of the JSON in - // question. So, we just slap the length in there and verify it on read. - // - // Thanks to @isaacs for the whiteboarding session that ended up with - // this. - await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - - throw err - } - return formatEntry(cache, entry) -} - -module.exports.find = find - -async function find (cache, key) { - const bucket = bucketPath(cache, key) - try { - const entries = await bucketEntries(bucket) - return entries.reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - } catch (err) { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - } -} - -module.exports.delete = del - -function del (cache, key, opts = {}) { - if (!opts.removeFully) { - return insert(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rm(bucket, { recursive: true, force: true }) -} - -module.exports.lsStream = lsStream - -function lsStream (cache) { - const indexDir = bucketDir(cache) - const stream = new Minipass({ objectMode: true }) - - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const buckets = await readdirOrEmpty(indexDir) - await pMap(buckets, async (bucket) => { - const bucketPath = path.join(indexDir, bucket) - const subbuckets = await readdirOrEmpty(bucketPath) - await pMap(subbuckets, async (subbucket) => { - const subbucketPath = path.join(bucketPath, subbucket) - - // "/cachename//./*" - const subbucketEntries = await readdirOrEmpty(subbucketPath) - await pMap(subbucketEntries, async (entry) => { - const entryPath = path.join(subbucketPath, entry) - try { - const entries = await bucketEntries(entryPath) - // using a Map here prevents duplicate keys from showing up - // twice, I guess? 
- const reduced = entries.reduce((acc, entry) => { - acc.set(entry.key, entry) - return acc - }, new Map()) - // reduced is a map of key => entry - for (const entry of reduced.values()) { - const formatted = formatEntry(cache, entry) - if (formatted) { - stream.write(formatted) - } - } - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - throw err - } - }, - { concurrency: lsStreamConcurrency }) - }, - { concurrency: lsStreamConcurrency }) - }, - { concurrency: lsStreamConcurrency }) - stream.end() - return stream - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.ls = ls - -async function ls (cache) { - const entries = await lsStream(cache).collect() - return entries.reduce((acc, xs) => { - acc[xs.key] = xs - return acc - }, {}) -} - -module.exports.bucketEntries = bucketEntries - -async function bucketEntries (bucket, filter) { - const data = await readFile(bucket, 'utf8') - return _bucketEntries(data, filter) -} - -function _bucketEntries (data) { - const entries = [] - data.split('\n').forEach((entry) => { - if (!entry) { - return - } - - const pieces = entry.split('\t') - if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { - // Hash is no good! Corruption or malice? Doesn't matter! - // EJECT EJECT - return - } - let obj - try { - obj = JSON.parse(pieces[1]) - } catch (_) { - // eslint-ignore-next-line no-empty-block - } - // coverage disabled here, no need to test with an entry that parses to something falsey - // istanbul ignore else - if (obj) { - entries.push(obj) - } - }) - return entries -} - -module.exports.bucketDir = bucketDir - -function bucketDir (cache) { - return path.join(cache, `index-v${indexV}`) -} - -module.exports.bucketPath = bucketPath - -function bucketPath (cache, key) { - const hashed = hashKey(key) - return path.join.apply( - path, - [bucketDir(cache)].concat(hashToSegments(hashed)) - ) -} - -module.exports.hashKey = hashKey - -function hashKey (key) { - return hash(key, 'sha256') -} - -module.exports.hashEntry = hashEntry - -function hashEntry (str) { - return hash(str, 'sha1') -} - -function hash (str, digest) { - return crypto - .createHash(digest) - .update(str) - .digest('hex') -} - -function formatEntry (cache, entry, keepAll) { - // Treat null digests as deletions. They'll shadow any previous entries. - if (!entry.integrity && !keepAll) { - return null - } - - return { - key: entry.key, - integrity: entry.integrity, - path: entry.integrity ? 
contentPath(cache, entry.integrity) : undefined, - size: entry.size, - time: entry.time, - metadata: entry.metadata, - } -} - -function readdirOrEmpty (dir) { - return readdir(dir).catch((err) => { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { - return [] - } - - throw err - }) -} diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/lib/get.js b/node_modules/@npmcli/run-script/node_modules/cacache/lib/get.js deleted file mode 100644 index 80ec206c7ecaa..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/lib/get.js +++ /dev/null @@ -1,170 +0,0 @@ -'use strict' - -const Collect = require('minipass-collect') -const { Minipass } = require('minipass') -const Pipeline = require('minipass-pipeline') - -const index = require('./entry-index') -const memo = require('./memoization') -const read = require('./content/read') - -async function getData (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - } - - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - const data = await read(cache, entry.integrity, { integrity, size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} -module.exports = getData - -async function getDataByDigest (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, key, opts) - if (memoized && memoize !== false) { - return memoized - } - - const res = await read(cache, key, { integrity, size }) - if (memoize) { - memo.put.byDigest(cache, key, res, opts) - } - return res -} -module.exports.byDigest = getDataByDigest - -const getMemoizedStream = (memoized) => { - const stream = new Minipass() - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(memoized.entry.metadata) - ev === 'integrity' && cb(memoized.entry.integrity) - ev === 'size' && cb(memoized.entry.size) - }) - stream.end(memoized.data) - return stream -} - -function getStream (cache, key, opts = {}) { - const { memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return getMemoizedStream(memoized) - } - - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const entry = await index.find(cache, key) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - - stream.emit('metadata', entry.metadata) - stream.emit('integrity', entry.integrity) - stream.emit('size', entry.size) - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(entry.metadata) - ev === 'integrity' && cb(entry.integrity) - ev === 'size' && cb(entry.size) - }) - - const src = read.readStream( - cache, - entry.integrity, - { ...opts, size: typeof size !== 'number' ? 
entry.size : size } - ) - - if (memoize) { - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put(cache, entry, data, opts)) - stream.unshift(memoStream) - } - stream.unshift(src) - return stream - }).catch((err) => stream.emit('error', err)) - - return stream -} - -module.exports.stream = getStream - -function getStreamDigest (cache, integrity, opts = {}) { - const { memoize } = opts - const memoized = memo.get.byDigest(cache, integrity, opts) - if (memoized && memoize !== false) { - const stream = new Minipass() - stream.end(memoized) - return stream - } else { - const stream = read.readStream(cache, integrity, opts) - if (!memoize) { - return stream - } - - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put.byDigest( - cache, - integrity, - data, - opts - )) - return new Pipeline(stream, memoStream) - } -} - -module.exports.stream.byDigest = getStreamDigest - -function info (cache, key, opts = {}) { - const { memoize } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve(memoized.entry) - } else { - return index.find(cache, key) - } -} -module.exports.info = info - -async function copy (cache, key, dest, opts = {}) { - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - await read.copy(cache, entry.integrity, dest, opts) - return { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} - -module.exports.copy = copy - -async function copyByDigest (cache, key, dest, opts = {}) { - await read.copy(cache, key, dest, opts) - return key -} - -module.exports.copy.byDigest = copyByDigest - -module.exports.hasContent = read.hasContent diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/lib/index.js b/node_modules/@npmcli/run-script/node_modules/cacache/lib/index.js deleted file mode 100644 index c9b0da5f3a271..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/lib/index.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -const get = require('./get.js') -const put = require('./put.js') -const rm = require('./rm.js') -const verify = require('./verify.js') -const { clearMemoized } = require('./memoization.js') -const tmp = require('./util/tmp.js') -const index = require('./entry-index.js') - -module.exports.index = {} -module.exports.index.compact = index.compact -module.exports.index.insert = index.insert - -module.exports.ls = index.ls -module.exports.ls.stream = index.lsStream - -module.exports.get = get -module.exports.get.byDigest = get.byDigest -module.exports.get.stream = get.stream -module.exports.get.stream.byDigest = get.stream.byDigest -module.exports.get.copy = get.copy -module.exports.get.copy.byDigest = get.copy.byDigest -module.exports.get.info = get.info -module.exports.get.hasContent = get.hasContent - -module.exports.put = put -module.exports.put.stream = put.stream - -module.exports.rm = rm.entry -module.exports.rm.all = rm.all -module.exports.rm.entry = module.exports.rm -module.exports.rm.content = rm.content - -module.exports.clearMemoized = clearMemoized - -module.exports.tmp = {} -module.exports.tmp.mkdir = tmp.mkdir -module.exports.tmp.withTmp = tmp.withTmp - -module.exports.verify = verify -module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/lib/memoization.js b/node_modules/@npmcli/run-script/node_modules/cacache/lib/memoization.js deleted file mode 
100644 index 2ecc60912e456..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/lib/memoization.js +++ /dev/null @@ -1,72 +0,0 @@ -'use strict' - -const { LRUCache } = require('lru-cache') - -const MEMOIZED = new LRUCache({ - max: 500, - maxSize: 50 * 1024 * 1024, // 50MB - ttl: 3 * 60 * 1000, // 3 minutes - sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, -}) - -module.exports.clearMemoized = clearMemoized - -function clearMemoized () { - const old = {} - MEMOIZED.forEach((v, k) => { - old[k] = v - }) - MEMOIZED.clear() - return old -} - -module.exports.put = put - -function put (cache, entry, data, opts) { - pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) - putDigest(cache, entry.integrity, data, opts) -} - -module.exports.put.byDigest = putDigest - -function putDigest (cache, integrity, data, opts) { - pickMem(opts).set(`digest:${cache}:${integrity}`, data) -} - -module.exports.get = get - -function get (cache, key, opts) { - return pickMem(opts).get(`key:${cache}:${key}`) -} - -module.exports.get.byDigest = getDigest - -function getDigest (cache, integrity, opts) { - return pickMem(opts).get(`digest:${cache}:${integrity}`) -} - -class ObjProxy { - constructor (obj) { - this.obj = obj - } - - get (key) { - return this.obj[key] - } - - set (key, val) { - this.obj[key] = val - } -} - -function pickMem (opts) { - if (!opts || !opts.memoize) { - return MEMOIZED - } else if (opts.memoize.get && opts.memoize.set) { - return opts.memoize - } else if (typeof opts.memoize === 'object') { - return new ObjProxy(opts.memoize) - } else { - return MEMOIZED - } -} diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/lib/put.js b/node_modules/@npmcli/run-script/node_modules/cacache/lib/put.js deleted file mode 100644 index 9fc932d5f6dec..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/lib/put.js +++ /dev/null @@ -1,80 +0,0 @@ -'use strict' - -const index = require('./entry-index') -const memo = require('./memoization') -const write = require('./content/write') -const Flush = require('minipass-flush') -const { PassThrough } = require('minipass-collect') -const Pipeline = require('minipass-pipeline') - -const putOpts = (opts) => ({ - algorithms: ['sha512'], - ...opts, -}) - -module.exports = putData - -async function putData (cache, key, data, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - const res = await write(cache, data, opts) - const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return res.integrity -} - -module.exports.stream = putStream - -function putStream (cache, key, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - let integrity - let size - let error - - let memoData - const pipeline = new Pipeline() - // first item in the pipeline is the memoizer, because we need - // that to end first and get the collected data. - if (memoize) { - const memoizer = new PassThrough().on('collect', data => { - memoData = data - }) - pipeline.push(memoizer) - } - - // contentStream is a write-only, not a passthrough - // no data comes out of it. 
- const contentStream = write.stream(cache, opts) - .on('integrity', (int) => { - integrity = int - }) - .on('size', (s) => { - size = s - }) - .on('error', (err) => { - error = err - }) - - pipeline.push(contentStream) - - // last but not least, we write the index and emit hash and size, - // and memoize if we're doing that - pipeline.push(new Flush({ - async flush () { - if (!error) { - const entry = await index.insert(cache, key, integrity, { ...opts, size }) - if (memoize && memoData) { - memo.put(cache, entry, memoData, opts) - } - pipeline.emit('integrity', integrity) - pipeline.emit('size', size) - } - }, - })) - - return pipeline -} diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/lib/rm.js b/node_modules/@npmcli/run-script/node_modules/cacache/lib/rm.js deleted file mode 100644 index a94760c7cf243..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/lib/rm.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -const { rm } = require('fs/promises') -const glob = require('./util/glob.js') -const index = require('./entry-index') -const memo = require('./memoization') -const path = require('path') -const rmContent = require('./content/rm') - -module.exports = entry -module.exports.entry = entry - -function entry (cache, key, opts) { - memo.clearMemoized() - return index.delete(cache, key, opts) -} - -module.exports.content = content - -function content (cache, integrity) { - memo.clearMemoized() - return rmContent(cache, integrity) -} - -module.exports.all = all - -async function all (cache) { - memo.clearMemoized() - const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) - return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) -} diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/lib/util/glob.js b/node_modules/@npmcli/run-script/node_modules/cacache/lib/util/glob.js deleted file mode 100644 index 8500c1c16a429..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/lib/util/glob.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -const { glob } = require('glob') -const path = require('path') - -const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) -module.exports = (path, options) => glob(globify(path), options) diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/@npmcli/run-script/node_modules/cacache/lib/util/hash-to-segments.js deleted file mode 100644 index 445599b503808..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/lib/util/hash-to-segments.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -module.exports = hashToSegments - -function hashToSegments (hash) { - return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] -} diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/lib/util/tmp.js b/node_modules/@npmcli/run-script/node_modules/cacache/lib/util/tmp.js deleted file mode 100644 index 0bf5302136ebe..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/lib/util/tmp.js +++ /dev/null @@ -1,26 +0,0 @@ -'use strict' - -const { withTempDir } = require('@npmcli/fs') -const fs = require('fs/promises') -const path = require('path') - -module.exports.mkdir = mktmpdir - -async function mktmpdir (cache, opts = {}) { - const { tmpPrefix } = opts - const tmpDir = path.join(cache, 'tmp') - await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) - // do not use path.join(), it drops the trailing / if tmpPrefix is 
unset - const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` - return fs.mkdtemp(target, { owner: 'inherit' }) -} - -module.exports.withTmp = withTmp - -function withTmp (cache, opts, cb) { - if (!cb) { - cb = opts - opts = {} - } - return withTempDir(path.join(cache, 'tmp'), cb, opts) -} diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/lib/verify.js b/node_modules/@npmcli/run-script/node_modules/cacache/lib/verify.js deleted file mode 100644 index d7423da1295b6..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/lib/verify.js +++ /dev/null @@ -1,257 +0,0 @@ -'use strict' - -const { - mkdir, - readFile, - rm, - stat, - truncate, - writeFile, -} = require('fs/promises') -const pMap = require('p-map') -const contentPath = require('./content/path') -const fsm = require('fs-minipass') -const glob = require('./util/glob.js') -const index = require('./entry-index') -const path = require('path') -const ssri = require('ssri') - -const hasOwnProperty = (obj, key) => - Object.prototype.hasOwnProperty.call(obj, key) - -const verifyOpts = (opts) => ({ - concurrency: 20, - log: { silly () {} }, - ...opts, -}) - -module.exports = verify - -async function verify (cache, opts) { - opts = verifyOpts(opts) - opts.log.silly('verify', 'verifying cache at', cache) - - const steps = [ - markStartTime, - fixPerms, - garbageCollect, - rebuildIndex, - cleanTmp, - writeVerifile, - markEndTime, - ] - - const stats = {} - for (const step of steps) { - const label = step.name - const start = new Date() - const s = await step(cache, opts) - if (s) { - Object.keys(s).forEach((k) => { - stats[k] = s[k] - }) - } - const end = new Date() - if (!stats.runTime) { - stats.runTime = {} - } - stats.runTime[label] = end - start - } - stats.runTime.total = stats.endTime - stats.startTime - opts.log.silly( - 'verify', - 'verification finished for', - cache, - 'in', - `${stats.runTime.total}ms` - ) - return stats -} - -async function markStartTime () { - return { startTime: new Date() } -} - -async function markEndTime () { - return { endTime: new Date() } -} - -async function fixPerms (cache, opts) { - opts.log.silly('verify', 'fixing cache permissions') - await mkdir(cache, { recursive: true }) - return null -} - -// Implements a naive mark-and-sweep tracing garbage collector. -// -// The algorithm is basically as follows: -// 1. Read (and filter) all index entries ("pointers") -// 2. Mark each integrity value as "live" -// 3. Read entire filesystem tree in `content-vX/` dir -// 4. If content is live, verify its checksum and delete it if it fails -// 5. If content is not marked as live, rm it. 
-// -async function garbageCollect (cache, opts) { - opts.log.silly('verify', 'garbage collecting content') - const indexStream = index.lsStream(cache) - const liveContent = new Set() - indexStream.on('data', (entry) => { - if (opts.filter && !opts.filter(entry)) { - return - } - - // integrity is stringified, re-parse it so we can get each hash - const integrity = ssri.parse(entry.integrity) - for (const algo in integrity) { - liveContent.add(integrity[algo].toString()) - } - }) - await new Promise((resolve, reject) => { - indexStream.on('end', resolve).on('error', reject) - }) - const contentDir = contentPath.contentDir(cache) - const files = await glob(path.join(contentDir, '**'), { - follow: false, - nodir: true, - nosort: true, - }) - const stats = { - verifiedContent: 0, - reclaimedCount: 0, - reclaimedSize: 0, - badContentCount: 0, - keptSize: 0, - } - await pMap( - files, - async (f) => { - const split = f.split(/[/\\]/) - const digest = split.slice(split.length - 3).join('') - const algo = split[split.length - 4] - const integrity = ssri.fromHex(digest, algo) - if (liveContent.has(integrity.toString())) { - const info = await verifyContent(f, integrity) - if (!info.valid) { - stats.reclaimedCount++ - stats.badContentCount++ - stats.reclaimedSize += info.size - } else { - stats.verifiedContent++ - stats.keptSize += info.size - } - } else { - // No entries refer to this content. We can delete. - stats.reclaimedCount++ - const s = await stat(f) - await rm(f, { recursive: true, force: true }) - stats.reclaimedSize += s.size - } - return stats - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function verifyContent (filepath, sri) { - const contentInfo = {} - try { - const { size } = await stat(filepath) - contentInfo.size = size - contentInfo.valid = true - await ssri.checkStream(new fsm.ReadStream(filepath), sri) - } catch (err) { - if (err.code === 'ENOENT') { - return { size: 0, valid: false } - } - if (err.code !== 'EINTEGRITY') { - throw err - } - - await rm(filepath, { recursive: true, force: true }) - contentInfo.valid = false - } - return contentInfo -} - -async function rebuildIndex (cache, opts) { - opts.log.silly('verify', 'rebuilding index') - const entries = await index.ls(cache) - const stats = { - missingContent: 0, - rejectedEntries: 0, - totalEntries: 0, - } - const buckets = {} - for (const k in entries) { - /* istanbul ignore else */ - if (hasOwnProperty(entries, k)) { - const hashed = index.hashKey(k) - const entry = entries[k] - const excluded = opts.filter && !opts.filter(entry) - excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) { - buckets[hashed].push(entry) - } else if (buckets[hashed] && excluded) { - // skip - } else if (excluded) { - buckets[hashed] = [] - buckets[hashed]._path = index.bucketPath(cache, k) - } else { - buckets[hashed] = [entry] - buckets[hashed]._path = index.bucketPath(cache, k) - } - } - } - await pMap( - Object.keys(buckets), - (key) => { - return rebuildBucket(cache, buckets[key], stats, opts) - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function rebuildBucket (cache, bucket, stats) { - await truncate(bucket._path) - // This needs to be serialized because cacache explicitly - // lets very racy bucket conflicts clobber each other. 
- for (const entry of bucket) { - const content = contentPath(cache, entry.integrity) - try { - await stat(content) - await index.insert(cache, entry.key, entry.integrity, { - metadata: entry.metadata, - size: entry.size, - time: entry.time, - }) - stats.totalEntries++ - } catch (err) { - if (err.code === 'ENOENT') { - stats.rejectedEntries++ - stats.missingContent++ - } else { - throw err - } - } - } -} - -function cleanTmp (cache, opts) { - opts.log.silly('verify', 'cleaning tmp directory') - return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) -} - -async function writeVerifile (cache, opts) { - const verifile = path.join(cache, '_lastverified') - opts.log.silly('verify', 'writing verifile to ' + verifile) - return writeFile(verifile, `${Date.now()}`) -} - -module.exports.lastRun = lastRun - -async function lastRun (cache) { - const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) - return new Date(+data) -} diff --git a/node_modules/@npmcli/run-script/node_modules/cacache/package.json b/node_modules/@npmcli/run-script/node_modules/cacache/package.json deleted file mode 100644 index 6e6219158ed75..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/cacache/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "cacache", - "version": "18.0.4", - "cache-version": { - "content": "2", - "index": "5" - }, - "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "test": "tap", - "snap": "tap", - "coverage": "tap", - "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "npmclilint": "npmcli-lint", - "lintfix": "npm run lint -- --fix", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/cacache.git" - }, - "keywords": [ - "cache", - "caching", - "content-addressable", - "sri", - "sri hash", - "subresource integrity", - "cache", - "storage", - "store", - "file store", - "filesystem", - "disk cache", - "disk storage" - ], - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^3.1.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^10.0.1", - "minipass": "^7.0.3", - "minipass-collect": "^2.0.1", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "tap": "^16.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "windowsCI": false, - "version": "4.22.0", - "publish": "true" - }, - "author": "GitHub Inc.", - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/@npmcli/run-script/node_modules/isexe/LICENSE b/node_modules/@npmcli/run-script/node_modules/isexe/LICENSE deleted file mode 100644 index c925dbe826b67..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/isexe/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2016-2022 Isaac Z. 
Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/index.js b/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/index.js deleted file mode 100644 index cefcb66b5c543..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/index.js +++ /dev/null @@ -1,46 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.sync = exports.isexe = exports.posix = exports.win32 = void 0; -const posix = __importStar(require("./posix.js")); -exports.posix = posix; -const win32 = __importStar(require("./win32.js")); -exports.win32 = win32; -__exportStar(require("./options.js"), exports); -const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform; -const impl = platform === 'win32' ? win32 : posix; -/** - * Determine whether a path is executable on the current platform. - */ -exports.isexe = impl.isexe; -/** - * Synchronously determine whether a path is executable on the - * current platform. 
- */ -exports.sync = impl.sync; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/options.js b/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/options.js deleted file mode 100644 index 0dfad0762cc32..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/options.js +++ /dev/null @@ -1,3 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/package.json b/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/package.json deleted file mode 100644 index 5bbefffbabee3..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/posix.js b/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/posix.js deleted file mode 100644 index 3bc5e79d7007e..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/posix.js +++ /dev/null @@ -1,67 +0,0 @@ -"use strict"; -/** - * This is the Posix implementation of isexe, which uses the file - * mode and uid/gid values. - * - * @module - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.sync = exports.isexe = void 0; -const fs_1 = require("fs"); -const promises_1 = require("fs/promises"); -/** - * Determine whether a path is executable according to the mode and - * current (or specified) user and group IDs. - */ -const isexe = async (path, options = {}) => { - const { ignoreErrors = false } = options; - try { - return checkStat(await (0, promises_1.stat)(path), options); - } - catch (e) { - const er = e; - if (ignoreErrors || er.code === 'EACCES') - return false; - throw er; - } -}; -exports.isexe = isexe; -/** - * Synchronously determine whether a path is executable according to - * the mode and current (or specified) user and group IDs. - */ -const sync = (path, options = {}) => { - const { ignoreErrors = false } = options; - try { - return checkStat((0, fs_1.statSync)(path), options); - } - catch (e) { - const er = e; - if (ignoreErrors || er.code === 'EACCES') - return false; - throw er; - } -}; -exports.sync = sync; -const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options); -const checkMode = (stat, options) => { - const myUid = options.uid ?? process.getuid?.(); - const myGroups = options.groups ?? process.getgroups?.() ?? []; - const myGid = options.gid ?? process.getgid?.() ?? 
myGroups[0]; - if (myUid === undefined || myGid === undefined) { - throw new Error('cannot get uid or gid'); - } - const groups = new Set([myGid, ...myGroups]); - const mod = stat.mode; - const uid = stat.uid; - const gid = stat.gid; - const u = parseInt('100', 8); - const g = parseInt('010', 8); - const o = parseInt('001', 8); - const ug = u | g; - return !!(mod & o || - (mod & g && groups.has(gid)) || - (mod & u && uid === myUid) || - (mod & ug && myUid === 0)); -}; -//# sourceMappingURL=posix.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/win32.js b/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/win32.js deleted file mode 100644 index fa7a4d2f7d240..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/isexe/dist/cjs/win32.js +++ /dev/null @@ -1,62 +0,0 @@ -"use strict"; -/** - * This is the Windows implementation of isexe, which uses the file - * extension and PATHEXT setting. - * - * @module - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.sync = exports.isexe = void 0; -const fs_1 = require("fs"); -const promises_1 = require("fs/promises"); -/** - * Determine whether a path is executable based on the file extension - * and PATHEXT environment variable (or specified pathExt option) - */ -const isexe = async (path, options = {}) => { - const { ignoreErrors = false } = options; - try { - return checkStat(await (0, promises_1.stat)(path), path, options); - } - catch (e) { - const er = e; - if (ignoreErrors || er.code === 'EACCES') - return false; - throw er; - } -}; -exports.isexe = isexe; -/** - * Synchronously determine whether a path is executable based on the file - * extension and PATHEXT environment variable (or specified pathExt option) - */ -const sync = (path, options = {}) => { - const { ignoreErrors = false } = options; - try { - return checkStat((0, fs_1.statSync)(path), path, options); - } - catch (e) { - const er = e; - if (ignoreErrors || er.code === 'EACCES') - return false; - throw er; - } -}; -exports.sync = sync; -const checkPathExt = (path, options) => { - const { pathExt = process.env.PATHEXT || '' } = options; - const peSplit = pathExt.split(';'); - if (peSplit.indexOf('') !== -1) { - return true; - } - for (let i = 0; i < peSplit.length; i++) { - const p = peSplit[i].toLowerCase(); - const ext = path.substring(path.length - p.length).toLowerCase(); - if (p && ext === p) { - return true; - } - } - return false; -}; -const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options); -//# sourceMappingURL=win32.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/index.js b/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/index.js deleted file mode 100644 index 1e309acd7355e..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/index.js +++ /dev/null @@ -1,16 +0,0 @@ -import * as posix from './posix.js'; -import * as win32 from './win32.js'; -export * from './options.js'; -export { win32, posix }; -const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform; -const impl = platform === 'win32' ? win32 : posix; -/** - * Determine whether a path is executable on the current platform. - */ -export const isexe = impl.isexe; -/** - * Synchronously determine whether a path is executable on the - * current platform. 
- */ -export const sync = impl.sync; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/options.js b/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/options.js deleted file mode 100644 index e9ded40bd5b2c..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/options.js +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/package.json b/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/package.json deleted file mode 100644 index 3dbc1ca591c05..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git a/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/posix.js b/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/posix.js deleted file mode 100644 index c453776c0452f..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/posix.js +++ /dev/null @@ -1,62 +0,0 @@ -/** - * This is the Posix implementation of isexe, which uses the file - * mode and uid/gid values. - * - * @module - */ -import { statSync } from 'fs'; -import { stat } from 'fs/promises'; -/** - * Determine whether a path is executable according to the mode and - * current (or specified) user and group IDs. - */ -export const isexe = async (path, options = {}) => { - const { ignoreErrors = false } = options; - try { - return checkStat(await stat(path), options); - } - catch (e) { - const er = e; - if (ignoreErrors || er.code === 'EACCES') - return false; - throw er; - } -}; -/** - * Synchronously determine whether a path is executable according to - * the mode and current (or specified) user and group IDs. - */ -export const sync = (path, options = {}) => { - const { ignoreErrors = false } = options; - try { - return checkStat(statSync(path), options); - } - catch (e) { - const er = e; - if (ignoreErrors || er.code === 'EACCES') - return false; - throw er; - } -}; -const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options); -const checkMode = (stat, options) => { - const myUid = options.uid ?? process.getuid?.(); - const myGroups = options.groups ?? process.getgroups?.() ?? []; - const myGid = options.gid ?? process.getgid?.() ?? myGroups[0]; - if (myUid === undefined || myGid === undefined) { - throw new Error('cannot get uid or gid'); - } - const groups = new Set([myGid, ...myGroups]); - const mod = stat.mode; - const uid = stat.uid; - const gid = stat.gid; - const u = parseInt('100', 8); - const g = parseInt('010', 8); - const o = parseInt('001', 8); - const ug = u | g; - return !!(mod & o || - (mod & g && groups.has(gid)) || - (mod & u && uid === myUid) || - (mod & ug && myUid === 0)); -}; -//# sourceMappingURL=posix.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/win32.js b/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/win32.js deleted file mode 100644 index a354ee2a5115c..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/isexe/dist/mjs/win32.js +++ /dev/null @@ -1,57 +0,0 @@ -/** - * This is the Windows implementation of isexe, which uses the file - * extension and PATHEXT setting. 
- * - * @module - */ -import { statSync } from 'fs'; -import { stat } from 'fs/promises'; -/** - * Determine whether a path is executable based on the file extension - * and PATHEXT environment variable (or specified pathExt option) - */ -export const isexe = async (path, options = {}) => { - const { ignoreErrors = false } = options; - try { - return checkStat(await stat(path), path, options); - } - catch (e) { - const er = e; - if (ignoreErrors || er.code === 'EACCES') - return false; - throw er; - } -}; -/** - * Synchronously determine whether a path is executable based on the file - * extension and PATHEXT environment variable (or specified pathExt option) - */ -export const sync = (path, options = {}) => { - const { ignoreErrors = false } = options; - try { - return checkStat(statSync(path), path, options); - } - catch (e) { - const er = e; - if (ignoreErrors || er.code === 'EACCES') - return false; - throw er; - } -}; -const checkPathExt = (path, options) => { - const { pathExt = process.env.PATHEXT || '' } = options; - const peSplit = pathExt.split(';'); - if (peSplit.indexOf('') !== -1) { - return true; - } - for (let i = 0; i < peSplit.length; i++) { - const p = peSplit[i].toLowerCase(); - const ext = path.substring(path.length - p.length).toLowerCase(); - if (p && ext === p) { - return true; - } - } - return false; -}; -const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options); -//# sourceMappingURL=win32.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/run-script/node_modules/isexe/package.json b/node_modules/@npmcli/run-script/node_modules/isexe/package.json deleted file mode 100644 index a0e2cd04bfdbf..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/isexe/package.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "name": "isexe", - "version": "3.1.1", - "description": "Minimal module to check if a file is executable.", - "main": "./dist/cjs/index.js", - "module": "./dist/mjs/index.js", - "types": "./dist/cjs/index.js", - "files": [ - "dist" - ], - "exports": { - ".": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" - }, - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.js" - } - }, - "./posix": { - "import": { - "types": "./dist/mjs/posix.d.ts", - "default": "./dist/mjs/posix.js" - }, - "require": { - "types": "./dist/cjs/posix.d.ts", - "default": "./dist/cjs/posix.js" - } - }, - "./win32": { - "import": { - "types": "./dist/mjs/win32.d.ts", - "default": "./dist/mjs/win32.js" - }, - "require": { - "types": "./dist/cjs/win32.d.ts", - "default": "./dist/cjs/win32.js" - } - }, - "./package.json": "./package.json" - }, - "devDependencies": { - "@types/node": "^20.4.5", - "@types/tap": "^15.0.8", - "c8": "^8.0.1", - "mkdirp": "^0.5.1", - "prettier": "^2.8.8", - "rimraf": "^2.5.0", - "sync-content": "^1.0.2", - "tap": "^16.3.8", - "ts-node": "^10.9.1", - "typedoc": "^0.24.8", - "typescript": "^5.1.6" - }, - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "prepare": "tsc -p tsconfig/cjs.json && tsc -p tsconfig/esm.json && bash ./scripts/fixup.sh", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", - "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache", - "typedoc": "typedoc --tsconfig tsconfig/esm.json ./src/*.ts" - }, - "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", - "license": "ISC", - "tap": { - "coverage": false, - "node-arg": [ - "--enable-source-maps", - "--no-warnings", - "--loader", - "ts-node/esm" - ], - "ts": false - }, - "prettier": { - "semi": false, - "printWidth": 75, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "repository": "https://github.com/isaacs/isexe", - "engines": { - "node": ">=16" - } -} diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/LICENSE b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/LICENSE deleted file mode 100644 index 1808eb2844231..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/LICENSE +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright 2017-2022 (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/entry.js deleted file mode 100644 index bfcfacbcc95e1..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/entry.js +++ /dev/null @@ -1,471 +0,0 @@ -const { Request, Response } = require('minipass-fetch') -const { Minipass } = require('minipass') -const MinipassFlush = require('minipass-flush') -const cacache = require('cacache') -const url = require('url') - -const CachingMinipassPipeline = require('../pipeline.js') -const CachePolicy = require('./policy.js') -const cacheKey = require('./key.js') -const remote = require('../remote.js') - -const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) - -// allow list for request headers that will be written to the cache index -// note: we will also store any request headers -// that are named in a response's vary header -const KEEP_REQUEST_HEADERS = [ - 'accept-charset', - 'accept-encoding', - 'accept-language', - 'accept', - 'cache-control', -] - -// allow list for response headers that will be written to the cache index -// note: we must not store the real response's age header, or when we load -// a cache policy based on the metadata it will think the cached response -// is always stale -const KEEP_RESPONSE_HEADERS = [ - 'cache-control', - 'content-encoding', - 'content-language', - 'content-type', - 'date', - 'etag', - 'expires', - 'last-modified', - 'link', - 'location', - 'pragma', - 'vary', -] - -// return an object containing all metadata to be written to the index -const getMetadata = (request, response, options) => { - const metadata = { - time: Date.now(), - url: request.url, - reqHeaders: {}, - resHeaders: {}, - - // options on which we must match the request and vary the response - options: { - compress: 
options.compress != null ? options.compress : request.compress, - }, - } - - // only save the status if it's not a 200 or 304 - if (response.status !== 200 && response.status !== 304) { - metadata.status = response.status - } - - for (const name of KEEP_REQUEST_HEADERS) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - - // if the request's host header differs from the host in the url - // we need to keep it, otherwise it's just noise and we ignore it - const host = request.headers.get('host') - const parsedUrl = new url.URL(request.url) - if (host && parsedUrl.host !== host) { - metadata.reqHeaders.host = host - } - - // if the response has a vary header, make sure - // we store the relevant request headers too - if (response.headers.has('vary')) { - const vary = response.headers.get('vary') - // a vary of "*" means every header causes a different response. - // in that scenario, we do not include any additional headers - // as the freshness check will always fail anyway and we don't - // want to bloat the cache indexes - if (vary !== '*') { - // copy any other request headers that will vary the response - const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) - for (const name of varyHeaders) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - } - } - - for (const name of KEEP_RESPONSE_HEADERS) { - if (response.headers.has(name)) { - metadata.resHeaders[name] = response.headers.get(name) - } - } - - for (const name of options.cacheAdditionalHeaders) { - if (response.headers.has(name)) { - metadata.resHeaders[name] = response.headers.get(name) - } - } - - return metadata -} - -// symbols used to hide objects that may be lazily evaluated in a getter -const _request = Symbol('request') -const _response = Symbol('response') -const _policy = Symbol('policy') - -class CacheEntry { - constructor ({ entry, request, response, options }) { - if (entry) { - this.key = entry.key - this.entry = entry - // previous versions of this module didn't write an explicit timestamp in - // the metadata, so fall back to the entry's timestamp. 
we can't use the - // entry timestamp to determine staleness because cacache will update it - // when it verifies its data - this.entry.metadata.time = this.entry.metadata.time || this.entry.time - } else { - this.key = cacheKey(request) - } - - this.options = options - - // these properties are behind getters that lazily evaluate - this[_request] = request - this[_response] = response - this[_policy] = null - } - - // returns a CacheEntry instance that satisfies the given request - // or undefined if no existing entry satisfies - static async find (request, options) { - try { - // compacts the index and returns an array of unique entries - var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { - const entryA = new CacheEntry({ entry: A, options }) - const entryB = new CacheEntry({ entry: B, options }) - return entryA.policy.satisfies(entryB.request) - }, { - validateEntry: (entry) => { - // clean out entries with a buggy content-encoding value - if (entry.metadata && - entry.metadata.resHeaders && - entry.metadata.resHeaders['content-encoding'] === null) { - return false - } - - // if an integrity is null, it needs to have a status specified - if (entry.integrity === null) { - return !!(entry.metadata && entry.metadata.status) - } - - return true - }, - }) - } catch (err) { - // if the compact request fails, ignore the error and return - return - } - - // a cache mode of 'reload' means to behave as though we have no cache - // on the way to the network. return undefined to allow cacheFetch to - // create a brand new request no matter what. - if (options.cache === 'reload') { - return - } - - // find the specific entry that satisfies the request - let match - for (const entry of matches) { - const _entry = new CacheEntry({ - entry, - options, - }) - - if (_entry.policy.satisfies(request)) { - match = _entry - break - } - } - - return match - } - - // if the user made a PUT/POST/PATCH then we invalidate our - // cache for the same url by deleting the index entirely - static async invalidate (request, options) { - const key = cacheKey(request) - try { - await cacache.rm.entry(options.cachePath, key, { removeFully: true }) - } catch (err) { - // ignore errors - } - } - - get request () { - if (!this[_request]) { - this[_request] = new Request(this.entry.metadata.url, { - method: 'GET', - headers: this.entry.metadata.reqHeaders, - ...this.entry.metadata.options, - }) - } - - return this[_request] - } - - get response () { - if (!this[_response]) { - this[_response] = new Response(null, { - url: this.entry.metadata.url, - counter: this.options.counter, - status: this.entry.metadata.status || 200, - headers: { - ...this.entry.metadata.resHeaders, - 'content-length': this.entry.size, - }, - }) - } - - return this[_response] - } - - get policy () { - if (!this[_policy]) { - this[_policy] = new CachePolicy({ - entry: this.entry, - request: this.request, - response: this.response, - options: this.options, - }) - } - - return this[_policy] - } - - // wraps the response in a pipeline that stores the data - // in the cache while the user consumes it - async store (status) { - // if we got a status other than 200, 301, or 308, - // or the CachePolicy forbid storage, append the - // cache status header and return it untouched - if ( - this.request.method !== 'GET' || - ![200, 301, 308].includes(this.response.status) || - !this.policy.storable() - ) { - this.response.headers.set('x-local-cache-status', 'skip') - return this.response - } - - const size = 
this.response.headers.get('content-length') - const cacheOpts = { - algorithms: this.options.algorithms, - metadata: getMetadata(this.request, this.response, this.options), - size, - integrity: this.options.integrity, - integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, - } - - let body = null - // we only set a body if the status is a 200, redirects are - // stored as metadata only - if (this.response.status === 200) { - let cacheWriteResolve, cacheWriteReject - const cacheWritePromise = new Promise((resolve, reject) => { - cacheWriteResolve = resolve - cacheWriteReject = reject - }).catch((err) => { - body.emit('error', err) - }) - - body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ - flush () { - return cacheWritePromise - }, - })) - // this is always true since if we aren't reusing the one from the remote fetch, we - // are using the one from cacache - body.hasIntegrityEmitter = true - - const onResume = () => { - const tee = new Minipass() - const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) - // re-emit the integrity and size events on our new response body so they can be reused - cacheStream.on('integrity', i => body.emit('integrity', i)) - cacheStream.on('size', s => body.emit('size', s)) - // stick a flag on here so downstream users will know if they can expect integrity events - tee.pipe(cacheStream) - // TODO if the cache write fails, log a warning but return the response anyway - // eslint-disable-next-line promise/catch-or-return - cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) - body.unshift(tee) - body.unshift(this.response.body) - } - - body.once('resume', onResume) - body.once('end', () => body.removeListener('resume', onResume)) - } else { - await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) - } - - // note: we do not set the x-local-cache-hash header because we do not know - // the hash value until after the write to the cache completes, which doesn't - // happen until after the response has been sent and it's too late to write - // the header anyway - this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) - this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) - this.response.headers.set('x-local-cache-mode', 'stream') - this.response.headers.set('x-local-cache-status', status) - this.response.headers.set('x-local-cache-time', new Date().toISOString()) - const newResponse = new Response(body, { - url: this.response.url, - status: this.response.status, - headers: this.response.headers, - counter: this.options.counter, - }) - return newResponse - } - - // use the cached data to create a response and return it - async respond (method, options, status) { - let response - if (method === 'HEAD' || [301, 308].includes(this.response.status)) { - // if the request is a HEAD, or the response is a redirect, - // then the metadata in the entry already includes everything - // we need to build a response - response = this.response - } else { - // we're responding with a full cached response, so create a body - // that reads from cacache and attach it to a new Response - const body = new Minipass() - const headers = { ...this.policy.responseHeaders() } - - const onResume = () => { - const cacheStream = cacache.get.stream.byDigest( - this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } - ) - cacheStream.on('error', async (err) => { - cacheStream.pause() - if 
(err.code === 'EINTEGRITY') { - await cacache.rm.content( - this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } - ) - } - if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { - await CacheEntry.invalidate(this.request, this.options) - } - body.emit('error', err) - cacheStream.resume() - }) - // emit the integrity and size events based on our metadata so we're consistent - body.emit('integrity', this.entry.integrity) - body.emit('size', Number(headers['content-length'])) - cacheStream.pipe(body) - } - - body.once('resume', onResume) - body.once('end', () => body.removeListener('resume', onResume)) - response = new Response(body, { - url: this.entry.metadata.url, - counter: options.counter, - status: 200, - headers, - }) - } - - response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) - response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) - response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) - response.headers.set('x-local-cache-mode', 'stream') - response.headers.set('x-local-cache-status', status) - response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) - return response - } - - // use the provided request along with this cache entry to - // revalidate the stored response. returns a response, either - // from the cache or from the update - async revalidate (request, options) { - const revalidateRequest = new Request(request, { - headers: this.policy.revalidationHeaders(request), - }) - - try { - // NOTE: be sure to remove the headers property from the - // user supplied options, since we have already defined - // them on the new request object. if they're still in the - // options then those will overwrite the ones from the policy - var response = await remote(revalidateRequest, { - ...options, - headers: undefined, - }) - } catch (err) { - // if the network fetch fails, return the stale - // cached response unless it has a cache-control - // of 'must-revalidate' - if (!this.policy.mustRevalidate) { - return this.respond(request.method, options, 'stale') - } - - throw err - } - - if (this.policy.revalidated(revalidateRequest, response)) { - // we got a 304, write a new index to the cache and respond from cache - const metadata = getMetadata(request, response, options) - // 304 responses do not include headers that are specific to the response data - // since they do not include a body, so we copy values for headers that were - // in the old cache entry to the new one, if the new metadata does not already - // include that header - for (const name of KEEP_RESPONSE_HEADERS) { - if ( - !hasOwnProperty(metadata.resHeaders, name) && - hasOwnProperty(this.entry.metadata.resHeaders, name) - ) { - metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] - } - } - - for (const name of options.cacheAdditionalHeaders) { - const inMeta = hasOwnProperty(metadata.resHeaders, name) - const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) - const inPolicy = hasOwnProperty(this.policy.response.headers, name) - - // if the header is in the existing entry, but it is not in the metadata - // then we need to write it to the metadata as this will refresh the on-disk cache - if (!inMeta && inEntry) { - metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] - } - // if the header is in the metadata, but not in the policy, then we need to set - // it in the policy so that it's included in the immediate response. 
future - // responses will load a new cache entry, so we don't need to change that - if (!inPolicy && inMeta) { - this.policy.response.headers[name] = metadata.resHeaders[name] - } - } - - try { - await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { - size: this.entry.size, - metadata, - }) - } catch (err) { - // if updating the cache index fails, we ignore it and - // respond anyway - } - return this.respond(request.method, options, 'revalidated') - } - - // if we got a modified response, create a new entry based on it - const newEntry = new CacheEntry({ - request, - response, - options, - }) - - // respond with the new entry while writing it to the cache - return newEntry.store('updated') - } -} - -module.exports = CacheEntry diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/errors.js deleted file mode 100644 index 67a66573bebe6..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/errors.js +++ /dev/null @@ -1,11 +0,0 @@ -class NotCachedError extends Error { - constructor (url) { - /* eslint-disable-next-line max-len */ - super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) - this.code = 'ENOTCACHED' - } -} - -module.exports = { - NotCachedError, -} diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/index.js deleted file mode 100644 index 0de49d23fb933..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/index.js +++ /dev/null @@ -1,49 +0,0 @@ -const { NotCachedError } = require('./errors.js') -const CacheEntry = require('./entry.js') -const remote = require('../remote.js') - -// do whatever is necessary to get a Response and return it -const cacheFetch = async (request, options) => { - // try to find a cached entry that satisfies this request - const entry = await CacheEntry.find(request, options) - if (!entry) { - // no cached result, if the cache mode is 'only-if-cached' that's a failure - if (options.cache === 'only-if-cached') { - throw new NotCachedError(request.url) - } - - // otherwise, we make a request, store it and return it - const response = await remote(request, options) - const newEntry = new CacheEntry({ request, response, options }) - return newEntry.store('miss') - } - - // we have a cached response that satisfies this request, however if the cache - // mode is 'no-cache' then we send the revalidation request no matter what - if (options.cache === 'no-cache') { - return entry.revalidate(request, options) - } - - // if the cached entry is not stale, or if the cache mode is 'force-cache' or - // 'only-if-cached' we can respond with the cached entry. set the status - // based on the result of needsRevalidation and respond - const _needsRevalidation = entry.policy.needsRevalidation(request) - if (options.cache === 'force-cache' || - options.cache === 'only-if-cached' || - !_needsRevalidation) { - return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') - } - - // if we got here, the cache entry is stale so revalidate it - return entry.revalidate(request, options) -} - -cacheFetch.invalidate = async (request, options) => { - if (!options.cachePath) { - return - } - - return CacheEntry.invalidate(request, options) -} - -module.exports = cacheFetch diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/key.js deleted file mode 100644 index f7684d562b7fa..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/key.js +++ /dev/null @@ -1,17 +0,0 @@ -const { URL, format } = require('url') - -// options passed to url.format() when generating a key -const formatOptions = { - auth: false, - fragment: false, - search: true, - unicode: false, -} - -// returns a string to be used as the cache key for the Request -const cacheKey = (request) => { - const parsed = new URL(request.url) - return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` -} - -module.exports = cacheKey diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/policy.js deleted file mode 100644 index ada3c8600dae9..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/cache/policy.js +++ /dev/null @@ -1,161 +0,0 @@ -const CacheSemantics = require('http-cache-semantics') -const Negotiator = require('negotiator') -const ssri = require('ssri') - -// options passed to http-cache-semantics constructor -const policyOptions = { - shared: false, - ignoreCargoCult: true, -} - -// a fake empty response, used when only testing the -// request for storability -const emptyResponse = { status: 200, headers: {} } - -// returns a plain object representation of the Request -const requestObject = (request) => { - const _obj = { - method: request.method, - url: request.url, - headers: {}, - compress: request.compress, - } - - request.headers.forEach((value, key) => { - _obj.headers[key] = value - }) - - return _obj -} - -// returns a plain object representation of the Response -const responseObject = (response) => { - const _obj = { - status: response.status, - headers: {}, - } - - response.headers.forEach((value, key) => { - _obj.headers[key] = value - }) - - return _obj -} - -class CachePolicy { - constructor ({ entry, request, response, options }) { - this.entry = entry - this.request = requestObject(request) - this.response = responseObject(response) - this.options = options - this.policy = new CacheSemantics(this.request, this.response, policyOptions) - - if (this.entry) { - // if we have an entry, copy the timestamp to the _responseTime - // this is necessary because the CacheSemantics constructor forces - // the value to Date.now() which means a policy created from a - // cache entry is likely to always identify itself as stale - this.policy._responseTime = this.entry.metadata.time - } - } - - // static method to quickly determine if a request alone is storable - static storable (request, options) { - // no cachePath means no caching - if (!options.cachePath) { - return false - } - - // user explicitly asked not to cache - if (options.cache === 'no-store') { - return false - } - - // we only cache GET and HEAD requests - if (!['GET', 'HEAD'].includes(request.method)) { - return false - } - - // otherwise, let http-cache-semantics make the decision - // based on the 
request's headers - const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) - return policy.storable() - } - - // returns true if the policy satisfies the request - satisfies (request) { - const _req = requestObject(request) - if (this.request.headers.host !== _req.headers.host) { - return false - } - - if (this.request.compress !== _req.compress) { - return false - } - - const negotiatorA = new Negotiator(this.request) - const negotiatorB = new Negotiator(_req) - - if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { - return false - } - - if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { - return false - } - - if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { - return false - } - - if (this.options.integrity) { - return ssri.parse(this.options.integrity).match(this.entry.integrity) - } - - return true - } - - // returns true if the request and response allow caching - storable () { - return this.policy.storable() - } - - // NOTE: this is a hack to avoid parsing the cache-control - // header ourselves, it returns true if the response's - // cache-control contains must-revalidate - get mustRevalidate () { - return !!this.policy._rescc['must-revalidate'] - } - - // returns true if the cached response requires revalidation - // for the given request - needsRevalidation (request) { - const _req = requestObject(request) - // force method to GET because we only cache GETs - // but can serve a HEAD from a cached GET - _req.method = 'GET' - return !this.policy.satisfiesWithoutRevalidation(_req) - } - - responseHeaders () { - return this.policy.responseHeaders() - } - - // returns a new object containing the appropriate headers - // to send a revalidation request - revalidationHeaders (request) { - const _req = requestObject(request) - return this.policy.revalidationHeaders(_req) - } - - // returns true if the request/response was revalidated - // successfully. returns false if a new response was received - revalidated (request, response) { - const _req = requestObject(request) - const _res = responseObject(response) - const policy = this.policy.revalidatedPolicy(_req, _res) - return !policy.modified - } -} - -module.exports = CachePolicy diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/fetch.js deleted file mode 100644 index 233ba67e16550..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/fetch.js +++ /dev/null @@ -1,118 +0,0 @@ -'use strict' - -const { FetchError, Request, isRedirect } = require('minipass-fetch') -const url = require('url') - -const CachePolicy = require('./cache/policy.js') -const cache = require('./cache/index.js') -const remote = require('./remote.js') - -// given a Request, a Response and user options -// return true if the response is a redirect that -// can be followed. 
we throw errors that will result -// in the fetch being rejected if the redirect is -// possible but invalid for some reason -const canFollowRedirect = (request, response, options) => { - if (!isRedirect(response.status)) { - return false - } - - if (options.redirect === 'manual') { - return false - } - - if (options.redirect === 'error') { - throw new FetchError(`redirect mode is set to error: ${request.url}`, - 'no-redirect', { code: 'ENOREDIRECT' }) - } - - if (!response.headers.has('location')) { - throw new FetchError(`redirect location header missing for: ${request.url}`, - 'no-location', { code: 'EINVALIDREDIRECT' }) - } - - if (request.counter >= request.follow) { - throw new FetchError(`maximum redirect reached at: ${request.url}`, - 'max-redirect', { code: 'EMAXREDIRECT' }) - } - - return true -} - -// given a Request, a Response, and the user's options return an object -// with a new Request and a new options object that will be used for -// following the redirect -const getRedirect = (request, response, options) => { - const _opts = { ...options } - const location = response.headers.get('location') - const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) - // Comment below is used under the following license: - /** - * @license - * Copyright (c) 2010-2012 Mikeal Rogers - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an "AS - * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language - * governing permissions and limitations under the License. - */ - - // Remove authorization if changing hostnames (but not if just - // changing ports or protocols). This matches the behavior of request: - // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 - if (new url.URL(request.url).hostname !== redirectUrl.hostname) { - request.headers.delete('authorization') - request.headers.delete('cookie') - } - - // for POST request with 301/302 response, or any request with 303 response, - // use GET when following redirect - if ( - response.status === 303 || - (request.method === 'POST' && [301, 302].includes(response.status)) - ) { - _opts.method = 'GET' - _opts.body = null - request.headers.delete('content-length') - } - - _opts.headers = {} - request.headers.forEach((value, key) => { - _opts.headers[key] = value - }) - - _opts.counter = ++request.counter - const redirectReq = new Request(url.format(redirectUrl), _opts) - return { - request: redirectReq, - options: _opts, - } -} - -const fetch = async (request, options) => { - const response = CachePolicy.storable(request, options) - ? 
await cache(request, options) - : await remote(request, options) - - // if the request wasn't a GET or HEAD, and the response - // status is between 200 and 399 inclusive, invalidate the - // request url - if (!['GET', 'HEAD'].includes(request.method) && - response.status >= 200 && - response.status <= 399) { - await cache.invalidate(request, options) - } - - if (!canFollowRedirect(request, response, options)) { - return response - } - - const redirect = getRedirect(request, response, options) - return fetch(redirect.request, redirect.options) -} - -module.exports = fetch diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/index.js b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/index.js deleted file mode 100644 index 2f12e8e1b6113..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/index.js +++ /dev/null @@ -1,41 +0,0 @@ -const { FetchError, Headers, Request, Response } = require('minipass-fetch') - -const configureOptions = require('./options.js') -const fetch = require('./fetch.js') - -const makeFetchHappen = (url, opts) => { - const options = configureOptions(opts) - - const request = new Request(url, options) - return fetch(request, options) -} - -makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { - if (typeof defaultUrl === 'object') { - defaultOptions = defaultUrl - defaultUrl = null - } - - const defaultedFetch = (url, options = {}) => { - const finalUrl = url || defaultUrl - const finalOptions = { - ...defaultOptions, - ...options, - headers: { - ...defaultOptions.headers, - ...options.headers, - }, - } - return wrappedFetch(finalUrl, finalOptions) - } - - defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => - makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) - return defaultedFetch -} - -module.exports = makeFetchHappen -module.exports.FetchError = FetchError -module.exports.Headers = Headers -module.exports.Request = Request -module.exports.Response = Response diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/options.js b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/options.js deleted file mode 100644 index f77511279f831..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/options.js +++ /dev/null @@ -1,54 +0,0 @@ -const dns = require('dns') - -const conditionalHeaders = [ - 'if-modified-since', - 'if-none-match', - 'if-unmodified-since', - 'if-match', - 'if-range', -] - -const configureOptions = (opts) => { - const { strictSSL, ...options } = { ...opts } - options.method = options.method ? 
options.method.toUpperCase() : 'GET' - options.rejectUnauthorized = strictSSL !== false - - if (!options.retry) { - options.retry = { retries: 0 } - } else if (typeof options.retry === 'string') { - const retries = parseInt(options.retry, 10) - if (isFinite(retries)) { - options.retry = { retries } - } else { - options.retry = { retries: 0 } - } - } else if (typeof options.retry === 'number') { - options.retry = { retries: options.retry } - } else { - options.retry = { retries: 0, ...options.retry } - } - - options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } - - options.cache = options.cache || 'default' - if (options.cache === 'default') { - const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { - return conditionalHeaders.includes(name.toLowerCase()) - }) - if (hasConditionalHeader) { - options.cache = 'no-store' - } - } - - options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] - - // cacheManager is deprecated, but if it's set and - // cachePath is not we should copy it to the new field - if (options.cacheManager && !options.cachePath) { - options.cachePath = options.cacheManager - } - - return options -} - -module.exports = configureOptions diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/pipeline.js deleted file mode 100644 index b1d221b2d0ce3..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/pipeline.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict' - -const MinipassPipeline = require('minipass-pipeline') - -class CachingMinipassPipeline extends MinipassPipeline { - #events = [] - #data = new Map() - - constructor (opts, ...streams) { - // CRITICAL: do NOT pass the streams to the call to super(), this will start - // the flow of data and potentially cause the events we need to catch to emit - // before we've finished our own setup. 
instead we call super() with no args, - // finish our setup, and then push the streams into ourselves to start the - // data flow - super() - this.#events = opts.events - - /* istanbul ignore next - coverage disabled because this is pointless to test here */ - if (streams.length) { - this.push(...streams) - } - } - - on (event, handler) { - if (this.#events.includes(event) && this.#data.has(event)) { - return handler(...this.#data.get(event)) - } - - return super.on(event, handler) - } - - emit (event, ...data) { - if (this.#events.includes(event)) { - this.#data.set(event, data) - } - - return super.emit(event, ...data) - } -} - -module.exports = CachingMinipassPipeline diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/remote.js b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/remote.js deleted file mode 100644 index 8554564074de6..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/lib/remote.js +++ /dev/null @@ -1,131 +0,0 @@ -const { Minipass } = require('minipass') -const fetch = require('minipass-fetch') -const promiseRetry = require('promise-retry') -const ssri = require('ssri') -const { log } = require('proc-log') - -const CachingMinipassPipeline = require('./pipeline.js') -const { getAgent } = require('@npmcli/agent') -const pkg = require('../package.json') - -const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` - -const RETRY_ERRORS = [ - 'ECONNRESET', // remote socket closed on us - 'ECONNREFUSED', // remote host refused to open connection - 'EADDRINUSE', // failed to bind to a local port (proxy?) - 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW - // from @npmcli/agent - 'ECONNECTIONTIMEOUT', - 'EIDLETIMEOUT', - 'ERESPONSETIMEOUT', - 'ETRANSFERTIMEOUT', - // Known codes we do NOT retry on: - // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) - // EINVALIDPROXY // invalid protocol from @npmcli/agent - // EINVALIDRESPONSE // invalid status code from @npmcli/agent -] - -const RETRY_TYPES = [ - 'request-timeout', -] - -// make a request directly to the remote source, -// retrying certain classes of errors as well as -// following redirects (through the cache if necessary) -// and verifying response integrity -const remoteFetch = (request, options) => { - const agent = getAgent(request.url, options) - if (!request.headers.has('connection')) { - request.headers.set('connection', agent ? 
'keep-alive' : 'close') - } - - if (!request.headers.has('user-agent')) { - request.headers.set('user-agent', USER_AGENT) - } - - // keep our own options since we're overriding the agent - // and the redirect mode - const _opts = { - ...options, - agent, - redirect: 'manual', - } - - return promiseRetry(async (retryHandler, attemptNum) => { - const req = new fetch.Request(request, _opts) - try { - let res = await fetch(req, _opts) - if (_opts.integrity && res.status === 200) { - // we got a 200 response and the user has specified an expected - // integrity value, so wrap the response in an ssri stream to verify it - const integrityStream = ssri.integrityStream({ - algorithms: _opts.algorithms, - integrity: _opts.integrity, - size: _opts.size, - }) - const pipeline = new CachingMinipassPipeline({ - events: ['integrity', 'size'], - }, res.body, integrityStream) - // we also propagate the integrity and size events out to the pipeline so we can use - // this new response body as an integrityEmitter for cacache - integrityStream.on('integrity', i => pipeline.emit('integrity', i)) - integrityStream.on('size', s => pipeline.emit('size', s)) - res = new fetch.Response(pipeline, res) - // set an explicit flag so we know if our response body will emit integrity and size - res.body.hasIntegrityEmitter = true - } - - res.headers.set('x-fetch-attempts', attemptNum) - - // do not retry POST requests, or requests with a streaming body - // do retry requests with a 408, 420, 429 or 500+ status in the response - const isStream = Minipass.isStream(req.body) - const isRetriable = req.method !== 'POST' && - !isStream && - ([408, 420, 429].includes(res.status) || res.status >= 500) - - if (isRetriable) { - if (typeof options.onRetry === 'function') { - options.onRetry(res) - } - - /* eslint-disable-next-line max-len */ - log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`) - return retryHandler(res) - } - - return res - } catch (err) { - const code = (err.code === 'EPROMISERETRY') - ? err.retried.code - : err.code - - // err.retried will be the thing that was thrown from above - // if it's a response, we just got a bad status code and we - // can re-throw to allow the retry - const isRetryError = err.retried instanceof fetch.Response || - (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) - - if (req.method === 'POST' || isRetryError) { - throw err - } - - if (typeof options.onRetry === 'function') { - options.onRetry(err) - } - - log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`) - return retryHandler(err) - } - }, options.retry).catch((err) => { - // don't reject for http errors, just return them - if (err.status >= 400 && err.type !== 'system') { - return err - } - - throw err - }) -} - -module.exports = remoteFetch diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/node_modules/proc-log/LICENSE b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/node_modules/proc-log/LICENSE deleted file mode 100644 index 83837797202b7..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/node_modules/proc-log/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) GitHub, Inc. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/node_modules/proc-log/lib/index.js b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/node_modules/proc-log/lib/index.js deleted file mode 100644 index 86d90861078da..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/node_modules/proc-log/lib/index.js +++ /dev/null @@ -1,153 +0,0 @@ -const META = Symbol('proc-log.meta') -module.exports = { - META: META, - output: { - LEVELS: [ - 'standard', - 'error', - 'buffer', - 'flush', - ], - KEYS: { - standard: 'standard', - error: 'error', - buffer: 'buffer', - flush: 'flush', - }, - standard: function (...args) { - return process.emit('output', 'standard', ...args) - }, - error: function (...args) { - return process.emit('output', 'error', ...args) - }, - buffer: function (...args) { - return process.emit('output', 'buffer', ...args) - }, - flush: function (...args) { - return process.emit('output', 'flush', ...args) - }, - }, - log: { - LEVELS: [ - 'notice', - 'error', - 'warn', - 'info', - 'verbose', - 'http', - 'silly', - 'timing', - 'pause', - 'resume', - ], - KEYS: { - notice: 'notice', - error: 'error', - warn: 'warn', - info: 'info', - verbose: 'verbose', - http: 'http', - silly: 'silly', - timing: 'timing', - pause: 'pause', - resume: 'resume', - }, - error: function (...args) { - return process.emit('log', 'error', ...args) - }, - notice: function (...args) { - return process.emit('log', 'notice', ...args) - }, - warn: function (...args) { - return process.emit('log', 'warn', ...args) - }, - info: function (...args) { - return process.emit('log', 'info', ...args) - }, - verbose: function (...args) { - return process.emit('log', 'verbose', ...args) - }, - http: function (...args) { - return process.emit('log', 'http', ...args) - }, - silly: function (...args) { - return process.emit('log', 'silly', ...args) - }, - timing: function (...args) { - return process.emit('log', 'timing', ...args) - }, - pause: function () { - return process.emit('log', 'pause') - }, - resume: function () { - return process.emit('log', 'resume') - }, - }, - time: { - LEVELS: [ - 'start', - 'end', - ], - KEYS: { - start: 'start', - end: 'end', - }, - start: function (name, fn) { - process.emit('time', 'start', name) - function end () { - return process.emit('time', 'end', name) - } - if (typeof fn === 'function') { - const res = fn() - if (res && res.finally) { - return res.finally(end) - } - end() - return res - } - return end - }, - end: function (name) { - return process.emit('time', 'end', name) - }, - }, - input: { - LEVELS: [ - 'start', - 'end', - 'read', - ], - KEYS: { - start: 'start', - end: 'end', - read: 'read', - }, - start: function (fn) { - process.emit('input', 'start') - function end () { - return process.emit('input', 'end') - } - if (typeof fn === 'function') { - const res = fn() - if (res && res.finally) { - return res.finally(end) - } - end() - return res - } - return end - }, - end: function () { - return process.emit('input', 'end') - }, 
- read: function (...args) { - let resolve, reject - const promise = new Promise((_resolve, _reject) => { - resolve = _resolve - reject = _reject - }) - process.emit('input', 'read', resolve, reject, ...args) - return promise - }, - }, -} diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/node_modules/proc-log/package.json b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/node_modules/proc-log/package.json deleted file mode 100644 index 4ab89102ecc9b..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/node_modules/proc-log/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "proc-log", - "version": "4.2.0", - "files": [ - "bin/", - "lib/" - ], - "main": "lib/index.js", - "description": "just emit 'log' events on the process object", - "repository": { - "type": "git", - "url": "https://github.com/npm/proc-log.git" - }, - "author": "GitHub Inc.", - "license": "ISC", - "scripts": { - "test": "tap", - "snap": "tap", - "posttest": "npm run lint", - "postsnap": "eslint index.js test/*.js --fix", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "template-oss-apply": "template-oss-apply --force" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.3", - "tap": "^16.0.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.3", - "publish": true - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/package.json b/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/package.json deleted file mode 100644 index 7adb4d1e7f971..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/make-fetch-happen/package.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "name": "make-fetch-happen", - "version": "13.0.1", - "description": "Opinionated, caching, retrying fetch client", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "test": "tap", - "posttest": "npm run lint", - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", - "postlint": "template-oss-check", - "snap": "tap", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/make-fetch-happen.git" - }, - "keywords": [ - "http", - "request", - "fetch", - "mean girls", - "caching", - "cache", - "subresource integrity" - ], - "author": "GitHub Inc.", - "license": "ISC", - "dependencies": { - "@npmcli/agent": "^2.0.0", - "cacache": "^18.0.0", - "http-cache-semantics": "^4.1.1", - "is-lambda": "^1.0.1", - "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "proc-log": "^4.2.0", - "promise-retry": "^2.0.1", - "ssri": "^10.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.4", - "nock": "^13.2.4", - "safe-buffer": "^5.2.1", - "standard-version": "^9.3.2", - "tap": "^16.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - }, - "tap": { - "color": 1, - "files": "test/*.js", - "check-coverage": true, - "timeout": 60, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "templateOSS": 
{ - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.4", - "publish": "true" - } -} diff --git a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/LICENSE b/node_modules/@npmcli/run-script/node_modules/minipass-fetch/LICENSE deleted file mode 100644 index 3c3410cdc12ee..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Isaac Z. Schlueter and Contributors -Copyright (c) 2016 David Frank - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - ---- - -Note: This is a derivative work based on "node-fetch" by David Frank, -modified and distributed under the terms of the MIT license above. -https://github.com/bitinn/node-fetch diff --git a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/abort-error.js b/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/abort-error.js deleted file mode 100644 index b18f643269e37..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/abort-error.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict' -class AbortError extends Error { - constructor (message) { - super(message) - this.code = 'FETCH_ABORTED' - this.type = 'aborted' - Error.captureStackTrace(this, this.constructor) - } - - get name () { - return 'AbortError' - } - - // don't allow name to be overridden, but don't throw either - set name (s) {} -} -module.exports = AbortError diff --git a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/blob.js b/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/blob.js deleted file mode 100644 index 121b1730102e7..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/blob.js +++ /dev/null @@ -1,97 +0,0 @@ -'use strict' -const { Minipass } = require('minipass') -const TYPE = Symbol('type') -const BUFFER = Symbol('buffer') - -class Blob { - constructor (blobParts, options) { - this[TYPE] = '' - - const buffers = [] - let size = 0 - - if (blobParts) { - const a = blobParts - const length = Number(a.length) - for (let i = 0; i < length; i++) { - const element = a[i] - const buffer = element instanceof Buffer ? element - : ArrayBuffer.isView(element) - ? Buffer.from(element.buffer, element.byteOffset, element.byteLength) - : element instanceof ArrayBuffer ? Buffer.from(element) - : element instanceof Blob ? element[BUFFER] - : typeof element === 'string' ? 
Buffer.from(element) - : Buffer.from(String(element)) - size += buffer.length - buffers.push(buffer) - } - } - - this[BUFFER] = Buffer.concat(buffers, size) - - const type = options && options.type !== undefined - && String(options.type).toLowerCase() - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type - } - } - - get size () { - return this[BUFFER].length - } - - get type () { - return this[TYPE] - } - - text () { - return Promise.resolve(this[BUFFER].toString()) - } - - arrayBuffer () { - const buf = this[BUFFER] - const off = buf.byteOffset - const len = buf.byteLength - const ab = buf.buffer.slice(off, off + len) - return Promise.resolve(ab) - } - - stream () { - return new Minipass().end(this[BUFFER]) - } - - slice (start, end, type) { - const size = this.size - const relativeStart = start === undefined ? 0 - : start < 0 ? Math.max(size + start, 0) - : Math.min(start, size) - const relativeEnd = end === undefined ? size - : end < 0 ? Math.max(size + end, 0) - : Math.min(end, size) - const span = Math.max(relativeEnd - relativeStart, 0) - - const buffer = this[BUFFER] - const slicedBuffer = buffer.slice( - relativeStart, - relativeStart + span - ) - const blob = new Blob([], { type }) - blob[BUFFER] = slicedBuffer - return blob - } - - get [Symbol.toStringTag] () { - return 'Blob' - } - - static get BUFFER () { - return BUFFER - } -} - -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, -}) - -module.exports = Blob diff --git a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/body.js b/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/body.js deleted file mode 100644 index 62286bd1de0d9..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/body.js +++ /dev/null @@ -1,350 +0,0 @@ -'use strict' -const { Minipass } = require('minipass') -const MinipassSized = require('minipass-sized') - -const Blob = require('./blob.js') -const { BUFFER } = Blob -const FetchError = require('./fetch-error.js') - -// optional dependency on 'encoding' -let convert -try { - convert = require('encoding').convert -} catch (e) { - // defer error until textConverted is called -} - -const INTERNALS = Symbol('Body internals') -const CONSUME_BODY = Symbol('consumeBody') - -class Body { - constructor (bodyArg, options = {}) { - const { size = 0, timeout = 0 } = options - const body = bodyArg === undefined || bodyArg === null ? null - : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString()) - : isBlob(bodyArg) ? bodyArg - : Buffer.isBuffer(bodyArg) ? bodyArg - : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]' - ? Buffer.from(bodyArg) - : ArrayBuffer.isView(bodyArg) - ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength) - : Minipass.isStream(bodyArg) ? bodyArg - : Buffer.from(String(bodyArg)) - - this[INTERNALS] = { - body, - disturbed: false, - error: null, - } - - this.size = size - this.timeout = timeout - - if (Minipass.isStream(body)) { - body.on('error', er => { - const error = er.name === 'AbortError' ? 
er - : new FetchError(`Invalid response while trying to fetch ${ - this.url}: ${er.message}`, 'system', er) - this[INTERNALS].error = error - }) - } - } - - get body () { - return this[INTERNALS].body - } - - get bodyUsed () { - return this[INTERNALS].disturbed - } - - arrayBuffer () { - return this[CONSUME_BODY]().then(buf => - buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)) - } - - blob () { - const ct = this.headers && this.headers.get('content-type') || '' - return this[CONSUME_BODY]().then(buf => Object.assign( - new Blob([], { type: ct.toLowerCase() }), - { [BUFFER]: buf } - )) - } - - async json () { - const buf = await this[CONSUME_BODY]() - try { - return JSON.parse(buf.toString()) - } catch (er) { - throw new FetchError( - `invalid json response body at ${this.url} reason: ${er.message}`, - 'invalid-json' - ) - } - } - - text () { - return this[CONSUME_BODY]().then(buf => buf.toString()) - } - - buffer () { - return this[CONSUME_BODY]() - } - - textConverted () { - return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers)) - } - - [CONSUME_BODY] () { - if (this[INTERNALS].disturbed) { - return Promise.reject(new TypeError(`body used already for: ${ - this.url}`)) - } - - this[INTERNALS].disturbed = true - - if (this[INTERNALS].error) { - return Promise.reject(this[INTERNALS].error) - } - - // body is null - if (this.body === null) { - return Promise.resolve(Buffer.alloc(0)) - } - - if (Buffer.isBuffer(this.body)) { - return Promise.resolve(this.body) - } - - const upstream = isBlob(this.body) ? this.body.stream() : this.body - - /* istanbul ignore if: should never happen */ - if (!Minipass.isStream(upstream)) { - return Promise.resolve(Buffer.alloc(0)) - } - - const stream = this.size && upstream instanceof MinipassSized ? upstream - : !this.size && upstream instanceof Minipass && - !(upstream instanceof MinipassSized) ? upstream - : this.size ? new MinipassSized({ size: this.size }) - : new Minipass() - - // allow timeout on slow response body, but only if the stream is still writable. this - // makes the timeout center on the socket stream from lib/index.js rather than the - // intermediary minipass stream we create to receive the data - const resTimeout = this.timeout && stream.writable ? setTimeout(() => { - stream.emit('error', new FetchError( - `Response timeout while trying to fetch ${ - this.url} (over ${this.timeout}ms)`, 'body-timeout')) - }, this.timeout) : null - - // do not keep the process open just for this timeout, even - // though we expect it'll get cleared eventually. - if (resTimeout && resTimeout.unref) { - resTimeout.unref() - } - - // do the pipe in the promise, because the pipe() can send too much - // data through right away and upset the MP Sized object - return new Promise((resolve) => { - // if the stream is some other kind of stream, then pipe through a MP - // so we can collect it more easily. 
- if (stream !== upstream) { - upstream.on('error', er => stream.emit('error', er)) - upstream.pipe(stream) - } - resolve() - }).then(() => stream.concat()).then(buf => { - clearTimeout(resTimeout) - return buf - }).catch(er => { - clearTimeout(resTimeout) - // request was aborted, reject with this Error - if (er.name === 'AbortError' || er.name === 'FetchError') { - throw er - } else if (er.name === 'RangeError') { - throw new FetchError(`Could not create Buffer from response body for ${ - this.url}: ${er.message}`, 'system', er) - } else { - // other errors, such as incorrect content-encoding or content-length - throw new FetchError(`Invalid response body while trying to fetch ${ - this.url}: ${er.message}`, 'system', er) - } - }) - } - - static clone (instance) { - if (instance.bodyUsed) { - throw new Error('cannot clone body after it is used') - } - - const body = instance.body - - // check that body is a stream and not form-data object - // NB: can't clone the form-data object without having it as a dependency - if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') { - // create a dedicated tee stream so that we don't lose data - // potentially sitting in the body stream's buffer by writing it - // immediately to p1 and not having it for p2. - const tee = new Minipass() - const p1 = new Minipass() - const p2 = new Minipass() - tee.on('error', er => { - p1.emit('error', er) - p2.emit('error', er) - }) - body.on('error', er => tee.emit('error', er)) - tee.pipe(p1) - tee.pipe(p2) - body.pipe(tee) - // set instance body to one fork, return the other - instance[INTERNALS].body = p1 - return p2 - } else { - return instance.body - } - } - - static extractContentType (body) { - return body === null || body === undefined ? null - : typeof body === 'string' ? 'text/plain;charset=UTF-8' - : isURLSearchParams(body) - ? 'application/x-www-form-urlencoded;charset=UTF-8' - : isBlob(body) ? body.type || null - : Buffer.isBuffer(body) ? null - : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null - : ArrayBuffer.isView(body) ? null - : typeof body.getBoundary === 'function' - ? `multipart/form-data;boundary=${body.getBoundary()}` - : Minipass.isStream(body) ? null - : 'text/plain;charset=UTF-8' - } - - static getTotalBytes (instance) { - const { body } = instance - return (body === null || body === undefined) ? 0 - : isBlob(body) ? body.size - : Buffer.isBuffer(body) ? body.length - : body && typeof body.getLengthSync === 'function' && ( - // detect form data input from form-data module - body._lengthRetrievers && - /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x - body.hasKnownLength && body.hasKnownLength()) // 2.x - ? body.getLengthSync() - : null - } - - static writeToStream (dest, instance) { - const { body } = instance - - if (body === null || body === undefined) { - dest.end() - } else if (Buffer.isBuffer(body) || typeof body === 'string') { - dest.end(body) - } else { - // body is stream or blob - const stream = isBlob(body) ? body.stream() : body - stream.on('error', er => dest.emit('error', er)).pipe(dest) - } - - return dest - } -} - -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true }, -}) - -const isURLSearchParams = obj => - // Duck-typing as a necessary condition. 
- (typeof obj !== 'object' || - typeof obj.append !== 'function' || - typeof obj.delete !== 'function' || - typeof obj.get !== 'function' || - typeof obj.getAll !== 'function' || - typeof obj.has !== 'function' || - typeof obj.set !== 'function') ? false - // Brand-checking and more duck-typing as optional condition. - : obj.constructor.name === 'URLSearchParams' || - Object.prototype.toString.call(obj) === '[object URLSearchParams]' || - typeof obj.sort === 'function' - -const isBlob = obj => - typeof obj === 'object' && - typeof obj.arrayBuffer === 'function' && - typeof obj.type === 'string' && - typeof obj.stream === 'function' && - typeof obj.constructor === 'function' && - typeof obj.constructor.name === 'string' && - /^(Blob|File)$/.test(obj.constructor.name) && - /^(Blob|File)$/.test(obj[Symbol.toStringTag]) - -const convertBody = (buffer, headers) => { - /* istanbul ignore if */ - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function') - } - - const ct = headers && headers.get('content-type') - let charset = 'utf-8' - let res - - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct) - } - - // no charset in content type, peek at response body for at most 1024 bytes - const str = buffer.slice(0, 1024).toString() - - // html5 - if (!res && str) { - res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str) - } - - // html4 - if (!res && str) { - res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str) - if (res) { - res = /charset=(.*)/i.exec(res.pop()) - if (res) { - res.pop() - } - } - } - - // xml - if (!res && str) { - res = /<\?xml.+?encoding=(['"])(.+?)\1/.exec(str) - } - - // found charset - if (res) { - charset = res.pop() - - // prevent decode issues when sites use incorrect encoding - // ref: https://hsivonen.fi/encoding-menu/ - if (charset === 'gb2312' || charset === 'gbk') { - charset = 'gb18030' - } - } - - // turn raw buffers into a single utf-8 buffer - return convert( - buffer, - 'UTF-8', - charset - ).toString() -} - -module.exports = Body diff --git a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/fetch-error.js b/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/fetch-error.js deleted file mode 100644 --- a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/fetch-error.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' -class FetchError extends Error { - constructor (message, type, systemError) { - super(message) - this.code = 'FETCH_ERROR' - - // pick up code, expected, path, ... - if (systemError) { - this.code = this.errno = systemError.code - this.erroredSysCall = systemError.syscall - } - - // the EBADSIZE error from minipass-sized carries the found and expect values - this.type = this.code === 'EBADSIZE' && this.found > this.expect - ? 'max-size' : type - this.message = message - Error.captureStackTrace(this, this.constructor) - } - - get name () { - return 'FetchError' - } - - // don't allow name to be overwritten - set name (n) {} - - get [Symbol.toStringTag] () { - return 'FetchError' - } -} -module.exports = FetchError diff --git a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/headers.js b/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/headers.js deleted file mode 100644 index dd6e854d5ba39..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/headers.js +++ /dev/null @@ -1,267 +0,0 @@ -'use strict' -const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/ -const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/ - -const validateName = name => { - name = `${name}` - if (invalidTokenRegex.test(name) || name === '') { - throw new TypeError(`${name} is not a legal HTTP header name`) - } -} - -const validateValue = value => { - value = `${value}` - if (invalidHeaderCharRegex.test(value)) { - throw new TypeError(`${value} is not a legal HTTP header value`) - } -} - -const find = (map, name) => { - name = name.toLowerCase() - for (const key in map) { - if (key.toLowerCase() === name) { - return key - } - } - return undefined -} - -const MAP = Symbol('map') -class Headers { - constructor (init = undefined) { - this[MAP] = Object.create(null) - if (init instanceof Headers) { - const rawHeaders = init.raw() - const headerNames = Object.keys(rawHeaders) - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value) - } - } - return - } - - // no-op - if (init === undefined || init === null) { - return - } - - if (typeof init === 'object') { - const method = init[Symbol.iterator] - if (method !== null && method !== undefined) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable') - } - - // sequence<sequence<ByteString>> - // Note: per spec we have to first exhaust the lists then process them - const pairs = [] - for (const pair of init) { - if (typeof pair !== 'object' || - typeof pair[Symbol.iterator] !== 'function') { - throw 
new TypeError('Each header pair must be iterable') - } - const arrPair = Array.from(pair) - if (arrPair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple') - } - pairs.push(arrPair) - } - - for (const pair of pairs) { - this.append(pair[0], pair[1]) - } - } else { - // record - for (const key of Object.keys(init)) { - this.append(key, init[key]) - } - } - } else { - throw new TypeError('Provided initializer must be an object') - } - } - - get (name) { - name = `${name}` - validateName(name) - const key = find(this[MAP], name) - if (key === undefined) { - return null - } - - return this[MAP][key].join(', ') - } - - forEach (callback, thisArg = undefined) { - let pairs = getHeaders(this) - for (let i = 0; i < pairs.length; i++) { - const [name, value] = pairs[i] - callback.call(thisArg, value, name, this) - // refresh in case the callback added more headers - pairs = getHeaders(this) - } - } - - set (name, value) { - name = `${name}` - value = `${value}` - validateName(name) - validateValue(value) - const key = find(this[MAP], name) - this[MAP][key !== undefined ? key : name] = [value] - } - - append (name, value) { - name = `${name}` - value = `${value}` - validateName(name) - validateValue(value) - const key = find(this[MAP], name) - if (key !== undefined) { - this[MAP][key].push(value) - } else { - this[MAP][name] = [value] - } - } - - has (name) { - name = `${name}` - validateName(name) - return find(this[MAP], name) !== undefined - } - - delete (name) { - name = `${name}` - validateName(name) - const key = find(this[MAP], name) - if (key !== undefined) { - delete this[MAP][key] - } - } - - raw () { - return this[MAP] - } - - keys () { - return new HeadersIterator(this, 'key') - } - - values () { - return new HeadersIterator(this, 'value') - } - - [Symbol.iterator] () { - return new HeadersIterator(this, 'key+value') - } - - entries () { - return new HeadersIterator(this, 'key+value') - } - - get [Symbol.toStringTag] () { - return 'Headers' - } - - static exportNodeCompatibleHeaders (headers) { - const obj = Object.assign(Object.create(null), headers[MAP]) - - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host') - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0] - } - - return obj - } - - static createHeadersLenient (obj) { - const headers = new Headers() - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue - } - - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue - } - - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val] - } else { - headers[MAP][name].push(val) - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]] - } - } - return headers - } -} - -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true }, -}) - -const getHeaders = (headers, kind = 'key+value') => - Object.keys(headers[MAP]).sort().map( - kind === 'key' ? k => k.toLowerCase() - : kind === 'value' ? 
k => headers[MAP][k].join(', ') - : k => [k.toLowerCase(), headers[MAP][k].join(', ')] - ) - -const INTERNAL = Symbol('internal') - -class HeadersIterator { - constructor (target, kind) { - this[INTERNAL] = { - target, - kind, - index: 0, - } - } - - get [Symbol.toStringTag] () { - return 'HeadersIterator' - } - - next () { - /* istanbul ignore if: should be impossible */ - if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) { - throw new TypeError('Value of `this` is not a HeadersIterator') - } - - const { target, kind, index } = this[INTERNAL] - const values = getHeaders(target, kind) - const len = values.length - if (index >= len) { - return { - value: undefined, - done: true, - } - } - - this[INTERNAL].index++ - - return { value: values[index], done: false } - } -} - -// manually extend because 'extends' requires a ctor -Object.setPrototypeOf(HeadersIterator.prototype, - Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))) - -module.exports = Headers diff --git a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/index.js b/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/index.js deleted file mode 100644 index da402161670e6..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/index.js +++ /dev/null @@ -1,377 +0,0 @@ -'use strict' -const { URL } = require('url') -const http = require('http') -const https = require('https') -const zlib = require('minizlib') -const { Minipass } = require('minipass') - -const Body = require('./body.js') -const { writeToStream, getTotalBytes } = Body -const Response = require('./response.js') -const Headers = require('./headers.js') -const { createHeadersLenient } = Headers -const Request = require('./request.js') -const { getNodeRequestOptions } = Request -const FetchError = require('./fetch-error.js') -const AbortError = require('./abort-error.js') - -// XXX this should really be split up and unit-ized for easier testing -// and better DRY implementation of data/http request aborting -const fetch = async (url, opts) => { - if (/^data:/.test(url)) { - const request = new Request(url, opts) - // delay 1 promise tick so that the consumer can abort right away - return Promise.resolve().then(() => new Promise((resolve, reject) => { - let type, data - try { - const { pathname, search } = new URL(url) - const split = pathname.split(',') - if (split.length < 2) { - throw new Error('invalid data: URI') - } - const mime = split.shift() - const base64 = /;base64$/.test(mime) - type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime - const rawData = decodeURIComponent(split.join(',') + search) - data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData) - } catch (er) { - return reject(new FetchError(`[${request.method}] ${ - request.url} invalid URL, ${er.message}`, 'system', er)) - } - - const { signal } = request - if (signal && signal.aborted) { - return reject(new AbortError('The user aborted a request.')) - } - - const headers = { 'Content-Length': data.length } - if (type) { - headers['Content-Type'] = type - } - return resolve(new Response(data, { headers })) - })) - } - - return new Promise((resolve, reject) => { - // build request object - const request = new Request(url, opts) - let options - try { - options = getNodeRequestOptions(request) - } catch (er) { - return reject(er) - } - - const send = (options.protocol === 'https:' ? 
https : http).request - const { signal } = request - let response = null - const abort = () => { - const error = new AbortError('The user aborted a request.') - reject(error) - if (Minipass.isStream(request.body) && - typeof request.body.destroy === 'function') { - request.body.destroy(error) - } - if (response && response.body) { - response.body.emit('error', error) - } - } - - if (signal && signal.aborted) { - return abort() - } - - const abortAndFinalize = () => { - abort() - finalize() - } - - const finalize = () => { - req.abort() - if (signal) { - signal.removeEventListener('abort', abortAndFinalize) - } - clearTimeout(reqTimeout) - } - - // send request - const req = send(options) - - if (signal) { - signal.addEventListener('abort', abortAndFinalize) - } - - let reqTimeout = null - if (request.timeout) { - req.once('socket', () => { - reqTimeout = setTimeout(() => { - reject(new FetchError(`network timeout at: ${ - request.url}`, 'request-timeout')) - finalize() - }, request.timeout) - }) - } - - req.on('error', er => { - // if a 'response' event is emitted before the 'error' event, then by the - // time this handler is run it's too late to reject the Promise for the - // response. instead, we forward the error event to the response stream - // so that the error will surface to the user when they try to consume - // the body. this is done as a side effect of aborting the request except - // for in windows, where we must forward the event manually, otherwise - // there is no longer a ref'd socket attached to the request and the - // stream never ends so the event loop runs out of work and the process - // exits without warning. - // coverage skipped here due to the difficulty in testing - // istanbul ignore next - if (req.res) { - req.res.emit('error', er) - } - reject(new FetchError(`request to ${request.url} failed, reason: ${ - er.message}`, 'system', er)) - finalize() - }) - - req.on('response', res => { - clearTimeout(reqTimeout) - - const headers = createHeadersLenient(res.headers) - - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location') - - // HTTP fetch step 5.3 - let locationURL = null - try { - locationURL = location === null ? null : new URL(location, request.url).toString() - } catch { - // error here can only be invalid URL in Location: header - // do not throw when options.redirect == manual - // let the user extract the errorneous redirect URL - if (request.redirect !== 'manual') { - /* eslint-disable-next-line max-len */ - reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')) - finalize() - return - } - } - - // HTTP fetch step 5.5 - if (request.redirect === 'error') { - reject(new FetchError('uri requested responds with a redirect, ' + - `redirect mode is set to error: ${request.url}`, 'no-redirect')) - finalize() - return - } else if (request.redirect === 'manual') { - // node-fetch-specific step: make manual redirect a bit easier to - // use by setting the Location header value to the resolved URL. 
- if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL) - } catch (err) { - /* istanbul ignore next: nodejs server prevent invalid - response headers, we can't test this through normal - request */ - reject(err) - } - } - } else if (request.redirect === 'follow' && locationURL !== null) { - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${ - request.url}`, 'max-redirect')) - finalize() - return - } - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && - request.body && - getTotalBytes(request) === null) { - reject(new FetchError( - 'Cannot follow redirect with body being a readable stream', - 'unsupported-redirect' - )) - finalize() - return - } - - // Update host due to redirection - request.headers.set('host', (new URL(locationURL)).host) - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. - const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - } - - // if the redirect is to a new hostname, strip the authorization and cookie headers - const parsedOriginal = new URL(request.url) - const parsedRedirect = new URL(locationURL) - if (parsedOriginal.hostname !== parsedRedirect.hostname) { - requestOpts.headers.delete('authorization') - requestOpts.headers.delete('cookie') - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || ( - (res.statusCode === 301 || res.statusCode === 302) && - request.method === 'POST' - )) { - requestOpts.method = 'GET' - requestOpts.body = undefined - requestOpts.headers.delete('content-length') - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))) - finalize() - return - } - } // end if(isRedirect) - - // prepare response - res.once('end', () => - signal && signal.removeEventListener('abort', abortAndFinalize)) - - const body = new Minipass() - // if an error occurs, either on the response stream itself, on one of the - // decoder streams, or a response length timeout from the Body class, we - // forward the error through to our internal body stream. If we see an - // error event on that, we call finalize to abort the request and ensure - // we don't leave a socket believing a request is in flight. - // this is difficult to test, so lacks specific coverage. - body.on('error', finalize) - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - res.on('error', /* istanbul ignore next */ er => body.emit('error', er)) - res.on('data', (chunk) => body.write(chunk)) - res.on('end', () => body.end()) - - const responseOptions = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter, - trailer: new Promise(resolveTrailer => - res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))), - } - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding') - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. 
no content response (204) - // 5. content not modified response (304) - if (!request.compress || - request.method === 'HEAD' || - codings === null || - res.statusCode === 204 || - res.statusCode === 304) { - response = new Response(body, responseOptions) - resolve(response) - return - } - - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - const zlibOptions = { - flush: zlib.constants.Z_SYNC_FLUSH, - finishFlush: zlib.constants.Z_SYNC_FLUSH, - } - - // for gzip - if (codings === 'gzip' || codings === 'x-gzip') { - const unzip = new zlib.Gunzip(zlibOptions) - response = new Response( - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip), - responseOptions - ) - resolve(response) - return - } - - // for deflate - if (codings === 'deflate' || codings === 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new Minipass()) - raw.once('data', chunk => { - // see http://stackoverflow.com/questions/37519828 - const decoder = (chunk[0] & 0x0F) === 0x08 - ? new zlib.Inflate() - : new zlib.InflateRaw() - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) - response = new Response(decoder, responseOptions) - resolve(response) - }) - return - } - - // for br - if (codings === 'br') { - // ignoring coverage so tests don't have to fake support (or lack of) for brotli - // istanbul ignore next - try { - var decoder = new zlib.BrotliDecompress() - } catch (err) { - reject(err) - finalize() - return - } - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. 
- body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) - response = new Response(decoder, responseOptions) - resolve(response) - return - } - - // otherwise, use response as-is - response = new Response(body, responseOptions) - resolve(response) - }) - - writeToStream(req, request) - }) -} - -module.exports = fetch - -fetch.isRedirect = code => - code === 301 || - code === 302 || - code === 303 || - code === 307 || - code === 308 - -fetch.Headers = Headers -fetch.Request = Request -fetch.Response = Response -fetch.FetchError = FetchError -fetch.AbortError = AbortError diff --git a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/request.js b/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/request.js deleted file mode 100644 index 054439e669910..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/request.js +++ /dev/null @@ -1,282 +0,0 @@ -'use strict' -const { URL } = require('url') -const { Minipass } = require('minipass') -const Headers = require('./headers.js') -const { exportNodeCompatibleHeaders } = Headers -const Body = require('./body.js') -const { clone, extractContentType, getTotalBytes } = Body - -const version = require('../package.json').version -const defaultUserAgent = - `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)` - -const INTERNALS = Symbol('Request internals') - -const isRequest = input => - typeof input === 'object' && typeof input[INTERNALS] === 'object' - -const isAbortSignal = signal => { - const proto = ( - signal - && typeof signal === 'object' - && Object.getPrototypeOf(signal) - ) - return !!(proto && proto.constructor.name === 'AbortSignal') -} - -class Request extends Body { - constructor (input, init = {}) { - const parsedURL = isRequest(input) ? new URL(input.url) - : input && input.href ? new URL(input.href) - : new URL(`${input}`) - - if (isRequest(input)) { - init = { ...input[INTERNALS], ...init } - } else if (!input || typeof input === 'string') { - input = {} - } - - const method = (init.method || input.method || 'GET').toUpperCase() - const isGETHEAD = method === 'GET' || method === 'HEAD' - - if ((init.body !== null && init.body !== undefined || - isRequest(input) && input.body !== null) && isGETHEAD) { - throw new TypeError('Request with GET/HEAD method cannot have body') - } - - const inputBody = init.body !== null && init.body !== undefined ? init.body - : isRequest(input) && input.body !== null ? clone(input) - : null - - super(inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0, - }) - - const headers = new Headers(init.headers || input.headers || {}) - - if (inputBody !== null && inputBody !== undefined && - !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody) - if (contentType) { - headers.append('Content-Type', contentType) - } - } - - const signal = 'signal' in init ? 
init.signal - : null - - if (signal !== null && signal !== undefined && !isAbortSignal(signal)) { - throw new TypeError('Expected signal must be an instanceof AbortSignal') - } - - // TLS specific options that are handled by node - const { - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0', - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } = init - - this[INTERNALS] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal, - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow - : input.follow !== undefined ? input.follow - : 20 - this.compress = init.compress !== undefined ? init.compress - : input.compress !== undefined ? input.compress - : true - this.counter = init.counter || input.counter || 0 - this.agent = init.agent || input.agent - } - - get method () { - return this[INTERNALS].method - } - - get url () { - return this[INTERNALS].parsedURL.toString() - } - - get headers () { - return this[INTERNALS].headers - } - - get redirect () { - return this[INTERNALS].redirect - } - - get signal () { - return this[INTERNALS].signal - } - - clone () { - return new Request(this) - } - - get [Symbol.toStringTag] () { - return 'Request' - } - - static getNodeRequestOptions (request) { - const parsedURL = request[INTERNALS].parsedURL - const headers = new Headers(request[INTERNALS].headers) - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*') - } - - // Basic fetch - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported') - } - - if (request.signal && - Minipass.isStream(request.body) && - typeof request.body.destroy !== 'function') { - throw new Error( - 'Cancellation of streamed requests with AbortSignal is not supported') - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - const contentLengthValue = - (request.body === null || request.body === undefined) && - /^(POST|PUT)$/i.test(request.method) ? '0' - : request.body !== null && request.body !== undefined - ? getTotalBytes(request) - : null - - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue + '') - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', defaultUserAgent) - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate') - } - - const agent = typeof request.agent === 'function' - ? 
request.agent(parsedURL) - : request.agent - - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close') - } - - // TLS specific options that are handled by node - const { - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } = request[INTERNALS] - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - // we cannot spread parsedURL directly, so we have to read each property one-by-one - // and map them to the equivalent https?.request() method options - const urlProps = { - auth: parsedURL.username || parsedURL.password - ? `${parsedURL.username}:${parsedURL.password}` - : '', - host: parsedURL.host, - hostname: parsedURL.hostname, - path: `${parsedURL.pathname}${parsedURL.search}`, - port: parsedURL.port, - protocol: parsedURL.protocol, - } - - return { - ...urlProps, - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent, - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - timeout: request.timeout, - } - } -} - -module.exports = Request - -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true }, -}) diff --git a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/response.js b/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/response.js deleted file mode 100644 index 54cb52db3594a..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/lib/response.js +++ /dev/null @@ -1,90 +0,0 @@ -'use strict' -const http = require('http') -const { STATUS_CODES } = http - -const Headers = require('./headers.js') -const Body = require('./body.js') -const { clone, extractContentType } = Body - -const INTERNALS = Symbol('Response internals') - -class Response extends Body { - constructor (body = null, opts = {}) { - super(body, opts) - - const status = opts.status || 200 - const headers = new Headers(opts.headers) - - if (body !== null && body !== undefined && !headers.has('Content-Type')) { - const contentType = extractContentType(body) - if (contentType) { - headers.append('Content-Type', contentType) - } - } - - this[INTERNALS] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter, - trailer: Promise.resolve(opts.trailer || new Headers()), - } - } - - get trailer () { - return this[INTERNALS].trailer - } - - get url () { - return this[INTERNALS].url || '' - } - - get status () { - return this[INTERNALS].status - } - - get ok () { - return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300 - } - - get redirected () { - return this[INTERNALS].counter > 0 - } - - get statusText () { - return this[INTERNALS].statusText - } - - get headers () { - return this[INTERNALS].headers - } - - clone () { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected, - trailer: this.trailer, - }) - } - - get [Symbol.toStringTag] () { - return 'Response' - } -} - -module.exports = 
Response - -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true }, -}) diff --git a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/package.json b/node_modules/@npmcli/run-script/node_modules/minipass-fetch/package.json deleted file mode 100644 index d491a7fba126d..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/minipass-fetch/package.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "name": "minipass-fetch", - "version": "3.0.5", - "description": "An implementation of window.fetch in Node.js using Minipass streams", - "license": "MIT", - "main": "lib/index.js", - "scripts": { - "test:tls-fixtures": "./test/fixtures/tls/setup.sh", - "test": "tap", - "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" - }, - "tap": { - "coverage-map": "map.js", - "check-coverage": true, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "@ungap/url-search-params": "^0.2.2", - "abort-controller": "^3.0.0", - "abortcontroller-polyfill": "~1.7.3", - "encoding": "^0.1.13", - "form-data": "^4.0.0", - "nock": "^13.2.4", - "parted": "^0.1.1", - "string-to-arraybuffer": "^1.0.2", - "tap": "^16.0.0" - }, - "dependencies": { - "minipass": "^7.0.3", - "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" - }, - "optionalDependencies": { - "encoding": "^0.1.13" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/minipass-fetch.git" - }, - "keywords": [ - "fetch", - "minipass", - "node-fetch", - "window.fetch" - ], - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "author": "GitHub Inc.", - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", - "publish": "true" - } -} diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/.release-please-manifest.json b/node_modules/@npmcli/run-script/node_modules/node-gyp/.release-please-manifest.json deleted file mode 100644 index 9e5fec54a4cb2..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/.release-please-manifest.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - ".": "10.3.1" -} diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/CODE_OF_CONDUCT.md b/node_modules/@npmcli/run-script/node_modules/node-gyp/CODE_OF_CONDUCT.md deleted file mode 100644 index 4c211405596cb..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,4 +0,0 @@ -# Code of Conduct - -* [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/master/CODE_OF_CONDUCT.md) -* [Node.js Moderation Policy](https://github.com/nodejs/admin/blob/master/Moderation-Policy.md) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/CONTRIBUTING.md b/node_modules/@npmcli/run-script/node_modules/node-gyp/CONTRIBUTING.md deleted file mode 100644 index 5b977898f104b..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/CONTRIBUTING.md +++ /dev/null @@ -1,34 +0,0 @@ -# Contributing to node-gyp - -## Code of Conduct - -Please read the -[Code of Conduct](https://github.com/nodejs/admin/blob/main/CODE_OF_CONDUCT.md) -which explains the minimum behavior expectations for node-gyp contributors. - - -## Developer's Certificate of Origin 1.1 - -By making a contribution to this project, I certify that: - -* (a) The contribution was created in whole or in part by me and I - have the right to submit it under the open source license - indicated in the file; or - -* (b) The contribution is based upon previous work that, to the best - of my knowledge, is covered under an appropriate open source - license and I have the right under that license to submit that - work with modifications, whether created in whole or in part - by me, under the same open source license (unless I am - permitted to submit under a different license), as indicated - in the file; or - -* (c) The contribution was provided directly to me by some other - person who certified (a), (b) or (c) and I have not modified - it. - -* (d) I understand and agree that this project and the contribution - are public and that a record of the contribution (including all - personal information I submit with it, including my sign-off) is - maintained indefinitely and may be redistributed consistent with - this project or the open source license(s) involved. 
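The hunks above delete npm's nested copy of minipass-fetch (along with the start of the nested node-gyp copy) in favor of the single hoisted copies at the top of `node_modules`; the public API is unchanged. For orientation, a minimal, hedged usage sketch of that API follows. The exports it exercises (`fetch` plus `fetch.Headers` and `fetch.FetchError`) are the ones visible in the deleted `lib/index.js` above; the URL is a made-up placeholder, not anything npm actually contacts:

```js
// Sketch only, not part of the diff: minimal use of minipass-fetch's
// node-fetch-compatible API as re-exported by lib/index.js.
const fetch = require('minipass-fetch')

async function ping () {
  // https://registry.example.test is a hypothetical endpoint for illustration.
  const res = await fetch('https://registry.example.test/-/ping', {
    headers: new fetch.Headers({ accept: 'application/json' }),
  })
  // Network-level failures reject with fetch.FetchError; non-2xx responses
  // resolve normally and are reported through res.ok / res.status.
  if (!res.ok) {
    throw new Error(`unexpected status: ${res.status}`)
  }
  return res.json()
}

ping().then(console.log, err => {
  console.error(err instanceof fetch.FetchError ? `${err.type}: ${err.message}` : err)
  process.exitCode = 1
})
```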
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/LICENSE b/node_modules/@npmcli/run-script/node_modules/node-gyp/LICENSE deleted file mode 100644 index 2ea4dc5efb872..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/LICENSE +++ /dev/null @@ -1,24 +0,0 @@ -(The MIT License) - -Copyright (c) 2012 Nathan Rajlich - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/SECURITY.md b/node_modules/@npmcli/run-script/node_modules/node-gyp/SECURITY.md deleted file mode 100644 index 1e168d76e3b1b..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/SECURITY.md +++ /dev/null @@ -1,2 +0,0 @@ -If you believe you have found a security issue in the software in this -repository, please consult https://github.com/nodejs/node/blob/HEAD/SECURITY.md. \ No newline at end of file diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/addon.gypi b/node_modules/@npmcli/run-script/node_modules/node-gyp/addon.gypi deleted file mode 100644 index b4ac369acb4f1..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/addon.gypi +++ /dev/null @@ -1,204 +0,0 @@ -{ - 'variables' : { - 'node_engine_include_dir%': 'deps/v8/include', - 'node_host_binary%': 'node', - 'node_with_ltcg%': 'true', - }, - 'target_defaults': { - 'type': 'loadable_module', - 'win_delay_load_hook': 'true', - 'product_prefix': '', - - 'conditions': [ - [ 'node_engine=="chakracore"', { - 'variables': { - 'node_engine_include_dir%': 'deps/chakrashim/include' - }, - }] - ], - - 'include_dirs': [ - '<(node_root_dir)/include/node', - '<(node_root_dir)/src', - '<(node_root_dir)/deps/openssl/config', - '<(node_root_dir)/deps/openssl/openssl/include', - '<(node_root_dir)/deps/uv/include', - '<(node_root_dir)/deps/zlib', - '<(node_root_dir)/<(node_engine_include_dir)' - ], - 'defines!': [ - 'BUILDING_UV_SHARED=1', # Inherited from common.gypi. - 'BUILDING_V8_SHARED=1', # Inherited from common.gypi. - ], - 'defines': [ - 'NODE_GYP_MODULE_NAME=>(_target_name)', - 'USING_UV_SHARED=1', - 'USING_V8_SHARED=1', - # Warn when using deprecated V8 APIs. - 'V8_DEPRECATION_WARNINGS=1' - ], - - 'target_conditions': [ - ['_type=="loadable_module"', { - 'product_extension': 'node', - 'defines': [ - 'BUILDING_NODE_EXTENSION' - ], - 'xcode_settings': { - 'OTHER_LDFLAGS': [ - '-undefined dynamic_lookup' - ], - }, - }], - - ['_type=="static_library"', { - # set to `1` to *disable* the -T thin archive 'ld' flag. 
-        # older linkers don't support this flag.
-        'standalone_static_library': '<(standalone_static_library)'
-      }],
-
-      ['_type!="executable"', {
-        'conditions': [
-          [ 'OS=="android"', {
-            'cflags!': [ '-fPIE' ],
-          }]
-        ]
-      }],
-
-      ['_win_delay_load_hook=="true"', {
-        # If the addon specifies `'win_delay_load_hook': 'true'` in its
-        # binding.gyp, link a delay-load hook into the DLL. This hook ensures
-        # that the addon will work regardless of whether the node/iojs binary
-        # is named node.exe, iojs.exe, or something else.
-        'conditions': [
-          [ 'OS=="win"', {
-            'defines': [ 'HOST_BINARY=\"<(node_host_binary)<(EXECUTABLE_SUFFIX)\"', ],
-            'sources': [
-              '<(node_gyp_dir)/src/win_delay_load_hook.cc',
-            ],
-            'msvs_settings': {
-              'VCLinkerTool': {
-                'DelayLoadDLLs': [ '<(node_host_binary)<(EXECUTABLE_SUFFIX)' ],
-                # Don't print a linker warning when no imports from either .exe
-                # are used.
-                'AdditionalOptions': [ '/ignore:4199' ],
-              },
-            },
-          }],
-        ],
-      }],
-    ],
-
-    'conditions': [
-      [ 'OS=="mac"', {
-        'defines': [
-          '_DARWIN_USE_64_BIT_INODE=1'
-        ],
-        'xcode_settings': {
-          'DYLIB_INSTALL_NAME_BASE': '@rpath'
-        },
-      }],
-      [ 'OS=="aix"', {
-        'ldflags': [
-          '-Wl,-bimport:<(node_exp_file)'
-        ],
-      }],
-      [ 'OS=="os400"', {
-        'ldflags': [
-          '-Wl,-bimport:<(node_exp_file)'
-        ],
-      }],
-      [ 'OS=="zos"', {
-        'conditions': [
-        ],
-      }],
-      [ 'OS=="win"', {
-        'msvs_disabled_warnings': [
-          # warning C4251: 'node::ObjectWrap::handle_' : class 'v8::Persistent<v8::Object>'
-          # needs to have dll-interface to be used by
-          # clients of class 'node::ObjectWrap'
-          4251
-        ],
-      }, {
-        # OS!="win"
-        'defines': [
-          '_LARGEFILE_SOURCE',
-          '_FILE_OFFSET_BITS=64'
-        ],
-      }],
-      [ 'OS in "freebsd openbsd netbsd solaris android" or \
-        (OS=="linux" and target_arch!="ia32")', {
-        'cflags': [ '-fPIC' ],
-      }],
-    ]
-  }
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/bin/node-gyp.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/bin/node-gyp.js
deleted file mode 100755
index f8317b47b3414..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/bin/node-gyp.js
+++ /dev/null
@@ -1,138 +0,0 @@
-#!/usr/bin/env node
-
-'use strict'
-
-process.title = 'node-gyp'
-
-const envPaths = require('env-paths')
-const gyp = require('../')
-const log = require('../lib/log')
-const os = require('os')
-
-/**
- * Process and execute the selected commands.
- */
-
-const prog = gyp()
-let completed = false
-prog.parseArgv(process.argv)
-prog.devDir = prog.opts.devdir
-
-const homeDir = os.homedir()
-if (prog.devDir) {
-  prog.devDir = prog.devDir.replace(/^~/, homeDir)
-} else if (homeDir) {
-  prog.devDir = envPaths('node-gyp', { suffix: '' }).cache
-} else {
-  throw new Error(
-    "node-gyp requires that the user's home directory is specified " +
-    'in either of the environmental variables HOME or USERPROFILE. ' +
-    'Override with: --devdir /path/to/.node-gyp')
-}
-
-if (prog.todo.length === 0) {
-  if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) {
-    log.stdout('v%s', prog.version)
-  } else {
-    log.stdout('%s', prog.usage())
-  }
-  process.exit(0)
-}
-
-log.info('it worked if it ends with', 'ok')
-log.verbose('cli', process.argv)
-log.info('using', 'node-gyp@%s', prog.version)
-log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch)
-
-/**
- * Change dir if -C/--directory was passed.
- */ - -const dir = prog.opts.directory -if (dir) { - const fs = require('fs') - try { - const stat = fs.statSync(dir) - if (stat.isDirectory()) { - log.info('chdir', dir) - process.chdir(dir) - } else { - log.warn('chdir', dir + ' is not a directory') - } - } catch (e) { - if (e.code === 'ENOENT') { - log.warn('chdir', dir + ' is not a directory') - } else { - log.warn('chdir', 'error during chdir() "%s"', e.message) - } - } -} - -async function run () { - const command = prog.todo.shift() - if (!command) { - // done! - completed = true - log.info('ok') - return - } - - try { - const args = await prog.commands[command.name](command.args) ?? [] - - if (command.name === 'list') { - if (args.length) { - args.forEach((version) => log.stdout(version)) - } else { - log.stdout('No node development files installed. Use `node-gyp install` to install a version.') - } - } else if (args.length >= 1) { - log.stdout(...args.slice(1)) - } - - // now run the next command in the queue - return run() - } catch (err) { - log.error(command.name + ' error') - log.error('stack', err.stack) - errorMessage() - log.error('not ok') - return process.exit(1) - } -} - -process.on('exit', function (code) { - if (!completed && !code) { - log.error('Completion callback never invoked!') - issueMessage() - process.exit(6) - } -}) - -process.on('uncaughtException', function (err) { - log.error('UNCAUGHT EXCEPTION') - log.error('stack', err.stack) - issueMessage() - process.exit(7) -}) - -function errorMessage () { - // copied from npm's lib/utils/error-handler.js - const os = require('os') - log.error('System', os.type() + ' ' + os.release()) - log.error('command', process.argv - .map(JSON.stringify).join(' ')) - log.error('cwd', process.cwd()) - log.error('node -v', process.version) - log.error('node-gyp -v', 'v' + prog.package.version) -} - -function issueMessage () { - errorMessage() - log.error('', ['Node-gyp failed to build your package.', - 'Try to update npm and/or node-gyp and if it does not help file an issue with the package author.' - ].join('\n')) -} - -// start running the given commands! -run() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/.release-please-manifest.json b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/.release-please-manifest.json deleted file mode 100644 index cbd0ca0683d98..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/.release-please-manifest.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - ".": "0.18.1" -} diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/LICENSE b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/LICENSE deleted file mode 100644 index c6944c5e4ed48..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -Copyright (c) 2020 Node.js contributors. All rights reserved. -Copyright (c) 2009 Google Inc. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. 
nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/data/ninja/build.ninja b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/data/ninja/build.ninja
deleted file mode 100644
index 2400dbb1f0dab..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/data/ninja/build.ninja
+++ /dev/null
@@ -1,4 +0,0 @@
-rule cc
-  command = cc $in $out
-
-build my.out: cc my.in
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc
deleted file mode 100644
index 8bca510815e0a..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is
-// then used during the final link for modules that have large PDBs. Otherwise,
-// the linker will generate a pdb with a page size of 1KB, which imposes a limit
-// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler
-// (rather than the linker), this limit is avoided. With this in place PDBs may
-// grow to 2GB.
-//
-// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py.
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/GypVsCMake.md b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/GypVsCMake.md
deleted file mode 100644
index 6d659a6123b66..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/GypVsCMake.md
+++ /dev/null
@@ -1,116 +0,0 @@
-# vs. CMake
-
-GYP was originally created to generate native IDE project files (Visual Studio, Xcode) for building [Chromium](http://www.chromium.org).
-
-The functionality of GYP is very similar to the [CMake](http://www.cmake.org)
-build tool. Bradley Nelson wrote up the following description of why the team
-created GYP instead of using CMake. The text below is copied from
-http://www.mail-archive.com/webkit-dev@lists.webkit.org/msg11029.html
-
-```
-
-Re: [webkit-dev] CMake as a build system?
-Bradley Nelson -Mon, 19 Apr 2010 22:38:30 -0700 - -Here's the innards of an email with a laundry list of stuff I came up with a -while back on the gyp-developers list in response to Mike Craddick regarding -what motivated gyp's development, since we were aware of cmake at the time -(we'd even started a speculative port): - - -I did an exploratory port of portions of Chromium to cmake (I think I got as -far as net, base, sandbox, and part of webkit). -There were a number of motivations, not all of which would apply to other -projects. Also, some of the design of gyp was informed by experience at -Google with large projects built wholly from source, leading to features -absent from cmake, but not strictly required for Chromium. - -1. Ability to incrementally transition on Windows. It took us about 6 months -to switch fully to gyp. Previous attempts to move to scons had taken a long -time and failed, due to the requirement to transition while in flight. For a -substantial period of time, we had a hybrid of checked in vcproj and gyp generated -vcproj. To this day we still have a good number of GUIDs pinned in the gyp files, -because different parts of our release pipeline have leftover assumptions -regarding manipulating the raw sln/vcprojs. This transition occurred from -the bottom up, largely because modules like base were easier to convert, and -had a lower churn rate. During early stages of the transition, the majority -of the team wasn't even aware they were using gyp, as it integrated into -their existing workflow, and only affected modules that had been converted. - -2. Generation of a more 'normal' vcproj file. Gyp attempts, particularly on -Windows, to generate vcprojs which resemble hand generated projects. It -doesn't generate any Makefile type projects, but instead produces msvs -Custom Build Steps and Custom Build Rules. This makes the resulting projects -easier to understand from the IDE and avoids parts of the IDE that simply -don't function correctly if you use Makefile projects. Our early hope with -gyp was to support the least common denominator of features present in each -of the platform specific project file formats, rather than falling back on -generated Makefiles/shell scripts to emulate some common abstraction. CMake by -comparison makes a good faith attempt to use native project features, but -falls back on generated scripts in order to preserve the same semantics on -each platforms. - -3. Abstraction on the level of project settings, rather than command line -flags. In gyp's syntax you can add nearly any option present in a hand -generated xcode/vcproj file. This allows you to use abstractions built into -the IDEs rather than reverse engineering them possibly incorrectly for -things like: manifest generation, precompiled headers, bundle generation. -When somebody wants to use a particular menu option from msvs, I'm able to -do a web search on the name of the setting from the IDE and provide them -with a gyp stanza that does the equivalent. In many cases, not all project -file constructs correspond to command line flags. - -4. Strong notion of module public/private interface. Gyp allows targets to -publish a set of direct_dependent_settings, specifying things like -include_dirs, defines, platforms specific settings, etc. This means that -when module A depends on module B, it automatically acquires the right build -settings without module A being filled with assumptions/knowledge of exactly -how module B is built. 
Additionally, all of the transitive dependencies of
-module B are pulled in. This avoids their being a single top level view of
-the project, rather each gyp file expresses knowledge about its immediate
-neighbors. This keep local knowledge local. CMake effectively has a large
-shared global namespace.
-
-5. Cross platform generation. CMake is not able to generate all project
-files on all platforms. For example xcode projects cannot be generated from
-windows (cmake uses mac specific libraries to do project generation). This
-means that for instance generating a tarball containing pregenerated
-projects for all platforms is hard with Cmake (requires distribution to
-several machine types).
-
-6. Gyp has rudimentary cross compile support. Currently we've added enough
-functionality to gyp to support x86 -> arm cross compiles. Last I checked
-this functionality wasn't present in cmake. (This occurred later).
-
-
-That being said there are a number of drawbacks currently to gyp:
-
-1. Because platform specific settings are expressed at the project file
-level (rather than the command line level). Settings which might otherwise
-be shared in common between platforms (flags to gcc on mac/linux), end up
-being repeated twice. Though in fairness there is actually less sharing here
-than you'd think. include_dirs and defines actually represent 90% of what
-can be typically shared.
-
-2. CMake may be more mature, having been applied to a broader range of
-projects. There a number of 'tool modules' for cmake, which are shared in a
-common community.
-
-3. gyp currently makes some nasty assumptions about the availability of
-chromium's hermetic copy of cygwin on windows. This causes you to either
-have to special case a number of rules, or swallow this copy of cygwin as a
-build time dependency.
-
-4. CMake includes a fairly readable imperative language. Currently Gyp has a
-somewhat poorly specified declarative language (variable expansion happens
-in sometimes weird and counter-intuitive ways). In fairness though, gyp assumes
-that external python scripts can be used as an escape hatch. Also gyp avoids
-a lot of the things you'd need imperative code for, by having a nice target
-settings publication mechanism.
-
-5. (Feature/drawback depending on personal preference). Gyp's syntax is
-DEEPLY nested. It suffers from all of Lisp's advantages and drawbacks.
-
--BradN
-```
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/Hacking.md b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/Hacking.md
deleted file mode 100644
index 89b3b8bea923e..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/Hacking.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# Hacking
-
-## Getting the sources
-
-Git is required to hack on anything; you can set up a git clone of GYP
-as follows:
-
-```
-mkdir foo
-cd foo
-git clone git@github.com:nodejs/gyp-next.git
-cd gyp
-```
-
-(This will clone gyp underneath it into `foo/gyp`;
-`foo` can be any directory name you want.) Once you've done that,
-you can use the repo like any other Git repo.
-
-## Testing your change
-
-GYP has a suite of tests which you can run with the provided test driver
-to make sure your changes aren't breaking anything important.
-
-You run the test driver with e.g.
-
-``` sh
-$ python -m pip install --upgrade pip setuptools
-$ pip install --editable ".[dev]"
-$ python -m pytest
-```
-
-See [Testing](Testing.md) for more details on the test framework.
-
-Note that it can be handy to look at the project files output by the tests
-to diagnose problems. The easiest way to do that is by kindly asking the
-test driver to leave the temporary directories it creates in-place.
-This is done by setting the environment variable "PRESERVE", e.g.
-
-```
-set PRESERVE=all # On Windows
-export PRESERVE=all # On saner platforms.
-```
-
-## Reviewing your change
-
-All changes to GYP must be code reviewed before submission.
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/InputFormatReference.md b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/InputFormatReference.md
deleted file mode 100644
index 2b2c180f4443c..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/InputFormatReference.md
+++ /dev/null
@@ -1,1080 +0,0 @@
-# Input Format Reference
-
-## Primitive Types
-
-The following primitive types are found within input files:
-
- * String values, which may be represented by enclosing them in
-   `'single quotes'` or `"double quotes"`. By convention, single
-   quotes are used.
- * Integer values, which are represented in decimal without any special
-   decoration. Integers are fairly rare in input files, but have a few
-   applications in boolean contexts, where the convention is to
-   represent true values with `1` and false with `0`.
- * Lists, which are represented as a sequence of items separated by
-   commas (`,`) within square brackets (`[` and `]`). A list may
-   contain any other primitive types, including other lists.
-   Generally, each item of a list must be of the same type as all other
-   items in the list, but in some cases (such as within `conditions`
-   sections), the list structure is more tightly specified. A trailing
-   comma is permitted.
-
-   This example list contains three string values.
-
-   ```
-   [ 'Generate', 'Your', 'Projects', ]
-   ```
-
- * Dictionaries, which map keys to values. All keys are strings.
-   Values may be of any other primitive type, including other
-   dictionaries. A dictionary is enclosed within curly braces (`{` and
-   `}`). Keys precede values, separated by a colon (`:`). Successive
-   dictionary entries are separated by commas (`,`). A trailing comma
-   is permitted. It is an error for keys to be duplicated within a
-   single dictionary as written in an input file, although keys may
-   replace other keys during [merging](#Merging).
-
-   This example dictionary maps each of three keys to different values.
-
-   ```
-   {
-     'inputs': ['version.c.in'],
-     'outputs': ['version.c'],
-     'process_outputs_as_sources': 1,
-   }
-   ```
-
-## Overall Structure
-
-A GYP input file is organized as structured data. At the root scope of
-each `.gyp` or `.gypi` (include) file is a dictionary. The keys and
-values of this dictionary, along with any descendants contained within
-the values, provide the data contained within the file. This data is
-given meaning by interpreting specific key names and their associated
-values in specific ways (see [Settings Keys](#Settings_Keys)).
-
-### Comments (#)
-
-Within an input file, a comment is introduced by a pound sign (`#`) not
-within a string. Any text following the pound sign, up until the end of
-the line, is treated as a comment.
-
-#### Example
-
-```
-{
-  'school_supplies': [
-    'Marble composition book',
-    'Sharp #2 pencil',
-    'Safety scissors', # You still shouldn't run with these
-  ],
-}
-```
-
-In this example, the # in `'Sharp #2 pencil'` is not taken as
-introducing a comment because it occurs within a string, but the text
-after `'Safety scissors'` is treated as a comment having no impact on
-the data within the file.
-
-## Merging
-
-### Merge Basics (=, ?, +)
-
-Many operations on GYP input files occur by merging dictionary and list
-items together. During merge operations, it is important to recognize
-the distinction between source and destination values. Items from the
-source value are merged into the destination, which leaves the source
-unchanged and the destination modified by the source. A dictionary may
-only be merged into another dictionary, and a list may only be merged
-into another list.
-
- * When merging a dictionary, for each key in the source:
-   * If the key does not exist in the destination dictionary, insert it
-     and copy the associated value directly.
-   * If the key does exist:
-     * If the associated value is a dictionary, perform the dictionary
-       merging procedure using the source's and destination's value
-       dictionaries.
-     * If the associated value is a list, perform the list merging
-       procedure using the source's and destination's value lists.
-     * If the associated value is a string or integer, the destination
-       value is replaced by the source value.
- * When merging a list, merge according to the suffix appended to the
-   key name, if the list is a value within a dictionary.
-   * If the key ends with an equals sign (`=`), the policy is for the
-     source list to completely replace the destination list if it
-     exists. _Mnemonic: `=` for assignment._
-   * If the key ends with a question mark (`?`), the policy is for the
-     source list to be set as the destination list only if the key is
-     not already present in the destination. _Mnemonic: `?` for
-     conditional assignment_.
-   * If the key ends with a plus sign (`+`), the policy is for the
-     source list contents to be prepended to the destination list.
-     _Mnemonic: `+` for addition or concatenation._
-   * If the list key is undecorated, the policy is for the source list
-     contents to be appended to the destination list. This is the
-     default list merge policy.
-
-#### Example
-
-Source dictionary:
-
-```
-{
-  'include_dirs+': [
-    'shared_stuff/public',
-  ],
-  'link_settings': {
-    'libraries': [
-      '-lshared_stuff',
-    ],
-  },
-  'test': 1,
-}
-```
-
-Destination dictionary:
-
-```
-{
-  'target_name': 'hello',
-  'sources': [
-    'kitty.cc',
-  ],
-  'include_dirs': [
-    'headers',
-  ],
-  'link_settings': {
-    'libraries': [
-      '-lm',
-    ],
-    'library_dirs': [
-      '/usr/lib',
-    ],
-  },
-  'test': 0,
-}
-```
-
-Merged dictionary:
-
-```
-{
-  'target_name': 'hello',
-  'sources': [
-    'kitty.cc',
-  ],
-  'include_dirs': [
-    'shared_stuff/public', # Merged, list item prepended due to include_dirs+
-    'headers',
-  ],
-  'link_settings': {
-    'libraries': [
-      '-lm',
-      '-lshared_stuff', # Merged, list item appended
-    ],
-    'library_dirs': [
-      '/usr/lib',
-    ],
-  },
-  'test': 1, # Merged, int value replaced
-}
-```
-
-## Pathname Relativization
-
-In a `.gyp` or `.gypi` file, many string values are treated as pathnames
-relative to the file in which they are defined.
## Pathname Relativization

In a `.gyp` or `.gypi` file, many string values are treated as pathnames
relative to the file in which they are defined.

String values associated with the following keys, or contained within
lists associated with the following keys, are treated as pathnames:

 * destination
 * files
 * include\_dirs
 * inputs
 * libraries
 * outputs
 * sources
 * mac\_bundle\_resources
 * mac\_framework\_dirs
 * msvs\_cygwin\_dirs
 * msvs\_props

Additionally, string values associated with keys ending in the following
suffixes, or contained within lists associated with keys ending in the
following suffixes, are treated as pathnames:

 * `_dir`
 * `_dirs`
 * `_file`
 * `_files`
 * `_path`
 * `_paths`

However, any string value beginning with any of these characters is
excluded from pathname relativization:

 * `/` for identifying absolute paths.
 * `$` for introducing build system variable expansions.
 * `-` to support specifying such items as `-llib`, meaning “library
   `lib` in the library search path.”
 * `<`, `>`, and `!` for GYP expansions.

When merging such relative pathnames, they are adjusted so that they can
remain valid relative pathnames, despite being relative to a new home.

#### Example

Source dictionary from `../build/common.gypi`:

```
{
  'include_dirs': ['include'],  # Treated as relative to ../build
  'libraries': ['-lz'],         # Not treated as a pathname, begins with a dash
  'defines': ['NDEBUG'],        # defines does not contain pathnames
}
```

Target dictionary, from `base.gyp`:

```
{
  'sources': ['string_util.cc'],
}
```

Merged dictionary:

```
{
  'sources': ['string_util.cc'],
  'include_dirs': ['../build/include'],
  'libraries': ['-lz'],
  'defines': ['NDEBUG'],
}
```

Because of pathname relativization, after the merge is complete, all of
the pathnames in the merged dictionary are valid relative to the
directory containing `base.gyp`.

## List Singletons

Some list items are treated as singletons, and the list merge process
will enforce special rules when merging them. At present, any string
item in a list that does not begin with a dash (`-`) is treated as a
singleton, although **this is subject to change.** When appending or
prepending a singleton to a list, if the item is already in the list,
only the earlier instance is retained in the merged list.

#### Example

Source dictionary:

```
{
  'defines': [
    'EXPERIMENT=1',
    'NDEBUG',
  ],
}
```

Destination dictionary:

```
{
  'defines': [
    'NDEBUG',
    'USE_THREADS',
  ],
}
```

Merged dictionary:

```
{
  'defines': [
    'NDEBUG',
    'USE_THREADS',
    'EXPERIMENT=1',  # Note that NDEBUG is not appended after this.
  ],
}
```

## Including Other Files

If the `-I` (`--include`) argument was used to invoke GYP, any files
specified will be implicitly merged into the root dictionary of all
`.gyp` files.

An [includes](#includes) section may be placed anywhere within a
`.gyp` or `.gypi` (include) file. `includes` sections contain lists of
other files to include. They are processed sequentially and merged into
the enclosing dictionary at the point that the `includes` section was
found. `includes` sections at the root of a `.gyp` file dictionary are
merged after any `-I` includes from the command line.

[includes](#includes) sections are processed immediately after a file is
loaded, even before [variable and conditional
processing](#Variables_and_Conditionals), so it is not possible to
include a file based on a [variable reference](#Variable_Expansions).
While it would be useful to be able to include files based on variable
expansions, it is most likely more useful to allow included files access
to variables set by the files that included them.
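As an illustration of the mechanics described above, here is a minimal
sketch of an `includes` section (the file names are hypothetical):

```
{
  'includes': [
    '../build/common.gypi',  # Merged into this dictionary at this point.
  ],
  'targets': [
    {
      'target_name': 'hello',
      'type': 'executable',
      'sources': ['hello.c'],
    },
  ],
}
```

The contents of `common.gypi` are merged into the root dictionary at
the point where the `includes` section appears, exactly as if they had
been written there in place.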
An [includes](#includes) section may, however, be placed within a
[conditional](#Conditionals) section. The included file itself will
be loaded unconditionally, but its dictionary will be discarded if the
associated condition is not true.

## Variables and Conditionals

### Variables

There are three main types of variables within GYP.

 * Predefined variables. By convention, these are named with
   `CAPITAL_LETTERS`. Predefined variables are set automatically by
   GYP. They may be overridden, but it is not advisable to do so. See
   [Predefined Variables](#Predefined_Variables) for a list of
   variables that GYP provides.
 * User-defined variables. Within any dictionary, a key named
   `variables` can be provided, containing a mapping between variable
   names (keys) and their contents (values), which may be strings,
   integers, or lists of strings. By convention, user-defined
   variables are named with `lowercase_letters`.
 * Automatic variables. Within any dictionary, any key with a string
   value has a corresponding automatic variable whose name is the same
   as the key name with an underscore (`_`) prefixed. For example, if
   your dictionary contains `type: 'static_library'`, an automatic
   variable named `_type` will be provided, and its value will be a
   string, `'static_library'`.

Variables are inherited from enclosing scopes.

### Providing Default Values for Variables (%)

Within a `variables` section, keys named with percent sign (`%`)
suffixes mean that the variable should be set only if it is undefined at
the time it is processed. This can be used to provide defaults for
variables that would otherwise be undefined, so that they may reliably
be used in [variable expansion or conditional
processing](#Variables_and_Conditionals).
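For example, this minimal sketch (the variable name is hypothetical)
provides a default that a command-line definition such as
`-Duse_system_zlib=1`, or an earlier-processed `variables` section, may
override:

```
{
  'variables': {
    'use_system_zlib%': 0,  # Used only if use_system_zlib is still undefined.
  },
  'conditions': [
    ['use_system_zlib==1', {'defines': ['USE_SYSTEM_ZLIB']}],
  ],
}
```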
### Predefined Variables

Each GYP generator module provides defaults for the following variables:

 * `OS`: The name of the operating system that the generator produces
   output for. Common values for `OS` are:

   * `'linux'`
   * `'mac'`
   * `'win'`

   But other values may be encountered and this list should not be
   considered exhaustive. The `gypd` (debug) generator module does not
   provide a predefined value for `OS`. When invoking GYP with the
   `gypd` module, if a value for `OS` is needed, it must be provided on
   the command line, such as `gyp -f gypd -DOS=mac`.

   GYP generators also provide defaults for these variables. They may
   be expressed in terms of variables used by the build system that
   they generate for, often in `$(VARIABLE)` format. For example, the
   GYP `PRODUCT_DIR` variable maps to the Xcode `BUILT_PRODUCTS_DIR`
   variable, so `PRODUCT_DIR` is defined by the Xcode generator as
   `$(BUILT_PRODUCTS_DIR)`.
 * `EXECUTABLE_PREFIX`: A prefix, if any, applied to executable names.
   Usually this will be an empty string.
 * `EXECUTABLE_SUFFIX`: A suffix, if any, applied to executable names.
   On Windows, this will be `.exe`; elsewhere, it will usually be an
   empty string.
 * `INTERMEDIATE_DIR`: A directory that can be used to place
   intermediate build results in. `INTERMEDIATE_DIR` is only
   guaranteed to be accessible within a single target (see targets).
   This variable is most useful within the context of rules and actions
   (see rules, see actions). Compare with `SHARED_INTERMEDIATE_DIR`.
 * `PRODUCT_DIR`: The directory in which the primary output of each
   target, such as executables and libraries, is placed.
 * `RULE_INPUT_ROOT`: The base name for the input file (e.g. "`foo`").
   See Rules.
 * `RULE_INPUT_EXT`: The file extension for the input file (e.g.
   "`.cc`"). See Rules.
 * `RULE_INPUT_NAME`: Full name of the input file (e.g. "`foo.cc`").
   See Rules.
 * `RULE_INPUT_PATH`: Full path to the input file (e.g.
   "`/bar/foo.cc`"). See Rules.
 * `SHARED_INTERMEDIATE_DIR`: A directory that can be used to place
   intermediate build results in, and have them be accessible to other
   targets. Unlike `INTERMEDIATE_DIR`, each target in a project,
   possibly spanning multiple `.gyp` files, shares the same
   `SHARED_INTERMEDIATE_DIR`.

The following additional predefined variables may be available under
certain circumstances:

 * `DEPTH`. When GYP is invoked with a `--depth` argument, when
   processing any `.gyp` file, `DEPTH` will be a relative path from the
   `.gyp` file to the directory specified by the `--depth` argument.

### User-Defined Variables

A user-defined variable may be defined in terms of other variables, but
not other variables that have definitions provided in the same scope.

### Variable Expansions (<, >, <@, >@)

GYP provides two forms of variable expansions, “early” or “pre”
expansions, and “late,” “post,” or “target” expansions. They have
similar syntax, differing only in the character used to introduce them.

 * Early expansions are introduced by a less-than (`<`) character.
   _Mnemonic: the arrow points to the left, earlier on a timeline._
 * Late expansions are introduced by a greater-than (`>`) character.
   _Mnemonic: the arrow points to the right, later on a timeline._

The difference between the two phases of expansion is described in
[Early and Late Phases](#Early_and_Late_Phases).

These characters were chosen based upon the requirement that they not
conflict with the variable format used natively by build systems. While
the dollar sign (`$`) is the most natural fit for variable expansions,
its use was ruled out because most build systems already use that
character for their own variable expansions. Using different characters
means that no escaping mechanism was needed to differentiate between GYP
variables and build system variables, and writing build system variables
into GYP files is not cumbersome.

Variables may contain lists or strings, and variable expansions may
occur in list or string context. There are variant forms of variable
expansions that may be used to determine how each type of variable is to
be expanded in each context.

 * When a variable is referenced by `<(VAR)` or `>(VAR)`:
   * If `VAR` is a string, the variable reference within the string is
     replaced by the variable's string value.
   * If `VAR` is a list, the variable reference within the string is
     replaced by a string containing the concatenation of all of the
     variable's list items. Generally, the items are joined with
     spaces between each, but the specific behavior is
     generator-specific. The precise encoding used by any generator
     should be one that would allow each list item to be treated as a
     separate argument when used as program arguments on the system
     that the generator produces output for.
 * When a variable is referenced by `<@(VAR)` or `>@(VAR)`:
   * The expansion must occur in list context.
   * The list item must be `'<@(VAR)'` or `'>@(VAR)'` exactly.
   * If `VAR` is a list, each of its elements is inserted into the
     list in which expansion is taking place, replacing the list item
     containing the variable reference.
   * If `VAR` is a string, the string is converted to a list which is
     inserted into the list in which expansion is taking place as
     above. The conversion into a list is generator-specific, but
     generally, spaces in the string are taken as separators between
     list items. The specific method of converting the string to a
     list should be the inverse of the encoding method used to expand
     list variables in string context, above.

GYP treats references to undefined variables as errors.

### Command Expansions (!, !@)

Command expansions are introduced by an exclamation point (`!`)
following the `<` or `>` of a variable expansion: `<!(command)` in
string context and `<!@(command)` in list context. Instead of providing
a variable's value, a command expansion executes the given command and
substitutes its output, subject to the same string-context and
list-context rules as variable expansions.

### Early and Late Phases

GYP performs two phases of expansion and evaluation. The “early” or
“pre” phase operates on the `<` form of [variable
expansions](#Variable_Expansions) and on the `<!` form of [command
expansions](#Command_Expansions_(!,_!@)); the “late,” “post,” or
“target” phase operates on the `>` form of [variable
expansions](#Variable_Expansions) and on the `>!` form of [command
expansions](#Command_Expansions_(!,_!@)).

These two phases are provided because there are some circumstances in
which each is desirable.

The “early” phase is appropriate for most expansions and evaluations.
“Early” expansions and evaluations may be performed anywhere within any
`.gyp` or `.gypi` file.

The “late” phase is appropriate when expansion or evaluation must be
deferred until a specific section has been merged into target context.
“Late” expansions and evaluations only occur within `targets` sections
and their descendants. The typical use case for a late-phase expansion
is to provide, in some globally-included `.gypi` file, distinct
behaviors depending on the specifics of a target.

#### Example

Given this input:

```
{
  'target_defaults': {
    'target_conditions': [
      ['_type=="shared_library"', {'cflags': ['-fPIC']}],
    ],
  },
  'targets': [
    {
      'target_name': 'sharing_is_caring',
      'type': 'shared_library',
    },
    {
      'target_name': 'static_in_the_attic',
      'type': 'static_library',
    },
  ]
}
```

The conditional needs to be evaluated only in target context; it is
nonsense outside of target context because no `_type` variable is
defined. [target\_conditions](#target_conditions) allows evaluation
to be deferred until after the [targets](#targets) sections are
merged into their copies of [target\_defaults](#target_defaults).
The resulting targets, after “late” phase processing:

```
{
  'targets': [
    {
      'target_name': 'sharing_is_caring',
      'type': 'shared_library',
      'cflags': ['-fPIC'],
    },
    {
      'target_name': 'static_in_the_attic',
      'type': 'static_library',
    },
  ]
}
```

### Expansion and Evaluation Performed Simultaneously

During any expansion and evaluation phase, both expansion and evaluation
are performed simultaneously. The process for handling variable
expansions and conditional evaluation within a dictionary is:

 * Load [automatic variables](#Variables) (those with leading
   underscores).
 * If a [variables](#variables) section is present, recurse into its
   dictionary. This allows [conditionals](#Conditionals) to be
   present within the `variables` dictionary.
 * Load [user-defined variables](#User-Defined_Variables) from the
   [variables](#variables) section.
 * For each string value in the dictionary, perform [variable
   expansion](#Variable_Expansions) and, if operating
   during the “late” phase, [command
   expansions](#Command_Expansions).
 * Reload [automatic variables](#Variables) and [user-defined
   variables](#User-Defined_Variables) because the variable
   expansion step may have resulted in changes to the automatic
   variables.
 * If a [conditions](#conditions) or
   [target\_conditions](#target_conditions) section (depending on
   phase) is present, recurse into its dictionary. This is done after
   variable expansion so that conditionals may take advantage of
   expanded automatic variables.
 * Evaluate [conditionals](#Conditionals).
 * Reload [automatic variables](#Variables) and [user-defined
   variables](#User-Defined_Variables) because the conditional
   evaluation step may have resulted in changes to the automatic
   variables.
 * Recurse into child dictionaries or lists that have not yet been
   processed.

One quirk of this ordering is that you cannot expect a
[variables](#variables) section within a dictionary's
[conditional](#Conditionals) to be effective in the dictionary
itself, but the added variables will be effective in any child
dictionaries or lists. It is thought to be far more worthwhile to
provide resolved [automatic variables](#Variables) to
[conditional](#Conditionals) sections, though. As a workaround, to
conditionalize variable values, place a [conditions](#conditions) or
[target\_conditions](#target_conditions) section within the
[variables](#variables) section.

## Dependencies and Dependents

In GYP, “dependents” are targets that rely on other targets, called
“dependencies.” Dependents declare their reliance with a special
section within their target dictionary,
[dependencies](#dependencies).

### Dependent Settings

It is useful for targets to “advertise” settings to their dependents.
For example, a target might require that all of its dependents add
certain directories to their include paths, link against special
libraries, or define certain preprocessor macros. GYP allows these
cases to be handled gracefully with “dependent settings” sections.
There are three types of such sections:

 * [direct\_dependent\_settings](#direct_dependent_settings), which
   advertises settings to a target's direct dependents only.
 * [all\_dependent\_settings](#all_dependent_settings), which
   advertises settings to all of a target's dependents, both direct and
   indirect.
 * [link\_settings](#link_settings), which contains settings that
   should be applied when a target's object files are used as linker
   input.

Furthermore, in some cases, a target needs to pass its dependencies'
settings on to its own dependents. This might happen when a target's
own public header files include header files provided by its dependency.
[export\_dependent\_settings](#export_dependent_settings) allows a
target to declare dependencies for which
[direct\_dependent\_settings](#direct_dependent_settings) should be
passed through to its own dependents.

Dependent settings processing merges a copy of the relevant dependent
settings dictionary from a dependency into its relevant dependent
targets.

In most instances,
[direct\_dependent\_settings](#direct_dependent_settings) will be
used. There are very few cases where
[all\_dependent\_settings](#all_dependent_settings) is actually
correct; in most of the cases where it is tempting to use, it would be
preferable to declare
[export\_dependent\_settings](#export_dependent_settings). Most
[libraries](#libraries) and [library\_dirs](#library_dirs)
sections should be placed within [link\_settings](#link_settings)
sections.
#### Example

Given:

```
{
  'targets': [
    {
      'target_name': 'cruncher',
      'type': 'static_library',
      'sources': ['cruncher.cc'],
      'direct_dependent_settings': {
        'include_dirs': ['.'],  # dependents need to find cruncher.h.
      },
      'link_settings': {
        'libraries': ['-lm'],  # cruncher.cc does math.
      },
    },
    {
      'target_name': 'cruncher_test',
      'type': 'executable',
      'dependencies': ['cruncher'],
      'sources': ['cruncher_test.cc'],
    },
  ],
}
```

After dependent settings processing, the dictionary for `cruncher_test`
will be:

```
{
  'target_name': 'cruncher_test',
  'type': 'executable',
  'dependencies': ['cruncher'],  # implies linking against cruncher
  'sources': ['cruncher_test.cc'],
  'include_dirs': ['.'],
  'libraries': ['-lm'],
},
```

If `cruncher` was declared as a `shared_library` instead of a
`static_library`, the `cruncher_test` target would not contain `-lm`,
but instead, `cruncher` itself would link against `-lm`.

## Linking Dependencies

The precise meaning of a dependency relationship varies with the
[types](#type) of the [targets](#targets) at either end of the
relationship. In GYP, a dependency relationship can indicate two things
about how targets relate to each other:

 * Whether the dependent target needs to link against the dependency.
 * Whether the dependency target needs to be built prior to the
   dependent. If the former case is true, this case must be true as
   well.

The analysis of the first item is complicated by the differences between
static and shared libraries.

 * Static libraries are simply collections of object files (`.o` or
   `.obj`) that are used as inputs to a linker (`ld` or `link.exe`).
   Static libraries don't link against other libraries; they're
   collected together and used when eventually linking a shared library
   or executable.
 * Shared libraries are linker output and must undergo symbol
   resolution. They must link against other libraries (static or
   shared) in order to facilitate symbol resolution. They may be used
   as libraries in subsequent link steps.
 * Executables are also linker output, and also undergo symbol
   resolution. Like shared libraries, they must link against static
   and shared libraries to facilitate symbol resolution. They may not
   be reused as linker inputs in subsequent link steps.

Accordingly, GYP performs an operation referred to as “static library
dependency adjustment,” in which it makes each linker output target
(shared libraries and executables) link against the static libraries it
depends on, either directly or indirectly. Because the linkable targets
link against these static libraries, they are also made direct
dependents of the static libraries.

As part of this process, GYP is also able to remove the direct
dependency relationships between two static library targets, as a
dependent static library does not actually need to link against a
dependency static library. This removal facilitates speedier builds
under some build systems, as they are now free to build the two targets
in parallel. The removal of this dependency is incorrect in some cases,
such as when the dependency target contains [rules](#rules) or
[actions](#actions) that generate header files required by the
dependent target. In such cases, the dependency target, the one
providing the side-effect files, must declare itself as a
[hard\_dependency](#hard_dependency). This setting instructs GYP not
to remove the dependency link between two static library targets in its
generated output.
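A minimal sketch of such a target (the target, script, and file names
are hypothetical):

```
{
  'target_name': 'generated_headers',
  'type': 'static_library',
  'hard_dependency': 1,  # Dependent static libraries must still wait for this target.
  'actions': [
    {
      'action_name': 'make_version_header',
      'inputs': ['version.h.in', 'make_version.py'],
      'outputs': ['<(SHARED_INTERMEDIATE_DIR)/version.h'],
      'action': ['python', 'make_version.py', 'version.h.in', '<@(_outputs)'],
      'message': 'Generating version.h',
    },
  ],
}
```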
## Loading Files to Resolve Dependencies

When GYP runs, it loads all `.gyp` files needed to resolve dependencies
found in [dependencies](#dependencies) sections. These files are not
merged into the files that reference them, but they may contain special
sections that are merged into dependent target dictionaries.

## Build Configurations

Explain this.

## List Filters

GYP allows list items to be filtered by “exclusions” and “patterns.”
Any list containing string values in a dictionary may have this
filtering applied. For the purposes of this section, a list modified by
exclusions or patterns is referred to as a “base list”, in contrast to
the “exclusion list” and “pattern list” that operate on it.

 * For a base list identified by key name `key`, the `key!` list
   provides exclusions.
 * For a base list identified by key name `key`, the `key/` list
   provides regular expression pattern-based filtering.

Both `key!` and `key/` may be present. The `key!` exclusion list will
be processed first, followed by the `key/` pattern list.

Exclusion lists are most powerful when used in conjunction with
[conditionals](#Conditionals).

## Exclusion Lists (!)

An exclusion list provides a way to remove items from the related list
based on exact matching. Any item found in an exclusion list will be
removed from the corresponding base list.

#### Example

This example excludes files from `sources` based on the setting of
the `OS` variable.

```
{
  'sources': [
    'mac_util.mm',
    'win_util.cc',
  ],
  'conditions': [
    ['OS=="mac"', {'sources!': ['win_util.cc']}],
    ['OS=="win"', {'sources!': ['mac_util.mm']}],
  ],
}
```

## Pattern Lists (/)

Pattern lists are similar to, but more powerful than, [exclusion
lists](#Exclusion_Lists_(!)). Each item in a pattern list is itself
a two-element list. The first item is a string, either `'include'` or
`'exclude'`, specifying the action to take. The second item is a string
specifying a regular expression. Any item in the base list matching the
regular expression pattern will either be included or excluded, based on
the action specified.

Items in a pattern list are processed in sequence, and an excluded item
that is later included will not be removed from the list (unless it is
subsequently excluded again.)

Pattern lists are processed after [exclusion
lists](#Exclusion_Lists_(!)), so it is possible for a pattern list to
re-include items previously excluded by an exclusion list.

Nothing is actually removed from a base list until all items in an
[exclusion list](#Exclusion_Lists_(!)) and pattern list have been
evaluated. This allows items to retain their correct position relative
to one another even after being excluded and subsequently included.

#### Example

In this example, a uniform naming scheme is adopted for
platform-specific files.
```
{
  'sources': [
    'io_posix.cc',
    'io_win.cc',
    'launcher_mac.cc',
    'main.cc',
    'platform_util_linux.cc',
    'platform_util_mac.mm',
    'platform_util_win.cc',
  ],
  'sources/': [
    ['exclude', '_win\\.cc$'],
  ],
  'conditions': [
    ['OS!="linux"', {'sources/': [['exclude', '_linux\\.cc$']]}],
    ['OS!="mac"', {'sources/': [['exclude', '_mac\\.cc|mm?$']]}],
    ['OS=="win"', {'sources/': [
      ['include', '_win\\.cc$'],
      ['exclude', '_posix\\.cc$'],
    ]}],
  ],
}
```

After the pattern list is applied, `sources` will have the following
values, depending on the setting of `OS`:

 * When `OS` is `linux`: `['io_posix.cc', 'main.cc',
   'platform_util_linux.cc']`
 * When `OS` is `mac`: `['io_posix.cc', 'launcher_mac.cc', 'main.cc',
   'platform_util_mac.mm']`
 * When `OS` is `win`: `['io_win.cc', 'main.cc',
   'platform_util_win.cc']`

Note that when `OS` is `win`, the `include` for `_win.cc` files is
processed after the `exclude` matching the same pattern, because the
`sources/` list participates in [merging](#Merging) during
[conditional evaluation](#Conditionals) just like any other list
would. This guarantees that the `_win.cc` files, previously
unconditionally excluded, will be re-included when `OS` is `win`.

## Locating Excluded Items

In some cases, a GYP generator needs access to items that were
excluded by an [exclusion list](#Exclusion_Lists_(!)) or [pattern
list](#Pattern_Lists_(/)). When GYP excludes items during processing
of either of these list types, it places the results in an `_excluded`
list. In the example above, when `OS` is `mac`, `sources_excluded`
would be set to `['io_win.cc', 'platform_util_linux.cc',
'platform_util_win.cc']`. Some GYP generators use this feature to
display excluded files in the project files they generate for the
convenience of users, who may wish to refer to other implementations.

## Processing Order

GYP uses a defined and predictable order to execute the various steps
performed between loading files and generating output.

 * Load files.
   * Load `.gyp` files. Merge any [command-line
     includes](#Including_Other_Files) into each `.gyp` file's root
     dictionary. As [includes](#Including_Other_Files) are found,
     load them as well and [merge](#Merging) them into the scope in
     which the [includes](#includes) section was found.
   * Perform [“early” or “pre”](#Early_and_Late_Phases) [variable
     expansion and conditional
     evaluation](#Variables_and_Conditionals).
   * [Merge](#Merging) each [target's](#targets) dictionary into
     the `.gyp` file's root [target\_defaults](#target_defaults)
     dictionary.
   * Scan each [target](#targets) for
     [dependencies](#dependencies), and repeat the above steps for
     any newly-referenced `.gyp` files not yet loaded.
 * Scan each [target](#targets) for wildcard
   [dependencies](#dependencies), expanding the wildcards.
 * Process [dependent settings](#Dependent_Settings). These
   sections are processed, in order:
   * [all\_dependent\_settings](#all_dependent_settings)
   * [direct\_dependent\_settings](#direct_dependent_settings)
   * [link\_dependent\_settings](#link_dependent_settings)
 * Perform [static library dependency
   adjustment](#Linking_Dependencies).
 * Perform [“late,” “post,” or “target”](#Early_and_Late_Phases)
   [variable expansion and conditional
   evaluation](#Variables_and_Conditionals) on [target](#targets)
   dictionaries.
 * Merge [target](#targets) settings into
   [configurations](#configurations) as appropriate.
 * Process [exclusion and pattern
   lists](#List_Exclusions_and_Patterns).
## Settings Keys

### Settings that may appear anywhere

#### conditions

_List of `condition` items_

A `conditions` section introduces a subdictionary that is only merged
into the enclosing scope based on the evaluation of a conditional
expression. Each `condition` within a `conditions` list is itself a
list of at least two items:

 1. A string containing the conditional expression itself. Conditional
    expressions may take the following forms:
    * For string values, `var=="value"` and `var!="value"` to test
      equality and inequality. For example, `'OS=="linux"'` is true
      when the `OS` variable is set to `"linux"`.
    * For integer values, `var==value`, `var!=value`, `var<value`,
      `var<=value`, `var>=value`, and `var>value`, to test equality and
      several common forms of inequality. For example,
      `'chromium_code==0'` is true when the `chromium_code` variable is
      set to `0`.
    * It is an error for a conditional expression to reference any
      undefined variable.
 1. A dictionary containing the subdictionary to be merged into the
    enclosing scope if the conditional expression evaluates to true.

These two items can be followed by any number of similar two-item
pairs, which will be evaluated if the previous conditional expression
does not evaluate to true.

An additional optional dictionary can be appended to this sequence of
pairs. This optional dictionary will be merged into the enclosing
scope if none of the conditional expressions evaluate to true.

Within a `conditions` section, each item is processed sequentially, so
it is possible to predict the order in which operations will occur.

There is no restriction on nesting `conditions` sections.

`conditions` sections are very similar to `target_conditions` sections.
See target\_conditions.

#### Example

```
{
  'sources': [
    'common.cc',
  ],
  'conditions': [
    ['OS=="mac"', {'sources': ['mac_util.mm']}],
    ['OS=="win"', {'sources': ['win_main.cc']}, {'sources': ['posix_main.cc']}],
    ['OS=="mac"', {'sources': ['mac_impl.mm']},
     'OS=="win"', {'sources': ['win_impl.cc']},
     {'sources': ['default_impl.cc']}
    ],
  ],
}
```

Given this input, the `sources` list will take on different values based
on the `OS` variable.

 * If `OS` is `"mac"`, `sources` will contain `['common.cc',
   'mac_util.mm', 'posix_main.cc', 'mac_impl.mm']`.
 * If `OS` is `"win"`, `sources` will contain `['common.cc',
   'win_main.cc', 'win_impl.cc']`.
 * If `OS` is any other value such as `"linux"`, `sources` will contain
   `['common.cc', 'posix_main.cc', 'default_impl.cc']`.
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/LanguageSpecification.md b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/LanguageSpecification.md
deleted file mode 100644
index 178b8c8316991..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/LanguageSpecification.md
+++ /dev/null
@@ -1,430 +0,0 @@
# Language Specification

## Objective

Create a tool for the Chromium project that generates native Visual Studio,
Xcode, and SCons and/or make build files from a platform-independent input
format. Make the input format as reasonably general as possible without
spending extra time trying to "get everything right," except where not doing
so would likely lead Chromium to an eventual dead end. When in doubt, do what
Chromium needs and don't worry about generalizing the solution.
## Background

Numerous other projects, both inside and outside Google, have tried to
create a simple, universal cross-platform build representation that
still allows sufficient per-platform flexibility to accommodate
irreconcilable differences. The fact that no obvious working candidate
exists that meets Chromium's requirements indicates this is probably a
tougher problem than it appears at first glance. We aim to succeed by
creating a tool that is highly specific to Chromium's use case, not to
the general case of designing a completely platform-independent tool
for expressing any possible build.

The Mac has the most sophisticated model for application development
through an IDE. Consequently, we will use the Xcode model as the
starting point (the input file format must handle Chromium's use of
Xcode seamlessly) and adapt the design as necessary for the other
platforms.

## Overview

The overall design has the following characteristics:

 * Input configurations are specified in files with the suffix `.gyp`.
 * Each `.gyp` file specifies how to build the targets for the
   "component" defined by that file.
 * Each `.gyp` file generates one or more output files appropriate to
   the platform:
   * On Mac, a `.gyp` file generates one Xcode .xcodeproj bundle with
     information about how its targets are built.
   * On Windows, a `.gyp` file generates one Visual Studio .sln file,
     and one Visual Studio .vcproj file per target.
   * On Linux, a `.gyp` file generates one SCons file and/or one
     Makefile per target.
 * The `.gyp` file syntax is a Python data structure.
   * Use of arbitrary Python in `.gyp` files is forbidden.
   * Use of eval() with restricted globals and locals on `.gyp` file
     contents restricts the input to an evaluated expression, not
     arbitrary Python statements.
   * All input is expected to comply with JSON, with two exceptions:
     the # character (not inside strings) begins a comment that lasts
     until the end of the line, and trailing commas are permitted at
     the end of list and dict contents.
 * Input data is a dictionary of keywords and values.
 * "Invalid" keywords on any given data structure are not illegal;
   they're just ignored.
   * TODO: providing warnings on use of illegal keywords would help
     users catch typos. Figure out something nice to do with this.

## Detailed Design

Some up-front design principles/thoughts/TODOs:

 * Re-use keywords consistently.
 * Keywords that allow configuration of a platform-specific concept get
   prefixed appropriately:
   * Examples: `msvs_disabled_warnings`, `xcode_framework_dirs`
 * The input syntax is declarative and data-driven.
   * This gets enforced by using Python `eval()` (which only evaluates
     an expression) instead of `exec` (which executes arbitrary Python).
 * Semantic meanings of specific keyword values get deferred until all
   are read and the configuration is being evaluated to spit out the
   appropriate file(s).
 * Source file lists:
   * Are flat lists. Any imposed ordering within the `.gyp` file (e.g.
     alphabetically) is purely by convention and for developer
     convenience. When source files are linked or archived together,
     it is expected that this will occur in the order that files are
     listed in the `.gyp` file.
   * Source file lists contain no mechanism for by-hand folder
     configuration (`Filter` tags in Visual Studio, `Groups` in Xcode).
   * A folder hierarchy is created automatically that mirrors the file
     system.

### Example

```
{
  'target_defaults': {
    'defines': [
      'U_STATIC_IMPLEMENTATION',
      ['LOGFILE', 'foo.log',],
    ],
    'include_dirs': [
      '..',
    ],
  },
  'targets': [
    {
      'target_name': 'foo',
      'type': 'static_library',
      'sources': [
        'foo/src/foo.cc',
        'foo/src/foo_main.cc',
      ],
      'include_dirs': [
        'foo',
        'foo/include',
      ],
      'conditions': [
        [ 'OS=="mac"', { 'sources': [ 'platform_test_mac.mm' ] } ]
      ],
      'direct_dependent_settings': {
        'defines': [
          'UNIT_TEST',
        ],
        'include_dirs': [
          'foo',
          'foo/include',
        ],
      },
    },
  ],
}
```

### Structural Elements

### Top-level Dictionary

This is the single dictionary in the `.gyp` file that defines the
targets and how they're to be built.

The following keywords are meaningful within the top-level dictionary
definition:

| *Keyword* | *Description* |
|:------------------|:------------------|
| `conditions` | A conditional section that may contain other items that can be present in a top-level dictionary, on a conditional basis. See the "Conditionals" section below. |
| `includes` | A list of `.gypi` files to be included in the top-level dictionary. |
| `target_defaults` | A dictionary of default settings to be inherited by all targets in the top-level dictionary. See the "Settings keywords" section below. |
| `targets` | A list of target specifications. See "targets" below. |
| `variables` | A dictionary containing variable definitions. Each key in this dictionary is the name of a variable, and each value must be a string value that the variable is to be set to. |

### targets

A list of dictionaries defining targets to be built by the files
generated from this `.gyp` file.

Targets may contain `includes`, `conditions`, and `variables` sections
as permitted in the root dictionary. The following additional keywords
have structural meaning for target definitions:

| *Keyword* | *Description* |
|:---------------------------- |:------------------------------------------|
| `actions` | A list of special custom actions to perform on a specific input file, or files, to produce output files. See the "Actions" section below. |
| `all_dependent_settings` | A dictionary of settings to be applied to all dependents of the target, transitively. This includes direct dependents and the entire set of their dependents, and so on. This section may contain anything found within a `target` dictionary, except `configurations`, `target_name`, and `type` sections. Compare `direct_dependent_settings` and `link_settings`. |
| `configurations` | A list of dictionaries defining build configurations for the target. See the "Configurations" section below. |
| `copies` | A list of copy actions to perform. See the "Copies" section below. |
| `defines` | A list of preprocessor definitions to be passed on the command line to the C/C++ compiler (via `-D` or `/D` options). |
| `dependencies` | A list of targets on which this target depends. Targets in other `.gyp` files are specified as `../path/to/other.gyp:target_we_want`. |
| `direct_dependent_settings` | A dictionary of settings to be applied to other targets that depend on this target. These settings will only be applied to direct dependents. This section may contain anything found within a `target` dictionary, except `configurations`, `target_name`, and `type` sections. Compare with `all_dependent_settings` and `link_settings`. |
| `include_dirs` | A list of include directories to be passed on the command line to the C/C++ compiler (via `-I` or `/I` options). |
| `libraries` | A list of libraries (and/or frameworks) on which this target depends. |
| `link_settings` | A dictionary of settings to be applied to targets in which this target's contents are linked. `executable` and `shared_library` targets are linkable, so if they depend on a non-linkable target such as a `static_library`, they will adopt its `link_settings`. This section can contain anything found within a `target` dictionary, except `configurations`, `target_name`, and `type` sections. Compare `all_dependent_settings` and `direct_dependent_settings`. |
| `rules` | A special custom action to perform on a list of input files, to produce output files. See the "Rules" section below. |
| `sources` | A list of source files that are used to build this target or which should otherwise show up in the IDE for this target. In practice, we expect this list to be a union of all files necessary to build the target on all platforms, as well as other related files that aren't actually used for building, like README files. |
| `target_conditions` | Like `conditions`, but evaluation is delayed until the settings have been merged into an actual target. `target_conditions` may be used to place conditionals into a `target_defaults` section but have them still depend on specific target settings. |
| `target_name` | The name of a target being defined. |
| `type` | The type of target being defined. This field currently supports `executable`, `static_library`, `shared_library`, and `none`. The `none` target type is useful when producing output which is not linked. For example, converting raw translation files into resources or documentation into platform-specific help files. |
| `msvs_props` | A list of Visual Studio property sheets (`.vsprops` files) to be used to build the target. |
| `xcode_config_file` | An Xcode configuration (`.xcconfig` file) to be used to build the target. |
| `xcode_framework_dirs` | A list of framework directories to be used to build the target. |

You can affect the way that lists/dictionaries are merged together (for
example the way a list in target\_defaults interacts with the same named
list in the target itself) with a couple of special characters, which
are covered in [Merge
Basics](InputFormatReference#Merge_Basics_(=,_?,_+).md) and [List
Filters](InputFormatReference#List_Filters.md) on the
InputFormatReference page.

### configurations

`configurations` sections may be found within `targets` or
`target_defaults` sections. The `configurations` section is a list of
dictionaries specifying different build configurations. Because
configurations are implemented as lists, it is not currently possible to
override aspects of configurations that are imported into a target from
a `target_defaults` section.

NOTE: It is extremely important that each target within a project define
the same set of configurations. This continues to apply even when a
project spans multiple `.gyp` files.

A configuration dictionary may contain anything that can be found within
a target dictionary, except for `actions`, `all_dependent_settings`,
`configurations`, `dependencies`, `direct_dependent_settings`,
`libraries`, `link_settings`, `sources`, `target_name`, and `type`.

Configuration dictionaries may also contain these elements:

| *Keyword* | *Description* |
|:---------------------|:----------------------------------------------------|
| `configuration_name` | Required attribute. The name of the configuration. |
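As a minimal sketch (the target, configuration names, and settings are
illustrative, not prescribed), a target with two build configurations
might look like:

```
{
  'target_name': 'foo',
  'type': 'static_library',
  'sources': ['foo.cc'],
  'configurations': [
    {
      'configuration_name': 'Debug',   # Required attribute.
      'defines': ['DEBUG'],
    },
    {
      'configuration_name': 'Release',
      'defines': ['NDEBUG'],
    },
  ],
}
```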
### Conditionals

Conditionals may appear within any dictionary in a `.gyp` file. There
are two types of conditionals, which differ only in the timing of their
processing. `conditions` sections are processed shortly after loading
`.gyp` files, and `target_conditions` sections are processed after all
dependencies have been computed.

A conditional section is introduced with a `conditions` or
`target_conditions` dictionary keyword, and is composed of a list. Each
item in the list contains two or three elements. The first two elements,
which are always required, are the conditional expression to evaluate
and a dictionary containing settings to merge into the dictionary
containing the `conditions` or `target_conditions` section if the
expression evaluates to true. The third, optional, list element is a
dictionary to merge if the expression evaluates to false.

The `eval()` of the expression string takes place in the context of
global and/or local dictionaries that are constructed from the `.gyp`
input data, and overrides the `__builtin__` dictionary, to prevent the
execution of arbitrary Python code.

### Actions

An `actions` section provides a list of custom build actions to perform
on inputs, producing outputs. The `actions` section is organized as a
list. Each item in the list is a dictionary having the following form:

| *Keyword* | *Type* | *Description* |
|:--------------|:-------|:-----------------------------|
| `action_name` | string | The name of the action. Depending on how actions are implemented in the various generators, some may desire or require this property to be set to a unique name; others may ignore this property entirely. |
| `inputs` | list | A list of pathnames treated as inputs to the custom action. |
| `outputs` | list | A list of pathnames that the custom action produces. |
| `action` | list | A command line invocation used to produce `outputs` from `inputs`. For maximum cross-platform compatibility, invocations that require a Python interpreter should be specified with a first element `"python"`. This will enable generators for environments with specialized Python installations to be able to perform the action in an appropriate Python environment. |
| `message` | string | A message to be displayed to the user by the build system when the action is run. |

Build environments will compare `inputs` and `outputs`. If any `output`
is missing or is outdated relative to any `input`, the custom action
will be invoked. If all `outputs` are present and newer than all
`inputs`, the `outputs` are considered up-to-date and the action need
not be invoked.

Actions are implemented in Xcode as shell script build phases performed
prior to the compilation phase. In the Visual Studio generator, actions
appear as files with a `FileConfiguration` containing a custom
`VCCustomBuildTool` specifying the remainder of the inputs, the outputs,
and the action.

Combined with variable expansions, actions can be quite powerful.
Here is an example action that leverages variable expansions to
minimize duplication of pathnames:

```
  'sources': [
    # libraries.cc is generated by the js2c action below.
    '<(INTERMEDIATE_DIR)/libraries.cc',
  ],
  'actions': [
    {
      'variables': {
        'core_library_files': [
          'src/runtime.js',
          'src/v8natives.js',
          'src/macros.py',
        ],
      },
      'action_name': 'js2c',
      'inputs': [
        'tools/js2c.py',
        '<@(core_library_files)',
      ],
      'outputs': [
        '<(INTERMEDIATE_DIR)/libraries.cc',
        '<(INTERMEDIATE_DIR)/libraries-empty.cc',
      ],
      'action': ['python', 'tools/js2c.py', '<@(_outputs)', 'CORE', '<@(core_library_files)'],
    },
  ],
```

### Rules

A `rules` section provides custom build actions to perform on inputs,
producing outputs. The `rules` section is organized as a list. Each item
in the list is a dictionary having the following form:

| *Keyword* | *Type* | *Description* |
|:------------|:-------|:-----------------------------------------|
| `rule_name` | string | The name of the rule. Depending on how Rules are implemented in the various generators, some may desire or require this property to be set to a unique name; others may ignore this property entirely. |
| `extension` | string | All source files of the current target with the given extension will be treated successively as inputs to the rule. |
| `inputs` | list | Additional dependencies of the rule. |
| `outputs` | list | A list of pathnames that the rule produces. Has access to `RULE_INPUT_` variables (see below). |
| `action` | list | A command line invocation used to produce `outputs` from `inputs`. For maximum cross-platform compatibility, invocations that require a Python interpreter should be specified with a first element `"python"`. This will enable generators for environments with specialized Python installations to be able to perform the action in an appropriate Python environment. Has access to `RULE_INPUT_` variables (see below). |
| `message` | string | A message to be displayed to the user by the build system when the action is run. Has access to `RULE_INPUT_` variables (see below). |

There are several variables available to `outputs`, `action`, and `message`.

| *Variable* | *Description* |
|:---------------------|:------------------------------------|
| `RULE_INPUT_PATH` | The full path to the current input. |
| `RULE_INPUT_DIRNAME` | The directory of the current input. |
| `RULE_INPUT_NAME` | The file name of the current input. |
| `RULE_INPUT_ROOT` | The file name of the current input without extension. |
| `RULE_INPUT_EXT` | The file name extension of the current input. |

Rules can be thought of as Action generators. For each source selected
by `extension`, a special action is created. This action starts out with
the same `inputs`, `outputs`, `action`, and `message` as the rule. The
source is added to the action's `inputs`. The `outputs`, `action`, and
`message` are then handled the same way, but with the additional
variables available. If the `_outputs` variable is used in the `action`
or `message`, the `RULE_INPUT_` variables in `outputs` will be expanded
for the current source.
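As with actions, a rule is easiest to see in a sketch. The following
hypothetical target (the script and file names are assumptions, not part
of the specification) runs a converter over each `.in` source file:

```
{
  'target_name': 'generate_sources',
  'type': 'none',
  'sources': [
    'one.in',
    'two.in',
  ],
  'rules': [
    {
      'rule_name': 'convert',
      'extension': 'in',  # Applies to every .in file in 'sources'.
      'inputs': ['tools/convert.py'],
      'outputs': ['<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).cc'],
      'action': ['python', 'tools/convert.py', '<(RULE_INPUT_PATH)', '<@(_outputs)'],
      'message': 'Converting <(RULE_INPUT_NAME)',
    },
  ],
}
```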
### Copies

A `copies` section provides a simple means of copying files. The
`copies` section is organized as a list. Each item in the list is a
dictionary having the following form:

| *Keyword* | *Type* | *Description* |
|:--------------|:-------|:------------------------------|
| `destination` | string | The directory into which the `files` will be copied. |
| `files` | list | A list of files to be copied. |

The copies will be created in `destination` and have the same file name
as the file they are copied from. Even if the `files` are from multiple
directories they will all be copied into the `destination` directory.
Each `destination` file has an implicit build dependency on the file it
is copied from.

### Generated Xcode .pbxproj Files

We derive the following things in a `project.pbxproj` plist file within
an `.xcodeproj` bundle from the above input file formats as follows:

 * `Group hierarchy`: This is generated in a fixed format with contents
   derived from the input files. There is no provision for the user to
   specify additional groups or create a custom hierarchy.
   * `Configuration group`: This will be used with the
     `xcode_config_file` property above, if needed.
   * `Source group`: The union of the `sources` lists of all `targets`
     after applying appropriate `conditions`. The resulting list is
     sorted and put into a group hierarchy that matches the layout of
     the directory tree on disk, with a root of // (the top of the
     hierarchy).
   * `Frameworks group`: Taken directly from the `libraries` value for
     the target, after applying appropriate conditions.
   * `Projects group`: References to other `.xcodeproj` bundles that
     are needed by the `.xcodeproj` in which the group is contained.
   * `Products group`: Output from the various targets.
 * `Project References`:
 * `Project Configurations`:
   * Per-`.xcodeproj` file settings are not supported; all settings are
     applied at the target level.
 * `Targets`:
   * `Phases`: Copy sources, link with libraries/frameworks, ...
   * `Target Configurations`: Specified by input.
   * `Dependencies`: (local and remote)

### Generated Visual Studio .vcproj Files

We derive the following sections in a `.vcproj` file from the above
input file formats as follows:

 * `VisualStudioProject`:
   * `Platforms`:
   * `ToolFiles`:
   * `Configurations`:
     * `Configuration`:
   * `References`:
   * `Files`:
     * `Filter`:
     * `File`:
       * `FileConfiguration`:
         * `Tool`:
   * `Globals`:

### Generated Visual Studio .sln Files

We derive the following sections in a `.sln` file from the above input
file formats as follows:

 * `Projects`:
   * `WebsiteProperties`:
   * `ProjectDependencies`:
 * `Global`:
   * `SolutionConfigurationPlatforms`:
   * `ProjectConfigurationPlatforms`:
   * `SolutionProperties`:
   * `NestedProjects`:

## Caveats

Notes/questions from the very first prototype draft of the language.
Make sure these issues are addressed somewhere before deleting.

 * Libraries are easy; application abstraction is harder:
   * Applications involve resource compilation.
   * Applications involve many inputs.
   * Applications include the transitive closure of dependencies.
   * Specific use cases like cc\_library.
 * Mac compiles more than just .c/.cpp files (specifically, .m and .mm
   files).
 * Compiler options vary by:
   * File type
   * Target type
   * Individual file
 * Files may have custom settings per file per platform, but we probably
   don't care or need to support this in gyp.
 * Will all linked non-Chromium projects always use the same versions of
   every subsystem?
 * Variants are difficult. We've identified the following variants (some
   specific to Chromium, some typical of other projects in the same
   ballpark):
   * Target platform
   * V8 vs. JSC
   * Debug vs. Release
   * Toolchain (VS version, gcc version)
   * Host platform
   * L10N
   * Vendor
   * Purify / Valgrind
 * Will everyone upgrade VS at once?
 * What does a dylib dependency mean?
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/Testing.md b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/Testing.md
deleted file mode 100644
index baeb65f9441c7..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/Testing.md
+++ /dev/null
@@ -1,450 +0,0 @@
# Testing

NOTE: this document is outdated and needs to be updated; read it with
discretion.

## Introduction

This document describes the GYP testing infrastructure,
as provided by the `TestGyp.py` module.

These tests emphasize testing the _behavior_ of the
various GYP-generated build configurations:
Visual Studio, Xcode, SCons, Make, etc.
The goal is _not_ to test the output of the GYP generators by,
for example, comparing a GYP-generated Makefile
against a set of known "golden" Makefiles
(although the testing infrastructure could
be used to write those kinds of tests).
The idea is that the generated build configuration files
could be completely rewritten to add a feature or fix a bug
so long as they continue to support the functional behaviors
defined by the tests: building programs, shared libraries, etc.

## "Hello, world!" GYP test configuration

Here is an actual test configuration,
a simple build of a C program to print `"Hello, world!"`.

```
  $ ls -l test/hello
  total 20
  -rw-r--r-- 1 knight knight 312 Jul 30 20:22 gyptest-all.py
  -rw-r--r-- 1 knight knight 307 Jul 30 20:22 gyptest-default.py
  -rwxr-xr-x 1 knight knight 326 Jul 30 20:22 gyptest-target.py
  -rw-r--r-- 1 knight knight  98 Jul 30 20:22 hello.c
  -rw-r--r-- 1 knight knight 142 Jul 30 20:22 hello.gyp
  $
```

The `gyptest-*.py` files are three separate tests (test scripts)
that use this configuration. The first one, `gyptest-all.py`,
looks like this:

```
  #!/usr/bin/env python

  """
  Verifies simplest-possible build of a "Hello, world!" program
  using an explicit build target of 'all'.
  """

  import TestGyp

  test = TestGyp.TestGyp()

  test.run_gyp('hello.gyp')

  test.build_all('hello.gyp')

  test.run_built_executable('hello', stdout="Hello, world!\n")

  test.pass_test()
```

The test script above runs GYP against the specified input file
(`hello.gyp`) to generate a build configuration.
It then tries to build the `'all'` target
(or its equivalent) using the generated build configuration.
Last, it verifies that the build worked as expected
by running the executable program (`hello`)
that was presumably just built by the generated configuration,
and verifies that the output from the program
matches the expected `stdout` string (`"Hello, world!\n"`).

Which configuration is generated
(i.e., which build tool to test)
is specified when the test is run;
see the next section.

Surrounding the functional parts of the test
described above is the header,
which should be basically the same for each test
(modulo a different description in the docstring):

```
  #!/usr/bin/env python

  """
  Verifies simplest-possible build of a "Hello, world!" program
  using an explicit build target of 'all'.
  """

  import TestGyp

  test = TestGyp.TestGyp()
```

Similarly, the footer should be the same in every test:

```
  test.pass_test()
```

## Running tests

Test scripts are run by the `gyptest.py` script.
You can specify one or more explicit test scripts to run:

```
  $ python gyptest.py test/hello/gyptest-all.py
  PYTHONPATH=/home/knight/src/gyp/trunk/test/lib
  TESTGYP_FORMAT=scons
  /usr/bin/python test/hello/gyptest-all.py
  PASSED
  $
```

If you specify a directory, all test scripts
(scripts prefixed with `gyptest-`) underneath
the directory will be run:

```
  $ python gyptest.py test/hello
  PYTHONPATH=/home/knight/src/gyp/trunk/test/lib
  TESTGYP_FORMAT=scons
  /usr/bin/python test/hello/gyptest-all.py
  PASSED
  /usr/bin/python test/hello/gyptest-default.py
  PASSED
  /usr/bin/python test/hello/gyptest-target.py
  PASSED
  $
```

Or you can specify the `-a` option to run all scripts
in the tree:

```
  $ python gyptest.py -a
  PYTHONPATH=/home/knight/src/gyp/trunk/test/lib
  TESTGYP_FORMAT=scons
  /usr/bin/python test/configurations/gyptest-configurations.py
  PASSED
  /usr/bin/python test/defines/gyptest-defines.py
  PASSED
  .
  .
  .
  .
  /usr/bin/python test/variables/gyptest-commands.py
  PASSED
  $
```

If any tests fail during the run,
the `gyptest.py` script will report them in a
summary at the end.

## Debugging tests

Tests that create intermediate output do so under the gyp/out/testworkarea
directory. On test completion, intermediate output is cleaned up. To preserve
this output, set the environment variable PRESERVE=1. This can be handy to
inspect intermediate data when debugging a test.

You can also set PRESERVE\_PASS=1, PRESERVE\_FAIL=1 or PRESERVE\_NO\_RESULT=1
to preserve output for tests that fall into one of those categories.

## Specifying the format (build tool) to use

By default, the `gyptest.py` script will generate configurations for
the "primary" supported build tool for the platform you're on:
Visual Studio on Windows,
Xcode on Mac,
and (currently) SCons on Linux.
An alternate format (build tool) may be specified
using the `-f` option:

```
  $ python gyptest.py -f make test/hello/gyptest-all.py
  PYTHONPATH=/home/knight/src/gyp/trunk/test/lib
  TESTGYP_FORMAT=make
  /usr/bin/python test/hello/gyptest-all.py
  PASSED
  $
```

Multiple tools may be specified in a single pass as
a comma-separated list:

```
  $ python gyptest.py -f make,scons test/hello/gyptest-all.py
  PYTHONPATH=/home/knight/src/gyp/trunk/test/lib
  TESTGYP_FORMAT=make
  /usr/bin/python test/hello/gyptest-all.py
  PASSED
  TESTGYP_FORMAT=scons
  /usr/bin/python test/hello/gyptest-all.py
  PASSED
  $
```

## Test script functions and methods

The `TestGyp` class contains a lot of functionality
intended to make it easy to write tests.
This section describes the most useful pieces for GYP testing.

(The `TestGyp` class is actually a subclass of more generic
`TestCommon` and `TestCmd` base classes
that contain even more functionality than is
described here.)

### Initialization

The standard initialization formula is:

```
  import TestGyp
  test = TestGyp.TestGyp()
```

This copies the contents of the directory tree in which
the test script lives to a temporary directory for execution,
and arranges for the temporary directory's removal on exit.

By default, any comparisons of output or file contents
must be exact matches for the test to pass.
-
-If you need to use regular expressions for matches,
-a useful alternative initialization is:
-
-```
-  import TestGyp
-  test = TestGyp.TestGyp(match = TestGyp.match_re,
-                         diff = TestGyp.diff_re)
-```
-
-### Running GYP
-
-The canonical invocation is to simply specify the `.gyp` file to be executed:
-
-```
-  test.run_gyp('file.gyp')
-```
-
-Additional GYP arguments may be specified:
-
-```
-  test.run_gyp('file.gyp', arguments=['arg1', 'arg2', ...])
-```
-
-To execute GYP from a subdirectory (where, presumably, the specified file
-lives):
-
-```
-  test.run_gyp('file.gyp', chdir='subdir')
-```
-
-### Running the build tool
-
-Running the build tool requires passing in a `.gyp` file, which may be used to
-calculate the name of a specific build configuration file (such as an MSVS
-solution file corresponding to the `.gyp` file).
-
-There are several different `.build_*()` methods for invoking different types
-of builds.
-
-To invoke a build tool with an explicit `all` target (or equivalent):
-
-```
-  test.build_all('file.gyp')
-```
-
-To invoke a build tool with its default behavior (for example, executing `make`
-with no targets specified):
-
-```
-  test.build_default('file.gyp')
-```
-
-To invoke a build tool with an explicitly specified target:
-
-```
-  test.build_target('file.gyp', 'target')
-```
-
-### Running executables
-
-The most useful method executes a program built by the GYP-generated
-configuration:
-
-```
-  test.run_built_executable('program')
-```
-
-The `.run_built_executable()` method will account for the actual built target
-output location for the build tool being tested, as well as tack on any
-necessary executable file suffix for the platform (for example `.exe` on
-Windows).
-
-`stdout=` and `stderr=` keyword arguments specify expected standard output and
-error output, respectively.  Failure to match these (if specified) will cause
-the test to fail.  An explicit `None` value will suppress that verification:
-
-```
-  test.run_built_executable('program',
-                            stdout="expect this output\n",
-                            stderr=None)
-```
-
-Note that the default values are `stdout=None` and `stderr=''` (that is, no
-check for standard output, and error output must be empty).
-
-Arbitrary executables (not necessarily those built by GYP) can be executed with
-the lower-level `.run()` method:
-
-```
-  test.run('program')
-```
-
-The program must be in the local directory (that is, the temporary directory
-for test execution) or be an absolute path name.
-
-### Fetching command output
-
-```
-  test.stdout()
-```
-
-Returns the standard output from the most recently executed command (including
-`.run_gyp()`, `.build_*()`, or `.run*()` methods).
-
-```
-  test.stderr()
-```
-
-Returns the error output from the most recently executed command (including
-`.run_gyp()`, `.build_*()`, or `.run*()` methods).
-
-### Verifying existence or non-existence of files or directories
-
-```
-  test.must_exist('file_or_dir')
-```
-
-Verifies that the specified file or directory exists, and fails the test if it
-doesn't.
-
-```
-  test.must_not_exist('file_or_dir')
-```
-
-Verifies that the specified file or directory does not exist, and fails the
-test if it does.
-
-### Verifying file contents
-
-```
-  test.must_match('file', 'expected content\n')
-```
-
-Verifies that the content of the specified file matches the expected string,
-and fails the test if it does not.  By default, the match must be exact, but
-line-by-line regular expressions may be used if the `TestGyp` object was
-initialized with `TestGyp.match_re`.
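-
-As a concrete illustration, here is a minimal sketch of a test script
-that combines the regular-expression initialization with
-`.must_match()`.  (The `version.gyp` and `version.h` names and the
-pattern are hypothetical, purely for illustration.)
-
-```
-  import TestGyp
-
-  # Use regex matching so expected content is treated as
-  # line-by-line regular expressions.
-  test = TestGyp.TestGyp(match = TestGyp.match_re,
-                         diff = TestGyp.diff_re)
-
-  test.run_gyp('version.gyp')
-  test.build_all('version.gyp')
-
-  # Each line of the expected content is a regular expression.
-  test.must_match('version.h', '#define VERSION "[0-9.]+"\n')
-
-  test.pass_test()
-```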
-
-```
-  test.must_not_match('file', 'expected content\n')
-```
-
-Verifies that the content of the specified file does _not_ match the expected
-string, and fails the test if it does.  By default, the match must be exact,
-but line-by-line regular expressions may be used if the `TestGyp` object was
-initialized with `TestGyp.match_re`.
-
-```
-  test.must_contain('file', 'substring')
-```
-
-Verifies that the specified file contains the specified substring, and fails
-the test if it does not.
-
-```
-  test.must_not_contain('file', 'substring')
-```
-
-Verifies that the specified file does not contain the specified substring, and
-fails the test if it does.
-
-```
-  test.must_contain_all_lines(output, lines)
-```
-
-Verifies that the output string contains all of the "lines" in the specified
-list of lines.  In practice, the lines can be any substring and need not be
-`\n`-terminated lines per se.  If any line is missing, the test fails.
-
-```
-  test.must_not_contain_any_lines(output, lines)
-```
-
-Verifies that the output string does _not_ contain any of the "lines" in the
-specified list of lines.  In practice, the lines can be any substring and need
-not be `\n`-terminated lines per se.  If any line exists in the output string,
-the test fails.
-
-```
-  test.must_contain_any_line(output, lines)
-```
-
-Verifies that the output string contains at least one of the "lines" in the
-specified list of lines.  In practice, the lines can be any substring and need
-not be `\n`-terminated lines per se.  If none of the specified lines is
-present, the test fails.
-
-### Reading file contents
-
-```
-  test.read('file')
-```
-
-Returns the contents of the specified file.  Directory elements contained in a
-list will be joined:
-
-```
-  test.read(['subdir', 'file'])
-```
-
-### Test success or failure
-
-```
-  test.fail_test()
-```
-
-Fails the test, reporting `FAILED` on standard output and exiting with an exit
-status of `1`.
-
-```
-  test.pass_test()
-```
-
-Passes the test, reporting `PASSED` on standard output and exiting with an exit
-status of `0`.
-
-```
-  test.no_result()
-```
-
-Indicates the test had no valid result (i.e., the conditions could not be
-tested because of an external factor like a full file system).  Reports `NO
-RESULT` on standard output and exits with a status of `2`.
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/UserDocumentation.md b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/UserDocumentation.md
deleted file mode 100644
index 808f37a1a9361..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/docs/UserDocumentation.md
+++ /dev/null
@@ -1,965 +0,0 @@
-# User Documentation
-
-## Introduction
-
-This document is intended to provide a user-level guide to GYP.  The
-emphasis here is on how to use GYP to accomplish specific tasks, not on
-the complete technical language specification.  (For that, see the
-[LanguageSpecification](LanguageSpecification.md).)
-
-The document below starts with some overviews to provide context: an
-overview of the structure of a `.gyp` file itself, an overview of a
-typical executable-program target in a `.gyp` file, and an overview of a
-typical library target in a `.gyp` file.
-
-After the overviews, there are examples of `gyp` patterns for different
-common use cases.
-
-## Skeleton of a typical Chromium .gyp file
-
-Here is the skeleton of a typical `.gyp` file in the Chromium tree:
-
-```
-  {
-    'variables': {
-      .
-      .
-      .
- }, - 'includes': [ - '../build/common.gypi', - ], - 'target_defaults': { - . - . - . - }, - 'targets': [ - { - 'target_name': 'target_1', - . - . - . - }, - { - 'target_name': 'target_2', - . - . - . - }, - ], - 'conditions': [ - ['OS=="linux"', { - 'targets': [ - { - 'target_name': 'linux_target_3', - . - . - . - }, - ], - }], - ['OS=="win"', { - 'targets': [ - { - 'target_name': 'windows_target_4', - . - . - . - }, - ], - }, { # OS != "win" - 'targets': [ - { - 'target_name': 'non_windows_target_5', - . - . - . - }, - }], - ], - } -``` - -The entire file just contains a Python dictionary. (It's actually JSON, -with two small Pythonic deviations: comments are introduced with `#`, -and a `,` (comma)) is legal after the last element in a list or -dictionary.) - -The top-level pieces in the `.gyp` file are as follows: - -`'variables'`: Definitions of variables that can be interpolated and -used in various other parts of the file. - -`'includes'`: A list of of other files that will be included in this -file. By convention, included files have the suffix `.gypi` (gyp -include). - -`'target_defaults'`: Settings that will apply to _all_ of the targets -defined in this `.gyp` file. - -`'targets'`: The list of targets for which this `.gyp` file can -generate builds. Each target is a dictionary that contains settings -describing all the information necessary to build the target. - -`'conditions'`: A list of condition specifications that can modify the -contents of the items in the global dictionary defined by this `.gyp` -file based on the values of different variablwes. As implied by the -above example, the most common use of a `conditions` section in the -top-level dictionary is to add platform-specific targets to the -`targets` list. - -## Skeleton of a typical executable target in a .gyp file - -The most straightforward target is probably a simple executable program. -Here is an example `executable` target that demonstrates the features -that should cover most simple uses of gyp: - -``` - { - 'targets': [ - { - 'target_name': 'foo', - 'type': 'executable', - 'msvs_guid': '5ECEC9E5-8F23-47B6-93E0-C3B328B3BE65', - 'dependencies': [ - 'xyzzy', - '../bar/bar.gyp:bar', - ], - 'defines': [ - 'DEFINE_FOO', - 'DEFINE_A_VALUE=value', - ], - 'include_dirs': [ - '..', - ], - 'sources': [ - 'file1.cc', - 'file2.cc', - ], - 'conditions': [ - ['OS=="linux"', { - 'defines': [ - 'LINUX_DEFINE', - ], - 'include_dirs': [ - 'include/linux', - ], - }], - ['OS=="win"', { - 'defines': [ - 'WINDOWS_SPECIFIC_DEFINE', - ], - }, { # OS != "win", - 'defines': [ - 'NON_WINDOWS_DEFINE', - ], - }] - ], - }, - ], - } -``` - -The top-level settings in the target include: - -`'target_name'`: The name by which the target should be known, which -should be unique across all `.gyp` files. This name will be used as the -project name in the generated Visual Studio solution, as the target name -in the generated XCode configuration, and as the alias for building this -target from the command line of the generated SCons configuration. - -`'type'`: Set to `executable`, logically enough. - -`'msvs_guid'`: THIS IS ONLY TRANSITIONAL. This is a hard-coded GUID -values that will be used in the generated Visual Studio solution -file(s). This allows us to check in a `chrome.sln` file that -interoperates with gyp-generated project files. 
-Chromium is being generated by gyp, it will no longer be important that
-the GUIDs stay constant across invocations, and we'll likely get rid of
-these settings.
-
-`'dependencies'`: This lists other targets that this target depends on.
-The gyp-generated files will guarantee that the other targets are built
-before this target.  Any library targets in the `dependencies` list will
-be linked with this target.  The various settings (`defines`,
-`include_dirs`, etc.) listed in the `direct_dependent_settings` sections
-of the targets in this list will be applied to how _this_ target is
-built and linked.  See the more complete discussion of
-`direct_dependent_settings`, below.
-
-`'defines'`: The C preprocessor definitions that will be passed in on
-compilation command lines (using `-D` or `/D` options).
-
-`'include_dirs'`: The directories in which included header files live.
-These will be passed in on compilation command lines (using `-I` or `/I`
-options).
-
-`'sources'`: The source files for this target.
-
-`'conditions'`: A block of conditions that will be evaluated to update
-the different settings in the target dictionary.
-
-## Skeleton of a typical library target in a .gyp file
-
-The vast majority of targets are libraries.  Here is an example of a
-library target including the additional features that should cover most
-needs of libraries:
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'foo',
-        'type': '<(library)',
-        'msvs_guid': '5ECEC9E5-8F23-47B6-93E0-C3B328B3BE65',
-        'dependencies': [
-          'xyzzy',
-          '../bar/bar.gyp:bar',
-        ],
-        'defines': [
-          'DEFINE_FOO',
-          'DEFINE_A_VALUE=value',
-        ],
-        'include_dirs': [
-          '..',
-        ],
-        'direct_dependent_settings': {
-          'defines': [
-            'DEFINE_FOO',
-            'DEFINE_ADDITIONAL',
-          ],
-          'linkflags': [
-          ],
-        },
-        'export_dependent_settings': [
-          '../bar/bar.gyp:bar',
-        ],
-        'sources': [
-          'file1.cc',
-          'file2.cc',
-        ],
-        'conditions': [
-          ['OS=="linux"', {
-            'defines': [
-              'LINUX_DEFINE',
-            ],
-            'include_dirs': [
-              'include/linux',
-            ],
-          }],
-          ['OS=="win"', {
-            'defines': [
-              'WINDOWS_SPECIFIC_DEFINE',
-            ],
-          }, { # OS != "win"
-            'defines': [
-              'NON_WINDOWS_DEFINE',
-            ],
-          }],
-        ],
-      },
-    ],
-  }
-```
-
-The possible entries in a library target are largely the same as those
-that can be specified for an executable target (`defines`,
-`include_dirs`, etc.).  The differences include:
-
-`'type'`: This should almost always be set to '<(library)', which allows
-the user to define at gyp time whether libraries are to be built static
-or shared.  (On Linux, at least, linking with shared libraries saves
-significant link time.)  If it's necessary to pin down the type of
-library to be built, the `type` can be set explicitly to
-`static_library` or `shared_library`.
-
-`'direct_dependent_settings'`: This defines the settings that will be
-applied to other targets that _directly depend_ on this target--that is,
-that list _this_ target in their `'dependencies'` setting.  This is
-where you list the `defines`, `include_dirs`, `cflags` and `linkflags`
-that other targets that compile or link against this target need to
-build consistently.
-
-`'export_dependent_settings'`: This lists the targets whose
-`direct_dependent_settings` should be "passed on" to other targets that
-use (depend on) this target.  `TODO: expand on this description.`
-
-## Use Cases
-
-These use cases are intended to cover the most common actions performed
-by developers using GYP.
-
-Note that these examples are _not_ fully-functioning, self-contained
-examples (or else they'd be way too long).  Each example mostly contains
-just the keywords and settings relevant to the example, with perhaps a
-few extra keywords for context.  The intent is to try to show the
-specific pieces you need to pay attention to when doing something.
-[NOTE: if practical use shows that these examples are confusing without
-additional context, please add what's necessary to clarify things.]
-
-### Add new source files
-
-There are similar but slightly different patterns for adding a
-platform-independent source file vs. adding a source file that only
-builds on some of the supported platforms.
-
-#### Add a source file that builds on all platforms
-
-**Simplest possible case**: You are adding one or more files that build
-on all platforms.
-
-Just add the file(s) to the `sources` list of the appropriate dictionary
-in the `targets` list:
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'my_target',
-        'type': 'executable',
-        'sources': [
-          '../other/file_1.cc',
-          'new_file.cc',
-          'subdir/file3.cc',
-        ],
-      },
-    ],
-  },
-```
-
-File path names are relative to the directory in which the `.gyp` file lives.
-
-Keep the list sorted alphabetically (unless there's a really, really,
-_really_ good reason not to).
-
-#### Add a platform-specific source file
-
-##### Your platform-specific file is named `*_linux.{ext}`, `*_mac.{ext}`, `*_posix.{ext}` or `*_win.{ext}`
-
-The simplest way to add a platform-specific source file, assuming you're
-adding a completely new file and get to name it, is to use one of the
-following standard suffixes:
-
-  * `_linux` (e.g. `foo_linux.cc`)
-  * `_mac` (e.g. `foo_mac.cc`)
-  * `_posix` (e.g. `foo_posix.cc`)
-  * `_win` (e.g. `foo_win.cc`)
-
-Simply add the file to the `sources` list of the appropriate dict within
-the `targets` list, like you would any other source file.
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'foo',
-        'type': 'executable',
-        'sources': [
-          'independent.cc',
-          'specific_win.cc',
-        ],
-      },
-    ],
-  },
-```
-
-The Chromium `.gyp` files all have appropriate `conditions` entries to
-filter out the files that aren't appropriate for the current platform.
-In the above example, the `specific_win.cc` file will be removed
-automatically from the source list on non-Windows builds.
-
-##### Your platform-specific file does not use an already-defined pattern
-
-If your platform-specific file does not contain a
-`*_{linux,mac,posix,win}` substring (or some other pattern that's
-already in the `conditions` for the target), and you can't change the
-file name, there are two patterns that can be used.
-
-**Preferred**: Add the file to the `sources` list of the appropriate
-dictionary within the `targets` list.  Add an appropriate `conditions`
-section to exclude the specific file names:
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'foo',
-        'type': 'executable',
-        'sources': [
-          'linux_specific.cc',
-        ],
-        'conditions': [
-          ['OS != "linux"', {
-            'sources!': [
-              # Linux-only; exclude on other platforms.
-              'linux_specific.cc',
-            ],
-          }],
-        ],
-      },
-    ],
-  },
-```
-
-Despite the duplicate listing, the above is generally preferred because
-the `sources` list contains a useful global list of all sources on all
-platforms with consistent sorting on all platforms.
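-
-The same exclusion pattern scales to several unrelated platform-specific
-files by giving each platform its own `conditions` entry.  A minimal
-sketch (these file names are hypothetical):
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'foo',
-        'type': 'executable',
-        'sources': [
-          'independent.cc',
-          'linux_specific.cc',
-          'mac_specific.cc',
-        ],
-        'conditions': [
-          ['OS != "linux"', {
-            'sources!': ['linux_specific.cc'],
-          }],
-          ['OS != "mac"', {
-            'sources!': ['mac_specific.cc'],
-          }],
-        ],
-      },
-    ],
-  },
-```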
-
-**Non-preferred**: In some situations, however, it might make sense to
-list a platform-specific file only in a `conditions` section that
-specifically _includes_ it in the `sources` list:
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'foo',
-        'type': 'executable',
-        'sources': [],
-        'conditions': [
-          ['OS == "linux"', {
-            'sources': [
-              # Only add to sources list on Linux.
-              'linux_specific.cc',
-            ],
-          }],
-        ],
-      },
-    ],
-  },
-```
-
-The above two examples end up generating equivalent builds, with the
-small exception that the `sources` lists will list the files in
-different orders.  (The first example defines explicitly where
-`linux_specific.cc` appears in the list--perhaps in the
-middle--whereas the second example will always tack it on to the end of
-the list.)
-
-**Including or excluding files using patterns**: There are more
-complicated ways to construct a `sources` list based on patterns.  See
-`TODO` below.
-
-### Add a new executable
-
-An executable program is probably the most straightforward type of
-target, since all it typically needs is a list of source files, some
-compiler/linker settings (probably varied by platform), and some library
-targets on which it depends and which must be used in the final link.
-
-#### Add an executable that builds on all platforms
-
-Add a dictionary defining the new executable target to the `targets`
-list in the appropriate `.gyp` file.  Example:
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'new_unit_tests',
-        'type': 'executable',
-        'defines': [
-          'FOO',
-        ],
-        'include_dirs': [
-          '..',
-        ],
-        'dependencies': [
-          'other_target_in_this_file',
-          'other_gyp2:target_in_other_gyp2',
-        ],
-        'sources': [
-          'new_additional_source.cc',
-          'new_unit_tests.cc',
-        ],
-      },
-    ],
-  }
-```
-
-#### Add a platform-specific executable
-
-Add a dictionary defining the new executable target to the `targets`
-list within an appropriate `conditions` block for the platform.  The
-`conditions` block should be a sibling to the top-level `targets` list:
-
-```
-  {
-    'targets': [
-    ],
-    'conditions': [
-      ['OS=="win"', {
-        'targets': [
-          {
-            'target_name': 'new_unit_tests',
-            'type': 'executable',
-            'defines': [
-              'FOO',
-            ],
-            'include_dirs': [
-              '..',
-            ],
-            'dependencies': [
-              'other_target_in_this_file',
-              'other_gyp2:target_in_other_gyp2',
-            ],
-            'sources': [
-              'new_additional_source.cc',
-              'new_unit_tests.cc',
-            ],
-          },
-        ],
-      }],
-    ],
-  }
-```
-
-### Add settings to a target
-
-There are several different types of settings that can be defined for
-any given target.
-
-#### Add new preprocessor definitions (`-D` or `/D` flags)
-
-New preprocessor definitions are added by the `defines` setting:
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'existing_target',
-        'defines': [
-          'FOO',
-          'BAR=some_value',
-        ],
-      },
-    ],
-  },
-```
-
-These may be specified directly in a target's settings, as in the above
-example, or in a `conditions` section.
-
-#### Add a new include directory (`-I` or `/I` flags)
-
-New include directories are added by the `include_dirs` setting:
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'existing_target',
-        'include_dirs': [
-          '..',
-          'include',
-        ],
-      },
-    ],
-  },
-```
-
-These may be specified directly in a target's settings, as in the above
-example, or in a `conditions` section.
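-
-For example, here is a sketch (with illustrative names) of both settings
-applied only on one platform by nesting them in a `conditions` section:
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'existing_target',
-        'conditions': [
-          ['OS=="linux"', {
-            'defines': [
-              'LINUX_ONLY_DEFINE',
-            ],
-            'include_dirs': [
-              'include/linux',
-            ],
-          }],
-        ],
-      },
-    ],
-  },
-```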
-
-#### Add new compiler flags
-
-Specific compiler flags can be added with the `cflags` setting:
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'existing_target',
-        'conditions': [
-          ['OS=="win"', {
-            'cflags': [
-              '/WX',
-            ],
-          }, { # OS != "win"
-            'cflags': [
-              '-Werror',
-            ],
-          }],
-        ],
-      },
-    ],
-  },
-```
-
-Because these flags will be specific to the actual compiler involved,
-they will almost always be only set within a `conditions` section.
-
-#### Add new linker flags
-
-Setting linker flags is OS-specific.  On Linux and most non-Mac POSIX
-systems, they can be added with the `ldflags` setting:
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'existing_target',
-        'conditions': [
-          ['OS=="linux"', {
-            'ldflags': [
-              '-pthread',
-            ],
-          }],
-        ],
-      },
-    ],
-  },
-```
-
-Because these flags will be specific to the actual linker involved,
-they will almost always be only set within a `conditions` section.
-
-On OS X, linker settings are set via `xcode_settings`; on Windows, via
-`msvs_settings`.
-
-#### Exclude settings on a platform
-
-Any given settings keyword (`defines`, `include_dirs`, etc.) has a
-corresponding form with a trailing `!` (exclamation point) to remove
-values from a setting.  One useful example of this is to remove the
-Linux `-Werror` flag from the global settings defined in
-`build/common.gypi`:
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'third_party_target',
-        'conditions': [
-          ['OS=="linux"', {
-            'cflags!': [
-              '-Werror',
-            ],
-          }],
-        ],
-      },
-    ],
-  },
-```
-
-### Cross-compiling
-
-GYP has some (relatively limited) support for cross-compiling.
-
-If the variable `GYP_CROSSCOMPILE` or one of the toolchain-related
-variables (like `CC_host` or `CC_target`) is set, GYP will think that
-you wish to do a cross-compile.
-
-When cross-compiling, each target can be part of a "host" build, a
-"target" build, or both.  By default, the target is assumed to be (only)
-part of the "target" build.  The 'toolsets' property can be set on a
-target to change the default.
-
-A target's dependencies are assumed to match the build type (so, if A
-depends on B, by default that means that a target build of A depends on
-a target build of B).  You can explicitly depend on targets across
-toolchains by specifying "#host" or "#target" in the dependencies list.
-If GYP is not doing a cross-compile, the "#host" and "#target" will be
-stripped as needed, so nothing breaks.
-
-### Add a new library
-
-TODO: write intro
-
-#### Add a library that builds on all platforms
-
-Add a dictionary defining the new library target to the `targets`
-list in the appropriate `.gyp` file.  Example:
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'new_library',
-        'type': '<(library)',
-        'defines': [
-          'FOO',
-          'BAR=some_value',
-        ],
-        'include_dirs': [
-          '..',
-        ],
-        'dependencies': [
-          'other_target_in_this_file',
-          'other_gyp2:target_in_other_gyp2',
-        ],
-        'direct_dependent_settings': {
-          'include_dirs': '.',
-        },
-        'export_dependent_settings': [
-          'other_target_in_this_file',
-        ],
-        'sources': [
-          'new_additional_source.cc',
-          'new_library.cc',
-        ],
-      },
-    ],
-  }
-```
-
-The use of the `<(library)` variable above should be the default `type`
-setting for most library targets, as it allows the developer to choose,
-at `gyp` time, whether to build with static or shared libraries.
-(Building with shared libraries saves a _lot_ of link time on Linux.)
-
-It may be necessary to build a specific library as a fixed type.  If so,
-the `type` field can be hard-wired appropriately.  For a static library:
-
-```
-  'type': 'static_library',
-```
-
-For a shared library:
-
-```
-  'type': 'shared_library',
-```
-
-#### Add a platform-specific library
-
-Add a dictionary defining the new library target to the `targets` list
-within a `conditions` block that's a sibling to the top-level `targets`
-list:
-
-```
-  {
-    'targets': [
-    ],
-    'conditions': [
-      ['OS=="win"', {
-        'targets': [
-          {
-            'target_name': 'new_library',
-            'type': '<(library)',
-            'defines': [
-              'FOO',
-              'BAR=some_value',
-            ],
-            'include_dirs': [
-              '..',
-            ],
-            'dependencies': [
-              'other_target_in_this_file',
-              'other_gyp2:target_in_other_gyp2',
-            ],
-            'direct_dependent_settings': {
-              'include_dirs': '.',
-            },
-            'export_dependent_settings': [
-              'other_target_in_this_file',
-            ],
-            'sources': [
-              'new_additional_source.cc',
-              'new_library.cc',
-            ],
-          },
-        ],
-      }],
-    ],
-  }
-```
-
-### Dependencies between targets
-
-GYP provides useful primitives for establishing dependencies between
-targets, which need to be configured in the following situations.
-
-#### Linking with another library target
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'foo',
-        'dependencies': [
-          'libbar',
-        ],
-      },
-      {
-        'target_name': 'libbar',
-        'type': '<(library)',
-        'sources': [
-        ],
-      },
-    ],
-  }
-```
-
-Note that if the library target is in a different `.gyp` file, you have
-to specify the path to the other `.gyp` file, relative to this `.gyp`
-file's directory:
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'foo',
-        'dependencies': [
-          '../bar/bar.gyp:libbar',
-        ],
-      },
-    ],
-  }
-```
-
-Adding a library often involves updating multiple `.gyp` files, adding
-the target to the appropriate `.gyp` file (possibly a newly-added `.gyp`
-file), and updating targets in the other `.gyp` files that depend on
-(link with) the new library.
-
-#### Compiling with necessary flags for a library target dependency
-
-We need to build a library (often a third-party library) with specific
-preprocessor definitions or command-line flags, and need to ensure that
-targets that depend on the library build with the same settings.  This
-situation is handled by a `direct_dependent_settings` block:
-
-```
-  {
-    'targets': [
-      {
-        'target_name': 'foo',
-        'type': 'executable',
-        'dependencies': [
-          'libbar',
-        ],
-      },
-      {
-        'target_name': 'libbar',
-        'type': '<(library)',
-        'defines': [
-          'LOCAL_DEFINE_FOR_LIBBAR',
-          'DEFINE_TO_USE_LIBBAR',
-        ],
-        'include_dirs': [
-          '..',
-          'include/libbar',
-        ],
-        'direct_dependent_settings': {
-          'defines': [
-            'DEFINE_TO_USE_LIBBAR',
-          ],
-          'include_dirs': [
-            'include/libbar',
-          ],
-        },
-      },
-    ],
-  }
-```
-
-In the above example, the sources of the `foo` executable will be
-compiled with the options `-DDEFINE_TO_USE_LIBBAR -Iinclude/libbar`,
-because those settings are listed in the
-`direct_dependent_settings` block.
-
-Note that these settings will likely need to be replicated in the
-settings for the library target itself, so that the library will build
-with the same options.  This does not prevent the target from defining
-additional options for its "internal" use when compiling its own source
-files.  (In the above example, these are the `LOCAL_DEFINE_FOR_LIBBAR`
-define, and the `..` entry in the `include_dirs` list.)
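-
-Note that `direct_dependent_settings` apply only to _direct_ dependents.
-If an intermediate library needs to pass such settings along to its own
-dependents, it must name the dependency in `export_dependent_settings`,
-as the next example shows.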
- -#### When a library depends on an additional library at final link time - -``` - { - 'targets': [ - { - 'target_name': 'foo', - 'type': 'executable', - 'dependencies': [ - 'libbar', - ], - }, - { - 'target_name': 'libbar', - 'type': '<(library)', - 'dependencies': [ - 'libother' - ], - 'export_dependent_settings': [ - 'libother' - ], - }, - { - 'target_name': 'libother', - 'type': '<(library)', - 'direct_dependent_settings': { - 'defines': [ - 'DEFINE_FOR_LIBOTHER', - ], - 'include_dirs': [ - 'include/libother', - ], - }, - }, - ], - } -``` - -### Support for Mac OS X bundles - -gyp supports building bundles on OS X (.app, .framework, .bundle, etc). -Here is an example of this: - -``` - { - 'target_name': 'test_app', - 'product_name': 'Test App Gyp', - 'type': 'executable', - 'mac_bundle': 1, - 'sources': [ - 'main.m', - 'TestAppAppDelegate.h', - 'TestAppAppDelegate.m', - ], - 'mac_bundle_resources': [ - 'TestApp/English.lproj/InfoPlist.strings', - 'TestApp/English.lproj/MainMenu.xib', - ], - 'link_settings': { - 'libraries': [ - '$(SDKROOT)/System/Library/Frameworks/Cocoa.framework', - ], - }, - 'xcode_settings': { - 'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist', - }, - }, -``` - -The `mac_bundle` key tells gyp that this target should be a bundle. -`executable` targets get extension `.app` by default, `shared_library` -targets get `.framework` – but you can change the bundle extensions by -setting `product_extension` if you want. Files listed in -`mac_bundle_resources` will be copied to the bundle's `Resource` folder -of the bundle. You can also set -`process_outputs_as_mac_bundle_resources` to 1 in actions and rules to -let the output of actions and rules be added to that folder (similar to -`process_outputs_as_sources`). If `product_name` is not set, the bundle -will be named after `target_name`as usual. - -### Move files (refactoring) - -TODO - -### Custom build steps - -TODO - -#### Adding an explicit build step to generate specific files - -TODO - -#### Adding a rule to handle files with a new suffix - -TODO - -### Build flavors - -TODO diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/gyp b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/gyp deleted file mode 100755 index 1b8b9bdfb05f5..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/gyp +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# Copyright 2013 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -set -e -base=$(dirname "$0") -exec python "${base}/gyp_main.py" "$@" diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/gyp.bat b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/gyp.bat deleted file mode 100755 index c0b4ca24e5df0..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/gyp.bat +++ /dev/null @@ -1,5 +0,0 @@ -@rem Copyright (c) 2009 Google Inc. All rights reserved. -@rem Use of this source code is governed by a BSD-style license that can be -@rem found in the LICENSE file. - -@python "%~dp0gyp_main.py" %* diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/gyp_main.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/gyp_main.py deleted file mode 100755 index f23dcdf882d1b..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/gyp_main.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (c) 2009 Google Inc. All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import os -import sys -import subprocess - - -def IsCygwin(): - # Function copied from pylib/gyp/common.py - try: - out = subprocess.Popen( - "uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - stdout, _ = out.communicate() - return "CYGWIN" in stdout.decode("utf-8") - except Exception: - return False - - -def UnixifyPath(path): - try: - if not IsCygwin(): - return path - out = subprocess.Popen( - ["cygpath", "-u", path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - stdout, _ = out.communicate() - return stdout.decode("utf-8") - except Exception: - return path - - -# Make sure we're using the version of pylib in this repo, not one installed -# elsewhere on the system. Also convert to Unix style path on Cygwin systems, -# else the 'gyp' library will not be found -path = UnixifyPath(sys.argv[0]) -sys.path.insert(0, os.path.join(os.path.dirname(path), "pylib")) -import gyp # noqa: E402 - -if __name__ == "__main__": - sys.exit(gyp.script_main()) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py deleted file mode 100644 index bc0e93d07f890..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py +++ /dev/null @@ -1,365 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""New implementation of Visual Studio project generation.""" - -import hashlib -import os -import random -from operator import attrgetter - -import gyp.common - - -def cmp(x, y): - return (x > y) - (x < y) - - -# Initialize random number generator -random.seed() - -# GUIDs for project types -ENTRY_TYPE_GUIDS = { - "project": "{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}", - "folder": "{2150E333-8FDC-42A3-9474-1A3956D46DE8}", -} - -# ------------------------------------------------------------------------------ -# Helper functions - - -def MakeGuid(name, seed="msvs_new"): - """Returns a GUID for the specified target name. - - Args: - name: Target name. - seed: Seed for MD5 hash. - Returns: - A GUID-line string calculated from the name and seed. - - This generates something which looks like a GUID, but depends only on the - name and seed. This means the same name/seed will always generate the same - GUID, so that projects and solutions which refer to each other can explicitly - determine the GUID to refer to explicitly. It also means that the GUID will - not change when the project for a target is rebuilt. - """ - # Calculate a MD5 signature for the seed and name. - d = hashlib.md5((str(seed) + str(name)).encode("utf-8")).hexdigest().upper() - # Convert most of the signature to GUID form (discard the rest) - guid = ( - "{" - + d[:8] - + "-" - + d[8:12] - + "-" - + d[12:16] - + "-" - + d[16:20] - + "-" - + d[20:32] - + "}" - ) - return guid - - -# ------------------------------------------------------------------------------ - - -class MSVSSolutionEntry: - def __cmp__(self, other): - # Sort by name then guid (so things are in order on vs2008). - return cmp((self.name, self.get_guid()), (other.name, other.get_guid())) - - -class MSVSFolder(MSVSSolutionEntry): - """Folder in a Visual Studio project or solution.""" - - def __init__(self, path, name=None, entries=None, guid=None, items=None): - """Initializes the folder. 
- - Args: - path: Full path to the folder. - name: Name of the folder. - entries: List of folder entries to nest inside this folder. May contain - Folder or Project objects. May be None, if the folder is empty. - guid: GUID to use for folder, if not None. - items: List of solution items to include in the folder project. May be - None, if the folder does not directly contain items. - """ - if name: - self.name = name - else: - # Use last layer. - self.name = os.path.basename(path) - - self.path = path - self.guid = guid - - # Copy passed lists (or set to empty lists) - self.entries = sorted(entries or [], key=attrgetter("path")) - self.items = list(items or []) - - self.entry_type_guid = ENTRY_TYPE_GUIDS["folder"] - - def get_guid(self): - if self.guid is None: - # Use consistent guids for folders (so things don't regenerate). - self.guid = MakeGuid(self.path, seed="msvs_folder") - return self.guid - - -# ------------------------------------------------------------------------------ - - -class MSVSProject(MSVSSolutionEntry): - """Visual Studio project.""" - - def __init__( - self, - path, - name=None, - dependencies=None, - guid=None, - spec=None, - build_file=None, - config_platform_overrides=None, - fixpath_prefix=None, - ): - """Initializes the project. - - Args: - path: Absolute path to the project file. - name: Name of project. If None, the name will be the same as the base - name of the project file. - dependencies: List of other Project objects this project is dependent - upon, if not None. - guid: GUID to use for project, if not None. - spec: Dictionary specifying how to build this project. - build_file: Filename of the .gyp file that the vcproj file comes from. - config_platform_overrides: optional dict of configuration platforms to - used in place of the default for this target. - fixpath_prefix: the path used to adjust the behavior of _fixpath - """ - self.path = path - self.guid = guid - self.spec = spec - self.build_file = build_file - # Use project filename if name not specified - self.name = name or os.path.splitext(os.path.basename(path))[0] - - # Copy passed lists (or set to empty lists) - self.dependencies = list(dependencies or []) - - self.entry_type_guid = ENTRY_TYPE_GUIDS["project"] - - if config_platform_overrides: - self.config_platform_overrides = config_platform_overrides - else: - self.config_platform_overrides = {} - self.fixpath_prefix = fixpath_prefix - self.msbuild_toolset = None - - def set_dependencies(self, dependencies): - self.dependencies = list(dependencies or []) - - def get_guid(self): - if self.guid is None: - # Set GUID from path - # TODO(rspangler): This is fragile. - # 1. We can't just use the project filename sans path, since there could - # be multiple projects with the same base name (for example, - # foo/unittest.vcproj and bar/unittest.vcproj). - # 2. The path needs to be relative to $SOURCE_ROOT, so that the project - # GUID is the same whether it's included from base/base.sln or - # foo/bar/baz/baz.sln. - # 3. The GUID needs to be the same each time this builder is invoked, so - # that we don't need to rebuild the solution when the project changes. - # 4. We should be able to handle pre-built project files by reading the - # GUID from the files. 
- self.guid = MakeGuid(self.name) - return self.guid - - def set_msbuild_toolset(self, msbuild_toolset): - self.msbuild_toolset = msbuild_toolset - - -# ------------------------------------------------------------------------------ - - -class MSVSSolution: - """Visual Studio solution.""" - - def __init__( - self, path, version, entries=None, variants=None, websiteProperties=True - ): - """Initializes the solution. - - Args: - path: Path to solution file. - version: Format version to emit. - entries: List of entries in solution. May contain Folder or Project - objects. May be None, if the folder is empty. - variants: List of build variant strings. If none, a default list will - be used. - websiteProperties: Flag to decide if the website properties section - is generated. - """ - self.path = path - self.websiteProperties = websiteProperties - self.version = version - - # Copy passed lists (or set to empty lists) - self.entries = list(entries or []) - - if variants: - # Copy passed list - self.variants = variants[:] - else: - # Use default - self.variants = ["Debug|Win32", "Release|Win32"] - # TODO(rspangler): Need to be able to handle a mapping of solution config - # to project config. Should we be able to handle variants being a dict, - # or add a separate variant_map variable? If it's a dict, we can't - # guarantee the order of variants since dict keys aren't ordered. - - # TODO(rspangler): Automatically write to disk for now; should delay until - # node-evaluation time. - self.Write() - - def Write(self, writer=gyp.common.WriteOnDiff): - """Writes the solution file to disk. - - Raises: - IndexError: An entry appears multiple times. - """ - # Walk the entry tree and collect all the folders and projects. - all_entries = set() - entries_to_check = self.entries[:] - while entries_to_check: - e = entries_to_check.pop(0) - - # If this entry has been visited, nothing to do. - if e in all_entries: - continue - - all_entries.add(e) - - # If this is a folder, check its entries too. - if isinstance(e, MSVSFolder): - entries_to_check += e.entries - - all_entries = sorted(all_entries, key=attrgetter("path")) - - # Open file and print header - f = writer(self.path) - f.write( - "Microsoft Visual Studio Solution File, " - "Format Version %s\r\n" % self.version.SolutionVersion() - ) - f.write("# %s\r\n" % self.version.Description()) - - # Project entries - sln_root = os.path.split(self.path)[0] - for e in all_entries: - relative_path = gyp.common.RelativePath(e.path, sln_root) - # msbuild does not accept an empty folder_name. - # use '.' in case relative_path is empty. - folder_name = relative_path.replace("/", "\\") or "." 
- f.write( - 'Project("%s") = "%s", "%s", "%s"\r\n' - % ( - e.entry_type_guid, # Entry type GUID - e.name, # Folder name - folder_name, # Folder name (again) - e.get_guid(), # Entry GUID - ) - ) - - # TODO(rspangler): Need a way to configure this stuff - if self.websiteProperties: - f.write( - "\tProjectSection(WebsiteProperties) = preProject\r\n" - '\t\tDebug.AspNetCompiler.Debug = "True"\r\n' - '\t\tRelease.AspNetCompiler.Debug = "False"\r\n' - "\tEndProjectSection\r\n" - ) - - if isinstance(e, MSVSFolder) and e.items: - f.write("\tProjectSection(SolutionItems) = preProject\r\n") - for i in e.items: - f.write(f"\t\t{i} = {i}\r\n") - f.write("\tEndProjectSection\r\n") - - if isinstance(e, MSVSProject) and e.dependencies: - f.write("\tProjectSection(ProjectDependencies) = postProject\r\n") - for d in e.dependencies: - f.write(f"\t\t{d.get_guid()} = {d.get_guid()}\r\n") - f.write("\tEndProjectSection\r\n") - - f.write("EndProject\r\n") - - # Global section - f.write("Global\r\n") - - # Configurations (variants) - f.write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n") - for v in self.variants: - f.write(f"\t\t{v} = {v}\r\n") - f.write("\tEndGlobalSection\r\n") - - # Sort config guids for easier diffing of solution changes. - config_guids = [] - config_guids_overrides = {} - for e in all_entries: - if isinstance(e, MSVSProject): - config_guids.append(e.get_guid()) - config_guids_overrides[e.get_guid()] = e.config_platform_overrides - config_guids.sort() - - f.write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n") - for g in config_guids: - for v in self.variants: - nv = config_guids_overrides[g].get(v, v) - # Pick which project configuration to build for this solution - # configuration. - f.write( - "\t\t%s.%s.ActiveCfg = %s\r\n" - % ( - g, # Project GUID - v, # Solution build configuration - nv, # Project build config for that solution config - ) - ) - - # Enable project in this solution configuration. - f.write( - "\t\t%s.%s.Build.0 = %s\r\n" - % ( - g, # Project GUID - v, # Solution build configuration - nv, # Project build config for that solution config - ) - ) - f.write("\tEndGlobalSection\r\n") - - # TODO(rspangler): Should be able to configure this stuff too (though I've - # never seen this be any different) - f.write("\tGlobalSection(SolutionProperties) = preSolution\r\n") - f.write("\t\tHideSolutionNode = FALSE\r\n") - f.write("\tEndGlobalSection\r\n") - - # Folder mappings - # Omit this section if there are no folders - if any(e.entries for e in all_entries if isinstance(e, MSVSFolder)): - f.write("\tGlobalSection(NestedProjects) = preSolution\r\n") - for e in all_entries: - if not isinstance(e, MSVSFolder): - continue # Does not apply to projects, only folders - for subentry in e.entries: - f.write(f"\t\t{subentry.get_guid()} = {e.get_guid()}\r\n") - f.write("\tEndGlobalSection\r\n") - - f.write("EndGlobal\r\n") - - f.close() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py deleted file mode 100644 index 629f3f61b4819..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py +++ /dev/null @@ -1,206 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -"""Visual Studio project reader/writer.""" - -import gyp.easy_xml as easy_xml - -# ------------------------------------------------------------------------------ - - -class Tool: - """Visual Studio tool.""" - - def __init__(self, name, attrs=None): - """Initializes the tool. - - Args: - name: Tool name. - attrs: Dict of tool attributes; may be None. - """ - self._attrs = attrs or {} - self._attrs["Name"] = name - - def _GetSpecification(self): - """Creates an element for the tool. - - Returns: - A new xml.dom.Element for the tool. - """ - return ["Tool", self._attrs] - - -class Filter: - """Visual Studio filter - that is, a virtual folder.""" - - def __init__(self, name, contents=None): - """Initializes the folder. - - Args: - name: Filter (folder) name. - contents: List of filenames and/or Filter objects contained. - """ - self.name = name - self.contents = list(contents or []) - - -# ------------------------------------------------------------------------------ - - -class Writer: - """Visual Studio XML project writer.""" - - def __init__(self, project_path, version, name, guid=None, platforms=None): - """Initializes the project. - - Args: - project_path: Path to the project file. - version: Format version to emit. - name: Name of the project. - guid: GUID to use for project, if not None. - platforms: Array of string, the supported platforms. If null, ['Win32'] - """ - self.project_path = project_path - self.version = version - self.name = name - self.guid = guid - - # Default to Win32 for platforms. - if not platforms: - platforms = ["Win32"] - - # Initialize the specifications of the various sections. - self.platform_section = ["Platforms"] - for platform in platforms: - self.platform_section.append(["Platform", {"Name": platform}]) - self.tool_files_section = ["ToolFiles"] - self.configurations_section = ["Configurations"] - self.files_section = ["Files"] - - # Keep a dict keyed on filename to speed up access. - self.files_dict = {} - - def AddToolFile(self, path): - """Adds a tool file to the project. - - Args: - path: Relative path from project to tool file. - """ - self.tool_files_section.append(["ToolFile", {"RelativePath": path}]) - - def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools): - """Returns the specification for a configuration. - - Args: - config_type: Type of configuration node. - config_name: Configuration name. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - Returns: - """ - # Handle defaults - if not attrs: - attrs = {} - if not tools: - tools = [] - - # Add configuration node and its attributes - node_attrs = attrs.copy() - node_attrs["Name"] = config_name - specification = [config_type, node_attrs] - - # Add tool nodes and their attributes - if tools: - for t in tools: - if isinstance(t, Tool): - specification.append(t._GetSpecification()) - else: - specification.append(Tool(t)._GetSpecification()) - return specification - - def AddConfig(self, name, attrs=None, tools=None): - """Adds a configuration to the project. - - Args: - name: Configuration name. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - """ - spec = self._GetSpecForConfiguration("Configuration", name, attrs, tools) - self.configurations_section.append(spec) - - def _AddFilesToNode(self, parent, files): - """Adds files and/or filters to the parent node. 
- - Args: - parent: Destination node - files: A list of Filter objects and/or relative paths to files. - - Will call itself recursively, if the files list contains Filter objects. - """ - for f in files: - if isinstance(f, Filter): - node = ["Filter", {"Name": f.name}] - self._AddFilesToNode(node, f.contents) - else: - node = ["File", {"RelativePath": f}] - self.files_dict[f] = node - parent.append(node) - - def AddFiles(self, files): - """Adds files to the project. - - Args: - files: A list of Filter objects and/or relative paths to files. - - This makes a copy of the file/filter tree at the time of this call. If you - later add files to a Filter object which was passed into a previous call - to AddFiles(), it will not be reflected in this project. - """ - self._AddFilesToNode(self.files_section, files) - # TODO(rspangler) This also doesn't handle adding files to an existing - # filter. That is, it doesn't merge the trees. - - def AddFileConfig(self, path, config, attrs=None, tools=None): - """Adds a configuration to a file. - - Args: - path: Relative path to the file. - config: Name of configuration to add. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - - Raises: - ValueError: Relative path does not match any file added via AddFiles(). - """ - # Find the file node with the right relative path - parent = self.files_dict.get(path) - if not parent: - raise ValueError('AddFileConfig: file "%s" not in project.' % path) - - # Add the config to the file node - spec = self._GetSpecForConfiguration("FileConfiguration", config, attrs, tools) - parent.append(spec) - - def WriteIfChanged(self): - """Writes the project file.""" - # First create XML content definition - content = [ - "VisualStudioProject", - { - "ProjectType": "Visual C++", - "Version": self.version.ProjectVersion(), - "Name": self.name, - "ProjectGUID": self.guid, - "RootNamespace": self.name, - "Keyword": "Win32Proj", - }, - self.platform_section, - self.tool_files_section, - self.configurations_section, - ["References"], # empty section - self.files_section, - ["Globals"], # empty section - ] - easy_xml.WriteXmlIfChanged(content, self.project_path, encoding="Windows-1252") diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py deleted file mode 100644 index ac87f572b240d..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py +++ /dev/null @@ -1,1272 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -r"""Code to validate and convert settings of the Microsoft build tools. - -This file contains code to validate and convert settings of the Microsoft -build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(), -and ValidateMSBuildSettings() are the entry points. - -This file was created by comparing the projects created by Visual Studio 2008 -and Visual Studio 2010 for all available settings through the user interface. -The MSBuild schemas were also considered. They are typically found in the -MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild -""" - -import re -import sys - -# Dictionaries of settings validators. The key is the tool name, the value is -# a dictionary mapping setting names to validation functions. 
-_msvs_validators = {} -_msbuild_validators = {} - - -# A dictionary of settings converters. The key is the tool name, the value is -# a dictionary mapping setting names to conversion functions. -_msvs_to_msbuild_converters = {} - - -# Tool name mapping from MSVS to MSBuild. -_msbuild_name_of_tool = {} - - -class _Tool: - """Represents a tool used by MSVS or MSBuild. - - Attributes: - msvs_name: The name of the tool in MSVS. - msbuild_name: The name of the tool in MSBuild. - """ - - def __init__(self, msvs_name, msbuild_name): - self.msvs_name = msvs_name - self.msbuild_name = msbuild_name - - -def _AddTool(tool): - """Adds a tool to the four dictionaries used to process settings. - - This only defines the tool. Each setting also needs to be added. - - Args: - tool: The _Tool object to be added. - """ - _msvs_validators[tool.msvs_name] = {} - _msbuild_validators[tool.msbuild_name] = {} - _msvs_to_msbuild_converters[tool.msvs_name] = {} - _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name - - -def _GetMSBuildToolSettings(msbuild_settings, tool): - """Returns an MSBuild tool dictionary. Creates it if needed.""" - return msbuild_settings.setdefault(tool.msbuild_name, {}) - - -class _Type: - """Type of settings (Base class).""" - - def ValidateMSVS(self, value): - """Verifies that the value is legal for MSVS. - - Args: - value: the value to check for this type. - - Raises: - ValueError if value is not valid for MSVS. - """ - - def ValidateMSBuild(self, value): - """Verifies that the value is legal for MSBuild. - - Args: - value: the value to check for this type. - - Raises: - ValueError if value is not valid for MSBuild. - """ - - def ConvertToMSBuild(self, value): - """Returns the MSBuild equivalent of the MSVS value given. - - Args: - value: the MSVS value to convert. - - Returns: - the MSBuild equivalent. - - Raises: - ValueError if value is not valid. - """ - return value - - -class _String(_Type): - """A setting that's just a string.""" - - def ValidateMSVS(self, value): - if not isinstance(value, str): - raise ValueError("expected string; got %r" % value) - - def ValidateMSBuild(self, value): - if not isinstance(value, str): - raise ValueError("expected string; got %r" % value) - - def ConvertToMSBuild(self, value): - # Convert the macros - return ConvertVCMacrosToMSBuild(value) - - -class _StringList(_Type): - """A settings that's a list of strings.""" - - def ValidateMSVS(self, value): - if not isinstance(value, (list, str)): - raise ValueError("expected string list; got %r" % value) - - def ValidateMSBuild(self, value): - if not isinstance(value, (list, str)): - raise ValueError("expected string list; got %r" % value) - - def ConvertToMSBuild(self, value): - # Convert the macros - if isinstance(value, list): - return [ConvertVCMacrosToMSBuild(i) for i in value] - else: - return ConvertVCMacrosToMSBuild(value) - - -class _Boolean(_Type): - """Boolean settings, can have the values 'false' or 'true'.""" - - def _Validate(self, value): - if value not in {"true", "false"}: - raise ValueError("expected bool; got %r" % value) - - def ValidateMSVS(self, value): - self._Validate(value) - - def ValidateMSBuild(self, value): - self._Validate(value) - - def ConvertToMSBuild(self, value): - self._Validate(value) - return value - - -class _Integer(_Type): - """Integer settings.""" - - def __init__(self, msbuild_base=10): - _Type.__init__(self) - self._msbuild_base = msbuild_base - - def ValidateMSVS(self, value): - # Try to convert, this will raise ValueError if invalid. 
- self.ConvertToMSBuild(value) - - def ValidateMSBuild(self, value): - # Try to convert, this will raise ValueError if invalid. - int(value, self._msbuild_base) - - def ConvertToMSBuild(self, value): - msbuild_format = (self._msbuild_base == 10) and "%d" or "0x%04x" - return msbuild_format % int(value) - - -class _Enumeration(_Type): - """Type of settings that is an enumeration. - - In MSVS, the values are indexes like '0', '1', and '2'. - MSBuild uses text labels that are more representative, like 'Win32'. - - Constructor args: - label_list: an array of MSBuild labels that correspond to the MSVS index. - In the rare cases where MSVS has skipped an index value, None is - used in the array to indicate the unused spot. - new: an array of labels that are new to MSBuild. - """ - - def __init__(self, label_list, new=None): - _Type.__init__(self) - self._label_list = label_list - self._msbuild_values = {value for value in label_list if value is not None} - if new is not None: - self._msbuild_values.update(new) - - def ValidateMSVS(self, value): - # Try to convert. It will raise an exception if not valid. - self.ConvertToMSBuild(value) - - def ValidateMSBuild(self, value): - if value not in self._msbuild_values: - raise ValueError("unrecognized enumerated value %s" % value) - - def ConvertToMSBuild(self, value): - index = int(value) - if index < 0 or index >= len(self._label_list): - raise ValueError( - "index value (%d) not in expected range [0, %d)" - % (index, len(self._label_list)) - ) - label = self._label_list[index] - if label is None: - raise ValueError("converted value for %s not specified." % value) - return label - - -# Instantiate the various generic types. -_boolean = _Boolean() -_integer = _Integer() -# For now, we don't do any special validation on these types: -_string = _String() -_file_name = _String() -_folder_name = _String() -_file_list = _StringList() -_folder_list = _StringList() -_string_list = _StringList() -# Some boolean settings went from numerical values to boolean. The -# mapping is 0: default, 1: false, 2: true. -_newly_boolean = _Enumeration(["", "false", "true"]) - - -def _Same(tool, name, setting_type): - """Defines a setting that has the same name in MSVS and MSBuild. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - name: the name of the setting. - setting_type: the type of this setting. - """ - _Renamed(tool, name, name, setting_type) - - -def _Renamed(tool, msvs_name, msbuild_name, setting_type): - """Defines a setting for which the name has changed. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - msvs_name: the name of the MSVS setting. - msbuild_name: the name of the MSBuild setting. - setting_type: the type of this setting. 
- """ - - def _Translate(value, msbuild_settings): - msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) - msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value) - - _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS - _msbuild_validators[tool.msbuild_name][msbuild_name] = setting_type.ValidateMSBuild - _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate - - -def _Moved(tool, settings_name, msbuild_tool_name, setting_type): - _MovedAndRenamed( - tool, settings_name, msbuild_tool_name, settings_name, setting_type - ) - - -def _MovedAndRenamed( - tool, msvs_settings_name, msbuild_tool_name, msbuild_settings_name, setting_type -): - """Defines a setting that may have moved to a new section. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - msvs_settings_name: the MSVS name of the setting. - msbuild_tool_name: the name of the MSBuild tool to place the setting under. - msbuild_settings_name: the MSBuild name of the setting. - setting_type: the type of this setting. - """ - - def _Translate(value, msbuild_settings): - tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {}) - tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value) - - _msvs_validators[tool.msvs_name][msvs_settings_name] = setting_type.ValidateMSVS - validator = setting_type.ValidateMSBuild - _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator - _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate - - -def _MSVSOnly(tool, name, setting_type): - """Defines a setting that is only found in MSVS. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - name: the name of the setting. - setting_type: the type of this setting. - """ - - def _Translate(unused_value, unused_msbuild_settings): - # Since this is for MSVS only settings, no translation will happen. - pass - - _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS - _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate - - -def _MSBuildOnly(tool, name, setting_type): - """Defines a setting that is only found in MSBuild. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - name: the name of the setting. - setting_type: the type of this setting. - """ - - def _Translate(value, msbuild_settings): - # Let msbuild-only properties get translated as-is from msvs_settings. - tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {}) - tool_settings[name] = value - - _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild - _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate - - -def _ConvertedToAdditionalOption(tool, msvs_name, flag): - """Defines a setting that's handled via a command line option in MSBuild. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. 
- msvs_name: the name of the MSVS setting that if 'true' becomes a flag - flag: the flag to insert at the end of the AdditionalOptions - """ - - def _Translate(value, msbuild_settings): - if value == "true": - tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) - if "AdditionalOptions" in tool_settings: - new_flags = "{} {}".format(tool_settings["AdditionalOptions"], flag) - else: - new_flags = flag - tool_settings["AdditionalOptions"] = new_flags - - _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS - _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate - - -def _CustomGeneratePreprocessedFile(tool, msvs_name): - def _Translate(value, msbuild_settings): - tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) - if value == "0": - tool_settings["PreprocessToFile"] = "false" - tool_settings["PreprocessSuppressLineNumbers"] = "false" - elif value == "1": # /P - tool_settings["PreprocessToFile"] = "true" - tool_settings["PreprocessSuppressLineNumbers"] = "false" - elif value == "2": # /EP /P - tool_settings["PreprocessToFile"] = "true" - tool_settings["PreprocessSuppressLineNumbers"] = "true" - else: - raise ValueError("value must be one of [0, 1, 2]; got %s" % value) - - # Create a bogus validator that looks for '0', '1', or '2' - msvs_validator = _Enumeration(["a", "b", "c"]).ValidateMSVS - _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator - msbuild_validator = _boolean.ValidateMSBuild - msbuild_tool_validators = _msbuild_validators[tool.msbuild_name] - msbuild_tool_validators["PreprocessToFile"] = msbuild_validator - msbuild_tool_validators["PreprocessSuppressLineNumbers"] = msbuild_validator - _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate - - -fix_vc_macro_slashes_regex_list = ("IntDir", "OutDir") -fix_vc_macro_slashes_regex = re.compile( - r"(\$\((?:%s)\))(?:[\\/]+)" % "|".join(fix_vc_macro_slashes_regex_list) -) - -# Regular expression to detect keys that were generated by exclusion lists -_EXCLUDED_SUFFIX_RE = re.compile("^(.*)_excluded$") - - -def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr): - """Verify that 'setting' is valid if it is generated from an exclusion list. - - If the setting appears to be generated from an exclusion list, the root name - is checked. - - Args: - setting: A string that is the setting name to validate - settings: A dictionary where the keys are valid settings - error_msg: The message to emit in the event of error - stderr: The stream receiving the error messages. - """ - # This may be unrecognized because it's an exclusion list. If the - # setting name has the _excluded suffix, then check the root name. - unrecognized = True - m = re.match(_EXCLUDED_SUFFIX_RE, setting) - if m: - root_setting = m.group(1) - unrecognized = root_setting not in settings - - if unrecognized: - # We don't know this setting. Give a warning. - print(error_msg, file=stderr) - - -def FixVCMacroSlashes(s): - """Replace macros which have excessive following slashes. - - These macros are known to have a built-in trailing slash. Furthermore, many - scripts hiccup on processing paths with extra slashes in the middle. - - This list is probably not exhaustive. Add as needed. - """ - if "$" in s: - s = fix_vc_macro_slashes_regex.sub(r"\1", s) - return s - - -def ConvertVCMacrosToMSBuild(s): - """Convert the MSVS macros found in the string to the MSBuild equivalent. - - This list is probably not exhaustive. Add as needed. 
- """ - if "$" in s: - replace_map = { - "$(ConfigurationName)": "$(Configuration)", - "$(InputDir)": "%(RelativeDir)", - "$(InputExt)": "%(Extension)", - "$(InputFileName)": "%(Filename)%(Extension)", - "$(InputName)": "%(Filename)", - "$(InputPath)": "%(Identity)", - "$(ParentName)": "$(ProjectFileName)", - "$(PlatformName)": "$(Platform)", - "$(SafeInputName)": "%(Filename)", - } - for old, new in replace_map.items(): - s = s.replace(old, new) - s = FixVCMacroSlashes(s) - return s - - -def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): - """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+). - - Args: - msvs_settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. - - Returns: - A dictionary of MSBuild settings. The key is either the MSBuild tool name - or the empty string (for the global settings). The values are themselves - dictionaries of settings and their values. - """ - msbuild_settings = {} - for msvs_tool_name, msvs_tool_settings in msvs_settings.items(): - if msvs_tool_name in _msvs_to_msbuild_converters: - msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name] - for msvs_setting, msvs_value in msvs_tool_settings.items(): - if msvs_setting in msvs_tool: - # Invoke the translation function. - try: - msvs_tool[msvs_setting](msvs_value, msbuild_settings) - except ValueError as e: - print( - "Warning: while converting %s/%s to MSBuild, " - "%s" % (msvs_tool_name, msvs_setting, e), - file=stderr, - ) - else: - _ValidateExclusionSetting( - msvs_setting, - msvs_tool, - ( - "Warning: unrecognized setting %s/%s " - "while converting to MSBuild." - % (msvs_tool_name, msvs_setting) - ), - stderr, - ) - else: - print( - "Warning: unrecognized tool %s while converting to " - "MSBuild." % msvs_tool_name, - file=stderr, - ) - return msbuild_settings - - -def ValidateMSVSSettings(settings, stderr=sys.stderr): - """Validates that the names of the settings are valid for MSVS. - - Args: - settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. - """ - _ValidateSettings(_msvs_validators, settings, stderr) - - -def ValidateMSBuildSettings(settings, stderr=sys.stderr): - """Validates that the names of the settings are valid for MSBuild. - - Args: - settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. - """ - _ValidateSettings(_msbuild_validators, settings, stderr) - - -def _ValidateSettings(validators, settings, stderr): - """Validates that the settings are valid for MSBuild or MSVS. - - We currently only validate the names of the settings, not their values. - - Args: - validators: A dictionary of tools and their validators. - settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. 
-    """
-    for tool_name in settings:
-        if tool_name in validators:
-            tool_validators = validators[tool_name]
-            for setting, value in settings[tool_name].items():
-                if setting in tool_validators:
-                    try:
-                        tool_validators[setting](value)
-                    except ValueError as e:
-                        print(
-                            f"Warning: for {tool_name}/{setting}, {e}",
-                            file=stderr,
-                        )
-                else:
-                    _ValidateExclusionSetting(
-                        setting,
-                        tool_validators,
-                        (f"Warning: unrecognized setting {tool_name}/{setting}"),
-                        stderr,
-                    )
-
-        else:
-            print("Warning: unrecognized tool %s" % (tool_name), file=stderr)
-
-
-# MSVS and MSBuild names of the tools.
-_compile = _Tool("VCCLCompilerTool", "ClCompile")
-_link = _Tool("VCLinkerTool", "Link")
-_midl = _Tool("VCMIDLTool", "Midl")
-_rc = _Tool("VCResourceCompilerTool", "ResourceCompile")
-_lib = _Tool("VCLibrarianTool", "Lib")
-_manifest = _Tool("VCManifestTool", "Manifest")
-_masm = _Tool("MASM", "MASM")
-_armasm = _Tool("ARMASM", "ARMASM")
-
-
-_AddTool(_compile)
-_AddTool(_link)
-_AddTool(_midl)
-_AddTool(_rc)
-_AddTool(_lib)
-_AddTool(_manifest)
-_AddTool(_masm)
-_AddTool(_armasm)
-# Add sections only found in the MSBuild settings.
-_msbuild_validators[""] = {}
-_msbuild_validators["ProjectReference"] = {}
-_msbuild_validators["ManifestResourceCompile"] = {}
-
-# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and
-# ClCompile in MSBuild.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for
-# the schema of the MSBuild ClCompile settings.
-
-# Options that have the same name in MSVS and MSBuild
-_Same(_compile, "AdditionalIncludeDirectories", _folder_list)  # /I
-_Same(_compile, "AdditionalOptions", _string_list)
-_Same(_compile, "AdditionalUsingDirectories", _folder_list)  # /AI
-_Same(_compile, "AssemblerListingLocation", _file_name)  # /Fa
-_Same(_compile, "BrowseInformationFile", _file_name)
-_Same(_compile, "BufferSecurityCheck", _boolean)  # /GS
-_Same(_compile, "DisableLanguageExtensions", _boolean)  # /Za
-_Same(_compile, "DisableSpecificWarnings", _string_list)  # /wd
-_Same(_compile, "EnableFiberSafeOptimizations", _boolean)  # /GT
-_Same(_compile, "EnablePREfast", _boolean)  # /analyze Visible='false'
-_Same(_compile, "ExpandAttributedSource", _boolean)  # /Fx
-_Same(_compile, "FloatingPointExceptions", _boolean)  # /fp:except
-_Same(_compile, "ForceConformanceInForLoopScope", _boolean)  # /Zc:forScope
-_Same(_compile, "ForcedIncludeFiles", _file_list)  # /FI
-_Same(_compile, "ForcedUsingFiles", _file_list)  # /FU
-_Same(_compile, "GenerateXMLDocumentationFiles", _boolean)  # /doc
-_Same(_compile, "IgnoreStandardIncludePath", _boolean)  # /X
-_Same(_compile, "MinimalRebuild", _boolean)  # /Gm
-_Same(_compile, "OmitDefaultLibName", _boolean)  # /Zl
-_Same(_compile, "OmitFramePointers", _boolean)  # /Oy
-_Same(_compile, "PreprocessorDefinitions", _string_list)  # /D
-_Same(_compile, "ProgramDataBaseFileName", _file_name)  # /Fd
-_Same(_compile, "RuntimeTypeInfo", _boolean)  # /GR
-_Same(_compile, "ShowIncludes", _boolean)  # /showIncludes
-_Same(_compile, "SmallerTypeCheck", _boolean)  # /RTCc
-_Same(_compile, "StringPooling", _boolean)  # /GF
-_Same(_compile, "SuppressStartupBanner", _boolean)  # /nologo
-_Same(_compile, "TreatWChar_tAsBuiltInType", _boolean)  # /Zc:wchar_t
-_Same(_compile, "UndefineAllPreprocessorDefinitions", _boolean)  # /u
-_Same(_compile, "UndefinePreprocessorDefinitions", _string_list)  # /U
-_Same(_compile, "UseFullPaths", _boolean)  # /FC
-_Same(_compile, "WholeProgramOptimization", _boolean)  # /GL
-_Same(_compile, "XMLDocumentationFileName", _file_name)
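# Illustrative sketch (editor's addition, not part of the original file):
# the registrations above are already enough to translate a small MSVS
# fragment, including the VC macro rewriting done by _String.ConvertToMSBuild.
assert ConvertToMSBuildSettings(
    {
        "VCCLCompilerTool": {
            "BufferSecurityCheck": "true",  # same-named _boolean setting
            "AssemblerListingLocation": "$(InputDir)",  # macro is rewritten
        }
    }
) == {
    "ClCompile": {
        "BufferSecurityCheck": "true",
        "AssemblerListingLocation": "%(RelativeDir)",
    }
}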
-_Same(_compile, "CompileAsWinRT", _boolean) # /ZW - -_Same( - _compile, - "AssemblerOutput", - _Enumeration( - [ - "NoListing", - "AssemblyCode", # /FA - "All", # /FAcs - "AssemblyAndMachineCode", # /FAc - "AssemblyAndSourceCode", - ] - ), -) # /FAs -_Same( - _compile, - "BasicRuntimeChecks", - _Enumeration( - [ - "Default", - "StackFrameRuntimeCheck", # /RTCs - "UninitializedLocalUsageCheck", # /RTCu - "EnableFastChecks", - ] - ), -) # /RTC1 -_Same( - _compile, "BrowseInformation", _Enumeration(["false", "true", "true"]) # /FR -) # /Fr -_Same( - _compile, - "CallingConvention", - _Enumeration(["Cdecl", "FastCall", "StdCall", "VectorCall"]), # /Gd # /Gr # /Gz -) # /Gv -_Same( - _compile, - "CompileAs", - _Enumeration(["Default", "CompileAsC", "CompileAsCpp"]), # /TC -) # /TP -_Same( - _compile, - "DebugInformationFormat", - _Enumeration( - [ - "", # Disabled - "OldStyle", # /Z7 - None, - "ProgramDatabase", # /Zi - "EditAndContinue", - ] - ), -) # /ZI -_Same( - _compile, - "EnableEnhancedInstructionSet", - _Enumeration( - [ - "NotSet", - "StreamingSIMDExtensions", # /arch:SSE - "StreamingSIMDExtensions2", # /arch:SSE2 - "AdvancedVectorExtensions", # /arch:AVX (vs2012+) - "NoExtensions", # /arch:IA32 (vs2012+) - # This one only exists in the new msbuild format. - "AdvancedVectorExtensions2", # /arch:AVX2 (vs2013r2+) - ] - ), -) -_Same( - _compile, - "ErrorReporting", - _Enumeration( - [ - "None", # /errorReport:none - "Prompt", # /errorReport:prompt - "Queue", - ], # /errorReport:queue - new=["Send"], - ), -) # /errorReport:send" -_Same( - _compile, - "ExceptionHandling", - _Enumeration(["false", "Sync", "Async"], new=["SyncCThrow"]), # /EHsc # /EHa -) # /EHs -_Same( - _compile, "FavorSizeOrSpeed", _Enumeration(["Neither", "Speed", "Size"]) # /Ot -) # /Os -_Same( - _compile, - "FloatingPointModel", - _Enumeration(["Precise", "Strict", "Fast"]), # /fp:precise # /fp:strict -) # /fp:fast -_Same( - _compile, - "InlineFunctionExpansion", - _Enumeration( - ["Default", "OnlyExplicitInline", "AnySuitable"], # /Ob1 # /Ob2 - new=["Disabled"], - ), -) # /Ob0 -_Same( - _compile, - "Optimization", - _Enumeration(["Disabled", "MinSpace", "MaxSpeed", "Full"]), # /Od # /O1 # /O2 -) # /Ox -_Same( - _compile, - "RuntimeLibrary", - _Enumeration( - [ - "MultiThreaded", # /MT - "MultiThreadedDebug", # /MTd - "MultiThreadedDLL", # /MD - "MultiThreadedDebugDLL", - ] - ), -) # /MDd -_Same( - _compile, - "StructMemberAlignment", - _Enumeration( - [ - "Default", - "1Byte", # /Zp1 - "2Bytes", # /Zp2 - "4Bytes", # /Zp4 - "8Bytes", # /Zp8 - "16Bytes", - ] - ), -) # /Zp16 -_Same( - _compile, - "WarningLevel", - _Enumeration( - [ - "TurnOffAllWarnings", # /W0 - "Level1", # /W1 - "Level2", # /W2 - "Level3", # /W3 - "Level4", - ], # /W4 - new=["EnableAllWarnings"], - ), -) # /Wall - -# Options found in MSVS that have been renamed in MSBuild. 
-_Renamed( - _compile, "EnableFunctionLevelLinking", "FunctionLevelLinking", _boolean -) # /Gy -_Renamed(_compile, "EnableIntrinsicFunctions", "IntrinsicFunctions", _boolean) # /Oi -_Renamed(_compile, "KeepComments", "PreprocessKeepComments", _boolean) # /C -_Renamed(_compile, "ObjectFile", "ObjectFileName", _file_name) # /Fo -_Renamed(_compile, "OpenMP", "OpenMPSupport", _boolean) # /openmp -_Renamed( - _compile, "PrecompiledHeaderThrough", "PrecompiledHeaderFile", _file_name -) # Used with /Yc and /Yu -_Renamed( - _compile, "PrecompiledHeaderFile", "PrecompiledHeaderOutputFile", _file_name -) # /Fp -_Renamed( - _compile, - "UsePrecompiledHeader", - "PrecompiledHeader", - _Enumeration( - ["NotUsing", "Create", "Use"] # VS recognized '' for this value too. # /Yc - ), -) # /Yu -_Renamed(_compile, "WarnAsError", "TreatWarningAsError", _boolean) # /WX - -_ConvertedToAdditionalOption(_compile, "DefaultCharIsUnsigned", "/J") - -# MSVS options not found in MSBuild. -_MSVSOnly(_compile, "Detect64BitPortabilityProblems", _boolean) -_MSVSOnly(_compile, "UseUnicodeResponseFiles", _boolean) - -# MSBuild options not found in MSVS. -_MSBuildOnly(_compile, "BuildingInIDE", _boolean) -_MSBuildOnly( - _compile, "CompileAsManaged", _Enumeration([], new=["false", "true"]) -) # /clr -_MSBuildOnly(_compile, "CreateHotpatchableImage", _boolean) # /hotpatch -_MSBuildOnly(_compile, "LanguageStandard", _string) -_MSBuildOnly(_compile, "LanguageStandard_C", _string) -_MSBuildOnly(_compile, "MultiProcessorCompilation", _boolean) # /MP -_MSBuildOnly(_compile, "PreprocessOutputPath", _string) # /Fi -_MSBuildOnly(_compile, "ProcessorNumber", _integer) # the number of processors -_MSBuildOnly(_compile, "TrackerLogDirectory", _folder_name) -_MSBuildOnly(_compile, "TreatSpecificWarningsAsErrors", _string_list) # /we -_MSBuildOnly(_compile, "UseUnicodeForAssemblerListing", _boolean) # /FAu - -# Defines a setting that needs very customized processing -_CustomGeneratePreprocessedFile(_compile, "GeneratePreprocessedFile") - - -# Directives for converting MSVS VCLinkerTool to MSBuild Link. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for -# the schema of the MSBuild Link settings. 
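# Illustrative sketch (editor's addition): GeneratePreprocessedFile above is
# the one compiler setting that fans out into two MSBuild properties.
assert ConvertToMSBuildSettings(
    {"VCCLCompilerTool": {"GeneratePreprocessedFile": "2"}}  # /EP /P
) == {
    "ClCompile": {
        "PreprocessToFile": "true",
        "PreprocessSuppressLineNumbers": "true",
    }
}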
- -# Options that have the same name in MSVS and MSBuild -_Same(_link, "AdditionalDependencies", _file_list) -_Same(_link, "AdditionalLibraryDirectories", _folder_list) # /LIBPATH -# /MANIFESTDEPENDENCY: -_Same(_link, "AdditionalManifestDependencies", _file_list) -_Same(_link, "AdditionalOptions", _string_list) -_Same(_link, "AddModuleNamesToAssembly", _file_list) # /ASSEMBLYMODULE -_Same(_link, "AllowIsolation", _boolean) # /ALLOWISOLATION -_Same(_link, "AssemblyLinkResource", _file_list) # /ASSEMBLYLINKRESOURCE -_Same(_link, "BaseAddress", _string) # /BASE -_Same(_link, "CLRUnmanagedCodeCheck", _boolean) # /CLRUNMANAGEDCODECHECK -_Same(_link, "DelayLoadDLLs", _file_list) # /DELAYLOAD -_Same(_link, "DelaySign", _boolean) # /DELAYSIGN -_Same(_link, "EmbedManagedResourceFile", _file_list) # /ASSEMBLYRESOURCE -_Same(_link, "EnableUAC", _boolean) # /MANIFESTUAC -_Same(_link, "EntryPointSymbol", _string) # /ENTRY -_Same(_link, "ForceSymbolReferences", _file_list) # /INCLUDE -_Same(_link, "FunctionOrder", _file_name) # /ORDER -_Same(_link, "GenerateDebugInformation", _boolean) # /DEBUG -_Same(_link, "GenerateMapFile", _boolean) # /MAP -_Same(_link, "HeapCommitSize", _string) -_Same(_link, "HeapReserveSize", _string) # /HEAP -_Same(_link, "IgnoreAllDefaultLibraries", _boolean) # /NODEFAULTLIB -_Same(_link, "IgnoreEmbeddedIDL", _boolean) # /IGNOREIDL -_Same(_link, "ImportLibrary", _file_name) # /IMPLIB -_Same(_link, "KeyContainer", _file_name) # /KEYCONTAINER -_Same(_link, "KeyFile", _file_name) # /KEYFILE -_Same(_link, "ManifestFile", _file_name) # /ManifestFile -_Same(_link, "MapExports", _boolean) # /MAPINFO:EXPORTS -_Same(_link, "MapFileName", _file_name) -_Same(_link, "MergedIDLBaseFileName", _file_name) # /IDLOUT -_Same(_link, "MergeSections", _string) # /MERGE -_Same(_link, "MidlCommandFile", _file_name) # /MIDL -_Same(_link, "ModuleDefinitionFile", _file_name) # /DEF -_Same(_link, "OutputFile", _file_name) # /OUT -_Same(_link, "PerUserRedirection", _boolean) -_Same(_link, "Profile", _boolean) # /PROFILE -_Same(_link, "ProfileGuidedDatabase", _file_name) # /PGD -_Same(_link, "ProgramDatabaseFile", _file_name) # /PDB -_Same(_link, "RegisterOutput", _boolean) -_Same(_link, "SetChecksum", _boolean) # /RELEASE -_Same(_link, "StackCommitSize", _string) -_Same(_link, "StackReserveSize", _string) # /STACK -_Same(_link, "StripPrivateSymbols", _file_name) # /PDBSTRIPPED -_Same(_link, "SupportUnloadOfDelayLoadedDLL", _boolean) # /DELAY:UNLOAD -_Same(_link, "SuppressStartupBanner", _boolean) # /NOLOGO -_Same(_link, "SwapRunFromCD", _boolean) # /SWAPRUN:CD -_Same(_link, "TurnOffAssemblyGeneration", _boolean) # /NOASSEMBLY -_Same(_link, "TypeLibraryFile", _file_name) # /TLBOUT -_Same(_link, "TypeLibraryResourceID", _integer) # /TLBID -_Same(_link, "UACUIAccess", _boolean) # /uiAccess='true' -_Same(_link, "Version", _string) # /VERSION - -_Same(_link, "EnableCOMDATFolding", _newly_boolean) # /OPT:ICF -_Same(_link, "FixedBaseAddress", _newly_boolean) # /FIXED -_Same(_link, "LargeAddressAware", _newly_boolean) # /LARGEADDRESSAWARE -_Same(_link, "OptimizeReferences", _newly_boolean) # /OPT:REF -_Same(_link, "RandomizedBaseAddress", _newly_boolean) # /DYNAMICBASE -_Same(_link, "TerminalServerAware", _newly_boolean) # /TSAWARE - -_subsystem_enumeration = _Enumeration( - [ - "NotSet", - "Console", # /SUBSYSTEM:CONSOLE - "Windows", # /SUBSYSTEM:WINDOWS - "Native", # /SUBSYSTEM:NATIVE - "EFI Application", # /SUBSYSTEM:EFI_APPLICATION - "EFI Boot Service Driver", # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER - "EFI ROM", 
# /SUBSYSTEM:EFI_ROM - "EFI Runtime", # /SUBSYSTEM:EFI_RUNTIME_DRIVER - "WindowsCE", - ], # /SUBSYSTEM:WINDOWSCE - new=["POSIX"], -) # /SUBSYSTEM:POSIX - -_target_machine_enumeration = _Enumeration( - [ - "NotSet", - "MachineX86", # /MACHINE:X86 - None, - "MachineARM", # /MACHINE:ARM - "MachineEBC", # /MACHINE:EBC - "MachineIA64", # /MACHINE:IA64 - None, - "MachineMIPS", # /MACHINE:MIPS - "MachineMIPS16", # /MACHINE:MIPS16 - "MachineMIPSFPU", # /MACHINE:MIPSFPU - "MachineMIPSFPU16", # /MACHINE:MIPSFPU16 - None, - None, - None, - "MachineSH4", # /MACHINE:SH4 - None, - "MachineTHUMB", # /MACHINE:THUMB - "MachineX64", - ] -) # /MACHINE:X64 - -_Same( - _link, "AssemblyDebug", _Enumeration(["", "true", "false"]) # /ASSEMBLYDEBUG -) # /ASSEMBLYDEBUG:DISABLE -_Same( - _link, - "CLRImageType", - _Enumeration( - [ - "Default", - "ForceIJWImage", # /CLRIMAGETYPE:IJW - "ForcePureILImage", # /Switch="CLRIMAGETYPE:PURE - "ForceSafeILImage", - ] - ), -) # /Switch="CLRIMAGETYPE:SAFE -_Same( - _link, - "CLRThreadAttribute", - _Enumeration( - [ - "DefaultThreadingAttribute", # /CLRTHREADATTRIBUTE:NONE - "MTAThreadingAttribute", # /CLRTHREADATTRIBUTE:MTA - "STAThreadingAttribute", - ] - ), -) # /CLRTHREADATTRIBUTE:STA -_Same( - _link, - "DataExecutionPrevention", - _Enumeration(["", "false", "true"]), # /NXCOMPAT:NO -) # /NXCOMPAT -_Same( - _link, - "Driver", - _Enumeration(["NotSet", "Driver", "UpOnly", "WDM"]), # /Driver # /DRIVER:UPONLY -) # /DRIVER:WDM -_Same( - _link, - "LinkTimeCodeGeneration", - _Enumeration( - [ - "Default", - "UseLinkTimeCodeGeneration", # /LTCG - "PGInstrument", # /LTCG:PGInstrument - "PGOptimization", # /LTCG:PGOptimize - "PGUpdate", - ] - ), -) # /LTCG:PGUpdate -_Same( - _link, - "ShowProgress", - _Enumeration( - ["NotSet", "LinkVerbose", "LinkVerboseLib"], # /VERBOSE # /VERBOSE:Lib - new=[ - "LinkVerboseICF", # /VERBOSE:ICF - "LinkVerboseREF", # /VERBOSE:REF - "LinkVerboseSAFESEH", # /VERBOSE:SAFESEH - "LinkVerboseCLR", - ], - ), -) # /VERBOSE:CLR -_Same(_link, "SubSystem", _subsystem_enumeration) -_Same(_link, "TargetMachine", _target_machine_enumeration) -_Same( - _link, - "UACExecutionLevel", - _Enumeration( - [ - "AsInvoker", # /level='asInvoker' - "HighestAvailable", # /level='highestAvailable' - "RequireAdministrator", - ] - ), -) # /level='requireAdministrator' -_Same(_link, "MinimumRequiredVersion", _string) -_Same(_link, "TreatLinkerWarningAsErrors", _boolean) # /WX - - -# Options found in MSVS that have been renamed in MSBuild. -_Renamed( - _link, - "ErrorReporting", - "LinkErrorReporting", - _Enumeration( - [ - "NoErrorReport", # /ERRORREPORT:NONE - "PromptImmediately", # /ERRORREPORT:PROMPT - "QueueForNextLogin", - ], # /ERRORREPORT:QUEUE - new=["SendErrorReport"], - ), -) # /ERRORREPORT:SEND -_Renamed( - _link, "IgnoreDefaultLibraryNames", "IgnoreSpecificDefaultLibraries", _file_list -) # /NODEFAULTLIB -_Renamed(_link, "ResourceOnlyDLL", "NoEntryPoint", _boolean) # /NOENTRY -_Renamed(_link, "SwapRunFromNet", "SwapRunFromNET", _boolean) # /SWAPRUN:NET - -_Moved(_link, "GenerateManifest", "", _boolean) -_Moved(_link, "IgnoreImportLibrary", "", _boolean) -_Moved(_link, "LinkIncremental", "", _newly_boolean) -_Moved(_link, "LinkLibraryDependencies", "ProjectReference", _boolean) -_Moved(_link, "UseLibraryDependencyInputs", "ProjectReference", _boolean) - -# MSVS options not found in MSBuild. -_MSVSOnly(_link, "OptimizeForWindows98", _newly_boolean) -_MSVSOnly(_link, "UseUnicodeResponseFiles", _boolean) - -# MSBuild options not found in MSVS. 
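# Illustrative sketch (editor's addition): _Moved settings land in a
# different MSBuild section; LinkIncremental goes to the global ("") section
# and its numeric index is mapped through _newly_boolean.
assert ConvertToMSBuildSettings(
    {"VCLinkerTool": {"LinkIncremental": "2"}}
) == {"": {"LinkIncremental": "true"}}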
-_MSBuildOnly(_link, "BuildingInIDE", _boolean) -_MSBuildOnly(_link, "ImageHasSafeExceptionHandlers", _boolean) # /SAFESEH -_MSBuildOnly(_link, "LinkDLL", _boolean) # /DLL Visible='false' -_MSBuildOnly(_link, "LinkStatus", _boolean) # /LTCG:STATUS -_MSBuildOnly(_link, "PreventDllBinding", _boolean) # /ALLOWBIND -_MSBuildOnly(_link, "SupportNobindOfDelayLoadedDLL", _boolean) # /DELAY:NOBIND -_MSBuildOnly(_link, "TrackerLogDirectory", _folder_name) -_MSBuildOnly(_link, "MSDOSStubFileName", _file_name) # /STUB Visible='false' -_MSBuildOnly(_link, "SectionAlignment", _integer) # /ALIGN -_MSBuildOnly(_link, "SpecifySectionAttributes", _string) # /SECTION -_MSBuildOnly( - _link, - "ForceFileOutput", - _Enumeration( - [], - new=[ - "Enabled", # /FORCE - # /FORCE:MULTIPLE - "MultiplyDefinedSymbolOnly", - "UndefinedSymbolOnly", - ], - ), -) # /FORCE:UNRESOLVED -_MSBuildOnly( - _link, - "CreateHotPatchableImage", - _Enumeration( - [], - new=[ - "Enabled", # /FUNCTIONPADMIN - "X86Image", # /FUNCTIONPADMIN:5 - "X64Image", # /FUNCTIONPADMIN:6 - "ItaniumImage", - ], - ), -) # /FUNCTIONPADMIN:16 -_MSBuildOnly( - _link, - "CLRSupportLastError", - _Enumeration( - [], - new=[ - "Enabled", # /CLRSupportLastError - "Disabled", # /CLRSupportLastError:NO - # /CLRSupportLastError:SYSTEMDLL - "SystemDlls", - ], - ), -) - - -# Directives for converting VCResourceCompilerTool to ResourceCompile. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for -# the schema of the MSBuild ResourceCompile settings. - -_Same(_rc, "AdditionalOptions", _string_list) -_Same(_rc, "AdditionalIncludeDirectories", _folder_list) # /I -_Same(_rc, "Culture", _Integer(msbuild_base=16)) -_Same(_rc, "IgnoreStandardIncludePath", _boolean) # /X -_Same(_rc, "PreprocessorDefinitions", _string_list) # /D -_Same(_rc, "ResourceOutputFileName", _string) # /fo -_Same(_rc, "ShowProgress", _boolean) # /v -# There is no UI in VisualStudio 2008 to set the following properties. -# However they are found in CL and other tools. Include them here for -# completeness, as they are very likely to have the same usage pattern. -_Same(_rc, "SuppressStartupBanner", _boolean) # /nologo -_Same(_rc, "UndefinePreprocessorDefinitions", _string_list) # /u - -# MSBuild options not found in MSVS. -_MSBuildOnly(_rc, "NullTerminateStrings", _boolean) # /n -_MSBuildOnly(_rc, "TrackerLogDirectory", _folder_name) - - -# Directives for converting VCMIDLTool to Midl. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for -# the schema of the MSBuild Midl settings. 
- -_Same(_midl, "AdditionalIncludeDirectories", _folder_list) # /I -_Same(_midl, "AdditionalOptions", _string_list) -_Same(_midl, "CPreprocessOptions", _string) # /cpp_opt -_Same(_midl, "ErrorCheckAllocations", _boolean) # /error allocation -_Same(_midl, "ErrorCheckBounds", _boolean) # /error bounds_check -_Same(_midl, "ErrorCheckEnumRange", _boolean) # /error enum -_Same(_midl, "ErrorCheckRefPointers", _boolean) # /error ref -_Same(_midl, "ErrorCheckStubData", _boolean) # /error stub_data -_Same(_midl, "GenerateStublessProxies", _boolean) # /Oicf -_Same(_midl, "GenerateTypeLibrary", _boolean) -_Same(_midl, "HeaderFileName", _file_name) # /h -_Same(_midl, "IgnoreStandardIncludePath", _boolean) # /no_def_idir -_Same(_midl, "InterfaceIdentifierFileName", _file_name) # /iid -_Same(_midl, "MkTypLibCompatible", _boolean) # /mktyplib203 -_Same(_midl, "OutputDirectory", _string) # /out -_Same(_midl, "PreprocessorDefinitions", _string_list) # /D -_Same(_midl, "ProxyFileName", _file_name) # /proxy -_Same(_midl, "RedirectOutputAndErrors", _file_name) # /o -_Same(_midl, "SuppressStartupBanner", _boolean) # /nologo -_Same(_midl, "TypeLibraryName", _file_name) # /tlb -_Same(_midl, "UndefinePreprocessorDefinitions", _string_list) # /U -_Same(_midl, "WarnAsError", _boolean) # /WX - -_Same( - _midl, - "DefaultCharType", - _Enumeration(["Unsigned", "Signed", "Ascii"]), # /char unsigned # /char signed -) # /char ascii7 -_Same( - _midl, - "TargetEnvironment", - _Enumeration( - [ - "NotSet", - "Win32", # /env win32 - "Itanium", # /env ia64 - "X64", # /env x64 - "ARM64", # /env arm64 - ] - ), -) -_Same( - _midl, - "EnableErrorChecks", - _Enumeration(["EnableCustom", "None", "All"]), # /error none -) # /error all -_Same( - _midl, - "StructMemberAlignment", - _Enumeration(["NotSet", "1", "2", "4", "8"]), # Zp1 # Zp2 # Zp4 -) # Zp8 -_Same( - _midl, - "WarningLevel", - _Enumeration(["0", "1", "2", "3", "4"]), # /W0 # /W1 # /W2 # /W3 -) # /W4 - -_Renamed(_midl, "DLLDataFileName", "DllDataFileName", _file_name) # /dlldata -_Renamed(_midl, "ValidateParameters", "ValidateAllParameters", _boolean) # /robust - -# MSBuild options not found in MSVS. -_MSBuildOnly(_midl, "ApplicationConfigurationMode", _boolean) # /app_config -_MSBuildOnly(_midl, "ClientStubFile", _file_name) # /cstub -_MSBuildOnly( - _midl, "GenerateClientFiles", _Enumeration([], new=["Stub", "None"]) # /client stub -) # /client none -_MSBuildOnly( - _midl, "GenerateServerFiles", _Enumeration([], new=["Stub", "None"]) # /client stub -) # /client none -_MSBuildOnly(_midl, "LocaleID", _integer) # /lcid DECIMAL -_MSBuildOnly(_midl, "ServerStubFile", _file_name) # /sstub -_MSBuildOnly(_midl, "SuppressCompilerWarnings", _boolean) # /no_warn -_MSBuildOnly(_midl, "TrackerLogDirectory", _folder_name) -_MSBuildOnly( - _midl, "TypeLibFormat", _Enumeration([], new=["NewFormat", "OldFormat"]) # /newtlb -) # /oldtlb - - -# Directives for converting VCLibrarianTool to Lib. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for -# the schema of the MSBuild Lib settings. 
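# Illustrative sketch (editor's addition): the Midl settings above combine a
# rename with an enumeration; DLLDataFileName becomes DllDataFileName and
# TargetEnvironment index "1" becomes the label "Win32".
assert ConvertToMSBuildSettings(
    {"VCMIDLTool": {"DLLDataFileName": "dlldata.c", "TargetEnvironment": "1"}}
) == {"Midl": {"DllDataFileName": "dlldata.c", "TargetEnvironment": "Win32"}}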
-
-_Same(_lib, "AdditionalDependencies", _file_list)
-_Same(_lib, "AdditionalLibraryDirectories", _folder_list)  # /LIBPATH
-_Same(_lib, "AdditionalOptions", _string_list)
-_Same(_lib, "ExportNamedFunctions", _string_list)  # /EXPORT
-_Same(_lib, "ForceSymbolReferences", _string)  # /INCLUDE
-_Same(_lib, "IgnoreAllDefaultLibraries", _boolean)  # /NODEFAULTLIB
-_Same(_lib, "IgnoreSpecificDefaultLibraries", _file_list)  # /NODEFAULTLIB
-_Same(_lib, "ModuleDefinitionFile", _file_name)  # /DEF
-_Same(_lib, "OutputFile", _file_name)  # /OUT
-_Same(_lib, "SuppressStartupBanner", _boolean)  # /NOLOGO
-_Same(_lib, "UseUnicodeResponseFiles", _boolean)
-_Same(_lib, "LinkTimeCodeGeneration", _boolean)  # /LTCG
-_Same(_lib, "TargetMachine", _target_machine_enumeration)
-
-# TODO(jeanluc) _link defines the same value that gets moved to
-# ProjectReference. We may want to validate that they are consistent.
-_Moved(_lib, "LinkLibraryDependencies", "ProjectReference", _boolean)
-
-_MSBuildOnly(_lib, "DisplayLibrary", _string)  # /LIST Visible='false'
-_MSBuildOnly(
-    _lib,
-    "ErrorReporting",
-    _Enumeration(
-        [],
-        new=[
-            "PromptImmediately",  # /ERRORREPORT:PROMPT
-            "QueueForNextLogin",  # /ERRORREPORT:QUEUE
-            "SendErrorReport",  # /ERRORREPORT:SEND
-            "NoErrorReport",
-        ],
-    ),
-)  # /ERRORREPORT:NONE
-_MSBuildOnly(_lib, "MinimumRequiredVersion", _string)
-_MSBuildOnly(_lib, "Name", _file_name)  # /NAME
-_MSBuildOnly(_lib, "RemoveObjects", _file_list)  # /REMOVE
-_MSBuildOnly(_lib, "SubSystem", _subsystem_enumeration)
-_MSBuildOnly(_lib, "TrackerLogDirectory", _folder_name)
-_MSBuildOnly(_lib, "TreatLibWarningAsErrors", _boolean)  # /WX
-_MSBuildOnly(_lib, "Verbose", _boolean)
-
-
-# Directives for converting VCManifestTool to Mt.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for
-# the schema of the MSBuild Manifest settings.
-
-# Options that have the same name in MSVS and MSBuild
-_Same(_manifest, "AdditionalManifestFiles", _file_list)  # /manifest
-_Same(_manifest, "AdditionalOptions", _string_list)
-_Same(_manifest, "AssemblyIdentity", _string)  # /identity:
-_Same(_manifest, "ComponentFileName", _file_name)  # /dll
-_Same(_manifest, "GenerateCatalogFiles", _boolean)  # /makecdfs
-_Same(_manifest, "InputResourceManifests", _string)  # /inputresource
-_Same(_manifest, "OutputManifestFile", _file_name)  # /out
-_Same(_manifest, "RegistrarScriptFile", _file_name)  # /rgs
-_Same(_manifest, "ReplacementsFile", _file_name)  # /replacements
-_Same(_manifest, "SuppressStartupBanner", _boolean)  # /nologo
-_Same(_manifest, "TypeLibraryFile", _file_name)  # /tlb:
-_Same(_manifest, "UpdateFileHashes", _boolean)  # /hashupdate
-_Same(_manifest, "UpdateFileHashesSearchPath", _file_name)
-_Same(_manifest, "VerboseOutput", _boolean)  # /verbose
-
-# Options that have moved location.
-_MovedAndRenamed(
-    _manifest,
-    "ManifestResourceFile",
-    "ManifestResourceCompile",
-    "ResourceOutputFileName",
-    _file_name,
-)
-_Moved(_manifest, "EmbedManifest", "", _boolean)
-
-# MSVS options not found in MSBuild.
-_MSVSOnly(_manifest, "DependencyInformationFile", _file_name)
-_MSVSOnly(_manifest, "UseFAT32Workaround", _boolean)
-_MSVSOnly(_manifest, "UseUnicodeResponseFiles", _boolean)
-
-# MSBuild options not found in MSVS.
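# Illustrative sketch (editor's addition): _MovedAndRenamed above relocates
# the manifest resource file into its own ManifestResourceCompile section,
# while EmbedManifest moves to the global ("") section.
assert ConvertToMSBuildSettings(
    {"VCManifestTool": {"ManifestResourceFile": "a.rc", "EmbedManifest": "true"}}
) == {
    "ManifestResourceCompile": {"ResourceOutputFileName": "a.rc"},
    "": {"EmbedManifest": "true"},
}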
-_MSBuildOnly(_manifest, "EnableDPIAwareness", _boolean) -_MSBuildOnly(_manifest, "GenerateCategoryTags", _boolean) # /category -_MSBuildOnly( - _manifest, "ManifestFromManagedAssembly", _file_name -) # /managedassemblyname -_MSBuildOnly(_manifest, "OutputResourceManifests", _string) # /outputresource -_MSBuildOnly(_manifest, "SuppressDependencyElement", _boolean) # /nodependency -_MSBuildOnly(_manifest, "TrackerLogDirectory", _folder_name) - - -# Directives for MASM. -# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the -# MSBuild MASM settings. - -# Options that have the same name in MSVS and MSBuild. -_Same(_masm, "UseSafeExceptionHandlers", _boolean) # /safeseh diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py deleted file mode 100755 index 6ca09687ad7f1..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py +++ /dev/null @@ -1,1547 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Unit tests for the MSVSSettings.py file.""" - -import unittest -import gyp.MSVSSettings as MSVSSettings - -from io import StringIO - - -class TestSequenceFunctions(unittest.TestCase): - def setUp(self): - self.stderr = StringIO() - - def _ExpectedWarnings(self, expected): - """Compares recorded lines to expected warnings.""" - self.stderr.seek(0) - actual = self.stderr.read().split("\n") - actual = [line for line in actual if line] - self.assertEqual(sorted(expected), sorted(actual)) - - def testValidateMSVSSettings_tool_names(self): - """Tests that only MSVS tool names are allowed.""" - MSVSSettings.ValidateMSVSSettings( - { - "VCCLCompilerTool": {}, - "VCLinkerTool": {}, - "VCMIDLTool": {}, - "foo": {}, - "VCResourceCompilerTool": {}, - "VCLibrarianTool": {}, - "VCManifestTool": {}, - "ClCompile": {}, - }, - self.stderr, - ) - self._ExpectedWarnings( - ["Warning: unrecognized tool foo", "Warning: unrecognized tool ClCompile"] - ) - - def testValidateMSVSSettings_settings(self): - """Tests that for invalid MSVS settings.""" - MSVSSettings.ValidateMSVSSettings( - { - "VCCLCompilerTool": { - "AdditionalIncludeDirectories": "folder1;folder2", - "AdditionalOptions": ["string1", "string2"], - "AdditionalUsingDirectories": "folder1;folder2", - "AssemblerListingLocation": "a_file_name", - "AssemblerOutput": "0", - "BasicRuntimeChecks": "5", - "BrowseInformation": "fdkslj", - "BrowseInformationFile": "a_file_name", - "BufferSecurityCheck": "true", - "CallingConvention": "-1", - "CompileAs": "1", - "DebugInformationFormat": "2", - "DefaultCharIsUnsigned": "true", - "Detect64BitPortabilityProblems": "true", - "DisableLanguageExtensions": "true", - "DisableSpecificWarnings": "string1;string2", - "EnableEnhancedInstructionSet": "1", - "EnableFiberSafeOptimizations": "true", - "EnableFunctionLevelLinking": "true", - "EnableIntrinsicFunctions": "true", - "EnablePREfast": "true", - "Enableprefast": "bogus", - "ErrorReporting": "1", - "ExceptionHandling": "1", - "ExpandAttributedSource": "true", - "FavorSizeOrSpeed": "1", - "FloatingPointExceptions": "true", - "FloatingPointModel": "1", - "ForceConformanceInForLoopScope": "true", - "ForcedIncludeFiles": "file1;file2", - "ForcedUsingFiles": "file1;file2", - "GeneratePreprocessedFile": "1", - 
"GenerateXMLDocumentationFiles": "true", - "IgnoreStandardIncludePath": "true", - "InlineFunctionExpansion": "1", - "KeepComments": "true", - "MinimalRebuild": "true", - "ObjectFile": "a_file_name", - "OmitDefaultLibName": "true", - "OmitFramePointers": "true", - "OpenMP": "true", - "Optimization": "1", - "PrecompiledHeaderFile": "a_file_name", - "PrecompiledHeaderThrough": "a_file_name", - "PreprocessorDefinitions": "string1;string2", - "ProgramDataBaseFileName": "a_file_name", - "RuntimeLibrary": "1", - "RuntimeTypeInfo": "true", - "ShowIncludes": "true", - "SmallerTypeCheck": "true", - "StringPooling": "true", - "StructMemberAlignment": "1", - "SuppressStartupBanner": "true", - "TreatWChar_tAsBuiltInType": "true", - "UndefineAllPreprocessorDefinitions": "true", - "UndefinePreprocessorDefinitions": "string1;string2", - "UseFullPaths": "true", - "UsePrecompiledHeader": "1", - "UseUnicodeResponseFiles": "true", - "WarnAsError": "true", - "WarningLevel": "1", - "WholeProgramOptimization": "true", - "XMLDocumentationFileName": "a_file_name", - "ZZXYZ": "bogus", - }, - "VCLinkerTool": { - "AdditionalDependencies": "file1;file2", - "AdditionalDependencies_excluded": "file3", - "AdditionalLibraryDirectories": "folder1;folder2", - "AdditionalManifestDependencies": "file1;file2", - "AdditionalOptions": "a string1", - "AddModuleNamesToAssembly": "file1;file2", - "AllowIsolation": "true", - "AssemblyDebug": "2", - "AssemblyLinkResource": "file1;file2", - "BaseAddress": "a string1", - "CLRImageType": "2", - "CLRThreadAttribute": "2", - "CLRUnmanagedCodeCheck": "true", - "DataExecutionPrevention": "2", - "DelayLoadDLLs": "file1;file2", - "DelaySign": "true", - "Driver": "2", - "EmbedManagedResourceFile": "file1;file2", - "EnableCOMDATFolding": "2", - "EnableUAC": "true", - "EntryPointSymbol": "a string1", - "ErrorReporting": "2", - "FixedBaseAddress": "2", - "ForceSymbolReferences": "file1;file2", - "FunctionOrder": "a_file_name", - "GenerateDebugInformation": "true", - "GenerateManifest": "true", - "GenerateMapFile": "true", - "HeapCommitSize": "a string1", - "HeapReserveSize": "a string1", - "IgnoreAllDefaultLibraries": "true", - "IgnoreDefaultLibraryNames": "file1;file2", - "IgnoreEmbeddedIDL": "true", - "IgnoreImportLibrary": "true", - "ImportLibrary": "a_file_name", - "KeyContainer": "a_file_name", - "KeyFile": "a_file_name", - "LargeAddressAware": "2", - "LinkIncremental": "2", - "LinkLibraryDependencies": "true", - "LinkTimeCodeGeneration": "2", - "ManifestFile": "a_file_name", - "MapExports": "true", - "MapFileName": "a_file_name", - "MergedIDLBaseFileName": "a_file_name", - "MergeSections": "a string1", - "MidlCommandFile": "a_file_name", - "ModuleDefinitionFile": "a_file_name", - "OptimizeForWindows98": "1", - "OptimizeReferences": "2", - "OutputFile": "a_file_name", - "PerUserRedirection": "true", - "Profile": "true", - "ProfileGuidedDatabase": "a_file_name", - "ProgramDatabaseFile": "a_file_name", - "RandomizedBaseAddress": "2", - "RegisterOutput": "true", - "ResourceOnlyDLL": "true", - "SetChecksum": "true", - "ShowProgress": "2", - "StackCommitSize": "a string1", - "StackReserveSize": "a string1", - "StripPrivateSymbols": "a_file_name", - "SubSystem": "2", - "SupportUnloadOfDelayLoadedDLL": "true", - "SuppressStartupBanner": "true", - "SwapRunFromCD": "true", - "SwapRunFromNet": "true", - "TargetMachine": "2", - "TerminalServerAware": "2", - "TurnOffAssemblyGeneration": "true", - "TypeLibraryFile": "a_file_name", - "TypeLibraryResourceID": "33", - "UACExecutionLevel": "2", - 
"UACUIAccess": "true", - "UseLibraryDependencyInputs": "true", - "UseUnicodeResponseFiles": "true", - "Version": "a string1", - }, - "VCMIDLTool": { - "AdditionalIncludeDirectories": "folder1;folder2", - "AdditionalOptions": "a string1", - "CPreprocessOptions": "a string1", - "DefaultCharType": "1", - "DLLDataFileName": "a_file_name", - "EnableErrorChecks": "1", - "ErrorCheckAllocations": "true", - "ErrorCheckBounds": "true", - "ErrorCheckEnumRange": "true", - "ErrorCheckRefPointers": "true", - "ErrorCheckStubData": "true", - "GenerateStublessProxies": "true", - "GenerateTypeLibrary": "true", - "HeaderFileName": "a_file_name", - "IgnoreStandardIncludePath": "true", - "InterfaceIdentifierFileName": "a_file_name", - "MkTypLibCompatible": "true", - "notgood": "bogus", - "OutputDirectory": "a string1", - "PreprocessorDefinitions": "string1;string2", - "ProxyFileName": "a_file_name", - "RedirectOutputAndErrors": "a_file_name", - "StructMemberAlignment": "1", - "SuppressStartupBanner": "true", - "TargetEnvironment": "1", - "TypeLibraryName": "a_file_name", - "UndefinePreprocessorDefinitions": "string1;string2", - "ValidateParameters": "true", - "WarnAsError": "true", - "WarningLevel": "1", - }, - "VCResourceCompilerTool": { - "AdditionalOptions": "a string1", - "AdditionalIncludeDirectories": "folder1;folder2", - "Culture": "1003", - "IgnoreStandardIncludePath": "true", - "notgood2": "bogus", - "PreprocessorDefinitions": "string1;string2", - "ResourceOutputFileName": "a string1", - "ShowProgress": "true", - "SuppressStartupBanner": "true", - "UndefinePreprocessorDefinitions": "string1;string2", - }, - "VCLibrarianTool": { - "AdditionalDependencies": "file1;file2", - "AdditionalLibraryDirectories": "folder1;folder2", - "AdditionalOptions": "a string1", - "ExportNamedFunctions": "string1;string2", - "ForceSymbolReferences": "a string1", - "IgnoreAllDefaultLibraries": "true", - "IgnoreSpecificDefaultLibraries": "file1;file2", - "LinkLibraryDependencies": "true", - "ModuleDefinitionFile": "a_file_name", - "OutputFile": "a_file_name", - "SuppressStartupBanner": "true", - "UseUnicodeResponseFiles": "true", - }, - "VCManifestTool": { - "AdditionalManifestFiles": "file1;file2", - "AdditionalOptions": "a string1", - "AssemblyIdentity": "a string1", - "ComponentFileName": "a_file_name", - "DependencyInformationFile": "a_file_name", - "GenerateCatalogFiles": "true", - "InputResourceManifests": "a string1", - "ManifestResourceFile": "a_file_name", - "OutputManifestFile": "a_file_name", - "RegistrarScriptFile": "a_file_name", - "ReplacementsFile": "a_file_name", - "SuppressStartupBanner": "true", - "TypeLibraryFile": "a_file_name", - "UpdateFileHashes": "truel", - "UpdateFileHashesSearchPath": "a_file_name", - "UseFAT32Workaround": "true", - "UseUnicodeResponseFiles": "true", - "VerboseOutput": "true", - }, - }, - self.stderr, - ) - self._ExpectedWarnings( - [ - "Warning: for VCCLCompilerTool/BasicRuntimeChecks, " - "index value (5) not in expected range [0, 4)", - "Warning: for VCCLCompilerTool/BrowseInformation, " - "invalid literal for int() with base 10: 'fdkslj'", - "Warning: for VCCLCompilerTool/CallingConvention, " - "index value (-1) not in expected range [0, 4)", - "Warning: for VCCLCompilerTool/DebugInformationFormat, " - "converted value for 2 not specified.", - "Warning: unrecognized setting VCCLCompilerTool/Enableprefast", - "Warning: unrecognized setting VCCLCompilerTool/ZZXYZ", - "Warning: for VCLinkerTool/TargetMachine, " - "converted value for 2 not specified.", - "Warning: unrecognized 
setting VCMIDLTool/notgood", - "Warning: unrecognized setting VCResourceCompilerTool/notgood2", - "Warning: for VCManifestTool/UpdateFileHashes, " - "expected bool; got 'truel'" - "", - ] - ) - - def testValidateMSBuildSettings_settings(self): - """Tests that for invalid MSBuild settings.""" - MSVSSettings.ValidateMSBuildSettings( - { - "ClCompile": { - "AdditionalIncludeDirectories": "folder1;folder2", - "AdditionalOptions": ["string1", "string2"], - "AdditionalUsingDirectories": "folder1;folder2", - "AssemblerListingLocation": "a_file_name", - "AssemblerOutput": "NoListing", - "BasicRuntimeChecks": "StackFrameRuntimeCheck", - "BrowseInformation": "false", - "BrowseInformationFile": "a_file_name", - "BufferSecurityCheck": "true", - "BuildingInIDE": "true", - "CallingConvention": "Cdecl", - "CompileAs": "CompileAsC", - "CompileAsManaged": "true", - "CreateHotpatchableImage": "true", - "DebugInformationFormat": "ProgramDatabase", - "DisableLanguageExtensions": "true", - "DisableSpecificWarnings": "string1;string2", - "EnableEnhancedInstructionSet": "StreamingSIMDExtensions", - "EnableFiberSafeOptimizations": "true", - "EnablePREfast": "true", - "Enableprefast": "bogus", - "ErrorReporting": "Prompt", - "ExceptionHandling": "SyncCThrow", - "ExpandAttributedSource": "true", - "FavorSizeOrSpeed": "Neither", - "FloatingPointExceptions": "true", - "FloatingPointModel": "Precise", - "ForceConformanceInForLoopScope": "true", - "ForcedIncludeFiles": "file1;file2", - "ForcedUsingFiles": "file1;file2", - "FunctionLevelLinking": "false", - "GenerateXMLDocumentationFiles": "true", - "IgnoreStandardIncludePath": "true", - "InlineFunctionExpansion": "OnlyExplicitInline", - "IntrinsicFunctions": "false", - "MinimalRebuild": "true", - "MultiProcessorCompilation": "true", - "ObjectFileName": "a_file_name", - "OmitDefaultLibName": "true", - "OmitFramePointers": "true", - "OpenMPSupport": "true", - "Optimization": "Disabled", - "PrecompiledHeader": "NotUsing", - "PrecompiledHeaderFile": "a_file_name", - "PrecompiledHeaderOutputFile": "a_file_name", - "PreprocessKeepComments": "true", - "PreprocessorDefinitions": "string1;string2", - "PreprocessOutputPath": "a string1", - "PreprocessSuppressLineNumbers": "false", - "PreprocessToFile": "false", - "ProcessorNumber": "33", - "ProgramDataBaseFileName": "a_file_name", - "RuntimeLibrary": "MultiThreaded", - "RuntimeTypeInfo": "true", - "ShowIncludes": "true", - "SmallerTypeCheck": "true", - "StringPooling": "true", - "StructMemberAlignment": "1Byte", - "SuppressStartupBanner": "true", - "TrackerLogDirectory": "a_folder", - "TreatSpecificWarningsAsErrors": "string1;string2", - "TreatWarningAsError": "true", - "TreatWChar_tAsBuiltInType": "true", - "UndefineAllPreprocessorDefinitions": "true", - "UndefinePreprocessorDefinitions": "string1;string2", - "UseFullPaths": "true", - "UseUnicodeForAssemblerListing": "true", - "WarningLevel": "TurnOffAllWarnings", - "WholeProgramOptimization": "true", - "XMLDocumentationFileName": "a_file_name", - "ZZXYZ": "bogus", - }, - "Link": { - "AdditionalDependencies": "file1;file2", - "AdditionalLibraryDirectories": "folder1;folder2", - "AdditionalManifestDependencies": "file1;file2", - "AdditionalOptions": "a string1", - "AddModuleNamesToAssembly": "file1;file2", - "AllowIsolation": "true", - "AssemblyDebug": "", - "AssemblyLinkResource": "file1;file2", - "BaseAddress": "a string1", - "BuildingInIDE": "true", - "CLRImageType": "ForceIJWImage", - "CLRSupportLastError": "Enabled", - "CLRThreadAttribute": "MTAThreadingAttribute", - 
"CLRUnmanagedCodeCheck": "true", - "CreateHotPatchableImage": "X86Image", - "DataExecutionPrevention": "false", - "DelayLoadDLLs": "file1;file2", - "DelaySign": "true", - "Driver": "NotSet", - "EmbedManagedResourceFile": "file1;file2", - "EnableCOMDATFolding": "false", - "EnableUAC": "true", - "EntryPointSymbol": "a string1", - "FixedBaseAddress": "false", - "ForceFileOutput": "Enabled", - "ForceSymbolReferences": "file1;file2", - "FunctionOrder": "a_file_name", - "GenerateDebugInformation": "true", - "GenerateMapFile": "true", - "HeapCommitSize": "a string1", - "HeapReserveSize": "a string1", - "IgnoreAllDefaultLibraries": "true", - "IgnoreEmbeddedIDL": "true", - "IgnoreSpecificDefaultLibraries": "a_file_list", - "ImageHasSafeExceptionHandlers": "true", - "ImportLibrary": "a_file_name", - "KeyContainer": "a_file_name", - "KeyFile": "a_file_name", - "LargeAddressAware": "false", - "LinkDLL": "true", - "LinkErrorReporting": "SendErrorReport", - "LinkStatus": "true", - "LinkTimeCodeGeneration": "UseLinkTimeCodeGeneration", - "ManifestFile": "a_file_name", - "MapExports": "true", - "MapFileName": "a_file_name", - "MergedIDLBaseFileName": "a_file_name", - "MergeSections": "a string1", - "MidlCommandFile": "a_file_name", - "MinimumRequiredVersion": "a string1", - "ModuleDefinitionFile": "a_file_name", - "MSDOSStubFileName": "a_file_name", - "NoEntryPoint": "true", - "OptimizeReferences": "false", - "OutputFile": "a_file_name", - "PerUserRedirection": "true", - "PreventDllBinding": "true", - "Profile": "true", - "ProfileGuidedDatabase": "a_file_name", - "ProgramDatabaseFile": "a_file_name", - "RandomizedBaseAddress": "false", - "RegisterOutput": "true", - "SectionAlignment": "33", - "SetChecksum": "true", - "ShowProgress": "LinkVerboseREF", - "SpecifySectionAttributes": "a string1", - "StackCommitSize": "a string1", - "StackReserveSize": "a string1", - "StripPrivateSymbols": "a_file_name", - "SubSystem": "Console", - "SupportNobindOfDelayLoadedDLL": "true", - "SupportUnloadOfDelayLoadedDLL": "true", - "SuppressStartupBanner": "true", - "SwapRunFromCD": "true", - "SwapRunFromNET": "true", - "TargetMachine": "MachineX86", - "TerminalServerAware": "false", - "TrackerLogDirectory": "a_folder", - "TreatLinkerWarningAsErrors": "true", - "TurnOffAssemblyGeneration": "true", - "TypeLibraryFile": "a_file_name", - "TypeLibraryResourceID": "33", - "UACExecutionLevel": "AsInvoker", - "UACUIAccess": "true", - "Version": "a string1", - }, - "ResourceCompile": { - "AdditionalIncludeDirectories": "folder1;folder2", - "AdditionalOptions": "a string1", - "Culture": "0x236", - "IgnoreStandardIncludePath": "true", - "NullTerminateStrings": "true", - "PreprocessorDefinitions": "string1;string2", - "ResourceOutputFileName": "a string1", - "ShowProgress": "true", - "SuppressStartupBanner": "true", - "TrackerLogDirectory": "a_folder", - "UndefinePreprocessorDefinitions": "string1;string2", - }, - "Midl": { - "AdditionalIncludeDirectories": "folder1;folder2", - "AdditionalOptions": "a string1", - "ApplicationConfigurationMode": "true", - "ClientStubFile": "a_file_name", - "CPreprocessOptions": "a string1", - "DefaultCharType": "Signed", - "DllDataFileName": "a_file_name", - "EnableErrorChecks": "EnableCustom", - "ErrorCheckAllocations": "true", - "ErrorCheckBounds": "true", - "ErrorCheckEnumRange": "true", - "ErrorCheckRefPointers": "true", - "ErrorCheckStubData": "true", - "GenerateClientFiles": "Stub", - "GenerateServerFiles": "None", - "GenerateStublessProxies": "true", - "GenerateTypeLibrary": "true", - 
"HeaderFileName": "a_file_name", - "IgnoreStandardIncludePath": "true", - "InterfaceIdentifierFileName": "a_file_name", - "LocaleID": "33", - "MkTypLibCompatible": "true", - "OutputDirectory": "a string1", - "PreprocessorDefinitions": "string1;string2", - "ProxyFileName": "a_file_name", - "RedirectOutputAndErrors": "a_file_name", - "ServerStubFile": "a_file_name", - "StructMemberAlignment": "NotSet", - "SuppressCompilerWarnings": "true", - "SuppressStartupBanner": "true", - "TargetEnvironment": "Itanium", - "TrackerLogDirectory": "a_folder", - "TypeLibFormat": "NewFormat", - "TypeLibraryName": "a_file_name", - "UndefinePreprocessorDefinitions": "string1;string2", - "ValidateAllParameters": "true", - "WarnAsError": "true", - "WarningLevel": "1", - }, - "Lib": { - "AdditionalDependencies": "file1;file2", - "AdditionalLibraryDirectories": "folder1;folder2", - "AdditionalOptions": "a string1", - "DisplayLibrary": "a string1", - "ErrorReporting": "PromptImmediately", - "ExportNamedFunctions": "string1;string2", - "ForceSymbolReferences": "a string1", - "IgnoreAllDefaultLibraries": "true", - "IgnoreSpecificDefaultLibraries": "file1;file2", - "LinkTimeCodeGeneration": "true", - "MinimumRequiredVersion": "a string1", - "ModuleDefinitionFile": "a_file_name", - "Name": "a_file_name", - "OutputFile": "a_file_name", - "RemoveObjects": "file1;file2", - "SubSystem": "Console", - "SuppressStartupBanner": "true", - "TargetMachine": "MachineX86i", - "TrackerLogDirectory": "a_folder", - "TreatLibWarningAsErrors": "true", - "UseUnicodeResponseFiles": "true", - "Verbose": "true", - }, - "Manifest": { - "AdditionalManifestFiles": "file1;file2", - "AdditionalOptions": "a string1", - "AssemblyIdentity": "a string1", - "ComponentFileName": "a_file_name", - "EnableDPIAwareness": "fal", - "GenerateCatalogFiles": "truel", - "GenerateCategoryTags": "true", - "InputResourceManifests": "a string1", - "ManifestFromManagedAssembly": "a_file_name", - "notgood3": "bogus", - "OutputManifestFile": "a_file_name", - "OutputResourceManifests": "a string1", - "RegistrarScriptFile": "a_file_name", - "ReplacementsFile": "a_file_name", - "SuppressDependencyElement": "true", - "SuppressStartupBanner": "true", - "TrackerLogDirectory": "a_folder", - "TypeLibraryFile": "a_file_name", - "UpdateFileHashes": "true", - "UpdateFileHashesSearchPath": "a_file_name", - "VerboseOutput": "true", - }, - "ProjectReference": { - "LinkLibraryDependencies": "true", - "UseLibraryDependencyInputs": "true", - }, - "ManifestResourceCompile": {"ResourceOutputFileName": "a_file_name"}, - "": { - "EmbedManifest": "true", - "GenerateManifest": "true", - "IgnoreImportLibrary": "true", - "LinkIncremental": "false", - }, - }, - self.stderr, - ) - self._ExpectedWarnings( - [ - "Warning: unrecognized setting ClCompile/Enableprefast", - "Warning: unrecognized setting ClCompile/ZZXYZ", - "Warning: unrecognized setting Manifest/notgood3", - "Warning: for Manifest/GenerateCatalogFiles, " - "expected bool; got 'truel'", - "Warning: for Lib/TargetMachine, unrecognized enumerated value " - "MachineX86i", - "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'", - ] - ) - - def testConvertToMSBuildSettings_empty(self): - """Tests an empty conversion.""" - msvs_settings = {} - expected_msbuild_settings = {} - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, self.stderr - ) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings([]) - - def testConvertToMSBuildSettings_minimal(self): - 
"""Tests a minimal conversion.""" - msvs_settings = { - "VCCLCompilerTool": { - "AdditionalIncludeDirectories": "dir1", - "AdditionalOptions": "/foo", - "BasicRuntimeChecks": "0", - }, - "VCLinkerTool": { - "LinkTimeCodeGeneration": "1", - "ErrorReporting": "1", - "DataExecutionPrevention": "2", - }, - } - expected_msbuild_settings = { - "ClCompile": { - "AdditionalIncludeDirectories": "dir1", - "AdditionalOptions": "/foo", - "BasicRuntimeChecks": "Default", - }, - "Link": { - "LinkTimeCodeGeneration": "UseLinkTimeCodeGeneration", - "LinkErrorReporting": "PromptImmediately", - "DataExecutionPrevention": "true", - }, - } - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, self.stderr - ) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings([]) - - def testConvertToMSBuildSettings_warnings(self): - """Tests conversion that generates warnings.""" - msvs_settings = { - "VCCLCompilerTool": { - "AdditionalIncludeDirectories": "1", - "AdditionalOptions": "2", - # These are incorrect values: - "BasicRuntimeChecks": "12", - "BrowseInformation": "21", - "UsePrecompiledHeader": "13", - "GeneratePreprocessedFile": "14", - }, - "VCLinkerTool": { - # These are incorrect values: - "Driver": "10", - "LinkTimeCodeGeneration": "31", - "ErrorReporting": "21", - "FixedBaseAddress": "6", - }, - "VCResourceCompilerTool": { - # Custom - "Culture": "1003" - }, - } - expected_msbuild_settings = { - "ClCompile": { - "AdditionalIncludeDirectories": "1", - "AdditionalOptions": "2", - }, - "Link": {}, - "ResourceCompile": { - # Custom - "Culture": "0x03eb" - }, - } - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, self.stderr - ) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings( - [ - "Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to " - "MSBuild, index value (12) not in expected range [0, 4)", - "Warning: while converting VCCLCompilerTool/BrowseInformation to " - "MSBuild, index value (21) not in expected range [0, 3)", - "Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to " - "MSBuild, index value (13) not in expected range [0, 3)", - "Warning: while converting " - "VCCLCompilerTool/GeneratePreprocessedFile to " - "MSBuild, value must be one of [0, 1, 2]; got 14", - "Warning: while converting VCLinkerTool/Driver to " - "MSBuild, index value (10) not in expected range [0, 4)", - "Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to " - "MSBuild, index value (31) not in expected range [0, 5)", - "Warning: while converting VCLinkerTool/ErrorReporting to " - "MSBuild, index value (21) not in expected range [0, 3)", - "Warning: while converting VCLinkerTool/FixedBaseAddress to " - "MSBuild, index value (6) not in expected range [0, 3)", - ] - ) - - def testConvertToMSBuildSettings_full_synthetic(self): - """Tests conversion of all the MSBuild settings.""" - msvs_settings = { - "VCCLCompilerTool": { - "AdditionalIncludeDirectories": "folder1;folder2;folder3", - "AdditionalOptions": "a_string", - "AdditionalUsingDirectories": "folder1;folder2;folder3", - "AssemblerListingLocation": "a_file_name", - "AssemblerOutput": "0", - "BasicRuntimeChecks": "1", - "BrowseInformation": "2", - "BrowseInformationFile": "a_file_name", - "BufferSecurityCheck": "true", - "CallingConvention": "0", - "CompileAs": "1", - "DebugInformationFormat": "4", - "DefaultCharIsUnsigned": "true", - "Detect64BitPortabilityProblems": "true", - 
"DisableLanguageExtensions": "true", - "DisableSpecificWarnings": "d1;d2;d3", - "EnableEnhancedInstructionSet": "0", - "EnableFiberSafeOptimizations": "true", - "EnableFunctionLevelLinking": "true", - "EnableIntrinsicFunctions": "true", - "EnablePREfast": "true", - "ErrorReporting": "1", - "ExceptionHandling": "2", - "ExpandAttributedSource": "true", - "FavorSizeOrSpeed": "0", - "FloatingPointExceptions": "true", - "FloatingPointModel": "1", - "ForceConformanceInForLoopScope": "true", - "ForcedIncludeFiles": "file1;file2;file3", - "ForcedUsingFiles": "file1;file2;file3", - "GeneratePreprocessedFile": "1", - "GenerateXMLDocumentationFiles": "true", - "IgnoreStandardIncludePath": "true", - "InlineFunctionExpansion": "2", - "KeepComments": "true", - "MinimalRebuild": "true", - "ObjectFile": "a_file_name", - "OmitDefaultLibName": "true", - "OmitFramePointers": "true", - "OpenMP": "true", - "Optimization": "3", - "PrecompiledHeaderFile": "a_file_name", - "PrecompiledHeaderThrough": "a_file_name", - "PreprocessorDefinitions": "d1;d2;d3", - "ProgramDataBaseFileName": "a_file_name", - "RuntimeLibrary": "0", - "RuntimeTypeInfo": "true", - "ShowIncludes": "true", - "SmallerTypeCheck": "true", - "StringPooling": "true", - "StructMemberAlignment": "1", - "SuppressStartupBanner": "true", - "TreatWChar_tAsBuiltInType": "true", - "UndefineAllPreprocessorDefinitions": "true", - "UndefinePreprocessorDefinitions": "d1;d2;d3", - "UseFullPaths": "true", - "UsePrecompiledHeader": "1", - "UseUnicodeResponseFiles": "true", - "WarnAsError": "true", - "WarningLevel": "2", - "WholeProgramOptimization": "true", - "XMLDocumentationFileName": "a_file_name", - }, - "VCLinkerTool": { - "AdditionalDependencies": "file1;file2;file3", - "AdditionalLibraryDirectories": "folder1;folder2;folder3", - "AdditionalLibraryDirectories_excluded": "folder1;folder2;folder3", - "AdditionalManifestDependencies": "file1;file2;file3", - "AdditionalOptions": "a_string", - "AddModuleNamesToAssembly": "file1;file2;file3", - "AllowIsolation": "true", - "AssemblyDebug": "0", - "AssemblyLinkResource": "file1;file2;file3", - "BaseAddress": "a_string", - "CLRImageType": "1", - "CLRThreadAttribute": "2", - "CLRUnmanagedCodeCheck": "true", - "DataExecutionPrevention": "0", - "DelayLoadDLLs": "file1;file2;file3", - "DelaySign": "true", - "Driver": "1", - "EmbedManagedResourceFile": "file1;file2;file3", - "EnableCOMDATFolding": "0", - "EnableUAC": "true", - "EntryPointSymbol": "a_string", - "ErrorReporting": "0", - "FixedBaseAddress": "1", - "ForceSymbolReferences": "file1;file2;file3", - "FunctionOrder": "a_file_name", - "GenerateDebugInformation": "true", - "GenerateManifest": "true", - "GenerateMapFile": "true", - "HeapCommitSize": "a_string", - "HeapReserveSize": "a_string", - "IgnoreAllDefaultLibraries": "true", - "IgnoreDefaultLibraryNames": "file1;file2;file3", - "IgnoreEmbeddedIDL": "true", - "IgnoreImportLibrary": "true", - "ImportLibrary": "a_file_name", - "KeyContainer": "a_file_name", - "KeyFile": "a_file_name", - "LargeAddressAware": "2", - "LinkIncremental": "1", - "LinkLibraryDependencies": "true", - "LinkTimeCodeGeneration": "2", - "ManifestFile": "a_file_name", - "MapExports": "true", - "MapFileName": "a_file_name", - "MergedIDLBaseFileName": "a_file_name", - "MergeSections": "a_string", - "MidlCommandFile": "a_file_name", - "ModuleDefinitionFile": "a_file_name", - "OptimizeForWindows98": "1", - "OptimizeReferences": "0", - "OutputFile": "a_file_name", - "PerUserRedirection": "true", - "Profile": "true", - "ProfileGuidedDatabase": 
"a_file_name", - "ProgramDatabaseFile": "a_file_name", - "RandomizedBaseAddress": "1", - "RegisterOutput": "true", - "ResourceOnlyDLL": "true", - "SetChecksum": "true", - "ShowProgress": "0", - "StackCommitSize": "a_string", - "StackReserveSize": "a_string", - "StripPrivateSymbols": "a_file_name", - "SubSystem": "2", - "SupportUnloadOfDelayLoadedDLL": "true", - "SuppressStartupBanner": "true", - "SwapRunFromCD": "true", - "SwapRunFromNet": "true", - "TargetMachine": "3", - "TerminalServerAware": "2", - "TurnOffAssemblyGeneration": "true", - "TypeLibraryFile": "a_file_name", - "TypeLibraryResourceID": "33", - "UACExecutionLevel": "1", - "UACUIAccess": "true", - "UseLibraryDependencyInputs": "false", - "UseUnicodeResponseFiles": "true", - "Version": "a_string", - }, - "VCResourceCompilerTool": { - "AdditionalIncludeDirectories": "folder1;folder2;folder3", - "AdditionalOptions": "a_string", - "Culture": "1003", - "IgnoreStandardIncludePath": "true", - "PreprocessorDefinitions": "d1;d2;d3", - "ResourceOutputFileName": "a_string", - "ShowProgress": "true", - "SuppressStartupBanner": "true", - "UndefinePreprocessorDefinitions": "d1;d2;d3", - }, - "VCMIDLTool": { - "AdditionalIncludeDirectories": "folder1;folder2;folder3", - "AdditionalOptions": "a_string", - "CPreprocessOptions": "a_string", - "DefaultCharType": "0", - "DLLDataFileName": "a_file_name", - "EnableErrorChecks": "2", - "ErrorCheckAllocations": "true", - "ErrorCheckBounds": "true", - "ErrorCheckEnumRange": "true", - "ErrorCheckRefPointers": "true", - "ErrorCheckStubData": "true", - "GenerateStublessProxies": "true", - "GenerateTypeLibrary": "true", - "HeaderFileName": "a_file_name", - "IgnoreStandardIncludePath": "true", - "InterfaceIdentifierFileName": "a_file_name", - "MkTypLibCompatible": "true", - "OutputDirectory": "a_string", - "PreprocessorDefinitions": "d1;d2;d3", - "ProxyFileName": "a_file_name", - "RedirectOutputAndErrors": "a_file_name", - "StructMemberAlignment": "3", - "SuppressStartupBanner": "true", - "TargetEnvironment": "1", - "TypeLibraryName": "a_file_name", - "UndefinePreprocessorDefinitions": "d1;d2;d3", - "ValidateParameters": "true", - "WarnAsError": "true", - "WarningLevel": "4", - }, - "VCLibrarianTool": { - "AdditionalDependencies": "file1;file2;file3", - "AdditionalLibraryDirectories": "folder1;folder2;folder3", - "AdditionalLibraryDirectories_excluded": "folder1;folder2;folder3", - "AdditionalOptions": "a_string", - "ExportNamedFunctions": "d1;d2;d3", - "ForceSymbolReferences": "a_string", - "IgnoreAllDefaultLibraries": "true", - "IgnoreSpecificDefaultLibraries": "file1;file2;file3", - "LinkLibraryDependencies": "true", - "ModuleDefinitionFile": "a_file_name", - "OutputFile": "a_file_name", - "SuppressStartupBanner": "true", - "UseUnicodeResponseFiles": "true", - }, - "VCManifestTool": { - "AdditionalManifestFiles": "file1;file2;file3", - "AdditionalOptions": "a_string", - "AssemblyIdentity": "a_string", - "ComponentFileName": "a_file_name", - "DependencyInformationFile": "a_file_name", - "EmbedManifest": "true", - "GenerateCatalogFiles": "true", - "InputResourceManifests": "a_string", - "ManifestResourceFile": "my_name", - "OutputManifestFile": "a_file_name", - "RegistrarScriptFile": "a_file_name", - "ReplacementsFile": "a_file_name", - "SuppressStartupBanner": "true", - "TypeLibraryFile": "a_file_name", - "UpdateFileHashes": "true", - "UpdateFileHashesSearchPath": "a_file_name", - "UseFAT32Workaround": "true", - "UseUnicodeResponseFiles": "true", - "VerboseOutput": "true", - }, - } - 
expected_msbuild_settings = { - "ClCompile": { - "AdditionalIncludeDirectories": "folder1;folder2;folder3", - "AdditionalOptions": "a_string /J", - "AdditionalUsingDirectories": "folder1;folder2;folder3", - "AssemblerListingLocation": "a_file_name", - "AssemblerOutput": "NoListing", - "BasicRuntimeChecks": "StackFrameRuntimeCheck", - "BrowseInformation": "true", - "BrowseInformationFile": "a_file_name", - "BufferSecurityCheck": "true", - "CallingConvention": "Cdecl", - "CompileAs": "CompileAsC", - "DebugInformationFormat": "EditAndContinue", - "DisableLanguageExtensions": "true", - "DisableSpecificWarnings": "d1;d2;d3", - "EnableEnhancedInstructionSet": "NotSet", - "EnableFiberSafeOptimizations": "true", - "EnablePREfast": "true", - "ErrorReporting": "Prompt", - "ExceptionHandling": "Async", - "ExpandAttributedSource": "true", - "FavorSizeOrSpeed": "Neither", - "FloatingPointExceptions": "true", - "FloatingPointModel": "Strict", - "ForceConformanceInForLoopScope": "true", - "ForcedIncludeFiles": "file1;file2;file3", - "ForcedUsingFiles": "file1;file2;file3", - "FunctionLevelLinking": "true", - "GenerateXMLDocumentationFiles": "true", - "IgnoreStandardIncludePath": "true", - "InlineFunctionExpansion": "AnySuitable", - "IntrinsicFunctions": "true", - "MinimalRebuild": "true", - "ObjectFileName": "a_file_name", - "OmitDefaultLibName": "true", - "OmitFramePointers": "true", - "OpenMPSupport": "true", - "Optimization": "Full", - "PrecompiledHeader": "Create", - "PrecompiledHeaderFile": "a_file_name", - "PrecompiledHeaderOutputFile": "a_file_name", - "PreprocessKeepComments": "true", - "PreprocessorDefinitions": "d1;d2;d3", - "PreprocessSuppressLineNumbers": "false", - "PreprocessToFile": "true", - "ProgramDataBaseFileName": "a_file_name", - "RuntimeLibrary": "MultiThreaded", - "RuntimeTypeInfo": "true", - "ShowIncludes": "true", - "SmallerTypeCheck": "true", - "StringPooling": "true", - "StructMemberAlignment": "1Byte", - "SuppressStartupBanner": "true", - "TreatWarningAsError": "true", - "TreatWChar_tAsBuiltInType": "true", - "UndefineAllPreprocessorDefinitions": "true", - "UndefinePreprocessorDefinitions": "d1;d2;d3", - "UseFullPaths": "true", - "WarningLevel": "Level2", - "WholeProgramOptimization": "true", - "XMLDocumentationFileName": "a_file_name", - }, - "Link": { - "AdditionalDependencies": "file1;file2;file3", - "AdditionalLibraryDirectories": "folder1;folder2;folder3", - "AdditionalManifestDependencies": "file1;file2;file3", - "AdditionalOptions": "a_string", - "AddModuleNamesToAssembly": "file1;file2;file3", - "AllowIsolation": "true", - "AssemblyDebug": "", - "AssemblyLinkResource": "file1;file2;file3", - "BaseAddress": "a_string", - "CLRImageType": "ForceIJWImage", - "CLRThreadAttribute": "STAThreadingAttribute", - "CLRUnmanagedCodeCheck": "true", - "DataExecutionPrevention": "", - "DelayLoadDLLs": "file1;file2;file3", - "DelaySign": "true", - "Driver": "Driver", - "EmbedManagedResourceFile": "file1;file2;file3", - "EnableCOMDATFolding": "", - "EnableUAC": "true", - "EntryPointSymbol": "a_string", - "FixedBaseAddress": "false", - "ForceSymbolReferences": "file1;file2;file3", - "FunctionOrder": "a_file_name", - "GenerateDebugInformation": "true", - "GenerateMapFile": "true", - "HeapCommitSize": "a_string", - "HeapReserveSize": "a_string", - "IgnoreAllDefaultLibraries": "true", - "IgnoreEmbeddedIDL": "true", - "IgnoreSpecificDefaultLibraries": "file1;file2;file3", - "ImportLibrary": "a_file_name", - "KeyContainer": "a_file_name", - "KeyFile": "a_file_name", - "LargeAddressAware": 
"true", - "LinkErrorReporting": "NoErrorReport", - "LinkTimeCodeGeneration": "PGInstrument", - "ManifestFile": "a_file_name", - "MapExports": "true", - "MapFileName": "a_file_name", - "MergedIDLBaseFileName": "a_file_name", - "MergeSections": "a_string", - "MidlCommandFile": "a_file_name", - "ModuleDefinitionFile": "a_file_name", - "NoEntryPoint": "true", - "OptimizeReferences": "", - "OutputFile": "a_file_name", - "PerUserRedirection": "true", - "Profile": "true", - "ProfileGuidedDatabase": "a_file_name", - "ProgramDatabaseFile": "a_file_name", - "RandomizedBaseAddress": "false", - "RegisterOutput": "true", - "SetChecksum": "true", - "ShowProgress": "NotSet", - "StackCommitSize": "a_string", - "StackReserveSize": "a_string", - "StripPrivateSymbols": "a_file_name", - "SubSystem": "Windows", - "SupportUnloadOfDelayLoadedDLL": "true", - "SuppressStartupBanner": "true", - "SwapRunFromCD": "true", - "SwapRunFromNET": "true", - "TargetMachine": "MachineARM", - "TerminalServerAware": "true", - "TurnOffAssemblyGeneration": "true", - "TypeLibraryFile": "a_file_name", - "TypeLibraryResourceID": "33", - "UACExecutionLevel": "HighestAvailable", - "UACUIAccess": "true", - "Version": "a_string", - }, - "ResourceCompile": { - "AdditionalIncludeDirectories": "folder1;folder2;folder3", - "AdditionalOptions": "a_string", - "Culture": "0x03eb", - "IgnoreStandardIncludePath": "true", - "PreprocessorDefinitions": "d1;d2;d3", - "ResourceOutputFileName": "a_string", - "ShowProgress": "true", - "SuppressStartupBanner": "true", - "UndefinePreprocessorDefinitions": "d1;d2;d3", - }, - "Midl": { - "AdditionalIncludeDirectories": "folder1;folder2;folder3", - "AdditionalOptions": "a_string", - "CPreprocessOptions": "a_string", - "DefaultCharType": "Unsigned", - "DllDataFileName": "a_file_name", - "EnableErrorChecks": "All", - "ErrorCheckAllocations": "true", - "ErrorCheckBounds": "true", - "ErrorCheckEnumRange": "true", - "ErrorCheckRefPointers": "true", - "ErrorCheckStubData": "true", - "GenerateStublessProxies": "true", - "GenerateTypeLibrary": "true", - "HeaderFileName": "a_file_name", - "IgnoreStandardIncludePath": "true", - "InterfaceIdentifierFileName": "a_file_name", - "MkTypLibCompatible": "true", - "OutputDirectory": "a_string", - "PreprocessorDefinitions": "d1;d2;d3", - "ProxyFileName": "a_file_name", - "RedirectOutputAndErrors": "a_file_name", - "StructMemberAlignment": "4", - "SuppressStartupBanner": "true", - "TargetEnvironment": "Win32", - "TypeLibraryName": "a_file_name", - "UndefinePreprocessorDefinitions": "d1;d2;d3", - "ValidateAllParameters": "true", - "WarnAsError": "true", - "WarningLevel": "4", - }, - "Lib": { - "AdditionalDependencies": "file1;file2;file3", - "AdditionalLibraryDirectories": "folder1;folder2;folder3", - "AdditionalOptions": "a_string", - "ExportNamedFunctions": "d1;d2;d3", - "ForceSymbolReferences": "a_string", - "IgnoreAllDefaultLibraries": "true", - "IgnoreSpecificDefaultLibraries": "file1;file2;file3", - "ModuleDefinitionFile": "a_file_name", - "OutputFile": "a_file_name", - "SuppressStartupBanner": "true", - "UseUnicodeResponseFiles": "true", - }, - "Manifest": { - "AdditionalManifestFiles": "file1;file2;file3", - "AdditionalOptions": "a_string", - "AssemblyIdentity": "a_string", - "ComponentFileName": "a_file_name", - "GenerateCatalogFiles": "true", - "InputResourceManifests": "a_string", - "OutputManifestFile": "a_file_name", - "RegistrarScriptFile": "a_file_name", - "ReplacementsFile": "a_file_name", - "SuppressStartupBanner": "true", - "TypeLibraryFile": "a_file_name", - 
"UpdateFileHashes": "true", - "UpdateFileHashesSearchPath": "a_file_name", - "VerboseOutput": "true", - }, - "ManifestResourceCompile": {"ResourceOutputFileName": "my_name"}, - "ProjectReference": { - "LinkLibraryDependencies": "true", - "UseLibraryDependencyInputs": "false", - }, - "": { - "EmbedManifest": "true", - "GenerateManifest": "true", - "IgnoreImportLibrary": "true", - "LinkIncremental": "false", - }, - } - self.maxDiff = 9999 # on failure display a long diff - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, self.stderr - ) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings([]) - - def testConvertToMSBuildSettings_actual(self): - """Tests the conversion of an actual project. - - A VS2008 project with most of the options defined was created through the - VS2008 IDE. It was then converted to VS2010. The tool settings found in - the .vcproj and .vcxproj files were converted to the two dictionaries - msvs_settings and expected_msbuild_settings. - - Note that for many settings, the VS2010 converter adds macros like - %(AdditionalIncludeDirectories) to make sure than inherited values are - included. Since the Gyp projects we generate do not use inheritance, - we removed these macros. They were: - ClCompile: - AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)' - AdditionalOptions: ' %(AdditionalOptions)' - AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)' - DisableSpecificWarnings: ';%(DisableSpecificWarnings)', - ForcedIncludeFiles: ';%(ForcedIncludeFiles)', - ForcedUsingFiles: ';%(ForcedUsingFiles)', - PreprocessorDefinitions: ';%(PreprocessorDefinitions)', - UndefinePreprocessorDefinitions: - ';%(UndefinePreprocessorDefinitions)', - Link: - AdditionalDependencies: ';%(AdditionalDependencies)', - AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)', - AdditionalManifestDependencies: - ';%(AdditionalManifestDependencies)', - AdditionalOptions: ' %(AdditionalOptions)', - AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)', - AssemblyLinkResource: ';%(AssemblyLinkResource)', - DelayLoadDLLs: ';%(DelayLoadDLLs)', - EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)', - ForceSymbolReferences: ';%(ForceSymbolReferences)', - IgnoreSpecificDefaultLibraries: - ';%(IgnoreSpecificDefaultLibraries)', - ResourceCompile: - AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)', - AdditionalOptions: ' %(AdditionalOptions)', - PreprocessorDefinitions: ';%(PreprocessorDefinitions)', - Manifest: - AdditionalManifestFiles: ';%(AdditionalManifestFiles)', - AdditionalOptions: ' %(AdditionalOptions)', - InputResourceManifests: ';%(InputResourceManifests)', - """ - msvs_settings = { - "VCCLCompilerTool": { - "AdditionalIncludeDirectories": "dir1", - "AdditionalOptions": "/more", - "AdditionalUsingDirectories": "test", - "AssemblerListingLocation": "$(IntDir)\\a", - "AssemblerOutput": "1", - "BasicRuntimeChecks": "3", - "BrowseInformation": "1", - "BrowseInformationFile": "$(IntDir)\\e", - "BufferSecurityCheck": "false", - "CallingConvention": "1", - "CompileAs": "1", - "DebugInformationFormat": "4", - "DefaultCharIsUnsigned": "true", - "Detect64BitPortabilityProblems": "true", - "DisableLanguageExtensions": "true", - "DisableSpecificWarnings": "abc", - "EnableEnhancedInstructionSet": "1", - "EnableFiberSafeOptimizations": "true", - "EnableFunctionLevelLinking": "true", - "EnableIntrinsicFunctions": "true", - "EnablePREfast": "true", - "ErrorReporting": "2", - 
"ExceptionHandling": "2", - "ExpandAttributedSource": "true", - "FavorSizeOrSpeed": "2", - "FloatingPointExceptions": "true", - "FloatingPointModel": "1", - "ForceConformanceInForLoopScope": "false", - "ForcedIncludeFiles": "def", - "ForcedUsingFiles": "ge", - "GeneratePreprocessedFile": "2", - "GenerateXMLDocumentationFiles": "true", - "IgnoreStandardIncludePath": "true", - "InlineFunctionExpansion": "1", - "KeepComments": "true", - "MinimalRebuild": "true", - "ObjectFile": "$(IntDir)\\b", - "OmitDefaultLibName": "true", - "OmitFramePointers": "true", - "OpenMP": "true", - "Optimization": "3", - "PrecompiledHeaderFile": "$(IntDir)\\$(TargetName).pche", - "PrecompiledHeaderThrough": "StdAfx.hd", - "PreprocessorDefinitions": "WIN32;_DEBUG;_CONSOLE", - "ProgramDataBaseFileName": "$(IntDir)\\vc90b.pdb", - "RuntimeLibrary": "3", - "RuntimeTypeInfo": "false", - "ShowIncludes": "true", - "SmallerTypeCheck": "true", - "StringPooling": "true", - "StructMemberAlignment": "3", - "SuppressStartupBanner": "false", - "TreatWChar_tAsBuiltInType": "false", - "UndefineAllPreprocessorDefinitions": "true", - "UndefinePreprocessorDefinitions": "wer", - "UseFullPaths": "true", - "UsePrecompiledHeader": "0", - "UseUnicodeResponseFiles": "false", - "WarnAsError": "true", - "WarningLevel": "3", - "WholeProgramOptimization": "true", - "XMLDocumentationFileName": "$(IntDir)\\c", - }, - "VCLinkerTool": { - "AdditionalDependencies": "zx", - "AdditionalLibraryDirectories": "asd", - "AdditionalManifestDependencies": "s2", - "AdditionalOptions": "/mor2", - "AddModuleNamesToAssembly": "d1", - "AllowIsolation": "false", - "AssemblyDebug": "1", - "AssemblyLinkResource": "d5", - "BaseAddress": "23423", - "CLRImageType": "3", - "CLRThreadAttribute": "1", - "CLRUnmanagedCodeCheck": "true", - "DataExecutionPrevention": "0", - "DelayLoadDLLs": "d4", - "DelaySign": "true", - "Driver": "2", - "EmbedManagedResourceFile": "d2", - "EnableCOMDATFolding": "1", - "EnableUAC": "false", - "EntryPointSymbol": "f5", - "ErrorReporting": "2", - "FixedBaseAddress": "1", - "ForceSymbolReferences": "d3", - "FunctionOrder": "fssdfsd", - "GenerateDebugInformation": "true", - "GenerateManifest": "false", - "GenerateMapFile": "true", - "HeapCommitSize": "13", - "HeapReserveSize": "12", - "IgnoreAllDefaultLibraries": "true", - "IgnoreDefaultLibraryNames": "flob;flok", - "IgnoreEmbeddedIDL": "true", - "IgnoreImportLibrary": "true", - "ImportLibrary": "f4", - "KeyContainer": "f7", - "KeyFile": "f6", - "LargeAddressAware": "2", - "LinkIncremental": "0", - "LinkLibraryDependencies": "false", - "LinkTimeCodeGeneration": "1", - "ManifestFile": "$(IntDir)\\$(TargetFileName).2intermediate.manifest", - "MapExports": "true", - "MapFileName": "d5", - "MergedIDLBaseFileName": "f2", - "MergeSections": "f5", - "MidlCommandFile": "f1", - "ModuleDefinitionFile": "sdsd", - "OptimizeForWindows98": "2", - "OptimizeReferences": "2", - "OutputFile": "$(OutDir)\\$(ProjectName)2.exe", - "PerUserRedirection": "true", - "Profile": "true", - "ProfileGuidedDatabase": "$(TargetDir)$(TargetName).pgdd", - "ProgramDatabaseFile": "Flob.pdb", - "RandomizedBaseAddress": "1", - "RegisterOutput": "true", - "ResourceOnlyDLL": "true", - "SetChecksum": "false", - "ShowProgress": "1", - "StackCommitSize": "15", - "StackReserveSize": "14", - "StripPrivateSymbols": "d3", - "SubSystem": "1", - "SupportUnloadOfDelayLoadedDLL": "true", - "SuppressStartupBanner": "false", - "SwapRunFromCD": "true", - "SwapRunFromNet": "true", - "TargetMachine": "1", - "TerminalServerAware": "1", - 
"TurnOffAssemblyGeneration": "true", - "TypeLibraryFile": "f3", - "TypeLibraryResourceID": "12", - "UACExecutionLevel": "2", - "UACUIAccess": "true", - "UseLibraryDependencyInputs": "true", - "UseUnicodeResponseFiles": "false", - "Version": "333", - }, - "VCResourceCompilerTool": { - "AdditionalIncludeDirectories": "f3", - "AdditionalOptions": "/more3", - "Culture": "3084", - "IgnoreStandardIncludePath": "true", - "PreprocessorDefinitions": "_UNICODE;UNICODE2", - "ResourceOutputFileName": "$(IntDir)/$(InputName)3.res", - "ShowProgress": "true", - }, - "VCManifestTool": { - "AdditionalManifestFiles": "sfsdfsd", - "AdditionalOptions": "afdsdafsd", - "AssemblyIdentity": "sddfdsadfsa", - "ComponentFileName": "fsdfds", - "DependencyInformationFile": "$(IntDir)\\mt.depdfd", - "EmbedManifest": "false", - "GenerateCatalogFiles": "true", - "InputResourceManifests": "asfsfdafs", - "ManifestResourceFile": - "$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf", - "OutputManifestFile": "$(TargetPath).manifestdfs", - "RegistrarScriptFile": "sdfsfd", - "ReplacementsFile": "sdffsd", - "SuppressStartupBanner": "false", - "TypeLibraryFile": "sfsd", - "UpdateFileHashes": "true", - "UpdateFileHashesSearchPath": "sfsd", - "UseFAT32Workaround": "true", - "UseUnicodeResponseFiles": "false", - "VerboseOutput": "true", - }, - } - expected_msbuild_settings = { - "ClCompile": { - "AdditionalIncludeDirectories": "dir1", - "AdditionalOptions": "/more /J", - "AdditionalUsingDirectories": "test", - "AssemblerListingLocation": "$(IntDir)a", - "AssemblerOutput": "AssemblyCode", - "BasicRuntimeChecks": "EnableFastChecks", - "BrowseInformation": "true", - "BrowseInformationFile": "$(IntDir)e", - "BufferSecurityCheck": "false", - "CallingConvention": "FastCall", - "CompileAs": "CompileAsC", - "DebugInformationFormat": "EditAndContinue", - "DisableLanguageExtensions": "true", - "DisableSpecificWarnings": "abc", - "EnableEnhancedInstructionSet": "StreamingSIMDExtensions", - "EnableFiberSafeOptimizations": "true", - "EnablePREfast": "true", - "ErrorReporting": "Queue", - "ExceptionHandling": "Async", - "ExpandAttributedSource": "true", - "FavorSizeOrSpeed": "Size", - "FloatingPointExceptions": "true", - "FloatingPointModel": "Strict", - "ForceConformanceInForLoopScope": "false", - "ForcedIncludeFiles": "def", - "ForcedUsingFiles": "ge", - "FunctionLevelLinking": "true", - "GenerateXMLDocumentationFiles": "true", - "IgnoreStandardIncludePath": "true", - "InlineFunctionExpansion": "OnlyExplicitInline", - "IntrinsicFunctions": "true", - "MinimalRebuild": "true", - "ObjectFileName": "$(IntDir)b", - "OmitDefaultLibName": "true", - "OmitFramePointers": "true", - "OpenMPSupport": "true", - "Optimization": "Full", - "PrecompiledHeader": "NotUsing", # Actual conversion gives '' - "PrecompiledHeaderFile": "StdAfx.hd", - "PrecompiledHeaderOutputFile": "$(IntDir)$(TargetName).pche", - "PreprocessKeepComments": "true", - "PreprocessorDefinitions": "WIN32;_DEBUG;_CONSOLE", - "PreprocessSuppressLineNumbers": "true", - "PreprocessToFile": "true", - "ProgramDataBaseFileName": "$(IntDir)vc90b.pdb", - "RuntimeLibrary": "MultiThreadedDebugDLL", - "RuntimeTypeInfo": "false", - "ShowIncludes": "true", - "SmallerTypeCheck": "true", - "StringPooling": "true", - "StructMemberAlignment": "4Bytes", - "SuppressStartupBanner": "false", - "TreatWarningAsError": "true", - "TreatWChar_tAsBuiltInType": "false", - "UndefineAllPreprocessorDefinitions": "true", - "UndefinePreprocessorDefinitions": "wer", - "UseFullPaths": "true", - "WarningLevel": "Level3", - 
"WholeProgramOptimization": "true", - "XMLDocumentationFileName": "$(IntDir)c", - }, - "Link": { - "AdditionalDependencies": "zx", - "AdditionalLibraryDirectories": "asd", - "AdditionalManifestDependencies": "s2", - "AdditionalOptions": "/mor2", - "AddModuleNamesToAssembly": "d1", - "AllowIsolation": "false", - "AssemblyDebug": "true", - "AssemblyLinkResource": "d5", - "BaseAddress": "23423", - "CLRImageType": "ForceSafeILImage", - "CLRThreadAttribute": "MTAThreadingAttribute", - "CLRUnmanagedCodeCheck": "true", - "DataExecutionPrevention": "", - "DelayLoadDLLs": "d4", - "DelaySign": "true", - "Driver": "UpOnly", - "EmbedManagedResourceFile": "d2", - "EnableCOMDATFolding": "false", - "EnableUAC": "false", - "EntryPointSymbol": "f5", - "FixedBaseAddress": "false", - "ForceSymbolReferences": "d3", - "FunctionOrder": "fssdfsd", - "GenerateDebugInformation": "true", - "GenerateMapFile": "true", - "HeapCommitSize": "13", - "HeapReserveSize": "12", - "IgnoreAllDefaultLibraries": "true", - "IgnoreEmbeddedIDL": "true", - "IgnoreSpecificDefaultLibraries": "flob;flok", - "ImportLibrary": "f4", - "KeyContainer": "f7", - "KeyFile": "f6", - "LargeAddressAware": "true", - "LinkErrorReporting": "QueueForNextLogin", - "LinkTimeCodeGeneration": "UseLinkTimeCodeGeneration", - "ManifestFile": "$(IntDir)$(TargetFileName).2intermediate.manifest", - "MapExports": "true", - "MapFileName": "d5", - "MergedIDLBaseFileName": "f2", - "MergeSections": "f5", - "MidlCommandFile": "f1", - "ModuleDefinitionFile": "sdsd", - "NoEntryPoint": "true", - "OptimizeReferences": "true", - "OutputFile": "$(OutDir)$(ProjectName)2.exe", - "PerUserRedirection": "true", - "Profile": "true", - "ProfileGuidedDatabase": "$(TargetDir)$(TargetName).pgdd", - "ProgramDatabaseFile": "Flob.pdb", - "RandomizedBaseAddress": "false", - "RegisterOutput": "true", - "SetChecksum": "false", - "ShowProgress": "LinkVerbose", - "StackCommitSize": "15", - "StackReserveSize": "14", - "StripPrivateSymbols": "d3", - "SubSystem": "Console", - "SupportUnloadOfDelayLoadedDLL": "true", - "SuppressStartupBanner": "false", - "SwapRunFromCD": "true", - "SwapRunFromNET": "true", - "TargetMachine": "MachineX86", - "TerminalServerAware": "false", - "TurnOffAssemblyGeneration": "true", - "TypeLibraryFile": "f3", - "TypeLibraryResourceID": "12", - "UACExecutionLevel": "RequireAdministrator", - "UACUIAccess": "true", - "Version": "333", - }, - "ResourceCompile": { - "AdditionalIncludeDirectories": "f3", - "AdditionalOptions": "/more3", - "Culture": "0x0c0c", - "IgnoreStandardIncludePath": "true", - "PreprocessorDefinitions": "_UNICODE;UNICODE2", - "ResourceOutputFileName": "$(IntDir)%(Filename)3.res", - "ShowProgress": "true", - }, - "Manifest": { - "AdditionalManifestFiles": "sfsdfsd", - "AdditionalOptions": "afdsdafsd", - "AssemblyIdentity": "sddfdsadfsa", - "ComponentFileName": "fsdfds", - "GenerateCatalogFiles": "true", - "InputResourceManifests": "asfsfdafs", - "OutputManifestFile": "$(TargetPath).manifestdfs", - "RegistrarScriptFile": "sdfsfd", - "ReplacementsFile": "sdffsd", - "SuppressStartupBanner": "false", - "TypeLibraryFile": "sfsd", - "UpdateFileHashes": "true", - "UpdateFileHashesSearchPath": "sfsd", - "VerboseOutput": "true", - }, - "ProjectReference": { - "LinkLibraryDependencies": "false", - "UseLibraryDependencyInputs": "true", - }, - "": { - "EmbedManifest": "false", - "GenerateManifest": "false", - "IgnoreImportLibrary": "true", - "LinkIncremental": "", - }, - "ManifestResourceCompile": { - "ResourceOutputFileName": - 
"$(IntDir)$(TargetFileName).embed.manifest.resfdsf" - }, - } - self.maxDiff = 9999 # on failure display a long diff - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, self.stderr - ) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings([]) - - -if __name__ == "__main__": - unittest.main() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py deleted file mode 100644 index 2e5c811bdde32..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio project reader/writer.""" - -import gyp.easy_xml as easy_xml - - -class Writer: - """Visual Studio XML tool file writer.""" - - def __init__(self, tool_file_path, name): - """Initializes the tool file. - - Args: - tool_file_path: Path to the tool file. - name: Name of the tool file. - """ - self.tool_file_path = tool_file_path - self.name = name - self.rules_section = ["Rules"] - - def AddCustomBuildRule( - self, name, cmd, description, additional_dependencies, outputs, extensions - ): - """Adds a rule to the tool file. - - Args: - name: Name of the rule. - description: Description of the rule. - cmd: Command line of the rule. - additional_dependencies: other files which may trigger the rule. - outputs: outputs of the rule. - extensions: extensions handled by the rule. - """ - rule = [ - "CustomBuildRule", - { - "Name": name, - "ExecutionDescription": description, - "CommandLine": cmd, - "Outputs": ";".join(outputs), - "FileExtensions": ";".join(extensions), - "AdditionalDependencies": ";".join(additional_dependencies), - }, - ] - self.rules_section.append(rule) - - def WriteIfChanged(self): - """Writes the tool file.""" - content = [ - "VisualStudioToolFile", - {"Version": "8.00", "Name": self.name}, - self.rules_section, - ] - easy_xml.WriteXmlIfChanged( - content, self.tool_file_path, encoding="Windows-1252" - ) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py deleted file mode 100644 index e580c00fb76d3..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py +++ /dev/null @@ -1,153 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio user preferences file writer.""" - -import os -import re -import socket # for gethostname - -import gyp.easy_xml as easy_xml - - -# ------------------------------------------------------------------------------ - - -def _FindCommandInPath(command): - """If there are no slashes in the command given, this function - searches the PATH env to find the given command, and converts it - to an absolute path. We have to do this because MSVS is looking - for an actual file to launch a debugger on, not just a command - line. 
Note that this happens at GYP time, so anything needing to
-    be built needs to have a full path."""
-    if "/" in command or "\\" in command:
-        # If the command already has path elements (either relative or
-        # absolute), then assume it is constructed properly.
-        return command
-    else:
-        # Search through the path list and find an existing file that
-        # we can access.
-        paths = os.environ.get("PATH", "").split(os.pathsep)
-        for path in paths:
-            item = os.path.join(path, command)
-            if os.path.isfile(item) and os.access(item, os.X_OK):
-                return item
-    return command
-
-
-def _QuoteWin32CommandLineArgs(args):
-    new_args = []
-    for arg in args:
-        # Replace all double-quotes with double-double-quotes to escape
-        # them for cmd shell, and then quote the whole thing if there
-        # are any.
-        if arg.find('"') != -1:
-            arg = '""'.join(arg.split('"'))
-            arg = '"%s"' % arg
-
-        # Otherwise, if there are any spaces, quote the whole arg.
-        elif re.search(r"[ \t\n]", arg):
-            arg = '"%s"' % arg
-        new_args.append(arg)
-    return new_args
-
-
-class Writer:
-    """Visual Studio XML user file writer."""
-
-    def __init__(self, user_file_path, version, name):
-        """Initializes the user file.
-
-        Args:
-          user_file_path: Path to the user file.
-          version: Version info.
-          name: Name of the user file.
-        """
-        self.user_file_path = user_file_path
-        self.version = version
-        self.name = name
-        self.configurations = {}
-
-    def AddConfig(self, name):
-        """Adds a configuration to the project.
-
-        Args:
-          name: Configuration name.
-        """
-        self.configurations[name] = ["Configuration", {"Name": name}]
-
-    def AddDebugSettings(
-        self, config_name, command, environment={}, working_directory=""
-    ):
-        """Adds a DebugSettings node to the user file for a particular config.
-
-        Args:
-          command: command line to run.  First element in the list is the
-            executable.  All elements of the command will be quoted if
-            necessary.
-          working_directory: working directory in which to run the command.
-            (optional)
-        """
-        command = _QuoteWin32CommandLineArgs(command)
-
-        abs_command = _FindCommandInPath(command[0])
-
-        if environment and isinstance(environment, dict):
-            env_list = [f'{key}="{val}"' for (key, val) in environment.items()]
-            environment = " ".join(env_list)
-        else:
-            environment = ""
-
-        n_cmd = [
-            "DebugSettings",
-            {
-                "Command": abs_command,
-                "WorkingDirectory": working_directory,
-                "CommandArguments": " ".join(command[1:]),
-                "RemoteMachine": socket.gethostname(),
-                "Environment": environment,
-                "EnvironmentMerge": "true",
-                # Currently these are all "dummy" values that we're just setting
-                # in the default manner that MSVS does it.  We could use some of
-                # these to add additional capabilities, I suppose, but they might
-                # not have parity with other platforms then.
-                "Attach": "false",
-                "DebuggerType": "3",  # 'auto' debugger
-                "Remote": "1",
-                "RemoteCommand": "",
-                "HttpUrl": "",
-                "PDBPath": "",
-                "SQLDebugging": "",
-                "DebuggerFlavor": "0",
-                "MPIRunCommand": "",
-                "MPIRunArguments": "",
-                "MPIRunWorkingDirectory": "",
-                "ApplicationCommand": "",
-                "ApplicationArguments": "",
-                "ShimCommand": "",
-                "MPIAcceptMode": "",
-                "MPIAcceptFilter": "",
-            },
-        ]
-
-        # Find the config, and add it if it doesn't exist.
-        if config_name not in self.configurations:
-            self.AddConfig(config_name)
-
-        # Add the DebugSettings onto the appropriate config.
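# Illustrative use of this Writer from generator code (the version argument
# is a stub standing in for an MSVSVersion object here; the path, project
# name, and command are hypothetical):
#
#     class _StubVersion:
#         def ProjectVersion(self):
#             return "9.00"
#
#     user_file = Writer("app.vcproj.user", _StubVersion(), "app")
#     user_file.AddDebugSettings(
#         "Debug|Win32", ["app.exe", "--verbose"], {"FOO": "bar"}, "$(ProjectDir)"
#     )
#     user_file.WriteIfChanged()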
-        self.configurations[config_name].append(n_cmd)
-
-    def WriteIfChanged(self):
-        """Writes the user file."""
-        configs = ["Configurations"]
-        for config, spec in sorted(self.configurations.items()):
-            configs.append(spec)
-
-        content = [
-            "VisualStudioUserFile",
-            {"Version": self.version.ProjectVersion(), "Name": self.name},
-            configs,
-        ]
-        easy_xml.WriteXmlIfChanged(
-            content, self.user_file_path, encoding="Windows-1252"
-        )
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
deleted file mode 100644
index 36bb782bd319a..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
+++ /dev/null
@@ -1,271 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions shared amongst the Windows generators."""
-
-import copy
-import os
-
-
-# A dictionary mapping supported target types to extensions.
-TARGET_TYPE_EXT = {
-    "executable": "exe",
-    "loadable_module": "dll",
-    "shared_library": "dll",
-    "static_library": "lib",
-    "windows_driver": "sys",
-}
-
-
-def _GetLargePdbShimCcPath():
-    """Returns the path of the large_pdb_shim.cc file."""
-    this_dir = os.path.abspath(os.path.dirname(__file__))
-    src_dir = os.path.abspath(os.path.join(this_dir, "..", ".."))
-    win_data_dir = os.path.join(src_dir, "data", "win")
-    large_pdb_shim_cc = os.path.join(win_data_dir, "large-pdb-shim.cc")
-    return large_pdb_shim_cc
-
-
-def _DeepCopySomeKeys(in_dict, keys):
-    """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
-
-    Arguments:
-      in_dict: The dictionary to copy.
-      keys: The keys to be copied.  If a key is in this list and doesn't exist in
-        |in_dict| this is not an error.
-    Returns:
-      The partially deep-copied dictionary.
-    """
-    d = {}
-    for key in keys:
-        if key not in in_dict:
-            continue
-        d[key] = copy.deepcopy(in_dict[key])
-    return d
-
-
-def _SuffixName(name, suffix):
-    """Add a suffix to the end of a target.
-
-    Arguments:
-      name: name of the target (foo#target)
-      suffix: the suffix to be added
-    Returns:
-      Target name with suffix added (foo_suffix#target)
-    """
-    parts = name.rsplit("#", 1)
-    parts[0] = f"{parts[0]}_{suffix}"
-    return "#".join(parts)
-
-
-def _ShardName(name, number):
-    """Add a shard number to the end of a target.
-
-    Arguments:
-      name: name of the target (foo#target)
-      number: shard number
-    Returns:
-      Target name with shard added (foo_1#target)
-    """
-    return _SuffixName(name, str(number))
-
-
-def ShardTargets(target_list, target_dicts):
-    """Shard some targets apart to work around the linker's limits.
-
-    Arguments:
-      target_list: List of target pairs: 'base/base.gyp:base'.
-      target_dicts: Dict of target properties keyed on target pair.
-    Returns:
-      Tuple of the new sharded versions of the inputs.
-    """
-    # Gather the targets to shard, and how many pieces.
-    targets_to_shard = {}
-    for t in target_dicts:
-        shards = int(target_dicts[t].get("msvs_shard", 0))
-        if shards:
-            targets_to_shard[t] = shards
-    # Shard target_list.
-    new_target_list = []
-    for t in target_list:
-        if t in targets_to_shard:
-            for i in range(targets_to_shard[t]):
-                new_target_list.append(_ShardName(t, i))
-        else:
-            new_target_list.append(t)
-    # Shard target_dict.
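# A standalone sketch of the round-robin split performed below, using made-up
# source names: shard i of N takes every Nth source starting at offset i, so
# a target with msvs_shard=2 ends up with two disjoint source lists.
#
#     sources = ["a.cc", "b.cc", "c.cc", "d.cc", "e.cc"]
#     split = [sources[i::2] for i in range(2)]
#     # split == [["a.cc", "c.cc", "e.cc"], ["b.cc", "d.cc"]]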
- new_target_dicts = {} - for t in target_dicts: - if t in targets_to_shard: - for i in range(targets_to_shard[t]): - name = _ShardName(t, i) - new_target_dicts[name] = copy.copy(target_dicts[t]) - new_target_dicts[name]["target_name"] = _ShardName( - new_target_dicts[name]["target_name"], i - ) - sources = new_target_dicts[name].get("sources", []) - new_sources = [] - for pos in range(i, len(sources), targets_to_shard[t]): - new_sources.append(sources[pos]) - new_target_dicts[name]["sources"] = new_sources - else: - new_target_dicts[t] = target_dicts[t] - # Shard dependencies. - for t in sorted(new_target_dicts): - for deptype in ("dependencies", "dependencies_original"): - dependencies = copy.copy(new_target_dicts[t].get(deptype, [])) - new_dependencies = [] - for d in dependencies: - if d in targets_to_shard: - for i in range(targets_to_shard[d]): - new_dependencies.append(_ShardName(d, i)) - else: - new_dependencies.append(d) - new_target_dicts[t][deptype] = new_dependencies - - return (new_target_list, new_target_dicts) - - -def _GetPdbPath(target_dict, config_name, vars): - """Returns the path to the PDB file that will be generated by a given - configuration. - - The lookup proceeds as follows: - - Look for an explicit path in the VCLinkerTool configuration block. - - Look for an 'msvs_large_pdb_path' variable. - - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is - specified. - - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'. - - Arguments: - target_dict: The target dictionary to be searched. - config_name: The name of the configuration of interest. - vars: A dictionary of common GYP variables with generator-specific values. - Returns: - The path of the corresponding PDB file. - """ - config = target_dict["configurations"][config_name] - msvs = config.setdefault("msvs_settings", {}) - - linker = msvs.get("VCLinkerTool", {}) - - pdb_path = linker.get("ProgramDatabaseFile") - if pdb_path: - return pdb_path - - variables = target_dict.get("variables", {}) - pdb_path = variables.get("msvs_large_pdb_path", None) - if pdb_path: - return pdb_path - - pdb_base = target_dict.get("product_name", target_dict["target_name"]) - pdb_base = "{}.{}.pdb".format(pdb_base, TARGET_TYPE_EXT[target_dict["type"]]) - pdb_path = vars["PRODUCT_DIR"] + "/" + pdb_base - - return pdb_path - - -def InsertLargePdbShims(target_list, target_dicts, vars): - """Insert a shim target that forces the linker to use 4KB pagesize PDBs. - - This is a workaround for targets with PDBs greater than 1GB in size, the - limit for the 1KB pagesize PDBs created by the linker by default. - - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - vars: A dictionary of common GYP variables with generator-specific values. - Returns: - Tuple of the shimmed version of the inputs. - """ - # Determine which targets need shimming. - targets_to_shim = [] - for t in target_dicts: - target_dict = target_dicts[t] - - # We only want to shim targets that have msvs_large_pdb enabled. - if not int(target_dict.get("msvs_large_pdb", 0)): - continue - # This is intended for executable, shared_library and loadable_module - # targets where every configuration is set up to produce a PDB output. - # If any of these conditions is not true then the shim logic will fail - # below. 
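# For illustration, a target opts in to the shim with a truthy msvs_large_pdb
# flag in its target dict (hypothetical target; anything falsy or absent is
# skipped by the check above):
#
#     "my_dll#target": {"type": "shared_library", "msvs_large_pdb": 1, ...}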
- targets_to_shim.append(t) - - large_pdb_shim_cc = _GetLargePdbShimCcPath() - - for t in targets_to_shim: - target_dict = target_dicts[t] - target_name = target_dict.get("target_name") - - base_dict = _DeepCopySomeKeys( - target_dict, ["configurations", "default_configuration", "toolset"] - ) - - # This is the dict for copying the source file (part of the GYP tree) - # to the intermediate directory of the project. This is necessary because - # we can't always build a relative path to the shim source file (on Windows - # GYP and the project may be on different drives), and Ninja hates absolute - # paths (it ends up generating the .obj and .obj.d alongside the source - # file, polluting GYPs tree). - copy_suffix = "large_pdb_copy" - copy_target_name = target_name + "_" + copy_suffix - full_copy_target_name = _SuffixName(t, copy_suffix) - shim_cc_basename = os.path.basename(large_pdb_shim_cc) - shim_cc_dir = vars["SHARED_INTERMEDIATE_DIR"] + "/" + copy_target_name - shim_cc_path = shim_cc_dir + "/" + shim_cc_basename - copy_dict = copy.deepcopy(base_dict) - copy_dict["target_name"] = copy_target_name - copy_dict["type"] = "none" - copy_dict["sources"] = [large_pdb_shim_cc] - copy_dict["copies"] = [ - {"destination": shim_cc_dir, "files": [large_pdb_shim_cc]} - ] - - # This is the dict for the PDB generating shim target. It depends on the - # copy target. - shim_suffix = "large_pdb_shim" - shim_target_name = target_name + "_" + shim_suffix - full_shim_target_name = _SuffixName(t, shim_suffix) - shim_dict = copy.deepcopy(base_dict) - shim_dict["target_name"] = shim_target_name - shim_dict["type"] = "static_library" - shim_dict["sources"] = [shim_cc_path] - shim_dict["dependencies"] = [full_copy_target_name] - - # Set up the shim to output its PDB to the same location as the final linker - # target. - for config_name, config in shim_dict.get("configurations").items(): - pdb_path = _GetPdbPath(target_dict, config_name, vars) - - # A few keys that we don't want to propagate. - for key in ["msvs_precompiled_header", "msvs_precompiled_source", "test"]: - config.pop(key, None) - - msvs = config.setdefault("msvs_settings", {}) - - # Update the compiler directives in the shim target. - compiler = msvs.setdefault("VCCLCompilerTool", {}) - compiler["DebugInformationFormat"] = "3" - compiler["ProgramDataBaseFileName"] = pdb_path - - # Set the explicit PDB path in the appropriate configuration of the - # original target. - config = target_dict["configurations"][config_name] - msvs = config.setdefault("msvs_settings", {}) - linker = msvs.setdefault("VCLinkerTool", {}) - linker["GenerateDebugInformation"] = "true" - linker["ProgramDatabaseFile"] = pdb_path - - # Add the new targets. They must go to the beginning of the list so that - # the dependency generation works as expected in ninja. - target_list.insert(0, full_copy_target_name) - target_list.insert(0, full_shim_target_name) - target_dicts[full_copy_target_name] = copy_dict - target_dicts[full_shim_target_name] = shim_dict - - # Update the original target to depend on the shim target. 
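# Net effect, sketched for a hypothetical target "base#target" (names follow
# the _SuffixName convention above):
#
#     base#target -> base_large_pdb_shim#target -> base_large_pdb_copy#target
#
# The copy target stages large-pdb-shim.cc into the intermediate directory,
# the shim compiles it as a static_library whose PDB lands at the final path,
# and the original target both depends on the shim and writes its own debug
# info to that same PDB.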
-        target_dict.setdefault("dependencies", []).append(full_shim_target_name)
-
-    return (target_list, target_dicts)
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
deleted file mode 100644
index 8d7f21e82dd2f..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
+++ /dev/null
@@ -1,574 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Handle version information related to Visual Studio."""
-
-import errno
-import os
-import re
-import subprocess
-import sys
-import glob
-
-
-def JoinPath(*args):
-    return os.path.normpath(os.path.join(*args))
-
-
-class VisualStudioVersion:
-    """Information regarding a version of Visual Studio."""
-
-    def __init__(
-        self,
-        short_name,
-        description,
-        solution_version,
-        project_version,
-        flat_sln,
-        uses_vcxproj,
-        path,
-        sdk_based,
-        default_toolset=None,
-        compatible_sdks=None,
-    ):
-        self.short_name = short_name
-        self.description = description
-        self.solution_version = solution_version
-        self.project_version = project_version
-        self.flat_sln = flat_sln
-        self.uses_vcxproj = uses_vcxproj
-        self.path = path
-        self.sdk_based = sdk_based
-        self.default_toolset = default_toolset
-        compatible_sdks = compatible_sdks or []
-        compatible_sdks.sort(key=lambda v: float(v.replace("v", "")), reverse=True)
-        self.compatible_sdks = compatible_sdks
-
-    def ShortName(self):
-        return self.short_name
-
-    def Description(self):
-        """Get the full description of the version."""
-        return self.description
-
-    def SolutionVersion(self):
-        """Get the version number of the sln files."""
-        return self.solution_version
-
-    def ProjectVersion(self):
-        """Get the version number of the vcproj or vcxproj files."""
-        return self.project_version
-
-    def FlatSolution(self):
-        return self.flat_sln
-
-    def UsesVcxproj(self):
-        """Returns true if this version uses a vcxproj file."""
-        return self.uses_vcxproj
-
-    def ProjectExtension(self):
-        """Returns the file extension for the project."""
-        return self.uses_vcxproj and ".vcxproj" or ".vcproj"
-
-    def Path(self):
-        """Returns the path to Visual Studio installation."""
-        return self.path
-
-    def ToolPath(self, tool):
-        """Returns the path to a given compiler tool."""
-        return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
-
-    def DefaultToolset(self):
-        """Returns the msbuild toolset version that will be used in the absence
-        of a user override."""
-        return self.default_toolset
-
-    def _SetupScriptInternal(self, target_arch):
-        """Returns a command (with arguments) to be used to set up the
-        environment."""
-        assert target_arch in ("x86", "x64"), "target_arch not supported"
-        # If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
-        # depot_tools build tools and should run SetEnv.Cmd to set up the
-        # environment.  The check for WindowsSDKDir alone is not sufficient because
-        # this is set by running vcvarsall.bat.
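# Illustrative call with a hypothetical install path (GYP's Windows code runs
# the returned command through the shell and scrapes the environment it
# prints; SetupScript() additionally verifies the script exists on disk):
#
#     version = _CreateVersion("2019", r"C:\VS\2019")
#     version.SetupScript("x64")
#     # -> [r"C:\VS\2019\VC\Auxiliary\Build\vcvarsall.bat", "amd64"] on an
#     #    x64 host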
- sdk_dir = os.environ.get("WindowsSDKDir", "") - setup_path = JoinPath(sdk_dir, "Bin", "SetEnv.Cmd") - if self.sdk_based and sdk_dir and os.path.exists(setup_path): - return [setup_path, "/" + target_arch] - - is_host_arch_x64 = ( - os.environ.get("PROCESSOR_ARCHITECTURE") == "AMD64" - or os.environ.get("PROCESSOR_ARCHITEW6432") == "AMD64" - ) - - # For VS2017 (and newer) it's fairly easy - if self.short_name >= "2017": - script_path = JoinPath( - self.path, "VC", "Auxiliary", "Build", "vcvarsall.bat" - ) - - # Always use a native executable, cross-compiling if necessary. - host_arch = "amd64" if is_host_arch_x64 else "x86" - msvc_target_arch = "amd64" if target_arch == "x64" else "x86" - arg = host_arch - if host_arch != msvc_target_arch: - arg += "_" + msvc_target_arch - - return [script_path, arg] - - # We try to find the best version of the env setup batch. - vcvarsall = JoinPath(self.path, "VC", "vcvarsall.bat") - if target_arch == "x86": - if ( - self.short_name >= "2013" - and self.short_name[-1] != "e" - and is_host_arch_x64 - ): - # VS2013 and later, non-Express have a x64-x86 cross that we want - # to prefer. - return [vcvarsall, "amd64_x86"] - else: - # Otherwise, the standard x86 compiler. We don't use VC/vcvarsall.bat - # for x86 because vcvarsall calls vcvars32, which it can only find if - # VS??COMNTOOLS is set, which isn't guaranteed. - return [JoinPath(self.path, "Common7", "Tools", "vsvars32.bat")] - elif target_arch == "x64": - arg = "x86_amd64" - # Use the 64-on-64 compiler if we're not using an express edition and - # we're running on a 64bit OS. - if self.short_name[-1] != "e" and is_host_arch_x64: - arg = "amd64" - return [vcvarsall, arg] - - def SetupScript(self, target_arch): - script_data = self._SetupScriptInternal(target_arch) - script_path = script_data[0] - if not os.path.exists(script_path): - raise Exception( - "%s is missing - make sure VC++ tools are installed." % script_path - ) - return script_data - - -def _RegistryQueryBase(sysdir, key, value): - """Use reg.exe to read a particular key. - - While ideally we might use the win32 module, we would like gyp to be - python neutral, so for instance cygwin python lacks this module. - - Arguments: - sysdir: The system subdirectory to attempt to launch reg.exe from. - key: The registry key to read from. - value: The particular value to read. - Return: - stdout from reg.exe, or None for failure. - """ - # Skip if not on Windows or Python Win32 setup issue - if sys.platform not in ("win32", "cygwin"): - return None - # Setup params to pass to and attempt to launch reg.exe - cmd = [os.path.join(os.environ.get("WINDIR", ""), sysdir, "reg.exe"), "query", key] - if value: - cmd.extend(["/v", value]) - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid - # Note that the error text may be in [1] in some cases - text = p.communicate()[0].decode("utf-8") - # Check return code from reg.exe; officially 0==success and 1==error - if p.returncode: - return None - return text - - -def _RegistryQuery(key, value=None): - r"""Use reg.exe to read a particular key through _RegistryQueryBase. - - First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If - that fails, it falls back to System32. Sysnative is available on Vista and - up and available on Windows Server 2003 and XP through KB patch 942589. 
Note
-    that Sysnative will always fail if using 64-bit python due to it being a
-    virtual directory and System32 will work correctly in the first place.
-
-    KB 942589 - http://support.microsoft.com/kb/942589/en-us.
-
-    Arguments:
-      key: The registry key.
-      value: The particular registry value to read (optional).
-    Return:
-      stdout from reg.exe, or None for failure.
-    """
-    text = None
-    try:
-        text = _RegistryQueryBase("Sysnative", key, value)
-    except OSError as e:
-        if e.errno == errno.ENOENT:
-            text = _RegistryQueryBase("System32", key, value)
-        else:
-            raise
-    return text
-
-
-def _RegistryGetValueUsingWinReg(key, value):
-    """Use the _winreg module to obtain the value of a registry key.
-
-    Args:
-      key: The registry key.
-      value: The particular registry value to read.
-    Return:
-      contents of the registry key's value, or None on failure.  Throws
-      ImportError if winreg is unavailable.
-    """
-    from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
-    try:
-        root, subkey = key.split("\\", 1)
-        assert root == "HKLM"  # Only need HKLM for now.
-        with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey:
-            return QueryValueEx(hkey, value)[0]
-    except OSError:
-        return None
-
-
-def _RegistryGetValue(key, value):
-    """Use _winreg or reg.exe to obtain the value of a registry key.
-
-    Using _winreg is preferable because it solves an issue on some corporate
-    environments where access to reg.exe is locked down. However, we still need
-    to fall back to reg.exe for the case where the _winreg module is not available
-    (for example in cygwin python).
-
-    Args:
-      key: The registry key.
-      value: The particular registry value to read.
-    Return:
-      contents of the registry key's value, or None on failure.
-    """
-    try:
-        return _RegistryGetValueUsingWinReg(key, value)
-    except ImportError:
-        pass
-
-    # Fallback to reg.exe if we fail to import _winreg.
-    text = _RegistryQuery(key, value)
-    if not text:
-        return None
-    # Extract value.
-    match = re.search(r"REG_\w+\s+([^\r]+)\r\n", text)
-    if not match:
-        return None
-    return match.group(1)
-
-
-def _CreateVersion(name, path, sdk_based=False):
-    """Sets up MSVS project generation.
-
-    Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
-    autodetected if GYP_MSVS_VERSION is not explicitly specified.  If a version
-    is passed in that doesn't match a value in versions, Python will throw an
-    error.
- """ - if path: - path = os.path.normpath(path) - versions = { - "2022": VisualStudioVersion( - "2022", - "Visual Studio 2022", - solution_version="12.00", - project_version="17.0", - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v143", - compatible_sdks=["v8.1", "v10.0"], - ), - "2019": VisualStudioVersion( - "2019", - "Visual Studio 2019", - solution_version="12.00", - project_version="16.0", - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v142", - compatible_sdks=["v8.1", "v10.0"], - ), - "2017": VisualStudioVersion( - "2017", - "Visual Studio 2017", - solution_version="12.00", - project_version="15.0", - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v141", - compatible_sdks=["v8.1", "v10.0"], - ), - "2015": VisualStudioVersion( - "2015", - "Visual Studio 2015", - solution_version="12.00", - project_version="14.0", - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v140", - ), - "2013": VisualStudioVersion( - "2013", - "Visual Studio 2013", - solution_version="13.00", - project_version="12.0", - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v120", - ), - "2013e": VisualStudioVersion( - "2013e", - "Visual Studio 2013", - solution_version="13.00", - project_version="12.0", - flat_sln=True, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v120", - ), - "2012": VisualStudioVersion( - "2012", - "Visual Studio 2012", - solution_version="12.00", - project_version="4.0", - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v110", - ), - "2012e": VisualStudioVersion( - "2012e", - "Visual Studio 2012", - solution_version="12.00", - project_version="4.0", - flat_sln=True, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset="v110", - ), - "2010": VisualStudioVersion( - "2010", - "Visual Studio 2010", - solution_version="11.00", - project_version="4.0", - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - ), - "2010e": VisualStudioVersion( - "2010e", - "Visual C++ Express 2010", - solution_version="11.00", - project_version="4.0", - flat_sln=True, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - ), - "2008": VisualStudioVersion( - "2008", - "Visual Studio 2008", - solution_version="10.00", - project_version="9.00", - flat_sln=False, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based, - ), - "2008e": VisualStudioVersion( - "2008e", - "Visual Studio 2008", - solution_version="10.00", - project_version="9.00", - flat_sln=True, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based, - ), - "2005": VisualStudioVersion( - "2005", - "Visual Studio 2005", - solution_version="9.00", - project_version="8.00", - flat_sln=False, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based, - ), - "2005e": VisualStudioVersion( - "2005e", - "Visual Studio 2005", - solution_version="9.00", - project_version="8.00", - flat_sln=True, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based, - ), - } - return versions[str(name)] - - -def _ConvertToCygpath(path): - """Convert to cygwin path if we are using cygwin.""" - if sys.platform == "cygwin": - p = subprocess.Popen(["cygpath", path], stdout=subprocess.PIPE) - path = p.communicate()[0].decode("utf-8").strip() - return path - - -def _DetectVisualStudioVersions(versions_to_check, force_express): - """Collect the 
list of installed visual studio versions. - - Returns: - A list of visual studio versions installed in descending order of - usage preference. - Base this on the registry and a quick check if devenv.exe exists. - Possibilities are: - 2005(e) - Visual Studio 2005 (8) - 2008(e) - Visual Studio 2008 (9) - 2010(e) - Visual Studio 2010 (10) - 2012(e) - Visual Studio 2012 (11) - 2013(e) - Visual Studio 2013 (12) - 2015 - Visual Studio 2015 (14) - 2017 - Visual Studio 2017 (15) - 2019 - Visual Studio 2019 (16) - 2022 - Visual Studio 2022 (17) - Where (e) is e for express editions of MSVS and blank otherwise. - """ - version_to_year = { - "8.0": "2005", - "9.0": "2008", - "10.0": "2010", - "11.0": "2012", - "12.0": "2013", - "14.0": "2015", - "15.0": "2017", - "16.0": "2019", - "17.0": "2022", - } - versions = [] - for version in versions_to_check: - # Old method of searching for which VS version is installed - # We don't use the 2010-encouraged-way because we also want to get the - # path to the binaries, which it doesn't offer. - keys = [ - r"HKLM\Software\Microsoft\VisualStudio\%s" % version, - r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s" % version, - r"HKLM\Software\Microsoft\VCExpress\%s" % version, - r"HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s" % version, - ] - for index in range(len(keys)): - path = _RegistryGetValue(keys[index], "InstallDir") - if not path: - continue - path = _ConvertToCygpath(path) - # Check for full. - full_path = os.path.join(path, "devenv.exe") - express_path = os.path.join(path, "*express.exe") - if not force_express and os.path.exists(full_path): - # Add this one. - versions.append( - _CreateVersion( - version_to_year[version], os.path.join(path, "..", "..") - ) - ) - # Check for express. - elif glob.glob(express_path): - # Add this one. - versions.append( - _CreateVersion( - version_to_year[version] + "e", os.path.join(path, "..", "..") - ) - ) - - # The old method above does not work when only SDK is installed. - keys = [ - r"HKLM\Software\Microsoft\VisualStudio\SxS\VC7", - r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7", - r"HKLM\Software\Microsoft\VisualStudio\SxS\VS7", - r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VS7", - ] - for index in range(len(keys)): - path = _RegistryGetValue(keys[index], version) - if not path: - continue - path = _ConvertToCygpath(path) - if version == "15.0": - if os.path.exists(path): - versions.append(_CreateVersion("2017", path)) - elif version != "14.0": # There is no Express edition for 2015. - versions.append( - _CreateVersion( - version_to_year[version] + "e", - os.path.join(path, ".."), - sdk_based=True, - ) - ) - - return versions - - -def SelectVisualStudioVersion(version="auto", allow_fallback=True): - """Select which version of Visual Studio projects to generate. - - Arguments: - version: Hook to allow caller to force a particular version (vs auto). - Returns: - An object representing a visual studio project format version. - """ - # In auto mode, check environment variable for override. 
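# A hedged, standalone sketch (hypothetical helper, not part of this file) of
# the precedence applied below: an explicit version argument always wins, and
# only an "auto" request consults the GYP_MSVS_VERSION environment variable.
import os

def resolve_requested_version(version="auto"):
    if version != "auto":
        return version  # an explicit request is never overridden
    return os.environ.get("GYP_MSVS_VERSION", "auto")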
- if version == "auto": - version = os.environ.get("GYP_MSVS_VERSION", "auto") - version_map = { - "auto": ("17.0", "16.0", "15.0", "14.0", "12.0", "10.0", "9.0", "8.0", "11.0"), - "2005": ("8.0",), - "2005e": ("8.0",), - "2008": ("9.0",), - "2008e": ("9.0",), - "2010": ("10.0",), - "2010e": ("10.0",), - "2012": ("11.0",), - "2012e": ("11.0",), - "2013": ("12.0",), - "2013e": ("12.0",), - "2015": ("14.0",), - "2017": ("15.0",), - "2019": ("16.0",), - "2022": ("17.0",), - } - override_path = os.environ.get("GYP_MSVS_OVERRIDE_PATH") - if override_path: - msvs_version = os.environ.get("GYP_MSVS_VERSION") - if not msvs_version: - raise ValueError( - "GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be " - "set to a particular version (e.g. 2010e)." - ) - return _CreateVersion(msvs_version, override_path, sdk_based=True) - version = str(version) - versions = _DetectVisualStudioVersions(version_map[version], "e" in version) - if not versions: - if not allow_fallback: - raise ValueError("Could not locate Visual Studio installation.") - if version == "auto": - # Default to 2005 if we couldn't find anything - return _CreateVersion("2005", None) - else: - return _CreateVersion(version, None) - return versions[0] diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/__init__.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/__init__.py deleted file mode 100755 index d6cc01307d997..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/__init__.py +++ /dev/null @@ -1,692 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - - -import copy -import gyp.input -import argparse -import os.path -import re -import shlex -import sys -import traceback -from gyp.common import GypError - - -# Default debug modes for GYP -debug = {} - -# List of "official" debug modes, but you can use anything you like. -DEBUG_GENERAL = "general" -DEBUG_VARIABLES = "variables" -DEBUG_INCLUDES = "includes" - - -def DebugOutput(mode, message, *args): - if "all" in gyp.debug or mode in gyp.debug: - ctx = ("unknown", 0, "unknown") - try: - f = traceback.extract_stack(limit=2) - if f: - ctx = f[0][:3] - except Exception: - pass - if args: - message %= args - print( - "%s:%s:%d:%s %s" - % (mode.upper(), os.path.basename(ctx[0]), ctx[1], ctx[2], message) - ) - - -def FindBuildFiles(): - extension = ".gyp" - files = os.listdir(os.getcwd()) - build_files = [] - for file in files: - if file.endswith(extension): - build_files.append(file) - return build_files - - -def Load( - build_files, - format, - default_variables={}, - includes=[], - depth=".", - params=None, - check=False, - circular_check=True, -): - """ - Loads one or more specified build files. - default_variables and includes will be copied before use. - Returns the generator for the specified format and the - data returned by loading the specified build files. - """ - if params is None: - params = {} - - if "-" in format: - format, params["flavor"] = format.split("-", 1) - - default_variables = copy.copy(default_variables) - - # Default variables provided by this program and its modules should be - # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace, - # avoiding collisions with user and automatic variables. 
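# As a hedged illustration of that convention (the values here are invented):
# after the assignments below, default_variables might contain
#
#     {"GENERATOR": "ninja", "GENERATOR_FLAVOR": "", "my_define": 1}
#
# where the capitalized keys come from gyp itself and "my_define" is a
# user-supplied -D variable that, by construction, cannot collide with them.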
- default_variables["GENERATOR"] = format - default_variables["GENERATOR_FLAVOR"] = params.get("flavor", "") - - # Format can be a custom python file, or by default the name of a module - # within gyp.generator. - if format.endswith(".py"): - generator_name = os.path.splitext(format)[0] - path, generator_name = os.path.split(generator_name) - - # Make sure the path to the custom generator is in sys.path - # Don't worry about removing it once we are done. Keeping the path - # to each generator that is used in sys.path is likely harmless and - # arguably a good idea. - path = os.path.abspath(path) - if path not in sys.path: - sys.path.insert(0, path) - else: - generator_name = "gyp.generator." + format - - # These parameters are passed in order (as opposed to by key) - # because ActivePython cannot handle key parameters to __import__. - generator = __import__(generator_name, globals(), locals(), generator_name) - for (key, val) in generator.generator_default_variables.items(): - default_variables.setdefault(key, val) - - output_dir = params["options"].generator_output or params["options"].toplevel_dir - if default_variables["GENERATOR"] == "ninja": - default_variables.setdefault( - "PRODUCT_DIR_ABS", - os.path.join( - output_dir, "out", default_variables.get("build_type", "default") - ), - ) - else: - default_variables.setdefault( - "PRODUCT_DIR_ABS", - os.path.join(output_dir, default_variables["CONFIGURATION_NAME"]), - ) - - # Give the generator the opportunity to set additional variables based on - # the params it will receive in the output phase. - if getattr(generator, "CalculateVariables", None): - generator.CalculateVariables(default_variables, params) - - # Give the generator the opportunity to set generator_input_info based on - # the params it will receive in the output phase. - if getattr(generator, "CalculateGeneratorInputInfo", None): - generator.CalculateGeneratorInputInfo(params) - - # Fetch the generator specific info that gets fed to input, we use getattr - # so we can default things and the generators only have to provide what - # they need. - generator_input_info = { - "non_configuration_keys": getattr( - generator, "generator_additional_non_configuration_keys", [] - ), - "path_sections": getattr(generator, "generator_additional_path_sections", []), - "extra_sources_for_rules": getattr( - generator, "generator_extra_sources_for_rules", [] - ), - "generator_supports_multiple_toolsets": getattr( - generator, "generator_supports_multiple_toolsets", False - ), - "generator_wants_static_library_dependencies_adjusted": getattr( - generator, "generator_wants_static_library_dependencies_adjusted", True - ), - "generator_wants_sorted_dependencies": getattr( - generator, "generator_wants_sorted_dependencies", False - ), - "generator_filelist_paths": getattr( - generator, "generator_filelist_paths", None - ), - } - - # Process the input specific to this generator. - result = gyp.input.Load( - build_files, - default_variables, - includes[:], - depth, - generator_input_info, - check, - circular_check, - params["parallel"], - params["root_targets"], - ) - return [generator] + result - - -def NameValueListToDict(name_value_list): - """ - Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary - of the pairs. If a string is simply NAME, then the value in the dictionary - is set to True. If VALUE can be converted to an integer, it is. 
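    For example (illustrative input): ["FOO=1", "BAR=baz", "QUUX"] yields
    {"FOO": 1, "BAR": "baz", "QUUX": True}.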
- """ - result = {} - for item in name_value_list: - tokens = item.split("=", 1) - if len(tokens) == 2: - # If we can make it an int, use that, otherwise, use the string. - try: - token_value = int(tokens[1]) - except ValueError: - token_value = tokens[1] - # Set the variable to the supplied value. - result[tokens[0]] = token_value - else: - # No value supplied, treat it as a boolean and set it. - result[tokens[0]] = True - return result - - -def ShlexEnv(env_name): - flags = os.environ.get(env_name, []) - if flags: - flags = shlex.split(flags) - return flags - - -def FormatOpt(opt, value): - if opt.startswith("--"): - return f"{opt}={value}" - return opt + value - - -def RegenerateAppendFlag(flag, values, predicate, env_name, options): - """Regenerate a list of command line flags, for an option of action='append'. - - The |env_name|, if given, is checked in the environment and used to generate - an initial list of options, then the options that were specified on the - command line (given in |values|) are appended. This matches the handling of - environment variables and command line flags where command line flags override - the environment, while not requiring the environment to be set when the flags - are used again. - """ - flags = [] - if options.use_environment and env_name: - for flag_value in ShlexEnv(env_name): - value = FormatOpt(flag, predicate(flag_value)) - if value in flags: - flags.remove(value) - flags.append(value) - if values: - for flag_value in values: - flags.append(FormatOpt(flag, predicate(flag_value))) - return flags - - -def RegenerateFlags(options): - """Given a parsed options object, and taking the environment variables into - account, returns a list of flags that should regenerate an equivalent options - object (even in the absence of the environment variables.) - - Any path options will be normalized relative to depth. - - The format flag is not included, as it is assumed the calling generator will - set that as appropriate. - """ - - def FixPath(path): - path = gyp.common.FixIfRelativePath(path, options.depth) - if not path: - return os.path.curdir - return path - - def Noop(value): - return value - - # We always want to ignore the environment when regenerating, to avoid - # duplicate or changed flags in the environment at the time of regeneration. - flags = ["--ignore-environment"] - for name, metadata in options._regeneration_metadata.items(): - opt = metadata["opt"] - value = getattr(options, name) - value_predicate = metadata["type"] == "path" and FixPath or Noop - action = metadata["action"] - env_name = metadata["env_name"] - if action == "append": - flags.extend( - RegenerateAppendFlag(opt, value, value_predicate, env_name, options) - ) - elif action in ("store", None): # None is a synonym for 'store'. 
- if value: - flags.append(FormatOpt(opt, value_predicate(value))) - elif options.use_environment and env_name and os.environ.get(env_name): - flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name)))) - elif action in ("store_true", "store_false"): - if (action == "store_true" and value) or ( - action == "store_false" and not value - ): - flags.append(opt) - elif options.use_environment and env_name: - print( - "Warning: environment regeneration unimplemented " - "for %s flag %r env_name %r" % (action, opt, env_name), - file=sys.stderr, - ) - else: - print( - "Warning: regeneration unimplemented for action %r " - "flag %r" % (action, opt), - file=sys.stderr, - ) - - return flags - - -class RegeneratableOptionParser(argparse.ArgumentParser): - def __init__(self, usage): - self.__regeneratable_options = {} - argparse.ArgumentParser.__init__(self, usage=usage) - - def add_argument(self, *args, **kw): - """Add an option to the parser. - - This accepts the same arguments as ArgumentParser.add_argument, plus the - following: - regenerate: can be set to False to prevent this option from being included - in regeneration. - env_name: name of environment variable that additional values for this - option come from. - type: adds type='path', to tell the regenerator that the values of - this option need to be made relative to options.depth - """ - env_name = kw.pop("env_name", None) - if "dest" in kw and kw.pop("regenerate", True): - dest = kw["dest"] - - # The path type is needed for regenerating, for optparse we can just treat - # it as a string. - type = kw.get("type") - if type == "path": - kw["type"] = str - - self.__regeneratable_options[dest] = { - "action": kw.get("action"), - "type": type, - "env_name": env_name, - "opt": args[0], - } - - argparse.ArgumentParser.add_argument(self, *args, **kw) - - def parse_args(self, *args): - values, args = argparse.ArgumentParser.parse_known_args(self, *args) - values._regeneration_metadata = self.__regeneratable_options - return values, args - - -def gyp_main(args): - my_name = os.path.basename(sys.argv[0]) - usage = "usage: %(prog)s [options ...] [build_file ...]" - - parser = RegeneratableOptionParser(usage=usage.replace("%s", "%(prog)s")) - parser.add_argument( - "--build", - dest="configs", - action="append", - help="configuration for build after project generation", - ) - parser.add_argument( - "--check", dest="check", action="store_true", help="check format of gyp files" - ) - parser.add_argument( - "--config-dir", - dest="config_dir", - action="store", - env_name="GYP_CONFIG_DIR", - default=None, - help="The location for configuration files like " "include.gypi.", - ) - parser.add_argument( - "-d", - "--debug", - dest="debug", - metavar="DEBUGMODE", - action="append", - default=[], - help="turn on a debugging " - 'mode for debugging GYP. 
Supported modes are "variables", ' - '"includes" and "general" or "all" for all of them.', - ) - parser.add_argument( - "-D", - dest="defines", - action="append", - metavar="VAR=VAL", - env_name="GYP_DEFINES", - help="sets variable VAR to value VAL", - ) - parser.add_argument( - "--depth", - dest="depth", - metavar="PATH", - type="path", - help="set DEPTH gyp variable to a relative path to PATH", - ) - parser.add_argument( - "-f", - "--format", - dest="formats", - action="append", - env_name="GYP_GENERATORS", - regenerate=False, - help="output formats to generate", - ) - parser.add_argument( - "-G", - dest="generator_flags", - action="append", - default=[], - metavar="FLAG=VAL", - env_name="GYP_GENERATOR_FLAGS", - help="sets generator flag FLAG to VAL", - ) - parser.add_argument( - "--generator-output", - dest="generator_output", - action="store", - default=None, - metavar="DIR", - type="path", - env_name="GYP_GENERATOR_OUTPUT", - help="puts generated build files under DIR", - ) - parser.add_argument( - "--ignore-environment", - dest="use_environment", - action="store_false", - default=True, - regenerate=False, - help="do not read options from environment variables", - ) - parser.add_argument( - "-I", - "--include", - dest="includes", - action="append", - metavar="INCLUDE", - type="path", - help="files to include in all loaded .gyp files", - ) - # --no-circular-check disables the check for circular relationships between - # .gyp files. These relationships should not exist, but they've only been - # observed to be harmful with the Xcode generator. Chromium's .gyp files - # currently have some circular relationships on non-Mac platforms, so this - # option allows the strict behavior to be used on Macs and the lenient - # behavior to be used elsewhere. - # TODO(mark): Remove this option when http://crbug.com/35878 is fixed. 
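# A hedged, self-contained sketch of the flag wiring used below (the parser
# name is hypothetical): action="store_false" with default=True means the
# circular-relationship check stays enabled unless --no-circular-check is
# passed explicitly.
import argparse

sketch = argparse.ArgumentParser()
sketch.add_argument("--no-circular-check", dest="circular_check",
                    action="store_false", default=True)
assert sketch.parse_args([]).circular_check is True
assert sketch.parse_args(["--no-circular-check"]).circular_check is False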
- parser.add_argument( - "--no-circular-check", - dest="circular_check", - action="store_false", - default=True, - regenerate=False, - help="don't check for circular relationships between files", - ) - parser.add_argument( - "--no-parallel", - action="store_true", - default=False, - help="Disable multiprocessing", - ) - parser.add_argument( - "-S", - "--suffix", - dest="suffix", - default="", - help="suffix to add to generated files", - ) - parser.add_argument( - "--toplevel-dir", - dest="toplevel_dir", - action="store", - default=None, - metavar="DIR", - type="path", - help="directory to use as the root of the source tree", - ) - parser.add_argument( - "-R", - "--root-target", - dest="root_targets", - action="append", - metavar="TARGET", - help="include only TARGET and its deep dependencies", - ) - parser.add_argument( - "-V", - "--version", - dest="version", - action="store_true", - help="Show the version and exit.", - ) - - options, build_files_arg = parser.parse_args(args) - if options.version: - import pkg_resources - print(f"v{pkg_resources.get_distribution('gyp-next').version}") - return 0 - build_files = build_files_arg - - # Set up the configuration directory (defaults to ~/.gyp) - if not options.config_dir: - home = None - home_dot_gyp = None - if options.use_environment: - home_dot_gyp = os.environ.get("GYP_CONFIG_DIR", None) - if home_dot_gyp: - home_dot_gyp = os.path.expanduser(home_dot_gyp) - - if not home_dot_gyp: - home_vars = ["HOME"] - if sys.platform in ("cygwin", "win32"): - home_vars.append("USERPROFILE") - for home_var in home_vars: - home = os.getenv(home_var) - if home: - home_dot_gyp = os.path.join(home, ".gyp") - if not os.path.exists(home_dot_gyp): - home_dot_gyp = None - else: - break - else: - home_dot_gyp = os.path.expanduser(options.config_dir) - - if home_dot_gyp and not os.path.exists(home_dot_gyp): - home_dot_gyp = None - - if not options.formats: - # If no format was given on the command line, then check the env variable. - generate_formats = [] - if options.use_environment: - generate_formats = os.environ.get("GYP_GENERATORS", []) - if generate_formats: - generate_formats = re.split(r"[\s,]", generate_formats) - if generate_formats: - options.formats = generate_formats - else: - # Nothing in the variable, default based on platform. - if sys.platform == "darwin": - options.formats = ["xcode"] - elif sys.platform in ("win32", "cygwin"): - options.formats = ["msvs"] - else: - options.formats = ["make"] - - if not options.generator_output and options.use_environment: - g_o = os.environ.get("GYP_GENERATOR_OUTPUT") - if g_o: - options.generator_output = g_o - - options.parallel = not options.no_parallel - - for mode in options.debug: - gyp.debug[mode] = 1 - - # Do an extra check to avoid work when we're not debugging. - if DEBUG_GENERAL in gyp.debug: - DebugOutput(DEBUG_GENERAL, "running with these options:") - for option, value in sorted(options.__dict__.items()): - if option[0] == "_": - continue - if isinstance(value, str): - DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value) - else: - DebugOutput(DEBUG_GENERAL, " %s: %s", option, value) - - if not build_files: - build_files = FindBuildFiles() - if not build_files: - raise GypError((usage + "\n\n%s: error: no build_file") % (my_name, my_name)) - - # TODO(mark): Chromium-specific hack! - # For Chromium, the gyp "depth" variable should always be a relative path - # to Chromium's top-level "src" directory. 
If no depth variable was set - # on the command line, try to find a "src" directory by looking at the - # absolute path to each build file's directory. The first "src" component - # found will be treated as though it were the path used for --depth. - if not options.depth: - for build_file in build_files: - build_file_dir = os.path.abspath(os.path.dirname(build_file)) - build_file_dir_components = build_file_dir.split(os.path.sep) - components_len = len(build_file_dir_components) - for index in range(components_len - 1, -1, -1): - if build_file_dir_components[index] == "src": - options.depth = os.path.sep.join(build_file_dir_components) - break - del build_file_dir_components[index] - - # If the inner loop found something, break without advancing to another - # build file. - if options.depth: - break - - if not options.depth: - raise GypError( - "Could not automatically locate src directory. This is" - "a temporary Chromium feature that will be removed. Use" - "--depth as a workaround." - ) - - # If toplevel-dir is not set, we assume that depth is the root of our source - # tree. - if not options.toplevel_dir: - options.toplevel_dir = options.depth - - # -D on the command line sets variable defaults - D isn't just for define, - # it's for default. Perhaps there should be a way to force (-F?) a - # variable's value so that it can't be overridden by anything else. - cmdline_default_variables = {} - defines = [] - if options.use_environment: - defines += ShlexEnv("GYP_DEFINES") - if options.defines: - defines += options.defines - cmdline_default_variables = NameValueListToDict(defines) - if DEBUG_GENERAL in gyp.debug: - DebugOutput( - DEBUG_GENERAL, "cmdline_default_variables: %s", cmdline_default_variables - ) - - # Set up includes. - includes = [] - - # If ~/.gyp/include.gypi exists, it'll be forcibly included into every - # .gyp file that's loaded, before anything else is included. - if home_dot_gyp: - default_include = os.path.join(home_dot_gyp, "include.gypi") - if os.path.exists(default_include): - print("Using overrides found in " + default_include) - includes.append(default_include) - - # Command-line --include files come after the default include. - if options.includes: - includes.extend(options.includes) - - # Generator flags should be prefixed with the target generator since they - # are global across all generator runs. - gen_flags = [] - if options.use_environment: - gen_flags += ShlexEnv("GYP_GENERATOR_FLAGS") - if options.generator_flags: - gen_flags += options.generator_flags - generator_flags = NameValueListToDict(gen_flags) - if DEBUG_GENERAL in gyp.debug: - DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags) - - # Generate all requested formats (use a set in case we got one format request - # twice) - for format in set(options.formats): - params = { - "options": options, - "build_files": build_files, - "generator_flags": generator_flags, - "cwd": os.getcwd(), - "build_files_arg": build_files_arg, - "gyp_binary": sys.argv[0], - "home_dot_gyp": home_dot_gyp, - "parallel": options.parallel, - "root_targets": options.root_targets, - "target_arch": cmdline_default_variables.get("target_arch", ""), - } - - # Start with the default variables from the command line. - [generator, flat_list, targets, data] = Load( - build_files, - format, - cmdline_default_variables, - includes, - options.depth, - params, - options.check, - options.circular_check, - ) - - # TODO(mark): Pass |data| for now because the generator needs a list of - # build files that came in. 
In the future, maybe it should just accept - # a list, and not the whole data dict. - # NOTE: flat_list is the flattened dependency graph specifying the order - # that targets may be built. Build systems that operate serially or that - # need to have dependencies defined before dependents reference them should - # generate targets in the order specified in flat_list. - generator.GenerateOutput(flat_list, targets, data, params) - - if options.configs: - valid_configs = targets[flat_list[0]]["configurations"] - for conf in options.configs: - if conf not in valid_configs: - raise GypError("Invalid config specified via --build: %s" % conf) - generator.PerformBuild(data, options.configs, params) - - # Done - return 0 - - -def main(args): - try: - return gyp_main(args) - except GypError as e: - sys.stderr.write("gyp: %s\n" % e) - return 1 - - -# NOTE: setuptools generated console_scripts calls function with no arguments -def script_main(): - return main(sys.argv[1:]) - - -if __name__ == "__main__": - sys.exit(script_main()) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/common.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/common.py deleted file mode 100644 index 762ae021090ca..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/common.py +++ /dev/null @@ -1,711 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import errno -import filecmp -import os.path -import re -import tempfile -import sys -import subprocess -import shlex - -from collections.abc import MutableSet - - -# A minimal memoizing decorator. It'll blow up if the args aren't immutable, -# among other "problems". -class memoize: - def __init__(self, func): - self.func = func - self.cache = {} - - def __call__(self, *args): - try: - return self.cache[args] - except KeyError: - result = self.func(*args) - self.cache[args] = result - return result - - -class GypError(Exception): - """Error class representing an error, which is to be presented - to the user. The main entry point will catch and display this. - """ - - pass - - -def ExceptionAppend(e, msg): - """Append a message to the given exception's message.""" - if not e.args: - e.args = (msg,) - elif len(e.args) == 1: - e.args = (str(e.args[0]) + " " + msg,) - else: - e.args = (str(e.args[0]) + " " + msg,) + e.args[1:] - - -def FindQualifiedTargets(target, qualified_list): - """ - Given a list of qualified targets, return the qualified targets for the - specified |target|. - """ - return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target] - - -def ParseQualifiedTarget(target): - # Splits a qualified target into a build file, target name and toolset. - - # NOTE: rsplit is used to disambiguate the Windows drive letter separator. - target_split = target.rsplit(":", 1) - if len(target_split) == 2: - [build_file, target] = target_split - else: - build_file = None - - target_split = target.rsplit("#", 1) - if len(target_split) == 2: - [target, toolset] = target_split - else: - toolset = None - - return [build_file, target, toolset] - - -def ResolveTarget(build_file, target, toolset): - # This function resolves a target into a canonical form: - # - a fully defined build file, either absolute or relative to the current - # directory - # - a target name - # - a toolset - # - # build_file is the file relative to which 'target' is defined. 
- # target is the qualified target. - # toolset is the default toolset for that target. - [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target) - - if parsed_build_file: - if build_file: - # If a relative path, parsed_build_file is relative to the directory - # containing build_file. If build_file is not in the current directory, - # parsed_build_file is not a usable path as-is. Resolve it by - # interpreting it as relative to build_file. If parsed_build_file is - # absolute, it is usable as a path regardless of the current directory, - # and os.path.join will return it as-is. - build_file = os.path.normpath( - os.path.join(os.path.dirname(build_file), parsed_build_file) - ) - # Further (to handle cases like ../cwd), make it relative to cwd) - if not os.path.isabs(build_file): - build_file = RelativePath(build_file, ".") - else: - build_file = parsed_build_file - - if parsed_toolset: - toolset = parsed_toolset - - return [build_file, target, toolset] - - -def BuildFile(fully_qualified_target): - # Extracts the build file from the fully qualified target. - return ParseQualifiedTarget(fully_qualified_target)[0] - - -def GetEnvironFallback(var_list, default): - """Look up a key in the environment, with fallback to secondary keys - and finally falling back to a default value.""" - for var in var_list: - if var in os.environ: - return os.environ[var] - return default - - -def QualifiedTarget(build_file, target, toolset): - # "Qualified" means the file that a target was defined in and the target - # name, separated by a colon, suffixed by a # and the toolset name: - # /path/to/file.gyp:target_name#toolset - fully_qualified = build_file + ":" + target - if toolset: - fully_qualified = fully_qualified + "#" + toolset - return fully_qualified - - -@memoize -def RelativePath(path, relative_to, follow_path_symlink=True): - # Assuming both |path| and |relative_to| are relative to the current - # directory, returns a relative path that identifies path relative to - # relative_to. - # If |follow_symlink_path| is true (default) and |path| is a symlink, then - # this method returns a path to the real file represented by |path|. If it is - # false, this method returns a path to the symlink. If |path| is not a - # symlink, this option has no effect. - - # Convert to normalized (and therefore absolute paths). - path = os.path.realpath(path) if follow_path_symlink else os.path.abspath(path) - relative_to = os.path.realpath(relative_to) - - # On Windows, we can't create a relative path to a different drive, so just - # use the absolute path. - if sys.platform == "win32" and ( - os.path.splitdrive(path)[0].lower() - != os.path.splitdrive(relative_to)[0].lower() - ): - return path - - # Split the paths into components. - path_split = path.split(os.path.sep) - relative_to_split = relative_to.split(os.path.sep) - - # Determine how much of the prefix the two paths share. - prefix_len = len(os.path.commonprefix([path_split, relative_to_split])) - - # Put enough ".." components to back up out of relative_to to the common - # prefix, and then append the part of path_split after the common prefix. - relative_split = [os.path.pardir] * ( - len(relative_to_split) - prefix_len - ) + path_split[prefix_len:] - - if len(relative_split) == 0: - # The paths were the same. - return "" - - # Turn it back into a string and we're done. 
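# For example (hypothetical POSIX inputs): with path "/top/a/b" and
# relative_to "/top/c", prefix_len is 2, a single ".." is emitted, and the
# join below produces "../a/b".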
- return os.path.join(*relative_split) - - -@memoize -def InvertRelativePath(path, toplevel_dir=None): - """Given a path like foo/bar that is relative to toplevel_dir, return - the inverse relative path back to the toplevel_dir. - - E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path))) - should always produce the empty string, unless the path contains symlinks. - """ - if not path: - return path - toplevel_dir = "." if toplevel_dir is None else toplevel_dir - return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path)) - - -def FixIfRelativePath(path, relative_to): - # Like RelativePath but returns |path| unchanged if it is absolute. - if os.path.isabs(path): - return path - return RelativePath(path, relative_to) - - -def UnrelativePath(path, relative_to): - # Assuming that |relative_to| is relative to the current directory, and |path| - # is a path relative to the dirname of |relative_to|, returns a path that - # identifies |path| relative to the current directory. - rel_dir = os.path.dirname(relative_to) - return os.path.normpath(os.path.join(rel_dir, path)) - - -# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at -# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02 -# and the documentation for various shells. - -# _quote is a pattern that should match any argument that needs to be quoted -# with double-quotes by EncodePOSIXShellArgument. It matches the following -# characters appearing anywhere in an argument: -# \t, \n, space parameter separators -# # comments -# $ expansions (quoted to always expand within one argument) -# % called out by IEEE 1003.1 XCU.2.2 -# & job control -# ' quoting -# (, ) subshell execution -# *, ?, [ pathname expansion -# ; command delimiter -# <, >, | redirection -# = assignment -# {, } brace expansion (bash) -# ~ tilde expansion -# It also matches the empty string, because "" (or '') is the only way to -# represent an empty string literal argument to a POSIX shell. -# -# This does not match the characters in _escape, because those need to be -# backslash-escaped regardless of whether they appear in a double-quoted -# string. -_quote = re.compile("[\t\n #$%&'()*;<=>?[{|}~]|^$") - -# _escape is a pattern that should match any character that needs to be -# escaped with a backslash, whether or not the argument matched the _quote -# pattern. _escape is used with re.sub to backslash anything in _escape's -# first match group, hence the (parentheses) in the regular expression. -# -# _escape matches the following characters appearing anywhere in an argument: -# " to prevent POSIX shells from interpreting this character for quoting -# \ to prevent POSIX shells from interpreting this character for escaping -# ` to prevent POSIX shells from interpreting this character for command -# substitution -# Missing from this list is $, because the desired behavior of -# EncodePOSIXShellArgument is to permit parameter (variable) expansion. -# -# Also missing from this list is !, which bash will interpret as the history -# expansion character when history is enabled. bash does not enable history -# by default in non-interactive shells, so this is not thought to be a problem. -# ! was omitted from this list because bash interprets "\!" as a literal string -# including the backslash character (avoiding history expansion but retaining -# the backslash), which would not be correct for argument encoding. 
Handling -# this case properly would also be problematic because bash allows the history -# character to be changed with the histchars shell variable. Fortunately, -# as history is not enabled in non-interactive shells and -# EncodePOSIXShellArgument is only expected to encode for non-interactive -# shells, there is no room for error here by ignoring !. -_escape = re.compile(r'(["\\`])') - - -def EncodePOSIXShellArgument(argument): - """Encodes |argument| suitably for consumption by POSIX shells. - - argument may be quoted and escaped as necessary to ensure that POSIX shells - treat the returned value as a literal representing the argument passed to - this function. Parameter (variable) expansions beginning with $ are allowed - to remain intact without escaping the $, to allow the argument to contain - references to variables to be expanded by the shell. - """ - - if not isinstance(argument, str): - argument = str(argument) - - quote = '"' if _quote.search(argument) else "" - - encoded = quote + re.sub(_escape, r"\\\1", argument) + quote - - return encoded - - -def EncodePOSIXShellList(list): - """Encodes |list| suitably for consumption by POSIX shells. - - Returns EncodePOSIXShellArgument for each item in list, and joins them - together using the space character as an argument separator. - """ - - encoded_arguments = [] - for argument in list: - encoded_arguments.append(EncodePOSIXShellArgument(argument)) - return " ".join(encoded_arguments) - - -def DeepDependencyTargets(target_dicts, roots): - """Returns the recursive list of target dependencies.""" - dependencies = set() - pending = set(roots) - while pending: - # Pluck out one. - r = pending.pop() - # Skip if visited already. - if r in dependencies: - continue - # Add it. - dependencies.add(r) - # Add its children. - spec = target_dicts[r] - pending.update(set(spec.get("dependencies", []))) - pending.update(set(spec.get("dependencies_original", []))) - return list(dependencies - set(roots)) - - -def BuildFileTargets(target_list, build_file): - """From a target_list, returns the subset from the specified build_file. - """ - return [p for p in target_list if BuildFile(p) == build_file] - - -def AllTargets(target_list, target_dicts, build_file): - """Returns all targets (direct and dependencies) for the specified build_file. - """ - bftargets = BuildFileTargets(target_list, build_file) - deptargets = DeepDependencyTargets(target_dicts, bftargets) - return bftargets + deptargets - - -def WriteOnDiff(filename): - """Write to a file only if the new contents differ. - - Arguments: - filename: name of the file to potentially write to. - Returns: - A file like object which will write to temporary file and only overwrite - the target if it differs (on close). - """ - - class Writer: - """Wrapper around file which only covers the target if it differs.""" - - def __init__(self): - # On Cygwin remove the "dir" argument - # `C:` prefixed paths are treated as relative, - # consequently ending up with current dir "/cygdrive/c/..." - # being prefixed to those, which was - # obviously a non-existent path, - # for example: "/cygdrive/c//C:\". - # For more details see: - # https://docs.python.org/2/library/tempfile.html#tempfile.mkstemp - base_temp_dir = "" if IsCygwin() else os.path.dirname(filename) - # Pick temporary file. - tmp_fd, self.tmp_path = tempfile.mkstemp( - suffix=".tmp", - prefix=os.path.split(filename)[1] + ".gyp.", - dir=base_temp_dir, - ) - try: - self.tmp_file = os.fdopen(tmp_fd, "wb") - except Exception: - # Don't leave turds behind. 
- os.unlink(self.tmp_path) - raise - - def __getattr__(self, attrname): - # Delegate everything else to self.tmp_file - return getattr(self.tmp_file, attrname) - - def close(self): - try: - # Close tmp file. - self.tmp_file.close() - # Determine if different. - same = False - try: - same = filecmp.cmp(self.tmp_path, filename, False) - except OSError as e: - if e.errno != errno.ENOENT: - raise - - if same: - # The new file is identical to the old one, just get rid of the new - # one. - os.unlink(self.tmp_path) - else: - # The new file is different from the old one, - # or there is no old one. - # Rename the new file to the permanent name. - # - # tempfile.mkstemp uses an overly restrictive mode, resulting in a - # file that can only be read by the owner, regardless of the umask. - # There's no reason to not respect the umask here, - # which means that an extra hoop is required - # to fetch it and reset the new file's mode. - # - # No way to get the umask without setting a new one? Set a safe one - # and then set it back to the old value. - umask = os.umask(0o77) - os.umask(umask) - os.chmod(self.tmp_path, 0o666 & ~umask) - if sys.platform == "win32" and os.path.exists(filename): - # NOTE: on windows (but not cygwin) rename will not replace an - # existing file, so it must be preceded with a remove. - # Sadly there is no way to make the switch atomic. - os.remove(filename) - os.rename(self.tmp_path, filename) - except Exception: - # Don't leave turds behind. - os.unlink(self.tmp_path) - raise - - def write(self, s): - self.tmp_file.write(s.encode("utf-8")) - - return Writer() - - -def EnsureDirExists(path): - """Make sure the directory for |path| exists.""" - try: - os.makedirs(os.path.dirname(path)) - except OSError: - pass - -def GetCrossCompilerPredefines(): # -> dict - cmd = [] - - # shlex.split() will eat '\' in posix mode, but - # setting posix=False will preserve extra '"' cause CreateProcess fail on Windows - # this makes '\' in %CC_target% and %CFLAGS% work - def replace_sep(s): - return s.replace(os.sep, "/") if os.sep != "/" else s - - if CC := os.environ.get("CC_target") or os.environ.get("CC"): - cmd += shlex.split(replace_sep(CC)) - if CFLAGS := os.environ.get("CFLAGS"): - cmd += shlex.split(replace_sep(CFLAGS)) - elif CXX := os.environ.get("CXX_target") or os.environ.get("CXX"): - cmd += shlex.split(replace_sep(CXX)) - if CXXFLAGS := os.environ.get("CXXFLAGS"): - cmd += shlex.split(replace_sep(CXXFLAGS)) - else: - return {} - - if sys.platform == "win32": - fd, input = tempfile.mkstemp(suffix=".c") - real_cmd = [*cmd, "-dM", "-E", "-x", "c", input] - try: - os.close(fd) - stdout = subprocess.run( - real_cmd, shell=True, - capture_output=True, check=True - ).stdout - finally: - os.unlink(input) - else: - input = "/dev/null" - real_cmd = [*cmd, "-dM", "-E", "-x", "c", input] - stdout = subprocess.run( - real_cmd, shell=False, - capture_output=True, check=True - ).stdout - - defines = {} - lines = stdout.decode("utf-8").replace("\r\n", "\n").split("\n") - for line in lines: - if (line or "").startswith("#define "): - _, key, *value = line.split(" ") - defines[key] = " ".join(value) - return defines - -def GetFlavorByPlatform(): - """Returns |params.flavor| if it's set, the system's default flavor else.""" - flavors = { - "cygwin": "win", - "win32": "win", - "darwin": "mac", - } - - if sys.platform in flavors: - return flavors[sys.platform] - if sys.platform.startswith("sunos"): - return "solaris" - if sys.platform.startswith(("dragonfly", "freebsd")): - return "freebsd" - if 
sys.platform.startswith("openbsd"): - return "openbsd" - if sys.platform.startswith("netbsd"): - return "netbsd" - if sys.platform.startswith("aix"): - return "aix" - if sys.platform.startswith(("os390", "zos")): - return "zos" - if sys.platform == "os400": - return "os400" - - return "linux" - -def GetFlavor(params): - if "flavor" in params: - return params["flavor"] - - defines = GetCrossCompilerPredefines() - if "__EMSCRIPTEN__" in defines: - return "emscripten" - if "__wasm__" in defines: - return "wasi" if "__wasi__" in defines else "wasm" - - return GetFlavorByPlatform() - - -def CopyTool(flavor, out_path, generator_flags={}): - """Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it - to |out_path|.""" - # aix and solaris just need flock emulation. mac and win use more complicated - # support scripts. - prefix = { - "aix": "flock", - "os400": "flock", - "solaris": "flock", - "mac": "mac", - "ios": "mac", - "win": "win", - }.get(flavor, None) - if not prefix: - return - - # Slurp input file. - source_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "%s_tool.py" % prefix - ) - with open(source_path) as source_file: - source = source_file.readlines() - - # Set custom header flags. - header = "# Generated by gyp. Do not edit.\n" - mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None) - if flavor == "mac" and mac_toolchain_dir: - header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" % mac_toolchain_dir - - # Add header and write it out. - tool_path = os.path.join(out_path, "gyp-%s-tool" % prefix) - with open(tool_path, "w") as tool_file: - tool_file.write("".join([source[0], header] + source[1:])) - - # Make file executable. - os.chmod(tool_path, 0o755) - - -# From Alex Martelli, -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560 -# ASPN: Python Cookbook: Remove duplicates from a sequence -# First comment, dated 2001/10/13. -# (Also in the printed Python Cookbook.) - - -def uniquer(seq, idfun=lambda x: x): - seen = {} - result = [] - for item in seq: - marker = idfun(item) - if marker in seen: - continue - seen[marker] = 1 - result.append(item) - return result - - -# Based on http://code.activestate.com/recipes/576694/. -class OrderedSet(MutableSet): - def __init__(self, iterable=None): - self.end = end = [] - end += [None, end, end] # sentinel node for doubly linked list - self.map = {} # key --> [key, prev, next] - if iterable is not None: - self |= iterable - - def __len__(self): - return len(self.map) - - def __contains__(self, key): - return key in self.map - - def add(self, key): - if key not in self.map: - end = self.end - curr = end[1] - curr[2] = end[1] = self.map[key] = [key, curr, end] - - def discard(self, key): - if key in self.map: - key, prev_item, next_item = self.map.pop(key) - prev_item[2] = next_item - next_item[1] = prev_item - - def __iter__(self): - end = self.end - curr = end[2] - while curr is not end: - yield curr[0] - curr = curr[2] - - def __reversed__(self): - end = self.end - curr = end[1] - while curr is not end: - yield curr[0] - curr = curr[1] - - # The second argument is an addition that causes a pylint warning. 
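# For illustration (hypothetical usage): OrderedSet("cabbage") keeps
# first-insertion order and iterates as c, a, b, g, e; pop() below removes
# and returns "e" (the most recently added key), while pop(last=False)
# removes and returns "c".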
- def pop(self, last=True): # pylint: disable=W0221 - if not self: - raise KeyError("set is empty") - key = self.end[1][0] if last else self.end[2][0] - self.discard(key) - return key - - def __repr__(self): - if not self: - return f"{self.__class__.__name__}()" - return f"{self.__class__.__name__}({list(self)!r})" - - def __eq__(self, other): - if isinstance(other, OrderedSet): - return len(self) == len(other) and list(self) == list(other) - return set(self) == set(other) - - # Extensions to the recipe. - def update(self, iterable): - for i in iterable: - if i not in self: - self.add(i) - - -class CycleError(Exception): - """An exception raised when an unexpected cycle is detected.""" - - def __init__(self, nodes): - self.nodes = nodes - - def __str__(self): - return "CycleError: cycle involving: " + str(self.nodes) - - -def TopologicallySorted(graph, get_edges): - r"""Topologically sort based on a user provided edge definition. - - Args: - graph: A list of node names. - get_edges: A function mapping from node name to a hashable collection - of node names which this node has outgoing edges to. - Returns: - A list containing all of the node in graph in topological order. - It is assumed that calling get_edges once for each node and caching is - cheaper than repeatedly calling get_edges. - Raises: - CycleError in the event of a cycle. - Example: - graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'} - def GetEdges(node): - return re.findall(r'\$\(([^))]\)', graph[node]) - print TopologicallySorted(graph.keys(), GetEdges) - ==> - ['a', 'c', b'] - """ - get_edges = memoize(get_edges) - visited = set() - visiting = set() - ordered_nodes = [] - - def Visit(node): - if node in visiting: - raise CycleError(visiting) - if node in visited: - return - visited.add(node) - visiting.add(node) - for neighbor in get_edges(node): - Visit(neighbor) - visiting.remove(node) - ordered_nodes.insert(0, node) - - for node in sorted(graph): - Visit(node) - return ordered_nodes - - -def CrossCompileRequested(): - # TODO: figure out how to not build extra host objects in the - # non-cross-compile case when this is enabled, and enable unconditionally. - return ( - os.environ.get("GYP_CROSSCOMPILE") - or os.environ.get("AR_host") - or os.environ.get("CC_host") - or os.environ.get("CXX_host") - or os.environ.get("AR_target") - or os.environ.get("CC_target") - or os.environ.get("CXX_target") - ) - - -def IsCygwin(): - try: - out = subprocess.Popen( - "uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - stdout = out.communicate()[0].decode("utf-8") - return "CYGWIN" in str(stdout) - except Exception: - return False diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/common_test.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/common_test.py deleted file mode 100755 index b6c4cccc1ac5c..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/common_test.py +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
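# A runnable rendering of the TopologicallySorted docstring example from the
# common.py deletion above; the pattern printed there does not compile, so
# this sketch substitutes one working equivalent. The expected result is
# ['a', 'c', 'b'].
import re

graph = {"a": "$(b) $(c)", "b": "hi", "c": "$(b)"}

def GetEdges(node):
    return re.findall(r"\$\(([^)]*)\)", graph[node])

# gyp.common.TopologicallySorted(graph.keys(), GetEdges) == ["a", "c", "b"]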
- -"""Unit tests for the common.py file.""" - -import gyp.common -import unittest -import sys -import os -from unittest.mock import patch, MagicMock - -class TestTopologicallySorted(unittest.TestCase): - def test_Valid(self): - """Test that sorting works on a valid graph with one possible order.""" - graph = { - "a": ["b", "c"], - "b": [], - "c": ["d"], - "d": ["b"], - } - - def GetEdge(node): - return tuple(graph[node]) - - assert gyp.common.TopologicallySorted( - graph.keys(), GetEdge) == ["a", "c", "d", "b"] - - def test_Cycle(self): - """Test that an exception is thrown on a cyclic graph.""" - graph = { - "a": ["b"], - "b": ["c"], - "c": ["d"], - "d": ["a"], - } - - def GetEdge(node): - return tuple(graph[node]) - - self.assertRaises( - gyp.common.CycleError, gyp.common.TopologicallySorted, graph.keys(), GetEdge - ) - - -class TestGetFlavor(unittest.TestCase): - """Test that gyp.common.GetFlavor works as intended""" - - original_platform = "" - - def setUp(self): - self.original_platform = sys.platform - - def tearDown(self): - sys.platform = self.original_platform - - def assertFlavor(self, expected, argument, param): - sys.platform = argument - assert expected == gyp.common.GetFlavor(param) - - def test_platform_default(self): - self.assertFlavor("freebsd", "freebsd9", {}) - self.assertFlavor("freebsd", "freebsd10", {}) - self.assertFlavor("openbsd", "openbsd5", {}) - self.assertFlavor("solaris", "sunos5", {}) - self.assertFlavor("solaris", "sunos", {}) - self.assertFlavor("linux", "linux2", {}) - self.assertFlavor("linux", "linux3", {}) - self.assertFlavor("linux", "linux", {}) - - def test_param(self): - self.assertFlavor("foobar", "linux2", {"flavor": "foobar"}) - - class MockCommunicate: - def __init__(self, stdout): - self.stdout = stdout - - def decode(self, encoding): - return self.stdout - - @patch("os.close") - @patch("os.unlink") - @patch("tempfile.mkstemp") - def test_GetCrossCompilerPredefines(self, mock_mkstemp, mock_unlink, mock_close): - mock_close.return_value = None - mock_unlink.return_value = None - mock_mkstemp.return_value = (0, "temp.c") - - def mock_run(env, defines_stdout, expected_cmd): - with patch("subprocess.run") as mock_run: - mock_process = MagicMock() - mock_process.returncode = 0 - mock_process.stdout = TestGetFlavor.MockCommunicate(defines_stdout) - mock_run.return_value = mock_process - expected_input = "temp.c" if sys.platform == "win32" else "/dev/null" - with patch.dict(os.environ, env): - defines = gyp.common.GetCrossCompilerPredefines() - flavor = gyp.common.GetFlavor({}) - if env.get("CC_target"): - mock_run.assert_called_with( - [ - *expected_cmd, - "-dM", "-E", "-x", "c", expected_input - ], - shell=sys.platform == "win32", - capture_output=True, check=True) - return [defines, flavor] - - [defines1, _] = mock_run({}, "", []) - assert {} == defines1 - - [defines2, flavor2] = mock_run( - { "CC_target": "/opt/wasi-sdk/bin/clang" }, - "#define __wasm__ 1\n#define __wasi__ 1\n", - ["/opt/wasi-sdk/bin/clang"] - ) - assert { "__wasm__": "1", "__wasi__": "1" } == defines2 - assert flavor2 == "wasi" - - [defines3, flavor3] = mock_run( - { "CC_target": "/opt/wasi-sdk/bin/clang --target=wasm32" }, - "#define __wasm__ 1\n", - ["/opt/wasi-sdk/bin/clang", "--target=wasm32"] - ) - assert { "__wasm__": "1" } == defines3 - assert flavor3 == "wasm" - - [defines4, flavor4] = mock_run( - { "CC_target": "/emsdk/upstream/emscripten/emcc" }, - "#define __EMSCRIPTEN__ 1\n", - ["/emsdk/upstream/emscripten/emcc"] - ) - assert { "__EMSCRIPTEN__": "1" } == defines4 - 
assert flavor4 == "emscripten" - - # Test path which include white space - [defines5, flavor5] = mock_run( - { - "CC_target": "\"/Users/Toyo Li/wasi-sdk/bin/clang\" -O3", - "CFLAGS": "--target=wasm32-wasi-threads -pthread" - }, - "#define __wasm__ 1\n#define __wasi__ 1\n#define _REENTRANT 1\n", - [ - "/Users/Toyo Li/wasi-sdk/bin/clang", - "-O3", - "--target=wasm32-wasi-threads", - "-pthread" - ] - ) - assert { - "__wasm__": "1", - "__wasi__": "1", - "_REENTRANT": "1" - } == defines5 - assert flavor5 == "wasi" - - original_sep = os.sep - os.sep = "\\" - [defines6, flavor6] = mock_run( - { "CC_target": "\"C:\\Program Files\\wasi-sdk\\clang.exe\"" }, - "#define __wasm__ 1\n#define __wasi__ 1\n", - ["C:/Program Files/wasi-sdk/clang.exe"] - ) - os.sep = original_sep - assert { "__wasm__": "1", "__wasi__": "1" } == defines6 - assert flavor6 == "wasi" - -if __name__ == "__main__": - unittest.main() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py deleted file mode 100644 index 02567b251446d..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py +++ /dev/null @@ -1,169 +0,0 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys -import re -import os -import locale -from functools import reduce - - -def XmlToString(content, encoding="utf-8", pretty=False): - """ Writes the XML content to disk, touching the file only if it has changed. - - Visual Studio files have a lot of pre-defined structures. This function makes - it easy to represent these structures as Python data structures, instead of - having to create a lot of function calls. - - Each XML element of the content is represented as a list composed of: - 1. The name of the element, a string, - 2. The attributes of the element, a dictionary (optional), and - 3+. The content of the element, if any. Strings are simple text nodes and - lists are child elements. - - Example 1: - - becomes - ['test'] - - Example 2: - - This is - it! - - - becomes - ['myelement', {'a':'value1', 'b':'value2'}, - ['childtype', 'This is'], - ['childtype', 'it!'], - ] - - Args: - content: The structured content to be converted. - encoding: The encoding to report on the first XML line. - pretty: True if we want pretty printing with indents and new lines. - - Returns: - The XML content as a string. - """ - # We create a huge list of all the elements of the file. - xml_parts = ['' % encoding] - if pretty: - xml_parts.append("\n") - _ConstructContentList(xml_parts, content, pretty) - - # Convert it to a string - return "".join(xml_parts) - - -def _ConstructContentList(xml_parts, specification, pretty, level=0): - """ Appends the XML parts corresponding to the specification. - - Args: - xml_parts: A list of XML parts to be appended to. - specification: The specification of the element. See EasyXml docs. - pretty: True if we want pretty printing with indents and new lines. - level: Indentation level. - """ - # The first item in a specification is the name of the element. - if pretty: - indentation = " " * level - new_line = "\n" - else: - indentation = "" - new_line = "" - name = specification[0] - if not isinstance(name, str): - raise Exception( - "The first item of an EasyXml specification should be " - "a string. 
Specification was " + str(specification) - ) - xml_parts.append(indentation + "<" + name) - - # Optionally in second position is a dictionary of the attributes. - rest = specification[1:] - if rest and isinstance(rest[0], dict): - for at, val in sorted(rest[0].items()): - xml_parts.append(f' {at}="{_XmlEscape(val, attr=True)}"') - rest = rest[1:] - if rest: - xml_parts.append(">") - all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True) - multi_line = not all_strings - if multi_line and new_line: - xml_parts.append(new_line) - for child_spec in rest: - # If it's a string, append a text node. - # Otherwise recurse over that child definition - if isinstance(child_spec, str): - xml_parts.append(_XmlEscape(child_spec)) - else: - _ConstructContentList(xml_parts, child_spec, pretty, level + 1) - if multi_line and indentation: - xml_parts.append(indentation) - xml_parts.append(f"{new_line}") - else: - xml_parts.append("/>%s" % new_line) - - -def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False, - win32=(sys.platform == "win32")): - """ Writes the XML content to disk, touching the file only if it has changed. - - Args: - content: The structured content to be written. - path: Location of the file. - encoding: The encoding to report on the first line of the XML file. - pretty: True if we want pretty printing with indents and new lines. - """ - xml_string = XmlToString(content, encoding, pretty) - if win32 and os.linesep != "\r\n": - xml_string = xml_string.replace("\n", "\r\n") - - try: # getdefaultlocale() was removed in Python 3.11 - default_encoding = locale.getdefaultlocale()[1] - except AttributeError: - default_encoding = locale.getencoding() - - if default_encoding and default_encoding.upper() != encoding.upper(): - xml_string = xml_string.encode(encoding) - - # Get the old content - try: - with open(path) as file: - existing = file.read() - except OSError: - existing = None - - # It has changed, write it - if existing != xml_string: - with open(path, "wb") as file: - file.write(xml_string) - - -_xml_escape_map = { - '"': """, - "'": "'", - "<": "<", - ">": ">", - "&": "&", - "\n": " ", - "\r": " ", -} - - -_xml_escape_re = re.compile("(%s)" % "|".join(map(re.escape, _xml_escape_map.keys()))) - - -def _XmlEscape(value, attr=False): - """ Escape a string for inclusion in XML.""" - - def replace(match): - m = match.string[match.start() : match.end()] - # don't replace single quotes in attrs - if attr and m == "'": - return m - return _xml_escape_map[m] - - return _xml_escape_re.sub(replace, value) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py deleted file mode 100755 index 2d9b15210dc12..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the easy_xml.py file. 
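    As context for the assertions below: easy_xml represents an XML element
    as a list of the element name, an optional attribute dictionary, and its
    children, so ['myelement', {'a': 'value1'}, ['childtype', 'This is']]
    stands for a <myelement> element with attribute a="value1" and one
    <childtype> child containing the text "This is".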
""" - -import gyp.easy_xml as easy_xml -import unittest - -from io import StringIO - - -class TestSequenceFunctions(unittest.TestCase): - def setUp(self): - self.stderr = StringIO() - - def test_EasyXml_simple(self): - self.assertEqual( - easy_xml.XmlToString(["test"]), - '', - ) - - self.assertEqual( - easy_xml.XmlToString(["test"], encoding="Windows-1252"), - '', - ) - - def test_EasyXml_simple_with_attributes(self): - self.assertEqual( - easy_xml.XmlToString(["test2", {"a": "value1", "b": "value2"}]), - '', - ) - - def test_EasyXml_escaping(self): - original = "'\"\r&\nfoo" - converted = "<test>'" & foo" - converted_apos = converted.replace("'", "'") - self.assertEqual( - easy_xml.XmlToString(["test3", {"a": original}, original]), - '%s' - % (converted, converted_apos), - ) - - def test_EasyXml_pretty(self): - self.assertEqual( - easy_xml.XmlToString( - ["test3", ["GrandParent", ["Parent1", ["Child"]], ["Parent2"]]], - pretty=True, - ), - '\n' - "\n" - " \n" - " \n" - " \n" - " \n" - " \n" - " \n" - "\n", - ) - - def test_EasyXml_complex(self): - # We want to create: - target = ( - '' - "" - '' - "{D2250C20-3A94-4FB9-AF73-11BC5B73884B}" - "Win32Proj" - "automated_ui_tests" - "" - '' - "' - "Application" - "Unicode" - "SpectreLoadCF" - "14.36.32532" - "" - "" - ) - - xml = easy_xml.XmlToString( - [ - "Project", - [ - "PropertyGroup", - {"Label": "Globals"}, - ["ProjectGuid", "{D2250C20-3A94-4FB9-AF73-11BC5B73884B}"], - ["Keyword", "Win32Proj"], - ["RootNamespace", "automated_ui_tests"], - ], - ["Import", {"Project": "$(VCTargetsPath)\\Microsoft.Cpp.props"}], - [ - "PropertyGroup", - { - "Condition": "'$(Configuration)|$(Platform)'=='Debug|Win32'", - "Label": "Configuration", - }, - ["ConfigurationType", "Application"], - ["CharacterSet", "Unicode"], - ["SpectreMitigation", "SpectreLoadCF"], - ["VCToolsVersion", "14.36.32532"], - ], - ] - ) - self.assertEqual(xml, target) - - -if __name__ == "__main__": - unittest.main() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py deleted file mode 100755 index 0754aff26fe7c..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""These functions are executed via gyp-flock-tool when using the Makefile -generator. Used on systems that don't have a built-in flock.""" - -import fcntl -import os -import struct -import subprocess -import sys - - -def main(args): - executor = FlockTool() - executor.Dispatch(args) - - -class FlockTool: - """This class emulates the 'flock' command.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace("-", "") - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. 
-        # Note that the stock python on SunOS has a bug
-        # where fcntl.flock(fd, LOCK_EX) always fails
-        # with EBADF, that's why we use this F_SETLK
-        # hack instead.
-        fd = os.open(lockfile, os.O_WRONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
-        if sys.platform.startswith("aix") or sys.platform == "os400":
-            # Python on AIX is compiled with LARGEFILE support, which changes the
-            # struct size.
-            op = struct.pack("hhIllqq", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
-        else:
-            op = struct.pack("hhllhhl", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
-        fcntl.fcntl(fd, fcntl.F_SETLK, op)
-        return subprocess.call(cmd_list)
-
-
-if __name__ == "__main__":
-    sys.exit(main(sys.argv[1:]))
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
deleted file mode 100644
index 1334f2fca9967..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
+++ /dev/null
@@ -1,804 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
-the generator flag config_path) the path of a json file that dictates the files
-and targets to search for. The following keys are supported:
-files: list of paths (relative) of the files to search for.
-test_targets: unqualified target names to search for. Any target in this list
-that depends upon a file in |files| is output regardless of the type of target
-or chain of dependencies.
-additional_compile_targets: Unqualified targets to search for in addition to
-test_targets. Targets in the combined list that depend upon a file in |files|
-are not necessarily output. For example, if the target is of type none then the
-target is not output (but one of the descendants of the target will be).
-
-The following is output:
-error: only supplied if there is an error.
-compile_targets: minimal set of targets that directly or indirectly (for
-  targets of type none) depend on the files in |files| and is one of the
-  supplied targets or a target that one of the supplied targets depends on.
-  The expectation is this set of targets is passed into a build step. This list
-  always contains the output of test_targets as well.
-test_targets: set of targets from the supplied |test_targets| that either
-  directly or indirectly depend upon a file in |files|. This list is useful
-  if additional processing needs to be done for certain targets after the
-  build, such as running tests.
-status: outputs one of three values: none of the supplied files were found,
-  one of the include files changed so that it should be assumed everything
-  changed (in this case test_targets and compile_targets are not output) or at
-  least one file was found.
-invalid_targets: list of supplied targets that were not found.
-
-Example:
-Consider a graph like the following:
-  A     D
- / \
-B   C
-A depends upon both B and C, A is of type none and B and C are executables.
-D is an executable, has no dependencies and nothing depends on it.
-If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
-files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
-the following is output:
-|compile_targets| = ["B"] B must be built as it depends upon the changed file
-b.cc and the supplied target A depends upon it. A is not output as a
-build_target as it is of type none with no rules and actions.
-|test_targets| = ["B"] B directly depends upon the changed file b.cc.
-
-Even though the file d.cc, which D depends upon, has changed, D is not output
-as it was not supplied by way of |additional_compile_targets| or |test_targets|.
-
-If the generator flag analyzer_output_path is specified, output is written
-there. Otherwise output is written to stdout.
-
-In Gyp the "all" target is shorthand for the root targets in the files passed
-to gyp. For example, if file "a.gyp" contains targets "a1" and
-"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
-on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
-Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
-directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
-then the "all" target includes "b1" and "b2".
-"""
-
-
-import gyp.common
-import json
-import os
-import posixpath
-
-debug = False
-
-found_dependency_string = "Found dependency"
-no_dependency_string = "No dependencies"
-# Status when it should be assumed that everything has changed.
-all_changed_string = "Found dependency (all)"
-
-# MatchStatus is used to indicate if and how a target depends upon the supplied
-# sources.
-# The target's sources contain one of the supplied paths.
-MATCH_STATUS_MATCHES = 1
-# The target has a dependency on another target that contains one of the
-# supplied paths.
-MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
-# The target's sources weren't in the supplied paths and none of the target's
-# dependencies depend upon a target that matched.
-MATCH_STATUS_DOESNT_MATCH = 3
-# The target doesn't contain the source, but the dependent targets have not yet
-# been visited to determine a more specific status.
-MATCH_STATUS_TBD = 4
-
-generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
-
-generator_wants_static_library_dependencies_adjusted = False
-
-generator_default_variables = {}
-for dirname in [
-    "INTERMEDIATE_DIR",
-    "SHARED_INTERMEDIATE_DIR",
-    "PRODUCT_DIR",
-    "LIB_DIR",
-    "SHARED_LIB_DIR",
-]:
-    generator_default_variables[dirname] = "!!!"
-
-for unused in [
-    "RULE_INPUT_PATH",
-    "RULE_INPUT_ROOT",
-    "RULE_INPUT_NAME",
-    "RULE_INPUT_DIRNAME",
-    "RULE_INPUT_EXT",
-    "EXECUTABLE_PREFIX",
-    "EXECUTABLE_SUFFIX",
-    "STATIC_LIB_PREFIX",
-    "STATIC_LIB_SUFFIX",
-    "SHARED_LIB_PREFIX",
-    "SHARED_LIB_SUFFIX",
-    "CONFIGURATION_NAME",
-]:
-    generator_default_variables[unused] = ""
-
-
-def _ToGypPath(path):
-    """Converts a path to the format used by gyp."""
-    if os.sep == "\\" and os.altsep == "/":
-        return path.replace("\\", "/")
-    return path
-
-
-def _ResolveParent(path, base_path_components):
-    """Resolves |path|, which starts with at least one '../'. Returns an empty
-    string if the path shouldn't be considered. See _AddSources() for a
-    description of |base_path_components|."""
-    depth = 0
-    while path.startswith("../"):
-        depth += 1
-        path = path[3:]
-    # Relative includes may go outside the source tree. For example, an action may
-    # have inputs in /usr/include, which are not in the source tree.
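    # (Sketch: with base_path_components == ["chrome", "browser"], "../foo.cc"
    # resolves to "chrome/foo.cc", while "../../../foo.cc" has depth 3 > 2 and
    # is discarded by the check below as falling outside the source tree.)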
- if depth > len(base_path_components): - return "" - if depth == len(base_path_components): - return path - return ( - "/".join(base_path_components[0 : len(base_path_components) - depth]) - + "/" - + path - ) - - -def _AddSources(sources, base_path, base_path_components, result): - """Extracts valid sources from |sources| and adds them to |result|. Each - source file is relative to |base_path|, but may contain '..'. To make - resolving '..' easier |base_path_components| contains each of the - directories in |base_path|. Additionally each source may contain variables. - Such sources are ignored as it is assumed dependencies on them are expressed - and tracked in some other means.""" - # NOTE: gyp paths are always posix style. - for source in sources: - if not len(source) or source.startswith("!!!") or source.startswith("$"): - continue - # variable expansion may lead to //. - org_source = source - source = source[0] + source[1:].replace("//", "/") - if source.startswith("../"): - source = _ResolveParent(source, base_path_components) - if len(source): - result.append(source) - continue - result.append(base_path + source) - if debug: - print("AddSource", org_source, result[len(result) - 1]) - - -def _ExtractSourcesFromAction(action, base_path, base_path_components, results): - if "inputs" in action: - _AddSources(action["inputs"], base_path, base_path_components, results) - - -def _ToLocalPath(toplevel_dir, path): - """Converts |path| to a path relative to |toplevel_dir|.""" - if path == toplevel_dir: - return "" - if path.startswith(toplevel_dir + "/"): - return path[len(toplevel_dir) + len("/") :] - return path - - -def _ExtractSources(target, target_dict, toplevel_dir): - # |target| is either absolute or relative and in the format of the OS. Gyp - # source paths are always posix. Convert |target| to a posix path relative to - # |toplevel_dir_|. This is done to make it easy to build source paths. - base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target))) - base_path_components = base_path.split("/") - - # Add a trailing '/' so that _AddSources() can easily build paths. - if len(base_path): - base_path += "/" - - if debug: - print("ExtractSources", target, base_path) - - results = [] - if "sources" in target_dict: - _AddSources(target_dict["sources"], base_path, base_path_components, results) - # Include the inputs from any actions. Any changes to these affect the - # resulting output. - if "actions" in target_dict: - for action in target_dict["actions"]: - _ExtractSourcesFromAction(action, base_path, base_path_components, results) - if "rules" in target_dict: - for rule in target_dict["rules"]: - _ExtractSourcesFromAction(rule, base_path, base_path_components, results) - - return results - - -class Target: - """Holds information about a particular target: - deps: set of Targets this Target depends upon. This is not recursive, only the - direct dependent Targets. - match_status: one of the MatchStatus values. - back_deps: set of Targets that have a dependency on this Target. - visited: used during iteration to indicate whether we've visited this target. - This is used for two iterations, once in building the set of Targets and - again in _GetBuildTargets(). - name: fully qualified name of the target. - requires_build: True if the target type is such that it needs to be built. - See _DoesTargetTypeRequireBuild for details. - added_to_compile_targets: used when determining if the target was added to the - set of targets that needs to be built. 
- in_roots: true if this target is a descendant of one of the root nodes. - is_executable: true if the type of target is executable. - is_static_library: true if the type of target is static_library. - is_or_has_linked_ancestor: true if the target does a link (eg executable), or - if there is a target in back_deps that does a link.""" - - def __init__(self, name): - self.deps = set() - self.match_status = MATCH_STATUS_TBD - self.back_deps = set() - self.name = name - # TODO(sky): I don't like hanging this off Target. This state is specific - # to certain functions and should be isolated there. - self.visited = False - self.requires_build = False - self.added_to_compile_targets = False - self.in_roots = False - self.is_executable = False - self.is_static_library = False - self.is_or_has_linked_ancestor = False - - -class Config: - """Details what we're looking for - files: set of files to search for - targets: see file description for details.""" - - def __init__(self): - self.files = [] - self.targets = set() - self.additional_compile_target_names = set() - self.test_target_names = set() - - def Init(self, params): - """Initializes Config. This is a separate method as it raises an exception - if there is a parse error.""" - generator_flags = params.get("generator_flags", {}) - config_path = generator_flags.get("config_path", None) - if not config_path: - return - try: - f = open(config_path) - config = json.load(f) - f.close() - except OSError: - raise Exception("Unable to open file " + config_path) - except ValueError as e: - raise Exception("Unable to parse config file " + config_path + str(e)) - if not isinstance(config, dict): - raise Exception("config_path must be a JSON file containing a dictionary") - self.files = config.get("files", []) - self.additional_compile_target_names = set( - config.get("additional_compile_targets", []) - ) - self.test_target_names = set(config.get("test_targets", [])) - - -def _WasBuildFileModified(build_file, data, files, toplevel_dir): - """Returns true if the build file |build_file| is either in |files| or - one of the files included by |build_file| is in |files|. |toplevel_dir| is - the root of the source tree.""" - if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files: - if debug: - print("gyp file modified", build_file) - return True - - # First element of included_files is the file itself. - if len(data[build_file]["included_files"]) <= 1: - return False - - for include_file in data[build_file]["included_files"][1:]: - # |included_files| are relative to the directory of the |build_file|. - rel_include_file = _ToGypPath( - gyp.common.UnrelativePath(include_file, build_file) - ) - if _ToLocalPath(toplevel_dir, rel_include_file) in files: - if debug: - print( - "included gyp file modified, gyp_file=", - build_file, - "included file=", - rel_include_file, - ) - return True - return False - - -def _GetOrCreateTargetByName(targets, target_name): - """Creates or returns the Target at targets[target_name]. If there is no - Target for |target_name| one is created. Returns a tuple of whether a new - Target was created and the Target.""" - if target_name in targets: - return False, targets[target_name] - target = Target(target_name) - targets[target_name] = target - return True, target - - -def _DoesTargetTypeRequireBuild(target_dict): - """Returns true if the target type is such that it needs to be built.""" - # If a 'none' target has rules or actions we assume it requires a build. 
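    # (Sketch: {"type": "none"} alone -> False; {"type": "none", "actions":
    # [...]} -> True; any other type, e.g. "executable" -> True.)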
- return bool( - target_dict["type"] != "none" - or target_dict.get("actions") - or target_dict.get("rules") - ) - - -def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build_files): - """Returns a tuple of the following: - . A dictionary mapping from fully qualified name to Target. - . A list of the targets that have a source file in |files|. - . Targets that constitute the 'all' target. See description at top of file - for details on the 'all' target. - This sets the |match_status| of the targets that contain any of the source - files in |files| to MATCH_STATUS_MATCHES. - |toplevel_dir| is the root of the source tree.""" - # Maps from target name to Target. - name_to_target = {} - - # Targets that matched. - matching_targets = [] - - # Queue of targets to visit. - targets_to_visit = target_list[:] - - # Maps from build file to a boolean indicating whether the build file is in - # |files|. - build_file_in_files = {} - - # Root targets across all files. - roots = set() - - # Set of Targets in |build_files|. - build_file_targets = set() - - while len(targets_to_visit) > 0: - target_name = targets_to_visit.pop() - created_target, target = _GetOrCreateTargetByName(name_to_target, target_name) - if created_target: - roots.add(target) - elif target.visited: - continue - - target.visited = True - target.requires_build = _DoesTargetTypeRequireBuild(target_dicts[target_name]) - target_type = target_dicts[target_name]["type"] - target.is_executable = target_type == "executable" - target.is_static_library = target_type == "static_library" - target.is_or_has_linked_ancestor = ( - target_type in {"executable", "shared_library"} - ) - - build_file = gyp.common.ParseQualifiedTarget(target_name)[0] - if build_file not in build_file_in_files: - build_file_in_files[build_file] = _WasBuildFileModified( - build_file, data, files, toplevel_dir - ) - - if build_file in build_files: - build_file_targets.add(target) - - # If a build file (or any of its included files) is modified we assume all - # targets in the file are modified. - if build_file_in_files[build_file]: - print("matching target from modified build file", target_name) - target.match_status = MATCH_STATUS_MATCHES - matching_targets.append(target) - else: - sources = _ExtractSources( - target_name, target_dicts[target_name], toplevel_dir - ) - for source in sources: - if _ToGypPath(os.path.normpath(source)) in files: - print("target", target_name, "matches", source) - target.match_status = MATCH_STATUS_MATCHES - matching_targets.append(target) - break - - # Add dependencies to visit as well as updating back pointers for deps. - for dep in target_dicts[target_name].get("dependencies", []): - targets_to_visit.append(dep) - - created_dep_target, dep_target = _GetOrCreateTargetByName( - name_to_target, dep - ) - if not created_dep_target: - roots.discard(dep_target) - - target.deps.add(dep_target) - dep_target.back_deps.add(target) - - return name_to_target, matching_targets, roots & build_file_targets - - -def _GetUnqualifiedToTargetMapping(all_targets, to_find): - """Returns a tuple of the following: - . mapping (dictionary) from unqualified name to Target for all the - Targets in |to_find|. - . any target names not found. 
If this is empty all targets were found."""
-    result = {}
-    if not to_find:
-        return {}, []
-    to_find = set(to_find)
-    for target_name in all_targets:
-        extracted = gyp.common.ParseQualifiedTarget(target_name)
-        if len(extracted) > 1 and extracted[1] in to_find:
-            to_find.remove(extracted[1])
-            result[extracted[1]] = all_targets[target_name]
-            if not to_find:
-                return result, []
-    return result, list(to_find)
-
-
-def _DoesTargetDependOnMatchingTargets(target):
-    """Returns true if |target| or any of its dependencies is one of the
-    targets containing the files supplied as input to analyzer. This updates
-    |matches| of the Targets as it recurses.
-    target: the Target to look for."""
-    if target.match_status == MATCH_STATUS_DOESNT_MATCH:
-        return False
-    if (
-        target.match_status in {MATCH_STATUS_MATCHES,
-                                MATCH_STATUS_MATCHES_BY_DEPENDENCY}
-    ):
-        return True
-    for dep in target.deps:
-        if _DoesTargetDependOnMatchingTargets(dep):
-            target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
-            print("\t", target.name, "matches by dep", dep.name)
-            return True
-    target.match_status = MATCH_STATUS_DOESNT_MATCH
-    return False
-
-
-def _GetTargetsDependingOnMatchingTargets(possible_targets):
-    """Returns the list of Targets in |possible_targets| that depend (either
-    directly or indirectly) on at least one of the targets containing the files
-    supplied as input to analyzer.
-    possible_targets: targets to search from."""
-    found = []
-    print("Targets that matched by dependency:")
-    for target in possible_targets:
-        if _DoesTargetDependOnMatchingTargets(target):
-            found.append(target)
-    return found
-
-
-def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
-    """Recurses through all targets that depend on |target|, adding all targets
-    that need to be built (and are in |roots|) to |result|.
-    roots: set of root targets.
-    add_if_no_ancestor: If true and there are no ancestors of |target| then add
-        |target| to |result|. |target| must still be in |roots|.
-    result: targets that need to be built are added here."""
-    if target.visited:
-        return
-
-    target.visited = True
-    target.in_roots = target in roots
-
-    for back_dep_target in target.back_deps:
-        _AddCompileTargets(back_dep_target, roots, False, result)
-        target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
-        target.in_roots |= back_dep_target.in_roots
-        target.is_or_has_linked_ancestor |= back_dep_target.is_or_has_linked_ancestor
-
-    # Always add 'executable' targets. Even though they may be built by other
-    # targets that depend upon them it makes detection of what is going to be
-    # built easier.
-    # And always add static_libraries that have no dependencies on them from
-    # linkables. This is necessary as the other dependencies on them may be
-    # static libraries themselves, which are not compile time dependencies.
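    # (Sketch: when static library S matches a changed file, the walk through
    # back_deps reaches the executable E that links S, so E is added; S itself
    # is only added when no linked ancestor was added above it.)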
- if target.in_roots and ( - target.is_executable - or ( - not target.added_to_compile_targets - and (add_if_no_ancestor or target.requires_build) - ) - or ( - target.is_static_library - and add_if_no_ancestor - and not target.is_or_has_linked_ancestor - ) - ): - print( - "\t\tadding to compile targets", - target.name, - "executable", - target.is_executable, - "added_to_compile_targets", - target.added_to_compile_targets, - "add_if_no_ancestor", - add_if_no_ancestor, - "requires_build", - target.requires_build, - "is_static_library", - target.is_static_library, - "is_or_has_linked_ancestor", - target.is_or_has_linked_ancestor, - ) - result.add(target) - target.added_to_compile_targets = True - - -def _GetCompileTargets(matching_targets, supplied_targets): - """Returns the set of Targets that require a build. - matching_targets: targets that changed and need to be built. - supplied_targets: set of targets supplied to analyzer to search from.""" - result = set() - for target in matching_targets: - print("finding compile targets for match", target.name) - _AddCompileTargets(target, supplied_targets, True, result) - return result - - -def _WriteOutput(params, **values): - """Writes the output, either to stdout or a file is specified.""" - if "error" in values: - print("Error:", values["error"]) - if "status" in values: - print(values["status"]) - if "targets" in values: - values["targets"].sort() - print("Supplied targets that depend on changed files:") - for target in values["targets"]: - print("\t", target) - if "invalid_targets" in values: - values["invalid_targets"].sort() - print("The following targets were not found:") - for target in values["invalid_targets"]: - print("\t", target) - if "build_targets" in values: - values["build_targets"].sort() - print("Targets that require a build:") - for target in values["build_targets"]: - print("\t", target) - if "compile_targets" in values: - values["compile_targets"].sort() - print("Targets that need to be built:") - for target in values["compile_targets"]: - print("\t", target) - if "test_targets" in values: - values["test_targets"].sort() - print("Test targets:") - for target in values["test_targets"]: - print("\t", target) - - output_path = params.get("generator_flags", {}).get("analyzer_output_path", None) - if not output_path: - print(json.dumps(values)) - return - try: - f = open(output_path, "w") - f.write(json.dumps(values) + "\n") - f.close() - except OSError as e: - print("Error writing to output file", output_path, str(e)) - - -def _WasGypIncludeFileModified(params, files): - """Returns true if one of the files in |files| is in the set of included - files.""" - if params["options"].includes: - for include in params["options"].includes: - if _ToGypPath(os.path.normpath(include)) in files: - print("Include file modified, assuming all changed", include) - return True - return False - - -def _NamesNotIn(names, mapping): - """Returns a list of the values in |names| that are not in |mapping|.""" - return [name for name in names if name not in mapping] - - -def _LookupTargets(names, mapping): - """Returns a list of the mapping[name] for each value in |names| that is in - |mapping|.""" - return [mapping[name] for name in names if name in mapping] - - -def CalculateVariables(default_variables, params): - """Calculate additional variables for use in the build (called by gyp).""" - flavor = gyp.common.GetFlavor(params) - if flavor == "mac": - default_variables.setdefault("OS", "mac") - elif flavor == "win": - default_variables.setdefault("OS", 
"win") - gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) - else: - operating_system = flavor - if flavor == "android": - operating_system = "linux" # Keep this legacy behavior for now. - default_variables.setdefault("OS", operating_system) - - -class TargetCalculator: - """Calculates the matching test_targets and matching compile_targets.""" - - def __init__( - self, - files, - additional_compile_target_names, - test_target_names, - data, - target_list, - target_dicts, - toplevel_dir, - build_files, - ): - self._additional_compile_target_names = set(additional_compile_target_names) - self._test_target_names = set(test_target_names) - ( - self._name_to_target, - self._changed_targets, - self._root_targets, - ) = _GenerateTargets( - data, target_list, target_dicts, toplevel_dir, frozenset(files), build_files - ) - ( - self._unqualified_mapping, - self.invalid_targets, - ) = _GetUnqualifiedToTargetMapping( - self._name_to_target, self._supplied_target_names_no_all() - ) - - def _supplied_target_names(self): - return self._additional_compile_target_names | self._test_target_names - - def _supplied_target_names_no_all(self): - """Returns the supplied test targets without 'all'.""" - result = self._supplied_target_names() - result.discard("all") - return result - - def is_build_impacted(self): - """Returns true if the supplied files impact the build at all.""" - return self._changed_targets - - def find_matching_test_target_names(self): - """Returns the set of output test targets.""" - assert self.is_build_impacted() - # Find the test targets first. 'all' is special cased to mean all the - # root targets. To deal with all the supplied |test_targets| are expanded - # to include the root targets during lookup. If any of the root targets - # match, we remove it and replace it with 'all'. - test_target_names_no_all = set(self._test_target_names) - test_target_names_no_all.discard("all") - test_targets_no_all = _LookupTargets( - test_target_names_no_all, self._unqualified_mapping - ) - test_target_names_contains_all = "all" in self._test_target_names - if test_target_names_contains_all: - test_targets = list(set(test_targets_no_all) | set(self._root_targets)) - else: - test_targets = list(test_targets_no_all) - print("supplied test_targets") - for target_name in self._test_target_names: - print("\t", target_name) - print("found test_targets") - for target in test_targets: - print("\t", target.name) - print("searching for matching test targets") - matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets) - matching_test_targets_contains_all = test_target_names_contains_all and set( - matching_test_targets - ) & set(self._root_targets) - if matching_test_targets_contains_all: - # Remove any of the targets for all that were not explicitly supplied, - # 'all' is subsequentely added to the matching names below. - matching_test_targets = list( - set(matching_test_targets) & set(test_targets_no_all) - ) - print("matched test_targets") - for target in matching_test_targets: - print("\t", target.name) - matching_target_names = [ - gyp.common.ParseQualifiedTarget(target.name)[1] - for target in matching_test_targets - ] - if matching_test_targets_contains_all: - matching_target_names.append("all") - print("\tall") - return matching_target_names - - def find_matching_compile_target_names(self): - """Returns the set of output compile targets.""" - assert self.is_build_impacted() - # Compile targets are found by searching up from changed targets. 
- # Reset the visited status for _GetBuildTargets. - for target in self._name_to_target.values(): - target.visited = False - - supplied_targets = _LookupTargets( - self._supplied_target_names_no_all(), self._unqualified_mapping - ) - if "all" in self._supplied_target_names(): - supplied_targets = list(set(supplied_targets) | set(self._root_targets)) - print("Supplied test_targets & compile_targets") - for target in supplied_targets: - print("\t", target.name) - print("Finding compile targets") - compile_targets = _GetCompileTargets(self._changed_targets, supplied_targets) - return [ - gyp.common.ParseQualifiedTarget(target.name)[1] - for target in compile_targets - ] - - -def GenerateOutput(target_list, target_dicts, data, params): - """Called by gyp as the final stage. Outputs results.""" - config = Config() - try: - config.Init(params) - - if not config.files: - raise Exception( - "Must specify files to analyze via config_path generator " "flag" - ) - - toplevel_dir = _ToGypPath(os.path.abspath(params["options"].toplevel_dir)) - if debug: - print("toplevel_dir", toplevel_dir) - - if _WasGypIncludeFileModified(params, config.files): - result_dict = { - "status": all_changed_string, - "test_targets": list(config.test_target_names), - "compile_targets": list( - config.additional_compile_target_names | config.test_target_names - ), - } - _WriteOutput(params, **result_dict) - return - - calculator = TargetCalculator( - config.files, - config.additional_compile_target_names, - config.test_target_names, - data, - target_list, - target_dicts, - toplevel_dir, - params["build_files"], - ) - if not calculator.is_build_impacted(): - result_dict = { - "status": no_dependency_string, - "test_targets": [], - "compile_targets": [], - } - if calculator.invalid_targets: - result_dict["invalid_targets"] = calculator.invalid_targets - _WriteOutput(params, **result_dict) - return - - test_target_names = calculator.find_matching_test_target_names() - compile_target_names = calculator.find_matching_compile_target_names() - found_at_least_one_target = compile_target_names or test_target_names - result_dict = { - "test_targets": test_target_names, - "status": found_dependency_string - if found_at_least_one_target - else no_dependency_string, - "compile_targets": list(set(compile_target_names) | set(test_target_names)), - } - if calculator.invalid_targets: - result_dict["invalid_targets"] = calculator.invalid_targets - _WriteOutput(params, **result_dict) - - except Exception as e: - _WriteOutput(params, error=str(e)) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py deleted file mode 100644 index 2a63f412dbc83..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py +++ /dev/null @@ -1,1173 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Notes: -# -# This generates makefiles suitable for inclusion into the Android build system -# via an Android.mk file. It is based on make.py, the standard makefile -# generator. -# -# The code below generates a separate .mk file for each target, but -# all are sourced by the top-level GypAndroid.mk. This means that all -# variables in .mk-files clobber one another, and furthermore that any -# variables set potentially clash with other Android build system variables. 
-# Try to avoid setting global variables where possible. - - -import gyp -import gyp.common -import gyp.generator.make as make # Reuse global functions from make backend. -import os -import re -import subprocess - -generator_default_variables = { - "OS": "android", - "EXECUTABLE_PREFIX": "", - "EXECUTABLE_SUFFIX": "", - "STATIC_LIB_PREFIX": "lib", - "SHARED_LIB_PREFIX": "lib", - "STATIC_LIB_SUFFIX": ".a", - "SHARED_LIB_SUFFIX": ".so", - "INTERMEDIATE_DIR": "$(gyp_intermediate_dir)", - "SHARED_INTERMEDIATE_DIR": "$(gyp_shared_intermediate_dir)", - "PRODUCT_DIR": "$(gyp_shared_intermediate_dir)", - "SHARED_LIB_DIR": "$(builddir)/lib.$(TOOLSET)", - "LIB_DIR": "$(obj).$(TOOLSET)", - "RULE_INPUT_ROOT": "%(INPUT_ROOT)s", # This gets expanded by Python. - "RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s", # This gets expanded by Python. - "RULE_INPUT_PATH": "$(RULE_SOURCES)", - "RULE_INPUT_EXT": "$(suffix $<)", - "RULE_INPUT_NAME": "$(notdir $<)", - "CONFIGURATION_NAME": "$(GYP_CONFIGURATION)", -} - -# Make supports multiple toolsets -generator_supports_multiple_toolsets = True - - -# Generator-specific gyp specs. -generator_additional_non_configuration_keys = [ - # Boolean to declare that this target does not want its name mangled. - "android_unmangled_name", - # Map of android build system variables to set. - "aosp_build_settings", -] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] - - -ALL_MODULES_FOOTER = """\ -# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from -# all the included sub-makefiles. This is just here to clarify. -gyp_all_modules: -""" - -header = """\ -# This file is generated by gyp; do not edit. - -""" - -# Map gyp target types to Android module classes. -MODULE_CLASSES = { - "static_library": "STATIC_LIBRARIES", - "shared_library": "SHARED_LIBRARIES", - "executable": "EXECUTABLES", -} - - -def IsCPPExtension(ext): - return make.COMPILABLE_EXTENSIONS.get(ext) == "cxx" - - -def Sourceify(path): - """Convert a path to its source directory form. The Android backend does not - support options.generator_output, so this function is a noop.""" - return path - - -# Map from qualified target to path to output. -# For Android, the target of these maps is a tuple ('static', 'modulename'), -# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string, -# since we link by module. -target_outputs = {} -# Map from qualified target to any linkable output. A subset -# of target_outputs. E.g. when mybinary depends on liba, we want to -# include liba in the linker line; when otherbinary depends on -# mybinary, we just want to build mybinary first. -target_link_deps = {} - - -class AndroidMkWriter: - """AndroidMkWriter packages up the writing of one target-specific Android.mk. - - Its only real entry point is Write(), and is mostly used for namespacing. - """ - - def __init__(self, android_top_dir): - self.android_top_dir = android_top_dir - - def Write( - self, - qualified_target, - relative_target, - base_path, - output_filename, - spec, - configs, - part_of_all, - write_alias_target, - sdk_version, - ): - """The main entry point: writes a .mk file for a single target. 
- - Arguments: - qualified_target: target we're generating - relative_target: qualified target name relative to the root - base_path: path relative to source root we're building in, used to resolve - target-relative paths - output_filename: output .mk file name to write - spec, configs: gyp info - part_of_all: flag indicating this target is part of 'all' - write_alias_target: flag indicating whether to create short aliases for - this target - sdk_version: what to emit for LOCAL_SDK_VERSION in output - """ - gyp.common.EnsureDirExists(output_filename) - - self.fp = open(output_filename, "w") - - self.fp.write(header) - - self.qualified_target = qualified_target - self.relative_target = relative_target - self.path = base_path - self.target = spec["target_name"] - self.type = spec["type"] - self.toolset = spec["toolset"] - - deps, link_deps = self.ComputeDeps(spec) - - # Some of the generation below can add extra output, sources, or - # link dependencies. All of the out params of the functions that - # follow use names like extra_foo. - extra_outputs = [] - extra_sources = [] - - self.android_class = MODULE_CLASSES.get(self.type, "GYP") - self.android_module = self.ComputeAndroidModule(spec) - (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec) - self.output = self.output_binary = self.ComputeOutput(spec) - - # Standard header. - self.WriteLn("include $(CLEAR_VARS)\n") - - # Module class and name. - self.WriteLn("LOCAL_MODULE_CLASS := " + self.android_class) - self.WriteLn("LOCAL_MODULE := " + self.android_module) - # Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE. - # The library module classes fail if the stem is set. ComputeOutputParts - # makes sure that stem == modulename in these cases. - if self.android_stem != self.android_module: - self.WriteLn("LOCAL_MODULE_STEM := " + self.android_stem) - self.WriteLn("LOCAL_MODULE_SUFFIX := " + self.android_suffix) - if self.toolset == "host": - self.WriteLn("LOCAL_IS_HOST_MODULE := true") - self.WriteLn("LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)") - elif sdk_version > 0: - self.WriteLn( - "LOCAL_MODULE_TARGET_ARCH := " "$(TARGET_$(GYP_VAR_PREFIX)ARCH)" - ) - self.WriteLn("LOCAL_SDK_VERSION := %s" % sdk_version) - - # Grab output directories; needed for Actions and Rules. - if self.toolset == "host": - self.WriteLn( - "gyp_intermediate_dir := " - "$(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))" - ) - else: - self.WriteLn( - "gyp_intermediate_dir := " - "$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))" - ) - self.WriteLn( - "gyp_shared_intermediate_dir := " - "$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))" - ) - self.WriteLn() - - # List files this target depends on so that actions/rules/copies/sources - # can depend on the list. - # TODO: doesn't pull in things through transitive link deps; needed? - target_dependencies = [x[1] for x in deps if x[0] == "path"] - self.WriteLn("# Make sure our deps are built first.") - self.WriteList( - target_dependencies, "GYP_TARGET_DEPENDENCIES", local_pathify=True - ) - - # Actions must come first, since they can generate more OBJs for use below. - if "actions" in spec: - self.WriteActions(spec["actions"], extra_sources, extra_outputs) - - # Rules must be early like actions. - if "rules" in spec: - self.WriteRules(spec["rules"], extra_sources, extra_outputs) - - if "copies" in spec: - self.WriteCopies(spec["copies"], extra_outputs) - - # GYP generated outputs. 
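        # (extra_outputs was filled in by the actions/rules/copies writers
        # above; publishing it as $(GYP_GENERATED_OUTPUTS) lets the dependency
        # lines below name every generated file through one variable.)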
- self.WriteList(extra_outputs, "GYP_GENERATED_OUTPUTS", local_pathify=True) - - # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend - # on both our dependency targets and our generated files. - self.WriteLn("# Make sure our deps and generated files are built first.") - self.WriteLn( - "LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) " - "$(GYP_GENERATED_OUTPUTS)" - ) - self.WriteLn() - - # Sources. - if spec.get("sources", []) or extra_sources: - self.WriteSources(spec, configs, extra_sources) - - self.WriteTarget( - spec, configs, deps, link_deps, part_of_all, write_alias_target - ) - - # Update global list of target outputs, used in dependency tracking. - target_outputs[qualified_target] = ("path", self.output_binary) - - # Update global list of link dependencies. - if self.type == "static_library": - target_link_deps[qualified_target] = ("static", self.android_module) - elif self.type == "shared_library": - target_link_deps[qualified_target] = ("shared", self.android_module) - - self.fp.close() - return self.android_module - - def WriteActions(self, actions, extra_sources, extra_outputs): - """Write Makefile code for any 'actions' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - actions (used to make other pieces dependent on these - actions) - """ - for action in actions: - name = make.StringToMakefileVariable( - "{}_{}".format(self.relative_target, action["action_name"]) - ) - self.WriteLn('### Rules for action "%s":' % action["action_name"]) - inputs = action["inputs"] - outputs = action["outputs"] - - # Build up a list of outputs. - # Collect the output dirs we'll need. - dirs = set() - for out in outputs: - if not out.startswith("$"): - print( - 'WARNING: Action for target "%s" writes output to local path ' - '"%s".' % (self.target, out) - ) - dir = os.path.split(out)[0] - if dir: - dirs.add(dir) - if int(action.get("process_outputs_as_sources", False)): - extra_sources += outputs - - # Prepare the actual command. - command = gyp.common.EncodePOSIXShellList(action["action"]) - if "message" in action: - quiet_cmd = "Gyp action: %s ($@)" % action["message"] - else: - quiet_cmd = "Gyp action: %s ($@)" % name - if len(dirs) > 0: - command = "mkdir -p %s" % " ".join(dirs) + "; " + command - - cd_action = "cd $(gyp_local_path)/%s; " % self.path - command = cd_action + command - - # The makefile rules are all relative to the top dir, but the gyp actions - # are defined relative to their containing dir. This replaces the gyp_* - # variables for the action rule with an absolute version so that the - # output goes in the right place. - # Only write the gyp_* rules for the "primary" output (:1); - # it's superfluous for the "extra outputs", and this avoids accidentally - # writing duplicate dummy rules for those outputs. - main_output = make.QuoteSpaces(self.LocalPathify(outputs[0])) - self.WriteLn("%s: gyp_local_path := $(LOCAL_PATH)" % main_output) - self.WriteLn("%s: gyp_var_prefix := $(GYP_VAR_PREFIX)" % main_output) - self.WriteLn( - "%s: gyp_intermediate_dir := " - "$(abspath $(gyp_intermediate_dir))" % main_output - ) - self.WriteLn( - "%s: gyp_shared_intermediate_dir := " - "$(abspath $(gyp_shared_intermediate_dir))" % main_output - ) - - # Android's envsetup.sh adds a number of directories to the path including - # the built host binary directory. 
This causes actions/rules invoked by - # gyp to sometimes use these instead of system versions, e.g. bison. - # The built host binaries may not be suitable, and can cause errors. - # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable - # set by envsetup. - self.WriteLn( - "%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))" - % main_output - ) - - # Don't allow spaces in input/output filenames, but make an exception for - # filenames which start with '$(' since it's okay for there to be spaces - # inside of make function/macro invocations. - for input in inputs: - if not input.startswith("$(") and " " in input: - raise gyp.common.GypError( - 'Action input filename "%s" in target %s contains a space' - % (input, self.target) - ) - for output in outputs: - if not output.startswith("$(") and " " in output: - raise gyp.common.GypError( - 'Action output filename "%s" in target %s contains a space' - % (output, self.target) - ) - - self.WriteLn( - "%s: %s $(GYP_TARGET_DEPENDENCIES)" - % (main_output, " ".join(map(self.LocalPathify, inputs))) - ) - self.WriteLn('\t@echo "%s"' % quiet_cmd) - self.WriteLn("\t$(hide)%s\n" % command) - for output in outputs[1:]: - # Make each output depend on the main output, with an empty command - # to force make to notice that the mtime has changed. - self.WriteLn(f"{self.LocalPathify(output)}: {main_output} ;") - - extra_outputs += outputs - self.WriteLn() - - self.WriteLn() - - def WriteRules(self, rules, extra_sources, extra_outputs): - """Write Makefile code for any 'rules' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - rules (used to make other pieces dependent on these rules) - """ - if len(rules) == 0: - return - - for rule in rules: - if len(rule.get("rule_sources", [])) == 0: - continue - name = make.StringToMakefileVariable( - "{}_{}".format(self.relative_target, rule["rule_name"]) - ) - self.WriteLn('\n### Generated for rule "%s":' % name) - self.WriteLn('# "%s":' % rule) - - inputs = rule.get("inputs") - for rule_source in rule.get("rule_sources", []): - (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) - (rule_source_root, rule_source_ext) = os.path.splitext( - rule_source_basename - ) - - outputs = [ - self.ExpandInputRoot(out, rule_source_root, rule_source_dirname) - for out in rule["outputs"] - ] - - dirs = set() - for out in outputs: - if not out.startswith("$"): - print( - "WARNING: Rule for target %s writes output to local path %s" - % (self.target, out) - ) - dir = os.path.dirname(out) - if dir: - dirs.add(dir) - extra_outputs += outputs - if int(rule.get("process_outputs_as_sources", False)): - extra_sources.extend(outputs) - - components = [] - for component in rule["action"]: - component = self.ExpandInputRoot( - component, rule_source_root, rule_source_dirname - ) - if "$(RULE_SOURCES)" in component: - component = component.replace("$(RULE_SOURCES)", rule_source) - components.append(component) - - command = gyp.common.EncodePOSIXShellList(components) - cd_action = "cd $(gyp_local_path)/%s; " % self.path - command = cd_action + command - if dirs: - command = "mkdir -p %s" % " ".join(dirs) + "; " + command - - # We set up a rule to build the first output, and then set up - # a rule for each additional output to depend on the first. 
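                # (Sketch of the emitted make rules, names illustrative:
                #   gen/out1.h: src.idl $(GYP_TARGET_DEPENDENCIES)   # real command
                #   gen/out2.h: gen/out1.h ;                         # empty recipe
                # The empty recipe makes make re-check mtimes on the extra outputs.)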
-                # Materialize the list: map() is lazy on Python 3 and the
-                # subscripting below would fail on an iterator.
-                outputs = [self.LocalPathify(output) for output in outputs]
-                main_output = outputs[0]
-                self.WriteLn("%s: gyp_local_path := $(LOCAL_PATH)" % main_output)
-                self.WriteLn("%s: gyp_var_prefix := $(GYP_VAR_PREFIX)" % main_output)
-                self.WriteLn(
-                    "%s: gyp_intermediate_dir := "
-                    "$(abspath $(gyp_intermediate_dir))" % main_output
-                )
-                self.WriteLn(
-                    "%s: gyp_shared_intermediate_dir := "
-                    "$(abspath $(gyp_shared_intermediate_dir))" % main_output
-                )
-
-                # See explanation in WriteActions.
-                self.WriteLn(
-                    "%s: export PATH := "
-                    "$(subst $(ANDROID_BUILD_PATHS),,$(PATH))" % main_output
-                )
-
-                main_output_deps = self.LocalPathify(rule_source)
-                if inputs:
-                    main_output_deps += " "
-                    main_output_deps += " ".join([self.LocalPathify(f) for f in inputs])
-
-                self.WriteLn(
-                    "%s: %s $(GYP_TARGET_DEPENDENCIES)"
-                    % (main_output, main_output_deps)
-                )
-                self.WriteLn("\t%s\n" % command)
-                for output in outputs[1:]:
-                    # Make each output depend on the main output, with an empty command
-                    # to force make to notice that the mtime has changed.
-                    self.WriteLn(f"{output}: {main_output} ;")
-                self.WriteLn()
-
-        self.WriteLn()
-
-    def WriteCopies(self, copies, extra_outputs):
-        """Write Makefile code for any 'copies' from the gyp input.
-
-        extra_outputs: a list that will be filled in with any outputs of this action
-            (used to make other pieces dependent on this action)
-        """
-        self.WriteLn("### Generated for copy rule.")
-
-        variable = make.StringToMakefileVariable(self.relative_target + "_copies")
-        outputs = []
-        for copy in copies:
-            for path in copy["files"]:
-                # The Android build system does not allow generation of files into the
-                # source tree. The destination should start with a variable, which will
-                # typically be $(gyp_intermediate_dir) or
-                # $(gyp_shared_intermediate_dir). Note that we can't use an assertion
-                # because some of the gyp tests depend on this.
-                if not copy["destination"].startswith("$"):
-                    print(
-                        "WARNING: Copy rule for target %s writes output to "
-                        "local path %s" % (self.target, copy["destination"])
-                    )
-
-                # LocalPathify() calls normpath, stripping trailing slashes.
-                path = Sourceify(self.LocalPathify(path))
-                filename = os.path.split(path)[1]
-                output = Sourceify(
-                    self.LocalPathify(os.path.join(copy["destination"], filename))
-                )
-
-                self.WriteLn(f"{output}: {path} $(GYP_TARGET_DEPENDENCIES) | $(ACP)")
-                self.WriteLn("\t@echo Copying: $@")
-                self.WriteLn("\t$(hide) mkdir -p $(dir $@)")
-                self.WriteLn("\t$(hide) $(ACP) -rpf $< $@")
-                self.WriteLn()
-                outputs.append(output)
-        self.WriteLn(
-            "{} = {}".format(variable, " ".join(map(make.QuoteSpaces, outputs)))
-        )
-        extra_outputs.append("$(%s)" % variable)
-        self.WriteLn()
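    # A sketch of the stanza WriteCopies emits per file (paths illustrative):
    #
    #   $(gyp_intermediate_dir)/foo.txt: $(LOCAL_PATH)/data/foo.txt $(GYP_TARGET_DEPENDENCIES) | $(ACP)
    #       @echo Copying: $@
    #       $(hide) mkdir -p $(dir $@)
    #       $(hide) $(ACP) -rpf $< $@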
- """ - for configname, config in sorted(configs.items()): - extracted_includes = [] - - self.WriteLn("\n# Flags passed to both C and C++ files.") - cflags, includes_from_cflags = self.ExtractIncludesFromCFlags( - config.get("cflags", []) + config.get("cflags_c", []) - ) - extracted_includes.extend(includes_from_cflags) - self.WriteList(cflags, "MY_CFLAGS_%s" % configname) - - self.WriteList( - config.get("defines"), - "MY_DEFS_%s" % configname, - prefix="-D", - quoter=make.EscapeCppDefine, - ) - - self.WriteLn("\n# Include paths placed before CFLAGS/CPPFLAGS") - includes = list(config.get("include_dirs", [])) - includes.extend(extracted_includes) - includes = map(Sourceify, map(self.LocalPathify, includes)) - includes = self.NormalizeIncludePaths(includes) - self.WriteList(includes, "LOCAL_C_INCLUDES_%s" % configname) - - self.WriteLn("\n# Flags passed to only C++ (and not C) files.") - self.WriteList(config.get("cflags_cc"), "LOCAL_CPPFLAGS_%s" % configname) - - self.WriteLn( - "\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) " - "$(MY_DEFS_$(GYP_CONFIGURATION))" - ) - # Undefine ANDROID for host modules - # TODO: the source code should not use macro ANDROID to tell if it's host - # or target module. - if self.toolset == "host": - self.WriteLn("# Undefine ANDROID for host modules") - self.WriteLn("LOCAL_CFLAGS += -UANDROID") - self.WriteLn( - "LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) " - "$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))" - ) - self.WriteLn("LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))") - # Android uses separate flags for assembly file invocations, but gyp expects - # the same CFLAGS to be applied: - self.WriteLn("LOCAL_ASFLAGS := $(LOCAL_CFLAGS)") - - def WriteSources(self, spec, configs, extra_sources): - """Write Makefile code for any 'sources' from the gyp input. - These are source files necessary to build the current target. - We need to handle shared_intermediate directory source files as - a special case by copying them to the intermediate directory and - treating them as a generated sources. Otherwise the Android build - rules won't pick them up. - - Args: - spec, configs: input from gyp. - extra_sources: Sources generated from Actions or Rules. - """ - sources = filter(make.Compilable, spec.get("sources", [])) - generated_not_sources = [x for x in extra_sources if not make.Compilable(x)] - extra_sources = filter(make.Compilable, extra_sources) - - # Determine and output the C++ extension used by these sources. - # We simply find the first C++ file and use that extension. - all_sources = sources + extra_sources - local_cpp_extension = ".cpp" - for source in all_sources: - (root, ext) = os.path.splitext(source) - if IsCPPExtension(ext): - local_cpp_extension = ext - break - if local_cpp_extension != ".cpp": - self.WriteLn("LOCAL_CPP_EXTENSION := %s" % local_cpp_extension) - - # We need to move any non-generated sources that are coming from the - # shared intermediate directory out of LOCAL_SRC_FILES and put them - # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files - # that don't match our local_cpp_extension, since Android will only - # generate Makefile rules for a single LOCAL_CPP_EXTENSION. 
- local_files = [] - for source in sources: - (root, ext) = os.path.splitext(source) - if "$(gyp_shared_intermediate_dir)" in source: - extra_sources.append(source) - elif "$(gyp_intermediate_dir)" in source: - extra_sources.append(source) - elif IsCPPExtension(ext) and ext != local_cpp_extension: - extra_sources.append(source) - else: - local_files.append(os.path.normpath(os.path.join(self.path, source))) - - # For any generated source, if it is coming from the shared intermediate - # directory then we add a Make rule to copy them to the local intermediate - # directory first. This is because the Android LOCAL_GENERATED_SOURCES - # must be in the local module intermediate directory for the compile rules - # to work properly. If the file has the wrong C++ extension, then we add - # a rule to copy that to intermediates and use the new version. - final_generated_sources = [] - # If a source file gets copied, we still need to add the original source - # directory as header search path, for GCC searches headers in the - # directory that contains the source file by default. - origin_src_dirs = [] - for source in extra_sources: - local_file = source - if "$(gyp_intermediate_dir)/" not in local_file: - basename = os.path.basename(local_file) - local_file = "$(gyp_intermediate_dir)/" + basename - (root, ext) = os.path.splitext(local_file) - if IsCPPExtension(ext) and ext != local_cpp_extension: - local_file = root + local_cpp_extension - if local_file != source: - self.WriteLn(f"{local_file}: {self.LocalPathify(source)}") - self.WriteLn("\tmkdir -p $(@D); cp $< $@") - origin_src_dirs.append(os.path.dirname(source)) - final_generated_sources.append(local_file) - - # We add back in all of the non-compilable stuff to make sure that the - # make rules have dependencies on them. - final_generated_sources.extend(generated_not_sources) - self.WriteList(final_generated_sources, "LOCAL_GENERATED_SOURCES") - - origin_src_dirs = gyp.common.uniquer(origin_src_dirs) - origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs)) - self.WriteList(origin_src_dirs, "GYP_COPIED_SOURCE_ORIGIN_DIRS") - - self.WriteList(local_files, "LOCAL_SRC_FILES") - - # Write out the flags used to compile the source; this must be done last - # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path. - self.WriteSourceFlags(spec, configs) - - def ComputeAndroidModule(self, spec): - """Return the Android module name used for a gyp spec. - - We use the complete qualified target name to avoid collisions between - duplicate targets in different directories. We also add a suffix to - distinguish gyp-generated module names. - """ - - if int(spec.get("android_unmangled_name", 0)): - assert self.type != "shared_library" or self.target.startswith("lib") - return self.target - - if self.type == "shared_library": - # For reasons of convention, the Android build system requires that all - # shared library modules are named 'libfoo' when generating -l flags. - prefix = "lib_" - else: - prefix = "" - - if spec["toolset"] == "host": - suffix = "_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp" - else: - suffix = "_gyp" - - if self.path: - middle = make.StringToMakefileVariable(f"{self.path}_{self.target}") - else: - middle = make.StringToMakefileVariable(self.target) - - return "".join([prefix, middle, suffix]) - - def ComputeOutputParts(self, spec): - """Return the 'output basename' of a gyp spec, split into filename + ext. 
-
-        Android libraries must be named the same thing as their module name,
-        otherwise the linker can't find them, so product_name and so on must be
-        ignored if we are building a library, and the "lib" prepending is
-        not done for Android.
-        """
-        assert self.type != "loadable_module"  # TODO: not supported?
-
-        target = spec["target_name"]
-        target_prefix = ""
-        target_ext = ""
-        if self.type == "static_library":
-            target = self.ComputeAndroidModule(spec)
-            target_ext = ".a"
-        elif self.type == "shared_library":
-            target = self.ComputeAndroidModule(spec)
-            target_ext = ".so"
-        elif self.type == "none":
-            target_ext = ".stamp"
-        elif self.type != "executable":
-            print(
-                "ERROR: What output file should be generated?",
-                "type",
-                self.type,
-                "target",
-                target,
-            )
-
-        if self.type not in {"static_library", "shared_library"}:
-            target_prefix = spec.get("product_prefix", target_prefix)
-            target = spec.get("product_name", target)
-            product_ext = spec.get("product_extension")
-            if product_ext:
-                target_ext = "." + product_ext
-
-        target_stem = target_prefix + target
-        return (target_stem, target_ext)
-
-    def ComputeOutputBasename(self, spec):
-        """Return the 'output basename' of a gyp spec.
-
-        E.g., the loadable module 'foobar' in directory 'baz' will produce
-        'libfoobar.so'
-        """
-        return "".join(self.ComputeOutputParts(spec))
-
-    def ComputeOutput(self, spec):
-        """Return the 'output' (full output path) of a gyp spec.
-
-        E.g., the loadable module 'foobar' in directory 'baz' will produce
-        '$(obj)/baz/libfoobar.so'
-        """
-        if self.type == "executable":
-            # We install host executables into shared_intermediate_dir so they can be
-            # run by gyp rules that refer to PRODUCT_DIR.
-            path = "$(gyp_shared_intermediate_dir)"
-        elif self.type == "shared_library":
-            if self.toolset == "host":
-                path = "$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)"
-            else:
-                path = "$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)"
-        else:
-            # Other targets just get built into their intermediate dir.
-            if self.toolset == "host":
-                path = (
-                    "$(call intermediates-dir-for,%s,%s,true,,"
-                    "$(GYP_HOST_VAR_PREFIX))"
-                    % (self.android_class, self.android_module)
-                )
-            else:
-                path = (
-                    f"$(call intermediates-dir-for,{self.android_class},"
-                    f"{self.android_module},,,$(GYP_VAR_PREFIX))"
-                )
-
-        assert spec.get("product_dir") is None  # TODO: not supported?
-        return os.path.join(path, self.ComputeOutputBasename(spec))
-
-    def NormalizeIncludePaths(self, include_paths):
-        """Normalize include_paths.
-        Convert absolute paths into paths relative to the Android top directory.
-
-        Args:
-            include_paths: A list of unprocessed include paths.
-        Returns:
-            A list of normalized include paths.
-        """
-        normalized = []
-        for path in include_paths:
-            if path[0] == "/":
-                path = gyp.common.RelativePath(path, self.android_top_dir)
-            normalized.append(path)
-        return normalized
-
-    def ExtractIncludesFromCFlags(self, cflags):
-        """Extract include paths ("-I...") from cflags.
-
-        Args:
-            cflags: A list of compiler flags, which may be mixed with "-I.."
-        Returns:
-            A tuple of lists: (clean_cflags, include_paths). The "-I" prefix
-            is trimmed from the returned include paths.
-        """
-        clean_cflags = []
-        include_paths = []
-        for flag in cflags:
-            if flag.startswith("-I"):
-                include_paths.append(flag[2:])
-            else:
-                clean_cflags.append(flag)
-
-        return (clean_cflags, include_paths)
-
-    def FilterLibraries(self, libraries):
-        """Filter the 'libraries' key to separate things that shouldn't be ldflags.
- - Library entries that look like filenames should be converted to android - module names instead of being passed to the linker as flags. - - Args: - libraries: the value of spec.get('libraries') - Returns: - A tuple (static_lib_modules, dynamic_lib_modules, ldflags) - """ - static_lib_modules = [] - dynamic_lib_modules = [] - ldflags = [] - for libs in libraries: - # Libs can have multiple words. - for lib in libs.split(): - # Filter the system libraries, which are added by default by the Android - # build system. - if ( - lib == "-lc" - or lib == "-lstdc++" - or lib == "-lm" - or lib.endswith("libgcc.a") - ): - continue - match = re.search(r"([^/]+)\.a$", lib) - if match: - static_lib_modules.append(match.group(1)) - continue - match = re.search(r"([^/]+)\.so$", lib) - if match: - dynamic_lib_modules.append(match.group(1)) - continue - if lib.startswith("-l"): - ldflags.append(lib) - return (static_lib_modules, dynamic_lib_modules, ldflags) - - def ComputeDeps(self, spec): - """Compute the dependencies of a gyp spec. - - Returns a tuple (deps, link_deps), where each is a list of - filenames that will need to be put in front of make for either - building (deps) or linking (link_deps). - """ - deps = [] - link_deps = [] - if "dependencies" in spec: - deps.extend( - [ - target_outputs[dep] - for dep in spec["dependencies"] - if target_outputs[dep] - ] - ) - for dep in spec["dependencies"]: - if dep in target_link_deps: - link_deps.append(target_link_deps[dep]) - deps.extend(link_deps) - return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) - - def WriteTargetFlags(self, spec, configs, link_deps): - """Write Makefile code to specify the link flags and library dependencies. - - spec, configs: input from gyp. - link_deps: link dependency list; see ComputeDeps() - """ - # Libraries (i.e. -lfoo) - # These must be included even for static libraries as some of them provide - # implicit include paths through the build system. - libraries = gyp.common.uniquer(spec.get("libraries", [])) - static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries) - - if self.type != "static_library": - for configname, config in sorted(configs.items()): - ldflags = list(config.get("ldflags", [])) - self.WriteLn("") - self.WriteList(ldflags, "LOCAL_LDFLAGS_%s" % configname) - self.WriteList(ldflags_libs, "LOCAL_GYP_LIBS") - self.WriteLn( - "LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) " - "$(LOCAL_GYP_LIBS)" - ) - - # Link dependencies (i.e. other gyp targets this target depends on) - # These need not be included for static libraries as within the gyp build - # we do not use the implicit include path mechanism. - if self.type != "static_library": - static_link_deps = [x[1] for x in link_deps if x[0] == "static"] - shared_link_deps = [x[1] for x in link_deps if x[0] == "shared"] - else: - static_link_deps = [] - shared_link_deps = [] - - # Only write the lists if they are non-empty. - if static_libs or static_link_deps: - self.WriteLn("") - self.WriteList(static_libs + static_link_deps, "LOCAL_STATIC_LIBRARIES") - self.WriteLn("# Enable grouping to fix circular references") - self.WriteLn("LOCAL_GROUP_STATIC_LIBRARIES := true") - if dynamic_libs or shared_link_deps: - self.WriteLn("") - self.WriteList(dynamic_libs + shared_link_deps, "LOCAL_SHARED_LIBRARIES") - - def WriteTarget( - self, spec, configs, deps, link_deps, part_of_all, write_alias_target - ): - """Write Makefile code to produce the final target of the gyp spec. - - spec, configs: input from gyp. 
- deps, link_deps: dependency lists; see ComputeDeps() - part_of_all: flag indicating this target is part of 'all' - write_alias_target: flag indicating whether to create short aliases for this - target - """ - self.WriteLn("### Rules for final target.") - - if self.type != "none": - self.WriteTargetFlags(spec, configs, link_deps) - - settings = spec.get("aosp_build_settings", {}) - if settings: - self.WriteLn("### Set directly by aosp_build_settings.") - for k, v in settings.items(): - if isinstance(v, list): - self.WriteList(v, k) - else: - self.WriteLn(f"{k} := {make.QuoteIfNecessary(v)}") - self.WriteLn("") - - # Add to the set of targets which represent the gyp 'all' target. We use the - # name 'gyp_all_modules' as the Android build system doesn't allow the use - # of the Make target 'all' and because 'all_modules' is the equivalent of - # the Make target 'all' on Android. - if part_of_all and write_alias_target: - self.WriteLn('# Add target alias to "gyp_all_modules" target.') - self.WriteLn(".PHONY: gyp_all_modules") - self.WriteLn("gyp_all_modules: %s" % self.android_module) - self.WriteLn("") - - # Add an alias from the gyp target name to the Android module name. This - # simplifies manual builds of the target, and is required by the test - # framework. - if self.target != self.android_module and write_alias_target: - self.WriteLn("# Alias gyp target name.") - self.WriteLn(".PHONY: %s" % self.target) - self.WriteLn(f"{self.target}: {self.android_module}") - self.WriteLn("") - - # Add the command to trigger build of the target type depending - # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY - # NOTE: This has to come last! - modifier = "" - if self.toolset == "host": - modifier = "HOST_" - if self.type == "static_library": - self.WriteLn("include $(BUILD_%sSTATIC_LIBRARY)" % modifier) - elif self.type == "shared_library": - self.WriteLn("LOCAL_PRELINK_MODULE := false") - self.WriteLn("include $(BUILD_%sSHARED_LIBRARY)" % modifier) - elif self.type == "executable": - self.WriteLn("LOCAL_CXX_STL := libc++_static") - # Executables are for build and test purposes only, so they're installed - # to a directory that doesn't get included in the system image. - self.WriteLn("LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)") - self.WriteLn("include $(BUILD_%sEXECUTABLE)" % modifier) - else: - self.WriteLn("LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp") - self.WriteLn("LOCAL_UNINSTALLABLE_MODULE := true") - if self.toolset == "target": - self.WriteLn("LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)") - else: - self.WriteLn("LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)") - self.WriteLn() - self.WriteLn("include $(BUILD_SYSTEM)/base_rules.mk") - self.WriteLn() - self.WriteLn("$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)") - self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"') - self.WriteLn("\t$(hide) mkdir -p $(dir $@)") - self.WriteLn("\t$(hide) touch $@") - self.WriteLn() - self.WriteLn("LOCAL_2ND_ARCH_VAR_PREFIX :=") - - def WriteList( - self, - value_list, - variable=None, - prefix="", - quoter=make.QuoteIfNecessary, - local_pathify=False, - ): - """Write a variable definition that is a list of values. - - E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out - foo = blaha blahb - but in a pretty-printed style. 
- """ - values = "" - if value_list: - value_list = [quoter(prefix + value) for value in value_list] - if local_pathify: - value_list = [self.LocalPathify(value) for value in value_list] - values = " \\\n\t" + " \\\n\t".join(value_list) - self.fp.write(f"{variable} :={values}\n\n") - - def WriteLn(self, text=""): - self.fp.write(text + "\n") - - def LocalPathify(self, path): - """Convert a subdirectory-relative path into a normalized path which starts - with the make variable $(LOCAL_PATH) (i.e. the top of the project tree). - Absolute paths, or paths that contain variables, are just normalized.""" - if "$(" in path or os.path.isabs(path): - # path is not a file in the project tree in this case, but calling - # normpath is still important for trimming trailing slashes. - return os.path.normpath(path) - local_path = os.path.join("$(LOCAL_PATH)", self.path, path) - local_path = os.path.normpath(local_path) - # Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH) - # - i.e. that the resulting path is still inside the project tree. The - # path may legitimately have ended up containing just $(LOCAL_PATH), though, - # so we don't look for a slash. - assert local_path.startswith( - "$(LOCAL_PATH)" - ), f"Path {path} attempts to escape from gyp path {self.path} !)" - return local_path - - def ExpandInputRoot(self, template, expansion, dirname): - if "%(INPUT_ROOT)s" not in template and "%(INPUT_DIRNAME)s" not in template: - return template - path = template % { - "INPUT_ROOT": expansion, - "INPUT_DIRNAME": dirname, - } - return os.path.normpath(path) - - -def PerformBuild(data, configurations, params): - # The android backend only supports the default configuration. - options = params["options"] - makefile = os.path.abspath(os.path.join(options.toplevel_dir, "GypAndroid.mk")) - env = dict(os.environ) - env["ONE_SHOT_MAKEFILE"] = makefile - arguments = ["make", "-C", os.environ["ANDROID_BUILD_TOP"], "gyp_all_modules"] - print("Building: %s" % arguments) - subprocess.check_call(arguments, env=env) - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params["options"] - generator_flags = params.get("generator_flags", {}) - limit_to_target_all = generator_flags.get("limit_to_target_all", False) - write_alias_targets = generator_flags.get("write_alias_targets", True) - sdk_version = generator_flags.get("aosp_sdk_version", 0) - android_top_dir = os.environ.get("ANDROID_BUILD_TOP") - assert android_top_dir, "$ANDROID_BUILD_TOP not set; you need to run lunch." - - def CalculateMakefilePath(build_file, base_name): - """Determine where to write a Makefile for a given gyp file.""" - # Paths in gyp files are relative to the .gyp file, but we want - # paths relative to the source root for the master makefile. Grab - # the path of the .gyp file as the base to relativize against. - # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". - base_path = gyp.common.RelativePath(os.path.dirname(build_file), options.depth) - # We write the file in the base_path directory. - output_file = os.path.join(options.depth, base_path, base_name) - assert ( - not options.generator_output - ), "The Android backend does not support options.generator_output." - base_path = gyp.common.RelativePath( - os.path.dirname(build_file), options.toplevel_dir - ) - return base_path, output_file - - # TODO: search for the first non-'Default' target. This can go - # away when we add verification that all targets have the - # necessary configurations. 
- default_configuration = None - for target in target_list: - spec = target_dicts[target] - if spec["default_configuration"] != "Default": - default_configuration = spec["default_configuration"] - break - if not default_configuration: - default_configuration = "Default" - - makefile_name = "GypAndroid" + options.suffix + ".mk" - makefile_path = os.path.join(options.toplevel_dir, makefile_name) - assert ( - not options.generator_output - ), "The Android backend does not support options.generator_output." - gyp.common.EnsureDirExists(makefile_path) - root_makefile = open(makefile_path, "w") - - root_makefile.write(header) - - # We set LOCAL_PATH just once, here, to the top of the project tree. This - # allows all the other paths we use to be relative to the Android.mk file, - # as the Android build system expects. - root_makefile.write("\nLOCAL_PATH := $(call my-dir)\n") - - # Find the list of targets that derive from the gyp file(s) being built. - needed_targets = set() - for build_file in params["build_files"]: - for target in gyp.common.AllTargets(target_list, target_dicts, build_file): - needed_targets.add(target) - - build_files = set() - include_list = set() - android_modules = {} - for qualified_target in target_list: - build_file, target, toolset = gyp.common.ParseQualifiedTarget(qualified_target) - relative_build_file = gyp.common.RelativePath(build_file, options.toplevel_dir) - build_files.add(relative_build_file) - included_files = data[build_file]["included_files"] - for included_file in included_files: - # The included_files entries are relative to the dir of the build file - # that included them, so we have to undo that and then make them relative - # to the root dir. - relative_include_file = gyp.common.RelativePath( - gyp.common.UnrelativePath(included_file, build_file), - options.toplevel_dir, - ) - abs_include_file = os.path.abspath(relative_include_file) - # If the include file is from the ~/.gyp dir, we should use absolute path - # so that relocating the src dir doesn't break the path. - if params["home_dot_gyp"] and abs_include_file.startswith( - params["home_dot_gyp"] - ): - build_files.add(abs_include_file) - else: - build_files.add(relative_include_file) - - base_path, output_file = CalculateMakefilePath( - build_file, target + "." + toolset + options.suffix + ".mk" - ) - - spec = target_dicts[qualified_target] - configs = spec["configurations"] - - part_of_all = qualified_target in needed_targets - if limit_to_target_all and not part_of_all: - continue - - relative_target = gyp.common.QualifiedTarget( - relative_build_file, target, toolset - ) - writer = AndroidMkWriter(android_top_dir) - android_module = writer.Write( - qualified_target, - relative_target, - base_path, - output_file, - spec, - configs, - part_of_all=part_of_all, - write_alias_target=write_alias_targets, - sdk_version=sdk_version, - ) - if android_module in android_modules: - print( - "ERROR: Android module names must be unique. The following " - "targets both generate Android module name %s.\n %s\n %s" - % (android_module, android_modules[android_module], qualified_target) - ) - return - android_modules[android_module] = qualified_target - - # Our root_makefile lives at the source root. Compute the relative path - # from there to the output_file for including. 
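The relative-path computation the comment above describes reduces to something like the following sketch (assuming gyp.common.RelativePath behaves like os.path.relpath here, which is an assumption; the paths are made up):

    import os

    # The root makefile includes each generated sub-makefile via a path
    # relative to its own directory.
    output_file = "/src/foo/bar/baz.target.mk"
    root_makefile_dir = "/src"
    print(os.path.relpath(output_file, root_makefile_dir))  # foo/bar/baz.target.mk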
- mkfile_rel_path = gyp.common.RelativePath( - output_file, os.path.dirname(makefile_path) - ) - include_list.add(mkfile_rel_path) - - root_makefile.write("GYP_CONFIGURATION ?= %s\n" % default_configuration) - root_makefile.write("GYP_VAR_PREFIX ?=\n") - root_makefile.write("GYP_HOST_VAR_PREFIX ?=\n") - root_makefile.write("GYP_HOST_MULTILIB ?= first\n") - - # Write out the sorted list of includes. - root_makefile.write("\n") - for include_file in sorted(include_list): - root_makefile.write("include $(LOCAL_PATH)/" + include_file + "\n") - root_makefile.write("\n") - - if write_alias_targets: - root_makefile.write(ALL_MODULES_FOOTER) - - root_makefile.close() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py deleted file mode 100644 index 320a891aa8adc..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py +++ /dev/null @@ -1,1318 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""cmake output module - -This module is under development and should be considered experimental. - -This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is -created for each configuration. - -This module's original purpose was to support editing in IDEs like KDevelop -which use CMake for project management. It is also possible to use CMake to -generate projects for other IDEs such as eclipse cdt and code::blocks. QtCreator -will convert the CMakeLists.txt to a code::blocks cbp for the editor to read, -but build using CMake. As a result QtCreator editor is unaware of compiler -defines. The generated CMakeLists.txt can also be used to build on Linux. There -is currently no support for building on platforms other than Linux. - -The generated CMakeLists.txt should properly compile all projects. However, -there is a mismatch between gyp and cmake with regard to linking. All attempts -are made to work around this, but CMake sometimes sees -Wl,--start-group as a -library and incorrectly repeats it. As a result the output of this generator -should not be relied on for building. - -When using with kdevelop, use version 4.4+. Previous versions of kdevelop will -not be able to find the header file directories described in the generated -CMakeLists.txt file. 
-""" - - -import multiprocessing -import os -import signal -import subprocess -import gyp.common -import gyp.xcode_emulation - -_maketrans = str.maketrans - -generator_default_variables = { - "EXECUTABLE_PREFIX": "", - "EXECUTABLE_SUFFIX": "", - "STATIC_LIB_PREFIX": "lib", - "STATIC_LIB_SUFFIX": ".a", - "SHARED_LIB_PREFIX": "lib", - "SHARED_LIB_SUFFIX": ".so", - "SHARED_LIB_DIR": "${builddir}/lib.${TOOLSET}", - "LIB_DIR": "${obj}.${TOOLSET}", - "INTERMEDIATE_DIR": "${obj}.${TOOLSET}/${TARGET}/geni", - "SHARED_INTERMEDIATE_DIR": "${obj}/gen", - "PRODUCT_DIR": "${builddir}", - "RULE_INPUT_PATH": "${RULE_INPUT_PATH}", - "RULE_INPUT_DIRNAME": "${RULE_INPUT_DIRNAME}", - "RULE_INPUT_NAME": "${RULE_INPUT_NAME}", - "RULE_INPUT_ROOT": "${RULE_INPUT_ROOT}", - "RULE_INPUT_EXT": "${RULE_INPUT_EXT}", - "CONFIGURATION_NAME": "${configuration}", -} - -FULL_PATH_VARS = ("${CMAKE_CURRENT_LIST_DIR}", "${builddir}", "${obj}") - -generator_supports_multiple_toolsets = True -generator_wants_static_library_dependencies_adjusted = True - -COMPILABLE_EXTENSIONS = { - ".c": "cc", - ".cc": "cxx", - ".cpp": "cxx", - ".cxx": "cxx", - ".s": "s", # cc - ".S": "s", # cc -} - - -def RemovePrefix(a, prefix): - """Returns 'a' without 'prefix' if it starts with 'prefix'.""" - return a[len(prefix) :] if a.startswith(prefix) else a - - -def CalculateVariables(default_variables, params): - """Calculate additional variables for use in the build (called by gyp).""" - default_variables.setdefault("OS", gyp.common.GetFlavor(params)) - - -def Compilable(filename): - """Return true if the file is compilable (should be in OBJS).""" - return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS) - - -def Linkable(filename): - """Return true if the file is linkable (should be on the link line).""" - return filename.endswith(".o") - - -def NormjoinPathForceCMakeSource(base_path, rel_path): - """Resolves rel_path against base_path and returns the result. - - If rel_path is an absolute path it is returned unchanged. - Otherwise it is resolved against base_path and normalized. - If the result is a relative path, it is forced to be relative to the - CMakeLists.txt. - """ - if os.path.isabs(rel_path): - return rel_path - if any(rel_path.startswith(var) for var in FULL_PATH_VARS): - return rel_path - # TODO: do we need to check base_path for absolute variables as well? - return os.path.join( - "${CMAKE_CURRENT_LIST_DIR}", os.path.normpath(os.path.join(base_path, rel_path)) - ) - - -def NormjoinPath(base_path, rel_path): - """Resolves rel_path against base_path and returns the result. - TODO: what is this really used for? - If rel_path begins with '$' it is returned unchanged. - Otherwise it is resolved against base_path if relative, then normalized. - """ - if rel_path.startswith("$") and not rel_path.startswith("${configuration}"): - return rel_path - return os.path.normpath(os.path.join(base_path, rel_path)) - - -def CMakeStringEscape(a): - """Escapes the string 'a' for use inside a CMake string. 
- - This means escaping - '\' otherwise it may be seen as modifying the next character - '"' otherwise it will end the string - ';' otherwise the string becomes a list - - The following do not need to be escaped - '#' when the lexer is in string state, this does not start a comment - - The following are yet unknown - '$' generator variables (like ${obj}) must not be escaped, - but text $ should be escaped - what is wanted is to know which $ come from generator variables - """ - return a.replace("\\", "\\\\").replace(";", "\\;").replace('"', '\\"') - - -def SetFileProperty(output, source_name, property_name, values, sep): - """Given a set of source file, sets the given property on them.""" - output.write("set_source_files_properties(") - output.write(source_name) - output.write(" PROPERTIES ") - output.write(property_name) - output.write(' "') - for value in values: - output.write(CMakeStringEscape(value)) - output.write(sep) - output.write('")\n') - - -def SetFilesProperty(output, variable, property_name, values, sep): - """Given a set of source files, sets the given property on them.""" - output.write("set_source_files_properties(") - WriteVariable(output, variable) - output.write(" PROPERTIES ") - output.write(property_name) - output.write(' "') - for value in values: - output.write(CMakeStringEscape(value)) - output.write(sep) - output.write('")\n') - - -def SetTargetProperty(output, target_name, property_name, values, sep=""): - """Given a target, sets the given property.""" - output.write("set_target_properties(") - output.write(target_name) - output.write(" PROPERTIES ") - output.write(property_name) - output.write(' "') - for value in values: - output.write(CMakeStringEscape(value)) - output.write(sep) - output.write('")\n') - - -def SetVariable(output, variable_name, value): - """Sets a CMake variable.""" - output.write("set(") - output.write(variable_name) - output.write(' "') - output.write(CMakeStringEscape(value)) - output.write('")\n') - - -def SetVariableList(output, variable_name, values): - """Sets a CMake variable to a list.""" - if not values: - return SetVariable(output, variable_name, "") - if len(values) == 1: - return SetVariable(output, variable_name, values[0]) - output.write("list(APPEND ") - output.write(variable_name) - output.write('\n "') - output.write('"\n "'.join([CMakeStringEscape(value) for value in values])) - output.write('")\n') - - -def UnsetVariable(output, variable_name): - """Unsets a CMake variable.""" - output.write("unset(") - output.write(variable_name) - output.write(")\n") - - -def WriteVariable(output, variable_name, prepend=None): - if prepend: - output.write(prepend) - output.write("${") - output.write(variable_name) - output.write("}") - - -class CMakeTargetType: - def __init__(self, command, modifier, property_modifier): - self.command = command - self.modifier = modifier - self.property_modifier = property_modifier - - -cmake_target_type_from_gyp_target_type = { - "executable": CMakeTargetType("add_executable", None, "RUNTIME"), - "static_library": CMakeTargetType("add_library", "STATIC", "ARCHIVE"), - "shared_library": CMakeTargetType("add_library", "SHARED", "LIBRARY"), - "loadable_module": CMakeTargetType("add_library", "MODULE", "LIBRARY"), - "none": CMakeTargetType("add_custom_target", "SOURCES", None), -} - - -def StringToCMakeTargetName(a): - """Converts the given string 'a' to a valid CMake target name. - - All invalid characters are replaced by '_'. 
-    Invalid for cmake: ' ', '/', '(', ')', '"'
-    Invalid for make: ':'
-    Invalid for unknown reasons but causes failures: '.'
-    """
-    return a.translate(_maketrans(' /():."', "_______"))
-
-
-def WriteActions(target_name, actions, extra_sources, extra_deps, path_to_gyp, output):
-    """Write CMake for the 'actions' in the target.
-
-    Args:
-        target_name: the name of the CMake target being generated.
-        actions: the Gyp 'actions' dict for this target.
-        extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
-        extra_deps: [<cmake_target>] to append with generated targets.
-        path_to_gyp: relative path from CMakeLists.txt being generated to
-            the Gyp file in which the target being generated is defined.
-    """
-    for action in actions:
-        action_name = StringToCMakeTargetName(action["action_name"])
-        action_target_name = f"{target_name}__{action_name}"
-
-        inputs = action["inputs"]
-        inputs_name = action_target_name + "__input"
-        SetVariableList(
-            output,
-            inputs_name,
-            [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs],
-        )
-
-        outputs = action["outputs"]
-        cmake_outputs = [
-            NormjoinPathForceCMakeSource(path_to_gyp, out) for out in outputs
-        ]
-        outputs_name = action_target_name + "__output"
-        SetVariableList(output, outputs_name, cmake_outputs)
-
-        # Build up a list of outputs.
-        # Collect the output dirs we'll need.
-        dirs = {dir for dir in (os.path.dirname(o) for o in outputs) if dir}
-
-        if int(action.get("process_outputs_as_sources", False)):
-            extra_sources.extend(zip(cmake_outputs, outputs))
-
-        # add_custom_command
-        output.write("add_custom_command(OUTPUT ")
-        WriteVariable(output, outputs_name)
-        output.write("\n")
-
-        if len(dirs) > 0:
-            for directory in dirs:
-                output.write("  COMMAND ${CMAKE_COMMAND} -E make_directory ")
-                output.write(directory)
-                output.write("\n")
-
-        output.write("  COMMAND ")
-        output.write(gyp.common.EncodePOSIXShellList(action["action"]))
-        output.write("\n")
-
-        output.write("  DEPENDS ")
-        WriteVariable(output, inputs_name)
-        output.write("\n")
-
-        output.write("  WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/")
-        output.write(path_to_gyp)
-        output.write("\n")
-
-        output.write("  COMMENT ")
-        if "message" in action:
-            output.write(action["message"])
-        else:
-            output.write(action_target_name)
-        output.write("\n")
-
-        output.write("  VERBATIM\n")
-        output.write(")\n")
-
-        # add_custom_target
-        output.write("add_custom_target(")
-        output.write(action_target_name)
-        output.write("\n  DEPENDS ")
-        WriteVariable(output, outputs_name)
-        output.write("\n  SOURCES ")
-        WriteVariable(output, inputs_name)
-        output.write("\n)\n")
-
-        extra_deps.append(action_target_name)
-
-
-def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
-    if rel_path.startswith(("${RULE_INPUT_PATH}", "${RULE_INPUT_DIRNAME}")):
-        if any(rule_source.startswith(var) for var in FULL_PATH_VARS):
-            return rel_path
-    return NormjoinPathForceCMakeSource(base_path, rel_path)
-
-
-def WriteRules(target_name, rules, extra_sources, extra_deps, path_to_gyp, output):
-    """Write CMake for the 'rules' in the target.
-
-    Args:
-        target_name: the name of the CMake target being generated.
-        rules: the Gyp 'rules' dict for this target.
-        extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
-        extra_deps: [<cmake_target>] to append with generated targets.
-        path_to_gyp: relative path from CMakeLists.txt being generated to
-            the Gyp file in which the target being generated is defined.
- """ - for rule in rules: - rule_name = StringToCMakeTargetName(target_name + "__" + rule["rule_name"]) - - inputs = rule.get("inputs", []) - inputs_name = rule_name + "__input" - SetVariableList( - output, - inputs_name, - [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs], - ) - outputs = rule["outputs"] - var_outputs = [] - - for count, rule_source in enumerate(rule.get("rule_sources", [])): - action_name = rule_name + "_" + str(count) - - rule_source_dirname, rule_source_basename = os.path.split(rule_source) - rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename) - - SetVariable(output, "RULE_INPUT_PATH", rule_source) - SetVariable(output, "RULE_INPUT_DIRNAME", rule_source_dirname) - SetVariable(output, "RULE_INPUT_NAME", rule_source_basename) - SetVariable(output, "RULE_INPUT_ROOT", rule_source_root) - SetVariable(output, "RULE_INPUT_EXT", rule_source_ext) - - # Build up a list of outputs. - # Collect the output dirs we'll need. - dirs = {dir for dir in (os.path.dirname(o) for o in outputs) if dir} - - # Create variables for the output, as 'local' variable will be unset. - these_outputs = [] - for output_index, out in enumerate(outputs): - output_name = action_name + "_" + str(output_index) - SetVariable( - output, - output_name, - NormjoinRulePathForceCMakeSource(path_to_gyp, out, rule_source), - ) - if int(rule.get("process_outputs_as_sources", False)): - extra_sources.append(("${" + output_name + "}", out)) - these_outputs.append("${" + output_name + "}") - var_outputs.append("${" + output_name + "}") - - # add_custom_command - output.write("add_custom_command(OUTPUT\n") - for out in these_outputs: - output.write(" ") - output.write(out) - output.write("\n") - - for directory in dirs: - output.write(" COMMAND ${CMAKE_COMMAND} -E make_directory ") - output.write(directory) - output.write("\n") - - output.write(" COMMAND ") - output.write(gyp.common.EncodePOSIXShellList(rule["action"])) - output.write("\n") - - output.write(" DEPENDS ") - WriteVariable(output, inputs_name) - output.write(" ") - output.write(NormjoinPath(path_to_gyp, rule_source)) - output.write("\n") - - # CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives. - # The cwd is the current build directory. - output.write(" WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/") - output.write(path_to_gyp) - output.write("\n") - - output.write(" COMMENT ") - if "message" in rule: - output.write(rule["message"]) - else: - output.write(action_name) - output.write("\n") - - output.write(" VERBATIM\n") - output.write(")\n") - - UnsetVariable(output, "RULE_INPUT_PATH") - UnsetVariable(output, "RULE_INPUT_DIRNAME") - UnsetVariable(output, "RULE_INPUT_NAME") - UnsetVariable(output, "RULE_INPUT_ROOT") - UnsetVariable(output, "RULE_INPUT_EXT") - - # add_custom_target - output.write("add_custom_target(") - output.write(rule_name) - output.write(" DEPENDS\n") - for out in var_outputs: - output.write(" ") - output.write(out) - output.write("\n") - output.write("SOURCES ") - WriteVariable(output, inputs_name) - output.write("\n") - for rule_source in rule.get("rule_sources", []): - output.write(" ") - output.write(NormjoinPath(path_to_gyp, rule_source)) - output.write("\n") - output.write(")\n") - - extra_deps.append(rule_name) - - -def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output): - """Write CMake for the 'copies' in the target. - - Args: - target_name: the name of the CMake target being generated. - actions: the Gyp 'actions' dict for this target. 
-        extra_deps: [<cmake_target>] to append with generated targets.
-        path_to_gyp: relative path from CMakeLists.txt being generated to
-            the Gyp file in which the target being generated is defined.
-    """
-    copy_name = target_name + "__copies"
-
-    # CMake gets upset with custom targets with OUTPUT which specify no output.
-    have_copies = any(copy["files"] for copy in copies)
-    if not have_copies:
-        output.write("add_custom_target(")
-        output.write(copy_name)
-        output.write(")\n")
-        extra_deps.append(copy_name)
-        return
-
-    class Copy:
-        def __init__(self, ext, command):
-            self.cmake_inputs = []
-            self.cmake_outputs = []
-            self.gyp_inputs = []
-            self.gyp_outputs = []
-            self.ext = ext
-            self.inputs_name = None
-            self.outputs_name = None
-            self.command = command
-
-    file_copy = Copy("", "copy")
-    dir_copy = Copy("_dirs", "copy_directory")
-
-    for copy in copies:
-        files = copy["files"]
-        destination = copy["destination"]
-        for src in files:
-            path = os.path.normpath(src)
-            basename = os.path.split(path)[1]
-            dst = os.path.join(destination, basename)
-
-            copy = file_copy if os.path.basename(src) else dir_copy
-
-            copy.cmake_inputs.append(NormjoinPathForceCMakeSource(path_to_gyp, src))
-            copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
-            copy.gyp_inputs.append(src)
-            copy.gyp_outputs.append(dst)
-
-    for copy in (file_copy, dir_copy):
-        if copy.cmake_inputs:
-            copy.inputs_name = copy_name + "__input" + copy.ext
-            SetVariableList(output, copy.inputs_name, copy.cmake_inputs)
-
-            copy.outputs_name = copy_name + "__output" + copy.ext
-            SetVariableList(output, copy.outputs_name, copy.cmake_outputs)
-
-    # add_custom_command
-    output.write("add_custom_command(\n")
-
-    output.write("OUTPUT")
-    for copy in (file_copy, dir_copy):
-        if copy.outputs_name:
-            WriteVariable(output, copy.outputs_name, " ")
-    output.write("\n")
-
-    for copy in (file_copy, dir_copy):
-        for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
-            # 'cmake -E copy src dst' will create the 'dst' directory if needed.
- output.write("COMMAND ${CMAKE_COMMAND} -E %s " % copy.command) - output.write(src) - output.write(" ") - output.write(dst) - output.write("\n") - - output.write("DEPENDS") - for copy in (file_copy, dir_copy): - if copy.inputs_name: - WriteVariable(output, copy.inputs_name, " ") - output.write("\n") - - output.write("WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/") - output.write(path_to_gyp) - output.write("\n") - - output.write("COMMENT Copying for ") - output.write(target_name) - output.write("\n") - - output.write("VERBATIM\n") - output.write(")\n") - - # add_custom_target - output.write("add_custom_target(") - output.write(copy_name) - output.write("\n DEPENDS") - for copy in (file_copy, dir_copy): - if copy.outputs_name: - WriteVariable(output, copy.outputs_name, " ") - output.write("\n SOURCES") - if file_copy.inputs_name: - WriteVariable(output, file_copy.inputs_name, " ") - output.write("\n)\n") - - extra_deps.append(copy_name) - - -def CreateCMakeTargetBaseName(qualified_target): - """This is the name we would like the target to have.""" - _, gyp_target_name, gyp_target_toolset = gyp.common.ParseQualifiedTarget( - qualified_target - ) - cmake_target_base_name = gyp_target_name - if gyp_target_toolset and gyp_target_toolset != "target": - cmake_target_base_name += "_" + gyp_target_toolset - return StringToCMakeTargetName(cmake_target_base_name) - - -def CreateCMakeTargetFullName(qualified_target): - """An unambiguous name for the target.""" - gyp_file, gyp_target_name, gyp_target_toolset = gyp.common.ParseQualifiedTarget( - qualified_target - ) - cmake_target_full_name = gyp_file + ":" + gyp_target_name - if gyp_target_toolset and gyp_target_toolset != "target": - cmake_target_full_name += "_" + gyp_target_toolset - return StringToCMakeTargetName(cmake_target_full_name) - - -class CMakeNamer: - """Converts Gyp target names into CMake target names. - - CMake requires that target names be globally unique. One way to ensure - this is to fully qualify the names of the targets. Unfortunately, this - ends up with all targets looking like "chrome_chrome_gyp_chrome" instead - of just "chrome". If this generator were only interested in building, it - would be possible to fully qualify all target names, then create - unqualified target names which depend on all qualified targets which - should have had that name. This is more or less what the 'make' generator - does with aliases. However, one goal of this generator is to create CMake - files for use with IDEs, and fully qualified names are not as user - friendly. - - Since target name collision is rare, we do the above only when required. - - Toolset variants are always qualified from the base, as this is required for - building. However, it also makes sense for an IDE, as it is possible for - defines to be different. 
- """ - - def __init__(self, target_list): - self.cmake_target_base_names_conficting = set() - - cmake_target_base_names_seen = set() - for qualified_target in target_list: - cmake_target_base_name = CreateCMakeTargetBaseName(qualified_target) - - if cmake_target_base_name not in cmake_target_base_names_seen: - cmake_target_base_names_seen.add(cmake_target_base_name) - else: - self.cmake_target_base_names_conficting.add(cmake_target_base_name) - - def CreateCMakeTargetName(self, qualified_target): - base_name = CreateCMakeTargetBaseName(qualified_target) - if base_name in self.cmake_target_base_names_conficting: - return CreateCMakeTargetFullName(qualified_target) - return base_name - - -def WriteTarget( - namer, - qualified_target, - target_dicts, - build_dir, - config_to_use, - options, - generator_flags, - all_qualified_targets, - flavor, - output, -): - # The make generator does this always. - # TODO: It would be nice to be able to tell CMake all dependencies. - circular_libs = generator_flags.get("circular", True) - - if not generator_flags.get("standalone", False): - output.write("\n#") - output.write(qualified_target) - output.write("\n") - - gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target) - rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir) - rel_gyp_dir = os.path.dirname(rel_gyp_file) - - # Relative path from build dir to top dir. - build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir) - # Relative path from build dir to gyp dir. - build_to_gyp = os.path.join(build_to_top, rel_gyp_dir) - - path_from_cmakelists_to_gyp = build_to_gyp - - spec = target_dicts.get(qualified_target, {}) - config = spec.get("configurations", {}).get(config_to_use, {}) - - xcode_settings = None - if flavor == "mac": - xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) - - target_name = spec.get("target_name", "") - target_type = spec.get("type", "") - target_toolset = spec.get("toolset") - - cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type) - if cmake_target_type is None: - print( - "Target %s has unknown target type %s, skipping." - % (target_name, target_type) - ) - return - - SetVariable(output, "TARGET", target_name) - SetVariable(output, "TOOLSET", target_toolset) - - cmake_target_name = namer.CreateCMakeTargetName(qualified_target) - - extra_sources = [] - extra_deps = [] - - # Actions must come first, since they can generate more OBJs for use below. - if "actions" in spec: - WriteActions( - cmake_target_name, - spec["actions"], - extra_sources, - extra_deps, - path_from_cmakelists_to_gyp, - output, - ) - - # Rules must be early like actions. - if "rules" in spec: - WriteRules( - cmake_target_name, - spec["rules"], - extra_sources, - extra_deps, - path_from_cmakelists_to_gyp, - output, - ) - - # Copies - if "copies" in spec: - WriteCopies( - cmake_target_name, - spec["copies"], - extra_deps, - path_from_cmakelists_to_gyp, - output, - ) - - # Target and sources - srcs = spec.get("sources", []) - - # Gyp separates the sheep from the goats based on file extensions. - # A full separation is done here because of flag handing (see below). 
- s_sources = [] - c_sources = [] - cxx_sources = [] - linkable_sources = [] - other_sources = [] - for src in srcs: - _, ext = os.path.splitext(src) - src_type = COMPILABLE_EXTENSIONS.get(ext, None) - src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src) - - if src_type == "s": - s_sources.append(src_norm_path) - elif src_type == "cc": - c_sources.append(src_norm_path) - elif src_type == "cxx": - cxx_sources.append(src_norm_path) - elif Linkable(ext): - linkable_sources.append(src_norm_path) - else: - other_sources.append(src_norm_path) - - for extra_source in extra_sources: - src, real_source = extra_source - _, ext = os.path.splitext(real_source) - src_type = COMPILABLE_EXTENSIONS.get(ext, None) - - if src_type == "s": - s_sources.append(src) - elif src_type == "cc": - c_sources.append(src) - elif src_type == "cxx": - cxx_sources.append(src) - elif Linkable(ext): - linkable_sources.append(src) - else: - other_sources.append(src) - - s_sources_name = None - if s_sources: - s_sources_name = cmake_target_name + "__asm_srcs" - SetVariableList(output, s_sources_name, s_sources) - - c_sources_name = None - if c_sources: - c_sources_name = cmake_target_name + "__c_srcs" - SetVariableList(output, c_sources_name, c_sources) - - cxx_sources_name = None - if cxx_sources: - cxx_sources_name = cmake_target_name + "__cxx_srcs" - SetVariableList(output, cxx_sources_name, cxx_sources) - - linkable_sources_name = None - if linkable_sources: - linkable_sources_name = cmake_target_name + "__linkable_srcs" - SetVariableList(output, linkable_sources_name, linkable_sources) - - other_sources_name = None - if other_sources: - other_sources_name = cmake_target_name + "__other_srcs" - SetVariableList(output, other_sources_name, other_sources) - - # CMake gets upset when executable targets provide no sources. - # http://www.cmake.org/pipermail/cmake/2010-July/038461.html - dummy_sources_name = None - has_sources = ( - s_sources_name - or c_sources_name - or cxx_sources_name - or linkable_sources_name - or other_sources_name - ) - if target_type == "executable" and not has_sources: - dummy_sources_name = cmake_target_name + "__dummy_srcs" - SetVariable( - output, dummy_sources_name, "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c" - ) - output.write('if(NOT EXISTS "') - WriteVariable(output, dummy_sources_name) - output.write('")\n') - output.write(' file(WRITE "') - WriteVariable(output, dummy_sources_name) - output.write('" "")\n') - output.write("endif()\n") - - # CMake is opposed to setting linker directories and considers the practice - # of setting linker directories dangerous. Instead, it favors the use of - # find_library and passing absolute paths to target_link_libraries. - # However, CMake does provide the command link_directories, which adds - # link directories to targets defined after it is called. - # As a result, link_directories must come before the target definition. - # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES. 
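The SetVariable/SetVariableList helpers used throughout this generator funnel through CMakeStringEscape; a minimal sketch of the emitted CMake, assuming only those two helpers:

    import io

    def cmake_escape(s):
        # Same escaping order as CMakeStringEscape above.
        return s.replace("\\", "\\\\").replace(";", "\\;").replace('"', '\\"')

    def set_variable(out, name, value):
        out.write(f'set({name} "{cmake_escape(value)}")\n')

    buf = io.StringIO()
    set_variable(buf, "configuration", "Debug")
    set_variable(buf, "flags", 'a;b"c')
    print(buf.getvalue())
    # set(configuration "Debug")
    # set(flags "a\;b\"c")

The link_directories emission described in the comment above follows.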
- library_dirs = config.get("library_dirs") - if library_dirs is not None: - output.write("link_directories(") - for library_dir in library_dirs: - output.write(" ") - output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir)) - output.write("\n") - output.write(")\n") - - output.write(cmake_target_type.command) - output.write("(") - output.write(cmake_target_name) - - if cmake_target_type.modifier is not None: - output.write(" ") - output.write(cmake_target_type.modifier) - - if s_sources_name: - WriteVariable(output, s_sources_name, " ") - if c_sources_name: - WriteVariable(output, c_sources_name, " ") - if cxx_sources_name: - WriteVariable(output, cxx_sources_name, " ") - if linkable_sources_name: - WriteVariable(output, linkable_sources_name, " ") - if other_sources_name: - WriteVariable(output, other_sources_name, " ") - if dummy_sources_name: - WriteVariable(output, dummy_sources_name, " ") - - output.write(")\n") - - # Let CMake know if the 'all' target should depend on this target. - exclude_from_all = ( - "TRUE" if qualified_target not in all_qualified_targets else "FALSE" - ) - SetTargetProperty(output, cmake_target_name, "EXCLUDE_FROM_ALL", exclude_from_all) - for extra_target_name in extra_deps: - SetTargetProperty( - output, extra_target_name, "EXCLUDE_FROM_ALL", exclude_from_all - ) - - # Output name and location. - if target_type != "none": - # Link as 'C' if there are no other files - if not c_sources and not cxx_sources: - SetTargetProperty(output, cmake_target_name, "LINKER_LANGUAGE", ["C"]) - - # Mark uncompiled sources as uncompiled. - if other_sources_name: - output.write("set_source_files_properties(") - WriteVariable(output, other_sources_name, "") - output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n') - - # Mark object sources as linkable. 
-        if linkable_sources_name:
-            output.write("set_source_files_properties(")
-            WriteVariable(output, linkable_sources_name, "")
-            output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n')
-
-        # Output directory
-        target_output_directory = spec.get("product_dir")
-        if target_output_directory is None:
-            if target_type in ("executable", "loadable_module"):
-                target_output_directory = generator_default_variables["PRODUCT_DIR"]
-            elif target_type == "shared_library":
-                target_output_directory = "${builddir}/lib.${TOOLSET}"
-            elif spec.get("standalone_static_library", False):
-                target_output_directory = generator_default_variables["PRODUCT_DIR"]
-            else:
-                base_path = gyp.common.RelativePath(
-                    os.path.dirname(gyp_file), options.toplevel_dir
-                )
-                target_output_directory = "${obj}.${TOOLSET}"
-                target_output_directory = os.path.join(
-                    target_output_directory, base_path
-                )
-
-        cmake_target_output_directory = NormjoinPathForceCMakeSource(
-            path_from_cmakelists_to_gyp, target_output_directory
-        )
-        SetTargetProperty(
-            output,
-            cmake_target_name,
-            cmake_target_type.property_modifier + "_OUTPUT_DIRECTORY",
-            cmake_target_output_directory,
-        )
-
-        # Output name
-        default_product_prefix = ""
-        default_product_name = target_name
-        default_product_ext = ""
-        if target_type == "static_library":
-            static_library_prefix = generator_default_variables["STATIC_LIB_PREFIX"]
-            default_product_name = RemovePrefix(
-                default_product_name, static_library_prefix
-            )
-            default_product_prefix = static_library_prefix
-            default_product_ext = generator_default_variables["STATIC_LIB_SUFFIX"]
-
-        elif target_type in ("loadable_module", "shared_library"):
-            shared_library_prefix = generator_default_variables["SHARED_LIB_PREFIX"]
-            default_product_name = RemovePrefix(
-                default_product_name, shared_library_prefix
-            )
-            default_product_prefix = shared_library_prefix
-            default_product_ext = generator_default_variables["SHARED_LIB_SUFFIX"]
-
-        elif target_type != "executable":
-            print(
-                "ERROR: What output file should be generated?",
-                "type",
-                target_type,
-                "target",
-                target_name,
-            )
-
-        product_prefix = spec.get("product_prefix", default_product_prefix)
-        product_name = spec.get("product_name", default_product_name)
-        product_ext = spec.get("product_extension")
-        product_ext = "." + product_ext if product_ext else default_product_ext
-
-        SetTargetProperty(output, cmake_target_name, "PREFIX", product_prefix)
-        SetTargetProperty(
-            output,
-            cmake_target_name,
-            cmake_target_type.property_modifier + "_OUTPUT_NAME",
-            product_name,
-        )
-        SetTargetProperty(output, cmake_target_name, "SUFFIX", product_ext)
-
-        # Make the output of this target referenceable as a source.
- cmake_target_output_basename = product_prefix + product_name + product_ext - cmake_target_output = os.path.join( - cmake_target_output_directory, cmake_target_output_basename - ) - SetFileProperty(output, cmake_target_output, "GENERATED", ["TRUE"], "") - - # Includes - includes = config.get("include_dirs") - if includes: - # This (target include directories) is what requires CMake 2.8.8 - includes_name = cmake_target_name + "__include_dirs" - SetVariableList( - output, - includes_name, - [ - NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include) - for include in includes - ], - ) - output.write("set_property(TARGET ") - output.write(cmake_target_name) - output.write(" APPEND PROPERTY INCLUDE_DIRECTORIES ") - WriteVariable(output, includes_name, "") - output.write(")\n") - - # Defines - defines = config.get("defines") - if defines is not None: - SetTargetProperty( - output, cmake_target_name, "COMPILE_DEFINITIONS", defines, ";" - ) - - # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493 - # CMake currently does not have target C and CXX flags. - # So, instead of doing... - - # cflags_c = config.get('cflags_c') - # if cflags_c is not None: - # SetTargetProperty(output, cmake_target_name, - # 'C_COMPILE_FLAGS', cflags_c, ' ') - - # cflags_cc = config.get('cflags_cc') - # if cflags_cc is not None: - # SetTargetProperty(output, cmake_target_name, - # 'CXX_COMPILE_FLAGS', cflags_cc, ' ') - - # Instead we must... - cflags = config.get("cflags", []) - cflags_c = config.get("cflags_c", []) - cflags_cxx = config.get("cflags_cc", []) - if xcode_settings: - cflags = xcode_settings.GetCflags(config_to_use) - cflags_c = xcode_settings.GetCflagsC(config_to_use) - cflags_cxx = xcode_settings.GetCflagsCC(config_to_use) - # cflags_objc = xcode_settings.GetCflagsObjC(config_to_use) - # cflags_objcc = xcode_settings.GetCflagsObjCC(config_to_use) - - if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources): - SetTargetProperty(output, cmake_target_name, "COMPILE_FLAGS", cflags, " ") - - elif c_sources and not (s_sources or cxx_sources): - flags = [] - flags.extend(cflags) - flags.extend(cflags_c) - SetTargetProperty(output, cmake_target_name, "COMPILE_FLAGS", flags, " ") - - elif cxx_sources and not (s_sources or c_sources): - flags = [] - flags.extend(cflags) - flags.extend(cflags_cxx) - SetTargetProperty(output, cmake_target_name, "COMPILE_FLAGS", flags, " ") - - else: - # TODO: This is broken, one cannot generally set properties on files, - # as other targets may require different properties on the same files. 
- if s_sources and cflags: - SetFilesProperty(output, s_sources_name, "COMPILE_FLAGS", cflags, " ") - - if c_sources and (cflags or cflags_c): - flags = [] - flags.extend(cflags) - flags.extend(cflags_c) - SetFilesProperty(output, c_sources_name, "COMPILE_FLAGS", flags, " ") - - if cxx_sources and (cflags or cflags_cxx): - flags = [] - flags.extend(cflags) - flags.extend(cflags_cxx) - SetFilesProperty(output, cxx_sources_name, "COMPILE_FLAGS", flags, " ") - - # Linker flags - ldflags = config.get("ldflags") - if ldflags is not None: - SetTargetProperty(output, cmake_target_name, "LINK_FLAGS", ldflags, " ") - - # XCode settings - xcode_settings = config.get("xcode_settings", {}) - for xcode_setting, xcode_value in xcode_settings.items(): - SetTargetProperty( - output, - cmake_target_name, - "XCODE_ATTRIBUTE_%s" % xcode_setting, - xcode_value, - "" if isinstance(xcode_value, str) else " ", - ) - - # Note on Dependencies and Libraries: - # CMake wants to handle link order, resolving the link line up front. - # Gyp does not retain or enforce specifying enough information to do so. - # So do as other gyp generators and use --start-group and --end-group. - # Give CMake as little information as possible so that it doesn't mess it up. - - # Dependencies - rawDeps = spec.get("dependencies", []) - - static_deps = [] - shared_deps = [] - other_deps = [] - for rawDep in rawDeps: - dep_cmake_name = namer.CreateCMakeTargetName(rawDep) - dep_spec = target_dicts.get(rawDep, {}) - dep_target_type = dep_spec.get("type", None) - - if dep_target_type == "static_library": - static_deps.append(dep_cmake_name) - elif dep_target_type == "shared_library": - shared_deps.append(dep_cmake_name) - else: - other_deps.append(dep_cmake_name) - - # ensure all external dependencies are complete before internal dependencies - # extra_deps currently only depend on their own deps, so otherwise run early - if static_deps or shared_deps or other_deps: - for extra_dep in extra_deps: - output.write("add_dependencies(") - output.write(extra_dep) - output.write("\n") - for deps in (static_deps, shared_deps, other_deps): - for dep in gyp.common.uniquer(deps): - output.write(" ") - output.write(dep) - output.write("\n") - output.write(")\n") - - linkable = target_type in ("executable", "loadable_module", "shared_library") - other_deps.extend(extra_deps) - if other_deps or (not linkable and (static_deps or shared_deps)): - output.write("add_dependencies(") - output.write(cmake_target_name) - output.write("\n") - for dep in gyp.common.uniquer(other_deps): - output.write(" ") - output.write(dep) - output.write("\n") - if not linkable: - for deps in (static_deps, shared_deps): - for lib_dep in gyp.common.uniquer(deps): - output.write(" ") - output.write(lib_dep) - output.write("\n") - output.write(")\n") - - # Libraries - if linkable: - external_libs = [lib for lib in spec.get("libraries", []) if len(lib) > 0] - if external_libs or static_deps or shared_deps: - output.write("target_link_libraries(") - output.write(cmake_target_name) - output.write("\n") - if static_deps: - write_group = circular_libs and len(static_deps) > 1 and flavor != "mac" - if write_group: - output.write("-Wl,--start-group\n") - for dep in gyp.common.uniquer(static_deps): - output.write(" ") - output.write(dep) - output.write("\n") - if write_group: - output.write("-Wl,--end-group\n") - if shared_deps: - for dep in gyp.common.uniquer(shared_deps): - output.write(" ") - output.write(dep) - output.write("\n") - if external_libs: - for lib in 
gyp.common.uniquer(external_libs): - output.write(' "') - output.write(RemovePrefix(lib, "$(SDKROOT)")) - output.write('"\n') - - output.write(")\n") - - UnsetVariable(output, "TOOLSET") - UnsetVariable(output, "TARGET") - - -def GenerateOutputForConfig(target_list, target_dicts, data, params, config_to_use): - options = params["options"] - generator_flags = params["generator_flags"] - flavor = gyp.common.GetFlavor(params) - - # generator_dir: relative path from pwd to where make puts build files. - # Makes migrating from make to cmake easier, cmake doesn't put anything here. - # Each Gyp configuration creates a different CMakeLists.txt file - # to avoid incompatibilities between Gyp and CMake configurations. - generator_dir = os.path.relpath(options.generator_output or ".") - - # output_dir: relative path from generator_dir to the build directory. - output_dir = generator_flags.get("output_dir", "out") - - # build_dir: relative path from source root to our output files. - # e.g. "out/Debug" - build_dir = os.path.normpath(os.path.join(generator_dir, output_dir, config_to_use)) - - toplevel_build = os.path.join(options.toplevel_dir, build_dir) - - output_file = os.path.join(toplevel_build, "CMakeLists.txt") - gyp.common.EnsureDirExists(output_file) - - output = open(output_file, "w") - output.write("cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n") - output.write("cmake_policy(VERSION 2.8.8)\n") - - gyp_file, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1]) - output.write("project(") - output.write(project_target) - output.write(")\n") - - SetVariable(output, "configuration", config_to_use) - - ar = None - cc = None - cxx = None - - make_global_settings = data[gyp_file].get("make_global_settings", []) - build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir) - for key, value in make_global_settings: - if key == "AR": - ar = os.path.join(build_to_top, value) - if key == "CC": - cc = os.path.join(build_to_top, value) - if key == "CXX": - cxx = os.path.join(build_to_top, value) - - ar = gyp.common.GetEnvironFallback(["AR_target", "AR"], ar) - cc = gyp.common.GetEnvironFallback(["CC_target", "CC"], cc) - cxx = gyp.common.GetEnvironFallback(["CXX_target", "CXX"], cxx) - - if ar: - SetVariable(output, "CMAKE_AR", ar) - if cc: - SetVariable(output, "CMAKE_C_COMPILER", cc) - if cxx: - SetVariable(output, "CMAKE_CXX_COMPILER", cxx) - - # The following appears to be as-yet undocumented. - # http://public.kitware.com/Bug/view.php?id=8392 - output.write("enable_language(ASM)\n") - # ASM-ATT does not support .S files. - # output.write('enable_language(ASM-ATT)\n') - - if cc: - SetVariable(output, "CMAKE_ASM_COMPILER", cc) - - SetVariable(output, "builddir", "${CMAKE_CURRENT_BINARY_DIR}") - SetVariable(output, "obj", "${builddir}/obj") - output.write("\n") - - # TODO: Undocumented/unsupported (the CMake Java generator depends on it). - # CMake by default names the object resulting from foo.c to be foo.c.o. - # Gyp traditionally names the object resulting from foo.c foo.o. - # This should be irrelevant, but some targets extract .o files from .a - # and depend on the name of the extracted .o files. - output.write("set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n") - output.write("set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n") - output.write("\n") - - # Force ninja to use rsp files. Otherwise link and ar lines can get too long, - # resulting in 'Argument list too long' errors. - # However, rsp files don't work correctly on Mac. 
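The GetEnvironFallback calls above resolve the toolchain in a first-match-wins fashion; a hedged sketch of that behavior (the helper name and its semantics are assumptions based on the calls shown, not a documented API):

    import os

    def environ_fallback(names, default=None):
        # First environment variable that is set wins; otherwise fall back
        # to the value derived from make_global_settings above.
        for name in names:
            if name in os.environ:
                return os.environ[name]
        return default

    os.environ["CC_target"] = "/usr/bin/clang"
    print(environ_fallback(["CC_target", "CC"], "cc"))  # /usr/bin/clang

Because response files misbehave on Mac, the lines that follow enable CMAKE_NINJA_FORCE_RESPONSE_FILE only for non-mac flavors.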
- if flavor != "mac": - output.write("set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n") - output.write("\n") - - namer = CMakeNamer(target_list) - - # The list of targets upon which the 'all' target should depend. - # CMake has it's own implicit 'all' target, one is not created explicitly. - all_qualified_targets = set() - for build_file in params["build_files"]: - for qualified_target in gyp.common.AllTargets( - target_list, target_dicts, os.path.normpath(build_file) - ): - all_qualified_targets.add(qualified_target) - - for qualified_target in target_list: - if flavor == "mac": - gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target) - spec = target_dicts[qualified_target] - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[gyp_file], spec) - - WriteTarget( - namer, - qualified_target, - target_dicts, - build_dir, - config_to_use, - options, - generator_flags, - all_qualified_targets, - flavor, - output, - ) - - output.close() - - -def PerformBuild(data, configurations, params): - options = params["options"] - generator_flags = params["generator_flags"] - - # generator_dir: relative path from pwd to where make puts build files. - # Makes migrating from make to cmake easier, cmake doesn't put anything here. - generator_dir = os.path.relpath(options.generator_output or ".") - - # output_dir: relative path from generator_dir to the build directory. - output_dir = generator_flags.get("output_dir", "out") - - for config_name in configurations: - # build_dir: relative path from source root to our output files. - # e.g. "out/Debug" - build_dir = os.path.normpath( - os.path.join(generator_dir, output_dir, config_name) - ) - arguments = ["cmake", "-G", "Ninja"] - print(f"Generating [{config_name}]: {arguments}") - subprocess.check_call(arguments, cwd=build_dir) - - arguments = ["ninja", "-C", build_dir] - print(f"Building [{config_name}]: {arguments}") - subprocess.check_call(arguments) - - -def CallGenerateOutputForConfig(arglist): - # Ignore the interrupt signal so that the parent process catches it and - # kills all multiprocessing children. - signal.signal(signal.SIGINT, signal.SIG_IGN) - - target_list, target_dicts, data, params, config_name = arglist - GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) - - -def GenerateOutput(target_list, target_dicts, data, params): - user_config = params.get("generator_flags", {}).get("config", None) - if user_config: - GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) - else: - config_names = target_dicts[target_list[0]]["configurations"] - if params["parallel"]: - try: - pool = multiprocessing.Pool(len(config_names)) - arglists = [] - for config_name in config_names: - arglists.append( - (target_list, target_dicts, data, params, config_name) - ) - pool.map(CallGenerateOutputForConfig, arglists) - except KeyboardInterrupt as e: - pool.terminate() - raise e - else: - for config_name in config_names: - GenerateOutputForConfig( - target_list, target_dicts, data, params, config_name - ) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py deleted file mode 100644 index 5d7f14da9699d..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright (c) 2016 Ben Noordhuis . All rights reserved. 
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import gyp.common -import gyp.xcode_emulation -import json -import os - -generator_additional_non_configuration_keys = [] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] -generator_filelist_paths = None -generator_supports_multiple_toolsets = True -generator_wants_sorted_dependencies = False - -# Lifted from make.py. The actual values don't matter much. -generator_default_variables = { - "CONFIGURATION_NAME": "$(BUILDTYPE)", - "EXECUTABLE_PREFIX": "", - "EXECUTABLE_SUFFIX": "", - "INTERMEDIATE_DIR": "$(obj).$(TOOLSET)/$(TARGET)/geni", - "PRODUCT_DIR": "$(builddir)", - "RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s", - "RULE_INPUT_EXT": "$(suffix $<)", - "RULE_INPUT_NAME": "$(notdir $<)", - "RULE_INPUT_PATH": "$(abspath $<)", - "RULE_INPUT_ROOT": "%(INPUT_ROOT)s", - "SHARED_INTERMEDIATE_DIR": "$(obj)/gen", - "SHARED_LIB_PREFIX": "lib", - "STATIC_LIB_PREFIX": "lib", - "STATIC_LIB_SUFFIX": ".a", -} - - -def IsMac(params): - return gyp.common.GetFlavor(params) == "mac" - - -def CalculateVariables(default_variables, params): - default_variables.setdefault("OS", gyp.common.GetFlavor(params)) - - -def AddCommandsForTarget(cwd, target, params, per_config_commands): - output_dir = params["generator_flags"].get("output_dir", "out") - for configuration_name, configuration in target["configurations"].items(): - if IsMac(params): - xcode_settings = gyp.xcode_emulation.XcodeSettings(target) - cflags = xcode_settings.GetCflags(configuration_name) - cflags_c = xcode_settings.GetCflagsC(configuration_name) - cflags_cc = xcode_settings.GetCflagsCC(configuration_name) - else: - cflags = configuration.get("cflags", []) - cflags_c = configuration.get("cflags_c", []) - cflags_cc = configuration.get("cflags_cc", []) - - cflags_c = cflags + cflags_c - cflags_cc = cflags + cflags_cc - - defines = configuration.get("defines", []) - defines = ["-D" + s for s in defines] - - # TODO(bnoordhuis) Handle generated source files. - extensions = (".c", ".cc", ".cpp", ".cxx") - sources = [s for s in target.get("sources", []) if s.endswith(extensions)] - - def resolve(filename): - return os.path.abspath(os.path.join(cwd, filename)) - - # TODO(bnoordhuis) Handle generated header files. 
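An aside for orientation, not part of the deleted file: each source file ends up as one `{command, directory, file}` object in `compile_commands.json`, which is exactly the dict appended to `per_config_commands` below. A self-contained sketch with hypothetical inputs:

```python
import json
import os

def make_entry(cwd, source, output_dir, cc="cc", defines=(), includes=(), cflags=()):
    # Hypothetical helper mirroring the entry layout the generator emits.
    path = os.path.abspath(os.path.join(cwd, source))
    command = " ".join([cc, *defines, *includes, *cflags, "-c", path])
    return {"command": command, "directory": output_dir, "file": path}

print(json.dumps(make_entry(".", "main.c", "out", defines=["-DNDEBUG"]), indent=2))
```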
- include_dirs = configuration.get("include_dirs", []) - include_dirs = [s for s in include_dirs if not s.startswith("$(obj)")] - includes = ["-I" + resolve(s) for s in include_dirs] - - defines = gyp.common.EncodePOSIXShellList(defines) - includes = gyp.common.EncodePOSIXShellList(includes) - cflags_c = gyp.common.EncodePOSIXShellList(cflags_c) - cflags_cc = gyp.common.EncodePOSIXShellList(cflags_cc) - - commands = per_config_commands.setdefault(configuration_name, []) - for source in sources: - file = resolve(source) - isc = source.endswith(".c") - cc = "cc" if isc else "c++" - cflags = cflags_c if isc else cflags_cc - command = " ".join( - ( - cc, - defines, - includes, - cflags, - "-c", - gyp.common.EncodePOSIXShellArgument(file), - ) - ) - commands.append({"command": command, "directory": output_dir, "file": file}) - - -def GenerateOutput(target_list, target_dicts, data, params): - per_config_commands = {} - for qualified_target, target in target_dicts.items(): - build_file, target_name, toolset = gyp.common.ParseQualifiedTarget( - qualified_target - ) - if IsMac(params): - settings = data[build_file] - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(settings, target) - cwd = os.path.dirname(build_file) - AddCommandsForTarget(cwd, target, params, per_config_commands) - - output_dir = None - try: - # generator_output can be `None` on Windows machines, or even not - # defined in other cases - output_dir = params.get("options").generator_output - except AttributeError: - pass - output_dir = output_dir or params["generator_flags"].get("output_dir", "out") - for configuration_name, commands in per_config_commands.items(): - filename = os.path.join(output_dir, configuration_name, "compile_commands.json") - gyp.common.EnsureDirExists(filename) - fp = open(filename, "w") - json.dump(commands, fp=fp, indent=0, check_circular=False) - - -def PerformBuild(data, configurations, params): - pass diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py deleted file mode 100644 index 99d5c1fd69db3..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - - -import os -import gyp -import gyp.common -import gyp.msvs_emulation -import json - -generator_supports_multiple_toolsets = True - -generator_wants_static_library_dependencies_adjusted = False - -generator_filelist_paths = {} - -generator_default_variables = {} -for dirname in [ - "INTERMEDIATE_DIR", - "SHARED_INTERMEDIATE_DIR", - "PRODUCT_DIR", - "LIB_DIR", - "SHARED_LIB_DIR", -]: - # Some gyp steps fail if these are empty(!). 
- generator_default_variables[dirname] = "dir" -for unused in [ - "RULE_INPUT_PATH", - "RULE_INPUT_ROOT", - "RULE_INPUT_NAME", - "RULE_INPUT_DIRNAME", - "RULE_INPUT_EXT", - "EXECUTABLE_PREFIX", - "EXECUTABLE_SUFFIX", - "STATIC_LIB_PREFIX", - "STATIC_LIB_SUFFIX", - "SHARED_LIB_PREFIX", - "SHARED_LIB_SUFFIX", - "CONFIGURATION_NAME", -]: - generator_default_variables[unused] = "" - - -def CalculateVariables(default_variables, params): - generator_flags = params.get("generator_flags", {}) - for key, val in generator_flags.items(): - default_variables.setdefault(key, val) - default_variables.setdefault("OS", gyp.common.GetFlavor(params)) - - flavor = gyp.common.GetFlavor(params) - if flavor == "win": - gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) - - -def CalculateGeneratorInputInfo(params): - """Calculate the generator specific info that gets fed to input (called by - gyp).""" - generator_flags = params.get("generator_flags", {}) - if generator_flags.get("adjust_static_libraries", False): - global generator_wants_static_library_dependencies_adjusted - generator_wants_static_library_dependencies_adjusted = True - - toplevel = params["options"].toplevel_dir - generator_dir = os.path.relpath(params["options"].generator_output or ".") - # output_dir: relative path from generator_dir to the build directory. - output_dir = generator_flags.get("output_dir", "out") - qualified_out_dir = os.path.normpath( - os.path.join(toplevel, generator_dir, output_dir, "gypfiles") - ) - global generator_filelist_paths - generator_filelist_paths = { - "toplevel": toplevel, - "qualified_out_dir": qualified_out_dir, - } - - -def GenerateOutput(target_list, target_dicts, data, params): - # Map of target -> list of targets it depends on. - edges = {} - - # Queue of targets to visit. - targets_to_visit = target_list[:] - - while len(targets_to_visit) > 0: - target = targets_to_visit.pop() - if target in edges: - continue - edges[target] = [] - - for dep in target_dicts[target].get("dependencies", []): - edges[target].append(dep) - targets_to_visit.append(dep) - - try: - filepath = params["generator_flags"]["output_dir"] - except KeyError: - filepath = "." - filename = os.path.join(filepath, "dump.json") - f = open(filename, "w") - json.dump(edges, f) - f.close() - print("Wrote json to %s." % filename) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py deleted file mode 100644 index 52aeae6050990..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py +++ /dev/null @@ -1,461 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""GYP backend that generates Eclipse CDT settings files. - -This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML -files that can be imported into an Eclipse CDT project. The XML file contains a -list of include paths and symbols (i.e. defines). - -Because a full .cproject definition is not created by this generator, it's not -possible to properly define the include dirs and symbols for each file -individually. Instead, one set of includes/symbols is generated for the entire -project. This works fairly well (and is a vast improvement in general), but may -still result in a few indexer issues here and there. 
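As an aside, the "one set of includes/symbols for the entire project" approach the docstring describes amounts to a union across targets, with the first definition of a macro winning. A rough sketch under the usual gyp configuration-dict shape (helper name made up; the real logic lives in `GetAllIncludeDirectories` and `GetAllDefines` below):

```python
def aggregate(target_dicts, config_name):
    includes, defines = set(), {}
    for target in target_dicts.values():
        config = target.get("configurations", {}).get(config_name, {})
        includes.update(config.get("include_dirs", []))
        for define in config.get("defines", []):
            name, _, value = define.partition("=")
            defines.setdefault(name, value or "1")  # first definition wins
    return sorted(includes), defines
```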
- -This generator has no automated tests, so expect it to be broken. -""" - -from xml.sax.saxutils import escape -import os.path -import subprocess -import gyp -import gyp.common -import gyp.msvs_emulation -import shlex -import xml.etree.ElementTree as ET - -generator_wants_static_library_dependencies_adjusted = False - -generator_default_variables = {} - -for dirname in ["INTERMEDIATE_DIR", "PRODUCT_DIR", "LIB_DIR", "SHARED_LIB_DIR"]: - # Some gyp steps fail if these are empty(!), so we convert them to variables - generator_default_variables[dirname] = "$" + dirname - -for unused in [ - "RULE_INPUT_PATH", - "RULE_INPUT_ROOT", - "RULE_INPUT_NAME", - "RULE_INPUT_DIRNAME", - "RULE_INPUT_EXT", - "EXECUTABLE_PREFIX", - "EXECUTABLE_SUFFIX", - "STATIC_LIB_PREFIX", - "STATIC_LIB_SUFFIX", - "SHARED_LIB_PREFIX", - "SHARED_LIB_SUFFIX", - "CONFIGURATION_NAME", -]: - generator_default_variables[unused] = "" - -# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as -# part of the path when dealing with generated headers. This value will be -# replaced dynamically for each configuration. -generator_default_variables["SHARED_INTERMEDIATE_DIR"] = "$SHARED_INTERMEDIATE_DIR" - - -def CalculateVariables(default_variables, params): - generator_flags = params.get("generator_flags", {}) - for key, val in generator_flags.items(): - default_variables.setdefault(key, val) - flavor = gyp.common.GetFlavor(params) - default_variables.setdefault("OS", flavor) - if flavor == "win": - gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) - - -def CalculateGeneratorInputInfo(params): - """Calculate the generator specific info that gets fed to input (called by - gyp).""" - generator_flags = params.get("generator_flags", {}) - if generator_flags.get("adjust_static_libraries", False): - global generator_wants_static_library_dependencies_adjusted - generator_wants_static_library_dependencies_adjusted = True - - -def GetAllIncludeDirectories( - target_list, - target_dicts, - shared_intermediate_dirs, - config_name, - params, - compiler_path, -): - """Calculate the set of include directories to be used. - - Returns: - A list including all the include_dir's specified for every target followed - by any include directories that were added as cflag compiler options. - """ - - gyp_includes_set = set() - compiler_includes_list = [] - - # Find compiler's default include dirs. - if compiler_path: - command = shlex.split(compiler_path) - command.extend(["-E", "-xc++", "-v", "-"]) - proc = subprocess.Popen( - args=command, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - output = proc.communicate()[1].decode("utf-8") - # Extract the list of include dirs from the output, which has this format: - # ... - # #include "..." search starts here: - # #include <...> search starts here: - # /usr/include/c++/4.6 - # /usr/local/include - # End of search list. - # ... 
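The loop that follows parses exactly this stderr format. As a standalone sketch of the whole technique (assuming a gcc/clang style driver; this is not the generator's own helper):

```python
import shlex
import subprocess

def default_include_dirs(compiler="c++"):
    # Preprocess empty input verbosely; the search list arrives on stderr.
    cmd = shlex.split(compiler) + ["-E", "-x", "c++", "-v", "-"]
    stderr = subprocess.run(cmd, input="", capture_output=True, text=True).stderr
    dirs, in_list = [], False
    for line in stderr.splitlines():
        if line.startswith("#include"):
            in_list = True
        elif line.startswith("End of search list."):
            break
        elif in_list:
            dirs.append(line.strip())
    return dirs
```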
- in_include_list = False - for line in output.splitlines(): - if line.startswith("#include"): - in_include_list = True - continue - if line.startswith("End of search list."): - break - if in_include_list: - include_dir = line.strip() - if include_dir not in compiler_includes_list: - compiler_includes_list.append(include_dir) - - flavor = gyp.common.GetFlavor(params) - if flavor == "win": - generator_flags = params.get("generator_flags", {}) - for target_name in target_list: - target = target_dicts[target_name] - if config_name in target["configurations"]: - config = target["configurations"][config_name] - - # Look for any include dirs that were explicitly added via cflags. This - # may be done in gyp files to force certain includes to come at the end. - # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and - # remove this. - if flavor == "win": - msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags) - cflags = msvs_settings.GetCflags(config_name) - else: - cflags = config["cflags"] - for cflag in cflags: - if cflag.startswith("-I"): - include_dir = cflag[2:] - if include_dir not in compiler_includes_list: - compiler_includes_list.append(include_dir) - - # Find standard gyp include dirs. - if "include_dirs" in config: - include_dirs = config["include_dirs"] - for shared_intermediate_dir in shared_intermediate_dirs: - for include_dir in include_dirs: - include_dir = include_dir.replace( - "$SHARED_INTERMEDIATE_DIR", shared_intermediate_dir - ) - if not os.path.isabs(include_dir): - base_dir = os.path.dirname(target_name) - - include_dir = base_dir + "/" + include_dir - include_dir = os.path.abspath(include_dir) - - gyp_includes_set.add(include_dir) - - # Generate a list that has all the include dirs. - all_includes_list = list(gyp_includes_set) - all_includes_list.sort() - for compiler_include in compiler_includes_list: - if compiler_include not in gyp_includes_set: - all_includes_list.append(compiler_include) - - # All done. - return all_includes_list - - -def GetCompilerPath(target_list, data, options): - """Determine a command that can be used to invoke the compiler. - - Returns: - If this is a gyp project that has explicit make settings, try to determine - the compiler from that. Otherwise, see if a compiler was specified via the - CC_target environment variable. - """ - # First, see if the compiler is configured in make's settings. - build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) - make_global_settings_dict = data[build_file].get("make_global_settings", {}) - for key, value in make_global_settings_dict: - if key in ["CC", "CXX"]: - return os.path.join(options.toplevel_dir, value) - - # Check to see if the compiler was specified as an environment variable. - for key in ["CC_target", "CC", "CXX"]: - compiler = os.environ.get(key) - if compiler: - return compiler - - return "gcc" - - -def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path): - """Calculate the defines for a project. - - Returns: - A dict that includes explicit defines declared in gyp files along with all - of the default defines that the compiler uses. - """ - - # Get defines declared in the gyp files. 
- all_defines = {} - flavor = gyp.common.GetFlavor(params) - if flavor == "win": - generator_flags = params.get("generator_flags", {}) - for target_name in target_list: - target = target_dicts[target_name] - - if flavor == "win": - msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags) - extra_defines = msvs_settings.GetComputedDefines(config_name) - else: - extra_defines = [] - if config_name in target["configurations"]: - config = target["configurations"][config_name] - target_defines = config["defines"] - else: - target_defines = [] - for define in target_defines + extra_defines: - split_define = define.split("=", 1) - if len(split_define) == 1: - split_define.append("1") - if split_define[0].strip() in all_defines: - # Already defined - continue - all_defines[split_define[0].strip()] = split_define[1].strip() - # Get default compiler defines (if possible). - if flavor == "win": - return all_defines # Default defines already processed in the loop above. - if compiler_path: - command = shlex.split(compiler_path) - command.extend(["-E", "-dM", "-"]) - cpp_proc = subprocess.Popen( - args=command, cwd=".", stdin=subprocess.PIPE, stdout=subprocess.PIPE - ) - cpp_output = cpp_proc.communicate()[0].decode("utf-8") - cpp_lines = cpp_output.split("\n") - for cpp_line in cpp_lines: - if not cpp_line.strip(): - continue - cpp_line_parts = cpp_line.split(" ", 2) - key = cpp_line_parts[1] - val = cpp_line_parts[2] if len(cpp_line_parts) >= 3 else "1" - all_defines[key] = val - - return all_defines - - -def WriteIncludePaths(out, eclipse_langs, include_dirs): - """Write the includes section of a CDT settings export file.""" - - out.write( - '
  <section name="org.eclipse.cdt.internal.ui.wizards.' - 'settingswizards.IncludePaths">\n' - ) - out.write('    <language name="holder for library settings"></language>\n') - for lang in eclipse_langs: - out.write('    <language name="%s">\n' % lang) - for include_dir in include_dirs: - out.write( - '      <includepath workspace_path="false">%s</includepath>\n' - % include_dir - ) - out.write("    </language>\n") - out.write("  </section>\n")
- - -def WriteMacros(out, eclipse_langs, defines): - """Write the macros section of a CDT settings export file.""" - - out.write( - '  <section name="org.eclipse.cdt.internal.ui.wizards.' - 'settingswizards.Macros">\n' - ) - out.write('    <language name="holder for library settings"></language>\n') - for lang in eclipse_langs: - out.write('    <language name="%s">\n' % lang) - for key in sorted(defines): - out.write( - "      <macro><name>%s</name><value>%s</value></macro>\n" - % (escape(key), escape(defines[key])) - ) - out.write("    </language>\n") - out.write("  </section>\n")
- - -def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name): - options = params["options"] - generator_flags = params.get("generator_flags", {}) - - # build_dir: relative path from source root to our output files. - # e.g. "out/Debug" - build_dir = os.path.join(generator_flags.get("output_dir", "out"), config_name) - - toplevel_build = os.path.join(options.toplevel_dir, build_dir) - # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the - # SHARED_INTERMEDIATE_DIR. Include both possible locations. - shared_intermediate_dirs = [ - os.path.join(toplevel_build, "obj", "gen"), - os.path.join(toplevel_build, "gen"), - ] - - GenerateCdtSettingsFile( - target_list, - target_dicts, - data, - params, - config_name, - os.path.join(toplevel_build, "eclipse-cdt-settings.xml"), - options, - shared_intermediate_dirs, - ) - GenerateClasspathFile( - target_list, - target_dicts, - options.toplevel_dir, - toplevel_build, - os.path.join(toplevel_build, "eclipse-classpath.xml"), - )
- - -def GenerateCdtSettingsFile( - target_list, - target_dicts, - data, - params, - config_name, - out_name, - options, - shared_intermediate_dirs, -): - gyp.common.EnsureDirExists(out_name) - with open(out_name, "w") as out: - out.write('<?xml version="1.0" encoding="UTF-8"?>\n') - out.write("<cdtprojectproperties>\n") - - eclipse_langs = [ - "C++ Source File", - "C Source File", - "Assembly Source File", - "GNU C++", - "GNU C", - "Assembly", - ] - compiler_path = GetCompilerPath(target_list, data, options) - include_dirs = GetAllIncludeDirectories( - target_list, - target_dicts, - shared_intermediate_dirs, - config_name, - params, - compiler_path, - ) - WriteIncludePaths(out, eclipse_langs, include_dirs) - defines = GetAllDefines( - target_list, target_dicts, data, config_name, params, compiler_path - ) - WriteMacros(out, eclipse_langs, defines) - - out.write("</cdtprojectproperties>\n")
- - -def GenerateClasspathFile( - target_list, target_dicts, toplevel_dir, toplevel_build, out_name -): - """Generates a classpath file suitable for symbol navigation and code - completion of Java code (such as in Android projects) by finding all - .java and .jar files used as action inputs.""" - gyp.common.EnsureDirExists(out_name) - result = ET.Element("classpath") - - def AddElements(kind, paths): - # First, we need to normalize the paths so they are all relative to the - # toplevel dir. - rel_paths = set() - for path in paths: - if os.path.isabs(path): - rel_paths.add(os.path.relpath(path, toplevel_dir)) - else: - rel_paths.add(path) - - for path in sorted(rel_paths): - entry_element = ET.SubElement(result, "classpathentry") - entry_element.set("kind", kind) - entry_element.set("path", path) - - AddElements("lib", GetJavaJars(target_list, target_dicts, toplevel_dir)) - AddElements("src", GetJavaSourceDirs(target_list, target_dicts, toplevel_dir)) - # Include the standard JRE container and a dummy out folder - AddElements("con", ["org.eclipse.jdt.launching.JRE_CONTAINER"]) - # Include a dummy out folder so that Eclipse doesn't use the default /bin - # folder in the root of the project.
- AddElements("output", [os.path.join(toplevel_build, ".eclipse-java-build")]) - - ET.ElementTree(result).write(out_name) - - -def GetJavaJars(target_list, target_dicts, toplevel_dir): - """Generates a sequence of all .jars used as inputs.""" - for target_name in target_list: - target = target_dicts[target_name] - for action in target.get("actions", []): - for input_ in action["inputs"]: - if os.path.splitext(input_)[1] == ".jar" and not input_.startswith("$"): - if os.path.isabs(input_): - yield input_ - else: - yield os.path.join(os.path.dirname(target_name), input_) - - -def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir): - """Generates a sequence of all likely java package root directories.""" - for target_name in target_list: - target = target_dicts[target_name] - for action in target.get("actions", []): - for input_ in action["inputs"]: - if os.path.splitext(input_)[1] == ".java" and not input_.startswith( - "$" - ): - dir_ = os.path.dirname( - os.path.join(os.path.dirname(target_name), input_) - ) - # If there is a parent 'src' or 'java' folder, navigate up to it - - # these are canonical package root names in Chromium. This will - # break if 'src' or 'java' exists in the package structure. This - # could be further improved by inspecting the java file for the - # package name if this proves to be too fragile in practice. - parent_search = dir_ - while os.path.basename(parent_search) not in ["src", "java"]: - parent_search, _ = os.path.split(parent_search) - if not parent_search or parent_search == toplevel_dir: - # Didn't find a known root, just return the original path - yield dir_ - break - else: - yield parent_search - - -def GenerateOutput(target_list, target_dicts, data, params): - """Generate an XML settings file that can be imported into a CDT project.""" - - if params["options"].generator_output: - raise NotImplementedError("--generator_output not implemented for eclipse") - - user_config = params.get("generator_flags", {}).get("config", None) - if user_config: - GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) - else: - config_names = target_dicts[target_list[0]]["configurations"] - for config_name in config_names: - GenerateOutputForConfig( - target_list, target_dicts, data, params, config_name - ) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py deleted file mode 100644 index 4171704c47a4b..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""gypd output module - -This module produces gyp input as its output. Output files are given the -.gypd extension to avoid overwriting the .gyp files that they are generated -from. Internal references to .gyp files (such as those found in -"dependencies" sections) are not adjusted to point to .gypd files instead; -unlike other paths, which are relative to the .gyp or .gypd file, such paths -are relative to the directory from which gyp was run to create the .gypd file. - -This generator module is intended to be a sample and a debugging aid, hence -the "d" for "debug" in .gypd. 
It is useful to inspect the results of the -various merges, expansions, and conditional evaluations performed by gyp -and to see a representation of what would be fed to a generator module. - -It's not advisable to rename .gypd files produced by this module to .gyp, -because they will have all merges, expansions, and evaluations already -performed and the relevant constructs not present in the output; paths to -dependencies may be wrong; and various sections that do not belong in .gyp -files such as such as "included_files" and "*_excluded" will be present. -Output will also be stripped of comments. This is not intended to be a -general-purpose gyp pretty-printer; for that, you probably just want to -run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip -comments but won't do all of the other things done to this module's output. - -The specific formatting of the output generated by this module is subject -to change. -""" - - -import gyp.common -import pprint - - -# These variables should just be spit back out as variable references. -_generator_identity_variables = [ - "CONFIGURATION_NAME", - "EXECUTABLE_PREFIX", - "EXECUTABLE_SUFFIX", - "INTERMEDIATE_DIR", - "LIB_DIR", - "PRODUCT_DIR", - "RULE_INPUT_ROOT", - "RULE_INPUT_DIRNAME", - "RULE_INPUT_EXT", - "RULE_INPUT_NAME", - "RULE_INPUT_PATH", - "SHARED_INTERMEDIATE_DIR", - "SHARED_LIB_DIR", - "SHARED_LIB_PREFIX", - "SHARED_LIB_SUFFIX", - "STATIC_LIB_PREFIX", - "STATIC_LIB_SUFFIX", -] - -# gypd doesn't define a default value for OS like many other generator -# modules. Specify "-D OS=whatever" on the command line to provide a value. -generator_default_variables = {} - -# gypd supports multiple toolsets -generator_supports_multiple_toolsets = True - -# TODO(mark): This always uses <, which isn't right. The input module should -# notify the generator to tell it which phase it is operating in, and this -# module should use < for the early phase and then switch to > for the late -# phase. Bonus points for carrying @ back into the output too. -for v in _generator_identity_variables: - generator_default_variables[v] = "<(%s)" % v - - -def GenerateOutput(target_list, target_dicts, data, params): - output_files = {} - for qualified_target in target_list: - [input_file, target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2] - - if input_file[-4:] != ".gyp": - continue - input_file_stem = input_file[:-4] - output_file = input_file_stem + params["options"].suffix + ".gypd" - - output_files[output_file] = output_files.get(output_file, input_file) - - for output_file, input_file in output_files.items(): - output = open(output_file, "w") - pprint.pprint(data[input_file], output) - output.close() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py deleted file mode 100644 index 8dfb1f1645f77..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""gypsh output module - -gypsh is a GYP shell. It's not really a generator per se. All it does is -fire up an interactive Python session with a few local variables set to the -variables passed to the generator. 
Like gypd, it's intended as a debugging -aid, to facilitate the exploration of .gyp structures after being processed -by the input module. - -The expected usage is "gyp -f gypsh -D OS=desired_os". -""" - - -import code -import sys - - -# All of this stuff about generator variables was lovingly ripped from gypd.py. -# That module has a much better description of what's going on and why. -_generator_identity_variables = [ - "EXECUTABLE_PREFIX", - "EXECUTABLE_SUFFIX", - "INTERMEDIATE_DIR", - "PRODUCT_DIR", - "RULE_INPUT_ROOT", - "RULE_INPUT_DIRNAME", - "RULE_INPUT_EXT", - "RULE_INPUT_NAME", - "RULE_INPUT_PATH", - "SHARED_INTERMEDIATE_DIR", -] - -generator_default_variables = {} - -for v in _generator_identity_variables: - generator_default_variables[v] = "<(%s)" % v - - -def GenerateOutput(target_list, target_dicts, data, params): - locals = { - "target_list": target_list, - "target_dicts": target_dicts, - "data": data, - } - - # Use a banner that looks like the stock Python one and like what - # code.interact uses by default, but tack on something to indicate what - # locals are available, and identify gypsh. - banner = ( - f"Python {sys.version} on {sys.platform}\nlocals.keys() = " - f"{sorted(locals.keys())!r}\ngypsh" - ) - - code.interact(banner, local=locals) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py deleted file mode 100644 index 392d900914dea..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py +++ /dev/null @@ -1,2745 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Notes: -# -# This is all roughly based on the Makefile system used by the Linux -# kernel, but is a non-recursive make -- we put the entire dependency -# graph in front of make and let it figure it out. -# -# The code below generates a separate .mk file for each target, but -# all are sourced by the top-level Makefile. This means that all -# variables in .mk-files clobber one another. Be careful to use := -# where appropriate for immediate evaluation, and similarly to watch -# that you're not relying on a variable value to last between different -# .mk files. -# -# TODOs: -# -# Global settings and utility functions are currently stuffed in the -# toplevel Makefile. It may make sense to generate some .mk files on -# the side to keep the files readable. - - -import os -import re -import subprocess -import sys -import gyp -import gyp.common -import gyp.xcode_emulation -from gyp.common import GetEnvironFallback - -import hashlib - -generator_default_variables = { - "EXECUTABLE_PREFIX": "", - "EXECUTABLE_SUFFIX": "", - "STATIC_LIB_PREFIX": "lib", - "SHARED_LIB_PREFIX": "lib", - "STATIC_LIB_SUFFIX": ".a", - "INTERMEDIATE_DIR": "$(obj).$(TOOLSET)/$(TARGET)/geni", - "SHARED_INTERMEDIATE_DIR": "$(obj)/gen", - "PRODUCT_DIR": "$(builddir)", - "RULE_INPUT_ROOT": "%(INPUT_ROOT)s", # This gets expanded by Python. - "RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s", # This gets expanded by Python. 
- "RULE_INPUT_PATH": "$(abspath $<)", - "RULE_INPUT_EXT": "$(suffix $<)", - "RULE_INPUT_NAME": "$(notdir $<)", - "CONFIGURATION_NAME": "$(BUILDTYPE)", -} - -# Make supports multiple toolsets -generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested() - -# Request sorted dependencies in the order from dependents to dependencies. -generator_wants_sorted_dependencies = False - -# Placates pylint. -generator_additional_non_configuration_keys = [] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] -generator_filelist_paths = None - - -def CalculateVariables(default_variables, params): - """Calculate additional variables for use in the build (called by gyp).""" - flavor = gyp.common.GetFlavor(params) - if flavor == "mac": - default_variables.setdefault("OS", "mac") - default_variables.setdefault("SHARED_LIB_SUFFIX", ".dylib") - default_variables.setdefault( - "SHARED_LIB_DIR", generator_default_variables["PRODUCT_DIR"] - ) - default_variables.setdefault( - "LIB_DIR", generator_default_variables["PRODUCT_DIR"] - ) - - # Copy additional generator configuration data from Xcode, which is shared - # by the Mac Make generator. - import gyp.generator.xcode as xcode_generator - - global generator_additional_non_configuration_keys - generator_additional_non_configuration_keys = getattr( - xcode_generator, "generator_additional_non_configuration_keys", [] - ) - global generator_additional_path_sections - generator_additional_path_sections = getattr( - xcode_generator, "generator_additional_path_sections", [] - ) - global generator_extra_sources_for_rules - generator_extra_sources_for_rules = getattr( - xcode_generator, "generator_extra_sources_for_rules", [] - ) - COMPILABLE_EXTENSIONS.update({".m": "objc", ".mm": "objcxx"}) - else: - operating_system = flavor - if flavor == "android": - operating_system = "linux" # Keep this legacy behavior for now. - default_variables.setdefault("OS", operating_system) - if flavor == "aix": - default_variables.setdefault("SHARED_LIB_SUFFIX", ".a") - elif flavor == "zos": - default_variables.setdefault("SHARED_LIB_SUFFIX", ".x") - COMPILABLE_EXTENSIONS.update({".pli": "pli"}) - else: - default_variables.setdefault("SHARED_LIB_SUFFIX", ".so") - default_variables.setdefault("SHARED_LIB_DIR", "$(builddir)/lib.$(TOOLSET)") - default_variables.setdefault("LIB_DIR", "$(obj).$(TOOLSET)") - - -def CalculateGeneratorInputInfo(params): - """Calculate the generator specific info that gets fed to input (called by - gyp).""" - generator_flags = params.get("generator_flags", {}) - android_ndk_version = generator_flags.get("android_ndk_version", None) - # Android NDK requires a strict link order. - if android_ndk_version: - global generator_wants_sorted_dependencies - generator_wants_sorted_dependencies = True - - output_dir = params["options"].generator_output or params["options"].toplevel_dir - builddir_name = generator_flags.get("output_dir", "out") - qualified_out_dir = os.path.normpath( - os.path.join(output_dir, builddir_name, "gypfiles") - ) - - global generator_filelist_paths - generator_filelist_paths = { - "toplevel": params["options"].toplevel_dir, - "qualified_out_dir": qualified_out_dir, - } - - -# The .d checking code below uses these functions: -# wildcard, sort, foreach, shell, wordlist -# wildcard can handle spaces, the rest can't. -# Since I could find no way to make foreach work with spaces in filenames -# correctly, the .d files have spaces replaced with another character. 
The .d -# file for -# Chromium\ Framework.framework/foo -# is for example -# out/Release/.deps/out/Release/Chromium?Framework.framework/foo -# This is the replacement character. -SPACE_REPLACEMENT = "?" - - -LINK_COMMANDS_LINUX = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) -o $@ $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,--start-group $(LD_INPUTS) $(LIBS) -Wl,--end-group - -# Note: this does not handle spaces in paths -define xargs - $(1) $(word 1,$(2)) -$(if $(word 2,$(2)),$(call xargs,$(1),$(wordlist 2,$(words $(2)),$(2)))) -endef - -define write-to-file - @: >$(1) -$(call xargs,@printf "%s\\n" >>$(1),$(2)) -endef - -OBJ_FILE_LIST := ar-file-list - -define create_archive - rm -f $(1) $(1).$(OBJ_FILE_LIST); mkdir -p `dirname $(1)` - $(call write-to-file,$(1).$(OBJ_FILE_LIST),$(filter %.o,$(2))) - $(AR.$(TOOLSET)) crs $(1) @$(1).$(OBJ_FILE_LIST) -endef - -define create_thin_archive - rm -f $(1) $(OBJ_FILE_LIST); mkdir -p `dirname $(1)` - $(call write-to-file,$(1).$(OBJ_FILE_LIST),$(filter %.o,$(2))) - $(AR.$(TOOLSET)) crsT $(1) @$(1).$(OBJ_FILE_LIST) -endef - -# We support two kinds of shared objects (.so): -# 1) shared_library, which is just bundling together many dependent libraries -# into a link line. -# 2) loadable_module, which is generating a module intended for dlopen(). -# -# They differ only slightly: -# In the former case, we want to package all dependent code into the .so. -# In the latter case, we want to package just the API exposed by the -# outermost module. -# This means shared_library uses --whole-archive, while loadable_module doesn't. -# (Note that --whole-archive is incompatible with the --start-group used in -# normal linking.) - -# Other shared-object link notes: -# - Set SONAME to the library filename so our binaries don't reference -# the local, absolute paths used on the link command-line. 
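An aside to make the shared_library versus loadable_module distinction above concrete: only the input-wrapping flags differ. A hypothetical selector in Python (the real commands are the cmd_solink* definitions that follow):

```python
def solink_input_flags(target_type, inputs):
    # Hypothetical helper; gyp encodes this choice directly in the commands below.
    if target_type == "shared_library":
        # Package all dependent static code into the .so.
        return ["-Wl,--whole-archive", *inputs, "-Wl,--no-whole-archive"]
    # loadable_module: export just the outermost module's API.
    return ["-Wl,--start-group", *inputs, "-Wl,--end-group"]
```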
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -o $@ -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -o $@ -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) -""" # noqa: E501 - -LINK_COMMANDS_MAC = """\ -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -""" # noqa: E501 - -LINK_COMMANDS_ANDROID = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -# Note: this does not handle spaces in paths -define xargs - $(1) $(word 1,$(2)) -$(if $(word 2,$(2)),$(call xargs,$(1),$(wordlist 2,$(words $(2)),$(2)))) -endef - -define write-to-file - @: >$(1) -$(call xargs,@printf "%s\\n" >>$(1),$(2)) -endef - -OBJ_FILE_LIST := ar-file-list - -define create_archive - rm -f $(1) $(1).$(OBJ_FILE_LIST); mkdir -p `dirname $(1)` - $(call write-to-file,$(1).$(OBJ_FILE_LIST),$(filter %.o,$(2))) - $(AR.$(TOOLSET)) crs $(1) @$(1).$(OBJ_FILE_LIST) -endef - -define create_thin_archive - rm -f $(1) $(OBJ_FILE_LIST); mkdir -p `dirname $(1)` - $(call write-to-file,$(1).$(OBJ_FILE_LIST),$(filter %.o,$(2))) - $(AR.$(TOOLSET)) crsT $(1) @$(1).$(OBJ_FILE_LIST) -endef - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. -quiet_cmd_link = LINK($(TOOLSET)) $@ -quiet_cmd_link_host = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) -cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) - -# Other shared-object link notes: -# - Set SONAME to the library filename so our binaries don't reference -# the local, absolute paths used on the link command-line. 
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) -quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -""" # noqa: E501 - - -LINK_COMMANDS_AIX = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -""" # noqa: E501 - - -LINK_COMMANDS_OS400 = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X64 crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X64 crs $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -""" # noqa: E501 - - -LINK_COMMANDS_OS390 = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -""" # noqa: E501 - - -# Header of toplevel Makefile. -# This should go into the build tree, but it's easier to keep it here for now. -SHARED_HEADER = ( - """\ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := %(srcdir)s -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. 
-builddir_name ?= %(builddir)s - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= %(default_configuration)s - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - -%(make_global_settings)s - -CC.target ?= %(CC.target)s -CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) -CXX.target ?= %(CXX.target)s -CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) -LINK.target ?= %(LINK.target)s -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= %(AR.target)s -PLI.target ?= %(PLI.target)s - -# C++ apps need to be linked with g++. -LINK ?= $(CXX.target) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= %(CC.host)s -CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) -CXX.host ?= %(CXX.host)s -CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) -LINK.host ?= %(LINK.host)s -LDFLAGS.host ?= $(LDFLAGS_host) -AR.host ?= %(AR.host)s -PLI.host ?= %(PLI.host)s - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." -empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),""" - + SPACE_REPLACEMENT - + """,$1) -unreplace_spaces = $(subst """ - + SPACE_REPLACEMENT - + """,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = %(makedep_args)s -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \\ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters.""" - r""" -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. 
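The `fixup_dep` definition being assembled here implements the two rewrites, (1) and (2), described in the comment block above. The same transformation in Python, as an illustration with a hypothetical helper name:

```python
import re

def fixup_dep(raw, target):
    # (1) Re-anchor "foobar.o: ..." at the full target path, since ccache
    # and distcc drop the directory part.
    fixed = re.sub(r"^[^:]*:", target + ":", raw, count=1)
    # (2) Append an empty rule per prerequisite so a deleted header becomes
    # a phony rule instead of a fatal "No rule to make target" error.
    prereqs = raw.split(":", 1)[1].replace("\\\n", " ").split()
    return fixed + "".join(p + ":\n" for p in prereqs)

print(fixup_dep("foobar.o: a.h \\\n b.h\n", "out/Debug/obj/foobar.o"))
```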
-touch $(depfile).raw -# Fixup path as in (1).""" + - (r""" -sed -e "s|^$(notdir $@)|$@|" -re 's/\\\\([^$$])/\/\1/g' $(depfile).raw >> $(depfile)""" - if sys.platform == 'win32' else r""" -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)""") + - r""" -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines.""" + - (""" -sed -e 's/\\\\\\\\$$//' -e 's/\\\\\\\\/\\//g' -e 'y| |\\n|' $(depfile).raw |\\""" - if sys.platform == 'win32' else """ -sed -e 's|\\\\||' -e 'y| |\\n|' $(depfile).raw |\\""") + - r""" - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef -""" - """ -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) -o $@ $< $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) -o $@ $< $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -%(extra_commands)s -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@") - -quiet_cmd_symlink = SYMLINK $@ -cmd_symlink = ln -sf "$<" "$@" - -%(link_commands)s -""" # noqa: E501 - r""" -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). -exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))' -""" - """ -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain """ - + SPACE_REPLACEMENT - + """ instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. 
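An aside on `command_changed` above: make has no string-equality function, so the macro substitutes each command into the other and treats them as equal only if both substitutions come out empty. The same idea in Python (sketch only):

```python
def commands_equal(old, new):
    # Both substitutions yield "" only when the strings are identical;
    # this avoids the false positives of the filter-out approach.
    return old.replace(new, "") == "" and new.replace(old, "") == ""

assert commands_equal("gcc -c f.c", "gcc -c f.c")
assert not commands_equal("gcc -c f.c", "gcc -O2 -c f.c")
```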
-define do_postbuilds - @E=0;\\ - for p in $(POSTBUILDS); do\\ - eval $$p;\\ - E=$$?;\\ - if [ $$E -ne 0 ]; then\\ - break;\\ - fi;\\ - done;\\ - if [ $$E -ne 0 ]; then\\ - rm -rf "$@";\\ - exit $$E;\\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains """ - + SPACE_REPLACEMENT - + """ for -# spaces already and dirx strips the """ - + SPACE_REPLACEMENT - + """ characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "%(default_target)s" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: %(default_target)s -%(default_target)s: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -""" # noqa: E501 -) - -SHARED_HEADER_MAC_COMMANDS = """ -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. -quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. 
-quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" -""" # noqa: E501 - - -def WriteRootHeaderSuffixRules(writer): - extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower) - - writer.write("# Suffix rules, putting all outputs into $(obj).\n") - for ext in extensions: - writer.write("$(obj).$(TOOLSET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n" % ext) - writer.write("\t@$(call do_cmd,%s,1)\n" % COMPILABLE_EXTENSIONS[ext]) - - writer.write("\n# Try building from generated source, too.\n") - for ext in extensions: - writer.write( - "$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n" % ext - ) - writer.write("\t@$(call do_cmd,%s,1)\n" % COMPILABLE_EXTENSIONS[ext]) - writer.write("\n") - for ext in extensions: - writer.write("$(obj).$(TOOLSET)/%%.o: $(obj)/%%%s FORCE_DO_CMD\n" % ext) - writer.write("\t@$(call do_cmd,%s,1)\n" % COMPILABLE_EXTENSIONS[ext]) - writer.write("\n") - - -SHARED_HEADER_OS390_COMMANDS = """ -PLIFLAGS.target ?= -qlp=64 -qlimits=extname=31 $(PLIFLAGS) -PLIFLAGS.host ?= -qlp=64 -qlimits=extname=31 $(PLIFLAGS) - -quiet_cmd_pli = PLI($(TOOLSET)) $@ -cmd_pli = $(PLI.$(TOOLSET)) $(GYP_PLIFLAGS) $(PLIFLAGS.$(TOOLSET)) -c $< && \ - if [ -f $(notdir $@) ]; then /bin/cp $(notdir $@) $@; else true; fi -""" - -SHARED_HEADER_SUFFIX_RULES_COMMENT1 = """\ -# Suffix rules, putting all outputs into $(obj). -""" - - -SHARED_HEADER_SUFFIX_RULES_COMMENT2 = """\ -# Try building from generated source, too. -""" - - -SHARED_FOOTER = """\ -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif -""" - -header = """\ -# This file is generated by gyp; do not edit. - -""" - -# Maps every compilable file extension to the do_cmd that compiles it. -COMPILABLE_EXTENSIONS = { - ".c": "cc", - ".cc": "cxx", - ".cpp": "cxx", - ".cxx": "cxx", - ".s": "cc", - ".S": "cc", -} - - -def Compilable(filename): - """Return true if the file is compilable (should be in OBJS).""" - return any(res for res in (filename.endswith(e) for e in COMPILABLE_EXTENSIONS)) - - -def Linkable(filename): - """Return true if the file is linkable (should be on the link line).""" - return filename.endswith(".o") - - -def Target(filename): - """Translate a compilable filename to its .o target.""" - return os.path.splitext(filename)[0] + ".o" - - -def EscapeShellArgument(s): - """Quotes an argument so that it will be interpreted literally by a POSIX - shell. Taken from - http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python - """ - return "'" + s.replace("'", "'\\''") + "'" - - -def EscapeMakeVariableExpansion(s): - """Make has its own variable expansion syntax using $. 
We must escape it for - string to be interpreted literally.""" - return s.replace("$", "$$") - - -def EscapeCppDefine(s): - """Escapes a CPP define so that it will reach the compiler unaltered.""" - s = EscapeShellArgument(s) - s = EscapeMakeVariableExpansion(s) - # '#' characters must be escaped even embedded in a string, else Make will - # treat it as the start of a comment. - return s.replace("#", r"\#") - - -def QuoteIfNecessary(string): - """TODO: Should this ideally be replaced with one or more of the above - functions?""" - if '"' in string: - string = '"' + string.replace('"', '\\"') + '"' - return string - -def replace_sep(string): - if sys.platform == 'win32': - string = string.replace('\\\\', '/').replace('\\', '/') - return string - -def StringToMakefileVariable(string): - """Convert a string to a value that is acceptable as a make variable name.""" - return re.sub("[^a-zA-Z0-9_]", "_", string) - - -srcdir_prefix = "" - - -def Sourceify(path): - """Convert a path to its source directory form.""" - if "$(" in path: - return path - if os.path.isabs(path): - return path - return srcdir_prefix + path - - -def QuoteSpaces(s, quote=r"\ "): - return s.replace(" ", quote) - - -def SourceifyAndQuoteSpaces(path): - """Convert a path to its source directory form and quote spaces.""" - return QuoteSpaces(Sourceify(path)) - - -# Map from qualified target to path to output. -target_outputs = {} -# Map from qualified target to any linkable output. A subset -# of target_outputs. E.g. when mybinary depends on liba, we want to -# include liba in the linker line; when otherbinary depends on -# mybinary, we just want to build mybinary first. -target_link_deps = {} - - -class MakefileWriter: - """MakefileWriter packages up the writing of one target-specific foobar.mk. - - Its only real entry point is Write(), and is mostly used for namespacing. - """ - - def __init__(self, generator_flags, flavor): - self.generator_flags = generator_flags - self.flavor = flavor - - self.suffix_rules_srcdir = {} - self.suffix_rules_objdir1 = {} - self.suffix_rules_objdir2 = {} - - # Generate suffix rules for all compilable extensions. - for ext in COMPILABLE_EXTENSIONS: - # Suffix rules for source folder. - self.suffix_rules_srcdir.update( - { - ext: ( - """\ -$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD -\t@$(call do_cmd,%s,1) -""" - % (ext, COMPILABLE_EXTENSIONS[ext]) - ) - } - ) - - # Suffix rules for generated source files. - self.suffix_rules_objdir1.update( - { - ext: ( - """\ -$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD -\t@$(call do_cmd,%s,1) -""" - % (ext, COMPILABLE_EXTENSIONS[ext]) - ) - } - ) - self.suffix_rules_objdir2.update( - { - ext: ( - """\ -$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD -\t@$(call do_cmd,%s,1) -""" - % (ext, COMPILABLE_EXTENSIONS[ext]) - ) - } - ) - - def Write( - self, qualified_target, base_path, output_filename, spec, configs, part_of_all - ): - """The main entry point: writes a .mk file for a single target. 
- - Arguments: - qualified_target: target we're generating - base_path: path relative to source root we're building in, used to resolve - target-relative paths - output_filename: output .mk file name to write - spec, configs: gyp info - part_of_all: flag indicating this target is part of 'all' - """ - gyp.common.EnsureDirExists(output_filename) - - self.fp = open(output_filename, "w") - - self.fp.write(header) - - self.qualified_target = qualified_target - self.path = base_path - self.target = spec["target_name"] - self.type = spec["type"] - self.toolset = spec["toolset"] - - self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) - if self.flavor == "mac": - self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) - else: - self.xcode_settings = None - - deps, link_deps = self.ComputeDeps(spec) - - # Some of the generation below can add extra output, sources, or - # link dependencies. All of the out params of the functions that - # follow use names like extra_foo. - extra_outputs = [] - extra_sources = [] - extra_link_deps = [] - extra_mac_bundle_resources = [] - mac_bundle_deps = [] - - if self.is_mac_bundle: - self.output = self.ComputeMacBundleOutput(spec) - self.output_binary = self.ComputeMacBundleBinaryOutput(spec) - else: - self.output = self.output_binary = replace_sep(self.ComputeOutput(spec)) - - self.is_standalone_static_library = bool( - spec.get("standalone_static_library", 0) - ) - self._INSTALLABLE_TARGETS = ("executable", "loadable_module", "shared_library") - if self.is_standalone_static_library or self.type in self._INSTALLABLE_TARGETS: - self.alias = os.path.basename(self.output) - install_path = self._InstallableTargetInstallPath() - else: - self.alias = self.output - install_path = self.output - - self.WriteLn("TOOLSET := " + self.toolset) - self.WriteLn("TARGET := " + self.target) - - # Actions must come first, since they can generate more OBJs for use below. - if "actions" in spec: - self.WriteActions( - spec["actions"], - extra_sources, - extra_outputs, - extra_mac_bundle_resources, - part_of_all, - ) - - # Rules must be early like actions. - if "rules" in spec: - self.WriteRules( - spec["rules"], - extra_sources, - extra_outputs, - extra_mac_bundle_resources, - part_of_all, - ) - - if "copies" in spec: - self.WriteCopies(spec["copies"], extra_outputs, part_of_all) - - # Bundle resources. - if self.is_mac_bundle: - all_mac_bundle_resources = ( - spec.get("mac_bundle_resources", []) + extra_mac_bundle_resources - ) - self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps) - self.WriteMacInfoPlist(mac_bundle_deps) - - # Sources. 
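For illustration (not part of the original file): the sources gathered next are compiled through the per-target suffix rules assembled in __init__ above. A minimal sketch of expanding the srcdir template for a ".cc" source, whose do_cmd is "cxx" per COMPILABLE_EXTENSIONS:

    # Sketch: expand the srcdir suffix-rule template for ".cc" sources.
    template = (
        "$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n"
        "\t@$(call do_cmd,%s,1)\n"
    )
    print(template % (".cc", "cxx"))
    # Prints (recipe line starts with a tab):
    # $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
    #     @$(call do_cmd,cxx,1)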
- all_sources = spec.get("sources", []) + extra_sources - if all_sources: - self.WriteSources( - configs, - deps, - all_sources, - extra_outputs, - extra_link_deps, - part_of_all, - gyp.xcode_emulation.MacPrefixHeader( - self.xcode_settings, - lambda p: Sourceify(self.Absolutify(p)), - self.Pchify, - ), - ) - sources = [x for x in all_sources if Compilable(x)] - if sources: - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1) - extensions = {os.path.splitext(s)[1] for s in sources} - for ext in extensions: - if ext in self.suffix_rules_srcdir: - self.WriteLn(self.suffix_rules_srcdir[ext]) - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2) - for ext in extensions: - if ext in self.suffix_rules_objdir1: - self.WriteLn(self.suffix_rules_objdir1[ext]) - for ext in extensions: - if ext in self.suffix_rules_objdir2: - self.WriteLn(self.suffix_rules_objdir2[ext]) - self.WriteLn("# End of this set of suffix rules") - - # Add dependency from bundle to bundle binary. - if self.is_mac_bundle: - mac_bundle_deps.append(self.output_binary) - - self.WriteTarget( - spec, - configs, - deps, - extra_link_deps + link_deps, - mac_bundle_deps, - extra_outputs, - part_of_all, - ) - - # Update global list of target outputs, used in dependency tracking. - target_outputs[qualified_target] = install_path - - # Update global list of link dependencies. - if self.type in ("static_library", "shared_library"): - target_link_deps[qualified_target] = self.output_binary - - # Currently any versions have the same effect, but in future the behavior - # could be different. - if self.generator_flags.get("android_ndk_version", None): - self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps) - - self.fp.close() - - def WriteSubMake(self, output_filename, makefile_path, targets, build_dir): - """Write a "sub-project" Makefile. - - This is a small, wrapper Makefile that calls the top-level Makefile to build - the targets from a single gyp file (i.e. a sub-project). - - Arguments: - output_filename: sub-project Makefile name to write - makefile_path: path to the top-level Makefile - targets: list of "all" targets for this sub-project - build_dir: build output directory, relative to the sub-project - """ - gyp.common.EnsureDirExists(output_filename) - self.fp = open(output_filename, "w") - self.fp.write(header) - # For consistency with other builders, put sub-project build output in the - # sub-project dir (see test/subdirectory/gyptest-subdir-all.py). - self.WriteLn( - "export builddir_name ?= %s" - % replace_sep(os.path.join(os.path.dirname(output_filename), build_dir)) - ) - self.WriteLn(".PHONY: all") - self.WriteLn("all:") - if makefile_path: - makefile_path = " -C " + makefile_path - self.WriteLn("\t$(MAKE){} {}".format(makefile_path, " ".join(targets))) - self.fp.close() - - def WriteActions( - self, - actions, - extra_sources, - extra_outputs, - extra_mac_bundle_resources, - part_of_all, - ): - """Write Makefile code for any 'actions' from the gyp input. 
- - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - actions (used to make other pieces dependent on these - actions) - part_of_all: flag indicating this target is part of 'all' - """ - env = self.GetSortedXcodeEnv() - for action in actions: - name = StringToMakefileVariable( - "{}_{}".format(self.qualified_target, action["action_name"]) - ) - self.WriteLn('### Rules for action "%s":' % action["action_name"]) - inputs = action["inputs"] - outputs = action["outputs"] - - # Build up a list of outputs. - # Collect the output dirs we'll need. - dirs = set() - for out in outputs: - dir = os.path.split(out)[0] - if dir: - dirs.add(dir) - if int(action.get("process_outputs_as_sources", False)): - extra_sources += outputs - if int(action.get("process_outputs_as_mac_bundle_resources", False)): - extra_mac_bundle_resources += outputs - - # Write the actual command. - action_commands = action["action"] - if self.flavor == "mac": - action_commands = [ - gyp.xcode_emulation.ExpandEnvVars(command, env) - for command in action_commands - ] - command = gyp.common.EncodePOSIXShellList(action_commands) - if "message" in action: - self.WriteLn( - "quiet_cmd_{} = ACTION {} $@".format(name, action["message"]) - ) - else: - self.WriteLn(f"quiet_cmd_{name} = ACTION {name} $@") - if len(dirs) > 0: - command = "mkdir -p %s" % " ".join(dirs) + "; " + command - - cd_action = "cd %s; " % Sourceify(self.path or ".") - - # command and cd_action get written to a toplevel variable called - # cmd_foo. Toplevel variables can't handle things that change per - # makefile like $(TARGET), so hardcode the target. - command = command.replace("$(TARGET)", self.target) - cd_action = cd_action.replace("$(TARGET)", self.target) - - # Set LD_LIBRARY_PATH in case the action runs an executable from this - # build which links to shared libs from this build. - # actions run on the host, so they should in theory only use host - # libraries, but until everything is made cross-compile safe, also use - # target libraries. - # TODO(piman): when everything is cross-compile safe, remove lib.target - if self.flavor in {"zos", "aix"}: - self.WriteLn( - "cmd_%s = LIBPATH=$(builddir)/lib.host:" - "$(builddir)/lib.target:$$LIBPATH; " - "export LIBPATH; " - "%s%s" % (name, cd_action, command) - ) - else: - self.WriteLn( - "cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:" - "$(builddir)/lib.target:$$LD_LIBRARY_PATH; " - "export LD_LIBRARY_PATH; " - "%s%s" % (name, cd_action, command) - ) - self.WriteLn() - outputs = [self.Absolutify(o) for o in outputs] - # The makefile rules are all relative to the top dir, but the gyp actions - # are defined relative to their containing dir. This replaces the obj - # variable for the action rule with an absolute version so that the output - # goes in the right place. - # Only write the 'obj' and 'builddir' rules for the "primary" output (:1); - # it's superfluous for the "extra outputs", and this avoids accidentally - # writing duplicate dummy rules for those outputs. - # Same for environment. 
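To make the cmd_ bookkeeping above concrete: for a hypothetical action (the target and command names below are invented for illustration), the non-zos/aix branch writes a single make variable like the one printed by this sketch:

    # Sketch only; "mytarget_gen" and the command are hypothetical.
    name, cd_action, command = "mytarget_gen", "cd foo/bar; ", "python gen.py"
    print(
        "cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:"
        "$(builddir)/lib.target:$$LD_LIBRARY_PATH; "
        "export LD_LIBRARY_PATH; "
        "%s%s" % (name, cd_action, command)
    )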
- self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0])) - self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0])) - self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv()) - - for input in inputs: - assert " " not in input, ( - "Spaces in action input filenames not supported (%s)" % input - ) - for output in outputs: - assert " " not in output, ( - "Spaces in action output filenames not supported (%s)" % output - ) - - # See the comment in WriteCopies about expanding env vars. - outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] - inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] - - self.WriteDoCmd( - outputs, - [Sourceify(self.Absolutify(i)) for i in inputs], - part_of_all=part_of_all, - command=name, - ) - - # Stuff the outputs in a variable so we can refer to them later. - outputs_variable = "action_%s_outputs" % name - self.WriteLn("{} := {}".format(outputs_variable, " ".join(outputs))) - extra_outputs.append("$(%s)" % outputs_variable) - self.WriteLn() - - self.WriteLn() - - def WriteRules( - self, - rules, - extra_sources, - extra_outputs, - extra_mac_bundle_resources, - part_of_all, - ): - """Write Makefile code for any 'rules' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - rules (used to make other pieces dependent on these rules) - part_of_all: flag indicating this target is part of 'all' - """ - env = self.GetSortedXcodeEnv() - for rule in rules: - name = StringToMakefileVariable( - "{}_{}".format(self.qualified_target, rule["rule_name"]) - ) - count = 0 - self.WriteLn("### Generated for rule %s:" % name) - - all_outputs = [] - - for rule_source in rule.get("rule_sources", []): - dirs = set() - (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) - (rule_source_root, rule_source_ext) = os.path.splitext( - rule_source_basename - ) - - outputs = [ - self.ExpandInputRoot(out, rule_source_root, rule_source_dirname) - for out in rule["outputs"] - ] - - for out in outputs: - dir = os.path.dirname(out) - if dir: - dirs.add(dir) - if int(rule.get("process_outputs_as_sources", False)): - extra_sources += outputs - if int(rule.get("process_outputs_as_mac_bundle_resources", False)): - extra_mac_bundle_resources += outputs - inputs = [ - Sourceify(self.Absolutify(i)) - for i in [rule_source] + rule.get("inputs", []) - ] - actions = ["$(call do_cmd,%s_%d)" % (name, count)] - - if name == "resources_grit": - # HACK: This is ugly. Grit intentionally doesn't touch the - # timestamp of its output file when the file doesn't change, - # which is fine in hash-based dependency systems like scons - # and forge, but not kosher in the make world. After some - # discussion, hacking around it here seems like the least - # amount of pain. - actions += ["@touch --no-create $@"] - - # See the comment in WriteCopies about expanding env vars. - outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] - inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] - - outputs = [self.Absolutify(o) for o in outputs] - all_outputs += outputs - # Only write the 'obj' and 'builddir' rules for the "primary" output - # (:1); it's superfluous for the "extra outputs", and this avoids - # accidentally writing duplicate dummy rules for those outputs. 
- self.WriteLn("%s: obj := $(abs_obj)" % outputs[0]) - self.WriteLn("%s: builddir := $(abs_builddir)" % outputs[0]) - self.WriteMakeRule( - outputs, inputs, actions, command="%s_%d" % (name, count) - ) - # Spaces in rule filenames are not supported, but rule variables have - # spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)'). - # The spaces within the variables are valid, so remove the variables - # before checking. - variables_with_spaces = re.compile(r"\$\([^ ]* \$<\)") - for output in outputs: - output = re.sub(variables_with_spaces, "", output) - assert " " not in output, ( - "Spaces in rule filenames not yet supported (%s)" % output - ) - self.WriteLn("all_deps += %s" % " ".join(outputs)) - - action = [ - self.ExpandInputRoot(ac, rule_source_root, rule_source_dirname) - for ac in rule["action"] - ] - mkdirs = "" - if len(dirs) > 0: - mkdirs = "mkdir -p %s; " % " ".join(dirs) - cd_action = "cd %s; " % Sourceify(self.path or ".") - - # action, cd_action, and mkdirs get written to a toplevel variable - # called cmd_foo. Toplevel variables can't handle things that change - # per makefile like $(TARGET), so hardcode the target. - if self.flavor == "mac": - action = [ - gyp.xcode_emulation.ExpandEnvVars(command, env) - for command in action - ] - action = gyp.common.EncodePOSIXShellList(action) - action = action.replace("$(TARGET)", self.target) - cd_action = cd_action.replace("$(TARGET)", self.target) - mkdirs = mkdirs.replace("$(TARGET)", self.target) - - # Set LD_LIBRARY_PATH in case the rule runs an executable from this - # build which links to shared libs from this build. - # rules run on the host, so they should in theory only use host - # libraries, but until everything is made cross-compile safe, also use - # target libraries. - # TODO(piman): when everything is cross-compile safe, remove lib.target - self.WriteLn( - "cmd_%(name)s_%(count)d = LD_LIBRARY_PATH=" - "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; " - "export LD_LIBRARY_PATH; " - "%(cd_action)s%(mkdirs)s%(action)s" - % { - "action": action, - "cd_action": cd_action, - "count": count, - "mkdirs": mkdirs, - "name": name, - } - ) - self.WriteLn( - "quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@" - % {"count": count, "name": name} - ) - self.WriteLn() - count += 1 - - outputs_variable = "rule_%s_outputs" % name - self.WriteList(all_outputs, outputs_variable) - extra_outputs.append("$(%s)" % outputs_variable) - - self.WriteLn("### Finished generating for rule: %s" % name) - self.WriteLn() - self.WriteLn("### Finished generating for all rules") - self.WriteLn("") - - def WriteCopies(self, copies, extra_outputs, part_of_all): - """Write Makefile code for any 'copies' from the gyp input. - - extra_outputs: a list that will be filled in with any outputs of this action - (used to make other pieces dependent on this action) - part_of_all: flag indicating this target is part of 'all' - """ - self.WriteLn("### Generated for copy rule.") - - variable = StringToMakefileVariable(self.qualified_target + "_copies") - outputs = [] - for copy in copies: - for path in copy["files"]: - # Absolutify() may call normpath, and will strip trailing slashes. 
-                path = Sourceify(self.Absolutify(path))
-                filename = os.path.split(path)[1]
-                output = Sourceify(
-                    self.Absolutify(os.path.join(copy["destination"], filename))
-                )
-
-                # If the output path has variables in it, which happens in practice for
-                # 'copies', writing the environment as target-local doesn't work,
-                # because the variables are already needed for the target name.
-                # Copying the environment variables into global make variables doesn't
-                # work either, because then the .d files will potentially contain spaces
-                # after variable expansion, and .d file handling cannot handle spaces.
-                # As a workaround, manually expand variables at gyp time. Since 'copies'
-                # can't run scripts, there's no need to write the env then.
-                # WriteDoCmd() will escape spaces for .d files.
-                env = self.GetSortedXcodeEnv()
-                output = gyp.xcode_emulation.ExpandEnvVars(output, env)
-                path = gyp.xcode_emulation.ExpandEnvVars(path, env)
-                self.WriteDoCmd([output], [path], "copy", part_of_all)
-                outputs.append(output)
-        self.WriteLn(
-            "{} = {}".format(variable, " ".join(QuoteSpaces(o) for o in outputs))
-        )
-        extra_outputs.append("$(%s)" % variable)
-        self.WriteLn()
-
-    def WriteMacBundleResources(self, resources, bundle_deps):
-        """Writes Makefile code for 'mac_bundle_resources'."""
-        self.WriteLn("### Generated for mac_bundle_resources")
-
-        for output, res in gyp.xcode_emulation.GetMacBundleResources(
-            generator_default_variables["PRODUCT_DIR"],
-            self.xcode_settings,
-            [Sourceify(self.Absolutify(r)) for r in resources],
-        ):
-            _, ext = os.path.splitext(output)
-            if ext != ".xcassets":
-                # Make does not support '.xcassets' emulation.
-                self.WriteDoCmd(
-                    [output], [res], "mac_tool,,,copy-bundle-resource", part_of_all=True
-                )
-                bundle_deps.append(output)
-
-    def WriteMacInfoPlist(self, bundle_deps):
-        """Write Makefile code for bundle Info.plist files."""
-        info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
-            generator_default_variables["PRODUCT_DIR"],
-            self.xcode_settings,
-            lambda p: Sourceify(self.Absolutify(p)),
-        )
-        if not info_plist:
-            return
-        if defines:
-            # Create an intermediate file to store preprocessed results.
-            intermediate_plist = "$(obj).$(TOOLSET)/$(TARGET)/" + os.path.basename(
-                info_plist
-            )
-            self.WriteList(
-                defines,
-                intermediate_plist + ": INFOPLIST_DEFINES",
-                "-D",
-                quoter=EscapeCppDefine,
-            )
-            self.WriteMakeRule(
-                [intermediate_plist],
-                [info_plist],
-                [
-                    "$(call do_cmd,infoplist)",
-                    # "Convert" the plist so that any weird whitespace changes from the
-                    # preprocessor do not affect the XML parser in mac_tool.
-                    "@plutil -convert xml1 $@ $@",
-                ],
-            )
-            info_plist = intermediate_plist
-        # plists can contain envvars; substitute them into the file.
-        self.WriteSortedXcodeEnv(
-            out, self.GetSortedXcodeEnv(additional_settings=extra_env)
-        )
-        self.WriteDoCmd(
-            [out], [info_plist], "mac_tool,,,copy-info-plist", part_of_all=True
-        )
-        bundle_deps.append(out)
-
-    def WriteSources(
-        self,
-        configs,
-        deps,
-        sources,
-        extra_outputs,
-        extra_link_deps,
-        part_of_all,
-        precompiled_header,
-    ):
-        """Write Makefile code for any 'sources' from the gyp input.
-        These are source files necessary to build the current target.
-
-        configs, deps, sources: input from gyp.
- extra_outputs: a list of extra outputs this action should be dependent on; - used to serialize action/rules before compilation - extra_link_deps: a list that will be filled in with any outputs of - compilation (to be used in link lines) - part_of_all: flag indicating this target is part of 'all' - """ - - # Write configuration-specific variables for CFLAGS, etc. - for configname in sorted(configs.keys()): - config = configs[configname] - self.WriteList( - config.get("defines"), - "DEFS_%s" % configname, - prefix="-D", - quoter=EscapeCppDefine, - ) - - if self.flavor == "mac": - cflags = self.xcode_settings.GetCflags( - configname, arch=config.get("xcode_configuration_platform") - ) - cflags_c = self.xcode_settings.GetCflagsC(configname) - cflags_cc = self.xcode_settings.GetCflagsCC(configname) - cflags_objc = self.xcode_settings.GetCflagsObjC(configname) - cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname) - else: - cflags = config.get("cflags") - cflags_c = config.get("cflags_c") - cflags_cc = config.get("cflags_cc") - - self.WriteLn("# Flags passed to all source files.") - self.WriteList(cflags, "CFLAGS_%s" % configname) - self.WriteLn("# Flags passed to only C files.") - self.WriteList(cflags_c, "CFLAGS_C_%s" % configname) - self.WriteLn("# Flags passed to only C++ files.") - self.WriteList(cflags_cc, "CFLAGS_CC_%s" % configname) - if self.flavor == "mac": - self.WriteLn("# Flags passed to only ObjC files.") - self.WriteList(cflags_objc, "CFLAGS_OBJC_%s" % configname) - self.WriteLn("# Flags passed to only ObjC++ files.") - self.WriteList(cflags_objcc, "CFLAGS_OBJCC_%s" % configname) - includes = config.get("include_dirs") - if includes: - includes = [Sourceify(self.Absolutify(i)) for i in includes] - self.WriteList(includes, "INCS_%s" % configname, prefix="-I") - - compilable = list(filter(Compilable, sources)) - objs = [self.Objectify(self.Absolutify(Target(c))) for c in compilable] - self.WriteList(objs, "OBJS") - - for obj in objs: - assert " " not in obj, "Spaces in object filenames not supported (%s)" % obj - self.WriteLn( - "# Add to the list of files we specially track " "dependencies for." - ) - self.WriteLn("all_deps += $(OBJS)") - self.WriteLn() - - # Make sure our dependencies are built first. - if deps: - self.WriteMakeRule( - ["$(OBJS)"], - deps, - comment="Make sure our dependencies are built " "before any of us.", - order_only=True, - ) - - # Make sure the actions and rules run first. - # If they generate any extra headers etc., the per-.o file dep tracking - # will catch the proper rebuilds, so order only is still ok here. - if extra_outputs: - self.WriteMakeRule( - ["$(OBJS)"], - extra_outputs, - comment="Make sure our actions/rules run " "before any of us.", - order_only=True, - ) - - pchdeps = precompiled_header.GetObjDependencies(compilable, objs) - if pchdeps: - self.WriteLn("# Dependencies from obj files to their precompiled headers") - for source, obj, gch in pchdeps: - self.WriteLn(f"{obj}: {gch}") - self.WriteLn("# End precompiled header dependencies") - - if objs: - extra_link_deps.append("$(OBJS)") - self.WriteLn( - """\ -# CFLAGS et al overrides must be target-local. 
-# See "Target-specific Variable Values" in the GNU Make manual.""" - ) - self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)") - self.WriteLn( - "$(OBJS): GYP_CFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude("c") + "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_C_$(BUILDTYPE))" - ) - self.WriteLn( - "$(OBJS): GYP_CXXFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude("cc") + "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_CC_$(BUILDTYPE))" - ) - if self.flavor == "mac": - self.WriteLn( - "$(OBJS): GYP_OBJCFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude("m") - + "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_C_$(BUILDTYPE)) " - "$(CFLAGS_OBJC_$(BUILDTYPE))" - ) - self.WriteLn( - "$(OBJS): GYP_OBJCXXFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude("mm") - + "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_CC_$(BUILDTYPE)) " - "$(CFLAGS_OBJCC_$(BUILDTYPE))" - ) - - self.WritePchTargets(precompiled_header.GetPchBuildCommands()) - - # If there are any object files in our input file list, link them into our - # output. - extra_link_deps += [source for source in sources if Linkable(source)] - - self.WriteLn() - - def WritePchTargets(self, pch_commands): - """Writes make rules to compile prefix headers.""" - if not pch_commands: - return - - for gch, lang_flag, lang, input in pch_commands: - extra_flags = { - "c": "$(CFLAGS_C_$(BUILDTYPE))", - "cc": "$(CFLAGS_CC_$(BUILDTYPE))", - "m": "$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))", - "mm": "$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))", - }[lang] - var_name = { - "c": "GYP_PCH_CFLAGS", - "cc": "GYP_PCH_CXXFLAGS", - "m": "GYP_PCH_OBJCFLAGS", - "mm": "GYP_PCH_OBJCXXFLAGS", - }[lang] - self.WriteLn( - f"{gch}: {var_name} := {lang_flag} " + "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "$(CFLAGS_$(BUILDTYPE)) " + extra_flags - ) - - self.WriteLn(f"{gch}: {input} FORCE_DO_CMD") - self.WriteLn("\t@$(call do_cmd,pch_%s,1)" % lang) - self.WriteLn("") - assert " " not in gch, "Spaces in gch filenames not supported (%s)" % gch - self.WriteLn("all_deps += %s" % gch) - self.WriteLn("") - - def ComputeOutputBasename(self, spec): - """Return the 'output basename' of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - 'libfoobar.so' - """ - assert not self.is_mac_bundle - - if self.flavor == "mac" and self.type in ( - "static_library", - "executable", - "shared_library", - "loadable_module", - ): - return self.xcode_settings.GetExecutablePath() - - target = spec["target_name"] - target_prefix = "" - target_ext = "" - if self.type == "static_library": - if target[:3] == "lib": - target = target[3:] - target_prefix = "lib" - target_ext = ".a" - elif self.type in ("loadable_module", "shared_library"): - if target[:3] == "lib": - target = target[3:] - target_prefix = "lib" - if self.flavor == "aix": - target_ext = ".a" - elif self.flavor == "zos": - target_ext = ".x" - else: - target_ext = ".so" - elif self.type == "none": - target = "%s.stamp" % target - elif self.type != "executable": - print( - "ERROR: What output file should be generated?", - "type", - self.type, - "target", - target, - ) - - target_prefix = spec.get("product_prefix", target_prefix) - target = spec.get("product_name", target) - product_ext = spec.get("product_extension") - if product_ext: - target_ext = "." 
+ product_ext - - return target_prefix + target + target_ext - - def _InstallImmediately(self): - return ( - self.toolset == "target" - and self.flavor == "mac" - and self.type - in ("static_library", "executable", "shared_library", "loadable_module") - ) - - def ComputeOutput(self, spec): - """Return the 'output' (full output path) of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - '$(obj)/baz/libfoobar.so' - """ - assert not self.is_mac_bundle - - path = os.path.join("$(obj)." + self.toolset, self.path) - if self.type == "executable" or self._InstallImmediately(): - path = "$(builddir)" - path = spec.get("product_dir", path) - return os.path.join(path, self.ComputeOutputBasename(spec)) - - def ComputeMacBundleOutput(self, spec): - """Return the 'output' (full output path) to a bundle output directory.""" - assert self.is_mac_bundle - path = generator_default_variables["PRODUCT_DIR"] - return os.path.join(path, self.xcode_settings.GetWrapperName()) - - def ComputeMacBundleBinaryOutput(self, spec): - """Return the 'output' (full output path) to the binary in a bundle.""" - path = generator_default_variables["PRODUCT_DIR"] - return os.path.join(path, self.xcode_settings.GetExecutablePath()) - - def ComputeDeps(self, spec): - """Compute the dependencies of a gyp spec. - - Returns a tuple (deps, link_deps), where each is a list of - filenames that will need to be put in front of make for either - building (deps) or linking (link_deps). - """ - deps = [] - link_deps = [] - if "dependencies" in spec: - deps.extend( - [ - target_outputs[dep] - for dep in spec["dependencies"] - if target_outputs[dep] - ] - ) - for dep in spec["dependencies"]: - if dep in target_link_deps: - link_deps.append(target_link_deps[dep]) - deps.extend(link_deps) - # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)? - # This hack makes it work: - # link_deps.extend(spec.get('libraries', [])) - return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) - - def GetSharedObjectFromSidedeck(self, sidedeck): - """Return the shared object files based on sidedeck""" - return re.sub(r"\.x$", ".so", sidedeck) - - def GetUnversionedSidedeckFromSidedeck(self, sidedeck): - """Return the shared object files based on sidedeck""" - return re.sub(r"\.\d+\.x$", ".x", sidedeck) - - def WriteDependencyOnExtraOutputs(self, target, extra_outputs): - self.WriteMakeRule( - [self.output_binary], - extra_outputs, - comment="Build our special outputs first.", - order_only=True, - ) - - def WriteTarget( - self, spec, configs, deps, link_deps, bundle_deps, extra_outputs, part_of_all - ): - """Write Makefile code to produce the final target of the gyp spec. - - spec, configs: input from gyp. 
- deps, link_deps: dependency lists; see ComputeDeps() - extra_outputs: any extra outputs that our target should depend on - part_of_all: flag indicating this target is part of 'all' - """ - - self.WriteLn("### Rules for final target.") - - if extra_outputs: - self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs) - self.WriteMakeRule( - extra_outputs, - deps, - comment=("Preserve order dependency of " "special output on deps."), - order_only=True, - ) - - target_postbuilds = {} - if self.type != "none": - for configname in sorted(configs.keys()): - config = configs[configname] - if self.flavor == "mac": - ldflags = self.xcode_settings.GetLdflags( - configname, - generator_default_variables["PRODUCT_DIR"], - lambda p: Sourceify(self.Absolutify(p)), - arch=config.get("xcode_configuration_platform"), - ) - - # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on. - gyp_to_build = gyp.common.InvertRelativePath(self.path) - target_postbuild = self.xcode_settings.AddImplicitPostbuilds( - configname, - QuoteSpaces( - os.path.normpath(os.path.join(gyp_to_build, self.output)) - ), - QuoteSpaces( - os.path.normpath( - os.path.join(gyp_to_build, self.output_binary) - ) - ), - ) - if target_postbuild: - target_postbuilds[configname] = target_postbuild - else: - ldflags = config.get("ldflags", []) - # Compute an rpath for this output if needed. - if any(dep.endswith(".so") or ".so." in dep for dep in deps): - # We want to get the literal string "$ORIGIN" - # into the link command, so we need lots of escaping. - ldflags.append(r"-Wl,-rpath=\$$ORIGIN/") - ldflags.append(r"-Wl,-rpath-link=\$(builddir)/") - library_dirs = config.get("library_dirs", []) - ldflags += [("-L%s" % library_dir) for library_dir in library_dirs] - self.WriteList(ldflags, "LDFLAGS_%s" % configname) - if self.flavor == "mac": - self.WriteList( - self.xcode_settings.GetLibtoolflags(configname), - "LIBTOOLFLAGS_%s" % configname, - ) - libraries = spec.get("libraries") - if libraries: - # Remove duplicate entries - libraries = gyp.common.uniquer(libraries) - if self.flavor == "mac": - libraries = self.xcode_settings.AdjustLibraries(libraries) - self.WriteList(libraries, "LIBS") - self.WriteLn( - "%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))" - % QuoteSpaces(self.output_binary) - ) - self.WriteLn("%s: LIBS := $(LIBS)" % QuoteSpaces(self.output_binary)) - - if self.flavor == "mac": - self.WriteLn( - "%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))" - % QuoteSpaces(self.output_binary) - ) - - # Postbuild actions. Like actions, but implicitly depend on the target's - # output. - postbuilds = [] - if self.flavor == "mac": - if target_postbuilds: - postbuilds.append("$(TARGET_POSTBUILDS_$(BUILDTYPE))") - postbuilds.extend(gyp.xcode_emulation.GetSpecPostbuildCommands(spec)) - - if postbuilds: - # Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE), - # so we must output its definition first, since we declare variables - # using ":=". - self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv()) - - for configname in target_postbuilds: - self.WriteLn( - "%s: TARGET_POSTBUILDS_%s := %s" - % ( - QuoteSpaces(self.output), - configname, - gyp.common.EncodePOSIXShellList(target_postbuilds[configname]), - ) - ) - - # Postbuilds expect to be run in the gyp file's directory, so insert an - # implicit postbuild to cd to there. 
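Each postbuild below is run through EscapeShellArgument() (defined earlier in this file) unless it already starts with a make variable reference. A quick sketch of that POSIX single-quote trick:

    # Sketch: single-quote an argument, escaping embedded single quotes
    # so the shell interprets the whole string literally.
    def escape(s):
        return "'" + s.replace("'", "'\\''") + "'"

    print(escape("echo it's done"))  # -> 'echo it'\''s done'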
-            postbuilds.insert(0, gyp.common.EncodePOSIXShellList(["cd", self.path]))
-            for i, postbuild in enumerate(postbuilds):
-                if not postbuild.startswith("$"):
-                    postbuilds[i] = EscapeShellArgument(postbuild)
-            self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(self.output))
-            self.WriteLn(
-                "%s: POSTBUILDS := %s"
-                % (QuoteSpaces(self.output), " ".join(postbuilds))
-            )
-
-        # A bundle directory depends on its dependencies such as bundle resources
-        # and bundle binary. When all dependencies have been built, the bundle
-        # needs to be packaged.
-        if self.is_mac_bundle:
-            # If the framework doesn't contain a binary, then nothing depends
-            # on the actions -- make the framework depend on them directly too.
-            self.WriteDependencyOnExtraOutputs(self.output, extra_outputs)
-
-            # Bundle dependencies. Note that the code below adds actions to this
-            # target, so if you move these two lines, move the lines below as well.
-            self.WriteList([QuoteSpaces(dep) for dep in bundle_deps], "BUNDLE_DEPS")
-            self.WriteLn("%s: $(BUNDLE_DEPS)" % QuoteSpaces(self.output))
-
-            # After the framework is built, package it. Needs to happen before
-            # postbuilds, since postbuilds depend on this.
-            if self.type in ("shared_library", "loadable_module"):
-                self.WriteLn(
-                    "\t@$(call do_cmd,mac_package_framework,,,%s)"
-                    % self.xcode_settings.GetFrameworkVersion()
-                )
-
-            # Bundle postbuilds can depend on the whole bundle, so run them after
-            # the bundle is packaged, not merely after the bundle binary is done.
-            if postbuilds:
-                self.WriteLn("\t@$(call do_postbuilds)")
-                postbuilds = [] # Don't write postbuilds for target's output.
-
-            # Needed by test/mac/gyptest-rebuild.py.
-            self.WriteLn("\t@true # No-op, used by tests")
-
-            # Since this target depends on its binary and resources, which are in
-            # nested subfolders, the framework directory will usually be older than
-            # its dependencies. To prevent this rule from executing
-            # on every build (expensive, especially with postbuilds), explicitly
-            # update the time on the framework directory.
- self.WriteLn("\t@touch -c %s" % QuoteSpaces(self.output)) - - if postbuilds: - assert not self.is_mac_bundle, ( - "Postbuilds for bundles should be done " - "on the bundle, not the binary (target '%s')" % self.target - ) - assert "product_dir" not in spec, ( - "Postbuilds do not work with " "custom product_dir" - ) - - if self.type == "executable": - self.WriteLn( - "%s: LD_INPUTS := %s" - % ( - QuoteSpaces(self.output_binary), - " ".join(QuoteSpaces(dep) for dep in link_deps), - ) - ) - if self.toolset == "host" and self.flavor == "android": - self.WriteDoCmd( - [self.output_binary], - link_deps, - "link_host", - part_of_all, - postbuilds=postbuilds, - ) - else: - self.WriteDoCmd( - [self.output_binary], - link_deps, - "link", - part_of_all, - postbuilds=postbuilds, - ) - - elif self.type == "static_library": - for link_dep in link_deps: - assert " " not in link_dep, ( - "Spaces in alink input filenames not supported (%s)" % link_dep - ) - if ( - self.flavor not in ("mac", "openbsd", "netbsd", "win") - and not self.is_standalone_static_library - ): - if self.flavor in ("linux", "android"): - self.WriteMakeRule( - [self.output_binary], - link_deps, - actions=["$(call create_thin_archive,$@,$^)"], - ) - else: - self.WriteDoCmd( - [self.output_binary], - link_deps, - "alink_thin", - part_of_all, - postbuilds=postbuilds, - ) - else: - if self.flavor in ("linux", "android"): - self.WriteMakeRule( - [self.output_binary], - link_deps, - actions=["$(call create_archive,$@,$^)"], - ) - else: - self.WriteDoCmd( - [self.output_binary], - link_deps, - "alink", - part_of_all, - postbuilds=postbuilds, - ) - elif self.type == "shared_library": - self.WriteLn( - "%s: LD_INPUTS := %s" - % ( - QuoteSpaces(self.output_binary), - " ".join(QuoteSpaces(dep) for dep in link_deps), - ) - ) - self.WriteDoCmd( - [self.output_binary], - link_deps, - "solink", - part_of_all, - postbuilds=postbuilds, - ) - # z/OS has a .so target as well as a sidedeck .x target - if self.flavor == "zos": - self.WriteLn( - "%s: %s" - % ( - QuoteSpaces( - self.GetSharedObjectFromSidedeck(self.output_binary) - ), - QuoteSpaces(self.output_binary), - ) - ) - elif self.type == "loadable_module": - for link_dep in link_deps: - assert " " not in link_dep, ( - "Spaces in module input filenames not supported (%s)" % link_dep - ) - if self.toolset == "host" and self.flavor == "android": - self.WriteDoCmd( - [self.output_binary], - link_deps, - "solink_module_host", - part_of_all, - postbuilds=postbuilds, - ) - else: - self.WriteDoCmd( - [self.output_binary], - link_deps, - "solink_module", - part_of_all, - postbuilds=postbuilds, - ) - elif self.type == "none": - # Write a stamp line. - self.WriteDoCmd( - [self.output_binary], deps, "touch", part_of_all, postbuilds=postbuilds - ) - else: - print("WARNING: no output for", self.type, self.target) - - # Add an alias for each target (if there are any outputs). - # Installable target aliases are created below. - if (self.output and self.output != self.target) and ( - self.type not in self._INSTALLABLE_TARGETS - ): - self.WriteMakeRule( - [self.target], [self.output], comment="Add target alias", phony=True - ) - if part_of_all: - self.WriteMakeRule( - ["all"], - [self.target], - comment='Add target alias to "all" target.', - phony=True, - ) - - # Add special-case rules for our installable targets. - # 1) They need to install to the build dir or "product" dir. - # 2) They get shortcuts for building (e.g. "make chrome"). - # 3) They are part of "make all". 
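For a hypothetical executable named "mybinary" that is part of 'all' (the name is invented for illustration), the special-case rules written below come out roughly as this make text:

    # Sketch: roughly the generated make text for a hypothetical
    # "mybinary" executable installed into $(builddir).
    generated = (
        "# Add target alias\n"
        ".PHONY: mybinary\n"
        "mybinary: $(builddir)/mybinary\n"
        "\n"
        '# Add executable to "all" target.\n'
        ".PHONY: all\n"
        "all: $(builddir)/mybinary\n"
    )
    print(generated)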
-        if self.type in self._INSTALLABLE_TARGETS or self.is_standalone_static_library:
-            if self.type == "shared_library":
-                file_desc = "shared library"
-            elif self.type == "static_library":
-                file_desc = "static library"
-            else:
-                file_desc = "executable"
-            install_path = self._InstallableTargetInstallPath()
-            installable_deps = []
-            if self.flavor != "zos":
-                installable_deps.append(self.output)
-            if (
-                self.flavor == "mac"
-                and "product_dir" not in spec
-                and self.toolset == "target"
-            ):
-                # On mac, products are created in install_path immediately.
-                assert install_path == self.output, f"{install_path} != {self.output}"
-
-            # Point the target alias to the final binary output.
-            self.WriteMakeRule(
-                [self.target], [install_path], comment="Add target alias", phony=True
-            )
-            if install_path != self.output:
-                assert not self.is_mac_bundle # See comment a few lines above.
-                self.WriteDoCmd(
-                    [install_path],
-                    [self.output],
-                    "copy",
-                    comment="Copy this to the %s output path." % file_desc,
-                    part_of_all=part_of_all,
-                )
-                if self.flavor != "zos":
-                    installable_deps.append(install_path)
-            if self.flavor == "zos" and self.type == "shared_library":
-                # lib.target/libnode.so has a dependency on $(obj).target/libnode.so
-                self.WriteDoCmd(
-                    [self.GetSharedObjectFromSidedeck(install_path)],
-                    [self.GetSharedObjectFromSidedeck(self.output)],
-                    "copy",
-                    comment="Copy this to the %s output path." % file_desc,
-                    part_of_all=part_of_all,
-                )
-                # Create a symlink of libnode.x to libnode.version.x
-                self.WriteDoCmd(
-                    [self.GetUnversionedSidedeckFromSidedeck(install_path)],
-                    [install_path],
-                    "symlink",
-                    comment="Symlink this to the %s output path." % file_desc,
-                    part_of_all=part_of_all,
-                )
-                # Place libnode.version.so and libnode.x symlink in lib.target dir
-                installable_deps.append(self.GetSharedObjectFromSidedeck(install_path))
-                installable_deps.append(
-                    self.GetUnversionedSidedeckFromSidedeck(install_path)
-                )
-            if self.alias not in (self.output, self.target):
-                self.WriteMakeRule(
-                    [self.alias],
-                    installable_deps,
-                    comment="Short alias for building this %s." % file_desc,
-                    phony=True,
-                )
-            if self.flavor == "zos" and self.type == "shared_library":
-                # Make sure that the .x symlink target is built
-                self.WriteMakeRule(
-                    ["all"],
-                    [
-                        self.GetUnversionedSidedeckFromSidedeck(install_path),
-                        self.GetSharedObjectFromSidedeck(install_path),
-                    ],
-                    comment='Add %s to "all" target.' % file_desc,
-                    phony=True,
-                )
-            elif part_of_all:
-                self.WriteMakeRule(
-                    ["all"],
-                    [install_path],
-                    comment='Add %s to "all" target.' % file_desc,
-                    phony=True,
-                )
-
-    def WriteList(self, value_list, variable=None, prefix="", quoter=QuoteIfNecessary):
-        """Write a variable definition that is a list of values.
-
-        E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
-        foo = blaha blahb
-        but in a pretty-printed style.
-        """
-        values = ""
-        if value_list:
-            value_list = [replace_sep(quoter(prefix + value)) for value in value_list]
-            values = " \\\n\t" + " \\\n\t".join(value_list)
-        self.fp.write(f"{variable} :={values}\n\n")
-
-    def WriteDoCmd(
-        self, outputs, inputs, command, part_of_all, comment=None, postbuilds=False
-    ):
-        """Write a Makefile rule that uses do_cmd.
-
-        This makes the outputs dependent on the command line that was run,
-        as well as supporting the V= make command line flag.
-        """
-        suffix = ""
-        if postbuilds:
-            assert "," not in command
-            suffix = ",,1" # Tell do_cmd to honor $POSTBUILDS
-        self.WriteMakeRule(
-            outputs,
-            inputs,
-            actions=[f"$(call do_cmd,{command}{suffix})"],
-            comment=comment,
-            command=command,
-            force=True,
-        )
-        # Add our outputs to the list of targets we read depfiles from.
-        # all_deps is only used for deps file reading, and for deps files we replace
-        # spaces with ? because escaping doesn't work with make's $(sort) and
-        # other functions.
-        outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
-        self.WriteLn("all_deps += %s" % " ".join(outputs))
-
-    def WriteMakeRule(
-        self,
-        outputs,
-        inputs,
-        actions=None,
-        comment=None,
-        order_only=False,
-        force=False,
-        phony=False,
-        command=None,
-    ):
-        """Write a Makefile rule, with some extra tricks.
-
-        outputs: a list of outputs for the rule (note: this is not directly
-                 supported by make; see comments below)
-        inputs: a list of inputs for the rule
-        actions: a list of shell commands to run for the rule
-        comment: a comment to put in the Makefile above the rule (also useful
-                 for making this Python script's code self-documenting)
-        order_only: if true, makes the dependency order-only
-        force: if true, include FORCE_DO_CMD as an order-only dep
-        phony: if true, the rule does not actually generate the named output, the
-               output is just a name to run the rule
-        command: (optional) command name to generate unambiguous labels
-        """
-        outputs = [QuoteSpaces(o) for o in outputs]
-        inputs = [QuoteSpaces(i) for i in inputs]
-
-        if comment:
-            self.WriteLn("# " + comment)
-        if phony:
-            self.WriteLn(".PHONY: " + " ".join(outputs))
-        if actions:
-            self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
-        force_append = " FORCE_DO_CMD" if force else ""
-
-        if order_only:
-            # Order only rule: Just write a simple rule.
-            # TODO(evanm): just make order_only a list of deps instead of this hack.
-            self.WriteLn(
-                "{}: | {}{}".format(" ".join(outputs), " ".join(inputs), force_append)
-            )
-        elif len(outputs) == 1:
-            # Regular rule, one output: Just write a simple rule.
-            self.WriteLn("{}: {}{}".format(outputs[0], " ".join(inputs), force_append))
-        else:
-            # Regular rule, more than one output: Multiple outputs are tricky in
-            # make. We will write three rules:
-            # - All outputs depend on an intermediate file.
-            # - Make .INTERMEDIATE depend on the intermediate.
-            # - The intermediate file depends on the inputs and executes the
-            #   actual command.
-            # - The intermediate recipe will 'touch' the intermediate file.
-            # - The multi-output rule will have a do-nothing recipe.
-
-            # Hash the target name to avoid generating overlong filenames.
-            cmddigest = hashlib.sha1(
-                (command or self.target).encode("utf-8")
-            ).hexdigest()
-            intermediate = "%s.intermediate" % cmddigest
-            self.WriteLn("{}: {}".format(" ".join(outputs), intermediate))
-            self.WriteLn("\t%s" % "@:")
-            self.WriteLn("{}: {}".format(".INTERMEDIATE", intermediate))
-            self.WriteLn(
-                "{}: {}{}".format(intermediate, " ".join(inputs), force_append)
-            )
-            actions.insert(0, "$(call do_cmd,touch)")
-
-        if actions:
-            for action in actions:
-                self.WriteLn("\t%s" % action)
-        self.WriteLn()
-
-    def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
-        """Write a set of LOCAL_XXX definitions for Android NDK.
-
-        These variable definitions will be used by Android NDK but do nothing for
-        non-Android applications.
-
-        Arguments:
-            module_name: Android NDK module name, which must be unique among all
-                         module names.
- all_sources: A list of source files (will be filtered by Compilable). - link_deps: A list of link dependencies, which must be sorted in - the order from dependencies to dependents. - """ - if self.type not in ("executable", "shared_library", "static_library"): - return - - self.WriteLn("# Variable definitions for Android applications") - self.WriteLn("include $(CLEAR_VARS)") - self.WriteLn("LOCAL_MODULE := " + module_name) - self.WriteLn( - "LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) " - "$(DEFS_$(BUILDTYPE)) " - # LOCAL_CFLAGS is applied to both of C and C++. There is - # no way to specify $(CFLAGS_C_$(BUILDTYPE)) only for C - # sources. - "$(CFLAGS_C_$(BUILDTYPE)) " - # $(INCS_$(BUILDTYPE)) includes the prefix '-I' while - # LOCAL_C_INCLUDES does not expect it. So put it in - # LOCAL_CFLAGS. - "$(INCS_$(BUILDTYPE))" - ) - # LOCAL_CXXFLAGS is obsolete and LOCAL_CPPFLAGS is preferred. - self.WriteLn("LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))") - self.WriteLn("LOCAL_C_INCLUDES :=") - self.WriteLn("LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)") - - # Detect the C++ extension. - cpp_ext = {".cc": 0, ".cpp": 0, ".cxx": 0} - default_cpp_ext = ".cpp" - for filename in all_sources: - ext = os.path.splitext(filename)[1] - if ext in cpp_ext: - cpp_ext[ext] += 1 - if cpp_ext[ext] > cpp_ext[default_cpp_ext]: - default_cpp_ext = ext - self.WriteLn("LOCAL_CPP_EXTENSION := " + default_cpp_ext) - - self.WriteList( - list(map(self.Absolutify, filter(Compilable, all_sources))), - "LOCAL_SRC_FILES", - ) - - # Filter out those which do not match prefix and suffix and produce - # the resulting list without prefix and suffix. - def DepsToModules(deps, prefix, suffix): - modules = [] - for filepath in deps: - filename = os.path.basename(filepath) - if filename.startswith(prefix) and filename.endswith(suffix): - modules.append(filename[len(prefix) : -len(suffix)]) - return modules - - # Retrieve the default value of 'SHARED_LIB_SUFFIX' - params = {"flavor": "linux"} - default_variables = {} - CalculateVariables(default_variables, params) - - self.WriteList( - DepsToModules( - link_deps, - generator_default_variables["SHARED_LIB_PREFIX"], - default_variables["SHARED_LIB_SUFFIX"], - ), - "LOCAL_SHARED_LIBRARIES", - ) - self.WriteList( - DepsToModules( - link_deps, - generator_default_variables["STATIC_LIB_PREFIX"], - generator_default_variables["STATIC_LIB_SUFFIX"], - ), - "LOCAL_STATIC_LIBRARIES", - ) - - if self.type == "executable": - self.WriteLn("include $(BUILD_EXECUTABLE)") - elif self.type == "shared_library": - self.WriteLn("include $(BUILD_SHARED_LIBRARY)") - elif self.type == "static_library": - self.WriteLn("include $(BUILD_STATIC_LIBRARY)") - self.WriteLn() - - def WriteLn(self, text=""): - self.fp.write(text + "\n") - - def GetSortedXcodeEnv(self, additional_settings=None): - return gyp.xcode_emulation.GetSortedXcodeEnv( - self.xcode_settings, - "$(abs_builddir)", - os.path.join("$(abs_srcdir)", self.path), - "$(BUILDTYPE)", - additional_settings, - ) - - def GetSortedXcodePostbuildEnv(self): - # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack. - # TODO(thakis): It would be nice to have some general mechanism instead. - strip_save_file = self.xcode_settings.GetPerTargetSetting( - "CHROMIUM_STRIP_SAVE_FILE", "" - ) - # Even if strip_save_file is empty, explicitly write it. Else a postbuild - # might pick up an export from an earlier target. 
- return self.GetSortedXcodeEnv( - additional_settings={"CHROMIUM_STRIP_SAVE_FILE": strip_save_file} - ) - - def WriteSortedXcodeEnv(self, target, env): - for k, v in env: - # For - # foo := a\ b - # the escaped space does the right thing. For - # export foo := a\ b - # it does not -- the backslash is written to the env as literal character. - # So don't escape spaces in |env[k]|. - self.WriteLn(f"{QuoteSpaces(target)}: export {k} := {v}") - - def Objectify(self, path): - """Convert a path to its output directory form.""" - if "$(" in path: - path = path.replace("$(obj)/", "$(obj).%s/$(TARGET)/" % self.toolset) - if "$(obj)" not in path: - path = f"$(obj).{self.toolset}/$(TARGET)/{path}" - return path - - def Pchify(self, path, lang): - """Convert a prefix header path to its output directory form.""" - path = self.Absolutify(path) - if "$(" in path: - path = path.replace( - "$(obj)/", f"$(obj).{self.toolset}/$(TARGET)/pch-{lang}" - ) - return path - return f"$(obj).{self.toolset}/$(TARGET)/pch-{lang}/{path}" - - def Absolutify(self, path): - """Convert a subdirectory-relative path into a base-relative path. - Skips over paths that contain variables.""" - if "$(" in path: - # Don't call normpath in this case, as it might collapse the - # path too aggressively if it features '..'. However it's still - # important to strip trailing slashes. - return path.rstrip("/") - return os.path.normpath(os.path.join(self.path, path)) - - def ExpandInputRoot(self, template, expansion, dirname): - if "%(INPUT_ROOT)s" not in template and "%(INPUT_DIRNAME)s" not in template: - return template - path = template % { - "INPUT_ROOT": expansion, - "INPUT_DIRNAME": dirname, - } - return path - - def _InstallableTargetInstallPath(self): - """Returns the location of the final output for an installable target.""" - # Functionality removed for all platforms to match Xcode and hoist - # shared libraries into PRODUCT_DIR for users: - # Xcode puts shared_library results into PRODUCT_DIR, and some gyp files - # rely on this. Emulate this behavior for mac. - # if self.type == "shared_library" and ( - # self.flavor != "mac" or self.toolset != "target" - # ): - # # Install all shared libs into a common directory (per toolset) for - # # convenient access with LD_LIBRARY_PATH. 
- # return "$(builddir)/lib.%s/%s" % (self.toolset, self.alias) - if self.flavor == "zos" and self.type == "shared_library": - return "$(builddir)/lib.%s/%s" % (self.toolset, self.alias) - - return "$(builddir)/" + self.alias - - -def WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files): - """Write the target to regenerate the Makefile.""" - options = params["options"] - build_files_args = [ - gyp.common.RelativePath(filename, options.toplevel_dir) - for filename in params["build_files_arg"] - ] - - gyp_binary = gyp.common.FixIfRelativePath( - params["gyp_binary"], options.toplevel_dir - ) - if not gyp_binary.startswith(os.sep): - gyp_binary = os.path.join(".", gyp_binary) - - root_makefile.write( - "quiet_cmd_regen_makefile = ACTION Regenerating $@\n" - "cmd_regen_makefile = cd $(srcdir); %(cmd)s\n" - "%(makefile_name)s: %(deps)s\n" - "\t$(call do_cmd,regen_makefile)\n\n" - % { - "makefile_name": makefile_name, - "deps": replace_sep( - " ".join(SourceifyAndQuoteSpaces(bf) for bf in build_files) - ), - "cmd": replace_sep(gyp.common.EncodePOSIXShellList( - [gyp_binary, "-fmake"] + gyp.RegenerateFlags(options) + build_files_args - )), - } - ) - - -def PerformBuild(data, configurations, params): - options = params["options"] - for config in configurations: - arguments = ["make"] - if options.toplevel_dir and options.toplevel_dir != ".": - arguments += "-C", options.toplevel_dir - arguments.append("BUILDTYPE=" + config) - print(f"Building [{config}]: {arguments}") - subprocess.check_call(arguments) - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params["options"] - flavor = gyp.common.GetFlavor(params) - generator_flags = params.get("generator_flags", {}) - builddir_name = generator_flags.get("output_dir", "out") - android_ndk_version = generator_flags.get("android_ndk_version", None) - default_target = generator_flags.get("default_target", "all") - - def CalculateMakefilePath(build_file, base_name): - """Determine where to write a Makefile for a given gyp file.""" - # Paths in gyp files are relative to the .gyp file, but we want - # paths relative to the source root for the master makefile. Grab - # the path of the .gyp file as the base to relativize against. - # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". - base_path = gyp.common.RelativePath(os.path.dirname(build_file), options.depth) - # We write the file in the base_path directory. - output_file = os.path.join(options.depth, base_path, base_name) - if options.generator_output: - output_file = os.path.join( - options.depth, options.generator_output, base_path, base_name - ) - base_path = gyp.common.RelativePath( - os.path.dirname(build_file), options.toplevel_dir - ) - return base_path, output_file - - # TODO: search for the first non-'Default' target. This can go - # away when we add verification that all targets have the - # necessary configurations. - default_configuration = None - toolsets = {target_dicts[target]["toolset"] for target in target_list} - for target in target_list: - spec = target_dicts[target] - if spec["default_configuration"] != "Default": - default_configuration = spec["default_configuration"] - break - if not default_configuration: - default_configuration = "Default" - - srcdir = "." 
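A few lines below, the compiler and linker defaults are resolved with gyp.common.GetEnvironFallback. A simplified stand-in, under the assumption that it returns the first environment variable that is set and otherwise the given default:

    import os

    def environ_fallback(candidates, default):
        # e.g. environ_fallback(("CC_target", "CC"), "$(CC)")
        for name in candidates:
            if name in os.environ:
                return os.environ[name]
        return default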
- makefile_name = "Makefile" + options.suffix - makefile_path = os.path.join(options.toplevel_dir, makefile_name) - if options.generator_output: - global srcdir_prefix - makefile_path = os.path.join( - options.toplevel_dir, options.generator_output, makefile_name - ) - srcdir = replace_sep(gyp.common.RelativePath(srcdir, options.generator_output)) - srcdir_prefix = "$(srcdir)/" - - flock_command = "flock" - copy_archive_arguments = "-af" - makedep_arguments = "-MMD" - - # wasm-ld doesn't support --start-group/--end-group - link_commands = LINK_COMMANDS_LINUX - if flavor in ["wasi", "wasm"]: - link_commands = link_commands.replace(' -Wl,--start-group', '').replace( - ' -Wl,--end-group', '' - ) - - CC_target = replace_sep(GetEnvironFallback(("CC_target", "CC"), "$(CC)")) - AR_target = replace_sep(GetEnvironFallback(("AR_target", "AR"), "$(AR)")) - CXX_target = replace_sep(GetEnvironFallback(("CXX_target", "CXX"), "$(CXX)")) - LINK_target = replace_sep(GetEnvironFallback(("LINK_target", "LINK"), "$(LINK)")) - PLI_target = replace_sep(GetEnvironFallback(("PLI_target", "PLI"), "pli")) - CC_host = replace_sep(GetEnvironFallback(("CC_host", "CC"), "gcc")) - AR_host = replace_sep(GetEnvironFallback(("AR_host", "AR"), "ar")) - CXX_host = replace_sep(GetEnvironFallback(("CXX_host", "CXX"), "g++")) - LINK_host = replace_sep(GetEnvironFallback(("LINK_host", "LINK"), "$(CXX.host)")) - PLI_host = replace_sep(GetEnvironFallback(("PLI_host", "PLI"), "pli")) - - header_params = { - "default_target": default_target, - "builddir": builddir_name, - "default_configuration": default_configuration, - "flock": flock_command, - "flock_index": 1, - "link_commands": link_commands, - "extra_commands": "", - "srcdir": srcdir, - "copy_archive_args": copy_archive_arguments, - "makedep_args": makedep_arguments, - "CC.target": CC_target, - "AR.target": AR_target, - "CXX.target": CXX_target, - "LINK.target": LINK_target, - "PLI.target": PLI_target, - "CC.host": CC_host, - "AR.host": AR_host, - "CXX.host": CXX_host, - "LINK.host": LINK_host, - "PLI.host": PLI_host, - } - if flavor == "mac": - flock_command = "./gyp-mac-tool flock" - header_params.update( - { - "flock": flock_command, - "flock_index": 2, - "link_commands": LINK_COMMANDS_MAC, - "extra_commands": SHARED_HEADER_MAC_COMMANDS, - } - ) - elif flavor == "android": - header_params.update({"link_commands": LINK_COMMANDS_ANDROID}) - elif flavor == "zos": - copy_archive_arguments = "-fPR" - CC_target = GetEnvironFallback(("CC_target", "CC"), "njsc") - makedep_arguments = "-MMD" - if CC_target == "clang": - CC_host = GetEnvironFallback(("CC_host", "CC"), "clang") - CXX_target = GetEnvironFallback(("CXX_target", "CXX"), "clang++") - CXX_host = GetEnvironFallback(("CXX_host", "CXX"), "clang++") - elif CC_target == "ibm-clang64": - CC_host = GetEnvironFallback(("CC_host", "CC"), "ibm-clang64") - CXX_target = GetEnvironFallback(("CXX_target", "CXX"), "ibm-clang++64") - CXX_host = GetEnvironFallback(("CXX_host", "CXX"), "ibm-clang++64") - elif CC_target == "ibm-clang": - CC_host = GetEnvironFallback(("CC_host", "CC"), "ibm-clang") - CXX_target = GetEnvironFallback(("CXX_target", "CXX"), "ibm-clang++") - CXX_host = GetEnvironFallback(("CXX_host", "CXX"), "ibm-clang++") - else: - # Node.js versions prior to v18: - makedep_arguments = "-qmakedep=gcc" - CC_host = GetEnvironFallback(("CC_host", "CC"), "njsc") - CXX_target = GetEnvironFallback(("CXX_target", "CXX"), "njsc++") - CXX_host = GetEnvironFallback(("CXX_host", "CXX"), "njsc++") - header_params.update( - { - 
"copy_archive_args": copy_archive_arguments, - "makedep_args": makedep_arguments, - "link_commands": LINK_COMMANDS_OS390, - "extra_commands": SHARED_HEADER_OS390_COMMANDS, - "CC.target": CC_target, - "CXX.target": CXX_target, - "CC.host": CC_host, - "CXX.host": CXX_host, - } - ) - elif flavor == "solaris": - copy_archive_arguments = "-pPRf@" - header_params.update( - { - "copy_archive_args": copy_archive_arguments, - "flock": "./gyp-flock-tool flock", - "flock_index": 2, - } - ) - elif flavor == "freebsd": - # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific. - header_params.update({"flock": "lockf"}) - elif flavor == "openbsd": - copy_archive_arguments = "-pPRf" - header_params.update({"copy_archive_args": copy_archive_arguments}) - elif flavor == "aix": - copy_archive_arguments = "-pPRf" - header_params.update( - { - "copy_archive_args": copy_archive_arguments, - "link_commands": LINK_COMMANDS_AIX, - "flock": "./gyp-flock-tool flock", - "flock_index": 2, - } - ) - elif flavor == "os400": - copy_archive_arguments = "-pPRf" - header_params.update( - { - "copy_archive_args": copy_archive_arguments, - "link_commands": LINK_COMMANDS_OS400, - "flock": "./gyp-flock-tool flock", - "flock_index": 2, - } - ) - - build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) - make_global_settings_array = data[build_file].get("make_global_settings", []) - wrappers = {} - for key, value in make_global_settings_array: - if key.endswith("_wrapper"): - wrappers[key[: -len("_wrapper")]] = "$(abspath %s)" % value - make_global_settings = "" - for key, value in make_global_settings_array: - if re.match(".*_wrapper", key): - continue - if value[0] != "$": - value = "$(abspath %s)" % value - wrapper = wrappers.get(key) - if wrapper: - value = f"{wrapper} {value}" - del wrappers[key] - if key in ("CC", "CC.host", "CXX", "CXX.host"): - make_global_settings += ( - "ifneq (,$(filter $(origin %s), undefined default))\n" % key - ) - # Let gyp-time envvars win over global settings. - env_key = key.replace(".", "_") # CC.host -> CC_host - if env_key in os.environ: - value = os.environ[env_key] - make_global_settings += f" {key} = {value}\n" - make_global_settings += "endif\n" - else: - make_global_settings += f"{key} ?= {value}\n" - # TODO(ukai): define cmd when only wrapper is specified in - # make_global_settings. - - header_params["make_global_settings"] = make_global_settings - - gyp.common.EnsureDirExists(makefile_path) - root_makefile = open(makefile_path, "w") - root_makefile.write(SHARED_HEADER % header_params) - # Currently any versions have the same effect, but in future the behavior - # could be different. - if android_ndk_version: - root_makefile.write( - "# Define LOCAL_PATH for build of Android applications.\n" - "LOCAL_PATH := $(call my-dir)\n" - "\n" - ) - for toolset in toolsets: - root_makefile.write("TOOLSET := %s\n" % toolset) - WriteRootHeaderSuffixRules(root_makefile) - - # Put build-time support tools next to the root Makefile. - dest_path = os.path.dirname(makefile_path) - gyp.common.CopyTool(flavor, dest_path) - - # Find the list of targets that derive from the gyp file(s) being built. 
- needed_targets = set() - for build_file in params["build_files"]: - for target in gyp.common.AllTargets(target_list, target_dicts, build_file): - needed_targets.add(target) - - build_files = set() - include_list = set() - for qualified_target in target_list: - build_file, target, toolset = gyp.common.ParseQualifiedTarget(qualified_target) - - this_make_global_settings = data[build_file].get("make_global_settings", []) - assert make_global_settings_array == this_make_global_settings, ( - "make_global_settings needs to be the same for all targets " - f"{this_make_global_settings} vs. {make_global_settings}" - ) - - build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir)) - included_files = data[build_file]["included_files"] - for included_file in included_files: - # The included_files entries are relative to the dir of the build file - # that included them, so we have to undo that and then make them relative - # to the root dir. - relative_include_file = gyp.common.RelativePath( - gyp.common.UnrelativePath(included_file, build_file), - options.toplevel_dir, - ) - abs_include_file = os.path.abspath(relative_include_file) - # If the include file is from the ~/.gyp dir, we should use absolute path - # so that relocating the src dir doesn't break the path. - if params["home_dot_gyp"] and abs_include_file.startswith( - params["home_dot_gyp"] - ): - build_files.add(abs_include_file) - else: - build_files.add(relative_include_file) - - base_path, output_file = CalculateMakefilePath( - build_file, target + "." + toolset + options.suffix + ".mk" - ) - - spec = target_dicts[qualified_target] - configs = spec["configurations"] - - if flavor == "mac": - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) - - writer = MakefileWriter(generator_flags, flavor) - writer.Write( - qualified_target, - base_path, - output_file, - spec, - configs, - part_of_all=qualified_target in needed_targets, - ) - - # Our root_makefile lives at the source root. Compute the relative path - # from there to the output_file for including. - mkfile_rel_path = gyp.common.RelativePath( - output_file, os.path.dirname(makefile_path) - ) - include_list.add(mkfile_rel_path) - - # Write out per-gyp (sub-project) Makefiles. - depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd()) - for build_file in build_files: - # The paths in build_files were relativized above, so undo that before - # testing against the non-relativized items in target_list and before - # calculating the Makefile path. - build_file = os.path.join(depth_rel_path, build_file) - gyp_targets = [ - target_dicts[qualified_target]["target_name"] - for qualified_target in target_list - if qualified_target.startswith(build_file) - and qualified_target in needed_targets - ] - # Only generate Makefiles for gyp files with targets. - if not gyp_targets: - continue - base_path, output_file = CalculateMakefilePath( - build_file, os.path.splitext(os.path.basename(build_file))[0] + ".Makefile" - ) - makefile_rel_path = gyp.common.RelativePath( - os.path.dirname(makefile_path), os.path.dirname(output_file) - ) - writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets, builddir_name) - - # Write out the sorted list of includes. - root_makefile.write("\n") - for include_file in sorted(include_list): - # We wrap each .mk include in an if statement so users can tell make to - # not load a file by setting NO_LOAD. 
The below make code says, only - # load the .mk file if the .mk filename doesn't start with a token in - # NO_LOAD. - root_makefile.write( - "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n" - " $(findstring $(join ^,$(prefix)),\\\n" - " $(join ^," + include_file + ")))),)\n" - ) - root_makefile.write(" include " + include_file + "\n") - root_makefile.write("endif\n") - root_makefile.write("\n") - - if not generator_flags.get("standalone") and generator_flags.get( - "auto_regeneration", True - ): - WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files) - - root_makefile.write(SHARED_FOOTER) - - root_makefile.close() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py deleted file mode 100644 index 6b5b24acc0001..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py +++ /dev/null @@ -1,3976 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - - -import ntpath -import os -import posixpath -import re -import subprocess -import sys - -from collections import OrderedDict - -import gyp.common -import gyp.easy_xml as easy_xml -import gyp.generator.ninja as ninja_generator -import gyp.MSVSNew as MSVSNew -import gyp.MSVSProject as MSVSProject -import gyp.MSVSSettings as MSVSSettings -import gyp.MSVSToolFile as MSVSToolFile -import gyp.MSVSUserFile as MSVSUserFile -import gyp.MSVSUtil as MSVSUtil -import gyp.MSVSVersion as MSVSVersion -from gyp.common import GypError -from gyp.common import OrderedSet - - -# Regular expression for validating Visual Studio GUIDs. If the GUID -# contains lowercase hex letters, MSVS will be fine. However, -# IncrediBuild BuildConsole will parse the solution file, but then -# silently skip building the target causing hard to track down errors. -# Note that this only happens with the BuildConsole, and does not occur -# if IncrediBuild is executed from inside Visual Studio. This regex -# validates that the string looks like a GUID with all uppercase hex -# letters. 
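
# A standalone check of the uppercase-hex rule described above; the pattern
# mirrors VALID_MSVS_GUID_CHARS defined next, and the sample GUIDs are
# invented.
import re

uppercase_guid = re.compile(r"^[A-F0-9\-]+$")
assert uppercase_guid.match("C9857F84-0287-4E63-8B51-9218C0B08AF3")
assert not uppercase_guid.match("c9857f84-0287-4e63-8b51-9218c0b08af3")
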
-VALID_MSVS_GUID_CHARS = re.compile(r"^[A-F0-9\-]+$") - -generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested() - -generator_default_variables = { - "DRIVER_PREFIX": "", - "DRIVER_SUFFIX": ".sys", - "EXECUTABLE_PREFIX": "", - "EXECUTABLE_SUFFIX": ".exe", - "STATIC_LIB_PREFIX": "", - "SHARED_LIB_PREFIX": "", - "STATIC_LIB_SUFFIX": ".lib", - "SHARED_LIB_SUFFIX": ".dll", - "INTERMEDIATE_DIR": "$(IntDir)", - "SHARED_INTERMEDIATE_DIR": "$(OutDir)/obj/global_intermediate", - "OS": "win", - "PRODUCT_DIR": "$(OutDir)", - "LIB_DIR": "$(OutDir)lib", - "RULE_INPUT_ROOT": "$(InputName)", - "RULE_INPUT_DIRNAME": "$(InputDir)", - "RULE_INPUT_EXT": "$(InputExt)", - "RULE_INPUT_NAME": "$(InputFileName)", - "RULE_INPUT_PATH": "$(InputPath)", - "CONFIGURATION_NAME": "$(ConfigurationName)", -} - - -# The msvs specific sections that hold paths -generator_additional_path_sections = [ - "msvs_cygwin_dirs", - "msvs_props", -] - - -generator_additional_non_configuration_keys = [ - "msvs_cygwin_dirs", - "msvs_cygwin_shell", - "msvs_large_pdb", - "msvs_shard", - "msvs_external_builder", - "msvs_external_builder_out_dir", - "msvs_external_builder_build_cmd", - "msvs_external_builder_clean_cmd", - "msvs_external_builder_clcompile_cmd", - "msvs_enable_winrt", - "msvs_requires_importlibrary", - "msvs_enable_winphone", - "msvs_application_type_revision", - "msvs_target_platform_version", - "msvs_target_platform_minversion", -] - -generator_filelist_paths = None - -# List of precompiled header related keys. -precomp_keys = [ - "msvs_precompiled_header", - "msvs_precompiled_source", -] - - -cached_username = None - - -cached_domain = None - - -# TODO(gspencer): Switch the os.environ calls to be -# win32api.GetDomainName() and win32api.GetUserName() once the -# python version in depot_tools has been updated to work on Vista -# 64-bit. -def _GetDomainAndUserName(): - if sys.platform not in ("win32", "cygwin"): - return ("DOMAIN", "USERNAME") - global cached_username - global cached_domain - if not cached_domain or not cached_username: - domain = os.environ.get("USERDOMAIN") - username = os.environ.get("USERNAME") - if not domain or not username: - call = subprocess.Popen( - ["net", "config", "Workstation"], stdout=subprocess.PIPE - ) - config = call.communicate()[0].decode("utf-8") - username_re = re.compile(r"^User name\s+(\S+)", re.MULTILINE) - username_match = username_re.search(config) - if username_match: - username = username_match.group(1) - domain_re = re.compile(r"^Logon domain\s+(\S+)", re.MULTILINE) - domain_match = domain_re.search(config) - if domain_match: - domain = domain_match.group(1) - cached_domain = domain - cached_username = username - return (cached_domain, cached_username) - - -fixpath_prefix = None - - -def _NormalizedSource(source): - """Normalize the path. - - But not if that gets rid of a variable, as this may expand to something - larger than one directory. - - Arguments: - source: The path to be normalize.d - - Returns: - The normalized path. - """ - normalized = os.path.normpath(source) - if source.count("$") == normalized.count("$"): - source = normalized - return source - - -def _FixPath(path, separator="\\"): - """Convert paths to a form that will make sense in a vcproj file. - - Arguments: - path: The path to convert, may contain / etc. - Returns: - The path with all slashes made into backslashes. 
- """ - if ( - fixpath_prefix - and path - and not os.path.isabs(path) - and path[0] != "$" - and not _IsWindowsAbsPath(path) - ): - path = os.path.join(fixpath_prefix, path) - if separator == "\\": - path = path.replace("/", "\\") - path = _NormalizedSource(path) - if separator == "/": - path = path.replace("\\", "/") - if path and path[-1] == separator: - path = path[:-1] - return path - - -def _IsWindowsAbsPath(path): - """ - On Cygwin systems Python needs a little help determining if a path - is an absolute Windows path or not, so that - it does not treat those as relative, which results in bad paths like: - '..\\C:\\\\some_source_code_file.cc' - """ - return path.startswith("c:") or path.startswith("C:") - - -def _FixPaths(paths, separator="\\"): - """Fix each of the paths of the list.""" - return [_FixPath(i, separator) for i in paths] - - -def _ConvertSourcesToFilterHierarchy( - sources, prefix=None, excluded=None, list_excluded=True, msvs_version=None -): - """Converts a list split source file paths into a vcproj folder hierarchy. - - Arguments: - sources: A list of source file paths split. - prefix: A list of source file path layers meant to apply to each of sources. - excluded: A set of excluded files. - msvs_version: A MSVSVersion object. - - Returns: - A hierarchy of filenames and MSVSProject.Filter objects that matches the - layout of the source tree. - For example: - _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']], - prefix=['joe']) - --> - [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']), - MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])] - """ - if not prefix: - prefix = [] - result = [] - excluded_result = [] - folders = OrderedDict() - # Gather files into the final result, excluded, or folders. - for s in sources: - if len(s) == 1: - filename = _NormalizedSource("\\".join(prefix + s)) - if filename in excluded: - excluded_result.append(filename) - else: - result.append(filename) - elif msvs_version and not msvs_version.UsesVcxproj(): - # For MSVS 2008 and earlier, we need to process all files before walking - # the sub folders. - if not folders.get(s[0]): - folders[s[0]] = [] - folders[s[0]].append(s[1:]) - else: - contents = _ConvertSourcesToFilterHierarchy( - [s[1:]], - prefix + [s[0]], - excluded=excluded, - list_excluded=list_excluded, - msvs_version=msvs_version, - ) - contents = MSVSProject.Filter(s[0], contents=contents) - result.append(contents) - # Add a folder for excluded files. - if excluded_result and list_excluded: - excluded_folder = MSVSProject.Filter( - "_excluded_files", contents=excluded_result - ) - result.append(excluded_folder) - - if msvs_version and msvs_version.UsesVcxproj(): - return result - - # Populate all the folders. - for f in folders: - contents = _ConvertSourcesToFilterHierarchy( - folders[f], - prefix=prefix + [f], - excluded=excluded, - list_excluded=list_excluded, - msvs_version=msvs_version, - ) - contents = MSVSProject.Filter(f, contents=contents) - result.append(contents) - return result - - -def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False): - if not value: - return - _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset) - - -def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False): - # TODO(bradnelson): ugly hack, fix this more generally!!! 
- if "Directories" in setting or "Dependencies" in setting: - if type(value) == str: - value = value.replace("/", "\\") - else: - value = [i.replace("/", "\\") for i in value] - if not tools.get(tool_name): - tools[tool_name] = {} - tool = tools[tool_name] - if setting == "CompileAsWinRT": - return - if tool.get(setting): - if only_if_unset: - return - if type(tool[setting]) == list and type(value) == list: - tool[setting] += value - else: - raise TypeError( - 'Appending "%s" to a non-list setting "%s" for tool "%s" is ' - "not allowed, previous value: %s" - % (value, setting, tool_name, str(tool[setting])) - ) - else: - tool[setting] = value - - -def _ConfigTargetVersion(config_data): - return config_data.get("msvs_target_version", "Windows7") - - -def _ConfigPlatform(config_data): - return config_data.get("msvs_configuration_platform", "Win32") - - -def _ConfigBaseName(config_name, platform_name): - if config_name.endswith("_" + platform_name): - return config_name[0 : -len(platform_name) - 1] - else: - return config_name - - -def _ConfigFullName(config_name, config_data): - platform_name = _ConfigPlatform(config_data) - return f"{_ConfigBaseName(config_name, platform_name)}|{platform_name}" - - -def _ConfigWindowsTargetPlatformVersion(config_data, version): - target_ver = config_data.get("msvs_windows_target_platform_version") - if target_ver and re.match(r"^\d+", target_ver): - return target_ver - config_ver = config_data.get("msvs_windows_sdk_version") - vers = [config_ver] if config_ver else version.compatible_sdks - for ver in vers: - for key in [ - r"HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s", - r"HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s", - ]: - sdk_dir = MSVSVersion._RegistryGetValue(key % ver, "InstallationFolder") - if not sdk_dir: - continue - version = MSVSVersion._RegistryGetValue(key % ver, "ProductVersion") or "" - # Find a matching entry in sdk_dir\include. - expected_sdk_dir = r"%s\include" % sdk_dir - names = sorted( - ( - x - for x in ( - os.listdir(expected_sdk_dir) - if os.path.isdir(expected_sdk_dir) - else [] - ) - if x.startswith(version) - ), - reverse=True, - ) - if names: - return names[0] - else: - print( - "Warning: No include files found for detected " - "Windows SDK version %s" % (version), - file=sys.stdout, - ) - - -def _BuildCommandLineForRuleRaw( - spec, cmd, cygwin_shell, has_input_path, quote_cmd, do_setup_env -): - - if [x for x in cmd if "$(InputDir)" in x]: - input_dir_preamble = ( - "set INPUTDIR=$(InputDir)\n" - "if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n" - "set INPUTDIR=%INPUTDIR:~0,-1%\n" - ) - else: - input_dir_preamble = "" - - if cygwin_shell: - # Find path to cygwin. - cygwin_dir = _FixPath(spec.get("msvs_cygwin_dirs", ["."])[0]) - # Prepare command. 
- direct_cmd = cmd - direct_cmd = [ - i.replace("$(IntDir)", '`cygpath -m "${INTDIR}"`') for i in direct_cmd - ] - direct_cmd = [ - i.replace("$(OutDir)", '`cygpath -m "${OUTDIR}"`') for i in direct_cmd - ] - direct_cmd = [ - i.replace("$(InputDir)", '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd - ] - if has_input_path: - direct_cmd = [ - i.replace("$(InputPath)", '`cygpath -m "${INPUTPATH}"`') - for i in direct_cmd - ] - direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd] - # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd) - direct_cmd = " ".join(direct_cmd) - # TODO(quote): regularize quoting path names throughout the module - cmd = "" - if do_setup_env: - cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && ' - cmd += "set CYGWIN=nontsec&& " - if direct_cmd.find("NUMBER_OF_PROCESSORS") >= 0: - cmd += "set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& " - if direct_cmd.find("INTDIR") >= 0: - cmd += "set INTDIR=$(IntDir)&& " - if direct_cmd.find("OUTDIR") >= 0: - cmd += "set OUTDIR=$(OutDir)&& " - if has_input_path and direct_cmd.find("INPUTPATH") >= 0: - cmd += "set INPUTPATH=$(InputPath) && " - cmd += 'bash -c "%(cmd)s"' - cmd = cmd % {"cygwin_dir": cygwin_dir, "cmd": direct_cmd} - return input_dir_preamble + cmd - else: - # Convert cat --> type to mimic unix. - command = ["type"] if cmd[0] == "cat" else [cmd[0].replace("/", "\\")] - # Add call before command to ensure that commands can be tied together one - # after the other without aborting in Incredibuild, since IB makes a bat - # file out of the raw command string, and some commands (like python) are - # actually batch files themselves. - command.insert(0, "call") - # Fix the paths - # TODO(quote): This is a really ugly heuristic, and will miss path fixing - # for arguments like "--arg=path", arg=path, or "/opt:path". - # If the argument starts with a slash or dash, or contains an equal sign, - # it's probably a command line switch. - # Return the path with forward slashes because the command using it might - # not support backslashes. - arguments = [ - i if (i[:1] in "/-" or "=" in i) else _FixPath(i, "/") - for i in cmd[1:] - ] - arguments = [i.replace("$(InputDir)", "%INPUTDIR%") for i in arguments] - arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments] - if quote_cmd: - # Support a mode for using cmd directly. - # Convert any paths to native form (first element is used directly). - # TODO(quote): regularize quoting path names throughout the module - command[1] = '"%s"' % command[1] - arguments = ['"%s"' % i for i in arguments] - # Collapse into a single command. - return input_dir_preamble + " ".join(command + arguments) - - -def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env): - # Currently this weird argument munging is used to duplicate the way a - # python script would need to be run as part of the chrome tree. - # Eventually we should add some sort of rule_default option to set this - # per project. For now the behavior chrome needs is the default. - mcs = rule.get("msvs_cygwin_shell") - if mcs is None: - mcs = int(spec.get("msvs_cygwin_shell", 1)) - elif isinstance(mcs, str): - mcs = int(mcs) - quote_cmd = int(rule.get("msvs_quote_cmd", 1)) - return _BuildCommandLineForRuleRaw( - spec, rule["action"], mcs, has_input_path, quote_cmd, do_setup_env=do_setup_env - ) - - -def _AddActionStep(actions_dict, inputs, outputs, description, command): - """Merge action into an existing list of actions. 
- - Care must be taken so that actions which have overlapping inputs either don't - get assigned to the same input, or get collapsed into one. - - Arguments: - actions_dict: dictionary keyed on input name, which maps to a list of - dicts describing the actions attached to that input file. - inputs: list of inputs - outputs: list of outputs - description: description of the action - command: command line to execute - """ - # Require there to be at least one input (call sites will ensure this). - assert inputs - - action = { - "inputs": inputs, - "outputs": outputs, - "description": description, - "command": command, - } - - # Pick where to stick this action. - # While less than optimal in terms of build time, attach them to the first - # input for now. - chosen_input = inputs[0] - - # Add it there. - if chosen_input not in actions_dict: - actions_dict[chosen_input] = [] - actions_dict[chosen_input].append(action) - - -def _AddCustomBuildToolForMSVS( - p, spec, primary_input, inputs, outputs, description, cmd -): - """Add a custom build tool to execute something. - - Arguments: - p: the target project - spec: the target project dict - primary_input: input file to attach the build tool to - inputs: list of inputs - outputs: list of outputs - description: description of the action - cmd: command line to execute - """ - inputs = _FixPaths(inputs) - outputs = _FixPaths(outputs) - tool = MSVSProject.Tool( - "VCCustomBuildTool", - { - "Description": description, - "AdditionalDependencies": ";".join(inputs), - "Outputs": ";".join(outputs), - "CommandLine": cmd, - }, - ) - # Add to the properties of primary input for each config. - for config_name, c_data in spec["configurations"].items(): - p.AddFileConfig( - _FixPath(primary_input), _ConfigFullName(config_name, c_data), tools=[tool] - ) - - -def _AddAccumulatedActionsToMSVS(p, spec, actions_dict): - """Add actions accumulated into an actions_dict, merging as needed. - - Arguments: - p: the target project - spec: the target project dict - actions_dict: dictionary keyed on input name, which maps to a list of - dicts describing the actions attached to that input file. - """ - for primary_input in actions_dict: - inputs = OrderedSet() - outputs = OrderedSet() - descriptions = [] - commands = [] - for action in actions_dict[primary_input]: - inputs.update(OrderedSet(action["inputs"])) - outputs.update(OrderedSet(action["outputs"])) - descriptions.append(action["description"]) - commands.append(action["command"]) - # Add the custom build step for one input file. - description = ", and also ".join(descriptions) - command = "\r\n".join(commands) - _AddCustomBuildToolForMSVS( - p, - spec, - primary_input=primary_input, - inputs=inputs, - outputs=outputs, - description=description, - cmd=command, - ) - - -def _RuleExpandPath(path, input_file): - """Given the input file to which a rule applied, string substitute a path. - - Arguments: - path: a path to string expand - input_file: the file to which the rule applied. - Returns: - The string substituted path. - """ - path = path.replace( - "$(InputName)", os.path.splitext(os.path.split(input_file)[1])[0] - ) - path = path.replace("$(InputDir)", os.path.dirname(input_file)) - path = path.replace( - "$(InputExt)", os.path.splitext(os.path.split(input_file)[1])[1] - ) - path = path.replace("$(InputFileName)", os.path.split(input_file)[1]) - path = path.replace("$(InputPath)", input_file) - return path - - -def _FindRuleTriggerFiles(rule, sources): - """Find the list of files which a particular rule applies to. 
- - Arguments: - rule: the rule in question - sources: the set of all known source files for this project - Returns: - The list of sources that trigger a particular rule. - """ - return rule.get("rule_sources", []) - - -def _RuleInputsAndOutputs(rule, trigger_file): - """Find the inputs and outputs generated by a rule. - - Arguments: - rule: the rule in question. - trigger_file: the main trigger for this rule. - Returns: - The pair of (inputs, outputs) involved in this rule. - """ - raw_inputs = _FixPaths(rule.get("inputs", [])) - raw_outputs = _FixPaths(rule.get("outputs", [])) - inputs = OrderedSet() - outputs = OrderedSet() - inputs.add(trigger_file) - for i in raw_inputs: - inputs.add(_RuleExpandPath(i, trigger_file)) - for o in raw_outputs: - outputs.add(_RuleExpandPath(o, trigger_file)) - return (inputs, outputs) - - -def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options): - """Generate a native rules file. - - Arguments: - p: the target project - rules: the set of rules to include - output_dir: the directory in which the project/gyp resides - spec: the project dict - options: global generator options - """ - rules_filename = "{}{}.rules".format(spec["target_name"], options.suffix) - rules_file = MSVSToolFile.Writer( - os.path.join(output_dir, rules_filename), spec["target_name"] - ) - # Add each rule. - for r in rules: - rule_name = r["rule_name"] - rule_ext = r["extension"] - inputs = _FixPaths(r.get("inputs", [])) - outputs = _FixPaths(r.get("outputs", [])) - # Skip a rule with no action and no inputs. - if "action" not in r and not r.get("rule_sources", []): - continue - cmd = _BuildCommandLineForRule(spec, r, has_input_path=True, do_setup_env=True) - rules_file.AddCustomBuildRule( - name=rule_name, - description=r.get("message", rule_name), - extensions=[rule_ext], - additional_dependencies=inputs, - outputs=outputs, - cmd=cmd, - ) - # Write out rules file. - rules_file.WriteIfChanged() - - # Add rules file to project. - p.AddToolFile(rules_filename) - - -def _Cygwinify(path): - path = path.replace("$(OutDir)", "$(OutDirCygwin)") - path = path.replace("$(IntDir)", "$(IntDirCygwin)") - return path - - -def _GenerateExternalRules(rules, output_dir, spec, sources, options, actions_to_add): - """Generate an external makefile to do a set of rules. - - Arguments: - rules: the list of rules to include - output_dir: path containing project and gyp files - spec: project specification data - sources: set of sources known - options: global generator options - actions_to_add: The list of actions we will add to. - """ - filename = "{}_rules{}.mk".format(spec["target_name"], options.suffix) - mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename)) - # Find cygwin style versions of some paths. - mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n') - mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n') - # Gather stuff needed to emit all: target. - all_inputs = OrderedSet() - all_outputs = OrderedSet() - all_output_dirs = OrderedSet() - first_outputs = [] - for rule in rules: - trigger_files = _FindRuleTriggerFiles(rule, sources) - for tf in trigger_files: - inputs, outputs = _RuleInputsAndOutputs(rule, tf) - all_inputs.update(OrderedSet(inputs)) - all_outputs.update(OrderedSet(outputs)) - # Only use one target from each rule as the dependency for - # 'all' so we don't try to build each rule multiple times. - first_outputs.append(next(iter(outputs))) - # Get the unique output directories for this rule. 
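
# A standalone sketch of the $(Input*) substitution _RuleExpandPath above
# performs for each trigger file; "src/idl/foo.idl" is an invented example.
import os

def expand_sketch(path, input_file):
    head, name = os.path.split(input_file)
    root, ext = os.path.splitext(name)
    return (path.replace("$(InputName)", root)
                .replace("$(InputDir)", head)
                .replace("$(InputExt)", ext)
                .replace("$(InputFileName)", name)
                .replace("$(InputPath)", input_file))

assert expand_sketch("$(IntDir)\\$(InputName)_h.h", "src/idl/foo.idl") == "$(IntDir)\\foo_h.h"
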
- output_dirs = [os.path.split(i)[0] for i in outputs] - for od in output_dirs: - all_output_dirs.add(od) - first_outputs_cyg = [_Cygwinify(i) for i in first_outputs] - # Write out all: target, including mkdir for each output directory. - mk_file.write("all: %s\n" % " ".join(first_outputs_cyg)) - for od in all_output_dirs: - if od: - mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od) - mk_file.write("\n") - # Define how each output is generated. - for rule in rules: - trigger_files = _FindRuleTriggerFiles(rule, sources) - for tf in trigger_files: - # Get all the inputs and outputs for this rule for this trigger file. - inputs, outputs = _RuleInputsAndOutputs(rule, tf) - inputs = [_Cygwinify(i) for i in inputs] - outputs = [_Cygwinify(i) for i in outputs] - # Prepare the command line for this rule. - cmd = [_RuleExpandPath(c, tf) for c in rule["action"]] - cmd = ['"%s"' % i for i in cmd] - cmd = " ".join(cmd) - # Add it to the makefile. - mk_file.write("{}: {}\n".format(" ".join(outputs), " ".join(inputs))) - mk_file.write("\t%s\n\n" % cmd) - # Close up the file. - mk_file.close() - - # Add makefile to list of sources. - sources.add(filename) - # Add a build action to call makefile. - cmd = [ - "make", - "OutDir=$(OutDir)", - "IntDir=$(IntDir)", - "-j", - "${NUMBER_OF_PROCESSORS_PLUS_1}", - "-f", - filename, - ] - cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True) - # Insert makefile as 0'th input, so it gets the action attached there, - # as this is easier to understand from in the IDE. - all_inputs = list(all_inputs) - all_inputs.insert(0, filename) - _AddActionStep( - actions_to_add, - inputs=_FixPaths(all_inputs), - outputs=_FixPaths(all_outputs), - description="Running external rules for %s" % spec["target_name"], - command=cmd, - ) - - -def _EscapeEnvironmentVariableExpansion(s): - """Escapes % characters. - - Escapes any % characters so that Windows-style environment variable - expansions will leave them alone. - See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile - to understand why we have to do this. - - Args: - s: The string to be escaped. - - Returns: - The escaped string. - """ - s = s.replace("%", "%%") - return s - - -quote_replacer_regex = re.compile(r'(\\*)"') - - -def _EscapeCommandLineArgumentForMSVS(s): - """Escapes a Windows command-line argument. - - So that the Win32 CommandLineToArgv function will turn the escaped result back - into the original string. - See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx - ("Parsing C++ Command-Line Arguments") to understand why we have to do - this. - - Args: - s: the string to be escaped. - Returns: - the escaped string. - """ - - def _Replace(match): - # For a literal quote, CommandLineToArgv requires an odd number of - # backslashes preceding it, and it produces half as many literal backslashes - # (rounded down). So we need to produce 2n+1 backslashes. - return 2 * match.group(1) + '\\"' - - # Escape all quotes so that they are interpreted literally. - s = quote_replacer_regex.sub(_Replace, s) - # Now add unescaped quotes so that any whitespace is interpreted literally. - s = '"' + s + '"' - return s - - -delimiters_replacer_regex = re.compile(r"(\\*)([,;]+)") - - -def _EscapeVCProjCommandLineArgListItem(s): - """Escapes command line arguments for MSVS. - - The VCProj format stores string lists in a single string using commas and - semi-colons as separators, which must be quoted if they are to be - interpreted literally. 
However, command-line arguments may already have - quotes, and the VCProj parser is ignorant of the backslash escaping - convention used by CommandLineToArgv, so the command-line quotes and the - VCProj quotes may not be the same quotes. So to store a general - command-line argument in a VCProj list, we need to parse the existing - quoting according to VCProj's convention and quote any delimiters that are - not already quoted by that convention. The quotes that we add will also be - seen by CommandLineToArgv, so if backslashes precede them then we also have - to escape those backslashes according to the CommandLineToArgv - convention. - - Args: - s: the string to be escaped. - Returns: - the escaped string. - """ - - def _Replace(match): - # For a non-literal quote, CommandLineToArgv requires an even number of - # backslashes preceding it, and it produces half as many literal - # backslashes. So we need to produce 2n backslashes. - return 2 * match.group(1) + '"' + match.group(2) + '"' - - segments = s.split('"') - # The unquoted segments are at the even-numbered indices. - for i in range(0, len(segments), 2): - segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i]) - # Concatenate back into a single string - s = '"'.join(segments) - if len(segments) % 2 == 0: - # String ends while still quoted according to VCProj's convention. This - # means the delimiter and the next list item that follow this one in the - # .vcproj file will be misinterpreted as part of this item. There is nothing - # we can do about this. Adding an extra quote would correct the problem in - # the VCProj but cause the same problem on the final command-line. Moving - # the item to the end of the list does works, but that's only possible if - # there's only one such item. Let's just warn the user. - print( - "Warning: MSVS may misinterpret the odd number of " + "quotes in " + s, - file=sys.stderr, - ) - return s - - -def _EscapeCppDefineForMSVS(s): - """Escapes a CPP define so that it will reach the compiler unaltered.""" - s = _EscapeEnvironmentVariableExpansion(s) - s = _EscapeCommandLineArgumentForMSVS(s) - s = _EscapeVCProjCommandLineArgListItem(s) - # cl.exe replaces literal # characters with = in preprocessor definitions for - # some reason. Octal-encode to work around that. - s = s.replace("#", "\\%03o" % ord("#")) - return s - - -quote_replacer_regex2 = re.compile(r'(\\+)"') - - -def _EscapeCommandLineArgumentForMSBuild(s): - """Escapes a Windows command-line argument for use by MSBuild.""" - - def _Replace(match): - return (len(match.group(1)) / 2 * 4) * "\\" + '\\"' - - # Escape all quotes so that they are interpreted literally. - s = quote_replacer_regex2.sub(_Replace, s) - return s - - -def _EscapeMSBuildSpecialCharacters(s): - escape_dictionary = { - "%": "%25", - "$": "%24", - "@": "%40", - "'": "%27", - ";": "%3B", - "?": "%3F", - "*": "%2A", - } - result = "".join([escape_dictionary.get(c, c) for c in s]) - return result - - -def _EscapeCppDefineForMSBuild(s): - """Escapes a CPP define so that it will reach the compiler unaltered.""" - s = _EscapeEnvironmentVariableExpansion(s) - s = _EscapeCommandLineArgumentForMSBuild(s) - s = _EscapeMSBuildSpecialCharacters(s) - # cl.exe replaces literal # characters with = in preprocessor definitions for - # some reason. Octal-encode to work around that. 
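
# The effect of the '#' workaround below, on an invented define: ord("#") is
# 35, so "\\%03o" % 35 yields the literal text "\043", which cl.exe passes
# through unchanged.
sample_define = 'ANNOTATION="#pragma"'
print(sample_define.replace("#", "\\%03o" % ord("#")))  # ANNOTATION="\043pragma"
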
- s = s.replace("#", "\\%03o" % ord("#")) - return s - - -def _GenerateRulesForMSVS( - p, output_dir, options, spec, sources, excluded_sources, actions_to_add -): - """Generate all the rules for a particular project. - - Arguments: - p: the project - output_dir: directory to emit rules to - options: global options passed to the generator - spec: the specification for this project - sources: the set of all known source files in this project - excluded_sources: the set of sources excluded from normal processing - actions_to_add: deferred list of actions to add in - """ - rules = spec.get("rules", []) - rules_native = [r for r in rules if not int(r.get("msvs_external_rule", 0))] - rules_external = [r for r in rules if int(r.get("msvs_external_rule", 0))] - - # Handle rules that use a native rules file. - if rules_native: - _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options) - - # Handle external rules (non-native rules). - if rules_external: - _GenerateExternalRules( - rules_external, output_dir, spec, sources, options, actions_to_add - ) - _AdjustSourcesForRules(rules, sources, excluded_sources, False) - - -def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild): - # Add outputs generated by each rule (if applicable). - for rule in rules: - # Add in the outputs from this rule. - trigger_files = _FindRuleTriggerFiles(rule, sources) - for trigger_file in trigger_files: - # Remove trigger_file from excluded_sources to let the rule be triggered - # (e.g. rule trigger ax_enums.idl is added to excluded_sources - # because it's also in an action's inputs in the same project) - excluded_sources.discard(_FixPath(trigger_file)) - # Done if not processing outputs as sources. - if int(rule.get("process_outputs_as_sources", False)): - inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file) - inputs = OrderedSet(_FixPaths(inputs)) - outputs = OrderedSet(_FixPaths(outputs)) - inputs.remove(_FixPath(trigger_file)) - sources.update(inputs) - if not is_msbuild: - excluded_sources.update(inputs) - sources.update(outputs) - - -def _FilterActionsFromExcluded(excluded_sources, actions_to_add): - """Take inputs with actions attached out of the list of exclusions. - - Arguments: - excluded_sources: list of source files not to be built. - actions_to_add: dict of actions keyed on source file they're attached to. - Returns: - excluded_sources with files that have actions attached removed. - """ - must_keep = OrderedSet(_FixPaths(actions_to_add.keys())) - return [s for s in excluded_sources if s not in must_keep] - - -def _GetDefaultConfiguration(spec): - return spec["configurations"][spec["default_configuration"]] - - -def _GetGuidOfProject(proj_path, spec): - """Get the guid for the project. - - Arguments: - proj_path: Path of the vcproj or vcxproj file to generate. - spec: The target dictionary containing the properties of the target. - Returns: - the guid. - Raises: - ValueError: if the specified GUID is invalid. - """ - # Pluck out the default configuration. - default_config = _GetDefaultConfiguration(spec) - # Decide the guid of the project. - guid = default_config.get("msvs_guid") - if guid: - if VALID_MSVS_GUID_CHARS.match(guid) is None: - raise ValueError( - 'Invalid MSVS guid: "%s". Must match regex: "%s".' - % (guid, VALID_MSVS_GUID_CHARS.pattern) - ) - guid = "{%s}" % guid - guid = guid or MSVSNew.MakeGuid(proj_path) - return guid - - -def _GetMsbuildToolsetOfProject(proj_path, spec, version): - """Get the platform toolset for the project. 
- - Arguments: - proj_path: Path of the vcproj or vcxproj file to generate. - spec: The target dictionary containing the properties of the target. - version: The MSVSVersion object. - Returns: - the platform toolset string or None. - """ - # Pluck out the default configuration. - default_config = _GetDefaultConfiguration(spec) - toolset = default_config.get("msbuild_toolset") - if not toolset and version.DefaultToolset(): - toolset = version.DefaultToolset() - if spec["type"] == "windows_driver": - toolset = "WindowsKernelModeDriver10.0" - return toolset - - -def _GenerateProject(project, options, version, generator_flags, spec): - """Generates a vcproj file. - - Arguments: - project: the MSVSProject object. - options: global generator options. - version: the MSVSVersion object. - generator_flags: dict of generator-specific flags. - Returns: - A list of source files that cannot be found on disk. - """ - default_config = _GetDefaultConfiguration(project.spec) - - # Skip emitting anything if told to with msvs_existing_vcproj option. - if default_config.get("msvs_existing_vcproj"): - return [] - - if version.UsesVcxproj(): - return _GenerateMSBuildProject(project, options, version, generator_flags, spec) - else: - return _GenerateMSVSProject(project, options, version, generator_flags) - - -def _GenerateMSVSProject(project, options, version, generator_flags): - """Generates a .vcproj file. It may create .rules and .user files too. - - Arguments: - project: The project object we will generate the file for. - options: Global options passed to the generator. - version: The VisualStudioVersion object. - generator_flags: dict of generator-specific flags. - """ - spec = project.spec - gyp.common.EnsureDirExists(project.path) - - platforms = _GetUniquePlatforms(spec) - p = MSVSProject.Writer( - project.path, version, spec["target_name"], project.guid, platforms - ) - - # Get directory project file is in. - project_dir = os.path.split(project.path)[0] - gyp_path = _NormalizedSource(project.build_file) - relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir) - - config_type = _GetMSVSConfigurationType(spec, project.build_file) - for config_name, config in spec["configurations"].items(): - _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config) - - # Prepare list of sources and excluded sources. - gyp_file = os.path.split(project.build_file)[1] - sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, gyp_file) - - # Add rules. - actions_to_add = {} - _GenerateRulesForMSVS( - p, project_dir, options, spec, sources, excluded_sources, actions_to_add - ) - list_excluded = generator_flags.get("msvs_list_excluded_files", True) - sources, excluded_sources, excluded_idl = _AdjustSourcesAndConvertToFilterHierarchy( - spec, options, project_dir, sources, excluded_sources, list_excluded, version - ) - - # Add in files. - missing_sources = _VerifySourcesExist(sources, project_dir) - p.AddFiles(sources) - - _AddToolFilesToMSVS(p, spec) - _HandlePreCompiledHeaders(p, sources, spec) - _AddActions(actions_to_add, spec, relative_path_of_gyp_file) - _AddCopies(actions_to_add, spec) - _WriteMSVSUserFile(project.path, version, spec) - - # NOTE: this stanza must appear after all actions have been decided. - # Don't excluded sources with actions attached, or they won't run. 
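
# A sketch of the exclusion filtering applied next: any source with an action
# attached is pulled back out of the excluded list so its custom build step
# still runs. File names are invented and _FixPaths is skipped for brevity.
actions_sketch = {"gen.py": [{"command": "..."}]}
excluded_sketch = ["gen.py", "unused.cc"]
must_keep = set(actions_sketch)
print([s for s in excluded_sketch if s not in must_keep])  # ['unused.cc']
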
- excluded_sources = _FilterActionsFromExcluded(excluded_sources, actions_to_add) - _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded) - _AddAccumulatedActionsToMSVS(p, spec, actions_to_add) - - # Write it out. - p.WriteIfChanged() - - return missing_sources - - -def _GetUniquePlatforms(spec): - """Returns the list of unique platforms for this spec, e.g ['win32', ...]. - - Arguments: - spec: The target dictionary containing the properties of the target. - Returns: - The MSVSUserFile object created. - """ - # Gather list of unique platforms. - platforms = OrderedSet() - for configuration in spec["configurations"]: - platforms.add(_ConfigPlatform(spec["configurations"][configuration])) - platforms = list(platforms) - return platforms - - -def _CreateMSVSUserFile(proj_path, version, spec): - """Generates a .user file for the user running this Gyp program. - - Arguments: - proj_path: The path of the project file being created. The .user file - shares the same path (with an appropriate suffix). - version: The VisualStudioVersion object. - spec: The target dictionary containing the properties of the target. - Returns: - The MSVSUserFile object created. - """ - (domain, username) = _GetDomainAndUserName() - vcuser_filename = ".".join([proj_path, domain, username, "user"]) - user_file = MSVSUserFile.Writer(vcuser_filename, version, spec["target_name"]) - return user_file - - -def _GetMSVSConfigurationType(spec, build_file): - """Returns the configuration type for this project. - - It's a number defined by Microsoft. May raise an exception. - - Args: - spec: The target dictionary containing the properties of the target. - build_file: The path of the gyp file. - Returns: - An integer, the configuration type. - """ - try: - config_type = { - "executable": "1", # .exe - "shared_library": "2", # .dll - "loadable_module": "2", # .dll - "static_library": "4", # .lib - "windows_driver": "5", # .sys - "none": "10", # Utility type - }[spec["type"]] - except KeyError: - if spec.get("type"): - raise GypError( - "Target type %s is not a valid target type for " - "target %s in %s." % (spec["type"], spec["target_name"], build_file) - ) - else: - raise GypError( - "Missing type field for target %s in %s." - % (spec["target_name"], build_file) - ) - return config_type - - -def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config): - """Adds a configuration to the MSVS project. - - Many settings in a vcproj file are specific to a configuration. This - function the main part of the vcproj file that's configuration specific. - - Arguments: - p: The target project being generated. - spec: The target dictionary containing the properties of the target. - config_type: The configuration type, a number as defined by Microsoft. - config_name: The name of the configuration. - config: The dictionary that defines the special processing to be done - for this configuration. 
- """ - # Get the information for this configuration - include_dirs, midl_include_dirs, resource_include_dirs = _GetIncludeDirs(config) - libraries = _GetLibraries(spec) - library_dirs = _GetLibraryDirs(config) - out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False) - defines = _GetDefines(config) - defines = [_EscapeCppDefineForMSVS(d) for d in defines] - disabled_warnings = _GetDisabledWarnings(config) - prebuild = config.get("msvs_prebuild") - postbuild = config.get("msvs_postbuild") - def_file = _GetModuleDefinition(spec) - precompiled_header = config.get("msvs_precompiled_header") - - # Prepare the list of tools as a dictionary. - tools = {} - # Add in user specified msvs_settings. - msvs_settings = config.get("msvs_settings", {}) - MSVSSettings.ValidateMSVSSettings(msvs_settings) - - # Prevent default library inheritance from the environment. - _ToolAppend(tools, "VCLinkerTool", "AdditionalDependencies", ["$(NOINHERIT)"]) - - for tool in msvs_settings: - settings = config["msvs_settings"][tool] - for setting in settings: - _ToolAppend(tools, tool, setting, settings[setting]) - # Add the information to the appropriate tool - _ToolAppend(tools, "VCCLCompilerTool", "AdditionalIncludeDirectories", include_dirs) - _ToolAppend(tools, "VCMIDLTool", "AdditionalIncludeDirectories", midl_include_dirs) - _ToolAppend( - tools, - "VCResourceCompilerTool", - "AdditionalIncludeDirectories", - resource_include_dirs, - ) - # Add in libraries. - _ToolAppend(tools, "VCLinkerTool", "AdditionalDependencies", libraries) - _ToolAppend(tools, "VCLinkerTool", "AdditionalLibraryDirectories", library_dirs) - if out_file: - _ToolAppend(tools, vc_tool, "OutputFile", out_file, only_if_unset=True) - # Add defines. - _ToolAppend(tools, "VCCLCompilerTool", "PreprocessorDefinitions", defines) - _ToolAppend(tools, "VCResourceCompilerTool", "PreprocessorDefinitions", defines) - # Change program database directory to prevent collisions. - _ToolAppend( - tools, - "VCCLCompilerTool", - "ProgramDataBaseFileName", - "$(IntDir)$(ProjectName)\\vc80.pdb", - only_if_unset=True, - ) - # Add disabled warnings. - _ToolAppend(tools, "VCCLCompilerTool", "DisableSpecificWarnings", disabled_warnings) - # Add Pre-build. - _ToolAppend(tools, "VCPreBuildEventTool", "CommandLine", prebuild) - # Add Post-build. - _ToolAppend(tools, "VCPostBuildEventTool", "CommandLine", postbuild) - # Turn on precompiled headers if appropriate. - if precompiled_header: - precompiled_header = os.path.split(precompiled_header)[1] - _ToolAppend(tools, "VCCLCompilerTool", "UsePrecompiledHeader", "2") - _ToolAppend( - tools, "VCCLCompilerTool", "PrecompiledHeaderThrough", precompiled_header - ) - _ToolAppend(tools, "VCCLCompilerTool", "ForcedIncludeFiles", precompiled_header) - # Loadable modules don't generate import libraries; - # tell dependent projects to not expect one. - if spec["type"] == "loadable_module": - _ToolAppend(tools, "VCLinkerTool", "IgnoreImportLibrary", "true") - # Set the module definition file if any. - if def_file: - _ToolAppend(tools, "VCLinkerTool", "ModuleDefinitionFile", def_file) - - _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name) - - -def _GetIncludeDirs(config): - """Returns the list of directories to be used for #include directives. - - Arguments: - config: The dictionary that defines the special processing to be done - for this configuration. - Returns: - The list of directory paths. 
- """ - # TODO(bradnelson): include_dirs should really be flexible enough not to - # require this sort of thing. - include_dirs = config.get("include_dirs", []) + config.get( - "msvs_system_include_dirs", [] - ) - midl_include_dirs = config.get("midl_include_dirs", []) + config.get( - "msvs_system_include_dirs", [] - ) - resource_include_dirs = config.get("resource_include_dirs", include_dirs) - include_dirs = _FixPaths(include_dirs) - midl_include_dirs = _FixPaths(midl_include_dirs) - resource_include_dirs = _FixPaths(resource_include_dirs) - return include_dirs, midl_include_dirs, resource_include_dirs - - -def _GetLibraryDirs(config): - """Returns the list of directories to be used for library search paths. - - Arguments: - config: The dictionary that defines the special processing to be done - for this configuration. - Returns: - The list of directory paths. - """ - - library_dirs = config.get("library_dirs", []) - library_dirs = _FixPaths(library_dirs) - return library_dirs - - -def _GetLibraries(spec): - """Returns the list of libraries for this configuration. - - Arguments: - spec: The target dictionary containing the properties of the target. - Returns: - The list of directory paths. - """ - libraries = spec.get("libraries", []) - # Strip out -l, as it is not used on windows (but is needed so we can pass - # in libraries that are assumed to be in the default library path). - # Also remove duplicate entries, leaving only the last duplicate, while - # preserving order. - found = OrderedSet() - unique_libraries_list = [] - for entry in reversed(libraries): - library = re.sub(r"^\-l", "", entry) - if not os.path.splitext(library)[1]: - library += ".lib" - if library not in found: - found.add(library) - unique_libraries_list.append(library) - unique_libraries_list.reverse() - return unique_libraries_list - - -def _GetOutputFilePathAndTool(spec, msbuild): - """Returns the path and tool to use for this target. - - Figures out the path of the file this spec will create and the name of - the VC tool that will create it. - - Arguments: - spec: The target dictionary containing the properties of the target. - Returns: - A triple of (file path, name of the vc tool, name of the msbuild tool) - """ - # Select a name for the output file. - out_file = "" - vc_tool = "" - msbuild_tool = "" - output_file_map = { - "executable": ("VCLinkerTool", "Link", "$(OutDir)", ".exe"), - "shared_library": ("VCLinkerTool", "Link", "$(OutDir)", ".dll"), - "loadable_module": ("VCLinkerTool", "Link", "$(OutDir)", ".dll"), - "windows_driver": ("VCLinkerTool", "Link", "$(OutDir)", ".sys"), - "static_library": ("VCLibrarianTool", "Lib", "$(OutDir)lib\\", ".lib"), - } - output_file_props = output_file_map.get(spec["type"]) - if output_file_props and int(spec.get("msvs_auto_output_file", 1)): - vc_tool, msbuild_tool, out_dir, suffix = output_file_props - if spec.get("standalone_static_library", 0): - out_dir = "$(OutDir)" - out_dir = spec.get("product_dir", out_dir) - product_extension = spec.get("product_extension") - if product_extension: - suffix = "." 
+ product_extension - elif msbuild: - suffix = "$(TargetExt)" - prefix = spec.get("product_prefix", "") - product_name = spec.get("product_name", "$(ProjectName)") - out_file = ntpath.join(out_dir, prefix + product_name + suffix) - return out_file, vc_tool, msbuild_tool - - -def _GetOutputTargetExt(spec): - """Returns the extension for this target, including the dot - - If product_extension is specified, set target_extension to this to avoid - MSB8012, returns None otherwise. Ignores any target_extension settings in - the input files. - - Arguments: - spec: The target dictionary containing the properties of the target. - Returns: - A string with the extension, or None - """ - target_extension = spec.get("product_extension") - if target_extension: - return "." + target_extension - return None - - -def _GetDefines(config): - """Returns the list of preprocessor definitions for this configuration. - - Arguments: - config: The dictionary that defines the special processing to be done - for this configuration. - Returns: - The list of preprocessor definitions. - """ - defines = [] - for d in config.get("defines", []): - fd = "=".join([str(dpart) for dpart in d]) if isinstance(d, list) else str(d) - defines.append(fd) - return defines - - -def _GetDisabledWarnings(config): - return [str(i) for i in config.get("msvs_disabled_warnings", [])] - - -def _GetModuleDefinition(spec): - def_file = "" - if spec["type"] in [ - "shared_library", - "loadable_module", - "executable", - "windows_driver", - ]: - def_files = [s for s in spec.get("sources", []) if s.endswith(".def")] - if len(def_files) == 1: - def_file = _FixPath(def_files[0]) - elif def_files: - raise ValueError( - "Multiple module definition files in one target, target %s lists " - "multiple .def files: %s" % (spec["target_name"], " ".join(def_files)) - ) - return def_file - - -def _ConvertToolsToExpectedForm(tools): - """Convert tools to a form expected by Visual Studio. - - Arguments: - tools: A dictionary of settings; the tool name is the key. - Returns: - A list of Tool objects. - """ - tool_list = [] - for tool, settings in tools.items(): - # Collapse settings with lists. - settings_fixed = {} - for setting, value in settings.items(): - if type(value) == list: - if ( - tool == "VCLinkerTool" and setting == "AdditionalDependencies" - ) or setting == "AdditionalOptions": - settings_fixed[setting] = " ".join(value) - else: - settings_fixed[setting] = ";".join(value) - else: - settings_fixed[setting] = value - # Add in this tool. - tool_list.append(MSVSProject.Tool(tool, settings_fixed)) - return tool_list - - -def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name): - """Add to the project file the configuration specified by config. - - Arguments: - p: The target project being generated. - spec: the target project dict. - tools: A dictionary of settings; the tool name is the key. - config: The dictionary that defines the special processing to be done - for this configuration. - config_type: The configuration type, a number as defined by Microsoft. - config_name: The name of the configuration. - """ - attributes = _GetMSVSAttributes(spec, config, config_type) - # Add in this configuration. - tool_list = _ConvertToolsToExpectedForm(tools) - p.AddConfig(_ConfigFullName(config_name, config), attrs=attributes, tools=tool_list) - - -def _GetMSVSAttributes(spec, config, config_type): - # Prepare configuration attributes. 
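
# A sketch of the output-path assembly in _GetOutputFilePathAndTool above,
# for an invented shared_library spec with no product_* overrides.
import ntpath

out_dir, suffix = "$(OutDir)", ".dll"        # shared_library defaults
prefix, product_name = "", "$(ProjectName)"  # spec defaults
print(ntpath.join(out_dir, prefix + product_name + suffix))
# $(OutDir)\$(ProjectName).dll
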
- prepared_attrs = {} - source_attrs = config.get("msvs_configuration_attributes", {}) - for a in source_attrs: - prepared_attrs[a] = source_attrs[a] - # Add props files. - vsprops_dirs = config.get("msvs_props", []) - vsprops_dirs = _FixPaths(vsprops_dirs) - if vsprops_dirs: - prepared_attrs["InheritedPropertySheets"] = ";".join(vsprops_dirs) - # Set configuration type. - prepared_attrs["ConfigurationType"] = config_type - output_dir = prepared_attrs.get( - "OutputDirectory", "$(SolutionDir)$(ConfigurationName)" - ) - prepared_attrs["OutputDirectory"] = _FixPath(output_dir) + "\\" - if "IntermediateDirectory" not in prepared_attrs: - intermediate = "$(ConfigurationName)\\obj\\$(ProjectName)" - prepared_attrs["IntermediateDirectory"] = _FixPath(intermediate) + "\\" - else: - intermediate = _FixPath(prepared_attrs["IntermediateDirectory"]) + "\\" - intermediate = MSVSSettings.FixVCMacroSlashes(intermediate) - prepared_attrs["IntermediateDirectory"] = intermediate - return prepared_attrs - - -def _AddNormalizedSources(sources_set, sources_array): - sources_set.update(_NormalizedSource(s) for s in sources_array) - - -def _PrepareListOfSources(spec, generator_flags, gyp_file): - """Prepare list of sources and excluded sources. - - Besides the sources specified directly in the spec, adds the gyp file so - that a change to it will cause a re-compile. Also adds appropriate sources - for actions and copies. Assumes later stage will un-exclude files which - have custom build steps attached. - - Arguments: - spec: The target dictionary containing the properties of the target. - gyp_file: The name of the gyp file. - Returns: - A pair of (list of sources, list of excluded sources). - The sources will be relative to the gyp file. - """ - sources = OrderedSet() - _AddNormalizedSources(sources, spec.get("sources", [])) - excluded_sources = OrderedSet() - # Add in the gyp file. - if not generator_flags.get("standalone"): - sources.add(gyp_file) - - # Add in 'action' inputs and outputs. - for a in spec.get("actions", []): - inputs = a["inputs"] - inputs = [_NormalizedSource(i) for i in inputs] - # Add all inputs to sources and excluded sources. - inputs = OrderedSet(inputs) - sources.update(inputs) - if not spec.get("msvs_external_builder"): - excluded_sources.update(inputs) - if int(a.get("process_outputs_as_sources", False)): - _AddNormalizedSources(sources, a.get("outputs", [])) - # Add in 'copies' inputs and outputs. - for cpy in spec.get("copies", []): - _AddNormalizedSources(sources, cpy.get("files", [])) - return (sources, excluded_sources) - - -def _AdjustSourcesAndConvertToFilterHierarchy( - spec, options, gyp_dir, sources, excluded_sources, list_excluded, version -): - """Adjusts the list of sources and excluded sources. - - Also converts the sets to lists. - - Arguments: - spec: The target dictionary containing the properties of the target. - options: Global generator options. - gyp_dir: The path to the gyp file being processed. - sources: A set of sources to be included for this project. - excluded_sources: A set of sources to be excluded for this project. - version: A MSVSVersion object. - Returns: - A trio of (list of sources, list of excluded sources, - path of excluded IDL file) - """ - # Exclude excluded sources coming into the generator. - excluded_sources.update(OrderedSet(spec.get("sources_excluded", []))) - # Add excluded sources into sources for good measure. - sources.update(excluded_sources) - # Convert to proper windows form. - # NOTE: sources goes from being a set to a list here. 
- # NOTE: excluded_sources goes from being a set to a list here. - sources = _FixPaths(sources) - # Convert to proper windows form. - excluded_sources = _FixPaths(excluded_sources) - - excluded_idl = _IdlFilesHandledNonNatively(spec, sources) - - precompiled_related = _GetPrecompileRelatedFiles(spec) - # Find the excluded ones, minus the precompiled header related ones. - fully_excluded = [i for i in excluded_sources if i not in precompiled_related] - - # Convert to folders and the right slashes. - sources = [i.split("\\") for i in sources] - sources = _ConvertSourcesToFilterHierarchy( - sources, - excluded=fully_excluded, - list_excluded=list_excluded, - msvs_version=version, - ) - - # Prune filters with a single child to flatten ugly directory structures - # such as ../../src/modules/module1 etc. - if version.UsesVcxproj(): - while ( - all(isinstance(s, MSVSProject.Filter) for s in sources) - and len({s.name for s in sources}) == 1 - ): - assert all(len(s.contents) == 1 for s in sources) - sources = [s.contents[0] for s in sources] - else: - while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter): - sources = sources[0].contents - - return sources, excluded_sources, excluded_idl - - -def _IdlFilesHandledNonNatively(spec, sources): - # If any non-native rules use 'idl' as an extension exclude idl files. - # Gather a list here to use later. - using_idl = False - for rule in spec.get("rules", []): - if rule["extension"] == "idl" and int(rule.get("msvs_external_rule", 0)): - using_idl = True - break - excluded_idl = [i for i in sources if i.endswith(".idl")] if using_idl else [] - return excluded_idl - - -def _GetPrecompileRelatedFiles(spec): - # Gather a list of precompiled header related sources. - precompiled_related = [] - for _, config in spec["configurations"].items(): - for k in precomp_keys: - f = config.get(k) - if f: - precompiled_related.append(_FixPath(f)) - return precompiled_related - - -def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded): - exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) - for file_name, excluded_configs in exclusions.items(): - if not list_excluded and len(excluded_configs) == len(spec["configurations"]): - # If we're not listing excluded files, then they won't appear in the - # project, so don't try to configure them to be excluded. - pass - else: - for config_name, config in excluded_configs: - p.AddFileConfig( - file_name, - _ConfigFullName(config_name, config), - {"ExcludedFromBuild": "true"}, - ) - - -def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): - exclusions = {} - # Exclude excluded sources from being built. - for f in excluded_sources: - excluded_configs = [] - for config_name, config in spec["configurations"].items(): - precomped = [_FixPath(config.get(i, "")) for i in precomp_keys] - # Don't do this for ones that are precompiled header related. - if f not in precomped: - excluded_configs.append((config_name, config)) - exclusions[f] = excluded_configs - # If any non-native rules use 'idl' as an extension exclude idl files. - # Exclude them now. - for f in excluded_idl: - excluded_configs = [] - for config_name, config in spec["configurations"].items(): - excluded_configs.append((config_name, config)) - exclusions[f] = excluded_configs - return exclusions - - -def _AddToolFilesToMSVS(p, spec): - # Add in tool files (rules). 
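The filter-hierarchy pruning near the top of this hunk is the subtle part: paths such as `../../src/modules/module1` would otherwise produce a chain of folders, each containing a single child. A minimal sketch of the non-vcxproj branch of that loop, with `Filter` and `flatten_single_child_filters` as illustrative stand-ins for the real `MSVSProject.Filter` machinery:

```python
# Minimal stand-in for MSVSProject.Filter, just enough to illustrate the
# single-child pruning done by _AdjustSourcesAndConvertToFilterHierarchy.
class Filter:
    def __init__(self, name, contents):
        self.name = name
        self.contents = contents

def flatten_single_child_filters(sources):
    # Peel off filter layers as long as the level holds exactly one Filter.
    while len(sources) == 1 and isinstance(sources[0], Filter):
        sources = sources[0].contents
    return sources

# "../../src/a.cc"-style paths yield a chain of one-child filters:
tree = [Filter("..", [Filter("..", [Filter("src", ["a.cc", "b.cc"])])])]
print(flatten_single_child_filters(tree))  # ['a.cc', 'b.cc']
```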
- tool_files = OrderedSet() - for _, config in spec["configurations"].items(): - for f in config.get("msvs_tool_files", []): - tool_files.add(f) - for f in tool_files: - p.AddToolFile(f) - - -def _HandlePreCompiledHeaders(p, sources, spec): - # Pre-compiled header source stubs need a different compiler flag - # (generate precompiled header) and any source file not of the same - # kind (i.e. C vs. C++) as the precompiled header source stub needs - # to have use of precompiled headers disabled. - extensions_excluded_from_precompile = [] - for config_name, config in spec["configurations"].items(): - source = config.get("msvs_precompiled_source") - if source: - source = _FixPath(source) - # UsePrecompiledHeader=1 for if using precompiled headers. - tool = MSVSProject.Tool("VCCLCompilerTool", {"UsePrecompiledHeader": "1"}) - p.AddFileConfig( - source, _ConfigFullName(config_name, config), {}, tools=[tool] - ) - basename, extension = os.path.splitext(source) - if extension == ".c": - extensions_excluded_from_precompile = [".cc", ".cpp", ".cxx"] - else: - extensions_excluded_from_precompile = [".c"] - - def DisableForSourceTree(source_tree): - for source in source_tree: - if isinstance(source, MSVSProject.Filter): - DisableForSourceTree(source.contents) - else: - basename, extension = os.path.splitext(source) - if extension in extensions_excluded_from_precompile: - for config_name, config in spec["configurations"].items(): - tool = MSVSProject.Tool( - "VCCLCompilerTool", - { - "UsePrecompiledHeader": "0", - "ForcedIncludeFiles": "$(NOINHERIT)", - }, - ) - p.AddFileConfig( - _FixPath(source), - _ConfigFullName(config_name, config), - {}, - tools=[tool], - ) - - # Do nothing if there was no precompiled source. - if extensions_excluded_from_precompile: - DisableForSourceTree(sources) - - -def _AddActions(actions_to_add, spec, relative_path_of_gyp_file): - # Add actions. - actions = spec.get("actions", []) - # Don't setup_env every time. When all the actions are run together in one - # batch file in VS, the PATH will grow too long. - # Membership in this set means that the cygwin environment has been set up, - # and does not need to be set up again. - have_setup_env = set() - for a in actions: - # Attach actions to the gyp file if nothing else is there. - inputs = a.get("inputs") or [relative_path_of_gyp_file] - attached_to = inputs[0] - need_setup_env = attached_to not in have_setup_env - cmd = _BuildCommandLineForRule( - spec, a, has_input_path=False, do_setup_env=need_setup_env - ) - have_setup_env.add(attached_to) - # Add the action. - _AddActionStep( - actions_to_add, - inputs=inputs, - outputs=a.get("outputs", []), - description=a.get("message", a["action_name"]), - command=cmd, - ) - - -def _WriteMSVSUserFile(project_path, version, spec): - # Add run_as and test targets. - if "run_as" in spec: - run_as = spec["run_as"] - action = run_as.get("action", []) - environment = run_as.get("environment", []) - working_directory = run_as.get("working_directory", ".") - elif int(spec.get("test", 0)): - action = ["$(TargetPath)", "--gtest_print_time"] - environment = [] - working_directory = "." - else: - return # Nothing to add - # Write out the user file. 
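`_HandlePreCompiledHeaders` above keys everything off the language of the precompiled-header stub: a C stub must not be forced into C++ translation units, and vice versa. A runnable distillation of that decision (the function name is mine):

```python
import os

def extensions_excluded_from_precompile(precompiled_source):
    # A C stub (.c) means C++ sources must not use the PCH; any other
    # (C++) stub means plain C sources must not.
    _, ext = os.path.splitext(precompiled_source)
    return [".cc", ".cpp", ".cxx"] if ext == ".c" else [".c"]

print(extensions_excluded_from_precompile("stdafx.c"))   # ['.cc', '.cpp', '.cxx']
print(extensions_excluded_from_precompile("stdafx.cc"))  # ['.c']
```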
- user_file = _CreateMSVSUserFile(project_path, version, spec) - for config_name, c_data in spec["configurations"].items(): - user_file.AddDebugSettings( - _ConfigFullName(config_name, c_data), action, environment, working_directory - ) - user_file.WriteIfChanged() - - -def _AddCopies(actions_to_add, spec): - copies = _GetCopies(spec) - for inputs, outputs, cmd, description in copies: - _AddActionStep( - actions_to_add, - inputs=inputs, - outputs=outputs, - description=description, - command=cmd, - ) - - -def _GetCopies(spec): - copies = [] - # Add copies. - for cpy in spec.get("copies", []): - for src in cpy.get("files", []): - dst = os.path.join(cpy["destination"], os.path.basename(src)) - # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and - # outputs, so do the same for our generated command line. - if src.endswith("/"): - src_bare = src[:-1] - base_dir = posixpath.split(src_bare)[0] - outer_dir = posixpath.split(src_bare)[1] - fixed_dst = _FixPath(dst) - full_dst = f'"{fixed_dst}\\{outer_dir}\\"' - cmd = ( - f'mkdir {full_dst} 2>nul & cd "{_FixPath(base_dir)}" ' - f'&& xcopy /e /f /y "{outer_dir}" {full_dst}' - ) - copies.append( - ( - [src], - ["dummy_copies", dst], - cmd, - f"Copying {src} to {fixed_dst}", - ) - ) - else: - fix_dst = _FixPath(cpy["destination"]) - cmd = ( - f'mkdir "{fix_dst}" 2>nul & set ERRORLEVEL=0 & ' - f'copy /Y "{_FixPath(src)}" "{_FixPath(dst)}"' - ) - copies.append(([src], [dst], cmd, f"Copying {src} to {fix_dst}")) - return copies - - -def _GetPathDict(root, path): - # |path| will eventually be empty (in the recursive calls) if it was initially - # relative; otherwise it will eventually end up as '\', 'D:\', etc. - if not path or path.endswith(os.sep): - return root - parent, folder = os.path.split(path) - parent_dict = _GetPathDict(root, parent) - if folder not in parent_dict: - parent_dict[folder] = {} - return parent_dict[folder] - - -def _DictsToFolders(base_path, bucket, flat): - # Convert to folders recursively. - children = [] - for folder, contents in bucket.items(): - if type(contents) == dict: - folder_children = _DictsToFolders( - os.path.join(base_path, folder), contents, flat - ) - if flat: - children += folder_children - else: - folder_children = MSVSNew.MSVSFolder( - os.path.join(base_path, folder), - name="(" + folder + ")", - entries=folder_children, - ) - children.append(folder_children) - else: - children.append(contents) - return children - - -def _CollapseSingles(parent, node): - # Recursively explorer the tree of dicts looking for projects which are - # the sole item in a folder which has the same name as the project. Bring - # such projects up one level. - if type(node) == dict and len(node) == 1 and next(iter(node)) == parent + ".vcproj": - return node[next(iter(node))] - if type(node) != dict: - return node - for child in node: - node[child] = _CollapseSingles(child, node[child]) - return node - - -def _GatherSolutionFolders(sln_projects, project_objects, flat): - root = {} - # Convert into a tree of dicts on path. - for p in sln_projects: - gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2] - if p.endswith("#host"): - target += "_host" - gyp_dir = os.path.dirname(gyp_file) - path_dict = _GetPathDict(root, gyp_dir) - path_dict[target + ".vcproj"] = project_objects[p] - # Walk down from the top until we hit a folder that has more than one entry. - # In practice, this strips the top-level "src/" dir from the hierarchy in - # the solution. 
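`_GetPathDict` above turns a relative path into a chain of nested dicts, which `_DictsToFolders` then converts into solution folders. A condensed sketch, using `dict.setdefault` in place of the explicit membership test and POSIX separators for brevity:

```python
import os

def get_path_dict(root, path):
    # Recurse on os.path.split until the path is exhausted; each folder
    # along the way becomes a nested dict keyed by its name.
    if not path or path.endswith(os.sep):
        return root
    parent, folder = os.path.split(path)
    return get_path_dict(root, parent).setdefault(folder, {})

root = {}
get_path_dict(root, "src/modules/net")["net.vcproj"] = "<project>"
get_path_dict(root, "src/modules/io")["io.vcproj"] = "<project>"
print(root)
# {'src': {'modules': {'net': {'net.vcproj': '<project>'},
#                      'io': {'io.vcproj': '<project>'}}}}
```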
- while len(root) == 1 and type(root[next(iter(root))]) == dict: - root = root[next(iter(root))] - # Collapse singles. - root = _CollapseSingles("", root) - # Merge buckets until everything is a root entry. - return _DictsToFolders("", root, flat) - - -def _GetPathOfProject(qualified_target, spec, options, msvs_version): - default_config = _GetDefaultConfiguration(spec) - proj_filename = default_config.get("msvs_existing_vcproj") - if not proj_filename: - proj_filename = spec["target_name"] - if spec["toolset"] == "host": - proj_filename += "_host" - proj_filename = proj_filename + options.suffix + msvs_version.ProjectExtension() - - build_file = gyp.common.BuildFile(qualified_target) - proj_path = os.path.join(os.path.dirname(build_file), proj_filename) - fix_prefix = None - if options.generator_output: - project_dir_path = os.path.dirname(os.path.abspath(proj_path)) - proj_path = os.path.join(options.generator_output, proj_path) - fix_prefix = gyp.common.RelativePath( - project_dir_path, os.path.dirname(proj_path) - ) - return proj_path, fix_prefix - - -def _GetPlatformOverridesOfProject(spec): - # Prepare a dict indicating which project configurations are used for which - # solution configurations for this target. - config_platform_overrides = {} - for config_name, c in spec["configurations"].items(): - config_fullname = _ConfigFullName(config_name, c) - platform = c.get("msvs_target_platform", _ConfigPlatform(c)) - base_name = _ConfigBaseName(config_name, _ConfigPlatform(c)) - fixed_config_fullname = f"{base_name}|{platform}" - if spec["toolset"] == "host" and generator_supports_multiple_toolsets: - fixed_config_fullname = f"{config_name}|x64" - config_platform_overrides[config_fullname] = fixed_config_fullname - return config_platform_overrides - - -def _CreateProjectObjects(target_list, target_dicts, options, msvs_version): - """Create a MSVSProject object for the targets found in target list. - - Arguments: - target_list: the list of targets to generate project objects for. - target_dicts: the dictionary of specifications. - options: global generator options. - msvs_version: the MSVSVersion object. - Returns: - A set of created projects, keyed by target. - """ - global fixpath_prefix - # Generate each project. - projects = {} - for qualified_target in target_list: - spec = target_dicts[qualified_target] - proj_path, fixpath_prefix = _GetPathOfProject( - qualified_target, spec, options, msvs_version - ) - guid = _GetGuidOfProject(proj_path, spec) - overrides = _GetPlatformOverridesOfProject(spec) - build_file = gyp.common.BuildFile(qualified_target) - # Create object for this project. - target_name = spec["target_name"] - if spec["toolset"] == "host": - target_name += "_host" - obj = MSVSNew.MSVSProject( - proj_path, - name=target_name, - guid=guid, - spec=spec, - build_file=build_file, - config_platform_overrides=overrides, - fixpath_prefix=fixpath_prefix, - ) - # Set project toolset if any (MS build only) - if msvs_version.UsesVcxproj(): - obj.set_msbuild_toolset( - _GetMsbuildToolsetOfProject(proj_path, spec, msvs_version) - ) - projects[qualified_target] = obj - # Set all the dependencies, but not if we are using an external builder like - # ninja - for project in projects.values(): - if not project.spec.get("msvs_external_builder"): - deps = project.spec.get("dependencies", []) - deps = [projects[d] for d in deps] - project.set_dependencies(deps) - return projects - - -def _InitNinjaFlavor(params, target_list, target_dicts): - """Initialize targets for the ninja flavor. 
- - This sets up the necessary variables in the targets to generate msvs projects - that use ninja as an external builder. The variables in the spec are only set - if they have not been set. This allows individual specs to override the - default values initialized here. - Arguments: - params: Params provided to the generator. - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - """ - for qualified_target in target_list: - spec = target_dicts[qualified_target] - if spec.get("msvs_external_builder"): - # The spec explicitly defined an external builder, so don't change it. - continue - - path_to_ninja = spec.get("msvs_path_to_ninja", "ninja.exe") - - spec["msvs_external_builder"] = "ninja" - if not spec.get("msvs_external_builder_out_dir"): - gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target) - gyp_dir = os.path.dirname(gyp_file) - configuration = "$(Configuration)" - if params.get("target_arch") == "x64": - configuration += "_x64" - if params.get("target_arch") == "arm64": - configuration += "_arm64" - spec["msvs_external_builder_out_dir"] = os.path.join( - gyp.common.RelativePath(params["options"].toplevel_dir, gyp_dir), - ninja_generator.ComputeOutputDir(params), - configuration, - ) - if not spec.get("msvs_external_builder_build_cmd"): - spec["msvs_external_builder_build_cmd"] = [ - path_to_ninja, - "-C", - "$(OutDir)", - "$(ProjectName)", - ] - if not spec.get("msvs_external_builder_clean_cmd"): - spec["msvs_external_builder_clean_cmd"] = [ - path_to_ninja, - "-C", - "$(OutDir)", - "-tclean", - "$(ProjectName)", - ] - - -def CalculateVariables(default_variables, params): - """Generated variables that require params to be known.""" - - generator_flags = params.get("generator_flags", {}) - - # Select project file format version (if unset, default to auto detecting). - msvs_version = MSVSVersion.SelectVisualStudioVersion( - generator_flags.get("msvs_version", "auto") - ) - # Stash msvs_version for later (so we don't have to probe the system twice). - params["msvs_version"] = msvs_version - - # Set a variable so conditions can be based on msvs_version. - default_variables["MSVS_VERSION"] = msvs_version.ShortName() - - # To determine processor word size on Windows, in addition to checking - # PROCESSOR_ARCHITECTURE (which reflects the word size of the current - # process), it is also necessary to check PROCESSOR_ARCITEW6432 (which - # contains the actual word size of the system when running thru WOW64). 
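The comment above is worth a concrete check: a 32-bit process on 64-bit Windows reports `PROCESSOR_ARCHITECTURE=x86`, and only `PROCESSOR_ARCHITEW6432` reveals the real word size. A sketch of the test the next few lines perform (`msvs_os_bits` is an illustrative name; the original uses `.find("64") >= 0`, which is equivalent):

```python
import os

def msvs_os_bits(environ=os.environ):
    # PROCESSOR_ARCHITECTURE describes the current process, which is "x86"
    # under WOW64, so PROCESSOR_ARCHITEW6432 must be consulted as well.
    if "64" in environ.get("PROCESSOR_ARCHITECTURE", "") or "64" in environ.get(
        "PROCESSOR_ARCHITEW6432", ""
    ):
        return 64
    return 32

# A 32-bit python.exe on 64-bit Windows:
print(msvs_os_bits({"PROCESSOR_ARCHITECTURE": "x86",
                    "PROCESSOR_ARCHITEW6432": "AMD64"}))  # 64
```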
- if ( - os.environ.get("PROCESSOR_ARCHITECTURE", "").find("64") >= 0 - or os.environ.get("PROCESSOR_ARCHITEW6432", "").find("64") >= 0 - ): - default_variables["MSVS_OS_BITS"] = 64 - else: - default_variables["MSVS_OS_BITS"] = 32 - - if gyp.common.GetFlavor(params) == "ninja": - default_variables["SHARED_INTERMEDIATE_DIR"] = "$(OutDir)gen" - - -def PerformBuild(data, configurations, params): - options = params["options"] - msvs_version = params["msvs_version"] - devenv = os.path.join(msvs_version.path, "Common7", "IDE", "devenv.com") - - for build_file, build_file_dict in data.items(): - (build_file_root, build_file_ext) = os.path.splitext(build_file) - if build_file_ext != ".gyp": - continue - sln_path = build_file_root + options.suffix + ".sln" - if options.generator_output: - sln_path = os.path.join(options.generator_output, sln_path) - - for config in configurations: - arguments = [devenv, sln_path, "/Build", config] - print(f"Building [{config}]: {arguments}") - subprocess.check_call(arguments) - - -def CalculateGeneratorInputInfo(params): - if params.get("flavor") == "ninja": - toplevel = params["options"].toplevel_dir - qualified_out_dir = os.path.normpath( - os.path.join( - toplevel, - ninja_generator.ComputeOutputDir(params), - "gypfiles-msvs-ninja", - ) - ) - - global generator_filelist_paths - generator_filelist_paths = { - "toplevel": toplevel, - "qualified_out_dir": qualified_out_dir, - } - - -def GenerateOutput(target_list, target_dicts, data, params): - """Generate .sln and .vcproj files. - - This is the entry point for this generator. - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - data: Dictionary containing per .gyp data. - """ - global fixpath_prefix - - options = params["options"] - - # Get the project file format version back out of where we stashed it in - # GeneratorCalculatedVariables. - msvs_version = params["msvs_version"] - - generator_flags = params.get("generator_flags", {}) - - # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT. - (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts) - - # Optionally use the large PDB workaround for targets marked with - # 'msvs_large_pdb': 1. - (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims( - target_list, target_dicts, generator_default_variables - ) - - # Optionally configure each spec to use ninja as the external builder. - if params.get("flavor") == "ninja": - _InitNinjaFlavor(params, target_list, target_dicts) - - # Prepare the set of configurations. - configs = set() - for qualified_target in target_list: - spec = target_dicts[qualified_target] - for config_name, config in spec["configurations"].items(): - config_name = _ConfigFullName(config_name, config) - configs.add(config_name) - if config_name == "Release|arm64": - configs.add("Release|x64") - configs = list(configs) - - # Figure out all the projects that will be generated and their guids - project_objects = _CreateProjectObjects( - target_list, target_dicts, options, msvs_version - ) - - # Generate each project. 
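For reference, the `PerformBuild` hook above reduces to a small loop: each `.gyp` file maps to a sibling `.sln`, and every requested configuration is handed to `devenv.com /Build`. A self-contained sketch, assuming `devenv` points at a valid devenv.com:

```python
import os
import subprocess

def build_solutions(data, configurations, devenv, suffix=""):
    # One solution per .gyp build file; build each configuration in turn.
    for build_file in data:
        root, ext = os.path.splitext(build_file)
        if ext != ".gyp":
            continue
        sln_path = root + suffix + ".sln"
        for config in configurations:
            arguments = [devenv, sln_path, "/Build", config]
            print(f"Building [{config}]: {arguments}")
            subprocess.check_call(arguments)
```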
- missing_sources = [] - for project in project_objects.values(): - fixpath_prefix = project.fixpath_prefix - missing_sources.extend( - _GenerateProject(project, options, msvs_version, generator_flags, spec) - ) - fixpath_prefix = None - - for build_file in data: - # Validate build_file extension - target_only_configs = configs - if generator_supports_multiple_toolsets: - target_only_configs = [i for i in configs if i.endswith("arm64")] - if not build_file.endswith(".gyp"): - continue - sln_path = os.path.splitext(build_file)[0] + options.suffix + ".sln" - if options.generator_output: - sln_path = os.path.join(options.generator_output, sln_path) - # Get projects in the solution, and their dependents. - sln_projects = gyp.common.BuildFileTargets(target_list, build_file) - sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects) - # Create folder hierarchy. - root_entries = _GatherSolutionFolders( - sln_projects, project_objects, flat=msvs_version.FlatSolution() - ) - # Create solution. - sln = MSVSNew.MSVSSolution( - sln_path, - entries=root_entries, - variants=target_only_configs, - websiteProperties=False, - version=msvs_version, - ) - sln.Write() - - if missing_sources: - error_message = "Missing input files:\n" + "\n".join(set(missing_sources)) - if generator_flags.get("msvs_error_on_missing_sources", False): - raise GypError(error_message) - else: - print("Warning: " + error_message, file=sys.stdout) - - -def _GenerateMSBuildFiltersFile( - filters_path, - source_files, - rule_dependencies, - extension_to_rule_name, - platforms, - toolset, -): - """Generate the filters file. - - This file is used by Visual Studio to organize the presentation of source - files into folders. - - Arguments: - filters_path: The path of the file to be created. - source_files: The hierarchical structure of all the sources. - extension_to_rule_name: A dictionary mapping file extensions to rules. - """ - filter_group = [] - source_group = [] - _AppendFiltersForMSBuild( - "", - source_files, - rule_dependencies, - extension_to_rule_name, - platforms, - toolset, - filter_group, - source_group, - ) - if filter_group: - content = [ - "Project", - { - "ToolsVersion": "4.0", - "xmlns": "http://schemas.microsoft.com/developer/msbuild/2003", - }, - ["ItemGroup"] + filter_group, - ["ItemGroup"] + source_group, - ] - easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True) - elif os.path.exists(filters_path): - # We don't need this filter anymore. Delete the old filter file. - os.unlink(filters_path) - - -def _AppendFiltersForMSBuild( - parent_filter_name, - sources, - rule_dependencies, - extension_to_rule_name, - platforms, - toolset, - filter_group, - source_group, -): - """Creates the list of filters and sources to be added in the filter file. - - Args: - parent_filter_name: The name of the filter under which the sources are - found. - sources: The hierarchy of filters and sources to process. - extension_to_rule_name: A dictionary mapping file extensions to rules. - filter_group: The list to which filter entries will be appended. - source_group: The list to which source entries will be appended. - """ - for source in sources: - if isinstance(source, MSVSProject.Filter): - # We have a sub-filter. Create the name of that sub-filter. - if not parent_filter_name: - filter_name = source.name - else: - filter_name = f"{parent_filter_name}\\{source.name}" - # Add the filter to the group. 
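`_GenerateMSBuildFiltersFile` above, and the appends that follow, build the document as nested Python lists in the `[tag, {attributes}, *children]` shape that `easy_xml` serializes. The toy serializer below (an illustration, not the real `easy_xml` module; it skips attribute escaping and pretty-printing) shows what one filter entry becomes:

```python
def to_xml(spec):
    # [tag, {attrs}, child, ...] -> '<tag attrs>children</tag>';
    # plain strings are text nodes.
    if isinstance(spec, str):
        return spec
    tag, rest = spec[0], list(spec[1:])
    attrs = ""
    if rest and isinstance(rest[0], dict):
        attrs = "".join(f' {k}="{v}"' for k, v in rest[0].items())
        rest = rest[1:]
    body = "".join(to_xml(child) for child in rest)
    return f"<{tag}{attrs}>{body}</{tag}>" if body else f"<{tag}{attrs}/>"

entry = ["Filter", {"Include": "src\\lib"},
         ["UniqueIdentifier", "{DEADBEEF-0000-0000-0000-000000000000}"]]
print(to_xml(entry))
# <Filter Include="src\lib"><UniqueIdentifier>{DEADBEEF-0000-0000-0000-000000000000}</UniqueIdentifier></Filter>
```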
- filter_group.append( - [ - "Filter", - {"Include": filter_name}, - ["UniqueIdentifier", MSVSNew.MakeGuid(source.name)], - ] - ) - # Recurse and add its dependents. - _AppendFiltersForMSBuild( - filter_name, - source.contents, - rule_dependencies, - extension_to_rule_name, - platforms, - toolset, - filter_group, - source_group, - ) - else: - # It's a source. Create a source entry. - _, element = _MapFileToMsBuildSourceType( - source, rule_dependencies, extension_to_rule_name, platforms, toolset - ) - source_entry = [element, {"Include": source}] - # Specify the filter it is part of, if any. - if parent_filter_name: - source_entry.append(["Filter", parent_filter_name]) - source_group.append(source_entry) - - -def _MapFileToMsBuildSourceType( - source, rule_dependencies, extension_to_rule_name, platforms, toolset -): - """Returns the group and element type of the source file. - - Arguments: - source: The source file name. - extension_to_rule_name: A dictionary mapping file extensions to rules. - - Returns: - A pair of (group this file should be part of, the label of element) - """ - _, ext = os.path.splitext(source) - ext = ext.lower() - if ext in extension_to_rule_name: - group = "rule" - element = extension_to_rule_name[ext] - elif ext in [".cc", ".cpp", ".c", ".cxx", ".mm"]: - group = "compile" - element = "ClCompile" - elif ext in [".h", ".hxx"]: - group = "include" - element = "ClInclude" - elif ext == ".rc": - group = "resource" - element = "ResourceCompile" - elif ext in [".s", ".asm"]: - group = "masm" - element = "MASM" - if "arm64" in platforms and toolset == "target": - element = "MARMASM" - elif ext == ".idl": - group = "midl" - element = "Midl" - elif source in rule_dependencies: - group = "rule_dependency" - element = "CustomBuild" - else: - group = "none" - element = "None" - return (group, element) - - -def _GenerateRulesForMSBuild( - output_dir, - options, - spec, - sources, - excluded_sources, - props_files_of_rules, - targets_files_of_rules, - actions_to_add, - rule_dependencies, - extension_to_rule_name, -): - # MSBuild rules are implemented using three files: an XML file, a .targets - # file and a .props file. - # For more details see: - # https://devblogs.microsoft.com/cppblog/quick-help-on-vs2010-custom-build-rule/ - rules = spec.get("rules", []) - rules_native = [r for r in rules if not int(r.get("msvs_external_rule", 0))] - rules_external = [r for r in rules if int(r.get("msvs_external_rule", 0))] - - msbuild_rules = [] - for rule in rules_native: - # Skip a rule with no action and no inputs. 
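`_MapFileToMsBuildSourceType` above is, at its core, an extension table; the custom-rule, arm64 MARMASM, and rule-dependency branches decorate it. A condensed sketch (the table and function names are mine, and the decorations are deliberately omitted):

```python
import os

# The file extension picks the MSBuild item element a source is emitted under.
EXTENSION_TO_ELEMENT = {
    ".cc": "ClCompile", ".cpp": "ClCompile", ".c": "ClCompile",
    ".cxx": "ClCompile", ".mm": "ClCompile",
    ".h": "ClInclude", ".hxx": "ClInclude",
    ".rc": "ResourceCompile",
    ".s": "MASM", ".asm": "MASM",
    ".idl": "Midl",
}

def msbuild_element(source):
    return EXTENSION_TO_ELEMENT.get(os.path.splitext(source)[1].lower(), "None")

print(msbuild_element("foo.cpp"))  # ClCompile
print(msbuild_element("foo.rc"))   # ResourceCompile
print(msbuild_element("foo.txt"))  # None
```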
- if "action" not in rule and not rule.get("rule_sources", []): - continue - msbuild_rule = MSBuildRule(rule, spec) - msbuild_rules.append(msbuild_rule) - rule_dependencies.update(msbuild_rule.additional_dependencies.split(";")) - extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name - if msbuild_rules: - base = spec["target_name"] + options.suffix - props_name = base + ".props" - targets_name = base + ".targets" - xml_name = base + ".xml" - - props_files_of_rules.add(props_name) - targets_files_of_rules.add(targets_name) - - props_path = os.path.join(output_dir, props_name) - targets_path = os.path.join(output_dir, targets_name) - xml_path = os.path.join(output_dir, xml_name) - - _GenerateMSBuildRulePropsFile(props_path, msbuild_rules) - _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules) - _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules) - - if rules_external: - _GenerateExternalRules( - rules_external, output_dir, spec, sources, options, actions_to_add - ) - _AdjustSourcesForRules(rules, sources, excluded_sources, True) - - -class MSBuildRule: - """Used to store information used to generate an MSBuild rule. - - Attributes: - rule_name: The rule name, sanitized to use in XML. - target_name: The name of the target. - after_targets: The name of the AfterTargets element. - before_targets: The name of the BeforeTargets element. - depends_on: The name of the DependsOn element. - compute_output: The name of the ComputeOutput element. - dirs_to_make: The name of the DirsToMake element. - inputs: The name of the _inputs element. - tlog: The name of the _tlog element. - extension: The extension this rule applies to. - description: The message displayed when this rule is invoked. - additional_dependencies: A string listing additional dependencies. - outputs: The outputs of this rule. - command: The command used to run the rule. - """ - - def __init__(self, rule, spec): - self.display_name = rule["rule_name"] - # Assure that the rule name is only characters and numbers - self.rule_name = re.sub(r"\W", "_", self.display_name) - # Create the various element names, following the example set by the - # Visual Studio 2008 to 2010 conversion. I don't know if VS2010 - # is sensitive to the exact names. - self.target_name = "_" + self.rule_name - self.after_targets = self.rule_name + "AfterTargets" - self.before_targets = self.rule_name + "BeforeTargets" - self.depends_on = self.rule_name + "DependsOn" - self.compute_output = "Compute%sOutput" % self.rule_name - self.dirs_to_make = self.rule_name + "DirsToMake" - self.inputs = self.rule_name + "_inputs" - self.tlog = self.rule_name + "_tlog" - self.extension = rule["extension"] - if not self.extension.startswith("."): - self.extension = "." 
+ self.extension - - self.description = MSVSSettings.ConvertVCMacrosToMSBuild( - rule.get("message", self.rule_name) - ) - old_additional_dependencies = _FixPaths(rule.get("inputs", [])) - self.additional_dependencies = ";".join( - [ - MSVSSettings.ConvertVCMacrosToMSBuild(i) - for i in old_additional_dependencies - ] - ) - old_outputs = _FixPaths(rule.get("outputs", [])) - self.outputs = ";".join( - [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in old_outputs] - ) - old_command = _BuildCommandLineForRule( - spec, rule, has_input_path=True, do_setup_env=True - ) - self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command) - - -def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules): - """Generate the .props file.""" - content = [ - "Project", - {"xmlns": "http://schemas.microsoft.com/developer/msbuild/2003"}, - ] - for rule in msbuild_rules: - content.extend( - [ - [ - "PropertyGroup", - { - "Condition": "'$(%s)' == '' and '$(%s)' == '' and " - "'$(ConfigurationType)' != 'Makefile'" - % (rule.before_targets, rule.after_targets) - }, - [rule.before_targets, "Midl"], - [rule.after_targets, "CustomBuild"], - ], - [ - "PropertyGroup", - [ - rule.depends_on, - {"Condition": "'$(ConfigurationType)' != 'Makefile'"}, - "_SelectedFiles;$(%s)" % rule.depends_on, - ], - ], - [ - "ItemDefinitionGroup", - [ - rule.rule_name, - ["CommandLineTemplate", rule.command], - ["Outputs", rule.outputs], - ["ExecutionDescription", rule.description], - ["AdditionalDependencies", rule.additional_dependencies], - ], - ], - ] - ) - easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True) - - -def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules): - """Generate the .targets file.""" - content = [ - "Project", - {"xmlns": "http://schemas.microsoft.com/developer/msbuild/2003"}, - ] - item_group = [ - "ItemGroup", - [ - "PropertyPageSchema", - {"Include": "$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml"}, - ], - ] - for rule in msbuild_rules: - item_group.append( - [ - "AvailableItemName", - {"Include": rule.rule_name}, - ["Targets", rule.target_name], - ] - ) - content.append(item_group) - - for rule in msbuild_rules: - content.append( - [ - "UsingTask", - { - "TaskName": rule.rule_name, - "TaskFactory": "XamlTaskFactory", - "AssemblyName": "Microsoft.Build.Tasks.v4.0", - }, - ["Task", "$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml"], - ] - ) - for rule in msbuild_rules: - rule_name = rule.rule_name - target_outputs = "%%(%s.Outputs)" % rule_name - target_inputs = ( - "%%(%s.Identity);%%(%s.AdditionalDependencies);" "$(MSBuildProjectFile)" - ) % (rule_name, rule_name) - rule_inputs = "%%(%s.Identity)" % rule_name - extension_condition = ( - "'%(Extension)'=='.obj' or " - "'%(Extension)'=='.res' or " - "'%(Extension)'=='.rsc' or " - "'%(Extension)'=='.lib'" - ) - remove_section = [ - "ItemGroup", - {"Condition": "'@(SelectedFiles)' != ''"}, - [ - rule_name, - { - "Remove": "@(%s)" % rule_name, - "Condition": "'%(Identity)' != '@(SelectedFiles)'", - }, - ], - ] - inputs_section = [ - "ItemGroup", - [rule.inputs, {"Include": "%%(%s.AdditionalDependencies)" % rule_name}], - ] - logging_section = [ - "ItemGroup", - [ - rule.tlog, - { - "Include": "%%(%s.Outputs)" % rule_name, - "Condition": ( - "'%%(%s.Outputs)' != '' and " - "'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name) - ), - }, - ["Source", "@(%s, '|')" % rule_name], - ["Inputs", "@(%s -> '%%(Fullpath)', ';')" % rule.inputs], - ], - ] - message_section = [ - "Message", - {"Importance": "High", 
"Text": "%%(%s.ExecutionDescription)" % rule_name}, - ] - write_tlog_section = [ - "WriteLinesToFile", - { - "Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " - "'true'" % (rule.tlog, rule.tlog), - "File": "$(IntDir)$(ProjectName).write.1.tlog", - "Lines": "^%%(%s.Source);@(%s->'%%(Fullpath)')" - % (rule.tlog, rule.tlog), - }, - ] - read_tlog_section = [ - "WriteLinesToFile", - { - "Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " - "'true'" % (rule.tlog, rule.tlog), - "File": "$(IntDir)$(ProjectName).read.1.tlog", - "Lines": f"^%({rule.tlog}.Source);%({rule.tlog}.Inputs)", - }, - ] - command_and_input_section = [ - rule_name, - { - "Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " - "'true'" % (rule_name, rule_name), - "EchoOff": "true", - "StandardOutputImportance": "High", - "StandardErrorImportance": "High", - "CommandLineTemplate": "%%(%s.CommandLineTemplate)" % rule_name, - "AdditionalOptions": "%%(%s.AdditionalOptions)" % rule_name, - "Inputs": rule_inputs, - }, - ] - content.extend( - [ - [ - "Target", - { - "Name": rule.target_name, - "BeforeTargets": "$(%s)" % rule.before_targets, - "AfterTargets": "$(%s)" % rule.after_targets, - "Condition": "'@(%s)' != ''" % rule_name, - "DependsOnTargets": "$(%s);%s" - % (rule.depends_on, rule.compute_output), - "Outputs": target_outputs, - "Inputs": target_inputs, - }, - remove_section, - inputs_section, - logging_section, - message_section, - write_tlog_section, - read_tlog_section, - command_and_input_section, - ], - [ - "PropertyGroup", - [ - "ComputeLinkInputsTargets", - "$(ComputeLinkInputsTargets);", - "%s;" % rule.compute_output, - ], - [ - "ComputeLibInputsTargets", - "$(ComputeLibInputsTargets);", - "%s;" % rule.compute_output, - ], - ], - [ - "Target", - { - "Name": rule.compute_output, - "Condition": "'@(%s)' != ''" % rule_name, - }, - [ - "ItemGroup", - [ - rule.dirs_to_make, - { - "Condition": "'@(%s)' != '' and " - "'%%(%s.ExcludedFromBuild)' != 'true'" - % (rule_name, rule_name), - "Include": "%%(%s.Outputs)" % rule_name, - }, - ], - [ - "Link", - { - "Include": "%%(%s.Identity)" % rule.dirs_to_make, - "Condition": extension_condition, - }, - ], - [ - "Lib", - { - "Include": "%%(%s.Identity)" % rule.dirs_to_make, - "Condition": extension_condition, - }, - ], - [ - "ImpLib", - { - "Include": "%%(%s.Identity)" % rule.dirs_to_make, - "Condition": extension_condition, - }, - ], - ], - [ - "MakeDir", - { - "Directories": ( - "@(%s->'%%(RootDir)%%(Directory)')" % rule.dirs_to_make - ) - }, - ], - ], - ] - ) - easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True) - - -def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules): - # Generate the .xml file - content = [ - "ProjectSchemaDefinitions", - { - "xmlns": ( - "clr-namespace:Microsoft.Build.Framework.XamlTypes;" - "assembly=Microsoft.Build.Framework" - ), - "xmlns:x": "http://schemas.microsoft.com/winfx/2006/xaml", - "xmlns:sys": "clr-namespace:System;assembly=mscorlib", - "xmlns:transformCallback": "Microsoft.Cpp.Dev10.ConvertPropertyCallback", - }, - ] - for rule in msbuild_rules: - content.extend( - [ - [ - "Rule", - { - "Name": rule.rule_name, - "PageTemplate": "tool", - "DisplayName": rule.display_name, - "Order": "200", - }, - [ - "Rule.DataSource", - [ - "DataSource", - {"Persistence": "ProjectFile", "ItemType": rule.rule_name}, - ], - ], - [ - "Rule.Categories", - [ - "Category", - {"Name": "General"}, - ["Category.DisplayName", ["sys:String", "General"]], - ], - [ - "Category", - {"Name": "Command Line", 
"Subtype": "CommandLine"}, - ["Category.DisplayName", ["sys:String", "Command Line"]], - ], - ], - [ - "StringListProperty", - { - "Name": "Inputs", - "Category": "Command Line", - "IsRequired": "true", - "Switch": " ", - }, - [ - "StringListProperty.DataSource", - [ - "DataSource", - { - "Persistence": "ProjectFile", - "ItemType": rule.rule_name, - "SourceType": "Item", - }, - ], - ], - ], - [ - "StringProperty", - { - "Name": "CommandLineTemplate", - "DisplayName": "Command Line", - "Visible": "False", - "IncludeInCommandLine": "False", - }, - ], - [ - "DynamicEnumProperty", - { - "Name": rule.before_targets, - "Category": "General", - "EnumProvider": "Targets", - "IncludeInCommandLine": "False", - }, - [ - "DynamicEnumProperty.DisplayName", - ["sys:String", "Execute Before"], - ], - [ - "DynamicEnumProperty.Description", - [ - "sys:String", - "Specifies the targets for the build customization" - " to run before.", - ], - ], - [ - "DynamicEnumProperty.ProviderSettings", - [ - "NameValuePair", - { - "Name": "Exclude", - "Value": "^%s|^Compute" % rule.before_targets, - }, - ], - ], - [ - "DynamicEnumProperty.DataSource", - [ - "DataSource", - { - "Persistence": "ProjectFile", - "HasConfigurationCondition": "true", - }, - ], - ], - ], - [ - "DynamicEnumProperty", - { - "Name": rule.after_targets, - "Category": "General", - "EnumProvider": "Targets", - "IncludeInCommandLine": "False", - }, - [ - "DynamicEnumProperty.DisplayName", - ["sys:String", "Execute After"], - ], - [ - "DynamicEnumProperty.Description", - [ - "sys:String", - ( - "Specifies the targets for the build customization" - " to run after." - ), - ], - ], - [ - "DynamicEnumProperty.ProviderSettings", - [ - "NameValuePair", - { - "Name": "Exclude", - "Value": "^%s|^Compute" % rule.after_targets, - }, - ], - ], - [ - "DynamicEnumProperty.DataSource", - [ - "DataSource", - { - "Persistence": "ProjectFile", - "ItemType": "", - "HasConfigurationCondition": "true", - }, - ], - ], - ], - [ - "StringListProperty", - { - "Name": "Outputs", - "DisplayName": "Outputs", - "Visible": "False", - "IncludeInCommandLine": "False", - }, - ], - [ - "StringProperty", - { - "Name": "ExecutionDescription", - "DisplayName": "Execution Description", - "Visible": "False", - "IncludeInCommandLine": "False", - }, - ], - [ - "StringListProperty", - { - "Name": "AdditionalDependencies", - "DisplayName": "Additional Dependencies", - "IncludeInCommandLine": "False", - "Visible": "false", - }, - ], - [ - "StringProperty", - { - "Subtype": "AdditionalOptions", - "Name": "AdditionalOptions", - "Category": "Command Line", - }, - [ - "StringProperty.DisplayName", - ["sys:String", "Additional Options"], - ], - [ - "StringProperty.Description", - ["sys:String", "Additional Options"], - ], - ], - ], - [ - "ItemType", - {"Name": rule.rule_name, "DisplayName": rule.display_name}, - ], - [ - "FileExtension", - {"Name": "*" + rule.extension, "ContentType": rule.rule_name}, - ], - [ - "ContentType", - { - "Name": rule.rule_name, - "DisplayName": "", - "ItemType": rule.rule_name, - }, - ], - ] - ) - easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True) - - -def _GetConfigurationAndPlatform(name, settings, spec): - configuration = name.rsplit("_", 1)[0] - platform = settings.get("msvs_configuration_platform", "Win32") - if spec["toolset"] == "host" and platform == "arm64": - platform = "x64" # Host-only tools are always built for x64 - return (configuration, platform) - - -def _GetConfigurationCondition(name, settings, spec): - return 
r"'$(Configuration)|$(Platform)'=='%s|%s'" % _GetConfigurationAndPlatform( - name, settings, spec - ) - - -def _GetMSBuildProjectConfigurations(configurations, spec): - group = ["ItemGroup", {"Label": "ProjectConfigurations"}] - for (name, settings) in sorted(configurations.items()): - configuration, platform = _GetConfigurationAndPlatform(name, settings, spec) - designation = f"{configuration}|{platform}" - group.append( - [ - "ProjectConfiguration", - {"Include": designation}, - ["Configuration", configuration], - ["Platform", platform], - ] - ) - return [group] - - -def _GetMSBuildGlobalProperties(spec, version, guid, gyp_file_name): - namespace = os.path.splitext(gyp_file_name)[0] - properties = [ - [ - "PropertyGroup", - {"Label": "Globals"}, - ["ProjectGuid", guid], - ["Keyword", "Win32Proj"], - ["RootNamespace", namespace], - ["IgnoreWarnCompileDuplicatedFilename", "true"], - ] - ] - - if ( - os.environ.get("PROCESSOR_ARCHITECTURE") == "AMD64" - or os.environ.get("PROCESSOR_ARCHITEW6432") == "AMD64" - ): - properties[0].append(["PreferredToolArchitecture", "x64"]) - - if spec.get("msvs_target_platform_version"): - target_platform_version = spec.get("msvs_target_platform_version") - properties[0].append(["WindowsTargetPlatformVersion", target_platform_version]) - if spec.get("msvs_target_platform_minversion"): - target_platform_minversion = spec.get("msvs_target_platform_minversion") - properties[0].append( - ["WindowsTargetPlatformMinVersion", target_platform_minversion] - ) - else: - properties[0].append( - ["WindowsTargetPlatformMinVersion", target_platform_version] - ) - - if spec.get("msvs_enable_winrt"): - properties[0].append(["DefaultLanguage", "en-US"]) - properties[0].append(["AppContainerApplication", "true"]) - if spec.get("msvs_application_type_revision"): - app_type_revision = spec.get("msvs_application_type_revision") - properties[0].append(["ApplicationTypeRevision", app_type_revision]) - else: - properties[0].append(["ApplicationTypeRevision", "8.1"]) - if spec.get("msvs_enable_winphone"): - properties[0].append(["ApplicationType", "Windows Phone"]) - else: - properties[0].append(["ApplicationType", "Windows Store"]) - - platform_name = None - msvs_windows_sdk_version = None - for configuration in spec["configurations"].values(): - platform_name = platform_name or _ConfigPlatform(configuration) - msvs_windows_sdk_version = ( - msvs_windows_sdk_version - or _ConfigWindowsTargetPlatformVersion(configuration, version) - ) - if platform_name and msvs_windows_sdk_version: - break - if msvs_windows_sdk_version: - properties[0].append( - ["WindowsTargetPlatformVersion", str(msvs_windows_sdk_version)] - ) - elif version.compatible_sdks: - raise GypError( - "%s requires any SDK of %s version, but none were found" - % (version.description, version.compatible_sdks) - ) - - if platform_name == "ARM": - properties[0].append(["WindowsSDKDesktopARMSupport", "true"]) - - return properties - - -def _GetMSBuildConfigurationDetails(spec, build_file): - properties = {} - for name, settings in spec["configurations"].items(): - msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file) - condition = _GetConfigurationCondition(name, settings, spec) - character_set = msbuild_attributes.get("CharacterSet") - vctools_version = msbuild_attributes.get("VCToolsVersion") - config_type = msbuild_attributes.get("ConfigurationType") - _AddConditionalProperty(properties, condition, "ConfigurationType", config_type) - spectre_mitigation = msbuild_attributes.get('SpectreMitigation') - if 
spectre_mitigation: - _AddConditionalProperty(properties, condition, "SpectreMitigation", - spectre_mitigation) - if config_type == "Driver": - _AddConditionalProperty(properties, condition, "DriverType", "WDM") - _AddConditionalProperty( - properties, condition, "TargetVersion", _ConfigTargetVersion(settings) - ) - if character_set and "msvs_enable_winrt" not in spec: - _AddConditionalProperty( - properties, condition, "CharacterSet", character_set - ) - if vctools_version and "msvs_enable_winrt" not in spec: - _AddConditionalProperty( - properties, condition, "VCToolsVersion", vctools_version - ) - return _GetMSBuildPropertyGroup(spec, "Configuration", properties) - - -def _GetMSBuildLocalProperties(msbuild_toolset): - # Currently the only local property we support is PlatformToolset - properties = {} - if msbuild_toolset: - properties = [ - [ - "PropertyGroup", - {"Label": "Locals"}, - ["PlatformToolset", msbuild_toolset], - ] - ] - return properties - - -def _GetMSBuildPropertySheets(configurations, spec): - user_props = r"$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" - additional_props = {} - props_specified = False - for name, settings in sorted(configurations.items()): - configuration = _GetConfigurationCondition(name, settings, spec) - if "msbuild_props" in settings: - additional_props[configuration] = _FixPaths(settings["msbuild_props"]) - props_specified = True - else: - additional_props[configuration] = "" - - if not props_specified: - return [ - [ - "ImportGroup", - {"Label": "PropertySheets"}, - [ - "Import", - { - "Project": user_props, - "Condition": "exists('%s')" % user_props, - "Label": "LocalAppDataPlatform", - }, - ], - ] - ] - else: - sheets = [] - for condition, props in additional_props.items(): - import_group = [ - "ImportGroup", - {"Label": "PropertySheets", "Condition": condition}, - [ - "Import", - { - "Project": user_props, - "Condition": "exists('%s')" % user_props, - "Label": "LocalAppDataPlatform", - }, - ], - ] - for props_file in props: - import_group.append(["Import", {"Project": props_file}]) - sheets.append(import_group) - return sheets - - -def _ConvertMSVSBuildAttributes(spec, config, build_file): - config_type = _GetMSVSConfigurationType(spec, build_file) - msvs_attributes = _GetMSVSAttributes(spec, config, config_type) - msbuild_attributes = {} - for a in msvs_attributes: - if a in ["IntermediateDirectory", "OutputDirectory"]: - directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a]) - if not directory.endswith("\\"): - directory += "\\" - msbuild_attributes[a] = directory - elif a == "CharacterSet": - msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a]) - elif a == "ConfigurationType": - msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a]) - elif a == "SpectreMitigation": - msbuild_attributes[a] = msvs_attributes[a] - elif a == "VCToolsVersion": - msbuild_attributes[a] = msvs_attributes[a] - else: - print("Warning: Do not know how to convert MSVS attribute " + a) - return msbuild_attributes - - -def _ConvertMSVSCharacterSet(char_set): - if char_set.isdigit(): - char_set = {"0": "MultiByte", "1": "Unicode", "2": "MultiByte"}[char_set] - return char_set - - -def _ConvertMSVSConfigurationType(config_type): - if config_type.isdigit(): - config_type = { - "1": "Application", - "2": "DynamicLibrary", - "4": "StaticLibrary", - "5": "Driver", - "10": "Utility", - }[config_type] - return config_type - - -def _GetMSBuildAttributes(spec, config, build_file): - if "msbuild_configuration_attributes" not 
in config: - msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file) - - else: - config_type = _GetMSVSConfigurationType(spec, build_file) - config_type = _ConvertMSVSConfigurationType(config_type) - msbuild_attributes = config.get("msbuild_configuration_attributes", {}) - msbuild_attributes.setdefault("ConfigurationType", config_type) - output_dir = msbuild_attributes.get( - "OutputDirectory", "$(SolutionDir)$(Configuration)" - ) - msbuild_attributes["OutputDirectory"] = _FixPath(output_dir) + "\\" - if "IntermediateDirectory" not in msbuild_attributes: - intermediate = _FixPath("$(Configuration)") + "\\" - msbuild_attributes["IntermediateDirectory"] = intermediate - if "CharacterSet" in msbuild_attributes: - msbuild_attributes["CharacterSet"] = _ConvertMSVSCharacterSet( - msbuild_attributes["CharacterSet"] - ) - if "TargetName" not in msbuild_attributes: - prefix = spec.get("product_prefix", "") - product_name = spec.get("product_name", "$(ProjectName)") - target_name = prefix + product_name - msbuild_attributes["TargetName"] = target_name - if "TargetExt" not in msbuild_attributes and "product_extension" in spec: - ext = spec.get("product_extension") - msbuild_attributes["TargetExt"] = "." + ext - - if spec.get("msvs_external_builder"): - external_out_dir = spec.get("msvs_external_builder_out_dir", ".") - msbuild_attributes["OutputDirectory"] = _FixPath(external_out_dir) + "\\" - - # Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile' - # (depending on the tool used) to avoid MSB8012 warning. - msbuild_tool_map = { - "executable": "Link", - "shared_library": "Link", - "loadable_module": "Link", - "windows_driver": "Link", - "static_library": "Lib", - } - msbuild_tool = msbuild_tool_map.get(spec["type"]) - if msbuild_tool: - msbuild_settings = config["finalized_msbuild_settings"] - out_file = msbuild_settings[msbuild_tool].get("OutputFile") - if out_file: - msbuild_attributes["TargetPath"] = _FixPath(out_file) - target_ext = msbuild_settings[msbuild_tool].get("TargetExt") - if target_ext: - msbuild_attributes["TargetExt"] = target_ext - - return msbuild_attributes - - -def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file): - # TODO(jeanluc) We could optimize out the following and do it only if - # there are actions. - # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'. - new_paths = [] - cygwin_dirs = spec.get("msvs_cygwin_dirs", ["."])[0] - if cygwin_dirs: - cyg_path = "$(MSBuildProjectDirectory)\\%s\\bin\\" % _FixPath(cygwin_dirs) - new_paths.append(cyg_path) - # TODO(jeanluc) Change the convention to have both a cygwin_dir and a - # python_dir. 
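The `ExecutablePath` assembly performed by the next few lines is easier to see in isolation: the project's cygwin `bin` directory is prepended, and a python directory is derived from it by string substitution. A sketch with illustrative names:

```python
def executable_path(cygwin_dir, python_dir="python_26"):
    # Prefix $(ExecutablePath) with the project's cygwin bin directory and a
    # sibling python directory obtained by substituting "cygwin\bin".
    cyg_path = "$(MSBuildProjectDirectory)\\%s\\bin\\" % cygwin_dir
    python_path = cyg_path.replace("cygwin\\bin", python_dir)
    return "$(ExecutablePath);" + ";".join([cyg_path, python_path])

print(executable_path("..\\third_party\\cygwin"))
# $(ExecutablePath);$(MSBuildProjectDirectory)\..\third_party\cygwin\bin\;
#   $(MSBuildProjectDirectory)\..\third_party\python_26\
```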
- python_path = cyg_path.replace("cygwin\\bin", "python_26") - new_paths.append(python_path) - if new_paths: - new_paths = "$(ExecutablePath);" + ";".join(new_paths) - - properties = {} - for (name, configuration) in sorted(configurations.items()): - condition = _GetConfigurationCondition(name, configuration, spec) - attributes = _GetMSBuildAttributes(spec, configuration, build_file) - msbuild_settings = configuration["finalized_msbuild_settings"] - _AddConditionalProperty( - properties, condition, "IntDir", attributes["IntermediateDirectory"] - ) - _AddConditionalProperty( - properties, condition, "OutDir", attributes["OutputDirectory"] - ) - _AddConditionalProperty( - properties, condition, "TargetName", attributes["TargetName"] - ) - if "TargetExt" in attributes: - _AddConditionalProperty( - properties, condition, "TargetExt", attributes["TargetExt"] - ) - - if attributes.get("TargetPath"): - _AddConditionalProperty( - properties, condition, "TargetPath", attributes["TargetPath"] - ) - if attributes.get("TargetExt"): - _AddConditionalProperty( - properties, condition, "TargetExt", attributes["TargetExt"] - ) - - if new_paths: - _AddConditionalProperty(properties, condition, "ExecutablePath", new_paths) - tool_settings = msbuild_settings.get("", {}) - for name, value in sorted(tool_settings.items()): - formatted_value = _GetValueFormattedForMSBuild("", name, value) - _AddConditionalProperty(properties, condition, name, formatted_value) - return _GetMSBuildPropertyGroup(spec, None, properties) - - -def _AddConditionalProperty(properties, condition, name, value): - """Adds a property / conditional value pair to a dictionary. - - Arguments: - properties: The dictionary to be modified. The key is the name of the - property. The value is itself a dictionary; its key is the value and - the value a list of condition for which this value is true. - condition: The condition under which the named property has the value. - name: The name of the property. - value: The value of the property. - """ - if name not in properties: - properties[name] = {} - values = properties[name] - if value not in values: - values[value] = [] - conditions = values[value] - conditions.append(condition) - - -# Regex for msvs variable references ( i.e. $(FOO) ). -MSVS_VARIABLE_REFERENCE = re.compile(r"\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)") - - -def _GetMSBuildPropertyGroup(spec, label, properties): - """Returns a PropertyGroup definition for the specified properties. - - Arguments: - spec: The target project dict. - label: An optional label for the PropertyGroup. - properties: The dictionary to be converted. The key is the name of the - property. The value is itself a dictionary; its key is the value and - the value a list of condition for which this value is true. - """ - group = ["PropertyGroup"] - if label: - group.append({"Label": label}) - num_configurations = len(spec["configurations"]) - - def GetEdges(node): - # Use a definition of edges such that user_of_variable -> used_varible. - # This happens to be easier in this case, since a variable's - # definition contains all variables it references in a single string. - edges = set() - for value in sorted(properties[node].keys()): - # Add to edges all $(...) references to variables. - # - # Variable references that refer to names not in properties are excluded - # These can exist for instance to refer built in definitions like - # $(SolutionDir). - # - # Self references are ignored. Self reference is used in a few places to - # append to the default value. I.e. 
PATH=$(PATH);other_path - edges.update( - { - v - for v in MSVS_VARIABLE_REFERENCE.findall(value) - if v in properties and v != node - } - ) - return edges - - properties_ordered = gyp.common.TopologicallySorted(properties.keys(), GetEdges) - # Walk properties in the reverse of a topological sort on - # user_of_variable -> used_variable as this ensures variables are - # defined before they are used. - # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG)) - for name in reversed(properties_ordered): - values = properties[name] - for value, conditions in sorted(values.items()): - if len(conditions) == num_configurations: - # If the value is the same all configurations, - # just add one unconditional entry. - group.append([name, value]) - else: - for condition in conditions: - group.append([name, {"Condition": condition}, value]) - return [group] - - -def _GetMSBuildToolSettingsSections(spec, configurations): - groups = [] - for (name, configuration) in sorted(configurations.items()): - msbuild_settings = configuration["finalized_msbuild_settings"] - group = [ - "ItemDefinitionGroup", - {"Condition": _GetConfigurationCondition(name, configuration, spec)}, - ] - for tool_name, tool_settings in sorted(msbuild_settings.items()): - # Skip the tool named '' which is a holder of global settings handled - # by _GetMSBuildConfigurationGlobalProperties. - if tool_name and tool_settings: - tool = [tool_name] - for name, value in sorted(tool_settings.items()): - formatted_value = _GetValueFormattedForMSBuild( - tool_name, name, value - ) - tool.append([name, formatted_value]) - group.append(tool) - groups.append(group) - return groups - - -def _FinalizeMSBuildSettings(spec, configuration): - if "msbuild_settings" in configuration: - converted = False - msbuild_settings = configuration["msbuild_settings"] - MSVSSettings.ValidateMSBuildSettings(msbuild_settings) - else: - converted = True - msvs_settings = configuration.get("msvs_settings", {}) - msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings) - include_dirs, midl_include_dirs, resource_include_dirs = _GetIncludeDirs( - configuration - ) - libraries = _GetLibraries(spec) - library_dirs = _GetLibraryDirs(configuration) - out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True) - target_ext = _GetOutputTargetExt(spec) - defines = _GetDefines(configuration) - if converted: - # Visual Studio 2010 has TR1 - defines = [d for d in defines if d != "_HAS_TR1=0"] - # Warn of ignored settings - ignored_settings = ["msvs_tool_files"] - for ignored_setting in ignored_settings: - value = configuration.get(ignored_setting) - if value: - print( - "Warning: The automatic conversion to MSBuild does not handle " - "%s. Ignoring setting of %s" % (ignored_setting, str(value)) - ) - - defines = [_EscapeCppDefineForMSBuild(d) for d in defines] - disabled_warnings = _GetDisabledWarnings(configuration) - prebuild = configuration.get("msvs_prebuild") - postbuild = configuration.get("msvs_postbuild") - def_file = _GetModuleDefinition(spec) - precompiled_header = configuration.get("msvs_precompiled_header") - - # Add the information to the appropriate tool - # TODO(jeanluc) We could optimize and generate these settings only if - # the corresponding files are found, e.g. don't generate ResourceCompile - # if you don't have any resources. 
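The `_ToolAppend`/`_ToolSetOrAppend` helpers invoked below are defined earlier in this file, outside this hunk. A plausible, simplified reconstruction of the append behavior they provide, to make the calls readable (the real helpers do more, e.g. backslash normalization for directory-like settings):

```python
def tool_append(tools, tool_name, setting, value, only_if_unset=False):
    # Settings accumulate per tool; list values extend rather than overwrite.
    tool = tools.setdefault(tool_name, {})
    if setting in tool:
        if only_if_unset:
            return
        if isinstance(tool[setting], list) and isinstance(value, list):
            tool[setting] += value
        else:
            raise TypeError("Appending %r to a non-list setting" % setting)
    else:
        tool[setting] = value

settings = {}
tool_append(settings, "ClCompile", "PreprocessorDefinitions", ["WIN32"])
tool_append(settings, "ClCompile", "PreprocessorDefinitions", ["NDEBUG"])
print(settings)  # {'ClCompile': {'PreprocessorDefinitions': ['WIN32', 'NDEBUG']}}
```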
- _ToolAppend( - msbuild_settings, "ClCompile", "AdditionalIncludeDirectories", include_dirs - ) - _ToolAppend( - msbuild_settings, "Midl", "AdditionalIncludeDirectories", midl_include_dirs - ) - _ToolAppend( - msbuild_settings, - "ResourceCompile", - "AdditionalIncludeDirectories", - resource_include_dirs, - ) - # Add in libraries, note that even for empty libraries, we want this - # set, to prevent inheriting default libraries from the environment. - _ToolSetOrAppend(msbuild_settings, "Link", "AdditionalDependencies", libraries) - _ToolAppend(msbuild_settings, "Link", "AdditionalLibraryDirectories", library_dirs) - if out_file: - _ToolAppend( - msbuild_settings, msbuild_tool, "OutputFile", out_file, only_if_unset=True - ) - if target_ext: - _ToolAppend( - msbuild_settings, msbuild_tool, "TargetExt", target_ext, only_if_unset=True - ) - # Add defines. - _ToolAppend(msbuild_settings, "ClCompile", "PreprocessorDefinitions", defines) - _ToolAppend(msbuild_settings, "ResourceCompile", "PreprocessorDefinitions", defines) - # Add disabled warnings. - _ToolAppend( - msbuild_settings, "ClCompile", "DisableSpecificWarnings", disabled_warnings - ) - # Turn on precompiled headers if appropriate. - if precompiled_header: - precompiled_header = os.path.split(precompiled_header)[1] - _ToolAppend(msbuild_settings, "ClCompile", "PrecompiledHeader", "Use") - _ToolAppend( - msbuild_settings, "ClCompile", "PrecompiledHeaderFile", precompiled_header - ) - _ToolAppend( - msbuild_settings, "ClCompile", "ForcedIncludeFiles", [precompiled_header] - ) - else: - _ToolAppend(msbuild_settings, "ClCompile", "PrecompiledHeader", "NotUsing") - # Turn off WinRT compilation - _ToolAppend(msbuild_settings, "ClCompile", "CompileAsWinRT", "false") - # Turn on import libraries if appropriate - if spec.get("msvs_requires_importlibrary"): - _ToolAppend(msbuild_settings, "", "IgnoreImportLibrary", "false") - # Loadable modules don't generate import libraries; - # tell dependent projects to not expect one. - if spec["type"] == "loadable_module": - _ToolAppend(msbuild_settings, "", "IgnoreImportLibrary", "true") - # Set the module definition file if any. - if def_file: - _ToolAppend(msbuild_settings, "Link", "ModuleDefinitionFile", def_file) - configuration["finalized_msbuild_settings"] = msbuild_settings - if prebuild: - _ToolAppend(msbuild_settings, "PreBuildEvent", "Command", prebuild) - if postbuild: - _ToolAppend(msbuild_settings, "PostBuildEvent", "Command", postbuild) - - -def _GetValueFormattedForMSBuild(tool_name, name, value): - if type(value) == list: - # For some settings, VS2010 does not automatically extends the settings - # TODO(jeanluc) Is this what we want? - if name in [ - "AdditionalIncludeDirectories", - "AdditionalLibraryDirectories", - "AdditionalOptions", - "DelayLoadDLLs", - "DisableSpecificWarnings", - "PreprocessorDefinitions", - ]: - value.append("%%(%s)" % name) - # For most tools, entries in a list should be separated with ';' but some - # settings use a space. Check for those first. - exceptions = { - "ClCompile": ["AdditionalOptions"], - "Link": ["AdditionalOptions"], - "Lib": ["AdditionalOptions"], - } - char = " " if name in exceptions.get(tool_name, []) else ";" - formatted_value = char.join( - [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value] - ) - else: - formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value) - return formatted_value - - -def _VerifySourcesExist(sources, root_dir): - """Verifies that all source files exist on disk. - - Checks that all regular source files, i.e. 
not created at run time, - exist on disk. Missing files cause needless recompilation but no otherwise - visible errors. - - Arguments: - sources: A recursive list of Filter/file names. - root_dir: The root directory for the relative path names. - Returns: - A list of source files that cannot be found on disk. - """ - missing_sources = [] - for source in sources: - if isinstance(source, MSVSProject.Filter): - missing_sources.extend(_VerifySourcesExist(source.contents, root_dir)) - else: - if "$" not in source: - full_path = os.path.join(root_dir, source) - if not os.path.exists(full_path): - missing_sources.append(full_path) - return missing_sources - - -def _GetMSBuildSources( - spec, - sources, - exclusions, - rule_dependencies, - extension_to_rule_name, - actions_spec, - sources_handled_by_action, - list_excluded, -): - groups = [ - "none", - "masm", - "midl", - "include", - "compile", - "resource", - "rule", - "rule_dependency", - ] - grouped_sources = {} - for g in groups: - grouped_sources[g] = [] - - _AddSources2( - spec, - sources, - exclusions, - grouped_sources, - rule_dependencies, - extension_to_rule_name, - sources_handled_by_action, - list_excluded, - ) - sources = [] - for g in groups: - if grouped_sources[g]: - sources.append(["ItemGroup"] + grouped_sources[g]) - if actions_spec: - sources.append(["ItemGroup"] + actions_spec) - return sources - - -def _AddSources2( - spec, - sources, - exclusions, - grouped_sources, - rule_dependencies, - extension_to_rule_name, - sources_handled_by_action, - list_excluded, -): - extensions_excluded_from_precompile = [] - for source in sources: - if isinstance(source, MSVSProject.Filter): - _AddSources2( - spec, - source.contents, - exclusions, - grouped_sources, - rule_dependencies, - extension_to_rule_name, - sources_handled_by_action, - list_excluded, - ) - else: - if source not in sources_handled_by_action: - detail = [] - excluded_configurations = exclusions.get(source, []) - if len(excluded_configurations) == len(spec["configurations"]): - detail.append(["ExcludedFromBuild", "true"]) - else: - for config_name, configuration in sorted(excluded_configurations): - condition = _GetConfigurationCondition( - config_name, configuration - ) - detail.append( - ["ExcludedFromBuild", {"Condition": condition}, "true"] - ) - # Add precompile if needed - for config_name, configuration in spec["configurations"].items(): - precompiled_source = configuration.get( - "msvs_precompiled_source", "" - ) - if precompiled_source != "": - precompiled_source = _FixPath(precompiled_source) - if not extensions_excluded_from_precompile: - # If the precompiled header is generated by a C source, - # we must not try to use it for C++ sources, - # and vice versa. - basename, extension = os.path.splitext(precompiled_source) - if extension == ".c": - extensions_excluded_from_precompile = [ - ".cc", - ".cpp", - ".cxx", - ] - else: - extensions_excluded_from_precompile = [".c"] - - if precompiled_source == source: - condition = _GetConfigurationCondition( - config_name, configuration, spec - ) - detail.append( - ["PrecompiledHeader", {"Condition": condition}, "Create"] - ) - else: - # Turn off precompiled header usage for source files of a - # different type than the file that generated the - # precompiled header. 
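# A minimal, self-contained sketch of the rule described in the comment
# above: a precompiled header generated from a C source must not be used by
# C++ sources, and vice versa (the helper name here is hypothetical, not
# part of gyp's API):
import os

def pch_excluded_extensions(precompiled_source):
    # Extensions that must not reuse a PCH built from this source file.
    ext = os.path.splitext(precompiled_source)[1]
    return [".cc", ".cpp", ".cxx"] if ext == ".c" else [".c"]

# pch_excluded_extensions("stdafx.c")  -> ['.cc', '.cpp', '.cxx']
# pch_excluded_extensions("stdafx.cc") -> ['.c']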
-                            for extension in extensions_excluded_from_precompile:
-                                if source.endswith(extension):
-                                    detail.append(["PrecompiledHeader", ""])
-                                    detail.append(["ForcedIncludeFiles", ""])
-
-                group, element = _MapFileToMsBuildSourceType(
-                    source,
-                    rule_dependencies,
-                    extension_to_rule_name,
-                    _GetUniquePlatforms(spec),
-                    spec["toolset"],
-                )
-                if group == "compile" and not os.path.isabs(source):
-                    # Add an <ObjectFileName> value to support duplicate source
-                    # file basenames, except for absolute paths to avoid paths
-                    # with more than 260 characters.
-                    file_name = os.path.splitext(source)[0] + ".obj"
-                    if file_name.startswith("..\\"):
-                        file_name = re.sub(r"^(\.\.\\)+", "", file_name)
-                    elif file_name.startswith("$("):
-                        file_name = re.sub(r"^\$\([^)]+\)\\", "", file_name)
-                    detail.append(["ObjectFileName", "$(IntDir)\\" + file_name])
-                grouped_sources[group].append([element, {"Include": source}] + detail)
-
-
-def _GetMSBuildProjectReferences(project):
-    references = []
-    if project.dependencies:
-        group = ["ItemGroup"]
-        added_dependency_set = set()
-        for dependency in project.dependencies:
-            dependency_spec = dependency.spec
-            should_skip_dep = False
-            if project.spec["toolset"] == "target":
-                if dependency_spec["toolset"] == "host":
-                    if dependency_spec["type"] == "static_library":
-                        should_skip_dep = True
-            if dependency.name.startswith("run_"):
-                should_skip_dep = False
-            if should_skip_dep:
-                continue
-
-            canonical_name = dependency.name.replace("_host", "")
-            added_dependency_set.add(canonical_name)
-            guid = dependency.guid
-            project_dir = os.path.split(project.path)[0]
-            relative_path = gyp.common.RelativePath(dependency.path, project_dir)
-            project_ref = [
-                "ProjectReference",
-                {"Include": relative_path},
-                ["Project", guid],
-                ["ReferenceOutputAssembly", "false"],
-            ]
-            for config in dependency.spec.get("configurations", {}).values():
-                if config.get("msvs_use_library_dependency_inputs", 0):
-                    project_ref.append(["UseLibraryDependencyInputs", "true"])
-                    break
-                # If it's disabled in any config, turn it off in the reference.
-                if config.get("msvs_2010_disable_uldi_when_referenced", 0):
-                    project_ref.append(["UseLibraryDependencyInputs", "false"])
-                    break
-            group.append(project_ref)
-        references.append(group)
-    return references
-
-
-def _GenerateMSBuildProject(project, options, version, generator_flags, spec):
-    spec = project.spec
-    configurations = spec["configurations"]
-    toolset = spec["toolset"]
-    project_dir, project_file_name = os.path.split(project.path)
-    gyp.common.EnsureDirExists(project.path)
-    # Prepare list of sources and excluded sources.
-
-    gyp_file = os.path.split(project.build_file)[1]
-    sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, gyp_file)
-    # Add rules.
-    actions_to_add = {}
-    props_files_of_rules = set()
-    targets_files_of_rules = set()
-    rule_dependencies = set()
-    extension_to_rule_name = {}
-    list_excluded = generator_flags.get("msvs_list_excluded_files", True)
-    platforms = _GetUniquePlatforms(spec)
-
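# Before the rule-generation branch below, a standalone sketch of the
# <ObjectFileName> rewrite above, which keeps duplicate source basenames
# from colliding on a single .obj path (the function name is hypothetical):
import os
import re

def object_file_name(source):
    name = os.path.splitext(source)[0] + ".obj"
    if name.startswith("..\\"):
        # Hoist sources that escape the project directory.
        name = re.sub(r"^(\.\.\\)+", "", name)
    elif name.startswith("$("):
        # Strip a leading MSBuild macro such as $(ProjectDir).
        name = re.sub(r"^\$\([^)]+\)\\", "", name)
    return "$(IntDir)\\" + name

# object_file_name("..\\src\\util.cpp") -> '$(IntDir)\\src\\util.obj'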
- if not spec.get("msvs_external_builder"): - _GenerateRulesForMSBuild( - project_dir, - options, - spec, - sources, - excluded_sources, - props_files_of_rules, - targets_files_of_rules, - actions_to_add, - rule_dependencies, - extension_to_rule_name, - ) - else: - rules = spec.get("rules", []) - _AdjustSourcesForRules(rules, sources, excluded_sources, True) - - sources, excluded_sources, excluded_idl = _AdjustSourcesAndConvertToFilterHierarchy( - spec, options, project_dir, sources, excluded_sources, list_excluded, version - ) - - # Don't add actions if we are using an external builder like ninja. - if not spec.get("msvs_external_builder"): - _AddActions(actions_to_add, spec, project.build_file) - _AddCopies(actions_to_add, spec) - - # NOTE: this stanza must appear after all actions have been decided. - # Don't excluded sources with actions attached, or they won't run. - excluded_sources = _FilterActionsFromExcluded(excluded_sources, actions_to_add) - - exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) - actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild( - spec, actions_to_add - ) - - _GenerateMSBuildFiltersFile( - project.path + ".filters", - sources, - rule_dependencies, - extension_to_rule_name, - platforms, - toolset, - ) - missing_sources = _VerifySourcesExist(sources, project_dir) - - for configuration in configurations.values(): - _FinalizeMSBuildSettings(spec, configuration) - - # Add attributes to root element - - import_default_section = [ - ["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.Default.props"}] - ] - import_cpp_props_section = [ - ["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.props"}] - ] - import_cpp_targets_section = [ - ["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.targets"}] - ] - import_masm_props_section = [ - ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\masm.props"}] - ] - import_masm_targets_section = [ - ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\masm.targets"}] - ] - import_marmasm_props_section = [ - ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\marmasm.props"}] - ] - import_marmasm_targets_section = [ - ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\marmasm.targets"}] - ] - macro_section = [["PropertyGroup", {"Label": "UserMacros"}]] - - content = [ - "Project", - { - "xmlns": "http://schemas.microsoft.com/developer/msbuild/2003", - "ToolsVersion": version.ProjectVersion(), - "DefaultTargets": "Build", - }, - ] - - content += _GetMSBuildProjectConfigurations(configurations, spec) - content += _GetMSBuildGlobalProperties( - spec, version, project.guid, project_file_name - ) - content += import_default_section - content += _GetMSBuildConfigurationDetails(spec, project.build_file) - if spec.get("msvs_enable_winphone"): - content += _GetMSBuildLocalProperties("v120_wp81") - else: - content += _GetMSBuildLocalProperties(project.msbuild_toolset) - content += import_cpp_props_section - content += import_masm_props_section - if "arm64" in platforms and toolset == "target": - content += import_marmasm_props_section - content += _GetMSBuildExtensions(props_files_of_rules) - content += _GetMSBuildPropertySheets(configurations, spec) - content += macro_section - content += _GetMSBuildConfigurationGlobalProperties( - spec, configurations, project.build_file - ) - content += _GetMSBuildToolSettingsSections(spec, configurations) - content += _GetMSBuildSources( - spec, - sources, - exclusions, - rule_dependencies, - 
extension_to_rule_name, - actions_spec, - sources_handled_by_action, - list_excluded, - ) - content += _GetMSBuildProjectReferences(project) - content += import_cpp_targets_section - content += import_masm_targets_section - if "arm64" in platforms and toolset == "target": - content += import_marmasm_targets_section - content += _GetMSBuildExtensionTargets(targets_files_of_rules) - - if spec.get("msvs_external_builder"): - content += _GetMSBuildExternalBuilderTargets(spec) - - # TODO(jeanluc) File a bug to get rid of runas. We had in MSVS: - # has_run_as = _WriteMSVSUserFile(project.path, version, spec) - - easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True) - - return missing_sources - - -def _GetMSBuildExternalBuilderTargets(spec): - """Return a list of MSBuild targets for external builders. - - The "Build" and "Clean" targets are always generated. If the spec contains - 'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also - be generated, to support building selected C/C++ files. - - Arguments: - spec: The gyp target spec. - Returns: - List of MSBuild 'Target' specs. - """ - build_cmd = _BuildCommandLineForRuleRaw( - spec, spec["msvs_external_builder_build_cmd"], False, False, False, False - ) - build_target = ["Target", {"Name": "Build"}] - build_target.append(["Exec", {"Command": build_cmd}]) - - clean_cmd = _BuildCommandLineForRuleRaw( - spec, spec["msvs_external_builder_clean_cmd"], False, False, False, False - ) - clean_target = ["Target", {"Name": "Clean"}] - clean_target.append(["Exec", {"Command": clean_cmd}]) - - targets = [build_target, clean_target] - - if spec.get("msvs_external_builder_clcompile_cmd"): - clcompile_cmd = _BuildCommandLineForRuleRaw( - spec, - spec["msvs_external_builder_clcompile_cmd"], - False, - False, - False, - False, - ) - clcompile_target = ["Target", {"Name": "ClCompile"}] - clcompile_target.append(["Exec", {"Command": clcompile_cmd}]) - targets.append(clcompile_target) - - return targets - - -def _GetMSBuildExtensions(props_files_of_rules): - extensions = ["ImportGroup", {"Label": "ExtensionSettings"}] - for props_file in props_files_of_rules: - extensions.append(["Import", {"Project": props_file}]) - return [extensions] - - -def _GetMSBuildExtensionTargets(targets_files_of_rules): - targets_node = ["ImportGroup", {"Label": "ExtensionTargets"}] - for targets_file in sorted(targets_files_of_rules): - targets_node.append(["Import", {"Project": targets_file}]) - return [targets_node] - - -def _GenerateActionsForMSBuild(spec, actions_to_add): - """Add actions accumulated into an actions_to_add, merging as needed. - - Arguments: - spec: the target project dict - actions_to_add: dictionary keyed on input name, which maps to a list of - dicts describing the actions attached to that input file. - - Returns: - A pair of (action specification, the sources handled by this action). 
- """ - sources_handled_by_action = OrderedSet() - actions_spec = [] - for primary_input, actions in actions_to_add.items(): - if generator_supports_multiple_toolsets: - primary_input = primary_input.replace(".exe", "_host.exe") - inputs = OrderedSet() - outputs = OrderedSet() - descriptions = [] - commands = [] - for action in actions: - - def fixup_host_exe(i): - if "$(OutDir)" in i: - i = i.replace(".exe", "_host.exe") - return i - - if generator_supports_multiple_toolsets: - action["inputs"] = [fixup_host_exe(i) for i in action["inputs"]] - inputs.update(OrderedSet(action["inputs"])) - outputs.update(OrderedSet(action["outputs"])) - descriptions.append(action["description"]) - cmd = action["command"] - if generator_supports_multiple_toolsets: - cmd = cmd.replace(".exe", "_host.exe") - # For most actions, add 'call' so that actions that invoke batch files - # return and continue executing. msbuild_use_call provides a way to - # disable this but I have not seen any adverse effect from doing that - # for everything. - if action.get("msbuild_use_call", True): - cmd = "call " + cmd - commands.append(cmd) - # Add the custom build action for one input file. - description = ", and also ".join(descriptions) - - # We can't join the commands simply with && because the command line will - # get too long. See also _AddActions: cygwin's setup_env mustn't be called - # for every invocation or the command that sets the PATH will grow too - # long. - command = "\r\n".join( - [c + "\r\nif %errorlevel% neq 0 exit /b %errorlevel%" for c in commands] - ) - _AddMSBuildAction( - spec, - primary_input, - inputs, - outputs, - command, - description, - sources_handled_by_action, - actions_spec, - ) - return actions_spec, sources_handled_by_action - - -def _AddMSBuildAction( - spec, - primary_input, - inputs, - outputs, - cmd, - description, - sources_handled_by_action, - actions_spec, -): - command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd) - primary_input = _FixPath(primary_input) - inputs_array = _FixPaths(inputs) - outputs_array = _FixPaths(outputs) - additional_inputs = ";".join([i for i in inputs_array if i != primary_input]) - outputs = ";".join(outputs_array) - sources_handled_by_action.add(primary_input) - action_spec = ["CustomBuild", {"Include": primary_input}] - action_spec.extend( - # TODO(jeanluc) 'Document' for all or just if as_sources? - [ - ["FileType", "Document"], - ["Command", command], - ["Message", description], - ["Outputs", outputs], - ] - ) - if additional_inputs: - action_spec.append(["AdditionalInputs", additional_inputs]) - actions_spec.append(action_spec) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py deleted file mode 100755 index e80b57f06a130..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the msvs.py file. 
""" - -import gyp.generator.msvs as msvs -import unittest - -from io import StringIO - - -class TestSequenceFunctions(unittest.TestCase): - def setUp(self): - self.stderr = StringIO() - - def test_GetLibraries(self): - self.assertEqual(msvs._GetLibraries({}), []) - self.assertEqual(msvs._GetLibraries({"libraries": []}), []) - self.assertEqual( - msvs._GetLibraries({"other": "foo", "libraries": ["a.lib"]}), ["a.lib"] - ) - self.assertEqual(msvs._GetLibraries({"libraries": ["-la"]}), ["a.lib"]) - self.assertEqual( - msvs._GetLibraries( - { - "libraries": [ - "a.lib", - "b.lib", - "c.lib", - "-lb.lib", - "-lb.lib", - "d.lib", - "a.lib", - ] - } - ), - ["c.lib", "b.lib", "d.lib", "a.lib"], - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py deleted file mode 100644 index 0146c4996260a..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py +++ /dev/null @@ -1,2964 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - - -import collections -import copy -import hashlib -import json -import multiprocessing -import os.path -import re -import signal -import shutil -import subprocess -import sys -import gyp -import gyp.common -import gyp.msvs_emulation -import gyp.MSVSUtil as MSVSUtil -import gyp.xcode_emulation - -from io import StringIO - -from gyp.common import GetEnvironFallback -import gyp.ninja_syntax as ninja_syntax - -generator_default_variables = { - "EXECUTABLE_PREFIX": "", - "EXECUTABLE_SUFFIX": "", - "STATIC_LIB_PREFIX": "lib", - "STATIC_LIB_SUFFIX": ".a", - "SHARED_LIB_PREFIX": "lib", - # Gyp expects the following variables to be expandable by the build - # system to the appropriate locations. Ninja prefers paths to be - # known at gyp time. To resolve this, introduce special - # variables starting with $! and $| (which begin with a $ so gyp knows it - # should be treated specially, but is otherwise an invalid - # ninja/shell variable) that are passed to gyp here but expanded - # before writing out into the target .ninja files; see - # ExpandSpecial. - # $! is used for variables that represent a path and that can only appear at - # the start of a string, while $| is used for variables that can appear - # anywhere in a string. - "INTERMEDIATE_DIR": "$!INTERMEDIATE_DIR", - "SHARED_INTERMEDIATE_DIR": "$!PRODUCT_DIR/gen", - "PRODUCT_DIR": "$!PRODUCT_DIR", - "CONFIGURATION_NAME": "$|CONFIGURATION_NAME", - # Special variables that may be used by gyp 'rule' targets. - # We generate definitions for these variables on the fly when processing a - # rule. - "RULE_INPUT_ROOT": "${root}", - "RULE_INPUT_DIRNAME": "${dirname}", - "RULE_INPUT_PATH": "${source}", - "RULE_INPUT_EXT": "${ext}", - "RULE_INPUT_NAME": "${name}", -} - -# Placates pylint. 
-generator_additional_non_configuration_keys = [] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] -generator_filelist_paths = None - -generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested() - - -def StripPrefix(arg, prefix): - if arg.startswith(prefix): - return arg[len(prefix) :] - return arg - - -def QuoteShellArgument(arg, flavor): - """Quote a string such that it will be interpreted as a single argument - by the shell.""" - # Rather than attempting to enumerate the bad shell characters, just - # allow common OK ones and quote anything else. - if re.match(r"^[a-zA-Z0-9_=.\\/-]+$", arg): - return arg # No quoting necessary. - if flavor == "win": - return gyp.msvs_emulation.QuoteForRspFile(arg) - return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'" - - -def Define(d, flavor): - """Takes a preprocessor define and returns a -D parameter that's ninja- and - shell-escaped.""" - if flavor == "win": - # cl.exe replaces literal # characters with = in preprocessor definitions for - # some reason. Octal-encode to work around that. - d = d.replace("#", "\\%03o" % ord("#")) - return QuoteShellArgument(ninja_syntax.escape("-D" + d), flavor) - - -def AddArch(output, arch): - """Adds an arch string to an output path.""" - output, extension = os.path.splitext(output) - return f"{output}.{arch}{extension}" - - -class Target: - """Target represents the paths used within a single gyp target. - - Conceptually, building a single target A is a series of steps: - - 1) actions/rules/copies generates source/resources/etc. - 2) compiles generates .o files - 3) link generates a binary (library/executable) - 4) bundle merges the above in a mac bundle - - (Any of these steps can be optional.) - - From a build ordering perspective, a dependent target B could just - depend on the last output of this series of steps. - - But some dependent commands sometimes need to reach inside the box. - For example, when linking B it needs to get the path to the static - library generated by A. - - This object stores those paths. To keep things simple, member - variables only store concrete paths to single files, while methods - compute derived values like "the last output of the target". - """ - - def __init__(self, type): - # Gyp type ("static_library", etc.) of this target. - self.type = type - # File representing whether any input dependencies necessary for - # dependent actions have completed. - self.preaction_stamp = None - # File representing whether any input dependencies necessary for - # dependent compiles have completed. - self.precompile_stamp = None - # File representing the completion of actions/rules/copies, if any. - self.actions_stamp = None - # Path to the output of the link step, if any. - self.binary = None - # Path to the file representing the completion of building the bundle, - # if any. - self.bundle = None - # On Windows, incremental linking requires linking against all the .objs - # that compose a .lib (rather than the .lib itself). That list is stored - # here. In this case, we also need to save the compile_deps for the target, - # so that the target that directly depends on the .objs can also depend - # on those. - self.component_objs = None - self.compile_deps = None - # Windows only. The import .lib is the output of a build step, but - # because dependents only link against the lib (not both the lib and the - # dll) we keep track of the import library here. 
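# The POSIX branch of QuoteShellArgument earlier in this hunk relies on the
# classic '"'"' idiom for embedding a single quote inside a single-quoted
# string; shown on its own below (a sketch using the same safe-character
# whitelist):
import re

def quote_posix(arg):
    if re.match(r"^[a-zA-Z0-9_=.\\/-]+$", arg):
        return arg  # only safe characters; no quoting needed
    return "'" + arg.replace("'", "'\"'\"'") + "'"

# quote_posix("foo_bar") -> 'foo_bar' (unchanged)
# quote_posix("it's")    -> 'it'"'"'s (close quote, escaped quote, reopen)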
- self.import_lib = None - # Track if this target contains any C++ files, to decide if gcc or g++ - # should be used for linking. - self.uses_cpp = False - - def Linkable(self): - """Return true if this is a target that can be linked against.""" - return self.type in ("static_library", "shared_library") - - def UsesToc(self, flavor): - """Return true if the target should produce a restat rule based on a TOC - file.""" - # For bundles, the .TOC should be produced for the binary, not for - # FinalOutput(). But the naive approach would put the TOC file into the - # bundle, so don't do this for bundles for now. - if flavor == "win" or self.bundle: - return False - return self.type in ("shared_library", "loadable_module") - - def PreActionInput(self, flavor): - """Return the path, if any, that should be used as a dependency of - any dependent action step.""" - if self.UsesToc(flavor): - return self.FinalOutput() + ".TOC" - return self.FinalOutput() or self.preaction_stamp - - def PreCompileInput(self): - """Return the path, if any, that should be used as a dependency of - any dependent compile step.""" - return self.actions_stamp or self.precompile_stamp - - def FinalOutput(self): - """Return the last output of the target, which depends on all prior - steps.""" - return self.bundle or self.binary or self.actions_stamp - - -# A small discourse on paths as used within the Ninja build: -# All files we produce (both at gyp and at build time) appear in the -# build directory (e.g. out/Debug). -# -# Paths within a given .gyp file are always relative to the directory -# containing the .gyp file. Call these "gyp paths". This includes -# sources as well as the starting directory a given gyp rule/action -# expects to be run from. We call the path from the source root to -# the gyp file the "base directory" within the per-.gyp-file -# NinjaWriter code. -# -# All paths as written into the .ninja files are relative to the build -# directory. Call these paths "ninja paths". -# -# We translate between these two notions of paths with two helper -# functions: -# -# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file) -# into the equivalent ninja path. -# -# - GypPathToUniqueOutput translates a gyp path into a ninja path to write -# an output file; the result can be namespaced such that it is unique -# to the input file name as well as the output target name. - - -class NinjaWriter: - def __init__( - self, - hash_for_rules, - target_outputs, - base_dir, - build_dir, - output_file, - toplevel_build, - output_file_name, - flavor, - toplevel_dir=None, - ): - """ - base_dir: path from source root to directory containing this gyp file, - by gyp semantics, all input paths are relative to this - build_dir: path from source root to build output - toplevel_dir: path to the toplevel directory - """ - - self.hash_for_rules = hash_for_rules - self.target_outputs = target_outputs - self.base_dir = base_dir - self.build_dir = build_dir - self.ninja = ninja_syntax.Writer(output_file) - self.toplevel_build = toplevel_build - self.output_file_name = output_file_name - - self.flavor = flavor - self.abs_build_dir = None - if toplevel_dir is not None: - self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir, build_dir)) - self.obj_ext = ".obj" if flavor == "win" else ".o" - if flavor == "win": - # See docstring of msvs_emulation.GenerateEnvironmentFiles(). - self.win_env = {} - for arch in ("x86", "x64"): - self.win_env[arch] = "environment." + arch - - # Relative path from build output dir to base dir. 
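# A minimal sketch of the gyp-path -> ninja-path translation described in
# the discourse above (values are hypothetical; gyp computes build_to_base
# with gyp.common.InvertRelativePath):
import os

def gyp_path_to_ninja(build_to_base, gyp_path):
    # gyp paths are relative to the .gyp file's directory; ninja paths are
    # relative to the build directory, so prepend the inverted mapping.
    return os.path.normpath(os.path.join(build_to_base, gyp_path))

# With base_dir="chrome" and build_dir="out/Debug":
# gyp_path_to_ninja("../../chrome", "app/main.cc") -> '../../chrome/app/main.cc'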
- build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir) - self.build_to_base = os.path.join(build_to_top, base_dir) - # Relative path from base dir to build dir. - base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir) - self.base_to_build = os.path.join(base_to_top, build_dir) - - def ExpandSpecial(self, path, product_dir=None): - """Expand specials like $!PRODUCT_DIR in |path|. - - If |product_dir| is None, assumes the cwd is already the product - dir. Otherwise, |product_dir| is the relative path to the product - dir. - """ - - PRODUCT_DIR = "$!PRODUCT_DIR" - if PRODUCT_DIR in path: - if product_dir: - path = path.replace(PRODUCT_DIR, product_dir) - else: - path = path.replace(PRODUCT_DIR + "/", "") - path = path.replace(PRODUCT_DIR + "\\", "") - path = path.replace(PRODUCT_DIR, ".") - - INTERMEDIATE_DIR = "$!INTERMEDIATE_DIR" - if INTERMEDIATE_DIR in path: - int_dir = self.GypPathToUniqueOutput("gen") - # GypPathToUniqueOutput generates a path relative to the product dir, - # so insert product_dir in front if it is provided. - path = path.replace( - INTERMEDIATE_DIR, os.path.join(product_dir or "", int_dir) - ) - - CONFIGURATION_NAME = "$|CONFIGURATION_NAME" - path = path.replace(CONFIGURATION_NAME, self.config_name) - - return path - - def ExpandRuleVariables(self, path, root, dirname, source, ext, name): - if self.flavor == "win": - path = self.msvs_settings.ConvertVSMacros(path, config=self.config_name) - path = path.replace(generator_default_variables["RULE_INPUT_ROOT"], root) - path = path.replace(generator_default_variables["RULE_INPUT_DIRNAME"], dirname) - path = path.replace(generator_default_variables["RULE_INPUT_PATH"], source) - path = path.replace(generator_default_variables["RULE_INPUT_EXT"], ext) - path = path.replace(generator_default_variables["RULE_INPUT_NAME"], name) - return path - - def GypPathToNinja(self, path, env=None): - """Translate a gyp path to a ninja path, optionally expanding environment - variable references in |path| with |env|. - - See the above discourse on path conversions.""" - if env: - if self.flavor == "mac": - path = gyp.xcode_emulation.ExpandEnvVars(path, env) - elif self.flavor == "win": - path = gyp.msvs_emulation.ExpandMacros(path, env) - if path.startswith("$!"): - expanded = self.ExpandSpecial(path) - if self.flavor == "win": - expanded = os.path.normpath(expanded) - return expanded - if "$|" in path: - path = self.ExpandSpecial(path) - assert "$" not in path, path - return os.path.normpath(os.path.join(self.build_to_base, path)) - - def GypPathToUniqueOutput(self, path, qualified=True): - """Translate a gyp path to a ninja path for writing output. - - If qualified is True, qualify the resulting filename with the name - of the target. This is necessary when e.g. compiling the same - path twice for two separate output targets. - - See the above discourse on path conversions.""" - - path = self.ExpandSpecial(path) - assert not path.startswith("$"), path - - # Translate the path following this scheme: - # Input: foo/bar.gyp, target targ, references baz/out.o - # Output: obj/foo/baz/targ.out.o (if qualified) - # obj/foo/baz/out.o (otherwise) - # (and obj.host instead of obj for cross-compiles) - # - # Why this scheme and not some other one? - # 1) for a given input, you can compute all derived outputs by matching - # its path, even if the input is brought via a gyp file with '..'. - # 2) simple files like libraries and stamps have a simple filename. - - obj = "obj" - if self.toolset != "target": - obj += "." 
+ self.toolset - - path_dir, path_basename = os.path.split(path) - assert not os.path.isabs(path_dir), ( - "'%s' can not be absolute path (see crbug.com/462153)." % path_dir - ) - - if qualified: - path_basename = self.name + "." + path_basename - return os.path.normpath( - os.path.join(obj, self.base_dir, path_dir, path_basename) - ) - - def WriteCollapsedDependencies(self, name, targets, order_only=None): - """Given a list of targets, return a path for a single file - representing the result of building all the targets or None. - - Uses a stamp file if necessary.""" - - assert targets == [item for item in targets if item], targets - if len(targets) == 0: - assert not order_only - return None - if len(targets) > 1 or order_only: - stamp = self.GypPathToUniqueOutput(name + ".stamp") - targets = self.ninja.build(stamp, "stamp", targets, order_only=order_only) - self.ninja.newline() - return targets[0] - - def _SubninjaNameForArch(self, arch): - output_file_base = os.path.splitext(self.output_file_name)[0] - return f"{output_file_base}.{arch}.ninja" - - def WriteSpec(self, spec, config_name, generator_flags): - """The main entry point for NinjaWriter: write the build rules for a spec. - - Returns a Target object, which represents the output paths for this spec. - Returns None if there are no outputs (e.g. a settings-only 'none' type - target).""" - - self.config_name = config_name - self.name = spec["target_name"] - self.toolset = spec["toolset"] - config = spec["configurations"][config_name] - self.target = Target(spec["type"]) - self.is_standalone_static_library = bool( - spec.get("standalone_static_library", 0) - ) - - self.target_rpath = generator_flags.get("target_rpath", r"\$$ORIGIN/lib/") - - self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) - self.xcode_settings = self.msvs_settings = None - if self.flavor == "mac": - self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) - mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None) - if mac_toolchain_dir: - self.xcode_settings.mac_toolchain_dir = mac_toolchain_dir - - if self.flavor == "win": - self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, generator_flags) - arch = self.msvs_settings.GetArch(config_name) - self.ninja.variable("arch", self.win_env[arch]) - self.ninja.variable("cc", "$cl_" + arch) - self.ninja.variable("cxx", "$cl_" + arch) - self.ninja.variable("cc_host", "$cl_" + arch) - self.ninja.variable("cxx_host", "$cl_" + arch) - self.ninja.variable("asm", "$ml_" + arch) - - if self.flavor == "mac": - self.archs = self.xcode_settings.GetActiveArchs(config_name) - if len(self.archs) > 1: - self.arch_subninjas = { - arch: ninja_syntax.Writer( - OpenOutput( - os.path.join( - self.toplevel_build, self._SubninjaNameForArch(arch) - ), - "w", - ) - ) - for arch in self.archs - } - - # Compute predepends for all rules. - # actions_depends is the dependencies this target depends on before running - # any of its action/rule/copy steps. - # compile_depends is the dependencies this target depends on before running - # any of its compile steps. - actions_depends = [] - compile_depends = [] - # TODO(evan): it is rather confusing which things are lists and which - # are strings. Fix these. 
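# The output-naming scheme documented above, restated as a standalone
# function (a sketch; the real method also expands specials and rejects
# absolute paths):
import os

def gyp_path_to_unique_output(base_dir, target_name, path,
                              toolset="target", qualified=True):
    obj = "obj" if toolset == "target" else "obj." + toolset
    path_dir, basename = os.path.split(path)
    if qualified:
        basename = target_name + "." + basename
    return os.path.normpath(os.path.join(obj, base_dir, path_dir, basename))

# gyp_path_to_unique_output("foo", "targ", "baz/out.o") -> 'obj/foo/baz/targ.out.o'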
- if "dependencies" in spec: - for dep in spec["dependencies"]: - if dep in self.target_outputs: - target = self.target_outputs[dep] - actions_depends.append(target.PreActionInput(self.flavor)) - compile_depends.append(target.PreCompileInput()) - if target.uses_cpp: - self.target.uses_cpp = True - actions_depends = [item for item in actions_depends if item] - compile_depends = [item for item in compile_depends if item] - actions_depends = self.WriteCollapsedDependencies( - "actions_depends", actions_depends - ) - compile_depends = self.WriteCollapsedDependencies( - "compile_depends", compile_depends - ) - self.target.preaction_stamp = actions_depends - self.target.precompile_stamp = compile_depends - - # Write out actions, rules, and copies. These must happen before we - # compile any sources, so compute a list of predependencies for sources - # while we do it. - extra_sources = [] - mac_bundle_depends = [] - self.target.actions_stamp = self.WriteActionsRulesCopies( - spec, extra_sources, actions_depends, mac_bundle_depends - ) - - # If we have actions/rules/copies, we depend directly on those, but - # otherwise we depend on dependent target's actions/rules/copies etc. - # We never need to explicitly depend on previous target's link steps, - # because no compile ever depends on them. - compile_depends_stamp = self.target.actions_stamp or compile_depends - - # Write out the compilation steps, if any. - link_deps = [] - try: - sources = extra_sources + spec.get("sources", []) - except TypeError: - print("extra_sources: ", str(extra_sources)) - print('spec.get("sources"): ', str(spec.get("sources"))) - raise - if sources: - if self.flavor == "mac" and len(self.archs) > 1: - # Write subninja file containing compile and link commands scoped to - # a single arch if a fat binary is being built. - for arch in self.archs: - self.ninja.subninja(self._SubninjaNameForArch(arch)) - - pch = None - if self.flavor == "win": - gyp.msvs_emulation.VerifyMissingSources( - sources, self.abs_build_dir, generator_flags, self.GypPathToNinja - ) - pch = gyp.msvs_emulation.PrecompiledHeader( - self.msvs_settings, - config_name, - self.GypPathToNinja, - self.GypPathToUniqueOutput, - self.obj_ext, - ) - else: - pch = gyp.xcode_emulation.MacPrefixHeader( - self.xcode_settings, - self.GypPathToNinja, - lambda path, lang: self.GypPathToUniqueOutput(path + "-" + lang), - ) - link_deps = self.WriteSources( - self.ninja, - config_name, - config, - sources, - compile_depends_stamp, - pch, - spec, - ) - # Some actions/rules output 'sources' that are already object files. - obj_outputs = [f for f in sources if f.endswith(self.obj_ext)] - if obj_outputs: - if self.flavor != "mac" or len(self.archs) == 1: - link_deps += [self.GypPathToNinja(o) for o in obj_outputs] - else: - print( - "Warning: Actions/rules writing object files don't work with " - "multiarch targets, dropping. (target %s)" % spec["target_name"] - ) - elif self.flavor == "mac" and len(self.archs) > 1: - link_deps = collections.defaultdict(list) - - compile_deps = self.target.actions_stamp or actions_depends - if self.flavor == "win" and self.target.type == "static_library": - self.target.component_objs = link_deps - self.target.compile_deps = compile_deps - - # Write out a link step, if needed. 
- output = None - is_empty_bundle = not link_deps and not mac_bundle_depends - if link_deps or self.target.actions_stamp or actions_depends: - output = self.WriteTarget( - spec, config_name, config, link_deps, compile_deps - ) - if self.is_mac_bundle: - mac_bundle_depends.append(output) - - # Bundle all of the above together, if needed. - if self.is_mac_bundle: - output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle) - - if not output: - return None - - assert self.target.FinalOutput(), output - return self.target - - def _WinIdlRule(self, source, prebuild, outputs): - """Handle the implicit VS .idl rule for one source file. Fills |outputs| - with files that are generated.""" - outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData( - source, self.config_name - ) - outdir = self.GypPathToNinja(outdir) - - def fix_path(path, rel=None): - path = os.path.join(outdir, path) - dirname, basename = os.path.split(source) - root, ext = os.path.splitext(basename) - path = self.ExpandRuleVariables(path, root, dirname, source, ext, basename) - if rel: - path = os.path.relpath(path, rel) - return path - - vars = [(name, fix_path(value, outdir)) for name, value in vars] - output = [fix_path(p) for p in output] - vars.append(("outdir", outdir)) - vars.append(("idlflags", flags)) - input = self.GypPathToNinja(source) - self.ninja.build(output, "idl", input, variables=vars, order_only=prebuild) - outputs.extend(output) - - def WriteWinIdlFiles(self, spec, prebuild): - """Writes rules to match MSVS's implicit idl handling.""" - assert self.flavor == "win" - if self.msvs_settings.HasExplicitIdlRulesOrActions(spec): - return [] - outputs = [] - for source in filter(lambda x: x.endswith(".idl"), spec["sources"]): - self._WinIdlRule(source, prebuild, outputs) - return outputs - - def WriteActionsRulesCopies( - self, spec, extra_sources, prebuild, mac_bundle_depends - ): - """Write out the Actions, Rules, and Copies steps. Return a path - representing the outputs of these steps.""" - outputs = [] - if self.is_mac_bundle: - mac_bundle_resources = spec.get("mac_bundle_resources", [])[:] - else: - mac_bundle_resources = [] - extra_mac_bundle_resources = [] - - if "actions" in spec: - outputs += self.WriteActions( - spec["actions"], extra_sources, prebuild, extra_mac_bundle_resources - ) - if "rules" in spec: - outputs += self.WriteRules( - spec["rules"], - extra_sources, - prebuild, - mac_bundle_resources, - extra_mac_bundle_resources, - ) - if "copies" in spec: - outputs += self.WriteCopies(spec["copies"], prebuild, mac_bundle_depends) - - if "sources" in spec and self.flavor == "win": - outputs += self.WriteWinIdlFiles(spec, prebuild) - - if self.xcode_settings and self.xcode_settings.IsIosFramework(): - self.WriteiOSFrameworkHeaders(spec, outputs, prebuild) - - stamp = self.WriteCollapsedDependencies("actions_rules_copies", outputs) - - if self.is_mac_bundle: - xcassets = self.WriteMacBundleResources( - extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends - ) - partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends) - self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends) - - return stamp - - def GenerateDescription(self, verb, message, fallback): - """Generate and return a description of a build step. - - |verb| is the short summary, e.g. ACTION or RULE. - |message| is a hand-written description, or None if not available. - |fallback| is the gyp-level name of the step, usable as a fallback. 
- """ - if self.toolset != "target": - verb += "(%s)" % self.toolset - if message: - return f"{verb} {self.ExpandSpecial(message)}" - else: - return f"{verb} {self.name}: {fallback}" - - def WriteActions( - self, actions, extra_sources, prebuild, extra_mac_bundle_resources - ): - # Actions cd into the base directory. - env = self.GetToolchainEnv() - all_outputs = [] - for action in actions: - # First write out a rule for the action. - name = "{}_{}".format(action["action_name"], self.hash_for_rules) - description = self.GenerateDescription( - "ACTION", action.get("message", None), name - ) - win_shell_flags = ( - self.msvs_settings.GetRuleShellFlags(action) - if self.flavor == "win" - else None - ) - args = action["action"] - depfile = action.get("depfile", None) - if depfile: - depfile = self.ExpandSpecial(depfile, self.base_to_build) - pool = "console" if int(action.get("ninja_use_console", 0)) else None - rule_name, _ = self.WriteNewNinjaRule( - name, args, description, win_shell_flags, env, pool, depfile=depfile - ) - - inputs = [self.GypPathToNinja(i, env) for i in action["inputs"]] - if int(action.get("process_outputs_as_sources", False)): - extra_sources += action["outputs"] - if int(action.get("process_outputs_as_mac_bundle_resources", False)): - extra_mac_bundle_resources += action["outputs"] - outputs = [self.GypPathToNinja(o, env) for o in action["outputs"]] - - # Then write out an edge using the rule. - self.ninja.build(outputs, rule_name, inputs, order_only=prebuild) - all_outputs += outputs - - self.ninja.newline() - - return all_outputs - - def WriteRules( - self, - rules, - extra_sources, - prebuild, - mac_bundle_resources, - extra_mac_bundle_resources, - ): - env = self.GetToolchainEnv() - all_outputs = [] - for rule in rules: - # Skip a rule with no action and no inputs. - if "action" not in rule and not rule.get("rule_sources", []): - continue - - # First write out a rule for the rule action. - name = "{}_{}".format(rule["rule_name"], self.hash_for_rules) - - args = rule["action"] - description = self.GenerateDescription( - "RULE", - rule.get("message", None), - ("%s " + generator_default_variables["RULE_INPUT_PATH"]) % name, - ) - win_shell_flags = ( - self.msvs_settings.GetRuleShellFlags(rule) - if self.flavor == "win" - else None - ) - pool = "console" if int(rule.get("ninja_use_console", 0)) else None - rule_name, args = self.WriteNewNinjaRule( - name, args, description, win_shell_flags, env, pool - ) - - # TODO: if the command references the outputs directly, we should - # simplify it to just use $out. - - # Rules can potentially make use of some special variables which - # must vary per source file. - # Compute the list of variables we'll need to provide. - special_locals = ("source", "root", "dirname", "ext", "name") - needed_variables = {"source"} - for argument in args: - for var in special_locals: - if "${%s}" % var in argument: - needed_variables.add(var) - needed_variables = sorted(needed_variables) - - def cygwin_munge(path): - # pylint: disable=cell-var-from-loop - if win_shell_flags and win_shell_flags.cygwin: - return path.replace("\\", "/") - return path - - inputs = [self.GypPathToNinja(i, env) for i in rule.get("inputs", [])] - - # If there are n source files matching the rule, and m additional rule - # inputs, then adding 'inputs' to each build edge written below will - # write m * n inputs. Collapsing reduces this to m + n. 
- sources = rule.get("rule_sources", []) - num_inputs = len(inputs) - if prebuild: - num_inputs += 1 - if num_inputs > 2 and len(sources) > 2: - inputs = [ - self.WriteCollapsedDependencies( - rule["rule_name"], inputs, order_only=prebuild - ) - ] - prebuild = [] - - # For each source file, write an edge that generates all the outputs. - for source in sources: - source = os.path.normpath(source) - dirname, basename = os.path.split(source) - root, ext = os.path.splitext(basename) - - # Gather the list of inputs and outputs, expanding $vars if possible. - outputs = [ - self.ExpandRuleVariables(o, root, dirname, source, ext, basename) - for o in rule["outputs"] - ] - - if int(rule.get("process_outputs_as_sources", False)): - extra_sources += outputs - - was_mac_bundle_resource = source in mac_bundle_resources - if was_mac_bundle_resource or int( - rule.get("process_outputs_as_mac_bundle_resources", False) - ): - extra_mac_bundle_resources += outputs - # Note: This is n_resources * n_outputs_in_rule. - # Put to-be-removed items in a set and - # remove them all in a single pass - # if this becomes a performance issue. - if was_mac_bundle_resource: - mac_bundle_resources.remove(source) - - extra_bindings = [] - for var in needed_variables: - if var == "root": - extra_bindings.append(("root", cygwin_munge(root))) - elif var == "dirname": - # '$dirname' is a parameter to the rule action, which means - # it shouldn't be converted to a Ninja path. But we don't - # want $!PRODUCT_DIR in there either. - dirname_expanded = self.ExpandSpecial( - dirname, self.base_to_build - ) - extra_bindings.append( - ("dirname", cygwin_munge(dirname_expanded)) - ) - elif var == "source": - # '$source' is a parameter to the rule action, which means - # it shouldn't be converted to a Ninja path. But we don't - # want $!PRODUCT_DIR in there either. - source_expanded = self.ExpandSpecial(source, self.base_to_build) - extra_bindings.append(("source", cygwin_munge(source_expanded))) - elif var == "ext": - extra_bindings.append(("ext", ext)) - elif var == "name": - extra_bindings.append(("name", cygwin_munge(basename))) - else: - assert var is None, repr(var) - - outputs = [self.GypPathToNinja(o, env) for o in outputs] - if self.flavor == "win": - # WriteNewNinjaRule uses unique_name to create a rsp file on win. - extra_bindings.append( - ("unique_name", hashlib.md5(outputs[0]).hexdigest()) - ) - - self.ninja.build( - outputs, - rule_name, - self.GypPathToNinja(source), - implicit=inputs, - order_only=prebuild, - variables=extra_bindings, - ) - - all_outputs.extend(outputs) - - return all_outputs - - def WriteCopies(self, copies, prebuild, mac_bundle_depends): - outputs = [] - if self.xcode_settings: - extra_env = self.xcode_settings.GetPerTargetSettings() - env = self.GetToolchainEnv(additional_settings=extra_env) - else: - env = self.GetToolchainEnv() - for to_copy in copies: - for path in to_copy["files"]: - # Normalize the path so trailing slashes don't confuse us. - path = os.path.normpath(path) - basename = os.path.split(path)[1] - src = self.GypPathToNinja(path, env) - dst = self.GypPathToNinja( - os.path.join(to_copy["destination"], basename), env - ) - outputs += self.ninja.build(dst, "copy", src, order_only=prebuild) - if self.is_mac_bundle: - # gyp has mac_bundle_resources to copy things into a bundle's - # Resources folder, but there's no built-in way to copy files - # to other places in the bundle. - # Hence, some targets use copies for this. 
-                    # Check if this file is copied into the current bundle,
-                    # and if so add it to the bundle depends so
-                    # that dependent targets get rebuilt if the copy input changes.
-                    if dst.startswith(
-                        self.xcode_settings.GetBundleContentsFolderPath()
-                    ):
-                        mac_bundle_depends.append(dst)
-
-        return outputs
-
-    def WriteiOSFrameworkHeaders(self, spec, outputs, prebuild):
-        """Prebuild steps to generate hmap files and copy headers to destination."""
-        framework = self.ComputeMacBundleOutput()
-        all_sources = spec["sources"]
-        copy_headers = spec["mac_framework_headers"]
-        output = self.GypPathToUniqueOutput("headers.hmap")
-        self.xcode_settings.header_map_path = output
-        all_headers = map(
-            self.GypPathToNinja, filter(lambda x: x.endswith(".h"), all_sources)
-        )
-        variables = [
-            ("framework", framework),
-            ("copy_headers", map(self.GypPathToNinja, copy_headers)),
-        ]
-        outputs.extend(
-            self.ninja.build(
-                output,
-                "compile_ios_framework_headers",
-                all_headers,
-                variables=variables,
-                order_only=prebuild,
-            )
-        )
-
-    def WriteMacBundleResources(self, resources, bundle_depends):
-        """Writes ninja edges for 'mac_bundle_resources'."""
-        xcassets = []
-
-        extra_env = self.xcode_settings.GetPerTargetSettings()
-        env = self.GetSortedXcodeEnv(additional_settings=extra_env)
-        env = self.ComputeExportEnvString(env)
-        isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
-
-        for output, res in gyp.xcode_emulation.GetMacBundleResources(
-            generator_default_variables["PRODUCT_DIR"],
-            self.xcode_settings,
-            map(self.GypPathToNinja, resources),
-        ):
-            output = self.ExpandSpecial(output)
-            if os.path.splitext(output)[-1] != ".xcassets":
-                self.ninja.build(
-                    output,
-                    "mac_tool",
-                    res,
-                    variables=[
-                        ("mactool_cmd", "copy-bundle-resource"),
-                        ("env", env),
-                        ("binary", isBinary),
-                    ],
-                )
-                bundle_depends.append(output)
-            else:
-                xcassets.append(res)
-        return xcassets
-
-    def WriteMacXCassets(self, xcassets, bundle_depends):
-        """Writes ninja edges for 'mac_bundle_resources' .xcassets files.
-
-        This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
-        It assumes that the asset catalogs define at least one imageset and
-        thus an Assets.car file will be generated in the application resources
-        directory.
If this is not the case, then the build will probably be done - at each invocation of ninja.""" - if not xcassets: - return - - extra_arguments = {} - settings_to_arg = { - "XCASSETS_APP_ICON": "app-icon", - "XCASSETS_LAUNCH_IMAGE": "launch-image", - } - settings = self.xcode_settings.xcode_settings[self.config_name] - for settings_key, arg_name in settings_to_arg.items(): - value = settings.get(settings_key) - if value: - extra_arguments[arg_name] = value - - partial_info_plist = None - if extra_arguments: - partial_info_plist = self.GypPathToUniqueOutput( - "assetcatalog_generated_info.plist" - ) - extra_arguments["output-partial-info-plist"] = partial_info_plist - - outputs = [] - outputs.append( - os.path.join(self.xcode_settings.GetBundleResourceFolder(), "Assets.car") - ) - if partial_info_plist: - outputs.append(partial_info_plist) - - keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor) - extra_env = self.xcode_settings.GetPerTargetSettings() - env = self.GetSortedXcodeEnv(additional_settings=extra_env) - env = self.ComputeExportEnvString(env) - - bundle_depends.extend( - self.ninja.build( - outputs, - "compile_xcassets", - xcassets, - variables=[("env", env), ("keys", keys)], - ) - ) - return partial_info_plist - - def WriteMacInfoPlist(self, partial_info_plist, bundle_depends): - """Write build rules for bundle Info.plist files.""" - info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist( - generator_default_variables["PRODUCT_DIR"], - self.xcode_settings, - self.GypPathToNinja, - ) - if not info_plist: - return - out = self.ExpandSpecial(out) - if defines: - # Create an intermediate file to store preprocessed results. - intermediate_plist = self.GypPathToUniqueOutput( - os.path.basename(info_plist) - ) - defines = " ".join([Define(d, self.flavor) for d in defines]) - info_plist = self.ninja.build( - intermediate_plist, - "preprocess_infoplist", - info_plist, - variables=[("defines", defines)], - ) - - env = self.GetSortedXcodeEnv(additional_settings=extra_env) - env = self.ComputeExportEnvString(env) - - if partial_info_plist: - intermediate_plist = self.GypPathToUniqueOutput("merged_info.plist") - info_plist = self.ninja.build( - intermediate_plist, "merge_infoplist", [partial_info_plist, info_plist] - ) - - keys = self.xcode_settings.GetExtraPlistItems(self.config_name) - keys = QuoteShellArgument(json.dumps(keys), self.flavor) - isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name) - self.ninja.build( - out, - "copy_infoplist", - info_plist, - variables=[("env", env), ("keys", keys), ("binary", isBinary)], - ) - bundle_depends.append(out) - - def WriteSources( - self, - ninja_file, - config_name, - config, - sources, - predepends, - precompiled_header, - spec, - ): - """Write build rules to compile all of |sources|.""" - if self.toolset == "host": - self.ninja.variable("ar", "$ar_host") - self.ninja.variable("cc", "$cc_host") - self.ninja.variable("cxx", "$cxx_host") - self.ninja.variable("ld", "$ld_host") - self.ninja.variable("ldxx", "$ldxx_host") - self.ninja.variable("nm", "$nm_host") - self.ninja.variable("readelf", "$readelf_host") - - if self.flavor != "mac" or len(self.archs) == 1: - return self.WriteSourcesForArch( - self.ninja, - config_name, - config, - sources, - predepends, - precompiled_header, - spec, - ) - else: - return { - arch: self.WriteSourcesForArch( - self.arch_subninjas[arch], - config_name, - config, - sources, - predepends, - precompiled_header, - spec, - arch=arch, - ) - for arch in self.archs - } - 
- def WriteSourcesForArch( - self, - ninja_file, - config_name, - config, - sources, - predepends, - precompiled_header, - spec, - arch=None, - ): - """Write build rules to compile all of |sources|.""" - - extra_defines = [] - if self.flavor == "mac": - cflags = self.xcode_settings.GetCflags(config_name, arch=arch) - cflags_c = self.xcode_settings.GetCflagsC(config_name) - cflags_cc = self.xcode_settings.GetCflagsCC(config_name) - cflags_objc = ["$cflags_c"] + self.xcode_settings.GetCflagsObjC(config_name) - cflags_objcc = ["$cflags_cc"] + self.xcode_settings.GetCflagsObjCC( - config_name - ) - elif self.flavor == "win": - asmflags = self.msvs_settings.GetAsmflags(config_name) - cflags = self.msvs_settings.GetCflags(config_name) - cflags_c = self.msvs_settings.GetCflagsC(config_name) - cflags_cc = self.msvs_settings.GetCflagsCC(config_name) - extra_defines = self.msvs_settings.GetComputedDefines(config_name) - # See comment at cc_command for why there's two .pdb files. - pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName( - config_name, self.ExpandSpecial - ) - if not pdbpath_c: - obj = "obj" - if self.toolset != "target": - obj += "." + self.toolset - pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name)) - pdbpath_c = pdbpath + ".c.pdb" - pdbpath_cc = pdbpath + ".cc.pdb" - self.WriteVariableList(ninja_file, "pdbname_c", [pdbpath_c]) - self.WriteVariableList(ninja_file, "pdbname_cc", [pdbpath_cc]) - self.WriteVariableList(ninja_file, "pchprefix", [self.name]) - else: - cflags = config.get("cflags", []) - cflags_c = config.get("cflags_c", []) - cflags_cc = config.get("cflags_cc", []) - - # Respect environment variables related to build, but target-specific - # flags can still override them. - if self.toolset == "target": - cflags_c = ( - os.environ.get("CPPFLAGS", "").split() - + os.environ.get("CFLAGS", "").split() - + cflags_c - ) - cflags_cc = ( - os.environ.get("CPPFLAGS", "").split() - + os.environ.get("CXXFLAGS", "").split() - + cflags_cc - ) - elif self.toolset == "host": - cflags_c = ( - os.environ.get("CPPFLAGS_host", "").split() - + os.environ.get("CFLAGS_host", "").split() - + cflags_c - ) - cflags_cc = ( - os.environ.get("CPPFLAGS_host", "").split() - + os.environ.get("CXXFLAGS_host", "").split() - + cflags_cc - ) - - defines = config.get("defines", []) + extra_defines - self.WriteVariableList( - ninja_file, "defines", [Define(d, self.flavor) for d in defines] - ) - if self.flavor == "win": - self.WriteVariableList( - ninja_file, "asmflags", map(self.ExpandSpecial, asmflags) - ) - self.WriteVariableList( - ninja_file, - "rcflags", - [ - QuoteShellArgument(self.ExpandSpecial(f), self.flavor) - for f in self.msvs_settings.GetRcflags( - config_name, self.GypPathToNinja - ) - ], - ) - - include_dirs = config.get("include_dirs", []) - - env = self.GetToolchainEnv() - if self.flavor == "win": - include_dirs = self.msvs_settings.AdjustIncludeDirs( - include_dirs, config_name - ) - self.WriteVariableList( - ninja_file, - "includes", - [ - QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor) - for i in include_dirs - ], - ) - - if self.flavor == "win": - midl_include_dirs = config.get("midl_include_dirs", []) - midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs( - midl_include_dirs, config_name - ) - self.WriteVariableList( - ninja_file, - "midl_includes", - [ - QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor) - for i in midl_include_dirs - ], - ) - - pch_commands = precompiled_header.GetPchBuildCommands(arch) - if 
self.flavor == "mac": - # Most targets use no precompiled headers, so only write these if needed. - for ext, var in [ - ("c", "cflags_pch_c"), - ("cc", "cflags_pch_cc"), - ("m", "cflags_pch_objc"), - ("mm", "cflags_pch_objcc"), - ]: - include = precompiled_header.GetInclude(ext, arch) - if include: - ninja_file.variable(var, include) - - arflags = config.get("arflags", []) - - self.WriteVariableList(ninja_file, "cflags", map(self.ExpandSpecial, cflags)) - self.WriteVariableList( - ninja_file, "cflags_c", map(self.ExpandSpecial, cflags_c) - ) - self.WriteVariableList( - ninja_file, "cflags_cc", map(self.ExpandSpecial, cflags_cc) - ) - if self.flavor == "mac": - self.WriteVariableList( - ninja_file, "cflags_objc", map(self.ExpandSpecial, cflags_objc) - ) - self.WriteVariableList( - ninja_file, "cflags_objcc", map(self.ExpandSpecial, cflags_objcc) - ) - self.WriteVariableList(ninja_file, "arflags", map(self.ExpandSpecial, arflags)) - ninja_file.newline() - outputs = [] - has_rc_source = False - for source in sources: - filename, ext = os.path.splitext(source) - ext = ext[1:] - obj_ext = self.obj_ext - if ext in ("cc", "cpp", "cxx"): - command = "cxx" - self.target.uses_cpp = True - elif ext == "c" or (ext == "S" and self.flavor != "win"): - command = "cc" - elif ext == "s" and self.flavor != "win": # Doesn't generate .o.d files. - command = "cc_s" - elif ( - self.flavor == "win" - and ext in ("asm", "S") - and not self.msvs_settings.HasExplicitAsmRules(spec) - ): - command = "asm" - # Add the _asm suffix as msvs is capable of handling .cc and - # .asm files of the same name without collision. - obj_ext = "_asm.obj" - elif self.flavor == "mac" and ext == "m": - command = "objc" - elif self.flavor == "mac" and ext == "mm": - command = "objcxx" - self.target.uses_cpp = True - elif self.flavor == "win" and ext == "rc": - command = "rc" - obj_ext = ".res" - has_rc_source = True - else: - # Ignore unhandled extensions. - continue - input = self.GypPathToNinja(source) - output = self.GypPathToUniqueOutput(filename + obj_ext) - if arch is not None: - output = AddArch(output, arch) - implicit = precompiled_header.GetObjDependencies([input], [output], arch) - variables = [] - if self.flavor == "win": - variables, output, implicit = precompiled_header.GetFlagsModifications( - input, - output, - implicit, - command, - cflags_c, - cflags_cc, - self.ExpandSpecial, - ) - ninja_file.build( - output, - command, - input, - implicit=[gch for _, _, gch in implicit], - order_only=predepends, - variables=variables, - ) - outputs.append(output) - - if has_rc_source: - resource_include_dirs = config.get("resource_include_dirs", include_dirs) - self.WriteVariableList( - ninja_file, - "resource_includes", - [ - QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor) - for i in resource_include_dirs - ], - ) - - self.WritePchTargets(ninja_file, pch_commands) - - ninja_file.newline() - return outputs - - def WritePchTargets(self, ninja_file, pch_commands): - """Writes ninja rules to compile prefix headers.""" - if not pch_commands: - return - - for gch, lang_flag, lang, input in pch_commands: - var_name = { - "c": "cflags_pch_c", - "cc": "cflags_pch_cc", - "m": "cflags_pch_objc", - "mm": "cflags_pch_objcc", - }[lang] - - map = { - "c": "cc", - "cc": "cxx", - "m": "objc", - "mm": "objcxx", - } - cmd = map.get(lang) - ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)]) - - def WriteLink(self, spec, config_name, config, link_deps, compile_deps): - """Write out a link step. 
Fills out target.binary. """ - if self.flavor != "mac" or len(self.archs) == 1: - return self.WriteLinkForArch( - self.ninja, spec, config_name, config, link_deps, compile_deps - ) - else: - output = self.ComputeOutput(spec) - inputs = [ - self.WriteLinkForArch( - self.arch_subninjas[arch], - spec, - config_name, - config, - link_deps[arch], - compile_deps, - arch=arch, - ) - for arch in self.archs - ] - extra_bindings = [] - build_output = output - if not self.is_mac_bundle: - self.AppendPostbuildVariable(extra_bindings, spec, output, output) - - # TODO(yyanagisawa): more work needed to fix: - # https://code.google.com/p/gyp/issues/detail?id=411 - if ( - spec["type"] in ("shared_library", "loadable_module") - and not self.is_mac_bundle - ): - extra_bindings.append(("lib", output)) - self.ninja.build( - [output, output + ".TOC"], - "solipo", - inputs, - variables=extra_bindings, - ) - else: - self.ninja.build(build_output, "lipo", inputs, variables=extra_bindings) - return output - - def WriteLinkForArch( - self, ninja_file, spec, config_name, config, link_deps, compile_deps, arch=None - ): - """Write out a link step. Fills out target.binary. """ - command = { - "executable": "link", - "loadable_module": "solink_module", - "shared_library": "solink", - }[spec["type"]] - command_suffix = "" - - implicit_deps = set() - solibs = set() - order_deps = set() - - if compile_deps: - # Normally, the compiles of the target already depend on compile_deps, - # but a shared_library target might have no sources and only link together - # a few static_library deps, so the link step also needs to depend - # on compile_deps to make sure actions in the shared_library target - # get run before the link. - order_deps.add(compile_deps) - - if "dependencies" in spec: - # Two kinds of dependencies: - # - Linkable dependencies (like a .a or a .so): add them to the link line. - # - Non-linkable dependencies (like a rule that generates a file - # and writes a stamp file): add them to implicit_deps - extra_link_deps = set() - for dep in spec["dependencies"]: - target = self.target_outputs.get(dep) - if not target: - continue - linkable = target.Linkable() - if linkable: - new_deps = [] - if ( - self.flavor == "win" - and target.component_objs - and self.msvs_settings.IsUseLibraryDependencyInputs(config_name) - ): - new_deps = target.component_objs - if target.compile_deps: - order_deps.add(target.compile_deps) - elif self.flavor == "win" and target.import_lib: - new_deps = [target.import_lib] - elif target.UsesToc(self.flavor): - solibs.add(target.binary) - implicit_deps.add(target.binary + ".TOC") - else: - new_deps = [target.binary] - for new_dep in new_deps: - if new_dep not in extra_link_deps: - extra_link_deps.add(new_dep) - link_deps.append(new_dep) - - final_output = target.FinalOutput() - if not linkable or final_output != target.binary: - implicit_deps.add(final_output) - - extra_bindings = [] - if self.target.uses_cpp and self.flavor != "win": - extra_bindings.append(("ld", "$ldxx")) - - output = self.ComputeOutput(spec, arch) - if arch is None and not self.is_mac_bundle: - self.AppendPostbuildVariable(extra_bindings, spec, output, output) - - is_executable = spec["type"] == "executable" - # The ldflags config key is not used on mac or win. On those platforms - # linker flags are set via xcode_settings and msvs_settings, respectively. 
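The dependency walk above feeds three distinct buckets: linkable outputs go on the link line, TOC'd shared libraries are named via $solibs while only their .TOC files become implicit inputs, and everything else becomes an implicit dependency so generated files exist before the link runs. A minimal, self-contained sketch of that bucketing, using an illustrative stand-in for the generator's Target object (all names here are hypothetical):

    from dataclasses import dataclass

    @dataclass
    class FakeTarget:
        """Illustrative stand-in for the generator's Target object."""
        binary: str
        linkable: bool = False
        uses_toc: bool = False   # shared library tracked via a .TOC file
        final_output: str = ""   # e.g. a bundle path; may differ from binary

        def FinalOutput(self):
            return self.final_output or self.binary

    def classify_link_deps(dep_targets):
        """Bucket dependency outputs the way WriteLinkForArch does."""
        link_deps, solibs, implicit = [], set(), set()
        for t in dep_targets:
            if t.linkable:
                if t.uses_toc:
                    # Link against the library by name, but depend only on
                    # its .TOC so interface-preserving changes skip relinks.
                    solibs.add(t.binary)
                    implicit.add(t.binary + ".TOC")
                else:
                    link_deps.append(t.binary)
            if not t.linkable or t.FinalOutput() != t.binary:
                implicit.add(t.FinalOutput())
        return link_deps, solibs, implicit

    deps = [
        FakeTarget("obj/libfoo.a", linkable=True),
        FakeTarget("lib/libbar.so", linkable=True, uses_toc=True),
        FakeTarget("gen/version.h.stamp"),
    ]
    print(classify_link_deps(deps))
    # (['obj/libfoo.a'], {'lib/libbar.so'},
    #  {'lib/libbar.so.TOC', 'gen/version.h.stamp'})  -- set order may vary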
- if self.toolset == "target": - env_ldflags = os.environ.get("LDFLAGS", "").split() - elif self.toolset == "host": - env_ldflags = os.environ.get("LDFLAGS_host", "").split() - - if self.flavor == "mac": - ldflags = self.xcode_settings.GetLdflags( - config_name, - self.ExpandSpecial(generator_default_variables["PRODUCT_DIR"]), - self.GypPathToNinja, - arch, - ) - ldflags = env_ldflags + ldflags - elif self.flavor == "win": - manifest_base_name = self.GypPathToUniqueOutput( - self.ComputeOutputFileName(spec) - ) - ( - ldflags, - intermediate_manifest, - manifest_files, - ) = self.msvs_settings.GetLdflags( - config_name, - self.GypPathToNinja, - self.ExpandSpecial, - manifest_base_name, - output, - is_executable, - self.toplevel_build, - ) - ldflags = env_ldflags + ldflags - self.WriteVariableList(ninja_file, "manifests", manifest_files) - implicit_deps = implicit_deps.union(manifest_files) - if intermediate_manifest: - self.WriteVariableList( - ninja_file, "intermediatemanifest", [intermediate_manifest] - ) - command_suffix = _GetWinLinkRuleNameSuffix( - self.msvs_settings.IsEmbedManifest(config_name) - ) - def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja) - if def_file: - implicit_deps.add(def_file) - else: - # Respect environment variables related to build, but target-specific - # flags can still override them. - ldflags = env_ldflags + config.get("ldflags", []) - if is_executable and len(solibs): - rpath = "lib/" - if self.toolset != "target": - rpath += self.toolset - ldflags.append(r"-Wl,-rpath=\$$ORIGIN/%s" % rpath) - else: - ldflags.append("-Wl,-rpath=%s" % self.target_rpath) - ldflags.append("-Wl,-rpath-link=%s" % rpath) - self.WriteVariableList(ninja_file, "ldflags", map(self.ExpandSpecial, ldflags)) - - library_dirs = config.get("library_dirs", []) - if self.flavor == "win": - library_dirs = [ - self.msvs_settings.ConvertVSMacros(library_dir, config_name) - for library_dir in library_dirs - ] - library_dirs = [ - "/LIBPATH:" - + QuoteShellArgument(self.GypPathToNinja(library_dir), self.flavor) - for library_dir in library_dirs - ] - else: - library_dirs = [ - QuoteShellArgument("-L" + self.GypPathToNinja(library_dir), self.flavor) - for library_dir in library_dirs - ] - - libraries = gyp.common.uniquer( - map(self.ExpandSpecial, spec.get("libraries", [])) - ) - if self.flavor == "mac": - libraries = self.xcode_settings.AdjustLibraries(libraries, config_name) - elif self.flavor == "win": - libraries = self.msvs_settings.AdjustLibraries(libraries) - - self.WriteVariableList(ninja_file, "libs", library_dirs + libraries) - - linked_binary = output - - if command in ("solink", "solink_module"): - extra_bindings.append(("soname", os.path.split(output)[1])) - extra_bindings.append(("lib", gyp.common.EncodePOSIXShellArgument(output))) - if self.flavor != "win": - link_file_list = output - if self.is_mac_bundle: - # 'Dependency Framework.framework/Versions/A/Dependency Framework' - # -> 'Dependency Framework.framework.rsp' - link_file_list = self.xcode_settings.GetWrapperName() - if arch: - link_file_list += "." 
+ arch - link_file_list += ".rsp" - # If an rspfile contains spaces, ninja surrounds the filename with - # quotes around it and then passes it to open(), creating a file with - # quotes in its name (and when looking for the rsp file, the name - # makes it through bash which strips the quotes) :-/ - link_file_list = link_file_list.replace(" ", "_") - extra_bindings.append( - ( - "link_file_list", - gyp.common.EncodePOSIXShellArgument(link_file_list), - ) - ) - if self.flavor == "win": - extra_bindings.append(("binary", output)) - if ( - "/NOENTRY" not in ldflags - and not self.msvs_settings.GetNoImportLibrary(config_name) - ): - self.target.import_lib = output + ".lib" - extra_bindings.append( - ("implibflag", "/IMPLIB:%s" % self.target.import_lib) - ) - pdbname = self.msvs_settings.GetPDBName( - config_name, self.ExpandSpecial, output + ".pdb" - ) - output = [output, self.target.import_lib] - if pdbname: - output.append(pdbname) - elif not self.is_mac_bundle: - output = [output, output + ".TOC"] - else: - command = command + "_notoc" - elif self.flavor == "win": - extra_bindings.append(("binary", output)) - pdbname = self.msvs_settings.GetPDBName( - config_name, self.ExpandSpecial, output + ".pdb" - ) - if pdbname: - output = [output, pdbname] - - if len(solibs): - extra_bindings.append( - ("solibs", gyp.common.EncodePOSIXShellList(sorted(solibs))) - ) - - ninja_file.build( - output, - command + command_suffix, - link_deps, - implicit=sorted(implicit_deps), - order_only=list(order_deps), - variables=extra_bindings, - ) - return linked_binary - - def WriteTarget(self, spec, config_name, config, link_deps, compile_deps): - extra_link_deps = any( - self.target_outputs.get(dep).Linkable() - for dep in spec.get("dependencies", []) - if dep in self.target_outputs - ) - if spec["type"] == "none" or (not link_deps and not extra_link_deps): - # TODO(evan): don't call this function for 'none' target types, as - # it doesn't do anything, and we fake out a 'binary' with a stamp file. - self.target.binary = compile_deps - self.target.type = "none" - elif spec["type"] == "static_library": - self.target.binary = self.ComputeOutput(spec) - if ( - self.flavor not in ("ios", "mac", "netbsd", "openbsd", "win") - and not self.is_standalone_static_library - ): - self.ninja.build( - self.target.binary, "alink_thin", link_deps, order_only=compile_deps - ) - else: - variables = [] - if self.xcode_settings: - libtool_flags = self.xcode_settings.GetLibtoolflags(config_name) - if libtool_flags: - variables.append(("libtool_flags", libtool_flags)) - if self.msvs_settings: - libflags = self.msvs_settings.GetLibFlags( - config_name, self.GypPathToNinja - ) - variables.append(("libflags", libflags)) - - if self.flavor != "mac" or len(self.archs) == 1: - self.AppendPostbuildVariable( - variables, spec, self.target.binary, self.target.binary - ) - self.ninja.build( - self.target.binary, - "alink", - link_deps, - order_only=compile_deps, - variables=variables, - ) - else: - inputs = [] - for arch in self.archs: - output = self.ComputeOutput(spec, arch) - self.arch_subninjas[arch].build( - output, - "alink", - link_deps[arch], - order_only=compile_deps, - variables=variables, - ) - inputs.append(output) - # TODO: It's not clear if - # libtool_flags should be passed to the alink - # call that combines single-arch .a files into a fat .a file. 
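For the multi-arch mac case above, each architecture is linked or archived separately and a lipo step combines the thin outputs into a fat binary. A condensed sketch of that shape, assuming the standalone ninja_syntax module (the same module gyp vendors; `pip install ninja_syntax`) and emitting everything through one writer rather than per-arch subninjas:

    # pip install ninja_syntax -- standalone copy of the module gyp vendors
    import io

    import ninja_syntax

    buf = io.StringIO()
    n = ninja_syntax.Writer(buf, width=120)
    n.rule("alink", command="rm -f $out && ar rcs $out $in",
           description="AR $out")
    n.rule("lipo", command="lipo -create $in -output $out",
           description="LIPO $out")

    arch_outputs = []
    for arch in ("x86_64", "arm64"):
        out = f"arch/{arch}/libdemo.a"          # thin, single-arch archive
        n.build(out, "alink", [f"obj/{arch}/demo.o"])
        arch_outputs.append(out)
    n.build("libdemo.a", "lipo", arch_outputs)  # fat archive combining them

    print(buf.getvalue())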
- self.AppendPostbuildVariable( - variables, spec, self.target.binary, self.target.binary - ) - self.ninja.build( - self.target.binary, - "alink", - inputs, - # FIXME: test proving order_only=compile_deps isn't - # needed. - variables=variables, - ) - else: - self.target.binary = self.WriteLink( - spec, config_name, config, link_deps, compile_deps - ) - return self.target.binary - - def WriteMacBundle(self, spec, mac_bundle_depends, is_empty): - assert self.is_mac_bundle - package_framework = spec["type"] in ("shared_library", "loadable_module") - output = self.ComputeMacBundleOutput() - if is_empty: - output += ".stamp" - variables = [] - self.AppendPostbuildVariable( - variables, - spec, - output, - self.target.binary, - is_command_start=not package_framework, - ) - if package_framework and not is_empty: - if spec["type"] == "shared_library" and self.xcode_settings.isIOS: - self.ninja.build( - output, - "package_ios_framework", - mac_bundle_depends, - variables=variables, - ) - else: - variables.append(("version", self.xcode_settings.GetFrameworkVersion())) - self.ninja.build( - output, "package_framework", mac_bundle_depends, variables=variables - ) - else: - self.ninja.build(output, "stamp", mac_bundle_depends, variables=variables) - self.target.bundle = output - return output - - def GetToolchainEnv(self, additional_settings=None): - """Returns the variables toolchain would set for build steps.""" - env = self.GetSortedXcodeEnv(additional_settings=additional_settings) - if self.flavor == "win": - env = self.GetMsvsToolchainEnv(additional_settings=additional_settings) - return env - - def GetMsvsToolchainEnv(self, additional_settings=None): - """Returns the variables Visual Studio would set for build steps.""" - return self.msvs_settings.GetVSMacroEnv( - "$!PRODUCT_DIR", config=self.config_name - ) - - def GetSortedXcodeEnv(self, additional_settings=None): - """Returns the variables Xcode would set for build steps.""" - assert self.abs_build_dir - abs_build_dir = self.abs_build_dir - return gyp.xcode_emulation.GetSortedXcodeEnv( - self.xcode_settings, - abs_build_dir, - os.path.join(abs_build_dir, self.build_to_base), - self.config_name, - additional_settings, - ) - - def GetSortedXcodePostbuildEnv(self): - """Returns the variables Xcode would set for postbuild steps.""" - postbuild_settings = {} - # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack. - # TODO(thakis): It would be nice to have some general mechanism instead. - strip_save_file = self.xcode_settings.GetPerTargetSetting( - "CHROMIUM_STRIP_SAVE_FILE" - ) - if strip_save_file: - postbuild_settings["CHROMIUM_STRIP_SAVE_FILE"] = strip_save_file - return self.GetSortedXcodeEnv(additional_settings=postbuild_settings) - - def AppendPostbuildVariable( - self, variables, spec, output, binary, is_command_start=False - ): - """Adds a 'postbuild' variable if there is a postbuild for |output|.""" - postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start) - if postbuild: - variables.append(("postbuilds", postbuild)) - - def GetPostbuildCommand(self, spec, output, output_binary, is_command_start): - """Returns a shell command that runs all the postbuilds, and removes - |output| if any of them fails. 
If |is_command_start| is False, then the - returned string will start with ' && '.""" - if not self.xcode_settings or spec["type"] == "none" or not output: - return "" - output = QuoteShellArgument(output, self.flavor) - postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True) - if output_binary is not None: - postbuilds = self.xcode_settings.AddImplicitPostbuilds( - self.config_name, - os.path.normpath(os.path.join(self.base_to_build, output)), - QuoteShellArgument( - os.path.normpath(os.path.join(self.base_to_build, output_binary)), - self.flavor, - ), - postbuilds, - quiet=True, - ) - - if not postbuilds: - return "" - # Postbuilds expect to be run in the gyp file's directory, so insert an - # implicit postbuild to cd to there. - postbuilds.insert( - 0, gyp.common.EncodePOSIXShellList(["cd", self.build_to_base]) - ) - env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv()) - # G will be non-null if any postbuild fails. Run all postbuilds in a - # subshell. - commands = ( - env - + " (" - + " && ".join([ninja_syntax.escape(command) for command in postbuilds]) - ) - command_string = ( - commands - + "); G=$$?; " - # Remove the final output if any postbuild failed. - "((exit $$G) || rm -rf %s) " % output - + "&& exit $$G)" - ) - if is_command_start: - return "(" + command_string + " && " - else: - return "$ && (" + command_string - - def ComputeExportEnvString(self, env): - """Given an environment, returns a string looking like - 'export FOO=foo; export BAR="${FOO} bar;' - that exports |env| to the shell.""" - export_str = [] - for k, v in env: - export_str.append( - "export %s=%s;" - % (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v))) - ) - return " ".join(export_str) - - def ComputeMacBundleOutput(self): - """Return the 'output' (full output path) to a bundle output directory.""" - assert self.is_mac_bundle - path = generator_default_variables["PRODUCT_DIR"] - return self.ExpandSpecial( - os.path.join(path, self.xcode_settings.GetWrapperName()) - ) - - def ComputeOutputFileName(self, spec, type=None): - """Compute the filename of the final output for the current target.""" - if not type: - type = spec["type"] - - default_variables = copy.copy(generator_default_variables) - CalculateVariables(default_variables, {"flavor": self.flavor}) - - # Compute filename prefix: the product prefix, or a default for - # the product type. - DEFAULT_PREFIX = { - "loadable_module": default_variables["SHARED_LIB_PREFIX"], - "shared_library": default_variables["SHARED_LIB_PREFIX"], - "static_library": default_variables["STATIC_LIB_PREFIX"], - "executable": default_variables["EXECUTABLE_PREFIX"], - } - prefix = spec.get("product_prefix", DEFAULT_PREFIX.get(type, "")) - - # Compute filename extension: the product extension, or a default - # for the product type. - DEFAULT_EXTENSION = { - "loadable_module": default_variables["SHARED_LIB_SUFFIX"], - "shared_library": default_variables["SHARED_LIB_SUFFIX"], - "static_library": default_variables["STATIC_LIB_SUFFIX"], - "executable": default_variables["EXECUTABLE_SUFFIX"], - } - extension = spec.get("product_extension") - extension = "." + extension if extension else DEFAULT_EXTENSION.get(type, "") - - if "product_name" in spec: - # If we were given an explicit name, use that. - target = spec["product_name"] - else: - # Otherwise, derive a name from the target name. - target = spec["target_name"] - if prefix == "lib": - # Snip out an extra 'lib' from libs if appropriate. 
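ComputeOutputFileName above is pure bookkeeping: a per-type prefix and suffix around the target (or product) name, with a guard against doubling the `lib` prefix. A simplified sketch with hard-coded linux-style defaults (the real method pulls these from generator_default_variables and also handles mac/win overrides):

    # Hard-coded linux-style defaults; gyp derives these per flavor.
    PREFIX = {"shared_library": "lib", "static_library": "lib"}
    SUFFIX = {"executable": "", "shared_library": ".so",
              "static_library": ".a", "loadable_module": ".so"}

    def output_file_name(spec, type=None):
        type = type or spec["type"]
        prefix = spec.get("product_prefix", PREFIX.get(type, ""))
        ext = spec.get("product_extension")
        extension = "." + ext if ext else SUFFIX.get(type, "")
        target = spec.get("product_name", spec["target_name"])
        if prefix == "lib" and target.startswith("lib"):
            target = target[len("lib"):]   # avoid "liblibfoo.so"
        if type == "none":
            return target + ".stamp"
        return f"{prefix}{target}{extension}"

    assert output_file_name({"target_name": "wee",
                             "type": "executable"}) == "wee"
    assert output_file_name({"target_name": "libfoo",
                             "type": "shared_library"}) == "libfoo.so"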
- target = StripPrefix(target, "lib") - - if type in ( - "static_library", - "loadable_module", - "shared_library", - "executable", - ): - return f"{prefix}{target}{extension}" - elif type == "none": - return "%s.stamp" % target - else: - raise Exception("Unhandled output type %s" % type) - - def ComputeOutput(self, spec, arch=None): - """Compute the path for the final output of the spec.""" - type = spec["type"] - - if self.flavor == "win": - override = self.msvs_settings.GetOutputName( - self.config_name, self.ExpandSpecial - ) - if override: - return override - - if ( - arch is None - and self.flavor == "mac" - and type - in ("static_library", "executable", "shared_library", "loadable_module") - ): - filename = self.xcode_settings.GetExecutablePath() - else: - filename = self.ComputeOutputFileName(spec, type) - - if arch is None and "product_dir" in spec: - path = os.path.join(spec["product_dir"], filename) - return self.ExpandSpecial(path) - - # Some products go into the output root, libraries go into shared library - # dir, and everything else goes into the normal place. - type_in_output_root = ["executable", "loadable_module"] - if self.flavor == "mac" and self.toolset == "target": - type_in_output_root += ["shared_library", "static_library"] - elif self.flavor == "win" and self.toolset == "target": - type_in_output_root += ["shared_library"] - - if arch is not None: - # Make sure partial executables don't end up in a bundle or the regular - # output directory. - archdir = "arch" - if self.toolset != "target": - archdir = os.path.join("arch", "%s" % self.toolset) - return os.path.join(archdir, AddArch(filename, arch)) - elif type in type_in_output_root or self.is_standalone_static_library: - return filename - elif type == "shared_library": - libdir = "lib" - if self.toolset != "target": - libdir = os.path.join("lib", "%s" % self.toolset) - return os.path.join(libdir, filename) - else: - return self.GypPathToUniqueOutput(filename, qualified=False) - - def WriteVariableList(self, ninja_file, var, values): - assert not isinstance(values, str) - if values is None: - values = [] - ninja_file.variable(var, " ".join(values)) - - def WriteNewNinjaRule( - self, name, args, description, win_shell_flags, env, pool, depfile=None - ): - """Write out a new ninja "rule" statement for a given command. - - Returns the name of the new rule, and a copy of |args| with variables - expanded.""" - - if self.flavor == "win": - args = [ - self.msvs_settings.ConvertVSMacros( - arg, self.base_to_build, config=self.config_name - ) - for arg in args - ] - description = self.msvs_settings.ConvertVSMacros( - description, config=self.config_name - ) - elif self.flavor == "mac": - # |env| is an empty list on non-mac. - args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args] - description = gyp.xcode_emulation.ExpandEnvVars(description, env) - - # TODO: we shouldn't need to qualify names; we do it because - # currently the ninja rule namespace is global, but it really - # should be scoped to the subninja. - rule_name = self.name - if self.toolset == "target": - rule_name += "." + self.toolset - rule_name += "." + name - rule_name = re.sub("[^a-zA-Z0-9_]", "_", rule_name) - - # Remove variable references, but not if they refer to the magic rule - # variables. This is not quite right, as it also protects these for - # actions, not just for rules where they are valid. Good enough. - protect = ["${root}", "${dirname}", "${source}", "${ext}", "${name}"] - protect = "(?!" 
+ "|".join(map(re.escape, protect)) + ")" - description = re.sub(protect + r"\$", "_", description) - - # gyp dictates that commands are run from the base directory. - # cd into the directory before running, and adjust paths in - # the arguments to point to the proper locations. - rspfile = None - rspfile_content = None - args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args] - if self.flavor == "win": - rspfile = rule_name + ".$unique_name.rsp" - # The cygwin case handles this inside the bash sub-shell. - run_in = "" if win_shell_flags.cygwin else " " + self.build_to_base - if win_shell_flags.cygwin: - rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine( - args, self.build_to_base - ) - else: - rspfile_content = gyp.msvs_emulation.EncodeRspFileList( - args, win_shell_flags.quote) - command = ( - "%s gyp-win-tool action-wrapper $arch " % sys.executable - + rspfile - + run_in - ) - else: - env = self.ComputeExportEnvString(env) - command = gyp.common.EncodePOSIXShellList(args) - command = "cd %s; " % self.build_to_base + env + command - - # GYP rules/actions express being no-ops by not touching their outputs. - # Avoid executing downstream dependencies in this case by specifying - # restat=1 to ninja. - self.ninja.rule( - rule_name, - command, - description, - depfile=depfile, - restat=True, - pool=pool, - rspfile=rspfile, - rspfile_content=rspfile_content, - ) - self.ninja.newline() - - return rule_name, args - - -def CalculateVariables(default_variables, params): - """Calculate additional variables for use in the build (called by gyp).""" - global generator_additional_non_configuration_keys - global generator_additional_path_sections - flavor = gyp.common.GetFlavor(params) - if flavor == "mac": - default_variables.setdefault("OS", "mac") - default_variables.setdefault("SHARED_LIB_SUFFIX", ".dylib") - default_variables.setdefault( - "SHARED_LIB_DIR", generator_default_variables["PRODUCT_DIR"] - ) - default_variables.setdefault( - "LIB_DIR", generator_default_variables["PRODUCT_DIR"] - ) - - # Copy additional generator configuration data from Xcode, which is shared - # by the Mac Ninja generator. - import gyp.generator.xcode as xcode_generator - - generator_additional_non_configuration_keys = getattr( - xcode_generator, "generator_additional_non_configuration_keys", [] - ) - generator_additional_path_sections = getattr( - xcode_generator, "generator_additional_path_sections", [] - ) - global generator_extra_sources_for_rules - generator_extra_sources_for_rules = getattr( - xcode_generator, "generator_extra_sources_for_rules", [] - ) - elif flavor == "win": - exts = gyp.MSVSUtil.TARGET_TYPE_EXT - default_variables.setdefault("OS", "win") - default_variables["EXECUTABLE_SUFFIX"] = "." + exts["executable"] - default_variables["STATIC_LIB_PREFIX"] = "" - default_variables["STATIC_LIB_SUFFIX"] = "." + exts["static_library"] - default_variables["SHARED_LIB_PREFIX"] = "" - default_variables["SHARED_LIB_SUFFIX"] = "." + exts["shared_library"] - - # Copy additional generator configuration data from VS, which is shared - # by the Windows Ninja generator. 
- import gyp.generator.msvs as msvs_generator - - generator_additional_non_configuration_keys = getattr( - msvs_generator, "generator_additional_non_configuration_keys", [] - ) - generator_additional_path_sections = getattr( - msvs_generator, "generator_additional_path_sections", [] - ) - - gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) - else: - operating_system = flavor - if flavor == "android": - operating_system = "linux" # Keep this legacy behavior for now. - default_variables.setdefault("OS", operating_system) - default_variables.setdefault("SHARED_LIB_SUFFIX", ".so") - default_variables.setdefault( - "SHARED_LIB_DIR", os.path.join("$!PRODUCT_DIR", "lib") - ) - default_variables.setdefault("LIB_DIR", os.path.join("$!PRODUCT_DIR", "obj")) - - -def ComputeOutputDir(params): - """Returns the path from the toplevel_dir to the build output directory.""" - # generator_dir: relative path from pwd to where make puts build files. - # Makes migrating from make to ninja easier, ninja doesn't put anything here. - generator_dir = os.path.relpath(params["options"].generator_output or ".") - - # output_dir: relative path from generator_dir to the build directory. - output_dir = params.get("generator_flags", {}).get("output_dir", "out") - - # Relative path from source root to our output files. e.g. "out" - return os.path.normpath(os.path.join(generator_dir, output_dir)) - - -def CalculateGeneratorInputInfo(params): - """Called by __init__ to initialize generator values based on params.""" - # E.g. "out/gypfiles" - toplevel = params["options"].toplevel_dir - qualified_out_dir = os.path.normpath( - os.path.join(toplevel, ComputeOutputDir(params), "gypfiles") - ) - - global generator_filelist_paths - generator_filelist_paths = { - "toplevel": toplevel, - "qualified_out_dir": qualified_out_dir, - } - - -def OpenOutput(path, mode="w"): - """Open |path| for writing, creating directories if necessary.""" - gyp.common.EnsureDirExists(path) - return open(path, mode) - - -def CommandWithWrapper(cmd, wrappers, prog): - wrapper = wrappers.get(cmd, "") - if wrapper: - return wrapper + " " + prog - return prog - - -def GetDefaultConcurrentLinks(): - """Returns a best-guess for a number of concurrent links.""" - pool_size = int(os.environ.get("GYP_LINK_CONCURRENCY", 0)) - if pool_size: - return pool_size - - if sys.platform in ("win32", "cygwin"): - import ctypes - - class MEMORYSTATUSEX(ctypes.Structure): - _fields_ = [ - ("dwLength", ctypes.c_ulong), - ("dwMemoryLoad", ctypes.c_ulong), - ("ullTotalPhys", ctypes.c_ulonglong), - ("ullAvailPhys", ctypes.c_ulonglong), - ("ullTotalPageFile", ctypes.c_ulonglong), - ("ullAvailPageFile", ctypes.c_ulonglong), - ("ullTotalVirtual", ctypes.c_ulonglong), - ("ullAvailVirtual", ctypes.c_ulonglong), - ("sullAvailExtendedVirtual", ctypes.c_ulonglong), - ] - - stat = MEMORYSTATUSEX() - stat.dwLength = ctypes.sizeof(stat) - ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) - - # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM - # on a 64 GiB machine. 
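GetDefaultConcurrentLinks sizes the link_pool from physical RAM, with GYP_LINK_CONCURRENCY as an override, because parallel links are memory-bound rather than CPU-bound. A rough, POSIX-only equivalent (the real function has per-platform branches and different GiB-per-link budgets; 8 GiB here is just the Linux figure used below):

    import os

    def default_concurrent_links(gib_per_link=8):
        """One link job per `gib_per_link` GiB of physical RAM, honoring
        the same GYP_LINK_CONCURRENCY override as above."""
        override = int(os.environ.get("GYP_LINK_CONCURRENCY", 0))
        if override:
            return override
        try:
            total = os.sysconf("SC_PHYS_PAGES") * os.sysconf("SC_PAGE_SIZE")
        except (AttributeError, ValueError, OSError):
            return 1   # no usable sysconf (e.g. Windows): serial links
        return max(1, total // (gib_per_link * 2 ** 30))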
- mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30))) # total / 5GiB - hard_cap = max(1, int(os.environ.get("GYP_LINK_CONCURRENCY_MAX", 2 ** 32))) - return min(mem_limit, hard_cap) - elif sys.platform.startswith("linux"): - if os.path.exists("/proc/meminfo"): - with open("/proc/meminfo") as meminfo: - memtotal_re = re.compile(r"^MemTotal:\s*(\d*)\s*kB") - for line in meminfo: - match = memtotal_re.match(line) - if not match: - continue - # Allow 8Gb per link on Linux because Gold is quite memory hungry - return max(1, int(match.group(1)) // (8 * (2 ** 20))) - return 1 - elif sys.platform == "darwin": - try: - avail_bytes = int(subprocess.check_output(["sysctl", "-n", "hw.memsize"])) - # A static library debug build of Chromium's unit_tests takes ~2.7GB, so - # 4GB per ld process allows for some more bloat. - return max(1, avail_bytes // (4 * (2 ** 30))) # total / 4GB - except subprocess.CalledProcessError: - return 1 - else: - # TODO(scottmg): Implement this for other platforms. - return 1 - - -def _GetWinLinkRuleNameSuffix(embed_manifest): - """Returns the suffix used to select an appropriate linking rule depending on - whether the manifest embedding is enabled.""" - return "_embed" if embed_manifest else "" - - -def _AddWinLinkRules(master_ninja, embed_manifest): - """Adds link rules for Windows platform to |master_ninja|.""" - - def FullLinkCommand(ldcmd, out, binary_type): - resource_name = {"exe": "1", "dll": "2"}[binary_type] - return ( - "%(python)s gyp-win-tool link-with-manifests $arch %(embed)s " - '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' - "$manifests" - % { - "python": sys.executable, - "out": out, - "ldcmd": ldcmd, - "resname": resource_name, - "embed": embed_manifest, - } - ) - - rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest) - use_separate_mspdbsrv = int(os.environ.get("GYP_USE_SEPARATE_MSPDBSRV", "0")) != 0 - dlldesc = "LINK%s(DLL) $binary" % rule_name_suffix.upper() - dllcmd = ( - "%s gyp-win-tool link-wrapper $arch %s " - "$ld /nologo $implibflag /DLL /OUT:$binary " - "@$binary.rsp" % (sys.executable, use_separate_mspdbsrv) - ) - dllcmd = FullLinkCommand(dllcmd, "$binary", "dll") - master_ninja.rule( - "solink" + rule_name_suffix, - description=dlldesc, - command=dllcmd, - rspfile="$binary.rsp", - rspfile_content="$libs $in_newline $ldflags", - restat=True, - pool="link_pool", - ) - master_ninja.rule( - "solink_module" + rule_name_suffix, - description=dlldesc, - command=dllcmd, - rspfile="$binary.rsp", - rspfile_content="$libs $in_newline $ldflags", - restat=True, - pool="link_pool", - ) - # Note that ldflags goes at the end so that it has the option of - # overriding default settings earlier in the command line. - exe_cmd = ( - "%s gyp-win-tool link-wrapper $arch %s " - "$ld /nologo /OUT:$binary @$binary.rsp" - % (sys.executable, use_separate_mspdbsrv) - ) - exe_cmd = FullLinkCommand(exe_cmd, "$binary", "exe") - master_ninja.rule( - "link" + rule_name_suffix, - description="LINK%s $binary" % rule_name_suffix.upper(), - command=exe_cmd, - rspfile="$binary.rsp", - rspfile_content="$in_newline $libs $ldflags", - pool="link_pool", - ) - - -def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name): - options = params["options"] - flavor = gyp.common.GetFlavor(params) - generator_flags = params.get("generator_flags", {}) - generate_compile_commands = generator_flags.get("compile_commands", False) - - # build_dir: relative path from source root to our output files. - # e.g. 
"out/Debug" - build_dir = os.path.normpath(os.path.join(ComputeOutputDir(params), config_name)) - - toplevel_build = os.path.join(options.toplevel_dir, build_dir) - - master_ninja_file = OpenOutput(os.path.join(toplevel_build, "build.ninja")) - master_ninja = ninja_syntax.Writer(master_ninja_file, width=120) - - # Put build-time support tools in out/{config_name}. - gyp.common.CopyTool(flavor, toplevel_build, generator_flags) - - # Grab make settings for CC/CXX. - # The rules are - # - The priority from low to high is gcc/g++, the 'make_global_settings' in - # gyp, the environment variable. - # - If there is no 'make_global_settings' for CC.host/CXX.host or - # 'CC_host'/'CXX_host' environment variable, cc_host/cxx_host should be set - # to cc/cxx. - if flavor == "win": - ar = "lib.exe" - # cc and cxx must be set to the correct architecture by overriding with one - # of cl_x86 or cl_x64 below. - cc = "UNSET" - cxx = "UNSET" - ld = "link.exe" - ld_host = "$ld" - else: - ar = "ar" - cc = "cc" - cxx = "c++" - ld = "$cc" - ldxx = "$cxx" - ld_host = "$cc_host" - ldxx_host = "$cxx_host" - - ar_host = ar - cc_host = None - cxx_host = None - cc_host_global_setting = None - cxx_host_global_setting = None - clang_cl = None - nm = "nm" - nm_host = "nm" - readelf = "readelf" - readelf_host = "readelf" - - build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) - make_global_settings = data[build_file].get("make_global_settings", []) - build_to_root = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir) - wrappers = {} - for key, value in make_global_settings: - if key == "AR": - ar = os.path.join(build_to_root, value) - if key == "AR.host": - ar_host = os.path.join(build_to_root, value) - if key == "CC": - cc = os.path.join(build_to_root, value) - if cc.endswith("clang-cl"): - clang_cl = cc - if key == "CXX": - cxx = os.path.join(build_to_root, value) - if key == "CC.host": - cc_host = os.path.join(build_to_root, value) - cc_host_global_setting = value - if key == "CXX.host": - cxx_host = os.path.join(build_to_root, value) - cxx_host_global_setting = value - if key == "LD": - ld = os.path.join(build_to_root, value) - if key == "LD.host": - ld_host = os.path.join(build_to_root, value) - if key == "LDXX": - ldxx = os.path.join(build_to_root, value) - if key == "LDXX.host": - ldxx_host = os.path.join(build_to_root, value) - if key == "NM": - nm = os.path.join(build_to_root, value) - if key == "NM.host": - nm_host = os.path.join(build_to_root, value) - if key == "READELF": - readelf = os.path.join(build_to_root, value) - if key == "READELF.host": - readelf_host = os.path.join(build_to_root, value) - if key.endswith("_wrapper"): - wrappers[key[: -len("_wrapper")]] = os.path.join(build_to_root, value) - - # Support wrappers from environment variables too. 
- for key, value in os.environ.items(): - if key.lower().endswith("_wrapper"): - key_prefix = key[: -len("_wrapper")] - key_prefix = re.sub(r"\.HOST$", ".host", key_prefix) - wrappers[key_prefix] = os.path.join(build_to_root, value) - - mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None) - if mac_toolchain_dir: - wrappers["LINK"] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir - - if flavor == "win": - configs = [ - target_dicts[qualified_target]["configurations"][config_name] - for qualified_target in target_list - ] - shared_system_includes = None - if not generator_flags.get("ninja_use_custom_environment_files", 0): - shared_system_includes = gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes( - configs, generator_flags - ) - cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles( - toplevel_build, generator_flags, shared_system_includes, OpenOutput - ) - for arch, path in sorted(cl_paths.items()): - if clang_cl: - # If we have selected clang-cl, use that instead. - path = clang_cl - command = CommandWithWrapper( - "CC", wrappers, QuoteShellArgument(path, "win") - ) - if clang_cl: - # Use clang-cl to cross-compile for x86 or x86_64. - command += " -m32" if arch == "x86" else " -m64" - master_ninja.variable("cl_" + arch, command) - - cc = GetEnvironFallback(["CC_target", "CC"], cc) - master_ninja.variable("cc", CommandWithWrapper("CC", wrappers, cc)) - cxx = GetEnvironFallback(["CXX_target", "CXX"], cxx) - master_ninja.variable("cxx", CommandWithWrapper("CXX", wrappers, cxx)) - - if flavor == "win": - master_ninja.variable("ld", ld) - master_ninja.variable("idl", "midl.exe") - master_ninja.variable("ar", ar) - master_ninja.variable("rc", "rc.exe") - master_ninja.variable("ml_x86", "ml.exe") - master_ninja.variable("ml_x64", "ml64.exe") - master_ninja.variable("mt", "mt.exe") - else: - master_ninja.variable("ld", CommandWithWrapper("LINK", wrappers, ld)) - master_ninja.variable("ldxx", CommandWithWrapper("LINK", wrappers, ldxx)) - master_ninja.variable("ar", GetEnvironFallback(["AR_target", "AR"], ar)) - if flavor != "mac": - # Mac does not use readelf/nm for .TOC generation, so avoiding polluting - # the master ninja with extra unused variables. - master_ninja.variable("nm", GetEnvironFallback(["NM_target", "NM"], nm)) - master_ninja.variable( - "readelf", GetEnvironFallback(["READELF_target", "READELF"], readelf) - ) - - if generator_supports_multiple_toolsets: - if not cc_host: - cc_host = cc - if not cxx_host: - cxx_host = cxx - - master_ninja.variable("ar_host", GetEnvironFallback(["AR_host"], ar_host)) - master_ninja.variable("nm_host", GetEnvironFallback(["NM_host"], nm_host)) - master_ninja.variable( - "readelf_host", GetEnvironFallback(["READELF_host"], readelf_host) - ) - cc_host = GetEnvironFallback(["CC_host"], cc_host) - cxx_host = GetEnvironFallback(["CXX_host"], cxx_host) - - # The environment variable could be used in 'make_global_settings', like - # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here. 
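The CC/CXX resolution below leans on gyp.common.GetEnvironFallback, which returns the value of the first environment variable that is set and otherwise a default; this is what gives environment variables the highest priority in the ordering described earlier. Roughly (a sketch, not the exact library code):

    import os

    def environ_fallback(var_list, default, env=None):
        env = os.environ if env is None else env
        for var in var_list:
            if var in env:
                return env[var]
        return default

    assert environ_fallback(["CC_target", "CC"], "cc",
                            {"CC": "gcc-13"}) == "gcc-13"
    assert environ_fallback(["CC_target", "CC"], "cc", {}) == "cc"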
- if "$(CC)" in cc_host and cc_host_global_setting: - cc_host = cc_host_global_setting.replace("$(CC)", cc) - if "$(CXX)" in cxx_host and cxx_host_global_setting: - cxx_host = cxx_host_global_setting.replace("$(CXX)", cxx) - master_ninja.variable( - "cc_host", CommandWithWrapper("CC.host", wrappers, cc_host) - ) - master_ninja.variable( - "cxx_host", CommandWithWrapper("CXX.host", wrappers, cxx_host) - ) - if flavor == "win": - master_ninja.variable("ld_host", ld_host) - else: - master_ninja.variable( - "ld_host", CommandWithWrapper("LINK", wrappers, ld_host) - ) - master_ninja.variable( - "ldxx_host", CommandWithWrapper("LINK", wrappers, ldxx_host) - ) - - master_ninja.newline() - - master_ninja.pool("link_pool", depth=GetDefaultConcurrentLinks()) - master_ninja.newline() - - deps = "msvc" if flavor == "win" else "gcc" - - if flavor != "win": - master_ninja.rule( - "cc", - description="CC $out", - command=( - "$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c " - "$cflags_pch_c -c $in -o $out" - ), - depfile="$out.d", - deps=deps, - ) - master_ninja.rule( - "cc_s", - description="CC $out", - command=( - "$cc $defines $includes $cflags $cflags_c " - "$cflags_pch_c -c $in -o $out" - ), - ) - master_ninja.rule( - "cxx", - description="CXX $out", - command=( - "$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc " - "$cflags_pch_cc -c $in -o $out" - ), - depfile="$out.d", - deps=deps, - ) - else: - # TODO(scottmg) Separate pdb names is a test to see if it works around - # http://crbug.com/142362. It seems there's a race between the creation of - # the .pdb by the precompiled header step for .cc and the compilation of - # .c files. This should be handled by mspdbsrv, but rarely errors out with - # c1xx : fatal error C1033: cannot open program database - # By making the rules target separate pdb files this might be avoided. - cc_command = ( - "ninja -t msvc -e $arch " + "-- " - "$cc /nologo /showIncludes /FC " - "@$out.rsp /c $in /Fo$out /Fd$pdbname_c " - ) - cxx_command = ( - "ninja -t msvc -e $arch " + "-- " - "$cxx /nologo /showIncludes /FC " - "@$out.rsp /c $in /Fo$out /Fd$pdbname_cc " - ) - master_ninja.rule( - "cc", - description="CC $out", - command=cc_command, - rspfile="$out.rsp", - rspfile_content="$defines $includes $cflags $cflags_c", - deps=deps, - ) - master_ninja.rule( - "cxx", - description="CXX $out", - command=cxx_command, - rspfile="$out.rsp", - rspfile_content="$defines $includes $cflags $cflags_cc", - deps=deps, - ) - master_ninja.rule( - "idl", - description="IDL $in", - command=( - "%s gyp-win-tool midl-wrapper $arch $outdir " - "$tlb $h $dlldata $iid $proxy $in " - "$midl_includes $idlflags" % sys.executable - ), - ) - master_ninja.rule( - "rc", - description="RC $in", - # Note: $in must be last otherwise rc.exe complains. 
- command=( - "%s gyp-win-tool rc-wrapper " - "$arch $rc $defines $resource_includes $rcflags /fo$out $in" - % sys.executable - ), - ) - master_ninja.rule( - "asm", - description="ASM $out", - command=( - "%s gyp-win-tool asm-wrapper " - "$arch $asm $defines $includes $asmflags /c /Fo $out $in" - % sys.executable - ), - ) - - if flavor not in ("ios", "mac", "win"): - master_ninja.rule( - "alink", - description="AR $out", - command="rm -f $out && $ar rcs $arflags $out $in", - ) - master_ninja.rule( - "alink_thin", - description="AR $out", - command="rm -f $out && $ar rcsT $arflags $out $in", - ) - - # This allows targets that only need to depend on $lib's API to declare an - # order-only dependency on $lib.TOC and avoid relinking such downstream - # dependencies when $lib changes only in non-public ways. - # The resulting string leaves an uninterpolated %{suffix} which - # is used in the final substitution below. - mtime_preserving_solink_base = ( - "if [ ! -e $lib -o ! -e $lib.TOC ]; then " - "%(solink)s && %(extract_toc)s > $lib.TOC; else " - "%(solink)s && %(extract_toc)s > $lib.tmp && " - "if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; " - "fi; fi" - % { - "solink": "$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s", - "extract_toc": ( - "{ $readelf -d $lib | grep SONAME ; " - "$nm -gD -f p $lib | cut -f1-2 -d' '; }" - ), - } - ) - - master_ninja.rule( - "solink", - description="SOLINK $lib", - restat=True, - command=mtime_preserving_solink_base - % {"suffix": "@$link_file_list"}, - rspfile="$link_file_list", - rspfile_content=( - "-Wl,--whole-archive $in $solibs -Wl," "--no-whole-archive $libs" - ), - pool="link_pool", - ) - master_ninja.rule( - "solink_module", - description="SOLINK(module) $lib", - restat=True, - command=mtime_preserving_solink_base % {"suffix": "@$link_file_list"}, - rspfile="$link_file_list", - rspfile_content="-Wl,--start-group $in $solibs $libs -Wl,--end-group", - pool="link_pool", - ) - master_ninja.rule( - "link", - description="LINK $out", - command=( - "$ld $ldflags -o $out " - "-Wl,--start-group $in $solibs $libs -Wl,--end-group" - ), - pool="link_pool", - ) - elif flavor == "win": - master_ninja.rule( - "alink", - description="LIB $out", - command=( - "%s gyp-win-tool link-wrapper $arch False " - "$ar /nologo /ignore:4221 /OUT:$out @$out.rsp" % sys.executable - ), - rspfile="$out.rsp", - rspfile_content="$in_newline $libflags", - ) - _AddWinLinkRules(master_ninja, embed_manifest=True) - _AddWinLinkRules(master_ninja, embed_manifest=False) - else: - master_ninja.rule( - "objc", - description="OBJC $out", - command=( - "$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc " - "$cflags_pch_objc -c $in -o $out" - ), - depfile="$out.d", - deps=deps, - ) - master_ninja.rule( - "objcxx", - description="OBJCXX $out", - command=( - "$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc " - "$cflags_pch_objcc -c $in -o $out" - ), - depfile="$out.d", - deps=deps, - ) - master_ninja.rule( - "alink", - description="LIBTOOL-STATIC $out, POSTBUILDS", - command="rm -f $out && " - "./gyp-mac-tool filter-libtool libtool $libtool_flags " - "-static -o $out $in" - "$postbuilds", - ) - master_ninja.rule( - "lipo", - description="LIPO $out, POSTBUILDS", - command="rm -f $out && lipo -create $in -output $out$postbuilds", - ) - master_ninja.rule( - "solipo", - description="SOLIPO $out, POSTBUILDS", - command=( - "rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&" - "%(extract_toc)s > $lib.TOC" - % { - "extract_toc": "{ otool 
-l $lib | grep LC_ID_DYLIB -A 5; " - "nm -gP $lib | cut -f1-2 -d' ' | grep -v U$$; true; }" - } - ), - ) - - # Record the public interface of $lib in $lib.TOC. See the corresponding - # comment in the posix section above for details. - solink_base = "$ld %(type)s $ldflags -o $lib %(suffix)s" - mtime_preserving_solink_base = ( - "if [ ! -e $lib -o ! -e $lib.TOC ] || " - # Always force dependent targets to relink if this library - # reexports something. Handling this correctly would require - # recursive TOC dumping but this is rare in practice, so punt. - "otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then " - "%(solink)s && %(extract_toc)s > $lib.TOC; " - "else " - "%(solink)s && %(extract_toc)s > $lib.tmp && " - "if ! cmp -s $lib.tmp $lib.TOC; then " - "mv $lib.tmp $lib.TOC ; " - "fi; " - "fi" - % { - "solink": solink_base, - "extract_toc": "{ otool -l $lib | grep LC_ID_DYLIB -A 5; " - "nm -gP $lib | cut -f1-2 -d' ' | grep -v U$$; true; }", - } - ) - - solink_suffix = "@$link_file_list$postbuilds" - master_ninja.rule( - "solink", - description="SOLINK $lib, POSTBUILDS", - restat=True, - command=mtime_preserving_solink_base - % {"suffix": solink_suffix, "type": "-shared"}, - rspfile="$link_file_list", - rspfile_content="$in $solibs $libs", - pool="link_pool", - ) - master_ninja.rule( - "solink_notoc", - description="SOLINK $lib, POSTBUILDS", - restat=True, - command=solink_base % {"suffix": solink_suffix, "type": "-shared"}, - rspfile="$link_file_list", - rspfile_content="$in $solibs $libs", - pool="link_pool", - ) - - master_ninja.rule( - "solink_module", - description="SOLINK(module) $lib, POSTBUILDS", - restat=True, - command=mtime_preserving_solink_base - % {"suffix": solink_suffix, "type": "-bundle"}, - rspfile="$link_file_list", - rspfile_content="$in $solibs $libs", - pool="link_pool", - ) - master_ninja.rule( - "solink_module_notoc", - description="SOLINK(module) $lib, POSTBUILDS", - restat=True, - command=solink_base % {"suffix": solink_suffix, "type": "-bundle"}, - rspfile="$link_file_list", - rspfile_content="$in $solibs $libs", - pool="link_pool", - ) - - master_ninja.rule( - "link", - description="LINK $out, POSTBUILDS", - command=("$ld $ldflags -o $out " "$in $solibs $libs$postbuilds"), - pool="link_pool", - ) - master_ninja.rule( - "preprocess_infoplist", - description="PREPROCESS INFOPLIST $out", - command=( - "$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && " - "plutil -convert xml1 $out $out" - ), - ) - master_ninja.rule( - "copy_infoplist", - description="COPY INFOPLIST $in", - command="$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys", - ) - master_ninja.rule( - "merge_infoplist", - description="MERGE INFOPLISTS $in", - command="$env ./gyp-mac-tool merge-info-plist $out $in", - ) - master_ninja.rule( - "compile_xcassets", - description="COMPILE XCASSETS $in", - command="$env ./gyp-mac-tool compile-xcassets $keys $in", - ) - master_ninja.rule( - "compile_ios_framework_headers", - description="COMPILE HEADER MAPS AND COPY FRAMEWORK HEADERS $in", - command="$env ./gyp-mac-tool compile-ios-framework-header-map $out " - "$framework $in && $env ./gyp-mac-tool " - "copy-ios-framework-headers $framework $copy_headers", - ) - master_ninja.rule( - "mac_tool", - description="MACTOOL $mactool_cmd $in", - command="$env ./gyp-mac-tool $mactool_cmd $in $out $binary", - ) - master_ninja.rule( - "package_framework", - description="PACKAGE FRAMEWORK $out, POSTBUILDS", - command="./gyp-mac-tool package-framework $out $version$postbuilds " - "&& touch $out", - ) - 
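The mtime-preserving solink recipes here and in the posix section are the heart of the TOC optimization: the .TOC file is rewritten only when the library's exported interface changes, and because the rules carry restat=1, ninja re-stats outputs after the command runs and prunes downstream edges whose inputs did not actually change. A Python sketch of the compare-and-swap step (assumes an `nm` on PATH, as the shell pipelines above do):

    import os
    import subprocess

    def extract_toc(lib):
        # Exported symbol names and types, like the
        # `nm -gP $lib | cut -f1-2 -d' '` pipeline above
        # (SONAME / LC_ID_DYLIB handling omitted).
        out = subprocess.check_output(["nm", "-gP", lib], text=True)
        return "".join(" ".join(line.split()[:2]) + "\n"
                       for line in out.splitlines() if line.strip())

    def refresh_toc(lib):
        """Rewrite lib.TOC only when the exported interface changed; with
        restat=1 on the link rule, ninja then prunes downstream relinks."""
        toc_path = lib + ".TOC"
        new_toc = extract_toc(lib)
        if os.path.exists(toc_path):
            with open(toc_path) as f:
                if f.read() == new_toc:
                    return False   # unchanged: keep the old mtime
        with open(toc_path, "w") as f:
            f.write(new_toc)
        return True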
master_ninja.rule( - "package_ios_framework", - description="PACKAGE IOS FRAMEWORK $out, POSTBUILDS", - command="./gyp-mac-tool package-ios-framework $out $postbuilds " - "&& touch $out", - ) - if flavor == "win": - master_ninja.rule( - "stamp", - description="STAMP $out", - command="%s gyp-win-tool stamp $out" % sys.executable, - ) - else: - master_ninja.rule( - "stamp", description="STAMP $out", command="${postbuilds}touch $out" - ) - if flavor == "win": - master_ninja.rule( - "copy", - description="COPY $in $out", - command="%s gyp-win-tool recursive-mirror $in $out" % sys.executable, - ) - elif flavor == "zos": - master_ninja.rule( - "copy", - description="COPY $in $out", - command="rm -rf $out && cp -fRP $in $out", - ) - else: - master_ninja.rule( - "copy", - description="COPY $in $out", - command="ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)", - ) - master_ninja.newline() - - all_targets = set() - for build_file in params["build_files"]: - for target in gyp.common.AllTargets( - target_list, target_dicts, os.path.normpath(build_file) - ): - all_targets.add(target) - all_outputs = set() - - # target_outputs is a map from qualified target name to a Target object. - target_outputs = {} - # target_short_names is a map from target short name to a list of Target - # objects. - target_short_names = {} - - # short name of targets that were skipped because they didn't contain anything - # interesting. - # NOTE: there may be overlap between this an non_empty_target_names. - empty_target_names = set() - - # Set of non-empty short target names. - # NOTE: there may be overlap between this an empty_target_names. - non_empty_target_names = set() - - for qualified_target in target_list: - # qualified_target is like: third_party/icu/icu.gyp:icui18n#target - build_file, name, toolset = gyp.common.ParseQualifiedTarget(qualified_target) - - this_make_global_settings = data[build_file].get("make_global_settings", []) - assert make_global_settings == this_make_global_settings, ( - "make_global_settings needs to be the same for all targets. " - f"{this_make_global_settings} vs. {make_global_settings}" - ) - - spec = target_dicts[qualified_target] - if flavor == "mac": - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) - - # If build_file is a symlink, we must not follow it because there's a chance - # it could point to a path above toplevel_dir, and we cannot correctly deal - # with that case at the moment. - build_file = gyp.common.RelativePath(build_file, options.toplevel_dir, False) - - qualified_target_for_hash = gyp.common.QualifiedTarget( - build_file, name, toolset - ) - qualified_target_for_hash = qualified_target_for_hash.encode("utf-8") - hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest() - - base_path = os.path.dirname(build_file) - obj = "obj" - if toolset != "target": - obj += "." + toolset - output_file = os.path.join(obj, base_path, name + ".ninja") - - ninja_output = StringIO() - writer = NinjaWriter( - hash_for_rules, - target_outputs, - base_path, - build_dir, - ninja_output, - toplevel_build, - output_file, - flavor, - toplevel_dir=options.toplevel_dir, - ) - - target = writer.WriteSpec(spec, config_name, generator_flags) - - if ninja_output.tell() > 0: - # Only create files for ninja files that actually have contents. 
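Each target's build statements are buffered in a StringIO and, when non-empty, flushed to obj/<path>/<name>.ninja and pulled into the master file with a subninja line; empty buffers produce no file at all. A minimal sketch of that wiring, again assuming the standalone ninja_syntax module (paths are illustrative):

    import io

    import ninja_syntax

    target_buf = io.StringIO()
    target = ninja_syntax.Writer(target_buf)
    target.rule("cxx", command="c++ -MMD -MF $out.d -c $in -o $out",
                depfile="$out.d")
    target.build("obj/foo/foo.o", "cxx", "foo/foo.cc")

    master = ninja_syntax.Writer(io.StringIO())
    if target_buf.tell() > 0:   # only reference files that have contents
        # In the real generator, target_buf is written to this path first.
        master.subninja("obj/foo/foo.ninja")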
- with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file: - ninja_file.write(ninja_output.getvalue()) - ninja_output.close() - master_ninja.subninja(output_file) - - if target: - if name != target.FinalOutput() and spec["toolset"] == "target": - target_short_names.setdefault(name, []).append(target) - target_outputs[qualified_target] = target - if qualified_target in all_targets: - all_outputs.add(target.FinalOutput()) - non_empty_target_names.add(name) - else: - empty_target_names.add(name) - - if target_short_names: - # Write a short name to build this target. This benefits both the - # "build chrome" case as well as the gyp tests, which expect to be - # able to run actions and build libraries by their short name. - master_ninja.newline() - master_ninja.comment("Short names for targets.") - for short_name in sorted(target_short_names): - master_ninja.build( - short_name, - "phony", - [x.FinalOutput() for x in target_short_names[short_name]], - ) - - # Write phony targets for any empty targets that weren't written yet. As - # short names are not necessarily unique only do this for short names that - # haven't already been output for another target. - empty_target_names = empty_target_names - non_empty_target_names - if empty_target_names: - master_ninja.newline() - master_ninja.comment("Empty targets (output for completeness).") - for name in sorted(empty_target_names): - master_ninja.build(name, "phony") - - if all_outputs: - master_ninja.newline() - master_ninja.build("all", "phony", sorted(all_outputs)) - master_ninja.default(generator_flags.get("default_target", "all")) - - master_ninja_file.close() - - if generate_compile_commands: - compile_db = GenerateCompileDBWithNinja(toplevel_build) - compile_db_file = OpenOutput( - os.path.join(toplevel_build, "compile_commands.json") - ) - compile_db_file.write(json.dumps(compile_db, indent=2)) - compile_db_file.close() - - -def GenerateCompileDBWithNinja(path, targets=["all"]): - """Generates a compile database using ninja. - - Args: - path: The build directory to generate a compile database for. - targets: Additional targets to pass to ninja. - - Returns: - List of the contents of the compile database. - """ - ninja_path = shutil.which("ninja") - if ninja_path is None: - raise Exception("ninja not found in PATH") - json_compile_db = subprocess.check_output( - [ninja_path, "-C", path] - + targets - + ["-t", "compdb", "cc", "cxx", "objc", "objcxx"] - ) - return json.loads(json_compile_db) - - -def PerformBuild(data, configurations, params): - options = params["options"] - for config in configurations: - builddir = os.path.join(options.toplevel_dir, "out", config) - arguments = ["ninja", "-C", builddir] - print(f"Building [{config}]: {arguments}") - subprocess.check_call(arguments) - - -def CallGenerateOutputForConfig(arglist): - # Ignore the interrupt signal so that the parent process catches it and - # kills all multiprocessing children. - signal.signal(signal.SIGINT, signal.SIG_IGN) - - (target_list, target_dicts, data, params, config_name) = arglist - GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) - - -def GenerateOutput(target_list, target_dicts, data, params): - # Update target_dicts for iOS device builds. 
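GenerateCompileDBWithNinja above shells out to ninja's compdb tool. A trimmed usage sketch of the same idea (rule names are positional arguments to `-t compdb`; the `out/Debug` path is only an example):

    import json
    import shutil
    import subprocess

    def compile_db(build_dir, rules=("cc", "cxx")):
        ninja = shutil.which("ninja")
        if ninja is None:
            raise RuntimeError("ninja not found in PATH")
        raw = subprocess.check_output(
            [ninja, "-C", build_dir, "-t", "compdb", *rules])
        return json.loads(raw)

    # e.g.: with open("compile_commands.json", "w") as f:
    #           json.dump(compile_db("out/Debug"), f, indent=2)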
- target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator( - target_dicts - ) - - user_config = params.get("generator_flags", {}).get("config", None) - if gyp.common.GetFlavor(params) == "win": - target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts) - target_list, target_dicts = MSVSUtil.InsertLargePdbShims( - target_list, target_dicts, generator_default_variables - ) - - if user_config: - GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) - else: - config_names = target_dicts[target_list[0]]["configurations"] - if params["parallel"]: - try: - pool = multiprocessing.Pool(len(config_names)) - arglists = [] - for config_name in config_names: - arglists.append( - (target_list, target_dicts, data, params, config_name) - ) - pool.map(CallGenerateOutputForConfig, arglists) - except KeyboardInterrupt as e: - pool.terminate() - raise e - else: - for config_name in config_names: - GenerateOutputForConfig( - target_list, target_dicts, data, params, config_name - ) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py deleted file mode 100644 index 15cddfdf2443b..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the ninja.py file. """ - -from pathlib import Path -import sys -import unittest - -import gyp.generator.ninja as ninja - - -class TestPrefixesAndSuffixes(unittest.TestCase): - def test_BinaryNamesWindows(self): - # These cannot run on non-Windows as they require a VS installation to - # correctly handle variable expansion. - if sys.platform.startswith("win"): - writer = ninja.NinjaWriter( - "foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "win" - ) - spec = {"target_name": "wee"} - self.assertTrue( - writer.ComputeOutputFileName(spec, "executable").endswith(".exe") - ) - self.assertTrue( - writer.ComputeOutputFileName(spec, "shared_library").endswith(".dll") - ) - self.assertTrue( - writer.ComputeOutputFileName(spec, "static_library").endswith(".lib") - ) - - def test_BinaryNamesLinux(self): - writer = ninja.NinjaWriter( - "foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "linux" - ) - spec = {"target_name": "wee"} - self.assertTrue("." 
not in writer.ComputeOutputFileName(spec, "executable")) - self.assertTrue( - writer.ComputeOutputFileName(spec, "shared_library").startswith("lib") - ) - self.assertTrue( - writer.ComputeOutputFileName(spec, "static_library").startswith("lib") - ) - self.assertTrue( - writer.ComputeOutputFileName(spec, "shared_library").endswith(".so") - ) - self.assertTrue( - writer.ComputeOutputFileName(spec, "static_library").endswith(".a") - ) - - def test_GenerateCompileDBWithNinja(self): - build_dir = ( - Path(__file__).resolve().parent.parent.parent.parent / "data" / "ninja" - ) - compile_db = ninja.GenerateCompileDBWithNinja(build_dir) - assert len(compile_db) == 1 - assert compile_db[0]["directory"] == str(build_dir) - assert compile_db[0]["command"] == "cc my.in my.out" - assert compile_db[0]["file"] == "my.in" - assert compile_db[0]["output"] == "my.out" - - -if __name__ == "__main__": - unittest.main() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py deleted file mode 100644 index 1ac672c3876bd..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py +++ /dev/null @@ -1,1391 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - - -import filecmp -import gyp.common -import gyp.xcodeproj_file -import gyp.xcode_ninja -import errno -import os -import sys -import posixpath -import re -import shutil -import subprocess -import tempfile - - -# Project files generated by this module will use _intermediate_var as a -# custom Xcode setting whose value is a DerivedSources-like directory that's -# project-specific and configuration-specific. The normal choice, -# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive -# as it is likely that multiple targets within a single project file will want -# to access the same set of generated files. The other option, -# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific, -# it is not configuration-specific. INTERMEDIATE_DIR is defined as -# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION). -_intermediate_var = "INTERMEDIATE_DIR" - -# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all -# targets that share the same BUILT_PRODUCTS_DIR. -_shared_intermediate_var = "SHARED_INTERMEDIATE_DIR" - -_library_search_paths_var = "LIBRARY_SEARCH_PATHS" - -generator_default_variables = { - "EXECUTABLE_PREFIX": "", - "EXECUTABLE_SUFFIX": "", - "STATIC_LIB_PREFIX": "lib", - "SHARED_LIB_PREFIX": "lib", - "STATIC_LIB_SUFFIX": ".a", - "SHARED_LIB_SUFFIX": ".dylib", - # INTERMEDIATE_DIR is a place for targets to build up intermediate products. - # It is specific to each build environment. It is only guaranteed to exist - # and be constant within the context of a project, corresponding to a single - # input file. Some build environments may allow their intermediate directory - # to be shared on a wider scale, but this is not guaranteed. 
- "INTERMEDIATE_DIR": "$(%s)" % _intermediate_var, - "OS": "mac", - "PRODUCT_DIR": "$(BUILT_PRODUCTS_DIR)", - "LIB_DIR": "$(BUILT_PRODUCTS_DIR)", - "RULE_INPUT_ROOT": "$(INPUT_FILE_BASE)", - "RULE_INPUT_EXT": "$(INPUT_FILE_SUFFIX)", - "RULE_INPUT_NAME": "$(INPUT_FILE_NAME)", - "RULE_INPUT_PATH": "$(INPUT_FILE_PATH)", - "RULE_INPUT_DIRNAME": "$(INPUT_FILE_DIRNAME)", - "SHARED_INTERMEDIATE_DIR": "$(%s)" % _shared_intermediate_var, - "CONFIGURATION_NAME": "$(CONFIGURATION)", -} - -# The Xcode-specific sections that hold paths. -generator_additional_path_sections = [ - "mac_bundle_resources", - "mac_framework_headers", - "mac_framework_private_headers", - # 'mac_framework_dirs', input already handles _dirs endings. -] - -# The Xcode-specific keys that exist on targets and aren't moved down to -# configurations. -generator_additional_non_configuration_keys = [ - "ios_app_extension", - "ios_watch_app", - "ios_watchkit_extension", - "mac_bundle", - "mac_bundle_resources", - "mac_framework_headers", - "mac_framework_private_headers", - "mac_xctest_bundle", - "mac_xcuitest_bundle", - "xcode_create_dependents_test_runner", -] - -# We want to let any rules apply to files that are resources also. -generator_extra_sources_for_rules = [ - "mac_bundle_resources", - "mac_framework_headers", - "mac_framework_private_headers", -] - -generator_filelist_paths = None - -# Xcode's standard set of library directories, which don't need to be duplicated -# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay. -xcode_standard_library_dirs = frozenset( - ["$(SDKROOT)/usr/lib", "$(SDKROOT)/usr/local/lib"] -) - - -def CreateXCConfigurationList(configuration_names): - xccl = gyp.xcodeproj_file.XCConfigurationList({"buildConfigurations": []}) - if len(configuration_names) == 0: - configuration_names = ["Default"] - for configuration_name in configuration_names: - xcbc = gyp.xcodeproj_file.XCBuildConfiguration({"name": configuration_name}) - xccl.AppendProperty("buildConfigurations", xcbc) - xccl.SetProperty("defaultConfigurationName", configuration_names[0]) - return xccl - - -class XcodeProject: - def __init__(self, gyp_path, path, build_file_dict): - self.gyp_path = gyp_path - self.path = path - self.project = gyp.xcodeproj_file.PBXProject(path=path) - projectDirPath = gyp.common.RelativePath( - os.path.dirname(os.path.abspath(self.gyp_path)), - os.path.dirname(path) or ".", - ) - self.project.SetProperty("projectDirPath", projectDirPath) - self.project_file = gyp.xcodeproj_file.XCProjectFile( - {"rootObject": self.project} - ) - self.build_file_dict = build_file_dict - - # TODO(mark): add destructor that cleans up self.path if created_dir is - # True and things didn't complete successfully. Or do something even - # better with "try"? - self.created_dir = False - try: - os.makedirs(self.path) - self.created_dir = True - except OSError as e: - if e.errno != errno.EEXIST: - raise - - def Finalize1(self, xcode_targets, serialize_all_tests): - # Collect a list of all of the build configuration names used by the - # various targets in the file. It is very heavily advised to keep each - # target in an entire project (even across multiple project files) using - # the same set of configuration names. 
- configurations = []
- for xct in self.project.GetProperty("targets"):
- xccl = xct.GetProperty("buildConfigurationList")
- xcbcs = xccl.GetProperty("buildConfigurations")
- for xcbc in xcbcs:
- name = xcbc.GetProperty("name")
- if name not in configurations:
- configurations.append(name)
-
- # Replace the XCConfigurationList attached to the PBXProject object with
- # a new one specifying all of the configuration names used by the various
- # targets.
- try:
- xccl = CreateXCConfigurationList(configurations)
- self.project.SetProperty("buildConfigurationList", xccl)
- except Exception:
- sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
- raise
-
- # The need for this setting is explained above where _intermediate_var is
- # defined. The comments below about wanting to avoid project-wide build
- # settings apply here too, but this needs to be set on a project-wide basis
- # so that files relative to the _intermediate_var setting can be displayed
- # properly in the Xcode UI.
- #
- # Note that for configuration-relative files such as anything relative to
- # _intermediate_var, for the purposes of UI tree view display, Xcode will
- # only resolve the configuration name once, when the project file is
- # opened. If the active build configuration is changed, the project file
- # must be closed and reopened if it is desired for the tree view to update.
- # This is filed as Apple radar 6588391.
- xccl.SetBuildSetting(
- _intermediate_var, "$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)"
- )
- xccl.SetBuildSetting(
- _shared_intermediate_var, "$(SYMROOT)/DerivedSources/$(CONFIGURATION)"
- )
-
- # Set user-specified project-wide build settings and config files. This
- # is intended to be used very sparingly. Really, almost everything should
- # go into target-specific build settings sections. The project-wide
- # settings are only intended to be used in cases where Xcode attempts to
- # resolve variable references in a project context as opposed to a target
- # context, such as when resolving sourceTree references while building up
- # the tree view for UI display.
- # Any values set globally are applied to all configurations, then any
- # per-configuration values are applied.
- for xck, xcv in self.build_file_dict.get("xcode_settings", {}).items():
- xccl.SetBuildSetting(xck, xcv)
- if "xcode_config_file" in self.build_file_dict:
- config_ref = self.project.AddOrGetFileInRootGroup(
- self.build_file_dict["xcode_config_file"]
- )
- xccl.SetBaseConfiguration(config_ref)
- build_file_configurations = self.build_file_dict.get("configurations", {})
- if build_file_configurations:
- for config_name in configurations:
- build_file_configuration_named = build_file_configurations.get(
- config_name, {}
- )
- if build_file_configuration_named:
- xcc = xccl.ConfigurationNamed(config_name)
- for xck, xcv in build_file_configuration_named.get(
- "xcode_settings", {}
- ).items():
- xcc.SetBuildSetting(xck, xcv)
- if "xcode_config_file" in build_file_configuration_named:
- config_ref = self.project.AddOrGetFileInRootGroup(
- build_file_configurations[config_name]["xcode_config_file"]
- )
- xcc.SetBaseConfiguration(config_ref)
-
- # Sort the targets based on how they appeared in the input.
- # TODO(mark): Like a lot of other things here, this assumes internal
- # knowledge of PBXProject - in this case, of its "targets" property.
-
- # ordinary_targets are ordinary targets that are already in the project
- # file. run_test_targets are the targets that run unittests and should be
- # used for the Run All Tests target. support_targets are the action/rule
- # targets used by GYP file targets, just kept for the assert check.
- ordinary_targets = []
- run_test_targets = []
- support_targets = []
-
- # targets is the full list of targets in the project.
- targets = []
-
- # does it define its own "all"?
- has_custom_all = False
-
- # targets_for_all is the list of ordinary_targets that should be listed
- # in this project's "All" target. It includes each non_runtest_target
- # that does not have suppress_wildcard set.
- targets_for_all = []
-
- for target in self.build_file_dict["targets"]:
- target_name = target["target_name"]
- toolset = target["toolset"]
- qualified_target = gyp.common.QualifiedTarget(
- self.gyp_path, target_name, toolset
- )
- xcode_target = xcode_targets[qualified_target]
- # Make sure that the target being added to the sorted list is already in
- # the unsorted list.
- assert xcode_target in self.project._properties["targets"]
- targets.append(xcode_target)
- ordinary_targets.append(xcode_target)
- if xcode_target.support_target:
- support_targets.append(xcode_target.support_target)
- targets.append(xcode_target.support_target)
-
- if not int(target.get("suppress_wildcard", False)):
- targets_for_all.append(xcode_target)
-
- if target_name.lower() == "all":
- has_custom_all = True
-
- # If this target has a 'run_as' attribute, add its target to the
- # targets, and add it to the test targets.
- if target.get("run_as"):
- # Make a target to run something. It should have one
- # dependency, the parent xcode target.
- xccl = CreateXCConfigurationList(configurations)
- run_target = gyp.xcodeproj_file.PBXAggregateTarget(
- {
- "name": "Run " + target_name,
- "productName": xcode_target.GetProperty("productName"),
- "buildConfigurationList": xccl,
- },
- parent=self.project,
- )
- run_target.AddDependency(xcode_target)
-
- command = target["run_as"]
- script = ""
- if command.get("working_directory"):
- script = (
- script
- + 'cd "%s"\n'
- % gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
- command.get("working_directory")
- )
- )
-
- if command.get("environment"):
- script = (
- script
- + "\n".join(
- [
- 'export %s="%s"'
- % (
- key,
- gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
- val
- ),
- )
- for (key, val) in command.get("environment").items()
- ]
- )
- + "\n"
- )
-
- # Some tests end up using sockets, files on disk, etc. and can get
- # confused if more than one test runs at a time. The generator
- # flag 'xcode_serialize_all_test_runs' controls whether all tests
- # are forced to run serially. It defaults to True. To get serial
- # runs, this little bit of Python does the same as the Linux flock
- # utility to make sure only one runs at a time.
- command_prefix = ""
- if serialize_all_tests:
- command_prefix = """python -c "import fcntl, subprocess, sys
-file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
-fcntl.flock(file.fileno(), fcntl.LOCK_EX)
-sys.exit(subprocess.call(sys.argv[1:]))" """
-
- # If we were unable to exec for some reason, we want to exit
- # with an error, and fix up variable references to be shell
- # syntax instead of xcode syntax.
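The embedded `python -c` prefix above is easier to read spelled out. This standalone sketch does the same thing: take an exclusive `flock` on a shared lock file, then run the real command; the lock is released when the process exits. It is an illustration, not the literal string embedded above:

```python
# The serialization wrapper spelled out: take an exclusive flock on a
# shared lock file, then run the real command (sys.argv[1:]); the lock
# is released automatically when this process exits.
# Usage sketch: python serialize.py <command> [args...]
import fcntl
import os
import subprocess
import sys

lock_path = os.path.join(os.environ.get("TMPDIR", "/tmp"),
                         "GYP_serialize_test_runs")
lock_file = open(lock_path, "a")
fcntl.flock(lock_file.fileno(), fcntl.LOCK_EX)  # blocks until no other run holds it
sys.exit(subprocess.call(sys.argv[1:]))
```

The `exec`-plus-`exit 1` script assembly the comment above describes continues in the next hunk.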
- script = ( - script - + "exec " - + command_prefix - + "%s\nexit 1\n" - % gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - gyp.common.EncodePOSIXShellList(command.get("action")) - ) - ) - - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase( - {"shellScript": script, "showEnvVarsInLog": 0} - ) - run_target.AppendProperty("buildPhases", ssbp) - - # Add the run target to the project file. - targets.append(run_target) - run_test_targets.append(run_target) - xcode_target.test_runner = run_target - - # Make sure that the list of targets being replaced is the same length as - # the one replacing it, but allow for the added test runner targets. - assert len(self.project._properties["targets"]) == len(ordinary_targets) + len( - support_targets - ) - - self.project._properties["targets"] = targets - - # Get rid of unnecessary levels of depth in groups like the Source group. - self.project.RootGroupsTakeOverOnlyChildren(True) - - # Sort the groups nicely. Do this after sorting the targets, because the - # Products group is sorted based on the order of the targets. - self.project.SortGroups() - - # Create an "All" target if there's more than one target in this project - # file and the project didn't define its own "All" target. Put a generated - # "All" target first so that people opening up the project for the first - # time will build everything by default. - if len(targets_for_all) > 1 and not has_custom_all: - xccl = CreateXCConfigurationList(configurations) - all_target = gyp.xcodeproj_file.PBXAggregateTarget( - {"buildConfigurationList": xccl, "name": "All"}, parent=self.project - ) - - for target in targets_for_all: - all_target.AddDependency(target) - - # TODO(mark): This is evil because it relies on internal knowledge of - # PBXProject._properties. It's important to get the "All" target first, - # though. - self.project._properties["targets"].insert(0, all_target) - - # The same, but for run_test_targets. - if len(run_test_targets) > 1: - xccl = CreateXCConfigurationList(configurations) - run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget( - {"buildConfigurationList": xccl, "name": "Run All Tests"}, - parent=self.project, - ) - for run_test_target in run_test_targets: - run_all_tests_target.AddDependency(run_test_target) - - # Insert after the "All" target, which must exist if there is more than - # one run_test_target. - self.project._properties["targets"].insert(1, run_all_tests_target) - - def Finalize2(self, xcode_targets, xcode_target_to_target_dict): - # Finalize2 needs to happen in a separate step because the process of - # updating references to other projects depends on the ordering of targets - # within remote project files. Finalize1 is responsible for sorting duty, - # and once all project files are sorted, Finalize2 can come in and update - # these references. - - # To support making a "test runner" target that will run all the tests - # that are direct dependents of any given target, we look for - # xcode_create_dependents_test_runner being set on an Aggregate target, - # and generate a second target that will run the tests runners found under - # the marked target. 
- for bf_tgt in self.build_file_dict["targets"]: - if int(bf_tgt.get("xcode_create_dependents_test_runner", 0)): - tgt_name = bf_tgt["target_name"] - toolset = bf_tgt["toolset"] - qualified_target = gyp.common.QualifiedTarget( - self.gyp_path, tgt_name, toolset - ) - xcode_target = xcode_targets[qualified_target] - if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget): - # Collect all the run test targets. - all_run_tests = [] - pbxtds = xcode_target.GetProperty("dependencies") - for pbxtd in pbxtds: - pbxcip = pbxtd.GetProperty("targetProxy") - dependency_xct = pbxcip.GetProperty("remoteGlobalIDString") - if hasattr(dependency_xct, "test_runner"): - all_run_tests.append(dependency_xct.test_runner) - - # Directly depend on all the runners as they depend on the target - # that builds them. - if len(all_run_tests) > 0: - run_all_target = gyp.xcodeproj_file.PBXAggregateTarget( - { - "name": "Run %s Tests" % tgt_name, - "productName": tgt_name, - }, - parent=self.project, - ) - for run_test_target in all_run_tests: - run_all_target.AddDependency(run_test_target) - - # Insert the test runner after the related target. - idx = self.project._properties["targets"].index(xcode_target) - self.project._properties["targets"].insert( - idx + 1, run_all_target - ) - - # Update all references to other projects, to make sure that the lists of - # remote products are complete. Otherwise, Xcode will fill them in when - # it opens the project file, which will result in unnecessary diffs. - # TODO(mark): This is evil because it relies on internal knowledge of - # PBXProject._other_pbxprojects. - for other_pbxproject in self.project._other_pbxprojects: - self.project.AddOrGetProjectReference(other_pbxproject) - - self.project.SortRemoteProductReferences() - - # Give everything an ID. - self.project_file.ComputeIDs() - - # Make sure that no two objects in the project file have the same ID. If - # multiple objects wind up with the same ID, upon loading the file, Xcode - # will only recognize one object (the last one in the file?) and the - # results are unpredictable. - self.project_file.EnsureNoIDCollisions() - - def Write(self): - # Write the project file to a temporary location first. Xcode watches for - # changes to the project file and presents a UI sheet offering to reload - # the project when it does change. However, in some cases, especially when - # multiple projects are open or when Xcode is busy, things don't work so - # seamlessly. Sometimes, Xcode is able to detect that a project file has - # changed but can't unload it because something else is referencing it. - # To mitigate this problem, and to avoid even having Xcode present the UI - # sheet when an open project is rewritten for inconsequential changes, the - # project file is written to a temporary file in the xcodeproj directory - # first. The new temporary file is then compared to the existing project - # file, if any. If they differ, the new file replaces the old; otherwise, - # the new project file is simply deleted. Xcode properly detects a file - # being renamed over an open project file as a change and so it remains - # able to present the "project file changed" sheet under this system. - # Writing to a temporary file first also avoids the possible problem of - # Xcode rereading an incomplete project file. 
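The method body that follows implements the scheme just described. As a condensed, standalone sketch of the same write-compare-swap pattern (the helper name and its signature are illustrative):

```python
# Write-compare-swap: write to a temp file in the same directory,
# compare with the existing file, and only rename into place when the
# content actually changed, so watchers (here: Xcode) see no spurious
# rewrites.
import filecmp
import os
import tempfile

def write_if_changed(path, content):
    dir_name = os.path.dirname(path) or "."
    fd, tmp_path = tempfile.mkstemp(prefix=os.path.basename(path) + ".", dir=dir_name)
    with os.fdopen(fd, "w") as out:
        out.write(content)
    if os.path.exists(path) and filecmp.cmp(path, tmp_path, shallow=False):
        os.unlink(tmp_path)  # identical: discard the temp file
    else:
        umask = os.umask(0o77)  # mkstemp creates 0o600 files; honor the umask instead
        os.umask(umask)
        os.chmod(tmp_path, 0o666 & ~umask)
        os.rename(tmp_path, path)  # atomic replace on POSIX
```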
- (output_fd, new_pbxproj_path) = tempfile.mkstemp( - suffix=".tmp", prefix="project.pbxproj.gyp.", dir=self.path - ) - - try: - output_file = os.fdopen(output_fd, "w") - - self.project_file.Print(output_file) - output_file.close() - - pbxproj_path = os.path.join(self.path, "project.pbxproj") - - same = False - try: - same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False) - except OSError as e: - if e.errno != errno.ENOENT: - raise - - if same: - # The new file is identical to the old one, just get rid of the new - # one. - os.unlink(new_pbxproj_path) - else: - # The new file is different from the old one, or there is no old one. - # Rename the new file to the permanent name. - # - # tempfile.mkstemp uses an overly restrictive mode, resulting in a - # file that can only be read by the owner, regardless of the umask. - # There's no reason to not respect the umask here, which means that - # an extra hoop is required to fetch it and reset the new file's mode. - # - # No way to get the umask without setting a new one? Set a safe one - # and then set it back to the old value. - umask = os.umask(0o77) - os.umask(umask) - - os.chmod(new_pbxproj_path, 0o666 & ~umask) - os.rename(new_pbxproj_path, pbxproj_path) - - except Exception: - # Don't leave turds behind. In fact, if this code was responsible for - # creating the xcodeproj directory, get rid of that too. - os.unlink(new_pbxproj_path) - if self.created_dir: - shutil.rmtree(self.path, True) - raise - - -def AddSourceToTarget(source, type, pbxp, xct): - # TODO(mark): Perhaps source_extensions and library_extensions can be made a - # little bit fancier. - source_extensions = ["c", "cc", "cpp", "cxx", "m", "mm", "s", "swift"] - - # .o is conceptually more of a "source" than a "library," but Xcode thinks - # of "sources" as things to compile and "libraries" (or "frameworks") as - # things to link with. Adding an object file to an Xcode target's frameworks - # phase works properly. - library_extensions = ["a", "dylib", "framework", "o"] - - basename = posixpath.basename(source) - (root, ext) = posixpath.splitext(basename) - if ext: - ext = ext[1:].lower() - - if ext in source_extensions and type != "none": - xct.SourcesPhase().AddFile(source) - elif ext in library_extensions and type != "none": - xct.FrameworksPhase().AddFile(source) - else: - # Files that aren't added to a sources or frameworks build phase can still - # go into the project file, just not as part of a build phase. - pbxp.AddOrGetFileInRootGroup(source) - - -def AddResourceToTarget(resource, pbxp, xct): - # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call - # where it's used. - xct.ResourcesPhase().AddFile(resource) - - -def AddHeaderToTarget(header, pbxp, xct, is_public): - # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call - # where it's used. - settings = "{ATTRIBUTES = (%s, ); }" % ("Private", "Public")[is_public] - xct.HeadersPhase().AddFile(header, settings) - - -_xcode_variable_re = re.compile(r"(\$\((.*?)\))") - - -def ExpandXcodeVariables(string, expansions): - """Expands Xcode-style $(VARIABLES) in string per the expansions dict. - - In some rare cases, it is appropriate to expand Xcode variables when a - project file is generated. For any substring $(VAR) in string, if VAR is a - key in the expansions dict, $(VAR) will be replaced with expansions[VAR]. - Any $(VAR) substring in string for which VAR is not a key in the expansions - dict will remain in the returned string. 
- """ - - matches = _xcode_variable_re.findall(string) - if matches is None: - return string - - matches.reverse() - for match in matches: - (to_replace, variable) = match - if variable not in expansions: - continue - - replacement = expansions[variable] - string = re.sub(re.escape(to_replace), replacement, string) - - return string - - -_xcode_define_re = re.compile(r"([\\\"\' ])") - - -def EscapeXcodeDefine(s): - """We must escape the defines that we give to XCode so that it knows not to - split on spaces and to respect backslash and quote literals. However, we - must not quote the define, or Xcode will incorrectly interpret variables - especially $(inherited).""" - return re.sub(_xcode_define_re, r"\\\1", s) - - -def PerformBuild(data, configurations, params): - options = params["options"] - - for build_file, build_file_dict in data.items(): - (build_file_root, build_file_ext) = os.path.splitext(build_file) - if build_file_ext != ".gyp": - continue - xcodeproj_path = build_file_root + options.suffix + ".xcodeproj" - if options.generator_output: - xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) - - for config in configurations: - arguments = ["xcodebuild", "-project", xcodeproj_path] - arguments += ["-configuration", config] - print(f"Building [{config}]: {arguments}") - subprocess.check_call(arguments) - - -def CalculateGeneratorInputInfo(params): - toplevel = params["options"].toplevel_dir - if params.get("flavor") == "ninja": - generator_dir = os.path.relpath(params["options"].generator_output or ".") - output_dir = params.get("generator_flags", {}).get("output_dir", "out") - output_dir = os.path.normpath(os.path.join(generator_dir, output_dir)) - qualified_out_dir = os.path.normpath( - os.path.join(toplevel, output_dir, "gypfiles-xcode-ninja") - ) - else: - output_dir = os.path.normpath(os.path.join(toplevel, "xcodebuild")) - qualified_out_dir = os.path.normpath( - os.path.join(toplevel, output_dir, "gypfiles") - ) - - global generator_filelist_paths - generator_filelist_paths = { - "toplevel": toplevel, - "qualified_out_dir": qualified_out_dir, - } - - -def GenerateOutput(target_list, target_dicts, data, params): - # Optionally configure each spec to use ninja as the external builder. - ninja_wrapper = params.get("flavor") == "ninja" - if ninja_wrapper: - (target_list, target_dicts, data) = gyp.xcode_ninja.CreateWrapper( - target_list, target_dicts, data, params - ) - - options = params["options"] - generator_flags = params.get("generator_flags", {}) - parallel_builds = generator_flags.get("xcode_parallel_builds", True) - serialize_all_tests = generator_flags.get("xcode_serialize_all_test_runs", True) - upgrade_check_project_version = generator_flags.get( - "xcode_upgrade_check_project_version", None - ) - - # Format upgrade_check_project_version with leading zeros as needed. 
- if upgrade_check_project_version: - upgrade_check_project_version = str(upgrade_check_project_version) - while len(upgrade_check_project_version) < 4: - upgrade_check_project_version = "0" + upgrade_check_project_version - - skip_excluded_files = not generator_flags.get("xcode_list_excluded_files", True) - xcode_projects = {} - for build_file, build_file_dict in data.items(): - (build_file_root, build_file_ext) = os.path.splitext(build_file) - if build_file_ext != ".gyp": - continue - xcodeproj_path = build_file_root + options.suffix + ".xcodeproj" - if options.generator_output: - xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) - xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict) - xcode_projects[build_file] = xcp - pbxp = xcp.project - - # Set project-level attributes from multiple options - project_attributes = {} - if parallel_builds: - project_attributes["BuildIndependentTargetsInParallel"] = "YES" - if upgrade_check_project_version: - project_attributes["LastUpgradeCheck"] = upgrade_check_project_version - project_attributes[ - "LastTestingUpgradeCheck" - ] = upgrade_check_project_version - project_attributes["LastSwiftUpdateCheck"] = upgrade_check_project_version - pbxp.SetProperty("attributes", project_attributes) - - # Add gyp/gypi files to project - if not generator_flags.get("standalone"): - main_group = pbxp.GetProperty("mainGroup") - build_group = gyp.xcodeproj_file.PBXGroup({"name": "Build"}) - main_group.AppendChild(build_group) - for included_file in build_file_dict["included_files"]: - build_group.AddOrGetFileByPath(included_file, False) - - xcode_targets = {} - xcode_target_to_target_dict = {} - for qualified_target in target_list: - [build_file, target_name, toolset] = gyp.common.ParseQualifiedTarget( - qualified_target - ) - - spec = target_dicts[qualified_target] - if spec["toolset"] != "target": - raise Exception( - "Multiple toolsets not supported in xcode build (target %s)" - % qualified_target - ) - configuration_names = [spec["default_configuration"]] - for configuration_name in sorted(spec["configurations"].keys()): - if configuration_name not in configuration_names: - configuration_names.append(configuration_name) - xcp = xcode_projects[build_file] - pbxp = xcp.project - - # Set up the configurations for the target according to the list of names - # supplied. - xccl = CreateXCConfigurationList(configuration_names) - - # Create an XCTarget subclass object for the target. The type with - # "+bundle" appended will be used if the target has "mac_bundle" set. - # loadable_modules not in a mac_bundle are mapped to - # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets - # to create a single-file mh_bundle. 
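The `_types` table that follows is keyed by the gyp `type` plus flag-derived suffixes. A sketch of that lookup, using a three-entry subset of the table chosen here for illustration:

```python
# The gyp 'type' plus flag-derived suffixes selects an Xcode
# productType.  Subset of the full table below, for illustration only.
_types_subset = {
    "executable": "com.apple.product-type.tool",
    "executable+bundle": "com.apple.product-type.application",
    "shared_library+bundle": "com.apple.product-type.framework",
}

def product_type(gyp_type, is_bundle):
    key = gyp_type + ("+bundle" if is_bundle else "")
    return _types_subset[key]

print(product_type("executable", True))   # com.apple.product-type.application
print(product_type("executable", False))  # com.apple.product-type.tool
```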
- _types = { - "executable": "com.apple.product-type.tool", - "loadable_module": "com.googlecode.gyp.xcode.bundle", - "shared_library": "com.apple.product-type.library.dynamic", - "static_library": "com.apple.product-type.library.static", - "mac_kernel_extension": "com.apple.product-type.kernel-extension", - "executable+bundle": "com.apple.product-type.application", - "loadable_module+bundle": "com.apple.product-type.bundle", - "loadable_module+xctest": "com.apple.product-type.bundle.unit-test", - "loadable_module+xcuitest": "com.apple.product-type.bundle.ui-testing", - "shared_library+bundle": "com.apple.product-type.framework", - "executable+extension+bundle": "com.apple.product-type.app-extension", - "executable+watch+extension+bundle": - "com.apple.product-type.watchkit-extension", - "executable+watch+bundle": "com.apple.product-type.application.watchapp", - "mac_kernel_extension+bundle": "com.apple.product-type.kernel-extension", - } - - target_properties = { - "buildConfigurationList": xccl, - "name": target_name, - } - - type = spec["type"] - is_xctest = int(spec.get("mac_xctest_bundle", 0)) - is_xcuitest = int(spec.get("mac_xcuitest_bundle", 0)) - is_bundle = int(spec.get("mac_bundle", 0)) or is_xctest - is_app_extension = int(spec.get("ios_app_extension", 0)) - is_watchkit_extension = int(spec.get("ios_watchkit_extension", 0)) - is_watch_app = int(spec.get("ios_watch_app", 0)) - if type != "none": - type_bundle_key = type - if is_xcuitest: - type_bundle_key += "+xcuitest" - assert type == "loadable_module", ( - "mac_xcuitest_bundle targets must have type loadable_module " - "(target %s)" % target_name - ) - elif is_xctest: - type_bundle_key += "+xctest" - assert type == "loadable_module", ( - "mac_xctest_bundle targets must have type loadable_module " - "(target %s)" % target_name - ) - elif is_app_extension: - assert is_bundle, ( - "ios_app_extension flag requires mac_bundle " - "(target %s)" % target_name - ) - type_bundle_key += "+extension+bundle" - elif is_watchkit_extension: - assert is_bundle, ( - "ios_watchkit_extension flag requires mac_bundle " - "(target %s)" % target_name - ) - type_bundle_key += "+watch+extension+bundle" - elif is_watch_app: - assert is_bundle, ( - "ios_watch_app flag requires mac_bundle " - "(target %s)" % target_name - ) - type_bundle_key += "+watch+bundle" - elif is_bundle: - type_bundle_key += "+bundle" - - xctarget_type = gyp.xcodeproj_file.PBXNativeTarget - try: - target_properties["productType"] = _types[type_bundle_key] - except KeyError as e: - gyp.common.ExceptionAppend( - e, - "-- unknown product type while " "writing target %s" % target_name, - ) - raise - else: - xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget - assert not is_bundle, ( - 'mac_bundle targets cannot have type none (target "%s")' % target_name - ) - assert not is_xcuitest, ( - 'mac_xcuitest_bundle targets cannot have type none (target "%s")' - % target_name - ) - assert not is_xctest, ( - 'mac_xctest_bundle targets cannot have type none (target "%s")' - % target_name - ) - - target_product_name = spec.get("product_name") - if target_product_name is not None: - target_properties["productName"] = target_product_name - - xct = xctarget_type( - target_properties, - parent=pbxp, - force_outdir=spec.get("product_dir"), - force_prefix=spec.get("product_prefix"), - force_extension=spec.get("product_extension"), - ) - pbxp.AppendProperty("targets", xct) - xcode_targets[qualified_target] = xct - xcode_target_to_target_dict[xct] = spec - - spec_actions = spec.get("actions", []) - 
spec_rules = spec.get("rules", []) - - # Xcode has some "issues" with checking dependencies for the "Compile - # sources" step with any source files/headers generated by actions/rules. - # To work around this, if a target is building anything directly (not - # type "none"), then a second target is used to run the GYP actions/rules - # and is made a dependency of this target. This way the work is done - # before the dependency checks for what should be recompiled. - support_xct = None - # The Xcode "issues" don't affect xcode-ninja builds, since the dependency - # logic all happens in ninja. Don't bother creating the extra targets in - # that case. - if type != "none" and (spec_actions or spec_rules) and not ninja_wrapper: - support_xccl = CreateXCConfigurationList(configuration_names) - support_target_suffix = generator_flags.get( - "support_target_suffix", " Support" - ) - support_target_properties = { - "buildConfigurationList": support_xccl, - "name": target_name + support_target_suffix, - } - if target_product_name: - support_target_properties["productName"] = ( - target_product_name + " Support" - ) - support_xct = gyp.xcodeproj_file.PBXAggregateTarget( - support_target_properties, parent=pbxp - ) - pbxp.AppendProperty("targets", support_xct) - xct.AddDependency(support_xct) - # Hang the support target off the main target so it can be tested/found - # by the generator during Finalize. - xct.support_target = support_xct - - prebuild_index = 0 - - # Add custom shell script phases for "actions" sections. - for action in spec_actions: - # There's no need to write anything into the script to ensure that the - # output directories already exist, because Xcode will look at the - # declared outputs and automatically ensure that they exist for us. - - # Do we have a message to print when this action runs? - message = action.get("message") - if message: - message = "echo note: " + gyp.common.EncodePOSIXShellArgument(message) - else: - message = "" - - # Turn the list into a string that can be passed to a shell. - action_string = gyp.common.EncodePOSIXShellList(action["action"]) - - # Convert Xcode-type variable references to sh-compatible environment - # variable references. - message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message) - action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - action_string - ) - - script = "" - # Include the optional message - if message_sh: - script += message_sh + "\n" - # Be sure the script runs in exec, and that if exec fails, the script - # exits signalling an error. - script += "exec " + action_string_sh + "\nexit 1\n" - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase( - { - "inputPaths": action["inputs"], - "name": 'Action "' + action["action_name"] + '"', - "outputPaths": action["outputs"], - "shellScript": script, - "showEnvVarsInLog": 0, - } - ) - - if support_xct: - support_xct.AppendProperty("buildPhases", ssbp) - else: - # TODO(mark): this assumes too much knowledge of the internals of - # xcodeproj_file; some of these smarts should move into xcodeproj_file - # itself. - xct._properties["buildPhases"].insert(prebuild_index, ssbp) - prebuild_index = prebuild_index + 1 - - # TODO(mark): Should verify that at most one of these is specified. 
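A sketch of the script-assembly scheme described above: an optional `note:` echo (so Xcode surfaces the message in build output), then `exec` of the action so that a failed exec falls through to `exit 1`. Here `shlex` stands in for gyp's `EncodePOSIXShellList`/`EncodePOSIXShellArgument` quoting helpers:

```python
# Assemble a shell-script phase body the way the action handling above
# does: optional "note:" echo, then exec the action; if exec fails,
# fall through to exit 1.
import shlex

def action_script(action_list, message=None):
    script = ""
    if message:
        script += "echo note: " + shlex.quote(message) + "\n"
    script += "exec " + shlex.join(action_list) + "\nexit 1\n"
    return script

print(action_script(["python", "gen.py", "--out", "foo.cc"], "generating foo"))
```

The per-action handling this sketch mirrors continues below.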
- if int(action.get("process_outputs_as_sources", False)):
- for output in action["outputs"]:
- AddSourceToTarget(output, type, pbxp, xct)
-
- if int(action.get("process_outputs_as_mac_bundle_resources", False)):
- for output in action["outputs"]:
- AddResourceToTarget(output, pbxp, xct)
-
- # tgt_mac_bundle_resources holds the list of bundle resources so
- # the rule processing can check against it.
- if is_bundle:
- tgt_mac_bundle_resources = spec.get("mac_bundle_resources", [])
- else:
- tgt_mac_bundle_resources = []
-
- # Add custom shell script phases driving "make" for "rules" sections.
- #
- # Xcode's built-in rule support is almost powerful enough to use directly,
- # but there are a few significant deficiencies that render it unusable.
- # There are workarounds for some of its inadequacies, but in aggregate,
- # the workarounds added complexity to the generator, and some workarounds
- # actually require input files to be crafted more carefully than I'd like.
- # Consequently, until Xcode rules are made more capable, "rules" input
- # sections will be handled in Xcode output by shell script build phases
- # performed prior to the compilation phase.
- #
- # The following problems with Xcode rules were found. The numbers are
- # Apple radar IDs. I hope that these shortcomings are addressed; I really
- # liked having the rules handled directly in Xcode during the period that
- # I was prototyping this.
- #
- # 6588600 Xcode compiles custom script rule outputs too soon, compilation
- # fails. This occurs when rule outputs from distinct inputs are
- # interdependent. The only workaround is to put rules and their
- # inputs in a separate target from the one that compiles the rule
- # outputs. This requires input file cooperation and it means that
- # process_outputs_as_sources is unusable.
- # 6584932 Need to declare that custom rule outputs should be excluded from
- # compilation. A possible workaround is to lie to Xcode about a
- # rule's output, giving it a dummy file it doesn't know how to
- # compile. The rule action script would need to touch the dummy.
- # 6584839 I need a way to declare additional inputs to a custom rule.
- # A possible workaround is a shell script phase prior to
- # compilation that touches a rule's primary input files if any
- # would-be additional inputs are newer than the output. Modifying
- # the source tree - even just modification times - feels dirty.
- # 6564240 Xcode "custom script" build rules always dump all environment
- # variables. This is a low-priority problem and is not a
- # show-stopper.
- rules_by_ext = {}
- for rule in spec_rules:
- rules_by_ext[rule["extension"]] = rule
-
- # First, some definitions:
- #
- # A "rule source" is a file that was listed in a target's "sources"
- # list and will have a rule applied to it on the basis of matching the
- # rule's "extensions" attribute. Rule sources are direct inputs to
- # rules.
- #
- # Rule definitions may specify additional inputs in their "inputs"
- # attribute. These additional inputs are used for dependency tracking
- # purposes.
- #
- # A "concrete output" is a rule output with input-dependent variables
- # resolved. For example, given a rule with:
- # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
- # if the target's "sources" list contained "one.ext" and "two.ext",
- # the "concrete output" for rule input "two.ext" would be "two.cc". If
- # a rule specifies multiple outputs, each input file that the rule is
- # applied to will have the same number of concrete outputs.
- # - # If any concrete outputs are outdated or missing relative to their - # corresponding rule_source or to any specified additional input, the - # rule action must be performed to generate the concrete outputs. - - # concrete_outputs_by_rule_source will have an item at the same index - # as the rule['rule_sources'] that it corresponds to. Each item is a - # list of all of the concrete outputs for the rule_source. - concrete_outputs_by_rule_source = [] - - # concrete_outputs_all is a flat list of all concrete outputs that this - # rule is able to produce, given the known set of input files - # (rule_sources) that apply to it. - concrete_outputs_all = [] - - # messages & actions are keyed by the same indices as rule['rule_sources'] - # and concrete_outputs_by_rule_source. They contain the message and - # action to perform after resolving input-dependent variables. The - # message is optional, in which case None is stored for each rule source. - messages = [] - actions = [] - - for rule_source in rule.get("rule_sources", []): - rule_source_dirname, rule_source_basename = posixpath.split(rule_source) - (rule_source_root, rule_source_ext) = posixpath.splitext( - rule_source_basename - ) - - # These are the same variable names that Xcode uses for its own native - # rule support. Because Xcode's rule engine is not being used, they - # need to be expanded as they are written to the makefile. - rule_input_dict = { - "INPUT_FILE_BASE": rule_source_root, - "INPUT_FILE_SUFFIX": rule_source_ext, - "INPUT_FILE_NAME": rule_source_basename, - "INPUT_FILE_PATH": rule_source, - "INPUT_FILE_DIRNAME": rule_source_dirname, - } - - concrete_outputs_for_this_rule_source = [] - for output in rule.get("outputs", []): - # Fortunately, Xcode and make both use $(VAR) format for their - # variables, so the expansion is the only transformation necessary. - # Any remaining $(VAR)-type variables in the string can be given - # directly to make, which will pick up the correct settings from - # what Xcode puts into the environment. - concrete_output = ExpandXcodeVariables(output, rule_input_dict) - concrete_outputs_for_this_rule_source.append(concrete_output) - - # Add all concrete outputs to the project. - pbxp.AddOrGetFileInRootGroup(concrete_output) - - concrete_outputs_by_rule_source.append( - concrete_outputs_for_this_rule_source - ) - concrete_outputs_all.extend(concrete_outputs_for_this_rule_source) - - # TODO(mark): Should verify that at most one of these is specified. - if int(rule.get("process_outputs_as_sources", False)): - for output in concrete_outputs_for_this_rule_source: - AddSourceToTarget(output, type, pbxp, xct) - - # If the file came from the mac_bundle_resources list or if the rule - # is marked to process outputs as bundle resource, do so. - was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources - if was_mac_bundle_resource or int( - rule.get("process_outputs_as_mac_bundle_resources", False) - ): - for output in concrete_outputs_for_this_rule_source: - AddResourceToTarget(output, pbxp, xct) - - # Do we have a message to print when this rule runs? - message = rule.get("message") - if message: - message = gyp.common.EncodePOSIXShellArgument(message) - message = ExpandXcodeVariables(message, rule_input_dict) - messages.append(message) - - # Turn the list into a string that can be passed to a shell. 
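The "concrete output" expansion described above can be shown end to end. This standalone sketch re-implements just enough `$(VAR)` substitution to resolve the rule output template for the source `two.ext`; the action-string construction it feeds continues below:

```python
# Just enough $(VAR) substitution to turn the rule output template
# '$(INPUT_FILE_BASE).cc' into the concrete output 'two.cc' for the
# rule source 'src/two.ext', mirroring the per-source dict built above.
import posixpath
import re

def expand(template, expansions):
    return re.sub(r"\$\((.*?)\)",
                  lambda m: expansions.get(m.group(1), m.group(0)),
                  template)

rule_source = "src/two.ext"
dirname, basename = posixpath.split(rule_source)
root, ext = posixpath.splitext(basename)
rule_input_dict = {
    "INPUT_FILE_BASE": root,
    "INPUT_FILE_SUFFIX": ext,
    "INPUT_FILE_NAME": basename,
    "INPUT_FILE_PATH": rule_source,
    "INPUT_FILE_DIRNAME": dirname,
}
print(expand("$(INPUT_FILE_BASE).cc", rule_input_dict))  # two.cc
```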
- action_string = gyp.common.EncodePOSIXShellList(rule["action"]) - - action = ExpandXcodeVariables(action_string, rule_input_dict) - actions.append(action) - - if len(concrete_outputs_all) > 0: - # TODO(mark): There's a possibility for collision here. Consider - # target "t" rule "A_r" and target "t_A" rule "r". - makefile_name = "%s.make" % re.sub( - "[^a-zA-Z0-9_]", "_", "{}_{}".format(target_name, rule["rule_name"]) - ) - makefile_path = os.path.join( - xcode_projects[build_file].path, makefile_name - ) - # TODO(mark): try/close? Write to a temporary file and swap it only - # if it's got changes? - makefile = open(makefile_path, "w") - - # make will build the first target in the makefile by default. By - # convention, it's called "all". List all (or at least one) - # concrete output for each rule source as a prerequisite of the "all" - # target. - makefile.write("all: \\\n") - for concrete_output_index, concrete_output_by_rule_source in enumerate( - concrete_outputs_by_rule_source - ): - # Only list the first (index [0]) concrete output of each input - # in the "all" target. Otherwise, a parallel make (-j > 1) would - # attempt to process each input multiple times simultaneously. - # Otherwise, "all" could just contain the entire list of - # concrete_outputs_all. - concrete_output = concrete_output_by_rule_source[0] - if ( - concrete_output_index - == len(concrete_outputs_by_rule_source) - 1 - ): - eol = "" - else: - eol = " \\" - makefile.write(f" {concrete_output}{eol}\n") - - for (rule_source, concrete_outputs, message, action) in zip( - rule["rule_sources"], - concrete_outputs_by_rule_source, - messages, - actions, - ): - makefile.write("\n") - - # Add a rule that declares it can build each concrete output of a - # rule source. Collect the names of the directories that are - # required. - concrete_output_dirs = [] - for concrete_output_index, concrete_output in enumerate( - concrete_outputs - ): - bol = "" if concrete_output_index == 0 else " " - makefile.write(f"{bol}{concrete_output} \\\n") - - concrete_output_dir = posixpath.dirname(concrete_output) - if ( - concrete_output_dir - and concrete_output_dir not in concrete_output_dirs - ): - concrete_output_dirs.append(concrete_output_dir) - - makefile.write(" : \\\n") - - # The prerequisites for this rule are the rule source itself and - # the set of additional rule inputs, if any. - prerequisites = [rule_source] - prerequisites.extend(rule.get("inputs", [])) - for prerequisite_index, prerequisite in enumerate(prerequisites): - if prerequisite_index == len(prerequisites) - 1: - eol = "" - else: - eol = " \\" - makefile.write(f" {prerequisite}{eol}\n") - - # Make sure that output directories exist before executing the rule - # action. - if len(concrete_output_dirs) > 0: - makefile.write( - '\t@mkdir -p "%s"\n' % '" "'.join(concrete_output_dirs) - ) - - # The rule message and action have already had - # the necessary variable substitutions performed. - if message: - # Mark it with note: so Xcode picks it up in build output. - makefile.write("\t@echo note: %s\n" % message) - makefile.write("\t%s\n" % action) - - makefile.close() - - # It might be nice to ensure that needed output directories exist - # here rather than in each target in the Makefile, but that wouldn't - # work if there ever was a concrete output that had an input-dependent - # variable anywhere other than in the leaf position. - - # Don't declare any inputPaths or outputPaths. 
If they're present, - # Xcode will provide a slight optimization by only running the script - # phase if any output is missing or outdated relative to any input. - # Unfortunately, it will also assume that all outputs are touched by - # the script, and if the outputs serve as files in a compilation - # phase, they will be unconditionally rebuilt. Since make might not - # rebuild everything that could be declared here as an output, this - # extra compilation activity is unnecessary. With inputPaths and - # outputPaths not supplied, make will always be called, but it knows - # enough to not do anything when everything is up-to-date. - - # To help speed things up, pass -j COUNT to make so it does some work - # in parallel. Don't use ncpus because Xcode will build ncpus targets - # in parallel and if each target happens to have a rules step, there - # would be ncpus^2 things going. With a machine that has 2 quad-core - # Xeons, a build can quickly run out of processes based on - # scheduling/other tasks, and randomly failing builds are no good. - script = ( - """JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)" -if [ "${JOB_COUNT}" -gt 4 ]; then - JOB_COUNT=4 -fi -exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}" -exit 1 -""" - % makefile_name - ) - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase( - { - "name": 'Rule "' + rule["rule_name"] + '"', - "shellScript": script, - "showEnvVarsInLog": 0, - } - ) - - if support_xct: - support_xct.AppendProperty("buildPhases", ssbp) - else: - # TODO(mark): this assumes too much knowledge of the internals of - # xcodeproj_file; some of these smarts should move - # into xcodeproj_file itself. - xct._properties["buildPhases"].insert(prebuild_index, ssbp) - prebuild_index = prebuild_index + 1 - - # Extra rule inputs also go into the project file. Concrete outputs were - # already added when they were computed. - groups = ["inputs", "inputs_excluded"] - if skip_excluded_files: - groups = [x for x in groups if not x.endswith("_excluded")] - for group in groups: - for item in rule.get(group, []): - pbxp.AddOrGetFileInRootGroup(item) - - # Add "sources". - for source in spec.get("sources", []): - (source_root, source_extension) = posixpath.splitext(source) - if source_extension[1:] not in rules_by_ext: - # AddSourceToTarget will add the file to a root group if it's not - # already there. - AddSourceToTarget(source, type, pbxp, xct) - else: - pbxp.AddOrGetFileInRootGroup(source) - - # Add "mac_bundle_resources" and "mac_framework_private_headers" if - # it's a bundle of any type. - if is_bundle: - for resource in tgt_mac_bundle_resources: - (resource_root, resource_extension) = posixpath.splitext(resource) - if resource_extension[1:] not in rules_by_ext: - AddResourceToTarget(resource, pbxp, xct) - else: - pbxp.AddOrGetFileInRootGroup(resource) - - for header in spec.get("mac_framework_private_headers", []): - AddHeaderToTarget(header, pbxp, xct, False) - - # Add "mac_framework_headers". These can be valid for both frameworks - # and static libraries. - if is_bundle or type == "static_library": - for header in spec.get("mac_framework_headers", []): - AddHeaderToTarget(header, pbxp, xct, True) - - # Add "copies". - pbxcp_dict = {} - for copy_group in spec.get("copies", []): - dest = copy_group["destination"] - if dest[0] not in ("/", "$"): - # Relative paths are relative to $(SRCROOT). 
- dest = "$(SRCROOT)/" + dest - - code_sign = int(copy_group.get("xcode_code_sign", 0)) - settings = (None, "{ATTRIBUTES = (CodeSignOnCopy, ); }")[code_sign] - - # Coalesce multiple "copies" sections in the same target with the same - # "destination" property into the same PBXCopyFilesBuildPhase, otherwise - # they'll wind up with ID collisions. - pbxcp = pbxcp_dict.get(dest, None) - if pbxcp is None: - pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase( - {"name": "Copy to " + copy_group["destination"]}, parent=xct - ) - pbxcp.SetDestination(dest) - - # TODO(mark): The usual comment about this knowing too much about - # gyp.xcodeproj_file internals applies. - xct._properties["buildPhases"].insert(prebuild_index, pbxcp) - - pbxcp_dict[dest] = pbxcp - - for file in copy_group["files"]: - pbxcp.AddFile(file, settings) - - # Excluded files can also go into the project file. - if not skip_excluded_files: - for key in [ - "sources", - "mac_bundle_resources", - "mac_framework_headers", - "mac_framework_private_headers", - ]: - excluded_key = key + "_excluded" - for item in spec.get(excluded_key, []): - pbxp.AddOrGetFileInRootGroup(item) - - # So can "inputs" and "outputs" sections of "actions" groups. - groups = ["inputs", "inputs_excluded", "outputs", "outputs_excluded"] - if skip_excluded_files: - groups = [x for x in groups if not x.endswith("_excluded")] - for action in spec.get("actions", []): - for group in groups: - for item in action.get(group, []): - # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not - # sources. - if not item.startswith("$(BUILT_PRODUCTS_DIR)/"): - pbxp.AddOrGetFileInRootGroup(item) - - for postbuild in spec.get("postbuilds", []): - action_string_sh = gyp.common.EncodePOSIXShellList(postbuild["action"]) - script = "exec " + action_string_sh + "\nexit 1\n" - - # Make the postbuild step depend on the output of ld or ar from this - # target. Apparently putting the script step after the link step isn't - # sufficient to ensure proper ordering in all cases. With an input - # declared but no outputs, the script step should run every time, as - # desired. - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase( - { - "inputPaths": ["$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)"], - "name": 'Postbuild "' + postbuild["postbuild_name"] + '"', - "shellScript": script, - "showEnvVarsInLog": 0, - } - ) - xct.AppendProperty("buildPhases", ssbp) - - # Add dependencies before libraries, because adding a dependency may imply - # adding a library. It's preferable to keep dependencies listed first - # during a link phase so that they can override symbols that would - # otherwise be provided by libraries, which will usually include system - # libraries. On some systems, ld is finicky and even requires the - # libraries to be ordered in such a way that unresolved symbols in - # earlier-listed libraries may only be resolved by later-listed libraries. - # The Mac linker doesn't work that way, but other platforms do, and so - # their linker invocations need to be constructed in this way. There's - # no compelling reason for Xcode's linker invocations to differ. - - if "dependencies" in spec: - for dependency in spec["dependencies"]: - xct.AddDependency(xcode_targets[dependency]) - # The support project also gets the dependencies (in case they are - # needed for the actions/rules to work). 
- if support_xct: - support_xct.AddDependency(xcode_targets[dependency]) - - if "libraries" in spec: - for library in spec["libraries"]: - xct.FrameworksPhase().AddFile(library) - # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary. - # I wish Xcode handled this automatically. - library_dir = posixpath.dirname(library) - if library_dir not in xcode_standard_library_dirs and ( - not xct.HasBuildSetting(_library_search_paths_var) - or library_dir not in xct.GetBuildSetting(_library_search_paths_var) - ): - xct.AppendBuildSetting(_library_search_paths_var, library_dir) - - for configuration_name in configuration_names: - configuration = spec["configurations"][configuration_name] - xcbc = xct.ConfigurationNamed(configuration_name) - for include_dir in configuration.get("mac_framework_dirs", []): - xcbc.AppendBuildSetting("FRAMEWORK_SEARCH_PATHS", include_dir) - for include_dir in configuration.get("include_dirs", []): - xcbc.AppendBuildSetting("HEADER_SEARCH_PATHS", include_dir) - for library_dir in configuration.get("library_dirs", []): - if library_dir not in xcode_standard_library_dirs and ( - not xcbc.HasBuildSetting(_library_search_paths_var) - or library_dir - not in xcbc.GetBuildSetting(_library_search_paths_var) - ): - xcbc.AppendBuildSetting(_library_search_paths_var, library_dir) - - if "defines" in configuration: - for define in configuration["defines"]: - set_define = EscapeXcodeDefine(define) - xcbc.AppendBuildSetting("GCC_PREPROCESSOR_DEFINITIONS", set_define) - if "xcode_settings" in configuration: - for xck, xcv in configuration["xcode_settings"].items(): - xcbc.SetBuildSetting(xck, xcv) - if "xcode_config_file" in configuration: - config_ref = pbxp.AddOrGetFileInRootGroup( - configuration["xcode_config_file"] - ) - xcbc.SetBaseConfiguration(config_ref) - - build_files = [] - for build_file, build_file_dict in data.items(): - if build_file.endswith(".gyp"): - build_files.append(build_file) - - for build_file in build_files: - xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests) - - for build_file in build_files: - xcode_projects[build_file].Finalize2(xcode_targets, xcode_target_to_target_dict) - - for build_file in build_files: - xcode_projects[build_file].Write() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py deleted file mode 100644 index 49772d1f4d810..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the xcode.py file. 
""" - -import gyp.generator.xcode as xcode -import unittest -import sys - - -class TestEscapeXcodeDefine(unittest.TestCase): - if sys.platform == "darwin": - - def test_InheritedRemainsUnescaped(self): - self.assertEqual(xcode.EscapeXcodeDefine("$(inherited)"), "$(inherited)") - - def test_Escaping(self): - self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\') - - -if __name__ == "__main__": - unittest.main() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/input.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/input.py deleted file mode 100644 index 7150269cda585..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/input.py +++ /dev/null @@ -1,3115 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - - -import ast - -import gyp.common -import gyp.simple_copy -import multiprocessing -import os.path -import re -import shlex -import signal -import subprocess -import sys -import threading -import traceback -from gyp.common import GypError -from gyp.common import OrderedSet -from packaging.version import Version - -# A list of types that are treated as linkable. -linkable_types = [ - "executable", - "shared_library", - "loadable_module", - "mac_kernel_extension", - "windows_driver", -] - -# A list of sections that contain links to other targets. -dependency_sections = ["dependencies", "export_dependent_settings"] - -# base_path_sections is a list of sections defined by GYP that contain -# pathnames. The generators can provide more keys, the two lists are merged -# into path_sections, but you should call IsPathSection instead of using either -# list directly. -base_path_sections = [ - "destination", - "files", - "include_dirs", - "inputs", - "libraries", - "outputs", - "sources", -] -path_sections = set() - -# These per-process dictionaries are used to cache build file data when loading -# in parallel mode. -per_process_data = {} -per_process_aux_data = {} - - -def IsPathSection(section): - # If section ends in one of the '=+?!' characters, it's applied to a section - # without the trailing characters. '/' is notably absent from this list, - # because there's no way for a regular expression to be treated as a path. - while section and section[-1:] in "=+?!": - section = section[:-1] - - if section in path_sections: - return True - - # Sections matching the regexp '_(dir|file|path)s?$' are also - # considered PathSections. Using manual string matching since that - # is much faster than the regexp and this can be called hundreds of - # thousands of times so micro performance matters. - if "_" in section: - tail = section[-6:] - if tail[-1] == "s": - tail = tail[:-1] - if tail[-5:] in ("_file", "_path"): - return True - return tail[-4:] == "_dir" - - return False - - -# base_non_configuration_keys is a list of key names that belong in the target -# itself and should not be propagated into its configurations. It is merged -# with a list that can come from the generator to -# create non_configuration_keys. -base_non_configuration_keys = [ - # Sections that must exist inside targets and not configurations. 
- "actions", - "configurations", - "copies", - "default_configuration", - "dependencies", - "dependencies_original", - "libraries", - "postbuilds", - "product_dir", - "product_extension", - "product_name", - "product_prefix", - "rules", - "run_as", - "sources", - "standalone_static_library", - "suppress_wildcard", - "target_name", - "toolset", - "toolsets", - "type", - # Sections that can be found inside targets or configurations, but that - # should not be propagated from targets into their configurations. - "variables", -] -non_configuration_keys = [] - -# Keys that do not belong inside a configuration dictionary. -invalid_configuration_keys = [ - "actions", - "all_dependent_settings", - "configurations", - "dependencies", - "direct_dependent_settings", - "libraries", - "link_settings", - "sources", - "standalone_static_library", - "target_name", - "type", -] - -# Controls whether or not the generator supports multiple toolsets. -multiple_toolsets = False - -# Paths for converting filelist paths to output paths: { -# toplevel, -# qualified_output_dir, -# } -generator_filelist_paths = None - - -def GetIncludedBuildFiles(build_file_path, aux_data, included=None): - """Return a list of all build files included into build_file_path. - - The returned list will contain build_file_path as well as all other files - that it included, either directly or indirectly. Note that the list may - contain files that were included into a conditional section that evaluated - to false and was not merged into build_file_path's dict. - - aux_data is a dict containing a key for each build file or included build - file. Those keys provide access to dicts whose "included" keys contain - lists of all other files included by the build file. - - included should be left at its default None value by external callers. It - is used for recursion. - - The returned list will not contain any duplicate entries. Each build file - in the list will be relative to the current directory. - """ - - if included is None: - included = [] - - if build_file_path in included: - return included - - included.append(build_file_path) - - for included_build_file in aux_data[build_file_path].get("included", []): - GetIncludedBuildFiles(included_build_file, aux_data, included) - - return included - - -def CheckedEval(file_contents): - """Return the eval of a gyp file. - The gyp file is restricted to dictionaries and lists only, and - repeated keys are not allowed. - Note that this is slower than eval() is. - """ - - syntax_tree = ast.parse(file_contents) - assert isinstance(syntax_tree, ast.Module) - c1 = syntax_tree.body - assert len(c1) == 1 - c2 = c1[0] - assert isinstance(c2, ast.Expr) - return CheckNode(c2.value, []) - - -def CheckNode(node, keypath): - if isinstance(node, ast.Dict): - dict = {} - for key, value in zip(node.keys, node.values): - assert isinstance(key, ast.Str) - key = key.s - if key in dict: - raise GypError( - "Key '" - + key - + "' repeated at level " - + repr(len(keypath) + 1) - + " with key path '" - + ".".join(keypath) - + "'" - ) - kp = list(keypath) # Make a copy of the list for descending this node. - kp.append(key) - dict[key] = CheckNode(value, kp) - return dict - elif isinstance(node, ast.List): - children = [] - for index, child in enumerate(node.elts): - kp = list(keypath) # Copy list. 
- kp.append(repr(index)) - children.append(CheckNode(child, kp)) - return children - elif isinstance(node, ast.Str): - return node.s - else: - raise TypeError( - "Unknown AST node at key path '" + ".".join(keypath) + "': " + repr(node) - ) - - -def LoadOneBuildFile(build_file_path, data, aux_data, includes, is_target, check): - if build_file_path in data: - return data[build_file_path] - - if os.path.exists(build_file_path): - build_file_contents = open(build_file_path, encoding="utf-8").read() - else: - raise GypError(f"{build_file_path} not found (cwd: {os.getcwd()})") - - build_file_data = None - try: - if check: - build_file_data = CheckedEval(build_file_contents) - else: - build_file_data = eval(build_file_contents, {"__builtins__": {}}, None) - except SyntaxError as e: - e.filename = build_file_path - raise - except Exception as e: - gyp.common.ExceptionAppend(e, "while reading " + build_file_path) - raise - - if type(build_file_data) is not dict: - raise GypError("%s does not evaluate to a dictionary." % build_file_path) - - data[build_file_path] = build_file_data - aux_data[build_file_path] = {} - - # Scan for includes and merge them in. - if "skip_includes" not in build_file_data or not build_file_data["skip_includes"]: - try: - if is_target: - LoadBuildFileIncludesIntoDict( - build_file_data, build_file_path, data, aux_data, includes, check - ) - else: - LoadBuildFileIncludesIntoDict( - build_file_data, build_file_path, data, aux_data, None, check - ) - except Exception as e: - gyp.common.ExceptionAppend( - e, "while reading includes of " + build_file_path - ) - raise - - return build_file_data - - -def LoadBuildFileIncludesIntoDict( - subdict, subdict_path, data, aux_data, includes, check -): - includes_list = [] - if includes is not None: - includes_list.extend(includes) - if "includes" in subdict: - for include in subdict["includes"]: - # "include" is specified relative to subdict_path, so compute the real - # path to include by appending the provided "include" to the directory - # in which subdict_path resides. - relative_include = os.path.normpath( - os.path.join(os.path.dirname(subdict_path), include) - ) - includes_list.append(relative_include) - # Unhook the includes list, it's no longer needed. - del subdict["includes"] - - # Merge in the included files. - for include in includes_list: - if "included" not in aux_data[subdict_path]: - aux_data[subdict_path]["included"] = [] - aux_data[subdict_path]["included"].append(include) - - gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include) - - MergeDicts( - subdict, - LoadOneBuildFile(include, data, aux_data, None, False, check), - subdict_path, - include, - ) - - # Recurse into subdictionaries. - for k, v in subdict.items(): - if type(v) is dict: - LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, None, check) - elif type(v) is list: - LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, check) - - -# This recurses into lists so that it can look for dicts. -def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check): - for item in sublist: - if type(item) is dict: - LoadBuildFileIncludesIntoDict( - item, sublist_path, data, aux_data, None, check - ) - elif type(item) is list: - LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check) - - -# Processes toolsets in all the targets. This recurses into condition entries -# since they can contain toolsets as well. 
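A note on the `CheckedEval`/`CheckNode` pair above: it is essentially a stricter `ast.literal_eval` that accepts only dicts, lists, and strings and rejects duplicate dict keys. A minimal standalone sketch (`checked_eval` is a hypothetical name, and it is written against `ast.Constant` rather than the deprecated `ast.Str` the vendored code uses); the toolset fan-out that the comment above introduces follows after it:

```python
import ast

def checked_eval(source):
    """Evaluate a gyp-style literal: dicts, lists, and strings only,
    rejecting duplicate dict keys -- a sketch of the CheckNode walk."""
    def check(node, keypath):
        if isinstance(node, ast.Dict):
            result = {}
            for key_node, value_node in zip(node.keys, node.values):
                key = key_node.value
                if key in result:
                    raise ValueError(
                        f"Key {key!r} repeated at key path {'.'.join(keypath)!r}")
                result[key] = check(value_node, keypath + [key])
            return result
        if isinstance(node, ast.List):
            return [check(child, keypath + [repr(i)])
                    for i, child in enumerate(node.elts)]
        if isinstance(node, ast.Constant) and isinstance(node.value, str):
            return node.value
        raise TypeError(f"Unknown node at key path {'.'.join(keypath)!r}: {node!r}")

    return check(ast.parse(source, mode="eval").body, [])

print(checked_eval("{'targets': [{'target_name': 'demo'}]}"))
```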
-def ProcessToolsetsInDict(data): - if "targets" in data: - target_list = data["targets"] - new_target_list = [] - for target in target_list: - # If this target already has an explicit 'toolset', and no 'toolsets' - # list, don't modify it further. - if "toolset" in target and "toolsets" not in target: - new_target_list.append(target) - continue - if multiple_toolsets: - toolsets = target.get("toolsets", ["target"]) - else: - toolsets = ["target"] - # Make sure this 'toolsets' definition is only processed once. - if "toolsets" in target: - del target["toolsets"] - if len(toolsets) > 0: - # Optimization: only do copies if more than one toolset is specified. - for build in toolsets[1:]: - new_target = gyp.simple_copy.deepcopy(target) - new_target["toolset"] = build - new_target_list.append(new_target) - target["toolset"] = toolsets[0] - new_target_list.append(target) - data["targets"] = new_target_list - if "conditions" in data: - for condition in data["conditions"]: - if type(condition) is list: - for condition_dict in condition[1:]: - if type(condition_dict) is dict: - ProcessToolsetsInDict(condition_dict) - - -# TODO(mark): I don't love this name. It just means that it's going to load -# a build file that contains targets and is expected to provide a targets dict -# that contains the targets... -def LoadTargetBuildFile( - build_file_path, - data, - aux_data, - variables, - includes, - depth, - check, - load_dependencies, -): - # If depth is set, predefine the DEPTH variable to be a relative path from - # this build file's directory to the directory identified by depth. - if depth: - # TODO(dglazkov) The backslash/forward-slash replacement at the end is a - # temporary measure. This should really be addressed by keeping all paths - # in POSIX until actual project generation. - d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path)) - if d == "": - variables["DEPTH"] = "." - else: - variables["DEPTH"] = d.replace("\\", "/") - - # The 'target_build_files' key is only set when loading target build files in - # the non-parallel code path, where LoadTargetBuildFile is called - # recursively. In the parallel code path, we don't need to check whether the - # |build_file_path| has already been loaded, because the 'scheduled' set in - # ParallelState guarantees that we never load the same |build_file_path| - # twice. - if "target_build_files" in data: - if build_file_path in data["target_build_files"]: - # Already loaded. - return False - data["target_build_files"].add(build_file_path) - - gyp.DebugOutput( - gyp.DEBUG_INCLUDES, "Loading Target Build File '%s'", build_file_path - ) - - build_file_data = LoadOneBuildFile( - build_file_path, data, aux_data, includes, True, check - ) - - # Store DEPTH for later use in generators. - build_file_data["_DEPTH"] = depth - - # Set up the included_files key indicating which .gyp files contributed to - # this target dict. - if "included_files" in build_file_data: - raise GypError(build_file_path + " must not contain included_files key") - - included = GetIncludedBuildFiles(build_file_path, aux_data) - build_file_data["included_files"] = [] - for included_file in included: - # included_file is relative to the current directory, but it needs to - # be made relative to build_file_path's directory. 
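The fan-out that `ProcessToolsetsInDict` performs is easier to see in isolation. A rough sketch (`expand_toolsets` is a hypothetical name) mirroring the optimization above, where copies are made only for the second and later toolsets and the original dict is reused for the first:

```python
import copy

def expand_toolsets(target, multiple_toolsets=True):
    """One copy of the target dict per toolset, each tagged with a
    single 'toolset' key -- a sketch of the fan-out above."""
    toolsets = target.pop("toolsets", ["target"]) if multiple_toolsets else ["target"]
    expanded = []
    for toolset in toolsets[1:]:   # copies for all but the first toolset
        clone = copy.deepcopy(target)
        clone["toolset"] = toolset
        expanded.append(clone)
    target["toolset"] = toolsets[0]  # the original dict takes the first
    expanded.append(target)
    return expanded

demo = {"target_name": "v8_base", "toolsets": ["host", "target"]}
for t in expand_toolsets(demo):
    print(t["target_name"], t["toolset"])
```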
- included_relative = gyp.common.RelativePath( - included_file, os.path.dirname(build_file_path) - ) - build_file_data["included_files"].append(included_relative) - - # Do a first round of toolsets expansion so that conditions can be defined - # per toolset. - ProcessToolsetsInDict(build_file_data) - - # Apply "pre"/"early" variable expansions and condition evaluations. - ProcessVariablesAndConditionsInDict( - build_file_data, PHASE_EARLY, variables, build_file_path - ) - - # Since some toolsets might have been defined conditionally, perform - # a second round of toolsets expansion now. - ProcessToolsetsInDict(build_file_data) - - # Look at each project's target_defaults dict, and merge settings into - # targets. - if "target_defaults" in build_file_data: - if "targets" not in build_file_data: - raise GypError("Unable to find targets in build file %s" % build_file_path) - - index = 0 - while index < len(build_file_data["targets"]): - # This procedure needs to give the impression that target_defaults is - # used as defaults, and the individual targets inherit from that. - # The individual targets need to be merged into the defaults. Make - # a deep copy of the defaults for each target, merge the target dict - # as found in the input file into that copy, and then hook up the - # copy with the target-specific data merged into it as the replacement - # target dict. - old_target_dict = build_file_data["targets"][index] - new_target_dict = gyp.simple_copy.deepcopy( - build_file_data["target_defaults"] - ) - MergeDicts( - new_target_dict, old_target_dict, build_file_path, build_file_path - ) - build_file_data["targets"][index] = new_target_dict - index += 1 - - # No longer needed. - del build_file_data["target_defaults"] - - # Look for dependencies. This means that dependency resolution occurs - # after "pre" conditionals and variable expansion, but before "post" - - # in other words, you can't put a "dependencies" section inside a "post" - # conditional within a target. - - dependencies = [] - if "targets" in build_file_data: - for target_dict in build_file_data["targets"]: - if "dependencies" not in target_dict: - continue - for dependency in target_dict["dependencies"]: - dependencies.append( - gyp.common.ResolveTarget(build_file_path, dependency, None)[0] - ) - - if load_dependencies: - for dependency in dependencies: - try: - LoadTargetBuildFile( - dependency, - data, - aux_data, - variables, - includes, - depth, - check, - load_dependencies, - ) - except Exception as e: - gyp.common.ExceptionAppend( - e, "while loading dependencies of %s" % build_file_path - ) - raise - else: - return (build_file_path, dependencies) - - -def CallLoadTargetBuildFile( - global_flags, - build_file_path, - variables, - includes, - depth, - check, - generator_input_info, -): - """Wrapper around LoadTargetBuildFile for parallel processing. - - This wrapper is used when LoadTargetBuildFile is executed in - a worker process. - """ - - try: - signal.signal(signal.SIGINT, signal.SIG_IGN) - - # Apply globals so that the worker process behaves the same. 
- for key, value in global_flags.items(): - globals()[key] = value - - SetGeneratorGlobals(generator_input_info) - result = LoadTargetBuildFile( - build_file_path, - per_process_data, - per_process_aux_data, - variables, - includes, - depth, - check, - False, - ) - if not result: - return result - - (build_file_path, dependencies) = result - - # We can safely pop the build_file_data from per_process_data because it - # will never be referenced by this process again, so we don't need to keep - # it in the cache. - build_file_data = per_process_data.pop(build_file_path) - - # This gets serialized and sent back to the main process via a pipe. - # It's handled in LoadTargetBuildFileCallback. - return (build_file_path, build_file_data, dependencies) - except GypError as e: - sys.stderr.write("gyp: %s\n" % e) - return None - except Exception as e: - print("Exception:", e, file=sys.stderr) - print(traceback.format_exc(), file=sys.stderr) - return None - - -class ParallelProcessingError(Exception): - pass - - -class ParallelState: - """Class to keep track of state when processing input files in parallel. - - If build files are loaded in parallel, use this to keep track of - state during farming out and processing parallel jobs. It's stored - in a global so that the callback function can have access to it. - """ - - def __init__(self): - # The multiprocessing pool. - self.pool = None - # The condition variable used to protect this object and notify - # the main loop when there might be more data to process. - self.condition = None - # The "data" dict that was passed to LoadTargetBuildFileParallel - self.data = None - # The number of parallel calls outstanding; decremented when a response - # was received. - self.pending = 0 - # The set of all build files that have been scheduled, so we don't - # schedule the same one twice. - self.scheduled = set() - # A list of dependency build file paths that haven't been scheduled yet. - self.dependencies = [] - # Flag to indicate if there was an error in a child process. - self.error = False - - def LoadTargetBuildFileCallback(self, result): - """Handle the results of running LoadTargetBuildFile in another process. - """ - self.condition.acquire() - if not result: - self.error = True - self.condition.notify() - self.condition.release() - return - (build_file_path0, build_file_data0, dependencies0) = result - self.data[build_file_path0] = build_file_data0 - self.data["target_build_files"].add(build_file_path0) - for new_dependency in dependencies0: - if new_dependency not in self.scheduled: - self.scheduled.add(new_dependency) - self.dependencies.append(new_dependency) - self.pending -= 1 - self.condition.notify() - self.condition.release() - - -def LoadTargetBuildFilesParallel( - build_files, data, variables, includes, depth, check, generator_input_info -): - parallel_state = ParallelState() - parallel_state.condition = threading.Condition() - # Make copies of the build_files argument that we can modify while working. 
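The `ParallelState`/`CallLoadTargetBuildFile` machinery above is a classic pool-plus-callback scheduler: the callback runs in the pool's result thread and feeds newly discovered dependency files back into the work queue under a condition variable. A self-contained toy version (all names hypothetical, dependencies hard-coded for the demo):

```python
import multiprocessing
import threading

# Stand-in for CallLoadTargetBuildFile: "loading" a file returns the
# file plus the dependencies it discovered.
DEPS = {"a.gyp": ["b.gyp", "c.gyp"], "b.gyp": ["c.gyp"], "c.gyp": []}

def load_one(path):
    return path, DEPS[path]

def load_all(roots):
    """Pool-plus-callback scheduler in the style of ParallelState above."""
    cond = threading.Condition()
    pending = 0
    scheduled = set(roots)
    queue = list(roots)
    loaded = []

    def callback(result):
        nonlocal pending
        path, new_deps = result
        with cond:
            loaded.append(path)
            for dep in new_deps:
                if dep not in scheduled:  # never schedule a file twice
                    scheduled.add(dep)
                    queue.append(dep)
            pending -= 1
            cond.notify()

    with multiprocessing.Pool(2) as pool:
        with cond:
            while queue or pending:
                if not queue:
                    cond.wait()  # a worker may still report new dependencies
                    continue
                pending += 1
                pool.apply_async(load_one, args=(queue.pop(),),
                                 callback=callback)
        pool.close()
        pool.join()
    return loaded

if __name__ == "__main__":
    print(load_all(["a.gyp"]))  # e.g. ['a.gyp', 'c.gyp', 'b.gyp']
```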
-    parallel_state.dependencies = list(build_files)
-    parallel_state.scheduled = set(build_files)
-    parallel_state.pending = 0
-    parallel_state.data = data
-
-    try:
-        parallel_state.condition.acquire()
-        while parallel_state.dependencies or parallel_state.pending:
-            if parallel_state.error:
-                break
-            if not parallel_state.dependencies:
-                parallel_state.condition.wait()
-                continue
-
-            dependency = parallel_state.dependencies.pop()
-
-            parallel_state.pending += 1
-            global_flags = {
-                "path_sections": globals()["path_sections"],
-                "non_configuration_keys": globals()["non_configuration_keys"],
-                "multiple_toolsets": globals()["multiple_toolsets"],
-            }
-
-            if not parallel_state.pool:
-                parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
-            parallel_state.pool.apply_async(
-                CallLoadTargetBuildFile,
-                args=(
-                    global_flags,
-                    dependency,
-                    variables,
-                    includes,
-                    depth,
-                    check,
-                    generator_input_info,
-                ),
-                callback=parallel_state.LoadTargetBuildFileCallback,
-            )
-    except KeyboardInterrupt as e:
-        parallel_state.pool.terminate()
-        raise e
-
-    parallel_state.condition.release()
-
-    parallel_state.pool.close()
-    parallel_state.pool.join()
-    parallel_state.pool = None
-
-    if parallel_state.error:
-        sys.exit(1)
-
-
-# Look for the bracket that matches the first bracket seen in a
-# string, and return the start and end as a tuple.  For example, if
-# the input is something like "<(foo <(bar)) blah", then it would
-# return (1, 13), indicating the entire string except for the leading
-# "<" and trailing " blah".
-LBRACKETS = set("{[(")
-BRACKETS = {"}": "{", "]": "[", ")": "("}
-
-
-def FindEnclosingBracketGroup(input_str):
-    stack = []
-    start = -1
-    for index, char in enumerate(input_str):
-        if char in LBRACKETS:
-            stack.append(char)
-            if start == -1:
-                start = index
-        elif char in BRACKETS:
-            if not stack:
-                return (-1, -1)
-            if stack.pop() != BRACKETS[char]:
-                return (-1, -1)
-            if not stack:
-                return (start, index + 1)
-    return (-1, -1)
-
-
-def IsStrCanonicalInt(string):
-    """Returns True if |string| is in its canonical integer form.
-
-    The canonical form is such that str(int(string)) == string.
-    """
-    if type(string) is str:
-        # This function is called a lot so for maximum performance, avoid
-        # involving regexps which would otherwise make the code much
-        # shorter. Regexps would need twice the time of this function.
-        if string:
-            if string == "0":
-                return True
-            if string[0] == "-":
-                string = string[1:]
-                if not string:
-                    return False
-            if "1" <= string[0] <= "9":
-                return string.isdigit()
-
-    return False
-
-
-# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
-# "<!interpreter(arguments)".
-early_variable_re = re.compile(
-    r"(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)"
-    r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
-    r"\((?P<is_array>\s*\[?)"
-    r"(?P<content>.*?)(\]?)\))"
-)
-
-# This matches the same as early_variable_re, but with '>' instead of '<'.
-late_variable_re = re.compile(
-    r"(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)"
-    r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
-    r"\((?P<is_array>\s*\[?)"
-    r"(?P<content>.*?)(\]?)\))"
-)
-
-# This matches the same as early_variable_re, but with '^' instead of '<'.
-latelate_variable_re = re.compile(
-    r"(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)"
-    r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
-    r"\((?P<is_array>\s*\[?)"
-    r"(?P<content>.*?)(\]?)\))"
-)
-
-# Global cache of results from running commands so they don't have to be run
-# more than once.
-cached_command_results = {} - - -def FixupPlatformCommand(cmd): - if sys.platform == "win32": - if type(cmd) is list: - cmd = [re.sub("^cat ", "type ", cmd[0])] + cmd[1:] - else: - cmd = re.sub("^cat ", "type ", cmd) - return cmd - - -PHASE_EARLY = 0 -PHASE_LATE = 1 -PHASE_LATELATE = 2 - - -def ExpandVariables(input, phase, variables, build_file): - # Look for the pattern that gets expanded into variables - if phase == PHASE_EARLY: - variable_re = early_variable_re - expansion_symbol = "<" - elif phase == PHASE_LATE: - variable_re = late_variable_re - expansion_symbol = ">" - elif phase == PHASE_LATELATE: - variable_re = latelate_variable_re - expansion_symbol = "^" - else: - assert False - - input_str = str(input) - if IsStrCanonicalInt(input_str): - return int(input_str) - - # Do a quick scan to determine if an expensive regex search is warranted. - if expansion_symbol not in input_str: - return input_str - - # Get the entire list of matches as a list of MatchObject instances. - # (using findall here would return strings instead of MatchObjects). - matches = list(variable_re.finditer(input_str)) - if not matches: - return input_str - - output = input_str - # Reverse the list of matches so that replacements are done right-to-left. - # That ensures that earlier replacements won't mess up the string in a - # way that causes later calls to find the earlier substituted text instead - # of what's intended for replacement. - matches.reverse() - for match_group in matches: - match = match_group.groupdict() - gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match) - # match['replace'] is the substring to look for, match['type'] - # is the character code for the replacement type (< > ! <| >| <@ - # >@ !@), match['is_array'] contains a '[' for command - # arrays, and match['content'] is the name of the variable (< >) - # or command to run (!). match['command_string'] is an optional - # command string. Currently, only 'pymod_do_main' is supported. - - # run_command is true if a ! variant is used. - run_command = "!" in match["type"] - command_string = match["command_string"] - - # file_list is true if a | variant is used. - file_list = "|" in match["type"] - - # Capture these now so we can adjust them later. - replace_start = match_group.start("replace") - replace_end = match_group.end("replace") - - # Find the ending paren, and re-evaluate the contained string. - (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:]) - - # Adjust the replacement range to match the entire command - # found by FindEnclosingBracketGroup (since the variable_re - # probably doesn't match the entire command if it contained - # nested variables). - replace_end = replace_start + c_end - - # Find the "real" replacement, matching the appropriate closing - # paren, and adjust the replacement start and end. - replacement = input_str[replace_start:replace_end] - - # Figure out what the contents of the variable parens are. - contents_start = replace_start + c_start + 1 - contents_end = replace_end - 1 - contents = input_str[contents_start:contents_end] - - # Do filter substitution now for <|(). - # Admittedly, this is different than the evaluation order in other - # contexts. However, since filtration has no chance to run on <|(), - # this seems like the only obvious way to give them access to filters. 
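The three expansion regexes differ only in their leading sigil (`<` early, `>` late, `^` late-late); the named groups feed the `match["type"]`, `match["command_string"]`, and `match["content"]` lookups used above. A quick demonstration of what each group captures, using the early form:

```python
import re

early_variable_re = re.compile(
    r"(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)"
    r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
    r"\((?P<is_array>\s*\[?)"
    r"(?P<content>.*?)(\]?)\))"
)

for sample in ("<(depth)", "<!(echo hi)", "<!@(echo hi)",
               "<|(files.txt a b)", "<!pymod_do_main(foo --bar)"):
    m = early_variable_re.search(sample)
    print(sample, "->", {g: m.group(g) for g in ("type", "command_string", "content")})
```

The `matches.reverse()` above is the load-bearing trick: editing right-to-left keeps the spans of earlier (leftward) matches valid while the string is rewritten. Distilled into a small sketch:

```python
import re

def replace_right_to_left(text, pattern, repl):
    matches = list(re.finditer(pattern, text))
    matches.reverse()  # rightmost first, so earlier spans stay valid
    for m in matches:
        text = text[: m.start()] + repl(m) + text[m.end():]
    return text

values = {"depth": "../..", "os": "linux"}
print(replace_right_to_left("<(depth)/out/<(os)", r"<\((\w+)\)",
                            lambda m: values[m.group(1)]))
# -> ../../out/linux
```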
- if file_list: - processed_variables = gyp.simple_copy.deepcopy(variables) - ProcessListFiltersInDict(contents, processed_variables) - # Recurse to expand variables in the contents - contents = ExpandVariables(contents, phase, processed_variables, build_file) - else: - # Recurse to expand variables in the contents - contents = ExpandVariables(contents, phase, variables, build_file) - - # Strip off leading/trailing whitespace so that variable matches are - # simpler below (and because they are rarely needed). - contents = contents.strip() - - # expand_to_list is true if an @ variant is used. In that case, - # the expansion should result in a list. Note that the caller - # is to be expecting a list in return, and not all callers do - # because not all are working in list context. Also, for list - # expansions, there can be no other text besides the variable - # expansion in the input string. - expand_to_list = "@" in match["type"] and input_str == replacement - - if run_command or file_list: - # Find the build file's directory, so commands can be run or file lists - # generated relative to it. - build_file_dir = os.path.dirname(build_file) - if build_file_dir == "" and not file_list: - # If build_file is just a leaf filename indicating a file in the - # current directory, build_file_dir might be an empty string. Set - # it to None to signal to subprocess.Popen that it should run the - # command in the current directory. - build_file_dir = None - - # Support <|(listfile.txt ...) which generates a file - # containing items from a gyp list, generated at gyp time. - # This works around actions/rules which have more inputs than will - # fit on the command line. - if file_list: - contents_list = contents if type(contents) is list else contents.split(" ") - replacement = contents_list[0] - if os.path.isabs(replacement): - raise GypError('| cannot handle absolute paths, got "%s"' % replacement) - - if not generator_filelist_paths: - path = os.path.join(build_file_dir, replacement) - else: - if os.path.isabs(build_file_dir): - toplevel = generator_filelist_paths["toplevel"] - rel_build_file_dir = gyp.common.RelativePath( - build_file_dir, toplevel - ) - else: - rel_build_file_dir = build_file_dir - qualified_out_dir = generator_filelist_paths["qualified_out_dir"] - path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement) - gyp.common.EnsureDirExists(path) - - replacement = gyp.common.RelativePath(path, build_file_dir) - f = gyp.common.WriteOnDiff(path) - for i in contents_list[1:]: - f.write("%s\n" % i) - f.close() - - elif run_command: - use_shell = True - if match["is_array"]: - contents = eval(contents) - use_shell = False - - # Check for a cached value to avoid executing commands, or generating - # file lists more than once. The cache key contains the command to be - # run as well as the directory to run it from, to account for commands - # that depend on their current directory. - # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory, - # someone could author a set of GYP files where each time the command - # is invoked it produces different output by design. When the need - # arises, the syntax should be extended to support no caching off a - # command's output so it is run every time. 
-            cache_key = (str(contents), build_file_dir)
-            cached_value = cached_command_results.get(cache_key, None)
-            if cached_value is None:
-                gyp.DebugOutput(
-                    gyp.DEBUG_VARIABLES,
-                    "Executing command '%s' in directory '%s'",
-                    contents,
-                    build_file_dir,
-                )
-
-                replacement = ""
-
-                if command_string == "pymod_do_main":
-                    # <!pymod_do_main(modulename, param eters) loads |modulename| as a
-                    # python module and then calls that module's DoMain() function,
-                    # passing ["param", "eters"] as a single list string argument.
-                    oldwd = os.getcwd()  # Python doesn't like os.open('.'): no fchdir.
-                    if build_file_dir:  # build_file_dir may be None (see above).
-                        os.chdir(build_file_dir)
-                    sys.path.append(os.getcwd())
-                    try:
-                        parsed_contents = shlex.split(contents)
-                        try:
-                            py_module = __import__(parsed_contents[0])
-                        except ImportError as e:
-                            raise GypError(
-                                "Error importing pymod_do_main "
-                                "module (%s): %s" % (parsed_contents[0], e)
-                            )
-                        replacement = str(
-                            py_module.DoMain(parsed_contents[1:])
-                        ).rstrip()
-                    finally:
-                        sys.path.pop()
-                        os.chdir(oldwd)
-                    assert replacement is not None
-                elif command_string:
-                    raise GypError(
-                        "Unknown command string '%s' in '%s'."
-                        % (command_string, contents)
-                    )
-                else:
-                    # Fix up command with platform specific workarounds.
-                    contents = FixupPlatformCommand(contents)
-                    try:
-                        # stderr will be printed no matter what
-                        result = subprocess.run(
-                            contents,
-                            stdout=subprocess.PIPE,
-                            shell=use_shell,
-                            cwd=build_file_dir,
-                            check=False,
-                        )
-                    except Exception as e:
-                        raise GypError(
-                            "%s while executing command '%s' in %s"
-                            % (e, contents, build_file)
-                        )
-
-                    if result.returncode > 0:
-                        raise GypError(
-                            "Call to '%s' returned exit status %d while in %s."
-                            % (contents, result.returncode, build_file)
-                        )
-                    replacement = result.stdout.decode("utf-8").rstrip()
-
-                cached_command_results[cache_key] = replacement
-            else:
-                gyp.DebugOutput(
-                    gyp.DEBUG_VARIABLES,
-                    "Had cache value for command '%s' in directory '%s'",
-                    contents,
-                    build_file_dir,
-                )
-                replacement = cached_value
-
-        else:
-            if contents not in variables:
-                if contents[-1] in ["!", "/"]:
-                    # In order to allow cross-compiles (nacl) to happen more naturally,
-                    # we will allow references to >(sources/) etc. to resolve to
-                    # an empty list if undefined. This allows actions to:
-                    # 'action!': [
-                    #   '>@(_sources!)',
-                    # ],
-                    # 'action/': [
-                    #   '>@(_sources/)',
-                    # ],
-                    replacement = []
-                else:
-                    raise GypError(
-                        "Undefined variable " + contents + " in " + build_file
-                    )
-            else:
-                replacement = variables[contents]
-
-        if isinstance(replacement, bytes) and not isinstance(replacement, str):
-            replacement = replacement.decode("utf-8")  # done on Python 3 only
-        if type(replacement) is list:
-            for item in replacement:
-                if isinstance(item, bytes) and not isinstance(item, str):
-                    item = item.decode("utf-8")  # done on Python 3 only
-                if not contents[-1] == "/" and type(item) not in (str, int):
-                    raise GypError(
-                        "Variable "
-                        + contents
-                        + " must expand to a string or list of strings; "
-                        + "list contains a "
-                        + item.__class__.__name__
-                    )
-            # Run through the list and handle variable expansions in it. Since
-            # the list is guaranteed not to contain dicts, this won't do anything
-            # with conditions sections.
-            ProcessVariablesAndConditionsInList(
-                replacement, phase, variables, build_file
-            )
-        elif type(replacement) not in (str, int):
-            raise GypError(
-                "Variable "
-                + contents
-                + " must expand to a string or list of strings; "
-                + "found a "
-                + replacement.__class__.__name__
-            )
-
-        if expand_to_list:
-            # Expanding in list context. It's guaranteed that there's only one
-            # replacement to do in |input_str| and that it's this replacement. See
-            # above.
-            if type(replacement) is list:
-                # If it's already a list, make a copy.
-                output = replacement[:]
-            else:
-                # Split it the same way sh would split arguments.
-                output = shlex.split(str(replacement))
-        else:
-            # Expanding in string context.
-            encoded_replacement = ""
-            if type(replacement) is list:
-                # When expanding a list into string context, turn the list items
-                # into a string in a way that will work with a subprocess call.
-                #
-                # TODO(mark): This isn't completely correct.  This should
-                # call a generator-provided function that observes the
-                # proper list-to-argument quoting rules on a specific
-                # platform instead of just calling the POSIX encoding
-                # routine.
-                encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
-            else:
-                encoded_replacement = replacement
-
-            output = (
-                output[:replace_start] + str(encoded_replacement) + output[replace_end:]
-            )
-        # Prepare for the next match iteration.
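The command branch above reduces to memoized subprocess execution, keyed on both the command string and the directory it runs in, since a command's output may depend on its cwd. A sketch of that shape (`run_cached` is a hypothetical stand-in, not the vendored helper):

```python
import subprocess
import sys

_command_cache = {}

def run_cached(argv, cwd=None):
    """Run a gyp-time command once per (command, directory) pair."""
    key = (str(argv), cwd)
    if key in _command_cache:
        return _command_cache[key]
    result = subprocess.run(argv, stdout=subprocess.PIPE, cwd=cwd, check=False)
    if result.returncode > 0:
        raise RuntimeError(f"{argv} exited with status {result.returncode}")
    output = result.stdout.decode("utf-8").rstrip()
    _command_cache[key] = output
    return output

print(run_cached([sys.executable, "-c", "print('gyp-time output')"]))
print(run_cached([sys.executable, "-c", "print('gyp-time output')"]))  # cache hit
```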
- input_str = output - - if output == input: - gyp.DebugOutput( - gyp.DEBUG_VARIABLES, - "Found only identity matches on %r, avoiding infinite " "recursion.", - output, - ) - else: - # Look for more matches now that we've replaced some, to deal with - # expanding local variables (variables defined in the same - # variables block as this one). - gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output) - if type(output) is list: - if output and type(output[0]) is list: - # Leave output alone if it's a list of lists. - # We don't want such lists to be stringified. - pass - else: - new_output = [] - for item in output: - new_output.append( - ExpandVariables(item, phase, variables, build_file) - ) - output = new_output - else: - output = ExpandVariables(output, phase, variables, build_file) - - # Convert all strings that are canonically-represented integers into integers. - if type(output) is list: - for index, outstr in enumerate(output): - if IsStrCanonicalInt(outstr): - output[index] = int(outstr) - elif IsStrCanonicalInt(output): - output = int(output) - - return output - - -# The same condition is often evaluated over and over again so it -# makes sense to cache as much as possible between evaluations. -cached_conditions_asts = {} - - -def EvalCondition(condition, conditions_key, phase, variables, build_file): - """Returns the dict that should be used or None if the result was - that nothing should be used.""" - if type(condition) is not list: - raise GypError(conditions_key + " must be a list") - if len(condition) < 2: - # It's possible that condition[0] won't work in which case this - # attempt will raise its own IndexError. That's probably fine. - raise GypError( - conditions_key - + " " - + condition[0] - + " must be at least length 2, not " - + str(len(condition)) - ) - - i = 0 - result = None - while i < len(condition): - cond_expr = condition[i] - true_dict = condition[i + 1] - if type(true_dict) is not dict: - raise GypError( - f"{conditions_key} {cond_expr} must be followed by a dictionary, " - f"not {type(true_dict)}" - ) - if len(condition) > i + 2 and type(condition[i + 2]) is dict: - false_dict = condition[i + 2] - i = i + 3 - if i != len(condition): - raise GypError( - f"{conditions_key} {cond_expr} has " - f"{len(condition) - i} unexpected trailing items" - ) - else: - false_dict = None - i = i + 2 - if result is None: - result = EvalSingleCondition( - cond_expr, true_dict, false_dict, phase, variables, build_file - ) - - return result - - -def EvalSingleCondition(cond_expr, true_dict, false_dict, phase, variables, build_file): - """Returns true_dict if cond_expr evaluates to true, and false_dict - otherwise.""" - # Do expansions on the condition itself. Since the condition can naturally - # contain variable references without needing to resort to GYP expansion - # syntax, this is of dubious value for variables, but someone might want to - # use a command expansion directly inside a condition. 
-    cond_expr_expanded = ExpandVariables(cond_expr, phase, variables, build_file)
-    if type(cond_expr_expanded) not in (str, int):
-        raise ValueError(
-            "Variable expansion in this context permits str and int "
-            + "only, found "
-            + cond_expr_expanded.__class__.__name__
-        )
-
-    try:
-        if cond_expr_expanded in cached_conditions_asts:
-            ast_code = cached_conditions_asts[cond_expr_expanded]
-        else:
-            ast_code = compile(cond_expr_expanded, "<string>", "eval")
-            cached_conditions_asts[cond_expr_expanded] = ast_code
-        env = {"__builtins__": {}, "v": Version}
-        if eval(ast_code, env, variables):
-            return true_dict
-        return false_dict
-    except SyntaxError as e:
-        syntax_error = SyntaxError(
-            "%s while evaluating condition '%s' in %s "
-            "at character %d." % (str(e.args[0]), e.text, build_file, e.offset),
-            e.filename,
-            e.lineno,
-            e.offset,
-            e.text,
-        )
-        raise syntax_error
-    except NameError as e:
-        gyp.common.ExceptionAppend(
-            e,
-            f"while evaluating condition '{cond_expr_expanded}' in {build_file}",
-        )
-        raise GypError(e)
-
-
-def ProcessConditionsInDict(the_dict, phase, variables, build_file):
-    # Process a 'conditions' or 'target_conditions' section in the_dict,
-    # depending on phase.
-    # early -> conditions
-    # late -> target_conditions
-    # latelate -> no conditions
-    #
-    # Each item in a conditions list consists of cond_expr, a string expression
-    # evaluated as the condition, and true_dict, a dict that will be merged into
-    # the_dict if cond_expr evaluates to true.  Optionally, a third item,
-    # false_dict, may be present.  false_dict is merged into the_dict if
-    # cond_expr evaluates to false.
-    #
-    # Any dict merged into the_dict will be recursively processed for nested
-    # conditionals and other expansions, also according to phase, immediately
-    # prior to being merged.
-
-    if phase == PHASE_EARLY:
-        conditions_key = "conditions"
-    elif phase == PHASE_LATE:
-        conditions_key = "target_conditions"
-    elif phase == PHASE_LATELATE:
-        return
-    else:
-        assert False
-
-    if conditions_key not in the_dict:
-        return
-
-    conditions_list = the_dict[conditions_key]
-    # Unhook the conditions list, it's no longer needed.
-    del the_dict[conditions_key]
-
-    for condition in conditions_list:
-        merge_dict = EvalCondition(
-            condition, conditions_key, phase, variables, build_file
-        )
-
-        if merge_dict is not None:
-            # Expand variables and nested conditionals in the merge_dict before
-            # merging it.
-            ProcessVariablesAndConditionsInDict(
-                merge_dict, phase, variables, build_file
-            )
-
-            MergeDicts(the_dict, merge_dict, build_file, build_file)
-
-
-def LoadAutomaticVariablesFromDict(variables, the_dict):
-    # Any keys with plain string, int, or list values in the_dict become
-    # automatic variables. The variable name is the key name with a "_"
-    # character prepended.
-    for key, value in the_dict.items():
-        if type(value) in (str, int, list):
-            variables["_" + key] = value
-
-
-def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
-    # Any keys in the_dict's "variables" dict, if it has one, become variables.
-    # The variable name is the key name in the "variables" dict.
-    # Variables that end with the % character are set only if they are unset in
-    # the variables dict. the_dict_key is the name of the key that accesses
-    # the_dict in the_dict's parent dict. If the_dict's parent is not a dict
-    # (it could be a list or it could be parentless because it is a root dict),
-    # the_dict_key will be None.
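Condition strings recur across thousands of target dicts, so compiling each one once and caching the code object pays off; the empty `__builtins__` means a condition expression can only see gyp variables (plus the `v` alias for packaging's `Version` above). A reduced sketch of that compile-once/eval-many pattern:

```python
_ast_cache = {}

def eval_condition_expr(expr, variables):
    """Compile a condition string once, then evaluate it against
    successive variables dicts -- the caching pattern used above."""
    code = _ast_cache.get(expr)
    if code is None:
        code = compile(expr, "<string>", "eval")
        _ast_cache[expr] = code
    # Empty __builtins__ keeps conditions from reaching into the
    # interpreter; only the gyp variables are visible.
    return bool(eval(code, {"__builtins__": {}}, dict(variables)))

print(eval_condition_expr('OS=="linux" and target_arch=="x64"',
                          {"OS": "linux", "target_arch": "x64"}))  # True
```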
- for key, value in the_dict.get("variables", {}).items(): - if type(value) not in (str, int, list): - continue - - if key.endswith("%"): - variable_name = key[:-1] - if variable_name in variables: - # If the variable is already set, don't set it. - continue - if the_dict_key == "variables" and variable_name in the_dict: - # If the variable is set without a % in the_dict, and the_dict is a - # variables dict (making |variables| a variables sub-dict of a - # variables dict), use the_dict's definition. - value = the_dict[variable_name] - else: - variable_name = key - - variables[variable_name] = value - - -def ProcessVariablesAndConditionsInDict( - the_dict, phase, variables_in, build_file, the_dict_key=None -): - """Handle all variable and command expansion and conditional evaluation. - - This function is the public entry point for all variable expansions and - conditional evaluations. The variables_in dictionary will not be modified - by this function. - """ - - # Make a copy of the variables_in dict that can be modified during the - # loading of automatics and the loading of the variables dict. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - - if "variables" in the_dict: - # Make sure all the local variables are added to the variables - # list before we process them so that you can reference one - # variable from another. They will be fully expanded by recursion - # in ExpandVariables. - for key, value in the_dict["variables"].items(): - variables[key] = value - - # Handle the associated variables dict first, so that any variable - # references within can be resolved prior to using them as variables. - # Pass a copy of the variables dict to avoid having it be tainted. - # Otherwise, it would have extra automatics added for everything that - # should just be an ordinary variable in this scope. - ProcessVariablesAndConditionsInDict( - the_dict["variables"], phase, variables, build_file, "variables" - ) - - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - for key, value in the_dict.items(): - # Skip "variables", which was already processed if present. - if key != "variables" and type(value) is str: - expanded = ExpandVariables(value, phase, variables, build_file) - if type(expanded) not in (str, int): - raise ValueError( - "Variable expansion in this context permits str and int " - + "only, found " - + expanded.__class__.__name__ - + " for " - + key - ) - the_dict[key] = expanded - - # Variable expansion may have resulted in changes to automatics. Reload. - # TODO(mark): Optimization: only reload if no changes were made. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - # Process conditions in this dict. This is done after variable expansion - # so that conditions may take advantage of expanded variables. For example, - # if the_dict contains: - # {'type': '<(library_type)', - # 'conditions': [['_type=="static_library"', { ... }]]}, - # _type, as used in the condition, will only be set to the value of - # library_type if variable expansion is performed before condition - # processing. However, condition processing should occur prior to recursion - # so that variables (both automatic and "variables" dict type) may be - # adjusted by conditions sections, merged into the_dict, and have the - # intended impact on contained dicts. 
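The trailing-`%` convention above gives .gyp files soft defaults: `'foo%'` only binds `foo` when nothing else (a parent scope, the command line) has already set it. In miniature:

```python
def load_variables(variables, variables_dict):
    """Sketch of the '%' handling above: 'foo%' only sets 'foo' when
    the caller has not already provided a value."""
    for key, value in variables_dict.items():
        if key.endswith("%"):
            name = key[:-1]
            if name in variables:
                continue  # already set by a parent scope or the command line
        else:
            name = key
        variables[name] = value
    return variables

scope = {"target_arch": "arm64"}  # e.g. set via -D on the command line
print(load_variables(scope, {"target_arch%": "x64", "use_goma%": 0}))
# -> {'target_arch': 'arm64', 'use_goma': 0}
```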
- # - # This arrangement means that a "conditions" section containing a "variables" - # section will only have those variables effective in subdicts, not in - # the_dict. The workaround is to put a "conditions" section within a - # "variables" section. For example: - # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]], - # 'defines': ['<(define)'], - # 'my_subdict': {'defines': ['<(define)']}}, - # will not result in "IS_MAC" being appended to the "defines" list in the - # current scope but would result in it being appended to the "defines" list - # within "my_subdict". By comparison: - # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]}, - # 'defines': ['<(define)'], - # 'my_subdict': {'defines': ['<(define)']}}, - # will append "IS_MAC" to both "defines" lists. - - # Evaluate conditions sections, allowing variable expansions within them - # as well as nested conditionals. This will process a 'conditions' or - # 'target_conditions' section, perform appropriate merging and recursive - # conditional and variable processing, and then remove the conditions section - # from the_dict if it is present. - ProcessConditionsInDict(the_dict, phase, variables, build_file) - - # Conditional processing may have resulted in changes to automatics or the - # variables dict. Reload. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - # Recurse into child dicts, or process child lists which may result in - # further recursion into descendant dicts. - for key, value in the_dict.items(): - # Skip "variables" and string values, which were already processed if - # present. - if key == "variables" or type(value) is str: - continue - if type(value) is dict: - # Pass a copy of the variables dict so that subdicts can't influence - # parents. - ProcessVariablesAndConditionsInDict( - value, phase, variables, build_file, key - ) - elif type(value) is list: - # The list itself can't influence the variables dict, and - # ProcessVariablesAndConditionsInList will make copies of the variables - # dict if it needs to pass it to something that can influence it. No - # copy is necessary here. - ProcessVariablesAndConditionsInList(value, phase, variables, build_file) - elif type(value) is not int: - raise TypeError("Unknown type " + value.__class__.__name__ + " for " + key) - - -def ProcessVariablesAndConditionsInList(the_list, phase, variables, build_file): - # Iterate using an index so that new values can be assigned into the_list. - index = 0 - while index < len(the_list): - item = the_list[index] - if type(item) is dict: - # Make a copy of the variables dict so that it won't influence anything - # outside of its own scope. - ProcessVariablesAndConditionsInDict(item, phase, variables, build_file) - elif type(item) is list: - ProcessVariablesAndConditionsInList(item, phase, variables, build_file) - elif type(item) is str: - expanded = ExpandVariables(item, phase, variables, build_file) - if type(expanded) in (str, int): - the_list[index] = expanded - elif type(expanded) is list: - the_list[index : index + 1] = expanded - index += len(expanded) - - # index now identifies the next item to examine. Continue right now - # without falling into the index increment below. 
-                continue
-            else:
-                raise ValueError(
-                    "Variable expansion in this context permits strings and "
-                    + "lists only, found "
-                    + expanded.__class__.__name__
-                    + " at "
-                    + str(index)
-                )
-        elif type(item) is not int:
-            raise TypeError(
-                "Unknown type " + item.__class__.__name__ + " at index " + str(index)
-            )
-        index = index + 1
-
-
-def BuildTargetsDict(data):
-    """Builds a dict mapping fully-qualified target names to their target dicts.
-
-    |data| is a dict mapping loaded build files by pathname relative to the
-    current directory.  Values in |data| are build file contents.  For each
-    |data| value with a "targets" key, the value of the "targets" key is taken
-    as a list containing target dicts.  Each target's fully-qualified name is
-    constructed from the pathname of the build file (|data| key) and its
-    "target_name" property.  These fully-qualified names are used as the keys
-    in the returned dict.  These keys provide access to the target dicts,
-    the dicts in the "targets" lists.
-    """
-
-    targets = {}
-    for build_file in data["target_build_files"]:
-        for target in data[build_file].get("targets", []):
-            target_name = gyp.common.QualifiedTarget(
-                build_file, target["target_name"], target["toolset"]
-            )
-            if target_name in targets:
-                raise GypError("Duplicate target definitions for " + target_name)
-            targets[target_name] = target
-
-    return targets
-
-
-def QualifyDependencies(targets):
-    """Make dependency links fully-qualified relative to the current directory.
-
-    |targets| is a dict mapping fully-qualified target names to their target
-    dicts.  For each target in this dict, keys known to contain dependency
-    links are examined, and any dependencies referenced will be rewritten
-    so that they are fully-qualified and relative to the current directory.
-    All rewritten dependencies are suitable for use as keys to |targets| or a
-    similar dict.
-    """
-
-    all_dependency_sections = [
-        dep + op for dep in dependency_sections for op in ("", "!", "/")
-    ]
-
-    for target, target_dict in targets.items():
-        target_build_file = gyp.common.BuildFile(target)
-        toolset = target_dict["toolset"]
-        for dependency_key in all_dependency_sections:
-            dependencies = target_dict.get(dependency_key, [])
-            for index, dep in enumerate(dependencies):
-                dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
-                    target_build_file, dep, toolset
-                )
-                if not multiple_toolsets:
-                    # Ignore toolset specification in the dependency if it is
-                    # specified.
-                    dep_toolset = toolset
-                dependency = gyp.common.QualifiedTarget(
-                    dep_file, dep_target, dep_toolset
-                )
-                dependencies[index] = dependency
-
-                # Make sure anything appearing in a list other than
-                # "dependencies" also appears in the "dependencies" list.
-                if (
-                    dependency_key != "dependencies"
-                    and dependency not in target_dict["dependencies"]
-                ):
-                    raise GypError(
-                        "Found "
-                        + dependency
-                        + " in "
-                        + dependency_key
-                        + " of "
-                        + target
-                        + ", but not in dependencies"
-                    )
-
-
-def ExpandWildcardDependencies(targets, data):
-    """Expands dependencies specified as build_file:*.
-
-    For each target in |targets|, examines sections containing links to other
-    targets.  If any such section contains a link of the form build_file:*, it
-    is taken as a wildcard link, and is expanded to list each target in
-    build_file.  The |data| dict provides access to build file dicts.
-
-    Any target that does not wish to be included by wildcard can provide an
-    optional "suppress_wildcard" key in its target dict.  When present and
-    true, a wildcard dependency link will not include such targets.
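Throughout this file, targets are keyed by fully-qualified names that combine build file, target name, and toolset. Assuming the usual `file.gyp:name#toolset` convention from `gyp.common`, the round trip looks like this:

```python
def qualified_target(build_file, target, toolset):
    # Fully-qualified form used as the keys of |targets|, assuming the
    # "<build_file>:<name>#<toolset>" convention from gyp.common.
    return f"{build_file}:{target}#{toolset}"

def parse_qualified_target(qualified):
    build_file, rest = qualified.rsplit(":", 1)
    target, toolset = rest.split("#", 1)
    return build_file, target, toolset

key = qualified_target("net/net.gyp", "net_unittests", "target")
print(key)                          # net/net.gyp:net_unittests#target
print(parse_qualified_target(key))  # ('net/net.gyp', 'net_unittests', 'target')
```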
- - All dependency names, including the keys to |targets| and the values in each - dependency list, must be qualified when this function is called. - """ - - for target, target_dict in targets.items(): - target_build_file = gyp.common.BuildFile(target) - for dependency_key in dependency_sections: - dependencies = target_dict.get(dependency_key, []) - - # Loop this way instead of "for dependency in" or "for index in range" - # because the dependencies list will be modified within the loop body. - index = 0 - while index < len(dependencies): - ( - dependency_build_file, - dependency_target, - dependency_toolset, - ) = gyp.common.ParseQualifiedTarget(dependencies[index]) - if dependency_target != "*" and dependency_toolset != "*": - # Not a wildcard. Keep it moving. - index = index + 1 - continue - - if dependency_build_file == target_build_file: - # It's an error for a target to depend on all other targets in - # the same file, because a target cannot depend on itself. - raise GypError( - "Found wildcard in " - + dependency_key - + " of " - + target - + " referring to same build file" - ) - - # Take the wildcard out and adjust the index so that the next - # dependency in the list will be processed the next time through the - # loop. - del dependencies[index] - index = index - 1 - - # Loop through the targets in the other build file, adding them to - # this target's list of dependencies in place of the removed - # wildcard. - dependency_target_dicts = data[dependency_build_file]["targets"] - for dependency_target_dict in dependency_target_dicts: - if int(dependency_target_dict.get("suppress_wildcard", False)): - continue - dependency_target_name = dependency_target_dict["target_name"] - if ( - dependency_target not in {"*", dependency_target_name} - ): - continue - dependency_target_toolset = dependency_target_dict["toolset"] - if ( - dependency_toolset not in {"*", dependency_target_toolset} - ): - continue - dependency = gyp.common.QualifiedTarget( - dependency_build_file, - dependency_target_name, - dependency_target_toolset, - ) - index = index + 1 - dependencies.insert(index, dependency) - - index = index + 1 - - -def Unify(items): - """Removes duplicate elements from items, keeping the first element.""" - seen = {} - return [seen.setdefault(e, e) for e in items if e not in seen] - - -def RemoveDuplicateDependencies(targets): - """Makes sure every dependency appears only once in all targets's dependency - lists.""" - for target_name, target_dict in targets.items(): - for dependency_key in dependency_sections: - dependencies = target_dict.get(dependency_key, []) - if dependencies: - target_dict[dependency_key] = Unify(dependencies) - - -def Filter(items, item): - """Removes item from items.""" - res = {} - return [res.setdefault(e, e) for e in items if e != item] - - -def RemoveSelfDependencies(targets): - """Remove self dependencies from targets that have the prune_self_dependency - variable set.""" - for target_name, target_dict in targets.items(): - for dependency_key in dependency_sections: - dependencies = target_dict.get(dependency_key, []) - if dependencies: - for t in dependencies: - if t == target_name and ( - targets[t] - .get("variables", {}) - .get("prune_self_dependency", 0) - ): - target_dict[dependency_key] = Filter( - dependencies, target_name - ) - - -def RemoveLinkDependenciesFromNoneTargets(targets): - """Remove dependencies having the 'link_dependency' attribute from the 'none' - targets.""" - for target_name, target_dict in targets.items(): - for dependency_key in 
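`Unify` and `Filter` above are small order-preserving helpers; plain equivalents behave identically (`filter_out` is a hypothetical name for the same behavior as `Filter`, whose `setdefault` idiom is just an ordinary filtering list comprehension in disguise):

```python
def unify(items):
    """Order-preserving dedupe, keeping the first occurrence."""
    seen = {}
    return [seen.setdefault(e, e) for e in items if e not in seen]

def filter_out(items, item):
    """Remove every occurrence of |item| while keeping order."""
    return [e for e in items if e != item]

deps = ["base.gyp:base#target", "net.gyp:net#target", "base.gyp:base#target"]
print(unify(deps))                             # duplicate 'base' collapsed
print(filter_out(deps, "net.gyp:net#target"))  # pruned dependency removed
```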
dependency_sections:
-            dependencies = target_dict.get(dependency_key, [])
-            if dependencies:
-                for t in dependencies:
-                    if target_dict.get("type", None) == "none":
-                        if targets[t].get("variables", {}).get("link_dependency", 0):
-                            target_dict[dependency_key] = Filter(
-                                target_dict[dependency_key], t
-                            )
-
-
-class DependencyGraphNode:
-    """
-
-    Attributes:
-        ref: A reference to an object that this DependencyGraphNode represents.
-        dependencies: List of DependencyGraphNodes on which this one depends.
-        dependents: List of DependencyGraphNodes that depend on this one.
-    """
-
-    class CircularException(GypError):
-        pass
-
-    def __init__(self, ref):
-        self.ref = ref
-        self.dependencies = []
-        self.dependents = []
-
-    def __repr__(self):
-        return "<DependencyGraphNode: %r>" % self.ref
-
-    def FlattenToList(self):
-        # flat_list is the sorted list of dependencies - actually, the list items
-        # are the "ref" attributes of DependencyGraphNodes.  Every target will
-        # appear in flat_list after all of its dependencies, and before all of
-        # its dependents.
-        flat_list = OrderedSet()
-
-        def ExtractNodeRef(node):
-            """Extracts the object that the node represents from the given node."""
-            return node.ref
-
-        # in_degree_zeros is the list of DependencyGraphNodes that have no
-        # dependencies not in flat_list.  Initially, it is a copy of the children
-        # of this node, because when the graph was built, nodes with no
-        # dependencies were made implicit dependents of the root node.
-        in_degree_zeros = sorted(self.dependents[:], key=ExtractNodeRef)
-
-        while in_degree_zeros:
-            # Nodes in in_degree_zeros have no dependencies not in flat_list, so
-            # they can be appended to flat_list.  Take these nodes out of
-            # in_degree_zeros as work progresses, so that the next node to
-            # process from the list can always be accessed at a consistent
-            # position.
-            node = in_degree_zeros.pop()
-            flat_list.add(node.ref)
-
-            # Look at dependents of the node just added to flat_list.  Some of
-            # them may now belong in in_degree_zeros.
-            for node_dependent in sorted(node.dependents, key=ExtractNodeRef):
-                is_in_degree_zero = True
-                # TODO: We want to check through the
-                # node_dependent.dependencies list but if it's long and we
-                # always start at the beginning, then we get O(n^2) behaviour.
-                for node_dependent_dependency in sorted(
-                    node_dependent.dependencies, key=ExtractNodeRef
-                ):
-                    if node_dependent_dependency.ref not in flat_list:
-                        # The dependent has one or more dependencies not in
-                        # flat_list.  There will be more chances to add it to
-                        # flat_list when examining it again as a dependent of
-                        # those other dependencies, provided that there are no
-                        # cycles.
-                        is_in_degree_zero = False
-                        break
-
-                if is_in_degree_zero:
-                    # All of the dependent's dependencies are already in
-                    # flat_list.  Add it to in_degree_zeros where it will be
-                    # processed in a future iteration of the outer loop.
-                    in_degree_zeros += [node_dependent]
-
-        return list(flat_list)
-
-    def FindCycles(self):
-        """
-        Returns a list of cycles in the graph, where each cycle is its own list.
- """ - results = [] - visited = set() - - def Visit(node, path): - for child in node.dependents: - if child in path: - results.append([child] + path[: path.index(child) + 1]) - elif child not in visited: - visited.add(child) - Visit(child, [child] + path) - - visited.add(self) - Visit(self, [self]) - - return results - - def DirectDependencies(self, dependencies=None): - """Returns a list of just direct dependencies.""" - if dependencies is None: - dependencies = [] - - for dependency in self.dependencies: - # Check for None, corresponding to the root node. - if dependency.ref and dependency.ref not in dependencies: - dependencies.append(dependency.ref) - - return dependencies - - def _AddImportedDependencies(self, targets, dependencies=None): - """Given a list of direct dependencies, adds indirect dependencies that - other dependencies have declared to export their settings. - - This method does not operate on self. Rather, it operates on the list - of dependencies in the |dependencies| argument. For each dependency in - that list, if any declares that it exports the settings of one of its - own dependencies, those dependencies whose settings are "passed through" - are added to the list. As new items are added to the list, they too will - be processed, so it is possible to import settings through multiple levels - of dependencies. - - This method is not terribly useful on its own, it depends on being - "primed" with a list of direct dependencies such as one provided by - DirectDependencies. DirectAndImportedDependencies is intended to be the - public entry point. - """ - - if dependencies is None: - dependencies = [] - - index = 0 - while index < len(dependencies): - dependency = dependencies[index] - dependency_dict = targets[dependency] - # Add any dependencies whose settings should be imported to the list - # if not already present. Newly-added items will be checked for - # their own imports when the list iteration reaches them. - # Rather than simply appending new items, insert them after the - # dependency that exported them. This is done to more closely match - # the depth-first method used by DeepDependencies. - add_index = 1 - for imported_dependency in dependency_dict.get( - "export_dependent_settings", [] - ): - if imported_dependency not in dependencies: - dependencies.insert(index + add_index, imported_dependency) - add_index = add_index + 1 - index = index + 1 - - return dependencies - - def DirectAndImportedDependencies(self, targets, dependencies=None): - """Returns a list of a target's direct dependencies and all indirect - dependencies that a dependency has advertised settings should be exported - through the dependency for. - """ - - dependencies = self.DirectDependencies(dependencies) - return self._AddImportedDependencies(targets, dependencies) - - def DeepDependencies(self, dependencies=None): - """Returns an OrderedSet of all of a target's dependencies, recursively.""" - if dependencies is None: - # Using a list to get ordered output and a set to do fast "is it - # already added" checks. - dependencies = OrderedSet() - - for dependency in self.dependencies: - # Check for None, corresponding to the root node. 
- if dependency.ref is None: - continue - if dependency.ref not in dependencies: - dependency.DeepDependencies(dependencies) - dependencies.add(dependency.ref) - - return dependencies - - def _LinkDependenciesInternal( - self, targets, include_shared_libraries, dependencies=None, initial=True - ): - """Returns an OrderedSet of dependency targets that are linked - into this target. - - This function has a split personality, depending on the setting of - |initial|. Outside callers should always leave |initial| at its default - setting. - - When adding a target to the list of dependencies, this function will - recurse into itself with |initial| set to False, to collect dependencies - that are linked into the linkable target for which the list is being built. - - If |include_shared_libraries| is False, the resulting dependencies will not - include shared_library targets that are linked into this target. - """ - if dependencies is None: - # Using a list to get ordered output and a set to do fast "is it - # already added" checks. - dependencies = OrderedSet() - - # Check for None, corresponding to the root node. - if self.ref is None: - return dependencies - - # It's kind of sucky that |targets| has to be passed into this function, - # but that's presently the easiest way to access the target dicts so that - # this function can find target types. - - if "target_name" not in targets[self.ref]: - raise GypError("Missing 'target_name' field in target.") - - if "type" not in targets[self.ref]: - raise GypError( - "Missing 'type' field in target %s" % targets[self.ref]["target_name"] - ) - - target_type = targets[self.ref]["type"] - - is_linkable = target_type in linkable_types - - if initial and not is_linkable: - # If this is the first target being examined and it's not linkable, - # return an empty list of link dependencies, because the link - # dependencies are intended to apply to the target itself (initial is - # True) and this target won't be linked. - return dependencies - - # Don't traverse 'none' targets if explicitly excluded. - if target_type == "none" and not targets[self.ref].get( - "dependencies_traverse", True - ): - dependencies.add(self.ref) - return dependencies - - # Executables, mac kernel extensions, windows drivers and loadable modules - # are already fully and finally linked. Nothing else can be a link - # dependency of them, there can only be dependencies in the sense that a - # dependent target might run an executable or load the loadable_module. - if not initial and target_type in ( - "executable", - "loadable_module", - "mac_kernel_extension", - "windows_driver", - ): - return dependencies - - # Shared libraries are already fully linked. They should only be included - # in |dependencies| when adjusting static library dependencies (in order to - # link against the shared_library's import lib), but should not be included - # in |dependencies| when propagating link_settings. - # The |include_shared_libraries| flag controls which of these two cases we - # are handling. - if ( - not initial - and target_type == "shared_library" - and not include_shared_libraries - ): - return dependencies - - # The target is linkable, add it to the list of link dependencies. - if self.ref not in dependencies: - dependencies.add(self.ref) - if initial or not is_linkable: - # If this is a subsequent target and it's linkable, don't look any - # further for linkable dependencies, as they'll already be linked into - # this target linkable. 
Always look at dependencies of the initial - # target, and always look at dependencies of non-linkables. - for dependency in self.dependencies: - dependency._LinkDependenciesInternal( - targets, include_shared_libraries, dependencies, False - ) - - return dependencies - - def DependenciesForLinkSettings(self, targets): - """ - Returns a list of dependency targets whose link_settings should be merged - into this target. - """ - - # TODO(sbaig) Currently, chrome depends on the bug that shared libraries' - # link_settings are propagated. So for now, we will allow it, unless the - # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to - # False. Once chrome is fixed, we can remove this flag. - include_shared_libraries = targets[self.ref].get( - "allow_sharedlib_linksettings_propagation", True - ) - return self._LinkDependenciesInternal(targets, include_shared_libraries) - - def DependenciesToLinkAgainst(self, targets): - """ - Returns a list of dependency targets that are linked into this target. - """ - return self._LinkDependenciesInternal(targets, True) - - -def BuildDependencyList(targets): - # Create a DependencyGraphNode for each target. Put it into a dict for easy - # access. - dependency_nodes = {} - for target, spec in targets.items(): - if target not in dependency_nodes: - dependency_nodes[target] = DependencyGraphNode(target) - - # Set up the dependency links. Targets that have no dependencies are treated - # as dependent on root_node. - root_node = DependencyGraphNode(None) - for target, spec in targets.items(): - target_node = dependency_nodes[target] - dependencies = spec.get("dependencies") - if not dependencies: - target_node.dependencies = [root_node] - root_node.dependents.append(target_node) - else: - for dependency in dependencies: - dependency_node = dependency_nodes.get(dependency) - if not dependency_node: - raise GypError( - "Dependency '%s' not found while " - "trying to load target %s" % (dependency, target) - ) - target_node.dependencies.append(dependency_node) - dependency_node.dependents.append(target_node) - - flat_list = root_node.FlattenToList() - - # If there's anything left unvisited, there must be a circular dependency - # (cycle). - if len(flat_list) != len(targets): - if not root_node.dependents: - # If all targets have dependencies, add the first target as a dependent - # of root_node so that the cycle can be discovered from root_node. - target = next(iter(targets)) - target_node = dependency_nodes[target] - target_node.dependencies.append(root_node) - root_node.dependents.append(target_node) - - cycles = [] - for cycle in root_node.FindCycles(): - paths = [node.ref for node in cycle] - cycles.append("Cycle: %s" % " -> ".join(paths)) - raise DependencyGraphNode.CircularException( - "Cycles in dependency graph detected:\n" + "\n".join(cycles) - ) - - return [dependency_nodes, flat_list] - - -def VerifyNoGYPFileCircularDependencies(targets): - # Create a DependencyGraphNode for each gyp file containing a target. Put - # it into a dict for easy access. - dependency_nodes = {} - for target in targets: - build_file = gyp.common.BuildFile(target) - if build_file not in dependency_nodes: - dependency_nodes[build_file] = DependencyGraphNode(build_file) - - # Set up the dependency links. 
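The traversal rules in `_LinkDependenciesInternal` above condense to: a non-linkable root contributes nothing; fully-linked artifacts (executables, loadable modules, kernel extensions, drivers) are never linked into dependents; shared libraries are included or skipped by flag; and recursion continues only through the root and through non-linkables. A toy walk under those rules, before the link-setup loop below continues (all names hypothetical):

```python
LINKABLE = {"executable", "shared_library", "loadable_module",
            "mac_kernel_extension", "windows_driver"}
FULLY_LINKED = {"executable", "loadable_module",
                "mac_kernel_extension", "windows_driver"}

def link_deps(target, targets, deps=None, initial=True, include_shared=True):
    """Sketch of _LinkDependenciesInternal: collect what gets linked in."""
    if deps is None:
        deps = []
    ttype = targets[target]["type"]
    if initial and ttype not in LINKABLE:
        return deps  # a non-linkable root has no link dependencies
    if not initial and ttype in FULLY_LINKED:
        return deps  # already fully linked; never linked into dependents
    if not initial and ttype == "shared_library" and not include_shared:
        return deps
    if target not in deps:
        deps.append(target)
        if initial or ttype not in LINKABLE:
            for dep in targets[target].get("dependencies", []):
                link_deps(dep, targets, deps, initial=False,
                          include_shared=include_shared)
    return deps

targets = {
    "app":    {"type": "executable", "dependencies": ["ui", "helper"]},
    "ui":     {"type": "static_library", "dependencies": ["base"]},
    "base":   {"type": "static_library", "dependencies": []},
    "helper": {"type": "executable", "dependencies": []},
}
print(link_deps("app", targets))  # ['app', 'ui', 'base'] -- helper is not linked
```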
- for target, spec in targets.items(): - build_file = gyp.common.BuildFile(target) - build_file_node = dependency_nodes[build_file] - target_dependencies = spec.get("dependencies", []) - for dependency in target_dependencies: - try: - dependency_build_file = gyp.common.BuildFile(dependency) - except GypError as e: - gyp.common.ExceptionAppend( - e, "while computing dependencies of .gyp file %s" % build_file - ) - raise - - if dependency_build_file == build_file: - # A .gyp file is allowed to refer back to itself. - continue - dependency_node = dependency_nodes.get(dependency_build_file) - if not dependency_node: - raise GypError("Dependency '%s' not found" % dependency_build_file) - if dependency_node not in build_file_node.dependencies: - build_file_node.dependencies.append(dependency_node) - dependency_node.dependents.append(build_file_node) - - # Files that have no dependencies are treated as dependent on root_node. - root_node = DependencyGraphNode(None) - for build_file_node in dependency_nodes.values(): - if len(build_file_node.dependencies) == 0: - build_file_node.dependencies.append(root_node) - root_node.dependents.append(build_file_node) - - flat_list = root_node.FlattenToList() - - # If there's anything left unvisited, there must be a circular dependency - # (cycle). - if len(flat_list) != len(dependency_nodes): - if not root_node.dependents: - # If all files have dependencies, add the first file as a dependent - # of root_node so that the cycle can be discovered from root_node. - file_node = next(iter(dependency_nodes.values())) - file_node.dependencies.append(root_node) - root_node.dependents.append(file_node) - cycles = [] - for cycle in root_node.FindCycles(): - paths = [node.ref for node in cycle] - cycles.append("Cycle: %s" % " -> ".join(paths)) - raise DependencyGraphNode.CircularException( - "Cycles in .gyp file dependency graph detected:\n" + "\n".join(cycles) - ) - - -def DoDependentSettings(key, flat_list, targets, dependency_nodes): - # key should be one of all_dependent_settings, direct_dependent_settings, - # or link_settings. - - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - - if key == "all_dependent_settings": - dependencies = dependency_nodes[target].DeepDependencies() - elif key == "direct_dependent_settings": - dependencies = dependency_nodes[target].DirectAndImportedDependencies( - targets - ) - elif key == "link_settings": - dependencies = dependency_nodes[target].DependenciesForLinkSettings(targets) - else: - raise GypError( - "DoDependentSettings doesn't know how to determine " - "dependencies for " + key - ) - - for dependency in dependencies: - dependency_dict = targets[dependency] - if key not in dependency_dict: - continue - dependency_build_file = gyp.common.BuildFile(dependency) - MergeDicts( - target_dict, dependency_dict[key], build_file, dependency_build_file - ) - - -def AdjustStaticLibraryDependencies( - flat_list, targets, dependency_nodes, sort_dependencies -): - # Recompute target "dependencies" properties. For each static library - # target, remove "dependencies" entries referring to other static libraries, - # unless the dependency has the "hard_dependency" attribute set. For each - # linkable target, add a "dependencies" entry referring to all of the - # target's computed list of link dependencies (including static libraries - # if no such entry is already present. 
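Before the implementation that follows, the pruning rule just described can be shown on a toy example. The target dicts below are hypothetical stand-ins, not real gyp input:

    # Toy illustration of the static-library rule described above: a soft
    # static_library dependency is dropped, a hard one is kept. All names
    # here are hypothetical.
    targets = {
        "liba": {"type": "static_library", "dependencies": ["libb", "libc"]},
        "libb": {"type": "static_library"},                           # soft
        "libc": {"type": "static_library", "hard_dependency": True},  # hard
    }
    kept = [
        dep for dep in targets["liba"]["dependencies"]
        if targets[dep]["type"] != "static_library"
        or targets[dep].get("hard_dependency", False)
    ]
    assert kept == ["libc"]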
- for target in flat_list: - target_dict = targets[target] - target_type = target_dict["type"] - - if target_type == "static_library": - if "dependencies" not in target_dict: - continue - - target_dict["dependencies_original"] = target_dict.get("dependencies", [])[ - : - ] - - # A static library should not depend on another static library unless - # the dependency relationship is "hard," which should only be done when - # a dependent relies on some side effect other than just the build - # product, like a rule or action output. Further, if a target has a - # non-hard dependency, but that dependency exports a hard dependency, - # the non-hard dependency can safely be removed, but the exported hard - # dependency must be added to the target to keep the same dependency - # ordering. - dependencies = dependency_nodes[target].DirectAndImportedDependencies( - targets - ) - index = 0 - while index < len(dependencies): - dependency = dependencies[index] - dependency_dict = targets[dependency] - - # Remove every non-hard static library dependency and remove every - # non-static library dependency that isn't a direct dependency. - if ( - dependency_dict["type"] == "static_library" - and not dependency_dict.get("hard_dependency", False) - ) or ( - dependency_dict["type"] != "static_library" - and dependency not in target_dict["dependencies"] - ): - # Take the dependency out of the list, and don't increment index - # because the next dependency to analyze will shift into the index - # formerly occupied by the one being removed. - del dependencies[index] - else: - index = index + 1 - - # Update the dependencies. If the dependencies list is empty, it's not - # needed, so unhook it. - if len(dependencies) > 0: - target_dict["dependencies"] = dependencies - else: - del target_dict["dependencies"] - - elif target_type in linkable_types: - # Get a list of dependency targets that should be linked into this - # target. Add them to the dependencies list if they're not already - # present. - - link_dependencies = dependency_nodes[target].DependenciesToLinkAgainst( - targets - ) - for dependency in link_dependencies: - if dependency == target: - continue - if "dependencies" not in target_dict: - target_dict["dependencies"] = [] - if dependency not in target_dict["dependencies"]: - target_dict["dependencies"].append(dependency) - # Sort the dependencies list in the order from dependents to dependencies. - # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D. - # Note: flat_list is already sorted in the order from dependencies to - # dependents. - if sort_dependencies and "dependencies" in target_dict: - target_dict["dependencies"] = [ - dep - for dep in reversed(flat_list) - if dep in target_dict["dependencies"] - ] - - -# Initialize this here to speed up MakePathRelative. -exception_re = re.compile(r"""["']?[-/$<>^]""") - - -def MakePathRelative(to_file, fro_file, item): - # If item is a relative path, it's relative to the build file dict that it's - # coming from. Fix it up to make it relative to the build file dict that - # it's going into. - # Exception: any |item| that begins with these special characters is - # returned without modification. 
- # / Used when a path is already absolute (shortcut optimization; - # such paths would be returned as absolute anyway) - # $ Used for build environment variables - # - Used for some build environment flags (such as -lapr-1 in a - # "libraries" section) - # < Used for our own variable and command expansions (see ExpandVariables) - # > Used for our own variable and command expansions (see ExpandVariables) - # ^ Used for our own variable and command expansions (see ExpandVariables) - # - # "/' Used when a value is quoted. If these are present, then we - # check the second character instead. - # - if to_file == fro_file or exception_re.match(item): - return item - else: - # TODO(dglazkov) The backslash/forward-slash replacement at the end is a - # temporary measure. This should really be addressed by keeping all paths - # in POSIX until actual project generation. - ret = os.path.normpath( - os.path.join( - gyp.common.RelativePath( - os.path.dirname(fro_file), os.path.dirname(to_file) - ), - item, - ) - ).replace("\\", "/") - if item.endswith("/"): - ret += "/" - return ret - - -def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True): - # Python documentation recommends objects which do not support hash - # set this value to None. Python library objects follow this rule. - def is_hashable(val): - return val.__hash__ - - # If x is hashable, returns whether x is in s. Else returns whether x is in items. - def is_in_set_or_list(x, s, items): - if is_hashable(x): - return x in s - return x in items - - prepend_index = 0 - - # Make membership testing of hashables in |to| (in particular, strings) - # faster. - hashable_to_set = {x for x in to if is_hashable(x)} - for item in fro: - singleton = False - if type(item) in (str, int): - # The cheap and easy case. - to_item = MakePathRelative(to_file, fro_file, item) if is_paths else item - - if not (type(item) is str and item.startswith("-")): - # Any string that doesn't begin with a "-" is a singleton - it can - # only appear once in a list, to be enforced by the list merge append - # or prepend. - singleton = True - elif type(item) is dict: - # Make a copy of the dictionary, continuing to look for paths to fix. - # The other intelligent aspects of merge processing won't apply because - # item is being merged into an empty dict. - to_item = {} - MergeDicts(to_item, item, to_file, fro_file) - elif type(item) is list: - # Recurse, making a copy of the list. If the list contains any - # descendant dicts, path fixing will occur. Note that here, custom - # values for is_paths and append are dropped; those are only to be - # applied to |to| and |fro|, not sublists of |fro|. append shouldn't - # matter anyway because the new |to_item| list is empty. - to_item = [] - MergeLists(to_item, item, to_file, fro_file) - else: - raise TypeError( - "Attempt to merge list item of unsupported type " - + item.__class__.__name__ - ) - - if append: - # If appending a singleton that's already in the list, don't append. - # This ensures that the earliest occurrence of the item will stay put. - if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to): - to.append(to_item) - if is_hashable(to_item): - hashable_to_set.add(to_item) - else: - # If prepending a singleton that's already in the list, remove the - # existing instance and proceed with the prepend. This ensures that the - # item appears at the earliest possible position in the list. - while singleton and to_item in to: - to.remove(to_item) - - # Don't just insert everything at index 0. 
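As an aside on MakePathRelative above: the common case reduces to re-anchoring a relative path between two build-file directories. A minimal sketch, substituting os.path.relpath for gyp.common.RelativePath and using hypothetical file names:

    # Re-anchor a path written relative to fro_file so it is relative
    # to to_file instead, as MakePathRelative above does.
    import os

    def rebase(to_file, fro_file, item):
        rel = os.path.relpath(os.path.dirname(fro_file), os.path.dirname(to_file))
        return os.path.normpath(os.path.join(rel, item)).replace("\\", "/")

    # A path written in third_party/lib/lib.gyp, seen from src/app.gyp:
    assert rebase("src/app.gyp", "third_party/lib/lib.gyp", "src/foo.cc") == \
        "../third_party/lib/src/foo.cc"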
That would prepend the new - # items to the list in reverse order, which would be an unwelcome - # surprise. - to.insert(prepend_index, to_item) - if is_hashable(to_item): - hashable_to_set.add(to_item) - prepend_index = prepend_index + 1 - - -def MergeDicts(to, fro, to_file, fro_file): - # I wanted to name the parameter "from" but it's a Python keyword... - for k, v in fro.items(): - # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give - # copy semantics. Something else may want to merge from the |fro| dict - # later, and having the same dict ref pointed to twice in the tree isn't - # what anyone wants considering that the dicts may subsequently be - # modified. - if k in to: - bad_merge = False - if type(v) in (str, int): - if type(to[k]) not in (str, int): - bad_merge = True - elif not isinstance(v, type(to[k])): - bad_merge = True - - if bad_merge: - raise TypeError( - "Attempt to merge dict value of type " - + v.__class__.__name__ - + " into incompatible type " - + to[k].__class__.__name__ - + " for key " - + k - ) - if type(v) in (str, int): - # Overwrite the existing value, if any. Cheap and easy. - is_path = IsPathSection(k) - if is_path: - to[k] = MakePathRelative(to_file, fro_file, v) - else: - to[k] = v - elif type(v) is dict: - # Recurse, guaranteeing copies will be made of objects that require it. - if k not in to: - to[k] = {} - MergeDicts(to[k], v, to_file, fro_file) - elif type(v) is list: - # Lists in dicts can be merged with different policies, depending on - # how the key in the "from" dict (k, the from-key) is written. - # - # If the from-key has ...the to-list will have this action - # this character appended:... applied when receiving the from-list: - # = replace - # + prepend - # ? set, only if to-list does not yet exist - # (none) append - # - # This logic is list-specific, but since it relies on the associated - # dict key, it's checked in this dict-oriented function. - ext = k[-1] - append = True - if ext == "=": - list_base = k[:-1] - lists_incompatible = [list_base, list_base + "?"] - to[list_base] = [] - elif ext == "+": - list_base = k[:-1] - lists_incompatible = [list_base + "=", list_base + "?"] - append = False - elif ext == "?": - list_base = k[:-1] - lists_incompatible = [list_base, list_base + "=", list_base + "+"] - else: - list_base = k - lists_incompatible = [list_base + "=", list_base + "?"] - - # Some combinations of merge policies appearing together are meaningless. - # It's stupid to replace and append simultaneously, for example. Append - # and prepend are the only policies that can coexist. - for list_incompatible in lists_incompatible: - if list_incompatible in fro: - raise GypError( - "Incompatible list policies " + k + " and " + list_incompatible - ) - - if list_base in to: - if ext == "?": - # If the key ends in "?", the list will only be merged if it doesn't - # already exist. - continue - elif type(to[list_base]) is not list: - # This may not have been checked above if merging in a list with an - # extension character. - raise TypeError( - "Attempt to merge dict value of type " - + v.__class__.__name__ - + " into incompatible type " - + to[list_base].__class__.__name__ - + " for key " - + list_base - + "(" - + k - + ")" - ) - else: - to[list_base] = [] - - # Call MergeLists, which will make copies of objects that require it. 
- # MergeLists can recurse back into MergeDicts, although this will be - # to make copies of dicts (with paths fixed), there will be no - # subsequent dict "merging" once entering a list because lists are - # always replaced, appended to, or prepended to. - is_paths = IsPathSection(list_base) - MergeLists(to[list_base], v, to_file, fro_file, is_paths, append) - else: - raise TypeError( - "Attempt to merge dict value of unsupported type " - + v.__class__.__name__ - + " for key " - + k - ) - - -def MergeConfigWithInheritance( - new_configuration_dict, build_file, target_dict, configuration, visited -): - # Skip if previously visited. - if configuration in visited: - return - - # Look at this configuration. - configuration_dict = target_dict["configurations"][configuration] - - # Merge in parents. - for parent in configuration_dict.get("inherit_from", []): - MergeConfigWithInheritance( - new_configuration_dict, - build_file, - target_dict, - parent, - visited + [configuration], - ) - - # Merge it into the new config. - MergeDicts(new_configuration_dict, configuration_dict, build_file, build_file) - - # Drop abstract. - if "abstract" in new_configuration_dict: - del new_configuration_dict["abstract"] - - -def SetUpConfigurations(target, target_dict): - # key_suffixes is a list of key suffixes that might appear on key names. - # These suffixes are handled in conditional evaluations (for =, +, and ?) - # and rules/exclude processing (for ! and /). Keys with these suffixes - # should be treated the same as keys without. - key_suffixes = ["=", "+", "?", "!", "/"] - - build_file = gyp.common.BuildFile(target) - - # Provide a single configuration by default if none exists. - # TODO(mark): Signal an error if default_configurations exists but - # configurations does not. - if "configurations" not in target_dict: - target_dict["configurations"] = {"Default": {}} - if "default_configuration" not in target_dict: - concrete = [ - i - for (i, config) in target_dict["configurations"].items() - if not config.get("abstract") - ] - target_dict["default_configuration"] = sorted(concrete)[0] - - merged_configurations = {} - configs = target_dict["configurations"] - for (configuration, old_configuration_dict) in configs.items(): - # Skip abstract configurations (saves work only). - if old_configuration_dict.get("abstract"): - continue - # Configurations inherit (most) settings from the enclosing target scope. - # Get the inheritance relationship right by making a copy of the target - # dict. - new_configuration_dict = {} - for (key, target_val) in target_dict.items(): - key_ext = key[-1:] - key_base = key[:-1] if key_ext in key_suffixes else key - if key_base not in non_configuration_keys: - new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val) - - # Merge in configuration (with all its parents first). - MergeConfigWithInheritance( - new_configuration_dict, build_file, target_dict, configuration, [] - ) - - merged_configurations[configuration] = new_configuration_dict - - # Put the new configurations back into the target dict as a configuration. - for configuration in merged_configurations: - target_dict["configurations"][configuration] = merged_configurations[ - configuration - ] - - # Now drop all the abstract ones. 
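The inherit_from resolution implemented by MergeConfigWithInheritance above is easiest to see on a toy target. A minimal sketch with hypothetical configuration names; real merging goes through MergeDicts, so lists append by default:

    # Toy 'inherit_from' resolution: parents merge first, then the
    # configuration itself, depth-first.
    configurations = {
        "Base": {"abstract": 1, "defines": ["COMMON"]},
        "Debug": {"inherit_from": ["Base"], "defines": ["DEBUG"]},
    }

    def resolve(name, visited=()):
        if name in visited:   # guard against inheritance cycles
            return {"defines": []}
        merged = {"defines": []}
        for parent in configurations[name].get("inherit_from", []):
            merged["defines"].extend(resolve(parent, visited + (name,))["defines"])
        merged["defines"].extend(configurations[name].get("defines", []))
        return merged

    assert resolve("Debug")["defines"] == ["COMMON", "DEBUG"]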
- configs = target_dict["configurations"] - target_dict["configurations"] = { - k: v for k, v in configs.items() if not v.get("abstract") - } - - # Now that all of the target's configurations have been built, go through - # the target dict's keys and remove everything that's been moved into a - # "configurations" section. - delete_keys = [] - for key in target_dict: - key_ext = key[-1:] - key_base = key[:-1] if key_ext in key_suffixes else key - if key_base not in non_configuration_keys: - delete_keys.append(key) - for key in delete_keys: - del target_dict[key] - - # Check the configurations to see if they contain invalid keys. - for configuration in target_dict["configurations"]: - configuration_dict = target_dict["configurations"][configuration] - for key in configuration_dict: - if key in invalid_configuration_keys: - raise GypError( - "%s not allowed in the %s configuration, found in " - "target %s" % (key, configuration, target) - ) - - -def ProcessListFiltersInDict(name, the_dict): - """Process regular expression and exclusion-based filters on lists. - - An exclusion list is in a dict key named with a trailing "!", like - "sources!". Every item in such a list is removed from the associated - main list, which in this example, would be "sources". Removed items are - placed into a "sources_excluded" list in the dict. - - Regular expression (regex) filters are contained in dict keys named with a - trailing "/", such as "sources/" to operate on the "sources" list. Regex - filters in a dict take the form: - 'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'], - ['include', '_mac\\.cc$'] ], - The first filter says to exclude all files ending in _linux.cc, _mac.cc, and - _win.cc. The second filter then includes all files ending in _mac.cc that - are now or were once in the "sources" list. Items matching an "exclude" - filter are subject to the same processing as would occur if they were listed - by name in an exclusion list (ending in "!"). Items matching an "include" - filter are brought back into the main list if previously excluded by an - exclusion list or exclusion regex filter. Subsequent matching "exclude" - patterns can still cause items to be excluded after matching an "include". - """ - - # Look through the dictionary for any lists whose keys end in "!" or "/". - # These are lists that will be treated as exclude lists and regular - # expression-based exclude/include lists. Collect the lists that are - # needed first, looking for the lists that they operate on, and assemble - # then into |lists|. This is done in a separate loop up front, because - # the _included and _excluded keys need to be added to the_dict, and that - # can't be done while iterating through it. - - lists = [] - del_lists = [] - for key, value in the_dict.items(): - if not key: - continue - operation = key[-1] - if operation not in {"!", "/"}: - continue - - if type(value) is not list: - raise ValueError( - name + " key " + key + " must be list, not " + value.__class__.__name__ - ) - - list_key = key[:-1] - if list_key not in the_dict: - # This happens when there's a list like "sources!" but no corresponding - # "sources" list. Since there's nothing for it to operate on, queue up - # the "sources!" list for deletion now. 
- del_lists.append(key) - continue - - if type(the_dict[list_key]) is not list: - value = the_dict[list_key] - raise ValueError( - name - + " key " - + list_key - + " must be list, not " - + value.__class__.__name__ - + " when applying " - + {"!": "exclusion", "/": "regex"}[operation] - ) - - if list_key not in lists: - lists.append(list_key) - - # Delete the lists that are known to be unneeded at this point. - for del_list in del_lists: - del the_dict[del_list] - - for list_key in lists: - the_list = the_dict[list_key] - - # Initialize the list_actions list, which is parallel to the_list. Each - # item in list_actions identifies whether the corresponding item in - # the_list should be excluded, unconditionally preserved (included), or - # whether no exclusion or inclusion has been applied. Items for which - # no exclusion or inclusion has been applied (yet) have value -1, items - # excluded have value 0, and items included have value 1. Includes and - # excludes override previous actions. All items in list_actions are - # initialized to -1 because no excludes or includes have been processed - # yet. - list_actions = list((-1,) * len(the_list)) - - exclude_key = list_key + "!" - if exclude_key in the_dict: - for exclude_item in the_dict[exclude_key]: - for index, list_item in enumerate(the_list): - if exclude_item == list_item: - # This item matches the exclude_item, so set its action to 0 - # (exclude). - list_actions[index] = 0 - - # The "whatever!" list is no longer needed, dump it. - del the_dict[exclude_key] - - regex_key = list_key + "/" - if regex_key in the_dict: - for regex_item in the_dict[regex_key]: - [action, pattern] = regex_item - pattern_re = re.compile(pattern) - - if action == "exclude": - # This item matches an exclude regex, set its value to 0 (exclude). - action_value = 0 - elif action == "include": - # This item matches an include regex, set its value to 1 (include). - action_value = 1 - else: - # This is an action that doesn't make any sense. - raise ValueError( - "Unrecognized action " - + action - + " in " - + name - + " key " - + regex_key - ) - - for index, list_item in enumerate(the_list): - if list_actions[index] == action_value: - # Even if the regex matches, nothing will change so continue - # (regex searches are expensive). - continue - if pattern_re.search(list_item): - # Regular expression match. - list_actions[index] = action_value - - # The "whatever/" list is no longer needed, dump it. - del the_dict[regex_key] - - # Add excluded items to the excluded list. - # - # Note that exclude_key ("sources!") is different from excluded_key - # ("sources_excluded"). The exclude_key list is input and it was already - # processed and deleted; the excluded_key list is output and it's about - # to be created. - excluded_key = list_key + "_excluded" - if excluded_key in the_dict: - raise GypError( - name + " key " + excluded_key + " must not be present prior " - " to applying exclusion/regex filters for " + list_key - ) - - excluded_list = [] - - # Go backwards through the list_actions list so that as items are deleted, - # the indices of items that haven't been seen yet don't shift. That means - # that things need to be prepended to excluded_list to maintain them in the - # same order that they existed in the_list. - for index in range(len(list_actions) - 1, -1, -1): - if list_actions[index] == 0: - # Dump anything with action 0 (exclude). Keep anything with action 1 - # (include) or -1 (no include or exclude seen for the item). 
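Putting the exclusion pass together on sample data, independent of the loop above: a self-contained sketch of the documented semantics, with hypothetical file names. Actions are -1 (untouched), 0 (excluded), 1 (included), as described above:

    import re

    d = {
        "sources": ["a_linux.cc", "a_mac.cc", "a_win.cc", "common.cc"],
        "sources!": ["common.cc"],
        "sources/": [["exclude", r"_(linux|mac|win)\.cc$"],
                     ["include", r"_mac\.cc$"]],
    }

    actions = [-1] * len(d["sources"])
    for item in d.pop("sources!"):            # name-based exclusion list
        for i, source in enumerate(d["sources"]):
            if source == item:
                actions[i] = 0
    for verb, pattern in d.pop("sources/"):   # regex filters, in order
        value = 0 if verb == "exclude" else 1
        for i, source in enumerate(d["sources"]):
            if actions[i] != value and re.search(pattern, source):
                actions[i] = value

    d["sources_excluded"] = [s for s, a in zip(d["sources"], actions) if a == 0]
    d["sources"] = [s for s, a in zip(d["sources"], actions) if a != 0]
    assert d["sources"] == ["a_mac.cc"]
    assert d["sources_excluded"] == ["a_linux.cc", "a_win.cc", "common.cc"]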
- excluded_list.insert(0, the_list[index]) - del the_list[index] - - # If anything was excluded, put the excluded list into the_dict at - # excluded_key. - if len(excluded_list) > 0: - the_dict[excluded_key] = excluded_list - - # Now recurse into subdicts and lists that may contain dicts. - for key, value in the_dict.items(): - if type(value) is dict: - ProcessListFiltersInDict(key, value) - elif type(value) is list: - ProcessListFiltersInList(key, value) - - -def ProcessListFiltersInList(name, the_list): - for item in the_list: - if type(item) is dict: - ProcessListFiltersInDict(name, item) - elif type(item) is list: - ProcessListFiltersInList(name, item) - - -def ValidateTargetType(target, target_dict): - """Ensures the 'type' field on the target is one of the known types. - - Arguments: - target: string, name of target. - target_dict: dict, target spec. - - Raises an exception on error. - """ - VALID_TARGET_TYPES = ( - "executable", - "loadable_module", - "static_library", - "shared_library", - "mac_kernel_extension", - "none", - "windows_driver", - ) - target_type = target_dict.get("type", None) - if target_type not in VALID_TARGET_TYPES: - raise GypError( - "Target %s has an invalid target type '%s'. " - "Must be one of %s." % (target, target_type, "/".join(VALID_TARGET_TYPES)) - ) - if ( - target_dict.get("standalone_static_library", 0) - and not target_type == "static_library" - ): - raise GypError( - "Target %s has type %s but standalone_static_library flag is" - " only valid for static_library type." % (target, target_type) - ) - - -def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): - """Ensures that the rules sections in target_dict are valid and consistent, - and determines which sources they apply to. - - Arguments: - target: string, name of target. - target_dict: dict, target spec containing "rules" and "sources" lists. - extra_sources_for_rules: a list of keys to scan for rule matches in - addition to 'sources'. - """ - - # Dicts to map between values found in rules' 'rule_name' and 'extension' - # keys and the rule dicts themselves. - rule_names = {} - rule_extensions = {} - - rules = target_dict.get("rules", []) - for rule in rules: - # Make sure that there's no conflict among rule names and extensions. - rule_name = rule["rule_name"] - if rule_name in rule_names: - raise GypError( - f"rule {rule_name} exists in duplicate, target {target}" - ) - rule_names[rule_name] = rule - - rule_extension = rule["extension"] - if rule_extension.startswith("."): - rule_extension = rule_extension[1:] - if rule_extension in rule_extensions: - raise GypError( - ( - "extension %s associated with multiple rules, " - + "target %s rules %s and %s" - ) - % ( - rule_extension, - target, - rule_extensions[rule_extension]["rule_name"], - rule_name, - ) - ) - rule_extensions[rule_extension] = rule - - # Make sure rule_sources isn't already there. It's going to be - # created below if needed. 
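The rule_sources construction a few lines below boils down to matching source extensions against each rule's extension. A sketch with a hypothetical rule and source list:

    # Collect the sources a rule applies to, keyed on file extension,
    # as the loop below does. Rule and sources are hypothetical.
    import os

    rule = {"rule_name": "protoc", "extension": ".proto"}
    sources = ["msg.proto", "main.cc", "extra.proto"]

    extension = rule["extension"].lstrip(".")
    rule_sources = [
        s for s in sources if os.path.splitext(s)[1].lstrip(".") == extension
    ]
    assert rule_sources == ["msg.proto", "extra.proto"]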
- if "rule_sources" in rule: - raise GypError( - "rule_sources must not exist in input, target %s rule %s" - % (target, rule_name) - ) - - rule_sources = [] - source_keys = ["sources"] - source_keys.extend(extra_sources_for_rules) - for source_key in source_keys: - for source in target_dict.get(source_key, []): - (source_root, source_extension) = os.path.splitext(source) - if source_extension.startswith("."): - source_extension = source_extension[1:] - if source_extension == rule_extension: - rule_sources.append(source) - - if len(rule_sources) > 0: - rule["rule_sources"] = rule_sources - - -def ValidateRunAsInTarget(target, target_dict, build_file): - target_name = target_dict.get("target_name") - run_as = target_dict.get("run_as") - if not run_as: - return - if type(run_as) is not dict: - raise GypError( - "The 'run_as' in target %s from file %s should be a " - "dictionary." % (target_name, build_file) - ) - action = run_as.get("action") - if not action: - raise GypError( - "The 'run_as' in target %s from file %s must have an " - "'action' section." % (target_name, build_file) - ) - if type(action) is not list: - raise GypError( - "The 'action' for 'run_as' in target %s from file %s " - "must be a list." % (target_name, build_file) - ) - working_directory = run_as.get("working_directory") - if working_directory and type(working_directory) is not str: - raise GypError( - "The 'working_directory' for 'run_as' in target %s " - "in file %s should be a string." % (target_name, build_file) - ) - environment = run_as.get("environment") - if environment and type(environment) is not dict: - raise GypError( - "The 'environment' for 'run_as' in target %s " - "in file %s should be a dictionary." % (target_name, build_file) - ) - - -def ValidateActionsInTarget(target, target_dict, build_file): - """Validates the inputs to the actions in a target.""" - target_name = target_dict.get("target_name") - actions = target_dict.get("actions", []) - for action in actions: - action_name = action.get("action_name") - if not action_name: - raise GypError( - "Anonymous action in target %s. " - "An action must have an 'action_name' field." % target_name - ) - inputs = action.get("inputs", None) - if inputs is None: - raise GypError("Action in target %s has no inputs." % target_name) - action_command = action.get("action") - if action_command and not action_command[0]: - raise GypError("Empty action as command in target %s." % target_name) - - -def TurnIntIntoStrInDict(the_dict): - """Given dict the_dict, recursively converts all integers into strings. - """ - # Use items instead of iteritems because there's no need to try to look at - # reinserted keys and their associated values. - for k, v in the_dict.items(): - if type(v) is int: - v = str(v) - the_dict[k] = v - elif type(v) is dict: - TurnIntIntoStrInDict(v) - elif type(v) is list: - TurnIntIntoStrInList(v) - - if type(k) is int: - del the_dict[k] - the_dict[str(k)] = v - - -def TurnIntIntoStrInList(the_list): - """Given list the_list, recursively converts all integers into strings. 
- """ - for index, item in enumerate(the_list): - if type(item) is int: - the_list[index] = str(item) - elif type(item) is dict: - TurnIntIntoStrInDict(item) - elif type(item) is list: - TurnIntIntoStrInList(item) - - -def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, data): - """Return only the targets that are deep dependencies of |root_targets|.""" - qualified_root_targets = [] - for target in root_targets: - target = target.strip() - qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list) - if not qualified_targets: - raise GypError("Could not find target %s" % target) - qualified_root_targets.extend(qualified_targets) - - wanted_targets = {} - for target in qualified_root_targets: - wanted_targets[target] = targets[target] - for dependency in dependency_nodes[target].DeepDependencies(): - wanted_targets[dependency] = targets[dependency] - - wanted_flat_list = [t for t in flat_list if t in wanted_targets] - - # Prune unwanted targets from each build_file's data dict. - for build_file in data["target_build_files"]: - if "targets" not in data[build_file]: - continue - new_targets = [] - for target in data[build_file]["targets"]: - qualified_name = gyp.common.QualifiedTarget( - build_file, target["target_name"], target["toolset"] - ) - if qualified_name in wanted_targets: - new_targets.append(target) - data[build_file]["targets"] = new_targets - - return wanted_targets, wanted_flat_list - - -def VerifyNoCollidingTargets(targets): - """Verify that no two targets in the same directory share the same name. - - Arguments: - targets: A list of targets in the form 'path/to/file.gyp:target_name'. - """ - # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'. - used = {} - for target in targets: - # Separate out 'path/to/file.gyp, 'target_name' from - # 'path/to/file.gyp:target_name'. - path, name = target.rsplit(":", 1) - # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'. - subdir, gyp = os.path.split(path) - # Use '.' for the current directory '', so that the error messages make - # more sense. - if not subdir: - subdir = "." - # Prepare a key like 'path/to:target_name'. - key = subdir + ":" + name - if key in used: - # Complain if this target is already used. - raise GypError( - 'Duplicate target name "%s" in directory "%s" used both ' - 'in "%s" and "%s".' % (name, subdir, gyp, used[key]) - ) - used[key] = gyp - - -def SetGeneratorGlobals(generator_input_info): - # Set up path_sections and non_configuration_keys with the default data plus - # the generator-specific data. - global path_sections - path_sections = set(base_path_sections) - path_sections.update(generator_input_info["path_sections"]) - - global non_configuration_keys - non_configuration_keys = base_non_configuration_keys[:] - non_configuration_keys.extend(generator_input_info["non_configuration_keys"]) - - global multiple_toolsets - multiple_toolsets = generator_input_info["generator_supports_multiple_toolsets"] - - global generator_filelist_paths - generator_filelist_paths = generator_input_info["generator_filelist_paths"] - - -def Load( - build_files, - variables, - includes, - depth, - generator_input_info, - check, - circular_check, - parallel, - root_targets, -): - SetGeneratorGlobals(generator_input_info) - # A generator can have other lists (in addition to sources) be processed - # for rules. - extra_sources_for_rules = generator_input_info["extra_sources_for_rules"] - - # Load build files. 
This loads every target-containing build file into - # the |data| dictionary such that the keys to |data| are build file names, - # and the values are the entire build file contents after "early" or "pre" - # processing has been done and includes have been resolved. - # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as - # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps - # track of the keys corresponding to "target" files. - data = {"target_build_files": set()} - # Normalize paths everywhere. This is important because paths will be - # used as keys to the data dict and for references between input files. - build_files = set(map(os.path.normpath, build_files)) - if parallel: - LoadTargetBuildFilesParallel( - build_files, data, variables, includes, depth, check, generator_input_info - ) - else: - aux_data = {} - for build_file in build_files: - try: - LoadTargetBuildFile( - build_file, data, aux_data, variables, includes, depth, check, True - ) - except Exception as e: - gyp.common.ExceptionAppend(e, "while trying to load %s" % build_file) - raise - - # Build a dict to access each target's subdict by qualified name. - targets = BuildTargetsDict(data) - - # Fully qualify all dependency links. - QualifyDependencies(targets) - - # Remove self-dependencies from targets that have 'prune_self_dependencies' - # set to 1. - RemoveSelfDependencies(targets) - - # Expand dependencies specified as build_file:*. - ExpandWildcardDependencies(targets, data) - - # Remove all dependencies marked as 'link_dependency' from the targets of - # type 'none'. - RemoveLinkDependenciesFromNoneTargets(targets) - - # Apply exclude (!) and regex (/) list filters only for dependency_sections. - for target_name, target_dict in targets.items(): - tmp_dict = {} - for key_base in dependency_sections: - for op in ("", "!", "/"): - key = key_base + op - if key in target_dict: - tmp_dict[key] = target_dict[key] - del target_dict[key] - ProcessListFiltersInDict(target_name, tmp_dict) - # Write the results back to |target_dict|. - for key in tmp_dict: - target_dict[key] = tmp_dict[key] - - # Make sure every dependency appears at most once. - RemoveDuplicateDependencies(targets) - - if circular_check: - # Make sure that any targets in a.gyp don't contain dependencies in other - # .gyp files that further depend on a.gyp. - VerifyNoGYPFileCircularDependencies(targets) - - [dependency_nodes, flat_list] = BuildDependencyList(targets) - - if root_targets: - # Remove, from |targets| and |flat_list|, the targets that are not deep - # dependencies of the targets specified in |root_targets|. - targets, flat_list = PruneUnwantedTargets( - targets, flat_list, dependency_nodes, root_targets, data - ) - - # Check that no two targets in the same directory have the same name. - VerifyNoCollidingTargets(flat_list) - - # Handle dependent settings of various types. - for settings_type in [ - "all_dependent_settings", - "direct_dependent_settings", - "link_settings", - ]: - DoDependentSettings(settings_type, flat_list, targets, dependency_nodes) - - # Take out the dependent settings now that they've been published to all - # of the targets that require them. - for target in flat_list: - if settings_type in targets[target]: - del targets[target][settings_type] - - # Make sure static libraries don't declare dependencies on other static - # libraries, but that linkables depend on all unlinked static libraries - # that they need so that their link steps will be correct. 
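What DoDependentSettings effectively does for all_dependent_settings, compressed into a few lines. The micro-targets and the precomputed deep_deps lists are hypothetical stand-ins for DependencyGraphNode.DeepDependencies, and real merging goes through MergeDicts:

    # Each target absorbs the published settings of all its deep
    # dependencies; B's settings reach both A and C here.
    flat_list = ["B", "A", "C"]          # dependencies before dependents
    targets = {
        "B": {"all_dependent_settings": {"defines": ["FROM_B"]}, "deep_deps": []},
        "A": {"all_dependent_settings": {"defines": ["FROM_A"]}, "deep_deps": ["B"]},
        "C": {"deep_deps": ["A", "B"]},
    }
    for target in flat_list:
        merged = []
        for dep in targets[target]["deep_deps"]:
            merged += targets[dep].get("all_dependent_settings", {}).get("defines", [])
        if merged:
            targets[target].setdefault("defines", []).extend(merged)
    assert targets["A"]["defines"] == ["FROM_B"]
    assert targets["C"]["defines"] == ["FROM_A", "FROM_B"]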
- gii = generator_input_info - if gii["generator_wants_static_library_dependencies_adjusted"]: - AdjustStaticLibraryDependencies( - flat_list, - targets, - dependency_nodes, - gii["generator_wants_sorted_dependencies"], - ) - - # Apply "post"/"late"/"target" variable expansions and condition evaluations. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ProcessVariablesAndConditionsInDict( - target_dict, PHASE_LATE, variables, build_file - ) - - # Move everything that can go into a "configurations" section into one. - for target in flat_list: - target_dict = targets[target] - SetUpConfigurations(target, target_dict) - - # Apply exclude (!) and regex (/) list filters. - for target in flat_list: - target_dict = targets[target] - ProcessListFiltersInDict(target, target_dict) - - # Apply "latelate" variable expansions and condition evaluations. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ProcessVariablesAndConditionsInDict( - target_dict, PHASE_LATELATE, variables, build_file - ) - - # Make sure that the rules make sense, and build up rule_sources lists as - # needed. Not all generators will need to use the rule_sources lists, but - # some may, and it seems best to build the list in a common spot. - # Also validate actions and run_as elements in targets. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ValidateTargetType(target, target_dict) - ValidateRulesInTarget(target, target_dict, extra_sources_for_rules) - ValidateRunAsInTarget(target, target_dict, build_file) - ValidateActionsInTarget(target, target_dict, build_file) - - # Generators might not expect ints. Turn them into strs. - TurnIntIntoStrInDict(data) - - # TODO(mark): Return |data| for now because the generator needs a list of - # build files that came in. In the future, maybe it should just accept - # a list, and not the whole data dict. - return [flat_list, targets, data] diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/input_test.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/input_test.py deleted file mode 100755 index a18f72e9ebb0a..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/input_test.py +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -"""Unit tests for the input.py file.""" - -import gyp.input -import unittest - - -class TestFindCycles(unittest.TestCase): - def setUp(self): - self.nodes = {} - for x in ("a", "b", "c", "d", "e"): - self.nodes[x] = gyp.input.DependencyGraphNode(x) - - def _create_dependency(self, dependent, dependency): - dependent.dependencies.append(dependency) - dependency.dependents.append(dependent) - - def test_no_cycle_empty_graph(self): - for label, node in self.nodes.items(): - self.assertEqual([], node.FindCycles()) - - def test_no_cycle_line(self): - self._create_dependency(self.nodes["a"], self.nodes["b"]) - self._create_dependency(self.nodes["b"], self.nodes["c"]) - self._create_dependency(self.nodes["c"], self.nodes["d"]) - - for label, node in self.nodes.items(): - self.assertEqual([], node.FindCycles()) - - def test_no_cycle_dag(self): - self._create_dependency(self.nodes["a"], self.nodes["b"]) - self._create_dependency(self.nodes["a"], self.nodes["c"]) - self._create_dependency(self.nodes["b"], self.nodes["c"]) - - for label, node in self.nodes.items(): - self.assertEqual([], node.FindCycles()) - - def test_cycle_self_reference(self): - self._create_dependency(self.nodes["a"], self.nodes["a"]) - - self.assertEqual( - [[self.nodes["a"], self.nodes["a"]]], self.nodes["a"].FindCycles() - ) - - def test_cycle_two_nodes(self): - self._create_dependency(self.nodes["a"], self.nodes["b"]) - self._create_dependency(self.nodes["b"], self.nodes["a"]) - - self.assertEqual( - [[self.nodes["a"], self.nodes["b"], self.nodes["a"]]], - self.nodes["a"].FindCycles(), - ) - self.assertEqual( - [[self.nodes["b"], self.nodes["a"], self.nodes["b"]]], - self.nodes["b"].FindCycles(), - ) - - def test_two_cycles(self): - self._create_dependency(self.nodes["a"], self.nodes["b"]) - self._create_dependency(self.nodes["b"], self.nodes["a"]) - - self._create_dependency(self.nodes["b"], self.nodes["c"]) - self._create_dependency(self.nodes["c"], self.nodes["b"]) - - cycles = self.nodes["a"].FindCycles() - self.assertTrue([self.nodes["a"], self.nodes["b"], self.nodes["a"]] in cycles) - self.assertTrue([self.nodes["b"], self.nodes["c"], self.nodes["b"]] in cycles) - self.assertEqual(2, len(cycles)) - - def test_big_cycle(self): - self._create_dependency(self.nodes["a"], self.nodes["b"]) - self._create_dependency(self.nodes["b"], self.nodes["c"]) - self._create_dependency(self.nodes["c"], self.nodes["d"]) - self._create_dependency(self.nodes["d"], self.nodes["e"]) - self._create_dependency(self.nodes["e"], self.nodes["a"]) - - self.assertEqual( - [ - [ - self.nodes["a"], - self.nodes["b"], - self.nodes["c"], - self.nodes["d"], - self.nodes["e"], - self.nodes["a"], - ] - ], - self.nodes["a"].FindCycles(), - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py deleted file mode 100755 index 59647c9a89034..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py +++ /dev/null @@ -1,771 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. 
-""" - - -import fcntl -import fnmatch -import glob -import json -import os -import plistlib -import re -import shutil -import struct -import subprocess -import sys -import tempfile - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool: - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace("-", "") - - def ExecCopyBundleResource(self, source, dest, convert_to_binary): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - convert_to_binary = convert_to_binary == "True" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - # TODO(thakis): This copies file attributes like mtime, while the - # single-file branch below doesn't. This should probably be changed to - # be consistent with the single-file branch. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == ".xib": - return self._CopyXIBFile(source, dest) - elif extension == ".storyboard": - return self._CopyXIBFile(source, dest) - elif extension == ".strings" and not convert_to_binary: - self._CopyStringsFile(source, dest) - else: - if os.path.exists(dest): - os.unlink(dest) - shutil.copy(source, dest) - - if convert_to_binary and extension in (".plist", ".strings"): - self._ConvertToBinary(dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - - # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
- base = os.path.dirname(os.path.realpath(__file__)) - if os.path.relpath(source): - source = os.path.join(base, source) - if os.path.relpath(dest): - dest = os.path.join(base, dest) - - args = ["xcrun", "ibtool", "--errors", "--warnings", "--notices"] - - if os.environ["XCODE_VERSION_ACTUAL"] > "0700": - args.extend(["--auto-activate-custom-fonts"]) - if "IPHONEOS_DEPLOYMENT_TARGET" in os.environ: - args.extend( - [ - "--target-device", - "iphone", - "--target-device", - "ipad", - "--minimum-deployment-target", - os.environ["IPHONEOS_DEPLOYMENT_TARGET"], - ] - ) - else: - args.extend( - [ - "--target-device", - "mac", - "--minimum-deployment-target", - os.environ["MACOSX_DEPLOYMENT_TARGET"], - ] - ) - - args.extend( - ["--output-format", "human-readable-text", "--compile", dest, source] - ) - - ibtool_section_re = re.compile(r"/\*.*\*/") - ibtool_re = re.compile(r".*note:.*is clipping its content") - try: - stdout = subprocess.check_output(args) - except subprocess.CalledProcessError as e: - print(e.output) - raise - current_section_header = None - for line in stdout.splitlines(): - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - print(current_section_header) - current_section_header = None - print(line) - return 0 - - def _ConvertToBinary(self, dest): - subprocess.check_call( - ["xcrun", "plutil", "-convert", "binary1", "-o", dest, dest] - ) - - def _CopyStringsFile(self, source, dest): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - - with open(source, "rb") as in_file: - s = in_file.read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - with open(dest, "wb") as fp: - fp.write(s.decode(input_code).encode("UTF-16")) - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. Returns None as a guess if it can't detect it.""" - with open(file_name, "rb") as fp: - try: - header = fp.read(3) - except Exception: - return None - if header.startswith(b"\xFE\xFF"): - return "UTF-16" - elif header.startswith(b"\xFF\xFE"): - return "UTF-16" - elif header.startswith(b"\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - with open(source) as fd: - lines = fd.read() - - # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). - plist = plistlib.readPlistFromString(lines) - if keys: - plist.update(json.loads(keys[0])) - lines = plistlib.writePlistToString(plist) - - # Go through all the environment variables and replace them as variables in - # the file. 
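The BOM sniffing in _DetectInputEncoding above can be exercised on raw bytes; a minimal sketch:

    # The three byte-order marks _DetectInputEncoding looks for.
    def sniff(header):
        if header.startswith(b"\xFE\xFF") or header.startswith(b"\xFF\xFE"):
            return "UTF-16"
        if header.startswith(b"\xEF\xBB\xBF"):
            return "UTF-8"
        return None

    assert sniff(b"\xFF\xFEh\x00") == "UTF-16"
    assert sniff(b"\xEF\xBB\xBFhi") == "UTF-8"
    assert sniff(b"plain") is None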
-        IDENT_RE = re.compile(r"[_/\s]")
-        for key in os.environ:
-            if key.startswith("_"):
-                continue
-            evar = "${%s}" % key
-            evalue = os.environ[key]
-            lines = lines.replace(evar, evalue)
-
-            # Xcode supports various suffixes on environment variables, which
-            # are all undocumented. :rfc1034identifier is used in the standard
-            # project template these days, and :identifier was used earlier.
-            # They are used to convert non-url characters into things that look
-            # like valid urls -- except that the replacement character for
-            # :identifier, '_', isn't valid in a URL either -- oops, hence
-            # :rfc1034identifier was born.
-            evar = "${%s:identifier}" % key
-            evalue = IDENT_RE.sub("_", os.environ[key])
-            lines = lines.replace(evar, evalue)
-
-            evar = "${%s:rfc1034identifier}" % key
-            evalue = IDENT_RE.sub("-", os.environ[key])
-            lines = lines.replace(evar, evalue)
-
-        # Remove any keys with values that haven't been replaced.
-        lines = lines.splitlines()
-        for i in range(len(lines)):
-            if lines[i].strip().startswith("${"):
-                lines[i] = None
-                lines[i - 1] = None
-        lines = "\n".join(line for line in lines if line is not None)
-
-        # Write out the file with variables replaced.
-        with open(dest, "w") as fd:
-            fd.write(lines)
-
-        # Write out the PkgInfo file now that the Info.plist file has been
-        # "compiled".
-        self._WritePkgInfo(dest)
-
-        if convert_to_binary == "True":
-            self._ConvertToBinary(dest)
-
-    def _WritePkgInfo(self, info_plist):
-        """This writes the PkgInfo file from the data stored in Info.plist."""
-        plist = plistlib.readPlist(info_plist)
-        if not plist:
-            return
-
-        # Only create PkgInfo for executable types.
-        package_type = plist["CFBundlePackageType"]
-        if package_type != "APPL":
-            return
-
-        # The format of PkgInfo is eight characters, representing the bundle
-        # type and bundle signature, each four characters. If that is missing,
-        # four '?' characters are used instead.
-        signature_code = plist.get("CFBundleSignature", "????")
-        if len(signature_code) != 4:  # Wrong length resets everything, too.
-            signature_code = "?" * 4
-
-        dest = os.path.join(os.path.dirname(info_plist), "PkgInfo")
-        with open(dest, "w") as fp:
-            fp.write(f"{package_type}{signature_code}")
-
-    def ExecFlock(self, lockfile, *cmd_list):
-        """Emulates the most basic behavior of Linux's flock(1)."""
-        # Rely on exception handling to report errors.
-        fd = os.open(lockfile, os.O_RDONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
-        fcntl.flock(fd, fcntl.LOCK_EX)
-        return subprocess.call(cmd_list)
-
-    def ExecFilterLibtool(self, *cmd_list):
-        """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
-        symbols'."""
-        libtool_re = re.compile(
-            r"^.*libtool: (?:for architecture: \S* )?" r"file: .* has no symbols$"
-        )
-        libtool_re5 = re.compile(
-            r"^.*libtool: warning for library: "
-            + r".* the table of contents is empty "
-            + r"\(no object file members in the library define global symbols\)$"
-        )
-        env = os.environ.copy()
-        # Ref:
-        # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
-        # The problem with this flag is that it resets the file mtime on the file to
-        # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
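ExecFilterLibtool above follows a reusable pattern: run a child process, drop stderr lines matching known-noise regexes, and forward the rest. A stripped-down sketch; the noise pattern here is illustrative:

    import re
    import subprocess
    import sys

    noise = [re.compile(r"has no symbols$")]

    def run_filtered(cmd):
        # Run the child, capture stderr, and forward only the lines that
        # do not match a known-noise pattern.
        proc = subprocess.Popen(cmd, stderr=subprocess.PIPE)
        err = proc.communicate()[1].decode("utf-8")
        for line in err.splitlines():
            if not any(pattern.search(line) for pattern in noise):
                print(line, file=sys.stderr)
        return proc.returncode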
- env["ZERO_AR_DATE"] = "1" - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) - err = libtoolout.communicate()[1].decode("utf-8") - for line in err.splitlines(): - if not libtool_re.match(line) and not libtool_re5.match(line): - print(line, file=sys.stderr) - # Unconditionally touch the output .a file on the command line if present - # and the command succeeded. A bit hacky. - if not libtoolout.returncode: - for i in range(len(cmd_list) - 1): - if cmd_list[i] == "-o" and cmd_list[i + 1].endswith(".a"): - os.utime(cmd_list[i + 1], None) - break - return libtoolout.returncode - - def ExecPackageIosFramework(self, framework): - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split(".")[0] - module_path = os.path.join(framework, "Modules") - if not os.path.exists(module_path): - os.mkdir(module_path) - module_template = ( - "framework module %s {\n" - ' umbrella header "%s.h"\n' - "\n" - " export *\n" - " module * { export * }\n" - "}\n" % (binary, binary) - ) - - with open(os.path.join(module_path, "module.modulemap"), "w") as module_file: - module_file.write(module_template) - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split(".")[0] - - CURRENT = "Current" - RESOURCES = "Resources" - VERSIONS = "Versions" - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! - os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers): - framework_name = os.path.basename(framework).split(".")[0] - all_headers = [os.path.abspath(header) for header in all_headers] - filelist = {} - for header in all_headers: - filename = os.path.basename(header) - filelist[filename] = header - filelist[os.path.join(framework_name, filename)] = header - WriteHmap(out, filelist) - - def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers): - header_path = os.path.join(framework, "Headers") - if not os.path.exists(header_path): - os.makedirs(header_path) - for header in copy_headers: - shutil.copy(header, os.path.join(header_path, os.path.basename(header))) - - def ExecCompileXcassets(self, keys, *inputs): - """Compiles multiple .xcassets files into a single .car file. - - This invokes 'actool' to compile all the inputs .xcassets files. The - |keys| arguments is a json-encoded dictionary of extra arguments to - pass to 'actool' when the asset catalogs contains an application icon - or a launch image. - - Note that 'actool' does not create the Assets.car file if the asset - catalogs does not contains imageset. 
- """ - command_line = [ - "xcrun", - "actool", - "--output-format", - "human-readable-text", - "--compress-pngs", - "--notices", - "--warnings", - "--errors", - ] - is_iphone_target = "IPHONEOS_DEPLOYMENT_TARGET" in os.environ - if is_iphone_target: - platform = os.environ["CONFIGURATION"].split("-")[-1] - if platform not in ("iphoneos", "iphonesimulator"): - platform = "iphonesimulator" - command_line.extend( - [ - "--platform", - platform, - "--target-device", - "iphone", - "--target-device", - "ipad", - "--minimum-deployment-target", - os.environ["IPHONEOS_DEPLOYMENT_TARGET"], - "--compile", - os.path.abspath(os.environ["CONTENTS_FOLDER_PATH"]), - ] - ) - else: - command_line.extend( - [ - "--platform", - "macosx", - "--target-device", - "mac", - "--minimum-deployment-target", - os.environ["MACOSX_DEPLOYMENT_TARGET"], - "--compile", - os.path.abspath(os.environ["UNLOCALIZED_RESOURCES_FOLDER_PATH"]), - ] - ) - if keys: - keys = json.loads(keys) - for key, value in keys.items(): - arg_name = "--" + key - if isinstance(value, bool): - if value: - command_line.append(arg_name) - elif isinstance(value, list): - for v in value: - command_line.append(arg_name) - command_line.append(str(v)) - else: - command_line.append(arg_name) - command_line.append(str(value)) - # Note: actool crashes if inputs path are relative, so use os.path.abspath - # to get absolute path name for inputs. - command_line.extend(map(os.path.abspath, inputs)) - subprocess.check_call(command_line) - - def ExecMergeInfoPlist(self, output, *inputs): - """Merge multiple .plist files into a single .plist file.""" - merged_plist = {} - for path in inputs: - plist = self._LoadPlistMaybeBinary(path) - self._MergePlist(merged_plist, plist) - plistlib.writePlist(merged_plist, output) - - def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve): - """Code sign a bundle. - - This function tries to code sign an iOS bundle, following the same - algorithm as Xcode: - 1. pick the provisioning profile that best match the bundle identifier, - and copy it into the bundle as embedded.mobileprovision, - 2. copy Entitlements.plist from user or SDK next to the bundle, - 3. code sign the bundle. - """ - substitutions, overrides = self._InstallProvisioningProfile( - provisioning, self._GetCFBundleIdentifier() - ) - entitlements_path = self._InstallEntitlements( - entitlements, substitutions, overrides - ) - - args = ["codesign", "--force", "--sign", key] - if preserve == "True": - args.extend(["--deep", "--preserve-metadata=identifier,entitlements"]) - else: - args.extend(["--entitlements", entitlements_path]) - args.extend(["--timestamp=none", path]) - subprocess.check_call(args) - - def _InstallProvisioningProfile(self, profile, bundle_identifier): - """Installs embedded.mobileprovision into the bundle. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple containing two dictionary: variables substitutions and values - to overrides when generating the entitlements file. 
- """ - source_path, provisioning_data, team_id = self._FindProvisioningProfile( - profile, bundle_identifier - ) - target_path = os.path.join( - os.environ["BUILT_PRODUCTS_DIR"], - os.environ["CONTENTS_FOLDER_PATH"], - "embedded.mobileprovision", - ) - shutil.copy2(source_path, target_path) - substitutions = self._GetSubstitutions(bundle_identifier, team_id + ".") - return substitutions, provisioning_data["Entitlements"] - - def _FindProvisioningProfile(self, profile, bundle_identifier): - """Finds the .mobileprovision file to use for signing the bundle. - - Checks all the installed provisioning profiles (or if the user specified - the PROVISIONING_PROFILE variable, only consult it) and select the most - specific that correspond to the bundle identifier. - - Args: - profile: string, optional, short name of the .mobileprovision file - to use, if empty or the file is missing, the best file installed - will be used - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - - Returns: - A tuple of the path to the selected provisioning profile, the data of - the embedded plist in the provisioning profile and the team identifier - to use for code signing. - - Raises: - SystemExit: if no .mobileprovision can be used to sign the bundle. - """ - profiles_dir = os.path.join( - os.environ["HOME"], "Library", "MobileDevice", "Provisioning Profiles" - ) - if not os.path.isdir(profiles_dir): - print( - "cannot find mobile provisioning for %s" % (bundle_identifier), - file=sys.stderr, - ) - sys.exit(1) - provisioning_profiles = None - if profile: - profile_path = os.path.join(profiles_dir, profile + ".mobileprovision") - if os.path.exists(profile_path): - provisioning_profiles = [profile_path] - if not provisioning_profiles: - provisioning_profiles = glob.glob( - os.path.join(profiles_dir, "*.mobileprovision") - ) - valid_provisioning_profiles = {} - for profile_path in provisioning_profiles: - profile_data = self._LoadProvisioningProfile(profile_path) - app_id_pattern = profile_data.get("Entitlements", {}).get( - "application-identifier", "" - ) - for team_identifier in profile_data.get("TeamIdentifier", []): - app_id = f"{team_identifier}.{bundle_identifier}" - if fnmatch.fnmatch(app_id, app_id_pattern): - valid_provisioning_profiles[app_id_pattern] = ( - profile_path, - profile_data, - team_identifier, - ) - if not valid_provisioning_profiles: - print( - "cannot find mobile provisioning for %s" % (bundle_identifier), - file=sys.stderr, - ) - sys.exit(1) - # If the user has multiple provisioning profiles installed that can be - # used for ${bundle_identifier}, pick the most specific one (ie. the - # provisioning profile whose pattern is the longest). - selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) - return valid_provisioning_profiles[selected_key] - - def _LoadProvisioningProfile(self, profile_path): - """Extracts the plist embedded in a provisioning profile. - - Args: - profile_path: string, path to the .mobileprovision file - - Returns: - Content of the plist embedded in the provisioning profile as a dictionary. 
- """ - with tempfile.NamedTemporaryFile() as temp: - subprocess.check_call( - ["security", "cms", "-D", "-i", profile_path, "-o", temp.name] - ) - return self._LoadPlistMaybeBinary(temp.name) - - def _MergePlist(self, merged_plist, plist): - """Merge |plist| into |merged_plist|.""" - for key, value in plist.items(): - if isinstance(value, dict): - merged_value = merged_plist.get(key, {}) - if isinstance(merged_value, dict): - self._MergePlist(merged_value, value) - merged_plist[key] = merged_value - else: - merged_plist[key] = value - else: - merged_plist[key] = value - - def _LoadPlistMaybeBinary(self, plist_path): - """Loads into a memory a plist possibly encoded in binary format. - - This is a wrapper around plistlib.readPlist that tries to convert the - plist to the XML format if it can't be parsed (assuming that it is in - the binary format). - - Args: - plist_path: string, path to a plist file, in XML or binary format - - Returns: - Content of the plist as a dictionary. - """ - try: - # First, try to read the file using plistlib that only supports XML, - # and if an exception is raised, convert a temporary copy to XML and - # load that copy. - return plistlib.readPlist(plist_path) - except Exception: - pass - with tempfile.NamedTemporaryFile() as temp: - shutil.copy2(plist_path, temp.name) - subprocess.check_call(["plutil", "-convert", "xml1", temp.name]) - return plistlib.readPlist(temp.name) - - def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): - """Constructs a dictionary of variable substitutions for Entitlements.plist. - - Args: - bundle_identifier: string, value of CFBundleIdentifier from Info.plist - app_identifier_prefix: string, value for AppIdentifierPrefix - - Returns: - Dictionary of substitutions to apply when generating Entitlements.plist. - """ - return { - "CFBundleIdentifier": bundle_identifier, - "AppIdentifierPrefix": app_identifier_prefix, - } - - def _GetCFBundleIdentifier(self): - """Extracts CFBundleIdentifier value from Info.plist in the bundle. - - Returns: - Value of CFBundleIdentifier in the Info.plist located in the bundle. - """ - info_plist_path = os.path.join( - os.environ["TARGET_BUILD_DIR"], os.environ["INFOPLIST_PATH"] - ) - info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) - return info_plist_data["CFBundleIdentifier"] - - def _InstallEntitlements(self, entitlements, substitutions, overrides): - """Generates and install the ${BundleName}.xcent entitlements file. - - Expands variables "$(variable)" pattern in the source entitlements file, - add extra entitlements defined in the .mobileprovision file and the copy - the generated plist to "${BundlePath}.xcent". - - Args: - entitlements: string, optional, path to the Entitlements.plist template - to use, defaults to "${SDKROOT}/Entitlements.plist" - substitutions: dictionary, variable substitutions - overrides: dictionary, values to add to the entitlements - - Returns: - Path to the generated entitlements file. 
- """ - source_path = entitlements - target_path = os.path.join( - os.environ["BUILT_PRODUCTS_DIR"], os.environ["PRODUCT_NAME"] + ".xcent" - ) - if not source_path: - source_path = os.path.join(os.environ["SDKROOT"], "Entitlements.plist") - shutil.copy2(source_path, target_path) - data = self._LoadPlistMaybeBinary(target_path) - data = self._ExpandVariables(data, substitutions) - if overrides: - for key in overrides: - if key not in data: - data[key] = overrides[key] - plistlib.writePlist(data, target_path) - return target_path - - def _ExpandVariables(self, data, substitutions): - """Expands variables "$(variable)" in data. - - Args: - data: object, can be either string, list or dictionary - substitutions: dictionary, variable substitutions to perform - - Returns: - Copy of data where each references to "$(variable)" has been replaced - by the corresponding value found in substitutions, or left intact if - the key was not found. - """ - if isinstance(data, str): - for key, value in substitutions.items(): - data = data.replace("$(%s)" % key, value) - return data - if isinstance(data, list): - return [self._ExpandVariables(v, substitutions) for v in data] - if isinstance(data, dict): - return {k: self._ExpandVariables(data[k], substitutions) for k in data} - return data - - -def NextGreaterPowerOf2(x): - return 2 ** (x).bit_length() - - -def WriteHmap(output_name, filelist): - """Generates a header map based on |filelist|. - - Per Mark Mentovai: - A header map is structured essentially as a hash table, keyed by names used - in #includes, and providing pathnames to the actual files. - - The implementation below and the comment above comes from inspecting: - http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt - while also looking at the implementation in clang in: - https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp - """ - magic = 1751998832 - version = 1 - _reserved = 0 - count = len(filelist) - capacity = NextGreaterPowerOf2(count) - strings_offset = 24 + (12 * capacity) - max_value_length = max(len(value) for value in filelist.values()) - - out = open(output_name, "wb") - out.write( - struct.pack( - " 0 or arg.count("/") > 1: - arg = os.path.normpath(arg) - - # For a literal quote, CommandLineToArgvW requires 2n+1 backslashes - # preceding it, and results in n backslashes + the quote. So we substitute - # in 2* what we match, +1 more, plus the quote. - if quote_cmd: - arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg) - - # %'s also need to be doubled otherwise they're interpreted as batch - # positional arguments. Also make sure to escape the % so that they're - # passed literally through escaping so they can be singled to just the - # original %. Otherwise, trying to pass the literal representation that - # looks like an environment variable to the shell (e.g. %PATH%) would fail. - arg = arg.replace("%", "%%") - - # These commands are used in rsp files, so no escaping for the shell (via ^) - # is necessary. - - # As a workaround for programs that don't use CommandLineToArgvW, gyp - # supports msvs_quote_cmd=0, which simply disables all quoting. - if quote_cmd: - # Finally, wrap the whole thing in quotes so that the above quote rule - # applies and whitespace isn't a word break. - return f'"{arg}"' - - return arg - - -def EncodeRspFileList(args, quote_cmd): - """Process a list of arguments using QuoteCmdExeArgument.""" - # Note that the first argument is assumed to be the command. 
Don't add - # quotes around it because then built-ins like 'echo', etc. won't work. - # Take care to normpath only the path in the case of 'call ../x.bat' because - # otherwise the whole thing is incorrectly interpreted as a path and not - # normalized correctly. - if not args: - return "" - if args[0].startswith("call "): - call, program = args[0].split(" ", 1) - program = call + " " + os.path.normpath(program) - else: - program = os.path.normpath(args[0]) - return (program + " " - + " ".join(QuoteForRspFile(arg, quote_cmd) for arg in args[1:])) - - -def _GenericRetrieve(root, default, path): - """Given a list of dictionary keys |path| and a tree of dicts |root|, find - value at path, or return |default| if any of the path doesn't exist.""" - if not root: - return default - if not path: - return root - return _GenericRetrieve(root.get(path[0]), default, path[1:]) - - -def _AddPrefix(element, prefix): - """Add |prefix| to |element| or each subelement if element is iterable.""" - if element is None: - return element - # Note, not Iterable because we don't want to handle strings like that. - if isinstance(element, (list, tuple)): - return [prefix + e for e in element] - else: - return prefix + element - - -def _DoRemapping(element, map): - """If |element| then remap it through |map|. If |element| is iterable then - each item will be remapped. Any elements not found will be removed.""" - if map is not None and element is not None: - if not callable(map): - map = map.get # Assume it's a dict, otherwise a callable to do the remap. - if isinstance(element, (list, tuple)): - element = filter(None, [map(elem) for elem in element]) - else: - element = map(element) - return element - - -def _AppendOrReturn(append, element): - """If |append| is None, simply return |element|. If |append| is not None, - then add |element| to it, adding each item in |element| if it's a list or - tuple.""" - if append is not None and element is not None: - if isinstance(element, (list, tuple)): - append.extend(element) - else: - append.append(element) - else: - return element - - -def _FindDirectXInstallation(): - """Try to find an installation location for the DirectX SDK. Check for the - standard environment variable, and if that doesn't exist, try to find - via the registry. May return None if not found in either location.""" - # Return previously calculated value, if there is one - if hasattr(_FindDirectXInstallation, "dxsdk_dir"): - return _FindDirectXInstallation.dxsdk_dir - - dxsdk_dir = os.environ.get("DXSDK_DIR") - if not dxsdk_dir: - # Setup params to pass to and attempt to launch reg.exe. - cmd = ["reg.exe", "query", r"HKLM\Software\Microsoft\DirectX", "/s"] - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout = p.communicate()[0].decode("utf-8") - for line in stdout.splitlines(): - if "InstallPath" in line: - dxsdk_dir = line.split(" ")[3] + "\\" - - # Cache return value - _FindDirectXInstallation.dxsdk_dir = dxsdk_dir - return dxsdk_dir - - -def GetGlobalVSMacroEnv(vs_version): - """Get a dict of variables mapping internal VS macro names to their gyp - equivalents. Returns all variables that are independent of the target.""" - env = {} - # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when - # Visual Studio is actually installed. - if vs_version.Path(): - env["$(VSInstallDir)"] = vs_version.Path() - env["$(VCInstallDir)"] = os.path.join(vs_version.Path(), "VC") + "\\" - # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be - # set. 
This happens when the SDK is sync'd via src-internal, rather than - # by typical end-user installation of the SDK. If it's not set, we don't - # want to leave the unexpanded variable in the path, so simply strip it. - dxsdk_dir = _FindDirectXInstallation() - env["$(DXSDK_DIR)"] = dxsdk_dir if dxsdk_dir else "" - # Try to find an installation location for the Windows DDK by checking - # the WDK_DIR environment variable, may be None. - env["$(WDK_DIR)"] = os.environ.get("WDK_DIR", "") - return env - - -def ExtractSharedMSVSSystemIncludes(configs, generator_flags): - """Finds msvs_system_include_dirs that are common to all targets, removes - them from all targets, and returns an OrderedSet containing them.""" - all_system_includes = OrderedSet(configs[0].get("msvs_system_include_dirs", [])) - for config in configs[1:]: - system_includes = config.get("msvs_system_include_dirs", []) - all_system_includes = all_system_includes & OrderedSet(system_includes) - if not all_system_includes: - return None - # Expand macros in all_system_includes. - env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags)) - expanded_system_includes = OrderedSet( - [ExpandMacros(include, env) for include in all_system_includes] - ) - if any("$" in include for include in expanded_system_includes): - # Some path relies on target-specific variables, bail. - return None - - # Remove system includes shared by all targets from the targets. - for config in configs: - includes = config.get("msvs_system_include_dirs", []) - if includes: # Don't insert a msvs_system_include_dirs key if not needed. - # This must check the unexpanded includes list: - new_includes = [i for i in includes if i not in all_system_includes] - config["msvs_system_include_dirs"] = new_includes - return expanded_system_includes - - -class MsvsSettings: - """A class that understands the gyp 'msvs_...' values (especially the - msvs_settings field). They largely correpond to the VS2008 IDE DOM. This - class helps map those settings to command line options.""" - - def __init__(self, spec, generator_flags): - self.spec = spec - self.vs_version = GetVSVersion(generator_flags) - - supported_fields = [ - ("msvs_configuration_attributes", dict), - ("msvs_settings", dict), - ("msvs_system_include_dirs", list), - ("msvs_disabled_warnings", list), - ("msvs_precompiled_header", str), - ("msvs_precompiled_source", str), - ("msvs_configuration_platform", str), - ("msvs_target_platform", str), - ] - configs = spec["configurations"] - for field, default in supported_fields: - setattr(self, field, {}) - for configname, config in configs.items(): - getattr(self, field)[configname] = config.get(field, default()) - - self.msvs_cygwin_dirs = spec.get("msvs_cygwin_dirs", ["."]) - - unsupported_fields = [ - "msvs_prebuild", - "msvs_postbuild", - ] - unsupported = [] - for field in unsupported_fields: - for config in configs.values(): - if field in config: - unsupported += [ - "{} not supported (target {}).".format( - field, spec["target_name"] - ) - ] - if unsupported: - raise Exception("\n".join(unsupported)) - - def GetExtension(self): - """Returns the extension for the target, with no leading dot. - - Uses 'product_extension' if specified, otherwise uses MSVS defaults based on - the target type. 
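ExtractSharedMSVSSystemIncludes above boils down to an ordered intersection followed by subtraction. A toy version, without OrderedSet or macro expansion:

```python
# Keep an include only if every configuration lists it, preserving
# first-seen order the way OrderedSet intersection does.
configs = [
    ["C:/sdk/include", "C:/vc/include", "C:/extra"],
    ["C:/sdk/include", "C:/vc/include"],
]
shared = [d for d in configs[0] if all(d in c for c in configs[1:])]
print(shared)  # ['C:/sdk/include', 'C:/vc/include']

# The shared dirs are then stripped from each target and injected once
# into the INCLUDE environment variable (see GenerateEnvironmentFiles).
configs = [[d for d in c if d not in shared] for c in configs]
print(configs)  # [['C:/extra'], []]
```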
- """ - ext = self.spec.get("product_extension", None) - if ext: - return ext - return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec["type"], "") - - def GetVSMacroEnv(self, base_to_build=None, config=None): - """Get a dict of variables mapping internal VS macro names to their gyp - equivalents.""" - target_arch = self.GetArch(config) - target_platform = "Win32" if target_arch == "x86" else target_arch - target_name = self.spec.get("product_prefix", "") + self.spec.get( - "product_name", self.spec["target_name"] - ) - target_dir = base_to_build + "\\" if base_to_build else "" - target_ext = "." + self.GetExtension() - target_file_name = target_name + target_ext - - replacements = { - "$(InputName)": "${root}", - "$(InputPath)": "${source}", - "$(IntDir)": "$!INTERMEDIATE_DIR", - "$(OutDir)\\": target_dir, - "$(PlatformName)": target_platform, - "$(ProjectDir)\\": "", - "$(ProjectName)": self.spec["target_name"], - "$(TargetDir)\\": target_dir, - "$(TargetExt)": target_ext, - "$(TargetFileName)": target_file_name, - "$(TargetName)": target_name, - "$(TargetPath)": os.path.join(target_dir, target_file_name), - } - replacements.update(GetGlobalVSMacroEnv(self.vs_version)) - return replacements - - def ConvertVSMacros(self, s, base_to_build=None, config=None): - """Convert from VS macro names to something equivalent.""" - env = self.GetVSMacroEnv(base_to_build, config=config) - return ExpandMacros(s, env) - - def AdjustLibraries(self, libraries): - """Strip -l from library if it's specified with that.""" - libs = [lib[2:] if lib.startswith("-l") else lib for lib in libraries] - return [ - lib + ".lib" - if not lib.lower().endswith(".lib") and not lib.lower().endswith(".obj") - else lib - for lib in libs - ] - - def _GetAndMunge(self, field, path, default, prefix, append, map): - """Retrieve a value from |field| at |path| or return |default|. If - |append| is specified, and the item is found, it will be appended to that - object instead of returned. If |map| is specified, results will be - remapped through |map| before being returned or appended.""" - result = _GenericRetrieve(field, default, path) - result = _DoRemapping(result, map) - result = _AddPrefix(result, prefix) - return _AppendOrReturn(append, result) - - class _GetWrapper: - def __init__(self, parent, field, base_path, append=None): - self.parent = parent - self.field = field - self.base_path = [base_path] - self.append = append - - def __call__(self, name, map=None, prefix="", default=None): - return self.parent._GetAndMunge( - self.field, - self.base_path + [name], - default=default, - prefix=prefix, - append=self.append, - map=map, - ) - - def GetArch(self, config): - """Get architecture based on msvs_configuration_platform and - msvs_target_platform. Returns either 'x86' or 'x64'.""" - configuration_platform = self.msvs_configuration_platform.get(config, "") - platform = self.msvs_target_platform.get(config, "") - if not platform: # If no specific override, use the configuration's. - platform = configuration_platform - # Map from platform to architecture. - return {"Win32": "x86", "x64": "x64", "ARM64": "arm64"}.get(platform, "x86") - - def _TargetConfig(self, config): - """Returns the target-specific configuration.""" - # There's two levels of architecture/platform specification in VS. 
The - # first level is globally for the configuration (this is what we consider - # "the" config at the gyp level, which will be something like 'Debug' or - # 'Release'), VS2015 and later only use this level - if int(self.vs_version.short_name) >= 2015: - return config - # and a second target-specific configuration, which is an - # override for the global one. |config| is remapped here to take into - # account the local target-specific overrides to the global configuration. - arch = self.GetArch(config) - if arch == "x64" and not config.endswith("_x64"): - config += "_x64" - if arch == "x86" and config.endswith("_x64"): - config = config.rsplit("_", 1)[0] - return config - - def _Setting(self, path, config, default=None, prefix="", append=None, map=None): - """_GetAndMunge for msvs_settings.""" - return self._GetAndMunge( - self.msvs_settings[config], path, default, prefix, append, map - ) - - def _ConfigAttrib( - self, path, config, default=None, prefix="", append=None, map=None - ): - """_GetAndMunge for msvs_configuration_attributes.""" - return self._GetAndMunge( - self.msvs_configuration_attributes[config], - path, - default, - prefix, - append, - map, - ) - - def AdjustIncludeDirs(self, include_dirs, config): - """Updates include_dirs to expand VS specific paths, and adds the system - include dirs used for platform SDK and similar.""" - config = self._TargetConfig(config) - includes = include_dirs + self.msvs_system_include_dirs[config] - includes.extend( - self._Setting( - ("VCCLCompilerTool", "AdditionalIncludeDirectories"), config, default=[] - ) - ) - return [self.ConvertVSMacros(p, config=config) for p in includes] - - def AdjustMidlIncludeDirs(self, midl_include_dirs, config): - """Updates midl_include_dirs to expand VS specific paths, and adds the - system include dirs used for platform SDK and similar.""" - config = self._TargetConfig(config) - includes = midl_include_dirs + self.msvs_system_include_dirs[config] - includes.extend( - self._Setting( - ("VCMIDLTool", "AdditionalIncludeDirectories"), config, default=[] - ) - ) - return [self.ConvertVSMacros(p, config=config) for p in includes] - - def GetComputedDefines(self, config): - """Returns the set of defines that are injected to the defines list based - on other VS settings.""" - config = self._TargetConfig(config) - defines = [] - if self._ConfigAttrib(["CharacterSet"], config) == "1": - defines.extend(("_UNICODE", "UNICODE")) - if self._ConfigAttrib(["CharacterSet"], config) == "2": - defines.append("_MBCS") - defines.extend( - self._Setting( - ("VCCLCompilerTool", "PreprocessorDefinitions"), config, default=[] - ) - ) - return defines - - def GetCompilerPdbName(self, config, expand_special): - """Get the pdb file name that should be used for compiler invocations, or - None if there's no explicit name specified.""" - config = self._TargetConfig(config) - pdbname = self._Setting(("VCCLCompilerTool", "ProgramDataBaseFileName"), config) - if pdbname: - pdbname = expand_special(self.ConvertVSMacros(pdbname)) - return pdbname - - def GetMapFileName(self, config, expand_special): - """Gets the explicitly overridden map file name for a target or returns None - if it's not set.""" - config = self._TargetConfig(config) - map_file = self._Setting(("VCLinkerTool", "MapFileName"), config) - if map_file: - map_file = expand_special(self.ConvertVSMacros(map_file, config=config)) - return map_file - - def GetOutputName(self, config, expand_special): - """Gets the explicitly overridden output name for a target or returns None - if it's 
not overridden.""" - config = self._TargetConfig(config) - type = self.spec["type"] - root = "VCLibrarianTool" if type == "static_library" else "VCLinkerTool" - # TODO(scottmg): Handle OutputDirectory without OutputFile. - output_file = self._Setting((root, "OutputFile"), config) - if output_file: - output_file = expand_special( - self.ConvertVSMacros(output_file, config=config) - ) - return output_file - - def GetPDBName(self, config, expand_special, default): - """Gets the explicitly overridden pdb name for a target or returns - default if it's not overridden, or if no pdb will be generated.""" - config = self._TargetConfig(config) - output_file = self._Setting(("VCLinkerTool", "ProgramDatabaseFile"), config) - generate_debug_info = self._Setting( - ("VCLinkerTool", "GenerateDebugInformation"), config - ) - if generate_debug_info == "true": - if output_file: - return expand_special(self.ConvertVSMacros(output_file, config=config)) - else: - return default - else: - return None - - def GetNoImportLibrary(self, config): - """If NoImportLibrary: true, ninja will not expect the output to include - an import library.""" - config = self._TargetConfig(config) - noimplib = self._Setting(("NoImportLibrary",), config) - return noimplib == "true" - - def GetAsmflags(self, config): - """Returns the flags that need to be added to ml invocations.""" - config = self._TargetConfig(config) - asmflags = [] - safeseh = self._Setting(("MASM", "UseSafeExceptionHandlers"), config) - if safeseh == "true": - asmflags.append("/safeseh") - return asmflags - - def GetCflags(self, config): - """Returns the flags that need to be added to .c and .cc compilations.""" - config = self._TargetConfig(config) - cflags = [] - cflags.extend(["/wd" + w for w in self.msvs_disabled_warnings[config]]) - cl = self._GetWrapper( - self, self.msvs_settings[config], "VCCLCompilerTool", append=cflags - ) - cl( - "Optimization", - map={"0": "d", "1": "1", "2": "2", "3": "x"}, - prefix="/O", - default="2", - ) - cl("InlineFunctionExpansion", prefix="/Ob") - cl("DisableSpecificWarnings", prefix="/wd") - cl("StringPooling", map={"true": "/GF"}) - cl("EnableFiberSafeOptimizations", map={"true": "/GT"}) - cl("OmitFramePointers", map={"false": "-", "true": ""}, prefix="/Oy") - cl("EnableIntrinsicFunctions", map={"false": "-", "true": ""}, prefix="/Oi") - cl("FavorSizeOrSpeed", map={"1": "t", "2": "s"}, prefix="/O") - cl( - "FloatingPointModel", - map={"0": "precise", "1": "strict", "2": "fast"}, - prefix="/fp:", - default="0", - ) - cl("CompileAsManaged", map={"false": "", "true": "/clr"}) - cl("WholeProgramOptimization", map={"true": "/GL"}) - cl("WarningLevel", prefix="/W") - cl("WarnAsError", map={"true": "/WX"}) - cl( - "CallingConvention", - map={"0": "d", "1": "r", "2": "z", "3": "v"}, - prefix="/G", - ) - cl("DebugInformationFormat", map={"1": "7", "3": "i", "4": "I"}, prefix="/Z") - cl("RuntimeTypeInfo", map={"true": "/GR", "false": "/GR-"}) - cl("EnableFunctionLevelLinking", map={"true": "/Gy", "false": "/Gy-"}) - cl("MinimalRebuild", map={"true": "/Gm"}) - cl("BufferSecurityCheck", map={"true": "/GS", "false": "/GS-"}) - cl("BasicRuntimeChecks", map={"1": "s", "2": "u", "3": "1"}, prefix="/RTC") - cl( - "RuntimeLibrary", - map={"0": "T", "1": "Td", "2": "D", "3": "Dd"}, - prefix="/M", - ) - cl("ExceptionHandling", map={"1": "sc", "2": "a"}, prefix="/EH") - cl("DefaultCharIsUnsigned", map={"true": "/J"}) - cl( - "TreatWChar_tAsBuiltInType", - map={"false": "-", "true": ""}, - prefix="/Zc:wchar_t", - ) - cl("EnablePREfast", 
map={"true": "/analyze"}) - cl("AdditionalOptions", prefix="") - cl( - "EnableEnhancedInstructionSet", - map={"1": "SSE", "2": "SSE2", "3": "AVX", "4": "IA32", "5": "AVX2"}, - prefix="/arch:", - ) - cflags.extend( - [ - "/FI" + f - for f in self._Setting( - ("VCCLCompilerTool", "ForcedIncludeFiles"), config, default=[] - ) - ] - ) - if float(self.vs_version.project_version) >= 12.0: - # New flag introduced in VS2013 (project version 12.0) Forces writes to - # the program database (PDB) to be serialized through MSPDBSRV.EXE. - # https://msdn.microsoft.com/en-us/library/dn502518.aspx - cflags.append("/FS") - # ninja handles parallelism by itself, don't have the compiler do it too. - cflags = [x for x in cflags if not x.startswith("/MP")] - return cflags - - def _GetPchFlags(self, config, extension): - """Get the flags to be added to the cflags for precompiled header support.""" - config = self._TargetConfig(config) - # The PCH is only built once by a particular source file. Usage of PCH must - # only be for the same language (i.e. C vs. C++), so only include the pch - # flags when the language matches. - if self.msvs_precompiled_header[config]: - source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1] - if _LanguageMatchesForPch(source_ext, extension): - pch = self.msvs_precompiled_header[config] - pchbase = os.path.split(pch)[1] - return ["/Yu" + pch, "/FI" + pch, "/Fp${pchprefix}." + pchbase + ".pch"] - return [] - - def GetCflagsC(self, config): - """Returns the flags that need to be added to .c compilations.""" - config = self._TargetConfig(config) - return self._GetPchFlags(config, ".c") - - def GetCflagsCC(self, config): - """Returns the flags that need to be added to .cc compilations.""" - config = self._TargetConfig(config) - return ["/TP"] + self._GetPchFlags(config, ".cc") - - def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path): - """Get and normalize the list of paths in AdditionalLibraryDirectories - setting.""" - config = self._TargetConfig(config) - libpaths = self._Setting( - (root, "AdditionalLibraryDirectories"), config, default=[] - ) - libpaths = [ - os.path.normpath(gyp_to_build_path(self.ConvertVSMacros(p, config=config))) - for p in libpaths - ] - return ['/LIBPATH:"' + p + '"' for p in libpaths] - - def GetLibFlags(self, config, gyp_to_build_path): - """Returns the flags that need to be added to lib commands.""" - config = self._TargetConfig(config) - libflags = [] - lib = self._GetWrapper( - self, self.msvs_settings[config], "VCLibrarianTool", append=libflags - ) - libflags.extend( - self._GetAdditionalLibraryDirectories( - "VCLibrarianTool", config, gyp_to_build_path - ) - ) - lib("LinkTimeCodeGeneration", map={"true": "/LTCG"}) - lib( - "TargetMachine", - map={"1": "X86", "17": "X64", "3": "ARM"}, - prefix="/MACHINE:", - ) - lib("AdditionalOptions") - return libflags - - def GetDefFile(self, gyp_to_build_path): - """Returns the .def file from sources, if any. Otherwise returns None.""" - spec = self.spec - if spec["type"] in ("shared_library", "loadable_module", "executable"): - def_files = [ - s for s in spec.get("sources", []) if s.lower().endswith(".def") - ] - if len(def_files) == 1: - return gyp_to_build_path(def_files[0]) - elif len(def_files) > 1: - raise Exception("Multiple .def files") - return None - - def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path): - """.def files get implicitly converted to a ModuleDefinitionFile for the - linker in the VS generator. 
Emulate that behaviour here.""" - def_file = self.GetDefFile(gyp_to_build_path) - if def_file: - ldflags.append('/DEF:"%s"' % def_file) - - def GetPGDName(self, config, expand_special): - """Gets the explicitly overridden pgd name for a target or returns None - if it's not overridden.""" - config = self._TargetConfig(config) - output_file = self._Setting(("VCLinkerTool", "ProfileGuidedDatabase"), config) - if output_file: - output_file = expand_special( - self.ConvertVSMacros(output_file, config=config) - ) - return output_file - - def GetLdflags( - self, - config, - gyp_to_build_path, - expand_special, - manifest_base_name, - output_name, - is_executable, - build_dir, - ): - """Returns the flags that need to be added to link commands, and the - manifest files.""" - config = self._TargetConfig(config) - ldflags = [] - ld = self._GetWrapper( - self, self.msvs_settings[config], "VCLinkerTool", append=ldflags - ) - self._GetDefFileAsLdflags(ldflags, gyp_to_build_path) - ld("GenerateDebugInformation", map={"true": "/DEBUG"}) - # TODO: These 'map' values come from machineTypeOption enum, - # and does not have an official value for ARM64 in VS2017 (yet). - # It needs to verify the ARM64 value when machineTypeOption is updated. - ld( - "TargetMachine", - map={"1": "X86", "17": "X64", "3": "ARM", "18": "ARM64"}, - prefix="/MACHINE:", - ) - ldflags.extend( - self._GetAdditionalLibraryDirectories( - "VCLinkerTool", config, gyp_to_build_path - ) - ) - ld("DelayLoadDLLs", prefix="/DELAYLOAD:") - ld("TreatLinkerWarningAsErrors", prefix="/WX", map={"true": "", "false": ":NO"}) - out = self.GetOutputName(config, expand_special) - if out: - ldflags.append("/OUT:" + out) - pdb = self.GetPDBName(config, expand_special, output_name + ".pdb") - if pdb: - ldflags.append("/PDB:" + pdb) - pgd = self.GetPGDName(config, expand_special) - if pgd: - ldflags.append("/PGD:" + pgd) - map_file = self.GetMapFileName(config, expand_special) - ld("GenerateMapFile", map={"true": "/MAP:" + map_file if map_file else "/MAP"}) - ld("MapExports", map={"true": "/MAPINFO:EXPORTS"}) - ld("AdditionalOptions", prefix="") - - minimum_required_version = self._Setting( - ("VCLinkerTool", "MinimumRequiredVersion"), config, default="" - ) - if minimum_required_version: - minimum_required_version = "," + minimum_required_version - ld( - "SubSystem", - map={ - "1": "CONSOLE%s" % minimum_required_version, - "2": "WINDOWS%s" % minimum_required_version, - }, - prefix="/SUBSYSTEM:", - ) - - stack_reserve_size = self._Setting( - ("VCLinkerTool", "StackReserveSize"), config, default="" - ) - if stack_reserve_size: - stack_commit_size = self._Setting( - ("VCLinkerTool", "StackCommitSize"), config, default="" - ) - if stack_commit_size: - stack_commit_size = "," + stack_commit_size - ldflags.append(f"/STACK:{stack_reserve_size}{stack_commit_size}") - - ld("TerminalServerAware", map={"1": ":NO", "2": ""}, prefix="/TSAWARE") - ld("LinkIncremental", map={"1": ":NO", "2": ""}, prefix="/INCREMENTAL") - ld("BaseAddress", prefix="/BASE:") - ld("FixedBaseAddress", map={"1": ":NO", "2": ""}, prefix="/FIXED") - ld("RandomizedBaseAddress", map={"1": ":NO", "2": ""}, prefix="/DYNAMICBASE") - ld("DataExecutionPrevention", map={"1": ":NO", "2": ""}, prefix="/NXCOMPAT") - ld("OptimizeReferences", map={"1": "NOREF", "2": "REF"}, prefix="/OPT:") - ld("ForceSymbolReferences", prefix="/INCLUDE:") - ld("EnableCOMDATFolding", map={"1": "NOICF", "2": "ICF"}, prefix="/OPT:") - ld( - "LinkTimeCodeGeneration", - map={"1": "", "2": ":PGINSTRUMENT", "3": ":PGOPTIMIZE", "4": 
":PGUPDATE"}, - prefix="/LTCG", - ) - ld("IgnoreDefaultLibraryNames", prefix="/NODEFAULTLIB:") - ld("ResourceOnlyDLL", map={"true": "/NOENTRY"}) - ld("EntryPointSymbol", prefix="/ENTRY:") - ld("Profile", map={"true": "/PROFILE"}) - ld("LargeAddressAware", map={"1": ":NO", "2": ""}, prefix="/LARGEADDRESSAWARE") - # TODO(scottmg): This should sort of be somewhere else (not really a flag). - ld("AdditionalDependencies", prefix="") - - safeseh_default = "true" if self.GetArch(config) == "x86" else None - ld( - "ImageHasSafeExceptionHandlers", - map={"false": ":NO", "true": ""}, - prefix="/SAFESEH", - default=safeseh_default, - ) - - # If the base address is not specifically controlled, DYNAMICBASE should - # be on by default. - if not any("DYNAMICBASE" in flag or flag == "/FIXED" for flag in ldflags): - ldflags.append("/DYNAMICBASE") - - # If the NXCOMPAT flag has not been specified, default to on. Despite the - # documentation that says this only defaults to on when the subsystem is - # Vista or greater (which applies to the linker), the IDE defaults it on - # unless it's explicitly off. - if not any("NXCOMPAT" in flag for flag in ldflags): - ldflags.append("/NXCOMPAT") - - have_def_file = any(flag.startswith("/DEF:") for flag in ldflags) - ( - manifest_flags, - intermediate_manifest, - manifest_files, - ) = self._GetLdManifestFlags( - config, - manifest_base_name, - gyp_to_build_path, - is_executable and not have_def_file, - build_dir, - ) - ldflags.extend(manifest_flags) - return ldflags, intermediate_manifest, manifest_files - - def _GetLdManifestFlags( - self, config, name, gyp_to_build_path, allow_isolation, build_dir - ): - """Returns a 3-tuple: - - the set of flags that need to be added to the link to generate - a default manifest - - the intermediate manifest that the linker will generate that should be - used to assert it doesn't add anything to the merged one. - - the list of all the manifest files to be merged by the manifest tool and - included into the link.""" - generate_manifest = self._Setting( - ("VCLinkerTool", "GenerateManifest"), config, default="true" - ) - if generate_manifest != "true": - # This means not only that the linker should not generate the intermediate - # manifest but also that the manifest tool should do nothing even when - # additional manifests are specified. - return ["/MANIFEST:NO"], [], [] - - output_name = name + ".intermediate.manifest" - flags = [ - "/MANIFEST", - "/ManifestFile:" + output_name, - ] - - # Instead of using the MANIFESTUAC flags, we generate a .manifest to - # include into the list of manifests. This allows us to avoid the need to - # do two passes during linking. The /MANIFEST flag and /ManifestFile are - # still used, and the intermediate manifest is used to assert that the - # final manifest we get from merging all the additional manifest files - # (plus the one we generate here) isn't modified by merging the - # intermediate into it. - - # Always NO, because we generate a manifest file that has what we want. 
- flags.append("/MANIFESTUAC:NO") - - config = self._TargetConfig(config) - enable_uac = self._Setting( - ("VCLinkerTool", "EnableUAC"), config, default="true" - ) - manifest_files = [] - generated_manifest_outer = ( - "" - "" - "%s" - ) - if enable_uac == "true": - execution_level = self._Setting( - ("VCLinkerTool", "UACExecutionLevel"), config, default="0" - ) - execution_level_map = { - "0": "asInvoker", - "1": "highestAvailable", - "2": "requireAdministrator", - } - - ui_access = self._Setting( - ("VCLinkerTool", "UACUIAccess"), config, default="false" - ) - - level = execution_level_map[execution_level] - inner = f""" - - - - - - -""" - else: - inner = "" - - generated_manifest_contents = generated_manifest_outer % inner - generated_name = name + ".generated.manifest" - # Need to join with the build_dir here as we're writing it during - # generation time, but we return the un-joined version because the build - # will occur in that directory. We only write the file if the contents - # have changed so that simply regenerating the project files doesn't - # cause a relink. - build_dir_generated_name = os.path.join(build_dir, generated_name) - gyp.common.EnsureDirExists(build_dir_generated_name) - f = gyp.common.WriteOnDiff(build_dir_generated_name) - f.write(generated_manifest_contents) - f.close() - manifest_files = [generated_name] - - if allow_isolation: - flags.append("/ALLOWISOLATION") - - manifest_files += self._GetAdditionalManifestFiles(config, gyp_to_build_path) - return flags, output_name, manifest_files - - def _GetAdditionalManifestFiles(self, config, gyp_to_build_path): - """Gets additional manifest files that are added to the default one - generated by the linker.""" - files = self._Setting( - ("VCManifestTool", "AdditionalManifestFiles"), config, default=[] - ) - if isinstance(files, str): - files = files.split(";") - return [ - os.path.normpath(gyp_to_build_path(self.ConvertVSMacros(f, config=config))) - for f in files - ] - - def IsUseLibraryDependencyInputs(self, config): - """Returns whether the target should be linked via Use Library Dependency - Inputs (using component .objs of a given .lib).""" - config = self._TargetConfig(config) - uldi = self._Setting(("VCLinkerTool", "UseLibraryDependencyInputs"), config) - return uldi == "true" - - def IsEmbedManifest(self, config): - """Returns whether manifest should be linked into binary.""" - config = self._TargetConfig(config) - embed = self._Setting( - ("VCManifestTool", "EmbedManifest"), config, default="true" - ) - return embed == "true" - - def IsLinkIncremental(self, config): - """Returns whether the target should be linked incrementally.""" - config = self._TargetConfig(config) - link_inc = self._Setting(("VCLinkerTool", "LinkIncremental"), config) - return link_inc != "1" - - def GetRcflags(self, config, gyp_to_ninja_path): - """Returns the flags that need to be added to invocations of the resource - compiler.""" - config = self._TargetConfig(config) - rcflags = [] - rc = self._GetWrapper( - self, self.msvs_settings[config], "VCResourceCompilerTool", append=rcflags - ) - rc("AdditionalIncludeDirectories", map=gyp_to_ninja_path, prefix="/I") - rcflags.append("/I" + gyp_to_ninja_path(".")) - rc("PreprocessorDefinitions", prefix="/d") - # /l arg must be in hex without leading '0x' - rc("Culture", prefix="/l", map=lambda x: hex(int(x))[2:]) - return rcflags - - def BuildCygwinBashCommandLine(self, args, path_to_base): - """Build a command line that runs args via cygwin bash. 
We assume that all - incoming paths are in Windows normpath'd form, so they need to be - converted to posix style for the part of the command line that's passed to - bash. We also have to do some Visual Studio macro emulation here because - various rules use magic VS names for things. Also note that rules that - contain ninja variables cannot be fixed here (for example ${source}), so - the outer generator needs to make sure that the paths that are written out - are in posix style, if the command line will be used here.""" - cygwin_dir = os.path.normpath( - os.path.join(path_to_base, self.msvs_cygwin_dirs[0]) - ) - cd = ("cd %s" % path_to_base).replace("\\", "/") - args = [a.replace("\\", "/").replace('"', '\\"') for a in args] - args = ["'%s'" % a.replace("'", "'\\''") for a in args] - bash_cmd = " ".join(args) - cmd = ( - 'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir - + f'bash -c "{cd} ; {bash_cmd}"' - ) - return cmd - - RuleShellFlags = collections.namedtuple("RuleShellFlags", ["cygwin", "quote"]) - - def GetRuleShellFlags(self, rule): - """Return RuleShellFlags about how the given rule should be run. This - includes whether it should run under cygwin (msvs_cygwin_shell), and - whether the commands should be quoted (msvs_quote_cmd).""" - # If the variable is unset, or set to 1 we use cygwin - cygwin = int(rule.get("msvs_cygwin_shell", - self.spec.get("msvs_cygwin_shell", 1))) != 0 - # Default to quoting. There's only a few special instances where the - # target command uses non-standard command line parsing and handle quotes - # and quote escaping differently. - quote_cmd = int(rule.get("msvs_quote_cmd", 1)) - assert quote_cmd != 0 or cygwin != 1, \ - "msvs_quote_cmd=0 only applicable for msvs_cygwin_shell=0" - return MsvsSettings.RuleShellFlags(cygwin, quote_cmd) - - def _HasExplicitRuleForExtension(self, spec, extension): - """Determine if there's an explicit rule for a particular extension.""" - return any(rule["extension"] == extension for rule in spec.get("rules", [])) - - def _HasExplicitIdlActions(self, spec): - """Determine if an action should not run midl for .idl files.""" - return any( - action.get("explicit_idl_action", 0) for action in spec.get("actions", []) - ) - - def HasExplicitIdlRulesOrActions(self, spec): - """Determine if there's an explicit rule or action for idl files. When - there isn't we need to generate implicit rules to build MIDL .idl files.""" - return self._HasExplicitRuleForExtension( - spec, "idl" - ) or self._HasExplicitIdlActions(spec) - - def HasExplicitAsmRules(self, spec): - """Determine if there's an explicit rule for asm files. When there isn't we - need to generate implicit rules to assemble .asm files.""" - return self._HasExplicitRuleForExtension(spec, "asm") - - def GetIdlBuildData(self, source, config): - """Determine the implicit outputs for an idl file. 
Returns output - directory, outputs, and variables and flags that are required.""" - config = self._TargetConfig(config) - midl_get = self._GetWrapper(self, self.msvs_settings[config], "VCMIDLTool") - - def midl(name, default=None): - return self.ConvertVSMacros(midl_get(name, default=default), config=config) - - tlb = midl("TypeLibraryName", default="${root}.tlb") - header = midl("HeaderFileName", default="${root}.h") - dlldata = midl("DLLDataFileName", default="dlldata.c") - iid = midl("InterfaceIdentifierFileName", default="${root}_i.c") - proxy = midl("ProxyFileName", default="${root}_p.c") - # Note that .tlb is not included in the outputs as it is not always - # generated depending on the content of the input idl file. - outdir = midl("OutputDirectory", default="") - output = [header, dlldata, iid, proxy] - variables = [ - ("tlb", tlb), - ("h", header), - ("dlldata", dlldata), - ("iid", iid), - ("proxy", proxy), - ] - # TODO(scottmg): Are there configuration settings to set these flags? - target_platform = self.GetArch(config) - if target_platform == "x86": - target_platform = "win32" - flags = ["/char", "signed", "/env", target_platform, "/Oicf"] - return outdir, output, variables, flags - - -def _LanguageMatchesForPch(source_ext, pch_source_ext): - c_exts = (".c",) - cc_exts = (".cc", ".cxx", ".cpp") - return (source_ext in c_exts and pch_source_ext in c_exts) or ( - source_ext in cc_exts and pch_source_ext in cc_exts - ) - - -class PrecompiledHeader: - """Helper to generate dependencies and build rules to handle generation of - precompiled headers. Interface matches the GCH handler in xcode_emulation.py. - """ - - def __init__( - self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext - ): - self.settings = settings - self.config = config - pch_source = self.settings.msvs_precompiled_source[self.config] - self.pch_source = gyp_to_build_path(pch_source) - filename, _ = os.path.splitext(pch_source) - self.output_obj = gyp_to_unique_output(filename + obj_ext).lower() - - def _PchHeader(self): - """Get the header that will appear in an #include line for all source - files.""" - return self.settings.msvs_precompiled_header[self.config] - - def GetObjDependencies(self, sources, objs, arch): - """Given a list of sources files and the corresponding object files, - returns a list of the pch files that should be depended upon. 
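_LanguageMatchesForPch is the whole reason GetObjDependencies can handle mixed-language targets: a precompiled header only applies to sources of the same language. The predicate, restated with a usage example:

```python
c_exts = (".c",)
cc_exts = (".cc", ".cxx", ".cpp")

def language_matches(source_ext, pch_source_ext):
    # Same predicate as the deleted _LanguageMatchesForPch.
    return (source_ext in c_exts and pch_source_ext in c_exts) or (
        source_ext in cc_exts and pch_source_ext in cc_exts
    )

print(language_matches(".cpp", ".cc"))  # True: both C++, so the PCH applies
print(language_matches(".c", ".cc"))    # False: a C source cannot use a C++ PCH
```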
The - additional wrapping in the return value is for interface compatibility - with make.py on Mac, and xcode_emulation.py.""" - assert arch is None - if not self._PchHeader(): - return [] - pch_ext = os.path.splitext(self.pch_source)[1] - for source in sources: - if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext): - return [(None, None, self.output_obj)] - return [] - - def GetPchBuildCommands(self, arch): - """Not used on Windows as there are no additional build steps required - (instead, existing steps are modified in GetFlagsModifications below).""" - return [] - - def GetFlagsModifications( - self, input, output, implicit, command, cflags_c, cflags_cc, expand_special - ): - """Get the modified cflags and implicit dependencies that should be used - for the pch compilation step.""" - if input == self.pch_source: - pch_output = ["/Yc" + self._PchHeader()] - if command == "cxx": - return ( - [("cflags_cc", map(expand_special, cflags_cc + pch_output))], - self.output_obj, - [], - ) - elif command == "cc": - return ( - [("cflags_c", map(expand_special, cflags_c + pch_output))], - self.output_obj, - [], - ) - return [], output, implicit - - -vs_version = None - - -def GetVSVersion(generator_flags): - global vs_version - if not vs_version: - vs_version = gyp.MSVSVersion.SelectVisualStudioVersion( - generator_flags.get("msvs_version", "auto"), allow_fallback=False - ) - return vs_version - - -def _GetVsvarsSetupArgs(generator_flags, arch): - vs = GetVSVersion(generator_flags) - return vs.SetupScript() - - -def ExpandMacros(string, expansions): - """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv - for the canonical way to retrieve a suitable dict.""" - if "$" in string: - for old, new in expansions.items(): - assert "$(" not in new, new - string = string.replace(old, new) - return string - - -def _ExtractImportantEnvironment(output_of_set): - """Extracts environment variables required for the toolchain to run from - a textual dump output by the cmd.exe 'set' command.""" - envvars_to_save = ( - "goma_.*", # TODO(scottmg): This is ugly, but needed for goma. - "include", - "lib", - "libpath", - "path", - "pathext", - "systemroot", - "temp", - "tmp", - ) - env = {} - # This occasionally happens and leads to misleading SYSTEMROOT error messages - # if not caught here. - if output_of_set.count("=") == 0: - raise Exception("Invalid output_of_set. Value is:\n%s" % output_of_set) - for line in output_of_set.splitlines(): - for envvar in envvars_to_save: - if re.match(envvar + "=", line.lower()): - var, setting = line.split("=", 1) - if envvar == "path": - # Our own rules (for running gyp-win-tool) and other actions in - # Chromium rely on python being in the path. Add the path to this - # python here so that if it's not in the path when ninja is run - # later, python will still be found. - setting = os.path.dirname(sys.executable) + os.pathsep + setting - env[var.upper()] = setting - break - for required in ("SYSTEMROOT", "TEMP", "TMP"): - if required not in env: - raise Exception( - 'Environment variable "%s" ' - "required to be set to valid path" % required - ) - return env - - -def _FormatAsEnvironmentBlock(envvar_dict): - """Format as an 'environment block' directly suitable for CreateProcess. - Briefly this is a list of key=value\0, terminated by an additional \0. 
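The block format described here round-trips in a few lines; the parsing half is exactly what win_tool.py's _GetEnv does with the generated environment file:

```python
def format_block(env):
    # key=value pairs, each NUL-terminated, plus one extra trailing NUL.
    return "".join(f"{k}={v}\0" for k, v in env.items()) + "\0"

def parse_block(block):
    # Drop the two trailing NULs, split on NUL, split each pair on '='.
    return dict(item.split("=", 1) for item in block[:-2].split("\0"))

env = {"PATH": r"C:\tools;C:\Windows", "INCLUDE": r"C:\sdk\include"}
assert parse_block(format_block(env)) == env
```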
See - CreateProcess documentation for more details.""" - block = "" - nul = "\0" - for key, value in envvar_dict.items(): - block += key + "=" + value + nul - block += nul - return block - - -def _ExtractCLPath(output_of_where): - """Gets the path to cl.exe based on the output of calling the environment - setup batch file, followed by the equivalent of `where`.""" - # Take the first line, as that's the first found in the PATH. - for line in output_of_where.strip().splitlines(): - if line.startswith("LOC:"): - return line[len("LOC:") :].strip() - - -def GenerateEnvironmentFiles( - toplevel_build_dir, generator_flags, system_includes, open_out -): - """It's not sufficient to have the absolute path to the compiler, linker, - etc. on Windows, as those tools rely on .dlls being in the PATH. We also - need to support both x86 and x64 compilers within the same build (to support - msvs_target_platform hackery). Different architectures require a different - compiler binary, and different supporting environment variables (INCLUDE, - LIB, LIBPATH). So, we extract the environment here, wrap all invocations - of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which - sets up the environment, and then we do not prefix the compiler with - an absolute path, instead preferring something like "cl.exe" in the rule - which will then run whichever the environment setup has put in the path. - When the following procedure to generate environment files does not - meet your requirement (e.g. for custom toolchains), you can pass - "-G ninja_use_custom_environment_files" to the gyp to suppress file - generation and use custom environment files prepared by yourself.""" - archs = ("x86", "x64") - if generator_flags.get("ninja_use_custom_environment_files", 0): - cl_paths = {} - for arch in archs: - cl_paths[arch] = "cl.exe" - return cl_paths - vs = GetVSVersion(generator_flags) - cl_paths = {} - for arch in archs: - # Extract environment variables for subprocesses. - args = vs.SetupScript(arch) - args.extend(("&&", "set")) - popen = subprocess.Popen( - args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - variables = popen.communicate()[0].decode("utf-8") - if popen.returncode != 0: - raise Exception('"%s" failed with error %d' % (args, popen.returncode)) - env = _ExtractImportantEnvironment(variables) - - # Inject system includes from gyp files into INCLUDE. - if system_includes: - system_includes = system_includes | OrderedSet( - env.get("INCLUDE", "").split(";") - ) - env["INCLUDE"] = ";".join(system_includes) - - env_block = _FormatAsEnvironmentBlock(env) - f = open_out(os.path.join(toplevel_build_dir, "environment." + arch), "w") - f.write(env_block) - f.close() - - # Find cl.exe location for this architecture. - args = vs.SetupScript(arch) - args.extend( - ("&&", "for", "%i", "in", "(cl.exe)", "do", "@echo", "LOC:%~$PATH:i") - ) - popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE) - output = popen.communicate()[0].decode("utf-8") - cl_paths[arch] = _ExtractCLPath(output) - return cl_paths - - -def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja): - """Emulate behavior of msvs_error_on_missing_sources present in the msvs - generator: Check that all regular source files, i.e. not created at run time, - exist on disk. 
Missing files cause needless recompilation when building via - VS, and we want this check to match for people/bots that build using ninja, - so they're not surprised when the VS build fails.""" - if int(generator_flags.get("msvs_error_on_missing_sources", 0)): - no_specials = filter(lambda x: "$" not in x, sources) - relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials] - missing = [x for x in relative if not os.path.exists(x)] - if missing: - # They'll look like out\Release\..\..\stuff\things.cc, so normalize the - # path for a slightly less crazy looking output. - cleaned_up = [os.path.normpath(x) for x in missing] - raise Exception("Missing input files:\n%s" % "\n".join(cleaned_up)) - - -# Sets some values in default_variables, which are required for many -# generators, run on Windows. -def CalculateCommonVariables(default_variables, params): - generator_flags = params.get("generator_flags", {}) - - # Set a variable so conditions can be based on msvs_version. - msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags) - default_variables["MSVS_VERSION"] = msvs_version.ShortName() - - # To determine processor word size on Windows, in addition to checking - # PROCESSOR_ARCHITECTURE (which reflects the word size of the current - # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which - # contains the actual word size of the system when running thru WOW64). - if "64" in os.environ.get("PROCESSOR_ARCHITECTURE", "") or "64" in os.environ.get( - "PROCESSOR_ARCHITEW6432", "" - ): - default_variables["MSVS_OS_BITS"] = 64 - else: - default_variables["MSVS_OS_BITS"] = 32 diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py deleted file mode 100644 index 0e3e86c7430d0..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py +++ /dev/null @@ -1,174 +0,0 @@ -# This file comes from -# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py -# Do not edit! Edit the upstream one instead. - -"""Python module for generating .ninja files. - -Note that this is emphatically not a required piece of Ninja; it's -just a helpful utility for build-file-generation systems that already -use Python. -""" - -import textwrap - - -def escape_path(word): - return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:") - - -class Writer: - def __init__(self, output, width=78): - self.output = output - self.width = width - - def newline(self): - self.output.write("\n") - - def comment(self, text): - for line in textwrap.wrap(text, self.width - 2): - self.output.write("# " + line + "\n") - - def variable(self, key, value, indent=0): - if value is None: - return - if isinstance(value, list): - value = " ".join(filter(None, value)) # Filter out empty strings. 
- self._line(f"{key} = {value}", indent) - - def pool(self, name, depth): - self._line("pool %s" % name) - self.variable("depth", depth, indent=1) - - def rule( - self, - name, - command, - description=None, - depfile=None, - generator=False, - pool=None, - restat=False, - rspfile=None, - rspfile_content=None, - deps=None, - ): - self._line("rule %s" % name) - self.variable("command", command, indent=1) - if description: - self.variable("description", description, indent=1) - if depfile: - self.variable("depfile", depfile, indent=1) - if generator: - self.variable("generator", "1", indent=1) - if pool: - self.variable("pool", pool, indent=1) - if restat: - self.variable("restat", "1", indent=1) - if rspfile: - self.variable("rspfile", rspfile, indent=1) - if rspfile_content: - self.variable("rspfile_content", rspfile_content, indent=1) - if deps: - self.variable("deps", deps, indent=1) - - def build( - self, outputs, rule, inputs=None, implicit=None, order_only=None, variables=None - ): - outputs = self._as_list(outputs) - all_inputs = self._as_list(inputs)[:] - out_outputs = list(map(escape_path, outputs)) - all_inputs = list(map(escape_path, all_inputs)) - - if implicit: - implicit = map(escape_path, self._as_list(implicit)) - all_inputs.append("|") - all_inputs.extend(implicit) - if order_only: - order_only = map(escape_path, self._as_list(order_only)) - all_inputs.append("||") - all_inputs.extend(order_only) - - self._line( - "build {}: {}".format(" ".join(out_outputs), " ".join([rule] + all_inputs)) - ) - - if variables: - if isinstance(variables, dict): - iterator = iter(variables.items()) - else: - iterator = iter(variables) - - for key, val in iterator: - self.variable(key, val, indent=1) - - return outputs - - def include(self, path): - self._line("include %s" % path) - - def subninja(self, path): - self._line("subninja %s" % path) - - def default(self, paths): - self._line("default %s" % " ".join(self._as_list(paths))) - - def _count_dollars_before_index(self, s, i): - """Returns the number of '$' characters right in front of s[i].""" - dollar_count = 0 - dollar_index = i - 1 - while dollar_index > 0 and s[dollar_index] == "$": - dollar_count += 1 - dollar_index -= 1 - return dollar_count - - def _line(self, text, indent=0): - """Write 'text' word-wrapped at self.width characters.""" - leading_space = " " * indent - while len(leading_space) + len(text) > self.width: - # The text is too wide; wrap if possible. - - # Find the rightmost space that would obey our width constraint and - # that's not an escaped space. - available_space = self.width - len(leading_space) - len(" $") - space = available_space - while True: - space = text.rfind(" ", 0, space) - if space < 0 or self._count_dollars_before_index(text, space) % 2 == 0: - break - - if space < 0: - # No such space; just use the first unescaped space we can find. - space = available_space - 1 - while True: - space = text.find(" ", space + 1) - if ( - space < 0 - or self._count_dollars_before_index(text, space) % 2 == 0 - ): - break - if space < 0: - # Give up on breaking. - break - - self.output.write(leading_space + text[0:space] + " $\n") - text = text[space + 1 :] - - # Subsequent lines are continuations, so indent them. 
- leading_space = " " * (indent + 2) - - self.output.write(leading_space + text + "\n") - - def _as_list(self, input): - if input is None: - return [] - if isinstance(input, list): - return input - return [input] - - -def escape(string): - """Escape a string such that it can be embedded into a Ninja file without - further interpretation.""" - assert "\n" not in string, "Ninja syntax does not allow newlines" - # We only have one special metacharacter: '$'. - return string.replace("$", "$$") diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py deleted file mode 100644 index 729cec0636273..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2014 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""A clone of the default copy.deepcopy that doesn't handle cyclic -structures or complex types except for dicts and lists. This is -because gyp copies so large structure that small copy overhead ends up -taking seconds in a project the size of Chromium.""" - - -class Error(Exception): - pass - - -__all__ = ["Error", "deepcopy"] - - -def deepcopy(x): - """Deep copy operation on gyp objects such as strings, ints, dicts - and lists. More than twice as fast as copy.deepcopy but much less - generic.""" - - try: - return _deepcopy_dispatch[type(x)](x) - except KeyError: - raise Error( - "Unsupported type %s for deepcopy. Use copy.deepcopy " - + "or expand simple_copy support." % type(x) - ) - - -_deepcopy_dispatch = d = {} - - -def _deepcopy_atomic(x): - return x - - -types = bool, float, int, str, type, type(None) - -for x in types: - d[x] = _deepcopy_atomic - - -def _deepcopy_list(x): - return [deepcopy(a) for a in x] - - -d[list] = _deepcopy_list - - -def _deepcopy_dict(x): - y = {} - for key, value in x.items(): - y[deepcopy(key)] = deepcopy(value) - return y - - -d[dict] = _deepcopy_dict - -del d diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py deleted file mode 100755 index 171d7295747fc..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py +++ /dev/null @@ -1,373 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions for Windows builds. - -These functions are executed via gyp-win-tool when using the ninja generator. -""" - - -import os -import re -import shutil -import subprocess -import stat -import string -import sys - -BASE_DIR = os.path.dirname(os.path.abspath(__file__)) - -# A regex matching an argument corresponding to the output filename passed to -# link.exe. -_LINK_EXE_OUT_ARG = re.compile("/OUT:(?P.+)$", re.IGNORECASE) - - -def main(args): - executor = WinTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class WinTool: - """This class performs all the Windows tooling steps. 
The methods can either - be executed directly, or dispatched from an argument list.""" - - def _UseSeparateMspdbsrv(self, env, args): - """Allows to use a unique instance of mspdbsrv.exe per linker instead of a - shared one.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - if args[0] != "link.exe": - return - - # Use the output filename passed to the linker to generate an endpoint name - # for mspdbsrv.exe. - endpoint_name = None - for arg in args: - m = _LINK_EXE_OUT_ARG.match(arg) - if m: - endpoint_name = re.sub( - r"\W+", "", "%s_%d" % (m.group("out"), os.getpid()) - ) - break - - if endpoint_name is None: - return - - # Adds the appropriate environment variable. This will be read by link.exe - # to know which instance of mspdbsrv.exe it should connect to (if it's - # not set then the default endpoint is used). - env["_MSPDBSRV_ENDPOINT_"] = endpoint_name - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like recursive-mirror to RecursiveMirror.""" - return name_string.title().replace("-", "") - - def _GetEnv(self, arch): - """Gets the saved environment from a file for a given architecture.""" - # The environment is saved as an "environment block" (see CreateProcess - # and msvs_emulation for details). We convert to a dict here. - # Drop last 2 NULs, one for list terminator, one for trailing vs. separator. - pairs = open(arch).read()[:-2].split("\0") - kvs = [item.split("=", 1) for item in pairs] - return dict(kvs) - - def ExecStamp(self, path): - """Simple stamp command.""" - open(path, "w").close() - - def ExecRecursiveMirror(self, source, dest): - """Emulation of rm -rf out && cp -af in out.""" - if os.path.exists(dest): - if os.path.isdir(dest): - - def _on_error(fn, path, excinfo): - # The operation failed, possibly because the file is set to - # read-only. If that's why, make it writable and try the op again. - if not os.access(path, os.W_OK): - os.chmod(path, stat.S_IWRITE) - fn(path) - - shutil.rmtree(dest, onerror=_on_error) - else: - if not os.access(dest, os.W_OK): - # Attempt to make the file writable before deleting it. - os.chmod(dest, stat.S_IWRITE) - os.unlink(dest) - - if os.path.isdir(source): - shutil.copytree(source, dest) - else: - shutil.copy2(source, dest) - - def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args): - """Filter diagnostic output from link that looks like: - ' Creating library ui.dll.lib and object ui.dll.exp' - This happens when there are exports from the dll or exe. - """ - env = self._GetEnv(arch) - if use_separate_mspdbsrv == "True": - self._UseSeparateMspdbsrv(env, args) - if sys.platform == "win32": - args = list(args) # *args is a tuple by default, which is read-only. - args[0] = args[0].replace("/", "\\") - # https://docs.python.org/2/library/subprocess.html: - # "On Unix with shell=True [...] if args is a sequence, the first item - # specifies the command string, and any additional items will be treated as - # additional arguments to the shell itself. That is to say, Popen does the - # equivalent of: - # Popen(['/bin/sh', '-c', args[0], args[1], ...])" - # For that reason, since going through the shell doesn't seem necessary on - # non-Windows don't do that there. 
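# [Editor's note: minimal sketch of the saved environment-block format parsed
# by _GetEnv above: NUL-separated KEY=VALUE pairs with two trailing NULs (one
# list terminator, one trailing separator). Helper name is hypothetical.]
def parse_env_block(data):
    pairs = data[:-2].split("\0")
    return dict(item.split("=", 1) for item in pairs)

block = "PATH=C:\\tools\0TMP=C:\\tmp\0\0"
assert parse_env_block(block) == {"PATH": "C:\\tools", "TMP": "C:\\tmp"}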
- link = subprocess.Popen( - args, - shell=sys.platform == "win32", - env=env, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) - out = link.communicate()[0].decode("utf-8") - for line in out.splitlines(): - if ( - not line.startswith(" Creating library ") - and not line.startswith("Generating code") - and not line.startswith("Finished generating code") - ): - print(line) - return link.returncode - - def ExecLinkWithManifests( - self, - arch, - embed_manifest, - out, - ldcmd, - resname, - mt, - rc, - intermediate_manifest, - *manifests - ): - """A wrapper for handling creating a manifest resource and then executing - a link command.""" - # The 'normal' way to do manifests is to have link generate a manifest - # based on gathering dependencies from the object files, then merge that - # manifest with other manifests supplied as sources, convert the merged - # manifest to a resource, and then *relink*, including the compiled - # version of the manifest resource. This breaks incremental linking, and - # is generally overly complicated. Instead, we merge all the manifests - # provided (along with one that includes what would normally be in the - # linker-generated one, see msvs_emulation.py), and include that into the - # first and only link. We still tell link to generate a manifest, but we - # only use that to assert that our simpler process did not miss anything. - variables = { - "python": sys.executable, - "arch": arch, - "out": out, - "ldcmd": ldcmd, - "resname": resname, - "mt": mt, - "rc": rc, - "intermediate_manifest": intermediate_manifest, - "manifests": " ".join(manifests), - } - add_to_ld = "" - if manifests: - subprocess.check_call( - "%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo " - "-manifest %(manifests)s -out:%(out)s.manifest" % variables - ) - if embed_manifest == "True": - subprocess.check_call( - "%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest" - " %(out)s.manifest.rc %(resname)s" % variables - ) - subprocess.check_call( - "%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s " - "%(out)s.manifest.rc" % variables - ) - add_to_ld = " %(out)s.manifest.res" % variables - subprocess.check_call(ldcmd + add_to_ld) - - # Run mt.exe on the theoretically complete manifest we generated, merging - # it with the one the linker generated to confirm that the linker - # generated one does not add anything. This is strictly unnecessary for - # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not - # used in a #pragma comment. - if manifests: - # Merge the intermediate one with ours to .assert.manifest, then check - # that .assert.manifest is identical to ours. - subprocess.check_call( - "%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo " - "-manifest %(out)s.manifest %(intermediate_manifest)s " - "-out:%(out)s.assert.manifest" % variables - ) - assert_manifest = "%(out)s.assert.manifest" % variables - our_manifest = "%(out)s.manifest" % variables - # Load and normalize the manifests. mt.exe sometimes removes whitespace, - # and sometimes doesn't unfortunately. 
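# [Editor's note: the manifest comparison that follows is whitespace-
# insensitive for exactly the reason given above; a self-contained sketch of
# the same normalization.]
import string

def normalize_manifest(text):
    return text.translate(str.maketrans("", "", string.whitespace))

a = "<assembly>\n  <trustInfo/>\n</assembly>"
b = "<assembly><trustInfo/></assembly>"
assert normalize_manifest(a) == normalize_manifest(b)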
- with open(our_manifest) as our_f, open(assert_manifest) as assert_f:
- translator = str.maketrans("", "", string.whitespace)
- our_data = our_f.read().translate(translator)
- assert_data = assert_f.read().translate(translator)
- if our_data != assert_data:
- os.unlink(out)
-
- def dump(filename):
- print(filename, file=sys.stderr)
- print("-----", file=sys.stderr)
- with open(filename) as f:
- print(f.read(), file=sys.stderr)
- print("-----", file=sys.stderr)
-
- dump(intermediate_manifest)
- dump(our_manifest)
- dump(assert_manifest)
- sys.stderr.write(
- 'Linker generated manifest "%s" added to final manifest "%s" '
- '(result in "%s"). '
- "Were /MANIFEST switches used in #pragma statements? "
- % (intermediate_manifest, our_manifest, assert_manifest)
- )
- return 1
-
- def ExecManifestWrapper(self, arch, *args):
- """Run manifest tool with environment set. Strip out undesirable warning
- (some XML blocks are recognized by the OS loader, but not the manifest
- tool)."""
- env = self._GetEnv(arch)
- popen = subprocess.Popen(
- args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
- )
- out = popen.communicate()[0].decode("utf-8")
- for line in out.splitlines():
- if line and "manifest authoring warning 81010002" not in line:
- print(line)
- return popen.returncode
-
- def ExecManifestToRc(self, arch, *args):
- """Creates a resource file pointing a SxS assembly manifest.
- |args| is tuple containing path to resource file, path to manifest file
- and resource name which can be "1" (for executables) or "2" (for DLLs)."""
- manifest_path, resource_path, resource_name = args
- with open(resource_path, "w") as output:
- output.write(
- '#include <windows.h>\n%s RT_MANIFEST "%s"'
- % (resource_name, os.path.abspath(manifest_path).replace("\\", "/"))
- )
-
- def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, *flags):
- """Filter noisy filenames output from MIDL compile step that isn't
- quietable via command line flags.
- """
- args = (
- ["midl", "/nologo"]
- + list(flags)
- + [
- "/out",
- outdir,
- "/tlb",
- tlb,
- "/h",
- h,
- "/dlldata",
- dlldata,
- "/iid",
- iid,
- "/proxy",
- proxy,
- idl,
- ]
- )
- env = self._GetEnv(arch)
- popen = subprocess.Popen(
- args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
- )
- out = popen.communicate()[0].decode("utf-8")
- # Filter junk out of stdout, and write filtered versions. Output we want
- # to filter is pairs of lines that look like this:
- # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
- # objidl.idl
- lines = out.splitlines()
- prefixes = ("Processing ", "64 bit Processing ")
- processing = {os.path.basename(x) for x in lines if x.startswith(prefixes)}
- for line in lines:
- if not line.startswith(prefixes) and line not in processing:
- print(line)
- return popen.returncode
-
- def ExecAsmWrapper(self, arch, *args):
- """Filter logo banner from invocations of asm.exe."""
- env = self._GetEnv(arch)
- popen = subprocess.Popen(
- args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
- )
- out = popen.communicate()[0].decode("utf-8")
- for line in out.splitlines():
- if (
- not line.startswith("Copyright (C) Microsoft Corporation")
- and not line.startswith("Microsoft (R) Macro Assembler")
- and not line.startswith(" Assembling: ")
- and line
- ):
- print(line)
- return popen.returncode
-
- def ExecRcWrapper(self, arch, *args):
- """Filter logo banner from invocations of rc.exe.
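# [Editor's note: standalone sketch of the MIDL noise filter above. Note the
# quirk that os.path.basename is applied to the whole "Processing <path>"
# line; forward slashes are used in this sample so the demo also runs on
# non-Windows Pythons.]
import os

def filter_midl_noise(out):
    lines = out.splitlines()
    prefixes = ("Processing ", "64 bit Processing ")
    echoes = {os.path.basename(x) for x in lines if x.startswith(prefixes)}
    return [x for x in lines if not x.startswith(prefixes) and x not in echoes]

sample = "Processing C:/sdk/include/objidl.idl\nobjidl.idl\nreal diagnostic"
assert filter_midl_noise(sample) == ["real diagnostic"]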
Older versions of RC - don't support the /nologo flag.""" - env = self._GetEnv(arch) - popen = subprocess.Popen( - args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ) - out = popen.communicate()[0].decode("utf-8") - for line in out.splitlines(): - if ( - not line.startswith("Microsoft (R) Windows (R) Resource Compiler") - and not line.startswith("Copyright (C) Microsoft Corporation") - and line - ): - print(line) - return popen.returncode - - def ExecActionWrapper(self, arch, rspfile, *dir): - """Runs an action command line from a response file using the environment - for |arch|. If |dir| is supplied, use that as the working directory.""" - env = self._GetEnv(arch) - # TODO(scottmg): This is a temporary hack to get some specific variables - # through to actions that are set after gyp-time. http://crbug.com/333738. - for k, v in os.environ.items(): - if k not in env: - env[k] = v - args = open(rspfile).read() - dir = dir[0] if dir else None - return subprocess.call(args, shell=True, env=env, cwd=dir) - - def ExecClCompile(self, project_dir, selected_files): - """Executed by msvs-ninja projects when the 'ClCompile' target is used to - build selected C/C++ files.""" - project_dir = os.path.relpath(project_dir, BASE_DIR) - selected_files = selected_files.split(";") - ninja_targets = [ - os.path.join(project_dir, filename) + "^^" for filename in selected_files - ] - cmd = ["ninja.exe"] - cmd.extend(ninja_targets) - return subprocess.call(cmd, shell=True, cwd=BASE_DIR) - - -if __name__ == "__main__": - sys.exit(main(sys.argv[1:])) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py deleted file mode 100644 index 5f2c097f63e1f..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py +++ /dev/null @@ -1,1938 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -This module contains classes that help to emulate xcodebuild behavior on top of -other build systems, such as make and ninja. -""" - - -import copy -import gyp.common -import os -import os.path -import re -import shlex -import subprocess -import sys -from gyp.common import GypError - -# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when -# "xcodebuild" is called too quickly (it has been found to return incorrect -# version number). -XCODE_VERSION_CACHE = None - -# Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance -# corresponding to the installed version of Xcode. -XCODE_ARCHS_DEFAULT_CACHE = None - - -def XcodeArchsVariableMapping(archs, archs_including_64_bit=None): - """Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable, - and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT).""" - mapping = {"$(ARCHS_STANDARD)": archs} - if archs_including_64_bit: - mapping["$(ARCHS_STANDARD_INCLUDING_64_BIT)"] = archs_including_64_bit - return mapping - - -class XcodeArchsDefault: - """A class to resolve ARCHS variable from xcode_settings, resolving Xcode - macros and implementing filtering by VALID_ARCHS. The expansion of macros - depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and - on the version of Xcode. - """ - - # Match variable like $(ARCHS_STANDARD). 
- variable_pattern = re.compile(r"\$\([a-zA-Z_][a-zA-Z0-9_]*\)$") - - def __init__(self, default, mac, iphonesimulator, iphoneos): - self._default = (default,) - self._archs = {"mac": mac, "ios": iphoneos, "iossim": iphonesimulator} - - def _VariableMapping(self, sdkroot): - """Returns the dictionary of variable mapping depending on the SDKROOT.""" - sdkroot = sdkroot.lower() - if "iphoneos" in sdkroot: - return self._archs["ios"] - elif "iphonesimulator" in sdkroot: - return self._archs["iossim"] - else: - return self._archs["mac"] - - def _ExpandArchs(self, archs, sdkroot): - """Expands variables references in ARCHS, and remove duplicates.""" - variable_mapping = self._VariableMapping(sdkroot) - expanded_archs = [] - for arch in archs: - if self.variable_pattern.match(arch): - variable = arch - try: - variable_expansion = variable_mapping[variable] - for arch in variable_expansion: - if arch not in expanded_archs: - expanded_archs.append(arch) - except KeyError: - print('Warning: Ignoring unsupported variable "%s".' % variable) - elif arch not in expanded_archs: - expanded_archs.append(arch) - return expanded_archs - - def ActiveArchs(self, archs, valid_archs, sdkroot): - """Expands variables references in ARCHS, and filter by VALID_ARCHS if it - is defined (if not set, Xcode accept any value in ARCHS, otherwise, only - values present in VALID_ARCHS are kept).""" - expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or "") - if valid_archs: - filtered_archs = [] - for arch in expanded_archs: - if arch in valid_archs: - filtered_archs.append(arch) - expanded_archs = filtered_archs - return expanded_archs - - -def GetXcodeArchsDefault(): - """Returns the |XcodeArchsDefault| object to use to expand ARCHS for the - installed version of Xcode. The default values used by Xcode for ARCHS - and the expansion of the variables depends on the version of Xcode used. - - For all version anterior to Xcode 5.0 or posterior to Xcode 5.1 included - uses $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses - $(ARCHS_STANDARD_INCLUDING_64_BIT). This variable was added to Xcode 5.0 - and deprecated with Xcode 5.1. - - For "macosx" SDKROOT, all version starting with Xcode 5.0 includes 64-bit - architecture as part of $(ARCHS_STANDARD) and default to only building it. - - For "iphoneos" and "iphonesimulator" SDKROOT, 64-bit architectures are part - of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1, they - are also part of $(ARCHS_STANDARD). - - All these rules are coded in the construction of the |XcodeArchsDefault| - object to use depending on the version of Xcode detected. 
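# [Editor's note: self-contained sketch of _ExpandArchs/ActiveArchs above:
# $(ARCHS_STANDARD)-style macros are expanded via the SDK's mapping,
# duplicates are dropped, and VALID_ARCHS (when set) filters the result.
# Unknown macros are silently skipped here; the original prints a warning.]
import re

VARIABLE = re.compile(r"\$\([a-zA-Z_][a-zA-Z0-9_]*\)$")

def active_archs(archs, valid_archs, mapping):
    expanded = []
    for arch in archs:
        hits = mapping.get(arch, []) if VARIABLE.match(arch) else [arch]
        for a in hits:
            if a not in expanded:
                expanded.append(a)
    return [a for a in expanded if a in valid_archs] if valid_archs else expanded

ios = {"$(ARCHS_STANDARD)": ["armv7", "armv7s", "arm64"]}
assert active_archs(["$(ARCHS_STANDARD)"], ["arm64"], ios) == ["arm64"]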
The object is - for performance reason.""" - global XCODE_ARCHS_DEFAULT_CACHE - if XCODE_ARCHS_DEFAULT_CACHE: - return XCODE_ARCHS_DEFAULT_CACHE - xcode_version, _ = XcodeVersion() - if xcode_version < "0500": - XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault( - "$(ARCHS_STANDARD)", - XcodeArchsVariableMapping(["i386"]), - XcodeArchsVariableMapping(["i386"]), - XcodeArchsVariableMapping(["armv7"]), - ) - elif xcode_version < "0510": - XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault( - "$(ARCHS_STANDARD_INCLUDING_64_BIT)", - XcodeArchsVariableMapping(["x86_64"], ["x86_64"]), - XcodeArchsVariableMapping(["i386"], ["i386", "x86_64"]), - XcodeArchsVariableMapping( - ["armv7", "armv7s"], ["armv7", "armv7s", "arm64"] - ), - ) - else: - XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault( - "$(ARCHS_STANDARD)", - XcodeArchsVariableMapping(["x86_64"], ["x86_64"]), - XcodeArchsVariableMapping(["i386", "x86_64"], ["i386", "x86_64"]), - XcodeArchsVariableMapping( - ["armv7", "armv7s", "arm64"], ["armv7", "armv7s", "arm64"] - ), - ) - return XCODE_ARCHS_DEFAULT_CACHE - - -class XcodeSettings: - """A class that understands the gyp 'xcode_settings' object.""" - - # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached - # at class-level for efficiency. - _sdk_path_cache = {} - _platform_path_cache = {} - _sdk_root_cache = {} - - # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so - # cached at class-level for efficiency. - _plist_cache = {} - - # Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so - # cached at class-level for efficiency. - _codesigning_key_cache = {} - - def __init__(self, spec): - self.spec = spec - - self.isIOS = False - self.mac_toolchain_dir = None - self.header_map_path = None - - # Per-target 'xcode_settings' are pushed down into configs earlier by gyp. - # This means self.xcode_settings[config] always contains all settings - # for that config -- the per-target settings as well. Settings that are - # the same for all configs are implicitly per-target settings. - self.xcode_settings = {} - configs = spec["configurations"] - for configname, config in configs.items(): - self.xcode_settings[configname] = config.get("xcode_settings", {}) - self._ConvertConditionalKeys(configname) - if self.xcode_settings[configname].get("IPHONEOS_DEPLOYMENT_TARGET", None): - self.isIOS = True - - # This is only non-None temporarily during the execution of some methods. - self.configname = None - - # Used by _AdjustLibrary to match .a and .dylib entries in libraries. - self.library_re = re.compile(r"^lib([^/]+)\.(a|dylib)$") - - def _ConvertConditionalKeys(self, configname): - """Converts or warns on conditional keys. Xcode supports conditional keys, - such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. 
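# [Editor's note: the gates above rely on Xcode versions being zero-padded
# strings ("0463", "0500", "0510", ...), so plain string comparison orders
# them correctly; a quick standalone check of the default-ARCHS selection.]
def default_archs_macro(xcode_version):
    if xcode_version < "0500":
        return "$(ARCHS_STANDARD)"
    if xcode_version < "0510":
        return "$(ARCHS_STANDARD_INCLUDING_64_BIT)"
    return "$(ARCHS_STANDARD)"

assert default_archs_macro("0463") == "$(ARCHS_STANDARD)"
assert default_archs_macro("0502") == "$(ARCHS_STANDARD_INCLUDING_64_BIT)"
assert default_archs_macro("0940") == "$(ARCHS_STANDARD)"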
This is a partial implementation - with some keys converted while the rest force a warning.""" - settings = self.xcode_settings[configname] - conditional_keys = [key for key in settings if key.endswith("]")] - for key in conditional_keys: - # If you need more, speak up at http://crbug.com/122592 - if key.endswith("[sdk=iphoneos*]"): - if configname.endswith("iphoneos"): - new_key = key.split("[")[0] - settings[new_key] = settings[key] - else: - print( - "Warning: Conditional keys not implemented, ignoring:", - " ".join(conditional_keys), - ) - del settings[key] - - def _Settings(self): - assert self.configname - return self.xcode_settings[self.configname] - - def _Test(self, test_key, cond_key, default): - return self._Settings().get(test_key, default) == cond_key - - def _Appendf(self, lst, test_key, format_str, default=None): - if test_key in self._Settings(): - lst.append(format_str % str(self._Settings()[test_key])) - elif default: - lst.append(format_str % str(default)) - - def _WarnUnimplemented(self, test_key): - if test_key in self._Settings(): - print('Warning: Ignoring not yet implemented key "%s".' % test_key) - - def IsBinaryOutputFormat(self, configname): - default = "binary" if self.isIOS else "xml" - format = self.xcode_settings[configname].get("INFOPLIST_OUTPUT_FORMAT", default) - return format == "binary" - - def IsIosFramework(self): - return self.spec["type"] == "shared_library" and self._IsBundle() and self.isIOS - - def _IsBundle(self): - return ( - int(self.spec.get("mac_bundle", 0)) != 0 - or self._IsXCTest() - or self._IsXCUiTest() - ) - - def _IsXCTest(self): - return int(self.spec.get("mac_xctest_bundle", 0)) != 0 - - def _IsXCUiTest(self): - return int(self.spec.get("mac_xcuitest_bundle", 0)) != 0 - - def _IsIosAppExtension(self): - return int(self.spec.get("ios_app_extension", 0)) != 0 - - def _IsIosWatchKitExtension(self): - return int(self.spec.get("ios_watchkit_extension", 0)) != 0 - - def _IsIosWatchApp(self): - return int(self.spec.get("ios_watch_app", 0)) != 0 - - def GetFrameworkVersion(self): - """Returns the framework version of the current target. Only valid for - bundles.""" - assert self._IsBundle() - return self.GetPerTargetSetting("FRAMEWORK_VERSION", default="A") - - def GetWrapperExtension(self): - """Returns the bundle extension (.app, .framework, .plugin, etc). Only - valid for bundles.""" - assert self._IsBundle() - if self.spec["type"] in ("loadable_module", "shared_library"): - default_wrapper_extension = { - "loadable_module": "bundle", - "shared_library": "framework", - }[self.spec["type"]] - wrapper_extension = self.GetPerTargetSetting( - "WRAPPER_EXTENSION", default=default_wrapper_extension - ) - return "." + self.spec.get("product_extension", wrapper_extension) - elif self.spec["type"] == "executable": - if self._IsIosAppExtension() or self._IsIosWatchKitExtension(): - return "." + self.spec.get("product_extension", "appex") - else: - return "." + self.spec.get("product_extension", "app") - else: - assert False, "Don't know extension for '{}', target '{}'".format( - self.spec["type"], - self.spec["target_name"], - ) - - def GetProductName(self): - """Returns PRODUCT_NAME.""" - return self.spec.get("product_name", self.spec["target_name"]) - - def GetFullProductName(self): - """Returns FULL_PRODUCT_NAME.""" - if self._IsBundle(): - return self.GetWrapperName() - else: - return self._GetStandaloneBinaryPath() - - def GetWrapperName(self): - """Returns the directory name of the bundle represented by this target. 
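# [Editor's note: standalone sketch of the conversion above -- a
# CODE_SIGN_IDENTITY[sdk=iphoneos*] entry is promoted to its unconditional
# key for iphoneos configurations and dropped otherwise (the original also
# prints a warning for unsupported cases, omitted here; this copy does not
# mutate in place).]
def convert_conditional_keys(settings, configname):
    settings = dict(settings)
    for key in [k for k in settings if k.endswith("]")]:
        if key.endswith("[sdk=iphoneos*]") and configname.endswith("iphoneos"):
            settings[key.split("[")[0]] = settings[key]
        del settings[key]
    return settings

s = {"CODE_SIGN_IDENTITY[sdk=iphoneos*]": "iPhone Developer"}
assert convert_conditional_keys(s, "Release-iphoneos") == {
    "CODE_SIGN_IDENTITY": "iPhone Developer"}
assert convert_conditional_keys(s, "Release") == {}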
- Only valid for bundles.""" - assert self._IsBundle() - return self.GetProductName() + self.GetWrapperExtension() - - def GetBundleContentsFolderPath(self): - """Returns the qualified path to the bundle's contents folder. E.g. - Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles.""" - if self.isIOS: - return self.GetWrapperName() - assert self._IsBundle() - if self.spec["type"] == "shared_library": - return os.path.join( - self.GetWrapperName(), "Versions", self.GetFrameworkVersion() - ) - else: - # loadable_modules have a 'Contents' folder like executables. - return os.path.join(self.GetWrapperName(), "Contents") - - def GetBundleResourceFolder(self): - """Returns the qualified path to the bundle's resource folder. E.g. - Chromium.app/Contents/Resources. Only valid for bundles.""" - assert self._IsBundle() - if self.isIOS: - return self.GetBundleContentsFolderPath() - return os.path.join(self.GetBundleContentsFolderPath(), "Resources") - - def GetBundleExecutableFolderPath(self): - """Returns the qualified path to the bundle's executables folder. E.g. - Chromium.app/Contents/MacOS. Only valid for bundles.""" - assert self._IsBundle() - if self.spec["type"] in ("shared_library") or self.isIOS: - return self.GetBundleContentsFolderPath() - elif self.spec["type"] in ("executable", "loadable_module"): - return os.path.join(self.GetBundleContentsFolderPath(), "MacOS") - - def GetBundleJavaFolderPath(self): - """Returns the qualified path to the bundle's Java resource folder. - E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles.""" - assert self._IsBundle() - return os.path.join(self.GetBundleResourceFolder(), "Java") - - def GetBundleFrameworksFolderPath(self): - """Returns the qualified path to the bundle's frameworks folder. E.g, - Chromium.app/Contents/Frameworks. Only valid for bundles.""" - assert self._IsBundle() - return os.path.join(self.GetBundleContentsFolderPath(), "Frameworks") - - def GetBundleSharedFrameworksFolderPath(self): - """Returns the qualified path to the bundle's frameworks folder. E.g, - Chromium.app/Contents/SharedFrameworks. Only valid for bundles.""" - assert self._IsBundle() - return os.path.join(self.GetBundleContentsFolderPath(), "SharedFrameworks") - - def GetBundleSharedSupportFolderPath(self): - """Returns the qualified path to the bundle's shared support folder. E.g, - Chromium.app/Contents/SharedSupport. Only valid for bundles.""" - assert self._IsBundle() - if self.spec["type"] == "shared_library": - return self.GetBundleResourceFolder() - else: - return os.path.join(self.GetBundleContentsFolderPath(), "SharedSupport") - - def GetBundlePlugInsFolderPath(self): - """Returns the qualified path to the bundle's plugins folder. E.g, - Chromium.app/Contents/PlugIns. Only valid for bundles.""" - assert self._IsBundle() - return os.path.join(self.GetBundleContentsFolderPath(), "PlugIns") - - def GetBundleXPCServicesFolderPath(self): - """Returns the qualified path to the bundle's XPC services folder. E.g, - Chromium.app/Contents/XPCServices. Only valid for bundles.""" - assert self._IsBundle() - return os.path.join(self.GetBundleContentsFolderPath(), "XPCServices") - - def GetBundlePlistPath(self): - """Returns the qualified path to the bundle's plist file. E.g. - Chromium.app/Contents/Info.plist. 
Only valid for bundles.""" - assert self._IsBundle() - if ( - self.spec["type"] in ("executable", "loadable_module") - or self.IsIosFramework() - ): - return os.path.join(self.GetBundleContentsFolderPath(), "Info.plist") - else: - return os.path.join( - self.GetBundleContentsFolderPath(), "Resources", "Info.plist" - ) - - def GetProductType(self): - """Returns the PRODUCT_TYPE of this target.""" - if self._IsIosAppExtension(): - assert self._IsBundle(), ( - "ios_app_extension flag requires mac_bundle " - "(target %s)" % self.spec["target_name"] - ) - return "com.apple.product-type.app-extension" - if self._IsIosWatchKitExtension(): - assert self._IsBundle(), ( - "ios_watchkit_extension flag requires " - "mac_bundle (target %s)" % self.spec["target_name"] - ) - return "com.apple.product-type.watchkit-extension" - if self._IsIosWatchApp(): - assert self._IsBundle(), ( - "ios_watch_app flag requires mac_bundle " - "(target %s)" % self.spec["target_name"] - ) - return "com.apple.product-type.application.watchapp" - if self._IsXCUiTest(): - assert self._IsBundle(), ( - "mac_xcuitest_bundle flag requires mac_bundle " - "(target %s)" % self.spec["target_name"] - ) - return "com.apple.product-type.bundle.ui-testing" - if self._IsBundle(): - return { - "executable": "com.apple.product-type.application", - "loadable_module": "com.apple.product-type.bundle", - "shared_library": "com.apple.product-type.framework", - }[self.spec["type"]] - else: - return { - "executable": "com.apple.product-type.tool", - "loadable_module": "com.apple.product-type.library.dynamic", - "shared_library": "com.apple.product-type.library.dynamic", - "static_library": "com.apple.product-type.library.static", - }[self.spec["type"]] - - def GetMachOType(self): - """Returns the MACH_O_TYPE of this target.""" - # Weird, but matches Xcode. - if not self._IsBundle() and self.spec["type"] == "executable": - return "" - return { - "executable": "mh_execute", - "static_library": "staticlib", - "shared_library": "mh_dylib", - "loadable_module": "mh_bundle", - }[self.spec["type"]] - - def _GetBundleBinaryPath(self): - """Returns the name of the bundle binary of by this target. - E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles.""" - assert self._IsBundle() - return os.path.join( - self.GetBundleExecutableFolderPath(), self.GetExecutableName() - ) - - def _GetStandaloneExecutableSuffix(self): - if "product_extension" in self.spec: - return "." + self.spec["product_extension"] - return { - "executable": "", - "static_library": ".a", - "shared_library": ".dylib", - "loadable_module": ".so", - }[self.spec["type"]] - - def _GetStandaloneExecutablePrefix(self): - return self.spec.get( - "product_prefix", - { - "executable": "", - "static_library": "lib", - "shared_library": "lib", - # Non-bundled loadable_modules are called foo.so for some reason - # (that is, .so and no prefix) with the xcode build -- match that. - "loadable_module": "", - }[self.spec["type"]], - ) - - def _GetStandaloneBinaryPath(self): - """Returns the name of the non-bundle binary represented by this target. - E.g. hello_world. 
Only valid for non-bundles.""" - assert not self._IsBundle() - assert self.spec["type"] in ( - "executable", - "shared_library", - "static_library", - "loadable_module", - ), ("Unexpected type %s" % self.spec["type"]) - target = self.spec["target_name"] - if self.spec["type"] == "static_library": - if target[:3] == "lib": - target = target[3:] - elif self.spec["type"] in ("loadable_module", "shared_library"): - if target[:3] == "lib": - target = target[3:] - - target_prefix = self._GetStandaloneExecutablePrefix() - target = self.spec.get("product_name", target) - target_ext = self._GetStandaloneExecutableSuffix() - return target_prefix + target + target_ext - - def GetExecutableName(self): - """Returns the executable name of the bundle represented by this target. - E.g. Chromium.""" - if self._IsBundle(): - return self.spec.get("product_name", self.spec["target_name"]) - else: - return self._GetStandaloneBinaryPath() - - def GetExecutablePath(self): - """Returns the qualified path to the primary executable of the bundle - represented by this target. E.g. Chromium.app/Contents/MacOS/Chromium.""" - if self._IsBundle(): - return self._GetBundleBinaryPath() - else: - return self._GetStandaloneBinaryPath() - - def GetActiveArchs(self, configname): - """Returns the architectures this target should be built for.""" - config_settings = self.xcode_settings[configname] - xcode_archs_default = GetXcodeArchsDefault() - return xcode_archs_default.ActiveArchs( - config_settings.get("ARCHS"), - config_settings.get("VALID_ARCHS"), - config_settings.get("SDKROOT"), - ) - - def _GetSdkVersionInfoItem(self, sdk, infoitem): - # xcodebuild requires Xcode and can't run on Command Line Tools-only - # systems from 10.7 onward. - # Since the CLT has no SDK paths anyway, returning None is the - # most sensible route and should still do the right thing. - try: - return GetStdoutQuiet(["xcrun", "--sdk", sdk, infoitem]) - except GypError: - pass - - def _SdkRoot(self, configname): - if configname is None: - configname = self.configname - return self.GetPerConfigSetting("SDKROOT", configname, default="") - - def _XcodePlatformPath(self, configname=None): - sdk_root = self._SdkRoot(configname) - if sdk_root not in XcodeSettings._platform_path_cache: - platform_path = self._GetSdkVersionInfoItem( - sdk_root, "--show-sdk-platform-path" - ) - XcodeSettings._platform_path_cache[sdk_root] = platform_path - return XcodeSettings._platform_path_cache[sdk_root] - - def _SdkPath(self, configname=None): - sdk_root = self._SdkRoot(configname) - if sdk_root.startswith("/"): - return sdk_root - return self._XcodeSdkPath(sdk_root) - - def _XcodeSdkPath(self, sdk_root): - if sdk_root not in XcodeSettings._sdk_path_cache: - sdk_path = self._GetSdkVersionInfoItem(sdk_root, "--show-sdk-path") - XcodeSettings._sdk_path_cache[sdk_root] = sdk_path - if sdk_root: - XcodeSettings._sdk_root_cache[sdk_path] = sdk_root - return XcodeSettings._sdk_path_cache[sdk_root] - - def _AppendPlatformVersionMinFlags(self, lst): - self._Appendf(lst, "MACOSX_DEPLOYMENT_TARGET", "-mmacosx-version-min=%s") - if "IPHONEOS_DEPLOYMENT_TARGET" in self._Settings(): - # TODO: Implement this better? 
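# [Editor's note: worked example of the standalone (non-bundle) naming rules
# above: library-ish targets get their "lib" prefix exactly once, plus the
# type-specific extension; non-bundled loadable_modules get no prefix.]
def standalone_binary(target, spec_type):
    prefix = {"executable": "", "static_library": "lib",
              "shared_library": "lib", "loadable_module": ""}[spec_type]
    suffix = {"executable": "", "static_library": ".a",
              "shared_library": ".dylib", "loadable_module": ".so"}[spec_type]
    if spec_type != "executable" and target.startswith("lib"):
        target = target[3:]
    return prefix + target + suffix

assert standalone_binary("libcrypto", "shared_library") == "libcrypto.dylib"
assert standalone_binary("libplugin", "loadable_module") == "plugin.so"
assert standalone_binary("hello", "executable") == "hello"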
- sdk_path_basename = os.path.basename(self._SdkPath()) - if sdk_path_basename.lower().startswith("iphonesimulator"): - self._Appendf( - lst, "IPHONEOS_DEPLOYMENT_TARGET", "-mios-simulator-version-min=%s" - ) - else: - self._Appendf( - lst, "IPHONEOS_DEPLOYMENT_TARGET", "-miphoneos-version-min=%s" - ) - - def GetCflags(self, configname, arch=None): - """Returns flags that need to be added to .c, .cc, .m, and .mm - compilations.""" - # This functions (and the similar ones below) do not offer complete - # emulation of all xcode_settings keys. They're implemented on demand. - - self.configname = configname - cflags = [] - - sdk_root = self._SdkPath() - if "SDKROOT" in self._Settings() and sdk_root: - cflags.append("-isysroot") - cflags.append(sdk_root) - - if self.header_map_path: - cflags.append("-I%s" % self.header_map_path) - - if self._Test("CLANG_WARN_CONSTANT_CONVERSION", "YES", default="NO"): - cflags.append("-Wconstant-conversion") - - if self._Test("GCC_CHAR_IS_UNSIGNED_CHAR", "YES", default="NO"): - cflags.append("-funsigned-char") - - if self._Test("GCC_CW_ASM_SYNTAX", "YES", default="YES"): - cflags.append("-fasm-blocks") - - if "GCC_DYNAMIC_NO_PIC" in self._Settings(): - if self._Settings()["GCC_DYNAMIC_NO_PIC"] == "YES": - cflags.append("-mdynamic-no-pic") - else: - pass - # TODO: In this case, it depends on the target. xcode passes - # mdynamic-no-pic by default for executable and possibly static lib - # according to mento - - if self._Test("GCC_ENABLE_PASCAL_STRINGS", "YES", default="YES"): - cflags.append("-mpascal-strings") - - self._Appendf(cflags, "GCC_OPTIMIZATION_LEVEL", "-O%s", default="s") - - if self._Test("GCC_GENERATE_DEBUGGING_SYMBOLS", "YES", default="YES"): - dbg_format = self._Settings().get("DEBUG_INFORMATION_FORMAT", "dwarf") - if dbg_format == "dwarf": - cflags.append("-gdwarf-2") - elif dbg_format == "stabs": - raise NotImplementedError("stabs debug format is not supported yet.") - elif dbg_format == "dwarf-with-dsym": - cflags.append("-gdwarf-2") - else: - raise NotImplementedError("Unknown debug format %s" % dbg_format) - - if self._Settings().get("GCC_STRICT_ALIASING") == "YES": - cflags.append("-fstrict-aliasing") - elif self._Settings().get("GCC_STRICT_ALIASING") == "NO": - cflags.append("-fno-strict-aliasing") - - if self._Test("GCC_SYMBOLS_PRIVATE_EXTERN", "YES", default="NO"): - cflags.append("-fvisibility=hidden") - - if self._Test("GCC_TREAT_WARNINGS_AS_ERRORS", "YES", default="NO"): - cflags.append("-Werror") - - if self._Test("GCC_WARN_ABOUT_MISSING_NEWLINE", "YES", default="NO"): - cflags.append("-Wnewline-eof") - - # In Xcode, this is only activated when GCC_COMPILER_VERSION is clang or - # llvm-gcc. It also requires a fairly recent libtool, and - # if the system clang isn't used, DYLD_LIBRARY_PATH needs to contain the - # path to the libLTO.dylib that matches the used clang. - if self._Test("LLVM_LTO", "YES", default="NO"): - cflags.append("-flto") - - self._AppendPlatformVersionMinFlags(cflags) - - # TODO: - if self._Test("COPY_PHASE_STRIP", "YES", default="NO"): - self._WarnUnimplemented("COPY_PHASE_STRIP") - self._WarnUnimplemented("GCC_DEBUGGING_SYMBOLS") - self._WarnUnimplemented("GCC_ENABLE_OBJC_EXCEPTIONS") - - # TODO: This is exported correctly, but assigning to it is not supported. 
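# [Editor's note: sketch of the deployment-target flag selection above: the
# iPhone Simulator SDK takes -mios-simulator-version-min, while device SDKs
# take -miphoneos-version-min.]
import os

def ios_min_version_flag(sdk_path, version):
    if os.path.basename(sdk_path).lower().startswith("iphonesimulator"):
        return "-mios-simulator-version-min=%s" % version
    return "-miphoneos-version-min=%s" % version

assert ios_min_version_flag("/SDKs/iPhoneSimulator9.3.sdk", "9.0") == \
    "-mios-simulator-version-min=9.0"
assert ios_min_version_flag("/SDKs/iPhoneOS9.3.sdk", "9.0") == \
    "-miphoneos-version-min=9.0"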
- self._WarnUnimplemented("MACH_O_TYPE") - self._WarnUnimplemented("PRODUCT_TYPE") - - # If GYP_CROSSCOMPILE (--cross-compiling), disable architecture-specific - # additions and assume these will be provided as required via CC_host, - # CXX_host, CC_target and CXX_target. - if not gyp.common.CrossCompileRequested(): - if arch is not None: - archs = [arch] - else: - assert self.configname - archs = self.GetActiveArchs(self.configname) - if len(archs) != 1: - # TODO: Supporting fat binaries will be annoying. - self._WarnUnimplemented("ARCHS") - archs = ["i386"] - cflags.append("-arch") - cflags.append(archs[0]) - - if archs[0] in ("i386", "x86_64"): - if self._Test("GCC_ENABLE_SSE3_EXTENSIONS", "YES", default="NO"): - cflags.append("-msse3") - if self._Test( - "GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS", "YES", default="NO" - ): - cflags.append("-mssse3") # Note 3rd 's'. - if self._Test("GCC_ENABLE_SSE41_EXTENSIONS", "YES", default="NO"): - cflags.append("-msse4.1") - if self._Test("GCC_ENABLE_SSE42_EXTENSIONS", "YES", default="NO"): - cflags.append("-msse4.2") - - cflags += self._Settings().get("WARNING_CFLAGS", []) - - if self._IsXCTest(): - platform_root = self._XcodePlatformPath(configname) - if platform_root: - cflags.append("-F" + platform_root + "/Developer/Library/Frameworks/") - - framework_root = sdk_root if sdk_root else "" - config = self.spec["configurations"][self.configname] - framework_dirs = config.get("mac_framework_dirs", []) - for directory in framework_dirs: - cflags.append("-F" + directory.replace("$(SDKROOT)", framework_root)) - - self.configname = None - return cflags - - def GetCflagsC(self, configname): - """Returns flags that need to be added to .c, and .m compilations.""" - self.configname = configname - cflags_c = [] - if self._Settings().get("GCC_C_LANGUAGE_STANDARD", "") == "ansi": - cflags_c.append("-ansi") - else: - self._Appendf(cflags_c, "GCC_C_LANGUAGE_STANDARD", "-std=%s") - cflags_c += self._Settings().get("OTHER_CFLAGS", []) - self.configname = None - return cflags_c - - def GetCflagsCC(self, configname): - """Returns flags that need to be added to .cc, and .mm compilations.""" - self.configname = configname - cflags_cc = [] - - clang_cxx_language_standard = self._Settings().get( - "CLANG_CXX_LANGUAGE_STANDARD" - ) - # Note: Don't make c++0x to c++11 so that c++0x can be used with older - # clangs that don't understand c++11 yet (like Xcode 4.2's). - if clang_cxx_language_standard: - cflags_cc.append("-std=%s" % clang_cxx_language_standard) - - self._Appendf(cflags_cc, "CLANG_CXX_LIBRARY", "-stdlib=%s") - - if self._Test("GCC_ENABLE_CPP_RTTI", "NO", default="YES"): - cflags_cc.append("-fno-rtti") - if self._Test("GCC_ENABLE_CPP_EXCEPTIONS", "NO", default="YES"): - cflags_cc.append("-fno-exceptions") - if self._Test("GCC_INLINES_ARE_PRIVATE_EXTERN", "YES", default="NO"): - cflags_cc.append("-fvisibility-inlines-hidden") - if self._Test("GCC_THREADSAFE_STATICS", "NO", default="YES"): - cflags_cc.append("-fno-threadsafe-statics") - # Note: This flag is a no-op for clang, it only has an effect for gcc. - if self._Test("GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO", "NO", default="YES"): - cflags_cc.append("-Wno-invalid-offsetof") - - other_ccflags = [] - - for flag in self._Settings().get("OTHER_CPLUSPLUSFLAGS", ["$(inherited)"]): - # TODO: More general variable expansion. Missing in many other places too. 
- if flag in ("$inherited", "$(inherited)", "${inherited}"): - flag = "$OTHER_CFLAGS" - if flag in ("$OTHER_CFLAGS", "$(OTHER_CFLAGS)", "${OTHER_CFLAGS}"): - other_ccflags += self._Settings().get("OTHER_CFLAGS", []) - else: - other_ccflags.append(flag) - cflags_cc += other_ccflags - - self.configname = None - return cflags_cc - - def _AddObjectiveCGarbageCollectionFlags(self, flags): - gc_policy = self._Settings().get("GCC_ENABLE_OBJC_GC", "unsupported") - if gc_policy == "supported": - flags.append("-fobjc-gc") - elif gc_policy == "required": - flags.append("-fobjc-gc-only") - - def _AddObjectiveCARCFlags(self, flags): - if self._Test("CLANG_ENABLE_OBJC_ARC", "YES", default="NO"): - flags.append("-fobjc-arc") - - def _AddObjectiveCMissingPropertySynthesisFlags(self, flags): - if self._Test( - "CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS", "YES", default="NO" - ): - flags.append("-Wobjc-missing-property-synthesis") - - def GetCflagsObjC(self, configname): - """Returns flags that need to be added to .m compilations.""" - self.configname = configname - cflags_objc = [] - self._AddObjectiveCGarbageCollectionFlags(cflags_objc) - self._AddObjectiveCARCFlags(cflags_objc) - self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc) - self.configname = None - return cflags_objc - - def GetCflagsObjCC(self, configname): - """Returns flags that need to be added to .mm compilations.""" - self.configname = configname - cflags_objcc = [] - self._AddObjectiveCGarbageCollectionFlags(cflags_objcc) - self._AddObjectiveCARCFlags(cflags_objcc) - self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc) - if self._Test("GCC_OBJC_CALL_CXX_CDTORS", "YES", default="NO"): - cflags_objcc.append("-fobjc-call-cxx-cdtors") - self.configname = None - return cflags_objcc - - def GetInstallNameBase(self): - """Return DYLIB_INSTALL_NAME_BASE for this target.""" - # Xcode sets this for shared_libraries, and for nonbundled loadable_modules. - if self.spec["type"] != "shared_library" and ( - self.spec["type"] != "loadable_module" or self._IsBundle() - ): - return None - install_base = self.GetPerTargetSetting( - "DYLIB_INSTALL_NAME_BASE", - default="/Library/Frameworks" if self._IsBundle() else "/usr/local/lib", - ) - return install_base - - def _StandardizePath(self, path): - """Do :standardizepath processing for path.""" - # I'm not quite sure what :standardizepath does. Just call normpath(), - # but don't let @executable_path/../foo collapse to foo. - if "/" in path: - prefix, rest = "", path - if path.startswith("@"): - prefix, rest = path.split("/", 1) - rest = os.path.normpath(rest) # :standardizepath - path = os.path.join(prefix, rest) - return path - - def GetInstallName(self): - """Return LD_DYLIB_INSTALL_NAME for this target.""" - # Xcode sets this for shared_libraries, and for nonbundled loadable_modules. - if self.spec["type"] != "shared_library" and ( - self.spec["type"] != "loadable_module" or self._IsBundle() - ): - return None - - default_install_name = ( - "$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)" - ) - install_name = self.GetPerTargetSetting( - "LD_DYLIB_INSTALL_NAME", default=default_install_name - ) - - # Hardcode support for the variables used in chromium for now, to - # unblock people using the make build. 
- if "$" in install_name: - assert install_name in ( - "$(DYLIB_INSTALL_NAME_BASE:standardizepath)/" - "$(WRAPPER_NAME)/$(PRODUCT_NAME)", - default_install_name, - ), ( - "Variables in LD_DYLIB_INSTALL_NAME are not generally supported " - "yet in target '%s' (got '%s')" - % (self.spec["target_name"], install_name) - ) - - install_name = install_name.replace( - "$(DYLIB_INSTALL_NAME_BASE:standardizepath)", - self._StandardizePath(self.GetInstallNameBase()), - ) - if self._IsBundle(): - # These are only valid for bundles, hence the |if|. - install_name = install_name.replace( - "$(WRAPPER_NAME)", self.GetWrapperName() - ) - install_name = install_name.replace( - "$(PRODUCT_NAME)", self.GetProductName() - ) - else: - assert "$(WRAPPER_NAME)" not in install_name - assert "$(PRODUCT_NAME)" not in install_name - - install_name = install_name.replace( - "$(EXECUTABLE_PATH)", self.GetExecutablePath() - ) - return install_name - - def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path): - """Checks if ldflag contains a filename and if so remaps it from - gyp-directory-relative to build-directory-relative.""" - # This list is expanded on demand. - # They get matched as: - # -exported_symbols_list file - # -Wl,exported_symbols_list file - # -Wl,exported_symbols_list,file - LINKER_FILE = r"(\S+)" - WORD = r"\S+" - linker_flags = [ - ["-exported_symbols_list", LINKER_FILE], # Needed for NaCl. - ["-unexported_symbols_list", LINKER_FILE], - ["-reexported_symbols_list", LINKER_FILE], - ["-sectcreate", WORD, WORD, LINKER_FILE], # Needed for remoting. - ] - for flag_pattern in linker_flags: - regex = re.compile("(?:-Wl,)?" + "[ ,]".join(flag_pattern)) - m = regex.match(ldflag) - if m: - ldflag = ( - ldflag[: m.start(1)] - + gyp_to_build_path(m.group(1)) - + ldflag[m.end(1) :] - ) - # Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS, - # TODO(thakis): Update ffmpeg.gyp): - if ldflag.startswith("-L"): - ldflag = "-L" + gyp_to_build_path(ldflag[len("-L") :]) - return ldflag - - def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None): - """Returns flags that need to be passed to the linker. - - Args: - configname: The name of the configuration to get ld flags for. - product_dir: The directory where products such static and dynamic - libraries are placed. This is added to the library search path. - gyp_to_build_path: A function that converts paths relative to the - current gyp file to paths relative to the build directory. - """ - self.configname = configname - ldflags = [] - - # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS - # can contain entries that depend on this. Explicitly absolutify these. 
- for ldflag in self._Settings().get("OTHER_LDFLAGS", []): - ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path)) - - if self._Test("DEAD_CODE_STRIPPING", "YES", default="NO"): - ldflags.append("-Wl,-dead_strip") - - if self._Test("PREBINDING", "YES", default="NO"): - ldflags.append("-Wl,-prebind") - - self._Appendf( - ldflags, "DYLIB_COMPATIBILITY_VERSION", "-compatibility_version %s" - ) - self._Appendf(ldflags, "DYLIB_CURRENT_VERSION", "-current_version %s") - - self._AppendPlatformVersionMinFlags(ldflags) - - if "SDKROOT" in self._Settings() and self._SdkPath(): - ldflags.append("-isysroot") - ldflags.append(self._SdkPath()) - - for library_path in self._Settings().get("LIBRARY_SEARCH_PATHS", []): - ldflags.append("-L" + gyp_to_build_path(library_path)) - - if "ORDER_FILE" in self._Settings(): - ldflags.append("-Wl,-order_file") - ldflags.append("-Wl," + gyp_to_build_path(self._Settings()["ORDER_FILE"])) - - if not gyp.common.CrossCompileRequested(): - if arch is not None: - archs = [arch] - else: - assert self.configname - archs = self.GetActiveArchs(self.configname) - if len(archs) != 1: - # TODO: Supporting fat binaries will be annoying. - self._WarnUnimplemented("ARCHS") - archs = ["i386"] - # Avoid quoting the space between -arch and the arch name - ldflags.append("-arch") - ldflags.append(archs[0]) - - # Xcode adds the product directory by default. - # Rewrite -L. to -L./ to work around http://www.openradar.me/25313838 - ldflags.append("-L" + (product_dir if product_dir != "." else "./")) - - install_name = self.GetInstallName() - if install_name and self.spec["type"] != "loadable_module": - ldflags.append("-install_name") - ldflags.append(install_name.replace(" ", r"\ ")) - - for rpath in self._Settings().get("LD_RUNPATH_SEARCH_PATHS", []): - ldflags.append("-Wl,-rpath," + rpath) - - sdk_root = self._SdkPath() - if not sdk_root: - sdk_root = "" - config = self.spec["configurations"][self.configname] - framework_dirs = config.get("mac_framework_dirs", []) - for directory in framework_dirs: - ldflags.append("-F" + directory.replace("$(SDKROOT)", sdk_root)) - - if self._IsXCTest(): - platform_root = self._XcodePlatformPath(configname) - if sdk_root and platform_root: - ldflags.append("-F" + platform_root + "/Developer/Library/Frameworks/") - ldflags.append("-framework") - ldflags.append("XCTest") - - is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension() - if sdk_root and is_extension: - # Adds the link flags for extensions. These flags are common for all - # extensions and provide loader and main function. - # These flags reflect the compilation options used by xcode to compile - # extensions. - xcode_version, _ = XcodeVersion() - if xcode_version < "0900": - ldflags.append("-lpkstart") - ldflags.append( - sdk_root - + "/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit" - ) - else: - ldflags.append("-e") - ldflags.append("_NSExtensionMain") - ldflags.append("-fapplication-extension") - - self._Appendf(ldflags, "CLANG_CXX_LIBRARY", "-stdlib=%s") - - self.configname = None - return ldflags - - def GetLibtoolflags(self, configname): - """Returns flags that need to be passed to the static linker. - - Args: - configname: The name of the configuration to get ld flags for. - """ - self.configname = configname - libtoolflags = [] - - for libtoolflag in self._Settings().get("OTHER_LDFLAGS", []): - libtoolflags.append(libtoolflag) - # TODO(thakis): ARCHS? 
- - self.configname = None - return libtoolflags - - def GetPerTargetSettings(self): - """Gets a list of all the per-target settings. This will only fetch keys - whose values are the same across all configurations.""" - first_pass = True - result = {} - for configname in sorted(self.xcode_settings.keys()): - if first_pass: - result = dict(self.xcode_settings[configname]) - first_pass = False - else: - for key, value in self.xcode_settings[configname].items(): - if key not in result: - continue - elif result[key] != value: - del result[key] - return result - - def GetPerConfigSetting(self, setting, configname, default=None): - if configname in self.xcode_settings: - return self.xcode_settings[configname].get(setting, default) - else: - return self.GetPerTargetSetting(setting, default) - - def GetPerTargetSetting(self, setting, default=None): - """Tries to get xcode_settings.setting from spec. Assumes that the setting - has the same value in all configurations and throws otherwise.""" - is_first_pass = True - result = None - for configname in sorted(self.xcode_settings.keys()): - if is_first_pass: - result = self.xcode_settings[configname].get(setting, None) - is_first_pass = False - else: - assert result == self.xcode_settings[configname].get(setting, None), ( - "Expected per-target setting for '%s', got per-config setting " - "(target %s)" % (setting, self.spec["target_name"]) - ) - if result is None: - return default - return result - - def _GetStripPostbuilds(self, configname, output_binary, quiet): - """Returns a list of shell commands that contain the shell commands - necessary to strip this target's binary. These should be run as postbuilds - before the actual postbuilds run.""" - self.configname = configname - - result = [] - if self._Test("DEPLOYMENT_POSTPROCESSING", "YES", default="NO") and self._Test( - "STRIP_INSTALLED_PRODUCT", "YES", default="NO" - ): - - default_strip_style = "debugging" - if ( - self.spec["type"] == "loadable_module" or self._IsIosAppExtension() - ) and self._IsBundle(): - default_strip_style = "non-global" - elif self.spec["type"] == "executable": - default_strip_style = "all" - - strip_style = self._Settings().get("STRIP_STYLE", default_strip_style) - strip_flags = {"all": "", "non-global": "-x", "debugging": "-S"}[ - strip_style - ] - - explicit_strip_flags = self._Settings().get("STRIPFLAGS", "") - if explicit_strip_flags: - strip_flags += " " + _NormalizeEnvVarReferences(explicit_strip_flags) - - if not quiet: - result.append("echo STRIP\\(%s\\)" % self.spec["target_name"]) - result.append(f"strip {strip_flags} {output_binary}") - - self.configname = None - return result - - def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet): - """Returns a list of shell commands that contain the shell commands - necessary to massage this target's debug information. These should be run - as postbuilds before the actual postbuilds run.""" - self.configname = configname - - # For static libraries, no dSYMs are created. 
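# [Editor's note: standalone sketch of GetPerTargetSettings above. Keys whose
# values disagree between configurations are dropped; note that a key present
# in only the first configuration survives, matching the code above.]
def per_target_settings(xcode_settings):
    result = {}
    for i, configname in enumerate(sorted(xcode_settings)):
        if i == 0:
            result = dict(xcode_settings[configname])
            continue
        for key, value in xcode_settings[configname].items():
            if key in result and result[key] != value:
                del result[key]
    return result

cfgs = {"Debug": {"SDKROOT": "macosx", "GCC_OPTIMIZATION_LEVEL": "0"},
        "Release": {"SDKROOT": "macosx", "GCC_OPTIMIZATION_LEVEL": "s"}}
assert per_target_settings(cfgs) == {"SDKROOT": "macosx"}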
- result = [] - if ( - self._Test("GCC_GENERATE_DEBUGGING_SYMBOLS", "YES", default="YES") - and self._Test( - "DEBUG_INFORMATION_FORMAT", "dwarf-with-dsym", default="dwarf" - ) - and self.spec["type"] != "static_library" - ): - if not quiet: - result.append("echo DSYMUTIL\\(%s\\)" % self.spec["target_name"]) - result.append("dsymutil {} -o {}".format(output_binary, output + ".dSYM")) - - self.configname = None - return result - - def _GetTargetPostbuilds(self, configname, output, output_binary, quiet=False): - """Returns a list of shell commands that contain the shell commands - to run as postbuilds for this target, before the actual postbuilds.""" - # dSYMs need to build before stripping happens. - return self._GetDebugInfoPostbuilds( - configname, output, output_binary, quiet - ) + self._GetStripPostbuilds(configname, output_binary, quiet) - - def _GetIOSPostbuilds(self, configname, output_binary): - """Return a shell command to codesign the iOS output binary so it can - be deployed to a device. This should be run as the very last step of the - build.""" - if not ( - self.isIOS - and (self.spec["type"] == "executable" or self._IsXCTest()) - or self.IsIosFramework() - ): - return [] - - postbuilds = [] - product_name = self.GetFullProductName() - settings = self.xcode_settings[configname] - - # Xcode expects XCTests to be copied into the TEST_HOST dir. - if self._IsXCTest(): - source = os.path.join("${BUILT_PRODUCTS_DIR}", product_name) - test_host = os.path.dirname(settings.get("TEST_HOST")) - xctest_destination = os.path.join(test_host, "PlugIns", product_name) - postbuilds.extend([f"ditto {source} {xctest_destination}"]) - - key = self._GetIOSCodeSignIdentityKey(settings) - if not key: - return postbuilds - - # Warn for any unimplemented signing xcode keys. - unimpl = ["OTHER_CODE_SIGN_FLAGS"] - unimpl = set(unimpl) & set(self.xcode_settings[configname].keys()) - if unimpl: - print( - "Warning: Some codesign keys not implemented, ignoring: %s" - % ", ".join(sorted(unimpl)) - ) - - if self._IsXCTest(): - # For device xctests, Xcode copies two extra frameworks into $TEST_HOST. 
- test_host = os.path.dirname(settings.get("TEST_HOST")) - frameworks_dir = os.path.join(test_host, "Frameworks") - platform_root = self._XcodePlatformPath(configname) - frameworks = [ - "Developer/Library/PrivateFrameworks/IDEBundleInjection.framework", - "Developer/Library/Frameworks/XCTest.framework", - ] - for framework in frameworks: - source = os.path.join(platform_root, framework) - destination = os.path.join(frameworks_dir, os.path.basename(framework)) - postbuilds.extend([f"ditto {source} {destination}"]) - - # Then re-sign everything with 'preserve=True' - postbuilds.extend( - [ - '%s code-sign-bundle "%s" "%s" "%s" "%s" %s' - % ( - os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"), - key, - settings.get("CODE_SIGN_ENTITLEMENTS", ""), - settings.get("PROVISIONING_PROFILE", ""), - destination, - True, - ) - ] - ) - plugin_dir = os.path.join(test_host, "PlugIns") - targets = [os.path.join(plugin_dir, product_name), test_host] - for target in targets: - postbuilds.extend( - [ - '%s code-sign-bundle "%s" "%s" "%s" "%s" %s' - % ( - os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"), - key, - settings.get("CODE_SIGN_ENTITLEMENTS", ""), - settings.get("PROVISIONING_PROFILE", ""), - target, - True, - ) - ] - ) - - postbuilds.extend( - [ - '%s code-sign-bundle "%s" "%s" "%s" "%s" %s' - % ( - os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"), - key, - settings.get("CODE_SIGN_ENTITLEMENTS", ""), - settings.get("PROVISIONING_PROFILE", ""), - os.path.join("${BUILT_PRODUCTS_DIR}", product_name), - False, - ) - ] - ) - return postbuilds - - def _GetIOSCodeSignIdentityKey(self, settings): - identity = settings.get("CODE_SIGN_IDENTITY") - if not identity: - return None - if identity not in XcodeSettings._codesigning_key_cache: - output = subprocess.check_output( - ["security", "find-identity", "-p", "codesigning", "-v"] - ) - for line in output.splitlines(): - if identity in line: - fingerprint = line.split()[1] - cache = XcodeSettings._codesigning_key_cache - assert identity not in cache or fingerprint == cache[identity], ( - "Multiple codesigning fingerprints for identity: %s" % identity - ) - XcodeSettings._codesigning_key_cache[identity] = fingerprint - return XcodeSettings._codesigning_key_cache.get(identity, "") - - def AddImplicitPostbuilds( - self, configname, output, output_binary, postbuilds=[], quiet=False - ): - """Returns a list of shell commands that should run before and after - |postbuilds|.""" - assert output_binary is not None - pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet) - post = self._GetIOSPostbuilds(configname, output_binary) - return pre + postbuilds + post - - def _AdjustLibrary(self, library, config_name=None): - if library.endswith(".framework"): - l_flag = "-framework " + os.path.splitext(os.path.basename(library))[0] - else: - m = self.library_re.match(library) - l_flag = "-l" + m.group(1) if m else library - - sdk_root = self._SdkPath(config_name) - if not sdk_root: - sdk_root = "" - # Xcode 7 started shipping with ".tbd" (text based stubs) files instead of - # ".dylib" without providing a real support for them. What it does, for - # "/usr/lib" libraries, is do "-L/usr/lib -lname" which is dependent on the - # library order and cause collision when building Chrome. - # - # Instead substitute ".tbd" to ".dylib" in the generated project when the - # following conditions are both true: - # - library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib", - # - the ".dylib" file does not exists but a ".tbd" file do. 
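# [Editor's note: sketch of the `security find-identity -p codesigning -v`
# parsing above -- the first whitespace-separated token after the list index
# is the fingerprint. The sample line is illustrative, not real output.]
def find_fingerprint(output, identity):
    for line in output.splitlines():
        if identity in line:
            return line.split()[1]
    return None

sample = '  1) A1B2C3D4E5F60718 "iPhone Developer: Jane Doe (TEAM12345)"'
assert find_fingerprint(sample, "iPhone Developer") == "A1B2C3D4E5F60718"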
- library = l_flag.replace("$(SDKROOT)", sdk_root) - if l_flag.startswith("$(SDKROOT)"): - basename, ext = os.path.splitext(library) - if ext == ".dylib" and not os.path.exists(library): - tbd_library = basename + ".tbd" - if os.path.exists(tbd_library): - library = tbd_library - return library - - def AdjustLibraries(self, libraries, config_name=None): - """Transforms entries like 'Cocoa.framework' in libraries into entries like - '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc. - """ - libraries = [self._AdjustLibrary(library, config_name) for library in libraries] - return libraries - - def _BuildMachineOSBuild(self): - return GetStdout(["sw_vers", "-buildVersion"]) - - def _XcodeIOSDeviceFamily(self, configname): - family = self.xcode_settings[configname].get("TARGETED_DEVICE_FAMILY", "1") - return [int(x) for x in family.split(",")] - - def GetExtraPlistItems(self, configname=None): - """Returns a dictionary with extra items to insert into Info.plist.""" - if configname not in XcodeSettings._plist_cache: - cache = {} - cache["BuildMachineOSBuild"] = self._BuildMachineOSBuild() - - xcode_version, xcode_build = XcodeVersion() - cache["DTXcode"] = xcode_version - cache["DTXcodeBuild"] = xcode_build - compiler = self.xcode_settings[configname].get("GCC_VERSION") - if compiler is not None: - cache["DTCompiler"] = compiler - - sdk_root = self._SdkRoot(configname) - if not sdk_root: - sdk_root = self._DefaultSdkRoot() - sdk_version = self._GetSdkVersionInfoItem(sdk_root, "--show-sdk-version") - cache["DTSDKName"] = sdk_root + (sdk_version or "") - if xcode_version >= "0720": - cache["DTSDKBuild"] = self._GetSdkVersionInfoItem( - sdk_root, "--show-sdk-build-version" - ) - elif xcode_version >= "0430": - cache["DTSDKBuild"] = sdk_version - else: - cache["DTSDKBuild"] = cache["BuildMachineOSBuild"] - - if self.isIOS: - cache["MinimumOSVersion"] = self.xcode_settings[configname].get( - "IPHONEOS_DEPLOYMENT_TARGET" - ) - cache["DTPlatformName"] = sdk_root - cache["DTPlatformVersion"] = sdk_version - - if configname.endswith("iphoneos"): - cache["CFBundleSupportedPlatforms"] = ["iPhoneOS"] - cache["DTPlatformBuild"] = cache["DTSDKBuild"] - else: - cache["CFBundleSupportedPlatforms"] = ["iPhoneSimulator"] - # This is weird, but Xcode sets DTPlatformBuild to an empty field - # for simulator builds. - cache["DTPlatformBuild"] = "" - XcodeSettings._plist_cache[configname] = cache - - # Include extra plist items that are per-target, not per global - # XcodeSettings. - items = dict(XcodeSettings._plist_cache[configname]) - if self.isIOS: - items["UIDeviceFamily"] = self._XcodeIOSDeviceFamily(configname) - return items - - def _DefaultSdkRoot(self): - """Returns the default SDKROOT to use. - - Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode - project, then the environment variable was empty. Starting with this - version, Xcode uses the name of the newest SDK installed. 
- """ - xcode_version, _ = XcodeVersion() - if xcode_version < "0500": - return "" - default_sdk_path = self._XcodeSdkPath("") - default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path) - if default_sdk_root: - return default_sdk_root - try: - all_sdks = GetStdout(["xcodebuild", "-showsdks"]) - except GypError: - # If xcodebuild fails, there will be no valid SDKs - return "" - for line in all_sdks.splitlines(): - items = line.split() - if len(items) >= 3 and items[-2] == "-sdk": - sdk_root = items[-1] - sdk_path = self._XcodeSdkPath(sdk_root) - if sdk_path == default_sdk_path: - return sdk_root - return "" - - -class MacPrefixHeader: - """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature. - - This feature consists of several pieces: - * If GCC_PREFIX_HEADER is present, all compilations in that project get an - additional |-include path_to_prefix_header| cflag. - * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is - instead compiled, and all other compilations in the project get an - additional |-include path_to_compiled_header| instead. - + Compiled prefix headers have the extension gch. There is one gch file for - every language used in the project (c, cc, m, mm), since gch files for - different languages aren't compatible. - + gch files themselves are built with the target's normal cflags, but they - obviously don't get the |-include| flag. Instead, they need a -x flag that - describes their language. - + All o files in the target need to depend on the gch file, to make sure - it's built before any o file is built. - - This class helps with some of these tasks, but it needs help from the build - system for writing dependencies to the gch files, for writing build commands - for the gch files, and for figuring out the location of the gch files. - """ - - def __init__( - self, xcode_settings, gyp_path_to_build_path, gyp_path_to_build_output - ): - """If xcode_settings is None, all methods on this class are no-ops. - - Args: - gyp_path_to_build_path: A function that takes a gyp-relative path, - and returns a path relative to the build directory. - gyp_path_to_build_output: A function that takes a gyp-relative path and - a language code ('c', 'cc', 'm', or 'mm'), and that returns a path - to where the output of precompiling that path for that language - should be placed (without the trailing '.gch'). - """ - # This doesn't support per-configuration prefix headers. Good enough - # for now. - self.header = None - self.compile_headers = False - if xcode_settings: - self.header = xcode_settings.GetPerTargetSetting("GCC_PREFIX_HEADER") - self.compile_headers = ( - xcode_settings.GetPerTargetSetting( - "GCC_PRECOMPILE_PREFIX_HEADER", default="NO" - ) - != "NO" - ) - self.compiled_headers = {} - if self.header: - if self.compile_headers: - for lang in ["c", "cc", "m", "mm"]: - self.compiled_headers[lang] = gyp_path_to_build_output( - self.header, lang - ) - self.header = gyp_path_to_build_path(self.header) - - def _CompiledHeader(self, lang, arch): - assert self.compile_headers - h = self.compiled_headers[lang] - if arch: - h += "." 
+ arch
-        return h
-
-    def GetInclude(self, lang, arch=None):
-        """Gets the cflags to include the prefix header for language |lang|."""
-        if self.compile_headers and lang in self.compiled_headers:
-            return "-include %s" % self._CompiledHeader(lang, arch)
-        elif self.header:
-            return "-include %s" % self.header
-        else:
-            return ""
-
-    def _Gch(self, lang, arch):
-        """Returns the actual file name of the compiled prefix header (gch) for
-        language |lang|."""
-        assert self.compile_headers
-        return self._CompiledHeader(lang, arch) + ".gch"
-
-    def GetObjDependencies(self, sources, objs, arch=None):
-        """Given a list of source files and the corresponding object files, returns
-        a list of (source, object, gch) tuples, where |gch| is the build-directory
-        relative path to the gch file each object file depends on. |sources[i]|
-        has to be the source file belonging to |objs[i]|."""
-        if not self.header or not self.compile_headers:
-            return []
-
-        result = []
-        for source, obj in zip(sources, objs):
-            ext = os.path.splitext(source)[1]
-            lang = {
-                ".c": "c",
-                ".cpp": "cc",
-                ".cc": "cc",
-                ".cxx": "cc",
-                ".m": "m",
-                ".mm": "mm",
-            }.get(ext, None)
-            if lang:
-                result.append((source, obj, self._Gch(lang, arch)))
-        return result
-
-    def GetPchBuildCommands(self, arch=None):
-        """Returns [(path_to_gch, language_flag, language, header)].
-        |path_to_gch| and |header| are relative to the build directory.
-        """
-        if not self.header or not self.compile_headers:
-            return []
-        return [
-            (self._Gch("c", arch), "-x c-header", "c", self.header),
-            (self._Gch("cc", arch), "-x c++-header", "cc", self.header),
-            (self._Gch("m", arch), "-x objective-c-header", "m", self.header),
-            (self._Gch("mm", arch), "-x objective-c++-header", "mm", self.header),
-        ]
-
-
-def XcodeVersion():
-    """Returns a tuple of version and build version of installed Xcode."""
-    # `xcodebuild -version` output looks like
-    #    Xcode 4.6.3
-    #    Build version 4H1503
-    # or like
-    #    Xcode 3.2.6
-    #    Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
-    #    BuildVersion: 10M2518
-    # Convert that to ('0463', '4H1503') or ('0326', '10M2518').
-    global XCODE_VERSION_CACHE
-    if XCODE_VERSION_CACHE:
-        return XCODE_VERSION_CACHE
-    version = ""
-    build = ""
-    try:
-        version_list = GetStdoutQuiet(["xcodebuild", "-version"]).splitlines()
-        # In some circumstances xcodebuild exits 0 but doesn't return
-        # the right results; for example, a user on 10.7 or 10.8 with
-        # a bogus path set via xcode-select
-        # In that case this may be a CLT-only install so fall back to
-        # checking that version.
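The four-character version normalization performed below is easy to misread; the same steps extracted into a standalone helper, with worked examples:

    def normalize_xcode_version(version):
        # Keep at most major.minor.micro, zero-pad a one-digit major, then
        # pad or trim the concatenation to exactly four characters.
        parts = version.split(".")[:3]
        parts[0] = parts[0].zfill(2)
        return ("".join(parts) + "00")[:4]

    assert normalize_xcode_version("4.6.3") == "0463"
    assert normalize_xcode_version("3.2.6") == "0326"
    assert normalize_xcode_version("11.0.0") == "1100"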
-        if len(version_list) < 2:
-            raise GypError("xcodebuild returned unexpected results")
-        version = version_list[0].split()[-1]  # Last word on first line
-        build = version_list[-1].split()[-1]  # Last word on last line
-    except GypError:  # Xcode not installed so look for Xcode Command Line Tools
-        version = CLTVersion()  # macOS Catalina returns 11.0.0.0.1.1567737322
-        if not version:
-            raise GypError("No Xcode or CLT version detected!")
-    # Be careful to convert "4.2.3" to "0423" and "11.0.0" to "1100":
-    version = version.split(".")[:3]  # Just major, minor, micro
-    version[0] = version[0].zfill(2)  # Add a leading zero if major is one digit
-    version = ("".join(version) + "00")[:4]  # Limit to exactly four characters
-    XCODE_VERSION_CACHE = (version, build)
-    return XCODE_VERSION_CACHE
-
-
-# This function ported from the logic in Homebrew's CLT version check
-def CLTVersion():
-    """Returns the version of command-line tools from pkgutil."""
-    # pkgutil output looks like
-    #     package-id: com.apple.pkg.CLTools_Executables
-    #     version: 5.0.1.0.1.1382131676
-    #     volume: /
-    #     location: /
-    #     install-time: 1382544035
-    #     groups: com.apple.FindSystemFiles.pkg-group
-    #             com.apple.DevToolsBoth.pkg-group
-    #             com.apple.DevToolsNonRelocatableShared.pkg-group
-    STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
-    FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
-    MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
-
-    regex = re.compile("version: (?P<version>.+)")
-    for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
-        try:
-            output = GetStdout(["/usr/sbin/pkgutil", "--pkg-info", key])
-            return re.search(regex, output).groupdict()["version"]
-        except GypError:
-            continue
-
-    regex = re.compile(r"Command Line Tools for Xcode\s+(?P<version>\S+)")
-    try:
-        output = GetStdout(["/usr/sbin/softwareupdate", "--history"])
-        return re.search(regex, output).groupdict()["version"]
-    except GypError:
-        return None
-
-
-def GetStdoutQuiet(cmdlist):
-    """Returns the content of standard output returned by invoking |cmdlist|.
-    Ignores the stderr.
-    Raises |GypError| if the command returns with a non-zero return code."""
-    job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out = job.communicate()[0].decode("utf-8")
-    if job.returncode != 0:
-        raise GypError("Error %d running %s" % (job.returncode, cmdlist[0]))
-    return out.rstrip("\n")
-
-
-def GetStdout(cmdlist):
-    """Returns the content of standard output returned by invoking |cmdlist|.
-    Raises |GypError| if the command returns with a non-zero return code."""
-    job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
-    out = job.communicate()[0].decode("utf-8")
-    if job.returncode != 0:
-        sys.stderr.write(out + "\n")
-        raise GypError("Error %d running %s" % (job.returncode, cmdlist[0]))
-    return out.rstrip("\n")
-
-
-def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
-    """Merges the global xcode_settings dictionary into each configuration of the
-    target represented by spec. For keys that are in both the global and the local
-    xcode_settings dict, the local key gets precedence.
-    """
-    # The xcode generator special-cases global xcode_settings and does something
-    # that amounts to merging in the global xcode_settings into each local
-    # xcode_settings dict.
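A minimal illustration of the merge semantics described above; the setting names are hypothetical, and the point is that local keys win:

    global_settings = {"SDKROOT": "macosx", "ARCHS": "x86_64"}
    local_settings = {"ARCHS": "arm64"}

    merged = global_settings.copy()
    merged.update(local_settings)
    assert merged == {"SDKROOT": "macosx", "ARCHS": "arm64"}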
-    global_xcode_settings = global_dict.get("xcode_settings", {})
-    for config in spec["configurations"].values():
-        if "xcode_settings" in config:
-            new_settings = global_xcode_settings.copy()
-            new_settings.update(config["xcode_settings"])
-            config["xcode_settings"] = new_settings
-
-
-def IsMacBundle(flavor, spec):
-    """Returns whether |spec| should be treated as a bundle.
-
-    Bundles are directories with a certain subdirectory structure, instead of
-    just a single file. Bundle rules produce not just a binary but also package
-    resources into that directory."""
-    is_mac_bundle = (
-        int(spec.get("mac_xctest_bundle", 0)) != 0
-        or int(spec.get("mac_xcuitest_bundle", 0)) != 0
-        or (int(spec.get("mac_bundle", 0)) != 0 and flavor == "mac")
-    )
-
-    if is_mac_bundle:
-        assert spec["type"] != "none", (
-            'mac_bundle targets cannot have type none (target "%s")'
-            % spec["target_name"]
-        )
-    return is_mac_bundle
-
-
-def GetMacBundleResources(product_dir, xcode_settings, resources):
-    """Yields (output, resource) pairs for every resource in |resources|.
-    Only call this for mac bundle targets.
-
-    Args:
-        product_dir: Path to the directory containing the output bundle,
-            relative to the build directory.
-        xcode_settings: The XcodeSettings of the current target.
-        resources: A list of bundle resources, relative to the build directory.
-    """
-    dest = os.path.join(product_dir, xcode_settings.GetBundleResourceFolder())
-    for res in resources:
-        output = dest
-
-        # The make generator doesn't support it, so forbid it everywhere
-        # to keep the generators more interchangeable.
-        assert " " not in res, "Spaces in resource filenames not supported (%s)" % res
-
-        # Split into (path,file).
-        res_parts = os.path.split(res)
-
-        # Now split the path into (prefix,maybe.lproj).
-        lproj_parts = os.path.split(res_parts[0])
-        # If the resource lives in a .lproj bundle, add that to the destination.
-        if lproj_parts[1].endswith(".lproj"):
-            output = os.path.join(output, lproj_parts[1])
-
-        output = os.path.join(output, res_parts[1])
-        # Compiled XIB files are referred to by .nib.
-        if output.endswith(".xib"):
-            output = os.path.splitext(output)[0] + ".nib"
-        # Compiled storyboard files are referred to by .storyboardc.
-        if output.endswith(".storyboard"):
-            output = os.path.splitext(output)[0] + ".storyboardc"
-
-        yield output, res
-
-
-def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
-    """Returns (info_plist, dest_plist, defines, extra_env), where:
-    * |info_plist| is the source plist path, relative to the
-      build directory,
-    * |dest_plist| is the destination plist path, relative to the
-      build directory,
-    * |defines| is a list of preprocessor defines (empty if the plist
-      shouldn't be preprocessed),
-    * |extra_env| is a dict of env variables that should be exported when
-      invoking |mac_tool copy-info-plist|.
-
-    Only call this for mac bundle targets.
-
-    Args:
-        product_dir: Path to the directory containing the output bundle,
-            relative to the build directory.
-        xcode_settings: The XcodeSettings of the current target.
-        gyp_path_to_build_path: A function that converts paths relative to the
-            current gyp file to paths relative to the build directory.
-    """
-    info_plist = xcode_settings.GetPerTargetSetting("INFOPLIST_FILE")
-    if not info_plist:
-        return None, None, [], {}
-
-    # The make generator doesn't support it, so forbid it everywhere
-    # to keep the generators more interchangeable.
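For reference, the destination mapping that GetMacBundleResources above applies to each resource, condensed into a small pure function (the bundle folder in the example is hypothetical; POSIX path separators assumed):

    import os

    def bundle_resource_output(dest, res):
        # dest: the bundle's resource folder; res: a build-relative path.
        output = dest
        res_dir, res_file = os.path.split(res)
        lproj = os.path.basename(res_dir)
        if lproj.endswith(".lproj"):
            output = os.path.join(output, lproj)  # keep the localization dir
        output = os.path.join(output, res_file)
        if output.endswith(".xib"):
            output = os.path.splitext(output)[0] + ".nib"  # compiled XIB
        if output.endswith(".storyboard"):
            output = os.path.splitext(output)[0] + ".storyboardc"  # compiled storyboard
        return output

    assert bundle_resource_output("App.app/Contents/Resources", "en.lproj/Main.xib") \
        == "App.app/Contents/Resources/en.lproj/Main.nib"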
- assert " " not in info_plist, ( - "Spaces in Info.plist filenames not supported (%s)" % info_plist - ) - - info_plist = gyp_path_to_build_path(info_plist) - - # If explicitly set to preprocess the plist, invoke the C preprocessor and - # specify any defines as -D flags. - if ( - xcode_settings.GetPerTargetSetting("INFOPLIST_PREPROCESS", default="NO") - == "YES" - ): - # Create an intermediate file based on the path. - defines = shlex.split( - xcode_settings.GetPerTargetSetting( - "INFOPLIST_PREPROCESSOR_DEFINITIONS", default="" - ) - ) - else: - defines = [] - - dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath()) - extra_env = xcode_settings.GetPerTargetSettings() - - return info_plist, dest_plist, defines, extra_env - - -def _GetXcodeEnv( - xcode_settings, built_products_dir, srcroot, configuration, additional_settings=None -): - """Return the environment variables that Xcode would set. See - http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153 - for a full list. - - Args: - xcode_settings: An XcodeSettings object. If this is None, this function - returns an empty dict. - built_products_dir: Absolute path to the built products dir. - srcroot: Absolute path to the source root. - configuration: The build configuration name. - additional_settings: An optional dict with more values to add to the - result. - """ - - if not xcode_settings: - return {} - - # This function is considered a friend of XcodeSettings, so let it reach into - # its implementation details. - spec = xcode_settings.spec - - # These are filled in on an as-needed basis. - env = { - "BUILT_FRAMEWORKS_DIR": built_products_dir, - "BUILT_PRODUCTS_DIR": built_products_dir, - "CONFIGURATION": configuration, - "PRODUCT_NAME": xcode_settings.GetProductName(), - # For FULL_PRODUCT_NAME see: - # /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec # noqa: E501 - "SRCROOT": srcroot, - "SOURCE_ROOT": "${SRCROOT}", - # This is not true for static libraries, but currently the env is only - # written for bundles: - "TARGET_BUILD_DIR": built_products_dir, - "TEMP_DIR": "${TMPDIR}", - "XCODE_VERSION_ACTUAL": XcodeVersion()[0], - } - if xcode_settings.GetPerConfigSetting("SDKROOT", configuration): - env["SDKROOT"] = xcode_settings._SdkPath(configuration) - else: - env["SDKROOT"] = "" - - if xcode_settings.mac_toolchain_dir: - env["DEVELOPER_DIR"] = xcode_settings.mac_toolchain_dir - - if spec["type"] in ( - "executable", - "static_library", - "shared_library", - "loadable_module", - ): - env["EXECUTABLE_NAME"] = xcode_settings.GetExecutableName() - env["EXECUTABLE_PATH"] = xcode_settings.GetExecutablePath() - env["FULL_PRODUCT_NAME"] = xcode_settings.GetFullProductName() - mach_o_type = xcode_settings.GetMachOType() - if mach_o_type: - env["MACH_O_TYPE"] = mach_o_type - env["PRODUCT_TYPE"] = xcode_settings.GetProductType() - if xcode_settings._IsBundle(): - # xcodeproj_file.py sets the same Xcode subfolder value for this as for - # FRAMEWORKS_FOLDER_PATH so Xcode builds will actually use FFP's value. 
- env["BUILT_FRAMEWORKS_DIR"] = os.path.join( - built_products_dir + os.sep + xcode_settings.GetBundleFrameworksFolderPath() - ) - env["CONTENTS_FOLDER_PATH"] = xcode_settings.GetBundleContentsFolderPath() - env["EXECUTABLE_FOLDER_PATH"] = xcode_settings.GetBundleExecutableFolderPath() - env[ - "UNLOCALIZED_RESOURCES_FOLDER_PATH" - ] = xcode_settings.GetBundleResourceFolder() - env["JAVA_FOLDER_PATH"] = xcode_settings.GetBundleJavaFolderPath() - env["FRAMEWORKS_FOLDER_PATH"] = xcode_settings.GetBundleFrameworksFolderPath() - env[ - "SHARED_FRAMEWORKS_FOLDER_PATH" - ] = xcode_settings.GetBundleSharedFrameworksFolderPath() - env[ - "SHARED_SUPPORT_FOLDER_PATH" - ] = xcode_settings.GetBundleSharedSupportFolderPath() - env["PLUGINS_FOLDER_PATH"] = xcode_settings.GetBundlePlugInsFolderPath() - env["XPCSERVICES_FOLDER_PATH"] = xcode_settings.GetBundleXPCServicesFolderPath() - env["INFOPLIST_PATH"] = xcode_settings.GetBundlePlistPath() - env["WRAPPER_NAME"] = xcode_settings.GetWrapperName() - - install_name = xcode_settings.GetInstallName() - if install_name: - env["LD_DYLIB_INSTALL_NAME"] = install_name - install_name_base = xcode_settings.GetInstallNameBase() - if install_name_base: - env["DYLIB_INSTALL_NAME_BASE"] = install_name_base - xcode_version, _ = XcodeVersion() - if xcode_version >= "0500" and not env.get("SDKROOT"): - sdk_root = xcode_settings._SdkRoot(configuration) - if not sdk_root: - sdk_root = xcode_settings._XcodeSdkPath("") - if sdk_root is None: - sdk_root = "" - env["SDKROOT"] = sdk_root - - if not additional_settings: - additional_settings = {} - else: - # Flatten lists to strings. - for k in additional_settings: - if not isinstance(additional_settings[k], str): - additional_settings[k] = " ".join(additional_settings[k]) - additional_settings.update(env) - - for k in additional_settings: - additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k]) - - return additional_settings - - -def _NormalizeEnvVarReferences(str): - """Takes a string containing variable references in the form ${FOO}, $(FOO), - or $FOO, and returns a string with all variable references in the form ${FOO}. - """ - # $FOO -> ${FOO} - str = re.sub(r"\$([a-zA-Z_][a-zA-Z0-9_]*)", r"${\1}", str) - - # $(FOO) -> ${FOO} - matches = re.findall(r"(\$\(([a-zA-Z0-9\-_]+)\))", str) - for match in matches: - to_replace, variable = match - assert "$(" not in match, "$($(FOO)) variables not supported: " + match - str = str.replace(to_replace, "${" + variable + "}") - - return str - - -def ExpandEnvVars(string, expansions): - """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the - expansions list. If the variable expands to something that references - another variable, this variable is expanded as well if it's in env -- - until no variables present in env are left.""" - for k, v in reversed(expansions): - string = string.replace("${" + k + "}", v) - string = string.replace("$(" + k + ")", v) - string = string.replace("$" + k, v) - return string - - -def _TopologicallySortedEnvVarKeys(env): - """Takes a dict |env| whose values are strings that can refer to other keys, - for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of - env such that key2 is after key1 in L if env[key2] refers to env[key1]. - - Throws an Exception in case of dependency cycles. - """ - # Since environment variables can refer to other variables, the evaluation - # order is important. Below is the logic to compute the dependency graph - # and sort it. 
-    regex = re.compile(r"\$\{([a-zA-Z0-9\-_]+)\}")
-
-    def GetEdges(node):
-        # Use a definition of edges such that user_of_variable -> used_variable.
-        # This happens to be easier in this case, since a variable's
-        # definition contains all variables it references in a single string.
-        # We can then reverse the result of the topological sort at the end.
-        # Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
-        matches = {v for v in regex.findall(env[node]) if v in env}
-        for dependee in matches:
-            assert "${" not in dependee, "Nested variables not supported: " + dependee
-        return matches
-
-    try:
-        # Topologically sort, and then reverse, because we used an edge definition
-        # that's inverted from the expected result of this function (see comment
-        # above).
-        order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
-        order.reverse()
-        return order
-    except gyp.common.CycleError as e:
-        raise GypError(
-            "Xcode environment variables are cyclically dependent: " + str(e.nodes)
-        )
-
-
-def GetSortedXcodeEnv(
-    xcode_settings, built_products_dir, srcroot, configuration, additional_settings=None
-):
-    env = _GetXcodeEnv(
-        xcode_settings, built_products_dir, srcroot, configuration, additional_settings
-    )
-    return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)]
-
-
-def GetSpecPostbuildCommands(spec, quiet=False):
-    """Returns the list of postbuilds explicitly defined on |spec|, in a form
-    executable by a shell."""
-    postbuilds = []
-    for postbuild in spec.get("postbuilds", []):
-        if not quiet:
-            postbuilds.append(
-                "echo POSTBUILD\\(%s\\) %s"
-                % (spec["target_name"], postbuild["postbuild_name"])
-            )
-        postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild["action"]))
-    return postbuilds
-
-
-def _HasIOSTarget(targets):
-    """Returns true if any target contains the iOS specific key
-    IPHONEOS_DEPLOYMENT_TARGET."""
-    for target_dict in targets.values():
-        for config in target_dict["configurations"].values():
-            if config.get("xcode_settings", {}).get("IPHONEOS_DEPLOYMENT_TARGET"):
-                return True
-    return False
-
-
-def _AddIOSDeviceConfigurations(targets):
-    """Clone all targets and append -iphoneos to the name.
Configure these targets - to build for iOS devices and use correct architectures for those builds.""" - for target_dict in targets.values(): - toolset = target_dict["toolset"] - configs = target_dict["configurations"] - for config_name, simulator_config_dict in dict(configs).items(): - iphoneos_config_dict = copy.deepcopy(simulator_config_dict) - configs[config_name + "-iphoneos"] = iphoneos_config_dict - configs[config_name + "-iphonesimulator"] = simulator_config_dict - if toolset == "target": - simulator_config_dict["xcode_settings"]["SDKROOT"] = "iphonesimulator" - iphoneos_config_dict["xcode_settings"]["SDKROOT"] = "iphoneos" - return targets - - -def CloneConfigurationForDeviceAndEmulator(target_dicts): - """If |target_dicts| contains any iOS targets, automatically create -iphoneos - targets for iOS device builds.""" - if _HasIOSTarget(target_dicts): - return _AddIOSDeviceConfigurations(target_dicts) - return target_dicts diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation_test.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation_test.py deleted file mode 100644 index 98b02320d5a9e..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation_test.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python3 - -"""Unit tests for the xcode_emulation.py file.""" - -from gyp.xcode_emulation import XcodeSettings -import sys -import unittest - - -class TestXcodeSettings(unittest.TestCase): - def setUp(self): - if sys.platform != "darwin": - self.skipTest("This test only runs on macOS") - - def test_GetCflags(self): - target = { - "type": "static_library", - "configurations": { - "Release": {}, - }, - } - configuration_name = "Release" - xcode_settings = XcodeSettings(target) - cflags = xcode_settings.GetCflags(configuration_name, "arm64") - - # Do not quote `-arch arm64` with spaces in one string. - self.assertEqual( - cflags, - ["-fasm-blocks", "-mpascal-strings", "-Os", "-gdwarf-2", "-arch", "arm64"], - ) - - def GypToBuildPath(self, path): - return path - - def test_GetLdflags(self): - target = { - "type": "static_library", - "configurations": { - "Release": {}, - }, - } - configuration_name = "Release" - xcode_settings = XcodeSettings(target) - ldflags = xcode_settings.GetLdflags( - configuration_name, "PRODUCT_DIR", self.GypToBuildPath, "arm64" - ) - - # Do not quote `-arch arm64` with spaces in one string. - self.assertEqual(ldflags, ["-arch", "arm64", "-LPRODUCT_DIR"]) - - -if __name__ == "__main__": - unittest.main() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py deleted file mode 100644 index bb74eacbeaf4a..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py +++ /dev/null @@ -1,302 +0,0 @@ -# Copyright (c) 2014 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Xcode-ninja wrapper project file generator. - -This updates the data structures passed to the Xcode gyp generator to build -with ninja instead. The Xcode project itself is transformed into a list of -executable targets, each with a build step to build with ninja, and a target -with every source and resource file. 
This appears to sidestep some of the
-major performance headaches experienced using complex projects and large
-numbers of targets within Xcode.
-"""
-
-import errno
-import gyp.generator.ninja
-import os
-import re
-import xml.sax.saxutils
-
-
-def _WriteWorkspace(main_gyp, sources_gyp, params):
-    """ Create a workspace to wrap main and sources gyp paths. """
-    (build_file_root, build_file_ext) = os.path.splitext(main_gyp)
-    workspace_path = build_file_root + ".xcworkspace"
-    options = params["options"]
-    if options.generator_output:
-        workspace_path = os.path.join(options.generator_output, workspace_path)
-    try:
-        os.makedirs(workspace_path)
-    except OSError as e:
-        if e.errno != errno.EEXIST:
-            raise
-    output_string = (
-        '<?xml version="1.0" encoding="UTF-8"?>\n' + '<Workspace version = "1.0">\n'
-    )
-    for gyp_name in [main_gyp, sources_gyp]:
-        name = os.path.splitext(os.path.basename(gyp_name))[0] + ".xcodeproj"
-        name = xml.sax.saxutils.quoteattr("group:" + name)
-        output_string += "  <FileRef location = %s></FileRef>\n" % name
-    output_string += "</Workspace>\n"
-
-    workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
-
-    try:
-        with open(workspace_file) as input_file:
-            input_string = input_file.read()
-            if input_string == output_string:
-                return
-    except OSError:
-        # Ignore errors if the file doesn't exist.
-        pass
-
-    with open(workspace_file, "w") as output_file:
-        output_file.write(output_string)
-
-
-def _TargetFromSpec(old_spec, params):
-    """ Create fake target for xcode-ninja wrapper. """
-    # Determine ninja top level build dir (e.g. /path/to/out).
-    ninja_toplevel = None
-    jobs = 0
-    if params:
-        options = params["options"]
-        ninja_toplevel = os.path.join(
-            options.toplevel_dir, gyp.generator.ninja.ComputeOutputDir(params)
-        )
-        jobs = params.get("generator_flags", {}).get("xcode_ninja_jobs", 0)
-
-    target_name = old_spec.get("target_name")
-    product_name = old_spec.get("product_name", target_name)
-    product_extension = old_spec.get("product_extension")
-
-    ninja_target = {}
-    ninja_target["target_name"] = target_name
-    ninja_target["product_name"] = product_name
-    if product_extension:
-        ninja_target["product_extension"] = product_extension
-    ninja_target["toolset"] = old_spec.get("toolset")
-    ninja_target["default_configuration"] = old_spec.get("default_configuration")
-    ninja_target["configurations"] = {}
-
-    # Tell Xcode to look in |ninja_toplevel| for build products.
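For example, with a hypothetical ninja_toplevel of "/src/out", the setting assembled below becomes:

    ninja_toplevel = "/src/out"
    config_build_dir = "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
    # Xcode later substitutes its own variables, so a Release device build
    # resolves to "/src/out/Release-iphoneos".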
- new_xcode_settings = {} - if ninja_toplevel: - new_xcode_settings["CONFIGURATION_BUILD_DIR"] = ( - "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel - ) - - if "configurations" in old_spec: - for config in old_spec["configurations"]: - old_xcode_settings = old_spec["configurations"][config].get( - "xcode_settings", {} - ) - if "IPHONEOS_DEPLOYMENT_TARGET" in old_xcode_settings: - new_xcode_settings["CODE_SIGNING_REQUIRED"] = "NO" - new_xcode_settings["IPHONEOS_DEPLOYMENT_TARGET"] = old_xcode_settings[ - "IPHONEOS_DEPLOYMENT_TARGET" - ] - for key in ["BUNDLE_LOADER", "TEST_HOST"]: - if key in old_xcode_settings: - new_xcode_settings[key] = old_xcode_settings[key] - - ninja_target["configurations"][config] = {} - ninja_target["configurations"][config][ - "xcode_settings" - ] = new_xcode_settings - - ninja_target["mac_bundle"] = old_spec.get("mac_bundle", 0) - ninja_target["mac_xctest_bundle"] = old_spec.get("mac_xctest_bundle", 0) - ninja_target["ios_app_extension"] = old_spec.get("ios_app_extension", 0) - ninja_target["ios_watchkit_extension"] = old_spec.get("ios_watchkit_extension", 0) - ninja_target["ios_watchkit_app"] = old_spec.get("ios_watchkit_app", 0) - ninja_target["type"] = old_spec["type"] - if ninja_toplevel: - ninja_target["actions"] = [ - { - "action_name": "Compile and copy %s via ninja" % target_name, - "inputs": [], - "outputs": [], - "action": [ - "env", - "PATH=%s" % os.environ["PATH"], - "ninja", - "-C", - new_xcode_settings["CONFIGURATION_BUILD_DIR"], - target_name, - ], - "message": "Compile and copy %s via ninja" % target_name, - }, - ] - if jobs > 0: - ninja_target["actions"][0]["action"].extend(("-j", jobs)) - return ninja_target - - -def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec): - """Limit targets for Xcode wrapper. - - Xcode sometimes performs poorly with too many targets, so only include - proper executable targets, with filters to customize. - Arguments: - target_extras: Regular expression to always add, matching any target. - executable_target_pattern: Regular expression limiting executable targets. - spec: Specifications for target. - """ - target_name = spec.get("target_name") - # Always include targets matching target_extras. - if target_extras is not None and re.search(target_extras, target_name): - return True - - # Otherwise just show executable targets and xc_tests. - if int(spec.get("mac_xctest_bundle", 0)) != 0 or ( - spec.get("type", "") == "executable" - and spec.get("product_extension", "") != "bundle" - ): - - # If there is a filter and the target does not match, exclude the target. - if executable_target_pattern is not None: - if not re.search(executable_target_pattern, target_name): - return False - return True - return False - - -def CreateWrapper(target_list, target_dicts, data, params): - """Initialize targets for the ninja wrapper. - - This sets up the necessary variables in the targets to generate Xcode projects - that use ninja as an external builder. - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - data: Dict of flattened build files keyed on gyp path. - params: Dict of global options for gyp. - """ - orig_gyp = params["build_files"][0] - for gyp_name, gyp_dict in data.items(): - if gyp_name == orig_gyp: - depth = gyp_dict["_DEPTH"] - - # Check for custom main gyp name, otherwise use the default CHROMIUM_GYP_FILE - # and prepend .ninja before the .gyp extension. 
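The renaming described in the comment above amounts to the following (the build file name is hypothetical):

    import os

    orig_gyp = "build/all.gyp"
    root, ext = os.path.splitext(orig_gyp)
    main_gyp = root + ".ninja" + ext
    assert main_gyp == "build/all.ninja.gyp"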
-    generator_flags = params.get("generator_flags", {})
-    main_gyp = generator_flags.get("xcode_ninja_main_gyp", None)
-    if main_gyp is None:
-        (build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
-        main_gyp = build_file_root + ".ninja" + build_file_ext
-
-    # Create new |target_list|, |target_dicts| and |data| data structures.
-    new_target_list = []
-    new_target_dicts = {}
-    new_data = {}
-
-    # Set base keys needed for |data|.
-    new_data[main_gyp] = {}
-    new_data[main_gyp]["included_files"] = []
-    new_data[main_gyp]["targets"] = []
-    new_data[main_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {})
-
-    # Normally the xcode-ninja generator includes only valid executable targets.
-    # If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
-    # executable targets that match the pattern. (Default all)
-    executable_target_pattern = generator_flags.get(
-        "xcode_ninja_executable_target_pattern", None
-    )
-
-    # For including other non-executable targets, add the matching target name
-    # to the |xcode_ninja_target_pattern| regular expression. (Default none)
-    target_extras = generator_flags.get("xcode_ninja_target_pattern", None)
-
-    for old_qualified_target in target_list:
-        spec = target_dicts[old_qualified_target]
-        if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
-            # Add to new_target_list.
-            target_name = spec.get("target_name")
-            new_target_name = f"{main_gyp}:{target_name}#target"
-            new_target_list.append(new_target_name)
-
-            # Add to new_target_dicts.
-            new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)
-
-            # Add to new_data.
-            for old_target in data[old_qualified_target.split(":")[0]]["targets"]:
-                if old_target["target_name"] == target_name:
-                    new_data_target = {}
-                    new_data_target["target_name"] = old_target["target_name"]
-                    new_data_target["toolset"] = old_target["toolset"]
-                    new_data[main_gyp]["targets"].append(new_data_target)
-
-    # Create sources target.
-    sources_target_name = "sources_for_indexing"
-    sources_target = _TargetFromSpec(
-        {
-            "target_name": sources_target_name,
-            "toolset": "target",
-            "default_configuration": "Default",
-            "mac_bundle": "0",
-            "type": "executable",
-        },
-        None,
-    )
-
-    # Tell Xcode to look everywhere for headers.
-    sources_target["configurations"] = {"Default": {"include_dirs": [depth]}}
-
-    # Put excluded files into the sources target so they can be opened in Xcode.
-    skip_excluded_files = not generator_flags.get(
-        "xcode_ninja_list_excluded_files", True
-    )
-
-    sources = []
-    for target, target_dict in target_dicts.items():
-        base = os.path.dirname(target)
-        files = target_dict.get("sources", []) + target_dict.get(
-            "mac_bundle_resources", []
-        )
-
-        if not skip_excluded_files:
-            files.extend(
-                target_dict.get("sources_excluded", [])
-                + target_dict.get("mac_bundle_resources_excluded", [])
-            )
-
-        for action in target_dict.get("actions", []):
-            files.extend(action.get("inputs", []))
-
-            if not skip_excluded_files:
-                files.extend(action.get("inputs_excluded", []))
-
-        # Remove files starting with $. These are mostly intermediate files for the
-        # build system.
-        files = [file for file in files if not file.startswith("$")]
-
-        # Make sources relative to root build file.
-        relative_path = os.path.dirname(main_gyp)
-        sources += [
-            os.path.relpath(os.path.join(base, file), relative_path) for file in files
-        ]
-
-    sources_target["sources"] = sorted(set(sources))
-
-    # Put sources_to_index in its own gyp.
- sources_gyp = os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp") - fully_qualified_target_name = f"{sources_gyp}:{sources_target_name}#target" - - # Add to new_target_list, new_target_dicts and new_data. - new_target_list.append(fully_qualified_target_name) - new_target_dicts[fully_qualified_target_name] = sources_target - new_data_target = {} - new_data_target["target_name"] = sources_target["target_name"] - new_data_target["_DEPTH"] = depth - new_data_target["toolset"] = "target" - new_data[sources_gyp] = {} - new_data[sources_gyp]["targets"] = [] - new_data[sources_gyp]["included_files"] = [] - new_data[sources_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {}) - new_data[sources_gyp]["targets"].append(new_data_target) - - # Write workspace to file. - _WriteWorkspace(main_gyp, sources_gyp, params) - return (new_target_list, new_target_dicts, new_data) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py deleted file mode 100644 index 33c667c266bf6..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py +++ /dev/null @@ -1,3198 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Xcode project file generator. - -This module is both an Xcode project file generator and a documentation of the -Xcode project file format. Knowledge of the project file format was gained -based on extensive experience with Xcode, and by making changes to projects in -Xcode.app and observing the resultant changes in the associated project files. - -XCODE PROJECT FILES - -The generator targets the file format as written by Xcode 3.2 (specifically, -3.2.6), but past experience has taught that the format has not changed -significantly in the past several years, and future versions of Xcode are able -to read older project files. - -Xcode project files are "bundled": the project "file" from an end-user's -perspective is actually a directory with an ".xcodeproj" extension. The -project file from this module's perspective is actually a file inside this -directory, always named "project.pbxproj". This file contains a complete -description of the project and is all that is needed to use the xcodeproj. -Other files contained in the xcodeproj directory are simply used to store -per-user settings, such as the state of various UI elements in the Xcode -application. - -The project.pbxproj file is a property list, stored in a format almost -identical to the NeXTstep property list format. The file is able to carry -Unicode data, and is encoded in UTF-8. The root element in the property list -is a dictionary that contains several properties of minimal interest, and two -properties of immense interest. The most important property is a dictionary -named "objects". The entire structure of the project is represented by the -children of this property. The objects dictionary is keyed by unique 96-bit -values represented by 24 uppercase hexadecimal characters. Each value in the -objects dictionary is itself a dictionary, describing an individual object. - -Each object in the dictionary is a member of a class, which is identified by -the "isa" property of each object. A variety of classes are represented in a -project file. 
Objects can refer to other objects by ID, using the 24-character
-hexadecimal object key. A project's objects form a tree, with a root object
-of class PBXProject at the root. As an example, the PBXProject object serves
-as parent to an XCConfigurationList object defining the build configurations
-used in the project, a PBXGroup object serving as a container for all files
-referenced in the project, and a list of target objects, each of which defines
-a target in the project. There are several different types of target object,
-such as PBXNativeTarget and PBXAggregateTarget. In this module, this
-relationship is expressed by having each target type derive from an abstract
-base named XCTarget.
-
-The project.pbxproj file's root dictionary also contains a property, sibling to
-the "objects" dictionary, named "rootObject". The value of rootObject is a
-24-character object key referring to the root PBXProject object in the
-objects dictionary.
-
-In Xcode, every file used as input to a target or produced as a final product
-of a target must appear somewhere in the hierarchy rooted at the PBXGroup
-object referenced by the PBXProject's mainGroup property. A PBXGroup is
-generally represented as a folder in the Xcode application. PBXGroups can
-contain other PBXGroups as well as PBXFileReferences, which are pointers to
-actual files.
-
-Each XCTarget contains a list of build phases, represented in this module by
-the abstract base XCBuildPhase. Examples of concrete XCBuildPhase derivations
-are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the
-"Compile Sources" and "Link Binary With Libraries" phases displayed in the
-Xcode application. Files used as input to these phases (for example, source
-files in the former case and libraries and frameworks in the latter) are
-represented by PBXBuildFile objects, referenced by elements of "files" lists
-in XCBuildPhase objects. Each PBXBuildFile object refers to a PBXFileReference
-object as a "weak" reference: it does not "own" the PBXFileReference, which is
-owned by the root object's mainGroup or a descendant group. In most cases, the
-layer of indirection between an XCBuildPhase and a PBXFileReference via a
-PBXBuildFile appears extraneous, but there's actually one reason for this:
-file-specific compiler flags are added to the PBXBuildFile object so as to
-allow a single file to be a member of multiple targets while having distinct
-compiler flags for each. These flags can be modified in the Xcode application
-in the "Build" tab of a File Info window.
-
-When a project is open in the Xcode application, Xcode will rewrite it. As
-such, this module is careful to adhere to the formatting used by Xcode, to
-avoid insignificant changes appearing in the file when it is used in the
-Xcode application. This will keep version control repositories happy, and
-makes it possible to compare a project file used in Xcode to one generated by
-this module to determine if any significant changes were made in the
-application.
-
-Xcode has its own way of assigning 24-character identifiers to each object,
-which is not duplicated here. Because the identifier is only generated once,
-when an object is created, and is then left unchanged, there is no need
-to attempt to duplicate Xcode's behavior in this area. The generator is free
-to select any identifier, even at random, to refer to the objects it creates,
-and Xcode will retain those identifiers and use them when subsequently
-rewriting the project file.
However, the generator would choose new random -identifiers each time the project files are generated, leading to difficulties -comparing "used" project files to "pristine" ones produced by this module, -and causing the appearance of changes as every object identifier is changed -when updated projects are checked in to a version control repository. To -mitigate this problem, this module chooses identifiers in a more deterministic -way, by hashing a description of each object as well as its parent and ancestor -objects. This strategy should result in minimal "shift" in IDs as successive -generations of project files are produced. - -THIS MODULE - -This module introduces several classes, all derived from the XCObject class. -Nearly all of the "brains" are built into the XCObject class, which understands -how to create and modify objects, maintain the proper tree structure, compute -identifiers, and print objects. For the most part, classes derived from -XCObject need only provide a _schema class object, a dictionary that -expresses what properties objects of the class may contain. - -Given this structure, it's possible to build a minimal project file by creating -objects of the appropriate types and making the proper connections: - - config_list = XCConfigurationList() - group = PBXGroup() - project = PBXProject({'buildConfigurationList': config_list, - 'mainGroup': group}) - -With the project object set up, it can be added to an XCProjectFile object. -XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject -subclass that does not actually correspond to a class type found in a project -file. Rather, it is used to represent the project file's root dictionary. -Printing an XCProjectFile will print the entire project file, including the -full "objects" dictionary. - - project_file = XCProjectFile({'rootObject': project}) - project_file.ComputeIDs() - project_file.Print() - -Xcode project files are always encoded in UTF-8. This module will accept -strings of either the str class or the unicode class. Strings of class str -are assumed to already be encoded in UTF-8. Obviously, if you're just using -ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset. -Strings of class unicode are handled properly and encoded in UTF-8 when -a project file is output. -""" - -import gyp.common -from functools import cmp_to_key -import hashlib -from operator import attrgetter -import posixpath -import re -import struct -import sys - - -def cmp(x, y): - return (x > y) - (x < y) - - -# See XCObject._EncodeString. This pattern is used to determine when a string -# can be printed unquoted. Strings that match this pattern may be printed -# unquoted. Strings that do not match must be quoted and may be further -# transformed to be properly encoded. Note that this expression matches the -# characters listed with "+", for 1 or more occurrences: if a string is empty, -# it must not match this pattern, because it needs to be encoded as "". -_unquoted = re.compile("^[A-Za-z0-9$./_]+$") - -# Strings that match this pattern are quoted regardless of what _unquoted says. -# Oddly, Xcode will quote any string with a run of three or more underscores. -_quoted = re.compile("___") - -# This pattern should match any character that needs to be escaped by -# XCObject._EncodeString. See that function. 
-_escaped = re.compile('[\\\\"]|[\x00-\x1f]') - - -# Used by SourceTreeAndPathFromPath -_path_leading_variable = re.compile(r"^\$\((.*?)\)(/(.*))?$") - - -def SourceTreeAndPathFromPath(input_path): - """Given input_path, returns a tuple with sourceTree and path values. - - Examples: - input_path (source_tree, output_path) - '$(VAR)/path' ('VAR', 'path') - '$(VAR)' ('VAR', None) - 'path' (None, 'path') - """ - - source_group_match = _path_leading_variable.match(input_path) - if source_group_match: - source_tree = source_group_match.group(1) - output_path = source_group_match.group(3) # This may be None. - else: - source_tree = None - output_path = input_path - - return (source_tree, output_path) - - -def ConvertVariablesToShellSyntax(input_string): - return re.sub(r"\$\((.*?)\)", "${\\1}", input_string) - - -class XCObject: - """The abstract base of all class types used in Xcode project files. - - Class variables: - _schema: A dictionary defining the properties of this class. The keys to - _schema are string property keys as used in project files. Values - are a list of four or five elements: - [ is_list, property_type, is_strong, is_required, default ] - is_list: True if the property described is a list, as opposed - to a single element. - property_type: The type to use as the value of the property, - or if is_list is True, the type to use for each - element of the value's list. property_type must - be an XCObject subclass, or one of the built-in - types str, int, or dict. - is_strong: If property_type is an XCObject subclass, is_strong - is True to assert that this class "owns," or serves - as parent, to the property value (or, if is_list is - True, values). is_strong must be False if - property_type is not an XCObject subclass. - is_required: True if the property is required for the class. - Note that is_required being True does not preclude - an empty string ("", in the case of property_type - str) or list ([], in the case of is_list True) from - being set for the property. - default: Optional. If is_required is True, default may be set - to provide a default value for objects that do not supply - their own value. If is_required is True and default - is not provided, users of the class must supply their own - value for the property. - Note that although the values of the array are expressed in - boolean terms, subclasses provide values as integers to conserve - horizontal space. - _should_print_single_line: False in XCObject. Subclasses whose objects - should be written to the project file in the - alternate single-line format, such as - PBXFileReference and PBXBuildFile, should - set this to True. - _encode_transforms: Used by _EncodeString to encode unprintable characters. - The index into this list is the ordinal of the - character to transform; each value is a string - used to represent the character in the output. XCObject - provides an _encode_transforms list suitable for most - XCObject subclasses. - _alternate_encode_transforms: Provided for subclasses that wish to use - the alternate encoding rules. Xcode seems - to use these rules when printing objects in - single-line format. Subclasses that desire - this behavior should set _encode_transforms - to _alternate_encode_transforms. - _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs - to construct this object's ID. 
Most classes that need custom - hashing behavior should do it by overriding Hashables, - but in some cases an object's parent may wish to push a - hashable value into its child, and it can do so by appending - to _hashables. - Attributes: - id: The object's identifier, a 24-character uppercase hexadecimal string. - Usually, objects being created should not set id until the entire - project file structure is built. At that point, UpdateIDs() should - be called on the root object to assign deterministic values for id to - each object in the tree. - parent: The object's parent. This is set by a parent XCObject when a child - object is added to it. - _properties: The object's property dictionary. An object's properties are - described by its class' _schema variable. - """ - - _schema = {} - _should_print_single_line = False - - # See _EncodeString. - _encode_transforms = [] - i = 0 - while i < ord(" "): - _encode_transforms.append("\\U%04x" % i) - i = i + 1 - _encode_transforms[7] = "\\a" - _encode_transforms[8] = "\\b" - _encode_transforms[9] = "\\t" - _encode_transforms[10] = "\\n" - _encode_transforms[11] = "\\v" - _encode_transforms[12] = "\\f" - _encode_transforms[13] = "\\n" - - _alternate_encode_transforms = list(_encode_transforms) - _alternate_encode_transforms[9] = chr(9) - _alternate_encode_transforms[10] = chr(10) - _alternate_encode_transforms[11] = chr(11) - - def __init__(self, properties=None, id=None, parent=None): - self.id = id - self.parent = parent - self._properties = {} - self._hashables = [] - self._SetDefaultsFromSchema() - self.UpdateProperties(properties) - - def __repr__(self): - try: - name = self.Name() - except NotImplementedError: - return f"<{self.__class__.__name__} at 0x{id(self):x}>" - return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>" - - def Copy(self): - """Make a copy of this object. - - The new object will have its own copy of lists and dicts. Any XCObject - objects owned by this object (marked "strong") will be copied in the - new object, even those found in lists. If this object has any weak - references to other XCObjects, the same references are added to the new - object without making a copy. - """ - - that = self.__class__(id=self.id, parent=self.parent) - for key, value in self._properties.items(): - is_strong = self._schema[key][2] - - if isinstance(value, XCObject): - if is_strong: - new_value = value.Copy() - new_value.parent = that - that._properties[key] = new_value - else: - that._properties[key] = value - elif isinstance(value, (str, int)): - that._properties[key] = value - elif isinstance(value, list): - if is_strong: - # If is_strong is True, each element is an XCObject, so it's safe to - # call Copy. - that._properties[key] = [] - for item in value: - new_item = item.Copy() - new_item.parent = that - that._properties[key].append(new_item) - else: - that._properties[key] = value[:] - elif isinstance(value, dict): - # dicts are never strong. - if is_strong: - raise TypeError( - "Strong dict for key " + key + " in " + self.__class__.__name__ - ) - else: - that._properties[key] = value.copy() - else: - raise TypeError( - "Unexpected type " - + value.__class__.__name__ - + " for key " - + key - + " in " - + self.__class__.__name__ - ) - - return that - - def Name(self): - """Return the name corresponding to an object. - - Not all objects necessarily need to be nameable, and not all that do have - a "name" property. Override as needed. 
- """ - - # If the schema indicates that "name" is required, try to access the - # property even if it doesn't exist. This will result in a KeyError - # being raised for the property that should be present, which seems more - # appropriate than NotImplementedError in this case. - if "name" in self._properties or ( - "name" in self._schema and self._schema["name"][3] - ): - return self._properties["name"] - - raise NotImplementedError(self.__class__.__name__ + " must implement Name") - - def Comment(self): - """Return a comment string for the object. - - Most objects just use their name as the comment, but PBXProject uses - different values. - - The returned comment is not escaped and does not have any comment marker - strings applied to it. - """ - - return self.Name() - - def Hashables(self): - hashables = [self.__class__.__name__] - - name = self.Name() - if name is not None: - hashables.append(name) - - hashables.extend(self._hashables) - - return hashables - - def HashablesForChild(self): - return None - - def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None): - """Set "id" properties deterministically. - - An object's "id" property is set based on a hash of its class type and - name, as well as the class type and name of all ancestor objects. As - such, it is only advisable to call ComputeIDs once an entire project file - tree is built. - - If recursive is True, recurse into all descendant objects and update their - hashes. - - If overwrite is True, any existing value set in the "id" property will be - replaced. - """ - - def _HashUpdate(hash, data): - """Update hash with data's length and contents. - - If the hash were updated only with the value of data, it would be - possible for clowns to induce collisions by manipulating the names of - their objects. By adding the length, it's exceedingly less likely that - ID collisions will be encountered, intentionally or not. - """ - - hash.update(struct.pack(">i", len(data))) - if isinstance(data, str): - data = data.encode("utf-8") - hash.update(data) - - if seed_hash is None: - seed_hash = hashlib.sha1() - - hash = seed_hash.copy() - - hashables = self.Hashables() - assert len(hashables) > 0 - for hashable in hashables: - _HashUpdate(hash, hashable) - - if recursive: - hashables_for_child = self.HashablesForChild() - if hashables_for_child is None: - child_hash = hash - else: - assert len(hashables_for_child) > 0 - child_hash = seed_hash.copy() - for hashable in hashables_for_child: - _HashUpdate(child_hash, hashable) - - for child in self.Children(): - child.ComputeIDs(recursive, overwrite, child_hash) - - if overwrite or self.id is None: - # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is - # is 160 bits. Instead of throwing out 64 bits of the digest, xor them - # into the portion that gets used. - assert hash.digest_size % 4 == 0 - digest_int_count = hash.digest_size // 4 - digest_ints = struct.unpack(">" + "I" * digest_int_count, hash.digest()) - id_ints = [0, 0, 0] - for index in range(0, digest_int_count): - id_ints[index % 3] ^= digest_ints[index] - self.id = "%08X%08X%08X" % tuple(id_ints) - - def EnsureNoIDCollisions(self): - """Verifies that no two objects have the same ID. Checks all descendants. 
-    def EnsureNoIDCollisions(self):
-        """Verifies that no two objects have the same ID.  Checks all descendants.
-        """
-
-        ids = {}
-        descendants = self.Descendants()
-        for descendant in descendants:
-            if descendant.id in ids:
-                other = ids[descendant.id]
-                raise KeyError(
-                    'Duplicate ID %s, objects "%s" and "%s" in "%s"'
-                    % (
-                        descendant.id,
-                        str(descendant._properties),
-                        str(other._properties),
-                        self._properties["rootObject"].Name(),
-                    )
-                )
-            ids[descendant.id] = descendant
-
-    def Children(self):
-        """Returns a list of all of this object's owned (strong) children."""
-
-        children = []
-        for property, attributes in self._schema.items():
-            (is_list, property_type, is_strong) = attributes[0:3]
-            if is_strong and property in self._properties:
-                if not is_list:
-                    children.append(self._properties[property])
-                else:
-                    children.extend(self._properties[property])
-        return children
-
-    def Descendants(self):
-        """Returns a list of all of this object's descendants, including this
-        object.
-        """
-
-        children = self.Children()
-        descendants = [self]
-        for child in children:
-            descendants.extend(child.Descendants())
-        return descendants
-
-    def PBXProjectAncestor(self):
-        # The base case for recursion is defined at PBXProject.PBXProjectAncestor.
-        if self.parent:
-            return self.parent.PBXProjectAncestor()
-        return None
-
-    def _EncodeComment(self, comment):
-        """Encodes a comment to be placed in the project file output, mimicking
-        Xcode behavior.
-        """
-
-        # This mimics Xcode behavior by wrapping the comment in "/*" and "*/".  If
-        # the string already contains a "*/", it is turned into "(*)/".  This keeps
-        # the file writer from outputting something that would be treated as the
-        # end of a comment in the middle of something intended to be entirely a
-        # comment.
-        return "/* " + comment.replace("*/", "(*)/") + " */"
-
-    def _EncodeTransform(self, match):
-        # This function works closely with _EncodeString.  It will only be called
-        # by re.sub with match.group(0) containing a character matched by
-        # the _escaped expression.
-        char = match.group(0)
-
-        # Backslashes (\) and quotation marks (") are always replaced with a
-        # backslash-escaped version of the same.  Everything else gets its
-        # replacement from the class' _encode_transforms array.
-        if char == "\\":
-            return "\\\\"
-        if char == '"':
-            return '\\"'
-        return self._encode_transforms[ord(char)]
-
-    def _EncodeString(self, value):
-        """Encodes a string to be placed in the project file output, mimicking
-        Xcode behavior.
-        """
-
-        # Use quotation marks when any character outside of the range A-Z, a-z,
-        # 0-9, $ (dollar sign), . (period), and _ (underscore) is present.  Also
-        # use quotation marks to represent empty strings.
-        #
-        # Escape " (double-quote) and \ (backslash) by preceding them with a
-        # backslash.
-        #
-        # Some characters below the printable ASCII range are encoded specially:
-        #     7 ^G BEL is encoded as "\a"
-        #     8 ^H BS  is encoded as "\b"
-        #    11 ^K VT  is encoded as "\v"
-        #    12 ^L NP  is encoded as "\f"
-        #   127 ^? DEL is passed through as-is without escaping
-        #  - In PBXFileReference and PBXBuildFile objects:
-        #     9 ^I HT  is passed through as-is without escaping
-        #    10 ^J NL  is passed through as-is without escaping
-        #    13 ^M CR  is passed through as-is without escaping
-        #  - In other objects:
-        #     9 ^I HT  is encoded as "\t"
-        #    10 ^J NL  is encoded as "\n"
-        #    13 ^M CR  is encoded as "\n" rendering it indistinguishable from
-        #                                 10 ^J NL
-        # All other characters within the ASCII control character range (0
-        # through 31 inclusive) are encoded as "\U001f" referring to the Unicode
-        # code point in hexadecimal.  For example, character 14 (^N SO) is encoded
-        # as "\U000e".  Characters above the ASCII range are passed through to the
-        # output encoded as UTF-8 without any escaping.  These mappings are
-        # contained in the class' _encode_transforms list.
-
-        if _unquoted.search(value) and not _quoted.search(value):
-            return value
-
-        return '"' + _escaped.sub(self._EncodeTransform, value) + '"'
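
A few concrete cases of the quoting rules above, assuming the module-level _unquoted, _quoted, and _escaped expressions defined near the top of this file behave as the comment describes:

    obj = XCObject()
    assert obj._EncodeString("Source") == "Source"  # safe charset: no quotes
    assert obj._EncodeString("my file.c") == '"my file.c"'  # space forces quotes
    assert obj._EncodeString("") == '""'  # empty strings are quoted
    assert obj._EncodeString('say "hi"') == '"say \\"hi\\""'  # escaped quotes
    assert obj._EncodeString("a\tb") == '"a\\tb"'  # HT uses _encode_transforms[9]
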
-    def _XCPrint(self, file, tabs, line):
-        file.write("\t" * tabs + line)
-
-    def _XCPrintableValue(self, tabs, value, flatten_list=False):
-        """Returns a representation of value that may be printed in a project file,
-        mimicking Xcode's behavior.
-
-        _XCPrintableValue can handle str and int values, XCObjects (which are
-        made printable by returning their id property), and list and dict objects
-        composed of any of the above types.  When printing a list or dict, and
-        _should_print_single_line is False, the tabs parameter is used to determine
-        how much to indent the lines corresponding to the items in the list or
-        dict.
-
-        If flatten_list is True, single-element lists will be transformed into
-        strings.
-        """
-
-        printable = ""
-        comment = None
-
-        if self._should_print_single_line:
-            sep = " "
-            element_tabs = ""
-            end_tabs = ""
-        else:
-            sep = "\n"
-            element_tabs = "\t" * (tabs + 1)
-            end_tabs = "\t" * tabs
-
-        if isinstance(value, XCObject):
-            printable += value.id
-            comment = value.Comment()
-        elif isinstance(value, str):
-            printable += self._EncodeString(value)
-        elif isinstance(value, int):
-            printable += str(value)
-        elif isinstance(value, list):
-            if flatten_list and len(value) <= 1:
-                if len(value) == 0:
-                    printable += self._EncodeString("")
-                else:
-                    printable += self._EncodeString(value[0])
-            else:
-                printable = "(" + sep
-                for item in value:
-                    printable += (
-                        element_tabs
-                        + self._XCPrintableValue(tabs + 1, item, flatten_list)
-                        + ","
-                        + sep
-                    )
-                printable += end_tabs + ")"
-        elif isinstance(value, dict):
-            printable = "{" + sep
-            for item_key, item_value in sorted(value.items()):
-                printable += (
-                    element_tabs
-                    + self._XCPrintableValue(tabs + 1, item_key, flatten_list)
-                    + " = "
-                    + self._XCPrintableValue(tabs + 1, item_value, flatten_list)
-                    + ";"
-                    + sep
-                )
-            printable += end_tabs + "}"
-        else:
-            raise TypeError("Can't make " + value.__class__.__name__ + " printable")
-
-        if comment:
-            printable += " " + self._EncodeComment(comment)
-
-        return printable
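
As an illustration of the multi-line path through _XCPrintableValue (with _should_print_single_line False), a two-entry dict printed at tab depth 1 renders each key on its own tab-indented line:

    obj = XCObject()
    text = obj._XCPrintableValue(1, {"KEY1": "value1", "KEY2": "value2"})
    assert text == '{\n\t\tKEY1 = value1;\n\t\tKEY2 = value2;\n\t}'
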
-    def _XCKVPrint(self, file, tabs, key, value):
-        """Prints a key and value, members of an XCObject's _properties dictionary,
-        to file.
-
-        tabs is an int identifying the indentation level.  If the class'
-        _should_print_single_line variable is True, tabs is ignored and the
-        key-value pair will be followed by a space instead of a newline.
-        """
-
-        if self._should_print_single_line:
-            printable = ""
-            after_kv = " "
-        else:
-            printable = "\t" * tabs
-            after_kv = "\n"
-
-        # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy
-        # objects without comments.  Sometimes it prints them with comments, but
-        # the majority of the time, it doesn't.  To avoid unnecessary changes to
-        # the project file after Xcode opens it, don't write comments for
-        # remoteGlobalIDString.  This is a sucky hack and it would certainly be
-        # cleaner to extend the schema to indicate whether or not a comment should
-        # be printed, but since this is the only case where the problem occurs and
-        # Xcode itself can't seem to make up its mind, the hack will suffice.
-        #
-        # Also see PBXContainerItemProxy._schema['remoteGlobalIDString'].
-        if key == "remoteGlobalIDString" and isinstance(self, PBXContainerItemProxy):
-            value_to_print = value.id
-        else:
-            value_to_print = value
-
-        # PBXBuildFile's settings property is represented in the output as a dict,
-        # but a hack here has it represented as a string.  Arrange to strip off the
-        # quotes so that it shows up in the output as expected.
-        if key == "settings" and isinstance(self, PBXBuildFile):
-            strip_value_quotes = True
-        else:
-            strip_value_quotes = False
-
-        # In another one-off, let's set flatten_list on buildSettings properties
-        # of XCBuildConfiguration objects, because that's how Xcode treats them.
-        if key == "buildSettings" and isinstance(self, XCBuildConfiguration):
-            flatten_list = True
-        else:
-            flatten_list = False
-
-        try:
-            printable_key = self._XCPrintableValue(tabs, key, flatten_list)
-            printable_value = self._XCPrintableValue(tabs, value_to_print, flatten_list)
-            if (
-                strip_value_quotes
-                and len(printable_value) > 1
-                and printable_value[0] == '"'
-                and printable_value[-1] == '"'
-            ):
-                printable_value = printable_value[1:-1]
-            printable += printable_key + " = " + printable_value + ";" + after_kv
-        except TypeError as e:
-            gyp.common.ExceptionAppend(e, 'while printing key "%s"' % key)
-            raise
-
-        self._XCPrint(file, 0, printable)
-
-    def Print(self, file=sys.stdout):
-        """Prints a representation of this object to file, adhering to Xcode output
-        formatting.
-        """
-
-        self.VerifyHasRequiredProperties()
-
-        if self._should_print_single_line:
-            # When printing an object in a single line, Xcode doesn't put any space
-            # between the beginning of a dictionary (or presumably a list) and the
-            # first contained item, so you wind up with snippets like
-            #   ...CDEF = {isa = PBXFileReference; fileRef = 0123...
-            # If it were me, I would have put a space in there after the opening
-            # curly, but I guess this is just another one of those inconsistencies
-            # between how Xcode prints PBXFileReference and PBXBuildFile objects as
-            # compared to other objects.  Mimic Xcode's behavior here by using an
-            # empty string for sep.
-            sep = ""
-            end_tabs = 0
-        else:
-            sep = "\n"
-            end_tabs = 2
-
-        # Start the object.  For example, '\t\tPBXProject = {\n'.
-        self._XCPrint(file, 2, self._XCPrintableValue(2, self) + " = {" + sep)
-
-        # "isa" isn't in the _properties dictionary, it's an intrinsic property
-        # of the class which the object belongs to.  Xcode always outputs "isa"
-        # as the first element of an object dictionary.
-        self._XCKVPrint(file, 3, "isa", self.__class__.__name__)
-
-        # The remaining elements of an object dictionary are sorted alphabetically.
-        for property, value in sorted(self._properties.items()):
-            self._XCKVPrint(file, 3, property, value)
-
-        # End the object.
-        self._XCPrint(file, end_tabs, "};\n")
-
-    def UpdateProperties(self, properties, do_copy=False):
-        """Merge the supplied properties into the _properties dictionary.
-
-        The input properties must adhere to the class schema or a KeyError or
-        TypeError exception will be raised.  If adding an object of an XCObject
-        subclass and the schema indicates a strong relationship, the object's
-        parent will be set to this object.
- - If do_copy is True, then lists, dicts, strong-owned XCObjects, and - strong-owned XCObjects in lists will be copied instead of having their - references added. - """ - - if properties is None: - return - - for property, value in properties.items(): - # Make sure the property is in the schema. - if property not in self._schema: - raise KeyError(property + " not in " + self.__class__.__name__) - - # Make sure the property conforms to the schema. - (is_list, property_type, is_strong) = self._schema[property][0:3] - if is_list: - if value.__class__ != list: - raise TypeError( - property - + " of " - + self.__class__.__name__ - + " must be list, not " - + value.__class__.__name__ - ) - for item in value: - if not isinstance(item, property_type) and not ( - isinstance(item, str) and property_type == str - ): - # Accept unicode where str is specified. str is treated as - # UTF-8-encoded. - raise TypeError( - "item of " - + property - + " of " - + self.__class__.__name__ - + " must be " - + property_type.__name__ - + ", not " - + item.__class__.__name__ - ) - elif not isinstance(value, property_type) and not ( - isinstance(value, str) and property_type == str - ): - # Accept unicode where str is specified. str is treated as - # UTF-8-encoded. - raise TypeError( - property - + " of " - + self.__class__.__name__ - + " must be " - + property_type.__name__ - + ", not " - + value.__class__.__name__ - ) - - # Checks passed, perform the assignment. - if do_copy: - if isinstance(value, XCObject): - if is_strong: - self._properties[property] = value.Copy() - else: - self._properties[property] = value - elif isinstance(value, (str, int)): - self._properties[property] = value - elif isinstance(value, list): - if is_strong: - # If is_strong is True, each element is an XCObject, - # so it's safe to call Copy. - self._properties[property] = [] - for item in value: - self._properties[property].append(item.Copy()) - else: - self._properties[property] = value[:] - elif isinstance(value, dict): - self._properties[property] = value.copy() - else: - raise TypeError( - "Don't know how to copy a " - + value.__class__.__name__ - + " object for " - + property - + " in " - + self.__class__.__name__ - ) - else: - self._properties[property] = value - - # Set up the child's back-reference to this object. Don't use |value| - # any more because it may not be right if do_copy is true. - if is_strong: - if not is_list: - self._properties[property].parent = self - else: - for item in self._properties[property]: - item.parent = self - - def HasProperty(self, key): - return key in self._properties - - def GetProperty(self, key): - return self._properties[key] - - def SetProperty(self, key, value): - self.UpdateProperties({key: value}) - - def DelProperty(self, key): - if key in self._properties: - del self._properties[key] - - def AppendProperty(self, key, value): - # TODO(mark): Support ExtendProperty too (and make this call that)? - - # Schema validation. - if key not in self._schema: - raise KeyError(key + " not in " + self.__class__.__name__) - - (is_list, property_type, is_strong) = self._schema[key][0:3] - if not is_list: - raise TypeError(key + " of " + self.__class__.__name__ + " must be list") - if not isinstance(value, property_type): - raise TypeError( - "item of " - + key - + " of " - + self.__class__.__name__ - + " must be " - + property_type.__name__ - + ", not " - + value.__class__.__name__ - ) - - # If the property doesn't exist yet, create a new empty list to receive the - # item. 
- self._properties[key] = self._properties.get(key, []) - - # Set up the ownership link. - if is_strong: - value.parent = self - - # Store the item. - self._properties[key].append(value) - - def VerifyHasRequiredProperties(self): - """Ensure that all properties identified as required by the schema are - set. - """ - - # TODO(mark): A stronger verification mechanism is needed. Some - # subclasses need to perform validation beyond what the schema can enforce. - for property, attributes in self._schema.items(): - (is_list, property_type, is_strong, is_required) = attributes[0:4] - if is_required and property not in self._properties: - raise KeyError(self.__class__.__name__ + " requires " + property) - - def _SetDefaultsFromSchema(self): - """Assign object default values according to the schema. This will not - overwrite properties that have already been set.""" - - defaults = {} - for property, attributes in self._schema.items(): - (is_list, property_type, is_strong, is_required) = attributes[0:4] - if ( - is_required - and len(attributes) >= 5 - and property not in self._properties - ): - default = attributes[4] - - defaults[property] = default - - if len(defaults) > 0: - # Use do_copy=True so that each new object gets its own copy of strong - # objects, lists, and dicts. - self.UpdateProperties(defaults, do_copy=True) - - -class XCHierarchicalElement(XCObject): - """Abstract base for PBXGroup and PBXFileReference. Not represented in a - project file.""" - - # TODO(mark): Do name and path belong here? Probably so. - # If path is set and name is not, name may have a default value. Name will - # be set to the basename of path, if the basename of path is different from - # the full value of path. If path is already just a leaf name, name will - # not be set. - _schema = XCObject._schema.copy() - _schema.update( - { - "comments": [0, str, 0, 0], - "fileEncoding": [0, str, 0, 0], - "includeInIndex": [0, int, 0, 0], - "indentWidth": [0, int, 0, 0], - "lineEnding": [0, int, 0, 0], - "sourceTree": [0, str, 0, 1, ""], - "tabWidth": [0, int, 0, 0], - "usesTabs": [0, int, 0, 0], - "wrapsLines": [0, int, 0, 0], - } - ) - - def __init__(self, properties=None, id=None, parent=None): - # super - XCObject.__init__(self, properties, id, parent) - if "path" in self._properties and "name" not in self._properties: - path = self._properties["path"] - name = posixpath.basename(path) - if name not in ("", path): - self.SetProperty("name", name) - - if "path" in self._properties and ( - "sourceTree" not in self._properties - or self._properties["sourceTree"] == "" - ): - # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take - # the variable out and make the path be relative to that variable by - # assigning the variable name as the sourceTree. - (source_tree, path) = SourceTreeAndPathFromPath(self._properties["path"]) - if source_tree is not None: - self._properties["sourceTree"] = source_tree - if path is not None: - self._properties["path"] = path - if ( - source_tree is not None - and path is None - and "name" not in self._properties - ): - # The path was of the form "$(SDKROOT)" with no path following it. - # This object is now relative to that variable, so it has no path - # attribute of its own. It does, however, keep a name. 
- del self._properties["path"] - self._properties["name"] = source_tree - - def Name(self): - if "name" in self._properties: - return self._properties["name"] - elif "path" in self._properties: - return self._properties["path"] - else: - # This happens in the case of the root PBXGroup. - return None - - def Hashables(self): - """Custom hashables for XCHierarchicalElements. - - XCHierarchicalElements are special. Generally, their hashes shouldn't - change if the paths don't change. The normal XCObject implementation of - Hashables adds a hashable for each object, which means that if - the hierarchical structure changes (possibly due to changes caused when - TakeOverOnlyChild runs and encounters slight changes in the hierarchy), - the hashes will change. For example, if a project file initially contains - a/b/f1 and a/b becomes collapsed into a/b, f1 will have a single parent - a/b. If someone later adds a/f2 to the project file, a/b can no longer be - collapsed, and f1 winds up with parent b and grandparent a. That would - be sufficient to change f1's hash. - - To counteract this problem, hashables for all XCHierarchicalElements except - for the main group (which has neither a name nor a path) are taken to be - just the set of path components. Because hashables are inherited from - parents, this provides assurance that a/b/f1 has the same set of hashables - whether its parent is b or a/b. - - The main group is a special case. As it is permitted to have no name or - path, it is permitted to use the standard XCObject hash mechanism. This - is not considered a problem because there can be only one main group. - """ - - if self == self.PBXProjectAncestor()._properties["mainGroup"]: - # super - return XCObject.Hashables(self) - - hashables = [] - - # Put the name in first, ensuring that if TakeOverOnlyChild collapses - # children into a top-level group like "Source", the name always goes - # into the list of hashables without interfering with path components. - if "name" in self._properties: - # Make it less likely for people to manipulate hashes by following the - # pattern of always pushing an object type value onto the list first. - hashables.append(self.__class__.__name__ + ".name") - hashables.append(self._properties["name"]) - - # NOTE: This still has the problem that if an absolute path is encountered, - # including paths with a sourceTree, they'll still inherit their parents' - # hashables, even though the paths aren't relative to their parents. This - # is not expected to be much of a problem in practice. - path = self.PathFromSourceTreeAndPath() - if path is not None: - components = path.split(posixpath.sep) - for component in components: - hashables.append(self.__class__.__name__ + ".path") - hashables.append(component) - - hashables.extend(self._hashables) - - return hashables - - def Compare(self, other): - # Allow comparison of these types. PBXGroup has the highest sort rank; - # PBXVariantGroup is treated as equal to PBXFileReference. - valid_class_types = { - PBXFileReference: "file", - PBXGroup: "group", - PBXVariantGroup: "file", - } - self_type = valid_class_types[self.__class__] - other_type = valid_class_types[other.__class__] - - if self_type == other_type: - # If the two objects are of the same sort rank, compare their names. - return cmp(self.Name(), other.Name()) - - # Otherwise, sort groups before everything else. 
-        if self_type == "group":
-            return -1
-        return 1
-
-    def CompareRootGroup(self, other):
-        # This function should be used only to compare direct children of the
-        # containing PBXProject's mainGroup.  These groups should appear in the
-        # listed order.
-        # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the
-        # generator should have a way of influencing this list rather than having
-        # to hardcode for the generator here.
-        order = [
-            "Source",
-            "Intermediates",
-            "Projects",
-            "Frameworks",
-            "Products",
-            "Build",
-        ]
-
-        # If neither group is in the listed order, compare by name.  Otherwise,
-        # groups in the listed order come before those that aren't.
-        self_name = self.Name()
-        other_name = other.Name()
-        self_in = isinstance(self, PBXGroup) and self_name in order
-        other_in = isinstance(other, PBXGroup) and other_name in order
-        if not self_in and not other_in:
-            return self.Compare(other)
-        if self_name in order and other_name not in order:
-            return -1
-        if other_name in order and self_name not in order:
-            return 1
-
-        # If both groups are in the listed order, go by the defined order.
-        self_index = order.index(self_name)
-        other_index = order.index(other_name)
-        if self_index < other_index:
-            return -1
-        if self_index > other_index:
-            return 1
-        return 0
-
-    def PathFromSourceTreeAndPath(self):
-        # Turn the object's sourceTree and path properties into a single flat
-        # string of a form comparable to the path parameter.  If there's a
-        # sourceTree property other than "", wrap it in $(...) for the
-        # comparison.
-        components = []
-        if self._properties["sourceTree"] != "":
-            components.append("$(" + self._properties["sourceTree"] + ")")
-        if "path" in self._properties:
-            components.append(self._properties["path"])
-
-        if len(components) > 0:
-            return posixpath.join(*components)
-
-        return None
-
-    def FullPath(self):
-        # Returns a full path to self relative to the project file, or relative
-        # to some other source tree.  Start with self, and walk up the chain of
-        # parents prepending their paths, if any, until no more parents are
-        # available (project-relative path) or until a path relative to some
-        # source tree is found.
-        xche = self
-        path = None
-        while isinstance(xche, XCHierarchicalElement) and (
-            path is None or (not path.startswith("/") and not path.startswith("$"))
-        ):
-            this_path = xche.PathFromSourceTreeAndPath()
-            if this_path is not None and path is not None:
-                path = posixpath.join(this_path, path)
-            elif this_path is not None:
-                path = this_path
-            xche = xche.parent
-
-        return path
-
-
-class PBXGroup(XCHierarchicalElement):
-    """
-    Attributes:
-      _children_by_path: Maps pathnames of children of this PBXGroup to the
-                         actual child XCHierarchicalElement objects.
-      _variant_children_by_name_and_path: Maps (name, path) tuples of
-                                          PBXVariantGroup children to the
-                                          actual child PBXVariantGroup objects.
- """ - - _schema = XCHierarchicalElement._schema.copy() - _schema.update( - { - "children": [1, XCHierarchicalElement, 1, 1, []], - "name": [0, str, 0, 0], - "path": [0, str, 0, 0], - } - ) - - def __init__(self, properties=None, id=None, parent=None): - # super - XCHierarchicalElement.__init__(self, properties, id, parent) - self._children_by_path = {} - self._variant_children_by_name_and_path = {} - for child in self._properties.get("children", []): - self._AddChildToDicts(child) - - def Hashables(self): - # super - hashables = XCHierarchicalElement.Hashables(self) - - # It is not sufficient to just rely on name and parent to build a unique - # hashable : a node could have two child PBXGroup sharing a common name. - # To add entropy the hashable is enhanced with the names of all its - # children. - for child in self._properties.get("children", []): - child_name = child.Name() - if child_name is not None: - hashables.append(child_name) - - return hashables - - def HashablesForChild(self): - # To avoid a circular reference the hashables used to compute a child id do - # not include the child names. - return XCHierarchicalElement.Hashables(self) - - def _AddChildToDicts(self, child): - # Sets up this PBXGroup object's dicts to reference the child properly. - child_path = child.PathFromSourceTreeAndPath() - if child_path: - if child_path in self._children_by_path: - raise ValueError("Found multiple children with path " + child_path) - self._children_by_path[child_path] = child - - if isinstance(child, PBXVariantGroup): - child_name = child._properties.get("name", None) - key = (child_name, child_path) - if key in self._variant_children_by_name_and_path: - raise ValueError( - "Found multiple PBXVariantGroup children with " - + "name " - + str(child_name) - + " and path " - + str(child_path) - ) - self._variant_children_by_name_and_path[key] = child - - def AppendChild(self, child): - # Callers should use this instead of calling - # AppendProperty('children', child) directly because this function - # maintains the group's dicts. - self.AppendProperty("children", child) - self._AddChildToDicts(child) - - def GetChildByName(self, name): - # This is not currently optimized with a dict as GetChildByPath is because - # it has few callers. Most callers probably want GetChildByPath. This - # function is only useful to get children that have names but no paths, - # which is rare. The children of the main group ("Source", "Products", - # etc.) is pretty much the only case where this likely to come up. - # - # TODO(mark): Maybe this should raise an error if more than one child is - # present with the same name. - if "children" not in self._properties: - return None - - for child in self._properties["children"]: - if child.Name() == name: - return child - - return None - - def GetChildByPath(self, path): - if not path: - return None - - if path in self._children_by_path: - return self._children_by_path[path] - - return None - - def GetChildByRemoteObject(self, remote_object): - # This method is a little bit esoteric. Given a remote_object, which - # should be a PBXFileReference in another project file, this method will - # return this group's PBXReferenceProxy object serving as a local proxy - # for the remote PBXFileReference. - # - # This function might benefit from a dict optimization as GetChildByPath - # for some workloads, but profiling shows that it's not currently a - # problem. 
- if "children" not in self._properties: - return None - - for child in self._properties["children"]: - if not isinstance(child, PBXReferenceProxy): - continue - - container_proxy = child._properties["remoteRef"] - if container_proxy._properties["remoteGlobalIDString"] == remote_object: - return child - - return None - - def AddOrGetFileByPath(self, path, hierarchical): - """Returns an existing or new file reference corresponding to path. - - If hierarchical is True, this method will create or use the necessary - hierarchical group structure corresponding to path. Otherwise, it will - look in and create an item in the current group only. - - If an existing matching reference is found, it is returned, otherwise, a - new one will be created, added to the correct group, and returned. - - If path identifies a directory by virtue of carrying a trailing slash, - this method returns a PBXFileReference of "folder" type. If path - identifies a variant, by virtue of it identifying a file inside a directory - with an ".lproj" extension, this method returns a PBXVariantGroup - containing the variant named by path, and possibly other variants. For - all other paths, a "normal" PBXFileReference will be returned. - """ - - # Adding or getting a directory? Directories end with a trailing slash. - is_dir = False - if path.endswith("/"): - is_dir = True - path = posixpath.normpath(path) - if is_dir: - path = path + "/" - - # Adding or getting a variant? Variants are files inside directories - # with an ".lproj" extension. Xcode uses variants for localization. For - # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named - # MainMenu.nib inside path/to, and give it a variant named Language. In - # this example, grandparent would be set to path/to and parent_root would - # be set to Language. - variant_name = None - parent = posixpath.dirname(path) - grandparent = posixpath.dirname(parent) - parent_basename = posixpath.basename(parent) - (parent_root, parent_ext) = posixpath.splitext(parent_basename) - if parent_ext == ".lproj": - variant_name = parent_root - if grandparent == "": - grandparent = None - - # Putting a directory inside a variant group is not currently supported. - assert not is_dir or variant_name is None - - path_split = path.split(posixpath.sep) - if ( - len(path_split) == 1 - or ((is_dir or variant_name is not None) and len(path_split) == 2) - or not hierarchical - ): - # The PBXFileReference or PBXVariantGroup will be added to or gotten from - # this PBXGroup, no recursion necessary. - if variant_name is None: - # Add or get a PBXFileReference. - file_ref = self.GetChildByPath(path) - if file_ref is not None: - assert file_ref.__class__ == PBXFileReference - else: - file_ref = PBXFileReference({"path": path}) - self.AppendChild(file_ref) - else: - # Add or get a PBXVariantGroup. The variant group name is the same - # as the basename (MainMenu.nib in the example above). grandparent - # specifies the path to the variant group itself, and path_split[-2:] - # is the path of the specific variant relative to its group. 
- variant_group_name = posixpath.basename(path) - variant_group_ref = self.AddOrGetVariantGroupByNameAndPath( - variant_group_name, grandparent - ) - variant_path = posixpath.sep.join(path_split[-2:]) - variant_ref = variant_group_ref.GetChildByPath(variant_path) - if variant_ref is not None: - assert variant_ref.__class__ == PBXFileReference - else: - variant_ref = PBXFileReference( - {"name": variant_name, "path": variant_path} - ) - variant_group_ref.AppendChild(variant_ref) - # The caller is interested in the variant group, not the specific - # variant file. - file_ref = variant_group_ref - return file_ref - else: - # Hierarchical recursion. Add or get a PBXGroup corresponding to the - # outermost path component, and then recurse into it, chopping off that - # path component. - next_dir = path_split[0] - group_ref = self.GetChildByPath(next_dir) - if group_ref is not None: - assert group_ref.__class__ == PBXGroup - else: - group_ref = PBXGroup({"path": next_dir}) - self.AppendChild(group_ref) - return group_ref.AddOrGetFileByPath( - posixpath.sep.join(path_split[1:]), hierarchical - ) - - def AddOrGetVariantGroupByNameAndPath(self, name, path): - """Returns an existing or new PBXVariantGroup for name and path. - - If a PBXVariantGroup identified by the name and path arguments is already - present as a child of this object, it is returned. Otherwise, a new - PBXVariantGroup with the correct properties is created, added as a child, - and returned. - - This method will generally be called by AddOrGetFileByPath, which knows - when to create a variant group based on the structure of the pathnames - passed to it. - """ - - key = (name, path) - if key in self._variant_children_by_name_and_path: - variant_group_ref = self._variant_children_by_name_and_path[key] - assert variant_group_ref.__class__ == PBXVariantGroup - return variant_group_ref - - variant_group_properties = {"name": name} - if path is not None: - variant_group_properties["path"] = path - variant_group_ref = PBXVariantGroup(variant_group_properties) - self.AppendChild(variant_group_ref) - - return variant_group_ref - - def TakeOverOnlyChild(self, recurse=False): - """If this PBXGroup has only one child and it's also a PBXGroup, take - it over by making all of its children this object's children. - - This function will continue to take over only children when those children - are groups. If there are three PBXGroups representing a, b, and c, with - c inside b and b inside a, and a and b have no other children, this will - result in a taking over both b and c, forming a PBXGroup for a/b/c. - - If recurse is True, this function will recurse into children and ask them - to collapse themselves by taking over only children as well. Assuming - an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f - (d1, d2, and f are files, the rest are groups), recursion will result in - a group for a/b/c containing a group for d3/e. - """ - - # At this stage, check that child class types are PBXGroup exactly, - # instead of using isinstance. The only subclass of PBXGroup, - # PBXVariantGroup, should not participate in reparenting in the same way: - # reparenting by merging different object types would be wrong. - while ( - len(self._properties["children"]) == 1 - and self._properties["children"][0].__class__ == PBXGroup - ): - # Loop to take over the innermost only-child group possible. - - child = self._properties["children"][0] - - # Assume the child's properties, including its children. 
Save a copy - # of this object's old properties, because they'll still be needed. - # This object retains its existing id and parent attributes. - old_properties = self._properties - self._properties = child._properties - self._children_by_path = child._children_by_path - - if ( - "sourceTree" not in self._properties - or self._properties["sourceTree"] == "" - ): - # The child was relative to its parent. Fix up the path. Note that - # children with a sourceTree other than "" are not relative to - # their parents, so no path fix-up is needed in that case. - if "path" in old_properties: - if "path" in self._properties: - # Both the original parent and child have paths set. - self._properties["path"] = posixpath.join( - old_properties["path"], self._properties["path"] - ) - else: - # Only the original parent has a path, use it. - self._properties["path"] = old_properties["path"] - if "sourceTree" in old_properties: - # The original parent had a sourceTree set, use it. - self._properties["sourceTree"] = old_properties["sourceTree"] - - # If the original parent had a name set, keep using it. If the original - # parent didn't have a name but the child did, let the child's name - # live on. If the name attribute seems unnecessary now, get rid of it. - if "name" in old_properties and old_properties["name"] not in ( - None, - self.Name(), - ): - self._properties["name"] = old_properties["name"] - if ( - "name" in self._properties - and "path" in self._properties - and self._properties["name"] == self._properties["path"] - ): - del self._properties["name"] - - # Notify all children of their new parent. - for child in self._properties["children"]: - child.parent = self - - # If asked to recurse, recurse. - if recurse: - for child in self._properties["children"]: - if child.__class__ == PBXGroup: - child.TakeOverOnlyChild(recurse) - - def SortGroup(self): - self._properties["children"] = sorted( - self._properties["children"], key=cmp_to_key(lambda x, y: x.Compare(y)) - ) - - # Recurse. - for child in self._properties["children"]: - if isinstance(child, PBXGroup): - child.SortGroup() - - -class XCFileLikeElement(XCHierarchicalElement): - # Abstract base for objects that can be used as the fileRef property of - # PBXBuildFile. - - def PathHashables(self): - # A PBXBuildFile that refers to this object will call this method to - # obtain additional hashables specific to this XCFileLikeElement. Don't - # just use this object's hashables, they're not specific and unique enough - # on their own (without access to the parent hashables.) Instead, provide - # hashables that identify this object by path by getting its hashables as - # well as the hashables of ancestor XCHierarchicalElement objects. - - hashables = [] - xche = self - while isinstance(xche, XCHierarchicalElement): - xche_hashables = xche.Hashables() - for index, xche_hashable in enumerate(xche_hashables): - hashables.insert(index, xche_hashable) - xche = xche.parent - return hashables - - -class XCContainerPortal(XCObject): - # Abstract base for objects that can be used as the containerPortal property - # of PBXContainerItemProxy. - pass - - -class XCRemoteObject(XCObject): - # Abstract base for objects that can be used as the remoteGlobalIDString - # property of PBXContainerItemProxy. 
- pass - - -class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject): - _schema = XCFileLikeElement._schema.copy() - _schema.update( - { - "explicitFileType": [0, str, 0, 0], - "lastKnownFileType": [0, str, 0, 0], - "name": [0, str, 0, 0], - "path": [0, str, 0, 1], - } - ) - - # Weird output rules for PBXFileReference. - _should_print_single_line = True - # super - _encode_transforms = XCFileLikeElement._alternate_encode_transforms - - def __init__(self, properties=None, id=None, parent=None): - # super - XCFileLikeElement.__init__(self, properties, id, parent) - if "path" in self._properties and self._properties["path"].endswith("/"): - self._properties["path"] = self._properties["path"][:-1] - is_dir = True - else: - is_dir = False - - if ( - "path" in self._properties - and "lastKnownFileType" not in self._properties - and "explicitFileType" not in self._properties - ): - # TODO(mark): This is the replacement for a replacement for a quick hack. - # It is no longer incredibly sucky, but this list needs to be extended. - extension_map = { - "a": "archive.ar", - "app": "wrapper.application", - "bdic": "file", - "bundle": "wrapper.cfbundle", - "c": "sourcecode.c.c", - "cc": "sourcecode.cpp.cpp", - "cpp": "sourcecode.cpp.cpp", - "css": "text.css", - "cxx": "sourcecode.cpp.cpp", - "dart": "sourcecode", - "dylib": "compiled.mach-o.dylib", - "framework": "wrapper.framework", - "gyp": "sourcecode", - "gypi": "sourcecode", - "h": "sourcecode.c.h", - "hxx": "sourcecode.cpp.h", - "icns": "image.icns", - "java": "sourcecode.java", - "js": "sourcecode.javascript", - "kext": "wrapper.kext", - "m": "sourcecode.c.objc", - "mm": "sourcecode.cpp.objcpp", - "nib": "wrapper.nib", - "o": "compiled.mach-o.objfile", - "pdf": "image.pdf", - "pl": "text.script.perl", - "plist": "text.plist.xml", - "pm": "text.script.perl", - "png": "image.png", - "py": "text.script.python", - "r": "sourcecode.rez", - "rez": "sourcecode.rez", - "s": "sourcecode.asm", - "storyboard": "file.storyboard", - "strings": "text.plist.strings", - "swift": "sourcecode.swift", - "ttf": "file", - "xcassets": "folder.assetcatalog", - "xcconfig": "text.xcconfig", - "xcdatamodel": "wrapper.xcdatamodel", - "xcdatamodeld": "wrapper.xcdatamodeld", - "xib": "file.xib", - "y": "sourcecode.yacc", - } - - prop_map = { - "dart": "explicitFileType", - "gyp": "explicitFileType", - "gypi": "explicitFileType", - } - - if is_dir: - file_type = "folder" - prop_name = "lastKnownFileType" - else: - basename = posixpath.basename(self._properties["path"]) - (root, ext) = posixpath.splitext(basename) - # Check the map using a lowercase extension. - # TODO(mark): Maybe it should try with the original case first and fall - # back to lowercase, in case there are any instances where case - # matters. There currently aren't. - if ext != "": - ext = ext[1:].lower() - - # TODO(mark): "text" is the default value, but "file" is appropriate - # for unrecognized files not containing text. Xcode seems to choose - # based on content. - file_type = extension_map.get(ext, "text") - prop_name = prop_map.get(ext, "lastKnownFileType") - - self._properties[prop_name] = file_type - - -class PBXVariantGroup(PBXGroup, XCFileLikeElement): - """PBXVariantGroup is used by Xcode to represent localizations.""" - - # No additions to the schema relative to PBXGroup. - pass - - -# PBXReferenceProxy is also an XCFileLikeElement subclass. It is defined below -# because it uses PBXContainerItemProxy, defined below. 
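
The interplay of PBXGroup.AddOrGetFileByPath, variant groups, and PBXFileReference's file-type inference above can be exercised directly; a sketch with illustrative group and file names:

    group = PBXGroup({"name": "Source", "path": "src"})

    # Hierarchical adds create intermediate PBXGroups and infer the file type.
    ref = group.AddOrGetFileByPath("widgets/view.mm", hierarchical=True)
    assert ref._properties["lastKnownFileType"] == "sourcecode.cpp.objcpp"

    # Two localizations of one nib collapse into a single PBXVariantGroup.
    nib = group.AddOrGetFileByPath("en.lproj/MainMenu.nib", hierarchical=False)
    assert nib.__class__ == PBXVariantGroup
    assert nib is group.AddOrGetFileByPath("fr.lproj/MainMenu.nib", hierarchical=False)

    # A trailing slash marks a folder reference.
    folder = group.AddOrGetFileByPath("assets/", hierarchical=False)
    assert folder._properties["lastKnownFileType"] == "folder"
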
- - -class XCBuildConfiguration(XCObject): - _schema = XCObject._schema.copy() - _schema.update( - { - "baseConfigurationReference": [0, PBXFileReference, 0, 0], - "buildSettings": [0, dict, 0, 1, {}], - "name": [0, str, 0, 1], - } - ) - - def HasBuildSetting(self, key): - return key in self._properties["buildSettings"] - - def GetBuildSetting(self, key): - return self._properties["buildSettings"][key] - - def SetBuildSetting(self, key, value): - # TODO(mark): If a list, copy? - self._properties["buildSettings"][key] = value - - def AppendBuildSetting(self, key, value): - if key not in self._properties["buildSettings"]: - self._properties["buildSettings"][key] = [] - self._properties["buildSettings"][key].append(value) - - def DelBuildSetting(self, key): - if key in self._properties["buildSettings"]: - del self._properties["buildSettings"][key] - - def SetBaseConfiguration(self, value): - self._properties["baseConfigurationReference"] = value - - -class XCConfigurationList(XCObject): - # _configs is the default list of configurations. - _configs = [ - XCBuildConfiguration({"name": "Debug"}), - XCBuildConfiguration({"name": "Release"}), - ] - - _schema = XCObject._schema.copy() - _schema.update( - { - "buildConfigurations": [1, XCBuildConfiguration, 1, 1, _configs], - "defaultConfigurationIsVisible": [0, int, 0, 1, 1], - "defaultConfigurationName": [0, str, 0, 1, "Release"], - } - ) - - def Name(self): - return ( - "Build configuration list for " - + self.parent.__class__.__name__ - + ' "' - + self.parent.Name() - + '"' - ) - - def ConfigurationNamed(self, name): - """Convenience accessor to obtain an XCBuildConfiguration by name.""" - for configuration in self._properties["buildConfigurations"]: - if configuration._properties["name"] == name: - return configuration - - raise KeyError(name) - - def DefaultConfiguration(self): - """Convenience accessor to obtain the default XCBuildConfiguration.""" - return self.ConfigurationNamed(self._properties["defaultConfigurationName"]) - - def HasBuildSetting(self, key): - """Determines the state of a build setting in all XCBuildConfiguration - child objects. - - If all child objects have key in their build settings, and the value is the - same in all child objects, returns 1. - - If no child objects have the key in their build settings, returns 0. - - If some, but not all, child objects have the key in their build settings, - or if any children have different values for the key, returns -1. - """ - - has = None - value = None - for configuration in self._properties["buildConfigurations"]: - configuration_has = configuration.HasBuildSetting(key) - if has is None: - has = configuration_has - elif has != configuration_has: - return -1 - - if configuration_has: - configuration_value = configuration.GetBuildSetting(key) - if value is None: - value = configuration_value - elif value != configuration_value: - return -1 - - if not has: - return 0 - - return 1 - - def GetBuildSetting(self, key): - """Gets the build setting for key. - - All child XCConfiguration objects must have the same value set for the - setting, or a ValueError will be raised. - """ - - # TODO(mark): This is wrong for build settings that are lists. The list - # contents should be compared (and a list copy returned?) 
-
-        value = None
-        for configuration in self._properties["buildConfigurations"]:
-            configuration_value = configuration.GetBuildSetting(key)
-            if value is None:
-                value = configuration_value
-            else:
-                if value != configuration_value:
-                    raise ValueError("Variant values for " + key)
-
-        return value
-
-    def SetBuildSetting(self, key, value):
-        """Sets the build setting for key to value in all child
-        XCBuildConfiguration objects.
-        """
-
-        for configuration in self._properties["buildConfigurations"]:
-            configuration.SetBuildSetting(key, value)
-
-    def AppendBuildSetting(self, key, value):
-        """Appends value to the build setting for key, which is treated as a list,
-        in all child XCBuildConfiguration objects.
-        """
-
-        for configuration in self._properties["buildConfigurations"]:
-            configuration.AppendBuildSetting(key, value)
-
-    def DelBuildSetting(self, key):
-        """Deletes the build setting key from all child XCBuildConfiguration
-        objects.
-        """
-
-        for configuration in self._properties["buildConfigurations"]:
-            configuration.DelBuildSetting(key)
-
-    def SetBaseConfiguration(self, value):
-        """Sets the baseConfigurationReference in all child XCBuildConfiguration
-        objects.
-        """
-
-        for configuration in self._properties["buildConfigurations"]:
-            configuration.SetBaseConfiguration(value)
-
-
-class PBXBuildFile(XCObject):
-    _schema = XCObject._schema.copy()
-    _schema.update(
-        {
-            "fileRef": [0, XCFileLikeElement, 0, 1],
-            "settings": [0, str, 0, 0],  # hack, it's a dict
-        }
-    )
-
-    # Weird output rules for PBXBuildFile.
-    _should_print_single_line = True
-    _encode_transforms = XCObject._alternate_encode_transforms
-
-    def Name(self):
-        # Example: "main.cc in Sources"
-        return self._properties["fileRef"].Name() + " in " + self.parent.Name()
-
-    def Hashables(self):
-        # super
-        hashables = XCObject.Hashables(self)
-
-        # It is not sufficient to just rely on Name() to get the
-        # XCFileLikeElement's name, because that is not a complete pathname.
-        # PathHashables returns hashables unique enough that no two
-        # PBXBuildFiles should wind up with the same set of hashables, unless
-        # someone adds the same file multiple times to the same target.  That
-        # would be considered invalid anyway.
-        hashables.extend(self._properties["fileRef"].PathHashables())
-
-        return hashables
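
The tri-state contract documented on XCConfigurationList.HasBuildSetting above can be seen directly; a small sketch using real Xcode setting names:

    configs = XCConfigurationList()  # picks up the Debug + Release defaults
    assert configs.HasBuildSetting("GCC_OPTIMIZATION_LEVEL") == 0  # set nowhere
    configs.SetBuildSetting("PRODUCT_NAME", "tool")
    assert configs.HasBuildSetting("PRODUCT_NAME") == 1  # same value everywhere
    configs.ConfigurationNamed("Debug").SetBuildSetting("ONLY_ACTIVE_ARCH", "YES")
    assert configs.HasBuildSetting("ONLY_ACTIVE_ARCH") == -1  # set in only one
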
-
-
-class XCBuildPhase(XCObject):
-    """Abstract base for build phase classes.  Not represented in a project
-    file.
-
-    Attributes:
-      _files_by_path: A dict mapping each path of a child in the files list
-                      (keys) to the corresponding PBXBuildFile children (values).
-      _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
-                                   to the corresponding PBXBuildFile children
-                                   (values).
-    """
-
-    # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't
-    # actually have a "files" list.  XCBuildPhase should not have "files" but
-    # another abstract subclass of it should provide this, and concrete build
-    # phase types that do have "files" lists should be derived from that new
-    # abstract subclass.  XCBuildPhase should only provide buildActionMask and
-    # runOnlyForDeploymentPostprocessing, and not files or the various
-    # file-related methods and attributes.
-
-    _schema = XCObject._schema.copy()
-    _schema.update(
-        {
-            "buildActionMask": [0, int, 0, 1, 0x7FFFFFFF],
-            "files": [1, PBXBuildFile, 1, 1, []],
-            "runOnlyForDeploymentPostprocessing": [0, int, 0, 1, 0],
-        }
-    )
-
-    def __init__(self, properties=None, id=None, parent=None):
-        # super
-        XCObject.__init__(self, properties, id, parent)
-
-        self._files_by_path = {}
-        self._files_by_xcfilelikeelement = {}
-        for pbxbuildfile in self._properties.get("files", []):
-            self._AddBuildFileToDicts(pbxbuildfile)
-
-    def FileGroup(self, path):
-        # Subclasses must override this by returning a two-element tuple.  The
-        # first item in the tuple should be the PBXGroup to which "path" should be
-        # added, either as a child or deeper descendant.  The second item should
-        # be a boolean indicating whether files should be added into hierarchical
-        # groups or one single flat group.
-        raise NotImplementedError(self.__class__.__name__ + " must implement FileGroup")
-
-    def _AddPathToDict(self, pbxbuildfile, path):
-        """Adds path to the dict tracking paths belonging to this build phase.
-
-        If the path is already a member of this build phase, raises an exception.
-        """
-
-        if path in self._files_by_path:
-            raise ValueError("Found multiple build files with path " + path)
-        self._files_by_path[path] = pbxbuildfile
-
-    def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
-        """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts.
-
-        If path is specified, then it is the path that is being added to the
-        phase, and pbxbuildfile must contain either a PBXFileReference directly
-        referencing that path, or it must contain a PBXVariantGroup that itself
-        contains a PBXFileReference referencing the path.
-
-        If path is not specified, either the PBXFileReference's path or the paths
-        of all children of the PBXVariantGroup are taken as being added to the
-        phase.
-
-        If the path is already present in the phase, raises an exception.
-
-        If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile
-        are already present in the phase, referenced by a different PBXBuildFile
-        object, raises an exception.  This does not raise an exception when
-        a PBXFileReference or PBXVariantGroup reappear and are referenced by the
-        same PBXBuildFile that has already introduced them, because in the case
-        of PBXVariantGroup objects, they may correspond to multiple paths that are
-        not all added simultaneously.  When this situation occurs, the path needs
-        to be added to _files_by_path, but nothing needs to change in
-        _files_by_xcfilelikeelement, and the caller should have avoided adding
-        the PBXBuildFile if it is already present in the list of children.
-        """
-
-        xcfilelikeelement = pbxbuildfile._properties["fileRef"]
-
-        paths = []
-        if path is not None:
-            # It's best when the caller provides the path.
-            if isinstance(xcfilelikeelement, PBXVariantGroup):
-                paths.append(path)
-        else:
-            # If the caller didn't provide a path, there can be either multiple
-            # paths (PBXVariantGroup) or one.
-            if isinstance(xcfilelikeelement, PBXVariantGroup):
-                for variant in xcfilelikeelement._properties["children"]:
-                    paths.append(variant.FullPath())
-            else:
-                paths.append(xcfilelikeelement.FullPath())
-
-        # Add the paths first, because if something's going to raise, the
-        # messages provided by _AddPathToDict are more useful owing to its
-        # having access to a real pathname and not just an object's Name().
- for a_path in paths: - self._AddPathToDict(pbxbuildfile, a_path) - - # If another PBXBuildFile references this XCFileLikeElement, there's a - # problem. - if ( - xcfilelikeelement in self._files_by_xcfilelikeelement - and self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile - ): - raise ValueError( - "Found multiple build files for " + xcfilelikeelement.Name() - ) - self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile - - def AppendBuildFile(self, pbxbuildfile, path=None): - # Callers should use this instead of calling - # AppendProperty('files', pbxbuildfile) directly because this function - # maintains the object's dicts. Better yet, callers can just call AddFile - # with a pathname and not worry about building their own PBXBuildFile - # objects. - self.AppendProperty("files", pbxbuildfile) - self._AddBuildFileToDicts(pbxbuildfile, path) - - def AddFile(self, path, settings=None): - (file_group, hierarchical) = self.FileGroup(path) - file_ref = file_group.AddOrGetFileByPath(path, hierarchical) - - if file_ref in self._files_by_xcfilelikeelement and isinstance( - file_ref, PBXVariantGroup - ): - # There's already a PBXBuildFile in this phase corresponding to the - # PBXVariantGroup. path just provides a new variant that belongs to - # the group. Add the path to the dict. - pbxbuildfile = self._files_by_xcfilelikeelement[file_ref] - self._AddBuildFileToDicts(pbxbuildfile, path) - else: - # Add a new PBXBuildFile to get file_ref into the phase. - if settings is None: - pbxbuildfile = PBXBuildFile({"fileRef": file_ref}) - else: - pbxbuildfile = PBXBuildFile({"fileRef": file_ref, "settings": settings}) - self.AppendBuildFile(pbxbuildfile, path) - - -class PBXHeadersBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return "Headers" - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXResourcesBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return "Resources" - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXSourcesBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return "Sources" - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXFrameworksBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return "Frameworks" - - def FileGroup(self, path): - (root, ext) = posixpath.splitext(path) - if ext != "": - ext = ext[1:].lower() - if ext == "o": - # .o files are added to Xcode Frameworks phases, but conceptually aren't - # frameworks, they're more like sources or intermediates. Redirect them - # to show up in one of those other groups. 
-            return self.PBXProjectAncestor().RootGroupForPath(path)
-        else:
-            return (self.PBXProjectAncestor().FrameworksGroup(), False)
-
-
-class PBXShellScriptBuildPhase(XCBuildPhase):
-    _schema = XCBuildPhase._schema.copy()
-    _schema.update(
-        {
-            "inputPaths": [1, str, 0, 1, []],
-            "name": [0, str, 0, 0],
-            "outputPaths": [1, str, 0, 1, []],
-            "shellPath": [0, str, 0, 1, "/bin/sh"],
-            "shellScript": [0, str, 0, 1],
-            "showEnvVarsInLog": [0, int, 0, 0],
-        }
-    )
-
-    def Name(self):
-        if "name" in self._properties:
-            return self._properties["name"]
-
-        return "ShellScript"
-
-
-class PBXCopyFilesBuildPhase(XCBuildPhase):
-    _schema = XCBuildPhase._schema.copy()
-    _schema.update(
-        {
-            "dstPath": [0, str, 0, 1],
-            "dstSubfolderSpec": [0, int, 0, 1],
-            "name": [0, str, 0, 0],
-        }
-    )
-
-    # path_tree_re matches "$(DIR)/path", "$(DIR)/$(DIR2)/path" or just "$(DIR)".
-    # Match group 1 is "DIR", group 3 is "path" or "$(DIR2)" or "$(DIR2)/path"
-    # or None.  If group 3 is "path", group 4 will be None otherwise group 4 is
-    # "DIR2" and group 6 is "path".
-    path_tree_re = re.compile(r"^\$\((.*?)\)(/(\$\((.*?)\)(/(.*)|)|(.*)|)|)$")
-
-    # path_tree_{first,second}_to_subfolder map names of Xcode variables to the
-    # associated dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase
-    # object.
-    path_tree_first_to_subfolder = {
-        # Types that can be chosen via the Xcode UI.
-        "BUILT_PRODUCTS_DIR": 16,  # Products Directory
-        "BUILT_FRAMEWORKS_DIR": 10,  # Not an official Xcode macro.
-        # Existed before support for the
-        # names below was added.  Maps to
-        # "Frameworks".
-    }
-
-    path_tree_second_to_subfolder = {
-        "WRAPPER_NAME": 1,  # Wrapper
-        # Although Xcode's friendly name is "Executables", the destination
-        # is demonstrably the value of the build setting
-        # EXECUTABLE_FOLDER_PATH not EXECUTABLES_FOLDER_PATH.
-        "EXECUTABLE_FOLDER_PATH": 6,  # Executables.
-        "UNLOCALIZED_RESOURCES_FOLDER_PATH": 7,  # Resources
-        "JAVA_FOLDER_PATH": 15,  # Java Resources
-        "FRAMEWORKS_FOLDER_PATH": 10,  # Frameworks
-        "SHARED_FRAMEWORKS_FOLDER_PATH": 11,  # Shared Frameworks
-        "SHARED_SUPPORT_FOLDER_PATH": 12,  # Shared Support
-        "PLUGINS_FOLDER_PATH": 13,  # PlugIns
-        # For XPC Services, Xcode sets both dstPath and dstSubfolderSpec.
-        # Note that it re-uses the BUILT_PRODUCTS_DIR value for
-        # dstSubfolderSpec.  dstPath is set below.
-        "XPCSERVICES_FOLDER_PATH": 16,  # XPC Services.
-    }
-
-    def Name(self):
-        if "name" in self._properties:
-            return self._properties["name"]
-
-        return "CopyFiles"
-
-    def FileGroup(self, path):
-        return self.PBXProjectAncestor().RootGroupForPath(path)
-
-    def SetDestination(self, path):
-        """Set the dstSubfolderSpec and dstPath properties from path.
-
-        path may be specified in the same notation used for XCHierarchicalElements,
-        specifically, "$(DIR)/path".
-        """
-
-        path_tree_match = self.path_tree_re.search(path)
-        if path_tree_match:
-            path_tree = path_tree_match.group(1)
-            if path_tree in self.path_tree_first_to_subfolder:
-                subfolder = self.path_tree_first_to_subfolder[path_tree]
-                relative_path = path_tree_match.group(3)
-                if relative_path is None:
-                    relative_path = ""
-
-                if subfolder == 16 and path_tree_match.group(4) is not None:
-                    # BUILT_PRODUCTS_DIR (16) is the first element in a path whose
-                    # second element is possibly one of the variable names in
-                    # path_tree_second_to_subfolder.  Xcode sets the values of all
-                    # these variables to relative paths so .gyp files must prefix
-                    # them with BUILT_PRODUCTS_DIR, e.g.
-                    # $(BUILT_PRODUCTS_DIR)/$(PLUGINS_FOLDER_PATH).  Then
-                    # xcode_emulation.py can export these variables with the same
-                    # values as Xcode yet make & ninja files can determine the
-                    # absolute path to the target.  Xcode uses the dstSubfolderSpec
-                    # value set here to determine the full path.
-                    #
-                    # An alternative of xcode_emulation.py setting the values to
-                    # absolute paths when exporting these variables has been
-                    # ruled out because then the values would be different
-                    # depending on the build tool.
-                    #
-                    # Another alternative is to invent new names for the variables
-                    # used to match to the subfolder indices in the second table.
-                    # .gyp files then will not need to prepend $(BUILT_PRODUCTS_DIR)
-                    # because xcode_emulation.py can set the values of those
-                    # variables to the absolute paths when exporting.  This is
-                    # possibly the thinking behind BUILT_FRAMEWORKS_DIR which is
-                    # used in exactly this manner.
-                    #
-                    # Requiring prepending BUILT_PRODUCTS_DIR has been chosen
-                    # because this same way could be used to specify destinations
-                    # in .gyp files that pre-date this addition to GYP.  However
-                    # they would only work with the Xcode generator.
-                    # The previous version of xcode_emulation.py
-                    # does not export these variables.  Such files will get the
-                    # benefit of the Xcode UI showing the proper destination name
-                    # simply by regenerating the projects with this version of GYP.
-                    path_tree = path_tree_match.group(4)
-                    relative_path = path_tree_match.group(6)
-                    separator = "/"
-
-                    if path_tree in self.path_tree_second_to_subfolder:
-                        subfolder = self.path_tree_second_to_subfolder[path_tree]
-                        if relative_path is None:
-                            relative_path = ""
-                            separator = ""
-                        if path_tree == "XPCSERVICES_FOLDER_PATH":
-                            relative_path = (
-                                "$(CONTENTS_FOLDER_PATH)/XPCServices"
-                                + separator
-                                + relative_path
-                            )
-                    else:
-                        # subfolder = 16 from above
-                        # The second element of the path is an unrecognized
-                        # variable.  Include it and any remaining elements in
-                        # relative_path.
-                        relative_path = path_tree_match.group(3)
-
-            else:
-                # The path starts with an unrecognized Xcode variable
-                # name like $(SRCROOT).  Xcode will still handle this
-                # as an "absolute path" that starts with the variable.
-                subfolder = 0
-                relative_path = path
-        elif path.startswith("/"):
-            # Special case.  Absolute paths are in dstSubfolderSpec 0.
-            subfolder = 0
-            relative_path = path[1:]
-        else:
-            raise ValueError(
-                f"Can't use path {path} in a {self.__class__.__name__}"
-            )
-
-        self._properties["dstPath"] = relative_path
-        self._properties["dstSubfolderSpec"] = subfolder
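
How the path_tree_re groups drive SetDestination can be seen with a few inputs; a sketch (the phase name and paths are illustrative):

    phase = PBXCopyFilesBuildPhase({"name": "Copy Plugins"})

    # $(DIR)/$(DIR2)/path: group 4 selects the second lookup table.
    phase.SetDestination("$(BUILT_PRODUCTS_DIR)/$(PLUGINS_FOLDER_PATH)/extras")
    assert phase.GetProperty("dstSubfolderSpec") == 13  # PlugIns
    assert phase.GetProperty("dstPath") == "extras"

    # $(DIR)/path: group 1 alone selects the first lookup table.
    phase.SetDestination("$(BUILT_PRODUCTS_DIR)/helpers")
    assert phase.GetProperty("dstSubfolderSpec") == 16  # Products Directory
    assert phase.GetProperty("dstPath") == "helpers"

    # Absolute paths use dstSubfolderSpec 0 with the leading slash stripped.
    phase.SetDestination("/usr/local/lib")
    assert phase.GetProperty("dstSubfolderSpec") == 0
    assert phase.GetProperty("dstPath") == "usr/local/lib"
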
-
-
-class PBXBuildRule(XCObject):
-    _schema = XCObject._schema.copy()
-    _schema.update(
-        {
-            "compilerSpec": [0, str, 0, 1],
-            "filePatterns": [0, str, 0, 0],
-            "fileType": [0, str, 0, 1],
-            "isEditable": [0, int, 0, 1, 1],
-            "outputFiles": [1, str, 0, 1, []],
-            "script": [0, str, 0, 0],
-        }
-    )
-
-    def Name(self):
-        # Not very inspired, but it's what Xcode uses.
-        return self.__class__.__name__
-
-    def Hashables(self):
-        # super
-        hashables = XCObject.Hashables(self)
-
-        # Use the hashables of the weak objects that this object refers to.
-        hashables.append(self._properties["fileType"])
-        if "filePatterns" in self._properties:
-            hashables.append(self._properties["filePatterns"])
-        return hashables
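
PBXContainerItemProxy, defined next, is what ties a dependency edge to its target; a sketch of the same-project case, mirroring what XCTarget.AddDependency does later in this file (the helper and its arguments are illustrative):

    def _AddInProjectDependency(depending_target, other_target):
        # Both targets must live in the same project file.
        pbxproject = depending_target.PBXProjectAncestor()
        container = PBXContainerItemProxy(
            {
                "containerPortal": pbxproject,  # root object of this project
                "proxyType": 1,  # 1 = proxy for an XCTarget
                "remoteGlobalIDString": other_target,
                "remoteInfo": other_target.Name(),
            }
        )
        dependency = PBXTargetDependency(
            {"target": other_target, "targetProxy": container}
        )
        depending_target.AppendProperty("dependencies", dependency)
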
-    #
-    # When serving as a proxy to an XCTarget (in this project file or another),
-    # proxyType is 1. When serving as a proxy to a PBXFileReference (in another
-    # project file), proxyType is 2. Type 2 is used for references to the
-    # products of the other project file's targets.
-    #
-    # Xcode is weird about remoteGlobalIDString. Usually, it's printed without
-    # a comment, indicating that it's tracked internally simply as a string, but
-    # sometimes it's printed with a comment (usually when the object is initially
-    # created), indicating that it's tracked as a project file object at least
-    # sometimes. This module always tracks it as an object, but contains a hack
-    # to prevent it from printing the comment in the project file output. See
-    # _XCKVPrint.
-    _schema = XCObject._schema.copy()
-    _schema.update(
-        {
-            "containerPortal": [0, XCContainerPortal, 0, 1],
-            "proxyType": [0, int, 0, 1],
-            "remoteGlobalIDString": [0, XCRemoteObject, 0, 1],
-            "remoteInfo": [0, str, 0, 1],
-        }
-    )
-
-    def __repr__(self):
-        props = self._properties
-        name = "{}.gyp:{}".format(props["containerPortal"].Name(), props["remoteInfo"])
-        return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>"
-
-    def Name(self):
-        # Admittedly not the best name, but it's what Xcode uses.
-        return self.__class__.__name__
-
-    def Hashables(self):
-        # super
-        hashables = XCObject.Hashables(self)
-
-        # Use the hashables of the weak objects that this object refers to.
-        hashables.extend(self._properties["containerPortal"].Hashables())
-        hashables.extend(self._properties["remoteGlobalIDString"].Hashables())
-        return hashables
-
-
-class PBXTargetDependency(XCObject):
-    # The "target" property accepts an XCTarget object, and obviously not
-    # NoneType. But XCTarget is defined below, so it can't be put into the
-    # schema yet. The definition of PBXTargetDependency can't be moved below
-    # XCTarget because XCTarget's own schema references PBXTargetDependency.
-    # Python doesn't deal well with this circular relationship, and doesn't have
-    # a real way to do forward declarations. To work around, the type of
-    # the "target" property is reset below, after XCTarget is defined.
-    #
-    # At least one of "name" and "target" is required.
-    _schema = XCObject._schema.copy()
-    _schema.update(
-        {
-            "name": [0, str, 0, 0],
-            "target": [0, None.__class__, 0, 0],
-            "targetProxy": [0, PBXContainerItemProxy, 1, 1],
-        }
-    )
-
-    def __repr__(self):
-        name = self._properties.get("name") or self._properties["target"].Name()
-        return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>"
-
-    def Name(self):
-        # Admittedly not the best name, but it's what Xcode uses.
-        return self.__class__.__name__
-
-    def Hashables(self):
-        # super
-        hashables = XCObject.Hashables(self)
-
-        # Use the hashables of the weak objects that this object refers to.
-        hashables.extend(self._properties["targetProxy"].Hashables())
-        return hashables
-
-
-class PBXReferenceProxy(XCFileLikeElement):
-    _schema = XCFileLikeElement._schema.copy()
-    _schema.update(
-        {
-            "fileType": [0, str, 0, 1],
-            "path": [0, str, 0, 1],
-            "remoteRef": [0, PBXContainerItemProxy, 1, 1],
-        }
-    )
-
-
-class XCTarget(XCRemoteObject):
-    # An XCTarget is really just an XCObject, the XCRemoteObject thing is just
-    # to allow PBXProject to be used in the remoteGlobalIDString property of
-    # PBXContainerItemProxy.
- # - # Setting a "name" property at instantiation may also affect "productName", - # which may in turn affect the "PRODUCT_NAME" build setting in children of - # "buildConfigurationList". See __init__ below. - _schema = XCRemoteObject._schema.copy() - _schema.update( - { - "buildConfigurationList": [ - 0, - XCConfigurationList, - 1, - 1, - XCConfigurationList(), - ], - "buildPhases": [1, XCBuildPhase, 1, 1, []], - "dependencies": [1, PBXTargetDependency, 1, 1, []], - "name": [0, str, 0, 1], - "productName": [0, str, 0, 1], - } - ) - - def __init__( - self, - properties=None, - id=None, - parent=None, - force_outdir=None, - force_prefix=None, - force_extension=None, - ): - # super - XCRemoteObject.__init__(self, properties, id, parent) - - # Set up additional defaults not expressed in the schema. If a "name" - # property was supplied, set "productName" if it is not present. Also set - # the "PRODUCT_NAME" build setting in each configuration, but only if - # the setting is not present in any build configuration. - if "name" in self._properties and "productName" not in self._properties: - self.SetProperty("productName", self._properties["name"]) - - if "productName" in self._properties: - if "buildConfigurationList" in self._properties: - configs = self._properties["buildConfigurationList"] - if configs.HasBuildSetting("PRODUCT_NAME") == 0: - configs.SetBuildSetting( - "PRODUCT_NAME", self._properties["productName"] - ) - - def AddDependency(self, other): - pbxproject = self.PBXProjectAncestor() - other_pbxproject = other.PBXProjectAncestor() - if pbxproject == other_pbxproject: - # Add a dependency to another target in the same project file. - container = PBXContainerItemProxy( - { - "containerPortal": pbxproject, - "proxyType": 1, - "remoteGlobalIDString": other, - "remoteInfo": other.Name(), - } - ) - dependency = PBXTargetDependency( - {"target": other, "targetProxy": container} - ) - self.AppendProperty("dependencies", dependency) - else: - # Add a dependency to a target in a different project file. - other_project_ref = pbxproject.AddOrGetProjectReference(other_pbxproject)[1] - container = PBXContainerItemProxy( - { - "containerPortal": other_project_ref, - "proxyType": 1, - "remoteGlobalIDString": other, - "remoteInfo": other.Name(), - } - ) - dependency = PBXTargetDependency( - {"name": other.Name(), "targetProxy": container} - ) - self.AppendProperty("dependencies", dependency) - - # Proxy all of these through to the build configuration list. - - def ConfigurationNamed(self, name): - return self._properties["buildConfigurationList"].ConfigurationNamed(name) - - def DefaultConfiguration(self): - return self._properties["buildConfigurationList"].DefaultConfiguration() - - def HasBuildSetting(self, key): - return self._properties["buildConfigurationList"].HasBuildSetting(key) - - def GetBuildSetting(self, key): - return self._properties["buildConfigurationList"].GetBuildSetting(key) - - def SetBuildSetting(self, key, value): - return self._properties["buildConfigurationList"].SetBuildSetting(key, value) - - def AppendBuildSetting(self, key, value): - return self._properties["buildConfigurationList"].AppendBuildSetting(key, value) - - def DelBuildSetting(self, key): - return self._properties["buildConfigurationList"].DelBuildSetting(key) - - -# Redefine the type of the "target" property. See PBXTargetDependency._schema -# above. 
-PBXTargetDependency._schema["target"][1] = XCTarget - - -class PBXNativeTarget(XCTarget): - # buildPhases is overridden in the schema to be able to set defaults. - # - # NOTE: Contrary to most objects, it is advisable to set parent when - # constructing PBXNativeTarget. A parent of an XCTarget must be a PBXProject - # object. A parent reference is required for a PBXNativeTarget during - # construction to be able to set up the target defaults for productReference, - # because a PBXBuildFile object must be created for the target and it must - # be added to the PBXProject's mainGroup hierarchy. - _schema = XCTarget._schema.copy() - _schema.update( - { - "buildPhases": [ - 1, - XCBuildPhase, - 1, - 1, - [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()], - ], - "buildRules": [1, PBXBuildRule, 1, 1, []], - "productReference": [0, PBXFileReference, 0, 1], - "productType": [0, str, 0, 1], - } - ) - - # Mapping from Xcode product-types to settings. The settings are: - # filetype : used for explicitFileType in the project file - # prefix : the prefix for the file name - # suffix : the suffix for the file name - _product_filetypes = { - "com.apple.product-type.application": ["wrapper.application", "", ".app"], - "com.apple.product-type.application.watchapp": [ - "wrapper.application", - "", - ".app", - ], - "com.apple.product-type.watchkit-extension": [ - "wrapper.app-extension", - "", - ".appex", - ], - "com.apple.product-type.app-extension": ["wrapper.app-extension", "", ".appex"], - "com.apple.product-type.bundle": ["wrapper.cfbundle", "", ".bundle"], - "com.apple.product-type.framework": ["wrapper.framework", "", ".framework"], - "com.apple.product-type.library.dynamic": [ - "compiled.mach-o.dylib", - "lib", - ".dylib", - ], - "com.apple.product-type.library.static": ["archive.ar", "lib", ".a"], - "com.apple.product-type.tool": ["compiled.mach-o.executable", "", ""], - "com.apple.product-type.bundle.unit-test": ["wrapper.cfbundle", "", ".xctest"], - "com.apple.product-type.bundle.ui-testing": ["wrapper.cfbundle", "", ".xctest"], - "com.googlecode.gyp.xcode.bundle": ["compiled.mach-o.dylib", "", ".so"], - "com.apple.product-type.kernel-extension": ["wrapper.kext", "", ".kext"], - } - - def __init__( - self, - properties=None, - id=None, - parent=None, - force_outdir=None, - force_prefix=None, - force_extension=None, - ): - # super - XCTarget.__init__(self, properties, id, parent) - - if ( - "productName" in self._properties - and "productType" in self._properties - and "productReference" not in self._properties - and self._properties["productType"] in self._product_filetypes - ): - products_group = None - pbxproject = self.PBXProjectAncestor() - if pbxproject is not None: - products_group = pbxproject.ProductsGroup() - - if products_group is not None: - (filetype, prefix, suffix) = self._product_filetypes[ - self._properties["productType"] - ] - # Xcode does not have a distinct type for loadable modules that are - # pure BSD targets (not in a bundle wrapper). GYP allows such modules - # to be specified by setting a target type to loadable_module without - # having mac_bundle set. These are mapped to the pseudo-product type - # com.googlecode.gyp.xcode.bundle. - # - # By picking up this special type and converting it to a dynamic - # library (com.apple.product-type.library.dynamic) with fix-ups, - # single-file loadable modules can be produced. - # - # MACH_O_TYPE is changed to mh_bundle to produce the proper file type - # (as opposed to mh_dylib). 
In order for linking to succeed,
-                # DYLIB_CURRENT_VERSION and DYLIB_COMPATIBILITY_VERSION must be
-                # cleared. They are meaningless for type mh_bundle.
-                #
-                # Finally, the .so extension is forcibly applied over the default
-                # (.dylib), unless another forced extension is already selected.
-                # .dylib is plainly wrong, and .bundle is used by loadable_modules in
-                # bundle wrappers (com.apple.product-type.bundle). .so seems an odd
-                # choice because it's used as the extension on many other systems that
-                # don't distinguish between linkable shared libraries and non-linkable
-                # loadable modules, but there's precedent: Python loadable modules on
-                # Mac OS X use an .so extension.
-                if self._properties["productType"] == "com.googlecode.gyp.xcode.bundle":
-                    self._properties[
-                        "productType"
-                    ] = "com.apple.product-type.library.dynamic"
-                    self.SetBuildSetting("MACH_O_TYPE", "mh_bundle")
-                    self.SetBuildSetting("DYLIB_CURRENT_VERSION", "")
-                    self.SetBuildSetting("DYLIB_COMPATIBILITY_VERSION", "")
-                    if force_extension is None:
-                        force_extension = suffix[1:]
-
-                if (
-                    self._properties["productType"] in {
-                        "com.apple.product-type.bundle.unit-test",
-                        "com.apple.product-type.bundle.ui-testing"
-                    }
-                ) and force_extension is None:
-                    force_extension = suffix[1:]
-
-                if force_extension is not None:
-                    # If it's a wrapper (bundle), set WRAPPER_EXTENSION.
-                    # Extension override.
-                    suffix = "." + force_extension
-                    if filetype.startswith("wrapper."):
-                        self.SetBuildSetting("WRAPPER_EXTENSION", force_extension)
-                    else:
-                        self.SetBuildSetting("EXECUTABLE_EXTENSION", force_extension)
-
-                if filetype.startswith("compiled.mach-o.executable"):
-                    product_name = self._properties["productName"]
-                    product_name += suffix
-                    suffix = ""
-                    self.SetProperty("productName", product_name)
-                    self.SetBuildSetting("PRODUCT_NAME", product_name)
-
-                # Xcode handles most prefixes based on the target type, however there
-                # are exceptions. If a "BSD Dynamic Library" target is added in the
-                # Xcode UI, Xcode sets EXECUTABLE_PREFIX. This check duplicates that
-                # behavior.
-                if force_prefix is not None:
-                    prefix = force_prefix
-                if filetype.startswith("wrapper."):
-                    self.SetBuildSetting("WRAPPER_PREFIX", prefix)
-                else:
-                    self.SetBuildSetting("EXECUTABLE_PREFIX", prefix)
-
-                if force_outdir is not None:
-                    self.SetBuildSetting("TARGET_BUILD_DIR", force_outdir)
-
-                # TODO(tvl): Remove the below hack.
-                # http://code.google.com/p/gyp/issues/detail?id=122
-
-                # Some targets include the prefix in the target_name. These targets
-                # really should just add a product_name setting that doesn't include
-                # the prefix. For example:
-                #   target_name = 'libevent', product_name = 'event'
-                # This check cleans up for them.
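-                # (Worked illustration: a static library with productName
-                # "libevent" has prefix "lib" and suffix ".a"; the block below
-                # strips the prefix so PRODUCT_NAME becomes "event" while the
-                # product file reference path stays "libevent.a".)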
- product_name = self._properties["productName"] - prefix_len = len(prefix) - if prefix_len and (product_name[:prefix_len] == prefix): - product_name = product_name[prefix_len:] - self.SetProperty("productName", product_name) - self.SetBuildSetting("PRODUCT_NAME", product_name) - - ref_props = { - "explicitFileType": filetype, - "includeInIndex": 0, - "path": prefix + product_name + suffix, - "sourceTree": "BUILT_PRODUCTS_DIR", - } - file_ref = PBXFileReference(ref_props) - products_group.AppendChild(file_ref) - self.SetProperty("productReference", file_ref) - - def GetBuildPhaseByType(self, type): - if "buildPhases" not in self._properties: - return None - - the_phase = None - for phase in self._properties["buildPhases"]: - if isinstance(phase, type): - # Some phases may be present in multiples in a well-formed project file, - # but phases like PBXSourcesBuildPhase may only be present singly, and - # this function is intended as an aid to GetBuildPhaseByType. Loop - # over the entire list of phases and assert if more than one of the - # desired type is found. - assert the_phase is None - the_phase = phase - - return the_phase - - def HeadersPhase(self): - headers_phase = self.GetBuildPhaseByType(PBXHeadersBuildPhase) - if headers_phase is None: - headers_phase = PBXHeadersBuildPhase() - - # The headers phase should come before the resources, sources, and - # frameworks phases, if any. - insert_at = len(self._properties["buildPhases"]) - for index, phase in enumerate(self._properties["buildPhases"]): - if isinstance( - phase, - ( - PBXResourcesBuildPhase, - PBXSourcesBuildPhase, - PBXFrameworksBuildPhase, - ), - ): - insert_at = index - break - - self._properties["buildPhases"].insert(insert_at, headers_phase) - headers_phase.parent = self - - return headers_phase - - def ResourcesPhase(self): - resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase) - if resources_phase is None: - resources_phase = PBXResourcesBuildPhase() - - # The resources phase should come before the sources and frameworks - # phases, if any. 
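-        # (Illustration: given buildPhases [PBXHeadersBuildPhase,
-        # PBXSourcesBuildPhase, PBXFrameworksBuildPhase], the loop below
-        # picks insert_at 1, placing the new resources phase between the
-        # headers and sources phases.)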
- insert_at = len(self._properties["buildPhases"]) - for index, phase in enumerate(self._properties["buildPhases"]): - if isinstance(phase, (PBXSourcesBuildPhase, PBXFrameworksBuildPhase)): - insert_at = index - break - - self._properties["buildPhases"].insert(insert_at, resources_phase) - resources_phase.parent = self - - return resources_phase - - def SourcesPhase(self): - sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase) - if sources_phase is None: - sources_phase = PBXSourcesBuildPhase() - self.AppendProperty("buildPhases", sources_phase) - - return sources_phase - - def FrameworksPhase(self): - frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase) - if frameworks_phase is None: - frameworks_phase = PBXFrameworksBuildPhase() - self.AppendProperty("buildPhases", frameworks_phase) - - return frameworks_phase - - def AddDependency(self, other): - # super - XCTarget.AddDependency(self, other) - - static_library_type = "com.apple.product-type.library.static" - shared_library_type = "com.apple.product-type.library.dynamic" - framework_type = "com.apple.product-type.framework" - if ( - isinstance(other, PBXNativeTarget) - and "productType" in self._properties - and self._properties["productType"] != static_library_type - and "productType" in other._properties - and ( - other._properties["productType"] == static_library_type - or ( - ( - other._properties["productType"] in { - shared_library_type, - framework_type - } - ) - and ( - (not other.HasBuildSetting("MACH_O_TYPE")) - or other.GetBuildSetting("MACH_O_TYPE") != "mh_bundle" - ) - ) - ) - ): - - file_ref = other.GetProperty("productReference") - - pbxproject = self.PBXProjectAncestor() - other_pbxproject = other.PBXProjectAncestor() - if pbxproject != other_pbxproject: - other_project_product_group = pbxproject.AddOrGetProjectReference( - other_pbxproject - )[0] - file_ref = other_project_product_group.GetChildByRemoteObject(file_ref) - - self.FrameworksPhase().AppendProperty( - "files", PBXBuildFile({"fileRef": file_ref}) - ) - - -class PBXAggregateTarget(XCTarget): - pass - - -class PBXProject(XCContainerPortal): - # A PBXProject is really just an XCObject, the XCContainerPortal thing is - # just to allow PBXProject to be used in the containerPortal property of - # PBXContainerItemProxy. - """ - - Attributes: - path: "sample.xcodeproj". TODO(mark) Document me! - _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each - value is a reference to the dict in the - projectReferences list associated with the keyed - PBXProject. 
- """ - - _schema = XCContainerPortal._schema.copy() - _schema.update( - { - "attributes": [0, dict, 0, 0], - "buildConfigurationList": [ - 0, - XCConfigurationList, - 1, - 1, - XCConfigurationList(), - ], - "compatibilityVersion": [0, str, 0, 1, "Xcode 3.2"], - "hasScannedForEncodings": [0, int, 0, 1, 1], - "mainGroup": [0, PBXGroup, 1, 1, PBXGroup()], - "projectDirPath": [0, str, 0, 1, ""], - "projectReferences": [1, dict, 0, 0], - "projectRoot": [0, str, 0, 1, ""], - "targets": [1, XCTarget, 1, 1, []], - } - ) - - def __init__(self, properties=None, id=None, parent=None, path=None): - self.path = path - self._other_pbxprojects = {} - # super - XCContainerPortal.__init__(self, properties, id, parent) - - def Name(self): - name = self.path - if name[-10:] == ".xcodeproj": - name = name[:-10] - return posixpath.basename(name) - - def Path(self): - return self.path - - def Comment(self): - return "Project object" - - def Children(self): - # super - children = XCContainerPortal.Children(self) - - # Add children that the schema doesn't know about. Maybe there's a more - # elegant way around this, but this is the only case where we need to own - # objects in a dictionary (that is itself in a list), and three lines for - # a one-off isn't that big a deal. - if "projectReferences" in self._properties: - for reference in self._properties["projectReferences"]: - children.append(reference["ProductGroup"]) - - return children - - def PBXProjectAncestor(self): - return self - - def _GroupByName(self, name): - if "mainGroup" not in self._properties: - self.SetProperty("mainGroup", PBXGroup()) - - main_group = self._properties["mainGroup"] - group = main_group.GetChildByName(name) - if group is None: - group = PBXGroup({"name": name}) - main_group.AppendChild(group) - - return group - - # SourceGroup and ProductsGroup are created by default in Xcode's own - # templates. - def SourceGroup(self): - return self._GroupByName("Source") - - def ProductsGroup(self): - return self._GroupByName("Products") - - # IntermediatesGroup is used to collect source-like files that are generated - # by rules or script phases and are placed in intermediate directories such - # as DerivedSources. - def IntermediatesGroup(self): - return self._GroupByName("Intermediates") - - # FrameworksGroup and ProjectsGroup are top-level groups used to collect - # frameworks and projects. - def FrameworksGroup(self): - return self._GroupByName("Frameworks") - - def ProjectsGroup(self): - return self._GroupByName("Projects") - - def RootGroupForPath(self, path): - """Returns a PBXGroup child of this object to which path should be added. - - This method is intended to choose between SourceGroup and - IntermediatesGroup on the basis of whether path is present in a source - directory or an intermediates directory. For the purposes of this - determination, any path located within a derived file directory such as - PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates - directory. - - The returned value is a two-element tuple. The first element is the - PBXGroup, and the second element specifies whether that group should be - organized hierarchically (True) or as a single flat list (False). - """ - - # TODO(mark): make this a class variable and bind to self on call? - # Also, this list is nowhere near exhaustive. - # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by - # gyp.generator.xcode. There should probably be some way for that module - # to push the names in, rather than having to hard-code them here. 
- source_tree_groups = { - "DERIVED_FILE_DIR": (self.IntermediatesGroup, True), - "INTERMEDIATE_DIR": (self.IntermediatesGroup, True), - "PROJECT_DERIVED_FILE_DIR": (self.IntermediatesGroup, True), - "SHARED_INTERMEDIATE_DIR": (self.IntermediatesGroup, True), - } - - (source_tree, path) = SourceTreeAndPathFromPath(path) - if source_tree is not None and source_tree in source_tree_groups: - (group_func, hierarchical) = source_tree_groups[source_tree] - group = group_func() - return (group, hierarchical) - - # TODO(mark): make additional choices based on file extension. - - return (self.SourceGroup(), True) - - def AddOrGetFileInRootGroup(self, path): - """Returns a PBXFileReference corresponding to path in the correct group - according to RootGroupForPath's heuristics. - - If an existing PBXFileReference for path exists, it will be returned. - Otherwise, one will be created and returned. - """ - - (group, hierarchical) = self.RootGroupForPath(path) - return group.AddOrGetFileByPath(path, hierarchical) - - def RootGroupsTakeOverOnlyChildren(self, recurse=False): - """Calls TakeOverOnlyChild for all groups in the main group.""" - - for group in self._properties["mainGroup"]._properties["children"]: - if isinstance(group, PBXGroup): - group.TakeOverOnlyChild(recurse) - - def SortGroups(self): - # Sort the children of the mainGroup (like "Source" and "Products") - # according to their defined order. - self._properties["mainGroup"]._properties["children"] = sorted( - self._properties["mainGroup"]._properties["children"], - key=cmp_to_key(lambda x, y: x.CompareRootGroup(y)), - ) - - # Sort everything else by putting group before files, and going - # alphabetically by name within sections of groups and files. SortGroup - # is recursive. - for group in self._properties["mainGroup"]._properties["children"]: - if not isinstance(group, PBXGroup): - continue - - if group.Name() == "Products": - # The Products group is a special case. Instead of sorting - # alphabetically, sort things in the order of the targets that - # produce the products. To do this, just build up a new list of - # products based on the targets. - products = [] - for target in self._properties["targets"]: - if not isinstance(target, PBXNativeTarget): - continue - product = target._properties["productReference"] - # Make sure that the product is already in the products group. - assert product in group._properties["children"] - products.append(product) - - # Make sure that this process doesn't miss anything that was already - # in the products group. - assert len(products) == len(group._properties["children"]) - group._properties["children"] = products - else: - group.SortGroup() - - def AddOrGetProjectReference(self, other_pbxproject): - """Add a reference to another project file (via PBXProject object) to this - one. - - Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in - this project file that contains a PBXReferenceProxy object for each - product of each PBXNativeTarget in the other project file. ProjectRef is - a PBXFileReference to the other project file. - - If this project file already references the other project file, the - existing ProductGroup and ProjectRef are returned. The ProductGroup will - still be updated if necessary. - """ - - if "projectReferences" not in self._properties: - self._properties["projectReferences"] = [] - - product_group = None - project_ref = None - - if other_pbxproject not in self._other_pbxprojects: - # This project file isn't yet linked to the other one. 
Establish the
-            # link.
-            product_group = PBXGroup({"name": "Products"})
-
-            # ProductGroup is strong.
-            product_group.parent = self
-
-            # There's nothing unique about this PBXGroup, and if left alone, it will
-            # wind up with the same set of hashables as all other PBXGroup objects
-            # owned by the projectReferences list. Add the hashables of the
-            # remote PBXProject that it's related to.
-            product_group._hashables.extend(other_pbxproject.Hashables())
-
-            # The other project reports its path as relative to the same directory
-            # that this project's path is relative to. The other project's path
-            # is not necessarily already relative to this project. Figure out the
-            # pathname that this project needs to use to refer to the other one.
-            this_path = posixpath.dirname(self.Path())
-            projectDirPath = self.GetProperty("projectDirPath")
-            if projectDirPath:
-                if posixpath.isabs(projectDirPath[0]):
-                    this_path = projectDirPath
-                else:
-                    this_path = posixpath.join(this_path, projectDirPath)
-            other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path)
-
-            # ProjectRef is weak (it's owned by the mainGroup hierarchy).
-            project_ref = PBXFileReference(
-                {
-                    "lastKnownFileType": "wrapper.pb-project",
-                    "path": other_path,
-                    "sourceTree": "SOURCE_ROOT",
-                }
-            )
-            self.ProjectsGroup().AppendChild(project_ref)
-
-            ref_dict = {"ProductGroup": product_group, "ProjectRef": project_ref}
-            self._other_pbxprojects[other_pbxproject] = ref_dict
-            self.AppendProperty("projectReferences", ref_dict)
-
-            # Xcode seems to sort this list case-insensitively
-            self._properties["projectReferences"] = sorted(
-                self._properties["projectReferences"],
-                key=lambda x: x["ProjectRef"].Name().lower()
-            )
-        else:
-            # The link already exists. Pull out the relevant data.
-            project_ref_dict = self._other_pbxprojects[other_pbxproject]
-            product_group = project_ref_dict["ProductGroup"]
-            project_ref = project_ref_dict["ProjectRef"]
-
-        self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
-
-        inherit_unique_symroot = self._AllSymrootsUnique(other_pbxproject, False)
-        targets = other_pbxproject.GetProperty("targets")
-        if all(self._AllSymrootsUnique(t, inherit_unique_symroot) for t in targets):
-            dir_path = project_ref._properties["path"]
-            product_group._hashables.extend(dir_path)
-
-        return [product_group, project_ref]
-
-    def _AllSymrootsUnique(self, target, inherit_unique_symroot):
-        # Returns True if all configurations have a unique 'SYMROOT' attribute.
-        # The value of inherit_unique_symroot decides whether a configuration is
-        # assumed to inherit a unique 'SYMROOT' attribute from its parent when it
-        # doesn't define an explicit value for 'SYMROOT'.
-        symroots = self._DefinedSymroots(target)
-        for s in self._DefinedSymroots(target):
-            if (
-                s is not None
-                and not self._IsUniqueSymrootForTarget(s)
-                or s is None
-                and not inherit_unique_symroot
-            ):
-                return False
-        return True if symroots else inherit_unique_symroot
-
-    def _DefinedSymroots(self, target):
-        # Returns all values for the 'SYMROOT' attribute defined in all
-        # configurations for this target. If any configuration doesn't define the
-        # 'SYMROOT' attribute, None is added to the returned set. If all
-        # configurations don't define the 'SYMROOT' attribute, an empty set is
-        # returned.
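-        # (Illustration: two configurations with SYMROOT "build" plus one
-        # without any SYMROOT yield {"build", None}; if no configuration
-        # defines SYMROOT at all, the result is an empty set.)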
-        config_list = target.GetProperty("buildConfigurationList")
-        symroots = set()
-        for config in config_list.GetProperty("buildConfigurations"):
-            setting = config.GetProperty("buildSettings")
-            if "SYMROOT" in setting:
-                symroots.add(setting["SYMROOT"])
-            else:
-                symroots.add(None)
-        if len(symroots) == 1 and None in symroots:
-            return set()
-        return symroots
-
-    def _IsUniqueSymrootForTarget(self, symroot):
-        # This method returns True if all configurations in target contain a
-        # 'SYMROOT' attribute that is unique for the given target. A value is
-        # unique if the Xcode macro '$SRCROOT' appears in it in any form.
-        uniquifier = ["$SRCROOT", "$(SRCROOT)"]
-        if any(x in symroot for x in uniquifier):
-            return True
-        return False
-
-    def _SetUpProductReferences(self, other_pbxproject, product_group, project_ref):
-        # TODO(mark): This only adds references to products in other_pbxproject
-        # when they don't exist in this pbxproject. Perhaps it should also
-        # remove references from this pbxproject that are no longer present in
-        # other_pbxproject. Perhaps it should update various properties if they
-        # change.
-        for target in other_pbxproject._properties["targets"]:
-            if not isinstance(target, PBXNativeTarget):
-                continue
-
-            other_fileref = target._properties["productReference"]
-            if product_group.GetChildByRemoteObject(other_fileref) is None:
-                # Xcode sets remoteInfo to the name of the target and not the name
-                # of its product, despite this proxy being a reference to the product.
-                container_item = PBXContainerItemProxy(
-                    {
-                        "containerPortal": project_ref,
-                        "proxyType": 2,
-                        "remoteGlobalIDString": other_fileref,
-                        "remoteInfo": target.Name(),
-                    }
-                )
-                # TODO(mark): Does sourceTree get copied straight over from the other
-                # project? Can the other project ever have lastKnownFileType here
-                # instead of explicitFileType? (Use it if so?) Can path ever be
-                # unset? (I don't think so.) Can other_fileref have name set, and
-                # does it impact the PBXReferenceProxy if so? These are the questions
-                # that perhaps will be answered one day.
-                reference_proxy = PBXReferenceProxy(
-                    {
-                        "fileType": other_fileref._properties["explicitFileType"],
-                        "path": other_fileref._properties["path"],
-                        "sourceTree": other_fileref._properties["sourceTree"],
-                        "remoteRef": container_item,
-                    }
-                )
-
-                product_group.AppendChild(reference_proxy)
-
-    def SortRemoteProductReferences(self):
-        # For each remote project file, sort the associated ProductGroup in the
-        # same order that the targets are sorted in the remote project file. This
-        # is the sort order used by Xcode.
-
-        def CompareProducts(x, y, remote_products):
-            # x and y are PBXReferenceProxy objects. Go through their associated
-            # PBXContainerItem to get the remote PBXFileReference, which will be
-            # present in the remote_products list.
-            x_remote = x._properties["remoteRef"]._properties["remoteGlobalIDString"]
-            y_remote = y._properties["remoteRef"]._properties["remoteGlobalIDString"]
-            x_index = remote_products.index(x_remote)
-            y_index = remote_products.index(y_remote)
-
-            # Use the order of each remote PBXFileReference in remote_products to
-            # determine the sort order.
-            return cmp(x_index, y_index)
-
-        for other_pbxproject, ref_dict in self._other_pbxprojects.items():
-            # Build up a list of products in the remote project file, ordered the
-            # same as the targets that produce them.
- remote_products = [] - for target in other_pbxproject._properties["targets"]: - if not isinstance(target, PBXNativeTarget): - continue - remote_products.append(target._properties["productReference"]) - - # Sort the PBXReferenceProxy children according to the list of remote - # products. - product_group = ref_dict["ProductGroup"] - product_group._properties["children"] = sorted( - product_group._properties["children"], - key=cmp_to_key( - lambda x, y, rp=remote_products: CompareProducts(x, y, rp)), - ) - - -class XCProjectFile(XCObject): - _schema = XCObject._schema.copy() - _schema.update( - { - "archiveVersion": [0, int, 0, 1, 1], - "classes": [0, dict, 0, 1, {}], - "objectVersion": [0, int, 0, 1, 46], - "rootObject": [0, PBXProject, 1, 1], - } - ) - - def ComputeIDs(self, recursive=True, overwrite=True, hash=None): - # Although XCProjectFile is implemented here as an XCObject, it's not a - # proper object in the Xcode sense, and it certainly doesn't have its own - # ID. Pass through an attempt to update IDs to the real root object. - if recursive: - self._properties["rootObject"].ComputeIDs(recursive, overwrite, hash) - - def Print(self, file=sys.stdout): - self.VerifyHasRequiredProperties() - - # Add the special "objects" property, which will be caught and handled - # separately during printing. This structure allows a fairly standard - # loop do the normal printing. - self._properties["objects"] = {} - self._XCPrint(file, 0, "// !$*UTF8*$!\n") - if self._should_print_single_line: - self._XCPrint(file, 0, "{ ") - else: - self._XCPrint(file, 0, "{\n") - for property, value in sorted( - self._properties.items() - ): - if property == "objects": - self._PrintObjects(file) - else: - self._XCKVPrint(file, 1, property, value) - self._XCPrint(file, 0, "}\n") - del self._properties["objects"] - - def _PrintObjects(self, file): - if self._should_print_single_line: - self._XCPrint(file, 0, "objects = {") - else: - self._XCPrint(file, 1, "objects = {\n") - - objects_by_class = {} - for object in self.Descendants(): - if object == self: - continue - class_name = object.__class__.__name__ - if class_name not in objects_by_class: - objects_by_class[class_name] = [] - objects_by_class[class_name].append(object) - - for class_name in sorted(objects_by_class): - self._XCPrint(file, 0, "\n") - self._XCPrint(file, 0, "/* Begin " + class_name + " section */\n") - for object in sorted( - objects_by_class[class_name], key=attrgetter("id") - ): - object.Print(file) - self._XCPrint(file, 0, "/* End " + class_name + " section */\n") - - if self._should_print_single_line: - self._XCPrint(file, 0, "}; ") - else: - self._XCPrint(file, 1, "};\n") diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py deleted file mode 100644 index 530196366946d..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Applies a fix to CR LF TAB handling in xml.dom. - -Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293 -Working around this: http://bugs.python.org/issue5752 -TODO(bradnelson): Consider dropping this when we drop XP support. 
-""" - - -import xml.dom.minidom - - -def _Replacement_write_data(writer, data, is_attrib=False): - """Writes datachars to writer.""" - data = data.replace("&", "&").replace("<", "<") - data = data.replace('"', """).replace(">", ">") - if is_attrib: - data = data.replace("\r", " ").replace("\n", " ").replace("\t", " ") - writer.write(data) - - -def _Replacement_writexml(self, writer, indent="", addindent="", newl=""): - # indent = current indentation - # addindent = indentation to add to higher levels - # newl = newline string - writer.write(indent + "<" + self.tagName) - - attrs = self._get_attributes() - a_names = sorted(attrs.keys()) - - for a_name in a_names: - writer.write(' %s="' % a_name) - _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True) - writer.write('"') - if self.childNodes: - writer.write(">%s" % newl) - for node in self.childNodes: - node.writexml(writer, indent + addindent, addindent, newl) - writer.write(f"{indent}{newl}") - else: - writer.write("/>%s" % newl) - - -class XmlFix: - """Object to manage temporary patching of xml.dom.minidom.""" - - def __init__(self): - # Preserve current xml.dom.minidom functions. - self.write_data = xml.dom.minidom._write_data - self.writexml = xml.dom.minidom.Element.writexml - # Inject replacement versions of a function and a method. - xml.dom.minidom._write_data = _Replacement_write_data - xml.dom.minidom.Element.writexml = _Replacement_writexml - - def Cleanup(self): - if self.write_data: - xml.dom.minidom._write_data = self.write_data - xml.dom.minidom.Element.writexml = self.writexml - self.write_data = None - - def __del__(self): - self.Cleanup() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/LICENSE b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/LICENSE deleted file mode 100644 index 6f62d44e4ef73..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/LICENSE +++ /dev/null @@ -1,3 +0,0 @@ -This software is made available under the terms of *either* of the licenses -found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made -under the terms of *both* these licenses. diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/LICENSE.APACHE b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/LICENSE.APACHE deleted file mode 100644 index f433b1a53f5b8..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/LICENSE.APACHE +++ /dev/null @@ -1,177 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/LICENSE.BSD b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/LICENSE.BSD deleted file mode 100644 index 42ce7b75c92fb..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/LICENSE.BSD +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) Donald Stufft and individual contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/__init__.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/__init__.py
deleted file mode 100644
index 5fd91838316fb..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-
-__title__ = "packaging"
-__summary__ = "Core utilities for Python packages"
-__uri__ = "https://github.com/pypa/packaging"
-
-__version__ = "23.3.dev0"
-
-__author__ = "Donald Stufft and individual contributors"
-__email__ = "donald@stufft.io"
-
-__license__ = "BSD-2-Clause or Apache-2.0"
-__copyright__ = "2014 %s" % __author__
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_elffile.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_elffile.py
deleted file mode 100644
index 6fb19b30bb53c..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_elffile.py
+++ /dev/null
@@ -1,108 +0,0 @@
-"""
-ELF file parser.
-
-This provides a class ``ELFFile`` that parses an ELF executable in a similar
-interface to ``ZipFile``. Only the read interface is implemented.
-
-Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
-ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
-"""
-
-import enum
-import os
-import struct
-from typing import IO, Optional, Tuple
-
-
-class ELFInvalid(ValueError):
-    pass
-
-
-class EIClass(enum.IntEnum):
-    C32 = 1
-    C64 = 2
-
-
-class EIData(enum.IntEnum):
-    Lsb = 1
-    Msb = 2
-
-
-class EMachine(enum.IntEnum):
-    I386 = 3
-    S390 = 22
-    Arm = 40
-    X8664 = 62
-    AArc64 = 183
-
-
-class ELFFile:
-    """
-    Representation of an ELF executable.
-    """
-
-    def __init__(self, f: IO[bytes]) -> None:
-        self._f = f
-
-        try:
-            ident = self._read("16B")
-        except struct.error:
-            raise ELFInvalid("unable to parse identification")
-        magic = bytes(ident[:4])
-        if magic != b"\x7fELF":
-            raise ELFInvalid(f"invalid magic: {magic!r}")
-
-        self.capacity = ident[4]  # Format for program header (bitness).
-        self.encoding = ident[5]  # Data structure encoding (endianness).
-
-        try:
-            # e_fmt: Format for program header.
-            # p_fmt: Format for section header.
-            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
-            e_fmt, self._p_fmt, self._p_idx = {
-                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
-                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
-                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
-                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
-            }[(self.capacity, self.encoding)]
-        except KeyError:
-            raise ELFInvalid(
-                f"unrecognized capacity ({self.capacity}) or "
-                f"encoding ({self.encoding})"
-            )
-
-        try:
-            (
-                _,
-                self.machine,  # Architecture type.
-                _,
-                _,
-                self._e_phoff,  # Offset of program header.
-                _,
-                self.flags,  # Processor-specific flags.
- _, - self._e_phentsize, # Size of section. - self._e_phnum, # Number of sections. - ) = self._read(e_fmt) - except struct.error as e: - raise ELFInvalid("unable to parse machine and section information") from e - - def _read(self, fmt: str) -> Tuple[int, ...]: - return struct.unpack(fmt, self._f.read(struct.calcsize(fmt))) - - @property - def interpreter(self) -> Optional[str]: - """ - The path recorded in the ``PT_INTERP`` section header. - """ - for index in range(self._e_phnum): - self._f.seek(self._e_phoff + self._e_phentsize * index) - try: - data = self._read(self._p_fmt) - except struct.error: - continue - if data[self._p_idx[0]] != 3: # Not PT_INTERP. - continue - self._f.seek(data[self._p_idx[1]]) - return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0") - return None diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_manylinux.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_manylinux.py deleted file mode 100644 index 3705d50db9193..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_manylinux.py +++ /dev/null @@ -1,252 +0,0 @@ -import collections -import contextlib -import functools -import os -import re -import sys -import warnings -from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple - -from ._elffile import EIClass, EIData, ELFFile, EMachine - -EF_ARM_ABIMASK = 0xFF000000 -EF_ARM_ABI_VER5 = 0x05000000 -EF_ARM_ABI_FLOAT_HARD = 0x00000400 - - -# `os.PathLike` not a generic type until Python 3.9, so sticking with `str` -# as the type for `path` until then. -@contextlib.contextmanager -def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]: - try: - with open(path, "rb") as f: - yield ELFFile(f) - except (OSError, TypeError, ValueError): - yield None - - -def _is_linux_armhf(executable: str) -> bool: - # hard-float ABI can be detected from the ELF header of the running - # process - # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf - with _parse_elf(executable) as f: - return ( - f is not None - and f.capacity == EIClass.C32 - and f.encoding == EIData.Lsb - and f.machine == EMachine.Arm - and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5 - and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD - ) - - -def _is_linux_i686(executable: str) -> bool: - with _parse_elf(executable) as f: - return ( - f is not None - and f.capacity == EIClass.C32 - and f.encoding == EIData.Lsb - and f.machine == EMachine.I386 - ) - - -def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool: - if "armv7l" in archs: - return _is_linux_armhf(executable) - if "i686" in archs: - return _is_linux_i686(executable) - allowed_archs = {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x", "loongarch64"} - return any(arch in allowed_archs for arch in archs) - - -# If glibc ever changes its major version, we need to know what the last -# minor version was, so we can build the complete list of all versions. -# For now, guess what the highest minor version might be, assume it will -# be 50 for testing. Once this actually happens, update the dictionary -# with the actual value. -_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) - - -class _GLibCVersion(NamedTuple): - major: int - minor: int - - -def _glibc_version_string_confstr() -> Optional[str]: - """ - Primary implementation of glibc_version_string using os.confstr. - """ - # os.confstr is quite a bit faster than ctypes.DLL. 
It's also less likely
-    # to be broken or missing. This strategy is used in the standard library
-    # platform module.
-    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
-    try:
-        # Should be a string like "glibc 2.17".
-        version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION")
-        assert version_string is not None
-        _, version = version_string.rsplit()
-    except (AssertionError, AttributeError, OSError, ValueError):
-        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
-        return None
-    return version
-
-
-def _glibc_version_string_ctypes() -> Optional[str]:
-    """
-    Fallback implementation of glibc_version_string using ctypes.
-    """
-    try:
-        import ctypes
-    except ImportError:
-        return None
-
-    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
-    # manpage says, "If filename is NULL, then the returned handle is for the
-    # main program". This way we can let the linker do the work to figure out
-    # which libc our process is actually using.
-    #
-    # We must also handle the special case where the executable is not a
-    # dynamically linked executable. This can occur when using musl libc,
-    # for example. In this situation, dlopen() will error, leading to an
-    # OSError. Interestingly, at least in the case of musl, there is no
-    # errno set on the OSError. The single string argument used to construct
-    # OSError comes from libc itself and is therefore not portable to
-    # hard code here. In any case, failure to call dlopen() means we
-    # can proceed, so we bail on our attempt.
-    try:
-        process_namespace = ctypes.CDLL(None)
-    except OSError:
-        return None
-
-    try:
-        gnu_get_libc_version = process_namespace.gnu_get_libc_version
-    except AttributeError:
-        # Symbol doesn't exist -> therefore, we are not linked to
-        # glibc.
-        return None
-
-    # Call gnu_get_libc_version, which returns a string like "2.5"
-    gnu_get_libc_version.restype = ctypes.c_char_p
-    version_str: str = gnu_get_libc_version()
-    # py2 / py3 compatibility:
-    if not isinstance(version_str, str):
-        version_str = version_str.decode("ascii")
-
-    return version_str
-
-
-def _glibc_version_string() -> Optional[str]:
-    """Returns glibc version string, or None if not using glibc."""
-    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
-
-
-def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
-    """Parse glibc version.
-
-    We use a regexp instead of str.split because we want to discard any
-    random junk that might come after the minor version -- this might happen
-    in patched/forked versions of glibc (e.g. Linaro's version of glibc
-    uses version strings like "2.20-2014.11"). See gh-3588.
-    """
-    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
-    if not m:
-        warnings.warn(
-            f"Expected glibc version with 2 components major.minor,"
-            f" got: {version_str}",
-            RuntimeWarning,
-        )
-        return -1, -1
-    return int(m.group("major")), int(m.group("minor"))
-
-
-@functools.lru_cache()
-def _get_glibc_version() -> Tuple[int, int]:
-    version_str = _glibc_version_string()
-    if version_str is None:
-        return (-1, -1)
-    return _parse_glibc_version(version_str)
-
-
-# From PEP 513, PEP 600
-def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
-    sys_glibc = _get_glibc_version()
-    if sys_glibc < version:
-        return False
-    # Check for presence of _manylinux module.
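-    # (Illustration: a distribution may ship a _manylinux override module;
-    # its manylinux_compatible(tag_major, tag_minor, tag_arch) hook, when
-    # present, gets the final say, and a None return accepts the tag since
-    # the glibc version check above already passed.)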
- try: - import _manylinux # noqa - except ImportError: - return True - if hasattr(_manylinux, "manylinux_compatible"): - result = _manylinux.manylinux_compatible(version[0], version[1], arch) - if result is not None: - return bool(result) - return True - if version == _GLibCVersion(2, 5): - if hasattr(_manylinux, "manylinux1_compatible"): - return bool(_manylinux.manylinux1_compatible) - if version == _GLibCVersion(2, 12): - if hasattr(_manylinux, "manylinux2010_compatible"): - return bool(_manylinux.manylinux2010_compatible) - if version == _GLibCVersion(2, 17): - if hasattr(_manylinux, "manylinux2014_compatible"): - return bool(_manylinux.manylinux2014_compatible) - return True - - -_LEGACY_MANYLINUX_MAP = { - # CentOS 7 w/ glibc 2.17 (PEP 599) - (2, 17): "manylinux2014", - # CentOS 6 w/ glibc 2.12 (PEP 571) - (2, 12): "manylinux2010", - # CentOS 5 w/ glibc 2.5 (PEP 513) - (2, 5): "manylinux1", -} - - -def platform_tags(archs: Sequence[str]) -> Iterator[str]: - """Generate manylinux tags compatible to the current platform. - - :param archs: Sequence of compatible architectures. - The first one shall be the closest to the actual architecture and be the part of - platform tag after the ``linux_`` prefix, e.g. ``x86_64``. - The ``linux_`` prefix is assumed as a prerequisite for the current platform to - be manylinux-compatible. - - :returns: An iterator of compatible manylinux tags. - """ - if not _have_compatible_abi(sys.executable, archs): - return - # Oldest glibc to be supported regardless of architecture is (2, 17). - too_old_glibc2 = _GLibCVersion(2, 16) - if set(archs) & {"x86_64", "i686"}: - # On x86/i686 also oldest glibc to be supported is (2, 5). - too_old_glibc2 = _GLibCVersion(2, 4) - current_glibc = _GLibCVersion(*_get_glibc_version()) - glibc_max_list = [current_glibc] - # We can assume compatibility across glibc major versions. - # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 - # - # Build a list of maximum glibc versions so that we can - # output the canonical list of all glibc from current_glibc - # down to too_old_glibc2, including all intermediary versions. - for glibc_major in range(current_glibc.major - 1, 1, -1): - glibc_minor = _LAST_GLIBC_MINOR[glibc_major] - glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) - for arch in archs: - for glibc_max in glibc_max_list: - if glibc_max.major == too_old_glibc2.major: - min_minor = too_old_glibc2.minor - else: - # For other glibc major versions oldest supported is (x, 0). - min_minor = -1 - for glibc_minor in range(glibc_max.minor, min_minor, -1): - glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) - tag = "manylinux_{}_{}".format(*glibc_version) - if _is_compatible(arch, glibc_version): - yield f"{tag}_{arch}" - # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. - if glibc_version in _LEGACY_MANYLINUX_MAP: - legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] - if _is_compatible(arch, glibc_version): - yield f"{legacy_tag}_{arch}" diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_musllinux.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_musllinux.py deleted file mode 100644 index 86419df9d7087..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_musllinux.py +++ /dev/null @@ -1,83 +0,0 @@ -"""PEP 656 support. - -This module implements logic to detect if the currently running Python is -linked against musl, and what musl version is used. 
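# Illustrative sketch of the enumeration platform_tags performs above,
# assuming glibc 2.31 on x86_64 with no _manylinux override; the version
# numbers are made up for the example.
legacy = {(2, 17): "manylinux2014", (2, 12): "manylinux2010", (2, 5): "manylinux1"}
for minor in range(31, 4, -1):  # current minor down to the 2.5 floor
    print(f"manylinux_2_{minor}_x86_64")
    if (2, minor) in legacy:
        print(f"{legacy[(2, minor)]}_x86_64")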
-""" - -import functools -import re -import subprocess -import sys -from typing import Iterator, NamedTuple, Optional, Sequence - -from ._elffile import ELFFile - - -class _MuslVersion(NamedTuple): - major: int - minor: int - - -def _parse_musl_version(output: str) -> Optional[_MuslVersion]: - lines = [n for n in (n.strip() for n in output.splitlines()) if n] - if len(lines) < 2 or lines[0][:4] != "musl": - return None - m = re.match(r"Version (\d+)\.(\d+)", lines[1]) - if not m: - return None - return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) - - -@functools.lru_cache() -def _get_musl_version(executable: str) -> Optional[_MuslVersion]: - """Detect currently-running musl runtime version. - - This is done by checking the specified executable's dynamic linking - information, and invoking the loader to parse its output for a version - string. If the loader is musl, the output would be something like:: - - musl libc (x86_64) - Version 1.2.2 - Dynamic Program Loader - """ - try: - with open(executable, "rb") as f: - ld = ELFFile(f).interpreter - except (OSError, TypeError, ValueError): - return None - if ld is None or "musl" not in ld: - return None - proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True) - return _parse_musl_version(proc.stderr) - - -def platform_tags(archs: Sequence[str]) -> Iterator[str]: - """Generate musllinux tags compatible to the current platform. - - :param archs: Sequence of compatible architectures. - The first one shall be the closest to the actual architecture and be the part of - platform tag after the ``linux_`` prefix, e.g. ``x86_64``. - The ``linux_`` prefix is assumed as a prerequisite for the current platform to - be musllinux-compatible. - - :returns: An iterator of compatible musllinux tags. - """ - sys_musl = _get_musl_version(sys.executable) - if sys_musl is None: # Python not dynamically linked against musl. - return - for arch in archs: - for minor in range(sys_musl.minor, -1, -1): - yield f"musllinux_{sys_musl.major}_{minor}_{arch}" - - -if __name__ == "__main__": # pragma: no cover - import sysconfig - - plat = sysconfig.get_platform() - assert plat.startswith("linux-"), "not linux" - - print("plat:", plat) - print("musl:", _get_musl_version(sys.executable)) - print("tags:", end=" ") - for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): - print(t, end="\n ") diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_parser.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_parser.py deleted file mode 100644 index 4576981c2dd75..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_parser.py +++ /dev/null @@ -1,359 +0,0 @@ -"""Handwritten parser of dependency specifiers. - -The docstring for each __parse_* function contains ENBF-inspired grammar representing -the implementation. 
-""" - -import ast -from typing import Any, List, NamedTuple, Optional, Tuple, Union - -from ._tokenizer import DEFAULT_RULES, Tokenizer - - -class Node: - def __init__(self, value: str) -> None: - self.value = value - - def __str__(self) -> str: - return self.value - - def __repr__(self) -> str: - return f"<{self.__class__.__name__}('{self}')>" - - def serialize(self) -> str: - raise NotImplementedError - - -class Variable(Node): - def serialize(self) -> str: - return str(self) - - -class Value(Node): - def serialize(self) -> str: - return f'"{self}"' - - -class Op(Node): - def serialize(self) -> str: - return str(self) - - -MarkerVar = Union[Variable, Value] -MarkerItem = Tuple[MarkerVar, Op, MarkerVar] -# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]] -# MarkerList = List[Union["MarkerList", MarkerAtom, str]] -# mypy does not support recursive type definition -# https://github.com/python/mypy/issues/731 -MarkerAtom = Any -MarkerList = List[Any] - - -class ParsedRequirement(NamedTuple): - name: str - url: str - extras: List[str] - specifier: str - marker: Optional[MarkerList] - - -# -------------------------------------------------------------------------------------- -# Recursive descent parser for dependency specifier -# -------------------------------------------------------------------------------------- -def parse_requirement(source: str) -> ParsedRequirement: - return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES)) - - -def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: - """ - requirement = WS? IDENTIFIER WS? extras WS? requirement_details - """ - tokenizer.consume("WS") - - name_token = tokenizer.expect( - "IDENTIFIER", expected="package name at the start of dependency specifier" - ) - name = name_token.text - tokenizer.consume("WS") - - extras = _parse_extras(tokenizer) - tokenizer.consume("WS") - - url, specifier, marker = _parse_requirement_details(tokenizer) - tokenizer.expect("END", expected="end of dependency specifier") - - return ParsedRequirement(name, url, extras, specifier, marker) - - -def _parse_requirement_details( - tokenizer: Tokenizer, -) -> Tuple[str, str, Optional[MarkerList]]: - """ - requirement_details = AT URL (WS requirement_marker?)? - | specifier WS? (requirement_marker)? - """ - - specifier = "" - url = "" - marker = None - - if tokenizer.check("AT"): - tokenizer.read() - tokenizer.consume("WS") - - url_start = tokenizer.position - url = tokenizer.expect("URL", expected="URL after @").text - if tokenizer.check("END", peek=True): - return (url, specifier, marker) - - tokenizer.expect("WS", expected="whitespace after URL") - - # The input might end after whitespace. - if tokenizer.check("END", peek=True): - return (url, specifier, marker) - - marker = _parse_requirement_marker( - tokenizer, span_start=url_start, after="URL and whitespace" - ) - else: - specifier_start = tokenizer.position - specifier = _parse_specifier(tokenizer) - tokenizer.consume("WS") - - if tokenizer.check("END", peek=True): - return (url, specifier, marker) - - marker = _parse_requirement_marker( - tokenizer, - span_start=specifier_start, - after=( - "version specifier" - if specifier - else "name and no valid version specifier" - ), - ) - - return (url, specifier, marker) - - -def _parse_requirement_marker( - tokenizer: Tokenizer, *, span_start: int, after: str -) -> MarkerList: - """ - requirement_marker = SEMICOLON marker WS? 
- """ - - if not tokenizer.check("SEMICOLON"): - tokenizer.raise_syntax_error( - f"Expected end or semicolon (after {after})", - span_start=span_start, - ) - tokenizer.read() - - marker = _parse_marker(tokenizer) - tokenizer.consume("WS") - - return marker - - -def _parse_extras(tokenizer: Tokenizer) -> List[str]: - """ - extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? - """ - if not tokenizer.check("LEFT_BRACKET", peek=True): - return [] - - with tokenizer.enclosing_tokens( - "LEFT_BRACKET", - "RIGHT_BRACKET", - around="extras", - ): - tokenizer.consume("WS") - extras = _parse_extras_list(tokenizer) - tokenizer.consume("WS") - - return extras - - -def _parse_extras_list(tokenizer: Tokenizer) -> List[str]: - """ - extras_list = identifier (wsp* ',' wsp* identifier)* - """ - extras: List[str] = [] - - if not tokenizer.check("IDENTIFIER"): - return extras - - extras.append(tokenizer.read().text) - - while True: - tokenizer.consume("WS") - if tokenizer.check("IDENTIFIER", peek=True): - tokenizer.raise_syntax_error("Expected comma between extra names") - elif not tokenizer.check("COMMA"): - break - - tokenizer.read() - tokenizer.consume("WS") - - extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma") - extras.append(extra_token.text) - - return extras - - -def _parse_specifier(tokenizer: Tokenizer) -> str: - """ - specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS - | WS? version_many WS? - """ - with tokenizer.enclosing_tokens( - "LEFT_PARENTHESIS", - "RIGHT_PARENTHESIS", - around="version specifier", - ): - tokenizer.consume("WS") - parsed_specifiers = _parse_version_many(tokenizer) - tokenizer.consume("WS") - - return parsed_specifiers - - -def _parse_version_many(tokenizer: Tokenizer) -> str: - """ - version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)? - """ - parsed_specifiers = "" - while tokenizer.check("SPECIFIER"): - span_start = tokenizer.position - parsed_specifiers += tokenizer.read().text - if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True): - tokenizer.raise_syntax_error( - ".* suffix can only be used with `==` or `!=` operators", - span_start=span_start, - span_end=tokenizer.position + 1, - ) - if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True): - tokenizer.raise_syntax_error( - "Local version label can only be used with `==` or `!=` operators", - span_start=span_start, - span_end=tokenizer.position, - ) - tokenizer.consume("WS") - if not tokenizer.check("COMMA"): - break - parsed_specifiers += tokenizer.read().text - tokenizer.consume("WS") - - return parsed_specifiers - - -# -------------------------------------------------------------------------------------- -# Recursive descent parser for marker expression -# -------------------------------------------------------------------------------------- -def parse_marker(source: str) -> MarkerList: - return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES)) - - -def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList: - retval = _parse_marker(tokenizer) - tokenizer.expect("END", expected="end of marker expression") - return retval - - -def _parse_marker(tokenizer: Tokenizer) -> MarkerList: - """ - marker = marker_atom (BOOLOP marker_atom)+ - """ - expression = [_parse_marker_atom(tokenizer)] - while tokenizer.check("BOOLOP"): - token = tokenizer.read() - expr_right = _parse_marker_atom(tokenizer) - expression.extend((token.text, expr_right)) - return expression - - -def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom: - """ - marker_atom = WS? 
LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS? - | WS? marker_item WS? - """ - - tokenizer.consume("WS") - if tokenizer.check("LEFT_PARENTHESIS", peek=True): - with tokenizer.enclosing_tokens( - "LEFT_PARENTHESIS", - "RIGHT_PARENTHESIS", - around="marker expression", - ): - tokenizer.consume("WS") - marker: MarkerAtom = _parse_marker(tokenizer) - tokenizer.consume("WS") - else: - marker = _parse_marker_item(tokenizer) - tokenizer.consume("WS") - return marker - - -def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem: - """ - marker_item = WS? marker_var WS? marker_op WS? marker_var WS? - """ - tokenizer.consume("WS") - marker_var_left = _parse_marker_var(tokenizer) - tokenizer.consume("WS") - marker_op = _parse_marker_op(tokenizer) - tokenizer.consume("WS") - marker_var_right = _parse_marker_var(tokenizer) - tokenizer.consume("WS") - return (marker_var_left, marker_op, marker_var_right) - - -def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: - """ - marker_var = VARIABLE | QUOTED_STRING - """ - if tokenizer.check("VARIABLE"): - return process_env_var(tokenizer.read().text.replace(".", "_")) - elif tokenizer.check("QUOTED_STRING"): - return process_python_str(tokenizer.read().text) - else: - tokenizer.raise_syntax_error( - message="Expected a marker variable or quoted string" - ) - - -def process_env_var(env_var: str) -> Variable: - if ( - env_var == "platform_python_implementation" - or env_var == "python_implementation" - ): - return Variable("platform_python_implementation") - else: - return Variable(env_var) - - -def process_python_str(python_str: str) -> Value: - value = ast.literal_eval(python_str) - return Value(str(value)) - - -def _parse_marker_op(tokenizer: Tokenizer) -> Op: - """ - marker_op = IN | NOT IN | OP - """ - if tokenizer.check("IN"): - tokenizer.read() - return Op("in") - elif tokenizer.check("NOT"): - tokenizer.read() - tokenizer.expect("WS", expected="whitespace after 'not'") - tokenizer.expect("IN", expected="'in' after 'not'") - return Op("not in") - elif tokenizer.check("OP"): - return Op(tokenizer.read().text) - else: - return tokenizer.raise_syntax_error( - "Expected marker operator, one of " - "<=, <, !=, ==, >=, >, ~=, ===, in, not in" - ) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_structures.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_structures.py deleted file mode 100644 index 90a6465f9682c..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_structures.py +++ /dev/null @@ -1,61 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
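# Illustrative sketch of the MarkerList shape the parser above builds for
#     os_name == "nt" and python_version >= "3.8"
# with plain tuples standing in for the Variable/Op/Value nodes:
marker_list = [
    ("os_name", "==", "nt"),           # marker_item triple
    "and",                             # BOOLOP joining the atoms
    ("python_version", ">=", "3.8"),
]
print(marker_list)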
- - -class InfinityType: - def __repr__(self) -> str: - return "Infinity" - - def __hash__(self) -> int: - return hash(repr(self)) - - def __lt__(self, other: object) -> bool: - return False - - def __le__(self, other: object) -> bool: - return False - - def __eq__(self, other: object) -> bool: - return isinstance(other, self.__class__) - - def __gt__(self, other: object) -> bool: - return True - - def __ge__(self, other: object) -> bool: - return True - - def __neg__(self: object) -> "NegativeInfinityType": - return NegativeInfinity - - -Infinity = InfinityType() - - -class NegativeInfinityType: - def __repr__(self) -> str: - return "-Infinity" - - def __hash__(self) -> int: - return hash(repr(self)) - - def __lt__(self, other: object) -> bool: - return True - - def __le__(self, other: object) -> bool: - return True - - def __eq__(self, other: object) -> bool: - return isinstance(other, self.__class__) - - def __gt__(self, other: object) -> bool: - return False - - def __ge__(self, other: object) -> bool: - return False - - def __neg__(self: object) -> InfinityType: - return Infinity - - -NegativeInfinity = NegativeInfinityType() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_tokenizer.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_tokenizer.py deleted file mode 100644 index dd0d648d49a7c..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/_tokenizer.py +++ /dev/null @@ -1,192 +0,0 @@ -import contextlib -import re -from dataclasses import dataclass -from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union - -from .specifiers import Specifier - - -@dataclass -class Token: - name: str - text: str - position: int - - -class ParserSyntaxError(Exception): - """The provided source text could not be parsed correctly.""" - - def __init__( - self, - message: str, - *, - source: str, - span: Tuple[int, int], - ) -> None: - self.span = span - self.message = message - self.source = source - - super().__init__() - - def __str__(self) -> str: - marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^" - return "\n ".join([self.message, self.source, marker]) - - -DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = { - "LEFT_PARENTHESIS": r"\(", - "RIGHT_PARENTHESIS": r"\)", - "LEFT_BRACKET": r"\[", - "RIGHT_BRACKET": r"\]", - "SEMICOLON": r";", - "COMMA": r",", - "QUOTED_STRING": re.compile( - r""" - ( - ('[^']*') - | - ("[^"]*") - ) - """, - re.VERBOSE, - ), - "OP": r"(===|==|~=|!=|<=|>=|<|>)", - "BOOLOP": r"\b(or|and)\b", - "IN": r"\bin\b", - "NOT": r"\bnot\b", - "VARIABLE": re.compile( - r""" - \b( - python_version - |python_full_version - |os[._]name - |sys[._]platform - |platform_(release|system) - |platform[._](version|machine|python_implementation) - |python_implementation - |implementation_(name|version) - |extra - )\b - """, - re.VERBOSE, - ), - "SPECIFIER": re.compile( - Specifier._operator_regex_str + Specifier._version_regex_str, - re.VERBOSE | re.IGNORECASE, - ), - "AT": r"\@", - "URL": r"[^ \t]+", - "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b", - "VERSION_PREFIX_TRAIL": r"\.\*", - "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*", - "WS": r"[ \t]+", - "END": r"$", -} - - -class Tokenizer: - """Context-sensitive token parsing. - - Provides methods to examine the input stream to check whether the next token - matches. 
- """ - - def __init__( - self, - source: str, - *, - rules: "Dict[str, Union[str, re.Pattern[str]]]", - ) -> None: - self.source = source - self.rules: Dict[str, re.Pattern[str]] = { - name: re.compile(pattern) for name, pattern in rules.items() - } - self.next_token: Optional[Token] = None - self.position = 0 - - def consume(self, name: str) -> None: - """Move beyond provided token name, if at current position.""" - if self.check(name): - self.read() - - def check(self, name: str, *, peek: bool = False) -> bool: - """Check whether the next token has the provided name. - - By default, if the check succeeds, the token *must* be read before - another check. If `peek` is set to `True`, the token is not loaded and - would need to be checked again. - """ - assert ( - self.next_token is None - ), f"Cannot check for {name!r}, already have {self.next_token!r}" - assert name in self.rules, f"Unknown token name: {name!r}" - - expression = self.rules[name] - - match = expression.match(self.source, self.position) - if match is None: - return False - if not peek: - self.next_token = Token(name, match[0], self.position) - return True - - def expect(self, name: str, *, expected: str) -> Token: - """Expect a certain token name next, failing with a syntax error otherwise. - - The token is *not* read. - """ - if not self.check(name): - raise self.raise_syntax_error(f"Expected {expected}") - return self.read() - - def read(self) -> Token: - """Consume the next token and return it.""" - token = self.next_token - assert token is not None - - self.position += len(token.text) - self.next_token = None - - return token - - def raise_syntax_error( - self, - message: str, - *, - span_start: Optional[int] = None, - span_end: Optional[int] = None, - ) -> NoReturn: - """Raise ParserSyntaxError at the given position.""" - span = ( - self.position if span_start is None else span_start, - self.position if span_end is None else span_end, - ) - raise ParserSyntaxError( - message, - source=self.source, - span=span, - ) - - @contextlib.contextmanager - def enclosing_tokens( - self, open_token: str, close_token: str, *, around: str - ) -> Iterator[None]: - if self.check(open_token): - open_position = self.position - self.read() - else: - open_position = None - - yield - - if open_position is None: - return - - if not self.check(close_token): - self.raise_syntax_error( - f"Expected matching {close_token} for {open_token}, after {around}", - span_start=open_position, - ) - - self.read() diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/markers.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/markers.py deleted file mode 100644 index 8b98fca7233be..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/markers.py +++ /dev/null @@ -1,252 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
-
-import operator
-import os
-import platform
-import sys
-from typing import Any, Callable, Dict, List, Optional, Tuple, Union
-
-from ._parser import (
-    MarkerAtom,
-    MarkerList,
-    Op,
-    Value,
-    Variable,
-    parse_marker as _parse_marker,
-)
-from ._tokenizer import ParserSyntaxError
-from .specifiers import InvalidSpecifier, Specifier
-from .utils import canonicalize_name
-
-__all__ = [
-    "InvalidMarker",
-    "UndefinedComparison",
-    "UndefinedEnvironmentName",
-    "Marker",
-    "default_environment",
-]
-
-Operator = Callable[[str, str], bool]
-
-
-class InvalidMarker(ValueError):
-    """
-    An invalid marker was found, users should refer to PEP 508.
-    """
-
-
-class UndefinedComparison(ValueError):
-    """
-    An invalid operation was attempted on a value that doesn't support it.
-    """
-
-
-class UndefinedEnvironmentName(ValueError):
-    """
-    A name was attempted to be used that does not exist inside of the
-    environment.
-    """
-
-
-def _normalize_extra_values(results: Any) -> Any:
-    """
-    Normalize extra values.
-    """
-    if isinstance(results[0], tuple):
-        lhs, op, rhs = results[0]
-        if isinstance(lhs, Variable) and lhs.value == "extra":
-            normalized_extra = canonicalize_name(rhs.value)
-            rhs = Value(normalized_extra)
-        elif isinstance(rhs, Variable) and rhs.value == "extra":
-            normalized_extra = canonicalize_name(lhs.value)
-            lhs = Value(normalized_extra)
-        results[0] = lhs, op, rhs
-    return results
-
-
-def _format_marker(
-    marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
-) -> str:
-
-    assert isinstance(marker, (list, tuple, str))
-
-    # Sometimes we have a structure like [[...]] which is a single item list
-    # where the single item is itself its own list. In that case we want to skip
-    # the rest of this function so that we don't get extraneous () on the
-    # outside.
- if ( - isinstance(marker, list) - and len(marker) == 1 - and isinstance(marker[0], (list, tuple)) - ): - return _format_marker(marker[0]) - - if isinstance(marker, list): - inner = (_format_marker(m, first=False) for m in marker) - if first: - return " ".join(inner) - else: - return "(" + " ".join(inner) + ")" - elif isinstance(marker, tuple): - return " ".join([m.serialize() for m in marker]) - else: - return marker - - -_operators: Dict[str, Operator] = { - "in": lambda lhs, rhs: lhs in rhs, - "not in": lambda lhs, rhs: lhs not in rhs, - "<": operator.lt, - "<=": operator.le, - "==": operator.eq, - "!=": operator.ne, - ">=": operator.ge, - ">": operator.gt, -} - - -def _eval_op(lhs: str, op: Op, rhs: str) -> bool: - try: - spec = Specifier("".join([op.serialize(), rhs])) - except InvalidSpecifier: - pass - else: - return spec.contains(lhs, prereleases=True) - - oper: Optional[Operator] = _operators.get(op.serialize()) - if oper is None: - raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") - - return oper(lhs, rhs) - - -def _normalize(*values: str, key: str) -> Tuple[str, ...]: - # PEP 685 – Comparison of extra names for optional distribution dependencies - # https://peps.python.org/pep-0685/ - # > When comparing extra names, tools MUST normalize the names being - # > compared using the semantics outlined in PEP 503 for names - if key == "extra": - return tuple(canonicalize_name(v) for v in values) - - # other environment markers don't have such standards - return values - - -def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool: - groups: List[List[bool]] = [[]] - - for marker in markers: - assert isinstance(marker, (list, tuple, str)) - - if isinstance(marker, list): - groups[-1].append(_evaluate_markers(marker, environment)) - elif isinstance(marker, tuple): - lhs, op, rhs = marker - - if isinstance(lhs, Variable): - environment_key = lhs.value - lhs_value = environment[environment_key] - rhs_value = rhs.value - else: - lhs_value = lhs.value - environment_key = rhs.value - rhs_value = environment[environment_key] - - lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key) - groups[-1].append(_eval_op(lhs_value, op, rhs_value)) - else: - assert marker in ["and", "or"] - if marker == "or": - groups.append([]) - - return any(all(item) for item in groups) - - -def format_full_version(info: "sys._version_info") -> str: - version = "{0.major}.{0.minor}.{0.micro}".format(info) - kind = info.releaselevel - if kind != "final": - version += kind[0] + str(info.serial) - return version - - -def default_environment() -> Dict[str, str]: - iver = format_full_version(sys.implementation.version) - implementation_name = sys.implementation.name - return { - "implementation_name": implementation_name, - "implementation_version": iver, - "os_name": os.name, - "platform_machine": platform.machine(), - "platform_release": platform.release(), - "platform_system": platform.system(), - "platform_version": platform.version(), - "python_full_version": platform.python_version(), - "platform_python_implementation": platform.python_implementation(), - "python_version": ".".join(platform.python_version_tuple()[:2]), - "sys_platform": sys.platform, - } - - -class Marker: - def __init__(self, marker: str) -> None: - # Note: We create a Marker object without calling this constructor in - # packaging.requirements.Requirement. If any additional logic is - # added here, make sure to mirror/adapt Requirement. 
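# The evaluation machinery above, exercised through the public Marker API;
# assumes the PyPI `packaging` distribution is installed.
from packaging.markers import Marker

m = Marker('python_version >= "3.8" and os_name == "posix"')
print(m.evaluate())                   # against the current interpreter
print(m.evaluate({"os_name": "nt"}))  # partial override -> False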
-        try:
-            self._markers = _normalize_extra_values(_parse_marker(marker))
-            # The attribute `_markers` can be described in terms of a recursive type:
-            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
-            #
-            # For example, the following expression:
-            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
-            #
-            # is parsed into:
-            # [
-            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
-            #     'or',
-            #     [
-            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
-            #         'and',
-            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
-            #     ]
-            # ]
-        except ParserSyntaxError as e:
-            raise InvalidMarker(str(e)) from e
-
-    def __str__(self) -> str:
-        return _format_marker(self._markers)
-
-    def __repr__(self) -> str:
-        return f"<Marker('{self}')>"
-
-    def __hash__(self) -> int:
-        return hash((self.__class__.__name__, str(self)))
-
-    def __eq__(self, other: Any) -> bool:
-        if not isinstance(other, Marker):
-            return NotImplemented
-
-        return str(self) == str(other)
-
-    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
-        """Evaluate a marker.
-
-        Return the boolean from evaluating the given marker against the
-        environment. environment is an optional argument to override all or
-        part of the determined environment.
-
-        The environment is determined from the current Python process.
-        """
-        current_environment = default_environment()
-        current_environment["extra"] = ""
-        if environment is not None:
-            current_environment.update(environment)
-            # The API used to allow setting extra to None. We need to handle this
-            # case for backwards compatibility.
-            if current_environment["extra"] is None:
-                current_environment["extra"] = ""
-
-        return _evaluate_markers(self._markers, current_environment)
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/metadata.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/metadata.py
deleted file mode 100644
index fb274930799da..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/metadata.py
+++ /dev/null
@@ -1,825 +0,0 @@
-import email.feedparser
-import email.header
-import email.message
-import email.parser
-import email.policy
-import sys
-import typing
-from typing import (
-    Any,
-    Callable,
-    Dict,
-    Generic,
-    List,
-    Optional,
-    Tuple,
-    Type,
-    Union,
-    cast,
-)
-
-from . import requirements, specifiers, utils, version as version_module
-
-T = typing.TypeVar("T")
-if sys.version_info[:2] >= (3, 8):  # pragma: no cover
-    from typing import Literal, TypedDict
-else:  # pragma: no cover
-    if typing.TYPE_CHECKING:
-        from typing_extensions import Literal, TypedDict
-    else:
-        try:
-            from typing_extensions import Literal, TypedDict
-        except ImportError:
-
-            class Literal:
-                def __init_subclass__(*_args, **_kwargs):
-                    pass
-
-            class TypedDict:
-                def __init_subclass__(*_args, **_kwargs):
-                    pass
-
-
-try:
-    ExceptionGroup
-except NameError:  # pragma: no cover
-
-    class ExceptionGroup(Exception):  # noqa: N818
-        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
-
-        If :external:exc:`ExceptionGroup` is already defined by Python itself,
-        that version is used instead.
- """ - - message: str - exceptions: List[Exception] - - def __init__(self, message: str, exceptions: List[Exception]) -> None: - self.message = message - self.exceptions = exceptions - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})" - -else: # pragma: no cover - ExceptionGroup = ExceptionGroup - - -class InvalidMetadata(ValueError): - """A metadata field contains invalid data.""" - - field: str - """The name of the field that contains invalid data.""" - - def __init__(self, field: str, message: str) -> None: - self.field = field - super().__init__(message) - - -# The RawMetadata class attempts to make as few assumptions about the underlying -# serialization formats as possible. The idea is that as long as a serialization -# formats offer some very basic primitives in *some* way then we can support -# serializing to and from that format. -class RawMetadata(TypedDict, total=False): - """A dictionary of raw core metadata. - - Each field in core metadata maps to a key of this dictionary (when data is - provided). The key is lower-case and underscores are used instead of dashes - compared to the equivalent core metadata field. Any core metadata field that - can be specified multiple times or can hold multiple values in a single - field have a key with a plural name. See :class:`Metadata` whose attributes - match the keys of this dictionary. - - Core metadata fields that can be specified multiple times are stored as a - list or dict depending on which is appropriate for the field. Any fields - which hold multiple values in a single field are stored as a list. - - """ - - # Metadata 1.0 - PEP 241 - metadata_version: str - name: str - version: str - platforms: List[str] - summary: str - description: str - keywords: List[str] - home_page: str - author: str - author_email: str - license: str - - # Metadata 1.1 - PEP 314 - supported_platforms: List[str] - download_url: str - classifiers: List[str] - requires: List[str] - provides: List[str] - obsoletes: List[str] - - # Metadata 1.2 - PEP 345 - maintainer: str - maintainer_email: str - requires_dist: List[str] - provides_dist: List[str] - obsoletes_dist: List[str] - requires_python: str - requires_external: List[str] - project_urls: Dict[str, str] - - # Metadata 2.0 - # PEP 426 attempted to completely revamp the metadata format - # but got stuck without ever being able to build consensus on - # it and ultimately ended up withdrawn. - # - # However, a number of tools had started emitting METADATA with - # `2.0` Metadata-Version, so for historical reasons, this version - # was skipped. - - # Metadata 2.1 - PEP 566 - description_content_type: str - provides_extra: List[str] - - # Metadata 2.2 - PEP 643 - dynamic: List[str] - - # Metadata 2.3 - PEP 685 - # No new fields were added in PEP 685, just some edge case were - # tightened up to provide better interoptability. 
-
-
-_STRING_FIELDS = {
-    "author",
-    "author_email",
-    "description",
-    "description_content_type",
-    "download_url",
-    "home_page",
-    "license",
-    "maintainer",
-    "maintainer_email",
-    "metadata_version",
-    "name",
-    "requires_python",
-    "summary",
-    "version",
-}
-
-_LIST_FIELDS = {
-    "classifiers",
-    "dynamic",
-    "obsoletes",
-    "obsoletes_dist",
-    "platforms",
-    "provides",
-    "provides_dist",
-    "provides_extra",
-    "requires",
-    "requires_dist",
-    "requires_external",
-    "supported_platforms",
-}
-
-_DICT_FIELDS = {
-    "project_urls",
-}
-
-
-def _parse_keywords(data: str) -> List[str]:
-    """Split a string of comma-separated keywords into a list of keywords."""
-    return [k.strip() for k in data.split(",")]
-
-
-def _parse_project_urls(data: List[str]) -> Dict[str, str]:
-    """Parse a list of label/URL string pairings separated by a comma."""
-    urls = {}
-    for pair in data:
-        # Our logic is slightly tricky here as we want to try and do
-        # *something* reasonable with malformed data.
-        #
-        # The main thing that we have to worry about is data that does
-        # not have a ',' at all to split the label from the value. There
-        # isn't a singular right answer here, and we will fail validation
-        # later on (if the caller is validating) so it doesn't *really*
-        # matter, but since the missing value has to be an empty str
-        # and our return value is dict[str, str], if we let the key
-        # be the missing value, then they'd have multiple '' values that
-        # overwrite each other in an accumulating dict.
-        #
-        # The other potential issue is that it's possible to have the
-        # same label multiple times in the metadata, with no solid "right"
-        # answer for what to do in that case. As such, we'll do the only
-        # thing we can, which is treat the field as unparseable and add it
-        # to our list of unparsed fields.
-        parts = [p.strip() for p in pair.split(",", 1)]
-        parts.extend([""] * (max(0, 2 - len(parts))))  # Ensure 2 items
-
-        # TODO: The spec doesn't say anything about if the keys should be
-        #       considered case sensitive or not... logically they should
-        #       be case-preserving and case-insensitive, but doing that
-        #       would open up more cases where we might have duplicate
-        #       entries.
-        label, url = parts
-        if label in urls:
-            # The label already exists in our set of urls, so this field
-            # is unparseable, and we can just add the whole thing to our
-            # unparseable data and stop processing it.
-            raise KeyError("duplicate labels in project urls")
-        urls[label] = url
-
-    return urls
-
-
-def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
-    """Get the body of the message."""
-    # If our source is a str, then our caller has managed encodings for us,
-    # and we don't need to deal with it.
-    if isinstance(source, str):
-        payload: str = msg.get_payload()
-        return payload
-    # If our source is a bytes, then we're managing the encoding and we need
-    # to deal with it.
-    else:
-        bpayload: bytes = msg.get_payload(decode=True)
-        try:
-            return bpayload.decode("utf8", "strict")
-        except UnicodeDecodeError:
-            raise ValueError("payload in an invalid encoding")
-
-
-# The various parse_FORMAT functions here are intended to be as lenient as
-# possible in their parsing, while still returning a correctly typed
-# RawMetadata.
-#
-# To aid in this, we also generally want to do as little touching of the
-# data as possible, except where there are possibly some historic holdovers
-# that make valid data awkward to work with.
-# -# While this is a lower level, intermediate format than our ``Metadata`` -# class, some light touch ups can make a massive difference in usability. - -# Map METADATA fields to RawMetadata. -_EMAIL_TO_RAW_MAPPING = { - "author": "author", - "author-email": "author_email", - "classifier": "classifiers", - "description": "description", - "description-content-type": "description_content_type", - "download-url": "download_url", - "dynamic": "dynamic", - "home-page": "home_page", - "keywords": "keywords", - "license": "license", - "maintainer": "maintainer", - "maintainer-email": "maintainer_email", - "metadata-version": "metadata_version", - "name": "name", - "obsoletes": "obsoletes", - "obsoletes-dist": "obsoletes_dist", - "platform": "platforms", - "project-url": "project_urls", - "provides": "provides", - "provides-dist": "provides_dist", - "provides-extra": "provides_extra", - "requires": "requires", - "requires-dist": "requires_dist", - "requires-external": "requires_external", - "requires-python": "requires_python", - "summary": "summary", - "supported-platform": "supported_platforms", - "version": "version", -} -_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()} - - -def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]: - """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``). - - This function returns a two-item tuple of dicts. The first dict is of - recognized fields from the core metadata specification. Fields that can be - parsed and translated into Python's built-in types are converted - appropriately. All other fields are left as-is. Fields that are allowed to - appear multiple times are stored as lists. - - The second dict contains all other fields from the metadata. This includes - any unrecognized fields. It also includes any fields which are expected to - be parsed into a built-in type but were not formatted appropriately. Finally, - any fields that are expected to appear only once but are repeated are - included in this dict. - - """ - raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {} - unparsed: Dict[str, List[str]] = {} - - if isinstance(data, str): - parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data) - else: - parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data) - - # We have to wrap parsed.keys() in a set, because in the case of multiple - # values for a key (a list), the key will appear multiple times in the - # list of keys, but we're avoiding that by using get_all(). - for name in frozenset(parsed.keys()): - # Header names in RFC are case insensitive, so we'll normalize to all - # lower case to make comparisons easier. - name = name.lower() - - # We use get_all() here, even for fields that aren't multiple use, - # because otherwise someone could have e.g. two Name fields, and we - # would just silently ignore it rather than doing something about it. - headers = parsed.get_all(name) or [] - - # The way the email module works when parsing bytes is that it - # unconditionally decodes the bytes as ascii using the surrogateescape - # handler. When you pull that data back out (such as with get_all() ), - # it looks to see if the str has any surrogate escapes, and if it does - # it wraps it in a Header object instead of returning the string. - # - # As such, we'll look for those Header objects, and fix up the encoding. 
- value = [] - # Flag if we have run into any issues processing the headers, thus - # signalling that the data belongs in 'unparsed'. - valid_encoding = True - for h in headers: - # It's unclear if this can return more types than just a Header or - # a str, so we'll just assert here to make sure. - assert isinstance(h, (email.header.Header, str)) - - # If it's a header object, we need to do our little dance to get - # the real data out of it. In cases where there is invalid data - # we're going to end up with mojibake, but there's no obvious, good - # way around that without reimplementing parts of the Header object - # ourselves. - # - # That should be fine since, if mojibacked happens, this key is - # going into the unparsed dict anyways. - if isinstance(h, email.header.Header): - # The Header object stores it's data as chunks, and each chunk - # can be independently encoded, so we'll need to check each - # of them. - chunks: List[Tuple[bytes, Optional[str]]] = [] - for bin, encoding in email.header.decode_header(h): - try: - bin.decode("utf8", "strict") - except UnicodeDecodeError: - # Enable mojibake. - encoding = "latin1" - valid_encoding = False - else: - encoding = "utf8" - chunks.append((bin, encoding)) - - # Turn our chunks back into a Header object, then let that - # Header object do the right thing to turn them into a - # string for us. - value.append(str(email.header.make_header(chunks))) - # This is already a string, so just add it. - else: - value.append(h) - - # We've processed all of our values to get them into a list of str, - # but we may have mojibake data, in which case this is an unparsed - # field. - if not valid_encoding: - unparsed[name] = value - continue - - raw_name = _EMAIL_TO_RAW_MAPPING.get(name) - if raw_name is None: - # This is a bit of a weird situation, we've encountered a key that - # we don't know what it means, so we don't know whether it's meant - # to be a list or not. - # - # Since we can't really tell one way or another, we'll just leave it - # as a list, even though it may be a single item list, because that's - # what makes the most sense for email headers. - unparsed[name] = value - continue - - # If this is one of our string fields, then we'll check to see if our - # value is a list of a single item. If it is then we'll assume that - # it was emitted as a single string, and unwrap the str from inside - # the list. - # - # If it's any other kind of data, then we haven't the faintest clue - # what we should parse it as, and we have to just add it to our list - # of unparsed stuff. - if raw_name in _STRING_FIELDS and len(value) == 1: - raw[raw_name] = value[0] - # If this is one of our list of string fields, then we can just assign - # the value, since email *only* has strings, and our get_all() call - # above ensures that this is a list. - elif raw_name in _LIST_FIELDS: - raw[raw_name] = value - # Special Case: Keywords - # The keywords field is implemented in the metadata spec as a str, - # but it conceptually is a list of strings, and is serialized using - # ", ".join(keywords), so we'll do some light data massaging to turn - # this into what it logically is. 
- elif raw_name == "keywords" and len(value) == 1: - raw[raw_name] = _parse_keywords(value[0]) - # Special Case: Project-URL - # The project urls is implemented in the metadata spec as a list of - # specially-formatted strings that represent a key and a value, which - # is fundamentally a mapping, however the email format doesn't support - # mappings in a sane way, so it was crammed into a list of strings - # instead. - # - # We will do a little light data massaging to turn this into a map as - # it logically should be. - elif raw_name == "project_urls": - try: - raw[raw_name] = _parse_project_urls(value) - except KeyError: - unparsed[name] = value - # Nothing that we've done has managed to parse this, so it'll just - # throw it in our unparseable data and move on. - else: - unparsed[name] = value - - # We need to support getting the Description from the message payload in - # addition to getting it from the the headers. This does mean, though, there - # is the possibility of it being set both ways, in which case we put both - # in 'unparsed' since we don't know which is right. - try: - payload = _get_payload(parsed, data) - except ValueError: - unparsed.setdefault("description", []).append( - parsed.get_payload(decode=isinstance(data, bytes)) - ) - else: - if payload: - # Check to see if we've already got a description, if so then both - # it, and this body move to unparseable. - if "description" in raw: - description_header = cast(str, raw.pop("description")) - unparsed.setdefault("description", []).extend( - [description_header, payload] - ) - elif "description" in unparsed: - unparsed["description"].append(payload) - else: - raw["description"] = payload - - # We need to cast our `raw` to a metadata, because a TypedDict only support - # literal key names, but we're computing our key names on purpose, but the - # way this function is implemented, our `TypedDict` can only have valid key - # names. - return cast(RawMetadata, raw), unparsed - - -_NOT_FOUND = object() - - -# Keep the two values in sync. -_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"] -_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"] - -_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"]) - - -class _Validator(Generic[T]): - """Validate a metadata field. - - All _process_*() methods correspond to a core metadata field. The method is - called with the field's raw value. If the raw value is valid it is returned - in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field). - If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause - as appropriate). - """ - - name: str - raw_name: str - added: _MetadataVersion - - def __init__( - self, - *, - added: _MetadataVersion = "1.0", - ) -> None: - self.added = added - - def __set_name__(self, _owner: "Metadata", name: str) -> None: - self.name = name - self.raw_name = _RAW_TO_EMAIL_MAPPING[name] - - def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T: - # With Python 3.8, the caching can be replaced with functools.cached_property(). - # No need to check the cache as attribute lookup will resolve into the - # instance's __dict__ before __get__ is called. - cache = instance.__dict__ - value = instance._raw.get(self.name) - - # To make the _process_* methods easier, we'll check if the value is None - # and if this field is NOT a required attribute, and if both of those - # things are true, we'll skip the the converter. 
This will mean that the - # converters never have to deal with the None union. - if self.name in _REQUIRED_ATTRS or value is not None: - try: - converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}") - except AttributeError: - pass - else: - value = converter(value) - - cache[self.name] = value - try: - del instance._raw[self.name] # type: ignore[misc] - except KeyError: - pass - - return cast(T, value) - - def _invalid_metadata( - self, msg: str, cause: Optional[Exception] = None - ) -> InvalidMetadata: - exc = InvalidMetadata( - self.raw_name, msg.format_map({"field": repr(self.raw_name)}) - ) - exc.__cause__ = cause - return exc - - def _process_metadata_version(self, value: str) -> _MetadataVersion: - # Implicitly makes Metadata-Version required. - if value not in _VALID_METADATA_VERSIONS: - raise self._invalid_metadata(f"{value!r} is not a valid metadata version") - return cast(_MetadataVersion, value) - - def _process_name(self, value: str) -> str: - if not value: - raise self._invalid_metadata("{field} is a required field") - # Validate the name as a side-effect. - try: - utils.canonicalize_name(value, validate=True) - except utils.InvalidName as exc: - raise self._invalid_metadata( - f"{value!r} is invalid for {{field}}", cause=exc - ) - else: - return value - - def _process_version(self, value: str) -> version_module.Version: - if not value: - raise self._invalid_metadata("{field} is a required field") - try: - return version_module.parse(value) - except version_module.InvalidVersion as exc: - raise self._invalid_metadata( - f"{value!r} is invalid for {{field}}", cause=exc - ) - - def _process_summary(self, value: str) -> str: - """Check the field contains no newlines.""" - if "\n" in value: - raise self._invalid_metadata("{field} must be a single line") - return value - - def _process_description_content_type(self, value: str) -> str: - content_types = {"text/plain", "text/x-rst", "text/markdown"} - message = email.message.EmailMessage() - message["content-type"] = value - - content_type, parameters = ( - # Defaults to `text/plain` if parsing failed. - message.get_content_type().lower(), - message["content-type"].params, - ) - # Check if content-type is valid or defaulted to `text/plain` and thus was - # not parseable. - if content_type not in content_types or content_type not in value.lower(): - raise self._invalid_metadata( - f"{{field}} must be one of {list(content_types)}, not {value!r}" - ) - - charset = parameters.get("charset", "UTF-8") - if charset != "UTF-8": - raise self._invalid_metadata( - f"{{field}} can only specify the UTF-8 charset, not {list(charset)}" - ) - - markdown_variants = {"GFM", "CommonMark"} - variant = parameters.get("variant", "GFM") # Use an acceptable default. 
- if content_type == "text/markdown" and variant not in markdown_variants: - raise self._invalid_metadata( - f"valid Markdown variants for {{field}} are {list(markdown_variants)}, " - f"not {variant!r}", - ) - return value - - def _process_dynamic(self, value: List[str]) -> List[str]: - for dynamic_field in map(str.lower, value): - if dynamic_field in {"name", "version", "metadata-version"}: - raise self._invalid_metadata( - f"{value!r} is not allowed as a dynamic field" - ) - elif dynamic_field not in _EMAIL_TO_RAW_MAPPING: - raise self._invalid_metadata(f"{value!r} is not a valid dynamic field") - return list(map(str.lower, value)) - - def _process_provides_extra( - self, - value: List[str], - ) -> List[utils.NormalizedName]: - normalized_names = [] - try: - for name in value: - normalized_names.append(utils.canonicalize_name(name, validate=True)) - except utils.InvalidName as exc: - raise self._invalid_metadata( - f"{name!r} is invalid for {{field}}", cause=exc - ) - else: - return normalized_names - - def _process_requires_python(self, value: str) -> specifiers.SpecifierSet: - try: - return specifiers.SpecifierSet(value) - except specifiers.InvalidSpecifier as exc: - raise self._invalid_metadata( - f"{value!r} is invalid for {{field}}", cause=exc - ) - - def _process_requires_dist( - self, - value: List[str], - ) -> List[requirements.Requirement]: - reqs = [] - try: - for req in value: - reqs.append(requirements.Requirement(req)) - except requirements.InvalidRequirement as exc: - raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc) - else: - return reqs - - -class Metadata: - """Representation of distribution metadata. - - Compared to :class:`RawMetadata`, this class provides objects representing - metadata fields instead of only using built-in types. Any invalid metadata - will cause :exc:`InvalidMetadata` to be raised (with a - :py:attr:`~BaseException.__cause__` attribute as appropriate). - """ - - _raw: RawMetadata - - @classmethod - def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata": - """Create an instance from :class:`RawMetadata`. - - If *validate* is true, all metadata will be validated. All exceptions - related to validation will be gathered and raised as an :class:`ExceptionGroup`. - """ - ins = cls() - ins._raw = data.copy() # Mutations occur due to caching enriched values. - - if validate: - exceptions: List[Exception] = [] - try: - metadata_version = ins.metadata_version - metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version) - except InvalidMetadata as metadata_version_exc: - exceptions.append(metadata_version_exc) - metadata_version = None - - # Make sure to check for the fields that are present, the required - # fields (so their absence can be reported). - fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS - # Remove fields that have already been checked. - fields_to_check -= {"metadata_version"} - - for key in fields_to_check: - try: - if metadata_version: - # Can't use getattr() as that triggers descriptor protocol which - # will fail due to no value for the instance argument. 
- try: - field_metadata_version = cls.__dict__[key].added - except KeyError: - exc = InvalidMetadata(key, f"unrecognized field: {key!r}") - exceptions.append(exc) - continue - field_age = _VALID_METADATA_VERSIONS.index( - field_metadata_version - ) - if field_age > metadata_age: - field = _RAW_TO_EMAIL_MAPPING[key] - exc = InvalidMetadata( - field, - "{field} introduced in metadata version " - "{field_metadata_version}, not {metadata_version}", - ) - exceptions.append(exc) - continue - getattr(ins, key) - except InvalidMetadata as exc: - exceptions.append(exc) - - if exceptions: - raise ExceptionGroup("invalid metadata", exceptions) - - return ins - - @classmethod - def from_email( - cls, data: Union[bytes, str], *, validate: bool = True - ) -> "Metadata": - """Parse metadata from email headers. - - If *validate* is true, the metadata will be validated. All exceptions - related to validation will be gathered and raised as an :class:`ExceptionGroup`. - """ - raw, unparsed = parse_email(data) - - if validate: - exceptions: list[Exception] = [] - for unparsed_key in unparsed: - if unparsed_key in _EMAIL_TO_RAW_MAPPING: - message = f"{unparsed_key!r} has invalid data" - else: - message = f"unrecognized field: {unparsed_key!r}" - exceptions.append(InvalidMetadata(unparsed_key, message)) - - if exceptions: - raise ExceptionGroup("unparsed", exceptions) - - try: - return cls.from_raw(raw, validate=validate) - except ExceptionGroup as exc_group: - raise ExceptionGroup( - "invalid or unparsed metadata", exc_group.exceptions - ) from None - - metadata_version: _Validator[_MetadataVersion] = _Validator() - """:external:ref:`core-metadata-metadata-version` - (required; validated to be a valid metadata version)""" - name: _Validator[str] = _Validator() - """:external:ref:`core-metadata-name` - (required; validated using :func:`~packaging.utils.canonicalize_name` and its - *validate* parameter)""" - version: _Validator[version_module.Version] = _Validator() - """:external:ref:`core-metadata-version` (required)""" - dynamic: _Validator[Optional[List[str]]] = _Validator( - added="2.2", - ) - """:external:ref:`core-metadata-dynamic` - (validated against core metadata field names and lowercased)""" - platforms: _Validator[Optional[List[str]]] = _Validator() - """:external:ref:`core-metadata-platform`""" - supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """:external:ref:`core-metadata-supported-platform`""" - summary: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-summary` (validated to contain no newlines)""" - description: _Validator[Optional[str]] = _Validator() # TODO 2.1: can be in body - """:external:ref:`core-metadata-description`""" - description_content_type: _Validator[Optional[str]] = _Validator(added="2.1") - """:external:ref:`core-metadata-description-content-type` (validated)""" - keywords: _Validator[Optional[List[str]]] = _Validator() - """:external:ref:`core-metadata-keywords`""" - home_page: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-home-page`""" - download_url: _Validator[Optional[str]] = _Validator(added="1.1") - """:external:ref:`core-metadata-download-url`""" - author: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-author`""" - author_email: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-author-email`""" - maintainer: _Validator[Optional[str]] = _Validator(added="1.2") - """:external:ref:`core-metadata-maintainer`""" - maintainer_email: 
_Validator[Optional[str]] = _Validator(added="1.2") - """:external:ref:`core-metadata-maintainer-email`""" - license: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-license`""" - classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """:external:ref:`core-metadata-classifier`""" - requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator( - added="1.2" - ) - """:external:ref:`core-metadata-requires-dist`""" - requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator( - added="1.2" - ) - """:external:ref:`core-metadata-requires-python`""" - # Because `Requires-External` allows for non-PEP 440 version specifiers, we - # don't do any processing on the values. - requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2") - """:external:ref:`core-metadata-requires-external`""" - project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2") - """:external:ref:`core-metadata-project-url`""" - # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation - # regardless of metadata version. - provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator( - added="2.1", - ) - """:external:ref:`core-metadata-provides-extra`""" - provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2") - """:external:ref:`core-metadata-provides-dist`""" - obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2") - """:external:ref:`core-metadata-obsoletes-dist`""" - requires: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """``Requires`` (deprecated)""" - provides: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """``Provides`` (deprecated)""" - obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """``Obsoletes`` (deprecated)""" diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/py.typed b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/py.typed deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/requirements.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/requirements.py deleted file mode 100644 index 0c00eba331b73..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/requirements.py +++ /dev/null @@ -1,90 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from typing import Any, Iterator, Optional, Set - -from ._parser import parse_requirement as _parse_requirement -from ._tokenizer import ParserSyntaxError -from .markers import Marker, _normalize_extra_values -from .specifiers import SpecifierSet -from .utils import canonicalize_name - - -class InvalidRequirement(ValueError): - """ - An invalid requirement was found, users should refer to PEP 508. - """ - - -class Requirement: - """Parse a requirement. - - Parse a given requirement string into its parts, such as name, specifier, - URL, and extras. Raises InvalidRequirement on a badly-formed requirement - string. - """ - - # TODO: Can we test whether something is contained within a requirement? - # If so how do we do that? Do we need to test against the _name_ of - # the thing as well as the version? What about the markers? - # TODO: Can we normalize the name and extra name? 
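(Aside, not part of the diff: an illustrative sketch of the Requirement API being deleted above, assuming the standalone `packaging` distribution is installed. The requirement string is a hypothetical example.)

    from packaging.requirements import Requirement

    # Requirement parses a PEP 508 string into name/extras/specifier/marker,
    # exactly the attributes assigned in __init__ below.
    req = Requirement('requests[security]>=2.8.1; python_version > "3.8"')
    print(req.name)       # requests
    print(req.extras)     # {'security'}
    print(req.specifier)  # >=2.8.1
    print(req.marker)     # python_version > "3.8"
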
-
-    def __init__(self, requirement_string: str) -> None:
-        try:
-            parsed = _parse_requirement(requirement_string)
-        except ParserSyntaxError as e:
-            raise InvalidRequirement(str(e)) from e
-
-        self.name: str = parsed.name
-        self.url: Optional[str] = parsed.url or None
-        self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
-        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
-        self.marker: Optional[Marker] = None
-        if parsed.marker is not None:
-            self.marker = Marker.__new__(Marker)
-            self.marker._markers = _normalize_extra_values(parsed.marker)
-
-    def _iter_parts(self, name: str) -> Iterator[str]:
-        yield name
-
-        if self.extras:
-            formatted_extras = ",".join(sorted(self.extras))
-            yield f"[{formatted_extras}]"
-
-        if self.specifier:
-            yield str(self.specifier)
-
-        if self.url:
-            yield f"@ {self.url}"
-            if self.marker:
-                yield " "
-
-        if self.marker:
-            yield f"; {self.marker}"
-
-    def __str__(self) -> str:
-        return "".join(self._iter_parts(self.name))
-
-    def __repr__(self) -> str:
-        return f"<Requirement('{self}')>"
-
-    def __hash__(self) -> int:
-        return hash(
-            (
-                self.__class__.__name__,
-                *self._iter_parts(canonicalize_name(self.name)),
-            )
-        )
-
-    def __eq__(self, other: Any) -> bool:
-        if not isinstance(other, Requirement):
-            return NotImplemented
-
-        return (
-            canonicalize_name(self.name) == canonicalize_name(other.name)
-            and self.extras == other.extras
-            and self.specifier == other.specifier
-            and self.url == other.url
-            and self.marker == other.marker
-        )
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/specifiers.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/specifiers.py
deleted file mode 100644
index 94448327ae2d4..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/specifiers.py
+++ /dev/null
@@ -1,1030 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-"""
-.. testsetup::
-
-    from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
-    from packaging.version import Version
-"""
-
-import abc
-import itertools
-import re
-from typing import (
-    Callable,
-    Iterable,
-    Iterator,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    TypeVar,
-    Union,
-)
-
-from .utils import canonicalize_version
-from .version import Version
-
-UnparsedVersion = Union[Version, str]
-UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
-CallableOperator = Callable[[Version, str], bool]
-
-
-def _coerce_version(version: UnparsedVersion) -> Version:
-    if not isinstance(version, Version):
-        version = Version(version)
-    return version
-
-
-class InvalidSpecifier(ValueError):
-    """
-    Raised when attempting to create a :class:`Specifier` with a specifier
-    string that is invalid.
-
-    >>> Specifier("lolwat")
-    Traceback (most recent call last):
-        ...
-    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
-    """
-
-
-class BaseSpecifier(metaclass=abc.ABCMeta):
-    @abc.abstractmethod
-    def __str__(self) -> str:
-        """
-        Returns the str representation of this Specifier-like object. This
-        should be representative of the Specifier itself.
-        """
-
-    @abc.abstractmethod
-    def __hash__(self) -> int:
-        """
-        Returns a hash value for this Specifier-like object.
-        """
-
-    @abc.abstractmethod
-    def __eq__(self, other: object) -> bool:
-        """
-        Returns a boolean representing whether or not the two Specifier-like
-        objects are equal.
-
-        :param other: The other object to check against.
-        """
-
-    @property
-    @abc.abstractmethod
-    def prereleases(self) -> Optional[bool]:
-        """Whether or not pre-releases as a whole are allowed.
-
-        This can be set to either ``True`` or ``False`` to explicitly enable or disable
-        prereleases or it can be set to ``None`` (the default) to use default semantics.
-        """
-
-    @prereleases.setter
-    def prereleases(self, value: bool) -> None:
-        """Setter for :attr:`prereleases`.
-
-        :param value: The value to set.
-        """
-
-    @abc.abstractmethod
-    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
-        """
-        Determines if the given item is contained within this specifier.
-        """
-
-    @abc.abstractmethod
-    def filter(
-        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
-    ) -> Iterator[UnparsedVersionVar]:
-        """
-        Takes an iterable of items and filters them so that only items which
-        are contained within this specifier are allowed in it.
-        """
-
-
-class Specifier(BaseSpecifier):
-    """This class abstracts handling of version specifiers.
-
-    .. tip::
-
-        It is generally not required to instantiate this manually. You should instead
-        prefer to work with :class:`SpecifierSet` instead, which can parse
-        comma-separated version specifiers (which is what package metadata contains).
-    """
-
-    _operator_regex_str = r"""
-        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
-        """
-    _version_regex_str = r"""
-        (?P<version>
-            (?:
-                # The identity operators allow for an escape hatch that will
-                # do an exact string match of the version you wish to install.
-                # This will not be parsed by PEP 440 and we cannot determine
-                # any semantic meaning from it. This operator is discouraged
-                # but included entirely as an escape hatch.
-                (?<====)  # Only match for the identity operator
-                \s*
-                [^\s;)]*  # The arbitrary version can be just about anything,
-                          # we match everything except for whitespace, a
-                          # semi-colon for marker support, and a closing paren
-                          # since versions can be enclosed in them.
-            )
-            |
-            (?:
-                # The (non)equality operators allow for wild card and local
-                # versions to be specified so we have to define these two
-                # operators separately to enable that.
-                (?<===|!=)  # Only match for equals and not equals
-
-                \s*
-                v?
-                (?:[0-9]+!)?  # epoch
-                [0-9]+(?:\.[0-9]+)*  # release
-
-                # You cannot use a wild card and a pre-release, post-release, a dev or
-                # local version together so group them with a | and make them optional.
-                (?:
-                    \.\*  # Wild card syntax of .*
-                    |
-                    (?:  # pre release
-                        [-_\.]?
-                        (alpha|beta|preview|pre|a|b|c|rc)
-                        [-_\.]?
-                        [0-9]*
-                    )?
-                    (?:  # post release
-                        (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
-                    )?
-                    (?:[-_\.]?dev[-_\.]?[0-9]*)?  # dev release
-                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)?  # local
-                )?
-            )
-            |
-            (?:
-                # The compatible operator requires at least two digits in the
-                # release segment.
-                (?<=~=)  # Only match for the compatible operator
-
-                \s*
-                v?
-                (?:[0-9]+!)?  # epoch
-                [0-9]+(?:\.[0-9]+)+  # release  (We have a + instead of a *)
-
-                (?:  # pre release
-                    [-_\.]?
-                    (alpha|beta|preview|pre|a|b|c|rc)
-                    [-_\.]?
-                    [0-9]*
-                )?
-                (?:  # post release
-                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
-                )?
-                (?:[-_\.]?dev[-_\.]?[0-9]*)?  # dev release
-            )
-            |
-            (?:
-                # All other operators only allow a sub set of what the
-                # (non)equality operators do. Specifically they do not allow
-                # local versions to be specified nor do they allow the prefix
-                # matching wild cards.
-                (?<!==|!=|~=)  # We have special cases for these
-                               # operators so we want to make sure they
-                               # don't match here.
-
-                \s*
-                v?
-                (?:[0-9]+!)?  # epoch
-                [0-9]+(?:\.[0-9]+)*  # release
-                (?:  # pre release
-                    [-_\.]?
-                    (alpha|beta|preview|pre|a|b|c|rc)
-                    [-_\.]?
-                    [0-9]*
-                )?
-                (?:  # post release
-                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
-                )?
-                (?:[-_\.]?dev[-_\.]?[0-9]*)?  # dev release
-            )
-        )
-        """
-
-    _regex = re.compile(
-        r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
-        re.VERBOSE | re.IGNORECASE,
-    )
-
-    _operators = {
-        "~=": "compatible",
-        "==": "equal",
-        "!=": "not_equal",
-        "<=": "less_than_equal",
-        ">=": "greater_than_equal",
-        "<": "less_than",
-        ">": "greater_than",
-        "===": "arbitrary",
-    }
-
-    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
-        """Initialize a Specifier instance.
-
-        :param spec:
-            The string representation of a specifier which will be parsed and
-            normalized before use.
-        :param prereleases:
-            This tells the specifier if it should accept prerelease versions if
-            applicable or not. The default of ``None`` will autodetect it from the
-            given specifiers.
-        :raises InvalidSpecifier:
-            If the given specifier is invalid (i.e. bad syntax).
-        """
-        match = self._regex.search(spec)
-        if not match:
-            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
-
-        self._spec: Tuple[str, str] = (
-            match.group("operator").strip(),
-            match.group("version").strip(),
-        )
-
-        # Store whether or not this Specifier should accept prereleases
-        self._prereleases = prereleases
-
-    # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
-    @property  # type: ignore[override]
-    def prereleases(self) -> bool:
-        # If there is an explicit prereleases set for this, then we'll just
-        # blindly use that.
-        if self._prereleases is not None:
-            return self._prereleases
-
-        # Look at all of our specifiers and determine if they are inclusive
-        # operators, and if they are if they are including an explicit
-        # prerelease.
-        operator, version = self._spec
-        if operator in ["==", ">=", "<=", "~=", "==="]:
-            # The == specifier can include a trailing .*, if it does we
-            # want to remove before parsing.
-            if operator == "==" and version.endswith(".*"):
-                version = version[:-2]
-
-            # Parse the version, and if it is a pre-release than this
-            # specifier allows pre-releases.
-            if Version(version).is_prerelease:
-                return True
-
-        return False
-
-    @prereleases.setter
-    def prereleases(self, value: bool) -> None:
-        self._prereleases = value
-
-    @property
-    def operator(self) -> str:
-        """The operator of this specifier.
-
-        >>> Specifier("==1.2.3").operator
-        '=='
-        """
-        return self._spec[0]
-
-    @property
-    def version(self) -> str:
-        """The version of this specifier.
-
-        >>> Specifier("==1.2.3").version
-        '1.2.3'
-        """
-        return self._spec[1]
-
-    def __repr__(self) -> str:
-        """A representation of the Specifier that shows all internal state.
-
-        >>> Specifier('>=1.0.0')
-        <Specifier('>=1.0.0')>
-        >>> Specifier('>=1.0.0', prereleases=False)
-        <Specifier('>=1.0.0', prereleases=False)>
-        >>> Specifier('>=1.0.0', prereleases=True)
-        <Specifier('>=1.0.0', prereleases=True)>
-        """
-        pre = (
-            f", prereleases={self.prereleases!r}"
-            if self._prereleases is not None
-            else ""
-        )
-
-        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
-
-    def __str__(self) -> str:
-        """A string representation of the Specifier that can be round-tripped.
-
-        >>> str(Specifier('>=1.0.0'))
-        '>=1.0.0'
-        >>> str(Specifier('>=1.0.0', prereleases=False))
-        '>=1.0.0'
-        """
-        return "{}{}".format(*self._spec)
-
-    @property
-    def _canonical_spec(self) -> Tuple[str, str]:
-        canonical_version = canonicalize_version(
-            self._spec[1],
-            strip_trailing_zero=(self._spec[0] != "~="),
-        )
-        return self._spec[0], canonical_version
-
-    def __hash__(self) -> int:
-        return hash(self._canonical_spec)
-
-    def __eq__(self, other: object) -> bool:
-        """Whether or not the two Specifier-like objects are equal.
-
-        :param other: The other object to check against.
-
-        The value of :attr:`prereleases` is ignored.
- - >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0") - True - >>> (Specifier("==1.2.3", prereleases=False) == - ... Specifier("==1.2.3", prereleases=True)) - True - >>> Specifier("==1.2.3") == "==1.2.3" - True - >>> Specifier("==1.2.3") == Specifier("==1.2.4") - False - >>> Specifier("==1.2.3") == Specifier("~=1.2.3") - False - """ - if isinstance(other, str): - try: - other = self.__class__(str(other)) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._canonical_spec == other._canonical_spec - - def _get_operator(self, op: str) -> CallableOperator: - operator_callable: CallableOperator = getattr( - self, f"_compare_{self._operators[op]}" - ) - return operator_callable - - def _compare_compatible(self, prospective: Version, spec: str) -> bool: - - # Compatible releases have an equivalent combination of >= and ==. That - # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to - # implement this in terms of the other specifiers instead of - # implementing it ourselves. The only thing we need to do is construct - # the other specifiers. - - # We want everything but the last item in the version, but we want to - # ignore suffix segments. - prefix = _version_join( - list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] - ) - - # Add the prefix notation to the end of our string - prefix += ".*" - - return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( - prospective, prefix - ) - - def _compare_equal(self, prospective: Version, spec: str) -> bool: - - # We need special logic to handle prefix matching - if spec.endswith(".*"): - # In the case of prefix matching we want to ignore local segment. - normalized_prospective = canonicalize_version( - prospective.public, strip_trailing_zero=False - ) - # Get the normalized version string ignoring the trailing .* - normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False) - # Split the spec out by bangs and dots, and pretend that there is - # an implicit dot in between a release segment and a pre-release segment. - split_spec = _version_split(normalized_spec) - - # Split the prospective version out by bangs and dots, and pretend - # that there is an implicit dot in between a release segment and - # a pre-release segment. - split_prospective = _version_split(normalized_prospective) - - # 0-pad the prospective version before shortening it to get the correct - # shortened version. - padded_prospective, _ = _pad_version(split_prospective, split_spec) - - # Shorten the prospective version to be the same length as the spec - # so that we can determine if the specifier is a prefix of the - # prospective version or not. - shortened_prospective = padded_prospective[: len(split_spec)] - - return shortened_prospective == split_spec - else: - # Convert our spec string into a Version - spec_version = Version(spec) - - # If the specifier does not have a local segment, then we want to - # act as if the prospective version also does not have a local - # segment. 
- if not spec_version.local: - prospective = Version(prospective.public) - - return prospective == spec_version - - def _compare_not_equal(self, prospective: Version, spec: str) -> bool: - return not self._compare_equal(prospective, spec) - - def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: - - # NB: Local version identifiers are NOT permitted in the version - # specifier, so local version labels can be universally removed from - # the prospective version. - return Version(prospective.public) <= Version(spec) - - def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool: - - # NB: Local version identifiers are NOT permitted in the version - # specifier, so local version labels can be universally removed from - # the prospective version. - return Version(prospective.public) >= Version(spec) - - def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: - - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec_str) - - # Check to see if the prospective version is less than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective < spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a pre-release version, that we do not accept pre-release - # versions for the version mentioned in the specifier (e.g. <3.1 should - # not match 3.1.dev0, but should match 3.0.dev0). - if not spec.is_prerelease and prospective.is_prerelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # less than the spec version *and* it's not a pre-release of the same - # version in the spec. - return True - - def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: - - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec_str) - - # Check to see if the prospective version is greater than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective > spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a post-release version, that we do not accept - # post-release versions for the version mentioned in the specifier - # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). - if not spec.is_postrelease and prospective.is_postrelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # Ensure that we do not allow a local version of the version mentioned - # in the specifier, which is technically greater than, to match. - if prospective.local is not None: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # greater than the spec version *and* it's not a pre-release of the - # same version in the spec. - return True - - def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: - return str(prospective).lower() == str(spec).lower() - - def __contains__(self, item: Union[str, Version]) -> bool: - """Return whether or not the item is contained in this specifier. - - :param item: The item to check for. 
- - This is used for the ``in`` operator and behaves the same as - :meth:`contains` with no ``prereleases`` argument passed. - - >>> "1.2.3" in Specifier(">=1.2.3") - True - >>> Version("1.2.3") in Specifier(">=1.2.3") - True - >>> "1.0.0" in Specifier(">=1.2.3") - False - >>> "1.3.0a1" in Specifier(">=1.2.3") - False - >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True) - True - """ - return self.contains(item) - - def contains( - self, item: UnparsedVersion, prereleases: Optional[bool] = None - ) -> bool: - """Return whether or not the item is contained in this specifier. - - :param item: - The item to check for, which can be a version string or a - :class:`Version` instance. - :param prereleases: - Whether or not to match prereleases with this Specifier. If set to - ``None`` (the default), it uses :attr:`prereleases` to determine - whether or not prereleases are allowed. - - >>> Specifier(">=1.2.3").contains("1.2.3") - True - >>> Specifier(">=1.2.3").contains(Version("1.2.3")) - True - >>> Specifier(">=1.2.3").contains("1.0.0") - False - >>> Specifier(">=1.2.3").contains("1.3.0a1") - False - >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1") - True - >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True) - True - """ - - # Determine if prereleases are to be allowed or not. - if prereleases is None: - prereleases = self.prereleases - - # Normalize item to a Version, this allows us to have a shortcut for - # "2.0" in Specifier(">=2") - normalized_item = _coerce_version(item) - - # Determine if we should be supporting prereleases in this specifier - # or not, if we do not support prereleases than we can short circuit - # logic if this version is a prereleases. - if normalized_item.is_prerelease and not prereleases: - return False - - # Actually do the comparison to determine if this item is contained - # within this Specifier or not. - operator_callable: CallableOperator = self._get_operator(self.operator) - return operator_callable(normalized_item, self.version) - - def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None - ) -> Iterator[UnparsedVersionVar]: - """Filter items in the given iterable, that match the specifier. - - :param iterable: - An iterable that can contain version strings and :class:`Version` instances. - The items in the iterable will be filtered according to the specifier. - :param prereleases: - Whether or not to allow prereleases in the returned iterator. If set to - ``None`` (the default), it will be intelligently decide whether to allow - prereleases or not (based on the :attr:`prereleases` attribute, and - whether the only versions matching are prereleases). - - This method is smarter than just ``filter(Specifier().contains, [...])`` - because it implements the rule from :pep:`440` that a prerelease item - SHOULD be accepted if no other versions match the given specifier. 
-
-        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
-        ['1.3']
-        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
-        ['1.2.3', '1.3', <Version('1.4')>]
-        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
-        ['1.5a1']
-        >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
-        ['1.3', '1.5a1']
-        >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
-        ['1.3', '1.5a1']
-        """
-
-        yielded = False
-        found_prereleases = []
-
-        kw = {"prereleases": prereleases if prereleases is not None else True}
-
-        # Attempt to iterate over all the values in the iterable and if any of
-        # them match, yield them.
-        for version in iterable:
-            parsed_version = _coerce_version(version)
-
-            if self.contains(parsed_version, **kw):
-                # If our version is a prerelease, and we were not set to allow
-                # prereleases, then we'll store it for later in case nothing
-                # else matches this specifier.
-                if parsed_version.is_prerelease and not (
-                    prereleases or self.prereleases
-                ):
-                    found_prereleases.append(version)
-                # Either this is not a prerelease, or we should have been
-                # accepting prereleases from the beginning.
-                else:
-                    yielded = True
-                    yield version
-
-        # Now that we've iterated over everything, determine if we've yielded
-        # any values, and if we have not and we have any prereleases stored up
-        # then we will go ahead and yield the prereleases.
-        if not yielded and found_prereleases:
-            for version in found_prereleases:
-                yield version
-
-
-_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
-
-
-def _version_split(version: str) -> List[str]:
-    """Split version into components.
-
-    The split components are intended for version comparison. The logic does
-    not attempt to retain the original version string, so joining the
-    components back with :func:`_version_join` may not produce the original
-    version string.
-    """
-    result: List[str] = []
-
-    epoch, _, rest = version.rpartition("!")
-    result.append(epoch or "0")
-
-    for item in rest.split("."):
-        match = _prefix_regex.search(item)
-        if match:
-            result.extend(match.groups())
-        else:
-            result.append(item)
-    return result
-
-
-def _version_join(components: List[str]) -> str:
-    """Join split version components into a version string.
-
-    This function assumes the input came from :func:`_version_split`, where the
-    first component must be the epoch (either empty or numeric), and all other
-    components numeric.
-    """
-    epoch, *rest = components
-    return f"{epoch}!{'.'.join(rest)}"
-
-
-def _is_not_suffix(segment: str) -> bool:
-    return not any(
-        segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
-    )
-
-
-def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
-    left_split, right_split = [], []
-
-    # Get the release segment of our versions
-    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
-    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
-
-    # Get the rest of our versions
-    left_split.append(left[len(left_split[0]) :])
-    right_split.append(right[len(right_split[0]) :])
-
-    # Insert our padding
-    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
-    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
-
-    return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
-
-
-class SpecifierSet(BaseSpecifier):
-    """This class abstracts handling of a set of version specifiers.
-
-    It can be passed a single specifier (``>=3.0``), a comma-separated list of
-    specifiers (``>=3.0,!=3.1``), or no specifier at all.
-    """
-
-    def __init__(
-        self, specifiers: str = "", prereleases: Optional[bool] = None
-    ) -> None:
-        """Initialize a SpecifierSet instance.
-
-        :param specifiers:
-            The string representation of a specifier or a comma-separated list of
-            specifiers which will be parsed and normalized before use.
-        :param prereleases:
-            This tells the SpecifierSet if it should accept prerelease versions if
-            applicable or not. The default of ``None`` will autodetect it from the
-            given specifiers.
-
-        :raises InvalidSpecifier:
-            If the given ``specifiers`` are not parseable than this exception will be
-            raised.
-        """
-
-        # Split on `,` to break each individual specifier into it's own item, and
-        # strip each item to remove leading/trailing whitespace.
-        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
-
-        # Parsed each individual specifier, attempting first to make it a
-        # Specifier.
-        parsed: Set[Specifier] = set()
-        for specifier in split_specifiers:
-            parsed.add(Specifier(specifier))
-
-        # Turn our parsed specifiers into a frozen set and save them for later.
-        self._specs = frozenset(parsed)
-
-        # Store our prereleases value so we can use it later to determine if
-        # we accept prereleases or not.
-        self._prereleases = prereleases
-
-    @property
-    def prereleases(self) -> Optional[bool]:
-        # If we have been given an explicit prerelease modifier, then we'll
-        # pass that through here.
-        if self._prereleases is not None:
-            return self._prereleases
-
-        # If we don't have any specifiers, and we don't have a forced value,
-        # then we'll just return None since we don't know if this should have
-        # pre-releases or not.
-        if not self._specs:
-            return None
-
-        # Otherwise we'll see if any of the given specifiers accept
-        # prereleases, if any of them do we'll return True, otherwise False.
-        return any(s.prereleases for s in self._specs)
-
-    @prereleases.setter
-    def prereleases(self, value: bool) -> None:
-        self._prereleases = value
-
-    def __repr__(self) -> str:
-        """A representation of the specifier set that shows all internal state.
-
-        Note that the ordering of the individual specifiers within the set may not
-        match the input string.
-
-        >>> SpecifierSet('>=1.0.0,!=2.0.0')
-        <SpecifierSet('!=2.0.0,>=1.0.0')>
-        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
-        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
-        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
-        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
-        """
-        pre = (
-            f", prereleases={self.prereleases!r}"
-            if self._prereleases is not None
-            else ""
-        )
-
-        return f"<SpecifierSet({str(self)!r}{pre})>"
-
-    def __str__(self) -> str:
-        """A string representation of the specifier set that can be round-tripped.
-
-        Note that the ordering of the individual specifiers within the set may not
-        match the input string.
-
-        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
-        '!=1.0.1,>=1.0.0'
-        >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
-        '!=1.0.1,>=1.0.0'
-        """
-        return ",".join(sorted(str(s) for s in self._specs))
-
-    def __hash__(self) -> int:
-        return hash(self._specs)
-
-    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
-        """Return a SpecifierSet which is a combination of the two sets.
-
-        :param other: The other object to combine with.
-
-        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
-        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
-        >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
-        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
-        """
-        if isinstance(other, str):
-            other = SpecifierSet(other)
-        elif not isinstance(other, SpecifierSet):
-            return NotImplemented
-
-        specifier = SpecifierSet()
-        specifier._specs = frozenset(self._specs | other._specs)
-
-        if self._prereleases is None and other._prereleases is not None:
-            specifier._prereleases = other._prereleases
-        elif self._prereleases is not None and other._prereleases is None:
-            specifier._prereleases = self._prereleases
-        elif self._prereleases == other._prereleases:
-            specifier._prereleases = self._prereleases
-        else:
-            raise ValueError(
-                "Cannot combine SpecifierSets with True and False prerelease "
-                "overrides."
-            )
-
-        return specifier
-
-    def __eq__(self, other: object) -> bool:
-        """Whether or not the two SpecifierSet-like objects are equal.
-
-        :param other: The other object to check against.
-
-        The value of :attr:`prereleases` is ignored.
-
-        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
-        True
-        >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
-        ...  SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
-        True
-        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
-        True
-        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
-        False
-        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
-        False
-        """
-        if isinstance(other, (str, Specifier)):
-            other = SpecifierSet(str(other))
-        elif not isinstance(other, SpecifierSet):
-            return NotImplemented
-
-        return self._specs == other._specs
-
-    def __len__(self) -> int:
-        """Returns the number of specifiers in this specifier set."""
-        return len(self._specs)
-
-    def __iter__(self) -> Iterator[Specifier]:
-        """
-        Returns an iterator over all the underlying :class:`Specifier` instances
-        in this specifier set.
-
-        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
-        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
-        """
-        return iter(self._specs)
-
-    def __contains__(self, item: UnparsedVersion) -> bool:
-        """Return whether or not the item is contained in this specifier.
-
-        :param item: The item to check for.
-
-        This is used for the ``in`` operator and behaves the same as
-        :meth:`contains` with no ``prereleases`` argument passed.
-
-        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
-        True
-        >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
-        True
-        >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
-        False
-        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
-        False
-        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
-        True
-        """
-        return self.contains(item)
-
-    def contains(
-        self,
-        item: UnparsedVersion,
-        prereleases: Optional[bool] = None,
-        installed: Optional[bool] = None,
-    ) -> bool:
-        """Return whether or not the item is contained in this SpecifierSet.
-
-        :param item:
-            The item to check for, which can be a version string or a
-            :class:`Version` instance.
-        :param prereleases:
-            Whether or not to match prereleases with this SpecifierSet. If set to
-            ``None`` (the default), it uses :attr:`prereleases` to determine
-            whether or not prereleases are allowed.
-
-        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
-        True
-        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
-        True
-        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
-        False
-        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
-        False
-        >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
-        True
-        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
-        True
-        """
-        # Ensure that our item is a Version instance.
-        if not isinstance(item, Version):
-            item = Version(item)
-
-        # Determine if we're forcing a prerelease or not, if we're not forcing
-        # one for this particular filter call, then we'll use whatever the
-        # SpecifierSet thinks for whether or not we should support prereleases.
-        if prereleases is None:
-            prereleases = self.prereleases
-
-        # We can determine if we're going to allow pre-releases by looking to
-        # see if any of the underlying items supports them. If none of them do
-        # and this item is a pre-release then we do not allow it and we can
-        # short circuit that here.
-        # Note: This means that 1.0.dev1 would not be contained in something
-        # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
-        if not prereleases and item.is_prerelease:
-            return False
-
-        if installed and item.is_prerelease:
-            item = Version(item.base_version)
-
-        # We simply dispatch to the underlying specs here to make sure that the
-        # given version is contained within all of them.
-        # Note: This use of all() here means that an empty set of specifiers
-        # will always return True, this is an explicit design decision.
-        return all(s.contains(item, prereleases=prereleases) for s in self._specs)
-
-    def filter(
-        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
-    ) -> Iterator[UnparsedVersionVar]:
-        """Filter items in the given iterable, that match the specifiers in this set.
-
-        :param iterable:
-            An iterable that can contain version strings and :class:`Version` instances.
-            The items in the iterable will be filtered according to the specifier.
-        :param prereleases:
-            Whether or not to allow prereleases in the returned iterator. If set to
-            ``None`` (the default), it will be intelligently decide whether to allow
-            prereleases or not (based on the :attr:`prereleases` attribute, and
-            whether the only versions matching are prereleases).
-
-        This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
-        because it implements the rule from :pep:`440` that a prerelease item
-        SHOULD be accepted if no other versions match the given specifier.
-
-        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
-        ['1.3']
-        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
-        ['1.3', <Version('1.4')>]
-        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
-        []
-        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
-        ['1.3', '1.5a1']
-        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
-        ['1.3', '1.5a1']
-
-        An "empty" SpecifierSet will filter items based on the presence of prerelease
-        versions in the set.
- - >>> list(SpecifierSet("").filter(["1.3", "1.5a1"])) - ['1.3'] - >>> list(SpecifierSet("").filter(["1.5a1"])) - ['1.5a1'] - >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"])) - ['1.3', '1.5a1'] - >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True)) - ['1.3', '1.5a1'] - """ - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # If we have any specifiers, then we want to wrap our iterable in the - # filter method for each one, this will act as a logical AND amongst - # each specifier. - if self._specs: - for spec in self._specs: - iterable = spec.filter(iterable, prereleases=bool(prereleases)) - return iter(iterable) - # If we do not have any specifiers, then we need to have a rough filter - # which will filter out any pre-releases, unless there are no final - # releases. - else: - filtered: List[UnparsedVersionVar] = [] - found_prereleases: List[UnparsedVersionVar] = [] - - for item in iterable: - parsed_version = _coerce_version(item) - - # Store any item which is a pre-release for later unless we've - # already found a final version or we are accepting prereleases - if parsed_version.is_prerelease and not prereleases: - if not filtered: - found_prereleases.append(item) - else: - filtered.append(item) - - # If we've found no items except for pre-releases, then we'll go - # ahead and use the pre-releases - if not filtered and found_prereleases and prereleases is None: - return iter(found_prereleases) - - return iter(filtered) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/tags.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/tags.py deleted file mode 100644 index 37f33b1ef849e..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/tags.py +++ /dev/null @@ -1,553 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import logging -import platform -import struct -import subprocess -import sys -import sysconfig -from importlib.machinery import EXTENSION_SUFFIXES -from typing import ( - Dict, - FrozenSet, - Iterable, - Iterator, - List, - Optional, - Sequence, - Tuple, - Union, - cast, -) - -from . import _manylinux, _musllinux - -logger = logging.getLogger(__name__) - -PythonVersion = Sequence[int] -MacVersion = Tuple[int, int] - -INTERPRETER_SHORT_NAMES: Dict[str, str] = { - "python": "py", # Generic. - "cpython": "cp", - "pypy": "pp", - "ironpython": "ip", - "jython": "jy", -} - - -_32_BIT_INTERPRETER = struct.calcsize("P") == 4 - - -class Tag: - """ - A representation of the tag triple for a wheel. - - Instances are considered immutable and thus are hashable. Equality checking - is also supported. 
- """ - - __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] - - def __init__(self, interpreter: str, abi: str, platform: str) -> None: - self._interpreter = interpreter.lower() - self._abi = abi.lower() - self._platform = platform.lower() - # The __hash__ of every single element in a Set[Tag] will be evaluated each time - # that a set calls its `.disjoint()` method, which may be called hundreds of - # times when scanning a page of links for packages with tags matching that - # Set[Tag]. Pre-computing the value here produces significant speedups for - # downstream consumers. - self._hash = hash((self._interpreter, self._abi, self._platform)) - - @property - def interpreter(self) -> str: - return self._interpreter - - @property - def abi(self) -> str: - return self._abi - - @property - def platform(self) -> str: - return self._platform - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Tag): - return NotImplemented - - return ( - (self._hash == other._hash) # Short-circuit ASAP for perf reasons. - and (self._platform == other._platform) - and (self._abi == other._abi) - and (self._interpreter == other._interpreter) - ) - - def __hash__(self) -> int: - return self._hash - - def __str__(self) -> str: - return f"{self._interpreter}-{self._abi}-{self._platform}" - - def __repr__(self) -> str: - return f"<{self} @ {id(self)}>" - - -def parse_tag(tag: str) -> FrozenSet[Tag]: - """ - Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. - - Returning a set is required due to the possibility that the tag is a - compressed tag set. - """ - tags = set() - interpreters, abis, platforms = tag.split("-") - for interpreter in interpreters.split("."): - for abi in abis.split("."): - for platform_ in platforms.split("."): - tags.add(Tag(interpreter, abi, platform_)) - return frozenset(tags) - - -def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]: - value: Union[int, str, None] = sysconfig.get_config_var(name) - if value is None and warn: - logger.debug( - "Config variable '%s' is unset, Python ABI tag may be incorrect", name - ) - return value - - -def _normalize_string(string: str) -> str: - return string.replace(".", "_").replace("-", "_").replace(" ", "_") - - -def _abi3_applies(python_version: PythonVersion) -> bool: - """ - Determine if the Python version supports abi3. - - PEP 384 was first implemented in Python 3.2. - """ - return len(python_version) > 1 and tuple(python_version) >= (3, 2) - - -def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: - py_version = tuple(py_version) # To allow for version comparison. - abis = [] - version = _version_nodot(py_version[:2]) - debug = pymalloc = ucs4 = "" - with_debug = _get_config_var("Py_DEBUG", warn) - has_refcount = hasattr(sys, "gettotalrefcount") - # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled - # extension modules is the best option. 
-    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
-    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
-    if with_debug or (with_debug is None and (has_refcount or has_ext)):
-        debug = "d"
-    if py_version < (3, 8):
-        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
-        if with_pymalloc or with_pymalloc is None:
-            pymalloc = "m"
-        if py_version < (3, 3):
-            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
-            if unicode_size == 4 or (
-                unicode_size is None and sys.maxunicode == 0x10FFFF
-            ):
-                ucs4 = "u"
-    elif debug:
-        # Debug builds can also load "normal" extension modules.
-        # We can also assume no UCS-4 or pymalloc requirement.
-        abis.append(f"cp{version}")
-    abis.insert(
-        0,
-        "cp{version}{debug}{pymalloc}{ucs4}".format(
-            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
-        ),
-    )
-    return abis
-
-
-def cpython_tags(
-    python_version: Optional[PythonVersion] = None,
-    abis: Optional[Iterable[str]] = None,
-    platforms: Optional[Iterable[str]] = None,
-    *,
-    warn: bool = False,
-) -> Iterator[Tag]:
-    """
-    Yields the tags for a CPython interpreter.
-
-    The tags consist of:
-    - cp<python_version>-<abi>-<platform>
-    - cp<python_version>-abi3-<platform>
-    - cp<python_version>-none-<platform>
-    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.
-
-    If python_version only specifies a major version then user-provided ABIs and
-    the 'none' ABI tag will be used.
-
-    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
-    their normal position and not at the beginning.
-    """
-    if not python_version:
-        python_version = sys.version_info[:2]
-
-    interpreter = f"cp{_version_nodot(python_version[:2])}"
-
-    if abis is None:
-        if len(python_version) > 1:
-            abis = _cpython_abis(python_version, warn)
-        else:
-            abis = []
-    abis = list(abis)
-    # 'abi3' and 'none' are explicitly handled later.
-    for explicit_abi in ("abi3", "none"):
-        try:
-            abis.remove(explicit_abi)
-        except ValueError:
-            pass
-
-    platforms = list(platforms or platform_tags())
-    for abi in abis:
-        for platform_ in platforms:
-            yield Tag(interpreter, abi, platform_)
-    if _abi3_applies(python_version):
-        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
-    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
-
-    if _abi3_applies(python_version):
-        for minor_version in range(python_version[1] - 1, 1, -1):
-            for platform_ in platforms:
-                interpreter = "cp{version}".format(
-                    version=_version_nodot((python_version[0], minor_version))
-                )
-                yield Tag(interpreter, "abi3", platform_)
-
-
-def _generic_abi() -> List[str]:
-    """
-    Return the ABI tag based on EXT_SUFFIX.
-    """
-    # The following are examples of `EXT_SUFFIX`.
-    # We want to keep the parts which are related to the ABI and remove the
-    # parts which are related to the platform:
-    # - linux:   '.cpython-310-x86_64-linux-gnu.so' => cp310
-    # - mac:     '.cpython-310-darwin.so'           => cp310
-    # - win:     '.cp310-win_amd64.pyd'             => cp310
-    # - win:     '.pyd'                             => cp37 (uses _cpython_abis())
-    # - pypy:    '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
-    # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
-    #            => graalpy_38_native
-
-    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
-    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
-        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
-    parts = ext_suffix.split(".")
-    if len(parts) < 3:
-        # CPython3.7 and earlier uses ".pyd" on Windows.
-        return _cpython_abis(sys.version_info[:2])
-    soabi = parts[1]
-    if soabi.startswith("cpython"):
-        # non-windows
-        abi = "cp" + soabi.split("-")[1]
-    elif soabi.startswith("cp"):
-        # windows
-        abi = soabi.split("-")[0]
-    elif soabi.startswith("pypy"):
-        abi = "-".join(soabi.split("-")[:2])
-    elif soabi.startswith("graalpy"):
-        abi = "-".join(soabi.split("-")[:3])
-    elif soabi:
-        # pyston, ironpython, others?
-        abi = soabi
-    else:
-        return []
-    return [_normalize_string(abi)]
-
-
-def generic_tags(
-    interpreter: Optional[str] = None,
-    abis: Optional[Iterable[str]] = None,
-    platforms: Optional[Iterable[str]] = None,
-    *,
-    warn: bool = False,
-) -> Iterator[Tag]:
-    """
-    Yields the tags for a generic interpreter.
-
-    The tags consist of:
-    - <interpreter>-<abi>-<platform>
-
-    The "none" ABI will be added if it was not explicitly provided.
-    """
-    if not interpreter:
-        interp_name = interpreter_name()
-        interp_version = interpreter_version(warn=warn)
-        interpreter = "".join([interp_name, interp_version])
-    if abis is None:
-        abis = _generic_abi()
-    else:
-        abis = list(abis)
-    platforms = list(platforms or platform_tags())
-    if "none" not in abis:
-        abis.append("none")
-    for abi in abis:
-        for platform_ in platforms:
-            yield Tag(interpreter, abi, platform_)
-
-
-def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
-    """
-    Yields Python versions in descending order.
-
-    After the latest version, the major-only version will be yielded, and then
-    all previous versions of that major version.
-    """
-    if len(py_version) > 1:
-        yield f"py{_version_nodot(py_version[:2])}"
-    yield f"py{py_version[0]}"
-    if len(py_version) > 1:
-        for minor in range(py_version[1] - 1, -1, -1):
-            yield f"py{_version_nodot((py_version[0], minor))}"
-
-
-def compatible_tags(
-    python_version: Optional[PythonVersion] = None,
-    interpreter: Optional[str] = None,
-    platforms: Optional[Iterable[str]] = None,
-) -> Iterator[Tag]:
-    """
-    Yields the sequence of tags that are compatible with a specific version of Python.
-
-    The tags consist of:
-    - py*-none-<platform>
-    - <interpreter>-none-any  # ... if `interpreter` is provided.
-    - py*-none-any
-    """
-    if not python_version:
-        python_version = sys.version_info[:2]
-    platforms = list(platforms or platform_tags())
-    for version in _py_interpreter_range(python_version):
-        for platform_ in platforms:
-            yield Tag(version, "none", platform_)
-    if interpreter:
-        yield Tag(interpreter, "none", "any")
-    for version in _py_interpreter_range(python_version):
-        yield Tag(version, "none", "any")
-
-
-def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
-    if not is_32bit:
-        return arch
-
-    if arch.startswith("ppc"):
-        return "ppc"
-
-    return "i386"
-
-
-def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
-    formats = [cpu_arch]
-    if cpu_arch == "x86_64":
-        if version < (10, 4):
-            return []
-        formats.extend(["intel", "fat64", "fat32"])
-
-    elif cpu_arch == "i386":
-        if version < (10, 4):
-            return []
-        formats.extend(["intel", "fat32", "fat"])
-
-    elif cpu_arch == "ppc64":
-        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
- if version > (10, 5) or version < (10, 4): - return [] - formats.append("fat64") - - elif cpu_arch == "ppc": - if version > (10, 6): - return [] - formats.extend(["fat32", "fat"]) - - if cpu_arch in {"arm64", "x86_64"}: - formats.append("universal2") - - if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: - formats.append("universal") - - return formats - - -def mac_platforms( - version: Optional[MacVersion] = None, arch: Optional[str] = None -) -> Iterator[str]: - """ - Yields the platform tags for a macOS system. - - The `version` parameter is a two-item tuple specifying the macOS version to - generate platform tags for. The `arch` parameter is the CPU architecture to - generate platform tags for. Both parameters default to the appropriate value - for the current system. - """ - version_str, _, cpu_arch = platform.mac_ver() - if version is None: - version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) - if version == (10, 16): - # When built against an older macOS SDK, Python will report macOS 10.16 - # instead of the real version. - version_str = subprocess.run( - [ - sys.executable, - "-sS", - "-c", - "import platform; print(platform.mac_ver()[0])", - ], - check=True, - env={"SYSTEM_VERSION_COMPAT": "0"}, - stdout=subprocess.PIPE, - text=True, - ).stdout - version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) - else: - version = version - if arch is None: - arch = _mac_arch(cpu_arch) - else: - arch = arch - - if (10, 0) <= version and version < (11, 0): - # Prior to Mac OS 11, each yearly release of Mac OS bumped the - # "minor" version number. The major version was always 10. - for minor_version in range(version[1], -1, -1): - compat_version = 10, minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=10, minor=minor_version, binary_format=binary_format - ) - - if version >= (11, 0): - # Starting with Mac OS 11, each yearly release bumps the major version - # number. The minor versions are now the midyear updates. - for major_version in range(version[0], 10, -1): - compat_version = major_version, 0 - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=major_version, minor=0, binary_format=binary_format - ) - - if version >= (11, 0): - # Mac OS 11 on x86_64 is compatible with binaries from previous releases. - # Arm64 support was introduced in 11.0, so no Arm binaries from previous - # releases exist. - # - # However, the "universal2" binary format can have a - # macOS version earlier than 11.0 when the x86_64 part of the binary supports - # that version of macOS. 
- if arch == "x86_64": - for minor_version in range(16, 3, -1): - compat_version = 10, minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) - else: - for minor_version in range(16, 3, -1): - compat_version = 10, minor_version - binary_format = "universal2" - yield "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) - - -def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: - linux = _normalize_string(sysconfig.get_platform()) - if not linux.startswith("linux_"): - # we should never be here, just yield the sysconfig one and return - yield linux - return - if is_32bit: - if linux == "linux_x86_64": - linux = "linux_i686" - elif linux == "linux_aarch64": - linux = "linux_armv8l" - _, arch = linux.split("_", 1) - archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch]) - yield from _manylinux.platform_tags(archs) - yield from _musllinux.platform_tags(archs) - for arch in archs: - yield f"linux_{arch}" - - -def _generic_platforms() -> Iterator[str]: - yield _normalize_string(sysconfig.get_platform()) - - -def platform_tags() -> Iterator[str]: - """ - Provides the platform tags for this installation. - """ - if platform.system() == "Darwin": - return mac_platforms() - elif platform.system() == "Linux": - return _linux_platforms() - else: - return _generic_platforms() - - -def interpreter_name() -> str: - """ - Returns the name of the running interpreter. - - Some implementations have a reserved, two-letter abbreviation which will - be returned when appropriate. - """ - name = sys.implementation.name - return INTERPRETER_SHORT_NAMES.get(name) or name - - -def interpreter_version(*, warn: bool = False) -> str: - """ - Returns the version of the running interpreter. - """ - version = _get_config_var("py_version_nodot", warn=warn) - if version: - version = str(version) - else: - version = _version_nodot(sys.version_info[:2]) - return version - - -def _version_nodot(version: PythonVersion) -> str: - return "".join(map(str, version)) - - -def sys_tags(*, warn: bool = False) -> Iterator[Tag]: - """ - Returns the sequence of tag triples for the running interpreter. - - The order of the sequence corresponds to priority order for the - interpreter, from most to least important. - """ - - interp_name = interpreter_name() - if interp_name == "cp": - yield from cpython_tags(warn=warn) - else: - yield from generic_tags() - - if interp_name == "pp": - interp = "pp3" - elif interp_name == "cp": - interp = "cp" + interpreter_version(warn=warn) - else: - interp = None - yield from compatible_tags(interpreter=interp) diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/utils.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/utils.py deleted file mode 100644 index c2c2f75aa8062..0000000000000 --- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/utils.py +++ /dev/null @@ -1,172 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -import re -from typing import FrozenSet, NewType, Tuple, Union, cast - -from .tags import Tag, parse_tag -from .version import InvalidVersion, Version - -BuildTag = Union[Tuple[()], Tuple[int, str]] -NormalizedName = NewType("NormalizedName", str) - - -class InvalidName(ValueError): - """ - An invalid distribution name; users should refer to the packaging user guide. - """ - - -class InvalidWheelFilename(ValueError): - """ - An invalid wheel filename was found, users should refer to PEP 427. - """ - - -class InvalidSdistFilename(ValueError): - """ - An invalid sdist filename was found, users should refer to the packaging user guide. - """ - - -# Core metadata spec for `Name` -_validate_regex = re.compile( - r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE -) -_canonicalize_regex = re.compile(r"[-_.]+") -_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$") -# PEP 427: The build number must start with a digit. -_build_tag_regex = re.compile(r"(\d+)(.*)") - - -def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName: - if validate and not _validate_regex.match(name): - raise InvalidName(f"name is invalid: {name!r}") - # This is taken from PEP 503. - value = _canonicalize_regex.sub("-", name).lower() - return cast(NormalizedName, value) - - -def is_normalized_name(name: str) -> bool: - return _normalized_regex.match(name) is not None - - -def canonicalize_version( - version: Union[Version, str], *, strip_trailing_zero: bool = True -) -> str: - """ - This is very similar to Version.__str__, but has one subtle difference - with the way it handles the release segment. - """ - if isinstance(version, str): - try: - parsed = Version(version) - except InvalidVersion: - # Legacy versions cannot be normalized - return version - else: - parsed = version - - parts = [] - - # Epoch - if parsed.epoch != 0: - parts.append(f"{parsed.epoch}!") - - # Release segment - release_segment = ".".join(str(x) for x in parsed.release) - if strip_trailing_zero: - # NB: This strips trailing '.0's to normalize - release_segment = re.sub(r"(\.0)+$", "", release_segment) - parts.append(release_segment) - - # Pre-release - if parsed.pre is not None: - parts.append("".join(str(x) for x in parsed.pre)) - - # Post-release - if parsed.post is not None: - parts.append(f".post{parsed.post}") - - # Development release - if parsed.dev is not None: - parts.append(f".dev{parsed.dev}") - - # Local version segment - if parsed.local is not None: - parts.append(f"+{parsed.local}") - - return "".join(parts) - - -def parse_wheel_filename( - filename: str, -) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]: - if not filename.endswith(".whl"): - raise InvalidWheelFilename( - f"Invalid wheel filename (extension must be '.whl'): {filename}" - ) - - filename = filename[:-4] - dashes = filename.count("-") - if dashes not in (4, 5): - raise InvalidWheelFilename( - f"Invalid wheel filename (wrong number of parts): {filename}" - ) - - parts = filename.split("-", dashes - 2) - name_part = parts[0] - # See PEP 427 for the rules on escaping the project name. 
-    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
-        raise InvalidWheelFilename(f"Invalid project name: {filename}")
-    name = canonicalize_name(name_part)
-
-    try:
-        version = Version(parts[1])
-    except InvalidVersion as e:
-        raise InvalidWheelFilename(
-            f"Invalid wheel filename (invalid version): {filename}"
-        ) from e
-
-    if dashes == 5:
-        build_part = parts[2]
-        build_match = _build_tag_regex.match(build_part)
-        if build_match is None:
-            raise InvalidWheelFilename(
-                f"Invalid build number: {build_part} in '{filename}'"
-            )
-        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
-    else:
-        build = ()
-    tags = parse_tag(parts[-1])
-    return (name, version, build, tags)
-
-
-def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
-    if filename.endswith(".tar.gz"):
-        file_stem = filename[: -len(".tar.gz")]
-    elif filename.endswith(".zip"):
-        file_stem = filename[: -len(".zip")]
-    else:
-        raise InvalidSdistFilename(
-            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
-            f" {filename}"
-        )
-
-    # We are requiring a PEP 440 version, which cannot contain dashes,
-    # so we split on the last dash.
-    name_part, sep, version_part = file_stem.rpartition("-")
-    if not sep:
-        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
-
-    name = canonicalize_name(name_part)
-
-    try:
-        version = Version(version_part)
-    except InvalidVersion as e:
-        raise InvalidSdistFilename(
-            f"Invalid sdist filename (invalid version): {filename}"
-        ) from e
-
-    return (name, version)
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/version.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/version.py
deleted file mode 100644
index 5faab9bd0dcf2..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pylib/packaging/version.py
+++ /dev/null
@@ -1,563 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-"""
-.. testsetup::
-
-    from packaging.version import parse, Version
-"""
-
-import itertools
-import re
-from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
-
-from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
-
-__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
-
-LocalType = Tuple[Union[int, str], ...]
-
-CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
-CmpLocalType = Union[
-    NegativeInfinityType,
-    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
-]
-CmpKey = Tuple[
-    int,
-    Tuple[int, ...],
-    CmpPrePostDevType,
-    CmpPrePostDevType,
-    CmpPrePostDevType,
-    CmpLocalType,
-]
-VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
-
-
-class _Version(NamedTuple):
-    epoch: int
-    release: Tuple[int, ...]
-    dev: Optional[Tuple[str, int]]
-    pre: Optional[Tuple[str, int]]
-    post: Optional[Tuple[str, int]]
-    local: Optional[LocalType]
-
-
-def parse(version: str) -> "Version":
-    """Parse the given version string.
-
-    >>> parse('1.0.dev1')
-    <Version('1.0.dev1')>
-
-    :param version: The version string to parse.
-    :raises InvalidVersion: When the version string is not a valid version.
-    """
-    return Version(version)
-
-
-class InvalidVersion(ValueError):
-    """Raised when a version string is not a valid version.
-
-    >>> Version("invalid")
-    Traceback (most recent call last):
-        ...
-    packaging.version.InvalidVersion: Invalid version: 'invalid'
-    """
-
-
-class _BaseVersion:
-    _key: Tuple[Any, ...]
-
-    def __hash__(self) -> int:
-        return hash(self._key)
-
-    # Please keep the duplicated `isinstance` check
-    # in the six comparisons hereunder
-    # unless you find a way to avoid adding overhead function calls.
-    def __lt__(self, other: "_BaseVersion") -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key < other._key
-
-    def __le__(self, other: "_BaseVersion") -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key <= other._key
-
-    def __eq__(self, other: object) -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key == other._key
-
-    def __ge__(self, other: "_BaseVersion") -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key >= other._key
-
-    def __gt__(self, other: "_BaseVersion") -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key > other._key
-
-    def __ne__(self, other: object) -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key != other._key
-
-
-# Deliberately not anchored to the start and end of the string, to make it
-# easier for 3rd party code to reuse
-_VERSION_PATTERN = r"""
-    v?
-    (?:
-        (?:(?P<epoch>[0-9]+)!)?                           # epoch
-        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
-        (?P<pre>                                          # pre-release
-            [-_\.]?
-            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
-            [-_\.]?
-            (?P<pre_n>[0-9]+)?
-        )?
-        (?P<post>                                         # post release
-            (?:-(?P<post_n1>[0-9]+))
-            |
-            (?:
-                [-_\.]?
-                (?P<post_l>post|rev|r)
-                [-_\.]?
-                (?P<post_n2>[0-9]+)?
-            )
-        )?
-        (?P<dev>                                          # dev release
-            [-_\.]?
-            (?P<dev_l>dev)
-            [-_\.]?
-            (?P<dev_n>[0-9]+)?
-        )?
-    )
-    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
-"""
-
-VERSION_PATTERN = _VERSION_PATTERN
-"""
-A string containing the regular expression used to match a valid version.
-
-The pattern is not anchored at either end, and is intended for embedding in larger
-expressions (for example, matching a version number as part of a file name). The
-regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
-flags set.
-
-:meta hide-value:
-"""
-
-
-class Version(_BaseVersion):
-    """This class abstracts handling of a project's versions.
-
-    A :class:`Version` instance is comparison aware and can be compared and
-    sorted using the standard Python interfaces.
-
-    >>> v1 = Version("1.0a5")
-    >>> v2 = Version("1.0")
-    >>> v1
-    <Version('1.0a5')>
-    >>> v2
-    <Version('1.0')>
-    >>> v1 < v2
-    True
-    >>> v1 == v2
-    False
-    >>> v1 > v2
-    False
-    >>> v1 >= v2
-    False
-    >>> v1 <= v2
-    True
-    """
-
-    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
-    _key: CmpKey
-
-    def __init__(self, version: str) -> None:
-        """Initialize a Version object.
-
-        :param version:
-            The string representation of a version which will be parsed and normalized
-            before use.
-        :raises InvalidVersion:
-            If the ``version`` does not conform to PEP 440 in any way then this
-            exception will be raised.
-        """
-
-        # Validate the version and parse it into pieces
-        match = self._regex.search(version)
-        if not match:
-            raise InvalidVersion(f"Invalid version: '{version}'")
-
-        # Store the parsed out pieces of the version
-        self._version = _Version(
-            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
-            release=tuple(int(i) for i in match.group("release").split(".")),
-            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
-            post=_parse_letter_version(
-                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
-            ),
-            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
-            local=_parse_local_version(match.group("local")),
-        )
-
-        # Generate a key which will be used for sorting
-        self._key = _cmpkey(
-            self._version.epoch,
-            self._version.release,
-            self._version.pre,
-            self._version.post,
-            self._version.dev,
-            self._version.local,
-        )
-
-    def __repr__(self) -> str:
-        """A representation of the Version that shows all internal state.
-
-        >>> Version('1.0.0')
-        <Version('1.0.0')>
-        """
-        return f"<Version('{str(self)}')>"
-
-    def __str__(self) -> str:
-        """A string representation of the version that can be round-tripped.
-
-        >>> str(Version("1.0a5"))
-        '1.0a5'
-        """
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append(f"{self.epoch}!")
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        # Pre-release
-        if self.pre is not None:
-            parts.append("".join(str(x) for x in self.pre))
-
-        # Post-release
-        if self.post is not None:
-            parts.append(f".post{self.post}")
-
-        # Development release
-        if self.dev is not None:
-            parts.append(f".dev{self.dev}")
-
-        # Local version segment
-        if self.local is not None:
-            parts.append(f"+{self.local}")
-
-        return "".join(parts)
-
-    @property
-    def epoch(self) -> int:
-        """The epoch of the version.
-
-        >>> Version("2.0.0").epoch
-        0
-        >>> Version("1!2.0.0").epoch
-        1
-        """
-        return self._version.epoch
-
-    @property
-    def release(self) -> Tuple[int, ...]:
-        """The components of the "release" segment of the version.
-
-        >>> Version("1.2.3").release
-        (1, 2, 3)
-        >>> Version("2.0.0").release
-        (2, 0, 0)
-        >>> Version("1!2.0.0.post0").release
-        (2, 0, 0)
-
-        Includes trailing zeroes but not the epoch or any pre-release / development /
-        post-release suffixes.
-        """
-        return self._version.release
-
-    @property
-    def pre(self) -> Optional[Tuple[str, int]]:
-        """The pre-release segment of the version.
-
-        >>> print(Version("1.2.3").pre)
-        None
-        >>> Version("1.2.3a1").pre
-        ('a', 1)
-        >>> Version("1.2.3b1").pre
-        ('b', 1)
-        >>> Version("1.2.3rc1").pre
-        ('rc', 1)
-        """
-        return self._version.pre
-
-    @property
-    def post(self) -> Optional[int]:
-        """The post-release number of the version.
-
-        >>> print(Version("1.2.3").post)
-        None
-        >>> Version("1.2.3.post1").post
-        1
-        """
-        return self._version.post[1] if self._version.post else None
-
-    @property
-    def dev(self) -> Optional[int]:
-        """The development number of the version.
-
-        >>> print(Version("1.2.3").dev)
-        None
-        >>> Version("1.2.3.dev1").dev
-        1
-        """
-        return self._version.dev[1] if self._version.dev else None
-
-    @property
-    def local(self) -> Optional[str]:
-        """The local version segment of the version.
-
-        >>> print(Version("1.2.3").local)
-        None
-        >>> Version("1.2.3+abc").local
-        'abc'
-        """
-        if self._version.local:
-            return ".".join(str(x) for x in self._version.local)
-        else:
-            return None
-
-    @property
-    def public(self) -> str:
-        """The public portion of the version.
-
-        >>> Version("1.2.3").public
-        '1.2.3'
-        >>> Version("1.2.3+abc").public
-        '1.2.3'
-        >>> Version("1.2.3+abc.dev1").public
-        '1.2.3'
-        """
-        return str(self).split("+", 1)[0]
-
-    @property
-    def base_version(self) -> str:
-        """The "base version" of the version.
-
-        >>> Version("1.2.3").base_version
-        '1.2.3'
-        >>> Version("1.2.3+abc").base_version
-        '1.2.3'
-        >>> Version("1!1.2.3+abc.dev1").base_version
-        '1!1.2.3'
-
-        The "base version" is the public version of the project without any pre or post
-        release markers.
-        """
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append(f"{self.epoch}!")
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        return "".join(parts)
-
-    @property
-    def is_prerelease(self) -> bool:
-        """Whether this version is a pre-release.
-
-        >>> Version("1.2.3").is_prerelease
-        False
-        >>> Version("1.2.3a1").is_prerelease
-        True
-        >>> Version("1.2.3b1").is_prerelease
-        True
-        >>> Version("1.2.3rc1").is_prerelease
-        True
-        >>> Version("1.2.3dev1").is_prerelease
-        True
-        """
-        return self.dev is not None or self.pre is not None
-
-    @property
-    def is_postrelease(self) -> bool:
-        """Whether this version is a post-release.
-
-        >>> Version("1.2.3").is_postrelease
-        False
-        >>> Version("1.2.3.post1").is_postrelease
-        True
-        """
-        return self.post is not None
-
-    @property
-    def is_devrelease(self) -> bool:
-        """Whether this version is a development release.
-
-        >>> Version("1.2.3").is_devrelease
-        False
-        >>> Version("1.2.3.dev1").is_devrelease
-        True
-        """
-        return self.dev is not None
-
-    @property
-    def major(self) -> int:
-        """The first item of :attr:`release` or ``0`` if unavailable.
-
-        >>> Version("1.2.3").major
-        1
-        """
-        return self.release[0] if len(self.release) >= 1 else 0
-
-    @property
-    def minor(self) -> int:
-        """The second item of :attr:`release` or ``0`` if unavailable.
-
-        >>> Version("1.2.3").minor
-        2
-        >>> Version("1").minor
-        0
-        """
-        return self.release[1] if len(self.release) >= 2 else 0
-
-    @property
-    def micro(self) -> int:
-        """The third item of :attr:`release` or ``0`` if unavailable.
-
-        >>> Version("1.2.3").micro
-        3
-        >>> Version("1").micro
-        0
-        """
-        return self.release[2] if len(self.release) >= 3 else 0
-
-
-def _parse_letter_version(
-    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
-) -> Optional[Tuple[str, int]]:
-
-    if letter:
-        # We consider there to be an implicit 0 in a pre-release if there is
-        # not a numeral associated with it.
-        if number is None:
-            number = 0
-
-        # We normalize any letters to their lower case form
-        letter = letter.lower()
-
-        # We consider some words to be alternate spellings of other words and
-        # in those cases we want to normalize the spellings to our preferred
-        # spelling.
-        if letter == "alpha":
-            letter = "a"
-        elif letter == "beta":
-            letter = "b"
-        elif letter in ["c", "pre", "preview"]:
-            letter = "rc"
-        elif letter in ["rev", "r"]:
-            letter = "post"
-
-        return letter, int(number)
-    if not letter and number:
-        # If we are given a number but not a letter, we assume this is using
-        # the implicit post release syntax (e.g. 1.0-1)
-        letter = "post"
-
-        return letter, int(number)
-
-    return None
-
-
-_local_version_separators = re.compile(r"[\._-]")
-
-
-def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
-    """
-    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
-    """
-    if local is not None:
-        return tuple(
-            part.lower() if not part.isdigit() else int(part)
-            for part in _local_version_separators.split(local)
-        )
-    return None
-
-
-def _cmpkey(
-    epoch: int,
-    release: Tuple[int, ...],
-    pre: Optional[Tuple[str, int]],
-    post: Optional[Tuple[str, int]],
-    dev: Optional[Tuple[str, int]],
-    local: Optional[LocalType],
-) -> CmpKey:
-
-    # When we compare a release version, we want to compare it with all of the
-    # trailing zeros removed. So we reverse the list, drop all the now-leading
-    # zeros until we come to something non-zero, then re-reverse the rest back
-    # into the correct order and make it a tuple to use as our sorting key.
-    _release = tuple(
-        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
-    )
-
-    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
-    # We'll do this by abusing the pre segment, but we _only_ want to do this
-    # if there is not a pre or a post segment. If we have one of those then
-    # the normal sorting rules will handle this case correctly.
-    if pre is None and post is None and dev is not None:
-        _pre: CmpPrePostDevType = NegativeInfinity
-    # Versions without a pre-release (except as noted above) should sort after
-    # those with one.
-    elif pre is None:
-        _pre = Infinity
-    else:
-        _pre = pre
-
-    # Versions without a post segment should sort before those with one.
-    if post is None:
-        _post: CmpPrePostDevType = NegativeInfinity
-
-    else:
-        _post = post
-
-    # Versions without a development segment should sort after those with one.
-    if dev is None:
-        _dev: CmpPrePostDevType = Infinity
-
-    else:
-        _dev = dev
-
-    if local is None:
-        # Versions without a local segment should sort before those with one.
-        _local: CmpLocalType = NegativeInfinity
-    else:
-        # Versions with a local segment need that segment parsed to implement
-        # the sorting rules in PEP440.
-        # - Alpha numeric segments sort before numeric segments
-        # - Alpha numeric segments sort lexicographically
-        # - Numeric segments sort numerically
-        # - Shorter versions sort before longer versions when the prefixes
-        #   match exactly
-        _local = tuple(
-            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
-        )
-
-    return epoch, _release, _pre, _post, _dev, _local
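
For context on what this vendored module provided: a minimal Python sketch of the PEP 440 parsing and ordering implemented above, assuming the upstream `packaging` distribution from PyPI is installed (`pip install packaging`).

```python
# Minimal sketch of the PEP 440 ordering implemented above, using the
# upstream `packaging` distribution that this vendored copy mirrors.
from packaging.version import Version, InvalidVersion

versions = ["1.0", "1.0a5", "1.0.dev0", "1.0.post1", "1!0.5", "1.0+local.1"]
# dev releases sort before pre-releases, which sort before the final
# release; local segments and post-releases sort after; epochs trump all.
print([str(v) for v in sorted(Version(v) for v in versions)])
# ['1.0.dev0', '1.0a5', '1.0', '1.0+local.1', '1.0.post1', '1!0.5']

try:
    Version("not a version")
except InvalidVersion as err:
    print(err)  # Invalid version: 'not a version'
```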
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pyproject.toml b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pyproject.toml
deleted file mode 100644
index def9858e444c5..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/pyproject.toml
+++ /dev/null
@@ -1,120 +0,0 @@
-[build-system]
-requires = ["setuptools>=61.0"]
-build-backend = "setuptools.build_meta"
-
-[project]
-name = "gyp-next"
-version = "0.18.1"
-authors = [
-  { name="Node.js contributors", email="ryzokuken@disroot.org" },
-]
-description = "A fork of the GYP build system for use in the Node.js projects"
-readme = "README.md"
-license = { file="LICENSE" }
-requires-python = ">=3.8"
-dependencies = ["packaging>=24.0", "setuptools>=69.5.1"]
-classifiers = [
-    "Development Status :: 3 - Alpha",
-    "Environment :: Console",
-    "Intended Audience :: Developers",
-    "License :: OSI Approved :: BSD License",
-    "Natural Language :: English",
-    "Programming Language :: Python",
-    "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.8",
-    "Programming Language :: Python :: 3.9",
-    "Programming Language :: Python :: 3.10",
-    "Programming Language :: Python :: 3.11",
-]
-
-[project.optional-dependencies]
-dev = ["pytest", "ruff"]
-
-[project.scripts]
-gyp = "gyp:script_main"
-
-[project.urls]
-"Homepage" = "https://github.com/nodejs/gyp-next"
-
-[tool.ruff]
-extend-exclude = ["pylib/packaging"]
-line-length = 88
-target-version = "py37"
-
-[tool.ruff.lint]
-select = [
-  "C4",   # flake8-comprehensions
-  "C90",  # McCabe cyclomatic complexity
-  "DTZ",  # flake8-datetimez
-  "E",    # pycodestyle
-  "F",    # Pyflakes
-  "G",    # flake8-logging-format
-  "ICN",  # flake8-import-conventions
-  "INT",  # flake8-gettext
-  "PL",   # Pylint
-  "PYI",  # flake8-pyi
-  "RSE",  # flake8-raise
-  "RUF",  # Ruff-specific rules
-  "T10",  # flake8-debugger
-  "TCH",  # flake8-type-checking
-  "TID",  # flake8-tidy-imports
-  "UP",   # pyupgrade
-  "W",    # pycodestyle
-  "YTT",  # flake8-2020
-  # "A",    # flake8-builtins
-  # "ANN",  # flake8-annotations
-  # "ARG",  # flake8-unused-arguments
-  # "B",    # flake8-bugbear
-  # "BLE",  # flake8-blind-except
-  # "COM",  # flake8-commas
-  # "D",    # pydocstyle
-  # "DJ",   # flake8-django
-  # "EM",   # flake8-errmsg
-  # "ERA",  # eradicate
-  # "EXE",  # flake8-executable
-  # "FBT",  # flake8-boolean-trap
-  # "I",    # isort
-  # "INP",  # flake8-no-pep420
-  # "ISC",  # flake8-implicit-str-concat
-  # "N",    # pep8-naming
-  # "NPY",  # NumPy-specific rules
-  # "PD",   # pandas-vet
-  # "PGH",  # pygrep-hooks
-  # "PIE",  # flake8-pie
-  # "PT",   # flake8-pytest-style
-  # "PTH",  # flake8-use-pathlib
-  # "Q",    # flake8-quotes
-  # "RET",  # flake8-return
-  # "S",    # flake8-bandit
-  # "SIM",  # flake8-simplify
-  # "SLF",  # flake8-self
-  # "T20",  # flake8-print
-  # "TRY",  # tryceratops
-]
-ignore = [
-  "E721",
-  "PLC1901",
-  "PLR0402",
-  "PLR1714",
-  "PLR2004",
-  "PLR5501",
-  "PLW0603",
-  "PLW2901",
-  "PYI024",
-  "RUF005",
-  "RUF012",
-  "UP031",
-]
-
-[tool.ruff.lint.mccabe]
-max-complexity = 101
-
-[tool.ruff.lint.pylint]
-max-args = 11
-max-branches = 108
-max-returns = 10
-max-statements = 286
-
-[tool.setuptools]
-package-dir = {"" = "pylib"}
-packages = ["gyp", "gyp.generator"]
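
The `[project.scripts]` table above is what puts a `gyp` executable on PATH after `pip install gyp-next`. A sketch of how that entry resolves at runtime; it assumes gyp-next is installed and uses the stdlib `importlib.metadata` API (the `group=` filter needs Python 3.10+).

```python
# Sketch: resolve the console-script entry point declared above.
from importlib.metadata import entry_points

(gyp_ep,) = [ep for ep in entry_points(group="console_scripts") if ep.name == "gyp"]
print(gyp_ep.value)          # "gyp:script_main"
script_main = gyp_ep.load()  # imports the gyp package, returns gyp.script_main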
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/release-please-config.json b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/release-please-config.json
deleted file mode 100644
index b6cad32a2dd0e..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/release-please-config.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-    "last-release-sha": "78756421b0d7bb335992a9c7d26ba3cc8b619708",
-    "packages": {
-        ".": {
-          "release-type": "python",
-          "package-name": "gyp-next",
-          "bump-minor-pre-major": true,
-          "include-component-in-tag": false
-        }
-    }
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/test_gyp.py b/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/test_gyp.py
deleted file mode 100755
index b7bb956b8ed58..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/gyp/test_gyp.py
+++ /dev/null
@@ -1,261 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""gyptest.py -- test runner for GYP tests."""
-
-
-import argparse
-import os
-import platform
-import subprocess
-import sys
-import time
-
-
-def is_test_name(f):
-    return f.startswith("gyptest") and f.endswith(".py")
-
-
-def find_all_gyptest_files(directory):
-    result = []
-    for root, dirs, files in os.walk(directory):
-        result.extend([os.path.join(root, f) for f in files if is_test_name(f)])
-    result.sort()
-    return result
-
-
-def main(argv=None):
-    if argv is None:
-        argv = sys.argv
-
-    parser = argparse.ArgumentParser()
-    parser.add_argument("-a", "--all", action="store_true", help="run all tests")
-    parser.add_argument("-C", "--chdir", action="store", help="change to directory")
-    parser.add_argument(
-        "-f",
-        "--format",
-        action="store",
-        default="",
-        help="run tests with the specified formats",
-    )
-    parser.add_argument(
-        "-G",
-        "--gyp_option",
-        action="append",
-        default=[],
-        help="Add -G options to the gyp command line",
-    )
-    parser.add_argument(
-        "-l", "--list", action="store_true", help="list available tests and exit"
-    )
-    parser.add_argument(
-        "-n",
-        "--no-exec",
-        action="store_true",
-        help="no execute, just print the command line",
-    )
-    parser.add_argument(
-        "--path", action="append", default=[], help="additional $PATH directory"
-    )
-    parser.add_argument(
-        "-q",
-        "--quiet",
-        action="store_true",
-        help="quiet, don't print anything unless there are failures",
-    )
-    parser.add_argument(
-        "-v",
-        "--verbose",
-        action="store_true",
-        help="print configuration info and test results.",
-    )
-    parser.add_argument("tests", nargs="*")
-    args = parser.parse_args(argv[1:])
-
-    if args.chdir:
-        os.chdir(args.chdir)
-
-    if args.path:
-        extra_path = [os.path.abspath(p) for p in args.path]
-        extra_path = os.pathsep.join(extra_path)
-        os.environ["PATH"] = extra_path + os.pathsep + os.environ["PATH"]
-
-    if not args.tests:
-        if not args.all:
-            sys.stderr.write("Specify -a to get all tests.\n")
-            return 1
-        args.tests = ["test"]
-
-    tests = []
-    for arg in args.tests:
-        if os.path.isdir(arg):
-            tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
-        else:
-            if not is_test_name(os.path.basename(arg)):
-                print(arg, "is not a valid gyp test name.", file=sys.stderr)
-                sys.exit(1)
-            tests.append(arg)
-
-    if args.list:
-        for test in tests:
-            print(test)
-        sys.exit(0)
-
-    os.environ["PYTHONPATH"] = os.path.abspath("test/lib")
-
-    if args.verbose:
-        print_configuration_info()
-
-    if args.gyp_option and not args.quiet:
-        print("Extra Gyp options: %s\n" % args.gyp_option)
-
-    if args.format:
-        format_list = args.format.split(",")
-    else:
-        format_list = {
-            "aix5": ["make"],
-            "os400": ["make"],
-            "freebsd7": ["make"],
-            "freebsd8": ["make"],
-            "openbsd5": ["make"],
-            "cygwin": ["msvs"],
-            "win32": ["msvs", "ninja"],
-            "linux": ["make", "ninja"],
-            "linux2": ["make", "ninja"],
-            "linux3": ["make", "ninja"],
-            # TODO: Re-enable xcode-ninja.
-            # https://bugs.chromium.org/p/gyp/issues/detail?id=530
-            # 'darwin':   ['make', 'ninja', 'xcode', 'xcode-ninja'],
-            "darwin": ["make", "ninja", "xcode"],
-        }[sys.platform]
-
-    gyp_options = []
-    for option in args.gyp_option:
-        gyp_options += ["-G", option]
-
-    runner = Runner(format_list, tests, gyp_options, args.verbose)
-    runner.run()
-
-    if not args.quiet:
-        runner.print_results()
-
-    return 1 if runner.failures else 0
-
-
-def print_configuration_info():
-    print("Test configuration:")
-    if sys.platform == "darwin":
-        sys.path.append(os.path.abspath("test/lib"))
-        import TestMac
-
-        print(f"  Mac {platform.mac_ver()[0]} {platform.mac_ver()[2]}")
-        print(f"  Xcode {TestMac.Xcode.Version()}")
-    elif sys.platform == "win32":
-        sys.path.append(os.path.abspath("pylib"))
-        import gyp.MSVSVersion
-
-        print("  Win %s %s\n" % platform.win32_ver()[0:2])
-        print("  MSVS %s" % gyp.MSVSVersion.SelectVisualStudioVersion().Description())
-    elif sys.platform in ("linux", "linux2"):
-        print("  Linux %s" % " ".join(platform.linux_distribution()))
-    print(f"  Python {platform.python_version()}")
-    print(f"  PYTHONPATH={os.environ['PYTHONPATH']}")
-    print()
-
-
-class Runner:
-    def __init__(self, formats, tests, gyp_options, verbose):
-        self.formats = formats
-        self.tests = tests
-        self.verbose = verbose
-        self.gyp_options = gyp_options
-        self.failures = []
-        self.num_tests = len(formats) * len(tests)
-        num_digits = len(str(self.num_tests))
-        self.fmt_str = "[%%%dd/%%%dd] (%%s) %%s" % (num_digits, num_digits)
-        self.isatty = sys.stdout.isatty() and not self.verbose
-        self.env = os.environ.copy()
-        self.hpos = 0
-
-    def run(self):
-        run_start = time.time()
-
-        i = 1
-        for fmt in self.formats:
-            for test in self.tests:
-                self.run_test(test, fmt, i)
-                i += 1
-
-        if self.isatty:
-            self.erase_current_line()
-
-        self.took = time.time() - run_start
-
-    def run_test(self, test, fmt, i):
-        if self.isatty:
-            self.erase_current_line()
-
-        msg = self.fmt_str % (i, self.num_tests, fmt, test)
-        self.print_(msg)
-
-        start = time.time()
-        cmd = [sys.executable, test] + self.gyp_options
-        self.env["TESTGYP_FORMAT"] = fmt
-        proc = subprocess.Popen(
-            cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=self.env
-        )
-        proc.wait()
-        took = time.time() - start
-
-        stdout = proc.stdout.read().decode("utf8")
-        if proc.returncode == 2:
-            res = "skipped"
-        elif proc.returncode:
-            res = "failed"
-            self.failures.append(f"({test}) {fmt}")
-        else:
-            res = "passed"
-        res_msg = f" {res} {took:.3f}s"
-        self.print_(res_msg)
-
-        if stdout and not stdout.endswith(("PASSED\n", "NO RESULT\n")):
-            print()
-            print("\n".join(f"    {line}" for line in stdout.splitlines()))
-        elif not self.isatty:
-            print()
-
-    def print_(self, msg):
-        print(msg, end="")
-        index = msg.rfind("\n")
-        if index == -1:
-            self.hpos += len(msg)
-        else:
-            self.hpos = len(msg) - index
-        sys.stdout.flush()
-
-    def erase_current_line(self):
-        print("\b" * self.hpos + " " * self.hpos + "\b" * self.hpos, end="")
-        sys.stdout.flush()
-        self.hpos = 0
-
-    def print_results(self):
-        num_failures = len(self.failures)
-        if num_failures:
-            print()
-            if num_failures == 1:
-                print("Failed the following test:")
-            else:
-                print("Failed the following %d tests:" % num_failures)
-            print("\t" + "\n\t".join(sorted(self.failures)))
-            print()
-        print(
-            "Ran %d tests in %.3fs, %d failed."
-            % (self.num_tests, self.took, num_failures)
-        )
-        print()
-
-
-if __name__ == "__main__":
-    sys.exit(main())
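
The heart of `Runner.run_test` above is a subprocess spawn with the generator format exported; a standalone sketch follows (the test path is a hypothetical example).

```python
# Sketch of Runner.run_test above: run a single gyptest-*.py file with the
# generator format exported via TESTGYP_FORMAT.
import os
import subprocess
import sys

env = dict(os.environ, TESTGYP_FORMAT="ninja")
proc = subprocess.run(
    [sys.executable, "test/hello/gyptest-all.py"],  # hypothetical test file
    stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env,
)
# Exit code 2 means the test skipped itself; any other non-zero code fails.
print({0: "passed", 2: "skipped"}.get(proc.returncode, "failed"))
```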
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/Find-VisualStudio.cs b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/Find-VisualStudio.cs
deleted file mode 100644
index d2e45a76275f5..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/Find-VisualStudio.cs
+++ /dev/null
@@ -1,250 +0,0 @@
-// Copyright 2017 - Refael Ackermann
-// Distributed under MIT style license
-// See accompanying file LICENSE at https://github.com/node4good/windows-autoconf
-
-// Usage:
-// powershell -ExecutionPolicy Unrestricted -Command "Add-Type -Path Find-VisualStudio.cs; [VisualStudioConfiguration.Main]::PrintJson()"
-// This script needs to be compatible with PowerShell v2 to run on Windows 2008R2 and Windows 7.
-
-using System;
-using System.Text;
-using System.Runtime.InteropServices;
-using System.Collections.Generic;
-
-namespace VisualStudioConfiguration
-{
-    [Flags]
-    public enum InstanceState : uint
-    {
-        None = 0,
-        Local = 1,
-        Registered = 2,
-        NoRebootRequired = 4,
-        NoErrors = 8,
-        Complete = 4294967295,
-    }
-
-    [Guid("6380BCFF-41D3-4B2E-8B2E-BF8A6810C848")]
-    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
-    [ComImport]
-    public interface IEnumSetupInstances
-    {
-
-        void Next([MarshalAs(UnmanagedType.U4), In] int celt,
-            [MarshalAs(UnmanagedType.LPArray, ArraySubType = UnmanagedType.Interface), Out] ISetupInstance[] rgelt,
-            [MarshalAs(UnmanagedType.U4)] out int pceltFetched);
-
-        void Skip([MarshalAs(UnmanagedType.U4), In] int celt);
-
-        void Reset();
-
-        [return: MarshalAs(UnmanagedType.Interface)]
-        IEnumSetupInstances Clone();
-    }
-
-    [Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")]
-    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
-    [ComImport]
-    public interface ISetupConfiguration
-    {
-    }
-
-    [Guid("26AAB78C-4A60-49D6-AF3B-3C35BC93365D")]
-    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
-    [ComImport]
-    public interface ISetupConfiguration2 : ISetupConfiguration
-    {
-
-        [return: MarshalAs(UnmanagedType.Interface)]
-        IEnumSetupInstances EnumInstances();
-
-        [return: MarshalAs(UnmanagedType.Interface)]
-        ISetupInstance GetInstanceForCurrentProcess();
-
-        [return: MarshalAs(UnmanagedType.Interface)]
-        ISetupInstance GetInstanceForPath([MarshalAs(UnmanagedType.LPWStr), In] string path);
-
-        [return: MarshalAs(UnmanagedType.Interface)]
-        IEnumSetupInstances EnumAllInstances();
-    }
-
-    [Guid("B41463C3-8866-43B5-BC33-2B0676F7F42E")]
-    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
-    [ComImport]
-    public interface ISetupInstance
-    {
-    }
-
-    [Guid("89143C9A-05AF-49B0-B717-72E218A2185C")]
-    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
-    [ComImport]
-    public interface ISetupInstance2 : ISetupInstance
-    {
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetInstanceId();
-
-        [return: MarshalAs(UnmanagedType.Struct)]
-        System.Runtime.InteropServices.ComTypes.FILETIME GetInstallDate();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetInstallationName();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetInstallationPath();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetInstallationVersion();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetDisplayName([MarshalAs(UnmanagedType.U4), In] int lcid);
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetDescription([MarshalAs(UnmanagedType.U4), In] int lcid);
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string ResolvePath([MarshalAs(UnmanagedType.LPWStr), In] string pwszRelativePath);
-
-        [return: MarshalAs(UnmanagedType.U4)]
-        InstanceState GetState();
-
-        [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)]
-        ISetupPackageReference[] GetPackages();
-
-        ISetupPackageReference GetProduct();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetProductPath();
-
-        [return: MarshalAs(UnmanagedType.VariantBool)]
-        bool IsLaunchable();
-
-        [return: MarshalAs(UnmanagedType.VariantBool)]
-        bool IsComplete();
-
-        [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)]
-        ISetupPropertyStore GetProperties();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetEnginePath();
-    }
-
-    [Guid("DA8D8A16-B2B6-4487-A2F1-594CCCCD6BF5")]
-    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
-    [ComImport]
-    public interface ISetupPackageReference
-    {
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetId();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetVersion();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetChip();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetLanguage();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetBranch();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetType();
-
-        [return: MarshalAs(UnmanagedType.BStr)]
-        string GetUniqueId();
-
-        [return: MarshalAs(UnmanagedType.VariantBool)]
-        bool GetIsExtension();
-    }
-
-    [Guid("c601c175-a3be-44bc-91f6-4568d230fc83")]
-    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
-    [ComImport]
-    public interface ISetupPropertyStore
-    {
-
-        [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_BSTR)]
-        string[] GetNames();
-
-        object GetValue([MarshalAs(UnmanagedType.LPWStr), In] string pwszName);
-    }
-
-    [Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")]
-    [CoClass(typeof(SetupConfigurationClass))]
-    [ComImport]
-    public interface SetupConfiguration : ISetupConfiguration2, ISetupConfiguration
-    {
-    }
-
-    [Guid("177F0C4A-1CD3-4DE7-A32C-71DBBB9FA36D")]
-    [ClassInterface(ClassInterfaceType.None)]
-    [ComImport]
-    public class SetupConfigurationClass
-    {
-    }
-
-    public static class Main
-    {
-        public static void PrintJson()
-        {
-            ISetupConfiguration query = new SetupConfiguration();
-            ISetupConfiguration2 query2 = (ISetupConfiguration2)query;
-            IEnumSetupInstances e = query2.EnumAllInstances();
-
-            int pceltFetched;
-            ISetupInstance2[] rgelt = new ISetupInstance2[1];
-            List<string> instances = new List<string>();
-            while (true)
-            {
-                e.Next(1, rgelt, out pceltFetched);
-                if (pceltFetched <= 0)
-                {
-                    Console.WriteLine(String.Format("[{0}]", string.Join(",", instances.ToArray())));
-                    return;
-                }
-
-                try
-                {
-                    instances.Add(InstanceJson(rgelt[0]));
-                }
-                catch (COMException)
-                {
-                    // Ignore instances that can't be queried.
-                }
-            }
-        }
-
-        private static string JsonString(string s)
-        {
-            return "\"" + s.Replace("\\", "\\\\").Replace("\"", "\\\"") + "\"";
-        }
-
-        private static string InstanceJson(ISetupInstance2 setupInstance2)
-        {
-            // Visual Studio component directory:
-            // https://docs.microsoft.com/en-us/visualstudio/install/workload-and-component-ids
-
-            StringBuilder json = new StringBuilder();
-            json.Append("{");
-
-            string path = JsonString(setupInstance2.GetInstallationPath());
-            json.Append(String.Format("\"path\":{0},", path));
-
-            string version = JsonString(setupInstance2.GetInstallationVersion());
-            json.Append(String.Format("\"version\":{0},", version));
-
-            List<string> packages = new List<string>();
-            foreach (ISetupPackageReference package in setupInstance2.GetPackages())
-            {
-                string id = JsonString(package.GetId());
-                packages.Add(id);
-            }
-            json.Append(String.Format("\"packages\":[{0}]", string.Join(",", packages.ToArray())));
-
-            json.Append("}");
-            return json.ToString();
-        }
-    }
-}
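
The usage comment at the top of this file documents the exact PowerShell invocation; below is a Windows-only Python sketch of driving it and parsing the JSON it prints (node-gyp itself does the equivalent from JavaScript in `find-visualstudio.js`; the relative `.cs` path is an assumption).

```python
# Windows-only sketch: run Find-VisualStudio.cs exactly as its usage comment
# documents, then parse the JSON array of {path, version, packages} objects.
import json
import subprocess

ps = "Add-Type -Path Find-VisualStudio.cs; [VisualStudioConfiguration.Main]::PrintJson()"
out = subprocess.check_output(
    ["powershell", "-ExecutionPolicy", "Unrestricted", "-Command", ps],
    text=True,
)
for vs in json.loads(out):
    print(vs["version"], vs["path"], len(vs["packages"]), "packages")
```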
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/build.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/build.js
deleted file mode 100644
index e1f49bb6ff0ca..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/build.js
+++ /dev/null
@@ -1,227 +0,0 @@
-'use strict'
-
-const gracefulFs = require('graceful-fs')
-const fs = gracefulFs.promises
-const path = require('path')
-const { glob } = require('glob')
-const log = require('./log')
-const which = require('which')
-const win = process.platform === 'win32'
-
-async function build (gyp, argv) {
-  let platformMake = 'make'
-  if (process.platform === 'aix') {
-    platformMake = 'gmake'
-  } else if (process.platform === 'os400') {
-    platformMake = 'gmake'
-  } else if (process.platform.indexOf('bsd') !== -1) {
-    platformMake = 'gmake'
-  } else if (win && argv.length > 0) {
-    argv = argv.map(function (target) {
-      return '/t:' + target
-    })
-  }
-
-  const makeCommand = gyp.opts.make || process.env.MAKE || platformMake
-  let command = win ? 'msbuild' : makeCommand
-  const jobs = gyp.opts.jobs || process.env.JOBS
-  let buildType
-  let config
-  let arch
-  let nodeDir
-  let guessedSolution
-  let python
-  let buildBinsDir
-
-  await loadConfigGypi()
-
-  /**
-   * Load the "config.gypi" file that was generated during "configure".
-   */
-
-  async function loadConfigGypi () {
-    let data
-    try {
-      const configPath = path.resolve('build', 'config.gypi')
-      data = await fs.readFile(configPath, 'utf8')
-    } catch (err) {
-      if (err.code === 'ENOENT') {
-        throw new Error('You must run `node-gyp configure` first!')
-      } else {
-        throw err
-      }
-    }
-
-    config = JSON.parse(data.replace(/#.+\n/, ''))
-
-    // get the 'arch', 'buildType', and 'nodeDir' vars from the config
-    buildType = config.target_defaults.default_configuration
-    arch = config.variables.target_arch
-    nodeDir = config.variables.nodedir
-    python = config.variables.python
-
-    if ('debug' in gyp.opts) {
-      buildType = gyp.opts.debug ? 'Debug' : 'Release'
-    }
-    if (!buildType) {
-      buildType = 'Release'
-    }
-
-    log.verbose('build type', buildType)
-    log.verbose('architecture', arch)
-    log.verbose('node dev dir', nodeDir)
-    log.verbose('python', python)
-
-    if (win) {
-      await findSolutionFile()
-    } else {
-      await doWhich()
-    }
-  }
-
-  /**
-   * On Windows, find the first build/*.sln file.
-   */
-
-  async function findSolutionFile () {
-    const files = await glob('build/*.sln')
-    if (files.length === 0) {
-      if (gracefulFs.existsSync('build/Makefile') || (await glob('build/*.mk')).length !== 0) {
-        command = makeCommand
-        await doWhich(false)
-        return
-      } else {
-        throw new Error('Could not find *.sln file or Makefile. Did you run "configure"?')
-      }
-    }
-    guessedSolution = files[0]
-    log.verbose('found first Solution file', guessedSolution)
-    await doWhich(true)
-  }
-
-  /**
-   * Uses node-which to locate the msbuild / make executable.
-   */
-
-  async function doWhich (msvs) {
-    // On Windows use msbuild provided by node-gyp configure
-    if (msvs) {
-      if (!config.variables.msbuild_path) {
-        throw new Error('MSBuild is not set, please run `node-gyp configure`.')
-      }
-      command = config.variables.msbuild_path
-      log.verbose('using MSBuild:', command)
-      await doBuild(msvs)
-      return
-    }
-
-    // First make sure we have the build command in the PATH
-    const execPath = await which(command)
-    log.verbose('`which` succeeded for `' + command + '`', execPath)
-    await doBuild(msvs)
-  }
-
-  /**
-   * Actually spawn the process and compile the module.
-   */
-
-  async function doBuild (msvs) {
-    // Enable Verbose build
-    const verbose = log.logger.isVisible('verbose')
-    let j
-
-    if (!msvs && verbose) {
-      argv.push('V=1')
-    }
-
-    if (msvs && !verbose) {
-      argv.push('/clp:Verbosity=minimal')
-    }
-
-    if (msvs) {
-      // Turn off the Microsoft logo on Windows
-      argv.push('/nologo')
-    }
-
-    // Specify the build type, Release by default
-    if (msvs) {
-      // Convert .gypi config target_arch to MSBuild /Platform
-      // Since there are many ways to state '32-bit Intel', default to it.
-      // N.B. msbuild's Condition string equality tests are case-insensitive.
-      const archLower = arch.toLowerCase()
-      const p = archLower === 'x64'
-        ? 'x64'
-        : (archLower === 'arm'
-            ? 'ARM'
-            : (archLower === 'arm64' ? 'ARM64' : 'Win32'))
-      argv.push('/p:Configuration=' + buildType + ';Platform=' + p)
-      if (jobs) {
-        j = parseInt(jobs, 10)
-        if (!isNaN(j) && j > 0) {
-          argv.push('/m:' + j)
-        } else if (jobs.toUpperCase() === 'MAX') {
-          argv.push('/m:' + require('os').cpus().length)
-        }
-      }
-    } else {
-      argv.push('BUILDTYPE=' + buildType)
-      // Invoke the Makefile in the 'build' dir.
-      argv.push('-C')
-      argv.push('build')
-      if (jobs) {
-        j = parseInt(jobs, 10)
-        if (!isNaN(j) && j > 0) {
-          argv.push('--jobs')
-          argv.push(j)
-        } else if (jobs.toUpperCase() === 'MAX') {
-          argv.push('--jobs')
-          argv.push(require('os').cpus().length)
-        }
-      }
-    }
-
-    if (msvs) {
-      // did the user specify their own .sln file?
-      const hasSln = argv.some(function (arg) {
-        return path.extname(arg) === '.sln'
-      })
-      if (!hasSln) {
-        argv.unshift(gyp.opts.solution || guessedSolution)
-      }
-    }
-
-    if (!win) {
-      // Add build-time dependency symlinks (such as Python) to PATH
-      buildBinsDir = path.resolve('build', 'node_gyp_bins')
-      process.env.PATH = `${buildBinsDir}:${process.env.PATH}`
-      await fs.mkdir(buildBinsDir, { recursive: true })
-      const symlinkDestination = path.join(buildBinsDir, 'python3')
-      try {
-        await fs.unlink(symlinkDestination)
-      } catch (err) {
-        if (err.code !== 'ENOENT') throw err
-      }
-      await fs.symlink(python, symlinkDestination)
-      log.verbose('bin symlinks', `created symlink to "${python}" in "${buildBinsDir}" and added to PATH`)
-    }
-
-    const proc = gyp.spawn(command, argv)
-    await new Promise((resolve, reject) => proc.on('exit', async (code, signal) => {
-      if (buildBinsDir) {
-        // Clean up the build-time dependency symlinks:
-        await fs.rm(buildBinsDir, { recursive: true })
-      }
-
-      if (code !== 0) {
-        return reject(new Error('`' + command + '` failed with exit code: ' + code))
-      }
-      if (signal) {
-        return reject(new Error('`' + command + '` got signal: ' + signal))
-      }
-      resolve()
-    }))
-  }
-}
-
-module.exports = build
-module.exports.usage = 'Invokes `' + (win ? 'msbuild' : 'make') + '` and builds the module'
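
For reference, the MSBuild argument logic that `doBuild` above performs on Windows, condensed into a Python sketch; the function name and defaults are illustrative only.

```python
# Sketch of doBuild()'s MSBuild arguments above: map the gyp target_arch to
# /p:Platform (unknown arches default to Win32, since MSBuild's Condition
# string tests are case-insensitive) and turn the jobs option into /m.
import os

def msbuild_args(arch, build_type="Release", jobs=None):
    platform = {"x64": "x64", "arm": "ARM", "arm64": "ARM64"}.get(arch.lower(), "Win32")
    args = ["/nologo", f"/p:Configuration={build_type};Platform={platform}"]
    if jobs is not None and str(jobs).isdigit() and int(jobs) > 0:
        args.append(f"/m:{int(jobs)}")
    elif jobs is not None and str(jobs).upper() == "MAX":
        args.append(f"/m:{os.cpu_count()}")
    return args

print(msbuild_args("arm64", jobs="MAX"))
# ['/nologo', '/p:Configuration=Release;Platform=ARM64', '/m:<cpu count>']
```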
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/clean.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/clean.js
deleted file mode 100644
index 523f8016caece..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/clean.js
+++ /dev/null
@@ -1,15 +0,0 @@
-'use strict'
-
-const fs = require('graceful-fs').promises
-const log = require('./log')
-
-async function clean (gyp, argv) {
-  // Remove the 'build' dir
-  const buildDir = 'build'
-
-  log.verbose('clean', 'removing "%s" directory', buildDir)
-  await fs.rm(buildDir, { recursive: true, force: true })
-}
-
-module.exports = clean
-module.exports.usage = 'Removes any generated build files and the "out" dir'
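
The whole command reduces to the single `fs.rm` call above; for comparison, the Python equivalent:

```python
# Python equivalent of fs.rm(buildDir, { recursive: true, force: true }):
# remove the generated "build" directory, ignoring a missing directory.
import shutil

shutil.rmtree("build", ignore_errors=True)
```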
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/configure.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/configure.js
deleted file mode 100644
index ee672cfbf26c2..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/configure.js
+++ /dev/null
@@ -1,328 +0,0 @@
-'use strict'
-
-const { promises: fs, readFileSync } = require('graceful-fs')
-const path = require('path')
-const log = require('./log')
-const os = require('os')
-const processRelease = require('./process-release')
-const win = process.platform === 'win32'
-const findNodeDirectory = require('./find-node-directory')
-const { createConfigGypi } = require('./create-config-gypi')
-const { format: msgFormat } = require('util')
-const { findAccessibleSync } = require('./util')
-const { findPython } = require('./find-python')
-const { findVisualStudio } = win ? require('./find-visualstudio') : {}
-
-const majorRe = /^#define NODE_MAJOR_VERSION (\d+)/m
-const minorRe = /^#define NODE_MINOR_VERSION (\d+)/m
-const patchRe = /^#define NODE_PATCH_VERSION (\d+)/m
-
-async function configure (gyp, argv) {
-  const buildDir = path.resolve('build')
-  const configNames = ['config.gypi', 'common.gypi']
-  const configs = []
-  let nodeDir
-  const release = processRelease(argv, gyp, process.version, process.release)
-
-  const python = await findPython(gyp.opts.python)
-  return getNodeDir()
-
-  async function getNodeDir () {
-    // 'python' should be set by now
-    process.env.PYTHON = python
-
-    if (!gyp.opts.nodedir &&
-        process.config.variables.use_prefix_to_find_headers) {
-      // check if the headers can be found using the prefix specified
-      // at build time. Use them if they match the version expected
-      const prefix = process.config.variables.node_prefix
-      let availVersion
-      try {
-        const nodeVersionH = readFileSync(path.join(prefix,
-          'include', 'node', 'node_version.h'), { encoding: 'utf8' })
-        const major = nodeVersionH.match(majorRe)[1]
-        const minor = nodeVersionH.match(minorRe)[1]
-        const patch = nodeVersionH.match(patchRe)[1]
-        availVersion = major + '.' + minor + '.' + patch
-      } catch {}
-      if (availVersion === release.version) {
-        // ok version matches, use the headers
-        gyp.opts.nodedir = prefix
-        log.verbose('using local node headers based on prefix',
-          'setting nodedir to ' + gyp.opts.nodedir)
-      }
-    }
-
-    if (gyp.opts.nodedir) {
-      // --nodedir was specified. use that for the dev files
-      nodeDir = gyp.opts.nodedir.replace(/^~/, os.homedir())
-      log.verbose('get node dir', 'compiling against specified --nodedir dev files: %s', nodeDir)
-    } else {
-      // if no --nodedir specified, ensure node dependencies are installed
-      if ('v' + release.version !== process.version) {
-        // if --target was given, then determine a target version to compile for
-        log.verbose('get node dir', 'compiling against --target node version: %s', release.version)
-      } else {
-        // if no --target was specified then use the current host node version
-        log.verbose('get node dir', 'no --target version specified, falling back to host node version: %s', release.version)
-      }
-
-      if (!release.semver) {
-        // could not parse the version string with semver
-        throw new Error('Invalid version number: ' + release.version)
-      }
-
-      // If the tarball option is set, always remove and reinstall the headers
-      // into devdir. Otherwise only install if they're not already there.
-      gyp.opts.ensure = !gyp.opts.tarball
-
-      await gyp.commands.install([release.version])
-
-      log.verbose('get node dir', 'target node version installed:', release.versionDir)
-      nodeDir = path.resolve(gyp.devDir, release.versionDir)
-    }
-
-    return createBuildDir()
-  }
-
-  async function createBuildDir () {
-    log.verbose('build dir', 'attempting to create "build" dir: %s', buildDir)
-
-    const isNew = await fs.mkdir(buildDir, { recursive: true })
-    log.verbose(
-      'build dir', '"build" dir needed to be created?', isNew ? 'Yes' : 'No'
-    )
-    if (win) {
-      let usingMakeGenerator = false
-      for (let i = argv.length - 1; i >= 0; --i) {
-        const arg = argv[i]
-        if (arg === '-f' || arg === '--format') {
-          const format = argv[i + 1]
-          if (typeof format === 'string' && format.startsWith('make')) {
-            usingMakeGenerator = true
-            break
-          }
-        } else if (arg.startsWith('--format=make')) {
-          usingMakeGenerator = true
-          break
-        }
-      }
-      let vsInfo = {}
-      if (!usingMakeGenerator) {
-        vsInfo = await findVisualStudio(release.semver, gyp.opts['msvs-version'])
-      }
-      return createConfigFile(vsInfo)
-    }
-    return createConfigFile(null)
-  }
-
-  async function createConfigFile (vsInfo) {
-    if (win) {
-      process.env.GYP_MSVS_VERSION = Math.min(vsInfo.versionYear, 2015)
-      process.env.GYP_MSVS_OVERRIDE_PATH = vsInfo.path
-    }
-    const configPath = await createConfigGypi({ gyp, buildDir, nodeDir, vsInfo, python })
-    configs.push(configPath)
-    return findConfigs()
-  }
-
-  async function findConfigs () {
-    const name = configNames.shift()
-    if (!name) {
-      return runGyp()
-    }
-
-    const fullPath = path.resolve(name)
-    log.verbose(name, 'checking for gypi file: %s', fullPath)
-    try {
-      await fs.stat(fullPath)
-      log.verbose(name, 'found gypi file')
-      configs.push(fullPath)
-    } catch (err) {
-      // ENOENT will check next gypi filename
-      if (err.code !== 'ENOENT') {
-        throw err
-      }
-    }
-
-    return findConfigs()
-  }
-
-  async function runGyp () {
-    if (!~argv.indexOf('-f') && !~argv.indexOf('--format')) {
-      if (win) {
-        log.verbose('gyp', 'gyp format was not specified; forcing "msvs"')
-        // force the 'msvs' generator format on Windows
-        argv.push('-f', 'msvs')
-      } else {
-        log.verbose('gyp', 'gyp format was not specified; forcing "make"')
-        // force the 'make' target for non-Windows
-        argv.push('-f', 'make')
-      }
-    }
-
-    // include all the ".gypi" files that were found
-    configs.forEach(function (config) {
-      argv.push('-I', config)
-    })
-
-    // For AIX and z/OS we need to set up the path to the exports file
-    // which contains the symbols needed for linking.
-    let nodeExpFile
-    let nodeRootDir
-    let candidates
-    let logprefix = 'find exports file'
-    if (process.platform === 'aix' || process.platform === 'os390' || process.platform === 'os400') {
-      const ext = process.platform === 'os390' ? 'x' : 'exp'
-      nodeRootDir = findNodeDirectory()
-
-      if (process.platform === 'aix' || process.platform === 'os400') {
-        candidates = [
-          'include/node/node',
-          'out/Release/node',
-          'out/Debug/node',
-          'node'
-        ].map(function (file) {
-          return file + '.' + ext
-        })
-      } else {
-        candidates = [
-          'out/Release/lib.target/libnode',
-          'out/Debug/lib.target/libnode',
-          'out/Release/obj.target/libnode',
-          'out/Debug/obj.target/libnode',
-          'lib/libnode'
-        ].map(function (file) {
-          return file + '.' + ext
-        })
-      }
-
-      nodeExpFile = findAccessibleSync(logprefix, nodeRootDir, candidates)
-      if (nodeExpFile !== undefined) {
-        log.verbose(logprefix, 'Found exports file: %s', nodeExpFile)
-      } else {
-        const msg = msgFormat('Could not find node.%s file in %s', ext, nodeRootDir)
-        log.error(logprefix, 'Could not find exports file')
-        throw new Error(msg)
-      }
-    }
-
-    // For z/OS we need to set up the path to zoslib include directory,
-    // which contains headers included in v8config.h.
-    let zoslibIncDir
-    if (process.platform === 'os390') {
-      logprefix = "find zoslib's zos-base.h:"
-      let msg
-      let zoslibIncPath = process.env.ZOSLIB_INCLUDES
-      if (zoslibIncPath) {
-        zoslibIncPath = findAccessibleSync(logprefix, zoslibIncPath, ['zos-base.h'])
-        if (zoslibIncPath === undefined) {
-          msg = msgFormat('Could not find zos-base.h file in the directory set ' +
-                          'in ZOSLIB_INCLUDES environment variable: %s; set it ' +
-                          'to the correct path, or unset it to search %s', process.env.ZOSLIB_INCLUDES, nodeRootDir)
-        }
-      } else {
-        candidates = [
-          'include/node/zoslib/zos-base.h',
-          'include/zoslib/zos-base.h',
-          'zoslib/include/zos-base.h',
-          'install/include/node/zoslib/zos-base.h'
-        ]
-        zoslibIncPath = findAccessibleSync(logprefix, nodeRootDir, candidates)
-        if (zoslibIncPath === undefined) {
-          msg = msgFormat('Could not find any of %s in directory %s; set ' +
-                          'environment variable ZOSLIB_INCLUDES to the path ' +
-                          'that contains zos-base.h', candidates.toString(), nodeRootDir)
-        }
-      }
-      if (zoslibIncPath !== undefined) {
-        zoslibIncDir = path.dirname(zoslibIncPath)
-        log.verbose(logprefix, "Found zoslib's zos-base.h in: %s", zoslibIncDir)
-      } else if (release.version.split('.')[0] >= 16) {
-        // zoslib is only shipped in Node v16 and above.
-        log.error(logprefix, msg)
-        throw new Error(msg)
-      }
-    }
-
-    // this logic ported from the old `gyp_addon` python file
-    const gypScript = path.resolve(__dirname, '..', 'gyp', 'gyp_main.py')
-    const addonGypi = path.resolve(__dirname, '..', 'addon.gypi')
-    let commonGypi = path.resolve(nodeDir, 'include/node/common.gypi')
-    try {
-      await fs.stat(commonGypi)
-    } catch (err) {
-      commonGypi = path.resolve(nodeDir, 'common.gypi')
-    }
-
-    let outputDir = 'build'
-    if (win) {
-      // Windows expects an absolute path
-      outputDir = buildDir
-    }
-    const nodeGypDir = path.resolve(__dirname, '..')
-
-    let nodeLibFile = path.join(nodeDir,
-      !gyp.opts.nodedir ? '<(target_arch)' : '$(Configuration)',
-      release.name + '.lib')
-
-    argv.push('-I', addonGypi)
-    argv.push('-I', commonGypi)
-    argv.push('-Dlibrary=shared_library')
-    argv.push('-Dvisibility=default')
-    argv.push('-Dnode_root_dir=' + nodeDir)
-    if (process.platform === 'aix' || process.platform === 'os390' || process.platform === 'os400') {
-      argv.push('-Dnode_exp_file=' + nodeExpFile)
-      if (process.platform === 'os390' && zoslibIncDir) {
-        argv.push('-Dzoslib_include_dir=' + zoslibIncDir)
-      }
-    }
-    argv.push('-Dnode_gyp_dir=' + nodeGypDir)
-
-    // Do this to keep Cygwin environments happy, else the unescaped '\' gets eaten up,
-    // resulting in bad paths, e.g. c:parentFolderfolderanotherFolder instead of c:\parentFolder\folder\anotherFolder
-    if (win) {
-      nodeLibFile = nodeLibFile.replace(/\\/g, '\\\\')
-    }
-    argv.push('-Dnode_lib_file=' + nodeLibFile)
-    argv.push('-Dmodule_root_dir=' + process.cwd())
-    argv.push('-Dnode_engine=' +
-        (gyp.opts.node_engine || process.jsEngine || 'v8'))
-    argv.push('--depth=.')
-    argv.push('--no-parallel')
-
-    // tell gyp to write the Makefile/Solution files into output_dir
-    argv.push('--generator-output', outputDir)
-
-    // tell make to write its output into the same dir
-    argv.push('-Goutput_dir=.')
-
-    // enforce use of the "binding.gyp" file
-    argv.unshift('binding.gyp')
-
-    // execute `gyp` from the current target nodedir
-    argv.unshift(gypScript)
-
-    // make sure python uses files that came with this particular node package
-    const pypath = [path.join(__dirname, '..', 'gyp', 'pylib')]
-    if (process.env.PYTHONPATH) {
-      pypath.push(process.env.PYTHONPATH)
-    }
-    process.env.PYTHONPATH = pypath.join(win ? ';' : ':')
-
-    await new Promise((resolve, reject) => {
-      const cp = gyp.spawn(python, argv)
-      cp.on('exit', (code) => {
-        if (code !== 0) {
-          reject(new Error('`gyp` failed with exit code: ' + code))
-        } else {
-          // we're done
-          resolve()
-        }
-      })
-    })
-  }
-}
-
-module.exports = configure
-module.exports.usage = 'Generates ' + (win ? 'MSVC project files' : 'a Makefile') + ' for the current module'
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/create-config-gypi.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/create-config-gypi.js
deleted file mode 100644
index 01a820e9f2f31..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/create-config-gypi.js
+++ /dev/null
@@ -1,153 +0,0 @@
-'use strict'
-
-const fs = require('graceful-fs').promises
-const log = require('./log')
-const path = require('path')
-
-function parseConfigGypi (config) {
-  // translated from tools/js2c.py of Node.js
-  // 1. strip comments
-  config = config.replace(/#.*/g, '')
-  // 2. join multiline strings
-  config = config.replace(/'$\s+'/mg, '')
-  // 3. normalize string literals from ' into "
-  config = config.replace(/'/g, '"')
-  return JSON.parse(config)
-}
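-// For example (illustrative input), a config.gypi such as:
-//   # Do not edit. Generated file.
-//   { 'variables': { 'target_arch': 'x64' } }
-// parses to the JSON object {"variables":{"target_arch":"x64"}}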
-
-async function getBaseConfigGypi ({ gyp, nodeDir }) {
-  // try reading $nodeDir/include/node/config.gypi first when:
-  // 1. --dist-url or --nodedir is specified
-  // 2. and --force-process-config is not specified
-  const useCustomHeaders = gyp.opts.nodedir || gyp.opts.disturl || gyp.opts['dist-url']
-  const shouldReadConfigGypi = useCustomHeaders && !gyp.opts['force-process-config']
-  if (shouldReadConfigGypi && nodeDir) {
-    try {
-      const baseConfigGypiPath = path.resolve(nodeDir, 'include/node/config.gypi')
-      const baseConfigGypi = await fs.readFile(baseConfigGypiPath)
-      return parseConfigGypi(baseConfigGypi.toString())
-    } catch (err) {
-      log.warn('read config.gypi', err.message)
-    }
-  }
-
-  // fall back to process.config if config.gypi is unavailable or invalid
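-  // (deep-cloned via JSON so later edits do not mutate the live process.config)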
-  return JSON.parse(JSON.stringify(process.config))
-}
-
-async function getCurrentConfigGypi ({ gyp, nodeDir, vsInfo, python }) {
-  const config = await getBaseConfigGypi({ gyp, nodeDir })
-  if (!config.target_defaults) {
-    config.target_defaults = {}
-  }
-  if (!config.variables) {
-    config.variables = {}
-  }
-
-  const defaults = config.target_defaults
-  const variables = config.variables
-
-  // don't inherit the "defaults" from the base config.gypi.
-  // doing so could cause problems in cases where the `node` executable was
-  // compiled on a different machine (with different lib/include paths) than
-  // the machine where the addon is being built
-  defaults.cflags = []
-  defaults.defines = []
-  defaults.include_dirs = []
-  defaults.libraries = []
-
-  // set the default_configuration prop
-  if ('debug' in gyp.opts) {
-    defaults.default_configuration = gyp.opts.debug ? 'Debug' : 'Release'
-  }
-
-  if (!defaults.default_configuration) {
-    defaults.default_configuration = 'Release'
-  }
-
-  // set the target_arch variable
-  variables.target_arch = gyp.opts.arch || process.arch || 'ia32'
-  if (variables.target_arch === 'arm64') {
-    defaults.msvs_configuration_platform = 'ARM64'
-    defaults.xcode_configuration_platform = 'arm64'
-  }
-
-  // set the node development directory
-  variables.nodedir = nodeDir
-
-  // set the configured Python path
-  variables.python = python
-
-  // disable -T "thin" static archives by default
-  variables.standalone_static_library = gyp.opts.thin ? 0 : 1
-
-  if (process.platform === 'win32') {
-    defaults.msbuild_toolset = vsInfo.toolset
-    if (vsInfo.sdk) {
-      defaults.msvs_windows_target_platform_version = vsInfo.sdk
-    }
-    if (variables.target_arch === 'arm64') {
-      if (vsInfo.versionMajor > 15 ||
-          (vsInfo.versionMajor === 15 && vsInfo.versionMinor >= 9)) {
-        defaults.msvs_enable_marmasm = 1
-      } else {
-        log.warn('Compiling ARM64 assembly is only available in\n' +
-          'Visual Studio 2017 version 15.9 and above')
-      }
-    }
-    variables.msbuild_path = vsInfo.msBuild
-    if (config.variables.clang === 1) {
-      config.variables.clang = 0
-    }
-  }
-
-  // loop through the rest of the opts and add the unknown ones as variables.
-  // this allows for module-specific configure flags like:
-  //
-  //   $ node-gyp configure --shared-libxml2
-  Object.keys(gyp.opts).forEach(function (opt) {
-    if (opt === 'argv') {
-      return
-    }
-    if (opt in gyp.configDefs) {
-      return
-    }
-    variables[opt.replace(/-/g, '_')] = gyp.opts[opt]
-  })
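-  // (dashes become underscores, so "--shared-libxml2" is exposed to gyp as the variable "shared_libxml2")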
-
-  return config
-}
-
-async function createConfigGypi ({ gyp, buildDir, nodeDir, vsInfo, python }) {
-  const configFilename = 'config.gypi'
-  const configPath = path.resolve(buildDir, configFilename)
-
-  log.verbose('build/' + configFilename, 'creating config file')
-
-  const config = await getCurrentConfigGypi({ gyp, nodeDir, vsInfo, python })
-
-  // ensures that any boolean values in config.gypi get stringified
-  function boolsToString (k, v) {
-    if (typeof v === 'boolean') {
-      return String(v)
-    }
-    return v
-  }
-
-  log.silly('build/' + configFilename, config)
-
-  // now write out the config.gypi file to the build/ dir
-  const prefix = '# Do not edit. File was generated by node-gyp\'s "configure" step'
-
-  const json = JSON.stringify(config, boolsToString, 2)
-  log.verbose('build/' + configFilename, 'writing out config file: %s', configPath)
-  await fs.writeFile(configPath, [prefix, json, ''].join('\n'))
-
-  return configPath
-}
-
-module.exports = {
-  createConfigGypi,
-  parseConfigGypi,
-  getCurrentConfigGypi
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/download.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/download.js
deleted file mode 100644
index ed0aa37f44116..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/download.js
+++ /dev/null
@@ -1,39 +0,0 @@
-const fetch = require('make-fetch-happen')
-const { promises: fs } = require('graceful-fs')
-const log = require('./log')
-
-async function download (gyp, url) {
-  log.http('GET', url)
-
-  const requestOpts = {
-    headers: {
-      'User-Agent': `node-gyp v${gyp.version} (node ${process.version})`,
-      Connection: 'keep-alive'
-    },
-    proxy: gyp.opts.proxy,
-    noProxy: gyp.opts.noproxy
-  }
-
-  const cafile = gyp.opts.cafile
-  if (cafile) {
-    requestOpts.ca = await readCAFile(cafile)
-  }
-
-  const res = await fetch(url, requestOpts)
-  log.http(res.status, res.url)
-
-  return res
-}
-
-async function readCAFile (filename) {
-  // The CA file can contain multiple certificates so split on certificate
-  // boundaries.  [\S\s]*? is used to match everything including newlines.
-  const ca = await fs.readFile(filename, 'utf8')
-  const re = /(-----BEGIN CERTIFICATE-----[\S\s]*?-----END CERTIFICATE-----)/g
-  return ca.match(re)
-}
-
-module.exports = {
-  download,
-  readCAFile
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/find-node-directory.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/find-node-directory.js
deleted file mode 100644
index 8838b81d33899..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/find-node-directory.js
+++ /dev/null
@@ -1,63 +0,0 @@
-'use strict'
-
-const path = require('path')
-const log = require('./log')
-
-function findNodeDirectory (scriptLocation, processObj) {
-  // set dirname and process if not passed in
-  // this facilitates regression tests
-  if (scriptLocation === undefined) {
-    scriptLocation = __dirname
-  }
-  if (processObj === undefined) {
-    processObj = process
-  }
-
-  // Have a look to see what is above us, to try and work out where we are
-  const npmParentDirectory = path.join(scriptLocation, '../../../..')
-  log.verbose('node-gyp root', 'npm_parent_directory is ' +
-              path.basename(npmParentDirectory))
-  let nodeRootDir = ''
-
-  log.verbose('node-gyp root', 'Finding node root directory')
-  if (path.basename(npmParentDirectory) === 'deps') {
-    // We are in a build directory where this script lives in
-    // deps/npm/node_modules/node-gyp/lib
-    nodeRootDir = path.join(npmParentDirectory, '..')
-    log.verbose('node-gyp root', 'in build directory, root = ' +
-                nodeRootDir)
-  } else if (path.basename(npmParentDirectory) === 'node_modules') {
-    // We are in a node install directory where this script lives in
-    // lib/node_modules/npm/node_modules/node-gyp/lib or
-    // node_modules/npm/node_modules/node-gyp/lib depending on the
-    // platform
-    if (processObj.platform === 'win32') {
-      nodeRootDir = path.join(npmParentDirectory, '..')
-    } else {
-      nodeRootDir = path.join(npmParentDirectory, '../..')
-    }
-    log.verbose('node-gyp root', 'in install directory, root = ' +
-                nodeRootDir)
-  } else {
-    // We don't know where we are, try working it out from the location
-    // of the node binary
-    const nodeDir = path.dirname(processObj.execPath)
-    const directoryUp = path.basename(nodeDir)
-    if (directoryUp === 'bin') {
-      nodeRootDir = path.join(nodeDir, '..')
-    } else if (directoryUp === 'Release' || directoryUp === 'Debug') {
-      // We are a recently built node and the directory structure
-      // is that of a repository. On Windows we only need to go one
-      // level up; everywhere else, two
-      if (processObj.platform === 'win32') {
-        nodeRootDir = path.join(nodeDir, '..')
-      } else {
-        nodeRootDir = path.join(nodeDir, '../..')
-      }
-    }
-    // Else return the default blank, "".
-  }
-  return nodeRootDir
-}
-
-module.exports = findNodeDirectory
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/find-python.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/find-python.js
deleted file mode 100644
index a71c00c2b65bc..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/find-python.js
+++ /dev/null
@@ -1,310 +0,0 @@
-'use strict'
-
-const log = require('./log')
-const semver = require('semver')
-const { execFile } = require('./util')
-const win = process.platform === 'win32'
-
-function getOsUserInfo () {
-  try {
-    return require('os').userInfo().username
-  } catch {}
-}
-
-const systemDrive = process.env.SystemDrive || 'C:'
-const username = process.env.USERNAME || process.env.USER || getOsUserInfo()
-const localAppData = process.env.LOCALAPPDATA || `${systemDrive}\\${username}\\AppData\\Local`
-const foundLocalAppData = process.env.LOCALAPPDATA || username
-const programFiles = process.env.ProgramW6432 || process.env.ProgramFiles || `${systemDrive}\\Program Files`
-const programFilesX86 = process.env['ProgramFiles(x86)'] || `${programFiles} (x86)`
-
-const winDefaultLocationsArray = []
-for (const majorMinor of ['311', '310', '39', '38']) {
-  if (foundLocalAppData) {
-    winDefaultLocationsArray.push(
-      `${localAppData}\\Programs\\Python\\Python${majorMinor}\\python.exe`,
-      `${programFiles}\\Python${majorMinor}\\python.exe`,
-      `${localAppData}\\Programs\\Python\\Python${majorMinor}-32\\python.exe`,
-      `${programFiles}\\Python${majorMinor}-32\\python.exe`,
-      `${programFilesX86}\\Python${majorMinor}-32\\python.exe`
-    )
-  } else {
-    winDefaultLocationsArray.push(
-      `${programFiles}\\Python${majorMinor}\\python.exe`,
-      `${programFiles}\\Python${majorMinor}-32\\python.exe`,
-      `${programFilesX86}\\Python${majorMinor}-32\\python.exe`
-    )
-  }
-}
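-// Candidate locations are listed newest-first (3.11 down to 3.8) so more recent Pythons are preferred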
-
-class PythonFinder {
-  static findPython = (...args) => new PythonFinder(...args).findPython()
-
-  log = log.withPrefix('find Python')
-  argsExecutable = ['-c', 'import sys; sys.stdout.buffer.write(sys.executable.encode(\'utf-8\'));']
-  argsVersion = ['-c', 'import sys; print("%s.%s.%s" % sys.version_info[:3]);']
-  semverRange = '>=3.6.0'
-
-  // These can be overridden for testing:
-  execFile = execFile
-  env = process.env
-  win = win
-  pyLauncher = 'py.exe'
-  winDefaultLocations = winDefaultLocationsArray
-
-  constructor (configPython) {
-    this.configPython = configPython
-    this.errorLog = []
-  }
-
-  // Logs a message at verbose level, but also saves it to be displayed later
-  // at error level if an error occurs. This should help diagnose the problem.
-  addLog (message) {
-    this.log.verbose(message)
-    this.errorLog.push(message)
-  }
-
-  // Find Python by trying a sequence of possibilities.
-  // Ignore errors, keep trying until Python is found.
-  async findPython () {
-    const SKIP = 0
-    const FAIL = 1
-    const toCheck = (() => {
-      if (this.env.NODE_GYP_FORCE_PYTHON) {
-        return [{
-          before: () => {
-            this.addLog(
-              'checking Python explicitly set from NODE_GYP_FORCE_PYTHON')
-            this.addLog('- process.env.NODE_GYP_FORCE_PYTHON is ' +
-              `"${this.env.NODE_GYP_FORCE_PYTHON}"`)
-          },
-          check: () => this.checkCommand(this.env.NODE_GYP_FORCE_PYTHON)
-        }]
-      }
-
-      const checks = [
-        {
-          before: () => {
-            if (!this.configPython) {
-              this.addLog(
-                'Python is not set from command line or npm configuration')
-              return SKIP
-            }
-            this.addLog('checking Python explicitly set from command line or ' +
-              'npm configuration')
-            this.addLog('- "--python=" or "npm config get python" is ' +
-              `"${this.configPython}"`)
-          },
-          check: () => this.checkCommand(this.configPython)
-        },
-        {
-          before: () => {
-            if (!this.env.PYTHON) {
-              this.addLog('Python is not set from environment variable ' +
-                'PYTHON')
-              return SKIP
-            }
-            this.addLog('checking Python explicitly set from environment ' +
-              'variable PYTHON')
-            this.addLog(`- process.env.PYTHON is "${this.env.PYTHON}"`)
-          },
-          check: () => this.checkCommand(this.env.PYTHON)
-        }
-      ]
-
-      if (this.win) {
-        checks.push({
-          before: () => {
-            this.addLog(
-              'checking if the py launcher can be used to find Python 3')
-          },
-          check: () => this.checkPyLauncher()
-        })
-      }
-
-      checks.push(...[
-        {
-          before: () => { this.addLog('checking if "python3" can be used') },
-          check: () => this.checkCommand('python3')
-        },
-        {
-          before: () => { this.addLog('checking if "python" can be used') },
-          check: () => this.checkCommand('python')
-        }
-      ])
-
-      if (this.win) {
-        for (let i = 0; i < this.winDefaultLocations.length; ++i) {
-          const location = this.winDefaultLocations[i]
-          checks.push({
-            before: () => this.addLog(`checking if Python is ${location}`),
-            check: () => this.checkExecPath(location)
-          })
-        }
-      }
-
-      return checks
-    })()
-
-    for (const check of toCheck) {
-      const before = check.before()
-      if (before === SKIP) {
-        continue
-      }
-      if (before === FAIL) {
-        return this.fail()
-      }
-      try {
-        return await check.check()
-      } catch (err) {
-        this.log.silly('runChecks: err = %j', (err && err.stack) || err)
-      }
-    }
-
-    return this.fail()
-  }
-
-  // Check if command is a valid Python to use.
-  // Will exit the Python finder on success.
-  // If on Windows, run in a CMD shell to support BAT/CMD launchers.
-  async checkCommand (command) {
-    let exec = command
-    let args = this.argsExecutable
-    let shell = false
-    if (this.win) {
-      // Arguments have to be manually quoted
-      exec = `"${exec}"`
-      args = args.map(a => `"${a}"`)
-      shell = true
-    }
-
-    this.log.verbose(`- executing "${command}" to get executable path`)
-    // Possible outcomes:
-    // - Error: not in PATH, not executable or execution fails
-    // - Gibberish: the next command to check version will fail
-    // - Absolute path to executable
-    try {
-      const execPath = await this.run(exec, args, shell)
-      this.addLog(`- executable path is "${execPath}"`)
-      return this.checkExecPath(execPath)
-    } catch (err) {
-      this.addLog(`- "${command}" is not in PATH or produced an error`)
-      throw err
-    }
-  }
-
-  // Check if the py launcher can find a valid Python to use.
-  // Will exit the Python finder on success.
-  // Python distributions on Windows install the "py.exe" launcher by
-  // default, so it is more likely to exist than a Python executable
-  // on the $PATH.
-  // Because the Python launcher supports Python 2 and Python 3, we should
-  // explicitly request a Python 3 version. This is done by supplying "-3" as
-  // the first command line argument. Since "py.exe -3" would be an invalid
-  // executable for "execFile", we have to use the launcher to figure out
-  // where the actual "python.exe" executable is located.
-  async checkPyLauncher () {
-    this.log.verbose(`- executing "${this.pyLauncher}" to get Python 3 executable path`)
-    // Possible outcomes: same as checkCommand
-    try {
-      const execPath = await this.run(this.pyLauncher, ['-3', ...this.argsExecutable], false)
-      this.addLog(`- executable path is "${execPath}"`)
-      return this.checkExecPath(execPath)
-    } catch (err) {
-      this.addLog(`- "${this.pyLauncher}" is not in PATH or produced an error`)
-      throw err
-    }
-  }
-
-  // Check if a Python executable is the correct version to use.
-  // Will exit the Python finder on success.
-  async checkExecPath (execPath) {
-    this.log.verbose(`- executing "${execPath}" to get version`)
-    // Possible outcomes:
-    // - Error: executable can not be run (likely meaning the command wasn't
-    //   a Python executable and the previous command produced gibberish)
-    // - Gibberish: somehow the last command produced an executable path,
-    //   this will fail when verifying the version
-    // - Version of the Python executable
-    try {
-      const version = await this.run(execPath, this.argsVersion, false)
-      this.addLog(`- version is "${version}"`)
-
-      const range = new semver.Range(this.semverRange)
-      let valid = false
-      try {
-        valid = range.test(version)
-      } catch (err) {
-        this.log.silly('range.test() threw:\n%s', err.stack)
-        this.addLog(`- "${execPath}" does not have a valid version`)
-        this.addLog('- is it a Python executable?')
-        throw err
-      }
-      if (!valid) {
-        this.addLog(`- version is ${version} - should be ${this.semverRange}`)
-        this.addLog('- THIS VERSION OF PYTHON IS NOT SUPPORTED')
-        throw new Error(`Found unsupported Python version ${version}`)
-      }
-      return this.succeed(execPath, version)
-    } catch (err) {
-      this.addLog(`- "${execPath}" could not be run`)
-      throw err
-    }
-  }
-
-  // Run an executable or shell command, trimming the output.
-  async run (exec, args, shell) {
-    const env = Object.assign({}, this.env)
-    env.TERM = 'dumb'
-    const opts = { env, shell }
-
-    this.log.silly('execFile: exec = %j', exec)
-    this.log.silly('execFile: args = %j', args)
-    this.log.silly('execFile: opts = %j', opts)
-    try {
-      const [err, stdout, stderr] = await this.execFile(exec, args, opts)
-      this.log.silly('execFile result: err = %j', (err && err.stack) || err)
-      this.log.silly('execFile result: stdout = %j', stdout)
-      this.log.silly('execFile result: stderr = %j', stderr)
-      return stdout.trim()
-    } catch (err) {
-      this.log.silly('execFile: threw:\n%s', err.stack)
-      throw err
-    }
-  }
-
-  succeed (execPath, version) {
-    this.log.info(`using Python version ${version} found at "${execPath}"`)
-    return execPath
-  }
-
-  fail () {
-    const errorLog = this.errorLog.join('\n')
-
-    const pathExample = this.win
-      ? 'C:\\Path\\To\\python.exe'
-      : '/path/to/pythonexecutable'
-    // For Windows 80 col console, use up to the column before the one marked
-    // with X (total 79 chars including logger prefix, 58 chars usable here):
-    //                                                           X
-    const info = [
-      '**********************************************************',
-      'You need to install the latest version of Python.',
-      'Node-gyp should be able to find and use Python. If not,',
-      'you can try one of the following options:',
-      `- Use the switch --python="${pathExample}"`,
-      '  (accepted by both node-gyp and npm)',
-      '- Set the environment variable PYTHON',
-      '- Set the npm configuration variable python:',
-      `  npm config set python "${pathExample}"`,
-      'For more information consult the documentation at:',
-      'https://github.com/nodejs/node-gyp#installation',
-      '**********************************************************'
-    ].join('\n')
-
-    this.log.error(`\n${errorLog}\n\n${info}\n`)
-    throw new Error('Could not find any Python installation to use')
-  }
-}
-
-module.exports = PythonFinder
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/find-visualstudio.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/find-visualstudio.js
deleted file mode 100644
index 2dc1930fd7828..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/find-visualstudio.js
+++ /dev/null
@@ -1,590 +0,0 @@
-'use strict'
-
-const log = require('./log')
-const { existsSync } = require('fs')
-const { win32: path } = require('path')
-const { regSearchKeys, execFile } = require('./util')
-
-class VisualStudioFinder {
-  static findVisualStudio = (...args) => new VisualStudioFinder(...args).findVisualStudio()
-
-  log = log.withPrefix('find VS')
-
-  regSearchKeys = regSearchKeys
-
-  constructor (nodeSemver, configMsvsVersion) {
-    this.nodeSemver = nodeSemver
-    this.configMsvsVersion = configMsvsVersion
-    this.errorLog = []
-    this.validVersions = []
-  }
-
-  // Logs a message at verbose level, but also saves it to be displayed later
-  // at error level if an error occurs. This should help diagnose the problem.
-  addLog (message) {
-    this.log.verbose(message)
-    this.errorLog.push(message)
-  }
-
-  async findVisualStudio () {
-    this.configVersionYear = null
-    this.configPath = null
-    if (this.configMsvsVersion) {
-      this.addLog('msvs_version was set from command line or npm config')
-      if (this.configMsvsVersion.match(/^\d{4}$/)) {
-        this.configVersionYear = parseInt(this.configMsvsVersion, 10)
-        this.addLog(
-          `- looking for Visual Studio version ${this.configVersionYear}`)
-      } else {
-        this.configPath = path.resolve(this.configMsvsVersion)
-        this.addLog(
-          `- looking for Visual Studio installed in "${this.configPath}"`)
-      }
-    } else {
-      this.addLog('msvs_version not set from command line or npm config')
-    }
-
-    if (process.env.VCINSTALLDIR) {
-      this.envVcInstallDir =
-        path.resolve(process.env.VCINSTALLDIR, '..')
-      this.addLog('running in VS Command Prompt, installation path is:\n' +
-        `"${this.envVcInstallDir}"\n- will only use this version`)
-    } else {
-      this.addLog('VCINSTALLDIR not set, not running in VS Command Prompt')
-    }
-
-    const checks = [
-      () => this.findVisualStudio2019OrNewerFromSpecifiedLocation(),
-      () => this.findVisualStudio2019OrNewerUsingSetupModule(),
-      () => this.findVisualStudio2019OrNewer(),
-      () => this.findVisualStudio2017FromSpecifiedLocation(),
-      () => this.findVisualStudio2017UsingSetupModule(),
-      () => this.findVisualStudio2017(),
-      () => this.findVisualStudio2015(),
-      () => this.findVisualStudio2013()
-    ]
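-    // the checks above run in order; the first one that returns install info wins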
-
-    for (const check of checks) {
-      const info = await check()
-      if (info) {
-        return this.succeed(info)
-      }
-    }
-
-    return this.fail()
-  }
-
-  succeed (info) {
-    this.log.info(`using VS${info.versionYear} (${info.version}) found at:` +
-                  `\n"${info.path}"` +
-                  '\nrun with --verbose for detailed information')
-    return info
-  }
-
-  fail () {
-    if (this.configMsvsVersion && this.envVcInstallDir) {
-      this.errorLog.push(
-        'msvs_version does not match this VS Command Prompt or the',
-        'installation cannot be used.')
-    } else if (this.configMsvsVersion) {
-      // If msvs_version was specified but finding VS failed, print what would
-      // have been accepted
-      this.errorLog.push('')
-      if (this.validVersions) {
-        this.errorLog.push('valid versions for msvs_version:')
-        this.validVersions.forEach((version) => {
-          this.errorLog.push(`- "${version}"`)
-        })
-      } else {
-        this.errorLog.push('no valid versions for msvs_version were found')
-      }
-    }
-
-    const errorLog = this.errorLog.join('\n')
-
-    // For Windows 80 col console, use up to the column before the one marked
-    // with X (total 79 chars including logger prefix, 62 chars usable here):
-    //                                                               X
-    const infoLog = [
-      '**************************************************************',
-      'You need to install the latest version of Visual Studio',
-      'including the "Desktop development with C++" workload.',
-      'For more information consult the documentation at:',
-      'https://github.com/nodejs/node-gyp#on-windows',
-      '**************************************************************'
-    ].join('\n')
-
-    this.log.error(`\n${errorLog}\n\n${infoLog}\n`)
-    throw new Error('Could not find any Visual Studio installation to use')
-  }
-
-  async findVisualStudio2019OrNewerFromSpecifiedLocation () {
-    return this.findVSFromSpecifiedLocation([2019, 2022])
-  }
-
-  async findVisualStudio2017FromSpecifiedLocation () {
-    if (this.nodeSemver.major >= 22) {
-      this.addLog(
-        'not looking for VS2017 as it is only supported up to Node.js 21')
-      return null
-    }
-    return this.findVSFromSpecifiedLocation([2017])
-  }
-
-  async findVSFromSpecifiedLocation (supportedYears) {
-    if (!this.envVcInstallDir) {
-      return null
-    }
-    const info = {
-      path: path.resolve(this.envVcInstallDir),
-      // Assume the version specified by the user is correct.
-      // Since Visual Studio 2015, the Developer Command Prompt sets the
-      // VSCMD_VER environment variable which contains the version information
-      // for Visual Studio.
-      // https://learn.microsoft.com/en-us/visualstudio/ide/reference/command-prompt-powershell?view=vs-2022
-      version: process.env.VSCMD_VER,
-      packages: [
-        'Microsoft.VisualStudio.Component.VC.Tools.x86.x64',
-        // Assume MSBuild exists. It will be checked in processing.
-        'Microsoft.VisualStudio.VC.MSBuild.Base'
-      ]
-    }
-
-    // Is there a better way to get SDK information?
-    const envWindowsSDKVersion = process.env.WindowsSDKVersion
-    const sdkVersionMatched = envWindowsSDKVersion?.match(/^(\d+)\.(\d+)\.(\d+)\..*/)
-    if (sdkVersionMatched) {
-      info.packages.push(`Microsoft.VisualStudio.Component.Windows10SDK.${sdkVersionMatched[3]}.Desktop`)
-    }
-    // pass for further processing
-    return this.processData([info], supportedYears)
-  }
-
-  async findVisualStudio2019OrNewerUsingSetupModule () {
-    return this.findNewVSUsingSetupModule([2019, 2022])
-  }
-
-  async findVisualStudio2017UsingSetupModule () {
-    if (this.nodeSemver.major >= 22) {
-      this.addLog(
-        'not looking for VS2017 as it is only supported up to Node.js 21')
-      return null
-    }
-    return this.findNewVSUsingSetupModule([2017])
-  }
-
-  async findNewVSUsingSetupModule (supportedYears) {
-    const ps = path.join(process.env.SystemRoot, 'System32',
-      'WindowsPowerShell', 'v1.0', 'powershell.exe')
-    const vcInstallDir = this.envVcInstallDir
-
-    const checkModuleArgs = [
-      '-NoProfile',
-      '-Command',
-      '&{@(Get-Module -ListAvailable -Name VSSetup).Version.ToString()}'
-    ]
-    this.log.silly('Running', ps, checkModuleArgs)
-    const [cErr] = await this.execFile(ps, checkModuleArgs)
-    if (cErr) {
-      this.addLog('VSSetup module doesn\'t seem to exist. You can install it via: "Install-Module VSSetup -Scope CurrentUser"')
-      this.log.silly('VSSetup error = %j', cErr && (cErr.stack || cErr))
-      return null
-    }
-    const filterArg = vcInstallDir !== undefined ? `| where {$_.InstallationPath -eq '${vcInstallDir}' }` : ''
-    const psArgs = [
-      '-NoProfile',
-      '-Command',
-      `&{Get-VSSetupInstance ${filterArg} | ConvertTo-Json -Depth 3}`
-    ]
-
-    this.log.silly('Running', ps, psArgs)
-    const [err, stdout, stderr] = await this.execFile(ps, psArgs)
-    let parsedData = this.parseData(err, stdout, stderr)
-    if (parsedData === null) {
-      return null
-    }
-    this.log.silly('Parsed data', parsedData)
-    if (!Array.isArray(parsedData)) {
-      // if there is only one result, PowerShell will output a single object rather than an array
-      parsedData = [parsedData]
-    }
-    // normalize output
-    parsedData = parsedData.map((info) => {
-      info.path = info.InstallationPath
-      info.version = `${info.InstallationVersion.Major}.${info.InstallationVersion.Minor}.${info.InstallationVersion.Build}.${info.InstallationVersion.Revision}`
-      info.packages = info.Packages.map((p) => p.Id)
-      return info
-    })
-    // pass for further processing
-    return this.processData(parsedData, supportedYears)
-  }
-
-  // Invoke the PowerShell script to get information about Visual Studio 2019
-  // or newer installations
-  async findVisualStudio2019OrNewer () {
-    return this.findNewVS([2019, 2022])
-  }
-
-  // Invoke the PowerShell script to get information about Visual Studio 2017
-  async findVisualStudio2017 () {
-    if (this.nodeSemver.major >= 22) {
-      this.addLog(
-        'not looking for VS2017 as it is only supported up to Node.js 21')
-      return null
-    }
-    return this.findNewVS([2017])
-  }
-
-  // Invoke the PowerShell script to get information about Visual Studio 2017
-  // or newer installations
-  async findNewVS (supportedYears) {
-    const ps = path.join(process.env.SystemRoot, 'System32',
-      'WindowsPowerShell', 'v1.0', 'powershell.exe')
-    const csFile = path.join(__dirname, 'Find-VisualStudio.cs')
-    const psArgs = [
-      '-ExecutionPolicy',
-      'Unrestricted',
-      '-NoProfile',
-      '-Command',
-      '&{Add-Type -Path \'' + csFile + '\';' + '[VisualStudioConfiguration.Main]::PrintJson()}'
-    ]
-
-    this.log.silly('Running', ps, psArgs)
-    const [err, stdout, stderr] = await this.execFile(ps, psArgs)
-    const parsedData = this.parseData(err, stdout, stderr, { checkIsArray: true })
-    if (parsedData === null) {
-      return null
-    }
-    return this.processData(parsedData, supportedYears)
-  }
-
-  // Parse the output of the PowerShell script, make sanity checks
-  parseData (err, stdout, stderr, sanityCheckOptions) {
-    const defaultOptions = {
-      checkIsArray: false
-    }
-
-    // Merging provided options with the default options
-    const sanityOptions = { ...defaultOptions, ...sanityCheckOptions }
-
-    this.log.silly('PS stderr = %j', stderr)
-
-    const failPowershell = (failureDetails) => {
-      this.addLog(
-        'could not use PowerShell to find Visual Studio 2017 or newer, try ' +
-        're-running with \'--loglevel silly\' for more details.\n' +
-        `Failure details: ${failureDetails}`)
-      return null
-    }
-
-    if (err) {
-      this.log.silly('PS err = %j', err && (err.stack || err))
-      return failPowershell(`${err}`.substring(0, 40))
-    }
-
-    let vsInfo
-    try {
-      vsInfo = JSON.parse(stdout)
-    } catch (e) {
-      this.log.silly('PS stdout = %j', stdout)
-      this.log.silly(e)
-      return failPowershell()
-    }
-
-    if (sanityOptions.checkIsArray && !Array.isArray(vsInfo)) {
-      this.log.silly('PS stdout = %j', stdout)
-      return failPowershell('Expected array as output of the PS script')
-    }
-    return vsInfo
-  }
-
-  // Process parsed data containing information about VS installations
-  // Look for the required parts, extract and output them back
-  processData (vsInfo, supportedYears) {
-    vsInfo = vsInfo.map((info) => {
-      this.log.silly(`processing installation: "${info.path}"`)
-      info.path = path.resolve(info.path)
-      const ret = this.getVersionInfo(info)
-      ret.path = info.path
-      ret.msBuild = this.getMSBuild(info, ret.versionYear)
-      ret.toolset = this.getToolset(info, ret.versionYear)
-      ret.sdk = this.getSDK(info)
-      return ret
-    })
-    this.log.silly('vsInfo:', vsInfo)
-
-    // Remove installations whose version number failed to parse or is
-    // from the future, and any unsupported versions
-    vsInfo = vsInfo.filter((info) => {
-      if (info.versionYear && supportedYears.indexOf(info.versionYear) !== -1) {
-        return true
-      }
-      this.addLog(`${info.versionYear ? 'unsupported' : 'unknown'} version "${info.version}" found at "${info.path}"`)
-      return false
-    })
-
-    // Sort to place newer versions first
-    vsInfo.sort((a, b) => b.versionYear - a.versionYear)
-
-    for (let i = 0; i < vsInfo.length; ++i) {
-      const info = vsInfo[i]
-      this.addLog(`checking VS${info.versionYear} (${info.version}) found ` +
-                  `at:\n"${info.path}"`)
-
-      if (info.msBuild) {
-        this.addLog('- found "Visual Studio C++ core features"')
-      } else {
-        this.addLog('- "Visual Studio C++ core features" missing')
-        continue
-      }
-
-      if (info.toolset) {
-        this.addLog(`- found VC++ toolset: ${info.toolset}`)
-      } else {
-        this.addLog('- missing any VC++ toolset')
-        continue
-      }
-
-      if (info.sdk) {
-        this.addLog(`- found Windows SDK: ${info.sdk}`)
-      } else {
-        this.addLog('- missing any Windows SDK')
-        continue
-      }
-
-      if (!this.checkConfigVersion(info.versionYear, info.path)) {
-        continue
-      }
-
-      return info
-    }
-
-    this.addLog(
-      'could not find a version of Visual Studio 2017 or newer to use')
-    return null
-  }
-
-  // Helper - process version information
-  getVersionInfo (info) {
-    const match = /^(\d+)\.(\d+)(?:\..*)?/.exec(info.version)
-    if (!match) {
-      this.log.silly('- failed to parse version:', info.version)
-      return {}
-    }
-    this.log.silly('- version match = %j', match)
-    const ret = {
-      version: info.version,
-      versionMajor: parseInt(match[1], 10),
-      versionMinor: parseInt(match[2], 10)
-    }
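-    // map the VS internal major version to its product year (15 = 2017, 16 = 2019, 17 = 2022)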
-    if (ret.versionMajor === 15) {
-      ret.versionYear = 2017
-      return ret
-    }
-    if (ret.versionMajor === 16) {
-      ret.versionYear = 2019
-      return ret
-    }
-    if (ret.versionMajor === 17) {
-      ret.versionYear = 2022
-      return ret
-    }
-    this.log.silly('- unsupported version:', ret.versionMajor)
-    return {}
-  }
-
-  msBuildPathExists (path) {
-    return existsSync(path)
-  }
-
-  // Helper - process MSBuild information
-  getMSBuild (info, versionYear) {
-    const pkg = 'Microsoft.VisualStudio.VC.MSBuild.Base'
-    const msbuildPath = path.join(info.path, 'MSBuild', 'Current', 'Bin', 'MSBuild.exe')
-    const msbuildPathArm64 = path.join(info.path, 'MSBuild', 'Current', 'Bin', 'arm64', 'MSBuild.exe')
-    if (info.packages.indexOf(pkg) !== -1) {
-      this.log.silly('- found VC.MSBuild.Base')
-      if (versionYear === 2017) {
-        return path.join(info.path, 'MSBuild', '15.0', 'Bin', 'MSBuild.exe')
-      }
-      if (versionYear === 2019) {
-        if (process.arch === 'arm64' && this.msBuildPathExists(msbuildPathArm64)) {
-          return msbuildPathArm64
-        } else {
-          return msbuildPath
-        }
-      }
-    }
-    /**
-     * Visual Studio 2022 doesn't have the MSBuild package.
-     * Support for compiling _on_ ARM64 was added in MSVC 14.32.31326,
-     * so let's leverage it if the user has an ARM64 device.
-     */
-    if (process.arch === 'arm64' && this.msBuildPathExists(msbuildPathArm64)) {
-      return msbuildPathArm64
-    } else if (this.msBuildPathExists(msbuildPath)) {
-      return msbuildPath
-    }
-    return null
-  }
-
-  // Helper - process toolset information
-  getToolset (info, versionYear) {
-    const pkg = 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64'
-    const express = 'Microsoft.VisualStudio.WDExpress'
-
-    if (info.packages.indexOf(pkg) !== -1) {
-      this.log.silly('- found VC.Tools.x86.x64')
-    } else if (info.packages.indexOf(express) !== -1) {
-      this.log.silly('- found Visual Studio Express (looking for toolset)')
-    } else {
-      return null
-    }
-
-    if (versionYear === 2017) {
-      return 'v141'
-    } else if (versionYear === 2019) {
-      return 'v142'
-    } else if (versionYear === 2022) {
-      return 'v143'
-    }
-    this.log.silly('- invalid versionYear:', versionYear)
-    return null
-  }
-
-  // Helper - process Windows SDK information
-  getSDK (info) {
-    const win8SDK = 'Microsoft.VisualStudio.Component.Windows81SDK'
-    const win10SDKPrefix = 'Microsoft.VisualStudio.Component.Windows10SDK.'
-    const win11SDKPrefix = 'Microsoft.VisualStudio.Component.Windows11SDK.'
-
-    let Win10or11SDKVer = 0
-    info.packages.forEach((pkg) => {
-      if (!pkg.startsWith(win10SDKPrefix) && !pkg.startsWith(win11SDKPrefix)) {
-        return
-      }
-      const parts = pkg.split('.')
-      if (parts.length > 5 && parts[5] !== 'Desktop') {
-        this.log.silly('- ignoring non-Desktop Win10/11SDK:', pkg)
-        return
-      }
-      const foundSdkVer = parseInt(parts[4], 10)
-      if (isNaN(foundSdkVer)) {
-        // Microsoft.VisualStudio.Component.Windows10SDK.IpOverUsb
-        this.log.silly('- failed to parse Win10/11SDK number:', pkg)
-        return
-      }
-      this.log.silly('- found Win10/11SDK:', foundSdkVer)
-      Win10or11SDKVer = Math.max(Win10or11SDKVer, foundSdkVer)
-    })
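-    // prefer the newest Win10/11 SDK build found; otherwise fall back to the 8.1 SDK below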
-
-    if (Win10or11SDKVer !== 0) {
-      return `10.0.${Win10or11SDKVer}.0`
-    } else if (info.packages.indexOf(win8SDK) !== -1) {
-      this.log.silly('- found Win8SDK')
-      return '8.1'
-    }
-    return null
-  }
-
-  // Find an installation of Visual Studio 2015 to use
-  async findVisualStudio2015 () {
-    if (this.nodeSemver.major >= 19) {
-      this.addLog(
-        'not looking for VS2015 as it is only supported up to Node.js 18')
-      return null
-    }
-    return this.findOldVS({
-      version: '14.0',
-      versionMajor: 14,
-      versionMinor: 0,
-      versionYear: 2015,
-      toolset: 'v140'
-    })
-  }
-
-  // Find an installation of Visual Studio 2013 to use
-  async findVisualStudio2013 () {
-    if (this.nodeSemver.major >= 9) {
-      this.addLog(
-        'not looking for VS2013 as it is only supported up to Node.js 8')
-      return null
-    }
-    return this.findOldVS({
-      version: '12.0',
-      versionMajor: 12,
-      versionMinor: 0,
-      versionYear: 2013,
-      toolset: 'v120'
-    })
-  }
-
-  // Helper - common code for VS2013 and VS2015
-  async findOldVS (info) {
-    const regVC7 = ['HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7',
-      'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7']
-    const regMSBuild = 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions'
-
-    this.addLog(`looking for Visual Studio ${info.versionYear}`)
-    try {
-      let res = await this.regSearchKeys(regVC7, info.version, [])
-      const vsPath = path.resolve(res, '..')
-      this.addLog(`- found in "${vsPath}"`)
-      const msBuildRegOpts = process.arch === 'ia32' ? [] : ['/reg:32']
-
-      try {
-        res = await this.regSearchKeys([`${regMSBuild}\\${info.version}`], 'MSBuildToolsPath', msBuildRegOpts)
-      } catch (err) {
-        this.addLog('- could not find MSBuild in registry for this version')
-        return null
-      }
-
-      const msBuild = path.join(res, 'MSBuild.exe')
-      this.addLog(`- MSBuild in "${msBuild}"`)
-
-      if (!this.checkConfigVersion(info.versionYear, vsPath)) {
-        return null
-      }
-
-      info.path = vsPath
-      info.msBuild = msBuild
-      info.sdk = null
-      return info
-    } catch (err) {
-      this.addLog('- not found')
-      return null
-    }
-  }
-
-  // After finding a usable version of Visual Studio:
-  // - add it to validVersions to be displayed at the end if a specific
-  //   version was requested and not found;
-  // - check if this is the version that was requested;
-  // - check if this matches the Visual Studio Command Prompt.
-  checkConfigVersion (versionYear, vsPath) {
-    this.validVersions.push(versionYear)
-    this.validVersions.push(vsPath)
-
-    if (this.configVersionYear && this.configVersionYear !== versionYear) {
-      this.addLog('- msvs_version does not match this version')
-      return false
-    }
-    if (this.configPath &&
-        path.relative(this.configPath, vsPath) !== '') {
-      this.addLog('- msvs_version does not point to this installation')
-      return false
-    }
-    if (this.envVcInstallDir &&
-        path.relative(this.envVcInstallDir, vsPath) !== '') {
-      this.addLog('- does not match this Visual Studio Command Prompt')
-      return false
-    }
-
-    return true
-  }
-
-  async execFile (exec, args) {
-    return await execFile(exec, args, { encoding: 'utf8' })
-  }
-}
-
-module.exports = VisualStudioFinder
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/install.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/install.js
deleted file mode 100644
index 7196a316296fb..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/install.js
+++ /dev/null
@@ -1,415 +0,0 @@
-'use strict'
-
-const { createWriteStream, promises: fs } = require('graceful-fs')
-const os = require('os')
-const { backOff } = require('exponential-backoff')
-const tar = require('tar')
-const path = require('path')
-const { Transform, promises: { pipeline } } = require('stream')
-const crypto = require('crypto')
-const log = require('./log')
-const semver = require('semver')
-const { download } = require('./download')
-const processRelease = require('./process-release')
-
-const win = process.platform === 'win32'
-
-async function install (gyp, argv) {
-  log.stdout()
-  const release = processRelease(argv, gyp, process.version, process.release)
-  // Detecting target_arch based on logic from create-config-gypi.js. Used on Windows only.
-  const arch = win ? (gyp.opts.target_arch || gyp.opts.arch || process.arch || 'ia32') : ''
-  // Used to prevent downloading tarball if only new node.lib is required on Windows.
-  let shouldDownloadTarball = true
-
-  // Determine which node dev files version we are installing
-  log.verbose('install', 'input version string %j', release.version)
-
-  if (!release.semver) {
-    // could not parse the version string with semver
-    throw new Error('Invalid version number: ' + release.version)
-  }
-
-  if (semver.lt(release.version, '0.8.0')) {
-    throw new Error('Target version must be `0.8.0` or greater. Got: ' + release.version)
-  }
-
-  // 0.x.y-pre versions are not published yet and cannot be installed. Bail.
-  if (release.semver.prerelease[0] === 'pre') {
-    log.verbose('detected "pre" node version', release.version)
-    if (!gyp.opts.nodedir) {
-      throw new Error('"pre" versions of node cannot be installed, use the --nodedir flag instead')
-    }
-    log.verbose('--nodedir flag was passed; skipping install', gyp.opts.nodedir)
-    return
-  }
-
-  // log the version of the dev files being installed
-  log.verbose('install', 'installing version: %s', release.versionDir)
-
-  // the directory where the dev files will be installed
-  const devDir = path.resolve(gyp.devDir, release.versionDir)
-
-  // If '--ensure' was passed, then don't *always* install the version;
-  // check if it is already installed, and only install when needed
-  if (gyp.opts.ensure) {
-    log.verbose('install', '--ensure was passed, so won\'t reinstall if already installed')
-    try {
-      await fs.stat(devDir)
-    } catch (err) {
-      if (err.code === 'ENOENT') {
-        log.verbose('install', 'version not already installed, continuing with install', release.version)
-        try {
-          return await go()
-        } catch (err) {
-          return rollback(err)
-        }
-      } else if (err.code === 'EACCES') {
-        return eaccesFallback(err)
-      }
-      throw err
-    }
-    log.verbose('install', 'version is already installed, need to check "installVersion"')
-    const installVersionFile = path.resolve(devDir, 'installVersion')
-    let installVersion = 0
-    try {
-      const ver = await fs.readFile(installVersionFile, 'ascii')
-      installVersion = parseInt(ver, 10) || 0
-    } catch (err) {
-      if (err.code !== 'ENOENT') {
-        throw err
-      }
-    }
-    log.verbose('got "installVersion"', installVersion)
-    log.verbose('needs "installVersion"', gyp.package.installVersion)
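-    // a smaller number than gyp.package.installVersion means the on-disk layout is stale and must be reinstalled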
-    if (installVersion < gyp.package.installVersion) {
-      log.verbose('install', 'version is no good; reinstalling')
-      try {
-        return await go()
-      } catch (err) {
-        return rollback(err)
-      }
-    }
-    log.verbose('install', 'version is good')
-    if (win) {
-      log.verbose('on Windows; need to check node.lib')
-      const nodeLibPath = path.resolve(devDir, arch, 'node.lib')
-      try {
-        await fs.stat(nodeLibPath)
-      } catch (err) {
-        if (err.code === 'ENOENT') {
-          log.verbose('install', `version not already installed for ${arch}, continuing with install`, release.version)
-          try {
-            shouldDownloadTarball = false
-            return await go()
-          } catch (err) {
-            return rollback(err)
-          }
-        } else if (err.code === 'EACCES') {
-          return eaccesFallback(err)
-        }
-        throw err
-      }
-    }
-  } else {
-    try {
-      return await go()
-    } catch (err) {
-      return rollback(err)
-    }
-  }
-
-  async function copyDirectory (src, dest) {
-    try {
-      await fs.stat(src)
-    } catch {
-      throw new Error(`Missing source directory for copy: ${src}`)
-    }
-    await fs.mkdir(dest, { recursive: true })
-    const entries = await fs.readdir(src, { withFileTypes: true })
-    for (const entry of entries) {
-      if (entry.isDirectory()) {
-        await copyDirectory(path.join(src, entry.name), path.join(dest, entry.name))
-      } else if (entry.isFile()) {
-        // with parallel installs, copying files may cause file errors on
-        // Windows so use an exponential backoff to resolve collisions
-        await backOff(async () => {
-          try {
-            await fs.copyFile(path.join(src, entry.name), path.join(dest, entry.name))
-          } catch (err) {
-            // if ensure, check if file already exists and that's good enough
-            if (gyp.opts.ensure && err.code === 'EBUSY') {
-              try {
-                await fs.stat(path.join(dest, entry.name))
-                return
-              } catch {}
-            }
-            throw err
-          }
-        })
-      } else {
-        throw new Error('Unexpected directory entry type')
-      }
-    }
-  }
-
-  async function go () {
-    log.verbose('ensuring devDir is created', devDir)
-
-    // first create the dir for the node dev files
-    try {
-      const created = await fs.mkdir(devDir, { recursive: true })
-
-      if (created) {
-        log.verbose('created devDir', created)
-      }
-    } catch (err) {
-      if (err.code === 'EACCES') {
-        return eaccesFallback(err)
-      }
-
-      throw err
-    }
-
-    // now download the node tarball
-    const tarPath = gyp.opts.tarball
-    let extractErrors = false
-    let extractCount = 0
-    const contentShasums = {}
-    const expectShasums = {}
-
-    // checks if a file to be extracted from the tarball is valid.
-    // only .h header files and the .gypi files get extracted
-    function isValid (path) {
-      const isValid = valid(path)
-      if (isValid) {
-        log.verbose('extracted file from tarball', path)
-        extractCount++
-      } else {
-        // invalid
-        log.silly('ignoring from tarball', path)
-      }
-      return isValid
-    }
-
-    function onwarn (code, message) {
-      extractErrors = true
-      log.error('error while extracting tarball', code, message)
-    }
-
-    // download the tarball and extract!
-    // Omitted on Windows if only a new node.lib is required
-
-    // on Windows there can be file errors from tar if parallel installs
-    // are happening (not uncommon with multiple native modules) so
-    // extract the tarball to a temp directory first and then copy over
-    const tarExtractDir = win ? await fs.mkdtemp(path.join(os.tmpdir(), 'node-gyp-tmp-')) : devDir
-
-    try {
-      if (shouldDownloadTarball) {
-        if (tarPath) {
-          await tar.extract({
-            file: tarPath,
-            strip: 1,
-            filter: isValid,
-            onwarn,
-            cwd: tarExtractDir
-          })
-        } else {
-          try {
-            const res = await download(gyp, release.tarballUrl)
-
-            if (res.status !== 200) {
-              throw new Error(`${res.status} response downloading ${release.tarballUrl}`)
-            }
-
-            await pipeline(
-              res.body,
-              // content checksum
-              new ShaSum((_, checksum) => {
-                const filename = path.basename(release.tarballUrl).trim()
-                contentShasums[filename] = checksum
-                log.verbose('content checksum', filename, checksum)
-              }),
-              tar.extract({
-                strip: 1,
-                cwd: tarExtractDir,
-                filter: isValid,
-                onwarn
-              })
-            )
-          } catch (err) {
-          // something went wrong downloading the tarball?
-            if (err.code === 'ENOTFOUND') {
-              throw new Error('This is most likely not a problem with node-gyp or the package itself and\n' +
-              'is related to network connectivity. In most cases you are behind a proxy or have bad \n' +
-              'network settings.')
-            }
-            throw err
-          }
-        }
-
-        // after the tarball has finished extracting, make sure we got usable files
-        if (extractErrors || extractCount === 0) {
-          throw new Error('There was a fatal problem while downloading/extracting the tarball')
-        }
-
-        log.verbose('tarball', 'done parsing tarball')
-      }
-
-      const installVersionPath = path.resolve(tarExtractDir, 'installVersion')
-      await Promise.all([
-      // need to download node.lib
-        ...(win ? [downloadNodeLib()] : []),
-        // write the "installVersion" file
-        fs.writeFile(installVersionPath, gyp.package.installVersion + '\n'),
-        // Only download SHASUMS256.txt if we downloaded something in need of SHA verification
-        ...(!tarPath || win ? [downloadShasums()] : [])
-      ])
-
-      log.verbose('download contents checksum', JSON.stringify(contentShasums))
-      // check content shasums
-      for (const k in contentShasums) {
-        log.verbose('validating download checksum for ' + k, '(%s == %s)', contentShasums[k], expectShasums[k])
-        if (contentShasums[k] !== expectShasums[k]) {
-          throw new Error(k + ' local checksum ' + contentShasums[k] + ' does not match remote ' + expectShasums[k])
-        }
-      }
-
-      // copy over the files from the temp tarball extract directory to devDir
-      if (tarExtractDir !== devDir) {
-        await copyDirectory(tarExtractDir, devDir)
-      }
-    } finally {
-      if (tarExtractDir !== devDir) {
-        try {
-          // try to cleanup temp dir
-          await fs.rm(tarExtractDir, { recursive: true })
-        } catch {
-          log.warn('failed to clean up temp tarball extract directory')
-        }
-      }
-    }
-
-    async function downloadShasums () {
-      log.verbose('check download content checksum, need to download `SHASUMS256.txt`...')
-      log.verbose('checksum url', release.shasumsUrl)
-
-      const res = await download(gyp, release.shasumsUrl)
-
-      if (res.status !== 200) {
-        throw new Error(`${res.status} status code downloading checksum`)
-      }
-
-      for (const line of (await res.text()).trim().split('\n')) {
-        const items = line.trim().split(/\s+/)
-        if (items.length !== 2) {
-          return
-        }
-
-        // 0035d18e2dcf9aad669b1c7c07319e17abfe3762  ./node-v0.11.4.tar.gz
-        const name = items[1].replace(/^\.\//, '')
-        expectShasums[name] = items[0]
-      }
-
-      log.verbose('checksum data', JSON.stringify(expectShasums))
-    }
-
-    async function downloadNodeLib () {
-      log.verbose('on Windows; need to download `' + release.name + '.lib`...')
-      const dir = path.resolve(tarExtractDir, arch)
-      const targetLibPath = path.resolve(dir, release.name + '.lib')
-      const { libUrl, libPath } = release[arch]
-      const name = `${arch} ${release.name}.lib`
-      log.verbose(name, 'dir', dir)
-      log.verbose(name, 'url', libUrl)
-
-      await fs.mkdir(dir, { recursive: true })
-      log.verbose('streaming', name, 'to:', targetLibPath)
-
-      const res = await download(gyp, libUrl)
-
-      // Since only the required node.lib is downloaded, throw an error if it is not fetched
-      if (res.status !== 200) {
-        throw new Error(`${res.status} status code downloading ${name}`)
-      }
-
-      return pipeline(
-        res.body,
-        new ShaSum((_, checksum) => {
-          contentShasums[libPath] = checksum
-          log.verbose('content checksum', libPath, checksum)
-        }),
-        createWriteStream(targetLibPath)
-      )
-    } // downloadNodeLib()
-  } // go()
-
-  /**
-   * Checks if a given filename is "valid" for this installation.
-   */
-
-  function valid (file) {
-    // header files
-    const extname = path.extname(file)
-    return extname === '.h' || extname === '.gypi'
-  }
-
-  async function rollback (err) {
-    log.warn('install', 'got an error, rolling back install')
-    // roll-back the install if anything went wrong
-    await gyp.commands.remove([release.versionDir])
-    throw err
-  }
-
-  /**
-   * The EACCES fallback is a workaround for npm's `sudo` behavior, where
-   * it drops the permissions before invoking any child processes (like
-   * node-gyp). So what happens is the "nobody" user doesn't have
-   * permission to create the dev dir. As a fallback, make the tmpdir() be
-   * the dev dir for this installation. This is not ideal, but at least
-   * the compilation will succeed...
-   */
-
-  async function eaccesFallback (err) {
-    const noretry = '--node_gyp_internal_noretry'
-    if (argv.indexOf(noretry) !== -1) {
-      throw err
-    }
-    const tmpdir = os.tmpdir()
-    gyp.devDir = path.resolve(tmpdir, '.node-gyp')
-    let userString = ''
-    try {
-      // os.userInfo can fail on some systems, it's not critical here
-      userString = ` ("${os.userInfo().username}")`
-    } catch (e) {}
-    log.warn('EACCES', 'current user%s does not have permission to access the dev dir "%s"', userString, devDir)
-    log.warn('EACCES', 'attempting to reinstall using temporary dev dir "%s"', gyp.devDir)
-    if (process.cwd() === tmpdir) {
-      log.verbose('tmpdir == cwd', 'will automatically remove dev files afterwards to save disk space')
-      gyp.todo.push({ name: 'remove', args: argv })
-    }
-    return gyp.commands.install([noretry].concat(argv))
-  }
-}
-
-class ShaSum extends Transform {
-  constructor (callback) {
-    super()
-    this._callback = callback
-    this._digester = crypto.createHash('sha256')
-  }
-
-  _transform (chunk, _, callback) {
-    this._digester.update(chunk)
-    callback(null, chunk)
-  }
-
-  _flush (callback) {
-    this._callback(null, this._digester.digest('hex'))
-    callback()
-  }
-}
-
-module.exports = install
-module.exports.usage = 'Install node development files for the specified node version.'
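
For reference, the `ShaSum` transform removed above hashes a stream in flight while passing bytes through unchanged, and `downloadShasums` parses `SHASUMS256.txt` lines of the form `<checksum>  <filename>`. A minimal standalone sketch of the same pattern, using only Node built-ins (the `copyAndVerify` helper is illustrative, not part of the deleted module):

```js
'use strict'

const crypto = require('crypto')
const { Transform } = require('stream')
const { pipeline } = require('stream/promises')
const { createReadStream, createWriteStream } = require('fs')

// Pass-through transform that reports the SHA-256 of everything piped
// through it, mirroring the ShaSum class deleted above.
class ShaSum extends Transform {
  constructor (onDigest) {
    super()
    this._digester = crypto.createHash('sha256')
    this._onDigest = onDigest
  }

  _transform (chunk, _enc, cb) {
    this._digester.update(chunk)
    cb(null, chunk) // forward the chunk unchanged
  }

  _flush (cb) {
    this._onDigest(this._digester.digest('hex'))
    cb()
  }
}

// Copy a file while computing its checksum, then compare against a
// parsed SHASUMS256.txt entry such as:
//   "0035d18e...  ./node-v0.11.4.tar.gz"
async function copyAndVerify (src, dest, expected) {
  let actual
  await pipeline(
    createReadStream(src),
    new ShaSum((digest) => { actual = digest }),
    createWriteStream(dest)
  )
  if (expected && actual !== expected) {
    throw new Error(`${src} local checksum ${actual} does not match remote ${expected}`)
  }
  return actual
}

module.exports = { ShaSum, copyAndVerify }
```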
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/list.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/list.js
deleted file mode 100644
index 36889ad4f71e2..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/list.js
+++ /dev/null
@@ -1,26 +0,0 @@
-'use strict'
-
-const fs = require('graceful-fs').promises
-const log = require('./log')
-
-async function list (gyp, args) {
-  const devDir = gyp.devDir
-  log.verbose('list', 'using node-gyp dir:', devDir)
-
-  let versions = []
-  try {
-    const dir = await fs.readdir(devDir)
-    if (Array.isArray(dir)) {
-      versions = dir.filter((v) => v !== 'current')
-    }
-  } catch (err) {
-    if (err && err.code !== 'ENOENT') {
-      throw err
-    }
-  }
-
-  return versions
-}
-
-module.exports = list
-module.exports.usage = 'Prints a listing of the currently installed node development files'
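
The `list` command above is a plain directory listing of `devDir` with the `current` marker filtered out and a missing directory treated as empty. An equivalent standalone sketch (the path shown is illustrative):

```js
'use strict'

const fs = require('fs').promises

// List installed node-gyp header versions, skipping the 'current' marker
// the installer leaves behind; a missing devDir just means nothing is
// installed yet.
async function listVersions (devDir) {
  try {
    return (await fs.readdir(devDir)).filter((v) => v !== 'current')
  } catch (err) {
    if (err.code !== 'ENOENT') {
      throw err
    }
    return []
  }
}

// Path is illustrative; node-gyp derives its devDir from env-paths.
listVersions(`${process.env.HOME}/.node-gyp`).then(console.log)
```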
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/log.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/log.js
deleted file mode 100644
index 36fa2487f5ce1..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/log.js
+++ /dev/null
@@ -1,168 +0,0 @@
-'use strict'
-
-const { log } = require('proc-log')
-const { format } = require('util')
-
-// helper to emit log messages with a predefined prefix
-const withPrefix = (prefix) => log.LEVELS.reduce((acc, level) => {
-  acc[level] = (...args) => log[level](prefix, ...args)
-  return acc
-}, {})
-
-// very basic ansi color generator
-const COLORS = {
-  wrap: (str, colors) => {
-    const codes = colors.filter(c => typeof c === 'number')
-    return `\x1b[${codes.join(';')}m${str}\x1b[0m`
-  },
-  inverse: 7,
-  fg: {
-    black: 30,
-    red: 31,
-    green: 32,
-    yellow: 33,
-    blue: 34,
-    magenta: 35,
-    cyan: 36,
-    white: 37
-  },
-  bg: {
-    black: 40,
-    red: 41,
-    green: 42,
-    yellow: 43,
-    blue: 44,
-    magenta: 45,
-    cyan: 46,
-    white: 47
-  }
-}
-
-class Logger {
-  #buffer = []
-  #paused = null
-  #level = null
-  #stream = null
-
-  // ordered from loudest to quietest
-  #levels = [{
-    id: 'silly',
-    display: 'sill',
-    style: { inverse: true }
-  }, {
-    id: 'verbose',
-    display: 'verb',
-    style: { fg: 'cyan', bg: 'black' }
-  }, {
-    id: 'info',
-    style: { fg: 'green' }
-  }, {
-    id: 'http',
-    style: { fg: 'green', bg: 'black' }
-  }, {
-    id: 'notice',
-    style: { fg: 'cyan', bg: 'black' }
-  }, {
-    id: 'warn',
-    display: 'WARN',
-    style: { fg: 'black', bg: 'yellow' }
-  }, {
-    id: 'error',
-    display: 'ERR!',
-    style: { fg: 'red', bg: 'black' }
-  }]
-
-  constructor (stream) {
-    process.on('log', (...args) => this.#onLog(...args))
-    this.#levels = new Map(this.#levels.map((level, index) => [level.id, { ...level, index }]))
-    this.level = 'info'
-    this.stream = stream
-    log.pause()
-  }
-
-  get stream () {
-    return this.#stream
-  }
-
-  set stream (stream) {
-    this.#stream = stream
-  }
-
-  get level () {
-    return this.#levels.get(this.#level) ?? null
-  }
-
-  set level (level) {
-    this.#level = this.#levels.get(level)?.id ?? null
-  }
-
-  isVisible (level) {
-    return this.level?.index <= this.#levels.get(level)?.index ?? -1
-  }
-
-  #onLog (...args) {
-    const [level] = args
-
-    if (level === 'pause') {
-      this.#paused = true
-      return
-    }
-
-    if (level === 'resume') {
-      this.#paused = false
-      this.#buffer.forEach((b) => this.#log(...b))
-      this.#buffer.length = 0
-      return
-    }
-
-    if (this.#paused) {
-      this.#buffer.push(args)
-      return
-    }
-
-    this.#log(...args)
-  }
-
-  #color (str, { fg, bg, inverse }) {
-    if (!this.#stream?.isTTY) {
-      return str
-    }
-
-    return COLORS.wrap(str, [
-      COLORS.fg[fg],
-      COLORS.bg[bg],
-      inverse && COLORS.inverse
-    ])
-  }
-
-  #log (levelId, msgPrefix, ...args) {
-    if (!this.isVisible(levelId) || typeof this.#stream?.write !== 'function') {
-      return
-    }
-
-    const level = this.#levels.get(levelId)
-
-    const prefixParts = [
-      this.#color('gyp', { fg: 'white', bg: 'black' }),
-      this.#color(level.display ?? level.id, level.style)
-    ]
-    if (msgPrefix) {
-      prefixParts.push(this.#color(msgPrefix, { fg: 'magenta' }))
-    }
-
-    const prefix = prefixParts.join(' ').trim() + ' '
-    const lines = format(...args).split(/\r?\n/).map(l => prefix + l.trim())
-
-    this.#stream.write(lines.join('\n') + '\n')
-  }
-}
-
-// used to suppress logs in tests
-const NULL_LOGGER = !!process.env.NODE_GYP_NULL_LOGGER
-
-module.exports = {
-  logger: new Logger(NULL_LOGGER ? null : process.stderr),
-  stdout: NULL_LOGGER ? () => {} : (...args) => console.log(...args),
-  withPrefix,
-  ...log
-}
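
The removed `log.js` layers presentation on top of `proc-log`, whose helpers only emit `'log'` events on `process`; `withPrefix` pre-binds a fixed prefix onto every level. A rough consumer sketch, assuming `proc-log@4` (the listener wiring here is illustrative):

```js
'use strict'

const { log } = require('proc-log')

// Same shape as the deleted withPrefix helper: one function per log
// level, each prepending a fixed prefix.
const withPrefix = (prefix) => log.LEVELS.reduce((acc, level) => {
  acc[level] = (...args) => log[level](prefix, ...args)
  return acc
}, {})

// proc-log only emits events; nothing prints unless someone listens.
process.on('log', (level, prefix, ...args) => {
  if (level === 'pause' || level === 'resume') {
    return
  }
  console.error(`gyp ${level} ${prefix}`, ...args)
})

const installLog = withPrefix('install')
installLog.verbose('using dev dir', '/tmp/.node-gyp')
// -> gyp verbose install using dev dir /tmp/.node-gyp
```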
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/node-gyp.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/node-gyp.js
deleted file mode 100644
index 5e25bf996f8b2..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/node-gyp.js
+++ /dev/null
@@ -1,188 +0,0 @@
-'use strict'
-
-const path = require('path')
-const nopt = require('nopt')
-const log = require('./log')
-const childProcess = require('child_process')
-const { EventEmitter } = require('events')
-
-const commands = [
-  // Module build commands
-  'build',
-  'clean',
-  'configure',
-  'rebuild',
-  // Development Header File management commands
-  'install',
-  'list',
-  'remove'
-]
-
-class Gyp extends EventEmitter {
-  /**
-   * Export the contents of the package.json.
-   */
-  package = require('../package.json')
-
-  /**
-   * nopt configuration definitions
-   */
-  configDefs = {
-    help: Boolean, // everywhere
-    arch: String, // 'configure'
-    cafile: String, // 'install'
-    debug: Boolean, // 'build'
-    directory: String, // bin
-    make: String, // 'build'
-    'msvs-version': String, // 'configure'
-    ensure: Boolean, // 'install'
-    solution: String, // 'build' (windows only)
-    proxy: String, // 'install'
-    noproxy: String, // 'install'
-    devdir: String, // everywhere
-    nodedir: String, // 'configure'
-    loglevel: String, // everywhere
-    python: String, // 'configure'
-    'dist-url': String, // 'install'
-    tarball: String, // 'install'
-    jobs: String, // 'build'
-    thin: String, // 'configure'
-    'force-process-config': Boolean // 'configure'
-  }
-
-  /**
-   * nopt shorthands
-   */
-  shorthands = {
-    release: '--no-debug',
-    C: '--directory',
-    debug: '--debug',
-    j: '--jobs',
-    silly: '--loglevel=silly',
-    verbose: '--loglevel=verbose',
-    silent: '--loglevel=silent'
-  }
-
-  /**
-   * expose the command aliases for the bin file to use.
-   */
-  aliases = {
-    ls: 'list',
-    rm: 'remove'
-  }
-
-  constructor (...args) {
-    super(...args)
-
-    this.devDir = ''
-
-    this.commands = commands.reduce((acc, command) => {
-      acc[command] = (argv) => require('./' + command)(this, argv)
-      return acc
-    }, {})
-
-    Object.defineProperty(this, 'version', {
-      enumerable: true,
-      get: function () { return this.package.version }
-    })
-  }
-
-  /**
-   * Parses the given argv array and sets the 'opts',
-   * 'argv' and 'command' properties.
-   */
-  parseArgv (argv) {
-    this.opts = nopt(this.configDefs, this.shorthands, argv)
-    this.argv = this.opts.argv.remain.slice()
-
-    const commands = this.todo = []
-
-    // create a copy of the argv array with aliases mapped
-    argv = this.argv.map((arg) => {
-      // is this an alias?
-      if (arg in this.aliases) {
-        arg = this.aliases[arg]
-      }
-      return arg
-    })
-
-    // process the mapped args into "command" objects ("name" and "args" props)
-    argv.slice().forEach((arg) => {
-      if (arg in this.commands) {
-        const args = argv.splice(0, argv.indexOf(arg))
-        argv.shift()
-        if (commands.length > 0) {
-          commands[commands.length - 1].args = args
-        }
-        commands.push({ name: arg, args: [] })
-      }
-    })
-    if (commands.length > 0) {
-      commands[commands.length - 1].args = argv.splice(0)
-    }
-
-    // support for inheriting config env variables from npm
-    const npmConfigPrefix = 'npm_config_'
-    Object.keys(process.env).forEach((name) => {
-      if (name.indexOf(npmConfigPrefix) !== 0) {
-        return
-      }
-      const val = process.env[name]
-      if (name === npmConfigPrefix + 'loglevel') {
-        log.logger.level = val
-      } else {
-        // add the user-defined options to the config
-        name = name.substring(npmConfigPrefix.length)
-        // gyp@741b7f1 enters an infinite loop when it encounters
-        // zero-length options so ensure those don't get through.
-        if (name) {
-          // convert names like force_process_config to force-process-config
-          if (name.includes('_')) {
-            name = name.replace(/_/g, '-')
-          }
-          this.opts[name] = val
-        }
-      }
-    })
-
-    if (this.opts.loglevel) {
-      log.logger.level = this.opts.loglevel
-    }
-    log.resume()
-  }
-
-  /**
-   * Spawns a child process and emits a 'spawn' event.
-   */
-  spawn (command, args, opts) {
-    if (!opts) {
-      opts = {}
-    }
-    if (!opts.silent && !opts.stdio) {
-      opts.stdio = [0, 1, 2]
-    }
-    const cp = childProcess.spawn(command, args, opts)
-    log.info('spawn', command)
-    log.info('spawn args', args)
-    return cp
-  }
-
-  /**
-   * Returns the usage instructions for node-gyp.
-   */
-  usage () {
-    return [
-      '',
-      '  Usage: node-gyp <command> [options]',
-      '',
-      '  where <command> is one of:',
-      commands.map((c) => '    - ' + c + ' - ' + require('./' + c).usage).join('\n'),
-      '',
-      'node-gyp@' + this.version + '  ' + path.resolve(__dirname, '..'),
-      'node@' + process.versions.node
-    ].join('\n')
-  }
-}
-
-module.exports = () => new Gyp()
-module.exports.Gyp = Gyp
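
`parseArgv` above splits a raw argv into a `todo` queue of `{ name, args }` command objects, expanding aliases (`rm` -> `remove`) and nopt shorthands along the way. A hedged sketch of how the bin script drives it, assuming the deleted module were still installed:

```js
'use strict'

// Assumes the deleted module were still installed at this path.
const gyp = require('node-gyp')()

gyp.parseArgv(['/usr/bin/node', 'node-gyp', 'rebuild', '--verbose'])

console.log(gyp.todo)
// -> [ { name: 'rebuild', args: [] } ]
console.log(gyp.opts.loglevel)
// -> 'verbose' (the "verbose" shorthand expands to --loglevel=verbose)

// Each queued command maps to a promise-returning function:
// await gyp.commands[gyp.todo[0].name](gyp.todo[0].args)
```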
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/process-release.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/process-release.js
deleted file mode 100644
index c9a319dfadd2b..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/process-release.js
+++ /dev/null
@@ -1,146 +0,0 @@
-/* eslint-disable n/no-deprecated-api */
-
-'use strict'
-
-const semver = require('semver')
-const url = require('url')
-const path = require('path')
-const log = require('./log')
-
-// versions where -headers.tar.gz started shipping
-const headersTarballRange = '>= 3.0.0 || ~0.12.10 || ~0.10.42'
-const bitsre = /\/win-(x86|x64|arm64)\//
-const bitsreV3 = /\/win-(x86|ia32|x64)\// // io.js v3.x.x shipped with "ia32" but should
-// have been "x86"
-
-// Captures all the logic required to determine download URLs, local directory and
-// file names. Inputs come from command-line switches (--target, --dist-url),
-// `process.version` and `process.release` where it exists.
-function processRelease (argv, gyp, defaultVersion, defaultRelease) {
-  let version = (semver.valid(argv[0]) && argv[0]) || gyp.opts.target || defaultVersion
-  const versionSemver = semver.parse(version)
-  let overrideDistUrl = gyp.opts['dist-url'] || gyp.opts.disturl
-  let isNamedForLegacyIojs
-  let name
-  let distBaseUrl
-  let baseUrl
-  let libUrl32
-  let libUrl64
-  let libUrlArm64
-  let tarballUrl
-  let canGetHeaders
-
-  if (!versionSemver) {
-    // not a valid semver string, nothing we can do
-    return { version }
-  }
-  // flatten version into String
-  version = versionSemver.version
-
-  // defaultVersion should come from process.version so ought to be valid semver
-  const isDefaultVersion = version === semver.parse(defaultVersion).version
-
-  // can't use process.release if we're using --target=x.y.z
-  if (!isDefaultVersion) {
-    defaultRelease = null
-  }
-
-  if (defaultRelease) {
-    // v3 onward, has process.release
-    name = defaultRelease.name.replace(/io\.js/, 'iojs') // remove the '.' for directory naming purposes
-  } else {
-    // old node or alternative --target=
-    // semver.satisfies() doesn't like prerelease tags so test major directly
-    isNamedForLegacyIojs = versionSemver.major >= 1 && versionSemver.major < 4
-    // isNamedForLegacyIojs is required to support Electron < 4 (in particular Electron 3)
-    // as previously this logic was used to ensure "iojs" was used to download iojs releases
-    // and "node" for node releases.  Unfortunately the logic was broad enough that electron@3
-    // published release assets as "iojs" so that the node-gyp logic worked.  Once Electron@3 has
-    // been EOL for a while (late 2019) we should remove this hack.
-    name = isNamedForLegacyIojs ? 'iojs' : 'node'
-  }
-
-  // check for the nvm.sh standard mirror env variables
-  if (!overrideDistUrl && process.env.NODEJS_ORG_MIRROR) {
-    overrideDistUrl = process.env.NODEJS_ORG_MIRROR
-  }
-
-  if (overrideDistUrl) {
-    log.verbose('download', 'using dist-url', overrideDistUrl)
-  }
-
-  if (overrideDistUrl) {
-    distBaseUrl = overrideDistUrl.replace(/\/+$/, '')
-  } else {
-    distBaseUrl = 'https://nodejs.org/dist'
-  }
-  distBaseUrl += '/v' + version + '/'
-
-  // new style, based on process.release so we have a lot of the data we need
-  if (defaultRelease && defaultRelease.headersUrl && !overrideDistUrl) {
-    baseUrl = url.resolve(defaultRelease.headersUrl, './')
-    libUrl32 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'x86', versionSemver.major)
-    libUrl64 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'x64', versionSemver.major)
-    libUrlArm64 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'arm64', versionSemver.major)
-    tarballUrl = defaultRelease.headersUrl
-  } else {
-    // older versions without process.release are captured here and we have to make
-    // a lot of assumptions, additionally if you --target=x.y.z then we can't use the
-    // current process.release
-    baseUrl = distBaseUrl
-    libUrl32 = resolveLibUrl(name, baseUrl, 'x86', versionSemver.major)
-    libUrl64 = resolveLibUrl(name, baseUrl, 'x64', versionSemver.major)
-    libUrlArm64 = resolveLibUrl(name, baseUrl, 'arm64', versionSemver.major)
-
-    // making the bold assumption that anything with a version number >3.0.0 will
-    // have a *-headers.tar.gz file in its dist location, even some frankenstein
-    // custom version
-    canGetHeaders = semver.satisfies(versionSemver, headersTarballRange)
-    tarballUrl = url.resolve(baseUrl, name + '-v' + version + (canGetHeaders ? '-headers' : '') + '.tar.gz')
-  }
-
-  return {
-    version,
-    semver: versionSemver,
-    name,
-    baseUrl,
-    tarballUrl,
-    shasumsUrl: url.resolve(baseUrl, 'SHASUMS256.txt'),
-    versionDir: (name !== 'node' ? name + '-' : '') + version,
-    ia32: {
-      libUrl: libUrl32,
-      libPath: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl32).path))
-    },
-    x64: {
-      libUrl: libUrl64,
-      libPath: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl64).path))
-    },
-    arm64: {
-      libUrl: libUrlArm64,
-      libPath: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrlArm64).path))
-    }
-  }
-}
-
-function normalizePath (p) {
-  return path.normalize(p).replace(/\\/g, '/')
-}
-
-function resolveLibUrl (name, defaultUrl, arch, versionMajor) {
-  const base = url.resolve(defaultUrl, './')
-  const hasLibUrl = bitsre.test(defaultUrl) || (versionMajor === 3 && bitsreV3.test(defaultUrl))
-
-  if (!hasLibUrl) {
-    // let's assume it's a baseUrl then
-    if (versionMajor >= 1) {
-      return url.resolve(base, 'win-' + arch + '/' + name + '.lib')
-    }
-    // prior to io.js@1.0.0 32-bit node.lib lives in /, 64-bit lives in /x64/
-    return url.resolve(base, (arch === 'x86' ? '' : arch + '/') + name + '.lib')
-  }
-
-  // else we have a proper url to a .lib, just make sure it's the right arch
-  return defaultUrl.replace(versionMajor === 3 ? bitsreV3 : bitsre, '/win-' + arch + '/')
-}
-
-module.exports = processRelease
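
`processRelease` condenses all of the download-URL logic into one object. A hedged illustration of the modern path, where `process.release.headersUrl` exists (exact values depend on the running Node, and the deep require assumes the deleted module were still installed):

```js
'use strict'

// Deep require assumes the deleted module were still installed.
const processRelease = require('node-gyp/lib/process-release')

const release = processRelease([], { opts: {} }, process.version, process.release)

console.log(release.versionDir) // e.g. '22.1.0' (legacy io.js gets an 'iojs-' prefix)
console.log(release.tarballUrl) // e.g. 'https://nodejs.org/.../node-v22.1.0-headers.tar.gz'
console.log(release.shasumsUrl) // baseUrl + 'SHASUMS256.txt'
console.log(release.x64.libUrl) // Windows import library for the x64 build
```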
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/rebuild.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/rebuild.js
deleted file mode 100644
index 609817665e2db..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/rebuild.js
+++ /dev/null
@@ -1,12 +0,0 @@
-'use strict'
-
-async function rebuild (gyp, argv) {
-  gyp.todo.push(
-    { name: 'clean', args: [] },
-    { name: 'configure', args: argv },
-    { name: 'build', args: [] }
-  )
-}
-
-module.exports = rebuild
-module.exports.usage = 'Runs "clean", "configure" and "build" all at once'
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/remove.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/remove.js
deleted file mode 100644
index 7efdb01a662e7..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/remove.js
+++ /dev/null
@@ -1,43 +0,0 @@
-'use strict'
-
-const fs = require('graceful-fs').promises
-const path = require('path')
-const log = require('./log')
-const semver = require('semver')
-
-async function remove (gyp, argv) {
-  const devDir = gyp.devDir
-  log.verbose('remove', 'using node-gyp dir:', devDir)
-
-  // get the user-specified version to remove
-  let version = argv[0] || gyp.opts.target
-  log.verbose('remove', 'removing target version:', version)
-
-  if (!version) {
-    throw new Error('You must specify a version number to remove. Ex: "' + process.version + '"')
-  }
-
-  const versionSemver = semver.parse(version)
-  if (versionSemver) {
-    // normalize the parsed semver into its canonical version string
-    version = versionSemver.version
-  }
-
-  const versionPath = path.resolve(gyp.devDir, version)
-  log.verbose('remove', 'removing development files for version:', version)
-
-  // first check if it's even installed
-  try {
-    await fs.stat(versionPath)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return 'version was already uninstalled: ' + version
-    }
-    throw err
-  }
-
-  await fs.rm(versionPath, { recursive: true, force: true })
-}
-
-module.exports = remove
-module.exports.usage = 'Removes the node development files for the specified version'
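
The normalization step above matters because user input may carry a leading `v` while the on-disk directory name is always the canonical version. For example:

```js
const semver = require('semver')

// User input may carry a leading "v"; the on-disk dir name never does.
console.log(semver.parse('v10.3.1').version) // -> '10.3.1'
console.log(semver.parse('not-a-version'))   // -> null, so the raw input is kept
```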
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/util.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/util.js
deleted file mode 100644
index 3f6aeeb7dcb43..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/lib/util.js
+++ /dev/null
@@ -1,81 +0,0 @@
-'use strict'
-
-const cp = require('child_process')
-const path = require('path')
-const { openSync, closeSync } = require('graceful-fs')
-const log = require('./log')
-
-const execFile = async (...args) => new Promise((resolve) => {
-  const child = cp.execFile(...args, (...a) => resolve(a))
-  child.stdin.end()
-})
-
-async function regGetValue (key, value, addOpts) {
-  const outReValue = value.replace(/\W/g, '.')
-  const outRe = new RegExp(`^\\s+${outReValue}\\s+REG_\\w+\\s+(\\S.*)$`, 'im')
-  const reg = path.join(process.env.SystemRoot, 'System32', 'reg.exe')
-  const regArgs = ['query', key, '/v', value].concat(addOpts)
-
-  log.silly('reg', 'running', reg, regArgs)
-  const [err, stdout, stderr] = await execFile(reg, regArgs, { encoding: 'utf8' })
-
-  log.silly('reg', 'reg.exe stdout = %j', stdout)
-  if (err || stderr.trim() !== '') {
-    log.silly('reg', 'reg.exe err = %j', err && (err.stack || err))
-    log.silly('reg', 'reg.exe stderr = %j', stderr)
-    if (err) {
-      throw err
-    }
-    throw new Error(stderr)
-  }
-
-  const result = outRe.exec(stdout)
-  if (!result) {
-    log.silly('reg', 'error parsing stdout')
-    throw new Error('Could not parse output of reg.exe')
-  }
-
-  log.silly('reg', 'found: %j', result[1])
-  return result[1]
-}
-
-async function regSearchKeys (keys, value, addOpts) {
-  for (const key of keys) {
-    try {
-      return await regGetValue(key, value, addOpts)
-    } catch {
-      continue
-    }
-  }
-}
-
-/**
- * Returns the first file or directory from an array of candidates that is
- * readable by the current user, or undefined if none of the candidates are
- * readable.
- */
-function findAccessibleSync (logprefix, dir, candidates) {
-  for (let next = 0; next < candidates.length; next++) {
-    const candidate = path.resolve(dir, candidates[next])
-    let fd
-    try {
-      fd = openSync(candidate, 'r')
-    } catch (e) {
-      // this candidate was not found or not readable, do nothing
-      log.silly(logprefix, 'Could not open %s: %s', candidate, e.message)
-      continue
-    }
-    closeSync(fd)
-    log.silly(logprefix, 'Found readable %s', candidate)
-    return candidate
-  }
-
-  return undefined
-}
-
-module.exports = {
-  execFile,
-  regGetValue,
-  regSearchKeys,
-  findAccessibleSync
-}
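
The `execFile` wrapper above never rejects: it resolves with the callback's `[err, stdout, stderr]` tuple so callers such as `regGetValue` can inspect all three before deciding what to throw. A self-contained sketch of the same pattern (the `main` driver is illustrative):

```js
'use strict'

const cp = require('child_process')

// Resolve with the full callback tuple instead of rejecting, mirroring
// the deleted util.js helper.
const execFile = async (...args) => new Promise((resolve) => {
  const child = cp.execFile(...args, (...a) => resolve(a))
  child.stdin.end()
})

async function main () {
  const [err, stdout, stderr] = await execFile('node', ['--version'], { encoding: 'utf8' })
  if (err || stderr.trim() !== '') {
    throw err || new Error(stderr)
  }
  console.log('node is', stdout.trim())
}

main()
```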
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/macOS_Catalina_acid_test.sh b/node_modules/@npmcli/run-script/node_modules/node-gyp/macOS_Catalina_acid_test.sh
deleted file mode 100644
index e1e98941a8e4c..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/macOS_Catalina_acid_test.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-
-pkgs=(
-  "com.apple.pkg.DeveloperToolsCLILeo" # standalone
-  "com.apple.pkg.DeveloperToolsCLI"    # from XCode
-  "com.apple.pkg.CLTools_Executables"  # Mavericks
-)
-
-for pkg in "${pkgs[@]}"; do
-  output=$(/usr/sbin/pkgutil --pkg-info "$pkg" 2>/dev/null)
-  if [ "$output" ]; then
-    version=$(echo "$output" | grep 'version' | cut -d' ' -f2)
-    break
-  fi
-done
-
-if [ "$version" ]; then
-  echo "Command Line Tools version: $version"
-else
-  echo >&2 'Command Line Tools not found'
-fi
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/proc-log/LICENSE b/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/proc-log/LICENSE
deleted file mode 100644
index 83837797202b7..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/proc-log/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) GitHub, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/proc-log/lib/index.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/proc-log/lib/index.js
deleted file mode 100644
index 86d90861078da..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/proc-log/lib/index.js
+++ /dev/null
@@ -1,153 +0,0 @@
-const META = Symbol('proc-log.meta')
-module.exports = {
-  META: META,
-  output: {
-    LEVELS: [
-      'standard',
-      'error',
-      'buffer',
-      'flush',
-    ],
-    KEYS: {
-      standard: 'standard',
-      error: 'error',
-      buffer: 'buffer',
-      flush: 'flush',
-    },
-    standard: function (...args) {
-      return process.emit('output', 'standard', ...args)
-    },
-    error: function (...args) {
-      return process.emit('output', 'error', ...args)
-    },
-    buffer: function (...args) {
-      return process.emit('output', 'buffer', ...args)
-    },
-    flush: function (...args) {
-      return process.emit('output', 'flush', ...args)
-    },
-  },
-  log: {
-    LEVELS: [
-      'notice',
-      'error',
-      'warn',
-      'info',
-      'verbose',
-      'http',
-      'silly',
-      'timing',
-      'pause',
-      'resume',
-    ],
-    KEYS: {
-      notice: 'notice',
-      error: 'error',
-      warn: 'warn',
-      info: 'info',
-      verbose: 'verbose',
-      http: 'http',
-      silly: 'silly',
-      timing: 'timing',
-      pause: 'pause',
-      resume: 'resume',
-    },
-    error: function (...args) {
-      return process.emit('log', 'error', ...args)
-    },
-    notice: function (...args) {
-      return process.emit('log', 'notice', ...args)
-    },
-    warn: function (...args) {
-      return process.emit('log', 'warn', ...args)
-    },
-    info: function (...args) {
-      return process.emit('log', 'info', ...args)
-    },
-    verbose: function (...args) {
-      return process.emit('log', 'verbose', ...args)
-    },
-    http: function (...args) {
-      return process.emit('log', 'http', ...args)
-    },
-    silly: function (...args) {
-      return process.emit('log', 'silly', ...args)
-    },
-    timing: function (...args) {
-      return process.emit('log', 'timing', ...args)
-    },
-    pause: function () {
-      return process.emit('log', 'pause')
-    },
-    resume: function () {
-      return process.emit('log', 'resume')
-    },
-  },
-  time: {
-    LEVELS: [
-      'start',
-      'end',
-    ],
-    KEYS: {
-      start: 'start',
-      end: 'end',
-    },
-    start: function (name, fn) {
-      process.emit('time', 'start', name)
-      function end () {
-        return process.emit('time', 'end', name)
-      }
-      if (typeof fn === 'function') {
-        const res = fn()
-        if (res && res.finally) {
-          return res.finally(end)
-        }
-        end()
-        return res
-      }
-      return end
-    },
-    end: function (name) {
-      return process.emit('time', 'end', name)
-    },
-  },
-  input: {
-    LEVELS: [
-      'start',
-      'end',
-      'read',
-    ],
-    KEYS: {
-      start: 'start',
-      end: 'end',
-      read: 'read',
-    },
-    start: function (fn) {
-      process.emit('input', 'start')
-      function end () {
-        return process.emit('input', 'end')
-      }
-      if (typeof fn === 'function') {
-        const res = fn()
-        if (res && res.finally) {
-          return res.finally(end)
-        }
-        end()
-        return res
-      }
-      return end
-    },
-    end: function () {
-      return process.emit('input', 'end')
-    },
-    read: function (...args) {
-      let resolve, reject
-      const promise = new Promise((_resolve, _reject) => {
-        resolve = _resolve
-        reject = _reject
-      })
-      process.emit('input', 'read', resolve, reject, ...args)
-      return promise
-    },
-  },
-}
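
proc-log deliberately contains no output code: every helper is a thin `process.emit(...)`, and a consumer (the Logger deleted earlier, or npm itself) attaches the listeners. A minimal consumer sketch against the public API:

```js
'use strict'

const { log, time } = require('proc-log')

process.on('log', (level, ...args) => console.error(`[${level}]`, ...args))
process.on('time', (phase, name) => console.error(`[time:${phase}]`, name))

log.warn('demo', 'something looks off')      // -> [warn] demo something looks off

// time.start returns an end function when called without a callback...
const end = time.start('build')
end()                                        // -> emits start, then end

// ...and times the function (or its returned promise) when given one.
time.start('fetch', () => Promise.resolve()) // emits start, then end on settle
```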
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/proc-log/package.json b/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/proc-log/package.json
deleted file mode 100644
index 4ab89102ecc9b..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/proc-log/package.json
+++ /dev/null
@@ -1,45 +0,0 @@
-{
-  "name": "proc-log",
-  "version": "4.2.0",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "main": "lib/index.js",
-  "description": "just emit 'log' events on the process object",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/proc-log.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "posttest": "npm run lint",
-    "postsnap": "eslint index.js test/*.js --fix",
-    "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run lint -- --fix",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.21.3",
-    "tap": "^16.0.1"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.21.3",
-    "publish": true
-  },
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/which/LICENSE b/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/which/LICENSE
deleted file mode 100644
index 19129e315fe59..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/which/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/which/bin/which.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/which/bin/which.js
deleted file mode 100755
index 6df16f21acf93..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/which/bin/which.js
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env node
-
-const which = require('../lib')
-const argv = process.argv.slice(2)
-
-const usage = (err) => {
-  if (err) {
-    console.error(`which: ${err}`)
-  }
-  console.error('usage: which [-as] program ...')
-  process.exit(1)
-}
-
-if (!argv.length) {
-  return usage()
-}
-
-let dashdash = false
-const [commands, flags] = argv.reduce((acc, arg) => {
-  if (dashdash || arg === '--') {
-    dashdash = true
-    return acc
-  }
-
-  if (!/^-/.test(arg)) {
-    acc[0].push(arg)
-    return acc
-  }
-
-  for (const flag of arg.slice(1).split('')) {
-    if (flag === 's') {
-      acc[1].silent = true
-    } else if (flag === 'a') {
-      acc[1].all = true
-    } else {
-      usage(`illegal option -- ${flag}`)
-    }
-  }
-
-  return acc
-}, [[], {}])
-
-for (const command of commands) {
-  try {
-    const res = which.sync(command, { all: flags.all })
-    if (!flags.silent) {
-      console.log([].concat(res).join('\n'))
-    }
-  } catch (err) {
-    process.exitCode = 1
-  }
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/which/lib/index.js b/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/which/lib/index.js
deleted file mode 100644
index 2fd358baf888f..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/which/lib/index.js
+++ /dev/null
@@ -1,111 +0,0 @@
-const { isexe, sync: isexeSync } = require('isexe')
-const { join, delimiter, sep, posix } = require('path')
-
-const isWindows = process.platform === 'win32'
-
-// used to check for slashes in commands passed in. always checks for the posix
-// separator on all platforms, and checks for the current separator when not on
-// a posix platform. don't use the isWindows check for this since that is mocked
-// in tests but we still need the code to actually work when called. that is also
-// why it is ignored from coverage.
-/* istanbul ignore next */
-const rSlash = new RegExp(`[${posix.sep}${sep === posix.sep ? '' : sep}]`.replace(/(\\)/g, '\\$1'))
-const rRel = new RegExp(`^\\.${rSlash.source}`)
-
-const getNotFoundError = (cmd) =>
-  Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' })
-
-const getPathInfo = (cmd, {
-  path: optPath = process.env.PATH,
-  pathExt: optPathExt = process.env.PATHEXT,
-  delimiter: optDelimiter = delimiter,
-}) => {
-  // If it has a slash, then we don't bother searching the pathenv.
-  // just check the file itself, and that's it.
-  const pathEnv = cmd.match(rSlash) ? [''] : [
-    // windows always checks the cwd first
-    ...(isWindows ? [process.cwd()] : []),
-    ...(optPath || /* istanbul ignore next: very unusual */ '').split(optDelimiter),
-  ]
-
-  if (isWindows) {
-    const pathExtExe = optPathExt ||
-      ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter)
-    const pathExt = pathExtExe.split(optDelimiter).flatMap((item) => [item, item.toLowerCase()])
-    if (cmd.includes('.') && pathExt[0] !== '') {
-      pathExt.unshift('')
-    }
-    return { pathEnv, pathExt, pathExtExe }
-  }
-
-  return { pathEnv, pathExt: [''] }
-}
-
-const getPathPart = (raw, cmd) => {
-  const pathPart = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw
-  const prefix = !pathPart && rRel.test(cmd) ? cmd.slice(0, 2) : ''
-  return prefix + join(pathPart, cmd)
-}
-
-const which = async (cmd, opt = {}) => {
-  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt)
-  const found = []
-
-  for (const envPart of pathEnv) {
-    const p = getPathPart(envPart, cmd)
-
-    for (const ext of pathExt) {
-      const withExt = p + ext
-      const is = await isexe(withExt, { pathExt: pathExtExe, ignoreErrors: true })
-      if (is) {
-        if (!opt.all) {
-          return withExt
-        }
-        found.push(withExt)
-      }
-    }
-  }
-
-  if (opt.all && found.length) {
-    return found
-  }
-
-  if (opt.nothrow) {
-    return null
-  }
-
-  throw getNotFoundError(cmd)
-}
-
-const whichSync = (cmd, opt = {}) => {
-  const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt)
-  const found = []
-
-  for (const pathEnvPart of pathEnv) {
-    const p = getPathPart(pathEnvPart, cmd)
-
-    for (const ext of pathExt) {
-      const withExt = p + ext
-      const is = isexeSync(withExt, { pathExt: pathExtExe, ignoreErrors: true })
-      if (is) {
-        if (!opt.all) {
-          return withExt
-        }
-        found.push(withExt)
-      }
-    }
-  }
-
-  if (opt.all && found.length) {
-    return found
-  }
-
-  if (opt.nothrow) {
-    return null
-  }
-
-  throw getNotFoundError(cmd)
-}
-
-module.exports = which
-which.sync = whichSync
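
Typical use of the removed `which` implementation, per its documented API: async by default, `nothrow` to resolve `null` instead of throwing, `all` for every match in PATH order:

```js
'use strict'

const which = require('which')

async function main () {
  // First match on PATH, or an ENOENT-coded error if absent.
  console.log(await which('node'))

  // nothrow: resolve null instead of throwing.
  console.log(await which('definitely-not-installed', { nothrow: true })) // null

  // all: every match, in PATH order; a sync variant is also exported.
  console.log(which.sync('node', { all: true, nothrow: true }))
}

main()
```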
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/which/package.json b/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/which/package.json
deleted file mode 100644
index 515bfb22ca0e1..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/which/package.json
+++ /dev/null
@@ -1,57 +0,0 @@
-{
-  "author": "GitHub Inc.",
-  "name": "which",
-  "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
-  "version": "4.0.0",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/node-which.git"
-  },
-  "main": "lib/index.js",
-  "bin": {
-    "node-which": "./bin/which.js"
-  },
-  "license": "ISC",
-  "dependencies": {
-    "isexe": "^3.1.1"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.18.0",
-    "tap": "^16.3.0"
-  },
-  "scripts": {
-    "test": "tap",
-    "lint": "eslint \"**/*.js\"",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "lintfix": "npm run lint -- --fix",
-    "snap": "tap",
-    "posttest": "npm run lint"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "tap": {
-    "check-coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "engines": {
-    "node": "^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "ciVersions": [
-      "16.13.0",
-      "16.x",
-      "18.0.0",
-      "18.x"
-    ],
-    "version": "4.18.0",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/package.json b/node_modules/@npmcli/run-script/node_modules/node-gyp/package.json
deleted file mode 100644
index 57142c42841e3..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/package.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
-  "name": "node-gyp",
-  "description": "Node.js native addon build tool",
-  "license": "MIT",
-  "keywords": [
-    "native",
-    "addon",
-    "module",
-    "c",
-    "c++",
-    "bindings",
-    "gyp"
-  ],
-  "version": "10.3.1",
-  "installVersion": 11,
-  "author": "Nathan Rajlich  (http://tootallnate.net)",
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/nodejs/node-gyp.git"
-  },
-  "preferGlobal": true,
-  "bin": "./bin/node-gyp.js",
-  "main": "./lib/node-gyp.js",
-  "dependencies": {
-    "env-paths": "^2.2.0",
-    "exponential-backoff": "^3.1.1",
-    "glob": "^10.3.10",
-    "graceful-fs": "^4.2.6",
-    "make-fetch-happen": "^13.0.0",
-    "nopt": "^7.0.0",
-    "proc-log": "^4.1.0",
-    "semver": "^7.3.5",
-    "tar": "^6.2.1",
-    "which": "^4.0.0"
-  },
-  "engines": {
-    "node": "^16.14.0 || >=18.0.0"
-  },
-  "devDependencies": {
-    "bindings": "^1.5.0",
-    "cross-env": "^7.0.3",
-    "mocha": "^10.2.0",
-    "nan": "^2.14.2",
-    "require-inject": "^1.4.4",
-    "standard": "^17.0.0"
-  },
-  "scripts": {
-    "lint": "standard \"*/*.js\" \"test/**/*.js\" \".github/**/*.js\"",
-    "test": "cross-env NODE_GYP_NULL_LOGGER=true mocha --timeout 15000 test/test-download.js test/test-*"
-  }
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/release-please-config.json b/node_modules/@npmcli/run-script/node_modules/node-gyp/release-please-config.json
deleted file mode 100644
index 94b8f8110e881..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/release-please-config.json
+++ /dev/null
@@ -1,40 +0,0 @@
-{
-    "packages": {
-        ".": {
-            "include-component-in-tag": false,
-            "release-type": "node",
-            "changelog-sections": [
-                { "type": "feat", "section": "Features", "hidden": false },
-                { "type": "fix", "section": "Bug Fixes", "hidden": false },
-                { "type": "bin", "section": "Core", "hidden": false },
-                { "type": "gyp", "section": "Core", "hidden": false },
-                { "type": "lib", "section": "Core", "hidden": false },
-                { "type": "src", "section": "Core", "hidden": false },
-                { "type": "test", "section": "Tests", "hidden": false },
-                { "type": "build", "section": "Core", "hidden": false },
-                { "type": "clean", "section": "Core", "hidden": false },
-                { "type": "configure", "section": "Core", "hidden": false },
-                { "type": "install", "section": "Core", "hidden": false },
-                { "type": "list", "section": "Core", "hidden": false },
-                { "type": "rebuild", "section": "Core", "hidden": false },
-                { "type": "remove", "section": "Core", "hidden": false },
-                { "type": "deps", "section": "Core", "hidden": false },
-                { "type": "python", "section": "Core", "hidden": false },
-                { "type": "lin", "section": "Core", "hidden": false },
-                { "type": "linux", "section": "Core", "hidden": false },
-                { "type": "mac", "section": "Core", "hidden": false },
-                { "type": "macos", "section": "Core", "hidden": false },
-                { "type": "win", "section": "Core", "hidden": false },
-                { "type": "windows", "section": "Core", "hidden": false },
-                { "type": "zos", "section": "Core", "hidden": false },
-                { "type": "doc", "section": "Doc", "hidden": false },
-                { "type": "docs", "section": "Doc", "hidden": false },
-                { "type": "readme", "section": "Doc", "hidden": false },
-                { "type": "chore", "section": "Miscellaneous", "hidden": false },
-                { "type": "refactor", "section": "Miscellaneous", "hidden": false },
-                { "type": "ci", "section": "Miscellaneous", "hidden": false },
-                { "type": "meta", "section": "Miscellaneous", "hidden": false }
-            ]
-        }
-    }
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/node-gyp/src/win_delay_load_hook.cc b/node_modules/@npmcli/run-script/node_modules/node-gyp/src/win_delay_load_hook.cc
deleted file mode 100644
index 169f8029f10fd..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/node-gyp/src/win_delay_load_hook.cc
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * When this file is linked to a DLL, it sets up a delay-load hook that
- * intervenes when the DLL is trying to load the host executable
- * dynamically. Instead of trying to locate the .exe file it'll just
- * return a handle to the process image.
- *
- * This allows compiled addons to work when the host executable is renamed.
- */
-
-#ifdef _MSC_VER
-
-#pragma managed(push, off)
-
-#ifndef WIN32_LEAN_AND_MEAN
-#define WIN32_LEAN_AND_MEAN
-#endif
-
-#include <windows.h>
-
-#include <delayimp.h>
-#include <string.h>
-
-static FARPROC WINAPI load_exe_hook(unsigned int event, DelayLoadInfo* info) {
-  HMODULE m;
-  if (event != dliNotePreLoadLibrary)
-    return NULL;
-
-  if (_stricmp(info->szDll, HOST_BINARY) != 0)
-    return NULL;
-
-  m = GetModuleHandle(NULL);
-  return (FARPROC) m;
-}
-
-decltype(__pfnDliNotifyHook2) __pfnDliNotifyHook2 = load_exe_hook;
-
-#pragma managed(pop)
-
-#endif
diff --git a/node_modules/@npmcli/run-script/node_modules/nopt/LICENSE b/node_modules/@npmcli/run-script/node_modules/nopt/LICENSE
deleted file mode 100644
index 19129e315fe59..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/nopt/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/run-script/node_modules/nopt/bin/nopt.js b/node_modules/@npmcli/run-script/node_modules/nopt/bin/nopt.js
deleted file mode 100755
index 6ed2082064b5e..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/nopt/bin/nopt.js
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env node
-const nopt = require('../lib/nopt')
-const path = require('path')
-console.log('parsed', nopt({
-  num: Number,
-  bool: Boolean,
-  help: Boolean,
-  list: Array,
-  'num-list': [Number, Array],
-  'str-list': [String, Array],
-  'bool-list': [Boolean, Array],
-  str: String,
-  clear: Boolean,
-  config: Boolean,
-  length: Number,
-  file: path,
-}, {
-  s: ['--str', 'astring'],
-  b: ['--bool'],
-  nb: ['--no-bool'],
-  tft: ['--bool-list', '--no-bool-list', '--bool-list', 'true'],
-  '?': ['--help'],
-  h: ['--help'],
-  H: ['--help'],
-  n: ['--num', '125'],
-  c: ['--config'],
-  l: ['--length'],
-  f: ['--file'],
-}, process.argv, 2))
diff --git a/node_modules/@npmcli/run-script/node_modules/nopt/lib/debug.js b/node_modules/@npmcli/run-script/node_modules/nopt/lib/debug.js
deleted file mode 100644
index 544ab382ca85c..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/nopt/lib/debug.js
+++ /dev/null
@@ -1,5 +0,0 @@
-/* istanbul ignore next */
-module.exports = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG
-  // eslint-disable-next-line no-console
-  ? (...a) => console.error(...a)
-  : () => {}
diff --git a/node_modules/@npmcli/run-script/node_modules/nopt/lib/nopt-lib.js b/node_modules/@npmcli/run-script/node_modules/nopt/lib/nopt-lib.js
deleted file mode 100644
index d3d1de0255ba9..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/nopt/lib/nopt-lib.js
+++ /dev/null
@@ -1,479 +0,0 @@
-const abbrev = require('abbrev')
-const debug = require('./debug')
-const defaultTypeDefs = require('./type-defs')
-
-const hasOwn = (o, k) => Object.prototype.hasOwnProperty.call(o, k)
-
-const getType = (k, { types, dynamicTypes }) => {
-  let hasType = hasOwn(types, k)
-  let type = types[k]
-  if (!hasType && typeof dynamicTypes === 'function') {
-    const matchedType = dynamicTypes(k)
-    if (matchedType !== undefined) {
-      type = matchedType
-      hasType = true
-    }
-  }
-  return [hasType, type]
-}
-
-const isTypeDef = (type, def) => def && type === def
-const hasTypeDef = (type, def) => def && type.indexOf(def) !== -1
-const doesNotHaveTypeDef = (type, def) => def && !hasTypeDef(type, def)
-
-function nopt (args, {
-  types,
-  shorthands,
-  typeDefs,
-  invalidHandler,
-  typeDefault,
-  dynamicTypes,
-} = {}) {
-  debug(types, shorthands, args, typeDefs)
-
-  const data = {}
-  const argv = {
-    remain: [],
-    cooked: args,
-    original: args.slice(0),
-  }
-
-  parse(args, data, argv.remain, { typeDefs, types, dynamicTypes, shorthands })
-
-  // now data is full
-  clean(data, { types, dynamicTypes, typeDefs, invalidHandler, typeDefault })
-  data.argv = argv
-
-  Object.defineProperty(data.argv, 'toString', {
-    value: function () {
-      return this.original.map(JSON.stringify).join(' ')
-    },
-    enumerable: false,
-  })
-
-  return data
-}
-
-function clean (data, {
-  types = {},
-  typeDefs = {},
-  dynamicTypes,
-  invalidHandler,
-  typeDefault,
-} = {}) {
-  const StringType = typeDefs.String?.type
-  const NumberType = typeDefs.Number?.type
-  const ArrayType = typeDefs.Array?.type
-  const BooleanType = typeDefs.Boolean?.type
-  const DateType = typeDefs.Date?.type
-
-  const hasTypeDefault = typeof typeDefault !== 'undefined'
-  if (!hasTypeDefault) {
-    typeDefault = [false, true, null]
-    if (StringType) {
-      typeDefault.push(StringType)
-    }
-    if (ArrayType) {
-      typeDefault.push(ArrayType)
-    }
-  }
-
-  const remove = {}
-
-  Object.keys(data).forEach((k) => {
-    if (k === 'argv') {
-      return
-    }
-    let val = data[k]
-    debug('val=%j', val)
-    const isArray = Array.isArray(val)
-    let [hasType, rawType] = getType(k, { types, dynamicTypes })
-    let type = rawType
-    if (!isArray) {
-      val = [val]
-    }
-    if (!type) {
-      type = typeDefault
-    }
-    if (isTypeDef(type, ArrayType)) {
-      type = typeDefault.concat(ArrayType)
-    }
-    if (!Array.isArray(type)) {
-      type = [type]
-    }
-
-    debug('val=%j', val)
-    debug('types=', type)
-    val = val.map((v) => {
-      // if it's an unknown value, then parse false/true/null/numbers/dates
-      if (typeof v === 'string') {
-        debug('string %j', v)
-        v = v.trim()
-        if ((v === 'null' && ~type.indexOf(null))
-            || (v === 'true' &&
-               (~type.indexOf(true) || hasTypeDef(type, BooleanType)))
-            || (v === 'false' &&
-               (~type.indexOf(false) || hasTypeDef(type, BooleanType)))) {
-          v = JSON.parse(v)
-          debug('jsonable %j', v)
-        } else if (hasTypeDef(type, NumberType) && !isNaN(v)) {
-          debug('convert to number', v)
-          v = +v
-        } else if (hasTypeDef(type, DateType) && !isNaN(Date.parse(v))) {
-          debug('convert to date', v)
-          v = new Date(v)
-        }
-      }
-
-      if (!hasType) {
-        if (!hasTypeDefault) {
-          return v
-        }
-        // if the default type has been passed in then we want to validate the
-        // unknown data key instead of bailing out earlier. we also set the raw
-        // type which is passed to the invalid handler so that it can be
-        // determined during validation whether it is unknown or invalid
-        rawType = typeDefault
-      }
-
-      // allow `--no-blah` to set 'blah' to null if null is allowed
-      if (v === false && ~type.indexOf(null) &&
-          !(~type.indexOf(false) || hasTypeDef(type, BooleanType))) {
-        v = null
-      }
-
-      const d = {}
-      d[k] = v
-      debug('prevalidated val', d, v, rawType)
-      if (!validate(d, k, v, rawType, { typeDefs })) {
-        if (invalidHandler) {
-          invalidHandler(k, v, rawType, data)
-        } else if (invalidHandler !== false) {
-          debug('invalid: ' + k + '=' + v, rawType)
-        }
-        return remove
-      }
-      debug('validated v', d, v, rawType)
-      return d[k]
-    }).filter((v) => v !== remove)
-
-    // if we allow Array specifically, then an empty array is how we
-    // express 'no value here', not null.  Allow it.
-    if (!val.length && doesNotHaveTypeDef(type, ArrayType)) {
-      debug('VAL HAS NO LENGTH, DELETE IT', val, k, type.indexOf(ArrayType))
-      delete data[k]
-    } else if (isArray) {
-      debug(isArray, data[k], val)
-      data[k] = val
-    } else {
-      data[k] = val[0]
-    }
-
-    debug('k=%s val=%j', k, val, data[k])
-  })
-}
-
-function validate (data, k, val, type, { typeDefs } = {}) {
-  const ArrayType = typeDefs?.Array?.type
-  // arrays are lists of types.
-  if (Array.isArray(type)) {
-    for (let i = 0, l = type.length; i < l; i++) {
-      if (isTypeDef(type[i], ArrayType)) {
-        continue
-      }
-      if (validate(data, k, val, type[i], { typeDefs })) {
-        return true
-      }
-    }
-    delete data[k]
-    return false
-  }
-
-  // an array of anything?
-  if (isTypeDef(type, ArrayType)) {
-    return true
-  }
-
-  // Original comment:
-  // NaN is poisonous.  Means that something is not allowed.
-  // New comment: Changing this to an isNaN check breaks a lot of tests.
-  // Something is being assumed here that is not actually what happens in
-  // practice.  Fixing it is outside the scope of getting linting to pass in
-  // this repo. Leaving as-is for now.
-  /* eslint-disable-next-line no-self-compare */
-  if (type !== type) {
-    debug('Poison NaN', k, val, type)
-    delete data[k]
-    return false
-  }
-
-  // explicit list of values
-  if (val === type) {
-    debug('Explicitly allowed %j', val)
-    data[k] = val
-    return true
-  }
-
-  // now go through the list of typeDefs, validate against each one.
-  let ok = false
-  const types = Object.keys(typeDefs)
-  for (let i = 0, l = types.length; i < l; i++) {
-    debug('test type %j %j %j', k, val, types[i])
-    const t = typeDefs[types[i]]
-    if (t && (
-      (type && type.name && t.type && t.type.name) ?
-        (type.name === t.type.name) :
-        (type === t.type)
-    )) {
-      const d = {}
-      ok = t.validate(d, k, val) !== false
-      val = d[k]
-      if (ok) {
-        data[k] = val
-        break
-      }
-    }
-  }
-  debug('OK? %j (%j %j %j)', ok, k, val, types[types.length - 1])
-
-  if (!ok) {
-    delete data[k]
-  }
-  return ok
-}
-
-function parse (args, data, remain, {
-  types = {},
-  typeDefs = {},
-  shorthands = {},
-  dynamicTypes,
-} = {}) {
-  const StringType = typeDefs.String?.type
-  const NumberType = typeDefs.Number?.type
-  const ArrayType = typeDefs.Array?.type
-  const BooleanType = typeDefs.Boolean?.type
-
-  debug('parse', args, data, remain)
-
-  const abbrevs = abbrev(Object.keys(types))
-  debug('abbrevs=%j', abbrevs)
-  const shortAbbr = abbrev(Object.keys(shorthands))
-
-  for (let i = 0; i < args.length; i++) {
-    let arg = args[i]
-    debug('arg', arg)
-
-    if (arg.match(/^-{2,}$/)) {
-      // done with keys.
-      // the rest are args.
-      remain.push.apply(remain, args.slice(i + 1))
-      args[i] = '--'
-      break
-    }
-    let hadEq = false
-    if (arg.charAt(0) === '-' && arg.length > 1) {
-      const at = arg.indexOf('=')
-      if (at > -1) {
-        hadEq = true
-        const v = arg.slice(at + 1)
-        arg = arg.slice(0, at)
-        args.splice(i, 1, arg, v)
-      }
-
-      // see if it's a shorthand
-      // if so, splice and back up to re-parse it.
-      const shRes = resolveShort(arg, shortAbbr, abbrevs, { shorthands })
-      debug('arg=%j shRes=%j', arg, shRes)
-      if (shRes) {
-        args.splice.apply(args, [i, 1].concat(shRes))
-        if (arg !== shRes[0]) {
-          i--
-          continue
-        }
-      }
-      arg = arg.replace(/^-+/, '')
-      let no = null
-      while (arg.toLowerCase().indexOf('no-') === 0) {
-        no = !no
-        arg = arg.slice(3)
-      }
-
-      if (abbrevs[arg]) {
-        arg = abbrevs[arg]
-      }
-
-      let [hasType, argType] = getType(arg, { types, dynamicTypes })
-      let isTypeArray = Array.isArray(argType)
-      if (isTypeArray && argType.length === 1) {
-        isTypeArray = false
-        argType = argType[0]
-      }
-
-      let isArray = isTypeDef(argType, ArrayType) ||
-        isTypeArray && hasTypeDef(argType, ArrayType)
-
-      // allow unknown things to be arrays if specified multiple times.
-      if (!hasType && hasOwn(data, arg)) {
-        if (!Array.isArray(data[arg])) {
-          data[arg] = [data[arg]]
-        }
-        isArray = true
-      }
-
-      let val
-      let la = args[i + 1]
-
-      const isBool = typeof no === 'boolean' ||
-        isTypeDef(argType, BooleanType) ||
-        isTypeArray && hasTypeDef(argType, BooleanType) ||
-        (typeof argType === 'undefined' && !hadEq) ||
-        (la === 'false' &&
-         (argType === null ||
-          isTypeArray && ~argType.indexOf(null)))
-
-      if (isBool) {
-        // just set and move along
-        val = !no
-        // however, also support --bool true or --bool false
-        if (la === 'true' || la === 'false') {
-          val = JSON.parse(la)
-          la = null
-          if (no) {
-            val = !val
-          }
-          i++
-        }
-
-        // also support "foo":[Boolean, "bar"] and "--foo bar"
-        if (isTypeArray && la) {
-          if (~argType.indexOf(la)) {
-            // an explicit type
-            val = la
-            i++
-          } else if (la === 'null' && ~argType.indexOf(null)) {
-            // null allowed
-            val = null
-            i++
-          } else if (!la.match(/^-{2,}[^-]/) &&
-                      !isNaN(la) &&
-                      hasTypeDef(argType, NumberType)) {
-            // number
-            val = +la
-            i++
-          } else if (!la.match(/^-[^-]/) && hasTypeDef(argType, StringType)) {
-            // string
-            val = la
-            i++
-          }
-        }
-
-        if (isArray) {
-          (data[arg] = data[arg] || []).push(val)
-        } else {
-          data[arg] = val
-        }
-
-        continue
-      }
-
-      if (isTypeDef(argType, StringType)) {
-        if (la === undefined) {
-          la = ''
-        } else if (la.match(/^-{1,2}[^-]+/)) {
-          la = ''
-          i--
-        }
-      }
-
-      if (la && la.match(/^-{2,}$/)) {
-        la = undefined
-        i--
-      }
-
-      val = la === undefined ? true : la
-      if (isArray) {
-        (data[arg] = data[arg] || []).push(val)
-      } else {
-        data[arg] = val
-      }
-
-      i++
-      continue
-    }
-    remain.push(arg)
-  }
-}
-
-const SINGLES = Symbol('singles')
-const singleCharacters = (arg, shorthands) => {
-  let singles = shorthands[SINGLES]
-  if (!singles) {
-    singles = Object.keys(shorthands).filter((s) => s.length === 1).reduce((l, r) => {
-      l[r] = true
-      return l
-    }, {})
-    shorthands[SINGLES] = singles
-    debug('shorthand singles', singles)
-  }
-  const chrs = arg.split('').filter((c) => singles[c])
-  return chrs.join('') === arg ? chrs : null
-}
-
-function resolveShort (arg, ...rest) {
-  const { types = {}, shorthands = {} } = rest.length ? rest.pop() : {}
-  const shortAbbr = rest[0] ?? abbrev(Object.keys(shorthands))
-  const abbrevs = rest[1] ?? abbrev(Object.keys(types))
-
-  // handle single-char shorthands glommed together, like
-  // npm ls -glp, but only if there is one dash, and only if
-  // all of the chars are single-char shorthands, and it's
-  // not a match to some other abbrev.
-  arg = arg.replace(/^-+/, '')
-
-  // if it's an exact known option, then don't go any further
-  if (abbrevs[arg] === arg) {
-    return null
-  }
-
-  // if it's an exact known shortopt, same deal
-  if (shorthands[arg]) {
-    // make it an array, if it's a list of words
-    if (shorthands[arg] && !Array.isArray(shorthands[arg])) {
-      shorthands[arg] = shorthands[arg].split(/\s+/)
-    }
-
-    return shorthands[arg]
-  }
-
-  // first check to see if this arg is a set of single-char shorthands
-  const chrs = singleCharacters(arg, shorthands)
-  if (chrs) {
-    return chrs.map((c) => shorthands[c]).reduce((l, r) => l.concat(r), [])
-  }
-
-  // if it's an arg abbrev, and not a literal shorthand, then prefer the arg
-  if (abbrevs[arg] && !shorthands[arg]) {
-    return null
-  }
-
-  // if it's an abbr for a shorthand, then use that
-  if (shortAbbr[arg]) {
-    arg = shortAbbr[arg]
-  }
-
-  // make it an array, if it's a list of words
-  if (shorthands[arg] && !Array.isArray(shorthands[arg])) {
-    shorthands[arg] = shorthands[arg].split(/\s+/)
-  }
-
-  return shorthands[arg]
-}
-
-module.exports = {
-  nopt,
-  clean,
-  parse,
-  validate,
-  resolveShort,
-  typeDefs: defaultTypeDefs,
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/nopt/lib/nopt.js b/node_modules/@npmcli/run-script/node_modules/nopt/lib/nopt.js
deleted file mode 100644
index 37f01a08783f8..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/nopt/lib/nopt.js
+++ /dev/null
@@ -1,30 +0,0 @@
-const lib = require('./nopt-lib')
-const defaultTypeDefs = require('./type-defs')
-
-// This is the version of nopt's API that requires setting typeDefs and invalidHandler
-// on the required `nopt` object, since it is a singleton. To avoid a breaking change,
-// an API that requires all options to be passed in is located in `nopt-lib.js` and
-// exported here as lib.
-// TODO(breaking): make API only work in non-singleton mode
-
-module.exports = exports = nopt
-exports.clean = clean
-exports.typeDefs = defaultTypeDefs
-exports.lib = lib
-
-function nopt (types, shorthands, args = process.argv, slice = 2) {
-  return lib.nopt(args.slice(slice), {
-    types: types || {},
-    shorthands: shorthands || {},
-    typeDefs: exports.typeDefs,
-    invalidHandler: exports.invalidHandler,
-  })
-}
-
-function clean (data, types, typeDefs = exports.typeDefs) {
-  return lib.clean(data, {
-    types: types || {},
-    typeDefs,
-    invalidHandler: exports.invalidHandler,
-  })
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/nopt/lib/type-defs.js b/node_modules/@npmcli/run-script/node_modules/nopt/lib/type-defs.js
deleted file mode 100644
index 608352ee248cc..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/nopt/lib/type-defs.js
+++ /dev/null
@@ -1,91 +0,0 @@
-const url = require('url')
-const path = require('path')
-const Stream = require('stream').Stream
-const os = require('os')
-const debug = require('./debug')
-
-function validateString (data, k, val) {
-  data[k] = String(val)
-}
-
-function validatePath (data, k, val) {
-  if (val === true) {
-    return false
-  }
-  if (val === null) {
-    return true
-  }
-
-  val = String(val)
-
-  const isWin = process.platform === 'win32'
-  const homePattern = isWin ? /^~(\/|\\)/ : /^~\//
-  const home = os.homedir()
-
-  if (home && val.match(homePattern)) {
-    data[k] = path.resolve(home, val.slice(2))
-  } else {
-    data[k] = path.resolve(val)
-  }
-  return true
-}
-
-function validateNumber (data, k, val) {
-  debug('validate Number %j %j %j', k, val, isNaN(val))
-  if (isNaN(val)) {
-    return false
-  }
-  data[k] = +val
-}
-
-function validateDate (data, k, val) {
-  const s = Date.parse(val)
-  debug('validate Date %j %j %j', k, val, s)
-  if (isNaN(s)) {
-    return false
-  }
-  data[k] = new Date(val)
-}
-
-function validateBoolean (data, k, val) {
-  if (typeof val === 'string') {
-    if (!isNaN(val)) {
-      val = !!(+val)
-    } else if (val === 'null' || val === 'false') {
-      val = false
-    } else {
-      val = true
-    }
-  } else {
-    val = !!val
-  }
-  data[k] = val
-}
-
-function validateUrl (data, k, val) {
-  // Changing this would be a breaking change in the npm cli
-  /* eslint-disable-next-line node/no-deprecated-api */
-  val = url.parse(String(val))
-  if (!val.host) {
-    return false
-  }
-  data[k] = val.href
-}
-
-function validateStream (data, k, val) {
-  if (!(val instanceof Stream)) {
-    return false
-  }
-  data[k] = val
-}
-
-module.exports = {
-  String: { type: String, validate: validateString },
-  Boolean: { type: Boolean, validate: validateBoolean },
-  url: { type: url, validate: validateUrl },
-  Number: { type: Number, validate: validateNumber },
-  path: { type: path, validate: validatePath },
-  Stream: { type: Stream, validate: validateStream },
-  Date: { type: Date, validate: validateDate },
-  Array: { type: Array },
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/nopt/package.json b/node_modules/@npmcli/run-script/node_modules/nopt/package.json
deleted file mode 100644
index 37b770ad48771..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/nopt/package.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
-  "name": "nopt",
-  "version": "7.2.1",
-  "description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.",
-  "author": "GitHub Inc.",
-  "main": "lib/nopt.js",
-  "scripts": {
-    "test": "tap",
-    "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "lintfix": "npm run lint -- --fix",
-    "snap": "tap",
-    "posttest": "npm run lint"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/nopt.git"
-  },
-  "bin": {
-    "nopt": "bin/nopt.js"
-  },
-  "license": "ISC",
-  "dependencies": {
-    "abbrev": "^2.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.22.0",
-    "tap": "^16.3.0"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "windowsCI": false,
-    "version": "4.22.0",
-    "publish": true
-  }
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/ssri/LICENSE.md b/node_modules/@npmcli/run-script/node_modules/ssri/LICENSE.md
deleted file mode 100644
index e335388869f50..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/ssri/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright 2021 (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/run-script/node_modules/ssri/lib/index.js b/node_modules/@npmcli/run-script/node_modules/ssri/lib/index.js
deleted file mode 100644
index 7d749ed480fb9..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/ssri/lib/index.js
+++ /dev/null
@@ -1,580 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const { Minipass } = require('minipass')
-
-const SPEC_ALGORITHMS = ['sha512', 'sha384', 'sha256']
-const DEFAULT_ALGORITHMS = ['sha512']
-
-// TODO: this should really be a hardcoded list of algorithms we support,
-// rather than [a-z0-9].
-const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i
-const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/
-const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/
-const VCHAR_REGEX = /^[\x21-\x7E]+$/
-
-const getOptString = options => options?.length ? `?${options.join('?')}` : ''
-
-class IntegrityStream extends Minipass {
-  #emittedIntegrity
-  #emittedSize
-  #emittedVerified
-
-  constructor (opts) {
-    super()
-    this.size = 0
-    this.opts = opts
-
-    // may be overridden later, but set now for class consistency
-    this.#getOptions()
-
-    // options used for calculating the stream; can't be changed.
-    if (opts?.algorithms) {
-      this.algorithms = [...opts.algorithms]
-    } else {
-      this.algorithms = [...DEFAULT_ALGORITHMS]
-    }
-    if (this.algorithm !== null && !this.algorithms.includes(this.algorithm)) {
-      this.algorithms.push(this.algorithm)
-    }
-
-    this.hashes = this.algorithms.map(crypto.createHash)
-  }
-
-  #getOptions () {
-    // For verification
-    this.sri = this.opts?.integrity ? parse(this.opts?.integrity, this.opts) : null
-    this.expectedSize = this.opts?.size
-
-    if (!this.sri) {
-      this.algorithm = null
-    } else if (this.sri.isHash) {
-      this.goodSri = true
-      this.algorithm = this.sri.algorithm
-    } else {
-      this.goodSri = !this.sri.isEmpty()
-      this.algorithm = this.sri.pickAlgorithm(this.opts)
-    }
-
-    this.digests = this.goodSri ? this.sri[this.algorithm] : null
-    this.optString = getOptString(this.opts?.options)
-  }
-
-  on (ev, handler) {
-    if (ev === 'size' && this.#emittedSize) {
-      return handler(this.#emittedSize)
-    }
-
-    if (ev === 'integrity' && this.#emittedIntegrity) {
-      return handler(this.#emittedIntegrity)
-    }
-
-    if (ev === 'verified' && this.#emittedVerified) {
-      return handler(this.#emittedVerified)
-    }
-
-    return super.on(ev, handler)
-  }
-
-  emit (ev, data) {
-    if (ev === 'end') {
-      this.#onEnd()
-    }
-    return super.emit(ev, data)
-  }
-
-  write (data) {
-    this.size += data.length
-    this.hashes.forEach(h => h.update(data))
-    return super.write(data)
-  }
-
-  #onEnd () {
-    if (!this.goodSri) {
-      this.#getOptions()
-    }
-    const newSri = parse(this.hashes.map((h, i) => {
-      return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}`
-    }).join(' '), this.opts)
-    // Integrity verification mode
-    const match = this.goodSri && newSri.match(this.sri, this.opts)
-    if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) {
-      /* eslint-disable-next-line max-len */
-      const err = new Error(`stream size mismatch when checking ${this.sri}.\n  Wanted: ${this.expectedSize}\n  Found: ${this.size}`)
-      err.code = 'EBADSIZE'
-      err.found = this.size
-      err.expected = this.expectedSize
-      err.sri = this.sri
-      this.emit('error', err)
-    } else if (this.sri && !match) {
-      /* eslint-disable-next-line max-len */
-      const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`)
-      err.code = 'EINTEGRITY'
-      err.found = newSri
-      err.expected = this.digests
-      err.algorithm = this.algorithm
-      err.sri = this.sri
-      this.emit('error', err)
-    } else {
-      this.#emittedSize = this.size
-      this.emit('size', this.size)
-      this.#emittedIntegrity = newSri
-      this.emit('integrity', newSri)
-      if (match) {
-        this.#emittedVerified = match
-        this.emit('verified', match)
-      }
-    }
-  }
-}
-
-class Hash {
-  get isHash () {
-    return true
-  }
-
-  constructor (hash, opts) {
-    const strict = opts?.strict
-    this.source = hash.trim()
-
-    // set default values so that V8 always sees
-    // a familiar object template.
-    this.digest = ''
-    this.algorithm = ''
-    this.options = []
-
-    // 3.1. Integrity metadata (called "Hash" by ssri)
-    // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description
-    const match = this.source.match(
-      strict
-        ? STRICT_SRI_REGEX
-        : SRI_REGEX
-    )
-    if (!match) {
-      return
-    }
-    if (strict && !SPEC_ALGORITHMS.includes(match[1])) {
-      return
-    }
-    this.algorithm = match[1]
-    this.digest = match[2]
-
-    const rawOpts = match[3]
-    if (rawOpts) {
-      this.options = rawOpts.slice(1).split('?')
-    }
-  }
-
-  hexDigest () {
-    return this.digest && Buffer.from(this.digest, 'base64').toString('hex')
-  }
-
-  toJSON () {
-    return this.toString()
-  }
-
-  match (integrity, opts) {
-    const other = parse(integrity, opts)
-    if (!other) {
-      return false
-    }
-    if (other.isIntegrity) {
-      const algo = other.pickAlgorithm(opts, [this.algorithm])
-
-      if (!algo) {
-        return false
-      }
-
-      const foundHash = other[algo].find(hash => hash.digest === this.digest)
-
-      if (foundHash) {
-        return foundHash
-      }
-
-      return false
-    }
-    return other.digest === this.digest ? other : false
-  }
-
-  toString (opts) {
-    if (opts?.strict) {
-      // Strict mode enforces the standard as close to the letter
-      // as it can.
-      if (!(
-        // The spec has very restricted productions for algorithms.
-        // https://www.w3.org/TR/CSP2/#source-list-syntax
-        SPEC_ALGORITHMS.includes(this.algorithm) &&
-        // Usually, if someone insists on using a "different" base64, we
-        // leave it as-is, since there are multiple standards, and the
-        // one specified is not a URL-safe variant.
-        // https://www.w3.org/TR/CSP2/#base64_value
-        this.digest.match(BASE64_REGEX) &&
-        // Option syntax is strictly visual chars.
-        // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression
-        // https://tools.ietf.org/html/rfc5234#appendix-B.1
-        this.options.every(opt => opt.match(VCHAR_REGEX))
-      )) {
-        return ''
-      }
-    }
-    return `${this.algorithm}-${this.digest}${getOptString(this.options)}`
-  }
-}
-
-function integrityHashToString (toString, sep, opts, hashes) {
-  const toStringIsNotEmpty = toString !== ''
-
-  let shouldAddFirstSep = false
-  let complement = ''
-
-  const lastIndex = hashes.length - 1
-
-  for (let i = 0; i < lastIndex; i++) {
-    const hashString = Hash.prototype.toString.call(hashes[i], opts)
-
-    if (hashString) {
-      shouldAddFirstSep = true
-
-      complement += hashString
-      complement += sep
-    }
-  }
-
-  const finalHashString = Hash.prototype.toString.call(hashes[lastIndex], opts)
-
-  if (finalHashString) {
-    shouldAddFirstSep = true
-    complement += finalHashString
-  }
-
-  if (toStringIsNotEmpty && shouldAddFirstSep) {
-    return toString + sep + complement
-  }
-
-  return toString + complement
-}
-
-class Integrity {
-  get isIntegrity () {
-    return true
-  }
-
-  toJSON () {
-    return this.toString()
-  }
-
-  isEmpty () {
-    return Object.keys(this).length === 0
-  }
-
-  toString (opts) {
-    let sep = opts?.sep || ' '
-    let toString = ''
-
-    if (opts?.strict) {
-      // Entries must be separated by whitespace, according to spec.
-      sep = sep.replace(/\S+/g, ' ')
-
-      for (const hash of SPEC_ALGORITHMS) {
-        if (this[hash]) {
-          toString = integrityHashToString(toString, sep, opts, this[hash])
-        }
-      }
-    } else {
-      for (const hash of Object.keys(this)) {
-        toString = integrityHashToString(toString, sep, opts, this[hash])
-      }
-    }
-
-    return toString
-  }
-
-  concat (integrity, opts) {
-    const other = typeof integrity === 'string'
-      ? integrity
-      : stringify(integrity, opts)
-    return parse(`${this.toString(opts)} ${other}`, opts)
-  }
-
-  hexDigest () {
-    return parse(this, { single: true }).hexDigest()
-  }
-
-  // add additional hashes to an integrity value, but prevent
-  // *changing* an existing integrity hash.
-  merge (integrity, opts) {
-    const other = parse(integrity, opts)
-    for (const algo in other) {
-      if (this[algo]) {
-        if (!this[algo].find(hash =>
-          other[algo].find(otherhash =>
-            hash.digest === otherhash.digest))) {
-          throw new Error('hashes do not match, cannot update integrity')
-        }
-      } else {
-        this[algo] = other[algo]
-      }
-    }
-  }
-
-  match (integrity, opts) {
-    const other = parse(integrity, opts)
-    if (!other) {
-      return false
-    }
-    const algo = other.pickAlgorithm(opts, Object.keys(this))
-    return (
-      !!algo &&
-      this[algo] &&
-      other[algo] &&
-      this[algo].find(hash =>
-        other[algo].find(otherhash =>
-          hash.digest === otherhash.digest
-        )
-      )
-    ) || false
-  }
-
-  // Pick the highest priority algorithm present, optionally also limited to a
-  // set of hashes found in another integrity.  When limiting it may return
-  // nothing.
-  pickAlgorithm (opts, hashes) {
-    const pickAlgorithm = opts?.pickAlgorithm || getPrioritizedHash
-    const keys = Object.keys(this).filter(k => {
-      if (hashes?.length) {
-        return hashes.includes(k)
-      }
-      return true
-    })
-    if (keys.length) {
-      return keys.reduce((acc, algo) => pickAlgorithm(acc, algo) || acc)
-    }
-    // no intersection between this and hashes, so there is nothing to pick
-    return null
-  }
-}
-
-module.exports.parse = parse
-function parse (sri, opts) {
-  if (!sri) {
-    return null
-  }
-  if (typeof sri === 'string') {
-    return _parse(sri, opts)
-  } else if (sri.algorithm && sri.digest) {
-    const fullSri = new Integrity()
-    fullSri[sri.algorithm] = [sri]
-    return _parse(stringify(fullSri, opts), opts)
-  } else {
-    return _parse(stringify(sri, opts), opts)
-  }
-}
-
-function _parse (integrity, opts) {
-  // 3.4.3. Parse metadata
-  // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
-  if (opts?.single) {
-    return new Hash(integrity, opts)
-  }
-  const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => {
-    const hash = new Hash(string, opts)
-    if (hash.algorithm && hash.digest) {
-      const algo = hash.algorithm
-      if (!acc[algo]) {
-        acc[algo] = []
-      }
-      acc[algo].push(hash)
-    }
-    return acc
-  }, new Integrity())
-  return hashes.isEmpty() ? null : hashes
-}
-
-module.exports.stringify = stringify
-function stringify (obj, opts) {
-  if (obj.algorithm && obj.digest) {
-    return Hash.prototype.toString.call(obj, opts)
-  } else if (typeof obj === 'string') {
-    return stringify(parse(obj, opts), opts)
-  } else {
-    return Integrity.prototype.toString.call(obj, opts)
-  }
-}
-
-module.exports.fromHex = fromHex
-function fromHex (hexDigest, algorithm, opts) {
-  const optString = getOptString(opts?.options)
-  return parse(
-    `${algorithm}-${
-      Buffer.from(hexDigest, 'hex').toString('base64')
-    }${optString}`, opts
-  )
-}
-
-module.exports.fromData = fromData
-function fromData (data, opts) {
-  const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS]
-  const optString = getOptString(opts?.options)
-  return algorithms.reduce((acc, algo) => {
-    const digest = crypto.createHash(algo).update(data).digest('base64')
-    const hash = new Hash(
-      `${algo}-${digest}${optString}`,
-      opts
-    )
-    /* istanbul ignore else - it would be VERY strange if the string we
-     * just calculated with an algo did not have an algo or digest.
-     */
-    if (hash.algorithm && hash.digest) {
-      const hashAlgo = hash.algorithm
-      if (!acc[hashAlgo]) {
-        acc[hashAlgo] = []
-      }
-      acc[hashAlgo].push(hash)
-    }
-    return acc
-  }, new Integrity())
-}
-
-module.exports.fromStream = fromStream
-function fromStream (stream, opts) {
-  const istream = integrityStream(opts)
-  return new Promise((resolve, reject) => {
-    stream.pipe(istream)
-    stream.on('error', reject)
-    istream.on('error', reject)
-    let sri
-    istream.on('integrity', s => {
-      sri = s
-    })
-    istream.on('end', () => resolve(sri))
-    istream.resume()
-  })
-}
-
-module.exports.checkData = checkData
-function checkData (data, sri, opts) {
-  sri = parse(sri, opts)
-  if (!sri || !Object.keys(sri).length) {
-    if (opts?.error) {
-      throw Object.assign(
-        new Error('No valid integrity hashes to check against'), {
-          code: 'EINTEGRITY',
-        }
-      )
-    } else {
-      return false
-    }
-  }
-  const algorithm = sri.pickAlgorithm(opts)
-  const digest = crypto.createHash(algorithm).update(data).digest('base64')
-  const newSri = parse({ algorithm, digest })
-  const match = newSri.match(sri, opts)
-  opts = opts || {}
-  if (match || !(opts.error)) {
-    return match
-  } else if (typeof opts.size === 'number' && (data.length !== opts.size)) {
-    /* eslint-disable-next-line max-len */
-    const err = new Error(`data size mismatch when checking ${sri}.\n  Wanted: ${opts.size}\n  Found: ${data.length}`)
-    err.code = 'EBADSIZE'
-    err.found = data.length
-    err.expected = opts.size
-    err.sri = sri
-    throw err
-  } else {
-    /* eslint-disable-next-line max-len */
-    const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`)
-    err.code = 'EINTEGRITY'
-    err.found = newSri
-    err.expected = sri
-    err.algorithm = algorithm
-    err.sri = sri
-    throw err
-  }
-}
-
-module.exports.checkStream = checkStream
-function checkStream (stream, sri, opts) {
-  opts = opts || Object.create(null)
-  opts.integrity = sri
-  sri = parse(sri, opts)
-  if (!sri || !Object.keys(sri).length) {
-    return Promise.reject(Object.assign(
-      new Error('No valid integrity hashes to check against'), {
-        code: 'EINTEGRITY',
-      }
-    ))
-  }
-  const checker = integrityStream(opts)
-  return new Promise((resolve, reject) => {
-    stream.pipe(checker)
-    stream.on('error', reject)
-    checker.on('error', reject)
-    let verified
-    checker.on('verified', s => {
-      verified = s
-    })
-    checker.on('end', () => resolve(verified))
-    checker.resume()
-  })
-}
-
-module.exports.integrityStream = integrityStream
-function integrityStream (opts = Object.create(null)) {
-  return new IntegrityStream(opts)
-}
-
-module.exports.create = createIntegrity
-function createIntegrity (opts) {
-  const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS]
-  const optString = getOptString(opts?.options)
-
-  const hashes = algorithms.map(crypto.createHash)
-
-  return {
-    update: function (chunk, enc) {
-      hashes.forEach(h => h.update(chunk, enc))
-      return this
-    },
-    digest: function () {
-      const integrity = algorithms.reduce((acc, algo) => {
-        const digest = hashes.shift().digest('base64')
-        const hash = new Hash(
-          `${algo}-${digest}${optString}`,
-          opts
-        )
-        /* istanbul ignore else - it would be VERY strange if the hash we
-         * just calculated with an algo did not have an algo or digest.
-         */
-        if (hash.algorithm && hash.digest) {
-          const hashAlgo = hash.algorithm
-          if (!acc[hashAlgo]) {
-            acc[hashAlgo] = []
-          }
-          acc[hashAlgo].push(hash)
-        }
-        return acc
-      }, new Integrity())
-
-      return integrity
-    },
-  }
-}
-
-const NODE_HASHES = crypto.getHashes()
-
-// This is a Best Effort™ at a reasonable priority for hash algos
-const DEFAULT_PRIORITY = [
-  'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
-  // TODO - it's unclear _which_ of these Node will actually use as its name
-  //        for the algorithm, so we guess based on the OpenSSL names.
-  'sha3',
-  'sha3-256', 'sha3-384', 'sha3-512',
-  'sha3_256', 'sha3_384', 'sha3_512',
-].filter(algo => NODE_HASHES.includes(algo))
-
-function getPrioritizedHash (algo1, algo2) {
-  /* eslint-disable-next-line max-len */
-  return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase())
-    ? algo1
-    : algo2
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/ssri/package.json b/node_modules/@npmcli/run-script/node_modules/ssri/package.json
deleted file mode 100644
index 28395414e4643..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/ssri/package.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
-  "name": "ssri",
-  "version": "10.0.6",
-  "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "prerelease": "npm t",
-    "postrelease": "npm publish",
-    "posttest": "npm run lint",
-    "test": "tap",
-    "coverage": "tap",
-    "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "lintfix": "npm run lint -- --fix",
-    "snap": "tap"
-  },
-  "tap": {
-    "check-coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/ssri.git"
-  },
-  "keywords": [
-    "w3c",
-    "web",
-    "security",
-    "integrity",
-    "checksum",
-    "hashing",
-    "subresource integrity",
-    "sri",
-    "sri hash",
-    "sri string",
-    "sri generator",
-    "html"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "dependencies": {
-    "minipass": "^7.0.3"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.22.0",
-    "tap": "^16.0.1"
-  },
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.22.0",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/unique-filename/LICENSE b/node_modules/@npmcli/run-script/node_modules/unique-filename/LICENSE
deleted file mode 100644
index 69619c125ea7e..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/unique-filename/LICENSE
+++ /dev/null
@@ -1,5 +0,0 @@
-Copyright npm, Inc
-
-Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/run-script/node_modules/unique-filename/lib/index.js b/node_modules/@npmcli/run-script/node_modules/unique-filename/lib/index.js
deleted file mode 100644
index d067d2e709809..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/unique-filename/lib/index.js
+++ /dev/null
@@ -1,7 +0,0 @@
-var path = require('path')
-
-var uniqueSlug = require('unique-slug')
-
-module.exports = function (filepath, prefix, uniq) {
-  return path.join(filepath, (prefix ? prefix + '-' : '') + uniqueSlug(uniq))
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/unique-filename/package.json b/node_modules/@npmcli/run-script/node_modules/unique-filename/package.json
deleted file mode 100644
index b2fbf0666489a..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/unique-filename/package.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
-  "name": "unique-filename",
-  "version": "3.0.0",
-  "description": "Generate a unique filename for use in temporary directories or caches.",
-  "main": "lib/index.js",
-  "scripts": {
-    "test": "tap",
-    "lint": "eslint \"**/*.js\"",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "lintfix": "npm run lint -- --fix",
-    "snap": "tap",
-    "posttest": "npm run lint"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/unique-filename.git"
-  },
-  "keywords": [],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/iarna/unique-filename/issues"
-  },
-  "homepage": "https://github.com/iarna/unique-filename",
-  "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.5.1",
-    "tap": "^16.3.0"
-  },
-  "dependencies": {
-    "unique-slug": "^4.0.0"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.5.1"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/unique-slug/LICENSE b/node_modules/@npmcli/run-script/node_modules/unique-slug/LICENSE
deleted file mode 100644
index 7953647e7760b..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/unique-slug/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright npm, Inc
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/run-script/node_modules/unique-slug/lib/index.js b/node_modules/@npmcli/run-script/node_modules/unique-slug/lib/index.js
deleted file mode 100644
index 1bac84d95d730..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/unique-slug/lib/index.js
+++ /dev/null
@@ -1,11 +0,0 @@
-'use strict'
-var MurmurHash3 = require('imurmurhash')
-
-module.exports = function (uniq) {
-  if (uniq) {
-    var hash = new MurmurHash3(uniq)
-    return ('00000000' + hash.result().toString(16)).slice(-8)
-  } else {
-    return (Math.random().toString(16) + '0000000').slice(2, 10)
-  }
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/unique-slug/package.json b/node_modules/@npmcli/run-script/node_modules/unique-slug/package.json
deleted file mode 100644
index 33732cdbb4285..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/unique-slug/package.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
-  "name": "unique-slug",
-  "version": "4.0.0",
-  "description": "Generate a unique character string suitible for use in files and URLs.",
-  "main": "lib/index.js",
-  "scripts": {
-    "test": "tap",
-    "lint": "eslint \"**/*.js\"",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "lintfix": "npm run lint -- --fix",
-    "snap": "tap",
-    "posttest": "npm run lint"
-  },
-  "keywords": [],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "devDependencies": {
-    "@npmcli/eslint-config": "^3.1.0",
-    "@npmcli/template-oss": "4.5.1",
-    "tap": "^16.3.0"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/npm/unique-slug.git"
-  },
-  "dependencies": {
-    "imurmurhash": "^0.1.4"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.5.1"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/node_modules/@npmcli/run-script/package.json b/node_modules/@npmcli/run-script/package.json
index e5fd2fef62e5c..38a2ac9f87772 100644
--- a/node_modules/@npmcli/run-script/package.json
+++ b/node_modules/@npmcli/run-script/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/run-script",
-  "version": "9.0.1",
+  "version": "9.0.2",
   "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
   "author": "GitHub Inc.",
   "license": "ISC",
@@ -16,7 +16,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
+    "@npmcli/template-oss": "4.23.4",
     "spawk": "^1.8.1",
     "tap": "^16.0.1"
   },
@@ -24,7 +24,7 @@
     "@npmcli/node-gyp": "^4.0.0",
     "@npmcli/package-json": "^6.0.0",
     "@npmcli/promise-spawn": "^8.0.0",
-    "node-gyp": "^10.0.0",
+    "node-gyp": "^11.0.0",
     "proc-log": "^5.0.0",
     "which": "^5.0.0"
   },
@@ -42,7 +42,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.3",
+    "version": "4.23.4",
     "publish": "true"
   },
   "tap": {
diff --git a/node_modules/is-lambda/LICENSE b/node_modules/is-lambda/LICENSE
deleted file mode 100644
index 4a59c94175c2a..0000000000000
--- a/node_modules/is-lambda/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2016-2017 Thomas Watson Steen
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/node_modules/is-lambda/index.js b/node_modules/is-lambda/index.js
deleted file mode 100644
index b245ab1c68df1..0000000000000
--- a/node_modules/is-lambda/index.js
+++ /dev/null
@@ -1,6 +0,0 @@
-'use strict'
-
-module.exports = !!(
-  (process.env.LAMBDA_TASK_ROOT && process.env.AWS_EXECUTION_ENV) ||
-  false
-)
diff --git a/node_modules/is-lambda/package.json b/node_modules/is-lambda/package.json
deleted file mode 100644
index d8550898b4e4d..0000000000000
--- a/node_modules/is-lambda/package.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
-  "name": "is-lambda",
-  "version": "1.0.1",
-  "description": "Detect if your code is running on an AWS Lambda server",
-  "main": "index.js",
-  "dependencies": {},
-  "devDependencies": {
-    "clear-require": "^1.0.1",
-    "standard": "^10.0.2"
-  },
-  "scripts": {
-    "test": "standard && node test.js"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/watson/is-lambda.git"
-  },
-  "keywords": [
-    "aws",
-    "hosting",
-    "hosted",
-    "lambda",
-    "detect"
-  ],
-  "author": "Thomas Watson Steen  (https://twitter.com/wa7son)",
-  "license": "MIT",
-  "bugs": {
-    "url": "https://github.com/watson/is-lambda/issues"
-  },
-  "homepage": "https://github.com/watson/is-lambda",
-  "coordinates": [
-    37.3859955,
-    -122.0838831
-  ]
-}
diff --git a/node_modules/is-lambda/test.js b/node_modules/is-lambda/test.js
deleted file mode 100644
index e8e73257ad4ad..0000000000000
--- a/node_modules/is-lambda/test.js
+++ /dev/null
@@ -1,16 +0,0 @@
-'use strict'
-
-var assert = require('assert')
-var clearRequire = require('clear-require')
-
-process.env.AWS_EXECUTION_ENV = 'AWS_Lambda_nodejs6.10'
-process.env.LAMBDA_TASK_ROOT = '/var/task'
-
-var isCI = require('./')
-assert(isCI)
-
-delete process.env.AWS_EXECUTION_ENV
-
-clearRequire('./')
-isCI = require('./')
-assert(!isCI)
diff --git a/node_modules/negotiator/HISTORY.md b/node_modules/negotiator/HISTORY.md
deleted file mode 100644
index e1929aba8e22c..0000000000000
--- a/node_modules/negotiator/HISTORY.md
+++ /dev/null
@@ -1,113 +0,0 @@
-unreleased
-==================
-
-  * Added an option to pass a preferred encodings array #59
-
-0.6.3 / 2022-01-22
-==================
-
-  * Revert "Lazy-load modules from main entry point"
-
-0.6.2 / 2019-04-29
-==================
-
-  * Fix sorting charset, encoding, and language with extra parameters
-
-0.6.1 / 2016-05-02
-==================
-
-  * perf: improve `Accept` parsing speed
-  * perf: improve `Accept-Charset` parsing speed
-  * perf: improve `Accept-Encoding` parsing speed
-  * perf: improve `Accept-Language` parsing speed
-
-0.6.0 / 2015-09-29
-==================
-
-  * Fix including type extensions in parameters in `Accept` parsing
-  * Fix parsing `Accept` parameters with quoted equals
-  * Fix parsing `Accept` parameters with quoted semicolons
-  * Lazy-load modules from main entry point
-  * perf: delay type concatenation until needed
-  * perf: enable strict mode
-  * perf: hoist regular expressions
-  * perf: remove closures getting spec properties
-  * perf: remove a closure from media type parsing
-  * perf: remove property delete from media type parsing
-
-0.5.3 / 2015-05-10
-==================
-
-  * Fix media type parameter matching to be case-insensitive
-
-0.5.2 / 2015-05-06
-==================
-
-  * Fix comparing media types with quoted values
-  * Fix splitting media types with quoted commas
-
-0.5.1 / 2015-02-14
-==================
-
-  * Fix preference sorting to be stable for long acceptable lists
-
-0.5.0 / 2014-12-18
-==================
-
-  * Fix list return order for large accepted lists
-  * Fix missing identity encoding when q=0 exists
-  * Remove dynamic building of Negotiator class
-
-0.4.9 / 2014-10-14
-==================
-
-  * Fix error when media type has invalid parameter
-
-0.4.8 / 2014-09-28
-==================
-
-  * Fix all negotiations to be case-insensitive
-  * Stable sort preferences of same quality according to client order
-  * Support Node.js 0.6
-
-0.4.7 / 2014-06-24
-==================
-
-  * Handle invalid provided languages
-  * Handle invalid provided media types
-
-0.4.6 / 2014-06-11
-==================
-
-  * Order by specificity when quality is the same
-
-0.4.5 / 2014-05-29
-==================
-
-  * Fix regression in empty header handling
-
-0.4.4 / 2014-05-29
-==================
-
-  * Fix behaviors when headers are not present
-
-0.4.3 / 2014-04-16
-==================
-
-  * Handle slashes on media params correctly
-
-0.4.2 / 2014-02-28
-==================
-
-  * Fix media type sorting
-  * Handle media types params strictly
-
-0.4.1 / 2014-01-16
-==================
-
-  * Use most specific matches
-
-0.4.0 / 2014-01-09
-==================
-
-  * Remove preferred prefix from methods
diff --git a/node_modules/negotiator/LICENSE b/node_modules/negotiator/LICENSE
deleted file mode 100644
index ea6b9e2e9ac25..0000000000000
--- a/node_modules/negotiator/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-(The MIT License)
-
-Copyright (c) 2012-2014 Federico Romero
-Copyright (c) 2012-2014 Isaac Z. Schlueter
-Copyright (c) 2014-2015 Douglas Christopher Wilson
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-'Software'), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/negotiator/index.js b/node_modules/negotiator/index.js
deleted file mode 100644
index 7df0b0a531815..0000000000000
--- a/node_modules/negotiator/index.js
+++ /dev/null
@@ -1,82 +0,0 @@
-/*!
- * negotiator
- * Copyright(c) 2012 Federico Romero
- * Copyright(c) 2012-2014 Isaac Z. Schlueter
- * Copyright(c) 2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-var preferredCharsets = require('./lib/charset')
-var preferredEncodings = require('./lib/encoding')
-var preferredLanguages = require('./lib/language')
-var preferredMediaTypes = require('./lib/mediaType')
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = Negotiator;
-module.exports.Negotiator = Negotiator;
-
-/**
- * Create a Negotiator instance from a request.
- * @param {object} request
- * @public
- */
-
-function Negotiator(request) {
-  if (!(this instanceof Negotiator)) {
-    return new Negotiator(request);
-  }
-
-  this.request = request;
-}
-
-Negotiator.prototype.charset = function charset(available) {
-  var set = this.charsets(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.charsets = function charsets(available) {
-  return preferredCharsets(this.request.headers['accept-charset'], available);
-};
-
-Negotiator.prototype.encoding = function encoding(available, preferred) {
-  var set = this.encodings(available, preferred);
-  return set && set[0];
-};
-
-Negotiator.prototype.encodings = function encodings(available, preferred) {
-  return preferredEncodings(this.request.headers['accept-encoding'], available, preferred);
-};
-
-Negotiator.prototype.language = function language(available) {
-  var set = this.languages(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.languages = function languages(available) {
-  return preferredLanguages(this.request.headers['accept-language'], available);
-};
-
-Negotiator.prototype.mediaType = function mediaType(available) {
-  var set = this.mediaTypes(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.mediaTypes = function mediaTypes(available) {
-  return preferredMediaTypes(this.request.headers.accept, available);
-};
-
-// Backwards compatibility
-Negotiator.prototype.preferredCharset = Negotiator.prototype.charset;
-Negotiator.prototype.preferredCharsets = Negotiator.prototype.charsets;
-Negotiator.prototype.preferredEncoding = Negotiator.prototype.encoding;
-Negotiator.prototype.preferredEncodings = Negotiator.prototype.encodings;
-Negotiator.prototype.preferredLanguage = Negotiator.prototype.language;
-Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages;
-Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType;
-Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes;
diff --git a/node_modules/negotiator/lib/charset.js b/node_modules/negotiator/lib/charset.js
deleted file mode 100644
index cdd014803474a..0000000000000
--- a/node_modules/negotiator/lib/charset.js
+++ /dev/null
@@ -1,169 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredCharsets;
-module.exports.preferredCharsets = preferredCharsets;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleCharsetRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Charset header.
- * @private
- */
-
-function parseAcceptCharset(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var charset = parseCharset(accepts[i].trim(), i);
-
-    if (charset) {
-      accepts[j++] = charset;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a charset from the Accept-Charset header.
- * @private
- */
-
-function parseCharset(str, i) {
-  var match = simpleCharsetRegExp.exec(str);
-  if (!match) return null;
-
-  var charset = match[1];
-  var q = 1;
-  if (match[2]) {
-    var params = match[2].split(';')
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].trim().split('=');
-      if (p[0] === 'q') {
-        q = parseFloat(p[1]);
-        break;
-      }
-    }
-  }
-
-  return {
-    charset: charset,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of a charset.
- * @private
- */
-
-function getCharsetPriority(charset, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(charset, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the charset.
- * @private
- */
-
-function specify(charset, spec, index) {
-  var s = 0;
-  if(spec.charset.toLowerCase() === charset.toLowerCase()){
-    s |= 1;
-  } else if (spec.charset !== '*' ) {
-    return null
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-}
-
-/**
- * Get the preferred charsets from an Accept-Charset header.
- * @public
- */
-
-function preferredCharsets(accept, provided) {
-  // RFC 2616 sec 14.2: no header = *
-  var accepts = parseAcceptCharset(accept === undefined ? '*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all charsets
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullCharset);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getCharsetPriority(type, accepts, index);
-  });
-
-  // sorted list of accepted charsets
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getCharset(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full charset string.
- * @private
- */
-
-function getFullCharset(spec) {
-  return spec.charset;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
diff --git a/node_modules/negotiator/lib/encoding.js b/node_modules/negotiator/lib/encoding.js
deleted file mode 100644
index 9ebb633d67743..0000000000000
--- a/node_modules/negotiator/lib/encoding.js
+++ /dev/null
@@ -1,205 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredEncodings;
-module.exports.preferredEncodings = preferredEncodings;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleEncodingRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Encoding header.
- * @private
- */
-
-function parseAcceptEncoding(accept) {
-  var accepts = accept.split(',');
-  var hasIdentity = false;
-  var minQuality = 1;
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var encoding = parseEncoding(accepts[i].trim(), i);
-
-    if (encoding) {
-      accepts[j++] = encoding;
-      hasIdentity = hasIdentity || specify('identity', encoding);
-      minQuality = Math.min(minQuality, encoding.q || 1);
-    }
-  }
-
-  if (!hasIdentity) {
-    /*
-     * If identity doesn't explicitly appear in the accept-encoding header,
-     * it's added to the list of acceptable encodings with the lowest q
-     */
-    accepts[j++] = {
-      encoding: 'identity',
-      q: minQuality,
-      i: i
-    };
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse an encoding from the Accept-Encoding header.
- * @private
- */
-
-function parseEncoding(str, i) {
-  var match = simpleEncodingRegExp.exec(str);
-  if (!match) return null;
-
-  var encoding = match[1];
-  var q = 1;
-  if (match[2]) {
-    var params = match[2].split(';');
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].trim().split('=');
-      if (p[0] === 'q') {
-        q = parseFloat(p[1]);
-        break;
-      }
-    }
-  }
-
-  return {
-    encoding: encoding,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of an encoding.
- * @private
- */
-
-function getEncodingPriority(encoding, accepted, index) {
-  var priority = {encoding: encoding, o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(encoding, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the encoding.
- * @private
- */
-
-function specify(encoding, spec, index) {
-  var s = 0;
-  if(spec.encoding.toLowerCase() === encoding.toLowerCase()){
-    s |= 1;
-  } else if (spec.encoding !== '*' ) {
-    return null
-  }
-
-  return {
-    encoding: encoding,
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-};
-
-/**
- * Get the preferred encodings from an Accept-Encoding header.
- * @public
- */
-
-function preferredEncodings(accept, provided, preferred) {
-  var accepts = parseAcceptEncoding(accept || '');
-
-  var comparator = preferred ? function comparator (a, b) {
-    if (a.q !== b.q) {
-      return b.q - a.q // higher quality first
-    }
-
-    var aPreferred = preferred.indexOf(a.encoding)
-    var bPreferred = preferred.indexOf(b.encoding)
-
-    if (aPreferred === -1 && bPreferred === -1) {
-      // consider the original specificity/order
-      return (b.s - a.s) || (a.o - b.o) || (a.i - b.i)
-    }
-
-    if (aPreferred !== -1 && bPreferred !== -1) {
-      return aPreferred - bPreferred // consider the preferred order
-    }
-
-    return aPreferred === -1 ? 1 : -1 // preferred first
-  } : compareSpecs;
-
-  if (!provided) {
-    // sorted list of all encodings
-    return accepts
-      .filter(isQuality)
-      .sort(comparator)
-      .map(getFullEncoding);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getEncodingPriority(type, accepts, index);
-  });
-
-  // sorted list of accepted encodings
-  return priorities.filter(isQuality).sort(comparator).map(function getEncoding(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i);
-}
-
-/**
- * Get full encoding string.
- * @private
- */
-
-function getFullEncoding(spec) {
-  return spec.encoding;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
diff --git a/node_modules/negotiator/lib/language.js b/node_modules/negotiator/lib/language.js
deleted file mode 100644
index a23167252719b..0000000000000
--- a/node_modules/negotiator/lib/language.js
+++ /dev/null
@@ -1,179 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredLanguages;
-module.exports.preferredLanguages = preferredLanguages;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleLanguageRegExp = /^\s*([^\s\-;]+)(?:-([^\s;]+))?\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Language header.
- * @private
- */
-
-function parseAcceptLanguage(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var language = parseLanguage(accepts[i].trim(), i);
-
-    if (language) {
-      accepts[j++] = language;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a language from the Accept-Language header.
- * @private
- */
-
-function parseLanguage(str, i) {
-  var match = simpleLanguageRegExp.exec(str);
-  if (!match) return null;
-
-  var prefix = match[1]
-  var suffix = match[2]
-  var full = prefix
-
-  if (suffix) full += "-" + suffix;
-
-  var q = 1;
-  if (match[3]) {
-    var params = match[3].split(';')
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].split('=');
-      if (p[0] === 'q') q = parseFloat(p[1]);
-    }
-  }
-
-  return {
-    prefix: prefix,
-    suffix: suffix,
-    q: q,
-    i: i,
-    full: full
-  };
-}
-
-/**
- * Get the priority of a language.
- * @private
- */
-
-function getLanguagePriority(language, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(language, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the language.
- * @private
- */
-
-function specify(language, spec, index) {
-  var p = parseLanguage(language)
-  if (!p) return null;
-  var s = 0;
-  if(spec.full.toLowerCase() === p.full.toLowerCase()){
-    s |= 4;
-  } else if (spec.prefix.toLowerCase() === p.full.toLowerCase()) {
-    s |= 2;
-  } else if (spec.full.toLowerCase() === p.prefix.toLowerCase()) {
-    s |= 1;
-  } else if (spec.full !== '*' ) {
-    return null
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-};
-
-/**
- * Get the preferred languages from an Accept-Language header.
- * @public
- */
-
-function preferredLanguages(accept, provided) {
-  // RFC 2616 sec 14.4: no header = *
-  var accepts = parseAcceptLanguage(accept === undefined ? '*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all languages
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullLanguage);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getLanguagePriority(type, accepts, index);
-  });
-
-  // sorted list of accepted languages
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getLanguage(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full language string.
- * @private
- */
-
-function getFullLanguage(spec) {
-  return spec.full;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
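
> Note (illustration, not part of the patch): the language matcher deleted above builds a specificity bitmask in `specify` (exact full-tag match scores 4, prefix matches score 2 or 1, and only `*` matches otherwise), then ranks candidates with the same `compareSpecs` ordering. A usage sketch, assuming `negotiator` is still installed (after this change it is a dev-only dependency of npm, no longer bundled):

```js
// Usage sketch of the public API backed by lib/language.js.
const Negotiator = require('negotiator')

const negotiator = new Negotiator({
  headers: { 'accept-language': 'en;q=0.8, es, pt' },
})

// q-values win first: es (implicit q=1) ranks ahead of en (q=0.8);
// fr is not acceptable at all and is dropped from the result.
console.log(negotiator.languages(['en', 'es', 'fr'])) // -> [ 'es', 'en' ]
```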
diff --git a/node_modules/negotiator/lib/mediaType.js b/node_modules/negotiator/lib/mediaType.js
deleted file mode 100644
index 8e402ea88394c..0000000000000
--- a/node_modules/negotiator/lib/mediaType.js
+++ /dev/null
@@ -1,294 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredMediaTypes;
-module.exports.preferredMediaTypes = preferredMediaTypes;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleMediaTypeRegExp = /^\s*([^\s\/;]+)\/([^;\s]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept header.
- * @private
- */
-
-function parseAccept(accept) {
-  var accepts = splitMediaTypes(accept);
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var mediaType = parseMediaType(accepts[i].trim(), i);
-
-    if (mediaType) {
-      accepts[j++] = mediaType;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a media type from the Accept header.
- * @private
- */
-
-function parseMediaType(str, i) {
-  var match = simpleMediaTypeRegExp.exec(str);
-  if (!match) return null;
-
-  var params = Object.create(null);
-  var q = 1;
-  var subtype = match[2];
-  var type = match[1];
-
-  if (match[3]) {
-    var kvps = splitParameters(match[3]).map(splitKeyValuePair);
-
-    for (var j = 0; j < kvps.length; j++) {
-      var pair = kvps[j];
-      var key = pair[0].toLowerCase();
-      var val = pair[1];
-
-      // get the value, unwrapping quotes
-      var value = val && val[0] === '"' && val[val.length - 1] === '"'
-        ? val.slice(1, -1)
-        : val;
-
-      if (key === 'q') {
-        q = parseFloat(value);
-        break;
-      }
-
-      // store parameter
-      params[key] = value;
-    }
-  }
-
-  return {
-    type: type,
-    subtype: subtype,
-    params: params,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of a media type.
- * @private
- */
-
-function getMediaTypePriority(type, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(type, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the media type.
- * @private
- */
-
-function specify(type, spec, index) {
-  var p = parseMediaType(type);
-  var s = 0;
-
-  if (!p) {
-    return null;
-  }
-
-  if(spec.type.toLowerCase() == p.type.toLowerCase()) {
-    s |= 4
-  } else if(spec.type != '*') {
-    return null;
-  }
-
-  if(spec.subtype.toLowerCase() == p.subtype.toLowerCase()) {
-    s |= 2
-  } else if(spec.subtype != '*') {
-    return null;
-  }
-
-  var keys = Object.keys(spec.params);
-  if (keys.length > 0) {
-    if (keys.every(function (k) {
-      return spec.params[k] == '*' || (spec.params[k] || '').toLowerCase() == (p.params[k] || '').toLowerCase();
-    })) {
-      s |= 1
-    } else {
-      return null
-    }
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s,
-  }
-}
-
-/**
- * Get the preferred media types from an Accept header.
- * @public
- */
-
-function preferredMediaTypes(accept, provided) {
-  // RFC 2616 sec 14.2: no header = */*
-  var accepts = parseAccept(accept === undefined ? '*/*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all types
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullType);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getMediaTypePriority(type, accepts, index);
-  });
-
-  // sorted list of accepted types
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getType(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full type string.
- * @private
- */
-
-function getFullType(spec) {
-  return spec.type + '/' + spec.subtype;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
-
-/**
- * Count the number of quotes in a string.
- * @private
- */
-
-function quoteCount(string) {
-  var count = 0;
-  var index = 0;
-
-  while ((index = string.indexOf('"', index)) !== -1) {
-    count++;
-    index++;
-  }
-
-  return count;
-}
-
-/**
- * Split a key value pair.
- * @private
- */
-
-function splitKeyValuePair(str) {
-  var index = str.indexOf('=');
-  var key;
-  var val;
-
-  if (index === -1) {
-    key = str;
-  } else {
-    key = str.slice(0, index);
-    val = str.slice(index + 1);
-  }
-
-  return [key, val];
-}
-
-/**
- * Split an Accept header into media types.
- * @private
- */
-
-function splitMediaTypes(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 1, j = 0; i < accepts.length; i++) {
-    if (quoteCount(accepts[j]) % 2 == 0) {
-      accepts[++j] = accepts[i];
-    } else {
-      accepts[j] += ',' + accepts[i];
-    }
-  }
-
-  // trim accepts
-  accepts.length = j + 1;
-
-  return accepts;
-}
-
-/**
- * Split a string of parameters.
- * @private
- */
-
-function splitParameters(str) {
-  var parameters = str.split(';');
-
-  for (var i = 1, j = 0; i < parameters.length; i++) {
-    if (quoteCount(parameters[j]) % 2 == 0) {
-      parameters[++j] = parameters[i];
-    } else {
-      parameters[j] += ';' + parameters[i];
-    }
-  }
-
-  // trim parameters
-  parameters.length = j + 1;
-
-  for (var i = 0; i < parameters.length; i++) {
-    parameters[i] = parameters[i].trim();
-  }
-
-  return parameters;
-}
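
> Note (illustration, not part of the patch): the media-type matcher deleted above adds two refinements over the encoding and language matchers: `specify` also compares parameters (a spec whose parameters match scores `s |= 1` and outranks one matching type/subtype alone), and `splitMediaTypes`/`splitParameters` count quotes via `quoteCount` so a comma or semicolon inside a quoted parameter value does not split the type. A usage sketch:

```js
// Usage sketch of the public API backed by lib/mediaType.js.
const Negotiator = require('negotiator')

const negotiator = new Negotiator({
  headers: { accept: 'text/html, application/*;q=0.2, image/jpeg;q=0.8' },
})

// text/html matches exactly (q=1), image/jpeg matches at q=0.8, and
// application/json only matches the application/* wildcard at q=0.2.
console.log(negotiator.mediaTypes(['text/html', 'image/jpeg', 'application/json']))
// -> [ 'text/html', 'image/jpeg', 'application/json' ]
```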
diff --git a/node_modules/negotiator/package.json b/node_modules/negotiator/package.json
deleted file mode 100644
index 19b0a8a6ef604..0000000000000
--- a/node_modules/negotiator/package.json
+++ /dev/null
@@ -1,42 +0,0 @@
-{
-  "name": "negotiator",
-  "description": "HTTP content negotiation",
-  "version": "0.6.4",
-  "contributors": [
-    "Douglas Christopher Wilson ",
-    "Federico Romero ",
-    "Isaac Z. Schlueter  (http://blog.izs.me/)"
-  ],
-  "license": "MIT",
-  "keywords": [
-    "http",
-    "content negotiation",
-    "accept",
-    "accept-language",
-    "accept-encoding",
-    "accept-charset"
-  ],
-  "repository": "jshttp/negotiator",
-  "devDependencies": {
-    "eslint": "7.32.0",
-    "eslint-plugin-markdown": "2.2.1",
-    "mocha": "9.1.3",
-    "nyc": "15.1.0"
-  },
-  "files": [
-    "lib/",
-    "HISTORY.md",
-    "LICENSE",
-    "index.js",
-    "README.md"
-  ],
-  "engines": {
-    "node": ">= 0.6"
-  },
-  "scripts": {
-    "lint": "eslint .",
-    "test": "mocha --reporter spec --check-leaks --bail test/",
-    "test-ci": "nyc --reporter=lcov --reporter=text npm test",
-    "test-cov": "nyc --reporter=html --reporter=text npm test"
-  }
-}
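
> Note (hypothetical sanity check, not part of the patch): with the vendored copy removed, `negotiator`, and `is-lambda` dropped in the same change, should resolve only as dev dependencies. After a production install (`npm install --omit=dev`), neither should be reachable from npm's own node_modules:

```js
// require.resolve throws MODULE_NOT_FOUND when a package is absent,
// so a clean production tree should hit the catch branch for both.
for (const name of ['negotiator', 'is-lambda']) {
  try {
    require.resolve(name)
    console.log(`${name}: still resolvable`)
  } catch {
    console.log(`${name}: gone from the tree`)
  }
}
```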
diff --git a/package-lock.json b/package-lock.json
index 990f1c3e08165..bfb9af2714720 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1800,16 +1800,16 @@
       }
     },
     "node_modules/@npmcli/run-script": {
-      "version": "9.0.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-9.0.1.tgz",
-      "integrity": "sha512-q9C0uHrb6B6cm3qXVM32UmpqTKuFGbtP23O2K5sLvPMz2hilKd0ptqGXSpuunOuOmPQb/aT5F/kCXFc1P2gO/A==",
+      "version": "9.0.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-9.0.2.tgz",
+      "integrity": "sha512-cJXiUlycdizQwvqE1iaAb4VRUM3RX09/8q46zjvy+ct9GhfZRWd7jXYVc1tn/CfRlGPVkX/u4sstRlepsm7hfw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "@npmcli/node-gyp": "^4.0.0",
         "@npmcli/package-json": "^6.0.0",
         "@npmcli/promise-spawn": "^8.0.0",
-        "node-gyp": "^10.0.0",
+        "node-gyp": "^11.0.0",
         "proc-log": "^5.0.0",
         "which": "^5.0.0"
       },
@@ -1817,238 +1817,6 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/@npmcli/run-script/node_modules/@npmcli/agent": {
-      "version": "2.2.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.2.2.tgz",
-      "integrity": "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "agent-base": "^7.1.0",
-        "http-proxy-agent": "^7.0.0",
-        "https-proxy-agent": "^7.0.1",
-        "lru-cache": "^10.0.1",
-        "socks-proxy-agent": "^8.0.3"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/@npmcli/fs": {
-      "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz",
-      "integrity": "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "semver": "^7.3.5"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/abbrev": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-2.0.0.tgz",
-      "integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/cacache": {
-      "version": "18.0.4",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.4.tgz",
-      "integrity": "sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/fs": "^3.1.0",
-        "fs-minipass": "^3.0.0",
-        "glob": "^10.2.2",
-        "lru-cache": "^10.0.1",
-        "minipass": "^7.0.3",
-        "minipass-collect": "^2.0.1",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "p-map": "^4.0.0",
-        "ssri": "^10.0.0",
-        "tar": "^6.1.11",
-        "unique-filename": "^3.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/isexe": {
-      "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz",
-      "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": ">=16"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/make-fetch-happen": {
-      "version": "13.0.1",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz",
-      "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/agent": "^2.0.0",
-        "cacache": "^18.0.0",
-        "http-cache-semantics": "^4.1.1",
-        "is-lambda": "^1.0.1",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^3.0.0",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "negotiator": "^0.6.3",
-        "proc-log": "^4.2.0",
-        "promise-retry": "^2.0.1",
-        "ssri": "^10.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/make-fetch-happen/node_modules/proc-log": {
-      "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
-      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/minipass-fetch": {
-      "version": "3.0.5",
-      "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.5.tgz",
-      "integrity": "sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==",
-      "inBundle": true,
-      "license": "MIT",
-      "dependencies": {
-        "minipass": "^7.0.3",
-        "minipass-sized": "^1.0.3",
-        "minizlib": "^2.1.2"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      },
-      "optionalDependencies": {
-        "encoding": "^0.1.13"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/node-gyp": {
-      "version": "10.3.1",
-      "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-10.3.1.tgz",
-      "integrity": "sha512-Pp3nFHBThHzVtNY7U6JfPjvT/DTE8+o/4xKsLQtBoU+j2HLsGlhcfzflAoUreaJbNmYnX+LlLi0qjV8kpyO6xQ==",
-      "inBundle": true,
-      "license": "MIT",
-      "dependencies": {
-        "env-paths": "^2.2.0",
-        "exponential-backoff": "^3.1.1",
-        "glob": "^10.3.10",
-        "graceful-fs": "^4.2.6",
-        "make-fetch-happen": "^13.0.0",
-        "nopt": "^7.0.0",
-        "proc-log": "^4.1.0",
-        "semver": "^7.3.5",
-        "tar": "^6.2.1",
-        "which": "^4.0.0"
-      },
-      "bin": {
-        "node-gyp": "bin/node-gyp.js"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/proc-log": {
-      "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
-      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/node-gyp/node_modules/which": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz",
-      "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "isexe": "^3.1.1"
-      },
-      "bin": {
-        "node-which": "bin/which.js"
-      },
-      "engines": {
-        "node": "^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/nopt": {
-      "version": "7.2.1",
-      "resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.1.tgz",
-      "integrity": "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "abbrev": "^2.0.0"
-      },
-      "bin": {
-        "nopt": "bin/nopt.js"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/ssri": {
-      "version": "10.0.6",
-      "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.6.tgz",
-      "integrity": "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "minipass": "^7.0.3"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/unique-filename": {
-      "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz",
-      "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "unique-slug": "^4.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/unique-slug": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz",
-      "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "imurmurhash": "^0.1.4"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/@npmcli/smoke-tests": {
       "resolved": "smoke-tests",
       "link": true
@@ -8125,7 +7893,7 @@
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz",
       "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==",
-      "inBundle": true,
+      "dev": true,
       "license": "MIT"
     },
     "node_modules/is-negative-zero": {
@@ -10587,7 +10355,7 @@
       "version": "0.6.4",
       "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz",
       "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==",
-      "inBundle": true,
+      "dev": true,
       "license": "MIT",
       "engines": {
         "node": ">= 0.6"