github.com/npm/cli.git
author     Gar <gar+gh@danger.computer>  2021-09-15 21:29:13 +0300
committer  Gar <gar+gh@danger.computer>  2021-10-07 19:07:19 +0300
commit     a13d9d53ddf3e0f52f4a39fe116653bf40cf99e5 (patch)
tree       c281324e40798b6e8e1ed53a9a2d55dd8f87b91a
parent     32e163fd10aace69d927dae46d04d64c04e5014b (diff)
feat: drop node 10, 11, and programmatic api
BREAKING CHANGE:
- Drop official support for node versions less than v12.
- Drop support for `require('npm')`
- Update a few subdependencies that dropped node10 support, and brought in the latest node-gyp

PR-URL: https://github.com/npm/cli/pull/3762
Credit: @wraithgar
Close: #3762
Reviewed-by: @fritzy
-rw-r--r--  .github/workflows/ci.yml | 44
-rw-r--r--  index.js | 5
-rw-r--r--  lib/npm.js | 3
-rw-r--r--  node_modules/@npmcli/arborist/bin/dedupe.js | 49
-rw-r--r--  node_modules/@npmcli/arborist/lib/arborist/audit.js | 11
-rw-r--r--  node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js | 20
-rw-r--r--  node_modules/@npmcli/arborist/lib/arborist/index.js | 29
-rw-r--r--  node_modules/@npmcli/arborist/lib/arborist/rebuild.js | 7
-rw-r--r--  node_modules/@npmcli/arborist/lib/arborist/reify.js | 16
-rw-r--r--  node_modules/@npmcli/arborist/package.json | 11
-rw-r--r--  node_modules/@npmcli/metavuln-calculator/package.json | 8
-rw-r--r--  node_modules/@npmcli/run-script/package.json | 4
-rw-r--r--  node_modules/libnpmexec/package.json | 11
-rw-r--r--  node_modules/libnpmfund/package.json | 7
-rw-r--r--  node_modules/libnpmpack/package.json | 8
-rw-r--r--  node_modules/libnpmversion/package.json | 7
-rw-r--r--  node_modules/node-gyp/docs/Common-issues.md | 14
-rw-r--r--  node_modules/node-gyp/docs/Error-pre-versions-of-node-cannot-be-installed.md | 94
-rw-r--r--  node_modules/node-gyp/docs/Home.md | 7
-rw-r--r--  node_modules/node-gyp/docs/Linking-to-OpenSSL.md | 86
-rw-r--r--  node_modules/node-gyp/docs/Updating-npm-bundled-node-gyp.md | 45
-rw-r--r--  node_modules/node-gyp/docs/binding.gyp-files-in-the-wild.md | 48
-rw-r--r--  node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md | 4
-rw-r--r--  node_modules/node-gyp/gyp/CONTRIBUTING.md | 2
-rwxr-xr-x  node_modules/node-gyp/gyp/gyp_main.py | 14
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py | 19
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py | 6
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py | 26
-rwxr-xr-x  node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py | 7
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py | 2
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py | 4
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py | 4
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py | 24
-rwxr-xr-x  node_modules/node-gyp/gyp/pylib/gyp/__init__.py | 14
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/common.py | 19
-rwxr-xr-x  node_modules/node-gyp/gyp/pylib/gyp/common_test.py | 2
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py | 12
-rwxr-xr-x  node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py | 7
-rwxr-xr-x  node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py | 4
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py | 13
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/generator/android.py | 227
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py | 26
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py | 1
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py | 10
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py | 2
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/generator/make.py | 266
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py | 74
-rwxr-xr-x  node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py | 7
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py | 242
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py | 2
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py | 13
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py | 2
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/input.py | 30
-rwxr-xr-x  node_modules/node-gyp/gyp/pylib/gyp/input_test.py | 2
-rwxr-xr-x  node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py | 17
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py | 246
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py | 6
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py | 5
-rwxr-xr-x  node_modules/node-gyp/gyp/pylib/gyp/win_tool.py | 39
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py | 98
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py | 8
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py | 51
-rw-r--r--  node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py | 4
-rw-r--r--  node_modules/node-gyp/gyp/setup.py | 10
-rwxr-xr-x  node_modules/node-gyp/gyp/test_gyp.py | 30
-rw-r--r--  node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el | 2
-rw-r--r--  node_modules/node-gyp/gyp/tools/emacs/gyp.el | 2
-rwxr-xr-x  node_modules/node-gyp/gyp/tools/graphviz.py | 9
-rwxr-xr-x  node_modules/node-gyp/gyp/tools/pretty_gyp.py | 5
-rwxr-xr-x  node_modules/node-gyp/gyp/tools/pretty_sln.py | 3
-rwxr-xr-x  node_modules/node-gyp/gyp/tools/pretty_vcproj.py | 32
-rw-r--r--  node_modules/node-gyp/lib/configure.js | 6
-rw-r--r--  node_modules/node-gyp/lib/find-python.js | 56
-rw-r--r--  node_modules/node-gyp/lib/find-visualstudio.js | 2
-rw-r--r--  node_modules/node-gyp/lib/install.js | 557
-rw-r--r--  node_modules/node-gyp/lib/proxy.js | 92
-rw-r--r--  node_modules/node-gyp/macOS_Catalina.md | 4
-rw-r--r--  node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE | 16
-rw-r--r--  node_modules/node-gyp/node_modules/make-fetch-happen/agent.js | 209
-rw-r--r--  node_modules/node-gyp/node_modules/make-fetch-happen/cache.js | 260
-rw-r--r--  node_modules/node-gyp/node_modules/make-fetch-happen/index.js | 457
-rw-r--r--  node_modules/node-gyp/node_modules/make-fetch-happen/package.json | 72
-rw-r--r--  node_modules/node-gyp/node_modules/make-fetch-happen/utils/configure-options.js | 32
-rw-r--r--  node_modules/node-gyp/node_modules/make-fetch-happen/utils/initialize-cache.js | 26
-rw-r--r--  node_modules/node-gyp/node_modules/make-fetch-happen/utils/is-header-conditional.js | 17
-rw-r--r--  node_modules/node-gyp/node_modules/make-fetch-happen/utils/iterable-to-object.js | 9
-rw-r--r--  node_modules/node-gyp/node_modules/make-fetch-happen/utils/make-policy.js | 19
-rw-r--r--  node_modules/node-gyp/node_modules/make-fetch-happen/warning.js | 24
-rw-r--r--  node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.d.ts | 21
-rw-r--r--  node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.js | 180
-rw-r--r--  node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.js.map | 1
-rw-r--r--  node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.d.ts | 19
-rw-r--r--  node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js | 14
-rw-r--r--  node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js.map | 1
-rw-r--r--  node_modules/node-gyp/node_modules/socks-proxy-agent/package.json | 64
-rw-r--r--  node_modules/node-gyp/package.json | 10
-rw-r--r--  node_modules/node-gyp/test/fixtures/test-charmap.py | 41
-rw-r--r--  node_modules/node-gyp/test/test-download.js | 317
-rw-r--r--  node_modules/node-gyp/test/test-find-python.js | 22
-rw-r--r--  node_modules/node-gyp/test/test-install.js | 54
-rwxr-xr-x  node_modules/node-gyp/update-gyp.py | 41
-rw-r--r--  node_modules/pacote/package.json | 6
-rw-r--r--  package-lock.json | 479
-rw-r--r--  package.json | 26
-rw-r--r--  packages/libnpmdiff/package.json | 2
-rw-r--r--  smoke-tests/index.js | 2
-rw-r--r--  test/coverage-map.js | 2
-rw-r--r--  test/index.js | 23
-rw-r--r--  test/lib/npm.js | 39
-rw-r--r--  test/lib/utils/unsupported.js | 13
110 files changed, 3528 insertions, 1895 deletions
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ffa194d01..c99b7dee3 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -14,10 +14,10 @@ jobs:
node-version: 14.x
- name: Install dependencies
run: |
- node . install --ignore-scripts --no-audit
- node . rebuild
+ node ./bin/npm-cli.js install --ignore-scripts --no-audit
+ node ./bin/npm-cli.js rebuild
- name: Run linting
- run: node . run posttest
+ run: node ./bin/npm-cli.js run posttest
env:
DEPLOY_VERSION: testing
@@ -31,7 +31,7 @@ jobs:
node-version: 14.x
- name: Install dependencies
run: |
- node . install --ignore-scripts --no-audit
+ node ./bin/npm-cli.js install --ignore-scripts --no-audit
- name: Rebuild the docs
run: make freshdocs
- name: Git should not be dirty
@@ -49,16 +49,16 @@ jobs:
node-version: 14.x
- name: Install dependencies
run: |
- node . install --ignore-scripts --no-audit
- node . rebuild
+ node ./bin/npm-cli.js install --ignore-scripts --no-audit
+ node ./bin/npm-cli.js rebuild
- name: Run linting
- run: node . run licenses
+ run: node ./bin/npm-cli.js run licenses
smoke-tests:
strategy:
fail-fast: false
matrix:
- node-version: [10.x, 12.x, 14.x, 16.x]
+ node-version: [12.x, 14.x, 16.x]
platform:
- os: ubuntu-latest
shell: bash
@@ -87,12 +87,12 @@ jobs:
# Run the installer script
- name: Install dependencies
run: |
- node . install --ignore-scripts --no-audit
- node . rebuild
+ node ./bin/npm-cli.js install --ignore-scripts --no-audit
+ node ./bin/npm-cli.js rebuild
# Run the smoke tests
- name: Run Smoke tests
- run: node . run --ignore-scripts smoke-tests -- --no-check-coverage -t600 -Rbase -c
+ run: node ./bin/npm-cli.js run --ignore-scripts smoke-tests -- --no-check-coverage -t600 -Rbase -c
env:
DEPLOY_VERSION: testing
@@ -100,7 +100,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- node-version: [10.x, 12.x, 14.x, 16.x]
+ node-version: [12.x, 14.x, 16.x]
platform:
- os: ubuntu-latest
shell: bash
@@ -129,11 +129,11 @@ jobs:
# Run the installer script
- name: Install dependencies
run: |
- node . install --ignore-scripts --no-audit
- node . rebuild
+ node ./bin/npm-cli.js install --ignore-scripts --no-audit
+ node ./bin/npm-cli.js rebuild
- name: Run workspaces tests
- run: node . test -w ./packages -- --no-check-coverage -t600 -Rbase -c
+ run: node ./bin/npm-cli.js test -w ./packages -- --no-check-coverage -t600 -Rbase -c
env:
DEPLOY_VERSION: testing
@@ -141,7 +141,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- node-version: ['10.1', 10.x, '12.1', 12.x, '14.1', 14.x, '16.1', 16.x]
+ node-version: ['12.13.0', 12.x, '14.15.0', 14.x, '16.0.0', 16.x]
platform:
- os: ubuntu-latest
shell: bash
@@ -170,21 +170,21 @@ jobs:
# Run the installer script
- name: Install dependencies
run: |
- node . install --ignore-scripts --no-audit
- node . rebuild
+ node ./bin/npm-cli.js install --ignore-scripts --no-audit
+ node ./bin/npm-cli.js rebuild
# Run the tests, but not if we're just gonna do coveralls later anyway
- name: Run Tap tests
- if: matrix.platform.os != 'ubuntu-latest' || matrix.node-version != '12.x'
- run: node . run --ignore-scripts test -- -t600 -Rbase -c
+ if: matrix.platform.os != 'ubuntu-latest' || matrix.node-version != '16.x'
+ run: node ./bin/npm-cli.js run --ignore-scripts test -- -t600 -Rbase -c
env:
DEPLOY_VERSION: testing
# Run coverage check
- name: Run coverage report
- if: matrix.platform.os == 'ubuntu-latest' && matrix.node-version == '12.x'
+ if: matrix.platform.os == 'ubuntu-latest' && matrix.node-version == '16.x'
# turn off --check-coverage until 100%, so CI failure is relevant
- run: node . run check-coverage -- -t600 --no-check-coverage -Rbase -c
+ run: node ./bin/npm-cli.js run check-coverage -- -t600 --no-check-coverage -Rbase -c
env:
DEPLOY_VERSION: testing
COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_OPTIONAL_TOKEN }}
diff --git a/index.js b/index.js
new file mode 100644
index 000000000..5447643e0
--- /dev/null
+++ b/index.js
@@ -0,0 +1,5 @@
+if (require.main === module) {
+ require('./lib/cli.js')(process)
+} else {
+ throw new Error('The programmatic API was removed in npm v8.0.0')
+}
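
With this change, `require('npm')` throws instead of exposing the CLI internals. For downstream code that used the old programmatic API, a minimal migration sketch is to shell out to the CLI and consume its JSON output; the command and field names below are illustrative, not part of this commit:

```js
// Sketch: replace require('npm') + npm.commands usage with a child process.
const { execFile } = require('child_process')

// Assumes `npm` is on the PATH; `--json` gives machine-readable output.
execFile('npm', ['ls', '--json', '--depth=0'], (err, stdout) => {
  // npm exits non-zero for problems in the tree but may still print JSON
  if (err && !stdout) throw err
  const tree = JSON.parse(stdout)
  console.log(Object.keys(tree.dependencies || {}))
})
```
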
diff --git a/lib/npm.js b/lib/npm.js
index 966d11210..1a7b06a34 100644
--- a/lib/npm.js
+++ b/lib/npm.js
@@ -355,6 +355,3 @@ const npm = module.exports = new class extends EventEmitter {
this.log.showProgress()
}
}()
-
-if (require.main === module)
- require('./cli.js')(process)
diff --git a/node_modules/@npmcli/arborist/bin/dedupe.js b/node_modules/@npmcli/arborist/bin/dedupe.js
deleted file mode 100644
index b0e83459e..000000000
--- a/node_modules/@npmcli/arborist/bin/dedupe.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const Arborist = require('../')
-
-const options = require('./lib/options.js')
-const print = require('./lib/print-tree.js')
-require('./lib/logging.js')
-require('./lib/timers.js')
-
-const printDiff = diff => {
- const {depth} = require('treeverse')
- depth({
- tree: diff,
- visit: d => {
- if (d.location === '') {
- return
- }
- switch (d.action) {
- case 'REMOVE':
- console.error('REMOVE', d.actual.location)
- break
- case 'ADD':
- console.error('ADD', d.ideal.location, d.ideal.resolved)
- break
- case 'CHANGE':
- console.error('CHANGE', d.actual.location, {
- from: d.actual.resolved,
- to: d.ideal.resolved,
- })
- break
- }
- },
- getChildren: d => d.children,
- })
-}
-
-const start = process.hrtime()
-process.emit('time', 'install')
-const arb = new Arborist(options)
-arb.dedupe(options).then(tree => {
- process.emit('timeEnd', 'install')
- const end = process.hrtime(start)
- print(tree)
- if (options.dryRun) {
- printDiff(arb.diff)
- }
- console.error(`resolved ${tree.inventory.size} deps in ${end[0] + end[1] / 1e9}s`)
- if (tree.meta && options.save) {
- tree.meta.save()
- }
-}).catch(er => console.error(require('util').inspect(er, { depth: Infinity })))
diff --git a/node_modules/@npmcli/arborist/lib/arborist/audit.js b/node_modules/@npmcli/arborist/lib/arborist/audit.js
index c0cd79bb1..eb4a35655 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/audit.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/audit.js
@@ -5,6 +5,7 @@ const AuditReport = require('../audit-report.js')
// shared with reify
const _global = Symbol.for('global')
const _workspaces = Symbol.for('workspaces')
+const _includeWorkspaceRoot = Symbol.for('includeWorkspaceRoot')
module.exports = cls => class Auditor extends cls {
async audit (options = {}) {
@@ -23,7 +24,15 @@ module.exports = cls => class Auditor extends cls {
process.emit('time', 'audit')
const tree = await this.loadVirtual()
if (this[_workspaces] && this[_workspaces].length) {
- options.filterSet = this.workspaceDependencySet(tree, this[_workspaces])
+ options.filterSet = this.workspaceDependencySet(
+ tree,
+ this[_workspaces],
+ this[_includeWorkspaceRoot]
+ )
+ }
+ if (!options.workspacesEnabled) {
+ options.filterSet =
+ this.excludeWorkspacesDependencySet(tree)
}
this.auditReport = await AuditReport.load(tree, options)
const ret = options.fix ? this.reify(options) : this.auditReport
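
Taken together, the two new branches above let a caller scope an audit to selected workspaces, optionally fold in the workspace root's own dependencies, or restrict it to root dependencies only. A hedged sketch of the first case, using the option names from this diff (the monorepo path and workspace name are illustrative):

```js
const Arborist = require('@npmcli/arborist')

// Audit the `docs` workspace plus the root project's own dependencies.
// `workspaces` and `includeWorkspaceRoot` are the options this commit
// threads through. audit() also expects `workspacesEnabled` in its
// options, since the second new branch falls back to root-only deps
// whenever it is falsy.
const arb = new Arborist({
  path: '/path/to/monorepo', // illustrative
  workspaces: ['docs'],
  includeWorkspaceRoot: true,
})

arb.audit({ workspacesEnabled: true }).then(report => console.log(report))
```
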
diff --git a/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
index b7876b114..3e6a9838f 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
@@ -93,6 +93,7 @@ const _checkEngine = Symbol('checkEngine')
const _checkPlatform = Symbol('checkPlatform')
const _virtualRoots = Symbol('virtualRoots')
const _virtualRoot = Symbol('virtualRoot')
+const _includeWorkspaceRoot = Symbol.for('includeWorkspaceRoot')
const _failPeerConflict = Symbol('failPeerConflict')
const _explainPeerConflict = Symbol('explainPeerConflict')
@@ -115,12 +116,13 @@ module.exports = cls => class IdealTreeBuilder extends cls {
options.registry = this.registry = registry.replace(/\/+$/, '') + '/'
const {
- idealTree = null,
- global = false,
follow = false,
+ force = false,
+ global = false,
globalStyle = false,
+ idealTree = null,
+ includeWorkspaceRoot = false,
legacyPeerDeps = false,
- force = false,
packageLock = true,
strictPeerDeps = false,
workspaces = [],
@@ -162,6 +164,8 @@ module.exports = cls => class IdealTreeBuilder extends cls {
// don't hold onto references for nodes that are garbage collected.
this[_peerSetSource] = new WeakMap()
this[_virtualRoots] = new Map()
+
+ this[_includeWorkspaceRoot] = includeWorkspaceRoot
}
get explicitRequests () {
@@ -394,8 +398,14 @@ module.exports = cls => class IdealTreeBuilder extends cls {
if (!this[_workspaces].length) {
await this[_applyUserRequestsToNode](tree, options)
} else {
- await Promise.all(this.workspaceNodes(tree, this[_workspaces])
- .map(node => this[_applyUserRequestsToNode](node, options)))
+ const nodes = this.workspaceNodes(tree, this[_workspaces])
+ if (this[_includeWorkspaceRoot]) {
+ nodes.push(tree)
+ }
+ const appliedRequests = nodes.map(
+ node => this[_applyUserRequestsToNode](node, options)
+ )
+ await Promise.all(appliedRequests)
}
process.emit('timeEnd', 'idealTree:userRequests')
diff --git a/node_modules/@npmcli/arborist/lib/arborist/index.js b/node_modules/@npmcli/arborist/lib/arborist/index.js
index d8ca67faa..ccfa7cad9 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/index.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/index.js
@@ -58,6 +58,7 @@ class Arborist extends Base {
cache: options.cache || `${homedir()}/.npm/_cacache`,
packumentCache: options.packumentCache || new Map(),
log: options.log || procLog,
+ workspacesEnabled: options.workspacesEnabled !== false,
}
if (options.saveType && !saveTypeMap.get(options.saveType)) {
throw new Error(`Invalid saveType ${options.saveType}`)
@@ -73,8 +74,15 @@ class Arborist extends Base {
}
// returns a set of workspace nodes and all their deps
- workspaceDependencySet (tree, workspaces) {
+ workspaceDependencySet (tree, workspaces, includeWorkspaceRoot) {
const wsNodes = this.workspaceNodes(tree, workspaces)
+ if (includeWorkspaceRoot) {
+ for (const edge of tree.edgesOut.values()) {
+ if (edge.type !== 'workspace' && edge.to) {
+ wsNodes.push(edge.to)
+ }
+ }
+ }
const set = new Set(wsNodes)
const extraneous = new Set()
for (const node of set) {
@@ -96,6 +104,25 @@ class Arborist extends Base {
for (const extra of extraneous) {
set.add(extra)
}
+
+ return set
+ }
+
+ excludeWorkspacesDependencySet (tree) {
+ const set = new Set()
+ for (const edge of tree.edgesOut.values()) {
+ if (edge.type !== 'workspace' && edge.to) {
+ set.add(edge.to)
+ }
+ }
+ for (const node of set) {
+ for (const edge of node.edgesOut.values()) {
+ if (edge.to) {
+ set.add(edge.to)
+ }
+ }
+ }
+
return set
}
}
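
The new `excludeWorkspacesDependencySet` walks the root node's non-workspace edges and then transitively collects their dependencies, the mirror image of the workspace filter above. A sketch of the consumer side, assuming the caller forwards the same flattened options to the method call that it gave the constructor, as npm's own CLI does:

```js
const Arborist = require('@npmcli/arborist')

// Root-only view of a workspace project: workspace packages and their
// dependencies are left out of the audit entirely.
const opts = { path: '/path/to/monorepo', workspacesEnabled: false } // illustrative path
const rootOnly = new Arborist(opts)

rootOnly.audit(opts).then(report => console.log(report))
```
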
diff --git a/node_modules/@npmcli/arborist/lib/arborist/rebuild.js b/node_modules/@npmcli/arborist/lib/arborist/rebuild.js
index e48bdd76b..6fa5c0011 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/rebuild.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/rebuild.js
@@ -34,6 +34,7 @@ const _addToBuildSet = Symbol('addToBuildSet')
const _checkBins = Symbol.for('checkBins')
const _queues = Symbol('queues')
const _scriptShell = Symbol('scriptShell')
+const _includeWorkspaceRoot = Symbol.for('includeWorkspaceRoot')
const _force = Symbol.for('force')
@@ -77,7 +78,11 @@ module.exports = cls => class Builder extends cls {
if (!nodes) {
const tree = await this.loadActual()
if (this[_workspaces] && this[_workspaces].length) {
- const filterSet = this.workspaceDependencySet(tree, this[_workspaces])
+ const filterSet = this.workspaceDependencySet(
+ tree,
+ this[_workspaces],
+ this[_includeWorkspaceRoot]
+ )
nodes = tree.inventory.filter(node => filterSet.has(node))
} else {
nodes = tree.inventory.values()
diff --git a/node_modules/@npmcli/arborist/lib/arborist/reify.js b/node_modules/@npmcli/arborist/lib/arborist/reify.js
index 3a9c47974..a279d8956 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/reify.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/reify.js
@@ -83,6 +83,7 @@ const _validateNodeModules = Symbol('validateNodeModules')
const _nmValidated = Symbol('nmValidated')
const _validatePath = Symbol('validatePath')
const _reifyPackages = Symbol.for('reifyPackages')
+const _includeWorkspaceRoot = Symbol.for('includeWorkspaceRoot')
const _omitDev = Symbol('omitDev')
const _omitOptional = Symbol('omitOptional')
@@ -340,6 +341,15 @@ module.exports = cls => class Reifier extends cls {
filterNodes.push(actual)
}
}
+ if (this[_includeWorkspaceRoot] && (this[_workspaces].length > 0)) {
+ for (const tree of [this.idealTree, this.actualTree]) {
+ for (const {type, to} of tree.edgesOut.values()) {
+ if (type !== 'workspace' && to) {
+ filterNodes.push(to)
+ }
+ }
+ }
+ }
}
// find all the nodes that need to change between the actual
@@ -901,7 +911,11 @@ module.exports = cls => class Reifier extends cls {
// if we're operating on a workspace, only audit the workspace deps
if (this[_workspaces] && this[_workspaces].length) {
- options.filterSet = this.workspaceDependencySet(tree, this[_workspaces])
+ options.filterSet = this.workspaceDependencySet(
+ tree,
+ this[_workspaces],
+ this[_includeWorkspaceRoot]
+ )
}
this.auditReport = AuditReport.load(tree, options)
diff --git a/node_modules/@npmcli/arborist/package.json b/node_modules/@npmcli/arborist/package.json
index b39818d48..b9dbbb707 100644
--- a/node_modules/@npmcli/arborist/package.json
+++ b/node_modules/@npmcli/arborist/package.json
@@ -1,17 +1,17 @@
{
"name": "@npmcli/arborist",
- "version": "2.9.0",
+ "version": "3.0.0",
"description": "Manage node_modules trees",
"dependencies": {
"@isaacs/string-locale-compare": "^1.0.1",
"@npmcli/installed-package-contents": "^1.0.7",
"@npmcli/map-workspaces": "^1.0.2",
- "@npmcli/metavuln-calculator": "^1.1.0",
+ "@npmcli/metavuln-calculator": "^2.0.0",
"@npmcli/move-file": "^1.1.0",
"@npmcli/name-from-folder": "^1.0.1",
"@npmcli/node-gyp": "^1.0.1",
"@npmcli/package-json": "^1.0.1",
- "@npmcli/run-script": "^1.8.2",
+ "@npmcli/run-script": "^2.0.0",
"bin-links": "^2.2.1",
"cacache": "^15.0.3",
"common-ancestor-path": "^1.0.1",
@@ -23,7 +23,7 @@
"npm-package-arg": "^8.1.5",
"npm-pick-manifest": "^6.1.0",
"npm-registry-fetch": "^11.0.0",
- "pacote": "^11.3.5",
+ "pacote": "^12.0.0",
"parse-conflict-json": "^1.1.1",
"proc-log": "^1.0.0",
"promise-all-reject-late": "^1.0.0",
@@ -77,6 +77,7 @@
},
"//": "sk test-env locale to catch locale-specific sorting",
"tap": {
+ "color": true,
"after": "test/fixtures/cleanup.js",
"coverage-map": "map.js",
"test-env": [
@@ -90,6 +91,6 @@
"timeout": "240"
},
"engines": {
- "node": ">= 10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
}
diff --git a/node_modules/@npmcli/metavuln-calculator/package.json b/node_modules/@npmcli/metavuln-calculator/package.json
index 4ad6193ae..131cff672 100644
--- a/node_modules/@npmcli/metavuln-calculator/package.json
+++ b/node_modules/@npmcli/metavuln-calculator/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/metavuln-calculator",
- "version": "1.1.1",
+ "version": "2.0.0",
"main": "lib/index.js",
"files": [
"lib"
@@ -36,7 +36,11 @@
},
"dependencies": {
"cacache": "^15.0.5",
- "pacote": "^11.1.11",
+ "json-parse-even-better-errors": "^2.3.1",
+ "pacote": "^12.0.0",
"semver": "^7.3.2"
+ },
+ "engines": {
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
}
diff --git a/node_modules/@npmcli/run-script/package.json b/node_modules/@npmcli/run-script/package.json
index 5a5d1a71d..9e744e639 100644
--- a/node_modules/@npmcli/run-script/package.json
+++ b/node_modules/@npmcli/run-script/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/run-script",
- "version": "1.8.6",
+ "version": "2.0.0",
"description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
"author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)",
"license": "ISC",
@@ -30,7 +30,7 @@
"dependencies": {
"@npmcli/node-gyp": "^1.0.2",
"@npmcli/promise-spawn": "^1.3.2",
- "node-gyp": "^7.1.0",
+ "node-gyp": "^8.2.0",
"read-package-json-fast": "^2.0.1"
},
"files": [
diff --git a/node_modules/libnpmexec/package.json b/node_modules/libnpmexec/package.json
index 2668f1173..067c39ae7 100644
--- a/node_modules/libnpmexec/package.json
+++ b/node_modules/libnpmexec/package.json
@@ -1,12 +1,12 @@
{
"name": "libnpmexec",
- "version": "2.0.1",
+ "version": "3.0.0",
"files": [
"lib"
],
"main": "lib/index.js",
"engines": {
- "node": ">=10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
},
"description": "npm exec (npx) programmatic API",
"repository": "https://github.com/npm/libnpmexec",
@@ -37,6 +37,7 @@
"prepublishOnly": "git push origin --follow-tags"
},
"tap": {
+ "color": true,
"check-coverage": true
},
"devDependencies": {
@@ -49,13 +50,13 @@
"tap": "^15.0.6"
},
"dependencies": {
- "@npmcli/arborist": "^2.3.0",
+ "@npmcli/arborist": "^3.0.0",
"@npmcli/ci-detect": "^1.3.0",
- "@npmcli/run-script": "^1.8.4",
+ "@npmcli/run-script": "^2.0.0",
"chalk": "^4.1.0",
"mkdirp-infer-owner": "^2.0.0",
"npm-package-arg": "^8.1.2",
- "pacote": "^11.3.1",
+ "pacote": "^12.0.0",
"proc-log": "^1.0.0",
"read": "^1.0.7",
"read-package-json-fast": "^2.0.2",
diff --git a/node_modules/libnpmfund/package.json b/node_modules/libnpmfund/package.json
index 7f4acad38..e021d8874 100644
--- a/node_modules/libnpmfund/package.json
+++ b/node_modules/libnpmfund/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmfund",
- "version": "1.1.0",
+ "version": "2.0.0",
"files": [
"index.js"
],
@@ -52,6 +52,9 @@
"tap": "^15.0.9"
},
"dependencies": {
- "@npmcli/arborist": "^2.5.0"
+ "@npmcli/arborist": "^3.0.0"
+ },
+ "engines": {
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
}
diff --git a/node_modules/libnpmpack/package.json b/node_modules/libnpmpack/package.json
index f3ec245d3..e0538b171 100644
--- a/node_modules/libnpmpack/package.json
+++ b/node_modules/libnpmpack/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmpack",
- "version": "2.0.1",
+ "version": "3.0.0",
"description": "Programmatic API for the bits behind npm pack",
"author": "npm Inc. <support@npmjs.com>",
"contributors": [
@@ -34,11 +34,11 @@
"bugs": "https://github.com/npm/libnpmpack/issues",
"homepage": "https://npmjs.com/package/libnpmpack",
"dependencies": {
- "@npmcli/run-script": "^1.8.3",
+ "@npmcli/run-script": "^2.0.0",
"npm-package-arg": "^8.1.0",
- "pacote": "^11.2.6"
+ "pacote": "^12.0.0"
},
"engines": {
- "node": ">=10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
}
diff --git a/node_modules/libnpmversion/package.json b/node_modules/libnpmversion/package.json
index 1ee2ee599..523f25f1d 100644
--- a/node_modules/libnpmversion/package.json
+++ b/node_modules/libnpmversion/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmversion",
- "version": "1.2.1",
+ "version": "2.0.1",
"main": "lib/index.js",
"files": [
"lib/*.js"
@@ -38,9 +38,12 @@
},
"dependencies": {
"@npmcli/git": "^2.0.7",
- "@npmcli/run-script": "^1.8.4",
+ "@npmcli/run-script": "^2.0.0",
"json-parse-even-better-errors": "^2.3.1",
"semver": "^7.3.5",
"stringify-package": "^1.0.1"
+ },
+ "engines": {
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
}
diff --git a/node_modules/node-gyp/docs/Common-issues.md b/node_modules/node-gyp/docs/Common-issues.md
new file mode 100644
index 000000000..ae05fe326
--- /dev/null
+++ b/node_modules/node-gyp/docs/Common-issues.md
@@ -0,0 +1,14 @@
+## Python Issues OSX
+
+Make sure you are using the native Python version in OSX. If you use a MacPorts or Homebrew version, you may run into problems.
+
+If you have issues with `execvp`, be sure to check your `$PYTHON` environment variable. If it is not set to the native version, unset it and try again.
+
+Notes: https://gist.github.com/erichocean/5177582
+
+## npm ERR! `node-gyp rebuild` (Windows)
+* Just install the build tools from [here](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=BuildTools).
+Note the version, as it is required in the command below, e.g. **2017**
+* Launch cmd, run `npm config set msvs_version 2017`
+* Close and reopen your CMD/terminal, and all is well :100:
+
diff --git a/node_modules/node-gyp/docs/Error-pre-versions-of-node-cannot-be-installed.md b/node_modules/node-gyp/docs/Error-pre-versions-of-node-cannot-be-installed.md
new file mode 100644
index 000000000..c1e1158d7
--- /dev/null
+++ b/node_modules/node-gyp/docs/Error-pre-versions-of-node-cannot-be-installed.md
@@ -0,0 +1,94 @@
+When using `node-gyp` you might see an error like this when attempting to compile/install a node.js native addon:
+
+```
+$ npm install bcrypt
+npm http GET https://registry.npmjs.org/bcrypt/0.7.5
+npm http 304 https://registry.npmjs.org/bcrypt/0.7.5
+npm http GET https://registry.npmjs.org/bindings/1.0.0
+npm http 304 https://registry.npmjs.org/bindings/1.0.0
+
+> bcrypt@0.7.5 install /home/ubuntu/public/song-swap/node_modules/bcrypt
+> node-gyp rebuild
+
+gyp ERR! configure error
+gyp ERR! stack Error: "pre" versions of node cannot be installed, use the --nodedir flag instead
+gyp ERR! stack at install (/usr/local/lib/node_modules/npm/node_modules/node-gyp/lib/install.js:69:16)
+gyp ERR! stack at Object.self.commands.(anonymous function) [as install] (/usr/local/lib/node_modules/npm/node_modules/node-gyp/lib/node-gyp.js:56:37)
+gyp ERR! stack at getNodeDir (/usr/local/lib/node_modules/npm/node_modules/node-gyp/lib/configure.js:219:20)
+gyp ERR! stack at /usr/local/lib/node_modules/npm/node_modules/node-gyp/lib/configure.js:105:9
+gyp ERR! stack at ChildProcess.exithandler (child_process.js:630:7)
+gyp ERR! stack at ChildProcess.EventEmitter.emit (events.js:99:17)
+gyp ERR! stack at maybeClose (child_process.js:730:16)
+gyp ERR! stack at Process.ChildProcess._handle.onexit (child_process.js:797:5)
+gyp ERR! System Linux 3.5.0-21-generic
+gyp ERR! command "node" "/usr/local/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js" "rebuild"
+gyp ERR! cwd /home/ubuntu/public/song-swap/node_modules/bcrypt
+gyp ERR! node -v v0.11.2-pre
+gyp ERR! node-gyp -v v0.9.5
+gyp ERR! not ok
+npm ERR! bcrypt@0.7.5 install: `node-gyp rebuild`
+npm ERR! `sh "-c" "node-gyp rebuild"` failed with 1
+npm ERR!
+npm ERR! Failed at the bcrypt@0.7.5 install script.
+npm ERR! This is most likely a problem with the bcrypt package,
+npm ERR! not with npm itself.
+npm ERR! Tell the author that this fails on your system:
+npm ERR! node-gyp rebuild
+npm ERR! You can get their info via:
+npm ERR! npm owner ls bcrypt
+npm ERR! There is likely additional logging output above.
+
+npm ERR! System Linux 3.5.0-21-generic
+npm ERR! command "/usr/local/bin/node" "/usr/local/bin/npm" "install" "bcrypt"
+npm ERR! cwd /home/ubuntu/public/song-swap
+npm ERR! node -v v0.11.2-pre
+npm ERR! npm -v 1.2.18
+npm ERR! code ELIFECYCLE
+npm ERR!
+npm ERR! Additional logging details can be found in:
+npm ERR! /home/ubuntu/public/song-swap/npm-debug.log
+npm ERR! not ok code 0
+```
+
+The main error here is:
+
+```
+Error: "pre" versions of node cannot be installed, use the --nodedir flag instead
+```
+
+This error occurs when you attempt to compile a native addon using a version of node.js with `-pre` at the end of the version number:
+
+``` bash
+$ node -v
+v0.10.4-pre
+```
+
+## How to avoid (the short answer)
+
+To avoid this error completely just use a stable release of node.js, i.e. `v0.10.4`, and __not__ `v0.10.4-pre`.
+
+## How to fix (the long answer)
+
+This error happens because `node-gyp` does not know what header files were used to compile your "pre" version of node, and therefore it needs you to specify the node source code directory path using the `--nodedir` flag.
+
+For example, if I compiled my development ("pre") version of node.js using the source code in `/Users/nrajlich/node`, then I could invoke `node-gyp` like:
+
+``` bash
+$ node-gyp rebuild --nodedir=/Users/nrajlich/node
+```
+
+Or install a native addon through `npm` like:
+
+``` bash
+$ npm install bcrypt --nodedir=/Users/nrajlich/node
+```
+
+### Always use `--nodedir`
+
+__Note:__ This is for advanced users who use `-pre` versions of node more often than tagged releases.
+
+If you're invoking `node-gyp` through `npm`, then you can leverage `npm`'s configuration system and not have to specify the `--nodedir` flag all the time:
+
+``` bash
+$ npm config set nodedir /Users/nrajlich/node
+```
\ No newline at end of file
diff --git a/node_modules/node-gyp/docs/Home.md b/node_modules/node-gyp/docs/Home.md
new file mode 100644
index 000000000..fe099868b
--- /dev/null
+++ b/node_modules/node-gyp/docs/Home.md
@@ -0,0 +1,7 @@
+Welcome to the node-gyp wiki!
+
+ * [["binding.gyp" files out in the wild]]
+ * [[Linking to OpenSSL]]
+ * [[Common Issues]]
+ * [[Updating npm's bundled node-gyp]]
+ * [[Error: "pre" versions of node cannot be installed]]
diff --git a/node_modules/node-gyp/docs/Linking-to-OpenSSL.md b/node_modules/node-gyp/docs/Linking-to-OpenSSL.md
new file mode 100644
index 000000000..ec8092999
--- /dev/null
+++ b/node_modules/node-gyp/docs/Linking-to-OpenSSL.md
@@ -0,0 +1,86 @@
+A handful of native addons require linking to OpenSSL in one way or another. This introduces a small challenge since node will sometimes bundle OpenSSL statically (the default for node >= v0.8.x), or sometimes dynamically link to the system OpenSSL (default for node <= v0.6.x).
+
+Good native addons should account for both scenarios. It's recommended that you use the `binding.gyp` file provided below as a starting-point for any addon that needs to use OpenSSL:
+
+``` python
+{
+ 'variables': {
+ # node v0.6.x doesn't give us its build variables,
+ # but on Unix it was only possible to use the system OpenSSL library,
+ # so default the variable to "true", v0.8.x node and up will overwrite it.
+ 'node_shared_openssl%': 'true'
+ },
+ 'targets': [
+ {
+ 'target_name': 'binding',
+ 'sources': [
+ 'src/binding.cc'
+ ],
+ 'conditions': [
+ ['node_shared_openssl=="false"', {
+ # so when "node_shared_openssl" is "false", then OpenSSL has been
+ # bundled into the node executable. So we need to include the same
+ # header files that were used when building node.
+ 'include_dirs': [
+ '<(node_root_dir)/deps/openssl/openssl/include'
+ ],
+ "conditions" : [
+ ["target_arch=='ia32'", {
+ "include_dirs": [ "<(node_root_dir)/deps/openssl/config/piii" ]
+ }],
+ ["target_arch=='x64'", {
+ "include_dirs": [ "<(node_root_dir)/deps/openssl/config/k8" ]
+ }],
+ ["target_arch=='arm'", {
+ "include_dirs": [ "<(node_root_dir)/deps/openssl/config/arm" ]
+ }]
+ ]
+ }]
+ ]
+ }
+ ]
+}
+```
+
+This ensures that when OpenSSL is statically linked into `node`, the bundled OpenSSL headers are included, but when the system OpenSSL is in use, only its headers will be used.
+
+## Windows?
+
+As you can see this baseline `binding.gyp` file only accounts for the Unix scenario. Currently on Windows the situation is a little less ideal. On Windows, OpenSSL is _always_ statically compiled into the `node` executable, so ideally it would be possible to use that copy of OpenSSL when building native addons.
+
+Unfortunately it doesn't seem like that is possible at the moment, as there would need to be tweaks made to the generated `node.lib` file to include the openssl glue functions, or a new `openssl.lib` file would need to be created during the node build. I'm not sure which is the easiest/most feasible.
+
+In the meantime, one possible solution is using another copy of OpenSSL, which is what [`node-bcrypt`](https://github.com/ncb000gt/node.bcrypt.js) currently does. Adding something like this to your `binding.gyp` file's `"conditions"` block would enable this:
+
+``` python
+ [ 'OS=="win"', {
+ 'conditions': [
+ # "openssl_root" is the directory on Windows of the OpenSSL files.
+ # Check the "target_arch" variable to set good default values for
+ # both 64-bit and 32-bit builds of the module.
+ ['target_arch=="x64"', {
+ 'variables': {
+ 'openssl_root%': 'C:/OpenSSL-Win64'
+ },
+ }, {
+ 'variables': {
+ 'openssl_root%': 'C:/OpenSSL-Win32'
+ },
+ }],
+ ],
+ 'libraries': [
+ '-l<(openssl_root)/lib/libeay32.lib',
+ ],
+ 'include_dirs': [
+ '<(openssl_root)/include',
+ ],
+ }]
+```
+
+Now you can direct your users to install OpenSSL on Windows from here (be sure to tell them to install the 64-bit version if they're compiling against a 64-bit version of node): http://slproweb.com/products/Win32OpenSSL.html
+
+Also note that both `node-gyp` and `npm` allow you to overwrite that default `openssl_root` variable on the command line:
+
+``` bash
+$ node-gyp rebuild --openssl-root="C:\Users\Nathan\Desktop\openssl"
+```
\ No newline at end of file
diff --git a/node_modules/node-gyp/docs/Updating-npm-bundled-node-gyp.md b/node_modules/node-gyp/docs/Updating-npm-bundled-node-gyp.md
new file mode 100644
index 000000000..01ad5642b
--- /dev/null
+++ b/node_modules/node-gyp/docs/Updating-npm-bundled-node-gyp.md
@@ -0,0 +1,45 @@
+# Updating the npm-bundled version of node-gyp
+
+[Many issues](https://github.com/nodejs/node-gyp/labels/ERR%21%20node-gyp%20-v%20%3C%3D%20v5.1.0) are opened by users who are
+not running a [current version of node-gyp](https://github.com/nodejs/node-gyp/releases).
+
+`npm` bundles its own, internal, copy of `node-gyp`. This internal copy is independent of any globally installed copy of node-gyp that
+may have been installed via `npm install -g node-gyp`.
+
+Generally, npm's library files are installed inside your global "node_modules", where npm is installed (run `npm prefix` and add `lib/node_modules`, or just `node_modules` for Windows). There are some exceptions to this. Inside this global `node_modules/` there will be an `npm/` directory and inside this you'll find a `node_modules/node-gyp/` directory. So it may look something like `/usr/local/lib/node_modules/npm/node_modules/node-gyp/`. This is the version of node-gyp that ships with npm.
+
+When you install a _new_ version of node-gyp outside of npm, it'll go into your global node_modules, but not under the `npm/node_modules`. So that may look like `/usr/local/lib/node_modules/node-gyp/`. It'll have the `node-gyp` executable linked into your `PATH` so running `node-gyp` will use this version.
+
+The catch is that npm won't use this version unless you tell it to; it'll keep on using its bundled copy. You instruct npm to use your copy by setting the `node_gyp` config variable (which goes into your `~/.npmrc`), running the `npm config set` command as below. npm will then use the command at the path you supply whenever it needs to build a native addon.
+
+**Important**: You also need to remember to unset this when you upgrade npm with a newer version of node-gyp, or else manually keep your globally installed node-gyp up to date. See "Undo" below.
+
+## Linux and macOS
+```
+npm install --global node-gyp@latest
+npm config set node_gyp $(npm prefix -g)/lib/node_modules/node-gyp/bin/node-gyp.js
+```
+
+`sudo` may be required for the first command if you get a permission error.
+
+## Windows
+
+### Windows Command Prompt
+```
+npm install --global node-gyp@latest
+for /f "delims=" %P in ('npm prefix -g') do npm config set node_gyp "%P\node_modules\node-gyp\bin\node-gyp.js"
+```
+
+### Powershell
+```
+npm install --global node-gyp@latest
+npm prefix -g | % {npm config set node_gyp "$_\node_modules\node-gyp\bin\node-gyp.js"}
+```
+
+## Undo
+**Beware**: if you don't unset the `node_gyp` config option, npm will continue to use the globally installed version of node-gyp rather than the one it ships with, which may end up being newer.
+
+```
+npm config delete node_gyp
+npm uninstall --global node-gyp
+```
diff --git a/node_modules/node-gyp/docs/binding.gyp-files-in-the-wild.md b/node_modules/node-gyp/docs/binding.gyp-files-in-the-wild.md
new file mode 100644
index 000000000..c4603dd3d
--- /dev/null
+++ b/node_modules/node-gyp/docs/binding.gyp-files-in-the-wild.md
@@ -0,0 +1,48 @@
+This page contains links to some examples of existing `binding.gyp` files that other node modules are using. Take a look at them for inspiration.
+
+To add to this page, just add the link to the project's `binding.gyp` file below:
+
+ * [ons](https://github.com/XadillaX/aliyun-ons/blob/master/binding.gyp)
+ * [thmclrx](https://github.com/XadillaX/thmclrx/blob/master/binding.gyp)
+ * [libxmljs](https://github.com/polotek/libxmljs/blob/master/binding.gyp)
+ * [node-buffertools](https://github.com/bnoordhuis/node-buffertools/blob/master/binding.gyp)
+ * [node-canvas](https://github.com/LearnBoost/node-canvas/blob/master/binding.gyp)
+ * [node-ffi](https://github.com/rbranson/node-ffi/blob/master/binding.gyp) + [libffi](https://github.com/rbranson/node-ffi/blob/master/deps/libffi/libffi.gyp)
+ * [node-time](https://github.com/TooTallNate/node-time/blob/master/binding.gyp)
+ * [node-sass](https://github.com/sass/node-sass/blob/master/binding.gyp) + [libsass](https://github.com/sass/node-sass/blob/master/src/libsass.gyp)
+ * [node-serialport](https://github.com/voodootikigod/node-serialport/blob/master/binding.gyp)
+ * [node-weak](https://github.com/TooTallNate/node-weak/blob/master/binding.gyp)
+ * [pty.js](https://github.com/chjj/pty.js/blob/master/binding.gyp)
+ * [ref](https://github.com/TooTallNate/ref/blob/master/binding.gyp)
+ * [appjs](https://github.com/milani/appjs/blob/master/binding.gyp)
+ * [nwm](https://github.com/mixu/nwm/blob/master/binding.gyp)
+ * [bcrypt](https://github.com/ncb000gt/node.bcrypt.js/blob/master/binding.gyp)
+ * [nk-mysql](https://github.com/mmod/nodamysql/blob/master/binding.gyp)
+ * [nk-xrm-installer](https://github.com/mmod/nk-xrm-installer/blob/master/binding.gyp) + [includable.gypi](https://github.com/mmod/nk-xrm-installer/blob/master/includable.gypi) + [unpack.py](https://github.com/mmod/nk-xrm-installer/blob/master/unpack.py) + [disburse.py](https://github.com/mmod/nk-xrm-installer/blob/master/disburse.py)
+ <sub>.py files above provide complete reference for examples of fetching source via http, extracting, and moving files.</sub>
+ * [node-memwatch](https://github.com/lloyd/node-memwatch/blob/master/binding.gyp)
+ * [node-ip2location](https://github.com/bolgovr/node-ip2location/blob/master/binding.gyp)
+ * [node-midi](https://github.com/justinlatimer/node-midi/blob/master/binding.gyp)
+ * [node-sqlite3](https://github.com/developmentseed/node-sqlite3/blob/master/binding.gyp) + [libsqlite3](https://github.com/developmentseed/node-sqlite3/blob/master/deps/sqlite3.gyp)
+ * [node-zipfile](https://github.com/mapbox/node-zipfile/blob/master/binding.gyp)
+ * [node-mapnik](https://github.com/mapnik/node-mapnik/blob/master/binding.gyp)
+ * [node-inotify](https://github.com/c4milo/node-inotify/blob/master/binding.gyp)
+ * [v8-profiler](https://github.com/c4milo/v8-profiler/blob/master/binding.gyp)
+ * [airtunes](https://github.com/radioline/node_airtunes/blob/master/binding.gyp)
+ * [node-fann](https://github.com/c4milo/node-fann/blob/master/binding.gyp)
+ * [node-talib](https://github.com/oransel/node-talib/blob/master/binding.gyp)
+ * [node-leveldown](https://github.com/rvagg/node-leveldown/blob/master/binding.gyp) + [leveldb.gyp](https://github.com/rvagg/node-leveldown/blob/master/deps/leveldb/leveldb.gyp) + [snappy.gyp](https://github.com/rvagg/node-leveldown/blob/master/deps/snappy/snappy.gyp)
+ * [node-expat](https://github.com/astro/node-expat/blob/master/binding.gyp) + [libexpat](https://github.com/astro/node-expat/blob/master/deps/libexpat/libexpat.gyp)
+ * [node-openvg-canvas](https://github.com/luismreis/node-openvg-canvas/blob/master/binding.gyp) + [node-openvg](https://github.com/luismreis/node-openvg/blob/master/binding.gyp)
+ * [node-cryptopp](https://github.com/BatikhSouri/node-cryptopp/blob/master/binding.gyp)
+ * [topcube](https://github.com/creationix/topcube/blob/master/binding.gyp)
+ * [node-osmium](https://github.com/osmcode/node-osmium/blob/master/binding.gyp)
+ * [node-osrm](https://github.com/DennisOSRM/node-osrm)
+ * [node-oracle](https://github.com/joeferner/node-oracle/blob/master/binding.gyp)
+ * [node-process-list](https://github.com/ReklatsMasters/node-process-list/blob/master/binding.gyp)
+ * [node-nanomsg](https://github.com/nickdesaulniers/node-nanomsg/blob/master/binding.gyp)
+ * [Ghostscript4JS](https://github.com/NickNaso/ghostscript4js/blob/master/binding.gyp)
+ * [nodecv](https://github.com/xudafeng/nodecv/blob/master/binding.gyp)
+ * [magick-cli](https://github.com/NickNaso/magick-cli/blob/master/binding.gyp)
+ * [sharp](https://github.com/lovell/sharp/blob/master/binding.gyp)
+ * [krb5](https://github.com/adaltas/node-krb5/blob/master/binding.gyp)
\ No newline at end of file
diff --git a/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md b/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md
index 4c2114055..d724027fd 100644
--- a/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md
+++ b/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md
@@ -1,4 +1,4 @@
# Code of Conduct
-* [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/master/CODE_OF_CONDUCT.md)
-* [Node.js Moderation Policy](https://github.com/nodejs/admin/blob/master/Moderation-Policy.md)
+* [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/HEAD/CODE_OF_CONDUCT.md)
+* [Node.js Moderation Policy](https://github.com/nodejs/admin/blob/HEAD/Moderation-Policy.md)
diff --git a/node_modules/node-gyp/gyp/CONTRIBUTING.md b/node_modules/node-gyp/gyp/CONTRIBUTING.md
index f9dd574a4..1a0bcde2b 100644
--- a/node_modules/node-gyp/gyp/CONTRIBUTING.md
+++ b/node_modules/node-gyp/gyp/CONTRIBUTING.md
@@ -2,7 +2,7 @@
## Code of Conduct
-This project is bound to the [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/master/CODE_OF_CONDUCT.md).
+This project is bound to the [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/HEAD/CODE_OF_CONDUCT.md).
<a id="developers-certificate-of-origin"></a>
## Developer's Certificate of Origin 1.1
diff --git a/node_modules/node-gyp/gyp/gyp_main.py b/node_modules/node-gyp/gyp/gyp_main.py
index da696cfc4..f23dcdf88 100755
--- a/node_modules/node-gyp/gyp/gyp_main.py
+++ b/node_modules/node-gyp/gyp/gyp_main.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -8,8 +8,6 @@ import os
import sys
import subprocess
-PY3 = bytes != str
-
def IsCygwin():
# Function copied from pylib/gyp/common.py
@@ -17,10 +15,8 @@ def IsCygwin():
out = subprocess.Popen(
"uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
- stdout, stderr = out.communicate()
- if PY3:
- stdout = stdout.decode("utf-8")
- return "CYGWIN" in str(stdout)
+ stdout, _ = out.communicate()
+ return "CYGWIN" in stdout.decode("utf-8")
except Exception:
return False
@@ -33,9 +29,7 @@ def UnixifyPath(path):
["cygpath", "-u", path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
stdout, _ = out.communicate()
- if PY3:
- stdout = stdout.decode("utf-8")
- return str(stdout)
+ return stdout.decode("utf-8")
except Exception:
return path
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
index 04bbb3df7..d6b189760 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
@@ -11,12 +11,9 @@ from operator import attrgetter
import gyp.common
-try:
- cmp
-except NameError:
- def cmp(x, y):
- return (x > y) - (x < y)
+def cmp(x, y):
+ return (x > y) - (x < y)
# Initialize random number generator
@@ -69,7 +66,7 @@ def MakeGuid(name, seed="msvs_new"):
# ------------------------------------------------------------------------------
-class MSVSSolutionEntry(object):
+class MSVSSolutionEntry:
def __cmp__(self, other):
# Sort by name then guid (so things are in order on vs2008).
return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))
@@ -190,7 +187,7 @@ class MSVSProject(MSVSSolutionEntry):
# ------------------------------------------------------------------------------
-class MSVSSolution(object):
+class MSVSSolution:
"""Visual Studio solution."""
def __init__(
@@ -292,14 +289,14 @@ class MSVSSolution(object):
if e.items:
f.write("\tProjectSection(SolutionItems) = preProject\r\n")
for i in e.items:
- f.write("\t\t%s = %s\r\n" % (i, i))
+ f.write(f"\t\t{i} = {i}\r\n")
f.write("\tEndProjectSection\r\n")
if isinstance(e, MSVSProject):
if e.dependencies:
f.write("\tProjectSection(ProjectDependencies) = postProject\r\n")
for d in e.dependencies:
- f.write("\t\t%s = %s\r\n" % (d.get_guid(), d.get_guid()))
+ f.write(f"\t\t{d.get_guid()} = {d.get_guid()}\r\n")
f.write("\tEndProjectSection\r\n")
f.write("EndProject\r\n")
@@ -310,7 +307,7 @@ class MSVSSolution(object):
# Configurations (variants)
f.write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n")
for v in self.variants:
- f.write("\t\t%s = %s\r\n" % (v, v))
+ f.write(f"\t\t{v} = {v}\r\n")
f.write("\tEndGlobalSection\r\n")
# Sort config guids for easier diffing of solution changes.
@@ -362,7 +359,7 @@ class MSVSSolution(object):
if not isinstance(e, MSVSFolder):
continue # Does not apply to projects, only folders
for subentry in e.entries:
- f.write("\t\t%s = %s\r\n" % (subentry.get_guid(), e.get_guid()))
+ f.write(f"\t\t{subentry.get_guid()} = {e.get_guid()}\r\n")
f.write("\tEndGlobalSection\r\n")
f.write("EndGlobal\r\n")
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
index f953d52cd..f0cfabe83 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
@@ -9,7 +9,7 @@ import gyp.easy_xml as easy_xml
# ------------------------------------------------------------------------------
-class Tool(object):
+class Tool:
"""Visual Studio tool."""
def __init__(self, name, attrs=None):
@@ -31,7 +31,7 @@ class Tool(object):
return ["Tool", self._attrs]
-class Filter(object):
+class Filter:
"""Visual Studio filter - that is, a virtual folder."""
def __init__(self, name, contents=None):
@@ -48,7 +48,7 @@ class Filter(object):
# ------------------------------------------------------------------------------
-class Writer(object):
+class Writer:
"""Visual Studio XML project writer."""
def __init__(self, project_path, version, name, guid=None, platforms=None):
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
index 6ef16f2a0..e89a971a3 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
@@ -14,12 +14,8 @@ The MSBuild schemas were also considered. They are typically found in the
MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild
"""
-from __future__ import print_function
-
-from gyp import string_types
-
-import sys
import re
+import sys
# Dictionaries of settings validators. The key is the tool name, the value is
# a dictionary mapping setting names to validation functions.
@@ -36,7 +32,7 @@ _msvs_to_msbuild_converters = {}
_msbuild_name_of_tool = {}
-class _Tool(object):
+class _Tool:
"""Represents a tool used by MSVS or MSBuild.
Attributes:
@@ -68,7 +64,7 @@ def _GetMSBuildToolSettings(msbuild_settings, tool):
return msbuild_settings.setdefault(tool.msbuild_name, {})
-class _Type(object):
+class _Type:
"""Type of settings (Base class)."""
def ValidateMSVS(self, value):
@@ -110,11 +106,11 @@ class _String(_Type):
"""A setting that's just a string."""
def ValidateMSVS(self, value):
- if not isinstance(value, string_types):
+ if not isinstance(value, str):
raise ValueError("expected string; got %r" % value)
def ValidateMSBuild(self, value):
- if not isinstance(value, string_types):
+ if not isinstance(value, str):
raise ValueError("expected string; got %r" % value)
def ConvertToMSBuild(self, value):
@@ -126,11 +122,11 @@ class _StringList(_Type):
"""A settings that's a list of strings."""
def ValidateMSVS(self, value):
- if not isinstance(value, string_types) and not isinstance(value, list):
+ if not isinstance(value, (list, str)):
raise ValueError("expected string list; got %r" % value)
def ValidateMSBuild(self, value):
- if not isinstance(value, string_types) and not isinstance(value, list):
+ if not isinstance(value, (list, str)):
raise ValueError("expected string list; got %r" % value)
def ConvertToMSBuild(self, value):
@@ -195,7 +191,7 @@ class _Enumeration(_Type):
def __init__(self, label_list, new=None):
_Type.__init__(self)
self._label_list = label_list
- self._msbuild_values = set(value for value in label_list if value is not None)
+ self._msbuild_values = {value for value in label_list if value is not None}
if new is not None:
self._msbuild_values.update(new)
@@ -342,7 +338,7 @@ def _ConvertedToAdditionalOption(tool, msvs_name, flag):
if value == "true":
tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
if "AdditionalOptions" in tool_settings:
- new_flags = "%s %s" % (tool_settings["AdditionalOptions"], flag)
+ new_flags = "{} {}".format(tool_settings["AdditionalOptions"], flag)
else:
new_flags = flag
tool_settings["AdditionalOptions"] = new_flags
@@ -536,14 +532,14 @@ def _ValidateSettings(validators, settings, stderr):
tool_validators[setting](value)
except ValueError as e:
print(
- "Warning: for %s/%s, %s" % (tool_name, setting, e),
+ f"Warning: for {tool_name}/{setting}, {e}",
file=stderr,
)
else:
_ValidateExclusionSetting(
setting,
tool_validators,
- ("Warning: unrecognized setting %s/%s" % (tool_name, setting)),
+ (f"Warning: unrecognized setting {tool_name}/{setting}"),
stderr,
)
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
index 99860c880..6ca09687a 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -9,10 +9,7 @@
import unittest
import gyp.MSVSSettings as MSVSSettings
-try:
- from StringIO import StringIO # Python 2
-except ImportError:
- from io import StringIO # Python 3
+from io import StringIO
class TestSequenceFunctions(unittest.TestCase):
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
index 2c08589e0..2e5c811bd 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
@@ -7,7 +7,7 @@
import gyp.easy_xml as easy_xml
-class Writer(object):
+class Writer:
"""Visual Studio XML tool file writer."""
def __init__(self, tool_file_path, name):
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
index de0896e69..e580c00fb 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
@@ -53,7 +53,7 @@ def _QuoteWin32CommandLineArgs(args):
return new_args
-class Writer(object):
+class Writer:
"""Visual Studio XML user user file writer."""
def __init__(self, user_file_path, version, name):
@@ -93,7 +93,7 @@ class Writer(object):
abs_command = _FindCommandInPath(command[0])
if environment and isinstance(environment, dict):
- env_list = ['%s="%s"' % (key, val) for (key, val) in environment.items()]
+ env_list = [f'{key}="{val}"' for (key, val) in environment.items()]
environment = " ".join(env_list)
else:
environment = ""
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
index 83a9c297e..36bb782bd 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
@@ -55,7 +55,7 @@ def _SuffixName(name, suffix):
Target name with suffix added (foo_suffix#target)
"""
parts = name.rsplit("#", 1)
- parts[0] = "%s_%s" % (parts[0], suffix)
+ parts[0] = f"{parts[0]}_{suffix}"
return "#".join(parts)
@@ -160,7 +160,7 @@ def _GetPdbPath(target_dict, config_name, vars):
return pdb_path
pdb_base = target_dict.get("product_name", target_dict["target_name"])
- pdb_base = "%s.%s.pdb" % (pdb_base, TARGET_TYPE_EXT[target_dict["type"]])
+ pdb_base = "{}.{}.pdb".format(pdb_base, TARGET_TYPE_EXT[target_dict["type"]])
pdb_path = vars["PRODUCT_DIR"] + "/" + pdb_base
return pdb_path
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
index 36b006aaa..134b35557 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
@@ -11,14 +11,12 @@ import subprocess
import sys
import glob
-PY3 = bytes != str
-
def JoinPath(*args):
return os.path.normpath(os.path.join(*args))
-class VisualStudioVersion(object):
+class VisualStudioVersion:
"""Information regarding a version of Visual Studio."""
def __init__(
@@ -176,9 +174,7 @@ def _RegistryQueryBase(sysdir, key, value):
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
# Note that the error text may be in [1] in some cases
- text = p.communicate()[0]
- if PY3:
- text = text.decode("utf-8")
+ text = p.communicate()[0].decode("utf-8")
# Check return code from reg.exe; officially 0==success and 1==error
if p.returncode:
return None
@@ -221,21 +217,15 @@ def _RegistryGetValueUsingWinReg(key, value):
value: The particular registry value to read.
Return:
contents of the registry key's value, or None on failure. Throws
- ImportError if _winreg is unavailable.
+ ImportError if winreg is unavailable.
"""
- try:
- # Python 2
- from _winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
- except ImportError:
- # Python 3
- from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
-
+ from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
try:
root, subkey = key.split("\\", 1)
assert root == "HKLM" # Only need HKLM for now.
with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey:
return QueryValueEx(hkey, value)[0]
- except WindowsError:
+ except OSError:
return None
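Python 3 renamed `_winreg` to `winreg` and made `WindowsError` an alias of `OSError`, which is what the two hunks above rely on. A minimal sketch of the simplified query, assuming a Windows host (the subkey and value names are whatever the caller passes):

    def registry_get_value(subkey, value):
        # ImportError propagates on non-Windows hosts, as the docstring says.
        from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
        try:
            with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey:
                return QueryValueEx(hkey, value)[0]
        except OSError:  # WindowsError is an alias of OSError on Python 3
            return None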
@@ -426,9 +416,7 @@ def _ConvertToCygpath(path):
"""Convert to cygwin path if we are using cygwin."""
if sys.platform == "cygwin":
p = subprocess.Popen(["cygpath", path], stdout=subprocess.PIPE)
- path = p.communicate()[0].strip()
- if PY3:
- path = path.decode("utf-8")
+ path = p.communicate()[0].decode("utf-8").strip()
return path
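The deleted `PY3 = bytes != str` sentinel guarded `.decode()` calls; on Python 3, `Popen.communicate()` always returns `bytes` unless `text=True` is requested, so decoding unconditionally is correct. A sketch (POSIX `echo` assumed):

    import subprocess

    p = subprocess.Popen(["echo", "hi"], stdout=subprocess.PIPE)
    text = p.communicate()[0].decode("utf-8")  # bytes -> str on every run
    assert text.strip() == "hi"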
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/__init__.py b/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
index f6ea625d4..6790ef96a 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
@@ -1,10 +1,9 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import copy
import gyp.input
@@ -16,13 +15,6 @@ import sys
import traceback
from gyp.common import GypError
-try:
- # Python 2
- string_types = basestring
-except NameError:
- # Python 3
- string_types = str
-
# Default debug modes for GYP
debug = {}
@@ -193,7 +185,7 @@ def ShlexEnv(env_name):
def FormatOpt(opt, value):
if opt.startswith("--"):
- return "%s=%s" % (opt, value)
+ return f"{opt}={value}"
return opt + value
@@ -524,7 +516,7 @@ def gyp_main(args):
for option, value in sorted(options.__dict__.items()):
if option[0] == "_":
continue
- if isinstance(value, string_types):
+ if isinstance(value, str):
DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value)
else:
DebugOutput(DEBUG_GENERAL, " %s: %s", option, value)
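The removed `string_types` shim existed because Python 2 split text across `str` and `unicode` (both under `basestring`); Python 3 has the single `str` type, so the plain `isinstance` check above is equivalent:

    value = "--depth=0"  # illustrative option value
    assert isinstance(value, str)  # covers everything basestring did, on Python 3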
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/common.py b/node_modules/node-gyp/gyp/pylib/gyp/common.py
index a91564386..9213fcc5e 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/common.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/common.py
@@ -10,17 +10,12 @@ import tempfile
import sys
import subprocess
-try:
- from collections.abc import MutableSet
-except ImportError:
- from collections import MutableSet
-
-PY3 = bytes != str
+from collections.abc import MutableSet
# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
# among other "problems".
-class memoize(object):
+class memoize:
def __init__(self, func):
self.func = func
self.cache = {}
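Only `__init__` of `memoize` appears in this hunk; a sketch of how such a decorator is typically completed and used (the `__call__` body and the decorated function are assumptions, consistent with the cache-by-args comment above):

    class memoize:
        def __init__(self, func):
            self.func = func
            self.cache = {}

        def __call__(self, *args):
            # Unhashable ("mutable") args raise TypeError here, as warned above.
            try:
                return self.cache[args]
            except KeyError:
                result = self.cache[args] = self.func(*args)
                return result

    @memoize
    def square(n):
        return n * n

    assert square(4) == 16  # a second identical call is served from the cache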
@@ -348,7 +343,7 @@ def WriteOnDiff(filename):
the target if it differs (on close).
"""
- class Writer(object):
+ class Writer:
"""Wrapper around file which only covers the target if it differs."""
def __init__(self):
@@ -566,8 +561,8 @@ class OrderedSet(MutableSet):
def __repr__(self):
if not self:
- return "%s()" % (self.__class__.__name__,)
- return "%s(%r)" % (self.__class__.__name__, list(self))
+ return f"{self.__class__.__name__}()"
+ return f"{self.__class__.__name__}({list(self)!r})"
def __eq__(self, other):
if isinstance(other, OrderedSet):
@@ -653,9 +648,7 @@ def IsCygwin():
out = subprocess.Popen(
"uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
- stdout, stderr = out.communicate()
- if PY3:
- stdout = stdout.decode("utf-8")
+ stdout = out.communicate()[0].decode("utf-8")
return "CYGWIN" in str(stdout)
except Exception:
return False
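`collections.abc` has been the ABC's home since Python 3.3, and the old `collections.MutableSet` alias was removed in 3.10, so the unconditional import above is the forward-compatible one. A subclass such as `OrderedSet` only needs five methods; a minimal sketch of the idea:

    from collections.abc import MutableSet

    class TinySet(MutableSet):
        # MutableSet derives |=, -=, etc. from these five methods.
        def __init__(self, iterable=()):
            self._items = list(dict.fromkeys(iterable))  # preserves insertion order
        def __contains__(self, item):
            return item in self._items
        def __iter__(self):
            return iter(self._items)
        def __len__(self):
            return len(self._items)
        def add(self, item):
            if item not in self._items:
                self._items.append(item)
        def discard(self, item):
            if item in self._items:
                self._items.remove(item)

    s = TinySet("abca")
    s |= {"d"}
    assert list(s) == ["a", "b", "c", "d"]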
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/common_test.py b/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
index 0310fb266..05344085a 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
index e0628ef4d..bda1a4746 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import sys
import re
import os
import locale
@@ -84,7 +85,7 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0):
rest = specification[1:]
if rest and isinstance(rest[0], dict):
for at, val in sorted(rest[0].items()):
- xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True)))
+ xml_parts.append(f' {at}="{_XmlEscape(val, attr=True)}"')
rest = rest[1:]
if rest:
xml_parts.append(">")
@@ -101,12 +102,13 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0):
_ConstructContentList(xml_parts, child_spec, pretty, level + 1)
if multi_line and indentation:
xml_parts.append(indentation)
- xml_parts.append("</%s>%s" % (name, new_line))
+ xml_parts.append(f"</{name}>{new_line}")
else:
xml_parts.append("/>%s" % new_line)
-def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False, win32=False):
+def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False,
+ win32=(sys.platform == "win32")):
""" Writes the XML content to disk, touching the file only if it has changed.
Args:
@@ -125,9 +127,9 @@ def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False, win32=False
# Get the old content
try:
- with open(path, "r") as file:
+ with open(path) as file:
existing = file.read()
- except IOError:
+ except OSError:
existing = None
# It has changed, write it
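Two details in the easy_xml.py hunks: `IOError` has been an alias of `OSError` since Python 3.3, and the new `win32` default is evaluated once, when the function definition runs. The write-only-if-changed pattern itself is small; a hedged sketch of the core:

    def write_if_changed(path, content):
        # A missing or unreadable file simply counts as "changed".
        try:
            with open(path) as f:
                existing = f.read()
        except OSError:  # catches the old IOError cases too
            existing = None
        if content != existing:
            with open(path, "w") as f:
                f.write(content)

Leaving an unchanged file untouched keeps its mtime stable, so make-style builds do not rebuild its dependents needlessly.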
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
index 5bc795ddb..342f693a3 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -9,10 +9,7 @@
import gyp.easy_xml as easy_xml
import unittest
-try:
- from StringIO import StringIO # Python 2
-except ImportError:
- from io import StringIO # Python 3
+from io import StringIO
class TestSequenceFunctions(unittest.TestCase):
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py b/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
index f9f89e520..1cb981526 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -18,7 +18,7 @@ def main(args):
executor.Dispatch(args)
-class FlockTool(object):
+class FlockTool:
"""This class emulates the 'flock' command."""
def Dispatch(self, args):
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
index 7a393c1f9..f15df00c3 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
@@ -62,7 +62,6 @@ directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
then the "all" target includes "b1" and "b2".
"""
-from __future__ import print_function
import gyp.common
import json
@@ -216,7 +215,7 @@ def _ExtractSources(target, target_dict, toplevel_dir):
return results
-class Target(object):
+class Target:
"""Holds information about a particular target:
deps: set of Targets this Target depends upon. This is not recursive, only the
direct dependent Targets.
@@ -252,7 +251,7 @@ class Target(object):
self.is_or_has_linked_ancestor = False
-class Config(object):
+class Config:
"""Details what we're looking for
files: set of files to search for
targets: see file description for details."""
@@ -271,10 +270,10 @@ class Config(object):
if not config_path:
return
try:
- f = open(config_path, "r")
+ f = open(config_path)
config = json.load(f)
f.close()
- except IOError:
+ except OSError:
raise Exception("Unable to open file " + config_path)
except ValueError as e:
raise Exception("Unable to parse config file " + config_path + str(e))
@@ -586,7 +585,7 @@ def _WriteOutput(params, **values):
f = open(output_path, "w")
f.write(json.dumps(values) + "\n")
f.close()
- except IOError as e:
+ except OSError as e:
print("Error writing to output file", output_path, str(e))
@@ -627,7 +626,7 @@ def CalculateVariables(default_variables, params):
default_variables.setdefault("OS", operating_system)
-class TargetCalculator(object):
+class TargetCalculator:
"""Calculates the matching test_targets and matching compile_targets."""
def __init__(
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
index 16728847c..cdf1a4832 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
@@ -14,7 +14,6 @@
# variables set potentially clash with other Android build system variables.
# Try to avoid setting global variables where possible.
-from __future__ import print_function
import gyp
import gyp.common
@@ -84,7 +83,7 @@ def IsCPPExtension(ext):
def Sourceify(path):
"""Convert a path to its source directory form. The Android backend does not
- support options.generator_output, so this function is a noop."""
+ support options.generator_output, so this function is a noop."""
return path
@@ -100,11 +99,11 @@ target_outputs = {}
target_link_deps = {}
-class AndroidMkWriter(object):
+class AndroidMkWriter:
"""AndroidMkWriter packages up the writing of one target-specific Android.mk.
- Its only real entry point is Write(), and is mostly used for namespacing.
- """
+ Its only real entry point is Write(), and is mostly used for namespacing.
+ """
def __init__(self, android_top_dir):
self.android_top_dir = android_top_dir
@@ -123,18 +122,18 @@ class AndroidMkWriter(object):
):
"""The main entry point: writes a .mk file for a single target.
- Arguments:
- qualified_target: target we're generating
- relative_target: qualified target name relative to the root
- base_path: path relative to source root we're building in, used to resolve
- target-relative paths
- output_filename: output .mk file name to write
- spec, configs: gyp info
- part_of_all: flag indicating this target is part of 'all'
- write_alias_target: flag indicating whether to create short aliases for
- this target
- sdk_version: what to emit for LOCAL_SDK_VERSION in output
- """
+ Arguments:
+ qualified_target: target we're generating
+ relative_target: qualified target name relative to the root
+ base_path: path relative to source root we're building in, used to resolve
+ target-relative paths
+ output_filename: output .mk file name to write
+ spec, configs: gyp info
+ part_of_all: flag indicating this target is part of 'all'
+ write_alias_target: flag indicating whether to create short aliases for
+ this target
+ sdk_version: what to emit for LOCAL_SDK_VERSION in output
+ """
gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, "w")
@@ -254,15 +253,15 @@ class AndroidMkWriter(object):
def WriteActions(self, actions, extra_sources, extra_outputs):
"""Write Makefile code for any 'actions' from the gyp input.
- extra_sources: a list that will be filled in with newly generated source
- files, if any
- extra_outputs: a list that will be filled in with any outputs of these
- actions (used to make other pieces dependent on these
- actions)
- """
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ actions (used to make other pieces dependent on these
+ actions)
+ """
for action in actions:
name = make.StringToMakefileVariable(
- "%s_%s" % (self.relative_target, action["action_name"])
+ "{}_{}".format(self.relative_target, action["action_name"])
)
self.WriteLn('### Rules for action "%s":' % action["action_name"])
inputs = action["inputs"]
@@ -350,7 +349,7 @@ class AndroidMkWriter(object):
for output in outputs[1:]:
# Make each output depend on the main output, with an empty command
# to force make to notice that the mtime has changed.
- self.WriteLn("%s: %s ;" % (self.LocalPathify(output), main_output))
+ self.WriteLn(f"{self.LocalPathify(output)}: {main_output} ;")
extra_outputs += outputs
self.WriteLn()
@@ -360,11 +359,11 @@ class AndroidMkWriter(object):
def WriteRules(self, rules, extra_sources, extra_outputs):
"""Write Makefile code for any 'rules' from the gyp input.
- extra_sources: a list that will be filled in with newly generated source
- files, if any
- extra_outputs: a list that will be filled in with any outputs of these
- rules (used to make other pieces dependent on these rules)
- """
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ rules (used to make other pieces dependent on these rules)
+ """
if len(rules) == 0:
return
@@ -372,7 +371,7 @@ class AndroidMkWriter(object):
if len(rule.get("rule_sources", [])) == 0:
continue
name = make.StringToMakefileVariable(
- "%s_%s" % (self.relative_target, rule["rule_name"])
+ "{}_{}".format(self.relative_target, rule["rule_name"])
)
self.WriteLn('\n### Generated for rule "%s":' % name)
self.WriteLn('# "%s":' % rule)
@@ -452,7 +451,7 @@ class AndroidMkWriter(object):
for output in outputs[1:]:
# Make each output depend on the main output, with an empty command
# to force make to notice that the mtime has changed.
- self.WriteLn("%s: %s ;" % (output, main_output))
+ self.WriteLn(f"{output}: {main_output} ;")
self.WriteLn()
self.WriteLn()
@@ -460,9 +459,9 @@ class AndroidMkWriter(object):
def WriteCopies(self, copies, extra_outputs):
"""Write Makefile code for any 'copies' from the gyp input.
- extra_outputs: a list that will be filled in with any outputs of this action
- (used to make other pieces dependent on this action)
- """
+ extra_outputs: a list that will be filled in with any outputs of this action
+ (used to make other pieces dependent on this action)
+ """
self.WriteLn("### Generated for copy rule.")
variable = make.StringToMakefileVariable(self.relative_target + "_copies")
@@ -487,25 +486,25 @@ class AndroidMkWriter(object):
self.LocalPathify(os.path.join(copy["destination"], filename))
)
- self.WriteLn(
- "%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)" % (output, path)
- )
+ self.WriteLn(f"{output}: {path} $(GYP_TARGET_DEPENDENCIES) | $(ACP)")
self.WriteLn("\t@echo Copying: $@")
self.WriteLn("\t$(hide) mkdir -p $(dir $@)")
self.WriteLn("\t$(hide) $(ACP) -rpf $< $@")
self.WriteLn()
outputs.append(output)
- self.WriteLn("%s = %s" % (variable, " ".join(map(make.QuoteSpaces, outputs))))
+ self.WriteLn(
+ "{} = {}".format(variable, " ".join(map(make.QuoteSpaces, outputs)))
+ )
extra_outputs.append("$(%s)" % variable)
self.WriteLn()
def WriteSourceFlags(self, spec, configs):
"""Write out the flags and include paths used to compile source files for
- the current target.
+ the current target.
- Args:
- spec, configs: input from gyp.
- """
+ Args:
+ spec, configs: input from gyp.
+ """
for configname, config in sorted(configs.items()):
extracted_includes = []
@@ -554,16 +553,16 @@ class AndroidMkWriter(object):
def WriteSources(self, spec, configs, extra_sources):
"""Write Makefile code for any 'sources' from the gyp input.
- These are source files necessary to build the current target.
- We need to handle shared_intermediate directory source files as
- a special case by copying them to the intermediate directory and
- treating them as a generated sources. Otherwise the Android build
- rules won't pick them up.
-
- Args:
- spec, configs: input from gyp.
- extra_sources: Sources generated from Actions or Rules.
- """
+ These are source files necessary to build the current target.
+ We need to handle shared_intermediate directory source files as
+ a special case by copying them to the intermediate directory and
+ treating them as generated sources. Otherwise the Android build
+ rules won't pick them up.
+
+ Args:
+ spec, configs: input from gyp.
+ extra_sources: Sources generated from Actions or Rules.
+ """
sources = filter(make.Compilable, spec.get("sources", []))
generated_not_sources = [x for x in extra_sources if not make.Compilable(x)]
extra_sources = filter(make.Compilable, extra_sources)
@@ -617,7 +616,7 @@ class AndroidMkWriter(object):
if IsCPPExtension(ext) and ext != local_cpp_extension:
local_file = root + local_cpp_extension
if local_file != source:
- self.WriteLn("%s: %s" % (local_file, self.LocalPathify(source)))
+ self.WriteLn(f"{local_file}: {self.LocalPathify(source)}")
self.WriteLn("\tmkdir -p $(@D); cp $< $@")
origin_src_dirs.append(os.path.dirname(source))
final_generated_sources.append(local_file)
@@ -640,10 +639,10 @@ class AndroidMkWriter(object):
def ComputeAndroidModule(self, spec):
"""Return the Android module name used for a gyp spec.
- We use the complete qualified target name to avoid collisions between
- duplicate targets in different directories. We also add a suffix to
- distinguish gyp-generated module names.
- """
+ We use the complete qualified target name to avoid collisions between
+ duplicate targets in different directories. We also add a suffix to
+ distinguish gyp-generated module names.
+ """
if int(spec.get("android_unmangled_name", 0)):
assert self.type != "shared_library" or self.target.startswith("lib")
@@ -662,7 +661,7 @@ class AndroidMkWriter(object):
suffix = "_gyp"
if self.path:
- middle = make.StringToMakefileVariable("%s_%s" % (self.path, self.target))
+ middle = make.StringToMakefileVariable(f"{self.path}_{self.target}")
else:
middle = make.StringToMakefileVariable(self.target)
@@ -671,11 +670,11 @@ class AndroidMkWriter(object):
def ComputeOutputParts(self, spec):
"""Return the 'output basename' of a gyp spec, split into filename + ext.
- Android libraries must be named the same thing as their module name,
- otherwise the linker can't find them, so product_name and so on must be
- ignored if we are building a library, and the "lib" prepending is
- not done for Android.
- """
+ Android libraries must be named the same thing as their module name,
+ otherwise the linker can't find them, so product_name and so on must be
+ ignored if we are building a library, and the "lib" prepending is
+ not done for Android.
+ """
assert self.type != "loadable_module" # TODO: not supported?
target = spec["target_name"]
@@ -711,17 +710,17 @@ class AndroidMkWriter(object):
def ComputeOutputBasename(self, spec):
"""Return the 'output basename' of a gyp spec.
- E.g., the loadable module 'foobar' in directory 'baz' will produce
- 'libfoobar.so'
- """
+ E.g., the loadable module 'foobar' in directory 'baz' will produce
+ 'libfoobar.so'
+ """
return "".join(self.ComputeOutputParts(spec))
def ComputeOutput(self, spec):
"""Return the 'output' (full output path) of a gyp spec.
- E.g., the loadable module 'foobar' in directory 'baz' will produce
- '$(obj)/baz/libfoobar.so'
- """
+ E.g., the loadable module 'foobar' in directory 'baz' will produce
+ '$(obj)/baz/libfoobar.so'
+ """
if self.type == "executable":
# We install host executables into shared_intermediate_dir so they can be
# run by gyp rules that refer to PRODUCT_DIR.
@@ -740,7 +739,7 @@ class AndroidMkWriter(object):
% (self.android_class, self.android_module)
)
else:
- path = "$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))" % (
+ path = "$(call intermediates-dir-for,{},{},,,$(GYP_VAR_PREFIX))".format(
self.android_class,
self.android_module,
)
@@ -749,14 +748,14 @@ class AndroidMkWriter(object):
return os.path.join(path, self.ComputeOutputBasename(spec))
def NormalizeIncludePaths(self, include_paths):
- """ Normalize include_paths.
- Convert absolute paths to relative to the Android top directory.
-
- Args:
- include_paths: A list of unprocessed include paths.
- Returns:
- A list of normalized include paths.
- """
+ """Normalize include_paths.
+ Convert absolute paths to relative to the Android top directory.
+
+ Args:
+ include_paths: A list of unprocessed include paths.
+ Returns:
+ A list of normalized include paths.
+ """
normalized = []
for path in include_paths:
if path[0] == "/":
@@ -767,11 +766,11 @@ class AndroidMkWriter(object):
def ExtractIncludesFromCFlags(self, cflags):
"""Extract includes "-I..." out from cflags
- Args:
- cflags: A list of compiler flags, which may be mixed with "-I.."
- Returns:
- A tuple of lists: (clean_clfags, include_paths). "-I.." is trimmed.
- """
+ Args:
+ cflags: A list of compiler flags, which may be mixed with "-I.."
+ Returns:
+ A tuple of lists: (clean_cflags, include_paths). "-I.." is trimmed.
+ """
clean_cflags = []
include_paths = []
for flag in cflags:
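The loop body of `ExtractIncludesFromCFlags` falls outside this hunk; a reconstruction of what a `-I` splitter like this does (an assumption; the exact gyp body may differ):

    def extract_includes_from_cflags(cflags):
        # Route "-Ipath" entries to include_paths; everything else passes through.
        clean_cflags = []
        include_paths = []
        for flag in cflags:
            if flag.startswith("-I"):
                include_paths.append(flag[2:])
            else:
                clean_cflags.append(flag)
        return clean_cflags, include_paths

    flags, includes = extract_includes_from_cflags(["-O2", "-Iinclude", "-Wall"])
    assert flags == ["-O2", "-Wall"] and includes == ["include"]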
@@ -785,14 +784,14 @@ class AndroidMkWriter(object):
def FilterLibraries(self, libraries):
"""Filter the 'libraries' key to separate things that shouldn't be ldflags.
- Library entries that look like filenames should be converted to android
- module names instead of being passed to the linker as flags.
+ Library entries that look like filenames should be converted to android
+ module names instead of being passed to the linker as flags.
- Args:
- libraries: the value of spec.get('libraries')
- Returns:
- A tuple (static_lib_modules, dynamic_lib_modules, ldflags)
- """
+ Args:
+ libraries: the value of spec.get('libraries')
+ Returns:
+ A tuple (static_lib_modules, dynamic_lib_modules, ldflags)
+ """
static_lib_modules = []
dynamic_lib_modules = []
ldflags = []
@@ -823,10 +822,10 @@ class AndroidMkWriter(object):
def ComputeDeps(self, spec):
"""Compute the dependencies of a gyp spec.
- Returns a tuple (deps, link_deps), where each is a list of
- filenames that will need to be put in front of make for either
- building (deps) or linking (link_deps).
- """
+ Returns a tuple (deps, link_deps), where each is a list of
+ filenames that will need to be put in front of make for either
+ building (deps) or linking (link_deps).
+ """
deps = []
link_deps = []
if "dependencies" in spec:
@@ -846,9 +845,9 @@ class AndroidMkWriter(object):
def WriteTargetFlags(self, spec, configs, link_deps):
"""Write Makefile code to specify the link flags and library dependencies.
- spec, configs: input from gyp.
- link_deps: link dependency list; see ComputeDeps()
- """
+ spec, configs: input from gyp.
+ link_deps: link dependency list; see ComputeDeps()
+ """
# Libraries (i.e. -lfoo)
# These must be included even for static libraries as some of them provide
# implicit include paths through the build system.
@@ -891,12 +890,12 @@ class AndroidMkWriter(object):
):
"""Write Makefile code to produce the final target of the gyp spec.
- spec, configs: input from gyp.
- deps, link_deps: dependency lists; see ComputeDeps()
- part_of_all: flag indicating this target is part of 'all'
- write_alias_target: flag indicating whether to create short aliases for this
- target
- """
+ spec, configs: input from gyp.
+ deps, link_deps: dependency lists; see ComputeDeps()
+ part_of_all: flag indicating this target is part of 'all'
+ write_alias_target: flag indicating whether to create short aliases for this
+ target
+ """
self.WriteLn("### Rules for final target.")
if self.type != "none":
@@ -909,7 +908,7 @@ class AndroidMkWriter(object):
if isinstance(v, list):
self.WriteList(v, k)
else:
- self.WriteLn("%s := %s" % (k, make.QuoteIfNecessary(v)))
+ self.WriteLn(f"{k} := {make.QuoteIfNecessary(v)}")
self.WriteLn("")
# Add to the set of targets which represent the gyp 'all' target. We use the
@@ -928,7 +927,7 @@ class AndroidMkWriter(object):
if self.target != self.android_module and write_alias_target:
self.WriteLn("# Alias gyp target name.")
self.WriteLn(".PHONY: %s" % self.target)
- self.WriteLn("%s: %s" % (self.target, self.android_module))
+ self.WriteLn(f"{self.target}: {self.android_module}")
self.WriteLn("")
# Add the command to trigger build of the target type depending
@@ -975,25 +974,25 @@ class AndroidMkWriter(object):
):
"""Write a variable definition that is a list of values.
- E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
- foo = blaha blahb
- but in a pretty-printed style.
- """
+ E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
+ foo = blaha blahb
+ but in a pretty-printed style.
+ """
values = ""
if value_list:
value_list = [quoter(prefix + value) for value in value_list]
if local_pathify:
value_list = [self.LocalPathify(value) for value in value_list]
values = " \\\n\t" + " \\\n\t".join(value_list)
- self.fp.write("%s :=%s\n\n" % (variable, values))
+ self.fp.write(f"{variable} :={values}\n\n")
def WriteLn(self, text=""):
self.fp.write(text + "\n")
def LocalPathify(self, path):
"""Convert a subdirectory-relative path into a normalized path which starts
- with the make variable $(LOCAL_PATH) (i.e. the top of the project tree).
- Absolute paths, or paths that contain variables, are just normalized."""
+ with the make variable $(LOCAL_PATH) (i.e. the top of the project tree).
+ Absolute paths, or paths that contain variables, are just normalized."""
if "$(" in path or os.path.isabs(path):
# path is not a file in the project tree in this case, but calling
# normpath is still important for trimming trailing slashes.
@@ -1006,7 +1005,7 @@ class AndroidMkWriter(object):
# so we don't look for a slash.
assert local_path.startswith(
"$(LOCAL_PATH)"
- ), "Path %s attempts to escape from gyp path %s !)" % (path, self.path)
+ ), f"Path {path} attempts to escape from gyp path {self.path} !)"
return local_path
def ExpandInputRoot(self, template, expansion, dirname):
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
index f5ceacfca..c95d18415 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
@@ -28,21 +28,15 @@ not be able to find the header file directories described in the generated
CMakeLists.txt file.
"""
-from __future__ import print_function
import multiprocessing
import os
import signal
-import string
import subprocess
import gyp.common
import gyp.xcode_emulation
-try:
- # maketrans moved to str in python3.
- _maketrans = string.maketrans
-except NameError:
- _maketrans = str.maketrans
+_maketrans = str.maketrans
generator_default_variables = {
"EXECUTABLE_PREFIX": "",
@@ -223,7 +217,7 @@ def WriteVariable(output, variable_name, prepend=None):
output.write("}")
-class CMakeTargetType(object):
+class CMakeTargetType:
def __init__(self, command, modifier, property_modifier):
self.command = command
self.modifier = modifier
@@ -263,7 +257,7 @@ def WriteActions(target_name, actions, extra_sources, extra_deps, path_to_gyp, o
"""
for action in actions:
action_name = StringToCMakeTargetName(action["action_name"])
- action_target_name = "%s__%s" % (target_name, action_name)
+ action_target_name = f"{target_name}__{action_name}"
inputs = action["inputs"]
inputs_name = action_target_name + "__input"
@@ -282,7 +276,7 @@ def WriteActions(target_name, actions, extra_sources, extra_deps, path_to_gyp, o
# Build up a list of outputs.
# Collect the output dirs we'll need.
- dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
+ dirs = {dir for dir in (os.path.dirname(o) for o in outputs) if dir}
if int(action.get("process_outputs_as_sources", False)):
extra_sources.extend(zip(cmake_outputs, outputs))
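`set(generator)` → set-comprehension is another pure-syntax cleanup; with the `dirs` expression above (sample outputs are illustrative):

    import os

    outputs = ["gen/a.h", "gen/sub/b.h", "c.h"]
    dirs = {os.path.dirname(o) for o in outputs if os.path.dirname(o)}
    assert dirs == {"gen", "gen/sub"}  # the empty dirname of "c.h" is filtered out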
@@ -377,7 +371,7 @@ def WriteRules(target_name, rules, extra_sources, extra_deps, path_to_gyp, outpu
# Build up a list of outputs.
# Collect the output dirs we'll need.
- dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
+ dirs = {dir for dir in (os.path.dirname(o) for o in outputs) if dir}
# Create variables for the output, as 'local' variable will be unset.
these_outputs = []
@@ -478,7 +472,7 @@ def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
extra_deps.append(copy_name)
return
- class Copy(object):
+ class Copy:
def __init__(self, ext, command):
self.cmake_inputs = []
self.cmake_outputs = []
@@ -587,7 +581,7 @@ def CreateCMakeTargetFullName(qualified_target):
return StringToCMakeTargetName(cmake_target_full_name)
-class CMakeNamer(object):
+class CMakeNamer:
"""Converts Gyp target names into CMake target names.
CMake requires that target names be globally unique. One way to ensure
@@ -1047,7 +1041,7 @@ def WriteTarget(
# XCode settings
xcode_settings = config.get("xcode_settings", {})
- for xcode_setting, xcode_value in xcode_settings.viewitems():
+ for xcode_setting, xcode_value in xcode_settings.items():
SetTargetProperty(
output,
cmake_target_name,
@@ -1285,11 +1279,11 @@ def PerformBuild(data, configurations, params):
os.path.join(generator_dir, output_dir, config_name)
)
arguments = ["cmake", "-G", "Ninja"]
- print("Generating [%s]: %s" % (config_name, arguments))
+ print(f"Generating [{config_name}]: {arguments}")
subprocess.check_call(arguments, cwd=build_dir)
arguments = ["ninja", "-C", build_dir]
- print("Building [%s]: %s" % (config_name, arguments))
+ print(f"Building [{config_name}]: {arguments}")
subprocess.check_call(arguments)
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
index 46f68e038..99d5c1fd6 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import os
import gyp
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
index 4bd49725d..1ff0dc83a 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
@@ -26,8 +26,6 @@ import gyp.msvs_emulation
import shlex
import xml.etree.cElementTree as ET
-PY3 = bytes != str
-
generator_wants_static_library_dependencies_adjusted = False
generator_default_variables = {}
@@ -105,9 +103,7 @@ def GetAllIncludeDirectories(
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
- output = proc.communicate()[1]
- if PY3:
- output = output.decode("utf-8")
+ output = proc.communicate()[1].decode("utf-8")
# Extract the list of include dirs from the output, which has this format:
# ...
# #include "..." search starts here:
@@ -245,9 +241,7 @@ def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler
cpp_proc = subprocess.Popen(
args=command, cwd=".", stdin=subprocess.PIPE, stdout=subprocess.PIPE
)
- cpp_output = cpp_proc.communicate()[0]
- if PY3:
- cpp_output = cpp_output.decode("utf-8")
+ cpp_output = cpp_proc.communicate()[0].decode("utf-8")
cpp_lines = cpp_output.split("\n")
for cpp_line in cpp_lines:
if not cpp_line.strip():
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
index 2d8aba5d1..82a07ddc6 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
@@ -49,7 +49,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# Use a banner that looks like the stock Python one and like what
# code.interact uses by default, but tack on something to indicate what
# locals are available, and identify gypsh.
- banner = "Python %s on %s\nlocals.keys() = %s\ngypsh" % (
+ banner = "Python {} on {}\nlocals.keys() = {}\ngypsh".format(
sys.version,
sys.platform,
repr(sorted(locals.keys())),
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
index d163ae313..c595f20fe 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
@@ -21,7 +21,6 @@
# toplevel Makefile. It may make sense to generate some .mk files on
# the side to keep the files readable.
-from __future__ import print_function
import os
import re
@@ -108,7 +107,7 @@ def CalculateVariables(default_variables, params):
def CalculateGeneratorInputInfo(params):
"""Calculate the generator specific info that gets fed to input (called by
- gyp)."""
+ gyp)."""
generator_flags = params.get("generator_flags", {})
android_ndk_version = generator_flags.get("android_ndk_version", None)
# Android NDK requires a strict link order.
@@ -320,7 +319,7 @@ CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
CXX.host ?= %(CXX.host)s
CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
LINK.host ?= %(LINK.host)s
-LDFLAGS.host ?=
+LDFLAGS.host ?= $(LDFLAGS_host)
AR.host ?= %(AR.host)s
# Define a dir function that can handle spaces.
@@ -615,15 +614,15 @@ def Target(filename):
def EscapeShellArgument(s):
"""Quotes an argument so that it will be interpreted literally by a POSIX
- shell. Taken from
- http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
- """
+ shell. Taken from
+ http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
+ """
return "'" + s.replace("'", "'\\''") + "'"
def EscapeMakeVariableExpansion(s):
"""Make has its own variable expansion syntax using $. We must escape it for
- string to be interpreted literally."""
+ string to be interpreted literally."""
return s.replace("$", "$$")
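`EscapeShellArgument` single-quotes the string and rewrites each embedded `'` as `'\''`, and `EscapeMakeVariableExpansion` doubles `$` because make treats `$$` as a literal dollar sign. Both behaviors in a sketch:

    def escape_shell_argument(s):
        # 'it'\''s' is read back by a POSIX shell as: it's
        return "'" + s.replace("'", "'\\''") + "'"

    def escape_make_variable_expansion(s):
        return s.replace("$", "$$")

    assert escape_shell_argument("it's") == "'it'\\''s'"
    assert escape_make_variable_expansion("$HOME") == "$$HOME"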
@@ -638,7 +637,7 @@ def EscapeCppDefine(s):
def QuoteIfNecessary(string):
"""TODO: Should this ideally be replaced with one or more of the above
- functions?"""
+ functions?"""
if '"' in string:
string = '"' + string.replace('"', '\\"') + '"'
return string
@@ -679,11 +678,11 @@ target_outputs = {}
target_link_deps = {}
-class MakefileWriter(object):
+class MakefileWriter:
"""MakefileWriter packages up the writing of one target-specific foobar.mk.
- Its only real entry point is Write(), and is mostly used for namespacing.
- """
+ Its only real entry point is Write(), and is mostly used for namespacing.
+ """
def __init__(self, generator_flags, flavor):
self.generator_flags = generator_flags
@@ -737,14 +736,14 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
):
"""The main entry point: writes a .mk file for a single target.
- Arguments:
- qualified_target: target we're generating
- base_path: path relative to source root we're building in, used to resolve
- target-relative paths
- output_filename: output .mk file name to write
- spec, configs: gyp info
- part_of_all: flag indicating this target is part of 'all'
- """
+ Arguments:
+ qualified_target: target we're generating
+ base_path: path relative to source root we're building in, used to resolve
+ target-relative paths
+ output_filename: output .mk file name to write
+ spec, configs: gyp info
+ part_of_all: flag indicating this target is part of 'all'
+ """
gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, "w")
@@ -844,7 +843,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
sources = [x for x in all_sources if Compilable(x)]
if sources:
self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
- extensions = set([os.path.splitext(s)[1] for s in sources])
+ extensions = {os.path.splitext(s)[1] for s in sources}
for ext in extensions:
if ext in self.suffix_rules_srcdir:
self.WriteLn(self.suffix_rules_srcdir[ext])
@@ -888,15 +887,15 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
"""Write a "sub-project" Makefile.
- This is a small, wrapper Makefile that calls the top-level Makefile to build
- the targets from a single gyp file (i.e. a sub-project).
+ This is a small, wrapper Makefile that calls the top-level Makefile to build
+ the targets from a single gyp file (i.e. a sub-project).
- Arguments:
- output_filename: sub-project Makefile name to write
- makefile_path: path to the top-level Makefile
- targets: list of "all" targets for this sub-project
- build_dir: build output directory, relative to the sub-project
- """
+ Arguments:
+ output_filename: sub-project Makefile name to write
+ makefile_path: path to the top-level Makefile
+ targets: list of "all" targets for this sub-project
+ build_dir: build output directory, relative to the sub-project
+ """
gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, "w")
self.fp.write(header)
@@ -910,7 +909,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.WriteLn("all:")
if makefile_path:
makefile_path = " -C " + makefile_path
- self.WriteLn("\t$(MAKE)%s %s" % (makefile_path, " ".join(targets)))
+ self.WriteLn("\t$(MAKE){} {}".format(makefile_path, " ".join(targets)))
self.fp.close()
def WriteActions(
@@ -923,17 +922,17 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
):
"""Write Makefile code for any 'actions' from the gyp input.
- extra_sources: a list that will be filled in with newly generated source
- files, if any
- extra_outputs: a list that will be filled in with any outputs of these
- actions (used to make other pieces dependent on these
- actions)
- part_of_all: flag indicating this target is part of 'all'
- """
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ actions (used to make other pieces dependent on these
+ actions)
+ part_of_all: flag indicating this target is part of 'all'
+ """
env = self.GetSortedXcodeEnv()
for action in actions:
name = StringToMakefileVariable(
- "%s_%s" % (self.qualified_target, action["action_name"])
+ "{}_{}".format(self.qualified_target, action["action_name"])
)
self.WriteLn('### Rules for action "%s":' % action["action_name"])
inputs = action["inputs"]
@@ -960,9 +959,11 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
]
command = gyp.common.EncodePOSIXShellList(action_commands)
if "message" in action:
- self.WriteLn("quiet_cmd_%s = ACTION %s $@" % (name, action["message"]))
+ self.WriteLn(
+ "quiet_cmd_{} = ACTION {} $@".format(name, action["message"])
+ )
else:
- self.WriteLn("quiet_cmd_%s = ACTION %s $@" % (name, name))
+ self.WriteLn(f"quiet_cmd_{name} = ACTION {name} $@")
if len(dirs) > 0:
command = "mkdir -p %s" % " ".join(dirs) + "; " + command
@@ -1022,7 +1023,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# Stuff the outputs in a variable so we can refer to them later.
outputs_variable = "action_%s_outputs" % name
- self.WriteLn("%s := %s" % (outputs_variable, " ".join(outputs)))
+ self.WriteLn("{} := {}".format(outputs_variable, " ".join(outputs)))
extra_outputs.append("$(%s)" % outputs_variable)
self.WriteLn()
@@ -1038,16 +1039,16 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
):
"""Write Makefile code for any 'rules' from the gyp input.
- extra_sources: a list that will be filled in with newly generated source
- files, if any
- extra_outputs: a list that will be filled in with any outputs of these
- rules (used to make other pieces dependent on these rules)
- part_of_all: flag indicating this target is part of 'all'
- """
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ rules (used to make other pieces dependent on these rules)
+ part_of_all: flag indicating this target is part of 'all'
+ """
env = self.GetSortedXcodeEnv()
for rule in rules:
name = StringToMakefileVariable(
- "%s_%s" % (self.qualified_target, rule["rule_name"])
+ "{}_{}".format(self.qualified_target, rule["rule_name"])
)
count = 0
self.WriteLn("### Generated for rule %s:" % name)
@@ -1175,10 +1176,10 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def WriteCopies(self, copies, extra_outputs, part_of_all):
"""Write Makefile code for any 'copies' from the gyp input.
- extra_outputs: a list that will be filled in with any outputs of this action
- (used to make other pieces dependent on this action)
- part_of_all: flag indicating this target is part of 'all'
- """
+ extra_outputs: a list that will be filled in with any outputs of this action
+ (used to make other pieces dependent on this action)
+ part_of_all: flag indicating this target is part of 'all'
+ """
self.WriteLn("### Generated for copy rule.")
variable = StringToMakefileVariable(self.qualified_target + "_copies")
@@ -1206,7 +1207,9 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
path = gyp.xcode_emulation.ExpandEnvVars(path, env)
self.WriteDoCmd([output], [path], "copy", part_of_all)
outputs.append(output)
- self.WriteLn("%s = %s" % (variable, " ".join(QuoteSpaces(o) for o in outputs)))
+ self.WriteLn(
+ "{} = {}".format(variable, " ".join(QuoteSpaces(o) for o in outputs))
+ )
extra_outputs.append("$(%s)" % variable)
self.WriteLn()
@@ -1278,15 +1281,15 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
precompiled_header,
):
"""Write Makefile code for any 'sources' from the gyp input.
- These are source files necessary to build the current target.
-
- configs, deps, sources: input from gyp.
- extra_outputs: a list of extra outputs this action should be dependent on;
- used to serialize action/rules before compilation
- extra_link_deps: a list that will be filled in with any outputs of
- compilation (to be used in link lines)
- part_of_all: flag indicating this target is part of 'all'
- """
+ These are source files necessary to build the current target.
+
+ configs, deps, sources: input from gyp.
+ extra_outputs: a list of extra outputs this action should be dependent on;
+ used to serialize action/rules before compilation
+ extra_link_deps: a list that will be filled in with any outputs of
+ compilation (to be used in link lines)
+ part_of_all: flag indicating this target is part of 'all'
+ """
# Write configuration-specific variables for CFLAGS, etc.
for configname in sorted(configs.keys()):
@@ -1300,8 +1303,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
if self.flavor == "mac":
cflags = self.xcode_settings.GetCflags(
- configname,
- arch=config.get('xcode_configuration_platform')
+ configname, arch=config.get("xcode_configuration_platform")
)
cflags_c = self.xcode_settings.GetCflagsC(configname)
cflags_cc = self.xcode_settings.GetCflagsCC(configname)
@@ -1364,7 +1366,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
if pchdeps:
self.WriteLn("# Dependencies from obj files to their precompiled headers")
for source, obj, gch in pchdeps:
- self.WriteLn("%s: %s" % (obj, gch))
+ self.WriteLn(f"{obj}: {gch}")
self.WriteLn("# End precompiled header dependencies")
if objs:
@@ -1436,12 +1438,12 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
"mm": "GYP_PCH_OBJCXXFLAGS",
}[lang]
self.WriteLn(
- "%s: %s := %s " % (gch, var_name, lang_flag) + "$(DEFS_$(BUILDTYPE)) "
+ f"{gch}: {var_name} := {lang_flag} " + "$(DEFS_$(BUILDTYPE)) "
"$(INCS_$(BUILDTYPE)) "
"$(CFLAGS_$(BUILDTYPE)) " + extra_flags
)
- self.WriteLn("%s: %s FORCE_DO_CMD" % (gch, input))
+ self.WriteLn(f"{gch}: {input} FORCE_DO_CMD")
self.WriteLn("\t@$(call do_cmd,pch_%s,1)" % lang)
self.WriteLn("")
assert " " not in gch, "Spaces in gch filenames not supported (%s)" % gch
@@ -1451,9 +1453,9 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def ComputeOutputBasename(self, spec):
"""Return the 'output basename' of a gyp spec.
- E.g., the loadable module 'foobar' in directory 'baz' will produce
- 'libfoobar.so'
- """
+ E.g., the loadable module 'foobar' in directory 'baz' will produce
+ 'libfoobar.so'
+ """
assert not self.is_mac_bundle
if self.flavor == "mac" and self.type in (
@@ -1510,9 +1512,9 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def ComputeOutput(self, spec):
"""Return the 'output' (full output path) of a gyp spec.
- E.g., the loadable module 'foobar' in directory 'baz' will produce
- '$(obj)/baz/libfoobar.so'
- """
+ E.g., the loadable module 'foobar' in directory 'baz' will produce
+ '$(obj)/baz/libfoobar.so'
+ """
assert not self.is_mac_bundle
path = os.path.join("$(obj)." + self.toolset, self.path)
@@ -1535,10 +1537,10 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def ComputeDeps(self, spec):
"""Compute the dependencies of a gyp spec.
- Returns a tuple (deps, link_deps), where each is a list of
- filenames that will need to be put in front of make for either
- building (deps) or linking (link_deps).
- """
+ Returns a tuple (deps, link_deps), where each is a list of
+ filenames that will need to be put in front of make for either
+ building (deps) or linking (link_deps).
+ """
deps = []
link_deps = []
if "dependencies" in spec:
@@ -1571,11 +1573,11 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
):
"""Write Makefile code to produce the final target of the gyp spec.
- spec, configs: input from gyp.
- deps, link_deps: dependency lists; see ComputeDeps()
- extra_outputs: any extra outputs that our target should depend on
- part_of_all: flag indicating this target is part of 'all'
- """
+ spec, configs: input from gyp.
+ deps, link_deps: dependency lists; see ComputeDeps()
+ extra_outputs: any extra outputs that our target should depend on
+ part_of_all: flag indicating this target is part of 'all'
+ """
self.WriteLn("### Rules for final target.")
@@ -1597,7 +1599,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
configname,
generator_default_variables["PRODUCT_DIR"],
lambda p: Sourceify(self.Absolutify(p)),
- arch=config.get('xcode_configuration_platform')
+ arch=config.get("xcode_configuration_platform"),
)
# TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
@@ -1860,7 +1862,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
and self.toolset == "target"
):
# On mac, products are created in install_path immediately.
- assert install_path == self.output, "%s != %s" % (
+ assert install_path == self.output, "{} != {}".format(
install_path,
self.output,
)
@@ -1897,24 +1899,24 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def WriteList(self, value_list, variable=None, prefix="", quoter=QuoteIfNecessary):
"""Write a variable definition that is a list of values.
- E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
- foo = blaha blahb
- but in a pretty-printed style.
- """
+ E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
+ foo = blaha blahb
+ but in a pretty-printed style.
+ """
values = ""
if value_list:
value_list = [quoter(prefix + value) for value in value_list]
values = " \\\n\t" + " \\\n\t".join(value_list)
- self.fp.write("%s :=%s\n\n" % (variable, values))
+ self.fp.write(f"{variable} :={values}\n\n")
def WriteDoCmd(
self, outputs, inputs, command, part_of_all, comment=None, postbuilds=False
):
"""Write a Makefile rule that uses do_cmd.
- This makes the outputs dependent on the command line that was run,
- as well as support the V= make command line flag.
- """
+ This makes the outputs dependent on the command line that was run,
+ as well as support the V= make command line flag.
+ """
suffix = ""
if postbuilds:
assert "," not in command
@@ -1922,7 +1924,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.WriteMakeRule(
outputs,
inputs,
- actions=["$(call do_cmd,%s%s)" % (command, suffix)],
+ actions=[f"$(call do_cmd,{command}{suffix})"],
comment=comment,
command=command,
force=True,
@@ -1947,18 +1949,18 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
):
"""Write a Makefile rule, with some extra tricks.
- outputs: a list of outputs for the rule (note: this is not directly
- supported by make; see comments below)
- inputs: a list of inputs for the rule
- actions: a list of shell commands to run for the rule
- comment: a comment to put in the Makefile above the rule (also useful
- for making this Python script's code self-documenting)
- order_only: if true, makes the dependency order-only
- force: if true, include FORCE_DO_CMD as an order-only dep
- phony: if true, the rule does not actually generate the named output, the
- output is just a name to run the rule
- command: (optional) command name to generate unambiguous labels
- """
+ outputs: a list of outputs for the rule (note: this is not directly
+ supported by make; see comments below)
+ inputs: a list of inputs for the rule
+ actions: a list of shell commands to run for the rule
+ comment: a comment to put in the Makefile above the rule (also useful
+ for making this Python script's code self-documenting)
+ order_only: if true, makes the dependency order-only
+ force: if true, include FORCE_DO_CMD as an order-only dep
+ phony: if true, the rule does not actually generate the named output, the
+ output is just a name to run the rule
+ command: (optional) command name to generate unambiguous labels
+ """
outputs = [QuoteSpaces(o) for o in outputs]
inputs = [QuoteSpaces(i) for i in inputs]
@@ -1974,11 +1976,11 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# Order only rule: Just write a simple rule.
# TODO(evanm): just make order_only a list of deps instead of this hack.
self.WriteLn(
- "%s: | %s%s" % (" ".join(outputs), " ".join(inputs), force_append)
+ "{}: | {}{}".format(" ".join(outputs), " ".join(inputs), force_append)
)
elif len(outputs) == 1:
# Regular rule, one output: Just write a simple rule.
- self.WriteLn("%s: %s%s" % (outputs[0], " ".join(inputs), force_append))
+ self.WriteLn("{}: {}{}".format(outputs[0], " ".join(inputs), force_append))
else:
# Regular rule, more than one output: Multiple outputs are tricky in
# make. We will write three rules:
@@ -1994,10 +1996,12 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
(command or self.target).encode("utf-8")
).hexdigest()
intermediate = "%s.intermediate" % cmddigest
- self.WriteLn("%s: %s" % (" ".join(outputs), intermediate))
+ self.WriteLn("{}: {}".format(" ".join(outputs), intermediate))
self.WriteLn("\t%s" % "@:")
- self.WriteLn("%s: %s" % (".INTERMEDIATE", intermediate))
- self.WriteLn("%s: %s%s" % (intermediate, " ".join(inputs), force_append))
+ self.WriteLn("{}: {}".format(".INTERMEDIATE", intermediate))
+ self.WriteLn(
+ "{}: {}{}".format(intermediate, " ".join(inputs), force_append)
+ )
actions.insert(0, "$(call do_cmd,touch)")
if actions:
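This is the three-rule trick for multi-output rules mentioned in the comment above: every output depends on one hash-named stamp, the stamp is declared `.INTERMEDIATE`, and only the stamp's rule runs the real work. A sketch of the generated text (sha256 is an assumption; the hash constructor is truncated by the hunk):

    import hashlib

    def multi_output_rules(outputs, inputs, command_name, actions):
        digest = hashlib.sha256(command_name.encode("utf-8")).hexdigest()
        intermediate = "%s.intermediate" % digest
        lines = [
            "{}: {}".format(" ".join(outputs), intermediate),  # 1) outputs -> stamp
            "\t@:",                                            #    with a no-op recipe
            ".INTERMEDIATE: " + intermediate,                  # 2) make removes the stamp
            "{}: {}".format(intermediate, " ".join(inputs)),   # 3) stamp rule does the work
        ]
        lines += ["\t" + a for a in ["$(call do_cmd,touch)"] + actions]
        return "\n".join(lines)

    print(multi_output_rules(["a.h", "a.cc"], ["a.proto"], "protoc_a", ["protoc a.proto"]))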
@@ -2008,16 +2012,16 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
"""Write a set of LOCAL_XXX definitions for Android NDK.
- These variable definitions will be used by Android NDK but do nothing for
- non-Android applications.
+ These variable definitions will be used by Android NDK but do nothing for
+ non-Android applications.
- Arguments:
- module_name: Android NDK module name, which must be unique among all
- module names.
- all_sources: A list of source files (will be filtered by Compilable).
- link_deps: A list of link dependencies, which must be sorted in
- the order from dependencies to dependents.
- """
+ Arguments:
+ module_name: Android NDK module name, which must be unique among all
+ module names.
+ all_sources: A list of source files (will be filtered by Compilable).
+ link_deps: A list of link dependencies, which must be sorted in
+ the order from dependencies to dependents.
+ """
if self.type not in ("executable", "shared_library", "static_library"):
return
@@ -2129,14 +2133,14 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# export foo := a\ b
# it does not -- the backslash is written to the env as literal character.
# So don't escape spaces in |env[k]|.
- self.WriteLn("%s: export %s := %s" % (QuoteSpaces(target), k, v))
+ self.WriteLn(f"{QuoteSpaces(target)}: export {k} := {v}")
def Objectify(self, path):
"""Convert a path to its output directory form."""
if "$(" in path:
path = path.replace("$(obj)/", "$(obj).%s/$(TARGET)/" % self.toolset)
if "$(obj)" not in path:
- path = "$(obj).%s/$(TARGET)/%s" % (self.toolset, path)
+ path = f"$(obj).{self.toolset}/$(TARGET)/{path}"
return path
def Pchify(self, path, lang):
@@ -2144,14 +2148,14 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
path = self.Absolutify(path)
if "$(" in path:
path = path.replace(
- "$(obj)/", "$(obj).%s/$(TARGET)/pch-%s" % (self.toolset, lang)
+ "$(obj)/", f"$(obj).{self.toolset}/$(TARGET)/pch-{lang}"
)
return path
- return "$(obj).%s/$(TARGET)/pch-%s/%s" % (self.toolset, lang, path)
+ return f"$(obj).{self.toolset}/$(TARGET)/pch-{lang}/{path}"
def Absolutify(self, path):
"""Convert a subdirectory-relative path into a base-relative path.
- Skips over paths that contain variables."""
+ Skips over paths that contain variables."""
if "$(" in path:
# Don't call normpath in this case, as it might collapse the
# path too aggressively if it features '..'. However it's still
@@ -2219,7 +2223,7 @@ def PerformBuild(data, configurations, params):
if options.toplevel_dir and options.toplevel_dir != ".":
arguments += "-C", options.toplevel_dir
arguments.append("BUILDTYPE=" + config)
- print("Building [%s]: %s" % (config, arguments))
+ print(f"Building [{config}]: {arguments}")
subprocess.check_call(arguments)
@@ -2253,7 +2257,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# away when we add verification that all targets have the
# necessary configurations.
default_configuration = None
- toolsets = set([target_dicts[target]["toolset"] for target in target_list])
+ toolsets = {target_dicts[target]["toolset"] for target in target_list}
for target in target_list:
spec = target_dicts[target]
if spec["default_configuration"] != "Default":
@@ -2328,7 +2332,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
{
"copy_archive_args": copy_archive_arguments,
"flock": "./gyp-flock-tool flock",
- "flock_index": 2
+ "flock_index": 2,
}
)
elif flavor == "freebsd":
@@ -2362,7 +2366,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
value = "$(abspath %s)" % value
wrapper = wrappers.get(key)
if wrapper:
- value = "%s %s" % (wrapper, value)
+ value = f"{wrapper} {value}"
del wrappers[key]
if key in ("CC", "CC.host", "CXX", "CXX.host"):
make_global_settings += (
@@ -2372,10 +2376,10 @@ def GenerateOutput(target_list, target_dicts, data, params):
env_key = key.replace(".", "_") # CC.host -> CC_host
if env_key in os.environ:
value = os.environ[env_key]
- make_global_settings += " %s = %s\n" % (key, value)
+ make_global_settings += f" {key} = {value}\n"
make_global_settings += "endif\n"
else:
- make_global_settings += "%s ?= %s\n" % (key, value)
+ make_global_settings += f"{key} ?= {value}\n"
# TODO(ukai): define cmd when only wrapper is specified in
# make_global_settings.
@@ -2413,8 +2417,8 @@ def GenerateOutput(target_list, target_dicts, data, params):
this_make_global_settings = data[build_file].get("make_global_settings", [])
assert make_global_settings_array == this_make_global_settings, (
- "make_global_settings needs to be the same for all targets. %s vs. %s"
- % (this_make_global_settings, make_global_settings)
+ "make_global_settings needs to be the same for all targets "
+ f"{this_make_global_settings} vs. {make_global_settings}"
)
build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
index 32bf4746a..8308fa843 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import ntpath
import os
@@ -26,8 +25,6 @@ import gyp.MSVSVersion as MSVSVersion
from gyp.common import GypError
from gyp.common import OrderedSet
-PY3 = bytes != str
-
# Regular expression for validating Visual Studio GUIDs. If the GUID
# contains lowercase hex letters, MSVS will be fine. However,
@@ -120,9 +117,7 @@ def _GetDomainAndUserName():
call = subprocess.Popen(
["net", "config", "Workstation"], stdout=subprocess.PIPE
)
- config = call.communicate()[0]
- if PY3:
- config = config.decode("utf-8")
+ config = call.communicate()[0].decode("utf-8")
username_re = re.compile(r"^User name\s+(\S+)", re.MULTILINE)
username_match = username_re.search(config)
if username_match:
@@ -157,7 +152,7 @@ def _NormalizedSource(source):
return source
-def _FixPath(path):
+def _FixPath(path, separator="\\"):
"""Convert paths to a form that will make sense in a vcproj file.
Arguments:
@@ -173,9 +168,12 @@ def _FixPath(path):
and not _IsWindowsAbsPath(path)
):
path = os.path.join(fixpath_prefix, path)
- path = path.replace("/", "\\")
+ if separator == "\\":
+ path = path.replace("/", "\\")
path = _NormalizedSource(path)
- if path and path[-1] == "\\":
+ if separator == "/":
+ path = path.replace("\\", "/")
+ if path and path[-1] == separator:
path = path[:-1]
return path
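
The new separator parameter lets callers ask _FixPath for forward slashes instead of vcproj-style backslashes. A simplified standalone model of the logic in this hunk (the fixpath_prefix handling is omitted):

    def fix_path(path, separator="\\"):
        # Normalize to the requested separator, then strip a trailing one.
        if separator == "\\":
            path = path.replace("/", "\\")
        else:
            path = path.replace("\\", "/")
        if path and path[-1] == separator:
            path = path[:-1]
        return path

    assert fix_path("a/b/c/") == "a\\b\\c"
    assert fix_path("a\\b\\c\\", "/") == "a/b/c"
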
@@ -190,9 +188,9 @@ def _IsWindowsAbsPath(path):
return path.startswith("c:") or path.startswith("C:")
-def _FixPaths(paths):
+def _FixPaths(paths, separator="\\"):
"""Fix each of the paths of the list."""
- return [_FixPath(i) for i in paths]
+ return [_FixPath(i, separator) for i in paths]
def _ConvertSourcesToFilterHierarchy(
@@ -319,7 +317,7 @@ def _ConfigBaseName(config_name, platform_name):
def _ConfigFullName(config_name, config_data):
platform_name = _ConfigPlatform(config_data)
- return "%s|%s" % (_ConfigBaseName(config_name, platform_name), platform_name)
+ return f"{_ConfigBaseName(config_name, platform_name)}|{platform_name}"
def _ConfigWindowsTargetPlatformVersion(config_data, version):
@@ -340,7 +338,7 @@ def _ConfigWindowsTargetPlatformVersion(config_data, version):
# Find a matching entry in sdk_dir\include.
expected_sdk_dir = r"%s\include" % sdk_dir
names = sorted(
- [
+ (
x
for x in (
os.listdir(expected_sdk_dir)
@@ -348,7 +346,7 @@ def _ConfigWindowsTargetPlatformVersion(config_data, version):
else []
)
if x.startswith(version)
- ],
+ ),
reverse=True,
)
if names:
@@ -428,7 +426,9 @@ def _BuildCommandLineForRuleRaw(
# for arguments like "--arg=path" or "/opt:path".
# If the argument starts with a slash or dash, it's probably a command line
# switch
- arguments = [i if (i[:1] in "/-") else _FixPath(i) for i in cmd[1:]]
+ # Return the path with forward slashes because the command using it might
+ # not support backslashes.
+ arguments = [i if (i[:1] in "/-") else _FixPath(i, "/") for i in cmd[1:]]
arguments = [i.replace("$(InputDir)", "%INPUTDIR%") for i in arguments]
arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments]
if quote_cmd:
@@ -620,7 +620,7 @@ def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
spec: the project dict
options: global generator options
"""
- rules_filename = "%s%s.rules" % (spec["target_name"], options.suffix)
+ rules_filename = "{}{}.rules".format(spec["target_name"], options.suffix)
rules_file = MSVSToolFile.Writer(
os.path.join(output_dir, rules_filename), spec["target_name"]
)
@@ -666,7 +666,7 @@ def _GenerateExternalRules(rules, output_dir, spec, sources, options, actions_to
options: global generator options
actions_to_add: The list of actions we will add to.
"""
- filename = "%s_rules%s.mk" % (spec["target_name"], options.suffix)
+ filename = "{}_rules{}.mk".format(spec["target_name"], options.suffix)
mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
# Find cygwin style versions of some paths.
mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
@@ -709,7 +709,7 @@ def _GenerateExternalRules(rules, output_dir, spec, sources, options, actions_to
cmd = ['"%s"' % i for i in cmd]
cmd = " ".join(cmd)
# Add it to the makefile.
- mk_file.write("%s: %s\n" % (" ".join(outputs), " ".join(inputs)))
+ mk_file.write("{}: {}\n".format(" ".join(outputs), " ".join(inputs)))
mk_file.write("\t%s\n\n" % cmd)
# Close up the file.
mk_file.close()
@@ -1576,7 +1576,7 @@ def _AdjustSourcesAndConvertToFilterHierarchy(
if version.UsesVcxproj():
while (
all([isinstance(s, MSVSProject.Filter) for s in sources])
- and len(set([s.name for s in sources])) == 1
+ and len({s.name for s in sources}) == 1
):
assert all([len(s.contents) == 1 for s in sources])
sources = [s.contents[0] for s in sources]
@@ -1782,8 +1782,8 @@ def _GetCopies(spec):
base_dir = posixpath.split(src_bare)[0]
outer_dir = posixpath.split(src_bare)[1]
fixed_dst = _FixPath(dst)
- full_dst = '"%s\\%s\\"' % (fixed_dst, outer_dir)
- cmd = 'mkdir %s 2>nul & cd "%s" && xcopy /e /f /y "%s" %s' % (
+ full_dst = f'"{fixed_dst}\\{outer_dir}\\"'
+ cmd = 'mkdir {} 2>nul & cd "{}" && xcopy /e /f /y "{}" {}'.format(
full_dst,
_FixPath(base_dir),
outer_dir,
@@ -1794,17 +1794,17 @@ def _GetCopies(spec):
[src],
["dummy_copies", dst],
cmd,
- "Copying %s to %s" % (src, fixed_dst),
+ f"Copying {src} to {fixed_dst}",
)
)
else:
fix_dst = _FixPath(cpy["destination"])
- cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
+ cmd = 'mkdir "{}" 2>nul & set ERRORLEVEL=0 & copy /Y "{}" "{}"'.format(
fix_dst,
_FixPath(src),
_FixPath(dst),
)
- copies.append(([src], [dst], cmd, "Copying %s to %s" % (src, fix_dst)))
+ copies.append(([src], [dst], cmd, f"Copying {src} to {fix_dst}"))
return copies
@@ -1904,12 +1904,12 @@ def _GetPlatformOverridesOfProject(spec):
for config_name, c in spec["configurations"].items():
config_fullname = _ConfigFullName(config_name, c)
platform = c.get("msvs_target_platform", _ConfigPlatform(c))
- fixed_config_fullname = "%s|%s" % (
+ fixed_config_fullname = "{}|{}".format(
_ConfigBaseName(config_name, _ConfigPlatform(c)),
platform,
)
if spec["toolset"] == "host" and generator_supports_multiple_toolsets:
- fixed_config_fullname = "%s|x64" % (config_name,)
+ fixed_config_fullname = f"{config_name}|x64"
config_platform_overrides[config_fullname] = fixed_config_fullname
return config_platform_overrides
@@ -2062,7 +2062,7 @@ def PerformBuild(data, configurations, params):
for config in configurations:
arguments = [devenv, sln_path, "/Build", config]
- print("Building [%s]: %s" % (config, arguments))
+ print(f"Building [{config}]: {arguments}")
subprocess.check_call(arguments)
@@ -2248,7 +2248,7 @@ def _AppendFiltersForMSBuild(
if not parent_filter_name:
filter_name = source.name
else:
- filter_name = "%s\\%s" % (parent_filter_name, source.name)
+ filter_name = f"{parent_filter_name}\\{source.name}"
# Add the filter to the group.
filter_group.append(
[
@@ -2376,7 +2376,7 @@ def _GenerateRulesForMSBuild(
_AdjustSourcesForRules(rules, sources, excluded_sources, True)
-class MSBuildRule(object):
+class MSBuildRule:
"""Used to store information used to generate an MSBuild rule.
Attributes:
@@ -2575,7 +2575,7 @@ def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
"Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
"'true'" % (rule.tlog, rule.tlog),
"File": "$(IntDir)$(ProjectName).read.1.tlog",
- "Lines": "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog),
+ "Lines": f"^%({rule.tlog}.Source);%({rule.tlog}.Inputs)",
},
]
command_and_input_section = [
@@ -2921,7 +2921,7 @@ def _GetMSBuildProjectConfigurations(configurations, spec):
group = ["ItemGroup", {"Label": "ProjectConfigurations"}]
for (name, settings) in sorted(configurations.items()):
configuration, platform = _GetConfigurationAndPlatform(name, settings, spec)
- designation = "%s|%s" % (configuration, platform)
+ designation = f"{configuration}|{platform}"
group.append(
[
"ProjectConfiguration",
@@ -3286,13 +3286,11 @@ def _GetMSBuildPropertyGroup(spec, label, properties):
# Self references are ignored. Self reference is used in a few places to
# append to the default value. I.e. PATH=$(PATH);other_path
edges.update(
- set(
- [
- v
- for v in MSVS_VARIABLE_REFERENCE.findall(value)
- if v in properties and v != node
- ]
- )
+ {
+ v
+ for v in MSVS_VARIABLE_REFERENCE.findall(value)
+ if v in properties and v != node
+ }
)
return edges
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
index e001f417d..e80b57f06 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -8,10 +8,7 @@
import gyp.generator.msvs as msvs
import unittest
-try:
- from StringIO import StringIO # Python 2
-except ImportError:
- from io import StringIO # Python 3
+from io import StringIO
class TestSequenceFunctions(unittest.TestCase):
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
index e064bad7e..d173bf229 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import collections
import copy
@@ -20,10 +19,7 @@ import gyp.msvs_emulation
import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation
-try:
- from cStringIO import StringIO
-except ImportError:
- from io import StringIO
+from io import StringIO
from gyp.common import GetEnvironFallback
import gyp.ninja_syntax as ninja_syntax
@@ -76,7 +72,7 @@ def StripPrefix(arg, prefix):
def QuoteShellArgument(arg, flavor):
"""Quote a string such that it will be interpreted as a single argument
- by the shell."""
+ by the shell."""
# Rather than attempting to enumerate the bad shell characters, just
# allow common OK ones and quote anything else.
if re.match(r"^[a-zA-Z0-9_=.\\/-]+$", arg):
@@ -88,7 +84,7 @@ def QuoteShellArgument(arg, flavor):
def Define(d, flavor):
"""Takes a preprocessor define and returns a -D parameter that's ninja- and
- shell-escaped."""
+ shell-escaped."""
if flavor == "win":
# cl.exe replaces literal # characters with = in preprocessor definitions for
# some reason. Octal-encode to work around that.
@@ -99,32 +95,32 @@ def Define(d, flavor):
def AddArch(output, arch):
"""Adds an arch string to an output path."""
output, extension = os.path.splitext(output)
- return "%s.%s%s" % (output, arch, extension)
+ return f"{output}.{arch}{extension}"
-class Target(object):
+class Target:
"""Target represents the paths used within a single gyp target.
- Conceptually, building a single target A is a series of steps:
+ Conceptually, building a single target A is a series of steps:
- 1) actions/rules/copies generates source/resources/etc.
- 2) compiles generates .o files
- 3) link generates a binary (library/executable)
- 4) bundle merges the above in a mac bundle
+ 1) actions/rules/copies generates source/resources/etc.
+ 2) compiles generates .o files
+ 3) link generates a binary (library/executable)
+ 4) bundle merges the above in a mac bundle
- (Any of these steps can be optional.)
+ (Any of these steps can be optional.)
- From a build ordering perspective, a dependent target B could just
- depend on the last output of this series of steps.
+ From a build ordering perspective, a dependent target B could just
+ depend on the last output of this series of steps.
- But some dependent commands sometimes need to reach inside the box.
- For example, when linking B it needs to get the path to the static
- library generated by A.
+ But some dependent commands sometimes need to reach inside the box.
+ For example, when linking B it needs to get the path to the static
+ library generated by A.
- This object stores those paths. To keep things simple, member
- variables only store concrete paths to single files, while methods
- compute derived values like "the last output of the target".
- """
+ This object stores those paths. To keep things simple, member
+ variables only store concrete paths to single files, while methods
+ compute derived values like "the last output of the target".
+ """
def __init__(self, type):
# Gyp type ("static_library", etc.) of this target.
@@ -163,7 +159,7 @@ class Target(object):
def UsesToc(self, flavor):
"""Return true if the target should produce a restat rule based on a TOC
- file."""
+ file."""
# For bundles, the .TOC should be produced for the binary, not for
# FinalOutput(). But the naive approach would put the TOC file into the
# bundle, so don't do this for bundles for now.
@@ -173,19 +169,19 @@ class Target(object):
def PreActionInput(self, flavor):
"""Return the path, if any, that should be used as a dependency of
- any dependent action step."""
+ any dependent action step."""
if self.UsesToc(flavor):
return self.FinalOutput() + ".TOC"
return self.FinalOutput() or self.preaction_stamp
def PreCompileInput(self):
"""Return the path, if any, that should be used as a dependency of
- any dependent compile step."""
+ any dependent compile step."""
return self.actions_stamp or self.precompile_stamp
def FinalOutput(self):
"""Return the last output of the target, which depends on all prior
- steps."""
+ steps."""
return self.bundle or self.binary or self.actions_stamp
@@ -214,7 +210,7 @@ class Target(object):
# to the input file name as well as the output target name.
-class NinjaWriter(object):
+class NinjaWriter:
def __init__(
self,
hash_for_rules,
@@ -228,11 +224,11 @@ class NinjaWriter(object):
toplevel_dir=None,
):
"""
- base_dir: path from source root to directory containing this gyp file,
- by gyp semantics, all input paths are relative to this
- build_dir: path from source root to build output
- toplevel_dir: path to the toplevel directory
- """
+ base_dir: path from source root to directory containing this gyp file,
+ by gyp semantics, all input paths are relative to this
+ build_dir: path from source root to build output
+ toplevel_dir: path to the toplevel directory
+ """
self.hash_for_rules = hash_for_rules
self.target_outputs = target_outputs
@@ -263,10 +259,10 @@ class NinjaWriter(object):
def ExpandSpecial(self, path, product_dir=None):
"""Expand specials like $!PRODUCT_DIR in |path|.
- If |product_dir| is None, assumes the cwd is already the product
- dir. Otherwise, |product_dir| is the relative path to the product
- dir.
- """
+ If |product_dir| is None, assumes the cwd is already the product
+ dir. Otherwise, |product_dir| is the relative path to the product
+ dir.
+ """
PRODUCT_DIR = "$!PRODUCT_DIR"
if PRODUCT_DIR in path:
@@ -303,9 +299,9 @@ class NinjaWriter(object):
def GypPathToNinja(self, path, env=None):
"""Translate a gyp path to a ninja path, optionally expanding environment
- variable references in |path| with |env|.
+ variable references in |path| with |env|.
- See the above discourse on path conversions."""
+ See the above discourse on path conversions."""
if env:
if self.flavor == "mac":
path = gyp.xcode_emulation.ExpandEnvVars(path, env)
@@ -324,11 +320,11 @@ class NinjaWriter(object):
def GypPathToUniqueOutput(self, path, qualified=True):
"""Translate a gyp path to a ninja path for writing output.
- If qualified is True, qualify the resulting filename with the name
- of the target. This is necessary when e.g. compiling the same
- path twice for two separate output targets.
+ If qualified is True, qualify the resulting filename with the name
+ of the target. This is necessary when e.g. compiling the same
+ path twice for two separate output targets.
- See the above discourse on path conversions."""
+ See the above discourse on path conversions."""
path = self.ExpandSpecial(path)
assert not path.startswith("$"), path
@@ -361,9 +357,9 @@ class NinjaWriter(object):
def WriteCollapsedDependencies(self, name, targets, order_only=None):
"""Given a list of targets, return a path for a single file
- representing the result of building all the targets or None.
+ representing the result of building all the targets or None.
- Uses a stamp file if necessary."""
+ Uses a stamp file if necessary."""
assert targets == [item for item in targets if item], targets
if len(targets) == 0:
@@ -377,14 +373,14 @@ class NinjaWriter(object):
def _SubninjaNameForArch(self, arch):
output_file_base = os.path.splitext(self.output_file_name)[0]
- return "%s.%s.ninja" % (output_file_base, arch)
+ return f"{output_file_base}.{arch}.ninja"
def WriteSpec(self, spec, config_name, generator_flags):
"""The main entry point for NinjaWriter: write the build rules for a spec.
- Returns a Target object, which represents the output paths for this spec.
- Returns None if there are no outputs (e.g. a settings-only 'none' type
- target)."""
+ Returns a Target object, which represents the output paths for this spec.
+ Returns None if there are no outputs (e.g. a settings-only 'none' type
+ target)."""
self.config_name = config_name
self.name = spec["target_name"]
@@ -418,20 +414,17 @@ class NinjaWriter(object):
if self.flavor == "mac":
self.archs = self.xcode_settings.GetActiveArchs(config_name)
if len(self.archs) > 1:
- self.arch_subninjas = dict(
- (
- arch,
- ninja_syntax.Writer(
- OpenOutput(
- os.path.join(
- self.toplevel_build, self._SubninjaNameForArch(arch)
- ),
- "w",
- )
- ),
+ self.arch_subninjas = {
+ arch: ninja_syntax.Writer(
+ OpenOutput(
+ os.path.join(
+ self.toplevel_build, self._SubninjaNameForArch(arch)
+ ),
+ "w",
+ )
)
for arch in self.archs
- )
+ }
# Compute predepends for all rules.
# actions_depends is the dependencies this target depends on before running
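
The same modernization applies to dicts: dict() over generated (key, value) tuples becomes a dict comprehension, dropping a level of parentheses. Schematically, with placeholder data:

    archs = ["x86_64", "arm64"]
    old = dict((arch, arch + ".ninja") for arch in archs)
    new = {arch: arch + ".ninja" for arch in archs}
    assert old == new
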
@@ -558,7 +551,7 @@ class NinjaWriter(object):
def _WinIdlRule(self, source, prebuild, outputs):
"""Handle the implicit VS .idl rule for one source file. Fills |outputs|
- with files that are generated."""
+ with files that are generated."""
outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
source, self.config_name
)
@@ -595,7 +588,7 @@ class NinjaWriter(object):
self, spec, extra_sources, prebuild, mac_bundle_depends
):
"""Write out the Actions, Rules, and Copies steps. Return a path
- representing the outputs of these steps."""
+ representing the outputs of these steps."""
outputs = []
if self.is_mac_bundle:
mac_bundle_resources = spec.get("mac_bundle_resources", [])[:]
@@ -638,16 +631,16 @@ class NinjaWriter(object):
def GenerateDescription(self, verb, message, fallback):
"""Generate and return a description of a build step.
- |verb| is the short summary, e.g. ACTION or RULE.
- |message| is a hand-written description, or None if not available.
- |fallback| is the gyp-level name of the step, usable as a fallback.
- """
+ |verb| is the short summary, e.g. ACTION or RULE.
+ |message| is a hand-written description, or None if not available.
+ |fallback| is the gyp-level name of the step, usable as a fallback.
+ """
if self.toolset != "target":
verb += "(%s)" % self.toolset
if message:
- return "%s %s" % (verb, self.ExpandSpecial(message))
+ return f"{verb} {self.ExpandSpecial(message)}"
else:
- return "%s %s: %s" % (verb, self.name, fallback)
+ return f"{verb} {self.name}: {fallback}"
def WriteActions(
self, actions, extra_sources, prebuild, extra_mac_bundle_resources
@@ -657,14 +650,14 @@ class NinjaWriter(object):
all_outputs = []
for action in actions:
# First write out a rule for the action.
- name = "%s_%s" % (action["action_name"], self.hash_for_rules)
+ name = "{}_{}".format(action["action_name"], self.hash_for_rules)
description = self.GenerateDescription(
"ACTION", action.get("message", None), name
)
- is_cygwin = (
- self.msvs_settings.IsRuleRunUnderCygwin(action)
+ win_shell_flags = (
+ self.msvs_settings.GetRuleShellFlags(action)
if self.flavor == "win"
- else False
+ else None
)
args = action["action"]
depfile = action.get("depfile", None)
@@ -672,7 +665,7 @@ class NinjaWriter(object):
depfile = self.ExpandSpecial(depfile, self.base_to_build)
pool = "console" if int(action.get("ninja_use_console", 0)) else None
rule_name, _ = self.WriteNewNinjaRule(
- name, args, description, is_cygwin, env, pool, depfile=depfile
+ name, args, description, win_shell_flags, env, pool, depfile=depfile
)
inputs = [self.GypPathToNinja(i, env) for i in action["inputs"]]
@@ -706,7 +699,7 @@ class NinjaWriter(object):
continue
# First write out a rule for the rule action.
- name = "%s_%s" % (rule["rule_name"], self.hash_for_rules)
+ name = "{}_{}".format(rule["rule_name"], self.hash_for_rules)
args = rule["action"]
description = self.GenerateDescription(
@@ -714,14 +707,14 @@ class NinjaWriter(object):
rule.get("message", None),
("%s " + generator_default_variables["RULE_INPUT_PATH"]) % name,
)
- is_cygwin = (
- self.msvs_settings.IsRuleRunUnderCygwin(rule)
+ win_shell_flags = (
+ self.msvs_settings.GetRuleShellFlags(rule)
if self.flavor == "win"
- else False
+ else None
)
pool = "console" if int(rule.get("ninja_use_console", 0)) else None
rule_name, args = self.WriteNewNinjaRule(
- name, args, description, is_cygwin, env, pool
+ name, args, description, win_shell_flags, env, pool
)
# TODO: if the command references the outputs directly, we should
@@ -731,7 +724,7 @@ class NinjaWriter(object):
# must vary per source file.
# Compute the list of variables we'll need to provide.
special_locals = ("source", "root", "dirname", "ext", "name")
- needed_variables = set(["source"])
+ needed_variables = {"source"}
for argument in args:
for var in special_locals:
if "${%s}" % var in argument:
@@ -740,7 +733,7 @@ class NinjaWriter(object):
def cygwin_munge(path):
# pylint: disable=cell-var-from-loop
- if is_cygwin:
+ if win_shell_flags and win_shell_flags.cygwin:
return path.replace("\\", "/")
return path
@@ -875,7 +868,7 @@ class NinjaWriter(object):
output = self.GypPathToUniqueOutput("headers.hmap")
self.xcode_settings.header_map_path = output
all_headers = map(
- self.GypPathToNinja, filter(lambda x: x.endswith((".h")), all_sources)
+ self.GypPathToNinja, filter(lambda x: x.endswith(".h"), all_sources)
)
variables = [
("framework", framework),
@@ -925,11 +918,11 @@ class NinjaWriter(object):
def WriteMacXCassets(self, xcassets, bundle_depends):
"""Writes ninja edges for 'mac_bundle_resources' .xcassets files.
- This add an invocation of 'actool' via the 'mac_tool.py' helper script.
- It assumes that the assets catalogs define at least one imageset and
- thus an Assets.car file will be generated in the application resources
- directory. If this is not the case, then the build will probably be done
- at each invocation of ninja."""
+ This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
+ It assumes that the asset catalogs define at least one imageset and
+ thus an Assets.car file will be generated in the application resources
+ directory. If this is not the case, then the build will probably be done
+ at each invocation of ninja."""
if not xcassets:
return
@@ -1047,22 +1040,19 @@ class NinjaWriter(object):
spec,
)
else:
- return dict(
- (
- arch,
- self.WriteSourcesForArch(
- self.arch_subninjas[arch],
- config_name,
- config,
- sources,
- predepends,
- precompiled_header,
- spec,
- arch=arch,
- ),
+ return {
+ arch: self.WriteSourcesForArch(
+ self.arch_subninjas[arch],
+ config_name,
+ config,
+ sources,
+ predepends,
+ precompiled_header,
+ spec,
+ arch=arch,
)
for arch in self.archs
- )
+ }
def WriteSourcesForArch(
self,
@@ -1231,7 +1221,7 @@ class NinjaWriter(object):
command = "cc_s"
elif (
self.flavor == "win"
- and ext == "asm"
+ and ext in ("asm", "S")
and not self.msvs_settings.HasExplicitAsmRules(spec)
):
command = "asm"
@@ -1427,7 +1417,11 @@ class NinjaWriter(object):
is_executable = spec["type"] == "executable"
# The ldflags config key is not used on mac or win. On those platforms
# linker flags are set via xcode_settings and msvs_settings, respectively.
- env_ldflags = os.environ.get("LDFLAGS", "").split()
+ if self.toolset == "target":
+ env_ldflags = os.environ.get("LDFLAGS", "").split()
+ elif self.toolset == "host":
+ env_ldflags = os.environ.get("LDFLAGS_host", "").split()
+
if self.flavor == "mac":
ldflags = self.xcode_settings.GetLdflags(
config_name,
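
Splitting the linker flags by toolset lets a cross build pass different flags to host and target links (LDFLAGS vs. LDFLAGS_host). Note the hunk only binds env_ldflags for the "target" and "host" toolsets; a defensive sketch that always binds the name might look like this (not the patch's code):

    import os
    toolset = "host"  # or "target"
    var = {"target": "LDFLAGS", "host": "LDFLAGS_host"}.get(toolset, "LDFLAGS")
    env_ldflags = os.environ.get(var, "").split()
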
@@ -1729,8 +1723,8 @@ class NinjaWriter(object):
def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
"""Returns a shell command that runs all the postbuilds, and removes
- |output| if any of them fails. If |is_command_start| is False, then the
- returned string will start with ' && '."""
+ |output| if any of them fails. If |is_command_start| is False, then the
+ returned string will start with ' && '."""
if not self.xcode_settings or spec["type"] == "none" or not output:
return ""
output = QuoteShellArgument(output, self.flavor)
@@ -1776,8 +1770,8 @@ class NinjaWriter(object):
def ComputeExportEnvString(self, env):
"""Given an environment, returns a string looking like
- 'export FOO=foo; export BAR="${FOO} bar;'
- that exports |env| to the shell."""
+ 'export FOO=foo; export BAR="${FOO} bar;'
+ that exports |env| to the shell."""
export_str = []
for k, v in env:
export_str.append(
@@ -1842,7 +1836,7 @@ class NinjaWriter(object):
"shared_library",
"executable",
):
- return "%s%s%s" % (prefix, target, extension)
+ return f"{prefix}{target}{extension}"
elif type == "none":
return "%s.stamp" % target
else:
@@ -1905,12 +1899,12 @@ class NinjaWriter(object):
ninja_file.variable(var, " ".join(values))
def WriteNewNinjaRule(
- self, name, args, description, is_cygwin, env, pool, depfile=None
+ self, name, args, description, win_shell_flags, env, pool, depfile=None
):
"""Write out a new ninja "rule" statement for a given command.
- Returns the name of the new rule, and a copy of |args| with variables
- expanded."""
+ Returns the name of the new rule, and a copy of |args| with variables
+ expanded."""
if self.flavor == "win":
args = [
@@ -1952,13 +1946,14 @@ class NinjaWriter(object):
if self.flavor == "win":
rspfile = rule_name + ".$unique_name.rsp"
# The cygwin case handles this inside the bash sub-shell.
- run_in = "" if is_cygwin else " " + self.build_to_base
- if is_cygwin:
+ run_in = "" if win_shell_flags.cygwin else " " + self.build_to_base
+ if win_shell_flags.cygwin:
rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
args, self.build_to_base
)
else:
- rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args)
+ rspfile_content = gyp.msvs_emulation.EncodeRspFileList(
+ args, win_shell_flags.quote)
command = (
"%s gyp-win-tool action-wrapper $arch " % sys.executable
+ rspfile
@@ -2147,7 +2142,7 @@ def GetDefaultConcurrentLinks():
def _GetWinLinkRuleNameSuffix(embed_manifest):
"""Returns the suffix used to select an appropriate linking rule depending on
- whether the manifest embedding is enabled."""
+ whether the manifest embedding is enabled."""
return "_embed" if embed_manifest else ""
@@ -2395,7 +2390,6 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
)
if flavor == "win":
master_ninja.variable("ld_host", ld_host)
- master_ninja.variable("ldxx_host", ldxx_host)
else:
master_ninja.variable(
"ld_host", CommandWithWrapper("LINK", wrappers, ld_host)
@@ -2538,10 +2532,12 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
"solink",
description="SOLINK $lib",
restat=True,
- command=mtime_preserving_solink_base % {"suffix": "@$link_file_list"}, # noqa: E501
+ command=mtime_preserving_solink_base
+ % {"suffix": "@$link_file_list"}, # noqa: E501
rspfile="$link_file_list",
- rspfile_content=("-Wl,--whole-archive $in $solibs -Wl,"
- "--no-whole-archive $libs"),
+ rspfile_content=(
+ "-Wl,--whole-archive $in $solibs -Wl," "--no-whole-archive $libs"
+ ),
pool="link_pool",
)
master_ninja.rule(
@@ -2798,8 +2794,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
this_make_global_settings = data[build_file].get("make_global_settings", [])
assert make_global_settings == this_make_global_settings, (
- "make_global_settings needs to be the same for all targets. %s vs. %s"
- % (this_make_global_settings, make_global_settings)
+ "make_global_settings needs to be the same for all targets. "
+ f"{this_make_global_settings} vs. {make_global_settings}"
)
spec = target_dicts[qualified_target]
@@ -2891,7 +2887,7 @@ def PerformBuild(data, configurations, params):
for config in configurations:
builddir = os.path.join(options.toplevel_dir, "out", config)
arguments = ["ninja", "-C", builddir]
- print("Building [%s]: %s" % (config, arguments))
+ print(f"Building [{config}]: {arguments}")
subprocess.check_call(arguments)
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
index abadcd982..7d180685b 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
index 9e7e99e9e..2f4d17e51 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import filecmp
import gyp.common
@@ -110,7 +109,7 @@ def CreateXCConfigurationList(configuration_names):
return xccl
-class XcodeProject(object):
+class XcodeProject:
def __init__(self, gyp_path, path, build_file_dict):
self.gyp_path = gyp_path
self.path = path
@@ -613,7 +612,7 @@ def PerformBuild(data, configurations, params):
for config in configurations:
arguments = ["xcodebuild", "-project", xcodeproj_path]
arguments += ["-configuration", config]
- print("Building [%s]: %s" % (config, arguments))
+ print(f"Building [{config}]: {arguments}")
subprocess.check_call(arguments)
@@ -1072,7 +1071,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# TODO(mark): There's a possibility for collision here. Consider
# target "t" rule "A_r" and target "t_A" rule "r".
makefile_name = "%s.make" % re.sub(
- "[^a-zA-Z0-9_]", "_", "%s_%s" % (target_name, rule["rule_name"])
+ "[^a-zA-Z0-9_]", "_", "{}_{}".format(target_name, rule["rule_name"])
)
makefile_path = os.path.join(
xcode_projects[build_file].path, makefile_name
@@ -1102,7 +1101,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
eol = ""
else:
eol = " \\"
- makefile.write(" %s%s\n" % (concrete_output, eol))
+ makefile.write(f" {concrete_output}{eol}\n")
for (rule_source, concrete_outputs, message, action) in zip(
rule["rule_sources"],
@@ -1123,7 +1122,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
bol = ""
else:
bol = " "
- makefile.write("%s%s \\\n" % (bol, concrete_output))
+ makefile.write(f"{bol}{concrete_output} \\\n")
concrete_output_dir = posixpath.dirname(concrete_output)
if (
@@ -1143,7 +1142,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
eol = ""
else:
eol = " \\"
- makefile.write(" %s%s\n" % (prerequisite, eol))
+ makefile.write(f" {prerequisite}{eol}\n")
# Make sure that output directories exist before executing the rule
# action.
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
index 51fbca6a2..49772d1f4 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/input.py b/node_modules/node-gyp/gyp/pylib/gyp/input.py
index 5504390c0..354958bfb 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/input.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/input.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import print_function
import ast
@@ -21,8 +20,6 @@ from distutils.version import StrictVersion
from gyp.common import GypError
from gyp.common import OrderedSet
-PY3 = bytes != str
-
# A list of types that are treated as linkable.
linkable_types = [
"executable",
@@ -228,17 +225,9 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, is_target, check
return data[build_file_path]
if os.path.exists(build_file_path):
- # Open the build file for read ('r') with universal-newlines mode ('U')
- # to make sure platform specific newlines ('\r\n' or '\r') are converted to '\n'
- # which otherwise will fail eval()
- if sys.platform == "zos":
- # On z/OS, universal-newlines mode treats the file as an ascii file.
- # But since node-gyp produces ebcdic files, do not use that mode.
- build_file_contents = open(build_file_path, "r").read()
- else:
- build_file_contents = open(build_file_path, "rU").read()
+ build_file_contents = open(build_file_path, encoding='utf-8').read()
else:
- raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
+ raise GypError(f"{build_file_path} not found (cwd: {os.getcwd()})")
build_file_data = None
try:
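
Replacing the Python 2 'rU' mode with an explicit encoding keeps the read deterministic across platforms (text mode still translates newlines). One caveat: the one-liner leaves closing the file to the garbage collector; an equivalent context-manager form, shown purely as an alternative sketch with the same build_file_path variable, closes it eagerly:

    with open(build_file_path, encoding="utf-8") as f:
        build_file_contents = f.read()
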
@@ -567,7 +556,7 @@ class ParallelProcessingError(Exception):
pass
-class ParallelState(object):
+class ParallelState:
"""Class to keep track of state when processing input files in parallel.
If build files are loaded in parallel, use this to keep track of
@@ -987,9 +976,8 @@ def ExpandVariables(input, phase, variables, build_file):
)
p_stdout, p_stderr = p.communicate("")
- if PY3:
- p_stdout = p_stdout.decode("utf-8")
- p_stderr = p_stderr.decode("utf-8")
+ p_stdout = p_stdout.decode("utf-8")
+ p_stderr = p_stderr.decode("utf-8")
if p.wait() != 0 or p_stderr:
sys.stderr.write(p_stderr)
@@ -1219,7 +1207,7 @@ def EvalSingleCondition(cond_expr, true_dict, false_dict, phase, variables, buil
except NameError as e:
gyp.common.ExceptionAppend(
e,
- "while evaluating condition '%s' in %s" % (cond_expr_expanded, build_file),
+ f"while evaluating condition '{cond_expr_expanded}' in {build_file}",
)
raise GypError(e)
@@ -1675,7 +1663,7 @@ def RemoveLinkDependenciesFromNoneTargets(targets):
)
-class DependencyGraphNode(object):
+class DependencyGraphNode:
"""
Attributes:
@@ -2252,7 +2240,7 @@ def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
# Make membership testing of hashables in |to| (in particular, strings)
# faster.
- hashable_to_set = set(x for x in to if is_hashable(x))
+ hashable_to_set = {x for x in to if is_hashable(x)}
for item in fro:
singleton = False
if type(item) in (str, int):
@@ -2772,7 +2760,7 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
rule_name = rule["rule_name"]
if rule_name in rule_names:
raise GypError(
- "rule %s exists in duplicate, target %s" % (rule_name, target)
+ f"rule {rule_name} exists in duplicate, target {target}"
)
rule_names[rule_name] = rule
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/input_test.py b/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
index 6672ddc01..a18f72e9e 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py b/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
index 07412578d..59647c9a8 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -8,7 +8,6 @@
These functions are executed via gyp-mac-tool when using the Makefile generator.
"""
-from __future__ import print_function
import fcntl
import fnmatch
@@ -23,8 +22,6 @@ import subprocess
import sys
import tempfile
-PY3 = bytes != str
-
def main(args):
executor = MacTool()
@@ -33,7 +30,7 @@ def main(args):
sys.exit(exit_code)
-class MacTool(object):
+class MacTool:
"""This class performs all the Mac tooling steps. The methods can either be
executed directly, or dispatched from an argument list."""
@@ -179,7 +176,7 @@ class MacTool(object):
def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
"""Copies the |source| Info.plist to the destination directory |dest|."""
# Read the source Info.plist into memory.
- with open(source, "r") as fd:
+ with open(source) as fd:
lines = fd.read()
# Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
@@ -251,7 +248,7 @@ class MacTool(object):
dest = os.path.join(os.path.dirname(info_plist), "PkgInfo")
with open(dest, "w") as fp:
- fp.write("%s%s" % (package_type, signature_code))
+ fp.write(f"{package_type}{signature_code}")
def ExecFlock(self, lockfile, *cmd_list):
"""Emulates the most basic behavior of Linux's flock(1)."""
@@ -278,9 +275,7 @@ class MacTool(object):
# epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
env["ZERO_AR_DATE"] = "1"
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- if PY3:
- err = err.decode("utf-8")
+ err = libtoolout.communicate()[1].decode("utf-8")
for line in err.splitlines():
if not libtool_re.match(line) and not libtool_re5.match(line):
print(line, file=sys.stderr)
@@ -540,7 +535,7 @@ class MacTool(object):
"application-identifier", ""
)
for team_identifier in profile_data.get("TeamIdentifier", []):
- app_id = "%s.%s" % (team_identifier, bundle_identifier)
+ app_id = f"{team_identifier}.{bundle_identifier}"
if fnmatch.fnmatch(app_id, app_id_pattern):
valid_provisioning_profiles[app_id_pattern] = (
profile_path,
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py b/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
index 1afc1d687..5b9c2712e 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
@@ -7,6 +7,7 @@ This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""
+import collections
import os
import re
import subprocess
@@ -16,15 +17,13 @@ from gyp.common import OrderedSet
import gyp.MSVSUtil
import gyp.MSVSVersion
-PY3 = bytes != str
-
windows_quoter_regex = re.compile(r'(\\*)"')
-def QuoteForRspFile(arg):
+def QuoteForRspFile(arg, quote_cmd=True):
"""Quote a command line argument so that it appears as one argument when
- processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
- Windows programs)."""
+ processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
+ Windows programs)."""
# See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
# threads. This is actually the quoting rules for CommandLineToArgvW, not
# for the shell, because the shell doesn't do anything in Windows. This
@@ -38,7 +37,8 @@ def QuoteForRspFile(arg):
# For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
# preceding it, and results in n backslashes + the quote. So we substitute
# in 2* what we match, +1 more, plus the quote.
- arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
+ if quote_cmd:
+ arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
# %'s also need to be doubled otherwise they're interpreted as batch
# positional arguments. Also make sure to escape the % so that they're
@@ -50,12 +50,17 @@ def QuoteForRspFile(arg):
# These commands are used in rsp files, so no escaping for the shell (via ^)
# is necessary.
- # Finally, wrap the whole thing in quotes so that the above quote rule
- # applies and whitespace isn't a word break.
- return '"' + arg + '"'
+ # As a workaround for programs that don't use CommandLineToArgvW, gyp
+ # supports msvs_quote_cmd=0, which simply disables all quoting.
+ if quote_cmd:
+ # Finally, wrap the whole thing in quotes so that the above quote rule
+ # applies and whitespace isn't a word break.
+ return f'"{arg}"'
+
+ return arg
-def EncodeRspFileList(args):
+def EncodeRspFileList(args, quote_cmd):
"""Process a list of arguments using QuoteCmdExeArgument."""
# Note that the first argument is assumed to be the command. Don't add
# quotes around it because then built-ins like 'echo', etc. won't work.
@@ -69,12 +74,13 @@ def EncodeRspFileList(args):
program = call + " " + os.path.normpath(program)
else:
program = os.path.normpath(args[0])
- return program + " " + " ".join(QuoteForRspFile(arg) for arg in args[1:])
+ return (program + " "
+ + " ".join(QuoteForRspFile(arg, quote_cmd) for arg in args[1:]))
def _GenericRetrieve(root, default, path):
"""Given a list of dictionary keys |path| and a tree of dicts |root|, find
- value at path, or return |default| if any of the path doesn't exist."""
+ value at path, or return |default| if any of the path doesn't exist."""
if not root:
return default
if not path:
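
The quote_cmd flag threads through to QuoteForRspFile so that msvs_quote_cmd=0 can pass arguments unquoted to tools that do not parse their command line with CommandLineToArgvW. A reduced model of the two modes (the %-doubling and rsp-file details are omitted):

    import re
    quoter = re.compile(r'(\\*)"')

    def quote_for_rsp_file(arg, quote_cmd=True):
        if quote_cmd:
            arg = quoter.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
            return f'"{arg}"'
        return arg  # msvs_quote_cmd=0: pass through untouched

    assert quote_for_rsp_file("a b") == '"a b"'
    assert quote_for_rsp_file("a b", quote_cmd=False) == "a b"
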
@@ -95,7 +101,7 @@ def _AddPrefix(element, prefix):
def _DoRemapping(element, map):
"""If |element| then remap it through |map|. If |element| is iterable then
- each item will be remapped. Any elements not found will be removed."""
+ each item will be remapped. Any elements not found will be removed."""
if map is not None and element is not None:
if not callable(map):
map = map.get # Assume it's a dict, otherwise a callable to do the remap.
@@ -108,8 +114,8 @@ def _DoRemapping(element, map):
def _AppendOrReturn(append, element):
"""If |append| is None, simply return |element|. If |append| is not None,
- then add |element| to it, adding each item in |element| if it's a list or
- tuple."""
+ then add |element| to it, adding each item in |element| if it's a list or
+ tuple."""
if append is not None and element is not None:
if isinstance(element, list) or isinstance(element, tuple):
append.extend(element)
@@ -121,8 +127,8 @@ def _AppendOrReturn(append, element):
def _FindDirectXInstallation():
"""Try to find an installation location for the DirectX SDK. Check for the
- standard environment variable, and if that doesn't exist, try to find
- via the registry. May return None if not found in either location."""
+ standard environment variable, and if that doesn't exist, try to find
+ via the registry. May return None if not found in either location."""
# Return previously calculated value, if there is one
if hasattr(_FindDirectXInstallation, "dxsdk_dir"):
return _FindDirectXInstallation.dxsdk_dir
@@ -132,9 +138,7 @@ def _FindDirectXInstallation():
# Setup params to pass to and attempt to launch reg.exe.
cmd = ["reg.exe", "query", r"HKLM\Software\Microsoft\DirectX", "/s"]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- stdout = p.communicate()[0]
- if PY3:
- stdout = stdout.decode("utf-8")
+ stdout = p.communicate()[0].decode("utf-8")
for line in stdout.splitlines():
if "InstallPath" in line:
dxsdk_dir = line.split(" ")[3] + "\\"
@@ -146,7 +150,7 @@ def _FindDirectXInstallation():
def GetGlobalVSMacroEnv(vs_version):
"""Get a dict of variables mapping internal VS macro names to their gyp
- equivalents. Returns all variables that are independent of the target."""
+ equivalents. Returns all variables that are independent of the target."""
env = {}
# '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
# Visual Studio is actually installed.
@@ -167,7 +171,7 @@ def GetGlobalVSMacroEnv(vs_version):
def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
"""Finds msvs_system_include_dirs that are common to all targets, removes
- them from all targets, and returns an OrderedSet containing them."""
+ them from all targets, and returns an OrderedSet containing them."""
all_system_includes = OrderedSet(configs[0].get("msvs_system_include_dirs", []))
for config in configs[1:]:
system_includes = config.get("msvs_system_include_dirs", [])
@@ -193,10 +197,10 @@ def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
return expanded_system_includes
-class MsvsSettings(object):
+class MsvsSettings:
"""A class that understands the gyp 'msvs_...' values (especially the
- msvs_settings field). They largely correpond to the VS2008 IDE DOM. This
- class helps map those settings to command line options."""
+ msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
+ class helps map those settings to command line options."""
def __init__(self, spec, generator_flags):
self.spec = spec
@@ -229,7 +233,9 @@ class MsvsSettings(object):
for config in configs.values():
if field in config:
unsupported += [
- "%s not supported (target %s)." % (field, spec["target_name"])
+ "{} not supported (target {}).".format(
+ field, spec["target_name"]
+ )
]
if unsupported:
raise Exception("\n".join(unsupported))
@@ -237,9 +243,9 @@ class MsvsSettings(object):
def GetExtension(self):
"""Returns the extension for the target, with no leading dot.
- Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
- the target type.
- """
+ Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
+ the target type.
+ """
ext = self.spec.get("product_extension", None)
if ext:
return ext
@@ -247,7 +253,7 @@ class MsvsSettings(object):
def GetVSMacroEnv(self, base_to_build=None, config=None):
"""Get a dict of variables mapping internal VS macro names to their gyp
- equivalents."""
+ equivalents."""
target_arch = self.GetArch(config)
if target_arch == "x86":
target_platform = "Win32"
@@ -294,15 +300,15 @@ class MsvsSettings(object):
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
- |append| is specified, and the item is found, it will be appended to that
- object instead of returned. If |map| is specified, results will be
- remapped through |map| before being returned or appended."""
+ |append| is specified, and the item is found, it will be appended to that
+ object instead of returned. If |map| is specified, results will be
+ remapped through |map| before being returned or appended."""
result = _GenericRetrieve(field, default, path)
result = _DoRemapping(result, map)
result = _AddPrefix(result, prefix)
return _AppendOrReturn(append, result)
- class _GetWrapper(object):
+ class _GetWrapper:
def __init__(self, parent, field, base_path, append=None):
self.parent = parent
self.field = field
@@ -321,7 +327,7 @@ class MsvsSettings(object):
def GetArch(self, config):
"""Get architecture based on msvs_configuration_platform and
- msvs_target_platform. Returns either 'x86' or 'x64'."""
+ msvs_target_platform. Returns either 'x86' or 'x64'."""
configuration_platform = self.msvs_configuration_platform.get(config, "")
platform = self.msvs_target_platform.get(config, "")
if not platform: # If no specific override, use the configuration's.
@@ -335,7 +341,7 @@ class MsvsSettings(object):
# first level is globally for the configuration (this is what we consider
# "the" config at the gyp level, which will be something like 'Debug' or
# 'Release'), VS2015 and later only use this level
- if self.vs_version.short_name >= 2015:
+ if int(self.vs_version.short_name) >= 2015:
return config
# and a second target-specific configuration, which is an
# override for the global one. |config| is remapped here to take into
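
The int() cast is a real Python 3 fix rather than a style change: short_name is a string, and while Python 2 allowed str >= int comparisons (strings always compared greater than numbers), Python 3 raises TypeError, so the value must be converted before comparing:

    short_name = "2017"
    assert int(short_name) >= 2015  # "2017" >= 2015 would raise TypeError on py3
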
@@ -368,7 +374,7 @@ class MsvsSettings(object):
def AdjustIncludeDirs(self, include_dirs, config):
"""Updates include_dirs to expand VS specific paths, and adds the system
- include dirs used for platform SDK and similar."""
+ include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = include_dirs + self.msvs_system_include_dirs[config]
includes.extend(
@@ -380,7 +386,7 @@ class MsvsSettings(object):
def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
"""Updates midl_include_dirs to expand VS specific paths, and adds the
- system include dirs used for platform SDK and similar."""
+ system include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = midl_include_dirs + self.msvs_system_include_dirs[config]
includes.extend(
@@ -392,7 +398,7 @@ class MsvsSettings(object):
def GetComputedDefines(self, config):
"""Returns the set of defines that are injected to the defines list based
- on other VS settings."""
+ on other VS settings."""
config = self._TargetConfig(config)
defines = []
if self._ConfigAttrib(["CharacterSet"], config) == "1":
@@ -408,7 +414,7 @@ class MsvsSettings(object):
def GetCompilerPdbName(self, config, expand_special):
"""Get the pdb file name that should be used for compiler invocations, or
- None if there's no explicit name specified."""
+ None if there's no explicit name specified."""
config = self._TargetConfig(config)
pdbname = self._Setting(("VCCLCompilerTool", "ProgramDataBaseFileName"), config)
if pdbname:
@@ -417,7 +423,7 @@ class MsvsSettings(object):
def GetMapFileName(self, config, expand_special):
"""Gets the explicitly overridden map file name for a target or returns None
- if it's not set."""
+ if it's not set."""
config = self._TargetConfig(config)
map_file = self._Setting(("VCLinkerTool", "MapFileName"), config)
if map_file:
@@ -426,7 +432,7 @@ class MsvsSettings(object):
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
- if it's not overridden."""
+ if it's not overridden."""
config = self._TargetConfig(config)
type = self.spec["type"]
root = "VCLibrarianTool" if type == "static_library" else "VCLinkerTool"
@@ -440,7 +446,7 @@ class MsvsSettings(object):
def GetPDBName(self, config, expand_special, default):
"""Gets the explicitly overridden pdb name for a target or returns
- default if it's not overridden, or if no pdb will be generated."""
+ default if it's not overridden, or if no pdb will be generated."""
config = self._TargetConfig(config)
output_file = self._Setting(("VCLinkerTool", "ProgramDatabaseFile"), config)
generate_debug_info = self._Setting(
@@ -456,7 +462,7 @@ class MsvsSettings(object):
def GetNoImportLibrary(self, config):
"""If NoImportLibrary: true, ninja will not expect the output to include
- an import library."""
+ an import library."""
config = self._TargetConfig(config)
noimplib = self._Setting(("NoImportLibrary",), config)
return noimplib == "true"
@@ -539,7 +545,7 @@ class MsvsSettings(object):
)
]
)
- if self.vs_version.project_version >= 12.0:
+ if float(self.vs_version.project_version) >= 12.0:
# New flag introduced in VS2013 (project version 12.0) Forces writes to
# the program database (PDB) to be serialized through MSPDBSRV.EXE.
# https://msdn.microsoft.com/en-us/library/dn502518.aspx
@@ -549,8 +555,7 @@ class MsvsSettings(object):
return cflags
def _GetPchFlags(self, config, extension):
- """Get the flags to be added to the cflags for precompiled header support.
- """
+ """Get the flags to be added to the cflags for precompiled header support."""
config = self._TargetConfig(config)
# The PCH is only built once by a particular source file. Usage of PCH must
# only be for the same language (i.e. C vs. C++), so only include the pch
@@ -575,7 +580,7 @@ class MsvsSettings(object):
def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
"""Get and normalize the list of paths in AdditionalLibraryDirectories
- setting."""
+ setting."""
config = self._TargetConfig(config)
libpaths = self._Setting(
(root, "AdditionalLibraryDirectories"), config, default=[]
@@ -622,14 +627,14 @@ class MsvsSettings(object):
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
""".def files get implicitly converted to a ModuleDefinitionFile for the
- linker in the VS generator. Emulate that behaviour here."""
+ linker in the VS generator. Emulate that behaviour here."""
def_file = self.GetDefFile(gyp_to_build_path)
if def_file:
ldflags.append('/DEF:"%s"' % def_file)
def GetPGDName(self, config, expand_special):
"""Gets the explicitly overridden pgd name for a target or returns None
- if it's not overridden."""
+ if it's not overridden."""
config = self._TargetConfig(config)
output_file = self._Setting(("VCLinkerTool", "ProfileGuidedDatabase"), config)
if output_file:
@@ -649,7 +654,7 @@ class MsvsSettings(object):
build_dir,
):
"""Returns the flags that need to be added to link commands, and the
- manifest files."""
+ manifest files."""
config = self._TargetConfig(config)
ldflags = []
ld = self._GetWrapper(
@@ -709,7 +714,7 @@ class MsvsSettings(object):
)
if stack_commit_size:
stack_commit_size = "," + stack_commit_size
- ldflags.append("/STACK:%s%s" % (stack_reserve_size, stack_commit_size))
+ ldflags.append(f"/STACK:{stack_reserve_size}{stack_commit_size}")
ld("TerminalServerAware", map={"1": ":NO", "2": ""}, prefix="/TSAWARE")
ld("LinkIncremental", map={"1": ":NO", "2": ""}, prefix="/INCREMENTAL")
@@ -775,12 +780,12 @@ class MsvsSettings(object):
self, config, name, gyp_to_build_path, allow_isolation, build_dir
):
"""Returns a 3-tuple:
- - the set of flags that need to be added to the link to generate
- a default manifest
- - the intermediate manifest that the linker will generate that should be
- used to assert it doesn't add anything to the merged one.
- - the list of all the manifest files to be merged by the manifest tool and
- included into the link."""
+ - the set of flags that need to be added to the link to generate
+ a default manifest
+ - the intermediate manifest that the linker will generate that should be
+ used to assert it doesn't add anything to the merged one.
+ - the list of all the manifest files to be merged by the manifest tool and
+ included into the link."""
generate_manifest = self._Setting(
("VCLinkerTool", "GenerateManifest"), config, default="true"
)
@@ -835,10 +840,10 @@ class MsvsSettings(object):
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
- <requestedExecutionLevel level='%s' uiAccess='%s' />
+ <requestedExecutionLevel level='{}' uiAccess='{}' />
</requestedPrivileges>
</security>
-</trustInfo>""" % (
+</trustInfo>""".format(
execution_level_map[execution_level],
ui_access,
)
@@ -867,7 +872,7 @@ class MsvsSettings(object):
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
"""Gets additional manifest files that are added to the default one
- generated by the linker."""
+ generated by the linker."""
files = self._Setting(
("VCManifestTool", "AdditionalManifestFiles"), config, default=[]
)
@@ -880,7 +885,7 @@ class MsvsSettings(object):
def IsUseLibraryDependencyInputs(self, config):
"""Returns whether the target should be linked via Use Library Dependency
- Inputs (using component .objs of a given .lib)."""
+ Inputs (using component .objs of a given .lib)."""
config = self._TargetConfig(config)
uldi = self._Setting(("VCLinkerTool", "UseLibraryDependencyInputs"), config)
return uldi == "true"
@@ -901,7 +906,7 @@ class MsvsSettings(object):
def GetRcflags(self, config, gyp_to_ninja_path):
"""Returns the flags that need to be added to invocations of the resource
- compiler."""
+ compiler."""
config = self._TargetConfig(config)
rcflags = []
rc = self._GetWrapper(
@@ -916,13 +921,13 @@ class MsvsSettings(object):
def BuildCygwinBashCommandLine(self, args, path_to_base):
"""Build a command line that runs args via cygwin bash. We assume that all
- incoming paths are in Windows normpath'd form, so they need to be
- converted to posix style for the part of the command line that's passed to
- bash. We also have to do some Visual Studio macro emulation here because
- various rules use magic VS names for things. Also note that rules that
- contain ninja variables cannot be fixed here (for example ${source}), so
- the outer generator needs to make sure that the paths that are written out
- are in posix style, if the command line will be used here."""
+ incoming paths are in Windows normpath'd form, so they need to be
+ converted to posix style for the part of the command line that's passed to
+ bash. We also have to do some Visual Studio macro emulation here because
+ various rules use magic VS names for things. Also note that rules that
+ contain ninja variables cannot be fixed here (for example ${source}), so
+ the outer generator needs to make sure that the paths that are written out
+ are in posix style, if the command line will be used here."""
cygwin_dir = os.path.normpath(
os.path.join(path_to_base, self.msvs_cygwin_dirs[0])
)
@@ -932,17 +937,26 @@ class MsvsSettings(object):
bash_cmd = " ".join(args)
cmd = (
'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir
- + 'bash -c "%s ; %s"' % (cd, bash_cmd)
+ + f'bash -c "{cd} ; {bash_cmd}"'
)
return cmd
- def IsRuleRunUnderCygwin(self, rule):
- """Determine if an action should be run under cygwin. If the variable is
- unset, or set to 1 we use cygwin."""
- return (
- int(rule.get("msvs_cygwin_shell", self.spec.get("msvs_cygwin_shell", 1)))
- != 0
- )
+ RuleShellFlags = collections.namedtuple("RuleShellFlags", ["cygwin", "quote"])
+
+ def GetRuleShellFlags(self, rule):
+ """Return RuleShellFlags about how the given rule should be run. This
+ includes whether it should run under cygwin (msvs_cygwin_shell), and
+ whether the commands should be quoted (msvs_quote_cmd)."""
+ # If the variable is unset or set to 1, we use cygwin
+ cygwin = int(rule.get("msvs_cygwin_shell",
+ self.spec.get("msvs_cygwin_shell", 1))) != 0
+ # Default to quoting. There are only a few special instances where the
+ # target command uses non-standard command-line parsing and handles
+ # quotes and quote escaping differently.
+ quote_cmd = int(rule.get("msvs_quote_cmd", 1))
+ assert quote_cmd != 0 or cygwin != 1, \
+ "msvs_quote_cmd=0 only applicable for msvs_cygwin_shell=0"
+ return MsvsSettings.RuleShellFlags(cygwin, quote_cmd)
def _HasExplicitRuleForExtension(self, spec, extension):
"""Determine if there's an explicit rule for a particular extension."""
@@ -959,19 +973,19 @@ class MsvsSettings(object):
def HasExplicitIdlRulesOrActions(self, spec):
"""Determine if there's an explicit rule or action for idl files. When
- there isn't we need to generate implicit rules to build MIDL .idl files."""
+ there isn't we need to generate implicit rules to build MIDL .idl files."""
return self._HasExplicitRuleForExtension(
spec, "idl"
) or self._HasExplicitIdlActions(spec)
def HasExplicitAsmRules(self, spec):
"""Determine if there's an explicit rule for asm files. When there isn't we
- need to generate implicit rules to assemble .asm files."""
+ need to generate implicit rules to assemble .asm files."""
return self._HasExplicitRuleForExtension(spec, "asm")
def GetIdlBuildData(self, source, config):
"""Determine the implicit outputs for an idl file. Returns output
- directory, outputs, and variables and flags that are required."""
+ directory, outputs, and variables and flags that are required."""
config = self._TargetConfig(config)
midl_get = self._GetWrapper(self, self.msvs_settings[config], "VCMIDLTool")
@@ -1010,10 +1024,10 @@ def _LanguageMatchesForPch(source_ext, pch_source_ext):
)
-class PrecompiledHeader(object):
+class PrecompiledHeader:
"""Helper to generate dependencies and build rules to handle generation of
- precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
- """
+ precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
+ """
def __init__(
self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext
@@ -1027,14 +1041,14 @@ class PrecompiledHeader(object):
def _PchHeader(self):
"""Get the header that will appear in an #include line for all source
- files."""
+ files."""
return self.settings.msvs_precompiled_header[self.config]
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
- returns a list of the pch files that should be depended upon. The
- additional wrapping in the return value is for interface compatibility
- with make.py on Mac, and xcode_emulation.py."""
+ returns a list of the pch files that should be depended upon. The
+ additional wrapping in the return value is for interface compatibility
+ with make.py on Mac, and xcode_emulation.py."""
assert arch is None
if not self._PchHeader():
return []
@@ -1046,14 +1060,14 @@ class PrecompiledHeader(object):
def GetPchBuildCommands(self, arch):
"""Not used on Windows as there are no additional build steps required
- (instead, existing steps are modified in GetFlagsModifications below)."""
+ (instead, existing steps are modified in GetFlagsModifications below)."""
return []
def GetFlagsModifications(
self, input, output, implicit, command, cflags_c, cflags_cc, expand_special
):
"""Get the modified cflags and implicit dependencies that should be used
- for the pch compilation step."""
+ for the pch compilation step."""
if input == self.pch_source:
pch_output = ["/Yc" + self._PchHeader()]
if command == "cxx":
@@ -1090,7 +1104,7 @@ def _GetVsvarsSetupArgs(generator_flags, arch):
def ExpandMacros(string, expansions):
"""Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
- for the canonical way to retrieve a suitable dict."""
+ for the canonical way to retrieve a suitable dict."""
if "$" in string:
for old, new in expansions.items():
assert "$(" not in new, new
@@ -1100,7 +1114,7 @@ def ExpandMacros(string, expansions):
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
- a textual dump output by the cmd.exe 'set' command."""
+ a textual dump output by the cmd.exe 'set' command."""
envvars_to_save = (
"goma_.*", # TODO(scottmg): This is ugly, but needed for goma.
"include",
@@ -1140,8 +1154,8 @@ def _ExtractImportantEnvironment(output_of_set):
def _FormatAsEnvironmentBlock(envvar_dict):
"""Format as an 'environment block' directly suitable for CreateProcess.
- Briefly this is a list of key=value\0, terminated by an additional \0. See
- CreateProcess documentation for more details."""
+ Briefly this is a list of key=value\0, terminated by an additional \0. See
+ CreateProcess documentation for more details."""
block = ""
nul = "\0"
for key, value in envvar_dict.items():
@@ -1152,7 +1166,7 @@ def _FormatAsEnvironmentBlock(envvar_dict):
def _ExtractCLPath(output_of_where):
"""Gets the path to cl.exe based on the output of calling the environment
- setup batch file, followed by the equivalent of `where`."""
+ setup batch file, followed by the equivalent of `where`."""
# Take the first line, as that's the first found in the PATH.
for line in output_of_where.strip().splitlines():
if line.startswith("LOC:"):
@@ -1163,19 +1177,19 @@ def GenerateEnvironmentFiles(
toplevel_build_dir, generator_flags, system_includes, open_out
):
"""It's not sufficient to have the absolute path to the compiler, linker,
- etc. on Windows, as those tools rely on .dlls being in the PATH. We also
- need to support both x86 and x64 compilers within the same build (to support
- msvs_target_platform hackery). Different architectures require a different
- compiler binary, and different supporting environment variables (INCLUDE,
- LIB, LIBPATH). So, we extract the environment here, wrap all invocations
- of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
- sets up the environment, and then we do not prefix the compiler with
- an absolute path, instead preferring something like "cl.exe" in the rule
- which will then run whichever the environment setup has put in the path.
- When the following procedure to generate environment files does not
- meet your requirement (e.g. for custom toolchains), you can pass
- "-G ninja_use_custom_environment_files" to the gyp to suppress file
- generation and use custom environment files prepared by yourself."""
+ etc. on Windows, as those tools rely on .dlls being in the PATH. We also
+ need to support both x86 and x64 compilers within the same build (to support
+ msvs_target_platform hackery). Different architectures require a different
+ compiler binary, and different supporting environment variables (INCLUDE,
+ LIB, LIBPATH). So, we extract the environment here, wrap all invocations
+ of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
+ sets up the environment, and then we do not prefix the compiler with
+ an absolute path, instead preferring something like "cl.exe" in the rule
+ which will then run whichever the environment setup has put in the path.
+ When the following procedure to generate environment files does not
+ meet your requirements (e.g. for custom toolchains), you can pass
+ "-G ninja_use_custom_environment_files" to gyp to suppress file
+ generation and use custom environment files that you prepare yourself."""
archs = ("x86", "x64")
if generator_flags.get("ninja_use_custom_environment_files", 0):
cl_paths = {}
@@ -1191,9 +1205,7 @@ def GenerateEnvironmentFiles(
popen = subprocess.Popen(
args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
- variables, _ = popen.communicate()
- if PY3:
- variables = variables.decode("utf-8")
+ variables = popen.communicate()[0].decode("utf-8")
if popen.returncode != 0:
raise Exception('"%s" failed with error %d' % (args, popen.returncode))
env = _ExtractImportantEnvironment(variables)
@@ -1216,19 +1228,17 @@ def GenerateEnvironmentFiles(
("&&", "for", "%i", "in", "(cl.exe)", "do", "@echo", "LOC:%~$PATH:i")
)
popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
- output, _ = popen.communicate()
- if PY3:
- output = output.decode("utf-8")
+ output = popen.communicate()[0].decode("utf-8")
cl_paths[arch] = _ExtractCLPath(output)
return cl_paths
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
"""Emulate behavior of msvs_error_on_missing_sources present in the msvs
- generator: Check that all regular source files, i.e. not created at run time,
- exist on disk. Missing files cause needless recompilation when building via
- VS, and we want this check to match for people/bots that build using ninja,
- so they're not surprised when the VS build fails."""
+ generator: Check that all regular source files, i.e. not created at run time,
+ exist on disk. Missing files cause needless recompilation when building via
+ VS, and we want this check to match for people/bots that build using ninja,
+ so they're not surprised when the VS build fails."""
if int(generator_flags.get("msvs_error_on_missing_sources", 0)):
no_specials = filter(lambda x: "$" not in x, sources)
relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
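The new GetRuleShellFlags logic above is the behavioral core of this file's changes: msvs_cygwin_shell still defaults to on, quoting defaults to on, and disabling quoting is only legal outside cygwin. A minimal standalone sketch of that defaulting and validation (illustrative only; not part of the commit):

    import collections

    RuleShellFlags = collections.namedtuple("RuleShellFlags", ["cygwin", "quote"])

    def rule_shell_flags(rule, spec):
        # msvs_cygwin_shell: unset or 1 means "run under cygwin".
        cygwin = int(rule.get("msvs_cygwin_shell",
                              spec.get("msvs_cygwin_shell", 1))) != 0
        # msvs_quote_cmd: default to quoting the command line.
        quote = int(rule.get("msvs_quote_cmd", 1))
        # Disabling quoting is only supported outside the cygwin shell.
        assert quote != 0 or not cygwin, \
            "msvs_quote_cmd=0 only applicable for msvs_cygwin_shell=0"
        return RuleShellFlags(cygwin, quote)

    print(rule_shell_flags({}, {}))  # RuleShellFlags(cygwin=True, quote=1)
    print(rule_shell_flags({"msvs_cygwin_shell": 0, "msvs_quote_cmd": 0}, {}))
    # RuleShellFlags(cygwin=False, quote=0)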
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py b/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py
index 142123580..0e3e86c74 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py
@@ -16,7 +16,7 @@ def escape_path(word):
return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")
-class Writer(object):
+class Writer:
def __init__(self, output, width=78):
self.output = output
self.width = width
@@ -33,7 +33,7 @@ class Writer(object):
return
if isinstance(value, list):
value = " ".join(filter(None, value)) # Filter out empty strings.
- self._line("%s = %s" % (key, value), indent)
+ self._line(f"{key} = {value}", indent)
def pool(self, name, depth):
self._line("pool %s" % name)
@@ -89,7 +89,7 @@ class Writer(object):
all_inputs.extend(order_only)
self._line(
- "build %s: %s" % (" ".join(out_outputs), " ".join([rule] + all_inputs))
+ "build {}: {}".format(" ".join(out_outputs), " ".join([rule] + all_inputs))
)
if variables:
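For context, the two reformatted lines emit ordinary ninja syntax; behavior is unchanged. A quick standalone sketch of what they produce (not the Writer class itself):

    key, value = "cflags", ["-O2", "", "-Wall"]
    value = " ".join(filter(None, value))  # drop empty strings, as Writer does
    print(f"{key} = {value}")              # -> cflags = -O2 -Wall

    rule, outputs, inputs = "cxx", ["foo.o"], ["foo.cc"]
    print("build {}: {}".format(" ".join(outputs), " ".join([rule] + inputs)))
    # -> build foo.o: cxx foo.cc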
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py b/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
index e01106f9c..729cec063 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
@@ -36,10 +36,7 @@ def _deepcopy_atomic(x):
return x
-try:
- types = bool, float, int, str, type, type(None), long, unicode
-except NameError: # Python 3
- types = bool, float, int, str, type, type(None)
+types = bool, float, int, str, type, type(None)
for x in types:
d[x] = _deepcopy_atomic
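With Python 2's long and unicode types gone, the try/except is unnecessary: the tuple feeds a simple type-dispatch table. A reduced sketch of the pattern (not the full simple_copy module):

    def _deepcopy_atomic(x):
        # Immutable values can be shared instead of copied.
        return x

    d = {}
    for t in (bool, float, int, str, type, type(None)):
        d[t] = _deepcopy_atomic

    x = "shared"
    print(d[type(x)](x) is x)  # True: atomic values are returned as-is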
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py b/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
index 758e9f5c4..638eee400 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -9,7 +9,6 @@
These functions are executed via gyp-win-tool when using the ninja generator.
"""
-from __future__ import print_function
import os
import re
@@ -20,7 +19,6 @@ import string
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
-PY3 = bytes != str
# A regex matching an argument corresponding to the output filename passed to
# link.exe.
@@ -34,7 +32,7 @@ def main(args):
sys.exit(exit_code)
-class WinTool(object):
+class WinTool:
"""This class performs all the Windows tooling steps. The methods can either
be executed directly, or dispatched from an argument list."""
@@ -141,9 +139,7 @@ class WinTool(object):
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
- out, _ = link.communicate()
- if PY3:
- out = out.decode("utf-8")
+ out = link.communicate()[0].decode("utf-8")
for line in out.splitlines():
if (
not line.startswith(" Creating library ")
@@ -223,17 +219,18 @@ class WinTool(object):
our_manifest = "%(out)s.manifest" % variables
# Load and normalize the manifests. mt.exe sometimes removes whitespace,
# and sometimes doesn't unfortunately.
- with open(our_manifest, "r") as our_f:
- with open(assert_manifest, "r") as assert_f:
- our_data = our_f.read().translate(None, string.whitespace)
- assert_data = assert_f.read().translate(None, string.whitespace)
+ with open(our_manifest) as our_f:
+ with open(assert_manifest) as assert_f:
+ translator = str.maketrans('', '', string.whitespace)
+ our_data = our_f.read().translate(translator)
+ assert_data = assert_f.read().translate(translator)
if our_data != assert_data:
os.unlink(out)
def dump(filename):
print(filename, file=sys.stderr)
print("-----", file=sys.stderr)
- with open(filename, "r") as f:
+ with open(filename) as f:
print(f.read(), file=sys.stderr)
print("-----", file=sys.stderr)
@@ -256,9 +253,7 @@ class WinTool(object):
popen = subprocess.Popen(
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
- out, _ = popen.communicate()
- if PY3:
- out = out.decode("utf-8")
+ out = popen.communicate()[0].decode("utf-8")
for line in out.splitlines():
if line and "manifest authoring warning 81010002" not in line:
print(line)
@@ -302,16 +297,14 @@ class WinTool(object):
popen = subprocess.Popen(
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
- out, _ = popen.communicate()
- if PY3:
- out = out.decode("utf-8")
+ out = popen.communicate()[0].decode("utf-8")
# Filter junk out of stdout, and write filtered versions. Output we want
# to filter is pairs of lines that look like this:
# Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
# objidl.idl
lines = out.splitlines()
prefixes = ("Processing ", "64 bit Processing ")
- processing = set(os.path.basename(x) for x in lines if x.startswith(prefixes))
+ processing = {os.path.basename(x) for x in lines if x.startswith(prefixes)}
for line in lines:
if not line.startswith(prefixes) and line not in processing:
print(line)
@@ -323,9 +316,7 @@ class WinTool(object):
popen = subprocess.Popen(
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
- out, _ = popen.communicate()
- if PY3:
- out = out.decode("utf-8")
+ out = popen.communicate()[0].decode("utf-8")
for line in out.splitlines():
if (
not line.startswith("Copyright (C) Microsoft Corporation")
@@ -343,9 +334,7 @@ class WinTool(object):
popen = subprocess.Popen(
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
- out, _ = popen.communicate()
- if PY3:
- out = out.decode("utf-8")
+ out = popen.communicate()[0].decode("utf-8")
for line in out.splitlines():
if (
not line.startswith("Microsoft (R) Windows (R) Resource Compiler")
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py b/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
index 8af2b39f9..a75d8eeab 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
@@ -7,7 +7,6 @@ This module contains classes that help to emulate xcodebuild behavior on top of
other build systems, such as make and ninja.
"""
-from __future__ import print_function
import copy
import gyp.common
@@ -19,8 +18,6 @@ import subprocess
import sys
from gyp.common import GypError
-PY3 = bytes != str
-
# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
# "xcodebuild" is called too quickly (it has been found to return incorrect
# version number).
@@ -40,7 +37,7 @@ def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
return mapping
-class XcodeArchsDefault(object):
+class XcodeArchsDefault:
"""A class to resolve ARCHS variable from xcode_settings, resolving Xcode
macros and implementing filtering by VALID_ARCHS. The expansion of macros
depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
@@ -148,7 +145,7 @@ def GetXcodeArchsDefault():
return XCODE_ARCHS_DEFAULT_CACHE
-class XcodeSettings(object):
+class XcodeSettings:
"""A class that understands the gyp 'xcode_settings' object."""
# Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
@@ -281,7 +278,7 @@ class XcodeSettings(object):
else:
return "." + self.spec.get("product_extension", "app")
else:
- assert False, "Don't know extension for '%s', target '%s'" % (
+ assert False, "Don't know extension for '{}', target '{}'".format(
self.spec["type"],
self.spec["target_name"],
)
@@ -654,28 +651,32 @@ class XcodeSettings(object):
self._WarnUnimplemented("MACH_O_TYPE")
self._WarnUnimplemented("PRODUCT_TYPE")
- if arch is not None:
- archs = [arch]
- else:
- assert self.configname
- archs = self.GetActiveArchs(self.configname)
- if len(archs) != 1:
- # TODO: Supporting fat binaries will be annoying.
- self._WarnUnimplemented("ARCHS")
- archs = ["i386"]
- cflags.append("-arch " + archs[0])
-
- if archs[0] in ("i386", "x86_64"):
- if self._Test("GCC_ENABLE_SSE3_EXTENSIONS", "YES", default="NO"):
- cflags.append("-msse3")
- if self._Test(
- "GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS", "YES", default="NO"
- ):
- cflags.append("-mssse3") # Note 3rd 's'.
- if self._Test("GCC_ENABLE_SSE41_EXTENSIONS", "YES", default="NO"):
- cflags.append("-msse4.1")
- if self._Test("GCC_ENABLE_SSE42_EXTENSIONS", "YES", default="NO"):
- cflags.append("-msse4.2")
+ # If GYP_CROSSCOMPILE (--cross-compiling), disable architecture-specific
+ # additions and assume these will be provided as required via CC_host,
+ # CXX_host, CC_target and CXX_target.
+ if not gyp.common.CrossCompileRequested():
+ if arch is not None:
+ archs = [arch]
+ else:
+ assert self.configname
+ archs = self.GetActiveArchs(self.configname)
+ if len(archs) != 1:
+ # TODO: Supporting fat binaries will be annoying.
+ self._WarnUnimplemented("ARCHS")
+ archs = ["i386"]
+ cflags.append("-arch " + archs[0])
+
+ if archs[0] in ("i386", "x86_64"):
+ if self._Test("GCC_ENABLE_SSE3_EXTENSIONS", "YES", default="NO"):
+ cflags.append("-msse3")
+ if self._Test(
+ "GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS", "YES", default="NO"
+ ):
+ cflags.append("-mssse3") # Note 3rd 's'.
+ if self._Test("GCC_ENABLE_SSE41_EXTENSIONS", "YES", default="NO"):
+ cflags.append("-msse4.1")
+ if self._Test("GCC_ENABLE_SSE42_EXTENSIONS", "YES", default="NO"):
+ cflags.append("-msse4.2")
cflags += self._Settings().get("WARNING_CFLAGS", [])
@@ -938,16 +939,17 @@ class XcodeSettings(object):
+ gyp_to_build_path(self._Settings()["ORDER_FILE"])
)
- if arch is not None:
- archs = [arch]
- else:
- assert self.configname
- archs = self.GetActiveArchs(self.configname)
- if len(archs) != 1:
- # TODO: Supporting fat binaries will be annoying.
- self._WarnUnimplemented("ARCHS")
- archs = ["i386"]
- ldflags.append("-arch " + archs[0])
+ if not gyp.common.CrossCompileRequested():
+ if arch is not None:
+ archs = [arch]
+ else:
+ assert self.configname
+ archs = self.GetActiveArchs(self.configname)
+ if len(archs) != 1:
+ # TODO: Supporting fat binaries will be annoying.
+ self._WarnUnimplemented("ARCHS")
+ archs = ["i386"]
+ ldflags.append("-arch " + archs[0])
# Xcode adds the product directory by default.
# Rewrite -L. to -L./ to work around http://www.openradar.me/25313838
@@ -1083,7 +1085,7 @@ class XcodeSettings(object):
if not quiet:
result.append("echo STRIP\\(%s\\)" % self.spec["target_name"])
- result.append("strip %s %s" % (strip_flags, output_binary))
+ result.append(f"strip {strip_flags} {output_binary}")
self.configname = None
return result
@@ -1105,7 +1107,7 @@ class XcodeSettings(object):
):
if not quiet:
result.append("echo DSYMUTIL\\(%s\\)" % self.spec["target_name"])
- result.append("dsymutil %s -o %s" % (output_binary, output + ".dSYM"))
+ result.append("dsymutil {} -o {}".format(output_binary, output + ".dSYM"))
self.configname = None
return result
@@ -1138,7 +1140,7 @@ class XcodeSettings(object):
source = os.path.join("${BUILT_PRODUCTS_DIR}", product_name)
test_host = os.path.dirname(settings.get("TEST_HOST"))
xctest_destination = os.path.join(test_host, "PlugIns", product_name)
- postbuilds.extend(["ditto %s %s" % (source, xctest_destination)])
+ postbuilds.extend([f"ditto {source} {xctest_destination}"])
key = self._GetIOSCodeSignIdentityKey(settings)
if not key:
@@ -1165,7 +1167,7 @@ class XcodeSettings(object):
for framework in frameworks:
source = os.path.join(platform_root, framework)
destination = os.path.join(frameworks_dir, os.path.basename(framework))
- postbuilds.extend(["ditto %s %s" % (source, destination)])
+ postbuilds.extend([f"ditto {source} {destination}"])
# Then re-sign everything with 'preserve=True'
postbuilds.extend(
@@ -1366,7 +1368,7 @@ class XcodeSettings(object):
return ""
-class MacPrefixHeader(object):
+class MacPrefixHeader:
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
This feature consists of several pieces:
@@ -1556,9 +1558,7 @@ def GetStdoutQuiet(cmdlist):
Ignores the stderr.
Raises |GypError| if the command return with a non-zero return code."""
job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out = job.communicate()[0]
- if PY3:
- out = out.decode("utf-8")
+ out = job.communicate()[0].decode("utf-8")
if job.returncode != 0:
raise GypError("Error %d running %s" % (job.returncode, cmdlist[0]))
return out.rstrip("\n")
@@ -1568,9 +1568,7 @@ def GetStdout(cmdlist):
"""Returns the content of standard output returned by invoking |cmdlist|.
Raises |GypError| if the command return with a non-zero return code."""
job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
- out = job.communicate()[0]
- if PY3:
- out = out.decode("utf-8")
+ out = job.communicate()[0].decode("utf-8")
if job.returncode != 0:
sys.stderr.write(out + "\n")
raise GypError("Error %d running %s" % (job.returncode, cmdlist[0]))
@@ -1866,7 +1864,7 @@ def _TopologicallySortedEnvVarKeys(env):
# definition contains all variables it references in a single string.
# We can then reverse the result of the topological sort at the end.
# Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
- matches = set([v for v in regex.findall(env[node]) if v in env])
+ matches = {v for v in regex.findall(env[node]) if v in env}
for dependee in matches:
assert "${" not in dependee, "Nested variables not supported: " + dependee
return matches
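The PY3 deletions in this file all reduce to one fact: on Python 3, subprocess pipes always return bytes, so the output can be decoded unconditionally. A minimal sketch:

    import subprocess
    import sys

    job = subprocess.Popen([sys.executable, "-c", "print('hi')"],
                           stdout=subprocess.PIPE)
    out = job.communicate()[0].decode("utf-8")  # bytes -> str, no PY3 guard
    print(out.strip())  # hi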
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py b/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
index 10ddcbccd..bb74eacbe 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
@@ -43,11 +43,11 @@ def _WriteWorkspace(main_gyp, sources_gyp, params):
workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
try:
- with open(workspace_file, "r") as input_file:
+ with open(workspace_file) as input_file:
input_string = input_file.read()
if input_string == output_string:
return
- except IOError:
+ except OSError:
# Ignore errors if the file doesn't exist.
pass
@@ -214,7 +214,7 @@ def CreateWrapper(target_list, target_dicts, data, params):
if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
# Add to new_target_list.
target_name = spec.get("target_name")
- new_target_name = "%s:%s#target" % (main_gyp, target_name)
+ new_target_name = f"{main_gyp}:{target_name}#target"
new_target_list.append(new_target_name)
# Add to new_target_dicts.
@@ -282,7 +282,7 @@ def CreateWrapper(target_list, target_dicts, data, params):
# Put sources_to_index in its own gyp.
sources_gyp = os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
- fully_qualified_target_name = "%s:%s#target" % (sources_gyp, sources_target_name)
+ fully_qualified_target_name = f"{sources_gyp}:{sources_target_name}#target"
# Add to new_target_list, new_target_dicts and new_data.
new_target_list.append(fully_qualified_target_name)
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py b/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
index d90dd99dc..076eea372 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
@@ -138,19 +138,17 @@ a project file is output.
"""
import gyp.common
+from functools import cmp_to_key
import hashlib
+from operator import attrgetter
import posixpath
import re
import struct
import sys
-try:
- basestring, cmp, unicode
-except NameError: # Python 3
- basestring = unicode = str
- def cmp(x, y):
- return (x > y) - (x < y)
+def cmp(x, y):
+ return (x > y) - (x < y)
# See XCObject._EncodeString. This pattern is used to determine when a string
@@ -199,7 +197,7 @@ def ConvertVariablesToShellSyntax(input_string):
return re.sub(r"\$\((.*?)\)", "${\\1}", input_string)
-class XCObject(object):
+class XCObject:
"""The abstract base of all class types used in Xcode project files.
Class variables:
@@ -301,8 +299,8 @@ class XCObject(object):
try:
name = self.Name()
except NotImplementedError:
- return "<%s at 0x%x>" % (self.__class__.__name__, id(self))
- return "<%s %r at 0x%x>" % (self.__class__.__name__, name, id(self))
+ return f"<{self.__class__.__name__} at 0x{id(self):x}>"
+ return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>"
def Copy(self):
"""Make a copy of this object.
@@ -325,7 +323,7 @@ class XCObject(object):
that._properties[key] = new_value
else:
that._properties[key] = value
- elif isinstance(value, (basestring, int)):
+ elif isinstance(value, (str, int)):
that._properties[key] = value
elif isinstance(value, list):
if is_strong:
@@ -427,6 +425,8 @@ class XCObject(object):
"""
hash.update(struct.pack(">i", len(data)))
+ if isinstance(data, str):
+ data = data.encode("utf-8")
hash.update(data)
if seed_hash is None:
@@ -616,7 +616,7 @@ class XCObject(object):
comment = value.Comment()
elif isinstance(value, str):
printable += self._EncodeString(value)
- elif isinstance(value, basestring):
+ elif isinstance(value, str):
printable += self._EncodeString(value.encode("utf-8"))
elif isinstance(value, int):
printable += str(value)
@@ -791,7 +791,7 @@ class XCObject(object):
)
for item in value:
if not isinstance(item, property_type) and not (
- isinstance(item, basestring) and property_type == str
+ isinstance(item, str) and property_type == str
):
# Accept unicode where str is specified. str is treated as
# UTF-8-encoded.
@@ -806,7 +806,7 @@ class XCObject(object):
+ item.__class__.__name__
)
elif not isinstance(value, property_type) and not (
- isinstance(value, basestring) and property_type == str
+ isinstance(value, str) and property_type == str
):
# Accept unicode where str is specified. str is treated as
# UTF-8-encoded.
@@ -827,7 +827,7 @@ class XCObject(object):
self._properties[property] = value.Copy()
else:
self._properties[property] = value
- elif isinstance(value, (basestring, int)):
+ elif isinstance(value, (str, int)):
self._properties[property] = value
elif isinstance(value, list):
if is_strong:
@@ -1487,7 +1487,7 @@ class PBXGroup(XCHierarchicalElement):
def SortGroup(self):
self._properties["children"] = sorted(
- self._properties["children"], cmp=lambda x, y: x.Compare(y)
+ self._properties["children"], key=cmp_to_key(lambda x, y: x.Compare(y))
)
# Recurse.
@@ -2185,7 +2185,7 @@ class PBXCopyFilesBuildPhase(XCBuildPhase):
relative_path = path[1:]
else:
raise ValueError(
- "Can't use path %s in a %s" % (path, self.__class__.__name__)
+ f"Can't use path {path} in a {self.__class__.__name__}"
)
self._properties["dstPath"] = relative_path
@@ -2250,8 +2250,8 @@ class PBXContainerItemProxy(XCObject):
def __repr__(self):
props = self._properties
- name = "%s.gyp:%s" % (props["containerPortal"].Name(), props["remoteInfo"])
- return "<%s %r at 0x%x>" % (self.__class__.__name__, name, id(self))
+ name = "{}.gyp:{}".format(props["containerPortal"].Name(), props["remoteInfo"])
+ return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>"
def Name(self):
# Admittedly not the best name, but it's what Xcode uses.
@@ -2288,7 +2288,7 @@ class PBXTargetDependency(XCObject):
def __repr__(self):
name = self._properties.get("name") or self._properties["target"].Name()
- return "<%s %r at 0x%x>" % (self.__class__.__name__, name, id(self))
+ return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>"
def Name(self):
# Admittedly not the best name, but it's what Xcode uses.
@@ -2895,7 +2895,7 @@ class PBXProject(XCContainerPortal):
# according to their defined order.
self._properties["mainGroup"]._properties["children"] = sorted(
self._properties["mainGroup"]._properties["children"],
- cmp=lambda x, y: x.CompareRootGroup(y),
+ key=cmp_to_key(lambda x, y: x.CompareRootGroup(y)),
)
# Sort everything else by putting group before files, and going
@@ -2990,9 +2990,7 @@ class PBXProject(XCContainerPortal):
# Xcode seems to sort this list case-insensitively
self._properties["projectReferences"] = sorted(
self._properties["projectReferences"],
- cmp=lambda x, y: cmp(
- x["ProjectRef"].Name().lower(), y["ProjectRef"].Name().lower()
- ),
+ key=lambda x: x["ProjectRef"].Name().lower
)
else:
# The link already exists. Pull out the relevant data.
@@ -3124,7 +3122,8 @@ class PBXProject(XCContainerPortal):
product_group = ref_dict["ProductGroup"]
product_group._properties["children"] = sorted(
product_group._properties["children"],
- cmp=lambda x, y, rp=remote_products: CompareProducts(x, y, rp),
+ key=cmp_to_key(
+ lambda x, y, rp=remote_products: CompareProducts(x, y, rp)),
)
@@ -3159,7 +3158,7 @@ class XCProjectFile(XCObject):
else:
self._XCPrint(file, 0, "{\n")
for property, value in sorted(
- self._properties.items(), cmp=lambda x, y: cmp(x, y)
+ self._properties.items()
):
if property == "objects":
self._PrintObjects(file)
@@ -3187,7 +3186,7 @@ class XCProjectFile(XCObject):
self._XCPrint(file, 0, "\n")
self._XCPrint(file, 0, "/* Begin " + class_name + " section */\n")
for object in sorted(
- objects_by_class[class_name], cmp=lambda x, y: cmp(x.id, y.id)
+ objects_by_class[class_name], key=attrgetter("id")
):
object.Print(file)
self._XCPrint(file, 0, "/* End " + class_name + " section */\n")
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py b/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
index 0a945322b..530196366 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
@@ -39,12 +39,12 @@ def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
writer.write(">%s" % newl)
for node in self.childNodes:
node.writexml(writer, indent + addindent, addindent, newl)
- writer.write("%s</%s>%s" % (indent, self.tagName, newl))
+ writer.write(f"{indent}</{self.tagName}>{newl}")
else:
writer.write("/>%s" % newl)
-class XmlFix(object):
+class XmlFix:
"""Object to manage temporary patching of xml.dom.minidom."""
def __init__(self):
diff --git a/node_modules/node-gyp/gyp/setup.py b/node_modules/node-gyp/gyp/setup.py
index d1869c1b5..0ce46123c 100644
--- a/node_modules/node-gyp/gyp/setup.py
+++ b/node_modules/node-gyp/gyp/setup.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -15,7 +15,7 @@ with open(path.join(here, "README.md")) as in_file:
setup(
name="gyp-next",
- version="0.6.2",
+ version="0.9.6",
description="A fork of the GYP build system for use in the Node.js projects",
long_description=long_description,
long_description_content_type="text/markdown",
@@ -25,7 +25,7 @@ setup(
package_dir={"": "pylib"},
packages=["gyp", "gyp.generator"],
entry_points={"console_scripts": ["gyp=gyp:script_main"]},
- python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
+ python_requires=">=3.6",
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
@@ -33,12 +33,10 @@ setup(
"License :: OSI Approved :: BSD License",
"Natural Language :: English",
"Programming Language :: Python",
- "Programming Language :: Python :: 2",
- "Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
],
)
diff --git a/node_modules/node-gyp/gyp/test_gyp.py b/node_modules/node-gyp/gyp/test_gyp.py
index 382e75272..9ba264170 100755
--- a/node_modules/node-gyp/gyp/test_gyp.py
+++ b/node_modules/node-gyp/gyp/test_gyp.py
@@ -1,11 +1,10 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gyptest.py -- test runner for GYP tests."""
-from __future__ import print_function
import argparse
import os
@@ -141,10 +140,7 @@ def main(argv=None):
if not args.quiet:
runner.print_results()
- if runner.failures:
- return 1
- else:
- return 0
+ return 1 if runner.failures else 0
def print_configuration_info():
@@ -153,8 +149,8 @@ def print_configuration_info():
sys.path.append(os.path.abspath("test/lib"))
import TestMac
- print(" Mac %s %s" % (platform.mac_ver()[0], platform.mac_ver()[2]))
- print(" Xcode %s" % TestMac.Xcode.Version())
+ print(f" Mac {platform.mac_ver()[0]} {platform.mac_ver()[2]}")
+ print(f" Xcode {TestMac.Xcode.Version()}")
elif sys.platform == "win32":
sys.path.append(os.path.abspath("pylib"))
import gyp.MSVSVersion
@@ -163,12 +159,12 @@ def print_configuration_info():
print(" MSVS %s" % gyp.MSVSVersion.SelectVisualStudioVersion().Description())
elif sys.platform in ("linux", "linux2"):
print(" Linux %s" % " ".join(platform.linux_distribution()))
- print(" Python %s" % platform.python_version())
- print(" PYTHONPATH=%s" % os.environ["PYTHONPATH"])
+ print(f" Python {platform.python_version()}")
+ print(f" PYTHONPATH={os.environ['PYTHONPATH']}")
print()
-class Runner(object):
+class Runner:
def __init__(self, formats, tests, gyp_options, verbose):
self.formats = formats
self.tests = tests
@@ -217,19 +213,15 @@ class Runner(object):
res = "skipped"
elif proc.returncode:
res = "failed"
- self.failures.append("(%s) %s" % (test, fmt))
+ self.failures.append(f"({test}) {fmt}")
else:
res = "passed"
- res_msg = " %s %.3fs" % (res, took)
+ res_msg = f" {res} {took:.3f}s"
self.print_(res_msg)
- if (
- stdout
- and not stdout.endswith("PASSED\n")
- and not (stdout.endswith("NO RESULT\n"))
- ):
+ if stdout and not stdout.endswith(("PASSED\n", "NO RESULT\n")):
print()
- print("\n".join(" %s" % line for line in stdout.splitlines()))
+ print("\n".join(f" {line}" for line in stdout.splitlines()))
elif not self.isatty:
print()
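The collapsed condition above relies on str.endswith accepting a tuple of suffixes, which replaces the chained boolean clauses. A one-line sketch:

    stdout = "running...\nPASSED\n"
    if stdout and not stdout.endswith(("PASSED\n", "NO RESULT\n")):
        print("unexpected test output")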
diff --git a/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el b/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el
index 11b849788..07afc58a9 100644
--- a/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el
+++ b/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el
@@ -30,7 +30,7 @@
"For the purposes of face comparison, we're not interested in the
differences between certain faces. For example, the difference between
font-lock-comment-delimiter and font-lock-comment-face."
- (case face
+ (cl-case face
((font-lock-comment-delimiter-face) font-lock-comment-face)
(t face)))
diff --git a/node_modules/node-gyp/gyp/tools/emacs/gyp.el b/node_modules/node-gyp/gyp/tools/emacs/gyp.el
index b98b155ce..042ff3a92 100644
--- a/node_modules/node-gyp/gyp/tools/emacs/gyp.el
+++ b/node_modules/node-gyp/gyp/tools/emacs/gyp.el
@@ -213,7 +213,7 @@
string-start)
(setq string-start (gyp-parse-to limit))
(if string-start
- (setq group (case (gyp-section-at-point)
+ (setq group (cl-case (gyp-section-at-point)
('dependencies 1)
('variables 2)
('conditions 2)
diff --git a/node_modules/node-gyp/gyp/tools/graphviz.py b/node_modules/node-gyp/gyp/tools/graphviz.py
index 1f3acf37f..f19426b69 100755
--- a/node_modules/node-gyp/gyp/tools/graphviz.py
+++ b/node_modules/node-gyp/gyp/tools/graphviz.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -8,7 +8,6 @@
generate input suitable for graphviz to render a dependency graph of
targets."""
-from __future__ import print_function
import collections
import json
@@ -66,7 +65,7 @@ def WriteGraph(edges):
target = targets[0]
build_file, target_name, toolset = ParseTarget(target)
print(
- ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename, target_name)
+ f' "{target}" [shape=box, label="{filename}\\n{target_name}"]'
)
else:
# Group multiple nodes together in a subgraph.
@@ -74,14 +73,14 @@ def WriteGraph(edges):
print(' label = "%s"' % filename)
for target in targets:
build_file, target_name, toolset = ParseTarget(target)
- print(' "%s" [label="%s"]' % (target, target_name))
+ print(f' "{target}" [label="{target_name}"]')
print(" }")
# Now that we've placed all the nodes within subgraphs, output all
# the edges between nodes.
for src, dsts in edges.items():
for dst in dsts:
- print(' "%s" -> "%s"' % (src, dst))
+ print(f' "{src}" -> "{dst}"')
print("}")
diff --git a/node_modules/node-gyp/gyp/tools/pretty_gyp.py b/node_modules/node-gyp/gyp/tools/pretty_gyp.py
index 7313b4fe1..4ffa44455 100755
--- a/node_modules/node-gyp/gyp/tools/pretty_gyp.py
+++ b/node_modules/node-gyp/gyp/tools/pretty_gyp.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -6,7 +6,6 @@
"""Pretty-prints the contents of a GYP file."""
-from __future__ import print_function
import sys
import re
@@ -34,7 +33,7 @@ def mask_comments(input):
def quote_replace(matchobj):
- return "%s%s%s%s" % (
+ return "{}{}{}{}".format(
matchobj.group(1),
matchobj.group(2),
"x" * len(matchobj.group(3)),
diff --git a/node_modules/node-gyp/gyp/tools/pretty_sln.py b/node_modules/node-gyp/gyp/tools/pretty_sln.py
index 2b1cb1de7..6ca0cd12a 100755
--- a/node_modules/node-gyp/gyp/tools/pretty_sln.py
+++ b/node_modules/node-gyp/gyp/tools/pretty_sln.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -12,7 +12,6 @@
Then it outputs a possible build order.
"""
-from __future__ import print_function
import os
import re
diff --git a/node_modules/node-gyp/gyp/tools/pretty_vcproj.py b/node_modules/node-gyp/gyp/tools/pretty_vcproj.py
index b171fae6c..00d32debd 100755
--- a/node_modules/node-gyp/gyp/tools/pretty_vcproj.py
+++ b/node_modules/node-gyp/gyp/tools/pretty_vcproj.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
@@ -12,7 +12,6 @@
It outputs the resulting xml to stdout.
"""
-from __future__ import print_function
import os
import sys
@@ -21,27 +20,22 @@ from xml.dom.minidom import parse
from xml.dom.minidom import Node
__author__ = "nsylvain (Nicolas Sylvain)"
-
-try:
- cmp
-except NameError:
-
- def cmp(x, y):
- return (x > y) - (x < y)
+ARGUMENTS = None
+REPLACEMENTS = dict()
-REPLACEMENTS = dict()
-ARGUMENTS = None
+def cmp(x, y):
+ return (x > y) - (x < y)
-class CmpTuple(object):
+class CmpTuple:
"""Compare function between 2 tuple."""
def __call__(self, x, y):
return cmp(x[0], y[0])
-class CmpNode(object):
+class CmpNode:
"""Compare function between 2 xml nodes."""
def __call__(self, x, y):
@@ -72,7 +66,7 @@ class CmpNode(object):
def PrettyPrintNode(node, indent=0):
if node.nodeType == Node.TEXT_NODE:
if node.data.strip():
- print("%s%s" % (" " * indent, node.data.strip()))
+ print("{}{}".format(" " * indent, node.data.strip()))
return
if node.childNodes:
@@ -84,23 +78,23 @@ def PrettyPrintNode(node, indent=0):
# Print the main tag
if attr_count == 0:
- print("%s<%s>" % (" " * indent, node.nodeName))
+ print("{}<{}>".format(" " * indent, node.nodeName))
else:
- print("%s<%s" % (" " * indent, node.nodeName))
+ print("{}<{}".format(" " * indent, node.nodeName))
all_attributes = []
for (name, value) in node.attributes.items():
all_attributes.append((name, value))
all_attributes.sort(CmpTuple())
for (name, value) in all_attributes:
- print('%s %s="%s"' % (" " * indent, name, value))
+ print('{} {}="{}"'.format(" " * indent, name, value))
print("%s>" % (" " * indent))
if node.nodeValue:
- print("%s %s" % (" " * indent, node.nodeValue))
+ print("{} {}".format(" " * indent, node.nodeValue))
for sub_node in node.childNodes:
PrettyPrintNode(sub_node, indent=indent + 2)
- print("%s</%s>" % (" " * indent, node.nodeName))
+ print("{}</{}>".format(" " * indent, node.nodeName))
def FlattenFilter(node):
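Note that cmp-style callables such as CmpTuple only work with Python 3 sorts when adapted via functools.cmp_to_key; list.sort() no longer accepts a comparison function positionally. A minimal sketch of the adaptation (illustrative; the surrounding file still passes CmpTuple() directly):

    from functools import cmp_to_key

    def cmp(x, y):
        return (x > y) - (x < y)

    class CmpTuple:
        """Compare function between 2 tuples."""
        def __call__(self, x, y):
            return cmp(x[0], y[0])

    attrs = [("b", 2), ("a", 1)]
    attrs.sort(key=cmp_to_key(CmpTuple()))  # sort(CmpTuple()) raises TypeError
    print(attrs)  # [('a', 1), ('b', 2)]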
diff --git a/node_modules/node-gyp/lib/configure.js b/node_modules/node-gyp/lib/configure.js
index d4342b9d7..038ccbf20 100644
--- a/node_modules/node-gyp/lib/configure.js
+++ b/node_modules/node-gyp/lib/configure.js
@@ -76,7 +76,9 @@ function configure (gyp, argv, callback) {
if (err) {
return callback(err)
}
- log.verbose('build dir', '"build" dir needed to be created?', isNew)
+ log.verbose(
+ 'build dir', '"build" dir needed to be created?', isNew ? 'Yes' : 'No'
+ )
if (win) {
findVisualStudio(release.semver, gyp.opts.msvs_version,
createConfigFile)
@@ -96,7 +98,7 @@ function configure (gyp, argv, callback) {
log.verbose('build/' + configFilename, 'creating config file')
- var config = process.config || {}
+ var config = process.config ? JSON.parse(JSON.stringify(process.config)) : {}
var defaults = config.target_defaults
var variables = config.variables
diff --git a/node_modules/node-gyp/lib/find-python.js b/node_modules/node-gyp/lib/find-python.js
index af269de2f..a445e825b 100644
--- a/node_modules/node-gyp/lib/find-python.js
+++ b/node_modules/node-gyp/lib/find-python.js
@@ -1,6 +1,5 @@
'use strict'
-const path = require('path')
const log = require('npmlog')
const semver = require('semver')
const cp = require('child_process')
@@ -8,6 +7,38 @@ const extend = require('util')._extend // eslint-disable-line
const win = process.platform === 'win32'
const logWithPrefix = require('./util').logWithPrefix
+const systemDrive = process.env.SystemDrive || 'C:'
+const username = process.env.USERNAME || process.env.USER || getOsUserInfo()
+const localAppData = process.env.LOCALAPPDATA || `${systemDrive}\\${username}\\AppData\\Local`
+const foundLocalAppData = process.env.LOCALAPPDATA || username
+const programFiles = process.env.ProgramW6432 || process.env.ProgramFiles || `${systemDrive}\\Program Files`
+const programFilesX86 = process.env['ProgramFiles(x86)'] || `${programFiles} (x86)`
+
+const winDefaultLocationsArray = []
+for (const majorMinor of ['39', '38', '37', '36']) {
+ if (foundLocalAppData) {
+ winDefaultLocationsArray.push(
+ `${localAppData}\\Programs\\Python\\Python${majorMinor}\\python.exe`,
+ `${programFiles}\\Python${majorMinor}\\python.exe`,
+ `${localAppData}\\Programs\\Python\\Python${majorMinor}-32\\python.exe`,
+ `${programFiles}\\Python${majorMinor}-32\\python.exe`,
+ `${programFilesX86}\\Python${majorMinor}-32\\python.exe`
+ )
+ } else {
+ winDefaultLocationsArray.push(
+ `${programFiles}\\Python${majorMinor}\\python.exe`,
+ `${programFiles}\\Python${majorMinor}-32\\python.exe`,
+ `${programFilesX86}\\Python${majorMinor}-32\\python.exe`
+ )
+ }
+}
+
+function getOsUserInfo () {
+ try {
+ return require('os').userInfo().username
+ } catch (e) {}
+}
+
function PythonFinder (configPython, callback) {
this.callback = callback
this.configPython = configPython
@@ -18,17 +49,14 @@ PythonFinder.prototype = {
log: logWithPrefix(log, 'find Python'),
argsExecutable: ['-c', 'import sys; print(sys.executable);'],
argsVersion: ['-c', 'import sys; print("%s.%s.%s" % sys.version_info[:3]);'],
- semverRange: '2.7.x || >=3.5.0',
+ semverRange: '>=3.6.0',
// These can be overridden for testing:
execFile: cp.execFile,
env: process.env,
win: win,
pyLauncher: 'py.exe',
- winDefaultLocations: [
- path.join(process.env.SystemDrive || 'C:', 'Python37', 'python.exe'),
- path.join(process.env.SystemDrive || 'C:', 'Python27', 'python.exe')
- ],
+ winDefaultLocations: winDefaultLocationsArray,
// Logs a message at verbose level, but also saves it to be displayed later
// at error level if an error occurs. This should help diagnose the problem.
@@ -96,11 +124,6 @@ PythonFinder.prototype = {
before: () => { this.addLog('checking if "python" can be used') },
check: this.checkCommand,
arg: 'python'
- },
- {
- before: () => { this.addLog('checking if "python2" can be used') },
- check: this.checkCommand,
- arg: 'python2'
}
]
@@ -119,7 +142,7 @@ PythonFinder.prototype = {
checks.push({
before: () => {
this.addLog(
- 'checking if the py launcher can be used to find Python')
+ 'checking if the py launcher can be used to find Python 3')
},
check: this.checkPyLauncher
})
@@ -188,10 +211,15 @@ PythonFinder.prototype = {
// Distributions of Python on Windows by default install with the "py.exe"
// Python launcher which is more likely to exist than the Python executable
// being in the $PATH.
+ // Because the Python launcher supports Python 2 and Python 3, we should
+ // explicitly request a Python 3 version. This is done by supplying "-3" as
+ // the first command line argument. Since "py.exe -3" would be an invalid
+ // executable for "execFile", we have to use the launcher to figure out
+ // where the actual "python.exe" executable is located.
checkPyLauncher: function checkPyLauncher (errorCallback) {
this.log.verbose(
- `- executing "${this.pyLauncher}" to get Python executable path`)
- this.run(this.pyLauncher, this.argsExecutable, false,
+ `- executing "${this.pyLauncher}" to get Python 3 executable path`)
+ this.run(this.pyLauncher, ['-3', ...this.argsExecutable], false,
function (err, execPath) {
// Possible outcomes: same as checkCommand
if (err) {
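The "-3" handling above can be reproduced outside node-gyp: py.exe is the Windows Python launcher, and because "py.exe -3" is not itself an executable path, the flag has to be a separate argument whose output is then used to locate the real python.exe. A Windows-only sketch (assumes the python.org launcher is installed):

    import subprocess

    # Ask the launcher for a Python 3 interpreter and capture its path.
    exe = subprocess.check_output(
        ["py.exe", "-3", "-c", "import sys; print(sys.executable)"]
    ).decode().strip()
    print(exe)  # e.g. a path ending in \Python39\python.exe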
diff --git a/node_modules/node-gyp/lib/find-visualstudio.js b/node_modules/node-gyp/lib/find-visualstudio.js
index 9c6dad90f..f2cce327e 100644
--- a/node_modules/node-gyp/lib/find-visualstudio.js
+++ b/node_modules/node-gyp/lib/find-visualstudio.js
@@ -399,7 +399,7 @@ VisualStudioFinder.prototype = {
})
},
- // After finding a usable version of Visual Stuido:
+ // After finding a usable version of Visual Studio:
// - add it to validVersions to be displayed at the end if a specific
// version was requested and not found;
// - check if this is the version that was requested.
diff --git a/node_modules/node-gyp/lib/install.js b/node_modules/node-gyp/lib/install.js
index f9fa2b34b..99f6d8592 100644
--- a/node_modules/node-gyp/lib/install.js
+++ b/node_modules/node-gyp/lib/install.js
@@ -4,55 +4,42 @@ const fs = require('graceful-fs')
const os = require('os')
const tar = require('tar')
const path = require('path')
+const util = require('util')
+const stream = require('stream')
const crypto = require('crypto')
const log = require('npmlog')
const semver = require('semver')
-const request = require('request')
+const fetch = require('make-fetch-happen')
const processRelease = require('./process-release')
const win = process.platform === 'win32'
-const getProxyFromURI = require('./proxy')
+const streamPipeline = util.promisify(stream.pipeline)
-function install (fs, gyp, argv, callback) {
- var release = processRelease(argv, gyp, process.version, process.release)
+/**
+ * @param {typeof import('graceful-fs')} fs
+ */
- // ensure no double-callbacks happen
- function cb (err) {
- if (cb.done) {
- return
- }
- cb.done = true
- if (err) {
- log.warn('install', 'got an error, rolling back install')
- // roll-back the install if anything went wrong
- gyp.commands.remove([release.versionDir], function () {
- callback(err)
- })
- } else {
- callback(null, release.version)
- }
- }
+async function install (fs, gyp, argv) {
+ const release = processRelease(argv, gyp, process.version, process.release)
// Determine which node dev files version we are installing
log.verbose('install', 'input version string %j', release.version)
if (!release.semver) {
// could not parse the version string with semver
- return callback(new Error('Invalid version number: ' + release.version))
+ throw new Error('Invalid version number: ' + release.version)
}
if (semver.lt(release.version, '0.8.0')) {
- return callback(new Error('Minimum target version is `0.8.0` or greater. Got: ' + release.version))
+ throw new Error('Minimum target version is `0.8.0` or greater. Got: ' + release.version)
}
// 0.x.y-pre versions are not published yet and cannot be installed. Bail.
if (release.semver.prerelease[0] === 'pre') {
log.verbose('detected "pre" node version', release.version)
- if (gyp.opts.nodedir) {
- log.verbose('--nodedir flag was passed; skipping install', gyp.opts.nodedir)
- callback()
- } else {
- callback(new Error('"pre" versions of node cannot be installed, use the --nodedir flag instead'))
+ if (!gyp.opts.nodedir) {
+ throw new Error('"pre" versions of node cannot be installed, use the --nodedir flag instead')
}
+ log.verbose('--nodedir flag was passed; skipping install', gyp.opts.nodedir)
return
}
@@ -60,296 +47,225 @@ function install (fs, gyp, argv, callback) {
log.verbose('install', 'installing version: %s', release.versionDir)
// the directory where the dev files will be installed
- var devDir = path.resolve(gyp.devDir, release.versionDir)
+ const devDir = path.resolve(gyp.devDir, release.versionDir)
// If '--ensure' was passed, then don't *always* install the version;
// check if it is already installed, and only install when needed
if (gyp.opts.ensure) {
log.verbose('install', '--ensure was passed, so won\'t reinstall if already installed')
- fs.stat(devDir, function (err) {
- if (err) {
- if (err.code === 'ENOENT') {
- log.verbose('install', 'version not already installed, continuing with install', release.version)
- go()
- } else if (err.code === 'EACCES') {
- eaccesFallback(err)
- } else {
- cb(err)
+ try {
+ await fs.promises.stat(devDir)
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ log.verbose('install', 'version not already installed, continuing with install', release.version)
+ try {
+ return await go()
+ } catch (err) {
+ return rollback(err)
}
- return
+ } else if (err.code === 'EACCES') {
+ return eaccesFallback(err)
}
- log.verbose('install', 'version is already installed, need to check "installVersion"')
- var installVersionFile = path.resolve(devDir, 'installVersion')
- fs.readFile(installVersionFile, 'ascii', function (err, ver) {
- if (err && err.code !== 'ENOENT') {
- return cb(err)
- }
- var installVersion = parseInt(ver, 10) || 0
- log.verbose('got "installVersion"', installVersion)
- log.verbose('needs "installVersion"', gyp.package.installVersion)
- if (installVersion < gyp.package.installVersion) {
- log.verbose('install', 'version is no good; reinstalling')
- go()
- } else {
- log.verbose('install', 'version is good')
- cb()
- }
- })
- })
+ throw err
+ }
+ log.verbose('install', 'version is already installed, need to check "installVersion"')
+ const installVersionFile = path.resolve(devDir, 'installVersion')
+ let installVersion = 0
+ try {
+ const ver = await fs.promises.readFile(installVersionFile, 'ascii')
+ installVersion = parseInt(ver, 10) || 0
+ } catch (err) {
+ if (err.code !== 'ENOENT') {
+ throw err
+ }
+ }
+ log.verbose('got "installVersion"', installVersion)
+ log.verbose('needs "installVersion"', gyp.package.installVersion)
+ if (installVersion < gyp.package.installVersion) {
+ log.verbose('install', 'version is no good; reinstalling')
+ try {
+ return await go()
+ } catch (err) {
+ return rollback(err)
+ }
+ }
+ log.verbose('install', 'version is good')
} else {
- go()
- }
-
- function getContentSha (res, callback) {
- var shasum = crypto.createHash('sha256')
- res.on('data', function (chunk) {
- shasum.update(chunk)
- }).on('end', function () {
- callback(null, shasum.digest('hex'))
- })
+ try {
+ return await go()
+ } catch (err) {
+ return rollback(err)
+ }
}
- function go () {
+ async function go () {
log.verbose('ensuring nodedir is created', devDir)
// first create the dir for the node dev files
- fs.mkdir(devDir, { recursive: true }, function (err, created) {
- if (err) {
- if (err.code === 'EACCES') {
- eaccesFallback(err)
- } else {
- cb(err)
- }
- return
- }
+ try {
+ const created = await fs.promises.mkdir(devDir, { recursive: true })
if (created) {
log.verbose('created nodedir', created)
}
-
- // now download the node tarball
- var tarPath = gyp.opts.tarball
- var badDownload = false
- var extractCount = 0
- var contentShasums = {}
- var expectShasums = {}
-
- // checks if a file to be extracted from the tarball is valid.
- // only .h header files and the gyp files get extracted
- function isValid (path) {
- var isValid = valid(path)
- if (isValid) {
- log.verbose('extracted file from tarball', path)
- extractCount++
- } else {
- // invalid
- log.silly('ignoring from tarball', path)
- }
- return isValid
+ } catch (err) {
+ if (err.code === 'EACCES') {
+ return eaccesFallback(err)
}
- // download the tarball and extract!
- if (tarPath) {
- return tar.extract({
- file: tarPath,
- strip: 1,
- filter: isValid,
- cwd: devDir
- }).then(afterTarball, cb)
- }
+ throw err
+ }
- try {
- var req = download(gyp, process.env, release.tarballUrl)
- } catch (e) {
- return cb(e)
+ // now download the node tarball
+ const tarPath = gyp.opts.tarball
+ let extractCount = 0
+ const contentShasums = {}
+ const expectShasums = {}
+
+ // checks if a file to be extracted from the tarball is valid.
+ // only .h header files and the gyp files get extracted
+ function isValid (path) {
+ const isValid = valid(path)
+ if (isValid) {
+ log.verbose('extracted file from tarball', path)
+ extractCount++
+ } else {
+ // invalid
+ log.silly('ignoring from tarball', path)
}
+ return isValid
+ }
- // something went wrong downloading the tarball?
- req.on('error', function (err) {
- if (err.code === 'ENOTFOUND') {
- return cb(new Error('This is most likely not a problem with node-gyp or the package itself and\n' +
- 'is related to network connectivity. In most cases you are behind a proxy or have bad \n' +
- 'network settings.'))
- }
- badDownload = true
- cb(err)
- })
+ // download the tarball and extract!
- req.on('close', function () {
- if (extractCount === 0) {
- cb(new Error('Connection closed while downloading tarball file'))
- }
+ if (tarPath) {
+ await tar.extract({
+ file: tarPath,
+ strip: 1,
+ filter: isValid,
+ cwd: devDir
})
+ } else {
+ try {
+ const res = await download(gyp, release.tarballUrl)
- req.on('response', function (res) {
- if (res.statusCode !== 200) {
- badDownload = true
- cb(new Error(res.statusCode + ' response downloading ' + release.tarballUrl))
- return
+ if (res.status !== 200) {
+ throw new Error(`${res.status} response downloading ${release.tarballUrl}`)
}
- // content checksum
- getContentSha(res, function (_, checksum) {
- var filename = path.basename(release.tarballUrl).trim()
- contentShasums[filename] = checksum
- log.verbose('content checksum', filename, checksum)
- })
-
- // start unzipping and untaring
- res.pipe(tar.extract({
- strip: 1,
- cwd: devDir,
- filter: isValid
- }).on('close', afterTarball).on('error', cb))
- })
- // invoked after the tarball has finished being extracted
- function afterTarball () {
- if (badDownload) {
- return
- }
- if (extractCount === 0) {
- return cb(new Error('There was a fatal problem while downloading/extracting the tarball'))
+ await streamPipeline(
+ res.body,
+ // content checksum
+ new ShaSum((_, checksum) => {
+ const filename = path.basename(release.tarballUrl).trim()
+ contentShasums[filename] = checksum
+ log.verbose('content checksum', filename, checksum)
+ }),
+ tar.extract({
+ strip: 1,
+ cwd: devDir,
+ filter: isValid
+ })
+ )
+ } catch (err) {
+ // something went wrong downloading the tarball?
+ if (err.code === 'ENOTFOUND') {
+ throw new Error('This is most likely not a problem with node-gyp or the package itself and\n' +
+ 'is related to network connectivity. In most cases you are behind a proxy or have bad \n' +
+ 'network settings.')
}
- log.verbose('tarball', 'done parsing tarball')
- var async = 0
+ throw err
+ }
+ }
- if (win) {
- // need to download node.lib
- async++
- downloadNodeLib(deref)
- }
+ // invoked after the tarball has finished being extracted
+ if (extractCount === 0) {
+ throw new Error('There was a fatal problem while downloading/extracting the tarball')
+ }
- // write the "installVersion" file
- async++
- var installVersionPath = path.resolve(devDir, 'installVersion')
- fs.writeFile(installVersionPath, gyp.package.installVersion + '\n', deref)
+ log.verbose('tarball', 'done parsing tarball')
+
+ const installVersionPath = path.resolve(devDir, 'installVersion')
+ await Promise.all([
+ // need to download node.lib
+ ...(win ? downloadNodeLib() : []),
+ // write the "installVersion" file
+ fs.promises.writeFile(installVersionPath, gyp.package.installVersion + '\n'),
+ // Only download SHASUMS.txt if we downloaded something in need of SHA verification
+ ...(!tarPath || win ? [downloadShasums()] : [])
+ ])
+
+ log.verbose('download contents checksum', JSON.stringify(contentShasums))
+ // check content shasums
+ for (const k in contentShasums) {
+ log.verbose('validating download checksum for ' + k, '(%s == %s)', contentShasums[k], expectShasums[k])
+ if (contentShasums[k] !== expectShasums[k]) {
+ throw new Error(k + ' local checksum ' + contentShasums[k] + ' not match remote ' + expectShasums[k])
+ }
+ }
- // Only download SHASUMS.txt if we downloaded something in need of SHA verification
- if (!tarPath || win) {
- // download SHASUMS.txt
- async++
- downloadShasums(deref)
- }
+ async function downloadShasums () {
+ log.verbose('check download content checksum, need to download `SHASUMS256.txt`...')
+ log.verbose('checksum url', release.shasumsUrl)
- if (async === 0) {
- // no async tasks required
- cb()
- }
+ const res = await download(gyp, release.shasumsUrl)
- function deref (err) {
- if (err) {
- return cb(err)
- }
+ if (res.status !== 200) {
+ throw new Error(`${res.status} status code downloading checksum`)
+ }
- async--
- if (!async) {
- log.verbose('download contents checksum', JSON.stringify(contentShasums))
- // check content shasums
- for (var k in contentShasums) {
- log.verbose('validating download checksum for ' + k, '(%s == %s)', contentShasums[k], expectShasums[k])
- if (contentShasums[k] !== expectShasums[k]) {
- cb(new Error(k + ' local checksum ' + contentShasums[k] + ' not match remote ' + expectShasums[k]))
- return
- }
- }
- cb()
- }
+ for (const line of (await res.text()).trim().split('\n')) {
+ const items = line.trim().split(/\s+/)
+ if (items.length !== 2) {
+ return
}
+
+ // 0035d18e2dcf9aad669b1c7c07319e17abfe3762 ./node-v0.11.4.tar.gz
+ const name = items[1].replace(/^\.\//, '')
+ expectShasums[name] = items[0]
}
- function downloadShasums (done) {
- log.verbose('check download content checksum, need to download `SHASUMS256.txt`...')
- log.verbose('checksum url', release.shasumsUrl)
- try {
- var req = download(gyp, process.env, release.shasumsUrl)
- } catch (e) {
- return cb(e)
- }
+ log.verbose('checksum data', JSON.stringify(expectShasums))
+ }
- req.on('error', done)
- req.on('response', function (res) {
- if (res.statusCode !== 200) {
- done(new Error(res.statusCode + ' status code downloading checksum'))
- return
+ function downloadNodeLib () {
+ log.verbose('on Windows; need to download `' + release.name + '.lib`...')
+ const archs = ['ia32', 'x64', 'arm64']
+ return archs.map(async (arch) => {
+ const dir = path.resolve(devDir, arch)
+ const targetLibPath = path.resolve(dir, release.name + '.lib')
+ const { libUrl, libPath } = release[arch]
+ const name = `${arch} ${release.name}.lib`
+ log.verbose(name, 'dir', dir)
+ log.verbose(name, 'url', libUrl)
+
+ await fs.promises.mkdir(dir, { recursive: true })
+ log.verbose('streaming', name, 'to:', targetLibPath)
+
+ const res = await download(gyp, libUrl)
+
+ if (res.status === 403 || res.status === 404) {
+ if (arch === 'arm64') {
+ // Arm64 is a newer platform on Windows and not all node distributions provide it.
+ log.verbose(`${name} was not found in ${libUrl}`)
+ } else {
+ log.warn(`${name} was not found in ${libUrl}`)
}
+ return
+ } else if (res.status !== 200) {
+ throw new Error(`${res.status} status code downloading ${name}`)
+ }
- var chunks = []
- res.on('data', function (chunk) {
- chunks.push(chunk)
- })
- res.on('end', function () {
- var lines = Buffer.concat(chunks).toString().trim().split('\n')
- lines.forEach(function (line) {
- var items = line.trim().split(/\s+/)
- if (items.length !== 2) {
- return
- }
-
- // 0035d18e2dcf9aad669b1c7c07319e17abfe3762 ./node-v0.11.4.tar.gz
- var name = items[1].replace(/^\.\//, '')
- expectShasums[name] = items[0]
- })
-
- log.verbose('checksum data', JSON.stringify(expectShasums))
- done()
- })
- })
- }
-
- function downloadNodeLib (done) {
- log.verbose('on Windows; need to download `' + release.name + '.lib`...')
- var archs = ['ia32', 'x64', 'arm64']
- var async = archs.length
- archs.forEach(function (arch) {
- var dir = path.resolve(devDir, arch)
- var targetLibPath = path.resolve(dir, release.name + '.lib')
- var libUrl = release[arch].libUrl
- var libPath = release[arch].libPath
- var name = arch + ' ' + release.name + '.lib'
- log.verbose(name, 'dir', dir)
- log.verbose(name, 'url', libUrl)
-
- fs.mkdir(dir, { recursive: true }, function (err) {
- if (err) {
- return done(err)
- }
- log.verbose('streaming', name, 'to:', targetLibPath)
-
- try {
- var req = download(gyp, process.env, libUrl, cb)
- } catch (e) {
- return cb(e)
- }
-
- req.on('error', done)
- req.on('response', function (res) {
- if (res.statusCode === 403 || res.statusCode === 404) {
- if (arch === 'arm64') {
- // Arm64 is a newer platform on Windows and not all node distributions provide it.
- log.verbose(`${name} was not found in ${libUrl}`)
- } else {
- log.warn(`${name} was not found in ${libUrl}`)
- }
- return
- } else if (res.statusCode !== 200) {
- done(new Error(res.statusCode + ' status code downloading ' + name))
- return
- }
-
- getContentSha(res, function (_, checksum) {
- contentShasums[libPath] = checksum
- log.verbose('content checksum', libPath, checksum)
- })
-
- var ws = fs.createWriteStream(targetLibPath)
- ws.on('error', cb)
- req.pipe(ws)
- })
- req.on('end', function () { --async || done() })
- })
- })
- } // downloadNodeLib()
- }) // mkdir()
+ return streamPipeline(
+ res.body,
+ new ShaSum((_, checksum) => {
+ contentShasums[libPath] = checksum
+ log.verbose('content checksum', libPath, checksum)
+ }),
+ fs.createWriteStream(targetLibPath)
+ )
+ })
+ } // downloadNodeLib()
} // go()
/**
@@ -358,10 +274,17 @@ function install (fs, gyp, argv, callback) {
function valid (file) {
// header files
- var extname = path.extname(file)
+ const extname = path.extname(file)
return extname === '.h' || extname === '.gypi'
}
+ async function rollback (err) {
+ log.warn('install', 'got an error, rolling back install')
+ // roll-back the install if anything went wrong
+ await util.promisify(gyp.commands.remove)([release.versionDir])
+ throw err
+ }
+
/**
* The EACCES fallback is a workaround for npm's `sudo` behavior, where
* it drops the permissions before invoking any child processes (like
@@ -371,14 +294,14 @@ function install (fs, gyp, argv, callback) {
* the compilation will succeed...
*/
- function eaccesFallback (err) {
- var noretry = '--node_gyp_internal_noretry'
+ async function eaccesFallback (err) {
+ const noretry = '--node_gyp_internal_noretry'
if (argv.indexOf(noretry) !== -1) {
- return cb(err)
+ throw err
}
- var tmpdir = os.tmpdir()
+ const tmpdir = os.tmpdir()
gyp.devDir = path.resolve(tmpdir, '.node-gyp')
- var userString = ''
+ let userString = ''
try {
// os.userInfo can fail on some systems, it's not critical here
userString = ` ("${os.userInfo().username}")`
@@ -389,59 +312,65 @@ function install (fs, gyp, argv, callback) {
log.verbose('tmpdir == cwd', 'automatically will remove dev files after to save disk space')
gyp.todo.push({ name: 'remove', args: argv })
}
- gyp.commands.install([noretry].concat(argv), cb)
+ return util.promisify(gyp.commands.install)([noretry].concat(argv))
+ }
+}
+
+class ShaSum extends stream.Transform {
+ constructor (callback) {
+ super()
+ this._callback = callback
+ this._digester = crypto.createHash('sha256')
+ }
+
+ _transform (chunk, _, callback) {
+ this._digester.update(chunk)
+ callback(null, chunk)
+ }
+
+ _flush (callback) {
+ this._callback(null, this._digester.digest('hex'))
+ callback()
}
}
-function download (gyp, env, url) {
+async function download (gyp, url) {
log.http('GET', url)
- var requestOpts = {
- uri: url,
+ const requestOpts = {
headers: {
- 'User-Agent': 'node-gyp v' + gyp.version + ' (node ' + process.version + ')',
+ 'User-Agent': `node-gyp v${gyp.version} (node ${process.version})`,
Connection: 'keep-alive'
- }
+ },
+ proxy: gyp.opts.proxy,
+ noProxy: gyp.opts.noproxy
}
- var cafile = gyp.opts.cafile
+ const cafile = gyp.opts.cafile
if (cafile) {
- requestOpts.ca = readCAFile(cafile)
- }
-
- // basic support for a proxy server
- var proxyUrl = getProxyFromURI(gyp, env, url)
- if (proxyUrl) {
- if (/^https?:\/\//i.test(proxyUrl)) {
- log.verbose('download', 'using proxy url: "%s"', proxyUrl)
- requestOpts.proxy = proxyUrl
- } else {
- log.warn('download', 'ignoring invalid "proxy" config setting: "%s"', proxyUrl)
- }
+ requestOpts.ca = await readCAFile(cafile)
}
- var req = request(requestOpts)
- req.on('response', function (res) {
- log.http(res.statusCode, url)
- })
+ const res = await fetch(url, requestOpts)
+ log.http(res.status, res.url)
- return req
+ return res
}
-function readCAFile (filename) {
+async function readCAFile (filename) {
// The CA file can contain multiple certificates so split on certificate
// boundaries. [\S\s]*? is used to match everything including newlines.
- var ca = fs.readFileSync(filename, 'utf8')
- var re = /(-----BEGIN CERTIFICATE-----[\S\s]*?-----END CERTIFICATE-----)/g
+ const ca = await fs.promises.readFile(filename, 'utf8')
+ const re = /(-----BEGIN CERTIFICATE-----[\S\s]*?-----END CERTIFICATE-----)/g
return ca.match(re)
}
module.exports = function (gyp, argv, callback) {
- return install(fs, gyp, argv, callback)
+ install(fs, gyp, argv).then(callback.bind(undefined, null), callback)
}
module.exports.test = {
- download: download,
- install: install,
- readCAFile: readCAFile
+ download,
+ install,
+ readCAFile
}
module.exports.usage = 'Install node development files for the specified node version.'
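
The refactor above threads every download through util.promisify(stream.pipeline) with a hashing Transform (ShaSum) in the middle, so a tarball or .lib file can be checksummed in the same pass that writes it to disk. A minimal standalone sketch of that pattern, using only Node built-ins; the hashWhileWriting helper and its file paths are illustrative, not part of the commit:

    'use strict'
    const crypto = require('crypto')
    const fs = require('fs')
    const stream = require('stream')
    const util = require('util')

    const streamPipeline = util.promisify(stream.pipeline)

    // Pass-through hash: update a SHA-256 digest as chunks flow by,
    // then report the hex digest once the stream ends.
    class ShaSum extends stream.Transform {
      constructor (onDigest) {
        super()
        this._onDigest = onDigest
        this._digester = crypto.createHash('sha256')
      }

      _transform (chunk, _, callback) {
        this._digester.update(chunk)
        callback(null, chunk) // forward the chunk unchanged
      }

      _flush (callback) {
        this._onDigest(this._digester.digest('hex'))
        callback()
      }
    }

    // Copy src to dest, computing the checksum in the same pass.
    async function hashWhileWriting (src, dest) {
      let checksum
      await streamPipeline(
        fs.createReadStream(src),
        new ShaSum((digest) => { checksum = digest }),
        fs.createWriteStream(dest)
      )
      return checksum
    }

The resulting hex digest is what the code above records in contentShasums and later compares against the parsed SHASUMS256.txt entries.
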
diff --git a/node_modules/node-gyp/lib/proxy.js b/node_modules/node-gyp/lib/proxy.js
deleted file mode 100644
index 92d9ed2f7..000000000
--- a/node_modules/node-gyp/lib/proxy.js
+++ /dev/null
@@ -1,92 +0,0 @@
-'use strict'
-// Taken from https://github.com/request/request/blob/212570b/lib/getProxyFromURI.js
-
-const url = require('url')
-
-function formatHostname (hostname) {
- // canonicalize the hostname, so that 'oogle.com' won't match 'google.com'
- return hostname.replace(/^\.*/, '.').toLowerCase()
-}
-
-function parseNoProxyZone (zone) {
- zone = zone.trim().toLowerCase()
-
- var zoneParts = zone.split(':', 2)
- var zoneHost = formatHostname(zoneParts[0])
- var zonePort = zoneParts[1]
- var hasPort = zone.indexOf(':') > -1
-
- return { hostname: zoneHost, port: zonePort, hasPort: hasPort }
-}
-
-function uriInNoProxy (uri, noProxy) {
- var port = uri.port || (uri.protocol === 'https:' ? '443' : '80')
- var hostname = formatHostname(uri.hostname)
- var noProxyList = noProxy.split(',')
-
- // iterate through the noProxyList until it finds a match.
- return noProxyList.map(parseNoProxyZone).some(function (noProxyZone) {
- var isMatchedAt = hostname.indexOf(noProxyZone.hostname)
- var hostnameMatched = (
- isMatchedAt > -1 &&
- (isMatchedAt === hostname.length - noProxyZone.hostname.length)
- )
-
- if (noProxyZone.hasPort) {
- return (port === noProxyZone.port) && hostnameMatched
- }
-
- return hostnameMatched
- })
-}
-
-function getProxyFromURI (gyp, env, uri) {
- // If a string URI/URL was given, parse it into a URL object
- if (typeof uri === 'string') {
- // eslint-disable-next-line
- uri = url.parse(uri)
- }
-
- // Decide the proper request proxy to use based on the request URI object and the
- // environmental variables (NO_PROXY, HTTP_PROXY, etc.)
- // respect NO_PROXY environment variables (see: https://lynx.invisible-island.net/lynx2.8.7/breakout/lynx_help/keystrokes/environments.html)
-
- var noProxy = gyp.opts.noproxy || env.NO_PROXY || env.no_proxy || env.npm_config_noproxy || ''
-
- // if the noProxy is a wildcard then return null
-
- if (noProxy === '*') {
- return null
- }
-
- // if the noProxy is not empty and the uri is found return null
-
- if (noProxy !== '' && uriInNoProxy(uri, noProxy)) {
- return null
- }
-
- // Check for HTTP or HTTPS Proxy in environment Else default to null
-
- if (uri.protocol === 'http:') {
- return gyp.opts.proxy ||
- env.HTTP_PROXY ||
- env.http_proxy ||
- env.npm_config_proxy || null
- }
-
- if (uri.protocol === 'https:') {
- return gyp.opts.proxy ||
- env.HTTPS_PROXY ||
- env.https_proxy ||
- env.HTTP_PROXY ||
- env.http_proxy ||
- env.npm_config_proxy || null
- }
-
- // if none of that works, return null
- // (What uri protocol are you using then?)
-
- return null
-}
-
-module.exports = getProxyFromURI
diff --git a/node_modules/node-gyp/macOS_Catalina.md b/node_modules/node-gyp/macOS_Catalina.md
index ca2fd2347..4fe0f29b2 100644
--- a/node_modules/node-gyp/macOS_Catalina.md
+++ b/node_modules/node-gyp/macOS_Catalina.md
@@ -40,7 +40,7 @@ To see if `Xcode Command Line Tools` is installed in a way that will work with `
curl -sL https://github.com/nodejs/node-gyp/raw/master/macOS_Catalina_acid_test.sh | bash
```
-If test succeeded, _you are done_! You should be ready to install `node-gyp`.
+If test succeeded, _you are done_! You should be ready to [install](https://github.com/nodejs/node-gyp#installation) `node-gyp`.
If test failed, there is a problem with your Xcode Command Line Tools installation. [Continue to Solutions](#Solutions).
@@ -89,7 +89,7 @@ There are three ways to install the Xcode libraries `node-gyp` needs on macOS. P
### I did all that and the acid test still does not pass :-(
1. `sudo rm -rf $(xcode-select -print-path)` # Enter root password. No output is normal.
2. `sudo rm -rf /Library/Developer/CommandLineTools` # Enter root password.
-3. `xcode-select --reset`
+3. `sudo xcode-select --reset`
4. `xcode-select --install`
5. If the [_acid test_ steps above](#The-acid-test) still does _not_ pass then...
6. `npm explore npm -g -- npm install node-gyp@latest`
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE b/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE
new file mode 100644
index 000000000..8d28acf86
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/agent.js b/node_modules/node-gyp/node_modules/make-fetch-happen/agent.js
new file mode 100644
index 000000000..e27eb4f3a
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/agent.js
@@ -0,0 +1,209 @@
+'use strict'
+const LRU = require('lru-cache')
+const url = require('url')
+const isLambda = require('is-lambda')
+
+const AGENT_CACHE = new LRU({ max: 50 })
+let HttpsAgent
+let HttpAgent
+
+module.exports = getAgent
+
+const getAgentTimeout = timeout =>
+ typeof timeout !== 'number' || !timeout ? 0 : timeout + 1
+
+const getMaxSockets = maxSockets => maxSockets || 15
+
+function getAgent (uri, opts) {
+ const parsedUri = new url.URL(typeof uri === 'string' ? uri : uri.url)
+ const isHttps = parsedUri.protocol === 'https:'
+ const pxuri = getProxyUri(parsedUri.href, opts)
+
+ // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout
+ // of zero disables the timeout behavior (OS limits still apply). Else, if
+ // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that
+ // the node-fetch-npm timeout will always fire first, giving us more
+ // consistent errors.
+ const agentTimeout = getAgentTimeout(opts.timeout)
+ const agentMaxSockets = getMaxSockets(opts.maxSockets)
+
+ const key = [
+ `https:${isHttps}`,
+ pxuri
+ ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}`
+ : '>no-proxy<',
+ `local-address:${opts.localAddress || '>no-local-address<'}`,
+ `strict-ssl:${isHttps ? !!opts.strictSSL : '>no-strict-ssl<'}`,
+ `ca:${(isHttps && opts.ca) || '>no-ca<'}`,
+ `cert:${(isHttps && opts.cert) || '>no-cert<'}`,
+ `key:${(isHttps && opts.key) || '>no-key<'}`,
+ `timeout:${agentTimeout}`,
+ `maxSockets:${agentMaxSockets}`,
+ ].join(':')
+
+ if (opts.agent != null) { // `agent: false` has special behavior!
+ return opts.agent
+ }
+
+ // keep alive in AWS lambda makes no sense
+ const lambdaAgent = !isLambda ? null
+ : isHttps ? require('https').globalAgent
+ : require('http').globalAgent
+
+ if (isLambda && !pxuri)
+ return lambdaAgent
+
+ if (AGENT_CACHE.peek(key))
+ return AGENT_CACHE.get(key)
+
+ if (pxuri) {
+ const pxopts = isLambda ? {
+ ...opts,
+ agent: lambdaAgent,
+ } : opts
+ const proxy = getProxy(pxuri, pxopts, isHttps)
+ AGENT_CACHE.set(key, proxy)
+ return proxy
+ }
+
+ if (!HttpsAgent) {
+ HttpAgent = require('agentkeepalive')
+ HttpsAgent = HttpAgent.HttpsAgent
+ }
+
+ const agent = isHttps ? new HttpsAgent({
+ maxSockets: agentMaxSockets,
+ ca: opts.ca,
+ cert: opts.cert,
+ key: opts.key,
+ localAddress: opts.localAddress,
+ rejectUnauthorized: opts.strictSSL,
+ timeout: agentTimeout,
+ }) : new HttpAgent({
+ maxSockets: agentMaxSockets,
+ localAddress: opts.localAddress,
+ timeout: agentTimeout,
+ })
+ AGENT_CACHE.set(key, agent)
+ return agent
+}
+
+function checkNoProxy (uri, opts) {
+ const host = new url.URL(uri).hostname.split('.').reverse()
+ let noproxy = (opts.noProxy || getProcessEnv('no_proxy'))
+ if (typeof noproxy === 'string')
+ noproxy = noproxy.split(/\s*,\s*/g)
+
+ return noproxy && noproxy.some(no => {
+ const noParts = no.split('.').filter(x => x).reverse()
+ if (!noParts.length)
+ return false
+ for (let i = 0; i < noParts.length; i++) {
+ if (host[i] !== noParts[i])
+ return false
+ }
+ return true
+ })
+}
+
+module.exports.getProcessEnv = getProcessEnv
+
+function getProcessEnv (env) {
+ if (!env)
+ return
+
+ let value
+
+ if (Array.isArray(env)) {
+ for (const e of env) {
+ value = process.env[e] ||
+ process.env[e.toUpperCase()] ||
+ process.env[e.toLowerCase()]
+ if (typeof value !== 'undefined')
+ break
+ }
+ }
+
+ if (typeof env === 'string') {
+ value = process.env[env] ||
+ process.env[env.toUpperCase()] ||
+ process.env[env.toLowerCase()]
+ }
+
+ return value
+}
+
+module.exports.getProxyUri = getProxyUri
+function getProxyUri (uri, opts) {
+ const protocol = new url.URL(uri).protocol
+
+ const proxy = opts.proxy ||
+ (
+ protocol === 'https:' &&
+ getProcessEnv('https_proxy')
+ ) ||
+ (
+ protocol === 'http:' &&
+ getProcessEnv(['https_proxy', 'http_proxy', 'proxy'])
+ )
+ if (!proxy)
+ return null
+
+ const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy
+
+ return !checkNoProxy(uri, opts) && parsedProxy
+}
+
+const getAuth = u =>
+ u.username && u.password ? `${u.username}:${u.password}`
+ : u.username ? u.username
+ : null
+
+const getPath = u => u.pathname + u.search + u.hash
+
+let HttpProxyAgent
+let HttpsProxyAgent
+let SocksProxyAgent
+module.exports.getProxy = getProxy
+function getProxy (proxyUrl, opts, isHttps) {
+ const popts = {
+ host: proxyUrl.hostname,
+ port: proxyUrl.port,
+ protocol: proxyUrl.protocol,
+ path: getPath(proxyUrl),
+ auth: getAuth(proxyUrl),
+ ca: opts.ca,
+ cert: opts.cert,
+ key: opts.key,
+ timeout: getAgentTimeout(opts.timeout),
+ localAddress: opts.localAddress,
+ maxSockets: getMaxSockets(opts.maxSockets),
+ rejectUnauthorized: opts.strictSSL,
+ }
+
+ if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') {
+ if (!isHttps) {
+ if (!HttpProxyAgent)
+ HttpProxyAgent = require('http-proxy-agent')
+
+ return new HttpProxyAgent(popts)
+ } else {
+ if (!HttpsProxyAgent)
+ HttpsProxyAgent = require('https-proxy-agent')
+
+ return new HttpsProxyAgent(popts)
+ }
+ } else if (proxyUrl.protocol.startsWith('socks')) {
+ if (!SocksProxyAgent)
+ SocksProxyAgent = require('socks-proxy-agent')
+
+ return new SocksProxyAgent(popts)
+ } else {
+ throw Object.assign(
+ new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`),
+ {
+ url: proxyUrl.href,
+ }
+ )
+ }
+}
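
The getAgent function above memoizes one keep-alive agent per connection profile, keyed on every option that can affect the underlying socket. A reduced sketch of that memoization with only Node built-ins (agentFor and its two-field key are illustrative simplifications of the LRU-backed version above):

    'use strict'
    const http = require('http')
    const https = require('https')

    const AGENTS = new Map()

    // Reuse one keep-alive agent per protocol/timeout profile; distinct
    // profiles must not share sockets, so they get distinct cache keys.
    function agentFor (urlString, { timeout = 0 } = {}) {
      const isHttps = new URL(urlString).protocol === 'https:'
      const key = `https:${isHttps}:timeout:${timeout}`
      if (!AGENTS.has(key)) {
        const Agent = isHttps ? https.Agent : http.Agent
        AGENTS.set(key, new Agent({ keepAlive: true, timeout }))
      }
      return AGENTS.get(key)
    }
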
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/cache.js b/node_modules/node-gyp/node_modules/make-fetch-happen/cache.js
new file mode 100644
index 000000000..234e3a41d
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/cache.js
@@ -0,0 +1,260 @@
+'use strict'
+
+const fetch = require('minipass-fetch')
+const cacache = require('cacache')
+const ssri = require('ssri')
+const url = require('url')
+
+const Minipass = require('minipass')
+const MinipassFlush = require('minipass-flush')
+const MinipassCollect = require('minipass-collect')
+const MinipassPipeline = require('minipass-pipeline')
+
+const MAX_MEM_SIZE = 5 * 1024 * 1024 // 5MB
+
+// some headers should never be stored in the cache, either because
+// they're a security footgun to leave lying around, or because we
+// just don't need them taking up space.
+// set to undefined so they're omitted from the JSON.stringify
+const pruneHeaders = {
+ authorization: undefined,
+ 'npm-session': undefined,
+ 'set-cookie': undefined,
+ 'cf-ray': undefined,
+ 'cf-cache-status': undefined,
+ 'cf-request-id': undefined,
+ 'x-fetch-attempts': undefined,
+}
+
+function cacheKey (req) {
+ const parsed = new url.URL(req.url)
+ return `make-fetch-happen:request-cache:${
+ url.format({
+ protocol: parsed.protocol,
+ slashes: true,
+ port: parsed.port,
+ hostname: parsed.hostname,
+ pathname: parsed.pathname,
+ search: parsed.search,
+ })
+ }`
+}
+
+// This is a cacache-based implementation of the Cache standard,
+// using node-fetch.
+// docs: https://developer.mozilla.org/en-US/docs/Web/API/Cache
+//
+module.exports = class Cache {
+ constructor (path, opts) {
+ this._path = path
+ this.Promise = (opts && opts.Promise) || Promise
+ }
+
+ static get pruneHeaders () {
+ // exposed for testing, not modifiable
+ return { ...pruneHeaders }
+ }
+
+ // Returns a Promise that resolves to the response associated with the first
+ // matching request in the Cache object.
+ match (req, opts) {
+ const key = cacheKey(req)
+ return cacache.get.info(this._path, key).then(info => {
+ return info && cacache.get.hasContent(
+ this._path, info.integrity, opts
+ ).then(exists => exists && info)
+ }).then(info => {
+ if (info && info.metadata && matchDetails(req, {
+ url: info.metadata.url,
+ reqHeaders: new fetch.Headers(info.metadata.reqHeaders),
+ resHeaders: new fetch.Headers(info.metadata.resHeaders),
+ cacheIntegrity: info.integrity,
+ integrity: opts && opts.integrity,
+ })) {
+ const resHeaders = new fetch.Headers(info.metadata.resHeaders)
+ addCacheHeaders(resHeaders, this._path, key, info.integrity, info.time)
+ if (req.method === 'HEAD') {
+ return new fetch.Response(null, {
+ url: req.url,
+ headers: resHeaders,
+ status: 200,
+ })
+ }
+ const cachePath = this._path
+ // avoid opening cache file handles until a user actually tries to
+ // read from it.
+ const body = new Minipass()
+ const fitInMemory = info.size < MAX_MEM_SIZE
+ const removeOnResume = () => body.removeListener('resume', onResume)
+ const onResume =
+ opts.memoize !== false && fitInMemory
+ ? () => {
+ const c = cacache.get.stream.byDigest(cachePath, info.integrity, {
+ memoize: opts.memoize,
+ })
+ c.on('error', /* istanbul ignore next */ err => {
+ body.emit('error', err)
+ })
+ c.pipe(body)
+ }
+ : () => {
+ removeOnResume()
+ cacache.get.byDigest(cachePath, info.integrity, {
+ memoize: opts.memoize,
+ })
+ .then(data => body.end(data))
+ .catch(/* istanbul ignore next */ err => {
+ body.emit('error', err)
+ })
+ }
+ body.once('resume', onResume)
+ body.once('end', () => removeOnResume())
+ return this.Promise.resolve(new fetch.Response(body, {
+ url: req.url,
+ headers: resHeaders,
+ status: 200,
+ size: info.size,
+ }))
+ }
+ })
+ }
+
+ // Takes both a request and its response and adds it to the given cache.
+ put (req, response, opts) {
+ opts = opts || {}
+ const size = response.headers.get('content-length')
+ const fitInMemory = !!size && opts.memoize !== false && size < MAX_MEM_SIZE
+ const ckey = cacheKey(req)
+ const cacheOpts = {
+ algorithms: opts.algorithms,
+ metadata: {
+ url: req.url,
+ reqHeaders: {
+ ...req.headers.raw(),
+ ...pruneHeaders,
+ },
+ resHeaders: {
+ ...response.headers.raw(),
+ ...pruneHeaders,
+ },
+ },
+ size,
+ memoize: fitInMemory && opts.memoize,
+ }
+ if (req.method === 'HEAD' || response.status === 304) {
+ // Update metadata without writing
+ return cacache.get.info(this._path, ckey).then(info => {
+ // Providing these will bypass content write
+ cacheOpts.integrity = info.integrity
+ addCacheHeaders(
+ response.headers, this._path, ckey, info.integrity, info.time
+ )
+
+ return new MinipassPipeline(
+ cacache.get.stream.byDigest(this._path, info.integrity, cacheOpts),
+ cacache.put.stream(this._path, ckey, cacheOpts)
+ ).promise().then(() => {
+ return response
+ })
+ })
+ }
+ const oldBody = response.body
+ // the flush is the last thing in the pipeline. Build the pipeline
+ // back-to-front so we don't consume the data before we use it!
+ // We unshift in either a tee-stream to the cache put stream,
+ // or a collecter that dumps it to cache in one go, then the
+ // old body to bring in the data.
+ const newBody = new MinipassPipeline(new MinipassFlush({
+ flush () {
+ return cacheWritePromise
+ },
+ }))
+
+ let cacheWriteResolve, cacheWriteReject
+ const cacheWritePromise = new Promise((resolve, reject) => {
+ cacheWriteResolve = resolve
+ cacheWriteReject = reject
+ })
+ const cachePath = this._path
+
+ if (fitInMemory) {
+ const collecter = new MinipassCollect.PassThrough()
+ collecter.on('collect', data => {
+ cacache.put(
+ cachePath,
+ ckey,
+ data,
+ cacheOpts
+ ).then(cacheWriteResolve, cacheWriteReject)
+ })
+ newBody.unshift(collecter)
+ } else {
+ const tee = new Minipass()
+ const cacheStream = cacache.put.stream(
+ cachePath,
+ ckey,
+ cacheOpts
+ )
+ tee.pipe(cacheStream)
+ cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
+ newBody.unshift(tee)
+ }
+
+ newBody.unshift(oldBody)
+ return Promise.resolve(new fetch.Response(newBody, response))
+ }
+
+ // Finds the Cache entry whose key is the request, and if found, deletes the
+ // Cache entry and returns a Promise that resolves to true. If no Cache entry
+ // is found, it returns false.
+ 'delete' (req, opts) {
+ opts = opts || {}
+ if (typeof opts.memoize === 'object') {
+ if (opts.memoize.reset)
+ opts.memoize.reset()
+ else if (opts.memoize.clear)
+ opts.memoize.clear()
+ else {
+ Object.keys(opts.memoize).forEach(k => {
+ opts.memoize[k] = null
+ })
+ }
+ }
+ return cacache.rm.entry(
+ this._path,
+ cacheKey(req)
+ // TODO - true/false
+ ).then(() => false)
+ }
+}
+
+function matchDetails (req, cached) {
+ const reqUrl = new url.URL(req.url)
+ const cacheUrl = new url.URL(cached.url)
+ const vary = cached.resHeaders.get('Vary')
+ // https://tools.ietf.org/html/rfc7234#section-4.1
+ if (vary) {
+ if (vary.match(/\*/))
+ return false
+ else {
+ const fieldsMatch = vary.split(/\s*,\s*/).every(field => {
+ return cached.reqHeaders.get(field) === req.headers.get(field)
+ })
+ if (!fieldsMatch)
+ return false
+ }
+ }
+ if (cached.integrity)
+ return ssri.parse(cached.integrity).match(cached.cacheIntegrity)
+
+ reqUrl.hash = null
+ cacheUrl.hash = null
+ return url.format(reqUrl) === url.format(cacheUrl)
+}
+
+function addCacheHeaders (resHeaders, path, key, hash, time) {
+ resHeaders.set('X-Local-Cache', encodeURIComponent(path))
+ resHeaders.set('X-Local-Cache-Key', encodeURIComponent(key))
+ resHeaders.set('X-Local-Cache-Hash', encodeURIComponent(hash))
+ resHeaders.set('X-Local-Cache-Time', new Date(time).toUTCString())
+}
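
cache.js above implements the match/put/delete surface of the web Cache API on top of cacache. A hedged usage sketch, assuming the vendored minipass-fetch and this Cache class (the cachedFetch wrapper and its cache path argument are illustrative):

    'use strict'
    const fetch = require('minipass-fetch')
    const Cache = require('./cache') // the file added above

    async function cachedFetch (url, cachePath) {
      const cache = new Cache(cachePath, {})
      const req = new fetch.Request(url, { method: 'GET', headers: {} })

      // Replay from cacache when a matching entry exists; the response
      // carries the X-Local-Cache-* headers set by addCacheHeaders().
      const hit = await cache.match(req, { memoize: false })
      if (hit) {
        return hit
      }

      // put() tees the body into the cache and hands back a response
      // whose body the caller can still consume.
      const res = await fetch(req)
      return cache.put(req, res, {})
    }
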
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/index.js b/node_modules/node-gyp/node_modules/make-fetch-happen/index.js
new file mode 100644
index 000000000..54f72049c
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/index.js
@@ -0,0 +1,457 @@
+'use strict'
+
+const url = require('url')
+const fetch = require('minipass-fetch')
+const pkg = require('./package.json')
+const retry = require('promise-retry')
+let ssri
+
+const Minipass = require('minipass')
+const MinipassPipeline = require('minipass-pipeline')
+const getAgent = require('./agent')
+const setWarning = require('./warning')
+
+const configureOptions = require('./utils/configure-options')
+const iterableToObject = require('./utils/iterable-to-object')
+const makePolicy = require('./utils/make-policy')
+
+const isURL = /^https?:/
+const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
+
+const RETRY_ERRORS = [
+ 'ECONNRESET', // remote socket closed on us
+ 'ECONNREFUSED', // remote host refused to open connection
+ 'EADDRINUSE', // failed to bind to a local port (proxy?)
+ 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
+ // Known codes we do NOT retry on:
+ // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
+]
+
+const RETRY_TYPES = [
+ 'request-timeout',
+]
+
+// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch
+module.exports = cachingFetch
+cachingFetch.defaults = function (_uri, _opts) {
+ const fetch = this
+ if (typeof _uri === 'object') {
+ _opts = _uri
+ _uri = null
+ }
+
+ function defaultedFetch (uri, opts) {
+ const finalOpts = Object.assign({}, _opts || {}, opts || {})
+ return fetch(uri || _uri, finalOpts)
+ }
+
+ defaultedFetch.defaults = fetch.defaults
+ defaultedFetch.delete = fetch.delete
+ return defaultedFetch
+}
+
+cachingFetch.delete = cacheDelete
+function cacheDelete (uri, opts) {
+ opts = configureOptions(opts)
+ if (opts.cacheManager) {
+ const req = new fetch.Request(uri, {
+ method: opts.method,
+ headers: opts.headers,
+ })
+ return opts.cacheManager.delete(req, opts)
+ }
+}
+
+function initializeSsri () {
+ if (!ssri)
+ ssri = require('ssri')
+}
+
+function cachingFetch (uri, _opts) {
+ const opts = configureOptions(_opts)
+
+ if (opts.integrity) {
+ initializeSsri()
+ // if verifying integrity, fetch must not decompress
+ opts.compress = false
+ }
+
+ const isCachable = (
+ (
+ opts.method === 'GET' ||
+ opts.method === 'HEAD'
+ ) &&
+ Boolean(opts.cacheManager) &&
+ opts.cache !== 'no-store' &&
+ opts.cache !== 'reload'
+ )
+
+ if (isCachable) {
+ const req = new fetch.Request(uri, {
+ method: opts.method,
+ headers: opts.headers,
+ })
+
+ return opts.cacheManager.match(req, opts).then(res => {
+ if (res) {
+ const warningCode = (res.headers.get('Warning') || '').match(/^\d+/)
+ if (warningCode && +warningCode >= 100 && +warningCode < 200) {
+ // https://tools.ietf.org/html/rfc7234#section-4.3.4
+ //
+ // If a stored response is selected for update, the cache MUST:
+ //
+ // * delete any Warning header fields in the stored response with
+ // warn-code 1xx (see Section 5.5);
+ //
+ // * retain any Warning header fields in the stored response with
+ // warn-code 2xx;
+ //
+ res.headers.delete('Warning')
+ }
+
+ if (opts.cache === 'default' && !isStale(req, res))
+ return res
+
+ if (opts.cache === 'default' || opts.cache === 'no-cache')
+ return conditionalFetch(req, res, opts)
+
+ if (opts.cache === 'force-cache' || opts.cache === 'only-if-cached') {
+ // 112 Disconnected operation
+ // SHOULD be included if the cache is intentionally disconnected from
+ // the rest of the network for a period of time.
+ // (https://tools.ietf.org/html/rfc2616#section-14.46)
+ setWarning(res, 112, 'Disconnected operation')
+ return res
+ }
+ }
+
+ if (!res && opts.cache === 'only-if-cached') {
+ const errorMsg = `request to ${
+ uri
+ } failed: cache mode is 'only-if-cached' but no cached response available.`
+
+ const err = new Error(errorMsg)
+ err.code = 'ENOTCACHED'
+ throw err
+ }
+
+ // Missing cache entry, or mode is default (if stale), reload, no-store
+ return remoteFetch(req.url, opts)
+ })
+ }
+ return remoteFetch(uri, opts)
+}
+
+// https://tools.ietf.org/html/rfc7234#section-4.2
+function isStale (req, res) {
+ const _req = {
+ url: req.url,
+ method: req.method,
+ headers: iterableToObject(req.headers),
+ }
+
+ const policy = makePolicy(req, res)
+
+ const responseTime = res.headers.get('x-local-cache-time') ||
+ /* istanbul ignore next - would be weird to get a 'stale'
+ * response that didn't come from cache with a cache time header */
+ (res.headers.get('date') || 0)
+
+ policy._responseTime = new Date(responseTime)
+
+ const bool = !policy.satisfiesWithoutRevalidation(_req)
+ const headers = policy.responseHeaders()
+ if (headers.warning && /^113\b/.test(headers.warning)) {
+ // Possible to pick up a rfc7234 warning at this point.
+ // This is kind of a weird place to stick this, should probably go
+ // in cachingFetch. But by putting it here, we save an extra
+ // CachePolicy object construction.
+ res.headers.append('warning', headers.warning)
+ }
+ return bool
+}
+
+function mustRevalidate (res) {
+ return (res.headers.get('cache-control') || '').match(/must-revalidate/i)
+}
+
+function conditionalFetch (req, cachedRes, opts) {
+ const _req = {
+ url: req.url,
+ method: req.method,
+ headers: Object.assign({}, opts.headers || {}),
+ }
+
+ const policy = makePolicy(req, cachedRes)
+ opts.headers = policy.revalidationHeaders(_req)
+
+ return remoteFetch(req.url, opts)
+ .then(condRes => {
+ const revalidatedPolicy = policy.revalidatedPolicy(_req, {
+ status: condRes.status,
+ headers: iterableToObject(condRes.headers),
+ })
+
+ if (condRes.status >= 500 && !mustRevalidate(cachedRes)) {
+ // 111 Revalidation failed
+ // MUST be included if a cache returns a stale response because an
+ // attempt to revalidate the response failed, due to an inability to
+ // reach the server.
+ // (https://tools.ietf.org/html/rfc2616#section-14.46)
+ setWarning(cachedRes, 111, 'Revalidation failed')
+ return cachedRes
+ }
+
+ if (condRes.status === 304) { // 304 Not Modified
+ // Create a synthetic response from the cached body and original req
+ const synthRes = new fetch.Response(cachedRes.body, condRes)
+ return opts.cacheManager.put(req, synthRes, opts)
+ .then(newRes => {
+ // Get the list first, because if we delete while iterating,
+ // it'll throw off the count and not make it through all
+ // of them.
+ const newHeaders = revalidatedPolicy.policy.responseHeaders()
+ const toDelete = [...newRes.headers.keys()]
+ .filter(k => !newHeaders[k])
+ for (const key of toDelete)
+ newRes.headers.delete(key)
+
+ for (const [key, val] of Object.entries(newHeaders))
+ newRes.headers.set(key, val)
+
+ return newRes
+ })
+ }
+
+ return condRes
+ })
+ .then(res => res)
+ .catch(err => {
+ if (mustRevalidate(cachedRes))
+ throw err
+ else {
+ // 111 Revalidation failed
+ // MUST be included if a cache returns a stale response because an
+ // attempt to revalidate the response failed, due to an inability to
+ // reach the server.
+ // (https://tools.ietf.org/html/rfc2616#section-14.46)
+ setWarning(cachedRes, 111, 'Revalidation failed')
+ // 199 Miscellaneous warning
+ // The warning text MAY include arbitrary information to be presented to
+ // a human user, or logged. A system receiving this warning MUST NOT take
+ // any automated action, besides presenting the warning to the user.
+ // (https://tools.ietf.org/html/rfc2616#section-14.46)
+ setWarning(
+ cachedRes,
+ 199,
+ `Miscellaneous Warning ${err.code}: ${err.message}`
+ )
+
+ return cachedRes
+ }
+ })
+}
+
+function remoteFetchHandleIntegrity (res, integrity) {
+ if (res.status !== 200)
+ return res // Error responses aren't subject to integrity checks.
+
+ const oldBod = res.body
+ const newBod = ssri.integrityStream({
+ integrity,
+ })
+ return new fetch.Response(new MinipassPipeline(oldBod, newBod), res)
+}
+
+function remoteFetch (uri, opts) {
+ const agent = getAgent(uri, opts)
+ const headers = opts.headers instanceof fetch.Headers
+ ? opts.headers
+ : new fetch.Headers(opts.headers)
+ if (!headers.get('connection'))
+ headers.set('connection', agent ? 'keep-alive' : 'close')
+
+ if (!headers.get('user-agent'))
+ headers.set('user-agent', USER_AGENT)
+
+ const reqOpts = {
+ agent,
+ body: opts.body,
+ compress: opts.compress,
+ follow: opts.follow,
+ headers,
+ method: opts.method,
+ redirect: 'manual',
+ size: opts.size,
+ counter: opts.counter,
+ timeout: opts.timeout,
+ ca: opts.ca,
+ cert: opts.cert,
+ key: opts.key,
+ rejectUnauthorized: opts.strictSSL,
+ }
+
+ return retry(
+ (retryHandler, attemptNum) => {
+ const req = new fetch.Request(uri, reqOpts)
+ return fetch(req)
+ .then((res) => {
+ if (opts.integrity)
+ res = remoteFetchHandleIntegrity(res, opts.integrity)
+
+ res.headers.set('x-fetch-attempts', attemptNum)
+
+ const isStream = Minipass.isStream(req.body)
+
+ if (opts.cacheManager) {
+ const isMethodGetHead = (
+ req.method === 'GET' ||
+ req.method === 'HEAD'
+ )
+
+ const isCachable = (
+ opts.cache !== 'no-store' &&
+ isMethodGetHead &&
+ makePolicy(req, res).storable() &&
+ res.status === 200 // No other statuses should be stored!
+ )
+
+ if (isCachable)
+ return opts.cacheManager.put(req, res, opts)
+
+ if (!isMethodGetHead) {
+ return opts.cacheManager.delete(req).then(() => {
+ if (res.status >= 500 && req.method !== 'POST' && !isStream) {
+ if (typeof opts.onRetry === 'function')
+ opts.onRetry(res)
+
+ return retryHandler(res)
+ }
+
+ return res
+ })
+ }
+ }
+
+ const isRetriable = (
+ req.method !== 'POST' &&
+ !isStream &&
+ (
+ res.status === 408 || // Request Timeout
+ res.status === 420 || // Enhance Your Calm (usually Twitter rate-limit)
+ res.status === 429 || // Too Many Requests ("standard" rate-limiting)
+ res.status >= 500 // Assume server errors are momentary hiccups
+ )
+ )
+
+ if (isRetriable) {
+ if (typeof opts.onRetry === 'function')
+ opts.onRetry(res)
+
+ return retryHandler(res)
+ }
+
+ if (!fetch.isRedirect(res.status))
+ return res
+
+ if (opts.redirect === 'manual')
+ return res
+
+ // if (!fetch.isRedirect(res.status) || opts.redirect === 'manual') {
+ // return res
+ // }
+
+ // handle redirects - matches behavior of fetch: https://github.com/bitinn/node-fetch
+ if (opts.redirect === 'error') {
+ const err = new fetch.FetchError(`redirect mode is set to error: ${uri}`, 'no-redirect', { code: 'ENOREDIRECT' })
+ throw err
+ }
+
+ if (!res.headers.get('location')) {
+ const err = new fetch.FetchError(`redirect location header missing at: ${uri}`, 'no-location', { code: 'EINVALIDREDIRECT' })
+ throw err
+ }
+
+ if (req.counter >= req.follow) {
+ const err = new fetch.FetchError(`maximum redirect reached at: ${uri}`, 'max-redirect', { code: 'EMAXREDIRECT' })
+ throw err
+ }
+
+ const resolvedUrlParsed = new url.URL(res.headers.get('location'), req.url)
+ const resolvedUrl = url.format(resolvedUrlParsed)
+ const redirectURL = (isURL.test(res.headers.get('location')))
+ ? new url.URL(res.headers.get('location'))
+ : resolvedUrlParsed
+
+ // Comment below is used under the following license:
+ // Copyright (c) 2010-2012 Mikeal Rogers
+ // Licensed under the Apache License, Version 2.0 (the "License");
+ // you may not use this file except in compliance with the License.
+ // You may obtain a copy of the License at
+ // http://www.apache.org/licenses/LICENSE-2.0
+ // Unless required by applicable law or agreed to in writing,
+ // software distributed under the License is distributed on an "AS
+ // IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ // express or implied. See the License for the specific language
+ // governing permissions and limitations under the License.
+
+ // Remove authorization if changing hostnames (but not if just
+ // changing ports or protocols). This matches the behavior of request:
+ // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
+ if (new url.URL(req.url).hostname !== redirectURL.hostname)
+ req.headers.delete('authorization')
+
+ // for POST request with 301/302 response, or any request with 303 response,
+ // use GET when following redirect
+ if (
+ res.status === 303 ||
+ (
+ req.method === 'POST' &&
+ (
+ res.status === 301 ||
+ res.status === 302
+ )
+ )
+ ) {
+ opts.method = 'GET'
+ opts.body = null
+ req.headers.delete('content-length')
+ }
+
+ opts.headers = {}
+ req.headers.forEach((value, name) => {
+ opts.headers[name] = value
+ })
+
+ opts.counter = ++req.counter
+ return cachingFetch(resolvedUrl, opts)
+ })
+ .catch(err => {
+ const code = (err.code === 'EPROMISERETRY')
+ ? err.retried.code
+ : err.code
+
+ const isRetryError = (
+ RETRY_ERRORS.indexOf(code) === -1 &&
+ RETRY_TYPES.indexOf(err.type) === -1
+ )
+
+ if (req.method === 'POST' || isRetryError)
+ throw err
+
+ if (typeof opts.onRetry === 'function')
+ opts.onRetry(err)
+
+ return retryHandler(err)
+ })
+ },
+ opts.retry
+ ).catch(err => {
+ if (err.status >= 400 && err.type !== 'system') {
+ // this is an HTTP response "error" that we care about
+ return err
+ }
+
+ throw err
+ })
+}
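
Every request above runs inside promise-retry: a short list of HTTP statuses and socket error codes is retried, and once retries are exhausted an HTTP "error" is surfaced as a response rather than a throw. A condensed sketch of that control flow, assuming only the promise-retry dependency declared in the package.json below (getWithRetry and doRequest are illustrative):

    'use strict'
    const promiseRetry = require('promise-retry')

    function getWithRetry (doRequest, retries = 2) {
      return promiseRetry((retryHandler, attemptNum) => {
        return doRequest(attemptNum).then(res => {
          // retriable statuses go back through the handler
          const retriable = res.status === 408 || res.status === 429 ||
            res.status >= 500
          return retriable ? retryHandler(res) : res
        }, err => {
          if (['ECONNRESET', 'ECONNREFUSED', 'ETIMEDOUT'].includes(err.code)) {
            return retryHandler(err) // transient socket failure
          }
          throw err // e.g. ENOTFOUND: retrying will not help
        })
      }, { retries }).catch(err => {
        // after exhaustion, promise-retry rejects with the last value the
        // handler saw; surface HTTP failures as responses, like the final
        // catch in index.js above
        if (err && err.status >= 400) {
          return err
        }
        throw err
      })
    }
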
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/package.json b/node_modules/node-gyp/node_modules/make-fetch-happen/package.json
new file mode 100644
index 000000000..7e854dcdf
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/package.json
@@ -0,0 +1,72 @@
+{
+ "name": "make-fetch-happen",
+ "version": "8.0.14",
+ "description": "Opinionated, caching, retrying fetch client",
+ "main": "index.js",
+ "files": [
+ "*.js",
+ "lib",
+ "utils"
+ ],
+ "scripts": {
+ "preversion": "npm t",
+ "postversion": "npm publish",
+ "prepublishOnly": "git push --follow-tags",
+ "test": "tap test/*.js",
+ "posttest": "npm run lint",
+ "eslint": "eslint",
+ "lint": "npm run eslint -- *.js utils test",
+ "lintfix": "npm run lint -- --fix"
+ },
+ "repository": "https://github.com/npm/make-fetch-happen",
+ "keywords": [
+ "http",
+ "request",
+ "fetch",
+ "mean girls",
+ "caching",
+ "cache",
+ "subresource integrity"
+ ],
+ "author": {
+ "name": "Kat Marchán",
+ "email": "kzm@zkat.tech",
+ "twitter": "maybekatz"
+ },
+ "license": "ISC",
+ "dependencies": {
+ "agentkeepalive": "^4.1.3",
+ "cacache": "^15.0.5",
+ "http-cache-semantics": "^4.1.0",
+ "http-proxy-agent": "^4.0.1",
+ "https-proxy-agent": "^5.0.0",
+ "is-lambda": "^1.0.1",
+ "lru-cache": "^6.0.0",
+ "minipass": "^3.1.3",
+ "minipass-collect": "^1.0.2",
+ "minipass-fetch": "^1.3.2",
+ "minipass-flush": "^1.0.5",
+ "minipass-pipeline": "^1.2.4",
+ "promise-retry": "^2.0.1",
+ "socks-proxy-agent": "^5.0.0",
+ "ssri": "^8.0.0"
+ },
+ "devDependencies": {
+ "eslint": "^7.14.0",
+ "eslint-plugin-import": "^2.22.1",
+ "eslint-plugin-node": "^11.1.0",
+ "eslint-plugin-promise": "^4.2.1",
+ "eslint-plugin-standard": "^5.0.0",
+ "mkdirp": "^1.0.4",
+ "nock": "^11.9.1",
+ "npmlog": "^4.1.2",
+ "require-inject": "^1.4.2",
+ "rimraf": "^2.7.1",
+ "safe-buffer": "^5.2.1",
+ "standard-version": "^7.1.0",
+ "tap": "^14.11.0"
+ },
+ "engines": {
+ "node": ">= 10"
+ }
+}
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/utils/configure-options.js b/node_modules/node-gyp/node_modules/make-fetch-happen/utils/configure-options.js
new file mode 100644
index 000000000..75ea5d15e
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/utils/configure-options.js
@@ -0,0 +1,32 @@
+'use strict'
+
+const initializeCache = require('./initialize-cache')
+
+module.exports = function configureOptions (_opts) {
+ const opts = Object.assign({}, _opts || {})
+ opts.method = (opts.method || 'GET').toUpperCase()
+
+ if (!opts.retry) {
+ // opts.retry was falsy; set default
+ opts.retry = { retries: 0 }
+ } else {
+ if (typeof opts.retry !== 'object') {
+ // Shorthand
+ if (typeof opts.retry === 'number')
+ opts.retry = { retries: opts.retry }
+
+ if (typeof opts.retry === 'string') {
+ const value = parseInt(opts.retry, 10)
+ opts.retry = (value) ? { retries: value } : { retries: 0 }
+ }
+ } else {
+ // Set default retries
+ opts.retry = Object.assign({}, { retries: 0 }, opts.retry)
+ }
+ }
+
+ if (opts.cacheManager)
+ initializeCache(opts)
+
+ return opts
+}
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/utils/initialize-cache.js b/node_modules/node-gyp/node_modules/make-fetch-happen/utils/initialize-cache.js
new file mode 100644
index 000000000..9f96bf562
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/utils/initialize-cache.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const isHeaderConditional = require('./is-header-conditional')
+// Default cacache-based cache
+const Cache = require('../cache')
+
+module.exports = function initializeCache (opts) {
+ /**
+ * NOTE: `opts.cacheManager` is the path to cache
+ * We're making the assumption that if `opts.cacheManager` *isn't* a string,
+ * it's a cache object
+ */
+ if (typeof opts.cacheManager === 'string') {
+ // Need to make a cache object
+ opts.cacheManager = new Cache(opts.cacheManager, opts)
+ }
+
+ opts.cache = opts.cache || 'default'
+
+ if (opts.cache === 'default' && isHeaderConditional(opts.headers)) {
+ // If header list contains `If-Modified-Since`, `If-None-Match`,
+ // `If-Unmodified-Since`, `If-Match`, or `If-Range`, fetch will set cache
+ // mode to "no-store" if it is "default".
+ opts.cache = 'no-store'
+ }
+}
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/utils/is-header-conditional.js b/node_modules/node-gyp/node_modules/make-fetch-happen/utils/is-header-conditional.js
new file mode 100644
index 000000000..5081e0ce1
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/utils/is-header-conditional.js
@@ -0,0 +1,17 @@
+'use strict'
+
+module.exports = function isHeaderConditional (headers) {
+ if (!headers || typeof headers !== 'object')
+ return false
+
+ const modifiers = [
+ 'if-modified-since',
+ 'if-none-match',
+ 'if-unmodified-since',
+ 'if-match',
+ 'if-range',
+ ]
+
+ return Object.keys(headers)
+ .some(h => modifiers.indexOf(h.toLowerCase()) !== -1)
+}
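
A quick illustration of the helper above (the require path assumes a script sitting next to the file; expected output is noted in the comments):

    'use strict'
    const isHeaderConditional = require('./is-header-conditional')

    console.log(isHeaderConditional({ 'If-None-Match': '"abc123"' })) // true
    console.log(isHeaderConditional({ accept: 'application/json' })) // false
    console.log(isHeaderConditional(null)) // false
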
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/utils/iterable-to-object.js b/node_modules/node-gyp/node_modules/make-fetch-happen/utils/iterable-to-object.js
new file mode 100644
index 000000000..1fe5ba654
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/utils/iterable-to-object.js
@@ -0,0 +1,9 @@
+'use strict'
+
+module.exports = function iterableToObject (iter) {
+ const obj = {}
+ for (const k of iter.keys())
+ obj[k] = iter.get(k)
+
+ return obj
+}
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/utils/make-policy.js b/node_modules/node-gyp/node_modules/make-fetch-happen/utils/make-policy.js
new file mode 100644
index 000000000..5e884847d
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/utils/make-policy.js
@@ -0,0 +1,19 @@
+'use strict'
+
+const CachePolicy = require('http-cache-semantics')
+
+const iterableToObject = require('./iterable-to-object')
+
+module.exports = function makePolicy (req, res) {
+ const _req = {
+ url: req.url,
+ method: req.method,
+ headers: iterableToObject(req.headers),
+ }
+ const _res = {
+ status: res.status,
+ headers: iterableToObject(res.headers),
+ }
+
+ return new CachePolicy(_req, _res, { shared: false })
+}
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/warning.js b/node_modules/node-gyp/node_modules/make-fetch-happen/warning.js
new file mode 100644
index 000000000..2b9602471
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/warning.js
@@ -0,0 +1,24 @@
+const url = require('url')
+
+module.exports = setWarning
+
+function setWarning (reqOrRes, code, message, replace) {
+ // Warning = "Warning" ":" 1#warning-value
+ // warning-value = warn-code SP warn-agent SP warn-text [SP warn-date]
+ // warn-code = 3DIGIT
+ // warn-agent = ( host [ ":" port ] ) | pseudonym
+ // ; the name or pseudonym of the server adding
+ // ; the Warning header, for use in debugging
+ // warn-text = quoted-string
+ // warn-date = <"> HTTP-date <">
+ // (https://tools.ietf.org/html/rfc2616#section-14.46)
+ const host = new url.URL(reqOrRes.url).host
+ const jsonMessage = JSON.stringify(message)
+ const jsonDate = JSON.stringify(new Date().toUTCString())
+ const header = replace ? 'set' : 'append'
+
+ reqOrRes.headers[header](
+ 'Warning',
+ `${code} ${host} ${jsonMessage} ${jsonDate}`
+ )
+}
diff --git a/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.d.ts b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.d.ts
new file mode 100644
index 000000000..52341a1b5
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.d.ts
@@ -0,0 +1,21 @@
+/// <reference types="node" />
+import net from 'net';
+import { Agent, ClientRequest, RequestOptions } from 'agent-base';
+import { SocksProxyAgentOptions } from '.';
+/**
+ * The `SocksProxyAgent`.
+ *
+ * @api public
+ */
+export default class SocksProxyAgent extends Agent {
+ private lookup;
+ private proxy;
+ constructor(_opts: string | SocksProxyAgentOptions);
+ /**
+ * Initiates a SOCKS connection to the specified SOCKS proxy server,
+ * which in turn connects to the specified remote host and port.
+ *
+ * @api protected
+ */
+ callback(req: ClientRequest, opts: RequestOptions): Promise<net.Socket>;
+}
diff --git a/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.js b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.js
new file mode 100644
index 000000000..7af0d62f8
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.js
@@ -0,0 +1,180 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const dns_1 = __importDefault(require("dns"));
+const tls_1 = __importDefault(require("tls"));
+const url_1 = __importDefault(require("url"));
+const debug_1 = __importDefault(require("debug"));
+const agent_base_1 = require("agent-base");
+const socks_1 = require("socks");
+const debug = debug_1.default('socks-proxy-agent');
+function dnsLookup(host) {
+ return new Promise((resolve, reject) => {
+ dns_1.default.lookup(host, (err, res) => {
+ if (err) {
+ reject(err);
+ }
+ else {
+ resolve(res);
+ }
+ });
+ });
+}
+function parseSocksProxy(opts) {
+ let port = 0;
+ let lookup = false;
+ let type = 5;
+ // Prefer `hostname` over `host`, because of `url.parse()`
+ const host = opts.hostname || opts.host;
+ if (!host) {
+ throw new TypeError('No "host"');
+ }
+ if (typeof opts.port === 'number') {
+ port = opts.port;
+ }
+ else if (typeof opts.port === 'string') {
+ port = parseInt(opts.port, 10);
+ }
+ // From RFC 1928, Section 3: https://tools.ietf.org/html/rfc1928#section-3
+ // "The SOCKS service is conventionally located on TCP port 1080"
+ if (!port) {
+ port = 1080;
+ }
+ // figure out if we want socks v4 or v5, based on the "protocol" used.
+ // Defaults to 5.
+ if (opts.protocol) {
+ switch (opts.protocol.replace(':', '')) {
+ case 'socks4':
+ lookup = true;
+ // pass through
+ case 'socks4a':
+ type = 4;
+ break;
+ case 'socks5':
+ lookup = true;
+ // pass through
+ case 'socks': // no version specified, default to 5h
+ case 'socks5h':
+ type = 5;
+ break;
+ default:
+ throw new TypeError(`A "socks" protocol must be specified! Got: ${opts.protocol}`);
+ }
+ }
+ if (typeof opts.type !== 'undefined') {
+ if (opts.type === 4 || opts.type === 5) {
+ type = opts.type;
+ }
+ else {
+ throw new TypeError(`"type" must be 4 or 5, got: ${opts.type}`);
+ }
+ }
+ const proxy = {
+ host,
+ port,
+ type
+ };
+ let userId = opts.userId || opts.username;
+ let password = opts.password;
+ if (opts.auth) {
+ const auth = opts.auth.split(':');
+ userId = auth[0];
+ password = auth[1];
+ }
+ if (userId) {
+ Object.defineProperty(proxy, 'userId', {
+ value: userId,
+ enumerable: false
+ });
+ }
+ if (password) {
+ Object.defineProperty(proxy, 'password', {
+ value: password,
+ enumerable: false
+ });
+ }
+ return { lookup, proxy };
+}
+/**
+ * The `SocksProxyAgent`.
+ *
+ * @api public
+ */
+class SocksProxyAgent extends agent_base_1.Agent {
+ constructor(_opts) {
+ let opts;
+ if (typeof _opts === 'string') {
+ opts = url_1.default.parse(_opts);
+ }
+ else {
+ opts = _opts;
+ }
+ if (!opts) {
+ throw new TypeError('a SOCKS proxy server `host` and `port` must be specified!');
+ }
+ super(opts);
+ const parsedProxy = parseSocksProxy(opts);
+ this.lookup = parsedProxy.lookup;
+ this.proxy = parsedProxy.proxy;
+ }
+ /**
+ * Initiates a SOCKS connection to the specified SOCKS proxy server,
+ * which in turn connects to the specified remote host and port.
+ *
+ * @api protected
+ */
+ callback(req, opts) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const { lookup, proxy } = this;
+ let { host, port, timeout } = opts;
+ if (!host) {
+ throw new Error('No `host` defined!');
+ }
+ if (lookup) {
+ // Client-side DNS resolution for "4" and "5" socks proxy versions.
+ host = yield dnsLookup(host);
+ }
+ const socksOpts = {
+ proxy,
+ destination: { host, port },
+ command: 'connect',
+ timeout
+ };
+ debug('Creating socks proxy connection: %o', socksOpts);
+ const { socket } = yield socks_1.SocksClient.createConnection(socksOpts);
+ debug('Successfully created socks proxy connection');
+ if (opts.secureEndpoint) {
+ // The proxy is connecting to a TLS server, so upgrade
+ // this socket connection to a TLS connection.
+ debug('Upgrading socket connection to TLS');
+ const servername = opts.servername || host;
+ return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket,
+ servername }));
+ }
+ return socket;
+ });
+ }
+}
+exports.default = SocksProxyAgent;
+function omit(obj, ...keys) {
+ const ret = {};
+ let key;
+ for (key in obj) {
+ if (!keys.includes(key)) {
+ ret[key] = obj[key];
+ }
+ }
+ return ret;
+}
+//# sourceMappingURL=agent.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.js.map b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.js.map
new file mode 100644
index 000000000..f36e1f8bb
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/agent.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AAEtB,8CAAsB;AACtB,8CAAsB;AACtB,kDAAgC;AAChC,2CAAkE;AAClE,iCAAoE;AAGpE,MAAM,KAAK,GAAG,eAAW,CAAC,mBAAmB,CAAC,CAAC;AAE/C,SAAS,SAAS,CAAC,IAAY;IAC9B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACtC,aAAG,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;YAC7B,IAAI,GAAG,EAAE;gBACR,MAAM,CAAC,GAAG,CAAC,CAAC;aACZ;iBAAM;gBACN,OAAO,CAAC,GAAG,CAAC,CAAC;aACb;QACF,CAAC,CAAC,CAAC;IACJ,CAAC,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,eAAe,CACvB,IAA4B;IAE5B,IAAI,IAAI,GAAG,CAAC,CAAC;IACb,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,IAAI,IAAI,GAAuB,CAAC,CAAC;IAEjC,0DAA0D;IAC1D,MAAM,IAAI,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,CAAC;IACxC,IAAI,CAAC,IAAI,EAAE;QACV,MAAM,IAAI,SAAS,CAAC,WAAW,CAAC,CAAC;KACjC;IAED,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;QAClC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;KACjB;SAAM,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;QACzC,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;KAC/B;IAED,0EAA0E;IAC1E,iEAAiE;IACjE,IAAI,CAAC,IAAI,EAAE;QACV,IAAI,GAAG,IAAI,CAAC;KACZ;IAED,sEAAsE;IACtE,iBAAiB;IACjB,IAAI,IAAI,CAAC,QAAQ,EAAE;QAClB,QAAQ,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE;YACvC,KAAK,QAAQ;gBACZ,MAAM,GAAG,IAAI,CAAC;YACf,eAAe;YACf,KAAK,SAAS;gBACb,IAAI,GAAG,CAAC,CAAC;gBACT,MAAM;YACP,KAAK,QAAQ;gBACZ,MAAM,GAAG,IAAI,CAAC;YACf,eAAe;YACf,KAAK,OAAO,CAAC,CAAC,sCAAsC;YACpD,KAAK,SAAS;gBACb,IAAI,GAAG,CAAC,CAAC;gBACT,MAAM;YACP;gBACC,MAAM,IAAI,SAAS,CAClB,8CAA8C,IAAI,CAAC,QAAQ,EAAE,CAC7D,CAAC;SACH;KACD;IAED,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,WAAW,EAAE;QACrC,IAAI,IAAI,CAAC,IAAI,KAAK,CAAC,IAAI,IAAI,CAAC,IAAI,KAAK,CAAC,EAAE;YACvC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;SACjB;aAAM;YACN,MAAM,IAAI,SAAS,CAAC,+BAA+B,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;SAChE;KACD;IAED,MAAM,KAAK,GAAe;QACzB,IAAI;QACJ,IAAI;QACJ,IAAI;KACJ,CAAC;IAEF,IAAI,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,QAAQ,CAAC;IAC1C,IAAI,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;IAC7B,IAAI,IAAI,CAAC,IAAI,EAAE;QACd,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAClC,MAAM,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;QACjB,QAAQ,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;KACnB;IACD,IAAI,MAAM,EAAE;QACX,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,QAAQ,EAAE;YACtC,KAAK,EAAE,MAAM;YACb,UAAU,EAAE,KAAK;SACjB,CAAC,CAAC;KACH;IACD,IAAI,QAAQ,EAAE;QACb,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,UAAU,EAAE;YACxC,KAAK,EAAE,QAAQ;YACf,UAAU,EAAE,KAAK;SACjB,CAAC,CAAC;KACH;IAED,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC;AAC1B,CAAC;AAED;;;;GAIG;AACH,MAAqB,eAAgB,SAAQ,kBAAK;IAIjD,YAAY,KAAsC;QACjD,IAAI,IAA4B,CAAC;QACjC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,SAAS,CAClB,2DAA2D,CAC3D,CAAC;SACF;QACD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,WAAW,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;QAC1C,IAAI,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC;QACjC,IAAI,CAAC,KAAK,GAAG,WAAW,CAAC,KAAK,CAAC;IAChC,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,GAAkB,EAClB,IAAoB;;YAEpB,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,IAAI,CAAC;YAC/B,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC;YAEnC,IAAI,CAAC,IAAI,EAAE;gBACV,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC,CAAC;aACtC;YAED,IAAI,MAAM,EAAE;gBACX,mEAAmE;gBACnE,IAAI,GAAG,MAAM,SAAS,CAAC,IAAI,CAAC,CAAC;aAC7B;YAED,MAAM,SAAS,GAAuB;gBACrC,KAAK;gBACL,WAAW,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE;gBAC3B,OAAO,EAAE,SAAS;gBAClB,OAAO;aACP,CAAC;YACF,KAAK,CAAC,qCAAqC,EAAE,SAAS,CAAC,CAAC;YACxD,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,mBAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,CAAC;YACjE,KAAK,CAAC,6CAA6C,CAAC,CAAC;YAErD,IAAI,IAAI,CAAC,cAAc,EAAE;gBACxB,sDAAsD;gBACtD,8CAA8C;gBAC9C,KAAK,CAAC,oCAAoC,CAAC,CAAC;gB
AC5C,MAAM,UAAU,GAAG,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC;gBAC3C,OAAO,aAAG,CAAC,OAAO,iCACd,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,CAAC,KACjD,MAAM;oBACN,UAAU,IACT,CAAC;aACH;YAED,OAAO,MAAM,CAAC;QACf,CAAC;KAAA;CACD;AArED,kCAqEC;AAED,SAAS,IAAI,CACZ,GAAM,EACN,GAAG,IAAO;IAIV,MAAM,GAAG,GAAG,EAEX,CAAC;IACF,IAAI,GAAqB,CAAC;IAC1B,KAAK,GAAG,IAAI,GAAG,EAAE;QAChB,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YACxB,GAAG,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;SACpB;KACD;IACD,OAAO,GAAG,CAAC;AACZ,CAAC"} \ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.d.ts b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.d.ts
new file mode 100644
index 000000000..8fe0e5888
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.d.ts
@@ -0,0 +1,19 @@
+/// <reference types="node" />
+import { Url } from 'url';
+import { SocksProxy } from 'socks';
+import { AgentOptions } from 'agent-base';
+import _SocksProxyAgent from './agent';
+declare function createSocksProxyAgent(opts: string | createSocksProxyAgent.SocksProxyAgentOptions): _SocksProxyAgent;
+declare namespace createSocksProxyAgent {
+ interface BaseSocksProxyAgentOptions {
+ host?: string | null;
+ port?: string | number | null;
+ username?: string | null;
+ }
+ export interface SocksProxyAgentOptions extends AgentOptions, BaseSocksProxyAgentOptions, Partial<Omit<Url & SocksProxy, keyof BaseSocksProxyAgentOptions>> {
+ }
+ export type SocksProxyAgent = _SocksProxyAgent;
+ export const SocksProxyAgent: typeof _SocksProxyAgent;
+ export {};
+}
+export = createSocksProxyAgent;
diff --git a/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js
new file mode 100644
index 000000000..dd1e49a77
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js
@@ -0,0 +1,14 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+const agent_1 = __importDefault(require("./agent"));
+function createSocksProxyAgent(opts) {
+ return new agent_1.default(opts);
+}
+(function (createSocksProxyAgent) {
+ createSocksProxyAgent.SocksProxyAgent = agent_1.default;
+ createSocksProxyAgent.prototype = agent_1.default.prototype;
+})(createSocksProxyAgent || (createSocksProxyAgent = {}));
+module.exports = createSocksProxyAgent;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
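Note: the new dist/index.js above merges a factory function with a named class export, so the module can be consumed either way. A minimal usage sketch, not taken from this patch; the proxy URL is a placeholder and assumes a SOCKS server listening on localhost:1080:

const https = require('https')
const SocksProxyAgent = require('socks-proxy-agent')

// Factory form: calling the module's export returns an agent instance.
const agent = SocksProxyAgent('socks://localhost:1080')

// Constructor form: the namespace merge re-exposes the class itself.
const sameClassAgent = new SocksProxyAgent.SocksProxyAgent('socks://localhost:1080')

// Any request routed through the agent is tunneled over the SOCKS proxy.
https.get('https://registry.npmjs.org/', { agent }, (res) => {
  console.log('status:', res.statusCode)
})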
diff --git a/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js.map b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js.map
new file mode 100644
index 000000000..56fa84868
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;AAGA,oDAAuC;AAEvC,SAAS,qBAAqB,CAC7B,IAA2D;IAE3D,OAAO,IAAI,eAAgB,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED,WAAU,qBAAqB;IAajB,qCAAe,GAAG,eAAgB,CAAC;IAEhD,qBAAqB,CAAC,SAAS,GAAG,eAAgB,CAAC,SAAS,CAAC;AAC9D,CAAC,EAhBS,qBAAqB,KAArB,qBAAqB,QAgB9B;AAED,iBAAS,qBAAqB,CAAC"} \ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/socks-proxy-agent/package.json b/node_modules/node-gyp/node_modules/socks-proxy-agent/package.json
new file mode 100644
index 000000000..bdb8367fb
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/socks-proxy-agent/package.json
@@ -0,0 +1,64 @@
+{
+ "name": "socks-proxy-agent",
+ "version": "5.0.1",
+ "description": "A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS",
+ "main": "dist/index",
+ "typings": "dist/index",
+ "files": [
+ "dist"
+ ],
+ "scripts": {
+ "prebuild": "rimraf dist",
+ "build": "tsc",
+ "test": "mocha --reporter spec",
+ "test-lint": "eslint src --ext .js,.ts",
+ "prepublishOnly": "npm run build"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/TooTallNate/node-socks-proxy-agent.git"
+ },
+ "keywords": [
+ "socks",
+ "socks4",
+ "socks4a",
+ "socks5",
+ "socks5h",
+ "proxy",
+ "http",
+ "https",
+ "agent"
+ ],
+ "author": "Nathan Rajlich <nathan@tootallnate.net> (http://n8.io/)",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/TooTallNate/node-socks-proxy-agent/issues"
+ },
+ "dependencies": {
+ "agent-base": "^6.0.2",
+ "debug": "4",
+ "socks": "^2.3.3"
+ },
+ "devDependencies": {
+ "@types/debug": "4",
+ "@types/node": "^12.12.11",
+ "@typescript-eslint/eslint-plugin": "1.6.0",
+ "@typescript-eslint/parser": "1.1.0",
+ "eslint": "5.16.0",
+ "eslint-config-airbnb": "17.1.0",
+ "eslint-config-prettier": "4.1.0",
+ "eslint-import-resolver-typescript": "1.1.1",
+ "eslint-plugin-import": "2.16.0",
+ "eslint-plugin-jsx-a11y": "6.2.1",
+ "eslint-plugin-react": "7.12.4",
+ "mocha": "^6.2.2",
+ "proxy": "1",
+ "raw-body": "^2.3.2",
+ "rimraf": "^3.0.0",
+ "socksv5": "TooTallNate/socksv5#fix/dstSock-close-event",
+ "typescript": "^3.5.3"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+}
diff --git a/node_modules/node-gyp/package.json b/node_modules/node-gyp/package.json
index 8e256f017..ec5c3c552 100644
--- a/node_modules/node-gyp/package.json
+++ b/node_modules/node-gyp/package.json
@@ -11,7 +11,7 @@
"bindings",
"gyp"
],
- "version": "7.1.2",
+ "version": "8.2.0",
"installVersion": 9,
"author": "Nathan Rajlich <nathan@tootallnate.net> (http://tootallnate.net)",
"repository": {
@@ -24,13 +24,13 @@
"dependencies": {
"env-paths": "^2.2.0",
"glob": "^7.1.4",
- "graceful-fs": "^4.2.3",
+ "graceful-fs": "^4.2.6",
+ "make-fetch-happen": "^8.0.14",
"nopt": "^5.0.0",
"npmlog": "^4.1.2",
- "request": "^2.88.2",
"rimraf": "^3.0.2",
- "semver": "^7.3.2",
- "tar": "^6.0.2",
+ "semver": "^7.3.5",
+ "tar": "^6.1.2",
"which": "^2.0.2"
},
"engines": {
diff --git a/node_modules/node-gyp/test/fixtures/test-charmap.py b/node_modules/node-gyp/test/fixtures/test-charmap.py
index b338f915b..63aa77bb4 100644
--- a/node_modules/node-gyp/test/fixtures/test-charmap.py
+++ b/node_modules/node-gyp/test/fixtures/test-charmap.py
@@ -1,30 +1,31 @@
-from __future__ import print_function
import sys
import locale
try:
- reload(sys)
+ reload(sys)
except NameError: # Python 3
- pass
+ pass
+
def main():
- encoding = locale.getdefaultlocale()[1]
- if not encoding:
- return False
+ encoding = locale.getdefaultlocale()[1]
+ if not encoding:
+ return False
- try:
- sys.setdefaultencoding(encoding)
- except AttributeError: # Python 3
- pass
+ try:
+ sys.setdefaultencoding(encoding)
+ except AttributeError: # Python 3
+ pass
+
+ textmap = {
+ "cp936": "\u4e2d\u6587",
+ "cp1252": "Lat\u012Bna",
+ "cp932": "\u306b\u307b\u3093\u3054",
+ }
+ if encoding in textmap:
+ print(textmap[encoding])
+ return True
- textmap = {
- 'cp936': u'\u4e2d\u6587',
- 'cp1252': u'Lat\u012Bna',
- 'cp932': u'\u306b\u307b\u3093\u3054'
- }
- if encoding in textmap:
- print(textmap[encoding])
- return True
-if __name__ == '__main__':
- print(main())
+if __name__ == "__main__":
+ print(main())
diff --git a/node_modules/node-gyp/test/test-download.js b/node_modules/node-gyp/test/test-download.js
index fe373e328..71a3c0d09 100644
--- a/node_modules/node-gyp/test/test-download.js
+++ b/node_modules/node-gyp/test/test-download.js
@@ -1,8 +1,9 @@
'use strict'
-const test = require('tap').test
+const { test } = require('tap')
const fs = require('fs')
const path = require('path')
+const util = require('util')
const http = require('http')
const https = require('https')
const install = require('../lib/install')
@@ -14,191 +15,142 @@ const log = require('npmlog')
log.level = 'warn'
-test('download over http', function (t) {
+test('download over http', async (t) => {
t.plan(2)
- var server = http.createServer(function (req, res) {
- t.strictEqual(req.headers['user-agent'],
- 'node-gyp v42 (node ' + process.version + ')')
+ const server = http.createServer((req, res) => {
+ t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`)
res.end('ok')
- server.close()
})
- var host = 'localhost'
- server.listen(0, host, function () {
- var port = this.address().port
- var gyp = {
- opts: {},
- version: '42'
- }
- var url = 'http://' + host + ':' + port
- var req = install.test.download(gyp, {}, url)
- req.on('response', function (res) {
- var body = ''
- res.setEncoding('utf8')
- res.on('data', function (data) {
- body += data
- })
- res.on('end', function () {
- t.strictEqual(body, 'ok')
- })
- })
- })
+ t.tearDown(() => new Promise((resolve) => server.close(resolve)))
+
+ const host = 'localhost'
+ await new Promise((resolve) => server.listen(0, host, resolve))
+ const { port } = server.address()
+ const gyp = {
+ opts: {},
+ version: '42'
+ }
+ const url = `http://${host}:${port}`
+ const res = await install.test.download(gyp, url)
+ t.strictEqual(await res.text(), 'ok')
})
-test('download over https with custom ca', function (t) {
+test('download over https with custom ca', async (t) => {
t.plan(3)
- var cert = fs.readFileSync(path.join(__dirname, 'fixtures/server.crt'), 'utf8')
- var key = fs.readFileSync(path.join(__dirname, 'fixtures/server.key'), 'utf8')
+ const cafile = path.join(__dirname, '/fixtures/ca.crt')
+ const [cert, key, ca] = await Promise.all([
+ fs.promises.readFile(path.join(__dirname, 'fixtures/server.crt'), 'utf8'),
+ fs.promises.readFile(path.join(__dirname, 'fixtures/server.key'), 'utf8'),
+ install.test.readCAFile(cafile)
+ ])
- var cafile = path.join(__dirname, '/fixtures/ca.crt')
- var ca = install.test.readCAFile(cafile)
t.strictEqual(ca.length, 1)
- var options = { ca: ca, cert: cert, key: key }
- var server = https.createServer(options, function (req, res) {
- t.strictEqual(req.headers['user-agent'],
- 'node-gyp v42 (node ' + process.version + ')')
+ const options = { ca: ca, cert: cert, key: key }
+ const server = https.createServer(options, (req, res) => {
+ t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`)
res.end('ok')
- server.close()
})
- server.on('clientError', function (err) {
- throw err
- })
+ t.tearDown(() => new Promise((resolve) => server.close(resolve)))
- var host = 'localhost'
- server.listen(8000, host, function () {
- var port = this.address().port
- var gyp = {
- opts: { cafile: cafile },
- version: '42'
- }
- var url = 'https://' + host + ':' + port
- var req = install.test.download(gyp, {}, url)
- req.on('response', function (res) {
- var body = ''
- res.setEncoding('utf8')
- res.on('data', function (data) {
- body += data
- })
- res.on('end', function () {
- t.strictEqual(body, 'ok')
- })
- })
- })
+ server.on('clientError', (err) => { throw err })
+
+ const host = 'localhost'
+ await new Promise((resolve) => server.listen(0, host, resolve))
+ const { port } = server.address()
+ const gyp = {
+ opts: { cafile },
+ version: '42'
+ }
+ const url = `https://${host}:${port}`
+ const res = await install.test.download(gyp, url)
+ t.strictEqual(await res.text(), 'ok')
})
-test('download over http with proxy', function (t) {
+test('download over http with proxy', async (t) => {
t.plan(2)
- var server = http.createServer(function (req, res) {
- t.strictEqual(req.headers['user-agent'],
- 'node-gyp v42 (node ' + process.version + ')')
+ const server = http.createServer((_, res) => {
res.end('ok')
- pserver.close(function () {
- server.close()
- })
})
- var pserver = http.createServer(function (req, res) {
- t.strictEqual(req.headers['user-agent'],
- 'node-gyp v42 (node ' + process.version + ')')
+ const pserver = http.createServer((req, res) => {
+ t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`)
res.end('proxy ok')
- server.close(function () {
- pserver.close()
- })
})
- var host = 'localhost'
- server.listen(0, host, function () {
- var port = this.address().port
- pserver.listen(port + 1, host, function () {
- var gyp = {
- opts: {
- proxy: 'http://' + host + ':' + (port + 1)
- },
- version: '42'
- }
- var url = 'http://' + host + ':' + port
- var req = install.test.download(gyp, {}, url)
- req.on('response', function (res) {
- var body = ''
- res.setEncoding('utf8')
- res.on('data', function (data) {
- body += data
- })
- res.on('end', function () {
- t.strictEqual(body, 'proxy ok')
- })
- })
- })
- })
+ t.tearDown(() => Promise.all([
+ new Promise((resolve) => server.close(resolve)),
+ new Promise((resolve) => pserver.close(resolve))
+ ]))
+
+ const host = 'localhost'
+ await new Promise((resolve) => server.listen(0, host, resolve))
+ const { port } = server.address()
+ await new Promise((resolve) => pserver.listen(port + 1, host, resolve))
+ const gyp = {
+ opts: {
+ proxy: `http://${host}:${port + 1}`,
+ noproxy: 'bad'
+ },
+ version: '42'
+ }
+ const url = `http://${host}:${port}`
+ const res = await install.test.download(gyp, url)
+ t.strictEqual(await res.text(), 'proxy ok')
})
-test('download over http with noproxy', function (t) {
+test('download over http with noproxy', async (t) => {
t.plan(2)
- var server = http.createServer(function (req, res) {
- t.strictEqual(req.headers['user-agent'],
- 'node-gyp v42 (node ' + process.version + ')')
+ const server = http.createServer((req, res) => {
+ t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`)
res.end('ok')
- pserver.close(function () {
- server.close()
- })
})
- var pserver = http.createServer(function (req, res) {
- t.strictEqual(req.headers['user-agent'],
- 'node-gyp v42 (node ' + process.version + ')')
+ const pserver = http.createServer((_, res) => {
res.end('proxy ok')
- server.close(function () {
- pserver.close()
- })
})
- var host = 'localhost'
- server.listen(0, host, function () {
- var port = this.address().port
- pserver.listen(port + 1, host, function () {
- var gyp = {
- opts: {
- proxy: 'http://' + host + ':' + (port + 1),
- noproxy: 'localhost'
- },
- version: '42'
- }
- var url = 'http://' + host + ':' + port
- var req = install.test.download(gyp, {}, url)
- req.on('response', function (res) {
- var body = ''
- res.setEncoding('utf8')
- res.on('data', function (data) {
- body += data
- })
- res.on('end', function () {
- t.strictEqual(body, 'ok')
- })
- })
- })
- })
+ t.tearDown(() => Promise.all([
+ new Promise((resolve) => server.close(resolve)),
+ new Promise((resolve) => pserver.close(resolve))
+ ]))
+
+ const host = 'localhost'
+ await new Promise((resolve) => server.listen(0, host, resolve))
+ const { port } = server.address()
+ await new Promise((resolve) => pserver.listen(port + 1, host, resolve))
+ const gyp = {
+ opts: {
+ proxy: `http://${host}:${port + 1}`,
+ noproxy: host
+ },
+ version: '42'
+ }
+ const url = `http://${host}:${port}`
+ const res = await install.test.download(gyp, url)
+ t.strictEqual(await res.text(), 'ok')
})
-test('download with missing cafile', function (t) {
+test('download with missing cafile', async (t) => {
t.plan(1)
- var gyp = {
+ const gyp = {
opts: { cafile: 'no.such.file' }
}
try {
- install.test.download(gyp, {}, 'http://bad/')
+    await install.test.download(gyp, 'http://bad/')
} catch (e) {
t.ok(/no.such.file/.test(e.message))
}
})
-test('check certificate splitting', function (t) {
- var cas = install.test.readCAFile(path.join(__dirname, 'fixtures/ca-bundle.crt'))
+test('check certificate splitting', async (t) => {
+ const cas = await install.test.readCAFile(path.join(__dirname, 'fixtures/ca-bundle.crt'))
t.plan(2)
t.strictEqual(cas.length, 2)
t.notStrictEqual(cas[0], cas[1])
@@ -206,7 +158,7 @@ test('check certificate splitting', function (t) {
// only run this test if we are running a version of Node with predictable version path behavior
-test('download headers (actual)', function (t) {
+test('download headers (actual)', async (t) => {
if (process.env.FAST_TEST ||
process.release.name !== 'node' ||
semver.prerelease(process.version) !== null ||
@@ -214,55 +166,42 @@ test('download headers (actual)', function (t) {
return t.skip('Skipping actual download of headers due to test environment configuration')
}
- t.plan(17)
+ t.plan(12)
const expectedDir = path.join(devDir, process.version.replace(/^v/, ''))
- rimraf(expectedDir, (err) => {
- t.ifError(err)
-
- const prog = gyp()
- prog.parseArgv([])
- prog.devDir = devDir
- log.level = 'warn'
- install(prog, [], (err) => {
- t.ifError(err)
-
- fs.readFile(path.join(expectedDir, 'installVersion'), 'utf8', (err, data) => {
- t.ifError(err)
- t.strictEqual(data, '9\n', 'correct installVersion')
- })
-
- fs.readdir(path.join(expectedDir, 'include/node'), (err, list) => {
- t.ifError(err)
-
- t.ok(list.includes('common.gypi'))
- t.ok(list.includes('config.gypi'))
- t.ok(list.includes('node.h'))
- t.ok(list.includes('node_version.h'))
- t.ok(list.includes('openssl'))
- t.ok(list.includes('uv'))
- t.ok(list.includes('uv.h'))
- t.ok(list.includes('v8-platform.h'))
- t.ok(list.includes('v8.h'))
- t.ok(list.includes('zlib.h'))
- })
-
- fs.readFile(path.join(expectedDir, 'include/node/node_version.h'), 'utf8', (err, contents) => {
- t.ifError(err)
-
- const lines = contents.split('\n')
-
- // extract the 3 version parts from the defines to build a valid version string and
- // and check them against our current env version
- const version = ['major', 'minor', 'patch'].reduce((version, type) => {
- const re = new RegExp(`^#define\\sNODE_${type.toUpperCase()}_VERSION`)
- const line = lines.find((l) => re.test(l))
- const i = line ? parseInt(line.replace(/^[^0-9]+([0-9]+).*$/, '$1'), 10) : 'ERROR'
- return `${version}${type !== 'major' ? '.' : 'v'}${i}`
- }, '')
-
- t.strictEqual(version, process.version)
- })
- })
- })
+ await util.promisify(rimraf)(expectedDir)
+
+ const prog = gyp()
+ prog.parseArgv([])
+ prog.devDir = devDir
+ log.level = 'warn'
+ await util.promisify(install)(prog, [])
+
+ const data = await fs.promises.readFile(path.join(expectedDir, 'installVersion'), 'utf8')
+ t.strictEqual(data, '9\n', 'correct installVersion')
+
+ const list = await fs.promises.readdir(path.join(expectedDir, 'include/node'))
+ t.ok(list.includes('common.gypi'))
+ t.ok(list.includes('config.gypi'))
+ t.ok(list.includes('node.h'))
+ t.ok(list.includes('node_version.h'))
+ t.ok(list.includes('openssl'))
+ t.ok(list.includes('uv'))
+ t.ok(list.includes('uv.h'))
+ t.ok(list.includes('v8-platform.h'))
+ t.ok(list.includes('v8.h'))
+ t.ok(list.includes('zlib.h'))
+
+ const lines = (await fs.promises.readFile(path.join(expectedDir, 'include/node/node_version.h'), 'utf8')).split('\n')
+
+  // extract the 3 version parts from the defines to build a valid version string
+  // and check them against our current env version
+ const version = ['major', 'minor', 'patch'].reduce((version, type) => {
+ const re = new RegExp(`^#define\\sNODE_${type.toUpperCase()}_VERSION`)
+ const line = lines.find((l) => re.test(l))
+ const i = line ? parseInt(line.replace(/^[^0-9]+([0-9]+).*$/, '$1'), 10) : 'ERROR'
+ return `${version}${type !== 'major' ? '.' : 'v'}${i}`
+ }, '')
+
+ t.strictEqual(version, process.version)
})
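One pattern recurs throughout the rewritten test-download.js: callback-style server APIs are wrapped in promises so each async test reads top to bottom, and cleanup is registered with tearDown before any assertion can throw. A distilled, standalone sketch of that pattern, mirroring the tap idioms these tests use; the route and final assertion are placeholders:

const http = require('http')
const { test } = require('tap')

test('promisified server lifecycle', async (t) => {
  const server = http.createServer((req, res) => res.end('ok'))

  // Register cleanup first; close() is wrapped the same way as listen().
  t.tearDown(() => new Promise((resolve) => server.close(resolve)))

  // listen() only signals readiness via its callback, so await a wrapper.
  await new Promise((resolve) => server.listen(0, 'localhost', resolve))
  const { port } = server.address()

  const res = await new Promise((resolve) =>
    http.get(`http://localhost:${port}`, resolve))
  t.strictEqual(res.statusCode, 200)
})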
diff --git a/node_modules/node-gyp/test/test-find-python.js b/node_modules/node-gyp/test/test-find-python.js
index 6be887f7e..67d0b2664 100644
--- a/node_modules/node-gyp/test/test-find-python.js
+++ b/node_modules/node-gyp/test/test-find-python.js
@@ -16,13 +16,8 @@ test('find python', function (t) {
t.strictEqual(err, null)
var proc = execFile(found, ['-V'], function (err, stdout, stderr) {
t.strictEqual(err, null)
- if (/Python 2/.test(stderr)) {
- t.strictEqual(stdout, '')
- t.ok(/Python 2/.test(stderr))
- } else {
- t.ok(/Python 3/.test(stdout))
- t.strictEqual(stderr, '')
- }
+ t.ok(/Python 3/.test(stdout))
+ t.strictEqual(stderr, '')
})
proc.stdout.setEncoding('utf-8')
proc.stderr.setEncoding('utf-8')
@@ -66,7 +61,7 @@ test('find python - python', function (t) {
poison(f, 'execFile')
t.strictEqual(program, '/path/python')
t.ok(/sys\.version_info/.test(args[1]))
- cb(null, '2.7.15')
+ cb(null, '3.9.1')
}
t.strictEqual(program,
process.platform === 'win32' ? '"python"' : 'python')
@@ -146,13 +141,14 @@ test('find python - no python2, no python, unix', function (t) {
})
test('find python - no python, use python launcher', function (t) {
- t.plan(3)
+ t.plan(4)
var f = new TestPythonFinder(null, done)
f.win = true
f.execFile = function (program, args, opts, cb) {
if (program === 'py.exe') {
+ t.notEqual(args.indexOf('-3'), -1)
t.notEqual(args.indexOf('-c'), -1)
return cb(null, 'Z:\\snake.exe')
}
@@ -162,7 +158,7 @@ test('find python - no python, use python launcher', function (t) {
cb(new Error('not found'))
} else if (/sys\.version_info/.test(args[args.length - 1])) {
if (program === 'Z:\\snake.exe') {
- cb(null, '2.7.14')
+ cb(null, '3.9.0')
} else {
t.fail()
}
@@ -181,9 +177,9 @@ test('find python - no python, use python launcher', function (t) {
test('find python - no python, no python launcher, good guess', function (t) {
t.plan(2)
- var re = /C:[\\/]Python37[\\/]python[.]exe/
var f = new TestPythonFinder(null, done)
f.win = true
+ const expectedProgram = f.winDefaultLocations[0]
f.execFile = function (program, args, opts, cb) {
if (program === 'py.exe') {
@@ -191,7 +187,7 @@ test('find python - no python, no python launcher, good guess', function (t) {
}
if (/sys\.executable/.test(args[args.length - 1])) {
cb(new Error('not found'))
- } else if (re.test(program) &&
+ } else if (program === expectedProgram &&
/sys\.version_info/.test(args[args.length - 1])) {
cb(null, '3.7.3')
} else {
@@ -202,7 +198,7 @@ test('find python - no python, no python launcher, good guess', function (t) {
function done (err, python) {
t.strictEqual(err, null)
- t.ok(re.test(python))
+ t.ok(python === expectedProgram)
}
})
diff --git a/node_modules/node-gyp/test/test-install.js b/node_modules/node-gyp/test/test-install.js
index c3317155e..5039dc992 100644
--- a/node_modules/node-gyp/test/test-install.js
+++ b/node_modules/node-gyp/test/test-install.js
@@ -1,38 +1,46 @@
'use strict'
-const test = require('tap').test
-const install = require('../lib/install').test.install
+const { test } = require('tap')
+const { test: { install } } = require('../lib/install')
+const log = require('npmlog')
-require('npmlog').level = 'error' // we expect a warning
+log.level = 'error' // we expect a warning
-test('EACCES retry once', function (t) {
+test('EACCES retry once', async (t) => {
t.plan(3)
- var fs = {}
- fs.stat = function (path, cb) {
- var err = new Error()
- err.code = 'EACCES'
- cb(err)
- t.ok(true)
+ const fs = {
+ promises: {
+ stat (_) {
+ const err = new Error()
+ err.code = 'EACCES'
+ t.ok(true)
+ throw err
+ }
+ }
}
- var gyp = {}
- gyp.devDir = __dirname
- gyp.opts = {}
- gyp.opts.ensure = true
- gyp.commands = {}
- gyp.commands.install = function (argv, cb) {
- install(fs, gyp, argv, cb)
- }
- gyp.commands.remove = function (argv, cb) {
- cb()
+ const Gyp = {
+ devDir: __dirname,
+ opts: {
+ ensure: true
+ },
+ commands: {
+ install (argv, cb) {
+ install(fs, Gyp, argv).then(cb, cb)
+ },
+ remove (_, cb) {
+ cb()
+ }
+ }
}
- gyp.commands.install([], function (err) {
+ try {
+ await install(fs, Gyp, [])
+ } catch (err) {
t.ok(true)
if (/"pre" versions of node cannot be installed/.test(err.message)) {
t.ok(true)
- t.ok(true)
}
- })
+ }
})
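The rewritten EACCES test works by injecting a stub in place of the real fs module: install() only touches fs.promises.stat here, so a plain object with one throwing method is enough to drive the retry path. The same injection can target other errno codes; a hypothetical variant, not part of this patch:

// Build a stub fs whose promises.stat always fails with a given code.
const makeFailingFs = (code) => ({
  promises: {
    async stat () {
      const err = new Error(`stat failed with ${code}`)
      err.code = code
      throw err
    }
  }
})

// install(fs, gyp, argv) takes fs as its first argument, so a test can
// pass makeFailingFs('EPERM') and assert on the resulting rejection
// without ever touching the real filesystem.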
diff --git a/node_modules/node-gyp/update-gyp.py b/node_modules/node-gyp/update-gyp.py
index aa2bcb9eb..bb84f071a 100755
--- a/node_modules/node-gyp/update-gyp.py
+++ b/node_modules/node-gyp/update-gyp.py
@@ -4,14 +4,13 @@ import argparse
import os
import shutil
import subprocess
-import sys
import tarfile
import tempfile
import urllib.request
BASE_URL = "https://github.com/nodejs/gyp-next/archive/"
CHECKOUT_PATH = os.path.dirname(os.path.realpath(__file__))
-CHECKOUT_GYP_PATH = os.path.join(CHECKOUT_PATH, 'gyp')
+CHECKOUT_GYP_PATH = os.path.join(CHECKOUT_PATH, "gyp")
parser = argparse.ArgumentParser()
parser.add_argument("tag", help="gyp tag to update to")
@@ -21,25 +20,27 @@ tar_url = BASE_URL + args.tag + ".tar.gz"
changed_files = subprocess.check_output(["git", "diff", "--name-only"]).strip()
if changed_files:
- raise Exception("Can't update gyp while you have uncommitted changes in node-gyp")
+ raise Exception("Can't update gyp while you have uncommitted changes in node-gyp")
with tempfile.TemporaryDirectory() as tmp_dir:
- tar_file = os.path.join(tmp_dir, 'gyp.tar.gz')
- unzip_target = os.path.join(tmp_dir, 'gyp')
- with open(tar_file, 'wb') as f:
- print("Downloading gyp-next@" + args.tag + " into temporary directory...")
- print("From: " + tar_url)
- with urllib.request.urlopen(tar_url) as in_file:
- f.write(in_file.read())
-
- print("Unzipping...")
- with tarfile.open(tar_file, "r:gz") as tar_ref:
- tar_ref.extractall(unzip_target)
-
- print("Moving to current checkout (" + CHECKOUT_PATH + ")...")
- if os.path.exists(CHECKOUT_GYP_PATH):
- shutil.rmtree(CHECKOUT_GYP_PATH)
- shutil.move(os.path.join(unzip_target, os.listdir(unzip_target)[0]), CHECKOUT_GYP_PATH)
+ tar_file = os.path.join(tmp_dir, "gyp.tar.gz")
+ unzip_target = os.path.join(tmp_dir, "gyp")
+ with open(tar_file, "wb") as f:
+ print("Downloading gyp-next@" + args.tag + " into temporary directory...")
+ print("From: " + tar_url)
+ with urllib.request.urlopen(tar_url) as in_file:
+ f.write(in_file.read())
+
+ print("Unzipping...")
+ with tarfile.open(tar_file, "r:gz") as tar_ref:
+ tar_ref.extractall(unzip_target)
+
+ print("Moving to current checkout (" + CHECKOUT_PATH + ")...")
+ if os.path.exists(CHECKOUT_GYP_PATH):
+ shutil.rmtree(CHECKOUT_GYP_PATH)
+ shutil.move(
+ os.path.join(unzip_target, os.listdir(unzip_target)[0]), CHECKOUT_GYP_PATH
+ )
subprocess.check_output(["git", "add", "gyp"], cwd=CHECKOUT_PATH)
-subprocess.check_output(["git", "commit", "-m", "gyp: update gyp to " + args.tag])
+subprocess.check_output(["git", "commit", "-m", "feat(gyp): update gyp to " + args.tag])
diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json
index 437bb8f79..17933ad12 100644
--- a/node_modules/pacote/package.json
+++ b/node_modules/pacote/package.json
@@ -1,6 +1,6 @@
{
"name": "pacote",
- "version": "11.3.5",
+ "version": "12.0.0",
"description": "JavaScript package downloader",
"author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)",
"bin": {
@@ -36,7 +36,7 @@
"@npmcli/git": "^2.1.0",
"@npmcli/installed-package-contents": "^1.0.6",
"@npmcli/promise-spawn": "^1.2.0",
- "@npmcli/run-script": "^1.8.2",
+ "@npmcli/run-script": "^2.0.0",
"cacache": "^15.0.5",
"chownr": "^2.0.0",
"fs-minipass": "^2.1.0",
@@ -54,7 +54,7 @@
"tar": "^6.1.0"
},
"engines": {
- "node": ">=10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
},
"repository": "git@github.com:npm/pacote"
}
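The engines range above is the same one rolled out across the workspace in this commit: each even release line starts at its first LTS version (12.13.0 and 14.15.0), 16 and up is open-ended, and the odd 13.x and 15.x lines are excluded entirely. Checking a few versions with the semver package, which is already in the dependency tree, shows how the range resolves:

const semver = require('semver')

const range = '^12.13.0 || ^14.15.0 || >=16'

console.log(semver.satisfies('10.24.1', range)) // false: node 10 is dropped
console.log(semver.satisfies('12.13.0', range)) // true: first node 12 LTS
console.log(semver.satisfies('13.14.0', range)) // false: odd release line
console.log(semver.satisfies('14.15.0', range)) // true: first node 14 LTS
console.log(semver.satisfies('16.0.0', range))  // true: >=16 is open-ended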
diff --git a/package-lock.json b/package-lock.json
index bf19dc4f0..3aec69a7f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -86,12 +86,12 @@
],
"dependencies": {
"@isaacs/string-locale-compare": "^1.1.0",
- "@npmcli/arborist": "^2.9.0",
+ "@npmcli/arborist": "^3.0.0",
"@npmcli/ci-detect": "^1.2.0",
"@npmcli/config": "^2.3.0",
"@npmcli/map-workspaces": "^1.0.4",
"@npmcli/package-json": "^1.0.1",
- "@npmcli/run-script": "^1.8.6",
+ "@npmcli/run-script": "^2.0.0",
"abbrev": "~1.1.1",
"ansicolors": "~0.3.2",
"ansistyles": "~0.1.3",
@@ -112,22 +112,22 @@
"json-parse-even-better-errors": "^2.3.1",
"libnpmaccess": "^4.0.2",
"libnpmdiff": "^2.0.4",
- "libnpmexec": "^2.0.1",
- "libnpmfund": "^1.1.0",
+ "libnpmexec": "^3.0.0",
+ "libnpmfund": "^2.0.0",
"libnpmhook": "^6.0.2",
"libnpmorg": "^2.0.2",
- "libnpmpack": "^2.0.1",
+ "libnpmpack": "^3.0.0",
"libnpmpublish": "^4.0.1",
"libnpmsearch": "^3.1.1",
"libnpmteam": "^2.0.3",
- "libnpmversion": "^1.2.1",
+ "libnpmversion": "^2.0.1",
"make-fetch-happen": "^9.1.0",
"minipass": "^3.1.3",
"minipass-pipeline": "^1.2.4",
"mkdirp": "^1.0.4",
"mkdirp-infer-owner": "^2.0.0",
"ms": "^2.1.2",
- "node-gyp": "^7.1.2",
+ "node-gyp": "^8.2.0",
"nopt": "^5.0.0",
"npm-audit-report": "^2.1.5",
"npm-install-checks": "^4.0.0",
@@ -138,7 +138,7 @@
"npm-user-validate": "^1.0.1",
"npmlog": "^5.0.1",
"opener": "^1.5.2",
- "pacote": "^11.3.5",
+ "pacote": "^12.0.0",
"parse-conflict-json": "^1.1.1",
"qrcode-terminal": "^0.12.0",
"read": "~1.0.7",
@@ -171,7 +171,7 @@
"tap": "^15.0.9"
},
"engines": {
- "node": ">=10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
},
"docs": {
@@ -772,20 +772,20 @@
}
},
"node_modules/@npmcli/arborist": {
- "version": "2.9.0",
- "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-2.9.0.tgz",
- "integrity": "sha512-21DTow2xC0GlkowlE4zOu99UY21nSymW14fHZmB0yeAqhagmttJPmCUZXU+ngJmJ/Dwe5YP9QJUTgEVRLqnwcg==",
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-3.0.0.tgz",
+ "integrity": "sha512-zAmy3LwjQ81HKYA8Z4Uao8Re+ydiad2sDKI+PKe2loqDXnFolm69LIGmHp8+7BPWX1CAJCs1/XRNTLdXmuMZZw==",
"inBundle": true,
"dependencies": {
"@isaacs/string-locale-compare": "^1.0.1",
"@npmcli/installed-package-contents": "^1.0.7",
"@npmcli/map-workspaces": "^1.0.2",
- "@npmcli/metavuln-calculator": "^1.1.0",
+ "@npmcli/metavuln-calculator": "^2.0.0",
"@npmcli/move-file": "^1.1.0",
"@npmcli/name-from-folder": "^1.0.1",
"@npmcli/node-gyp": "^1.0.1",
"@npmcli/package-json": "^1.0.1",
- "@npmcli/run-script": "^1.8.2",
+ "@npmcli/run-script": "^2.0.0",
"bin-links": "^2.2.1",
"cacache": "^15.0.3",
"common-ancestor-path": "^1.0.1",
@@ -797,7 +797,7 @@
"npm-package-arg": "^8.1.5",
"npm-pick-manifest": "^6.1.0",
"npm-registry-fetch": "^11.0.0",
- "pacote": "^11.3.5",
+ "pacote": "^12.0.0",
"parse-conflict-json": "^1.1.1",
"proc-log": "^1.0.0",
"promise-all-reject-late": "^1.0.0",
@@ -814,7 +814,7 @@
"arborist": "bin/index.js"
},
"engines": {
- "node": ">= 10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
},
"node_modules/@npmcli/ci-detect": {
@@ -908,14 +908,18 @@
}
},
"node_modules/@npmcli/metavuln-calculator": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-1.1.1.tgz",
- "integrity": "sha512-9xe+ZZ1iGVaUovBVFI9h3qW+UuECUzhvZPxK9RaEA2mjU26o5D0JloGYWwLYvQELJNmBdQB6rrpuN8jni6LwzQ==",
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-2.0.0.tgz",
+ "integrity": "sha512-VVW+JhWCKRwCTE+0xvD6p3uV4WpqocNYYtzyvenqL/u1Q3Xx6fGTJ+6UoIoii07fbuEO9U3IIyuGY0CYHDv1sg==",
"inBundle": true,
"dependencies": {
"cacache": "^15.0.5",
- "pacote": "^11.1.11",
+ "json-parse-even-better-errors": "^2.3.1",
+ "pacote": "^12.0.0",
"semver": "^7.3.2"
+ },
+ "engines": {
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
},
"node_modules/@npmcli/move-file": {
@@ -962,14 +966,14 @@
}
},
"node_modules/@npmcli/run-script": {
- "version": "1.8.6",
- "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-1.8.6.tgz",
- "integrity": "sha512-e42bVZnC6VluBZBAFEr3YrdqSspG3bgilyg4nSLBJ7TRGNCzxHa92XAHxQBLYg0BmgwO4b2mf3h/l5EkEWRn3g==",
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-2.0.0.tgz",
+ "integrity": "sha512-fSan/Pu11xS/TdaTpTB0MRn9guwGU8dye+x56mEVgBEd/QsybBbYcAL0phPXi8SGWFEChkQd6M9qL4y6VOpFig==",
"inBundle": true,
"dependencies": {
"@npmcli/node-gyp": "^1.0.2",
"@npmcli/promise-spawn": "^1.3.2",
- "node-gyp": "^7.1.0",
+ "node-gyp": "^8.2.0",
"read-package-json-fast": "^2.0.1"
}
},
@@ -1113,7 +1117,7 @@
"version": "6.12.6",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
"integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"fast-deep-equal": "^3.1.1",
"fast-json-stable-stringify": "^2.0.0",
@@ -1284,7 +1288,7 @@
"version": "0.2.4",
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz",
"integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"safer-buffer": "~2.1.0"
}
@@ -1293,7 +1297,7 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
"integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=",
- "inBundle": true,
+ "dev": true,
"engines": {
"node": ">=0.8"
}
@@ -1320,13 +1324,13 @@
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=",
- "inBundle": true
+ "dev": true
},
"node_modules/aws-sign2": {
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
"integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=",
- "inBundle": true,
+ "dev": true,
"engines": {
"node": "*"
}
@@ -1335,7 +1339,7 @@
"version": "1.11.0",
"resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz",
"integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==",
- "inBundle": true
+ "dev": true
},
"node_modules/babel-plugin-apply-mdx-type-prop": {
"version": "1.6.22",
@@ -1419,7 +1423,7 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
"integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"tweetnacl": "^0.14.3"
}
@@ -1630,7 +1634,7 @@
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
"integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=",
- "inBundle": true
+ "dev": true
},
"node_modules/ccount": {
"version": "1.1.0",
@@ -1950,7 +1954,7 @@
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"delayed-stream": "~1.0.0"
},
@@ -2011,7 +2015,7 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
"integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=",
- "inBundle": true
+ "dev": true
},
"node_modules/correct-license-metadata": {
"version": "1.4.0",
@@ -2083,7 +2087,7 @@
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
"integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"assert-plus": "^1.0.0"
},
@@ -2225,7 +2229,7 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=",
- "inBundle": true,
+ "dev": true,
"engines": {
"node": ">=0.4.0"
}
@@ -2338,7 +2342,7 @@
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
"integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"jsbn": "~0.1.0",
"safer-buffer": "^2.1.0"
@@ -3027,28 +3031,28 @@
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==",
- "inBundle": true
+ "dev": true
},
"node_modules/extsprintf": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
"integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=",
+ "dev": true,
"engines": [
"node >=0.6.0"
- ],
- "inBundle": true
+ ]
},
"node_modules/fast-deep-equal": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
- "inBundle": true
+ "dev": true
},
"node_modules/fast-json-stable-stringify": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
"integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
- "inBundle": true
+ "dev": true
},
"node_modules/fast-levenshtein": {
"version": "2.0.6",
@@ -3245,7 +3249,7 @@
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
"integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=",
- "inBundle": true,
+ "dev": true,
"engines": {
"node": "*"
}
@@ -3424,7 +3428,7 @@
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
"integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"assert-plus": "^1.0.0"
}
@@ -3486,7 +3490,7 @@
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz",
"integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=",
- "inBundle": true,
+ "dev": true,
"engines": {
"node": ">=4"
}
@@ -3496,7 +3500,7 @@
"resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz",
"integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==",
"deprecated": "this library is no longer supported",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"ajv": "^6.12.3",
"har-schema": "^2.0.0"
@@ -3736,7 +3740,7 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
"integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"assert-plus": "^1.0.0",
"jsprim": "^1.2.2",
@@ -4262,7 +4266,7 @@
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
"integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=",
- "inBundle": true
+ "dev": true
},
"node_modules/istanbul-lib-coverage": {
"version": "3.0.0",
@@ -4424,7 +4428,7 @@
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
"integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=",
- "inBundle": true
+ "dev": true
},
"node_modules/jsdom": {
"version": "16.7.0",
@@ -4518,13 +4522,13 @@
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz",
"integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=",
- "inBundle": true
+ "dev": true
},
"node_modules/json-schema-traverse": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
"integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
- "inBundle": true
+ "dev": true
},
"node_modules/json-stable-stringify-without-jsonify": {
"version": "1.0.1",
@@ -4545,7 +4549,7 @@
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
"integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=",
- "inBundle": true
+ "dev": true
},
"node_modules/json5": {
"version": "2.2.0",
@@ -4575,10 +4579,10 @@
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz",
"integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=",
+ "dev": true,
"engines": [
"node >=0.6.0"
],
- "inBundle": true,
"dependencies": {
"assert-plus": "1.0.0",
"extsprintf": "1.3.0",
@@ -4640,34 +4644,37 @@
"link": true
},
"node_modules/libnpmexec": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/libnpmexec/-/libnpmexec-2.0.1.tgz",
- "integrity": "sha512-4SqBB7eJvJWmUKNF42Q5qTOn20DRjEE4TgvEh2yneKlAiRlwlhuS9MNR45juWwmoURJlf2K43bozlVt7OZiIOw==",
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/libnpmexec/-/libnpmexec-3.0.0.tgz",
+ "integrity": "sha512-qyt0gSMSHeHIqb/a+vcryfw3VXfNNgle8jK9QVnrNQAyoDvyVQ6auyoB3ycqWPIl2swTEXPEVremSUaDzOiEgw==",
"inBundle": true,
"dependencies": {
- "@npmcli/arborist": "^2.3.0",
+ "@npmcli/arborist": "^3.0.0",
"@npmcli/ci-detect": "^1.3.0",
- "@npmcli/run-script": "^1.8.4",
+ "@npmcli/run-script": "^2.0.0",
"chalk": "^4.1.0",
"mkdirp-infer-owner": "^2.0.0",
"npm-package-arg": "^8.1.2",
- "pacote": "^11.3.1",
+ "pacote": "^12.0.0",
"proc-log": "^1.0.0",
"read": "^1.0.7",
"read-package-json-fast": "^2.0.2",
"walk-up-path": "^1.0.0"
},
"engines": {
- "node": ">=10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
},
"node_modules/libnpmfund": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/libnpmfund/-/libnpmfund-1.1.0.tgz",
- "integrity": "sha512-Kfmh3pLS5/RGKG5WXEig8mjahPVOxkik6lsbH4iX0si1xxNi6eeUh/+nF1MD+2cgalsQif3O5qyr6mNz2ryJrQ==",
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/libnpmfund/-/libnpmfund-2.0.0.tgz",
+ "integrity": "sha512-A89Mp+VcbVS2IzXlTJxcAEJEulVX7pvCB+NFqWKRIaqIncwGku1u8b0h8Qp9IUHrvzzzJiJxJmMYCXmlf6xFxw==",
"inBundle": true,
"dependencies": {
- "@npmcli/arborist": "^2.5.0"
+ "@npmcli/arborist": "^3.0.0"
+ },
+ "engines": {
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
},
"node_modules/libnpmhook": {
@@ -4697,17 +4704,17 @@
}
},
"node_modules/libnpmpack": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/libnpmpack/-/libnpmpack-2.0.1.tgz",
- "integrity": "sha512-He4/jxOwlaQ7YG7sIC1+yNeXeUDQt8RLBvpI68R3RzPMZPa4/VpxhlDo8GtBOBDYoU8eq6v1wKL38sq58u4ibQ==",
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/libnpmpack/-/libnpmpack-3.0.0.tgz",
+ "integrity": "sha512-W6lt4blkR9YXu/qOrFknfnKBajz/1GvAc5q1XcWTGuBJn2DYKDWHtA7x1fuMQdn7hKDBOPlZ/Aqll+ZvAnrM6g==",
"inBundle": true,
"dependencies": {
- "@npmcli/run-script": "^1.8.3",
+ "@npmcli/run-script": "^2.0.0",
"npm-package-arg": "^8.1.0",
- "pacote": "^11.2.6"
+ "pacote": "^12.0.0"
},
"engines": {
- "node": ">=10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
},
"node_modules/libnpmpublish": {
@@ -4752,16 +4759,19 @@
}
},
"node_modules/libnpmversion": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/libnpmversion/-/libnpmversion-1.2.1.tgz",
- "integrity": "sha512-AA7x5CFgBFN+L4/JWobnY5t4OAHjQuPbAwUYJ7/NtHuyLut5meb+ne/aj0n7PWNiTGCJcRw/W6Zd2LoLT7EZuQ==",
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/libnpmversion/-/libnpmversion-2.0.1.tgz",
+ "integrity": "sha512-uFGtNTe/m0GOIBQCE4ryIsgGNJdeShW+qvYtKNLCCuiG7JY3YEslL/maFFZbaO4wlQa/oj1t0Bm9TyjahvtgQQ==",
"inBundle": true,
"dependencies": {
"@npmcli/git": "^2.0.7",
- "@npmcli/run-script": "^1.8.4",
+ "@npmcli/run-script": "^2.0.0",
"json-parse-even-better-errors": "^2.3.1",
"semver": "^7.3.5",
"stringify-package": "^1.0.1"
+ },
+ "engines": {
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
},
"node_modules/libtap": {
@@ -5075,7 +5085,7 @@
"version": "1.49.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.49.0.tgz",
"integrity": "sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA==",
- "inBundle": true,
+ "dev": true,
"engines": {
"node": ">= 0.6"
}
@@ -5084,7 +5094,7 @@
"version": "2.1.32",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.32.tgz",
"integrity": "sha512-hJGaVS4G4c9TSMYh2n6SQAGrC4RnfU+daP8G7cSCmaqNjiOoUY0VHCMS42pxnQmVF1GWwFhbHWn3RIxCqTmZ9A==",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"mime-db": "1.49.0"
},
@@ -5312,20 +5322,20 @@
"dev": true
},
"node_modules/node-gyp": {
- "version": "7.1.2",
- "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-7.1.2.tgz",
- "integrity": "sha512-CbpcIo7C3eMu3dL1c3d0xw449fHIGALIJsRP4DDPHpyiW8vcriNY7ubh9TE4zEKfSxscY7PjeFnshE7h75ynjQ==",
+ "version": "8.2.0",
+ "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-8.2.0.tgz",
+ "integrity": "sha512-KG8SdcoAnw2d6augGwl1kOayALUrXW/P2uOAm2J2+nmW/HjZo7y+8TDg7LejxbekOOSv3kzhq+NSUYkIDAX8eA==",
"inBundle": true,
"dependencies": {
"env-paths": "^2.2.0",
"glob": "^7.1.4",
- "graceful-fs": "^4.2.3",
+ "graceful-fs": "^4.2.6",
+ "make-fetch-happen": "^8.0.14",
"nopt": "^5.0.0",
"npmlog": "^4.1.2",
- "request": "^2.88.2",
"rimraf": "^3.0.2",
- "semver": "^7.3.2",
- "tar": "^6.0.2",
+ "semver": "^7.3.5",
+ "tar": "^6.1.2",
"which": "^2.0.2"
},
"bin": {
@@ -5369,6 +5379,32 @@
"node": ">=0.10.0"
}
},
+ "node_modules/node-gyp/node_modules/make-fetch-happen": {
+ "version": "8.0.14",
+ "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-8.0.14.tgz",
+ "integrity": "sha512-EsS89h6l4vbfJEtBZnENTOFk8mCRpY5ru36Xe5bcX1KYIli2mkSHqoFsp5O1wMDvTJJzxe/4THpCTtygjeeGWQ==",
+ "inBundle": true,
+ "dependencies": {
+ "agentkeepalive": "^4.1.3",
+ "cacache": "^15.0.5",
+ "http-cache-semantics": "^4.1.0",
+ "http-proxy-agent": "^4.0.1",
+ "https-proxy-agent": "^5.0.0",
+ "is-lambda": "^1.0.1",
+ "lru-cache": "^6.0.0",
+ "minipass": "^3.1.3",
+ "minipass-collect": "^1.0.2",
+ "minipass-fetch": "^1.3.2",
+ "minipass-flush": "^1.0.5",
+ "minipass-pipeline": "^1.2.4",
+ "promise-retry": "^2.0.1",
+ "socks-proxy-agent": "^5.0.0",
+ "ssri": "^8.0.0"
+ },
+ "engines": {
+ "node": ">= 10"
+ }
+ },
"node_modules/node-gyp/node_modules/npmlog": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz",
@@ -5381,6 +5417,20 @@
"set-blocking": "~2.0.0"
}
},
+ "node_modules/node-gyp/node_modules/socks-proxy-agent": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-5.0.1.tgz",
+ "integrity": "sha512-vZdmnjb9a2Tz6WEQVIurybSwElwPxMZaIc7PzqbJTrezcKNznv6giT7J7tZDZ1BojVaa1jvO/UiUdhDVB0ACoQ==",
+ "inBundle": true,
+ "dependencies": {
+ "agent-base": "^6.0.2",
+ "debug": "4",
+ "socks": "^2.3.3"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
"node_modules/node-gyp/node_modules/string-width": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz",
@@ -5761,7 +5811,7 @@
"version": "0.9.0",
"resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
"integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==",
- "inBundle": true,
+ "dev": true,
"engines": {
"node": "*"
}
@@ -5959,15 +6009,15 @@
}
},
"node_modules/pacote": {
- "version": "11.3.5",
- "resolved": "https://registry.npmjs.org/pacote/-/pacote-11.3.5.tgz",
- "integrity": "sha512-fT375Yczn4zi+6Hkk2TBe1x1sP8FgFsEIZ2/iWaXY2r/NkhDJfxbcn5paz1+RTFCyNf+dPnaoBDJoAxXSU8Bkg==",
+ "version": "12.0.0",
+ "resolved": "https://registry.npmjs.org/pacote/-/pacote-12.0.0.tgz",
+ "integrity": "sha512-5DnYqZU0w7GIskuc5yXii1kKpQS2fsaxCaI0FXRsMULXB06lXnZpRdV7JC1TTcQN5uy62h4VWS4WMPYGWu3MYg==",
"inBundle": true,
"dependencies": {
"@npmcli/git": "^2.1.0",
"@npmcli/installed-package-contents": "^1.0.6",
"@npmcli/promise-spawn": "^1.2.0",
- "@npmcli/run-script": "^1.8.2",
+ "@npmcli/run-script": "^2.0.0",
"cacache": "^15.0.5",
"chownr": "^2.0.0",
"fs-minipass": "^2.1.0",
@@ -5988,7 +6038,7 @@
"pacote": "lib/bin.js"
},
"engines": {
- "node": ">=10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
},
"node_modules/parent-module": {
@@ -6100,7 +6150,7 @@
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
"integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=",
- "inBundle": true
+ "dev": true
},
"node_modules/picomatch": {
"version": "2.3.0",
@@ -6345,7 +6395,7 @@
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz",
"integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==",
- "inBundle": true
+ "dev": true
},
"node_modules/pump": {
"version": "3.0.0",
@@ -6361,7 +6411,7 @@
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
- "inBundle": true,
+ "dev": true,
"engines": {
"node": ">=6"
}
@@ -6379,7 +6429,7 @@
"version": "6.5.2",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz",
"integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==",
- "inBundle": true,
+ "dev": true,
"engines": {
"node": ">=0.6"
}
@@ -6757,7 +6807,7 @@
"resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
"integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
"deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"aws-sign2": "~0.7.0",
"aws4": "^1.8.0",
@@ -6788,7 +6838,7 @@
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz",
"integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.6",
@@ -6802,7 +6852,7 @@
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
"integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"psl": "^1.1.28",
"punycode": "^2.1.1"
@@ -6928,6 +6978,7 @@
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
+ "devOptional": true,
"inBundle": true
},
"node_modules/saxes": {
@@ -7233,7 +7284,7 @@
"version": "1.16.1",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",
"integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"asn1": "~0.2.3",
"assert-plus": "^1.0.0",
@@ -9692,7 +9743,7 @@
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
"integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"safe-buffer": "^5.0.1"
},
@@ -9704,7 +9755,7 @@
"version": "0.14.5",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
"integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=",
- "inBundle": true
+ "dev": true
},
"node_modules/type-check": {
"version": "0.4.0",
@@ -9932,7 +9983,7 @@
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
"integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
- "inBundle": true,
+ "dev": true,
"dependencies": {
"punycode": "^2.1.0"
}
@@ -9957,7 +10008,7 @@
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
"integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==",
"deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.",
- "inBundle": true,
+ "dev": true,
"bin": {
"uuid": "bin/uuid"
}
@@ -9991,10 +10042,10 @@
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
"integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=",
+ "dev": true,
"engines": [
"node >=0.6.0"
],
- "inBundle": true,
"dependencies": {
"assert-plus": "^1.0.0",
"core-util-is": "1.0.2",
@@ -10512,7 +10563,7 @@
"diff": "^5.0.0",
"minimatch": "^3.0.4",
"npm-package-arg": "^8.1.4",
- "pacote": "^11.3.4",
+ "pacote": "^12.0.0",
"tar": "^6.1.0"
},
"devDependencies": {
@@ -11024,19 +11075,19 @@
"dev": true
},
"@npmcli/arborist": {
- "version": "2.9.0",
- "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-2.9.0.tgz",
- "integrity": "sha512-21DTow2xC0GlkowlE4zOu99UY21nSymW14fHZmB0yeAqhagmttJPmCUZXU+ngJmJ/Dwe5YP9QJUTgEVRLqnwcg==",
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-3.0.0.tgz",
+ "integrity": "sha512-zAmy3LwjQ81HKYA8Z4Uao8Re+ydiad2sDKI+PKe2loqDXnFolm69LIGmHp8+7BPWX1CAJCs1/XRNTLdXmuMZZw==",
"requires": {
"@isaacs/string-locale-compare": "^1.0.1",
"@npmcli/installed-package-contents": "^1.0.7",
"@npmcli/map-workspaces": "^1.0.2",
- "@npmcli/metavuln-calculator": "^1.1.0",
+ "@npmcli/metavuln-calculator": "^2.0.0",
"@npmcli/move-file": "^1.1.0",
"@npmcli/name-from-folder": "^1.0.1",
"@npmcli/node-gyp": "^1.0.1",
"@npmcli/package-json": "^1.0.1",
- "@npmcli/run-script": "^1.8.2",
+ "@npmcli/run-script": "^2.0.0",
"bin-links": "^2.2.1",
"cacache": "^15.0.3",
"common-ancestor-path": "^1.0.1",
@@ -11048,7 +11099,7 @@
"npm-package-arg": "^8.1.5",
"npm-pick-manifest": "^6.1.0",
"npm-registry-fetch": "^11.0.0",
- "pacote": "^11.3.5",
+ "pacote": "^12.0.0",
"parse-conflict-json": "^1.1.1",
"proc-log": "^1.0.0",
"promise-all-reject-late": "^1.0.0",
@@ -11132,12 +11183,13 @@
}
},
"@npmcli/metavuln-calculator": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-1.1.1.tgz",
- "integrity": "sha512-9xe+ZZ1iGVaUovBVFI9h3qW+UuECUzhvZPxK9RaEA2mjU26o5D0JloGYWwLYvQELJNmBdQB6rrpuN8jni6LwzQ==",
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-2.0.0.tgz",
+ "integrity": "sha512-VVW+JhWCKRwCTE+0xvD6p3uV4WpqocNYYtzyvenqL/u1Q3Xx6fGTJ+6UoIoii07fbuEO9U3IIyuGY0CYHDv1sg==",
"requires": {
"cacache": "^15.0.5",
- "pacote": "^11.1.11",
+ "json-parse-even-better-errors": "^2.3.1",
+ "pacote": "^12.0.0",
"semver": "^7.3.2"
}
},
@@ -11177,13 +11229,13 @@
}
},
"@npmcli/run-script": {
- "version": "1.8.6",
- "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-1.8.6.tgz",
- "integrity": "sha512-e42bVZnC6VluBZBAFEr3YrdqSspG3bgilyg4nSLBJ7TRGNCzxHa92XAHxQBLYg0BmgwO4b2mf3h/l5EkEWRn3g==",
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-2.0.0.tgz",
+ "integrity": "sha512-fSan/Pu11xS/TdaTpTB0MRn9guwGU8dye+x56mEVgBEd/QsybBbYcAL0phPXi8SGWFEChkQd6M9qL4y6VOpFig==",
"requires": {
"@npmcli/node-gyp": "^1.0.2",
"@npmcli/promise-spawn": "^1.3.2",
- "node-gyp": "^7.1.0",
+ "node-gyp": "^8.2.0",
"read-package-json-fast": "^2.0.1"
}
},
@@ -11299,6 +11351,7 @@
"version": "6.12.6",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
"integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
+ "dev": true,
"requires": {
"fast-deep-equal": "^3.1.1",
"fast-json-stable-stringify": "^2.0.0",
@@ -11421,6 +11474,7 @@
"version": "0.2.4",
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz",
"integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==",
+ "dev": true,
"requires": {
"safer-buffer": "~2.1.0"
}
@@ -11428,7 +11482,8 @@
"assert-plus": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
- "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU="
+ "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=",
+ "dev": true
},
"astral-regex": {
"version": "2.0.0",
@@ -11445,17 +11500,20 @@
"asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
- "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
+ "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=",
+ "dev": true
},
"aws-sign2": {
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
- "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg="
+ "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=",
+ "dev": true
},
"aws4": {
"version": "1.11.0",
"resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz",
- "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA=="
+ "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==",
+ "dev": true
},
"babel-plugin-apply-mdx-type-prop": {
"version": "1.6.22",
@@ -11513,6 +11571,7 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
"integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=",
+ "dev": true,
"requires": {
"tweetnacl": "^0.14.3"
}
@@ -11674,7 +11733,8 @@
"caseless": {
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
- "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw="
+ "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=",
+ "dev": true
},
"ccount": {
"version": "1.1.0",
@@ -11892,6 +11952,7 @@
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
+ "dev": true,
"requires": {
"delayed-stream": "~1.0.0"
}
@@ -11943,7 +12004,8 @@
"core-util-is": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
- "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac="
+ "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=",
+ "dev": true
},
"correct-license-metadata": {
"version": "1.4.0",
@@ -12005,6 +12067,7 @@
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
"integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=",
+ "dev": true,
"requires": {
"assert-plus": "^1.0.0"
}
@@ -12110,7 +12173,8 @@
"delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
- "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk="
+ "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=",
+ "dev": true
},
"delegates": {
"version": "1.0.0",
@@ -12197,6 +12261,7 @@
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
"integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=",
+ "dev": true,
"requires": {
"jsbn": "~0.1.0",
"safer-buffer": "^2.1.0"
@@ -12718,22 +12783,26 @@
"extend": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
- "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
+ "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==",
+ "dev": true
},
"extsprintf": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
- "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU="
+ "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=",
+ "dev": true
},
"fast-deep-equal": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
- "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
+ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
+ "dev": true
},
"fast-json-stable-stringify": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
- "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="
+ "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
+ "dev": true
},
"fast-levenshtein": {
"version": "2.0.6",
@@ -12885,7 +12954,8 @@
"forever-agent": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
- "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE="
+ "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=",
+ "dev": true
},
"form-data": {
"version": "3.0.1",
@@ -13011,6 +13081,7 @@
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
"integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=",
+ "dev": true,
"requires": {
"assert-plus": "^1.0.0"
}
@@ -13057,12 +13128,14 @@
"har-schema": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz",
- "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI="
+ "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=",
+ "dev": true
},
"har-validator": {
"version": "5.1.5",
"resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz",
"integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==",
+ "dev": true,
"requires": {
"ajv": "^6.12.3",
"har-schema": "^2.0.0"
@@ -13235,6 +13308,7 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
"integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=",
+ "dev": true,
"requires": {
"assert-plus": "^1.0.0",
"jsprim": "^1.2.2",
@@ -13570,7 +13644,8 @@
"isstream": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
- "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo="
+ "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=",
+ "dev": true
},
"istanbul-lib-coverage": {
"version": "3.0.0",
@@ -13701,7 +13776,8 @@
"jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
- "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM="
+ "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=",
+ "dev": true
},
"jsdom": {
"version": "16.7.0",
@@ -13772,12 +13848,14 @@
"json-schema": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz",
- "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM="
+ "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=",
+ "dev": true
},
"json-schema-traverse": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
- "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="
+ "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
+ "dev": true
},
"json-stable-stringify-without-jsonify": {
"version": "1.0.1",
@@ -13793,7 +13871,8 @@
"json-stringify-safe": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
- "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus="
+ "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=",
+ "dev": true
},
"json5": {
"version": "2.2.0",
@@ -13813,6 +13892,7 @@
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz",
"integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=",
+ "dev": true,
"requires": {
"assert-plus": "1.0.0",
"extsprintf": "1.3.0",
@@ -13871,23 +13951,23 @@
"eslint-plugin-standard": "^5.0.0",
"minimatch": "^3.0.4",
"npm-package-arg": "^8.1.4",
- "pacote": "^11.3.4",
+ "pacote": "^12.0.0",
"tap": "^15.0.9",
"tar": "^6.1.0"
}
},
"libnpmexec": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/libnpmexec/-/libnpmexec-2.0.1.tgz",
- "integrity": "sha512-4SqBB7eJvJWmUKNF42Q5qTOn20DRjEE4TgvEh2yneKlAiRlwlhuS9MNR45juWwmoURJlf2K43bozlVt7OZiIOw==",
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/libnpmexec/-/libnpmexec-3.0.0.tgz",
+ "integrity": "sha512-qyt0gSMSHeHIqb/a+vcryfw3VXfNNgle8jK9QVnrNQAyoDvyVQ6auyoB3ycqWPIl2swTEXPEVremSUaDzOiEgw==",
"requires": {
- "@npmcli/arborist": "^2.3.0",
+ "@npmcli/arborist": "^3.0.0",
"@npmcli/ci-detect": "^1.3.0",
- "@npmcli/run-script": "^1.8.4",
+ "@npmcli/run-script": "^2.0.0",
"chalk": "^4.1.0",
"mkdirp-infer-owner": "^2.0.0",
"npm-package-arg": "^8.1.2",
- "pacote": "^11.3.1",
+ "pacote": "^12.0.0",
"proc-log": "^1.0.0",
"read": "^1.0.7",
"read-package-json-fast": "^2.0.2",
@@ -13895,11 +13975,11 @@
}
},
"libnpmfund": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/libnpmfund/-/libnpmfund-1.1.0.tgz",
- "integrity": "sha512-Kfmh3pLS5/RGKG5WXEig8mjahPVOxkik6lsbH4iX0si1xxNi6eeUh/+nF1MD+2cgalsQif3O5qyr6mNz2ryJrQ==",
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/libnpmfund/-/libnpmfund-2.0.0.tgz",
+ "integrity": "sha512-A89Mp+VcbVS2IzXlTJxcAEJEulVX7pvCB+NFqWKRIaqIncwGku1u8b0h8Qp9IUHrvzzzJiJxJmMYCXmlf6xFxw==",
"requires": {
- "@npmcli/arborist": "^2.5.0"
+ "@npmcli/arborist": "^3.0.0"
}
},
"libnpmhook": {
@@ -13921,13 +14001,13 @@
}
},
"libnpmpack": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/libnpmpack/-/libnpmpack-2.0.1.tgz",
- "integrity": "sha512-He4/jxOwlaQ7YG7sIC1+yNeXeUDQt8RLBvpI68R3RzPMZPa4/VpxhlDo8GtBOBDYoU8eq6v1wKL38sq58u4ibQ==",
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/libnpmpack/-/libnpmpack-3.0.0.tgz",
+ "integrity": "sha512-W6lt4blkR9YXu/qOrFknfnKBajz/1GvAc5q1XcWTGuBJn2DYKDWHtA7x1fuMQdn7hKDBOPlZ/Aqll+ZvAnrM6g==",
"requires": {
- "@npmcli/run-script": "^1.8.3",
+ "@npmcli/run-script": "^2.0.0",
"npm-package-arg": "^8.1.0",
- "pacote": "^11.2.6"
+ "pacote": "^12.0.0"
}
},
"libnpmpublish": {
@@ -13960,12 +14040,12 @@
}
},
"libnpmversion": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/libnpmversion/-/libnpmversion-1.2.1.tgz",
- "integrity": "sha512-AA7x5CFgBFN+L4/JWobnY5t4OAHjQuPbAwUYJ7/NtHuyLut5meb+ne/aj0n7PWNiTGCJcRw/W6Zd2LoLT7EZuQ==",
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/libnpmversion/-/libnpmversion-2.0.1.tgz",
+ "integrity": "sha512-uFGtNTe/m0GOIBQCE4ryIsgGNJdeShW+qvYtKNLCCuiG7JY3YEslL/maFFZbaO4wlQa/oj1t0Bm9TyjahvtgQQ==",
"requires": {
"@npmcli/git": "^2.0.7",
- "@npmcli/run-script": "^1.8.4",
+ "@npmcli/run-script": "^2.0.0",
"json-parse-even-better-errors": "^2.3.1",
"semver": "^7.3.5",
"stringify-package": "^1.0.1"
@@ -14216,12 +14296,14 @@
"mime-db": {
"version": "1.49.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.49.0.tgz",
- "integrity": "sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA=="
+ "integrity": "sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA==",
+ "dev": true
},
"mime-types": {
"version": "2.1.32",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.32.tgz",
"integrity": "sha512-hJGaVS4G4c9TSMYh2n6SQAGrC4RnfU+daP8G7cSCmaqNjiOoUY0VHCMS42pxnQmVF1GWwFhbHWn3RIxCqTmZ9A==",
+ "dev": true,
"requires": {
"mime-db": "1.49.0"
}
@@ -14387,19 +14469,19 @@
"dev": true
},
"node-gyp": {
- "version": "7.1.2",
- "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-7.1.2.tgz",
- "integrity": "sha512-CbpcIo7C3eMu3dL1c3d0xw449fHIGALIJsRP4DDPHpyiW8vcriNY7ubh9TE4zEKfSxscY7PjeFnshE7h75ynjQ==",
+ "version": "8.2.0",
+ "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-8.2.0.tgz",
+ "integrity": "sha512-KG8SdcoAnw2d6augGwl1kOayALUrXW/P2uOAm2J2+nmW/HjZo7y+8TDg7LejxbekOOSv3kzhq+NSUYkIDAX8eA==",
"requires": {
"env-paths": "^2.2.0",
"glob": "^7.1.4",
- "graceful-fs": "^4.2.3",
+ "graceful-fs": "^4.2.6",
+ "make-fetch-happen": "^8.0.14",
"nopt": "^5.0.0",
"npmlog": "^4.1.2",
- "request": "^2.88.2",
"rimraf": "^3.0.2",
- "semver": "^7.3.2",
- "tar": "^6.0.2",
+ "semver": "^7.3.5",
+ "tar": "^6.1.2",
"which": "^2.0.2"
},
"dependencies": {
@@ -14431,6 +14513,28 @@
"number-is-nan": "^1.0.0"
}
},
+ "make-fetch-happen": {
+ "version": "8.0.14",
+ "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-8.0.14.tgz",
+ "integrity": "sha512-EsS89h6l4vbfJEtBZnENTOFk8mCRpY5ru36Xe5bcX1KYIli2mkSHqoFsp5O1wMDvTJJzxe/4THpCTtygjeeGWQ==",
+ "requires": {
+ "agentkeepalive": "^4.1.3",
+ "cacache": "^15.0.5",
+ "http-cache-semantics": "^4.1.0",
+ "http-proxy-agent": "^4.0.1",
+ "https-proxy-agent": "^5.0.0",
+ "is-lambda": "^1.0.1",
+ "lru-cache": "^6.0.0",
+ "minipass": "^3.1.3",
+ "minipass-collect": "^1.0.2",
+ "minipass-fetch": "^1.3.2",
+ "minipass-flush": "^1.0.5",
+ "minipass-pipeline": "^1.2.4",
+ "promise-retry": "^2.0.1",
+ "socks-proxy-agent": "^5.0.0",
+ "ssri": "^8.0.0"
+ }
+ },
"npmlog": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz",
@@ -14442,6 +14546,16 @@
"set-blocking": "~2.0.0"
}
},
+ "socks-proxy-agent": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-5.0.1.tgz",
+ "integrity": "sha512-vZdmnjb9a2Tz6WEQVIurybSwElwPxMZaIc7PzqbJTrezcKNznv6giT7J7tZDZ1BojVaa1jvO/UiUdhDVB0ACoQ==",
+ "requires": {
+ "agent-base": "^6.0.2",
+ "debug": "4",
+ "socks": "^2.3.3"
+ }
+ },
"string-width": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz",
@@ -14730,7 +14844,8 @@
"oauth-sign": {
"version": "0.9.0",
"resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
- "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ=="
+ "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==",
+ "dev": true
},
"object-assign": {
"version": "4.1.1",
@@ -14870,14 +14985,14 @@
}
},
"pacote": {
- "version": "11.3.5",
- "resolved": "https://registry.npmjs.org/pacote/-/pacote-11.3.5.tgz",
- "integrity": "sha512-fT375Yczn4zi+6Hkk2TBe1x1sP8FgFsEIZ2/iWaXY2r/NkhDJfxbcn5paz1+RTFCyNf+dPnaoBDJoAxXSU8Bkg==",
+ "version": "12.0.0",
+ "resolved": "https://registry.npmjs.org/pacote/-/pacote-12.0.0.tgz",
+ "integrity": "sha512-5DnYqZU0w7GIskuc5yXii1kKpQS2fsaxCaI0FXRsMULXB06lXnZpRdV7JC1TTcQN5uy62h4VWS4WMPYGWu3MYg==",
"requires": {
"@npmcli/git": "^2.1.0",
"@npmcli/installed-package-contents": "^1.0.6",
"@npmcli/promise-spawn": "^1.2.0",
- "@npmcli/run-script": "^1.8.2",
+ "@npmcli/run-script": "^2.0.0",
"cacache": "^15.0.5",
"chownr": "^2.0.0",
"fs-minipass": "^2.1.0",
@@ -14979,7 +15094,8 @@
"performance-now": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
- "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns="
+ "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=",
+ "dev": true
},
"picomatch": {
"version": "2.3.0",
@@ -15170,7 +15286,8 @@
"psl": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz",
- "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ=="
+ "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==",
+ "dev": true
},
"pump": {
"version": "3.0.0",
@@ -15185,7 +15302,8 @@
"punycode": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
- "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A=="
+ "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
+ "dev": true
},
"qrcode-terminal": {
"version": "0.12.0",
@@ -15195,7 +15313,8 @@
"qs": {
"version": "6.5.2",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz",
- "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA=="
+ "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==",
+ "dev": true
},
"queue-microtask": {
"version": "1.2.3",
@@ -15493,6 +15612,7 @@
"version": "2.88.2",
"resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
"integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
+ "dev": true,
"requires": {
"aws-sign2": "~0.7.0",
"aws4": "^1.8.0",
@@ -15520,6 +15640,7 @@
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz",
"integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==",
+ "dev": true,
"requires": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.6",
@@ -15530,6 +15651,7 @@
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
"integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
+ "dev": true,
"requires": {
"psl": "^1.1.28",
"punycode": "^2.1.1"
@@ -15601,7 +15723,8 @@
"safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
- "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
+ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
+ "devOptional": true
},
"saxes": {
"version": "5.0.1",
@@ -15835,6 +15958,7 @@
"version": "1.16.1",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",
"integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==",
+ "dev": true,
"requires": {
"asn1": "~0.2.3",
"assert-plus": "^1.0.0",
@@ -17589,6 +17713,7 @@
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
"integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=",
+ "dev": true,
"requires": {
"safe-buffer": "^5.0.1"
}
@@ -17596,7 +17721,8 @@
"tweetnacl": {
"version": "0.14.5",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
- "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q="
+ "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=",
+ "dev": true
},
"type-check": {
"version": "0.4.0",
@@ -17765,6 +17891,7 @@
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
"integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
+ "dev": true,
"requires": {
"punycode": "^2.1.0"
}
@@ -17786,7 +17913,8 @@
"uuid": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
- "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
+ "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==",
+ "dev": true
},
"v8-compile-cache": {
"version": "2.3.0",
@@ -17815,6 +17943,7 @@
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
"integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=",
+ "dev": true,
"requires": {
"assert-plus": "^1.0.0",
"core-util-is": "1.0.2",
diff --git a/package.json b/package.json
index 3fb2af4ae..bd5675f95 100644
--- a/package.json
+++ b/package.json
@@ -38,7 +38,7 @@
"lib": "./lib",
"man": "./man"
},
- "main": "./lib/npm.js",
+ "main": "./index.js",
"bin": {
"npm": "bin/npm-cli.js",
"npx": "bin/npx-cli.js"
@@ -46,20 +46,20 @@
"exports": {
".": [
{
- "default": "./lib/npm.js"
+ "default": "./index.js"
},
- "./lib/npm.js"
+ "./index.js"
],
"./package.json": "./package.json"
},
"dependencies": {
"@isaacs/string-locale-compare": "^1.1.0",
- "@npmcli/arborist": "^2.9.0",
+ "@npmcli/arborist": "^3.0.0",
"@npmcli/ci-detect": "^1.2.0",
"@npmcli/config": "^2.3.0",
"@npmcli/map-workspaces": "^1.0.4",
"@npmcli/package-json": "^1.0.1",
- "@npmcli/run-script": "^1.8.6",
+ "@npmcli/run-script": "^2.0.0",
"abbrev": "~1.1.1",
"ansicolors": "~0.3.2",
"ansistyles": "~0.1.3",
@@ -80,22 +80,22 @@
"json-parse-even-better-errors": "^2.3.1",
"libnpmaccess": "^4.0.2",
"libnpmdiff": "^2.0.4",
- "libnpmexec": "^2.0.1",
- "libnpmfund": "^1.1.0",
+ "libnpmexec": "^3.0.0",
+ "libnpmfund": "^2.0.0",
"libnpmhook": "^6.0.2",
"libnpmorg": "^2.0.2",
- "libnpmpack": "^2.0.1",
+ "libnpmpack": "^3.0.0",
"libnpmpublish": "^4.0.1",
"libnpmsearch": "^3.1.1",
"libnpmteam": "^2.0.3",
- "libnpmversion": "^1.2.1",
+ "libnpmversion": "^2.0.1",
"make-fetch-happen": "^9.1.0",
"minipass": "^3.1.3",
"minipass-pipeline": "^1.2.4",
"mkdirp": "^1.0.4",
"mkdirp-infer-owner": "^2.0.0",
"ms": "^2.1.2",
- "node-gyp": "^7.1.2",
+ "node-gyp": "^8.2.0",
"nopt": "^5.0.0",
"npm-audit-report": "^2.1.5",
"npm-install-checks": "^4.0.0",
@@ -106,7 +106,7 @@
"npm-user-validate": "^1.0.1",
"npmlog": "^5.0.1",
"opener": "^1.5.2",
- "pacote": "^11.3.5",
+ "pacote": "^12.0.0",
"parse-conflict-json": "^1.1.1",
"qrcode-terminal": "^0.12.0",
"read": "~1.0.7",
@@ -230,13 +230,13 @@
"LC_ALL=sk"
],
"color": 1,
- "files": "test/{lib,bin}",
+ "files": "test/{lib,bin,index.js}",
"coverage-map": "test/coverage-map.js",
"check-coverage": true,
"timeout": 600
},
"license": "Artistic-2.0",
"engines": {
- "node": ">=10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
}
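The new engines range is deliberately not a plain floor: within the 12.x and 14.x lines it requires at least the first LTS release (12.13.0 and 14.15.0 respectively), while any 16.x or later release qualifies; the unsupported-versions test near the end of this diff encodes the same table. A small sketch of evaluating a runtime against the range, using the semver package npm already depends on:

const semver = require('semver')
const range = '^12.13.0 || ^14.15.0 || >=16'
console.log(semver.satisfies('12.12.0', range)) // false: before the first 12.x LTS
console.log(semver.satisfies('14.15.0', range)) // true
console.log(semver.satisfies('13.0.0', range))  // false: odd-numbered line, never LTS
console.log(semver.satisfies('16.0.0', range))  // true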
diff --git a/packages/libnpmdiff/package.json b/packages/libnpmdiff/package.json
index 53fd5d4be..529fc7953 100644
--- a/packages/libnpmdiff/package.json
+++ b/packages/libnpmdiff/package.json
@@ -60,7 +60,7 @@
"diff": "^5.0.0",
"minimatch": "^3.0.4",
"npm-package-arg": "^8.1.4",
- "pacote": "^11.3.4",
+ "pacote": "^12.0.0",
"tar": "^6.1.0"
}
}
diff --git a/smoke-tests/index.js b/smoke-tests/index.js
index 076c53e78..bf99b4730 100644
--- a/smoke-tests/index.js
+++ b/smoke-tests/index.js
@@ -30,7 +30,7 @@ const path = t.testdir({
})
const localPrefix = resolve(path, 'project')
const userconfigLocation = resolve(path, '.npmrc')
-const npmLocation = resolve(__dirname, '..')
+const npmLocation = resolve(__dirname, '../bin/npm-cli.js')
const cacheLocation = resolve(path, 'cache')
const binLocation = resolve(path, 'bin')
const env = {
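The smoke tests now spawn bin/npm-cli.js directly, the same script the "bin" entries point at, instead of handing node the package root. A self-contained sketch exercising that entry point (hypothetical file, assumed to live under smoke-tests/ so the relative path resolves):

const { resolve } = require('path')
const { spawnSync } = require('child_process')

// same path the smoke tests now use
const npmCli = resolve(__dirname, '../bin/npm-cli.js')
const res = spawnSync(process.execPath, [npmCli, '--version'], { encoding: 'utf8' })
console.log(res.stdout.trim()) // prints the npm version if the bin script loads cleanly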
diff --git a/test/coverage-map.js b/test/coverage-map.js
index 63f2a608e..aff7a6527 100644
--- a/test/coverage-map.js
+++ b/test/coverage-map.js
@@ -11,7 +11,7 @@ const coverageMap = (filename) => {
// this one doesn't provide any coverage nyc can track
return []
}
- if (/^test\/(lib|bin)\//.test(filename))
+ if (/^test\/(lib\/|bin\/|index\.js$)/.test(filename))
return filename.replace(/^test\//, '')
return []
}
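The rewritten regex keeps the lib/ and bin/ mappings and adds an exact-match case so the new test/index.js maps onto the root index.js; the trailing $ keeps near-misses such as test/index.js.map from slipping through. The mapping is easy to check in isolation:

// mirrors the coverage-map logic above
const map = f =>
  /^test\/(lib\/|bin\/|index\.js$)/.test(f) ? f.replace(/^test\//, '') : []
console.log(map('test/index.js'))   // 'index.js'
console.log(map('test/lib/npm.js')) // 'lib/npm.js'
console.log(map('test/other.js'))   // []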
diff --git a/test/index.js b/test/index.js
new file mode 100644
index 000000000..11dd5eb2d
--- /dev/null
+++ b/test/index.js
@@ -0,0 +1,23 @@
+const t = require('tap')
+const index = require.resolve('../index.js')
+const packageIndex = require.resolve('../')
+t.equal(index, packageIndex, 'index is main package require() export')
+t.throws(() => require(index), {
+ message: 'The programmatic API was removed in npm v8.0.0',
+})
+
+t.test('loading as main module will load the cli', t => {
+ const { spawn } = require('child_process')
+ const LS = require('../lib/ls.js')
+ const ls = new LS({})
+ const p = spawn(process.execPath, [index, 'ls', '-h'])
+ const out = []
+ p.stdout.on('data', c => out.push(c))
+ p.on('close', (code, signal) => {
+ t.equal(code, 0)
+ t.equal(signal, null)
+ t.match(Buffer.concat(out).toString(), ls.usage)
+ t.end()
+ })
+})
+
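The new test pins both halves of the breaking change: require()-ing the package throws, while executing index.js as the main module still runs the CLI. A hypothetical sketch of an entry point that would satisfy both assertions (the actual index.js hunk is not shown here; lib/cli.js is an assumed entry that accepts the process object):

// sketch only, not the committed index.js
if (require.main === module) {
  // run directly, e.g. `node index.js ls -h`: hand off to the CLI
  require('./lib/cli.js')(process)
} else {
  throw new Error('The programmatic API was removed in npm v8.0.0')
}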
diff --git a/test/lib/npm.js b/test/lib/npm.js
index 03bb46d8d..7d6176247 100644
--- a/test/lib/npm.js
+++ b/test/lib/npm.js
@@ -412,22 +412,6 @@ t.test('npm.load', t => {
t.end()
})

-t.test('loading as main will load the cli', t => {
- const { spawn } = require('child_process')
- const npm = require.resolve('../../lib/npm.js')
- const LS = require('../../lib/ls.js')
- const ls = new LS({})
- const p = spawn(process.execPath, [npm, 'ls', '-h'])
- const out = []
- p.stdout.on('data', c => out.push(c))
- p.on('close', (code, signal) => {
- t.equal(code, 0)
- t.equal(signal, null)
- t.match(Buffer.concat(out).toString(), ls.usage)
- t.end()
- })
-})
-
t.test('set process.title', t => {
t.test('basic title setting', async t => {
process.argv = [
@@ -501,3 +485,26 @@ t.test('timings', t => {
t.match(npm.timings, { foo: Number, bar: Number })
t.end()
})
+
+t.test('output clears progress and console.logs the message', t => {
+ const npm = require('../../lib/npm.js')
+ const logs = []
+ const { log } = console
+ const { log: { clearProgress, showProgress } } = npm
+ let showingProgress = true
+ npm.log.clearProgress = () => showingProgress = false
+ npm.log.showProgress = () => showingProgress = true
+ console.log = (...args) => {
+ t.equal(showingProgress, false, 'should not be showing progress right now')
+ logs.push(args)
+ }
+ t.teardown(() => {
+ console.log = log
+ npm.log.showProgress = showProgress
+ npm.log.clearProgress = clearProgress
+ })
+
+ npm.output('hello')
+ t.strictSame(logs, [['hello']])
+ t.end()
+})
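The added output() test fixes an ordering contract rather than any particular formatting: the progress bar must be cleared before the message reaches console.log and re-shown afterwards, which is what the stubbed clearProgress/showProgress pair verifies. A minimal sketch of code honoring that contract (assumed shape; lib/npm.js itself is not part of this diff):

const log = require('npmlog')

const output = (...msg) => {
  log.clearProgress() // hide the progress bar so the message prints cleanly
  console.log(...msg)
  log.showProgress()  // resume progress rendering
}

output('hello')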
diff --git a/test/lib/utils/unsupported.js b/test/lib/utils/unsupported.js
index 3a05d9066..4d806cefc 100644
--- a/test/lib/utils/unsupported.js
+++ b/test/lib/utils/unsupported.js
@@ -27,10 +27,15 @@ const versions = [
['v7.2.3', false, true],
['v8.4.0', false, true],
['v9.3.0', false, true],
- ['v10.0.0-0', false, false],
- ['v11.0.0-0', false, false],
- ['v12.0.0-0', false, false],
- ['v13.0.0-0', false, false],
+ ['v10.0.0-0', false, true],
+ ['v11.0.0-0', false, true],
+ ['v12.0.0-0', false, true],
+ ['v12.13.0-0', false, false],
+ ['v13.0.0-0', false, true],
+ ['v14.0.0-0', false, true],
+ ['v14.15.0-0', false, false],
+ ['v15.0.0-0', false, true],
+ ['v16.0.0-0', false, false],
]
t.test('versions', function (t) {