github.com/npm/cli.git

author    isaacs <i@izs.me>  2020-04-24 02:09:04 +0300
committer isaacs <i@izs.me>  2020-05-08 04:18:59 +0300
commit    5e940d93204883884fcca9780dbe96157061b716 (patch)
tree      393566e29d6f15cda0fe3874840e913662587ff3 /node_modules
parent    e57a99113ca6ab41b7c325830f22f261a4260f89 (diff)
update @npmcli/arborist, dedupe/organize metadeps
This adds support for Arborist.audit()
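A minimal usage sketch of the new method (the Arborist constructor and the path/fix option names come from the diff below; the project path is hypothetical):

    const Arborist = require('@npmcli/arborist')

    const arb = new Arborist({ path: '/some/project' })

    // resolves to an AuditReport: a Map of package name -> Vuln
    arb.audit().then(report => console.log(JSON.stringify(report, null, 2)))

    // with { fix: true }, audit() reifies the repaired ideal tree instead:
    // arb.audit({ fix: true })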
Diffstat (limited to 'node_modules')
-rw-r--r--  node_modules/@npmcli/arborist/lib/arborist/audit.js | 24
-rw-r--r--  node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js | 141
-rw-r--r--  node_modules/@npmcli/arborist/lib/arborist/index.js | 6
-rw-r--r--  node_modules/@npmcli/arborist/lib/arborist/load-actual.js | 5
-rw-r--r--  node_modules/@npmcli/arborist/lib/arborist/load-virtual.js | 45
-rw-r--r--  node_modules/@npmcli/arborist/lib/arborist/reify.js | 126
-rw-r--r--  node_modules/@npmcli/arborist/lib/audit-report.js | 385
-rw-r--r--  node_modules/@npmcli/arborist/lib/calc-dep-flags.js | 3
-rw-r--r--  node_modules/@npmcli/arborist/lib/diff.js | 15
-rw-r--r--  node_modules/@npmcli/arborist/lib/edge.js | 6
-rw-r--r--  node_modules/@npmcli/arborist/lib/node.js | 35
-rw-r--r--  node_modules/@npmcli/arborist/lib/shrinkwrap.js | 82
-rw-r--r--  node_modules/@npmcli/arborist/lib/vuln.js | 128
-rw-r--r--  node_modules/@npmcli/arborist/node_modules/mkdirp/lib/opts-arg.js | 4
-rw-r--r--  node_modules/@npmcli/arborist/node_modules/mkdirp/package.json | 14
-rw-r--r--  node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/CHANGELOG.md | 219
-rw-r--r--  node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/LICENSE.md (renamed from node_modules/figgy-pudding/LICENSE.md) | 0
-rw-r--r--  node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/README.md | 157
-rw-r--r--  node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/index.js | 216
-rw-r--r--  node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/package.json | 76
-rw-r--r--  node_modules/@npmcli/arborist/node_modules/rimraf/package.json | 2
-rw-r--r--  node_modules/@npmcli/arborist/package.json | 20
-rw-r--r--  node_modules/@npmcli/git/node_modules/mkdirp/lib/opts-arg.js | 4
-rw-r--r--  node_modules/@npmcli/git/node_modules/mkdirp/package.json | 12
-rw-r--r--  node_modules/@npmcli/git/package.json | 32
-rw-r--r--  node_modules/@npmcli/map-workspaces/CHANGELOG.md | 6
-rw-r--r--  node_modules/@npmcli/map-workspaces/LICENSE (renamed from node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/LICENSE) | 2
-rw-r--r--  node_modules/@npmcli/map-workspaces/README.md | 83
-rw-r--r--  node_modules/@npmcli/map-workspaces/index.js | 190
-rw-r--r--  node_modules/@npmcli/map-workspaces/node_modules/glob/LICENSE | 21
-rw-r--r--  node_modules/@npmcli/map-workspaces/node_modules/glob/README.md | 375
-rw-r--r--  node_modules/@npmcli/map-workspaces/node_modules/glob/changelog.md | 67
-rw-r--r--  node_modules/@npmcli/map-workspaces/node_modules/glob/common.js | 240
-rw-r--r--  node_modules/@npmcli/map-workspaces/node_modules/glob/glob.js | 790
-rw-r--r--  node_modules/@npmcli/map-workspaces/node_modules/glob/package.json | 79
-rw-r--r--  node_modules/@npmcli/map-workspaces/node_modules/glob/sync.js | 486
-rw-r--r--  node_modules/@npmcli/map-workspaces/package.json | 95
-rw-r--r--  node_modules/@npmcli/name-from-folder/LICENSE | 15
-rw-r--r--  node_modules/@npmcli/name-from-folder/README.md | 14
-rw-r--r--  node_modules/@npmcli/name-from-folder/index.js (renamed from node_modules/@npmcli/arborist/lib/name-from-folder.js) | 0
-rw-r--r--  node_modules/@npmcli/name-from-folder/package.json | 61
-rw-r--r--  node_modules/@npmcli/promise-spawn/README.md | 4
-rw-r--r--  node_modules/@npmcli/promise-spawn/index.js | 11
-rw-r--r--  node_modules/@npmcli/promise-spawn/package.json | 25
-rw-r--r--  node_modules/@npmcli/run-script/README.md | 4
-rw-r--r--  node_modules/@npmcli/run-script/lib/run-script-pkg.js | 5
-rw-r--r--  node_modules/@npmcli/run-script/package.json | 17
-rw-r--r--  node_modules/cacache/CHANGELOG.md | 18
-rw-r--r--  node_modules/cacache/lib/util/move-file.js | 5
-rw-r--r--  node_modules/cacache/node_modules/chownr/LICENSE (renamed from node_modules/libnpmpack/node_modules/chownr/LICENSE) | 0
-rw-r--r--  node_modules/cacache/node_modules/chownr/README.md (renamed from node_modules/libnpmpack/node_modules/chownr/README.md) | 0
-rw-r--r--  node_modules/cacache/node_modules/chownr/chownr.js (renamed from node_modules/libnpmpack/node_modules/chownr/chownr.js) | 0
-rw-r--r--  node_modules/cacache/node_modules/chownr/package.json (renamed from node_modules/libnpmpack/node_modules/chownr/package.json) | 30
-rw-r--r--  node_modules/cacache/node_modules/mkdirp/lib/opts-arg.js | 4
-rw-r--r--  node_modules/cacache/node_modules/mkdirp/package.json | 14
-rw-r--r--  node_modules/cacache/node_modules/rimraf/CHANGELOG.md | 65
-rwxr-xr-x  node_modules/cacache/node_modules/rimraf/bin.js | 64
-rw-r--r--  node_modules/cacache/node_modules/rimraf/package.json | 27
-rw-r--r--  node_modules/cacache/node_modules/rimraf/rimraf.js | 146
-rw-r--r--  node_modules/cacache/node_modules/tar/CHANGELOG.md | 68
-rw-r--r--  node_modules/cacache/node_modules/tar/LICENSE (renamed from node_modules/libnpmpack/node_modules/rimraf/LICENSE) | 0
-rw-r--r--  node_modules/cacache/node_modules/tar/README.md | 1031
-rw-r--r--  node_modules/cacache/node_modules/tar/index.js | 18
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/create.js | 105
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/extract.js | 112
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/get-write-flag.js | 20
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/header.js | 288
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/high-level-opt.js | 29
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/large-numbers.js | 97
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/list.js | 128
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/mkdir.js | 206
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/mode-fix.js | 24
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/pack.js | 403
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/parse.js | 483
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/path-reservations.js | 125
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/pax.js | 145
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/read-entry.js | 98
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/replace.js | 219
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/types.js | 44
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/unpack.js | 680
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/update.js | 36
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/warn-mixin.js | 21
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/winchars.js | 23
-rw-r--r--  node_modules/cacache/node_modules/tar/lib/write-entry.js | 436
-rw-r--r--  node_modules/cacache/node_modules/tar/package.json | 81
-rw-r--r--  node_modules/cacache/package.json | 47
-rw-r--r--  node_modules/figgy-pudding/CHANGELOG.md | 151
-rw-r--r--  node_modules/figgy-pudding/README.md | 260
-rw-r--r--  node_modules/figgy-pudding/index.js | 197
-rw-r--r--  node_modules/figgy-pudding/package.json | 77
l---------  node_modules/libnpmpack/node_modules/.bin/mkdirp | 1
l---------  node_modules/libnpmpack/node_modules/.bin/pacote | 1
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/CHANGELOG.md | 15
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/LICENSE | 21
-rwxr-xr-x  node_modules/libnpmpack/node_modules/mkdirp/bin/cmd.js | 68
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/index.js | 31
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/lib/find-made.js | 29
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-manual.js | 64
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-native.js | 39
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/lib/opts-arg.js | 23
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/lib/path-arg.js | 29
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/lib/use-native.js | 10
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/package.json | 75
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/readme.markdown | 266
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/README.md | 244
-rwxr-xr-x  node_modules/libnpmpack/node_modules/pacote/lib/bin.js | 149
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/dir.js | 98
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/fetcher.js | 470
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/file.js | 93
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/git.js | 272
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/index.js | 12
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/registry.js | 159
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/remote.js | 72
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/util/cache-dir.js | 12
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/util/is-package-bin.js | 24
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/util/npm.js | 9
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/util/proc-log.js | 21
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/CHANGELOG.md | 52
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/README.md | 83
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/npa.js | 301
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/package.json | 69
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/package.json | 107
-rwxr-xr-x  node_modules/libnpmpack/node_modules/rimraf/bin.js | 50
-rw-r--r--  node_modules/libnpmpack/package.json | 39
-rw-r--r--  node_modules/mkdirp-infer-owner/node_modules/mkdirp/lib/opts-arg.js | 4
-rw-r--r--  node_modules/mkdirp-infer-owner/node_modules/mkdirp/package.json | 14
-rw-r--r--  node_modules/move-file/index.d.ts | 41
-rw-r--r--  node_modules/move-file/index.js | 62
-rw-r--r--  node_modules/move-file/license | 9
-rw-r--r--  node_modules/move-file/node_modules/path-exists/index.d.ts | 28
-rw-r--r--  node_modules/move-file/node_modules/path-exists/index.js | 23
-rw-r--r--  node_modules/move-file/node_modules/path-exists/license | 9
-rw-r--r--  node_modules/move-file/node_modules/path-exists/package.json | 71
-rw-r--r--  node_modules/move-file/node_modules/path-exists/readme.md | 52
-rw-r--r--  node_modules/move-file/package.json | 82
-rw-r--r--  node_modules/move-file/readme.md | 67
-rw-r--r--  node_modules/ms/index.js | 4
-rw-r--r--  node_modules/ms/package.json | 34
-rw-r--r--  node_modules/ms/readme.md | 2
-rw-r--r--  node_modules/npm-package-arg/npa.js | 10
-rw-r--r--  node_modules/npm-package-arg/package.json | 31
-rw-r--r--  node_modules/p-map/index.d.ts | 4
-rw-r--r--  node_modules/p-map/index.js | 12
-rw-r--r--  node_modules/p-map/license | 2
-rw-r--r--  node_modules/p-map/package.json | 33
-rw-r--r--  node_modules/p-map/readme.md | 26
-rw-r--r--  node_modules/pacote/lib/dir.js | 31
-rw-r--r--  node_modules/pacote/lib/fetcher.js | 7
-rw-r--r--  node_modules/pacote/lib/git.js | 7
-rw-r--r--  node_modules/pacote/lib/remote.js | 5
-rw-r--r--  node_modules/pacote/lib/util/cache-dir.js | 6
-rw-r--r--  node_modules/pacote/lib/util/git/clone.js | 138
-rw-r--r--  node_modules/pacote/lib/util/git/env.js | 33
-rw-r--r--  node_modules/pacote/lib/util/git/index.js | 5
-rw-r--r--  node_modules/pacote/lib/util/git/lines-to-revs.js | 133
-rw-r--r--  node_modules/pacote/lib/util/git/opts.js | 16
-rw-r--r--  node_modules/pacote/lib/util/git/revs.js | 24
-rw-r--r--  node_modules/pacote/lib/util/git/should-retry.js | 17
-rw-r--r--  node_modules/pacote/lib/util/git/spawn.js | 34
-rw-r--r--  node_modules/pacote/lib/util/git/which.js | 11
-rw-r--r--  node_modules/pacote/lib/util/npm.js | 6
-rw-r--r--  node_modules/pacote/lib/util/spawn.js | 36
l---------  node_modules/pacote/node_modules/.bin/rimraf (renamed from node_modules/libnpmpack/node_modules/.bin/rimraf) | 0
-rw-r--r--  node_modules/pacote/node_modules/chownr/package.json | 27
-rw-r--r--  node_modules/pacote/node_modules/mkdirp/lib/opts-arg.js | 4
-rw-r--r--  node_modules/pacote/node_modules/mkdirp/package.json | 18
-rw-r--r--  node_modules/pacote/node_modules/rimraf/CHANGELOG.md | 65
-rw-r--r--  node_modules/pacote/node_modules/rimraf/LICENSE (renamed from node_modules/libnpmpack/node_modules/pacote/LICENSE) | 2
-rw-r--r--  node_modules/pacote/node_modules/rimraf/README.md (renamed from node_modules/libnpmpack/node_modules/rimraf/README.md) | 0
-rwxr-xr-x  node_modules/pacote/node_modules/rimraf/bin.js | 68
-rw-r--r--  node_modules/pacote/node_modules/rimraf/package.json (renamed from node_modules/libnpmpack/node_modules/rimraf/package.json) | 32
-rw-r--r--  node_modules/pacote/node_modules/rimraf/rimraf.js (renamed from node_modules/libnpmpack/node_modules/rimraf/rimraf.js) | 146
-rw-r--r--  node_modules/pacote/package.json | 47
-rw-r--r--  node_modules/semver/CHANGELOG.md | 26
-rw-r--r--  node_modules/semver/README.md | 58
-rw-r--r--  node_modules/semver/classes/range.js | 87
-rw-r--r--  node_modules/semver/index.js | 2
-rw-r--r--  node_modules/semver/internal/re.js | 3
-rw-r--r--  node_modules/semver/package.json | 40
-rw-r--r--  node_modules/semver/ranges/simplify.js | 44
-rw-r--r--  node_modules/semver/ranges/subset.js | 155
181 files changed, 10588 insertions(+), 4745 deletions(-)
diff --git a/node_modules/@npmcli/arborist/lib/arborist/audit.js b/node_modules/@npmcli/arborist/lib/arborist/audit.js
new file mode 100644
index 000000000..7e860845a
--- /dev/null
+++ b/node_modules/@npmcli/arborist/lib/arborist/audit.js
@@ -0,0 +1,24 @@
+// mixin implementing the audit method
+
+const Reifier = require('./reify.js')
+const AuditReport = require('../audit-report.js')
+
+// shared with reify
+const _global = Symbol.for('global')
+
+module.exports = cls => class Auditor extends Reifier(cls) {
+ async audit (options = {}) {
+ if (this[_global])
+ throw Object.assign(
+ new Error('`npm audit` does not support testing globals'),
+ { code: 'EAUDITGLOBAL' }
+ )
+
+ process.emit('time', 'audit')
+ const tree = await this.loadVirtual()
+ this.auditReport = await AuditReport.load(tree, this.options)
+ const ret = options.fix ? this.reify() : this.auditReport
+ process.emit('timeEnd', 'audit')
+ return ret
+ }
+}
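
The cls => class Auditor extends Reifier(cls) shape above is the subclass-factory (mixin) pattern used throughout Arborist: each file exports a function from a base class to an extended class, so the layers compose into a single class. A stripped-down sketch of that composition (method bodies elided; the EventEmitter base matches the index.js change further down):

    const EventEmitter = require('events')

    // each mixin layers one capability onto whatever base it is given
    const Reifier = cls => class Reifier extends cls {
      reify () { /* ... */ }
    }
    const Auditor = cls => class Auditor extends Reifier(cls) {
      audit () { /* ... */ }
    }

    // compose onto a concrete base, as lib/arborist/index.js does
    class Arborist extends Auditor(EventEmitter) {}
    new Arborist().audit  // inherited through the mixin chain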
diff --git a/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
index 08a6e0585..8627abee6 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
@@ -4,6 +4,8 @@ const rpj = require('read-package-json-fast')
const npa = require('npm-package-arg')
const pacote = require('pacote')
const semver = require('semver')
+const pickManifest = require('npm-pick-manifest')
+const mapWorkspaces = require('@npmcli/map-workspaces')
const calcDepFlags = require('../calc-dep-flags.js')
const Shrinkwrap = require('../shrinkwrap.js')
@@ -45,6 +47,7 @@ const _nodeFromSpec = Symbol('nodeFromSpec')
const _fetchManifest = Symbol('fetchManifest')
const _problemEdges = Symbol('problemEdges')
const _manifests = Symbol('manifests')
+const _mapWorkspaces = Symbol('mapWorkspaces')
const _linkFromSpec = Symbol('linkFromSpec')
const _loadPeerSet = Symbol('loadPeerSet')
// shared symbols so we can hit them with unit tests
@@ -59,6 +62,8 @@ const _rootNodeFromPackage = Symbol('rootNodeFromPackage')
const _add = Symbol('add')
const _resolvedAdd = Symbol.for('resolvedAdd')
const _queueNamedUpdates = Symbol('queueNamedUpdates')
+const _queueVulnDependents = Symbol('queueVulnDependents')
+const _avoidRange = Symbol('avoidRange')
const _shouldUpdateNode = Symbol('shouldUpdateNode')
const _resetDepFlags = Symbol('resetDepFlags')
const _loadFailures = Symbol('loadFailures')
@@ -67,8 +72,10 @@ const _linkNodes = Symbol('linkNodes')
const _follow = Symbol('follow')
const _globalStyle = Symbol('globalStyle')
const _globalRootNode = Symbol('globalRootNode')
+const _isVulnerable = Symbol.for('isVulnerable')
// used by Reify mixin
+const _force = Symbol.for('force')
const _explicitRequests = Symbol.for('explicitRequests')
const _global = Symbol.for('global')
const _idealTreePrune = Symbol.for('idealTreePrune')
@@ -89,9 +96,14 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
global = false,
follow = false,
globalStyle = false,
+ legacyPeerDeps = false,
+ force = false,
} = options
+ this[_force] = !!force
+
this.idealTree = options.idealTree
+ this.legacyPeerDeps = legacyPeerDeps
this[_globalStyle] = this[_global] || globalStyle
this[_follow] = !!follow
@@ -119,6 +131,8 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
if (this.idealTree)
return Promise.resolve(this.idealTree)
+ process.emit('time', 'idealTree')
+
if (!options.add && !options.rm && this[_global])
return Promise.reject(new Error('global requires an add or rm option'))
@@ -139,6 +153,7 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
.then(() => this[_fixDepFlags]())
.then(() => this[_pruneFailedOptional]())
.then(() => {
+ process.emit('timeEnd', 'idealTree')
this.finishTracker('idealTree')
return this.idealTree
})
@@ -173,6 +188,7 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
// load the initial tree, either the virtualTree from a shrinkwrap,
// or just the root node from a package.json
[_initTree] () {
+ process.emit('time', 'idealTree:init')
return (
this[_global] ? this[_globalRootNode]()
: rpj(this.path + '/package.json')
@@ -185,18 +201,17 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
// to build out the full virtual tree from it, since we'll be
// reconstructing it anyway.
.then(root => this[_global] ? root
- : this[_updateAll] ? Shrinkwrap.load({ path: this.path }).then(meta => {
- meta.reset()
- root.meta = meta
- return root
- })
+ : this[_updateAll] ? Shrinkwrap.reset({ path: this.path })
+ .then(meta => Object.assign(root, {meta}))
: this.loadVirtual({ root }))
+ .then(tree => this[_mapWorkspaces](tree))
.then(tree => {
// null the virtual tree, because we're about to hack away at it
// if you want another one, load another copy.
this.idealTree = tree
this.virtualTree = null
+ process.emit('timeEnd', 'idealTree:init')
})
}
@@ -221,18 +236,32 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
peer: false,
optional: false,
global: this[_global],
+ legacyPeerDeps: this.legacyPeerDeps,
})
}
+ [_mapWorkspaces] (node) {
+ return mapWorkspaces({ cwd: node.path, pkg: node.package })
+ .then(workspaces => {
+ if (workspaces.size)
+ node.workspaces = workspaces
+ return node
+ })
+ }
+
// process the add/rm requests by modifying the root node, and the
// update.names request by queueing nodes dependent on those named.
[_applyUserRequests] (options) {
+ process.emit('time', 'idealTree:userRequests')
// If we have a list of package names to update, and we know it's
// going to update them wherever they are, add any paths into those
// named nodes to the buildIdealTree queue.
if (this[_updateNames].length)
this[_queueNamedUpdates]()
+ if (this.auditReport && this.auditReport.size > 0)
+ this[_queueVulnDependents](options)
+
if (options.rm && options.rm.length) {
addRmPkgDeps.rm(this.idealTree.package, options.rm)
for (const name of options.rm)
@@ -240,7 +269,10 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
}
// triggers a refresh of all edgesOut
- const after = () => this.idealTree.package = this.idealTree.package
+ const after = () => {
+ this.idealTree.package = this.idealTree.package
+ process.emit('timeEnd', 'idealTree:userRequests')
+ }
// these just add and remove to/from the root node
return (options.add)
@@ -248,7 +280,6 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
: after()
}
-
// This returns a promise because we might not have the name yet,
// and need to call pacote.manifest to find the name.
[_add] ({add, saveType = null, saveBundle = false}) {
@@ -280,6 +311,73 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
})
}
+ [_queueVulnDependents] (options) {
+ for (const [name, {nodes}] of this.auditReport.entries()) {
+ for (const node of nodes) {
+ for (const edge of node.edgesIn) {
+ this.addTracker('buildIdealTree', edge.from.name, edge.from.location)
+ this[_depsQueue].push(edge.from)
+ }
+ }
+ }
+
+ // note any that can't be fixed at the root level without --force
+ // if there's a fix, we use that. otherwise, the user has to remove it,
+ // find a different thing, fix the upstream, etc.
+ //
+ // XXX: how to handle top nodes that aren't the root? Maybe the report
+ // just tells the user to cd into that directory and fix it?
+ if (this[_force] && this.auditReport && this.auditReport.topVulns.size) {
+ options.add = options.add || []
+ options.rm = options.rm || []
+ for (const [name, topVuln] of this.auditReport.topVulns.entries()) {
+ const {
+ packument,
+ simpleRange,
+ range: avoid,
+ topNodes,
+ fixAvailable,
+ } = topVuln
+ for (const node of topNodes) {
+ if (node !== this.idealTree) {
+ // not something we're going to fix, sorry. have to cd into
+ // that directory and fix it yourself.
+ this.log.warn('audit', 'Manual fix required in linked project ' +
+ `at ./${node.location} for ${name}@${simpleRange}.\n` +
+ `'cd ./${node.location}' and run 'npm audit' for details.`)
+ continue
+ }
+
+ if (!fixAvailable) {
+ this.log.warn('audit', `No fix available for ${name}@${simpleRange}`)
+ continue
+ }
+
+ const { isSemVerMajor, version } = fixAvailable
+ const breakingMessage = isSemVerMajor
+ ? 'a SemVer major change'
+ : 'outside your stated dependency range'
+ this.log.warn('audit', `Updating ${name} to ${version}, ` +
+ `which is ${breakingMessage}.`)
+ options.add.push(`${name}@${version}`)
+ }
+ }
+ }
+ }
+
+ [_isVulnerable] (node) {
+ return this.auditReport && this.auditReport.isVulnerable(node)
+ }
+
+ [_avoidRange] (name) {
+ if (!this.auditReport)
+ return null
+ const vuln = this.auditReport.get(name)
+ if (!vuln)
+ return null
+ return vuln.range
+ }
+
[_queueNamedUpdates] () {
const names = this[_updateNames]
// ignore top nodes, since they are not loaded the same way, and
@@ -290,6 +388,8 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
// anything on the update names list will get refreshed, even if
// it isn't a problem.
+ // XXX this could be faster by doing a series of inventory.query('name')
+ // calls rather than walking over everything in the tree.
const set = this.idealTree.inventory
.filter(n => this[_shouldUpdateNode](n))
for (const node of set) {
@@ -312,16 +412,20 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
// package deps, which may be partly or entirely incomplete, invalid
// or extraneous.
[_buildDeps] (node) {
+ process.emit('time', 'idealTree:buildDeps')
this[_depsQueue].push(this.idealTree)
this.log.silly('idealTree', 'buildDeps')
this.addTracker('idealTree', this.idealTree.name, '')
return this[_buildDepStep]()
+ .then(() => process.emit('timeEnd', 'idealTree:buildDeps'))
}
[_buildDepStep] () {
// removes tracker of previous dependency in the queue
if (this[_currentDep]) {
- this.finishTracker('idealTree', this[_currentDep].name, this[_currentDep].location)
+ const { location, name } = this[_currentDep]
+ process.emit('timeEnd', `idealTree:${location || '#root'}`)
+ this.finishTracker('idealTree', name, location)
this[_currentDep] = null
}
@@ -346,6 +450,7 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
this[_depsSeen].add(node)
this[_currentDep] = node
+ process.emit('time', `idealTree:${node.location || '#root'}`)
// if any deps are missing or invalid, then we fetch the manifest for
// the thing we want, and build a new dep node from that.
@@ -431,9 +536,11 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
// create a virtual root node with the same deps as the node that
// is requesting this one, so that we can get all the peer deps in
// a context where they're likely to be resolvable.
+ const { legacyPeerDeps } = this
parent = parent || new Node({
path: '/virtual-root',
pkg: edge.from.package,
+ legacyPeerDeps,
})
const spec = npa.resolve(edge.name, edge.spec, edge.from.path)
return this[_nodeFromSpec](edge.name, spec, parent, edge)
@@ -455,6 +562,7 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
.filter(edge => !bundled.has(edge.name) &&
!(edge.to && edge.to.inShrinkwrap) &&
(!edge.valid || !edge.to || this[_updateNames].includes(edge.name) ||
+ this[_isVulnerable](edge.to) ||
node.isRoot && this[_explicitRequests].has(edge.name)))
}
@@ -462,7 +570,10 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
if (this[_manifests].has(spec.raw))
return this[_manifests].get(spec.raw)
else {
- const options = Object.create(this.options)
+ const options = {
+ ...this.options,
+ avoid: this[_avoidRange](spec.name),
+ }
const p = pacote.manifest(spec, options)
this[_manifests].set(spec.raw, p)
return p
@@ -474,10 +585,11 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
// the object so it doesn't get mutated.
// Don't bother to load the manifest for link deps, because the target
// might be within another package that doesn't exist yet.
+ const { legacyPeerDeps } = this
return spec.type === 'directory'
? this[_linkFromSpec](name, spec, parent, edge)
: this[_fetchManifest](spec)
- .then(pkg => new Node({ name, pkg, parent }), error => {
+ .then(pkg => new Node({ name, pkg, parent, legacyPeerDeps }), error => {
error.requiredBy = edge.from.location || '.'
// failed to load the spec, either because of enotarget or
// fetch failure of some other sort. save it so we can verify
@@ -486,6 +598,7 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
name,
parent,
error,
+ legacyPeerDeps,
})
this[_loadFailures].add(n)
return n
@@ -494,8 +607,9 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
[_linkFromSpec] (name, spec, parent, edge) {
const realpath = spec.fetchSpec
+ const { legacyPeerDeps } = this
return rpj(realpath + '/package.json').catch(() => ({})).then(pkg => {
- const link = new Link({ name, parent, realpath, pkg })
+ const link = new Link({ name, parent, realpath, pkg, legacyPeerDeps })
this[_linkNodes].add(link)
return link
})
@@ -521,7 +635,8 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
// where this dep cannot be placed, and use the one right before that.
// place dep, requested by node, to satisfy edge
[_placeDep] (dep, node, edge, peerEntryEdge = null) {
- if (edge.to && !edge.error && !this[_updateNames].includes(edge.name))
+ if (edge.to && !edge.error && !this[_updateNames].includes(edge.name) &&
+ !this[_isVulnerable](edge.to))
return []
// top nodes should still get peer deps from their parent or fsParent
@@ -868,6 +983,7 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
}
[_fixDepFlags] () {
+ process.emit('time', 'idealTree:fixDepFlags')
const metaFromDisk = this.idealTree.meta.loadedFromDisk
// if the options set prune:false, then we don't prune, but we still
// mark the extraneous items in the tree if we modified it at all.
@@ -904,6 +1020,7 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
if (this[_prune] && metaFromDisk && this[_mutateTree]) {
this[_idealTreePrune]()
}
+ process.emit('timeEnd', 'idealTree:fixDepFlags')
}
[_idealTreePrune] () {
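
Most additions in this file are process.emit('time', name) / process.emit('timeEnd', name) pairs bracketing each phase. Arborist only emits; a consumer such as the npm CLI can time the phases by listening on the process object. A minimal listener sketch (the handler is illustrative; only the event names come from this diff):

    const starts = new Map()
    process.on('time', name => starts.set(name, process.hrtime.bigint()))
    process.on('timeEnd', name => {
      const start = starts.get(name)
      if (start === undefined) return
      starts.delete(name)
      const ms = Number(process.hrtime.bigint() - start) / 1e6
      console.error(`timing ${name} ${ms.toFixed(1)}ms`)
    })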
diff --git a/node_modules/@npmcli/arborist/lib/arborist/index.js b/node_modules/@npmcli/arborist/lib/arborist/index.js
index 6e3139478..d2215cbfe 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/index.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/index.js
@@ -24,10 +24,11 @@
// reify extends buildideal, which extends actual and virtual, so that's
// the only one to pull in. This class is just here to grab the options
// and path, and call out to the others.
-const Reify = require('./reify.js')
+const Auditor = require('./audit.js')
const {resolve} = require('path')
-class Arborist extends Reify(require('events')) {
+class Arborist extends Auditor(require('events')) {
constructor (options = {}) {
+ process.emit('time', 'arborist:ctor')
super(options)
this.options = {
nodeVersion: process.version,
@@ -35,6 +36,7 @@ class Arborist extends Reify(require('events')) {
path: options.path || '.',
}
this.path = resolve(this.options.path)
+ process.emit('timeEnd', 'arborist:ctor')
}
}
diff --git a/node_modules/@npmcli/arborist/lib/arborist/load-actual.js b/node_modules/@npmcli/arborist/lib/arborist/load-actual.js
index 3e7abf796..925e234b1 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/load-actual.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/load-actual.js
@@ -80,6 +80,8 @@ module.exports = cls => class ActualLoader extends cls {
})).then(node => {
this.actualTree = node
return this[_loadActualActually]()
+ }).then(tree => {
+ return tree
})
}
@@ -161,6 +163,7 @@ module.exports = cls => class ActualLoader extends cls {
.then(pkg => [pkg, null], error => [null, error])
.then(([pkg, error]) => {
return this[path === real ? _newNode : _newLink]({
+ legacyPeerDeps: this.legacyPeerDeps,
path,
realpath: real,
pkg,
@@ -192,7 +195,7 @@ module.exports = cls => class ActualLoader extends cls {
if (!parent && path !== this.path)
this[_linkTargets].add(realpath)
return process.env._TEST_ARBORIST_SLOW_LINK_TARGET_ === '1'
- ? new Promise(res => setTimeout(() => res(new Node(options)), 10))
+ ? new Promise(res => setTimeout(() => res(new Node(options)), 100))
: new Node(options)
}
diff --git a/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js b/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js
index 2a78f2172..0b1d0f91a 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js
@@ -1,12 +1,14 @@
// mixin providing the loadVirtual method
const {resolve} = require('path')
+const mapWorkspaces = require('@npmcli/map-workspaces')
const consistentResolve = require('../consistent-resolve.js')
const Shrinkwrap = require('../shrinkwrap.js')
const Node = require('../node.js')
const Link = require('../link.js')
const relpath = require('../relpath.js')
+const rpj = require('read-package-json-fast')
const loadFromShrinkwrap = Symbol('loadFromShrinkwrap')
const resolveNodes = Symbol('resolveNodes')
@@ -14,6 +16,7 @@ const resolveLinks = Symbol('resolveLinks')
const assignParentage = Symbol('assignParentage')
const loadNode = Symbol('loadVirtualNode')
const loadLink = Symbol('loadVirtualLink')
+const loadWorkspaces = Symbol('loadWorkspaces')
module.exports = cls => class VirtualLoader extends cls {
constructor (options) {
@@ -38,21 +41,24 @@ module.exports = cls => class VirtualLoader extends cls {
// when building the ideal tree, we pass in a root node to this function
// otherwise, load it from the root package in the lockfile
const {
- root = this[loadNode]('', s.data.packages[''])
+ root = this[loadWorkspaces](
+ this[loadNode]('', s.data.packages[''] || {}),
+ s
+ )
} = options
return this[loadFromShrinkwrap](s, root)
})
}
- [loadFromShrinkwrap] (s, root) {
+ async [loadFromShrinkwrap] (s, root) {
root.meta = s
s.add(root)
this.virtualTree = root
const {links, nodes} = this[resolveNodes](s, root)
- this[resolveLinks](links, nodes)
+ await this[resolveLinks](links, nodes)
this[assignParentage](nodes)
- return Promise.resolve(root)
+ return root
}
// separate out link metadatas, and create Node objects for nodes
@@ -74,15 +80,30 @@ module.exports = cls => class VirtualLoader extends cls {
// links is the set of metadata, and nodes is the map of non-Link nodes
// Set the targets to nodes in the set, if we have them (we might not)
- [resolveLinks] (links, nodes) {
+ async [resolveLinks] (links, nodes) {
// now we've loaded the root, and all real nodes
// link up the links
+ const {meta} = this.virtualTree
+ const {loadedFromDisk, originalLockfileVersion} = meta
+ const oldLockfile = loadedFromDisk && !(originalLockfileVersion >= 2)
+
for (const [location, meta] of links.entries()) {
const targetPath = resolve(this.path, meta.resolved)
const targetLoc = relpath(this.path, targetPath)
const target = nodes.get(targetLoc)
const link = this[loadLink](location, targetLoc, target, meta)
nodes.set(location, link)
+ nodes.set(targetLoc, link.target)
+ // legacy shrinkwraps do not store all the info we need for the target.
+ // if we're loading from disk, and have a link in place, we need to
+ // look in that actual folder (or at least try to) in order to get
+ // the dependencies of the link target and load it properly.
+ if (oldLockfile) {
+ const pj = link.realpath + '/package.json'
+ const pkg = await rpj(pj).catch(() => null)
+ if (pkg)
+ link.target.package = pkg
+ }
}
}
@@ -116,7 +137,7 @@ module.exports = cls => class VirtualLoader extends cls {
const ppkg = parent.package
if (!ppkg.bundleDependencies)
ppkg.bundleDependencies = [name]
- else
+ else if (!ppkg.bundleDependencies.includes(name))
ppkg.bundleDependencies.push(name)
}
}
@@ -126,6 +147,7 @@ module.exports = cls => class VirtualLoader extends cls {
[loadNode] (location, sw) {
const path = resolve(this.path, location)
const node = new Node({
+ legacyPeerDeps: this.legacyPeerDeps,
root: this.virtualTree,
path,
realpath: path,
@@ -143,9 +165,20 @@ module.exports = cls => class VirtualLoader extends cls {
return node
}
+ [loadWorkspaces] (node, s) {
+ const workspaces = mapWorkspaces.virtual({
+ cwd: node.path,
+ lockfile: s.data
+ })
+ if (workspaces.size)
+ node.workspaces = workspaces
+ return node
+ }
+
[loadLink] (location, targetLoc, target, meta) {
const path = resolve(this.path, location)
const link = new Link({
+ legacyPeerDeps: this.legacyPeerDeps,
path,
realpath: resolve(this.path, targetLoc),
target,
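
Both tree loaders now attach a workspaces Map to the root node via @npmcli/map-workspaces: buildIdealTree uses the async form fed a package.json, while loadVirtual uses mapWorkspaces.virtual() fed lockfile data. A sketch of the two call shapes as they appear in this diff (the project layout is hypothetical):

    const mapWorkspaces = require('@npmcli/map-workspaces')

    // async form: expands the "workspaces" globs of a manifest on disk
    mapWorkspaces({ cwd: '/project', pkg: { workspaces: ['packages/*'] } })
      .then(ws => console.log(ws))
      // -> Map { 'pkg-a' => '/project/packages/pkg-a', ... }

    // virtual form: derives the same Map synchronously from lockfile
    // data, as [loadWorkspaces] does above:
    //   const ws = mapWorkspaces.virtual({ cwd: node.path, lockfile: s.data })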
diff --git a/node_modules/@npmcli/arborist/lib/arborist/reify.js b/node_modules/@npmcli/arborist/lib/arborist/reify.js
index fe82f83d1..31de23f3f 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/reify.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/reify.js
@@ -1,11 +1,5 @@
// mixin implementing the reify method
-// XXX unsupported platforms should be failures if the node is optional
-// otherwise we try anyway.
-// XXX this needs to clone rather than copy, so that we can leave failed
-// optional deps in the ideal tree, but remove them from the actual.
-// But to do that, we need a way to clone a tree efficiently.
-
const npa = require('npm-package-arg')
const pacote = require('pacote')
const binLinks = require('bin-links')
@@ -13,6 +7,7 @@ const runScript = require('@npmcli/run-script')
const rpj = require('read-package-json-fast')
const {checkEngine, checkPlatform} = require('npm-install-checks')
const updateDepSpec = require('../update-dep-spec.js')
+const AuditReport = require('../audit-report.js')
const boolEnv = b => b ? '1' : ''
@@ -45,7 +40,7 @@ const _registryResolved = Symbol('registryResolved')
const _trashList = Symbol('trashList')
const _addNodeToTrashList = Symbol('addNodeToTrashList')
const _handleOptionalFailure = Symbol('handleOptionalFailure')
-const _loadTrees = Symbol('loadTrees')
+const _loadTrees = Symbol.for('loadTrees')
// shared symbols for swapping out when testing
const _diffTrees = Symbol.for('diffTrees')
@@ -55,11 +50,14 @@ const _reifyNode = Symbol.for('reifyNode')
const _checkEngineAndPlatform = Symbol('checkEngineAndPlatform')
const _extractOrLink = Symbol('extractOrLink')
const _symlink = Symbol('symlink')
+const _warnDeprecated = Symbol('warnDeprecated')
const _recheckEngineAndPlatform = Symbol('recheckEngineAndPlatform')
const _checkEngine = Symbol('checkEngine')
const _checkPlatform = Symbol('checkPlatform')
const _binLinks = Symbol('binLinks')
const _loadBundlesAndUpdateTrees = Symbol.for('loadBundlesAndUpdateTrees')
+const _submitQuickAudit = Symbol('submitQuickAudit')
+const _awaitQuickAudit = Symbol('awaitQuickAudit')
const _unpackNewModules = Symbol.for('unpackNewModules')
const _moveContents = Symbol.for('moveContents')
const _moveBackRetiredUnchanged = Symbol.for('moveBackRetiredUnchanged')
@@ -81,9 +79,9 @@ const _omitPeer = Symbol('omitPeer')
const _global = Symbol.for('global')
const _ignoreScripts = Symbol('ignoreScripts')
const _scriptShell = Symbol('scriptShell')
-const _force = Symbol('force')
// defined by Ideal mixin
+const _force = Symbol.for('force')
const _idealTreePrune = Symbol.for('idealTreePrune')
const _explicitRequests = Symbol.for('explicitRequests')
const _resolvedAdd = Symbol.for('resolvedAdd')
@@ -94,7 +92,6 @@ module.exports = cls => class Reifier extends Ideal(cls) {
const {
ignoreScripts = false,
- force = false,
scriptShell,
savePrefix = '^',
binLinks = true,
@@ -104,7 +101,6 @@ module.exports = cls => class Reifier extends Ideal(cls) {
this[_binLinks] = () => {}
this[_ignoreScripts] = !!ignoreScripts
- this[_force] = !!force
this[_scriptShell] = scriptShell
this[_savePrefix] = savePrefix
@@ -124,6 +120,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
// start tracker block
this.addTracker('reify')
+ process.emit('time', 'reify')
return this[_loadTrees](options)
.then(() => this[_diffTrees]())
.then(() => this[_retireShallowNodes]())
@@ -131,14 +128,17 @@ module.exports = cls => class Reifier extends Ideal(cls) {
.then(() => this[_addOmitsToTrashList]())
.then(() => this[_loadShrinkwrapsAndUpdateTrees]())
.then(() => this[_loadBundlesAndUpdateTrees]())
+ .then(() => this[_submitQuickAudit]())
.then(() => this[_unpackNewModules]())
.then(() => this[_moveBackRetiredUnchanged]())
.then(() => this[_runLifecycleScripts]())
.then(() => this[_removeTrash]())
.then(() => this[_saveIdealTree](options))
.then(() => this[_copyIdealToActual]())
+ .then(() => this[_awaitQuickAudit]())
.then(() => {
this.finishTracker('reify')
+ process.emit('timeEnd', 'reify')
return this.actualTree
})
}
@@ -146,8 +146,10 @@ module.exports = cls => class Reifier extends Ideal(cls) {
// when doing a local install, we load everything and figure it all out.
// when doing a global install, we *only* care about the explicit requests.
[_loadTrees] (options) {
+ process.emit('time', 'reify:loadTrees')
if (!this[_global])
return Promise.all([this.loadActual(), this.buildIdealTree(options)])
+ .then(() => process.emit('timeEnd', 'reify:loadTrees'))
// the global install space tends to have a lot of stuff in it. don't
// load all of it, just what we care about. we won't be saving a
@@ -159,9 +161,11 @@ module.exports = cls => class Reifier extends Ideal(cls) {
}
return this.buildIdealTree(options)
.then(() => this.loadActual(actualOpts))
+ .then(() => process.emit('timeEnd', 'reify:loadTrees'))
}
[_diffTrees] () {
+ process.emit('time', 'reify:diffTrees')
// XXX if we have an existing diff already, there should be a way
// to just invalidate the parts that changed, but avoid walking the
// whole tree again.
@@ -175,6 +179,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
for (const node of this.diff.removed) {
this[_addNodeToTrashList](node)
}
+ process.emit('timeEnd', 'reify:diffTrees')
}
// add the node and all its bins to the list of things to be
@@ -197,6 +202,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
// move aside the shallowest nodes in the tree that have to be
// changed or removed, so that we can rollback if necessary.
[_retireShallowNodes] () {
+ process.emit('time', 'reify:retireShallow')
const moves = this[_retiredPaths] = {}
for (const diff of this.diff.children) {
if (diff.action === 'CHANGE' || diff.action === 'REMOVE') {
@@ -209,6 +215,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
.map(([from, to]) => this[_renamePath](from, to))
return promiseAllRejectLate(movePromises)
.catch(er => this[_rollbackRetireShallowNodes](er))
+ .then(() => process.emit('timeEnd', 'reify:retireShallow'))
}
[_renamePath] (from, to, didMkdirp = false) {
@@ -229,12 +236,14 @@ module.exports = cls => class Reifier extends Ideal(cls) {
}
[_rollbackRetireShallowNodes] (er) {
+ process.emit('time', 'reify:rollback:retireShallow')
const moves = this[_retiredPaths]
const movePromises = Object.entries(moves)
.map(([from, to]) => this[_renamePath](to, from))
return promiseAllRejectLate(movePromises)
// ignore subsequent rollback errors
.catch(er => {})
+ .then(() => process.emit('timeEnd', 'reify:rollback:retireShallow'))
.then(() => { throw er })
}
@@ -244,6 +253,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
if (!this[_omitDev] && !this[_omitOptional] && !this[_omitPeer])
return
+ process.emit('time', 'reify:trashOmits')
const filter = node =>
node.peer && this[_omitPeer] ||
node.dev && this[_omitDev] ||
@@ -253,9 +263,11 @@ module.exports = cls => class Reifier extends Ideal(cls) {
for (const node of this.idealTree.inventory.filter(filter)) {
this[_trashList].add(node.path)
}
+ process.emit('timeEnd', 'reify:trashOmits')
}
[_createSparseTree] () {
+ process.emit('time', 'reify:createSparse')
// if we call this fn again, we look for the previous list
// so that we can avoid making the same directory multiple times
const dirs = this.diff.leaves
@@ -267,10 +279,12 @@ module.exports = cls => class Reifier extends Ideal(cls) {
return promiseAllRejectLate(dirs.map(d => mkdirp(d)))
.then(() => dirs.forEach(dir => this[_sparseTreeDirs].add(dir)))
+ .then(() => process.emit('timeEnd', 'reify:createSparse'))
.catch(er => this[_rollbackCreateSparseTree](er))
}
[_rollbackCreateSparseTree] (er) {
+ process.emit('time', 'reify:rollback:createSparse')
// cut the roots of the sparse tree, not the leaves
const moves = this[_retiredPaths]
const failures = []
@@ -279,8 +293,9 @@ module.exports = cls => class Reifier extends Ideal(cls) {
return promiseAllRejectLate(unlinks)
.then(() => {
if (failures.length)
- this.emit('warn', 'Failed to clean up some directories', failures)
+ this.log.warn('Failed to clean up some directories', failures)
})
+ .then(() => process.emit('timeEnd', 'reify:rollback:createSparse'))
.then(() => this[_rollbackRetireShallowNodes](er))
}
@@ -296,6 +311,8 @@ module.exports = cls => class Reifier extends Ideal(cls) {
if (!shrinkwraps.length)
return
+ process.emit('time', 'reify:loadShrinkwraps')
+
const Arborist = this.constructor
return promiseAllRejectLate(shrinkwraps.map(diff => {
const node = diff.ideal
@@ -309,6 +326,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
// reload the diff and sparse tree because the ideal tree changed
.then(() => this[_diffTrees]())
.then(() => this[_createSparseTree]())
+ .then(() => process.emit('timeEnd', 'reify:loadShrinkwraps'))
.catch(er => this[_rollbackCreateSparseTree](er))
}
@@ -323,24 +341,45 @@ module.exports = cls => class Reifier extends Ideal(cls) {
if (this[_trashList].has(node.path))
return node
+ process.emit('time', `reifyNode:${node.location}`)
this.addTracker('reify', node.name, node.location)
const p = this[_checkEngineAndPlatform](node)
.then(() => this[_extractOrLink](node))
+ .then(() => this[_warnDeprecated](node))
.then(() => this[_recheckEngineAndPlatform](node))
.then(() => this[_binLinks](node))
return this[_handleOptionalFailure](node, p)
.then(() => {
this.finishTracker('reify', node.name, node.location)
+ process.emit('timeEnd', `reifyNode:${node.location}`)
return node
})
}
[_extractOrLink] (node) {
+ // in normal cases, node.resolved should *always* be set by now.
+ // however, it is possible when a lockfile is damaged, or very old,
+ // or in some other race condition bugs in npm v6, that a previously
+ // bundled dependency will have just a version, but no resolved value,
+ // and no 'bundled: true' setting.
+ // Do the best with what we have, or else remove it from the tree
+ // entirely, since we can't possibly reify it.
+ const res = node.resolved ? this[_registryResolved](node.resolved)
+ : node.package.version ? `${node.package.name || node.name}@${node.package.version}`
+ : null
+
+ // no idea what this thing is. remove it from the tree.
+ if (!res) {
+ node.parent = null
+ this[_addNodeToTrashList](node)
+ return
+ }
+
return node.isLink
? rimraf(node.path).then(() => this[_symlink](node))
- : pacote.extract(this[_registryResolved](node.resolved), node.path, {
+ : pacote.extract(res, node.path, {
...this.options,
resolved: node.resolved,
integrity: node.integrity,
@@ -354,17 +393,27 @@ module.exports = cls => class Reifier extends Ideal(cls) {
return symlink(rel, node.path, 'dir')
}
+ [_warnDeprecated] (node) {
+ const {_id, deprecated} = node.package
+ if (deprecated)
+ this.log.warn('deprecated', `${_id}: ${deprecated}`)
+ }
+
[_recheckEngineAndPlatform] (node) {
// If we're loading from a v1 lockfile, then need to do this again later
- // after reading from the disk.
+ // after reading from the disk. Also grab the bin, because old lockfiles
+ // did not track that useful bit of info.
const {meta} = this.idealTree
- return meta.loadedFromDisk && meta.originalLockfileVersion < 2 &&
- rpj(node.path + '/package.json').then(pkg => {
+ if (meta.loadedFromDisk && !(meta.originalLockfileVersion >= 2)) {
+ return rpj(node.path + '/package.json').then(pkg => {
+ node.package.bin = pkg.bin
node.package.os = pkg.os
node.package.cpu = pkg.cpu
node.package.engines = pkg.engines
+ meta.add(node)
return this[_checkEngineAndPlatform](node)
})
+ }
}
[_checkEngineAndPlatform] (node) {
@@ -389,7 +438,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
try {
c()
} catch (er) {
- this.emit('log', 'warn', er)
+ this.log.warn(er)
}
}
}
@@ -434,6 +483,8 @@ module.exports = cls => class Reifier extends Ideal(cls) {
[_loadBundlesAndUpdateTrees] (
depth = 0, bundlesByDepth = this[_getBundlesByDepth]()
) {
+ if (depth === 0)
+ process.emit('time', 'reify:loadBundles')
const maxBundleDepth = bundlesByDepth.get('maxBundleDepth')
if (depth > maxBundleDepth) {
// if we did something, then prune the tree and update the diffs
@@ -442,6 +493,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
this[_idealTreePrune]()
this[_diffTrees]()
}
+ process.emit('timeEnd', 'reify:loadBundles')
return
}
@@ -457,7 +509,6 @@ module.exports = cls => class Reifier extends Ideal(cls) {
const Arborist = this.constructor
// extract all the nodes with bundles
- this.log.silly('reify', 'reifyNode')
return promiseAllRejectLate(set.map(node => this[_reifyNode](node)))
// then load their unpacked children and move into the ideal tree
.then(nodes => promiseAllRejectLate(nodes.map(node =>
@@ -516,11 +567,36 @@ module.exports = cls => class Reifier extends Ideal(cls) {
return bundlesByDepth
}
+ [_submitQuickAudit] () {
+ if (this.options.audit === false)
+ return this.auditReport = null
+
+ // we submit the quick audit at this point in the process, as soon as
+ // we have all the deps resolved, so that it can overlap with the other
+ // actions as much as possible. Stash the promise, which we resolve
+ // before finishing the reify() and returning the tree. Thus, we do
+ // NOT return the promise, as the intent is for this to run in parallel
+ // with the reification, and be resolved at a later time.
+ process.emit('time', 'reify:audit')
+
+ this.auditReport = AuditReport.load(this.idealTree, this.options)
+ .then(res => {
+ process.emit('timeEnd', 'reify:audit')
+ this.auditReport = res
+ })
+ }
+
+ // return the promise if we're waiting for it, or the replaced result
+ [_awaitQuickAudit] () {
+ return this.auditReport
+ }
+
// ok! actually unpack stuff into their target locations!
// The sparse tree has already been created, so we walk the diff
// kicking off each unpack job. If any fail, we rimraf the sparse
// tree entirely and try to put everything back where it was.
[_unpackNewModules] () {
+ process.emit('time', 'reify:unpack')
const unpacks = []
dfwalk({
tree: this.diff,
@@ -537,6 +613,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
getChildren: diff => diff.children,
})
return promiseAllRejectLate(unpacks)
+ .then(() => process.emit('timeEnd', 'reify:unpack'))
.catch(er => this[_rollbackCreateSparseTree](er))
}
@@ -552,6 +629,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
// This is sort of an inverse diff tree, of all the nodes where
// the actualTree and idealTree _don't_ differ, starting from the
// shallowest nodes that we moved aside in the first place.
+ process.emit('time', 'reify:unretire')
const moves = this[_retiredPaths]
this[_retiredUnchanged] = {}
return promiseAllRejectLate(this.diff.children.map(diff => {
@@ -579,6 +657,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
return mkdirp(dir).then(() => this[_moveContents](node, fromPath))
}))
}))
+ .then(() => process.emit('timeEnd', 'reify:unretire'))
.catch(er => this[_rollbackMoveBackRetiredUnchanged](er))
}
@@ -614,6 +693,8 @@ module.exports = cls => class Reifier extends Ideal(cls) {
if (this[_ignoreScripts])
return
+ process.emit('time', 'reify:runScripts')
+
// for all the things being installed, run their appropriate scripts
// run in tip->root order, so as to be more likely to build a node's
// deps before attempting to build it itself
@@ -665,6 +746,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
.then(() => this[_runScriptQueue]('preinstall', preinstall))
.then(() => this[_runScriptQueue]('install', install))
.then(() => this[_runScriptQueue]('postinstall', postinstall))
+ .then(() => process.emit('timeEnd', 'reify:runScripts'))
.catch(er => this[_rollbackMoveBackRetiredUnchanged](er))
}
@@ -672,6 +754,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
if (!queue.length)
return
+ process.emit('time', `reify:runScripts:${event}`)
return promiseCallLimit(queue.map(([node, pkg]) => () => {
const {path} = node
// skip any that we know we'll be deleting
@@ -696,6 +779,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
scriptShell: this[_scriptShell],
}))
}))
+ .then(() => process.emit('timeEnd', `reify:runScripts:${event}`))
}
// the tree is pretty much built now, so it's cleanup time.
@@ -703,6 +787,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
// If this fails, there isn't much we can do but tell the user about it.
// Thankfully, it's pretty unlikely that it'll fail, since rimraf is a tank.
[_removeTrash] () {
+ process.emit('time', 'reify:trash')
const promises = []
const failures = []
const rm = path => rimraf(path).catch(er => failures.push([path, er]))
@@ -713,8 +798,9 @@ module.exports = cls => class Reifier extends Ideal(cls) {
return promiseAllRejectLate(promises).then(() => {
if (failures.length)
- this.emit('warn', 'Failed to clean up some directories', failures)
+ this.log.warn('Failed to clean up some directories', failures)
})
+ .then(() => process.emit('timeEnd', 'reify:trash'))
}
// last but not least, we save the ideal tree metadata to the package-lock
@@ -729,6 +815,8 @@ module.exports = cls => class Reifier extends Ideal(cls) {
if (options.save === false || this[_global])
return
+ process.emit('time', 'reify:save')
+
if (this[_resolvedAdd]) {
const root = this.idealTree
const pkg = root.package
@@ -762,7 +850,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
...this.idealTree.package,
_id: undefined,
}, null, 2) + '\n'),
- ])
+ ]).then(() => process.emit('timeEnd', 'reify:save'))
}
[_copyIdealToActual] () {
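
The _submitQuickAudit/_awaitQuickAudit pair above is a deliberate overlap trick: this.auditReport first holds the pending promise, and its .then handler swaps in the resolved report, so a later await works in either state. The same idea in miniature (hypothetical function names; AuditReport.load is the real entry point, defined in audit-report.js below):

    let auditReport = null

    function submitQuickAudit (tree, options) {
      // stash the promise so the request overlaps the rest of reify()
      auditReport = AuditReport.load(tree, options)
        .then(report => (auditReport = report))
    }

    async function awaitQuickAudit () {
      // awaiting a pending promise waits; awaiting the report is a no-op
      return auditReport
    }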
diff --git a/node_modules/@npmcli/arborist/lib/audit-report.js b/node_modules/@npmcli/arborist/lib/audit-report.js
new file mode 100644
index 000000000..6a6e68130
--- /dev/null
+++ b/node_modules/@npmcli/arborist/lib/audit-report.js
@@ -0,0 +1,385 @@
+// an object representing the set of vulnerabilities in a tree
+
+const npa = require('npm-package-arg')
+const pickManifest = require('npm-pick-manifest')
+const pacote = require('pacote')
+
+const Vuln = require('./vuln.js')
+
+const _getReport = Symbol('getReport')
+const _processDeps = Symbol('processDeps')
+const _processDependent = Symbol('processDependent')
+const _addVulnerability = Symbol('addVulnerability')
+const _vulnDependents = Symbol('vulnDependents')
+const _isVulnerable = Symbol('isVulnerable')
+const _specVulnerable = Symbol('specVulnerable')
+const _specVulnCheck = Symbol('specVulnCheck')
+const _fixAvailable = Symbol('fixAvailable')
+const _packument = Symbol('packument')
+const _packuments = Symbol('packuments')
+const _getDepSpec = Symbol('getDepSpec')
+const _init = Symbol('init')
+const procLog = require('./proc-log.js')
+
+const fetch = require('npm-registry-fetch')
+
+class AuditReport extends Map {
+ static load (tree, opts) {
+ return new AuditReport(tree, opts).run()
+ }
+
+ get auditReportVersion () {
+ return 2
+ }
+
+ toJSON () {
+ const obj = {
+ auditReportVersion: this.auditReportVersion,
+ vulnerabilities: {},
+ metadata: {
+ vulnerabilities: {
+ info: 0,
+ low: 0,
+ moderate: 0,
+ high: 0,
+ critical: 0,
+ total: this.size,
+ },
+ dependencies: {
+ prod: 0,
+ dev: 0,
+ optional: 0,
+ peer: 0,
+ peerOptional: 0,
+ total: this.tree.inventory.size,
+ },
+ },
+ }
+
+ for (const node of this.tree.inventory.values()) {
+ const { dependencies } = obj.metadata
+ let prod = true
+ for (const type of [
+ 'dev',
+ 'optional',
+ 'peer',
+ 'peerOptional',
+ ]) {
+ if (node[type]) {
+ dependencies[type]++
+ prod = false
+ }
+ }
+ if (prod)
+ dependencies.prod ++
+ }
+
+ // if it doesn't have any topVulns, then it's fixable with audit fix
+ // for each topVuln, figure out if it's fixable with audit fix --force,
+ // or if we have to just delete the thing, and if the fix --force will
+ // require a semver major update.
+ for (const [name, vuln] of this.entries()) {
+ obj.vulnerabilities[name] = vuln.toJSON()
+ obj.metadata.vulnerabilities[vuln.severity]++
+ }
+
+ return obj
+ }
+
+ constructor (tree, opts = {}) {
+ super()
+ this[_vulnDependents] = new Set()
+ this[_packuments] = new Map()
+ this.topVulns = new Map()
+ this.advisoryVulns = new Map()
+ this.dependencyVulns = new Map()
+
+ this.error = null
+ this.options = opts
+ this.log = opts.log || procLog
+ this.tree = tree
+ }
+
+ async run () {
+ this.report = await this[_getReport]()
+ if (this.report) {
+ await this[_init]()
+ await this[_processDeps]()
+ }
+ return this
+ }
+
+ async [_init] () {
+ process.emit('time', 'auditReport:init')
+ const promises = []
+ for (const advisory of Object.values(this.report.advisories)) {
+ const {
+ module_name: name,
+ vulnerable_versions: range,
+ } = advisory
+ promises.push(this[_addVulnerability](name, range, advisory))
+ }
+
+ await Promise.all(promises)
+ process.emit('timeEnd', 'auditReport:init')
+ }
+
+ // for each node P
+ // for each vulnerable dep Q
+ // pickManifest(Q, P's dep on Q, {avoid})
+ // if resulting version is vulnerable, then P@version is vulnerable
+ // find all versions of P depending on unsafe Q
+ async [_processDeps] () {
+ process.emit('time', 'auditReport:process')
+ for (const p of this[_vulnDependents]) {
+ await this[_processDependent](p)
+ }
+ process.emit('timeEnd', 'auditReport:process')
+ }
+
+ isVulnerable (node) {
+ return node && this.has(node.name) &&
+ this.get(node.name).isVulnerable(node)
+ }
+
+ [_specVulnCheck] (paku, spec) {
+ // if it's not a thing that came from the registry, and for whatever
+ // reason, it's vulnerable, and we have to assume we can't fix that.
+ if (!paku || !paku.versions || typeof paku.versions !== 'object')
+ return false
+
+ // similarly, even if we HAVE a packument, but we're looking for a version
+ // that doesn't come out of that packument, and what we've got is
+ // vulnerable, then we're stuck with it.
+ const specObj = npa(spec)
+ if (!specObj.registry)
+ return false
+
+ return spec
+ }
+
+ // pass in the packument for the vulnerable dep, the spec that is
+ // depended upon, and the range of dep versions to avoid.
+ // returns true if every satisfying version is vulnerable.
+ [_specVulnerable] (paku, spec, avoid) {
+ spec = this[_specVulnCheck](paku, spec)
+ if (spec === false)
+ return true
+
+ // if we can't avoid the vulnerable version range within the spec
+ // required, then the dep range is entirely vulnerable.
+ return pickManifest(paku, spec, {
+ ...this.options,
+ avoid,
+ })._shouldAvoid
+ }
+
+ // see if the top node CAN be fixed, even with a semver major update
+ // if not, then the user just has to find a different thing to use.
+ [_fixAvailable] (paku, spec, avoid) {
+ spec = this[_specVulnCheck](paku, spec)
+ if (spec === false)
+ return false
+
+ try {
+ const {
+ _isSemVerMajor: isSemVerMajor,
+ version,
+ name,
+ } = pickManifest(paku, spec, {
+ ...this.options,
+ avoid,
+ avoidStrict: true,
+ })
+ return {name, version, isSemVerMajor}
+ } catch (er) {
+ return false
+ }
+ }
+
+ async [_processDependent] (p) {
+ const loc = p.location || '#ROOT'
+ process.emit('time', `auditReport:dep:${loc}`)
+ // remove it from the queue so we can process it again if another
+ // vulnerability will affect it.
+ this[_vulnDependents].delete(p)
+ for (const edge of p.edgesOut.values()) {
+ if (!this.isVulnerable(edge.to))
+ continue
+
+ const {name, type, spec} = edge
+ process.emit('time', `auditReport:dep:${loc}:${edge.to.location}`)
+ const vuln = this.get(name)
+ const {packument, range: avoid} = vuln
+
+ if (this[_specVulnerable](packument, spec, avoid)) {
+ // whether it's the root, or just something we symlinked to a
+ // random place on disk, we aren't going to update it by looking
+ // in the registry. Track these separately.
+ if (p.isTop) {
+ // this indicates that the root is vulnerable, and cannot be
+ // upgraded out of the bad place without --force. But, there's
+ // no need to add it to the actual vulns list, because nothing
+ // depends on root.
+ this.topVulns.set(name, vuln)
+ vuln.topNodes.add(p)
+ // We don't provide fixes for top nodes other than root, but we
+ // still check whether the node is fixable, and whether the fix is
+ // a semver major change.
+ vuln.fixAvailable = this[_fixAvailable](packument, spec, avoid)
+ } else {
+ // p is vulnerable!
+ // mark all versions with this problem, and then add the
+ // vulnerability for the dependent
+ const paku = await this[_packument](p.name)
+ const metaVuln = []
+ if (!paku) {
+ // not a dep that comes from the registry, apparently
+ metaVuln.push(p.package.version)
+ } else {
+ for (const [version, pmani] of Object.entries(paku.versions)) {
+ const spec = this[_getDepSpec](pmani, name)
+ // if we don't even depend on the thing, we're in the clear
+ if (typeof spec !== 'string')
+ continue
+ const specVuln = this[_specVulnerable](packument, spec, avoid)
+ if (specVuln)
+ metaVuln.push(version)
+ }
+ }
+ await this[_addVulnerability](p.name, metaVuln.join(' || '), vuln)
+ }
+ }
+
+ process.emit('timeEnd', `auditReport:dep:${loc}:${edge.to.location}`)
+ }
+ process.emit('timeEnd', `auditReport:dep:${loc}`)
+ }
+
+ async [_packument] (name) {
+ return this[_packuments].has(name) ? this[_packuments].get(name)
+ : pacote.packument(name, { ...this.options })
+ .catch(() => null)
+ .then(packument => {
+ this[_packuments].set(name, packument)
+ return packument
+ })
+ }
+
+ [_getDepSpec] (mani, name) {
+ // skip dev because that only matters at the root,
+ // where we aren't fetching a manifest from the registry
+ // with multiple versions anyway.
+ return mani.dependencies && mani.dependencies[name] ||
+ mani.optionalDependencies && mani.optionalDependencies[name] ||
+ mani.peerDependencies && mani.peerDependencies[name]
+ }
+
+ delete (name) {
+ super.delete(name)
+ this.topVulns.delete(name)
+ this.advisoryVulns.delete(name)
+ this.dependencyVulns.delete(name)
+ }
+
+ set () {
+ throw new Error('do not call AuditReport.set() directly')
+ }
+
+ async [_addVulnerability] (name, range, via) {
+ const has = this.has(name)
+ const vuln = has ? this.get(name) : new Vuln({ name, via })
+
+ if (has)
+ vuln.addVia(via)
+ else
+ super.set(name, vuln)
+
+ // if we've already seen this exact range, just make sure that
+ // we have the advisory or source already, but do nothing else,
+ // because all the matching nodes have already been collected.
+ if (vuln.hasRange(range))
+ return
+
+ vuln.addRange(range)
+
+ // track it in the appropriate maps for reporting on later
+ super.set(name, vuln)
+ if (!(via instanceof Vuln)) {
+ this.dependencyVulns.delete(name)
+ this.advisoryVulns.set(name, vuln)
+ } else if (!this.advisoryVulns.has(name))
+ this.dependencyVulns.set(name, vuln)
+
+ process.emit('time', `auditReport:add:${name}@${range}`)
+
+ for (const node of this.tree.inventory.query('name', name)) {
+ if (vuln.nodes.has(node) || !vuln.isVulnerable(node))
+ continue
+
+ for (const {from} of node.edgesIn) {
+ this[_vulnDependents].add(from)
+ }
+ }
+
+ // if we didn't get anything, then why is this even here??
+ if (vuln.nodes.size === 0)
+ return this.delete(name)
+
+ if (!vuln.packument)
+ vuln.packument = await this[_packument](name)
+
+ process.emit('timeEnd', `auditReport:add:${name}@${range}`)
+ }
+
+ async [_getReport] () {
+ process.emit('time', 'auditReport:getReport')
+ try {
+ // if we're not auditing, just return null
+ if (this.options.audit === false || this.tree.inventory.size === 0)
+ return null
+
+ // we always hit the quick endpoint, because we calculate remediations
+ // locally anyway, to handle meta-vulnerabilities.
+ const res = await fetch('/-/npm/v1/security/audits/quick', {
+ ...this.options,
+ registry: this.options.auditRegistry || this.options.registry,
+ method: 'POST',
+ gzip: true,
+ body: prepareData(this.tree, this.options),
+ })
+
+ return await res.json()
+ } catch (er) {
+ this.log.verbose('audit error', er)
+ this.log.silly('audit error', String(er.body))
+ this.error = er
+ return null
+ } finally {
+ process.emit('timeEnd', 'auditReport:getReport')
+ }
+ }
+}
+
+const prepareData = (tree, opts) => {
+ const { npmVersion: npm_version } = opts
+ const node_version = process.version
+ const { platform, arch } = process
+ const { NODE_ENV: node_env } = process.env
+ const data = tree.meta.commit()
+ return JSON.stringify({
+ ...data,
+ requires: {
+ ...(tree.package.devDependencies || {}),
+ ...(tree.package.peerDependencies || {}),
+ ...(tree.package.optionalDependencies || {}),
+ ...(tree.package.dependencies || {}),
+ },
+ node_version,
+ npm_version,
+ platform,
+ arch,
+ node_env,
+ }, null, 2)
+}
+
+module.exports = AuditReport
diff --git a/node_modules/@npmcli/arborist/lib/calc-dep-flags.js b/node_modules/@npmcli/arborist/lib/calc-dep-flags.js
index ee24407ac..3d612607a 100644
--- a/node_modules/@npmcli/arborist/lib/calc-dep-flags.js
+++ b/node_modules/@npmcli/arborist/lib/calc-dep-flags.js
@@ -5,12 +5,13 @@ const calcDepFlags = tree => {
tree.optional = false
tree.devOptional = false
tree.peer = false
- return depth({
+ const ret = depth({
tree,
visit: node => calcDepFlagsStep(node),
filter: node => node,
getChildren: node => [...node.edgesOut.values()].map(edge => edge.to),
})
+ return ret
}
const calcDepFlagsStep = (node) => {
diff --git a/node_modules/@npmcli/arborist/lib/diff.js b/node_modules/@npmcli/arborist/lib/diff.js
index e44107b4a..cb4d8c445 100644
--- a/node_modules/@npmcli/arborist/lib/diff.js
+++ b/node_modules/@npmcli/arborist/lib/diff.js
@@ -99,6 +99,7 @@ const getChildren = diff => {
const diffNode = (actual, ideal, children, unchanged, removed) => {
const action = getAction({actual, ideal})
+
// if it's a match, then get its children
// otherwise, this is the child diff node
if (action) {
@@ -117,11 +118,23 @@ const diffNode = (actual, ideal, children, unchanged, removed) => {
// diffing trees can mutate them, but otherwise we have to walk over
// all unchanging bundlers and correct the diff later, so it's more
// efficient to just fix it while we're passing through already.
+ //
+ // Note that moving over a bundled dep will break the links to other
+ // deps under this parent, which may have been transitively bundled.
+ // Breaking those links means that we'll no longer see the transitive
+ // dependency, meaning that it won't appear as bundled any longer!
+ // In order to not end up dropping transitively bundled deps, we have
+ // to get the list of nodes to move, then move them all at once, rather
+ // than moving them one at a time in the first loop.
const bd = ideal.package.bundleDependencies
if (actual && bd && bd.length) {
+ const bundledChildren = []
for (const [name, node] of actual.children.entries()) {
if (node.inBundle)
- node.parent = ideal
+ bundledChildren.push(node)
+ }
+ for (const node of bundledChildren) {
+ node.parent = ideal
}
}
children.push(...getChildren({actual, ideal, unchanged, removed}))
diff --git a/node_modules/@npmcli/arborist/lib/edge.js b/node_modules/@npmcli/arborist/lib/edge.js
index 29003b64a..a0bb7a432 100644
--- a/node_modules/@npmcli/arborist/lib/edge.js
+++ b/node_modules/@npmcli/arborist/lib/edge.js
@@ -1,6 +1,7 @@
// An edge in the dependency graph
// Represents a dependency relationship of some kind
+const npa = require('npm-package-arg')
const depValid = require('./dep-valid.js')
const _from = Symbol('_from')
const _to = Symbol('_to')
@@ -18,6 +19,7 @@ const types = new Set([
'optional',
'peer',
'peerOptional',
+ 'workspace'
])
class Edge {
@@ -26,6 +28,10 @@ class Edge {
if (typeof spec !== 'string')
throw new TypeError('must provide string spec')
+
+ if (type === 'workspace' && npa(spec).type !== 'directory')
+ throw new TypeError('workspace edges must be a symlink')
+
this[_spec] = spec
if (accept !== undefined) {
diff --git a/node_modules/@npmcli/arborist/lib/node.js b/node_modules/@npmcli/arborist/lib/node.js
index a7330a29a..f17e61dc5 100644
--- a/node_modules/@npmcli/arborist/lib/node.js
+++ b/node_modules/@npmcli/arborist/lib/node.js
@@ -28,7 +28,7 @@
// where we need to quickly find all instances of a given package name within a
// tree.
-const nameFromFolder = require('./name-from-folder.js')
+const nameFromFolder = require('@npmcli/name-from-folder')
const Edge = require('./edge.js')
const Inventory = require('./inventory.js')
const Shrinkwrap = require('./shrinkwrap.js')
@@ -46,6 +46,7 @@ const _fsParent = Symbol('_fsParent')
const _reloadEdges = Symbol('_reloadEdges')
const _loadType = Symbol('_loadType')
const _loadDepType = Symbol('_loadDepType')
+const _loadWorkspaces = Symbol('_loadWorkspaces')
const _reloadNamedEdges = Symbol('_reloadNamedEdges')
// overridden by Link class
const _loadDeps = Symbol.for('Arborist.Node._loadDeps')
@@ -55,6 +56,7 @@ const _refreshTopMeta = Symbol('_refreshTopMeta')
const _refreshPath = Symbol('_refreshPath')
const _delistFromMeta = Symbol('_delistFromMeta')
const _global = Symbol.for('global')
+const _workspaces = Symbol('_workspaces')
const relpath = require('./relpath.js')
const consistentResolve = require('./consistent-resolve.js')
@@ -79,6 +81,7 @@ class Node {
name,
children,
fsChildren,
+ legacyPeerDeps = false,
linksIn,
hasShrinkwrap,
extraneous = true,
@@ -92,6 +95,8 @@ class Node {
// true if part of a global install
this[_global] = global
+ this[_workspaces] = null
+
this.errors = error ? [error] : []
const pkg = normalize(options.pkg || {})
@@ -123,6 +128,7 @@ class Node {
}
this.integrity = integrity || pkg._integrity || null
this.hasShrinkwrap = hasShrinkwrap || pkg._hasShrinkwrap || false
+ this.legacyPeerDeps = legacyPeerDeps
this.children = new Map()
this.fsChildren = new Set()
@@ -207,6 +213,22 @@ class Node {
return this.global && this.parent.isRoot
}
+ get workspaces() {
+ return this[_workspaces]
+ }
+
+ set workspaces(workspaces) {
+ // delete edges for workspaces that have been removed
+ if (this[_workspaces])
+ for (const [name, path] of this[_workspaces].entries()) {
+ if (!workspaces.has(name)) this.edgesOut.get(name).detach()
+ }
+
+ this[_workspaces] = workspaces
+ this[_loadWorkspaces]()
+ this[_loadDeps]()
+ }
+
get binPaths () {
if (!this.parent)
return []
@@ -240,6 +262,7 @@ class Node {
}
this[_package] = pkg
+ this[_loadWorkspaces]()
this[_loadDeps]()
// do a hard reload, since the dependents may now be valid or invalid
// as a result of the package change.
@@ -332,6 +355,14 @@ class Node {
return this[_root] || this
}
+ [_loadWorkspaces] () {
+ if (!this[_workspaces]) return
+
+ for (const [name, path] of this[_workspaces].entries()) {
+ new Edge({ from: this, name, spec: `file:${path}`, type: 'workspace' })
+ }
+ }
+
[_loadDeps] () {
// Caveat! Order is relevant!
// packages in optionalDependencies and prod/peer/dev are
@@ -342,7 +373,7 @@ class Node {
this[_loadDepType](this.package.dependencies, 'prod')
const pd = this.package.peerDependencies
- if (pd && typeof pd === 'object') {
+ if (pd && typeof pd === 'object' && !this.legacyPeerDeps) {
const pm = this.package.peerDependenciesMeta || {}
const peerDependencies = {}
const peerOptional = {}
diff --git a/node_modules/@npmcli/arborist/lib/shrinkwrap.js b/node_modules/@npmcli/arborist/lib/shrinkwrap.js
index 959fc4dd9..aa094c8e6 100644
--- a/node_modules/@npmcli/arborist/lib/shrinkwrap.js
+++ b/node_modules/@npmcli/arborist/lib/shrinkwrap.js
@@ -31,6 +31,7 @@ const rimraf = promisify(require('rimraf'))
const fs = require('fs')
const readFile = promisify(fs.readFile)
const writeFile = promisify(fs.writeFile)
+const stat = promisify(fs.stat)
const link = promisify(fs.link)
const { resolve, dirname, basename } = require('path')
const specFromLock = require('./spec-from-lock.js')
@@ -74,6 +75,16 @@ const maybeReadFile = file => {
})
}
+const maybeStatFile = file => {
+ return stat(file).then(st => st.isFile(), er => {
+ /* istanbul ignore else - can't test without breaking module itself */
+ if (er.code === 'ENOENT')
+ return null
+ else
+ throw er
+ })
+}
+
const pkgMetaKeys = [
'name',
'version',
@@ -92,6 +103,8 @@ const pkgMetaKeys = [
'_hasShrinkwrap',
'hasInstallScript',
'bin',
+ 'deprecated',
+ 'workspaces',
]
const nodeMetaKeys = [
@@ -120,11 +133,32 @@ const _loadAll = Symbol('_loadAll')
const _metaFromLock = Symbol('_metaFromLock')
const _resolveMetaNode = Symbol('_resolveMetaNode')
const _fixDependencies = Symbol('_fixDependencies')
+
+const _filenameSet = Symbol('_filenameSet')
+const _maybeRead = Symbol('_maybeRead')
+const _maybeStat = Symbol('_maybeStat')
class Shrinkwrap {
static load (options) {
return new Shrinkwrap(options).load()
}
+ static reset (options) {
+ // still need to know if it was loaded from the disk, but don't
+ // bother reading it if we're gonna just throw it away.
+ const s = new Shrinkwrap(options)
+ s.reset()
+
+ return s[_maybeStat]().then(([sw, lock]) => {
+ s.filename = resolve(s.path,
+ (s.hiddenLockfile ? 'node_modules/.package-lock'
+ : s.shrinkwrapOnly || sw && !lock ? 'npm-shrinkwrap'
+ : 'package-lock') + '.json')
+ s.loadedFromDisk = sw || lock
+ s.type = basename(s.filename)
+ return s
+ })
+ }
+
static metaFromNode (node, path) {
if (node.isLink)
return {
@@ -198,19 +232,35 @@ class Shrinkwrap {
}
}
- load () {
- // we don't need to load package-lock.json except for top of tree nodes,
- // only npm-shrinkwrap.json.
- return Promise.all(this.shrinkwrapOnly ? [
- maybeReadFile(this.path + '/npm-shrinkwrap.json'),
+ [_filenameSet] () {
+ return this.shrinkwrapOnly ? [
+ this.path + '/npm-shrinkwrap.json',
] : this.hiddenLockfile ? [
null,
- maybeReadFile(this.path + '/node_modules/.package-lock.json'),
+ this.path + '/node_modules/.package-lock.json',
] : [
- maybeReadFile(this.path + '/npm-shrinkwrap.json'),
- maybeReadFile(this.path + '/package-lock.json'),
- maybeReadFile(this.path + '/yarn.lock'),
- ]).then(([sw, lock, yarn]) => {
+ this.path + '/npm-shrinkwrap.json',
+ this.path + '/package-lock.json',
+ this.path + '/yarn.lock',
+ ]
+ }
+
+ [_maybeRead] () {
+ return Promise.all(this[_filenameSet]().map(fn => fn && maybeReadFile(fn)))
+ }
+
+ [_maybeStat] () {
+ // throw away yarn, we only care about lock or shrinkwrap when checking
+ // this way, since we're not actually loading the full lock metadata
+ return Promise.all(this[_filenameSet]().slice(0, 2)
+ .map(fn => fn && maybeStatFile(fn)))
+ }
+
+ load () {
+ const timer = `shrinkwrap:${this.path}${this.hiddenLockfile ? ':hidden' : ''}`
+ // we don't need to load package-lock.json except for top of tree nodes,
+ // only npm-shrinkwrap.json.
+ return this[_maybeRead]().then(([sw, lock, yarn]) => {
const data = lock || sw || ''
// don't use detect-indent, just pick the first line.
const indent = data.match(/^\{\n([\s\t]+)/)
@@ -219,11 +269,10 @@ class Shrinkwrap {
// use shrinkwrap only for deps, otherwise prefer package-lock
// and ignore npm-shrinkwrap if both are present.
// TODO: emit a warning here or something if both are present.
- this.filename = `${this.path}/${
- this.hiddenLockfile ? 'node_modules/.package-lock'
+ this.filename = resolve(this.path,
+ (this.hiddenLockfile ? 'node_modules/.package-lock'
: this.shrinkwrapOnly || sw && !lock ? 'npm-shrinkwrap'
- : 'package-lock'
- }.json`
+ : 'package-lock') + '.json')
this.type = basename(this.filename)
this.loadedFromDisk = !!data
@@ -252,12 +301,9 @@ class Shrinkwrap {
.then(pkg => {
this[_loadAll]('', null, this.data)
this[_fixDependencies](pkg)
- return this
})
}
-
- return this
- })
+ }).then(() => this)
}
diff --git a/node_modules/@npmcli/arborist/lib/vuln.js b/node_modules/@npmcli/arborist/lib/vuln.js
new file mode 100644
index 000000000..3b5f3756e
--- /dev/null
+++ b/node_modules/@npmcli/arborist/lib/vuln.js
@@ -0,0 +1,128 @@
+// An object representing a vulnerability either as the result of an
+// advisory or due to the package in question depending exclusively on
+// vulnerable versions of a dep.
+//
+// - name: package name
+// - range: Set of vulnerable versions
+// - nodes: Set of nodes affected
+// - effects: Set of vulns triggered by this one
+// - via: Set of advisories or vulnerabilities causing this vuln
+//
+// These objects are filled in by the operations in the AuditReport
+// class, which sets the packument and calls addRange() with
+// the vulnerable range.
+
+const {satisfies, simplifyRange} = require('semver')
+const semverOpt = { loose: true, includePrerelease: true }
+
+const _range = Symbol('_range')
+const _ranges = Symbol('_ranges')
+const _simpleRange = Symbol('_simpleRange')
+const _fixAvailable = Symbol('_fixAvailable')
+
+const severities = new Map([
+ ['info', 0],
+ ['low', 1],
+ ['moderate', 2],
+ ['high', 3],
+ ['critical', 4],
+ [null, -1],
+])
+
+for (const [name, val] of severities.entries()) {
+ severities.set(val, name)
+}
+
+class Vuln {
+ constructor ({ name, via }) {
+ this.name = name
+ this.via = new Set()
+ this.severity = null
+ this.addVia(via)
+ this.effects = new Set()
+ this.topNodes = new Set()
+ this[_ranges] = new Set()
+ this[_range] = null
+ this[_simpleRange] = null
+ this.nodes = new Set()
+ this.packument = null
+ // assume a fix is available unless it hits a top node
+ // that locks it in place, setting this to false or {isSemVerMajor, version}.
+ this[_fixAvailable] = true
+ }
+
+ get fixAvailable () {
+ return this[_fixAvailable]
+ }
+ set fixAvailable (f) {
+ this[_fixAvailable] = f
+ // if there's a fix available for this at the top level, it means that
+ // it will also fix the vulns that led to it being there.
+ for (const v of this.via) {
+ if (v.fixAvailable === true)
+ v.fixAvailable = f
+ }
+ }
+
+ toJSON () {
+ return {
+ name: this.name,
+ severity: this.severity,
+ via: [...this.via].map(v => v instanceof Vuln ? v.name : v),
+ effects: [...this.effects].map(v => v.name),
+ range: this.simpleRange,
+ nodes: [...this.nodes].map(n => n.location),
+ fixAvailable: this[_fixAvailable],
+ }
+ }
+
+ addVia (via) {
+ this.via.add(via)
+ const sev = severities.get(via.severity)
+ if (sev > severities.get(this.severity))
+ this.severity = via.severity
+
+ if (via instanceof Vuln)
+ via.effects.add(this)
+ }
+
+ hasRange (range) {
+ return this[_ranges].has(range)
+ }
+
+ addRange (range) {
+ this[_ranges].add(range)
+ this[_range] = [...this[_ranges]].join(' || ')
+ this[_simpleRange] = null
+ }
+
+ get range () {
+ return this[_range] || (this[_range] = [...this[_ranges]].join(' || '))
+ }
+
+ get simpleRange () {
+ if (this[_simpleRange] && this[_simpleRange] === this[_range])
+ return this[_simpleRange]
+ const range = this.range
+ if (!this.packument)
+ return range
+ const versions = Object.keys(this.packument.versions)
+ const simple = simplifyRange(versions, range, semverOpt)
+ return this[_simpleRange] = this[_range] = simple
+ }
+
+ isVulnerable (node) {
+ if (this.nodes.has(node))
+ return true
+
+ const { version } = node.package
+ if (version && satisfies(version, this.range, semverOpt)) {
+ this.nodes.add(node)
+ return true
+ }
+
+ return false
+ }
+}
+
+module.exports = Vuln
diff --git a/node_modules/@npmcli/arborist/node_modules/mkdirp/lib/opts-arg.js b/node_modules/@npmcli/arborist/node_modules/mkdirp/lib/opts-arg.js
index 488bd44c3..2fa4833fa 100644
--- a/node_modules/@npmcli/arborist/node_modules/mkdirp/lib/opts-arg.js
+++ b/node_modules/@npmcli/arborist/node_modules/mkdirp/lib/opts-arg.js
@@ -2,9 +2,9 @@ const { promisify } = require('util')
const fs = require('fs')
const optsArg = opts => {
if (!opts)
- opts = { mode: 0o777 & (~process.umask()), fs }
+ opts = { mode: 0o777, fs }
else if (typeof opts === 'object')
- opts = { mode: 0o777 & (~process.umask()), fs, ...opts }
+ opts = { mode: 0o777, fs, ...opts }
else if (typeof opts === 'number')
opts = { mode: opts, fs }
else if (typeof opts === 'string')
diff --git a/node_modules/@npmcli/arborist/node_modules/mkdirp/package.json b/node_modules/@npmcli/arborist/node_modules/mkdirp/package.json
index cfb677135..49be2d3e7 100644
--- a/node_modules/@npmcli/arborist/node_modules/mkdirp/package.json
+++ b/node_modules/@npmcli/arborist/node_modules/mkdirp/package.json
@@ -1,8 +1,8 @@
{
"_from": "mkdirp@^1.0.3",
- "_id": "mkdirp@1.0.3",
+ "_id": "mkdirp@1.0.4",
"_inBundle": false,
- "_integrity": "sha512-6uCP4Qc0sWsgMLy1EOqqS/3rjDHOEnsStVr/4vtAIK2Y5i2kA7lFFejYrpIyiN9w0pYf4ckeCYT9f1r1P9KX5g==",
+ "_integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
"_location": "/@npmcli/arborist/mkdirp",
"_phantomChildren": {},
"_requested": {
@@ -18,10 +18,10 @@
"_requiredBy": [
"/@npmcli/arborist/bin-links"
],
- "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.3.tgz",
- "_shasum": "4cf2e30ad45959dddea53ad97d518b6c8205e1ea",
+ "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
+ "_shasum": "3eb5ed62622756d79a5f0e2a221dfebad75c2f7e",
"_spec": "mkdirp@^1.0.3",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/@npmcli/arborist/node_modules/bin-links",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/arborist/node_modules/bin-links",
"bin": {
"mkdirp": "bin/cmd.js"
},
@@ -33,7 +33,7 @@
"description": "Recursively mkdir, like `mkdir -p`",
"devDependencies": {
"require-inject": "^1.4.4",
- "tap": "^14.10.6"
+ "tap": "^14.10.7"
},
"engines": {
"node": ">=10"
@@ -71,5 +71,5 @@
"check-coverage": true,
"coverage-map": "map.js"
},
- "version": "1.0.3"
+ "version": "1.0.4"
}
diff --git a/node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/CHANGELOG.md b/node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/CHANGELOG.md
new file mode 100644
index 000000000..a4ee13e92
--- /dev/null
+++ b/node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/CHANGELOG.md
@@ -0,0 +1,219 @@
+# Changelog
+
+All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+
+## [6.1.0](https://github.com/npm/npm-pick-manifest/compare/v6.0.0...v6.1.0) (2020-04-07)
+
+
+### Features
+
+* add 'avoid' semver range option ([c64973d](https://github.com/npm/npm-pick-manifest/commit/c64973d63ddf6797edf41c20df641f816d30ff03))
+* add avoidStrict option to strictly avoid ([c268796](https://github.com/npm/npm-pick-manifest/commit/c2687967b6294f5ce01aa6b59071e79272dc57de)), closes [#30](https://github.com/npm/npm-pick-manifest/issues/30)
+
+## [6.0.0](https://github.com/npm/npm-pick-manifest/compare/v5.0.0...v6.0.0) (2020-02-18)
+
+
+### ⚠ BREAKING CHANGES
+
+* 'enjoyBy' is no longer an acceptable alias.
+
+### Features
+
+* add GitHub Actions file for ci ([8985247](https://github.com/npm/npm-pick-manifest/commit/898524727fa157f46fdf4eb0c11148ae4808226b))
+
+
+### Bug Fixes
+
+* Handle edge cases around before:Date and filtering staged publishes ([ed2f92e](https://github.com/npm/npm-pick-manifest/commit/ed2f92e7fdc9cc7836b13ebc73e17d8fd296a07e))
+* remove figgy pudding ([c24fed2](https://github.com/npm/npm-pick-manifest/commit/c24fed25b8f77fbbcc3107030f2dfed55fa54222))
+* remove outdated cruft from docs ([aae7ef7](https://github.com/npm/npm-pick-manifest/commit/aae7ef7625ddddbac0548287e5d57b8f76593322))
+* update some missing {loose:true} semver configs ([4015424](https://github.com/npm/npm-pick-manifest/commit/40154244a3fe1af86462bc1d6165199fc3315c10))
+* Use canonical 'before' config name ([029de59](https://github.com/npm/npm-pick-manifest/commit/029de59bda6d3376f03760a00efe4ac9d997c623))
+
+## [5.0.0](https://github.com/npm/npm-pick-manifest/compare/v4.0.0...v5.0.0) (2019-12-15)
+
+
+### ⚠ BREAKING CHANGES
+
+* This drops support for node < 10.
+
+* normalize settings, drop old nodes, update deps ([dc2e61c](https://github.com/npm/npm-pick-manifest/commit/dc2e61cc06bd19e079128e77397df7593741da50))
+
+<a name="4.0.0"></a>
+# [4.0.0](https://github.com/npm/npm-pick-manifest/compare/v3.0.2...v4.0.0) (2019-11-11)
+
+
+### deps
+
+* bump npm-package-arg to v7 ([42c76d8](https://github.com/npm/npm-pick-manifest/commit/42c76d8)), closes [npm/hosted-git-info#38 (comment)](https://github.com/npm/hosted-git-info/pull/38#issuecomment-520243803)
+
+
+### BREAKING CHANGES
+
+* this drops support for ancient node versions.
+
+
+
+<a name="3.0.2"></a>
+## [3.0.2](https://github.com/npm/npm-pick-manifest/compare/v3.0.1...v3.0.2) (2019-08-30)
+
+
+
+<a name="3.0.1"></a>
+## [3.0.1](https://github.com/npm/npm-pick-manifest/compare/v3.0.0...v3.0.1) (2019-08-28)
+
+
+### Bug Fixes
+
+* throw 403 for forbidden major/minor versions ([003286e](https://github.com/npm/npm-pick-manifest/commit/003286e)), closes [#2](https://github.com/npm/npm-pick-manifest/issues/2)
+
+
+
+<a name="3.0.0"></a>
+# [3.0.0](https://github.com/npm/npm-pick-manifest/compare/v2.2.3...v3.0.0) (2019-08-20)
+
+
+### Features
+
+* throw forbidden error when package is blocked by policy ([ad2a962](https://github.com/npm/npm-pick-manifest/commit/ad2a962)), closes [#1](https://github.com/npm/npm-pick-manifest/issues/1)
+
+
+### BREAKING CHANGES
+
+* This adds a new error code when package versions are
+blocked.
+
+PR-URL: https://github.com/npm/npm-pick-manifest/pull/1
+Credit: @claudiahdz
+
+
+
+<a name="2.2.3"></a>
+## [2.2.3](https://github.com/npm/npm-pick-manifest/compare/v2.2.2...v2.2.3) (2018-10-31)
+
+
+### Bug Fixes
+
+* **enjoyBy:** rework semantics for enjoyBy again ([5e89b62](https://github.com/npm/npm-pick-manifest/commit/5e89b62))
+
+
+
+<a name="2.2.2"></a>
+## [2.2.2](https://github.com/npm/npm-pick-manifest/compare/v2.2.1...v2.2.2) (2018-10-31)
+
+
+### Bug Fixes
+
+* **enjoyBy:** rework semantics for enjoyBy ([5684f45](https://github.com/npm/npm-pick-manifest/commit/5684f45))
+
+
+
+<a name="2.2.1"></a>
+## [2.2.1](https://github.com/npm/npm-pick-manifest/compare/v2.2.0...v2.2.1) (2018-10-30)
+
+
+
+<a name="2.2.0"></a>
+# [2.2.0](https://github.com/npm/npm-pick-manifest/compare/v2.1.0...v2.2.0) (2018-10-30)
+
+
+### Bug Fixes
+
+* **audit:** npm audit fix --force ([d5ae6c4](https://github.com/npm/npm-pick-manifest/commit/d5ae6c4))
+
+
+### Features
+
+* **enjoyBy:** add opts.enjoyBy option to filter versions by date ([0b8a790](https://github.com/npm/npm-pick-manifest/commit/0b8a790))
+
+
+
+<a name="2.1.0"></a>
+# [2.1.0](https://github.com/npm/npm-pick-manifest/compare/v2.0.1...v2.1.0) (2017-10-18)
+
+
+### Features
+
+* **selection:** allow manually disabling deprecation skipping ([0d239d3](https://github.com/npm/npm-pick-manifest/commit/0d239d3))
+
+
+
+<a name="2.0.1"></a>
+## [2.0.1](https://github.com/npm/npm-pick-manifest/compare/v2.0.0...v2.0.1) (2017-10-18)
+
+
+
+<a name="2.0.0"></a>
+# [2.0.0](https://github.com/npm/npm-pick-manifest/compare/v1.0.4...v2.0.0) (2017-10-03)
+
+
+### Bug Fixes
+
+* **license:** relicense project according to npm policy (#3) ([ed743a0](https://github.com/npm/npm-pick-manifest/commit/ed743a0))
+
+
+### Features
+
+* **selection:** Avoid matching deprecated packages if possible ([3fc6c3a](https://github.com/npm/npm-pick-manifest/commit/3fc6c3a))
+
+
+### BREAKING CHANGES
+
+* **selection:** deprecated versions may be skipped now
+* **license:** This moves the license from CC0 to ISC and properly documents the copyright as belonging to npm, Inc.
+
+
+
+<a name="1.0.4"></a>
+## [1.0.4](https://github.com/npm/npm-pick-manifest/compare/v1.0.3...v1.0.4) (2017-06-29)
+
+
+### Bug Fixes
+
+* **npa:** bump npa version for bugfixes ([7cdaca7](https://github.com/npm/npm-pick-manifest/commit/7cdaca7))
+* **semver:** use loose semver parsing for *all* ops ([bbc0daa](https://github.com/npm/npm-pick-manifest/commit/bbc0daa))
+
+
+
+<a name="1.0.3"></a>
+## [1.0.3](https://github.com/npm/npm-pick-manifest/compare/v1.0.2...v1.0.3) (2017-05-04)
+
+
+### Bug Fixes
+
+* **semver:** use semver.clean() instead ([f4133b5](https://github.com/npm/npm-pick-manifest/commit/f4133b5))
+
+
+
+<a name="1.0.2"></a>
+## [1.0.2](https://github.com/npm/npm-pick-manifest/compare/v1.0.1...v1.0.2) (2017-05-04)
+
+
+### Bug Fixes
+
+* **picker:** spaces in `wanted` prevented match ([97a7d0a](https://github.com/npm/npm-pick-manifest/commit/97a7d0a))
+
+
+
+<a name="1.0.1"></a>
+## [1.0.1](https://github.com/npm/npm-pick-manifest/compare/v1.0.0...v1.0.1) (2017-04-24)
+
+
+### Bug Fixes
+
+* **deps:** forgot to add semver ([1876f4f](https://github.com/npm/npm-pick-manifest/commit/1876f4f))
+
+
+
+<a name="1.0.0"></a>
+# 1.0.0 (2017-04-24)
+
+
+### Features
+
+* **api:** initial implementation. ([b086912](https://github.com/npm/npm-pick-manifest/commit/b086912))
+
+
+### BREAKING CHANGES
+
+* **api:** ex nihilo
diff --git a/node_modules/figgy-pudding/LICENSE.md b/node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/LICENSE.md
index 8d28acf86..8d28acf86 100644
--- a/node_modules/figgy-pudding/LICENSE.md
+++ b/node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/LICENSE.md
diff --git a/node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/README.md b/node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/README.md
new file mode 100644
index 000000000..26ee43e05
--- /dev/null
+++ b/node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/README.md
@@ -0,0 +1,157 @@
+# npm-pick-manifest [![npm version](https://img.shields.io/npm/v/npm-pick-manifest.svg)](https://npm.im/npm-pick-manifest) [![license](https://img.shields.io/npm/l/npm-pick-manifest.svg)](https://npm.im/npm-pick-manifest) [![Travis](https://img.shields.io/travis/npm/npm-pick-manifest.svg)](https://travis-ci.org/npm/npm-pick-manifest) [![Coverage Status](https://coveralls.io/repos/github/npm/npm-pick-manifest/badge.svg?branch=latest)](https://coveralls.io/github/npm/npm-pick-manifest?branch=latest)
+
+[`npm-pick-manifest`](https://github.com/npm/npm-pick-manifest) is a standalone
+implementation of [npm](https://npmjs.com)'s semver range resolution algorithm.
+
+## Install
+
+`$ npm install --save npm-pick-manifest`
+
+## Table of Contents
+
+* [Example](#example)
+* [Features](#features)
+* [API](#api)
+ * [`pickManifest()`](#pick-manifest)
+
+### Example
+
+```javascript
+const pickManifest = require('npm-pick-manifest')
+
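+// assumes a fetch implementation is available in scope (e.g. node-fetch)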
+fetch('https://registry.npmjs.org/npm-pick-manifest').then(res => {
+ return res.json()
+}).then(packument => {
+ return pickManifest(packument, '^1.0.0')
+}) // get same manifest as npm would get if you `npm i npm-pick-manifest@^1.0.0`
+```
+
+### Features
+
+* Uses npm's exact [semver resolution algorithm](http://npm.im/semver).
+* Supports ranges, tags, and versions.
+* Prefers non-deprecated versions to deprecated versions.
+* Prefers versions whose `engines` requirements are satisfied over those
+ that will raise a warning or error at install time.
+
+### API
+
+#### <a name="pick-manifest"></a> `> pickManifest(packument, selector, [opts]) -> manifest`
+
+Returns the manifest that best matches `selector`, or throws an error.
+
+Packuments are anything returned by metadata URLs from the npm registry. That
+is, they're objects with the following shape (only fields used by
+`npm-pick-manifest` included):
+
+```javascript
+{
+ name: 'some-package',
+ 'dist-tags': {
+ foo: '1.0.1'
+ },
+ versions: {
+ '1.0.0': { version: '1.0.0' },
+ '1.0.1': { version: '1.0.1' },
+ '1.0.2': { version: '1.0.2' },
+ '2.0.0': { version: '2.0.0' }
+ }
+}
+```
+
+The function follows npm's own semver resolution algorithm; only
+`tag`, `range`, and `version` selectors are supported.
+
+The function will throw `ETARGET` if there was no matching manifest, and
+`ENOVERSIONS` if the packument object has no valid versions in `versions`.
+If the only matching manifest is included in a `policyRestrictions` section
+of the packument, then an `E403` is raised.
+
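+Callers can branch on the error `code`; a minimal sketch (assuming the
+`packument` object shown above):
+
+```javascript
+try {
+  pickManifest(packument, '^9.0.0')
+} catch (er) {
+  if (er.code === 'ETARGET') {
+    // no version matched the requested range
+  } else if (er.code === 'E403') {
+    // the only match was blocked by policyRestrictions
+  }
+}
+```
+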
+#### <a name="pick-manifest-options"></a> Options
+
+All options are optional.
+
+* `includeStaged` - Boolean, default `false`. Include manifests in the
+ `stagedVersions.versions` set, to support installing [staged
+ packages](https://github.com/npm/rfcs/pull/92) when appropriate. Note
+ that staged packages are always treated as lower priority than actual
+ publishes, even when `includeStaged` is set.
+* `defaultTag` - String, default `'latest'`. The default `dist-tag` to
+ install when no specifier is provided. Note that the version indicated
+ by this specifier will be given top priority if it matches a supplied
+ semver range.
+* `before` - String, Date, or Number, default `null`. This is passed to
+ `new Date()`, so anything that works there will be valid. Do not
+ consider _any_ manifests that were published after the date indicated.
+ Note that this is only relevant when the packument includes a `time`
+ field listing the publish date of all the packages.
+* `nodeVersion` - String, default `process.version`. The Node.js version
+ to use when checking manifests for `engines` requirement satisfaction.
+* `npmVersion` - String, default `null`. The npm version to use when
+ checking manifest for `engines` requirement satisfaction. (If `null`,
+ then this particular check is skipped.)
+* `avoid` - String, default `null`. A SemVer range of
+ versions that should be avoided. An avoided version MAY be selected if
+ there is no other option, so when using this for version selection ensure
+ that you check the result against the range to see if there was no
+ alternative available.
+* `avoidStrict` - Boolean, default `false`. If set to true, then
+ `pickManifest` will never return a version in the `avoid` range. If the
+ only available version in the `wanted` range is a version that should be
+ avoided, then it will return a version _outside_ the `wanted` range,
+ preferring to do so without making a SemVer-major jump, if possible. If
+ there are no versions outside the `avoid` range, then throw an
+ `ETARGET` error. It does this by calling pickManifest first with the
+ `wanted` range, then with a `^` affixed to the version returned by the
+ `wanted` range, and then with a `*` version range, and throwing if
+ nothing could be found to satisfy the avoidance request.
+
+Return value is the manifest as it exists in the packument, possibly
+decorated with the following boolean flags:
+
+* `_shouldAvoid` The version is in the `avoid` range. Watch out!
+* `_outsideDependencyRange` The version is outside the `wanted` range,
+ because `avoidStrict: true` was set.
+* `_isSemVerMajor` The `_outsideDependencyRange` result is a SemVer-major
+ step up from the version returned by the `wanted` range.
+
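+For example, given the sample packument above (where every `1.x` version
+is assumed vulnerable), a minimal sketch of version avoidance:
+
+```javascript
+const pickManifest = require('npm-pick-manifest')
+
+// every version satisfying ^1.0.0 is inside the avoid range, so the
+// best match is returned, decorated with _shouldAvoid
+const mani = pickManifest(packument, '^1.0.0', { avoid: '<=1.0.2' })
+// mani._shouldAvoid === true
+
+// with avoidStrict, the picker steps outside the wanted range instead
+const safe = pickManifest(packument, '^1.0.0', {
+  avoid: '<=1.0.2',
+  avoidStrict: true
+})
+// safe.version === '2.0.0'
+// safe._outsideDependencyRange === true, safe._isSemVerMajor === true
+```
+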
+### Algorithm
+
+1. Create list of all versions in `versions`,
+ `policyRestrictions.versions`, and (if `includeStaged` is set)
+ `stagedVersions.versions`.
+2. If a `dist-tag` is requested,
+ 1. If the manifest is not after the specified `before` date, then
+ select that from the set.
+ 2. If the manifest is after the specified `before` date, then re-start
+ the selection looking for the highest SemVer version that is equal to
+ or less than the `dist-tag` target.
+3. If a specific version is requested,
+ 1. If the manifest is not after the specified `before` date, then
+ select the specified manifest.
+ 2. If the manifest is after the specified `before` date, then raise
+ `ETARGET` error. (NB: this is a breaking change from v5, where a
+ specified version would override the `before` setting.)
+4. (At this point we know a range is requested.)
+5. If the `defaultTag` refers to a `dist-tag` that satisfies the range (or
+ if the range is `'*'` or `''`), and the manifest is published before the
+ `before` setting, then select that manifest.
+6. If nothing is yet selected, sort by the following heuristics in order,
+ and select the top item:
+ 1. Prioritize versions that are not in the `avoid` range over those
+ that are.
+ 2. Prioritize versions that are not in `policyRestrictions` over those
+ that are.
+ 3. Prioritize published versions over staged versions.
+ 4. Prioritize versions that are not deprecated, and which have a
+ satisfied engines requirement, over those that are either deprecated
+ or have an engines mismatch.
+ 5. Prioritize versions that have a satisfied engines requirement over
+ those that do not.
+ 6. Prioritize versions that are not deprecated (but have a
+ mismatched engines requirement) over those that are deprecated.
+ 7. Prioritize higher SemVer precedence over lower SemVer precedence.
+7. If no manifest was selected, raise an `ETARGET` error.
+8. If the selected item is in the `policyRestrictions.versions` list, raise
+ an `E403` error.
+9. Return the selected manifest.
diff --git a/node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/index.js b/node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/index.js
new file mode 100644
index 000000000..2b3ea6ffa
--- /dev/null
+++ b/node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/index.js
@@ -0,0 +1,216 @@
+'use strict'
+
+const npa = require('npm-package-arg')
+const semver = require('semver')
+const { checkEngine } = require('npm-install-checks')
+
+const engineOk = (manifest, npmVersion, nodeVersion) => {
+ try {
+ checkEngine(manifest, npmVersion, nodeVersion)
+ return true
+ } catch (_) {
+ return false
+ }
+}
+
+const isBefore = (verTimes, ver, time) =>
+ !verTimes || !verTimes[ver] || Date.parse(verTimes[ver]) <= time
+
+const avoidSemverOpt = { includePrerelease: true, loose: true }
+const shouldAvoid = (ver, avoid) =>
+ avoid && semver.satisfies(ver, avoid, avoidSemverOpt)
+
+const decorateAvoid = (result, avoid) =>
+ result && shouldAvoid(result.version, avoid)
+ ? { ...result, _shouldAvoid: true }
+ : result
+
+const pickManifest = (packument, wanted, opts) => {
+ const {
+ defaultTag = 'latest',
+ before = null,
+ nodeVersion = process.version,
+ npmVersion = null,
+ includeStaged = false,
+ avoid = null,
+ avoidStrict = false
+ } = opts
+
+ const { name, time: verTimes } = packument
+ const versions = packument.versions || {}
+
+ if (avoidStrict) {
+ const looseOpts = {
+ ...opts,
+ avoidStrict: false
+ }
+
+ const result = pickManifest(packument, wanted, looseOpts)
+ if (!result || !result._shouldAvoid) {
+ return result
+ }
+
+ const caret = pickManifest(packument, `^${result.version}`, looseOpts)
+ if (!caret || !caret._shouldAvoid) {
+ return {
+ ...caret,
+ _outsideDependencyRange: true,
+ _isSemVerMajor: false
+ }
+ }
+
+ const star = pickManifest(packument, '*', looseOpts)
+ if (!star || !star._shouldAvoid) {
+ return {
+ ...star,
+ _outsideDependencyRange: true,
+ _isSemVerMajor: true
+ }
+ }
+
+ throw Object.assign(new Error(`No avoidable versions for ${name}`), {
+ code: 'ETARGET',
+ name,
+ wanted,
+ avoid,
+ before,
+ versions: Object.keys(versions)
+ })
+ }
+
+ const staged = (includeStaged && packument.stagedVersions &&
+ packument.stagedVersions.versions) || {}
+ const restricted = (packument.policyRestrictions &&
+ packument.policyRestrictions.versions) || {}
+
+ const time = before && verTimes ? +(new Date(before)) : Infinity
+ const spec = npa.resolve(name, wanted || defaultTag)
+ const type = spec.type
+ const distTags = packument['dist-tags'] || {}
+
+ if (type !== 'tag' && type !== 'version' && type !== 'range') {
+ throw new Error('Only tag, version, and range are supported')
+ }
+
+ // if the type is 'tag', and not just the implicit default, then it must
+ // be that exactly, or nothing else will do.
+ if (wanted && type === 'tag') {
+ const ver = distTags[wanted]
+ // if the version in the dist-tags is before the before date, then
+ // we use that. Otherwise, we get the highest precedence version
+ // prior to the dist-tag.
+ if (isBefore(verTimes, ver, time)) {
+ return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
+ } else {
+ return pickManifest(packument, `<=${ver}`, opts)
+ }
+ }
+
+ // similarly, if a specific version, then only that version will do
+ if (wanted && type === 'version') {
+ const ver = semver.clean(wanted, { loose: true })
+ const mani = versions[ver] || staged[ver] || restricted[ver]
+ return isBefore(verTimes, ver, time) ? decorateAvoid(mani, avoid) : null
+ }
+
+ // ok, sort based on our heuristics, and pick the best fit
+ const range = type === 'range' ? wanted : '*'
+
+ // if the range is *, then we prefer the 'latest' if available
+ // but skip this if it should be avoided, in that case we have
+ // to try a little harder.
+ const defaultVer = distTags[defaultTag]
+ if (defaultVer &&
+ (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
+ !shouldAvoid(defaultVer, avoid)) {
+ const mani = versions[defaultVer]
+ if (mani && isBefore(verTimes, defaultVer, time)) {
+ return mani
+ }
+ }
+
+ // ok, actually have to sort the list and take the winner
+ const allEntries = Object.entries(versions)
+ .concat(Object.entries(staged))
+ .concat(Object.entries(restricted))
+ .filter(([ver, mani]) => isBefore(verTimes, ver, time))
+
+ if (!allEntries.length) {
+ throw Object.assign(new Error(`No versions available for ${name}`), {
+ code: 'ENOVERSIONS',
+ name,
+ type,
+ wanted,
+ before,
+ versions: Object.keys(versions)
+ })
+ }
+
+ const sortSemverOpt = { loose: true }
+ const entries = allEntries.filter(([ver, mani]) =>
+ semver.satisfies(ver, range, { loose: true }))
+ .sort((a, b) => {
+ const [vera, mania] = a
+ const [verb, manib] = b
+ const notavoida = !shouldAvoid(vera, avoid)
+ const notavoidb = !shouldAvoid(verb, avoid)
+ const notrestra = !restricted[vera]
+ const notrestrb = !restricted[verb]
+ const notstagea = !staged[vera]
+ const notstageb = !staged[verb]
+ const notdepra = !mania.deprecated
+ const notdeprb = !manib.deprecated
+ const enginea = engineOk(mania, npmVersion, nodeVersion)
+ const engineb = engineOk(manib, npmVersion, nodeVersion)
+ // sort by:
+ // - not an avoided version
+ // - not restricted
+ // - not staged
+ // - not deprecated and engine ok
+ // - engine ok
+ // - not deprecated
+ // - semver
+ return (notavoidb - notavoida) ||
+ (notrestrb - notrestra) ||
+ (notstageb - notstagea) ||
+ ((notdeprb && engineb) - (notdepra && enginea)) ||
+ (engineb - enginea) ||
+ (notdeprb - notdepra) ||
+ semver.rcompare(vera, verb, sortSemverOpt)
+ })
+
+ return decorateAvoid(entries[0] && entries[0][1], avoid)
+}
+
+module.exports = (packument, wanted, opts = {}) => {
+ const picked = pickManifest(packument, wanted, opts)
+ const policyRestrictions = packument.policyRestrictions
+ const restricted = (policyRestrictions && policyRestrictions.versions) || {}
+
+ if (picked && !restricted[picked.version]) {
+ return picked
+ }
+
+ const { before = null, defaultTag = 'latest' } = opts
+ const bstr = before ? new Date(before).toLocaleString() : ''
+ const { name } = packument
+ const pckg = `${name}@${wanted}` +
+ (before ? ` with a date before ${bstr}` : '')
+
+ const isForbidden = picked && !!restricted[picked.version]
+ const polMsg = isForbidden ? policyRestrictions.message : ''
+
+ const msg = !isForbidden ? `No matching version found for ${pckg}.`
+ : `Could not download ${pckg} due to policy violations:\n${polMsg}`
+
+ const code = isForbidden ? 'E403' : 'ETARGET'
+ throw Object.assign(new Error(msg), {
+ code,
+ type: npa.resolve(packument.name, wanted).type,
+ wanted,
+ versions: Object.keys(packument.versions),
+ name,
+ distTags: packument['dist-tags'],
+ defaultTag
+ })
+}
diff --git a/node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/package.json b/node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/package.json
new file mode 100644
index 000000000..2498a6a60
--- /dev/null
+++ b/node_modules/@npmcli/arborist/node_modules/npm-pick-manifest/package.json
@@ -0,0 +1,76 @@
+{
+ "_from": "npm-pick-manifest@^6.1.0",
+ "_id": "npm-pick-manifest@6.1.0",
+ "_inBundle": false,
+ "_integrity": "sha512-ygs4k6f54ZxJXrzT0x34NybRlLeZ4+6nECAIbr2i0foTnijtS1TJiyzpqtuUAJOps/hO0tNDr8fRV5g+BtRlTw==",
+ "_location": "/@npmcli/arborist/npm-pick-manifest",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "npm-pick-manifest@^6.1.0",
+ "name": "npm-pick-manifest",
+ "escapedName": "npm-pick-manifest",
+ "rawSpec": "^6.1.0",
+ "saveSpec": null,
+ "fetchSpec": "^6.1.0"
+ },
+ "_requiredBy": [
+ "/@npmcli/arborist",
+ "/@npmcli/arborist/pacote"
+ ],
+ "_resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-6.1.0.tgz",
+ "_shasum": "2befed87b0fce956790f62d32afb56d7539c022a",
+ "_spec": "npm-pick-manifest@^6.1.0",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/arborist",
+ "author": {
+ "name": "Kat Marchán",
+ "email": "kzm@sykosomatic.org"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/npm-pick-manifest/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "npm-install-checks": "^4.0.0",
+ "npm-package-arg": "^8.0.0",
+ "semver": "^7.0.0"
+ },
+ "deprecated": false,
+ "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
+ "devDependencies": {
+ "standard": "^14.3.1",
+ "standard-version": "^7.0.1",
+ "tap": "^14.10.2"
+ },
+ "files": [
+ "*.js"
+ ],
+ "homepage": "https://github.com/npm/npm-pick-manifest#readme",
+ "keywords": [
+ "npm",
+ "semver",
+ "package manager"
+ ],
+ "license": "ISC",
+ "main": "index.js",
+ "name": "npm-pick-manifest",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/npm-pick-manifest.git"
+ },
+ "scripts": {
+ "coverage": "tap",
+ "lint": "standard",
+ "postrelease": "npm publish",
+ "posttest": "npm run lint",
+ "prepublishOnly": "git push --follow-tags",
+ "prerelease": "npm t",
+ "release": "standard-version -s",
+ "test": "tap"
+ },
+ "tap": {
+ "check-coverage": true
+ },
+ "version": "6.1.0"
+}
diff --git a/node_modules/@npmcli/arborist/node_modules/rimraf/package.json b/node_modules/@npmcli/arborist/node_modules/rimraf/package.json
index f8529b7de..420890958 100644
--- a/node_modules/@npmcli/arborist/node_modules/rimraf/package.json
+++ b/node_modules/@npmcli/arborist/node_modules/rimraf/package.json
@@ -21,7 +21,7 @@
"_resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
"_shasum": "f1a5402ba6220ad52cc1282bac1ae3aa49fd061a",
"_spec": "rimraf@^3.0.0",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/@npmcli/arborist/node_modules/bin-links",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/arborist/node_modules/bin-links",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
diff --git a/node_modules/@npmcli/arborist/package.json b/node_modules/@npmcli/arborist/package.json
index 5b77ec793..98465c93e 100644
--- a/node_modules/@npmcli/arborist/package.json
+++ b/node_modules/@npmcli/arborist/package.json
@@ -1,14 +1,15 @@
{
"_from": "@npmcli/arborist@latest",
- "_id": "@npmcli/arborist@0.0.0-pre.13",
+ "_id": "@npmcli/arborist@0.0.0-pre.17",
"_inBundle": false,
- "_integrity": "sha512-sJjOIIP718HqWJuTU4VPIxj5E2RV88C0SmQF3DwtSuYggcuocFTdo/quDdhZGeRzFxxpqPl7+XvZsBfWC1Qzyg==",
+ "_integrity": "sha512-kg0Qp+vzpyuQl5AeKaSEI6GVlQTlrcr9YMMvifDjJLGNALyo53/7T+myVHlr6eYb59YFo5nm8ePb+VOdadJBkg==",
"_location": "/@npmcli/arborist",
"_phantomChildren": {
"glob": "7.1.4",
"graceful-fs": "4.2.3",
"mkdirp-infer-owner": "1.0.2",
"npm-normalize-package-bin": "1.0.1",
+ "npm-package-arg": "8.0.0",
"semver": "7.1.3",
"write-file-atomic": "2.4.3"
},
@@ -27,8 +28,8 @@
"#USER",
"/"
],
- "_resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-0.0.0-pre.13.tgz",
- "_shasum": "5e3d1a5811ab781a02d482b5c18c1e4909e9ea9a",
+ "_resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-0.0.0-pre.17.tgz",
+ "_shasum": "a12593231f6d65ccfd1a0eaa6e76d7fc24e86ebe",
"_spec": "@npmcli/arborist@latest",
"_where": "/Users/isaacs/dev/npm/cli",
"author": {
@@ -42,13 +43,16 @@
"bundleDependencies": false,
"dependencies": {
"@npmcli/installed-package-contents": "^1.0.5",
- "@npmcli/run-script": "^1.2.1",
+ "@npmcli/map-workspaces": "0.0.0-pre.1",
+ "@npmcli/name-from-folder": "^1.0.1",
+ "@npmcli/run-script": "^1.3.1",
"bin-links": "^2.1.2",
"json-stringify-nice": "^1.1.1",
"mkdirp-infer-owner": "^1.0.2",
"npm-install-checks": "^4.0.0",
"npm-package-arg": "^8.0.0",
- "pacote": "^11.1.0",
+ "npm-pick-manifest": "^6.1.0",
+ "pacote": "^11.1.6",
"parse-conflict-json": "^1.0.0",
"promise-all-reject-late": "^1.0.0",
"promise-call-limit": "^1.0.1",
@@ -63,7 +67,7 @@
"minify-registry-metadata": "^2.1.0",
"mutate-fs": "^2.1.1",
"require-inject": "^1.4.4",
- "tap": "^14.10.6",
+ "tap": "^14.10.7",
"tcompare": "^3.0.4"
},
"files": [
@@ -91,5 +95,5 @@
"esm": false,
"timeout": "60"
},
- "version": "0.0.0-pre.13"
+ "version": "0.0.0-pre.17"
}
diff --git a/node_modules/@npmcli/git/node_modules/mkdirp/lib/opts-arg.js b/node_modules/@npmcli/git/node_modules/mkdirp/lib/opts-arg.js
index 488bd44c3..2fa4833fa 100644
--- a/node_modules/@npmcli/git/node_modules/mkdirp/lib/opts-arg.js
+++ b/node_modules/@npmcli/git/node_modules/mkdirp/lib/opts-arg.js
@@ -2,9 +2,9 @@ const { promisify } = require('util')
const fs = require('fs')
const optsArg = opts => {
if (!opts)
- opts = { mode: 0o777 & (~process.umask()), fs }
+ opts = { mode: 0o777, fs }
else if (typeof opts === 'object')
- opts = { mode: 0o777 & (~process.umask()), fs, ...opts }
+ opts = { mode: 0o777, fs, ...opts }
else if (typeof opts === 'number')
opts = { mode: opts, fs }
else if (typeof opts === 'string')
diff --git a/node_modules/@npmcli/git/node_modules/mkdirp/package.json b/node_modules/@npmcli/git/node_modules/mkdirp/package.json
index ff085e1bb..9341da149 100644
--- a/node_modules/@npmcli/git/node_modules/mkdirp/package.json
+++ b/node_modules/@npmcli/git/node_modules/mkdirp/package.json
@@ -1,8 +1,8 @@
{
"_from": "mkdirp@^1.0.3",
- "_id": "mkdirp@1.0.3",
+ "_id": "mkdirp@1.0.4",
"_inBundle": false,
- "_integrity": "sha512-6uCP4Qc0sWsgMLy1EOqqS/3rjDHOEnsStVr/4vtAIK2Y5i2kA7lFFejYrpIyiN9w0pYf4ckeCYT9f1r1P9KX5g==",
+ "_integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
"_location": "/@npmcli/git/mkdirp",
"_phantomChildren": {},
"_requested": {
@@ -18,8 +18,8 @@
"_requiredBy": [
"/@npmcli/git"
],
- "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.3.tgz",
- "_shasum": "4cf2e30ad45959dddea53ad97d518b6c8205e1ea",
+ "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
+ "_shasum": "3eb5ed62622756d79a5f0e2a221dfebad75c2f7e",
"_spec": "mkdirp@^1.0.3",
"_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/git",
"bin": {
@@ -33,7 +33,7 @@
"description": "Recursively mkdir, like `mkdir -p`",
"devDependencies": {
"require-inject": "^1.4.4",
- "tap": "^14.10.6"
+ "tap": "^14.10.7"
},
"engines": {
"node": ">=10"
@@ -71,5 +71,5 @@
"check-coverage": true,
"coverage-map": "map.js"
},
- "version": "1.0.3"
+ "version": "1.0.4"
}
diff --git a/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/git/package.json
index fad742a60..42f4995e9 100644
--- a/node_modules/@npmcli/git/package.json
+++ b/node_modules/@npmcli/git/package.json
@@ -1,28 +1,31 @@
{
- "_from": "@npmcli/git@^2.0.1",
- "_id": "@npmcli/git@2.0.1",
+ "_from": "@npmcli/git@latest",
+ "_id": "@npmcli/git@2.0.2",
"_inBundle": false,
- "_integrity": "sha512-hVatexiBtx71F01Ars38Hr5AFUGmJgHAfQtRlO5fJlnAawRGSXwEFgjB5i3XdUUmElZU/RXy7fefN02dZKxgPw==",
+ "_integrity": "sha512-uv9+EuP5YWluNPgkEOL+iyB/+MVt4U5PMBCfl+I8korKluFdiSp7RxjXYzpWM/wU4wXaROAUFiOiCMmBftonjw==",
"_location": "/@npmcli/git",
"_phantomChildren": {},
"_requested": {
- "type": "range",
+ "type": "tag",
"registry": true,
- "raw": "@npmcli/git@^2.0.1",
+ "raw": "@npmcli/git@latest",
"name": "@npmcli/git",
"escapedName": "@npmcli%2fgit",
"scope": "@npmcli",
- "rawSpec": "^2.0.1",
+ "rawSpec": "latest",
"saveSpec": null,
- "fetchSpec": "^2.0.1"
+ "fetchSpec": "latest"
},
"_requiredBy": [
- "/libnpmversion"
+ "#USER",
+ "/",
+ "/libnpmversion",
+ "/pacote"
],
- "_resolved": "https://registry.npmjs.org/@npmcli/git/-/git-2.0.1.tgz",
- "_shasum": "d7ecaa9c945de6bb1af5a7e6ea634771193c168b",
- "_spec": "@npmcli/git@^2.0.1",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/libnpmversion",
+ "_resolved": "https://registry.npmjs.org/@npmcli/git/-/git-2.0.2.tgz",
+ "_shasum": "41d75caf59e4799c62b354a3e4eed3b0e64310c0",
+ "_spec": "@npmcli/git@latest",
+ "_where": "/Users/isaacs/dev/npm/cli",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
@@ -34,16 +37,19 @@
"bundleDependencies": false,
"dependencies": {
"@npmcli/promise-spawn": "^1.1.0",
+ "lru-cache": "^5.1.1",
"mkdirp": "^1.0.3",
"npm-pick-manifest": "^6.0.0",
"promise-inflight": "^1.0.1",
"promise-retry": "^1.1.1",
+ "semver": "^7.3.2",
"unique-filename": "^1.1.1",
"which": "^2.0.2"
},
"deprecated": false,
"description": "a util for spawning git from npm CLI contexts",
"devDependencies": {
+ "slash": "^3.0.0",
"tap": "^14.10.6"
},
"files": [
@@ -68,5 +74,5 @@
"check-coverage": true,
"coverage-map": "map.js"
},
- "version": "2.0.1"
+ "version": "2.0.2"
}
diff --git a/node_modules/@npmcli/map-workspaces/CHANGELOG.md b/node_modules/@npmcli/map-workspaces/CHANGELOG.md
new file mode 100644
index 000000000..b890b58e1
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/CHANGELOG.md
@@ -0,0 +1,6 @@
+# Changelog
+
+## 0.0.0-pre.0
+
+- Initial pre-release.
+
diff --git a/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/LICENSE b/node_modules/@npmcli/map-workspaces/LICENSE
index 19cec97b1..dedcd7d2f 100644
--- a/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/LICENSE
+++ b/node_modules/@npmcli/map-workspaces/LICENSE
@@ -1,6 +1,6 @@
The ISC License
-Copyright (c) npm, Inc.
+Copyright (c) npm Inc.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
diff --git a/node_modules/@npmcli/map-workspaces/README.md b/node_modules/@npmcli/map-workspaces/README.md
new file mode 100644
index 000000000..f8e114c2a
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/README.md
@@ -0,0 +1,83 @@
+# @npmcli/map-workspaces
+
+[![NPM version](https://img.shields.io/npm/v/@npmcli/map-workspaces)](https://www.npmjs.com/package/@npmcli/map-workspaces)
+[![Build Status](https://img.shields.io/github/workflow/status/npm/map-workspaces/node-ci)](https://github.com/npm/map-workspaces)
+[![License](https://img.shields.io/github/license/npm/map-workspaces)](https://github.com/npm/map-workspaces/blob/master/LICENSE)
+
+Retrieves a name:pathname Map for a given workspaces config.
+
+Long version: Reads the `workspaces` property from a valid **workspaces configuration** object, traverses the paths and globs defined there to find valid nested packages, and returns a **Map** of all found packages, in which keys are package names and values are folder locations.
+
+## Install
+
+`npm install @npmcli/map-workspaces`
+
+## Usage:
+
+```js
+const mapWorkspaces = require('@npmcli/map-workspaces')
+await mapWorkspaces({
+  cwd,
+  pkg: {
+    workspaces: {
+      packages: [
+        "a",
+        "b"
+      ]
+    }
+  }
+})
+// ->
+// Map {
+// 'a': '<cwd>/a'
+// 'b': '<cwd>/b'
+// }
+```
+
+## Examples:
+
+### Glob usage:
+
+Given a folder structure such as:
+
+```
+├── package.json
+└── apps
+ ├── a
+ │ └── package.json
+ ├── b
+ │ └── package.json
+ └── c
+ └── package.json
+```
+
+```js
+const mapWorkspaces = require('@npmcli/map-workspaces')
+await mapWorkspaces({
+  cwd,
+  pkg: {
+    workspaces: [
+      "apps/*"
+    ]
+  }
+})
+// ->
+// Map {
+// 'a': '<cwd>/apps/a'
+// 'b': '<cwd>/apps/b'
+// 'c': '<cwd>/apps/c'
+// }
+```
+
+## API:
+
+### `mapWorkspaces(opts = {}) -> Promise<Map>`
+
+- `opts`:
+  - `pkg`: A valid `package.json` **Object**
+  - `cwd`: A **String** defining the base directory to use when reading globs and paths.
+  - `ignore`: An **Array** of paths to be ignored when using [globs](https://www.npmjs.com/package/glob) to look for nested packages.
+  - ...[Also supports all other glob options](https://www.npmjs.com/package/glob#options)
+
+#### Returns
+
+A **Map** in which keys are **package names** and values are the **pathnames** for each found **workspace**.
+
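+For instance, nested test or fixture folders can be excluded with the
+`ignore` option (hypothetical folder names, shown as a sketch):
+
+```js
+const mapWorkspaces = require('@npmcli/map-workspaces')
+await mapWorkspaces({
+  cwd,
+  pkg: { workspaces: ["packages/*"] },
+  ignore: ["**/test/**", "**/fixtures/**"]
+})
+// -> Map of workspace names to pathnames, skipping anything under
+//    test/ or fixtures/ directories
+```
+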
+## LICENSE
+
+[ISC](./LICENSE)
+
diff --git a/node_modules/@npmcli/map-workspaces/index.js b/node_modules/@npmcli/map-workspaces/index.js
new file mode 100644
index 000000000..0a789d244
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/index.js
@@ -0,0 +1,190 @@
+const { promisify } = require('util')
+const path = require('path')
+
+const getName = require('@npmcli/name-from-folder')
+const minimatch = require('minimatch')
+const rpj = require('read-package-json-fast')
+const glob = require('glob')
+const pGlob = promisify(glob)
+
+function appendNegatedPatterns (patterns) {
+ const results = []
+ for (let pattern of patterns) {
+ const excl = pattern.match(/^!+/)
+ if (excl) {
+ pattern = pattern.substr(excl[0].length)
+ }
+
+ // strip off any / from the start of the pattern. /foo => foo
+ pattern = pattern.replace(/^\/+/, '')
+
+ // an odd number of ! means a negated pattern. !!foo ==> foo
+ const negate = excl && excl[0].length % 2 === 1
+ results.push({ pattern, negate })
+ }
+
+ return results
+}
+
+function getPatterns (workspaces) {
+ const workspacesDeclaration =
+ Array.isArray(workspaces.packages)
+ ? workspaces.packages
+ : workspaces
+
+ if (!Array.isArray(workspacesDeclaration)) {
+ throw getError({
+ message: 'workspaces config expects an Array',
+ code: 'EWORKSPACESCONFIG'
+ })
+ }
+
+ return [
+ ...appendNegatedPatterns(workspacesDeclaration),
+ { pattern: '**/node_modules/**', negate: true }
+ ]
+}
+
+// the implicit node_modules exclusion is always appended by getPatterns,
+// so fewer than two entries means no user-defined patterns were provided
+function isEmpty (patterns) {
+  return patterns.length < 2
+}
+
+function getPackageName (pkg, pathname) {
+ const { name } = pkg
+ return name || getName(pathname)
+}
+
+function pkgPathname (opts) {
+  return (...args) => {
+    const cwd = opts.cwd ? opts.cwd : process.cwd()
+    return path.join(cwd, ...args)
+  }
+}
+
+// make sure glob pattern only matches folders
+function getGlobPattern (pattern) {
+ return pattern.endsWith('/')
+ ? pattern
+ : `${pattern}/`
+}
+
+function getError ({ Type = TypeError, message, code }) {
+ return Object.assign(new Type(message), { code })
+}
+
+// flip the pathname-keyed Map into the name-keyed Map that callers receive
+function reverseResultMap (map) {
+  return new Map(Array.from(map, item => item.reverse()))
+}
+
+async function mapWorkspaces (opts = {}) {
+ if (!opts || !opts.pkg) {
+ throw getError({
+ message: 'mapWorkspaces missing pkg info',
+ code: 'EMAPWORKSPACESPKG'
+ })
+ }
+
+ const { workspaces = [] } = opts.pkg
+ const patterns = getPatterns(workspaces)
+ const results = new Map()
+ const seen = new Set()
+
+ if (isEmpty(patterns)) {
+ return results
+ }
+
+ const getGlobOpts = () => ({
+ ...opts,
+ ignore: [
+ ...opts.ignore || [],
+ ...['**/node_modules/**']
+ ]
+ })
+
+  const getPackagePathname = pkgPathname(opts)
+
+ for (const item of patterns) {
+ const matches = await pGlob(getGlobPattern(item.pattern), getGlobOpts())
+
+ for (const match of matches) {
+ let pkg
+ const packageJsonPathname = getPackagePathname(match, 'package.json')
+ const packagePathname = path.dirname(packageJsonPathname)
+
+ try {
+ pkg = await rpj(packageJsonPathname)
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ continue
+ } else {
+ throw err
+ }
+ }
+
+ const name = getPackageName(pkg, packagePathname)
+
+ if (item.negate) {
+        results.delete(packagePathname)
+ } else {
+ if (seen.has(name)) {
+ throw getError({
+ Type: Error,
+ message: 'must not have multiple workspaces with the same name',
+ code: 'EDUPLICATEWORKSPACE'
+ })
+ }
+
+ seen.add(name)
+ results.set(packagePathname, name)
+ }
+ }
+ }
+
+ return reverseResultMap(results)
+}
+
+mapWorkspaces.virtual = function (opts = {}) {
+ if (!opts || !opts.lockfile) {
+ throw getError({
+ message: 'mapWorkspaces.virtual missing lockfile info',
+ code: 'EMAPWORKSPACESLOCKFILE'
+ })
+ }
+
+ const { packages = {} } = opts.lockfile
+ const { workspaces = [] } = packages[''] || {}
+ const patterns = getPatterns(workspaces)
+
+ // uses a pathname-keyed map in order to negate the exact items
+ const results = new Map()
+
+ if (isEmpty(patterns)) {
+ return results
+ }
+
+  const getPackagePathname = pkgPathname(opts)
+
+ for (const packageKey of Object.keys(packages)) {
+ if (packageKey === '') {
+ continue
+ }
+
+ for (const item of patterns) {
+ if (minimatch(packageKey, item.pattern)) {
+ const packagePathname = getPackagePathname(packageKey)
+ const name = getPackageName(packages[packageKey], packagePathname)
+
+ if (item.negate) {
+ results.delete(packagePathname)
+ } else {
+ results.set(packagePathname, name)
+ }
+ }
+ }
+ }
+
+  // invert the pathname-keyed map into a proper name-to-pathname Map
+ return reverseResultMap(results)
+}
+
+module.exports = mapWorkspaces
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/LICENSE b/node_modules/@npmcli/map-workspaces/node_modules/glob/LICENSE
new file mode 100644
index 000000000..42ca266df
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/LICENSE
@@ -0,0 +1,21 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+## Glob Logo
+
+Glob's logo created by Tanya Brassie <http://tanyabrassie.com/>, licensed
+under a Creative Commons Attribution-ShareAlike 4.0 International License
+https://creativecommons.org/licenses/by-sa/4.0/
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/README.md b/node_modules/@npmcli/map-workspaces/node_modules/glob/README.md
new file mode 100644
index 000000000..0916a4825
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/README.md
@@ -0,0 +1,375 @@
+# Glob
+
+Match files using the patterns the shell uses, like stars and stuff.
+
+[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Build Status](https://ci.appveyor.com/api/projects/status/kd7f3yftf7unxlsx?svg=true)](https://ci.appveyor.com/project/isaacs/node-glob) [![Coverage Status](https://coveralls.io/repos/isaacs/node-glob/badge.svg?branch=master&service=github)](https://coveralls.io/github/isaacs/node-glob?branch=master)
+
+This is a glob implementation in JavaScript. It uses the `minimatch`
+library to do its matching.
+
+![](logo/glob.png)
+
+## Usage
+
+Install with npm
+
+```
+npm i glob
+```
+
+```javascript
+var glob = require("glob")
+
+// options is optional
+glob("**/*.js", options, function (er, files) {
+ // files is an array of filenames.
+ // If the `nonull` option is set, and nothing
+ // was found, then files is ["**/*.js"]
+ // er is an error object or null.
+})
+```
+
+## Glob Primer
+
+"Globs" are the patterns you type when you do stuff like `ls *.js` on
+the command line, or put `build/*` in a `.gitignore` file.
+
+Before parsing the path part patterns, braced sections are expanded
+into a set. Braced sections start with `{` and end with `}`, with any
+number of comma-delimited sections within. Braced sections may contain
+slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`.
+
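+A sketch of that expansion, using the `minimatch` dependency directly (`braceExpand` is minimatch's own export, not part of glob's API):
+
+```javascript
+var minimatch = require("minimatch")
+
+// braced sections expand into a set before any path matching happens
+minimatch.braceExpand("a{/b/c,bcd}")
+// => [ 'a/b/c', 'abcd' ]
+```
+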
+The following characters have special magic meaning when used in a
+path portion:
+
+* `*` Matches 0 or more characters in a single path portion
+* `?` Matches 1 character
+* `[...]` Matches a range of characters, similar to a RegExp range.
+ If the first character of the range is `!` or `^` then it matches
+ any character not in the range.
+* `!(pattern|pattern|pattern)` Matches anything that does not match
+ any of the patterns provided.
+* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the
+ patterns provided.
+* `+(pattern|pattern|pattern)` Matches one or more occurrences of the
+ patterns provided.
+* `*(a|b|c)` Matches zero or more occurrences of the patterns provided
+* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns
+ provided
+* `**` If a "globstar" is alone in a path portion, then it matches
+ zero or more directories and subdirectories searching for matches.
+ It does not crawl symlinked directories.
+
+### Dots
+
+If a file or directory path portion has a `.` as the first character,
+then it will not match any glob pattern unless that pattern's
+corresponding path part also has a `.` as its first character.
+
+For example, the pattern `a/.*/c` would match the file at `a/.b/c`.
+However the pattern `a/*/c` would not, because `*` does not start with
+a dot character.
+
+You can make glob treat dots as normal characters by setting
+`dot:true` in the options.
+
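+A sketch, assuming a file exists at `a/.b/c`:
+
+```javascript
+var glob = require("glob")
+
+glob.sync("a/*/c")                // [] -- `*` never matches the leading dot
+glob.sync("a/*/c", { dot: true }) // [ 'a/.b/c' ]
+```
+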
+### Basename Matching
+
+If you set `matchBase:true` in the options, and the pattern has no
+slashes in it, then it will seek for any file anywhere in the tree
+with a matching basename. For example, `*.js` would match
+`test/simple/basic.js`.
+
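+A sketch of that behavior:
+
+```javascript
+var glob = require("glob")
+
+// with matchBase, a slash-free pattern matches basenames anywhere in the tree
+glob("*.js", { matchBase: true }, function (er, files) {
+  // e.g. [ 'test/simple/basic.js' ]
+})
+```
+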
+### Empty Sets
+
+If no matching files are found, then an empty array is returned. This
+differs from the shell, where the pattern itself is returned. For
+example:
+
+ $ echo a*s*d*f
+ a*s*d*f
+
+To get the bash-style behavior, set `nonull:true` in the options.
+
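+A sketch of the glob-side equivalent:
+
+```javascript
+var glob = require("glob")
+
+glob.sync("a*s*d*f", { nonull: true })
+// => [ 'a*s*d*f' ] when nothing matches
+```
+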
+### See Also:
+
+* `man sh`
+* `man bash` (Search for "Pattern Matching")
+* `man 3 fnmatch`
+* `man 5 gitignore`
+* [minimatch documentation](https://github.com/isaacs/minimatch)
+
+## glob.hasMagic(pattern, [options])
+
+Returns `true` if there are any special characters in the pattern, and
+`false` otherwise.
+
+Note that the options affect the results. If `noext:true` is set in
+the options object, then `+(a|b)` will not be considered a magic
+pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}`
+then that is considered magical, unless `nobrace:true` is set in the
+options.
+
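+A sketch of those cases, following the behavior documented above:
+
+```javascript
+var glob = require("glob")
+
+glob.hasMagic("a/b/c")                          // false: plain path
+glob.hasMagic("+(a|b)")                         // true: extglob
+glob.hasMagic("+(a|b)", { noext: true })        // false: extglob disabled
+glob.hasMagic("a/{b/c,x/y}")                    // true: brace expansion
+glob.hasMagic("a/{b/c,x/y}", { nobrace: true }) // false
+```
+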
+## glob(pattern, [options], cb)
+
+* `pattern` `{String}` Pattern to be matched
+* `options` `{Object}`
+* `cb` `{Function}`
+ * `err` `{Error | null}`
+ * `matches` `{Array<String>}` filenames found matching the pattern
+
+Perform an asynchronous glob search.
+
+## glob.sync(pattern, [options])
+
+* `pattern` `{String}` Pattern to be matched
+* `options` `{Object}`
+* return: `{Array<String>}` filenames found matching the pattern
+
+Perform a synchronous glob search.
+
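+For example, a minimal synchronous search:
+
+```javascript
+var glob = require("glob")
+
+// returns the matches directly instead of taking a callback
+var files = glob.sync("**/*.js", { ignore: "node_modules/**" })
+```
+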
+## Class: glob.Glob
+
+Create a Glob object by instantiating the `glob.Glob` class.
+
+```javascript
+var Glob = require("glob").Glob
+var mg = new Glob(pattern, options, cb)
+```
+
+It's an EventEmitter, and starts walking the filesystem to find matches
+immediately.
+
+### new glob.Glob(pattern, [options], [cb])
+
+* `pattern` `{String}` pattern to search for
+* `options` `{Object}`
+* `cb` `{Function}` Called when an error occurs, or matches are found
+ * `err` `{Error | null}`
+ * `matches` `{Array<String>}` filenames found matching the pattern
+
+Note that if the `sync` flag is set in the options, then matches will
+be immediately available on the `g.found` member.
+
+### Properties
+
+* `minimatch` The minimatch object that the glob uses.
+* `options` The options object passed in.
+* `aborted` Boolean which is set to true when calling `abort()`. There
+ is no way at this time to continue a glob search after aborting, but
+ you can re-use the statCache to avoid having to duplicate syscalls.
+* `cache` Convenience object. Each field has the following possible
+ values:
+ * `false` - Path does not exist
+ * `true` - Path exists
+ * `'FILE'` - Path exists, and is not a directory
+ * `'DIR'` - Path exists, and is a directory
+ * `[file, entries, ...]` - Path exists, is a directory, and the
+ array value is the results of `fs.readdir`
+* `statCache` Cache of `fs.stat` results, to prevent statting the same
+ path multiple times.
+* `symlinks` A record of which paths are symbolic links, which is
+ relevant in resolving `**` patterns.
+* `realpathCache` An optional object which is passed to `fs.realpath`
+ to minimize unnecessary syscalls. It is stored on the instantiated
+ Glob object, and may be re-used.
+
+### Events
+
+* `end` When the matching is finished, this is emitted with all the
+ matches found. If the `nonull` option is set, and no match was found,
+ then the `matches` list contains the original pattern. The matches
+ are sorted, unless the `nosort` flag is set.
+* `match` Every time a match is found, this is emitted with the specific
+ thing that matched. It is not deduplicated or resolved to a realpath.
+* `error` Emitted when an unexpected error is encountered, or whenever
+ any fs error occurs if `options.strict` is set.
+* `abort` When `abort()` is called, this event is raised.
+
+### Methods
+
+* `pause` Temporarily stop the search
+* `resume` Resume the search
+* `abort` Stop the search forever
+
+### Options
+
+All the options that can be passed to Minimatch can also be passed to
+Glob to change pattern matching behavior. Also, some have been added,
+or have glob-specific ramifications.
+
+All options are false by default, unless otherwise noted.
+
+All options are added to the Glob object, as well.
+
+If you are running many `glob` operations, you can pass a Glob object
+as the `options` argument to a subsequent operation to shortcut some
+`stat` and `readdir` calls. At the very least, you may pass in shared
+`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that
+parallel glob operations will be sped up by sharing information about
+the filesystem.
+
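+A sketch of that reuse (the file patterns here are illustrative):
+
+```javascript
+var glob = require("glob")
+var Glob = glob.Glob
+
+var g = new Glob("**/*.js", function (er, jsFiles) {
+  if (er) throw er
+  // hand the first walk's caches to the next one
+  var shared = {
+    cache: g.cache,
+    statCache: g.statCache,
+    symlinks: g.symlinks,
+    realpathCache: g.realpathCache
+  }
+  glob("**/*.json", shared, function (er, jsonFiles) {
+    // this walk repeats far fewer stat/readdir syscalls
+  })
+})
+```
+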
+* `cwd` The current working directory in which to search. Defaults
+ to `process.cwd()`.
+* `root` The place where patterns starting with `/` will be mounted
+ onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix
+ systems, and `C:\` or some such on Windows.)
+* `dot` Include `.dot` files in normal matches and `globstar` matches.
+ Note that an explicit dot in a portion of the pattern will always
+ match dot files.
+* `nomount` By default, a pattern starting with a forward-slash will be
+ "mounted" onto the root setting, so that a valid filesystem path is
+ returned. Set this flag to disable that behavior.
+* `mark` Add a `/` character to directory matches. Note that this
+ requires additional stat calls.
+* `nosort` Don't sort the results.
+* `stat` Set to true to stat *all* results. This reduces performance
+ somewhat, and is completely unnecessary, unless `readdir` is presumed
+ to be an untrustworthy indicator of file existence.
+* `silent` When an unusual error is encountered when attempting to
+ read a directory, a warning will be printed to stderr. Set the
+ `silent` option to true to suppress these warnings.
+* `strict` When an unusual error is encountered when attempting to
+ read a directory, the process will just continue on in search of
+ other matches. Set the `strict` option to raise an error in these
+ cases.
+* `cache` See `cache` property above. Pass in a previously generated
+ cache object to save some fs calls.
+* `statCache` A cache of results of filesystem information, to prevent
+ unnecessary stat calls. While it should not normally be necessary
+ to set this, you may pass the statCache from one glob() call to the
+ options object of another, if you know that the filesystem will not
+ change between calls. (See "Race Conditions" below.)
+* `symlinks` A cache of known symbolic links. You may pass in a
+ previously generated `symlinks` object to save `lstat` calls when
+ resolving `**` matches.
+* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead.
+* `nounique` In some cases, brace-expanded patterns can result in the
+ same file showing up multiple times in the result set. By default,
+ this implementation prevents duplicates in the result set. Set this
+ flag to disable that behavior.
+* `nonull` Set to never return an empty set, instead returning a set
+ containing the pattern itself. This is the default in glob(3).
+* `debug` Set to enable debug logging in minimatch and glob.
+* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets.
+* `noglobstar` Do not match `**` against multiple filenames. (I.e.,
+ treat it as a normal `*` instead.)
+* `noext` Do not match `+(a|b)` "extglob" patterns.
+* `nocase` Perform a case-insensitive match. Note: on
+ case-insensitive filesystems, non-magic patterns will match by
+ default, since `stat` and `readdir` will not raise errors.
+* `matchBase` Perform a basename-only match if the pattern does not
+ contain any slash characters. That is, `*.js` would be treated as
+ equivalent to `**/*.js`, matching all js files in all directories.
+* `nodir` Do not match directories, only files. (Note: to match
+ *only* directories, simply put a `/` at the end of the pattern.)
+* `ignore` Add a pattern or an array of glob patterns to exclude matches; see the sketch after this list.
+ Note: `ignore` patterns are *always* in `dot:true` mode, regardless
+ of any other settings.
+* `follow` Follow symlinked directories when expanding `**` patterns.
+ Note that this can result in a lot of duplicate references in the
+ presence of cyclic links.
+* `realpath` Set to true to call `fs.realpath` on all of the results.
+ In the case of a symlink that cannot be resolved, the full absolute
+ path to the matched entry is returned (though it will usually be a
+ broken symlink)
+* `absolute` Set to true to always receive absolute paths for matched
+ files. Unlike `realpath`, this also affects the values returned in
+ the `match` event.
+
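+A combined sketch of the `ignore` and `nodir` options mentioned above:
+
+```javascript
+var glob = require("glob")
+
+// ignore accepts a pattern or an array; nodir drops directory matches
+var files = glob.sync("**/*", {
+  ignore: ["node_modules/**", "**/*.min.js"],
+  nodir: true
+})
+```
+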
+## Comparisons to other fnmatch/glob implementations
+
+While strict compliance with the existing standards is a worthwhile
+goal, some discrepancies exist between node-glob and other
+implementations, and are intentional.
+
+The double-star character `**` is supported by default, unless the
+`noglobstar` flag is set. This is supported in the manner of bsdglob
+and bash 4.3, where `**` only has special significance if it is the only
+thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
+`a/**b` will not.
+
+Note that symlinked directories are not crawled as part of a `**`,
+though their contents may match against subsequent portions of the
+pattern. This prevents infinite loops and duplicates and the like.
+
+If an escaped pattern has no matches, and the `nonull` flag is set,
+then glob returns the pattern as-provided, rather than
+interpreting the character escapes. For example,
+`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
+`"*a?"`. This is akin to setting the `nullglob` option in bash, except
+that it does not resolve escaped pattern characters.
+
+If brace expansion is not disabled, then it is performed before any
+other interpretation of the glob pattern. Thus, a pattern like
+`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
+**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
+checked for validity. Since those two are valid, matching proceeds.
+
+### Comments and Negation
+
+Previously, this module let you mark a pattern as a "comment" if it
+started with a `#` character, or a "negated" pattern if it started
+with a `!` character.
+
+These options were deprecated in version 5, and removed in version 6.
+
+To specify things that should not match, use the `ignore` option.
+
+## Windows
+
+**Please only use forward-slashes in glob expressions.**
+
+Though windows uses either `/` or `\` as its path separator, only `/`
+characters are used by this glob implementation. You must use
+forward-slashes **only** in glob expressions. Back-slashes will always
+be interpreted as escape characters, not path separators.
+
+Results from absolute patterns such as `/foo/*` are mounted onto the
+root setting using `path.join`. On windows, this will by default result
+in `/foo/*` matching `C:\foo\bar.txt`.
+
+## Race Conditions
+
+Glob searching, by its very nature, is susceptible to race conditions,
+since it relies on directory walking and such.
+
+As a result, it is possible that a file that exists when glob looks for
+it may have been deleted or modified by the time it returns the result.
+
+As part of its internal implementation, this program caches all stat
+and readdir calls that it makes, in order to cut down on system
+overhead. However, this also makes it even more susceptible to races,
+especially if the cache or statCache objects are reused between glob
+calls.
+
+Users are thus advised not to use a glob result as a guarantee of
+filesystem state in the face of rapid changes. For the vast majority
+of operations, this is never a problem.
+
+## Glob Logo
+Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo).
+
+The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/).
+
+## Contributing
+
+Any change to behavior (including bugfixes) must come with a test.
+
+Patches that fail tests or reduce performance will be rejected.
+
+```
+# to run tests
+npm test
+
+# to re-generate test fixtures
+npm run test-regen
+
+# to benchmark against bash/zsh
+npm run bench
+
+# to profile javascript
+npm run prof
+```
+
+![](oh-my-glob.gif)
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/changelog.md b/node_modules/@npmcli/map-workspaces/node_modules/glob/changelog.md
new file mode 100644
index 000000000..41636771e
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/changelog.md
@@ -0,0 +1,67 @@
+## 7.0
+
+- Raise error if `options.cwd` is specified, and not a directory
+
+## 6.0
+
+- Remove comment and negation pattern support
+- Ignore patterns are always in `dot:true` mode
+
+## 5.0
+
+- Deprecate comment and negation patterns
+- Fix regression in `mark` and `nodir` options from making all cache
+ keys absolute path.
+- Abort if `fs.readdir` returns an error that's unexpected
+- Don't emit `match` events for ignored items
+- Treat ENOTSUP like ENOTDIR in readdir
+
+## 4.5
+
+- Add `options.follow` to always follow directory symlinks in globstar
+- Add `options.realpath` to call `fs.realpath` on all results
+- Always cache based on absolute path
+
+## 4.4
+
+- Add `options.ignore`
+- Fix handling of broken symlinks
+
+## 4.3
+
+- Bump minimatch to 2.x
+- Pass all tests on Windows
+
+## 4.2
+
+- Add `glob.hasMagic` function
+- Add `options.nodir` flag
+
+## 4.1
+
+- Refactor sync and async implementations for performance
+- Throw if callback provided to sync glob function
+- Treat symbolic links in globstar results the same as Bash 4.3
+
+## 4.0
+
+- Use `^` for dependency versions (bumped major because this breaks
+ older npm versions)
+- Ensure callbacks are only ever called once
+- switch to ISC license
+
+## 3.x
+
+- Rewrite in JavaScript
+- Add support for setting root, cwd, and windows support
+- Cache many fs calls
+- Add globstar support
+- emit match events
+
+## 2.x
+
+- Use `glob.h` and `fnmatch.h` from NetBSD
+
+## 1.x
+
+- `glob.h` static binding.
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/common.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/common.js
new file mode 100644
index 000000000..66651bb3a
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/common.js
@@ -0,0 +1,240 @@
+exports.alphasort = alphasort
+exports.alphasorti = alphasorti
+exports.setopts = setopts
+exports.ownProp = ownProp
+exports.makeAbs = makeAbs
+exports.finish = finish
+exports.mark = mark
+exports.isIgnored = isIgnored
+exports.childrenIgnored = childrenIgnored
+
+function ownProp (obj, field) {
+ return Object.prototype.hasOwnProperty.call(obj, field)
+}
+
+var path = require("path")
+var minimatch = require("minimatch")
+var isAbsolute = require("path-is-absolute")
+var Minimatch = minimatch.Minimatch
+
+function alphasorti (a, b) {
+ return a.toLowerCase().localeCompare(b.toLowerCase())
+}
+
+function alphasort (a, b) {
+ return a.localeCompare(b)
+}
+
+function setupIgnores (self, options) {
+ self.ignore = options.ignore || []
+
+ if (!Array.isArray(self.ignore))
+ self.ignore = [self.ignore]
+
+ if (self.ignore.length) {
+ self.ignore = self.ignore.map(ignoreMap)
+ }
+}
+
+// ignore patterns are always in dot:true mode.
+function ignoreMap (pattern) {
+ var gmatcher = null
+ if (pattern.slice(-3) === '/**') {
+ var gpattern = pattern.replace(/(\/\*\*)+$/, '')
+ gmatcher = new Minimatch(gpattern, { dot: true })
+ }
+
+ return {
+ matcher: new Minimatch(pattern, { dot: true }),
+ gmatcher: gmatcher
+ }
+}
+
+function setopts (self, pattern, options) {
+ if (!options)
+ options = {}
+
+ // base-matching: just use globstar for that.
+ if (options.matchBase && -1 === pattern.indexOf("/")) {
+ if (options.noglobstar) {
+ throw new Error("base matching requires globstar")
+ }
+ pattern = "**/" + pattern
+ }
+
+ self.silent = !!options.silent
+ self.pattern = pattern
+ self.strict = options.strict !== false
+ self.realpath = !!options.realpath
+ self.realpathCache = options.realpathCache || Object.create(null)
+ self.follow = !!options.follow
+ self.dot = !!options.dot
+ self.mark = !!options.mark
+ self.nodir = !!options.nodir
+ if (self.nodir)
+ self.mark = true
+ self.sync = !!options.sync
+ self.nounique = !!options.nounique
+ self.nonull = !!options.nonull
+ self.nosort = !!options.nosort
+ self.nocase = !!options.nocase
+ self.stat = !!options.stat
+ self.noprocess = !!options.noprocess
+ self.absolute = !!options.absolute
+
+ self.maxLength = options.maxLength || Infinity
+ self.cache = options.cache || Object.create(null)
+ self.statCache = options.statCache || Object.create(null)
+ self.symlinks = options.symlinks || Object.create(null)
+
+ setupIgnores(self, options)
+
+ self.changedCwd = false
+ var cwd = process.cwd()
+ if (!ownProp(options, "cwd"))
+ self.cwd = cwd
+ else {
+ self.cwd = path.resolve(options.cwd)
+ self.changedCwd = self.cwd !== cwd
+ }
+
+ self.root = options.root || path.resolve(self.cwd, "/")
+ self.root = path.resolve(self.root)
+ if (process.platform === "win32")
+ self.root = self.root.replace(/\\/g, "/")
+
+ // TODO: is an absolute `cwd` supposed to be resolved against `root`?
+ // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
+ self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)
+ if (process.platform === "win32")
+ self.cwdAbs = self.cwdAbs.replace(/\\/g, "/")
+ self.nomount = !!options.nomount
+
+ // disable comments and negation in Minimatch.
+ // Note that they are not supported in Glob itself anyway.
+ options.nonegate = true
+ options.nocomment = true
+
+ self.minimatch = new Minimatch(pattern, options)
+ self.options = self.minimatch.options
+}
+
+function finish (self) {
+ var nou = self.nounique
+ var all = nou ? [] : Object.create(null)
+
+ for (var i = 0, l = self.matches.length; i < l; i ++) {
+ var matches = self.matches[i]
+ if (!matches || Object.keys(matches).length === 0) {
+ if (self.nonull) {
+ // do like the shell, and spit out the literal glob
+ var literal = self.minimatch.globSet[i]
+ if (nou)
+ all.push(literal)
+ else
+ all[literal] = true
+ }
+ } else {
+ // had matches
+ var m = Object.keys(matches)
+ if (nou)
+ all.push.apply(all, m)
+ else
+ m.forEach(function (m) {
+ all[m] = true
+ })
+ }
+ }
+
+ if (!nou)
+ all = Object.keys(all)
+
+ if (!self.nosort)
+ all = all.sort(self.nocase ? alphasorti : alphasort)
+
+ // at *some* point we statted all of these
+ if (self.mark) {
+ for (var i = 0; i < all.length; i++) {
+ all[i] = self._mark(all[i])
+ }
+ if (self.nodir) {
+ all = all.filter(function (e) {
+ var notDir = !(/\/$/.test(e))
+ var c = self.cache[e] || self.cache[makeAbs(self, e)]
+ if (notDir && c)
+ notDir = c !== 'DIR' && !Array.isArray(c)
+ return notDir
+ })
+ }
+ }
+
+ if (self.ignore.length)
+ all = all.filter(function(m) {
+ return !isIgnored(self, m)
+ })
+
+ self.found = all
+}
+
+function mark (self, p) {
+ var abs = makeAbs(self, p)
+ var c = self.cache[abs]
+ var m = p
+ if (c) {
+ var isDir = c === 'DIR' || Array.isArray(c)
+ var slash = p.slice(-1) === '/'
+
+ if (isDir && !slash)
+ m += '/'
+ else if (!isDir && slash)
+ m = m.slice(0, -1)
+
+ if (m !== p) {
+ var mabs = makeAbs(self, m)
+ self.statCache[mabs] = self.statCache[abs]
+ self.cache[mabs] = self.cache[abs]
+ }
+ }
+
+ return m
+}
+
+// lotta situps...
+function makeAbs (self, f) {
+ var abs = f
+ if (f.charAt(0) === '/') {
+ abs = path.join(self.root, f)
+ } else if (isAbsolute(f) || f === '') {
+ abs = f
+ } else if (self.changedCwd) {
+ abs = path.resolve(self.cwd, f)
+ } else {
+ abs = path.resolve(f)
+ }
+
+ if (process.platform === 'win32')
+ abs = abs.replace(/\\/g, '/')
+
+ return abs
+}
+
+
+// Returns true if the path matches any ignore pattern. Patterns ending in
+// a globstar '**' also ignore the accompanying parent directory: e.g. for
+// node_modules/**, 'node_modules' itself is ignored along with its contents.
+function isIgnored (self, path) {
+ if (!self.ignore.length)
+ return false
+
+ return self.ignore.some(function(item) {
+ return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
+ })
+}
+
+function childrenIgnored (self, path) {
+ if (!self.ignore.length)
+ return false
+
+ return self.ignore.some(function(item) {
+ return !!(item.gmatcher && item.gmatcher.match(path))
+ })
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/glob.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/glob.js
new file mode 100644
index 000000000..58dec0f6c
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/glob.js
@@ -0,0 +1,790 @@
+// Approach:
+//
+// 1. Get the minimatch set
+// 2. For each pattern in the set, PROCESS(pattern, false)
+// 3. Store matches per-set, then uniq them
+//
+// PROCESS(pattern, inGlobStar)
+// Get the first [n] items from pattern that are all strings
+// Join these together. This is PREFIX.
+// If there is no more remaining, then stat(PREFIX) and
+// add to matches if it succeeds. END.
+//
+// If inGlobStar and PREFIX is symlink and points to dir
+// set ENTRIES = []
+// else readdir(PREFIX) as ENTRIES
+// If fail, END
+//
+// with ENTRIES
+// If pattern[n] is GLOBSTAR
+// // handle the case where the globstar match is empty
+// // by pruning it out, and testing the resulting pattern
+// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
+// // handle other cases.
+// for ENTRY in ENTRIES (not dotfiles)
+// // attach globstar + tail onto the entry
+// // Mark that this entry is a globstar match
+// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
+//
+// else // not globstar
+// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
+// Test ENTRY against pattern[n]
+// If fails, continue
+// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
+//
+// Caveat:
+//   Cache all stat and readdir results to minimize syscalls. Since all
+// we ever care about is existence and directory-ness, we can just keep
+// `true` for files, and [children,...] for directories, or `false` for
+// things that don't exist.
+
+module.exports = glob
+
+var fs = require('fs')
+var rp = require('fs.realpath')
+var minimatch = require('minimatch')
+var Minimatch = minimatch.Minimatch
+var inherits = require('inherits')
+var EE = require('events').EventEmitter
+var path = require('path')
+var assert = require('assert')
+var isAbsolute = require('path-is-absolute')
+var globSync = require('./sync.js')
+var common = require('./common.js')
+var alphasort = common.alphasort
+var alphasorti = common.alphasorti
+var setopts = common.setopts
+var ownProp = common.ownProp
+var inflight = require('inflight')
+var util = require('util')
+var childrenIgnored = common.childrenIgnored
+var isIgnored = common.isIgnored
+
+var once = require('once')
+
+function glob (pattern, options, cb) {
+ if (typeof options === 'function') cb = options, options = {}
+ if (!options) options = {}
+
+ if (options.sync) {
+ if (cb)
+ throw new TypeError('callback provided to sync glob')
+ return globSync(pattern, options)
+ }
+
+ return new Glob(pattern, options, cb)
+}
+
+glob.sync = globSync
+var GlobSync = glob.GlobSync = globSync.GlobSync
+
+// old api surface
+glob.glob = glob
+
+function extend (origin, add) {
+ if (add === null || typeof add !== 'object') {
+ return origin
+ }
+
+ var keys = Object.keys(add)
+ var i = keys.length
+ while (i--) {
+ origin[keys[i]] = add[keys[i]]
+ }
+ return origin
+}
+
+glob.hasMagic = function (pattern, options_) {
+ var options = extend({}, options_)
+ options.noprocess = true
+
+ var g = new Glob(pattern, options)
+ var set = g.minimatch.set
+
+ if (!pattern)
+ return false
+
+ if (set.length > 1)
+ return true
+
+ for (var j = 0; j < set[0].length; j++) {
+ if (typeof set[0][j] !== 'string')
+ return true
+ }
+
+ return false
+}
+
+glob.Glob = Glob
+inherits(Glob, EE)
+function Glob (pattern, options, cb) {
+ if (typeof options === 'function') {
+ cb = options
+ options = null
+ }
+
+ if (options && options.sync) {
+ if (cb)
+ throw new TypeError('callback provided to sync glob')
+ return new GlobSync(pattern, options)
+ }
+
+ if (!(this instanceof Glob))
+ return new Glob(pattern, options, cb)
+
+ setopts(this, pattern, options)
+ this._didRealPath = false
+
+ // process each pattern in the minimatch set
+ var n = this.minimatch.set.length
+
+ // The matches are stored as {<filename>: true,...} so that
+ // duplicates are automagically pruned.
+ // Later, we do an Object.keys() on these.
+ // Keep them as a list so we can fill in when nonull is set.
+ this.matches = new Array(n)
+
+ if (typeof cb === 'function') {
+ cb = once(cb)
+ this.on('error', cb)
+ this.on('end', function (matches) {
+ cb(null, matches)
+ })
+ }
+
+ var self = this
+ this._processing = 0
+
+ this._emitQueue = []
+ this._processQueue = []
+ this.paused = false
+
+ if (this.noprocess)
+ return this
+
+ if (n === 0)
+ return done()
+
+ var sync = true
+ for (var i = 0; i < n; i ++) {
+ this._process(this.minimatch.set[i], i, false, done)
+ }
+ sync = false
+
+ function done () {
+ --self._processing
+ if (self._processing <= 0) {
+ if (sync) {
+ process.nextTick(function () {
+ self._finish()
+ })
+ } else {
+ self._finish()
+ }
+ }
+ }
+}
+
+Glob.prototype._finish = function () {
+ assert(this instanceof Glob)
+ if (this.aborted)
+ return
+
+ if (this.realpath && !this._didRealpath)
+ return this._realpath()
+
+ common.finish(this)
+ this.emit('end', this.found)
+}
+
+Glob.prototype._realpath = function () {
+ if (this._didRealpath)
+ return
+
+ this._didRealpath = true
+
+ var n = this.matches.length
+ if (n === 0)
+ return this._finish()
+
+ var self = this
+ for (var i = 0; i < this.matches.length; i++)
+ this._realpathSet(i, next)
+
+ function next () {
+ if (--n === 0)
+ self._finish()
+ }
+}
+
+Glob.prototype._realpathSet = function (index, cb) {
+ var matchset = this.matches[index]
+ if (!matchset)
+ return cb()
+
+ var found = Object.keys(matchset)
+ var self = this
+ var n = found.length
+
+ if (n === 0)
+ return cb()
+
+ var set = this.matches[index] = Object.create(null)
+ found.forEach(function (p, i) {
+ // If there's a problem with the stat, then it means that
+ // one or more of the links in the realpath couldn't be
+ // resolved. just return the abs value in that case.
+ p = self._makeAbs(p)
+ rp.realpath(p, self.realpathCache, function (er, real) {
+ if (!er)
+ set[real] = true
+ else if (er.syscall === 'stat')
+ set[p] = true
+ else
+        self.emit('error', er) // unexpected realpath error; surface it
+
+ if (--n === 0) {
+ self.matches[index] = set
+ cb()
+ }
+ })
+ })
+}
+
+Glob.prototype._mark = function (p) {
+ return common.mark(this, p)
+}
+
+Glob.prototype._makeAbs = function (f) {
+ return common.makeAbs(this, f)
+}
+
+Glob.prototype.abort = function () {
+ this.aborted = true
+ this.emit('abort')
+}
+
+Glob.prototype.pause = function () {
+ if (!this.paused) {
+ this.paused = true
+ this.emit('pause')
+ }
+}
+
+Glob.prototype.resume = function () {
+ if (this.paused) {
+ this.emit('resume')
+ this.paused = false
+ if (this._emitQueue.length) {
+ var eq = this._emitQueue.slice(0)
+ this._emitQueue.length = 0
+ for (var i = 0; i < eq.length; i ++) {
+ var e = eq[i]
+ this._emitMatch(e[0], e[1])
+ }
+ }
+ if (this._processQueue.length) {
+ var pq = this._processQueue.slice(0)
+ this._processQueue.length = 0
+ for (var i = 0; i < pq.length; i ++) {
+ var p = pq[i]
+ this._processing--
+ this._process(p[0], p[1], p[2], p[3])
+ }
+ }
+ }
+}
+
+Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
+ assert(this instanceof Glob)
+ assert(typeof cb === 'function')
+
+ if (this.aborted)
+ return
+
+ this._processing++
+ if (this.paused) {
+ this._processQueue.push([pattern, index, inGlobStar, cb])
+ return
+ }
+
+ //console.error('PROCESS %d', this._processing, pattern)
+
+ // Get the first [n] parts of pattern that are all strings.
+ var n = 0
+ while (typeof pattern[n] === 'string') {
+ n ++
+ }
+ // now n is the index of the first one that is *not* a string.
+
+ // see if there's anything else
+ var prefix
+ switch (n) {
+ // if not, then this is rather simple
+ case pattern.length:
+ this._processSimple(pattern.join('/'), index, cb)
+ return
+
+ case 0:
+ // pattern *starts* with some non-trivial item.
+ // going to readdir(cwd), but not include the prefix in matches.
+ prefix = null
+ break
+
+ default:
+ // pattern has some string bits in the front.
+ // whatever it starts with, whether that's 'absolute' like /foo/bar,
+ // or 'relative' like '../baz'
+ prefix = pattern.slice(0, n).join('/')
+ break
+ }
+
+ var remain = pattern.slice(n)
+
+ // get the list of entries.
+ var read
+ if (prefix === null)
+ read = '.'
+ else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
+ if (!prefix || !isAbsolute(prefix))
+ prefix = '/' + prefix
+ read = prefix
+ } else
+ read = prefix
+
+ var abs = this._makeAbs(read)
+
+ //if ignored, skip _processing
+ if (childrenIgnored(this, read))
+ return cb()
+
+ var isGlobStar = remain[0] === minimatch.GLOBSTAR
+ if (isGlobStar)
+ this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
+ else
+ this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
+}
+
+Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
+ var self = this
+ this._readdir(abs, inGlobStar, function (er, entries) {
+ return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
+ })
+}
+
+Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
+
+ // if the abs isn't a dir, then nothing can match!
+ if (!entries)
+ return cb()
+
+ // It will only match dot entries if it starts with a dot, or if
+ // dot is set. Stuff like @(.foo|.bar) isn't allowed.
+ var pn = remain[0]
+ var negate = !!this.minimatch.negate
+ var rawGlob = pn._glob
+ var dotOk = this.dot || rawGlob.charAt(0) === '.'
+
+ var matchedEntries = []
+ for (var i = 0; i < entries.length; i++) {
+ var e = entries[i]
+ if (e.charAt(0) !== '.' || dotOk) {
+ var m
+ if (negate && !prefix) {
+ m = !e.match(pn)
+ } else {
+ m = e.match(pn)
+ }
+ if (m)
+ matchedEntries.push(e)
+ }
+ }
+
+ //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
+
+ var len = matchedEntries.length
+ // If there are no matched entries, then nothing matches.
+ if (len === 0)
+ return cb()
+
+ // if this is the last remaining pattern bit, then no need for
+ // an additional stat *unless* the user has specified mark or
+ // stat explicitly. We know they exist, since readdir returned
+ // them.
+
+ if (remain.length === 1 && !this.mark && !this.stat) {
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ if (prefix) {
+ if (prefix !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+
+ if (e.charAt(0) === '/' && !this.nomount) {
+ e = path.join(this.root, e)
+ }
+ this._emitMatch(index, e)
+ }
+ // This was the last one, and no stats were needed
+ return cb()
+ }
+
+ // now test all matched entries as stand-ins for that part
+ // of the pattern.
+ remain.shift()
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ if (prefix) {
+ if (prefix !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+ this._process([e].concat(remain), index, inGlobStar, cb)
+ }
+ cb()
+}
+
+Glob.prototype._emitMatch = function (index, e) {
+ if (this.aborted)
+ return
+
+ if (isIgnored(this, e))
+ return
+
+ if (this.paused) {
+ this._emitQueue.push([index, e])
+ return
+ }
+
+ var abs = isAbsolute(e) ? e : this._makeAbs(e)
+
+ if (this.mark)
+ e = this._mark(e)
+
+ if (this.absolute)
+ e = abs
+
+ if (this.matches[index][e])
+ return
+
+ if (this.nodir) {
+ var c = this.cache[abs]
+ if (c === 'DIR' || Array.isArray(c))
+ return
+ }
+
+ this.matches[index][e] = true
+
+ var st = this.statCache[abs]
+ if (st)
+ this.emit('stat', e, st)
+
+ this.emit('match', e)
+}
+
+Glob.prototype._readdirInGlobStar = function (abs, cb) {
+ if (this.aborted)
+ return
+
+ // follow all symlinked directories forever
+ // just proceed as if this is a non-globstar situation
+ if (this.follow)
+ return this._readdir(abs, false, cb)
+
+ var lstatkey = 'lstat\0' + abs
+ var self = this
+ var lstatcb = inflight(lstatkey, lstatcb_)
+
+ if (lstatcb)
+ fs.lstat(abs, lstatcb)
+
+ function lstatcb_ (er, lstat) {
+ if (er && er.code === 'ENOENT')
+ return cb()
+
+ var isSym = lstat && lstat.isSymbolicLink()
+ self.symlinks[abs] = isSym
+
+ // If it's not a symlink or a dir, then it's definitely a regular file.
+ // don't bother doing a readdir in that case.
+ if (!isSym && lstat && !lstat.isDirectory()) {
+ self.cache[abs] = 'FILE'
+ cb()
+ } else
+ self._readdir(abs, false, cb)
+ }
+}
+
+Glob.prototype._readdir = function (abs, inGlobStar, cb) {
+ if (this.aborted)
+ return
+
+ cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
+ if (!cb)
+ return
+
+ //console.error('RD %j %j', +inGlobStar, abs)
+ if (inGlobStar && !ownProp(this.symlinks, abs))
+ return this._readdirInGlobStar(abs, cb)
+
+ if (ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+ if (!c || c === 'FILE')
+ return cb()
+
+ if (Array.isArray(c))
+ return cb(null, c)
+ }
+
+ var self = this
+ fs.readdir(abs, readdirCb(this, abs, cb))
+}
+
+function readdirCb (self, abs, cb) {
+ return function (er, entries) {
+ if (er)
+ self._readdirError(abs, er, cb)
+ else
+ self._readdirEntries(abs, entries, cb)
+ }
+}
+
+Glob.prototype._readdirEntries = function (abs, entries, cb) {
+ if (this.aborted)
+ return
+
+ // if we haven't asked to stat everything, then just
+ // assume that everything in there exists, so we can avoid
+ // having to stat it a second time.
+ if (!this.mark && !this.stat) {
+ for (var i = 0; i < entries.length; i ++) {
+ var e = entries[i]
+ if (abs === '/')
+ e = abs + e
+ else
+ e = abs + '/' + e
+ this.cache[e] = true
+ }
+ }
+
+ this.cache[abs] = entries
+ return cb(null, entries)
+}
+
+Glob.prototype._readdirError = function (f, er, cb) {
+ if (this.aborted)
+ return
+
+ // handle errors, and cache the information
+ switch (er.code) {
+ case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
+ case 'ENOTDIR': // totally normal. means it *does* exist.
+ var abs = this._makeAbs(f)
+ this.cache[abs] = 'FILE'
+ if (abs === this.cwdAbs) {
+ var error = new Error(er.code + ' invalid cwd ' + this.cwd)
+ error.path = this.cwd
+ error.code = er.code
+ this.emit('error', error)
+ this.abort()
+ }
+ break
+
+ case 'ENOENT': // not terribly unusual
+ case 'ELOOP':
+ case 'ENAMETOOLONG':
+ case 'UNKNOWN':
+ this.cache[this._makeAbs(f)] = false
+ break
+
+ default: // some unusual error. Treat as failure.
+ this.cache[this._makeAbs(f)] = false
+ if (this.strict) {
+ this.emit('error', er)
+ // If the error is handled, then we abort
+ // if not, we threw out of here
+ this.abort()
+ }
+ if (!this.silent)
+ console.error('glob error', er)
+ break
+ }
+
+ return cb()
+}
+
+Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
+ var self = this
+ this._readdir(abs, inGlobStar, function (er, entries) {
+ self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
+ })
+}
+
+
+Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
+ //console.error('pgs2', prefix, remain[0], entries)
+
+ // no entries means not a dir, so it can never have matches
+ // foo.txt/** doesn't match foo.txt
+ if (!entries)
+ return cb()
+
+ // test without the globstar, and with every child both below
+ // and replacing the globstar.
+ var remainWithoutGlobStar = remain.slice(1)
+ var gspref = prefix ? [ prefix ] : []
+ var noGlobStar = gspref.concat(remainWithoutGlobStar)
+
+ // the noGlobStar pattern exits the inGlobStar state
+ this._process(noGlobStar, index, false, cb)
+
+ var isSym = this.symlinks[abs]
+ var len = entries.length
+
+ // If it's a symlink, and we're in a globstar, then stop
+ if (isSym && inGlobStar)
+ return cb()
+
+ for (var i = 0; i < len; i++) {
+ var e = entries[i]
+ if (e.charAt(0) === '.' && !this.dot)
+ continue
+
+ // these two cases enter the inGlobStar state
+ var instead = gspref.concat(entries[i], remainWithoutGlobStar)
+ this._process(instead, index, true, cb)
+
+ var below = gspref.concat(entries[i], remain)
+ this._process(below, index, true, cb)
+ }
+
+ cb()
+}
+
+Glob.prototype._processSimple = function (prefix, index, cb) {
+ // XXX review this. Shouldn't it be doing the mounting etc
+ // before doing stat? kinda weird?
+ var self = this
+ this._stat(prefix, function (er, exists) {
+ self._processSimple2(prefix, index, er, exists, cb)
+ })
+}
+Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
+
+ //console.error('ps2', prefix, exists)
+
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ // If it doesn't exist, then just mark the lack of results
+ if (!exists)
+ return cb()
+
+ if (prefix && isAbsolute(prefix) && !this.nomount) {
+ var trail = /[\/\\]$/.test(prefix)
+ if (prefix.charAt(0) === '/') {
+ prefix = path.join(this.root, prefix)
+ } else {
+ prefix = path.resolve(this.root, prefix)
+ if (trail)
+ prefix += '/'
+ }
+ }
+
+ if (process.platform === 'win32')
+ prefix = prefix.replace(/\\/g, '/')
+
+ // Mark this as a match
+ this._emitMatch(index, prefix)
+ cb()
+}
+
+// Returns either 'DIR', 'FILE', or false
+Glob.prototype._stat = function (f, cb) {
+ var abs = this._makeAbs(f)
+ var needDir = f.slice(-1) === '/'
+
+ if (f.length > this.maxLength)
+ return cb()
+
+ if (!this.stat && ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+
+ if (Array.isArray(c))
+ c = 'DIR'
+
+ // It exists, but maybe not how we need it
+ if (!needDir || c === 'DIR')
+ return cb(null, c)
+
+ if (needDir && c === 'FILE')
+ return cb()
+
+ // otherwise we have to stat, because maybe c=true
+ // if we know it exists, but not what it is.
+ }
+
+ var exists
+ var stat = this.statCache[abs]
+ if (stat !== undefined) {
+ if (stat === false)
+ return cb(null, stat)
+ else {
+ var type = stat.isDirectory() ? 'DIR' : 'FILE'
+ if (needDir && type === 'FILE')
+ return cb()
+ else
+ return cb(null, type, stat)
+ }
+ }
+
+ var self = this
+ var statcb = inflight('stat\0' + abs, lstatcb_)
+ if (statcb)
+ fs.lstat(abs, statcb)
+
+ function lstatcb_ (er, lstat) {
+ if (lstat && lstat.isSymbolicLink()) {
+ // If it's a symlink, then treat it as the target, unless
+ // the target does not exist, then treat it as a file.
+ return fs.stat(abs, function (er, stat) {
+ if (er)
+ self._stat2(f, abs, null, lstat, cb)
+ else
+ self._stat2(f, abs, er, stat, cb)
+ })
+ } else {
+ self._stat2(f, abs, er, lstat, cb)
+ }
+ }
+}
+
+Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
+ if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
+ this.statCache[abs] = false
+ return cb()
+ }
+
+ var needDir = f.slice(-1) === '/'
+ this.statCache[abs] = stat
+
+ if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
+ return cb(null, false, stat)
+
+ var c = true
+ if (stat)
+ c = stat.isDirectory() ? 'DIR' : 'FILE'
+ this.cache[abs] = this.cache[abs] || c
+
+ if (needDir && c === 'FILE')
+ return cb()
+
+ return cb(null, c, stat)
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/package.json b/node_modules/@npmcli/map-workspaces/node_modules/glob/package.json
new file mode 100644
index 000000000..586f70070
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/package.json
@@ -0,0 +1,79 @@
+{
+ "_from": "glob@^7.1.6",
+ "_id": "glob@7.1.6",
+ "_inBundle": false,
+ "_integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
+ "_location": "/@npmcli/map-workspaces/glob",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "glob@^7.1.6",
+ "name": "glob",
+ "escapedName": "glob",
+ "rawSpec": "^7.1.6",
+ "saveSpec": null,
+ "fetchSpec": "^7.1.6"
+ },
+ "_requiredBy": [
+ "/@npmcli/map-workspaces"
+ ],
+ "_resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
+ "_shasum": "141f33b81a7c2492e125594307480c46679278a6",
+ "_spec": "glob@^7.1.6",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/map-workspaces",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/node-glob/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^3.0.4",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ },
+ "deprecated": false,
+ "description": "a little globber",
+ "devDependencies": {
+ "mkdirp": "0",
+ "rimraf": "^2.2.8",
+ "tap": "^12.0.1",
+ "tick": "0.0.6"
+ },
+ "engines": {
+ "node": "*"
+ },
+ "files": [
+ "glob.js",
+ "sync.js",
+ "common.js"
+ ],
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ },
+ "homepage": "https://github.com/isaacs/node-glob#readme",
+ "license": "ISC",
+ "main": "glob.js",
+ "name": "glob",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-glob.git"
+ },
+ "scripts": {
+ "bench": "bash benchmark.sh",
+ "benchclean": "node benchclean.js",
+ "prepublish": "npm run benchclean",
+ "prof": "bash prof.sh && cat profile.txt",
+ "profclean": "rm -f v8.log profile.txt",
+ "test": "tap test/*.js --cov",
+ "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js"
+ },
+ "version": "7.1.6"
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/sync.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/sync.js
new file mode 100644
index 000000000..c952134ba
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/sync.js
@@ -0,0 +1,486 @@
+module.exports = globSync
+globSync.GlobSync = GlobSync
+
+var fs = require('fs')
+var rp = require('fs.realpath')
+var minimatch = require('minimatch')
+var Minimatch = minimatch.Minimatch
+var Glob = require('./glob.js').Glob
+var util = require('util')
+var path = require('path')
+var assert = require('assert')
+var isAbsolute = require('path-is-absolute')
+var common = require('./common.js')
+var alphasort = common.alphasort
+var alphasorti = common.alphasorti
+var setopts = common.setopts
+var ownProp = common.ownProp
+var childrenIgnored = common.childrenIgnored
+var isIgnored = common.isIgnored
+
+function globSync (pattern, options) {
+ if (typeof options === 'function' || arguments.length === 3)
+ throw new TypeError('callback provided to sync glob\n'+
+ 'See: https://github.com/isaacs/node-glob/issues/167')
+
+ return new GlobSync(pattern, options).found
+}
+
+function GlobSync (pattern, options) {
+ if (!pattern)
+ throw new Error('must provide pattern')
+
+ if (typeof options === 'function' || arguments.length === 3)
+ throw new TypeError('callback provided to sync glob\n'+
+ 'See: https://github.com/isaacs/node-glob/issues/167')
+
+ if (!(this instanceof GlobSync))
+ return new GlobSync(pattern, options)
+
+ setopts(this, pattern, options)
+
+ if (this.noprocess)
+ return this
+
+ var n = this.minimatch.set.length
+ this.matches = new Array(n)
+ for (var i = 0; i < n; i ++) {
+ this._process(this.minimatch.set[i], i, false)
+ }
+ this._finish()
+}
+
+GlobSync.prototype._finish = function () {
+ assert(this instanceof GlobSync)
+ if (this.realpath) {
+ var self = this
+ this.matches.forEach(function (matchset, index) {
+ var set = self.matches[index] = Object.create(null)
+ for (var p in matchset) {
+ try {
+ p = self._makeAbs(p)
+ var real = rp.realpathSync(p, self.realpathCache)
+ set[real] = true
+ } catch (er) {
+ if (er.syscall === 'stat')
+ set[self._makeAbs(p)] = true
+ else
+ throw er
+ }
+ }
+ })
+ }
+ common.finish(this)
+}
+
+
+GlobSync.prototype._process = function (pattern, index, inGlobStar) {
+ assert(this instanceof GlobSync)
+
+ // Get the first [n] parts of pattern that are all strings.
+ var n = 0
+ while (typeof pattern[n] === 'string') {
+ n ++
+ }
+ // now n is the index of the first one that is *not* a string.
+
+ // See if there's anything else
+ var prefix
+ switch (n) {
+ // if not, then this is rather simple
+ case pattern.length:
+ this._processSimple(pattern.join('/'), index)
+ return
+
+ case 0:
+ // pattern *starts* with some non-trivial item.
+ // going to readdir(cwd), but not include the prefix in matches.
+ prefix = null
+ break
+
+ default:
+ // pattern has some string bits in the front.
+ // whatever it starts with, whether that's 'absolute' like /foo/bar,
+ // or 'relative' like '../baz'
+ prefix = pattern.slice(0, n).join('/')
+ break
+ }
+
+ var remain = pattern.slice(n)
+
+ // get the list of entries.
+ var read
+ if (prefix === null)
+ read = '.'
+ else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
+ if (!prefix || !isAbsolute(prefix))
+ prefix = '/' + prefix
+ read = prefix
+ } else
+ read = prefix
+
+ var abs = this._makeAbs(read)
+
+ //if ignored, skip processing
+ if (childrenIgnored(this, read))
+ return
+
+ var isGlobStar = remain[0] === minimatch.GLOBSTAR
+ if (isGlobStar)
+ this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
+ else
+ this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
+}
+
+
+GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
+ var entries = this._readdir(abs, inGlobStar)
+
+ // if the abs isn't a dir, then nothing can match!
+ if (!entries)
+ return
+
+ // It will only match dot entries if it starts with a dot, or if
+ // dot is set. Stuff like @(.foo|.bar) isn't allowed.
+ var pn = remain[0]
+ var negate = !!this.minimatch.negate
+ var rawGlob = pn._glob
+ var dotOk = this.dot || rawGlob.charAt(0) === '.'
+
+ var matchedEntries = []
+ for (var i = 0; i < entries.length; i++) {
+ var e = entries[i]
+ if (e.charAt(0) !== '.' || dotOk) {
+ var m
+ if (negate && !prefix) {
+ m = !e.match(pn)
+ } else {
+ m = e.match(pn)
+ }
+ if (m)
+ matchedEntries.push(e)
+ }
+ }
+
+ var len = matchedEntries.length
+ // If there are no matched entries, then nothing matches.
+ if (len === 0)
+ return
+
+ // if this is the last remaining pattern bit, then no need for
+ // an additional stat *unless* the user has specified mark or
+ // stat explicitly. We know they exist, since readdir returned
+ // them.
+
+ if (remain.length === 1 && !this.mark && !this.stat) {
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ if (prefix) {
+ if (prefix.slice(-1) !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+
+ if (e.charAt(0) === '/' && !this.nomount) {
+ e = path.join(this.root, e)
+ }
+ this._emitMatch(index, e)
+ }
+ // This was the last one, and no stats were needed
+ return
+ }
+
+ // now test all matched entries as stand-ins for that part
+ // of the pattern.
+ remain.shift()
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ var newPattern
+ if (prefix)
+ newPattern = [prefix, e]
+ else
+ newPattern = [e]
+ this._process(newPattern.concat(remain), index, inGlobStar)
+ }
+}
+
+
+GlobSync.prototype._emitMatch = function (index, e) {
+ if (isIgnored(this, e))
+ return
+
+ var abs = this._makeAbs(e)
+
+ if (this.mark)
+ e = this._mark(e)
+
+ if (this.absolute) {
+ e = abs
+ }
+
+ if (this.matches[index][e])
+ return
+
+ if (this.nodir) {
+ var c = this.cache[abs]
+ if (c === 'DIR' || Array.isArray(c))
+ return
+ }
+
+ this.matches[index][e] = true
+
+ if (this.stat)
+ this._stat(e)
+}
+
+
+GlobSync.prototype._readdirInGlobStar = function (abs) {
+ // follow all symlinked directories forever
+ // just proceed as if this is a non-globstar situation
+ if (this.follow)
+ return this._readdir(abs, false)
+
+ var entries
+ var lstat
+ var stat
+ try {
+ lstat = fs.lstatSync(abs)
+ } catch (er) {
+ if (er.code === 'ENOENT') {
+ // lstat failed, doesn't exist
+ return null
+ }
+ }
+
+ var isSym = lstat && lstat.isSymbolicLink()
+ this.symlinks[abs] = isSym
+
+ // If it's not a symlink or a dir, then it's definitely a regular file.
+ // don't bother doing a readdir in that case.
+ if (!isSym && lstat && !lstat.isDirectory())
+ this.cache[abs] = 'FILE'
+ else
+ entries = this._readdir(abs, false)
+
+ return entries
+}
+
+GlobSync.prototype._readdir = function (abs, inGlobStar) {
+ var entries
+
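+  // first time seeing this dir in a globstar walk: use the
+  // symlink-aware readdir so its symlink-ness gets recorded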
+ if (inGlobStar && !ownProp(this.symlinks, abs))
+ return this._readdirInGlobStar(abs)
+
+ if (ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+ if (!c || c === 'FILE')
+ return null
+
+ if (Array.isArray(c))
+ return c
+ }
+
+ try {
+ return this._readdirEntries(abs, fs.readdirSync(abs))
+ } catch (er) {
+ this._readdirError(abs, er)
+ return null
+ }
+}
+
+GlobSync.prototype._readdirEntries = function (abs, entries) {
+ // if we haven't asked to stat everything, then just
+ // assume that everything in there exists, so we can avoid
+ // having to stat it a second time.
+ if (!this.mark && !this.stat) {
+ for (var i = 0; i < entries.length; i ++) {
+ var e = entries[i]
+ if (abs === '/')
+ e = abs + e
+ else
+ e = abs + '/' + e
+ this.cache[e] = true
+ }
+ }
+
+ this.cache[abs] = entries
+
+ // mark and cache dir-ness
+ return entries
+}
+
+GlobSync.prototype._readdirError = function (f, er) {
+ // handle errors, and cache the information
+ switch (er.code) {
+ case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
+ case 'ENOTDIR': // totally normal. means it *does* exist.
+ var abs = this._makeAbs(f)
+ this.cache[abs] = 'FILE'
+ if (abs === this.cwdAbs) {
+ var error = new Error(er.code + ' invalid cwd ' + this.cwd)
+ error.path = this.cwd
+ error.code = er.code
+ throw error
+ }
+ break
+
+ case 'ENOENT': // not terribly unusual
+ case 'ELOOP':
+ case 'ENAMETOOLONG':
+ case 'UNKNOWN':
+ this.cache[this._makeAbs(f)] = false
+ break
+
+ default: // some unusual error. Treat as failure.
+ this.cache[this._makeAbs(f)] = false
+ if (this.strict)
+ throw er
+ if (!this.silent)
+ console.error('glob error', er)
+ break
+ }
+}
+
+GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
+
+ var entries = this._readdir(abs, inGlobStar)
+
+ // no entries means not a dir, so it can never have matches
+ // foo.txt/** doesn't match foo.txt
+ if (!entries)
+ return
+
+ // test without the globstar, and with every child both below
+ // and replacing the globstar.
+ var remainWithoutGlobStar = remain.slice(1)
+ var gspref = prefix ? [ prefix ] : []
+ var noGlobStar = gspref.concat(remainWithoutGlobStar)
+
+ // the noGlobStar pattern exits the inGlobStar state
+ this._process(noGlobStar, index, false)
+
+ var len = entries.length
+ var isSym = this.symlinks[abs]
+
+ // If it's a symlink, and we're in a globstar, then stop
+ if (isSym && inGlobStar)
+ return
+
+ for (var i = 0; i < len; i++) {
+ var e = entries[i]
+ if (e.charAt(0) === '.' && !this.dot)
+ continue
+
+ // these two cases enter the inGlobStar state
+ var instead = gspref.concat(entries[i], remainWithoutGlobStar)
+ this._process(instead, index, true)
+
+ var below = gspref.concat(entries[i], remain)
+ this._process(below, index, true)
+ }
+}
+
+GlobSync.prototype._processSimple = function (prefix, index) {
+ // XXX review this. Shouldn't it be doing the mounting etc
+ // before doing stat? kinda weird?
+ var exists = this._stat(prefix)
+
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ // If it doesn't exist, then just mark the lack of results
+ if (!exists)
+ return
+
+ if (prefix && isAbsolute(prefix) && !this.nomount) {
+ var trail = /[\/\\]$/.test(prefix)
+ if (prefix.charAt(0) === '/') {
+ prefix = path.join(this.root, prefix)
+ } else {
+ prefix = path.resolve(this.root, prefix)
+ if (trail)
+ prefix += '/'
+ }
+ }
+
+ if (process.platform === 'win32')
+ prefix = prefix.replace(/\\/g, '/')
+
+ // Mark this as a match
+ this._emitMatch(index, prefix)
+}
+
+// Returns either 'DIR', 'FILE', or false
+GlobSync.prototype._stat = function (f) {
+ var abs = this._makeAbs(f)
+ var needDir = f.slice(-1) === '/'
+
+ if (f.length > this.maxLength)
+ return false
+
+ if (!this.stat && ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+
+ if (Array.isArray(c))
+ c = 'DIR'
+
+ // It exists, but maybe not how we need it
+ if (!needDir || c === 'DIR')
+ return c
+
+ if (needDir && c === 'FILE')
+ return false
+
+ // otherwise we have to stat, because maybe c=true
+ // if we know it exists, but not what it is.
+ }
+
+ var exists
+ var stat = this.statCache[abs]
+ if (!stat) {
+ var lstat
+ try {
+ lstat = fs.lstatSync(abs)
+ } catch (er) {
+ if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
+ this.statCache[abs] = false
+ return false
+ }
+ }
+
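+  // de-reference symlinks: stat the target, falling back to the
+  // lstat result if the target can't be statted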
+ if (lstat && lstat.isSymbolicLink()) {
+ try {
+ stat = fs.statSync(abs)
+ } catch (er) {
+ stat = lstat
+ }
+ } else {
+ stat = lstat
+ }
+ }
+
+ this.statCache[abs] = stat
+
+ var c = true
+ if (stat)
+ c = stat.isDirectory() ? 'DIR' : 'FILE'
+
+ this.cache[abs] = this.cache[abs] || c
+
+ if (needDir && c === 'FILE')
+ return false
+
+ return c
+}
+
+GlobSync.prototype._mark = function (p) {
+ return common.mark(this, p)
+}
+
+GlobSync.prototype._makeAbs = function (f) {
+ return common.makeAbs(this, f)
+}
diff --git a/node_modules/@npmcli/map-workspaces/package.json b/node_modules/@npmcli/map-workspaces/package.json
new file mode 100644
index 000000000..3ee1fac64
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/package.json
@@ -0,0 +1,95 @@
+{
+ "_from": "@npmcli/map-workspaces@0.0.0-pre.1",
+ "_id": "@npmcli/map-workspaces@0.0.0-pre.1",
+ "_inBundle": false,
+ "_integrity": "sha512-IovEVdr17hW/Stt0kpPjz1r0ZxRX3RGah7ww3tQpi5NtyOapJwbUffWuWETyQkOjud5soC45mnjOOBtfTggtng==",
+ "_location": "/@npmcli/map-workspaces",
+ "_phantomChildren": {
+ "fs.realpath": "1.0.0",
+ "inflight": "1.0.6",
+ "inherits": "2.0.4",
+ "minimatch": "3.0.4",
+ "once": "1.4.0",
+ "path-is-absolute": "1.0.1"
+ },
+ "_requested": {
+ "type": "version",
+ "registry": true,
+ "raw": "@npmcli/map-workspaces@0.0.0-pre.1",
+ "name": "@npmcli/map-workspaces",
+ "escapedName": "@npmcli%2fmap-workspaces",
+ "scope": "@npmcli",
+ "rawSpec": "0.0.0-pre.1",
+ "saveSpec": null,
+ "fetchSpec": "0.0.0-pre.1"
+ },
+ "_requiredBy": [
+ "/@npmcli/arborist"
+ ],
+ "_resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-0.0.0-pre.1.tgz",
+ "_shasum": "b2c38d9a78bf38e799f66d14453e7c9b72928132",
+ "_spec": "@npmcli/map-workspaces@0.0.0-pre.1",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/arborist",
+ "author": {
+ "name": "npm Inc.",
+ "email": "support@npmjs.com"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/map-workspaces/issues"
+ },
+ "bundleDependencies": false,
+ "contributors": [
+ {
+ "name": "Ruy Adorno",
+ "url": "https://ruyadorno.com"
+ }
+ ],
+ "dependencies": {
+ "@npmcli/name-from-folder": "^1.0.1",
+ "glob": "^7.1.6",
+ "minimatch": "^3.0.4",
+ "read-package-json-fast": "^1.1.3"
+ },
+ "deprecated": false,
+ "description": "Retrieves a name:pathname Map for a given workspaces config",
+ "devDependencies": {
+ "require-inject": "^1.4.4",
+ "standard": "^14.3.1",
+ "tap": "^14.10.7"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/npm/map-workspaces#readme",
+ "keywords": [
+ "npm",
+ "npmcli",
+ "libnpm",
+ "cli",
+ "workspaces",
+ "map-workspaces"
+ ],
+ "license": "ISC",
+ "name": "@npmcli/map-workspaces",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/map-workspaces.git"
+ },
+ "scripts": {
+ "lint": "standard",
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags",
+ "preversion": "npm test",
+ "snap": "tap test*.js",
+ "test": "tap test*.js"
+ },
+ "standard": {
+ "ignore": [
+ "/tap-snapshots/"
+ ]
+ },
+ "tap": {
+ "check-coverage": true
+ },
+ "version": "0.0.0-pre.1"
+}
diff --git a/node_modules/@npmcli/name-from-folder/LICENSE b/node_modules/@npmcli/name-from-folder/LICENSE
new file mode 100644
index 000000000..d24a9fca7
--- /dev/null
+++ b/node_modules/@npmcli/name-from-folder/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL WARRANTIES WITH REGARD
+TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
+SOFTWARE.
diff --git a/node_modules/@npmcli/name-from-folder/README.md b/node_modules/@npmcli/name-from-folder/README.md
new file mode 100644
index 000000000..0735ca28a
--- /dev/null
+++ b/node_modules/@npmcli/name-from-folder/README.md
@@ -0,0 +1,14 @@
+# @npmcli/name-from-folder
+
+Get the package name from a folder path, including the scope if the
+basename of the dirname starts with `@`.
+
+For a path like `/x/y/z/@scope/pkg` it'll return `@scope/pkg`. If the path
+name is something like `/x/y/z/pkg`, then it'll return `pkg`.
+
+## USAGE
+
+```js
+const nameFromFolder = require('@npmcli/name-from-folder')
+const name = nameFromFolder('/some/folder/path')
+```
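+
+For example, with the paths from the description above:
+
+```js
+nameFromFolder('/x/y/z/@scope/pkg') // => '@scope/pkg'
+nameFromFolder('/x/y/z/pkg')        // => 'pkg'
+```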
diff --git a/node_modules/@npmcli/arborist/lib/name-from-folder.js b/node_modules/@npmcli/name-from-folder/index.js
index afb1dbb76..afb1dbb76 100644
--- a/node_modules/@npmcli/arborist/lib/name-from-folder.js
+++ b/node_modules/@npmcli/name-from-folder/index.js
diff --git a/node_modules/@npmcli/name-from-folder/package.json b/node_modules/@npmcli/name-from-folder/package.json
new file mode 100644
index 000000000..c61d05ae5
--- /dev/null
+++ b/node_modules/@npmcli/name-from-folder/package.json
@@ -0,0 +1,61 @@
+{
+ "_from": "@npmcli/name-from-folder@^1.0.1",
+ "_id": "@npmcli/name-from-folder@1.0.1",
+ "_inBundle": false,
+ "_integrity": "sha512-qq3oEfcLFwNfEYOQ8HLimRGKlD8WSeGEdtUa7hmzpR8Sa7haL1KVQrvgO6wqMjhWFFVjgtrh1gIxDz+P8sjUaA==",
+ "_location": "/@npmcli/name-from-folder",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "@npmcli/name-from-folder@^1.0.1",
+ "name": "@npmcli/name-from-folder",
+ "escapedName": "@npmcli%2fname-from-folder",
+ "scope": "@npmcli",
+ "rawSpec": "^1.0.1",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.1"
+ },
+ "_requiredBy": [
+ "/@npmcli/arborist"
+ ],
+ "_resolved": "https://registry.npmjs.org/@npmcli/name-from-folder/-/name-from-folder-1.0.1.tgz",
+ "_shasum": "77ecd0a4fcb772ba6fe927e2e2e155fbec2e6b1a",
+ "_spec": "@npmcli/name-from-folder@^1.0.1",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/arborist",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "https://izs.me"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/name-from-folder/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "Get the package name from a folder path",
+ "devDependencies": {
+ "tap": "^14.10.7"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/npm/name-from-folder#readme",
+ "license": "ISC",
+ "name": "@npmcli/name-from-folder",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/name-from-folder.git"
+ },
+ "scripts": {
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags",
+ "preversion": "npm test",
+ "snap": "tap",
+ "test": "tap"
+ },
+ "tap": {
+ "check-coverage": true
+ },
+ "version": "1.0.1"
+}
diff --git a/node_modules/@npmcli/promise-spawn/README.md b/node_modules/@npmcli/promise-spawn/README.md
index 71e517dc4..3b604f2a7 100644
--- a/node_modules/@npmcli/promise-spawn/README.md
+++ b/node_modules/@npmcli/promise-spawn/README.md
@@ -45,6 +45,10 @@ Result or error will be decorated with the properties in the `extra`
object. You can use this to attach some helpful info about _why_ the
command is being run, if it makes sense for your use case.
+The returned promise is decorated with the `stdin` stream if the process
+is set to pipe from `stdin`. Writing to this stream writes to the
+`stdin` of the spawned process.
+
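+A minimal sketch of writing to the child through the decorated promise
+(the `cat` command here is just an assumption for illustration):
+
+```js
+const promiseSpawn = require('@npmcli/promise-spawn')
+
+// stdio: 'pipe' gives the child a piped stdin, so `p.stdin` is set
+const p = promiseSpawn('cat', [], { stdio: 'pipe' })
+p.stdin.write('hello\n') // written to the spawned process's stdin
+p.stdin.end()
+p.then(({ stdout }) => process.stdout.write(stdout))
+```
+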
#### Options
- `stdioString` Boolean, default `false`. Return stdio/stderr output as
diff --git a/node_modules/@npmcli/promise-spawn/index.js b/node_modules/@npmcli/promise-spawn/index.js
index 24fe2ee15..f977297bb 100644
--- a/node_modules/@npmcli/promise-spawn/index.js
+++ b/node_modules/@npmcli/promise-spawn/index.js
@@ -29,9 +29,10 @@ const stdioResult = (stdout, stderr, {stdioString}) =>
stderr: Buffer.concat(stderr),
}
-const promiseSpawnUid = (cmd, args, opts, extra) =>
- new Promise((res, rej) => {
- const proc = spawn(cmd, args, opts)
+const promiseSpawnUid = (cmd, args, opts, extra) => {
+ let proc
+ const p = new Promise((res, rej) => {
+ proc = spawn(cmd, args, opts)
const stdout = []
const stderr = []
const reject = er => rej(Object.assign(er, {
@@ -65,4 +66,8 @@ const promiseSpawnUid = (cmd, args, opts, extra) =>
})
})
+ p.stdin = proc.stdin
+ return p
+}
+
module.exports = promiseSpawn
diff --git a/node_modules/@npmcli/promise-spawn/package.json b/node_modules/@npmcli/promise-spawn/package.json
index 1ddc7f912..8aa966a15 100644
--- a/node_modules/@npmcli/promise-spawn/package.json
+++ b/node_modules/@npmcli/promise-spawn/package.json
@@ -1,28 +1,29 @@
{
- "_from": "@npmcli/promise-spawn@^1.0.0",
- "_id": "@npmcli/promise-spawn@1.1.0",
+ "_from": "@npmcli/promise-spawn@^1.1.0",
+ "_id": "@npmcli/promise-spawn@1.2.0",
"_inBundle": false,
- "_integrity": "sha512-FwbuYN9KXBkloLeIR3xRgI8dyOdfK/KzaJlChszNuwmUXD1lHXfLlSeo4n4KrKt2udIK9K9/TzlnyCA3ubM2fA==",
+ "_integrity": "sha512-nFtqjVETliApiRdjbYwKwhlSHx2ZMagyj5b9YbNt0BWeeOVxJd47ZVE2u16vxDHyTOZvk+YLV7INwfAE9a2uow==",
"_location": "/@npmcli/promise-spawn",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
- "raw": "@npmcli/promise-spawn@^1.0.0",
+ "raw": "@npmcli/promise-spawn@^1.1.0",
"name": "@npmcli/promise-spawn",
"escapedName": "@npmcli%2fpromise-spawn",
"scope": "@npmcli",
- "rawSpec": "^1.0.0",
+ "rawSpec": "^1.1.0",
"saveSpec": null,
- "fetchSpec": "^1.0.0"
+ "fetchSpec": "^1.1.0"
},
"_requiredBy": [
- "/@npmcli/run-script"
+ "/@npmcli/git",
+ "/pacote"
],
- "_resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-1.1.0.tgz",
- "_shasum": "660009a5c54209142ec7c469c190d212834b6087",
- "_spec": "@npmcli/promise-spawn@^1.0.0",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/run-script",
+ "_resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-1.2.0.tgz",
+ "_shasum": "167d70b926f771c8bd8b9183bfc8b5aec29d7e45",
+ "_spec": "@npmcli/promise-spawn@^1.1.0",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/git",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
@@ -62,5 +63,5 @@
"tap": {
"check-coverage": true
},
- "version": "1.1.0"
+ "version": "1.2.0"
}
diff --git a/node_modules/@npmcli/run-script/README.md b/node_modules/@npmcli/run-script/README.md
index 66db675bb..4571b08c0 100644
--- a/node_modules/@npmcli/run-script/README.md
+++ b/node_modules/@npmcli/run-script/README.md
@@ -68,6 +68,10 @@ Returns a promise that resolves to the result of the execution. Promise
rejects if the execution fails (exits non-zero) or has any other error.
Rejected errors are decorated with the same values as the result object.
+If the stdio options mean that the child will have a piped stdin, then
+that stdin is ended immediately on the child process. If stdin is shared
+with the parent terminal, then it is up to the user to end it, of course.
+
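+A minimal sketch of that behavior (the event name and package path are
+placeholder assumptions):
+
+```js
+const runScript = require('@npmcli/run-script')
+
+// with piped stdio, the child's stdin is ended immediately, so a
+// script that reads from stdin sees EOF right away
+runScript({ event: 'test', path: '/some/pkg', stdio: 'pipe' })
+  .then(({ code }) => console.log('exit code:', code))
+```
+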
### Results
- `code` Process exit code
diff --git a/node_modules/@npmcli/run-script/lib/run-script-pkg.js b/node_modules/@npmcli/run-script/lib/run-script-pkg.js
index 997fec000..5d1eb5430 100644
--- a/node_modules/@npmcli/run-script/lib/run-script-pkg.js
+++ b/node_modules/@npmcli/run-script/lib/run-script-pkg.js
@@ -29,7 +29,7 @@ const runScriptPkg = options => {
console.log(banner(pkg._id, event, cmd))
}
- return promiseSpawn(...makeSpawnArgs({
+ const p = promiseSpawn(...makeSpawnArgs({
event,
path,
scriptShell,
@@ -43,6 +43,9 @@ const runScriptPkg = options => {
pkgid: pkg._id,
path,
})
+ if (p.stdin)
+ p.stdin.end()
+ return p
}
module.exports = runScriptPkg
diff --git a/node_modules/@npmcli/run-script/package.json b/node_modules/@npmcli/run-script/package.json
index 7b5f6d141..4589eb144 100644
--- a/node_modules/@npmcli/run-script/package.json
+++ b/node_modules/@npmcli/run-script/package.json
@@ -1,8 +1,8 @@
{
"_from": "@npmcli/run-script@latest",
- "_id": "@npmcli/run-script@1.3.0",
+ "_id": "@npmcli/run-script@1.3.1",
"_inBundle": false,
- "_integrity": "sha512-fI2iRHMkwLDc9CFtEC77yWVaPxh65PnzyernwFdAp019WVjkSoUKXodNTleGRLNtSy7lyZqaBymlI/rteMMwIg==",
+ "_integrity": "sha512-9Ea57XJjNLtBFRAaiKqqdoqRrL2QkM0vvCbMjPecljhog5IHupStPtZULbl0CoGN00N3lhLWJ4PaIEC0MGjqJw==",
"_location": "/@npmcli/run-script",
"_phantomChildren": {
"chownr": "1.1.3",
@@ -31,10 +31,13 @@
"_requiredBy": [
"#USER",
"/",
- "/@npmcli/arborist"
+ "/@npmcli/arborist",
+ "/libnpmpack",
+ "/libnpmversion",
+ "/pacote"
],
- "_resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-1.3.0.tgz",
- "_shasum": "08d85c4549ead75edbf09c6954bbc50980e71d97",
+ "_resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-1.3.1.tgz",
+ "_shasum": "dc8934646634338b99898614ce083dabcd14edf3",
"_spec": "@npmcli/run-script@latest",
"_where": "/Users/isaacs/dev/npm/cli",
"author": {
@@ -47,7 +50,7 @@
},
"bundleDependencies": false,
"dependencies": {
- "@npmcli/promise-spawn": "^1.0.0",
+ "@npmcli/promise-spawn": "^1.2.0",
"infer-owner": "^1.0.4",
"node-gyp": "^6.1.0",
"read-package-json-fast": "^1.1.3"
@@ -81,5 +84,5 @@
"check-coverage": true,
"coverage-map": "map.js"
},
- "version": "1.3.0"
+ "version": "1.3.1"
}
diff --git a/node_modules/cacache/CHANGELOG.md b/node_modules/cacache/CHANGELOG.md
index b8540a375..80d9315a6 100644
--- a/node_modules/cacache/CHANGELOG.md
+++ b/node_modules/cacache/CHANGELOG.md
@@ -2,6 +2,24 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+### [15.0.3](https://github.com/npm/cacache/compare/v15.0.2...v15.0.3) (2020-04-28)
+
+
+### Bug Fixes
+
+* actually remove move-concurrently dep ([29e6eec](https://github.com/npm/cacache/commit/29e6eec9fee73444ee09daf1c1be06ddd5fe57f6))
+
+### [15.0.2](https://github.com/npm/cacache/compare/v15.0.1...v15.0.2) (2020-04-28)
+
+
+### Bug Fixes
+
+* tacks should be a dev dependency ([93ec158](https://github.com/npm/cacache/commit/93ec15852f0fdf1753ea7f75b4b8926daf8a7565))
+
+## [15.0.1](https://github.com/npm/cacache/compare/v15.0.0...v15.0.1) (2020-04-27)
+
+* **deps:** Use move-file instead of move-file-concurrently. ([92b125](https://github.com/npm/cacache/commit/92b1251a11b9848878b6c0d101b18bd8845acaa6))
+
## [15.0.0](https://github.com/npm/cacache/compare/v14.0.0...v15.0.0) (2020-02-18)
diff --git a/node_modules/cacache/lib/util/move-file.js b/node_modules/cacache/lib/util/move-file.js
index 1b1d81067..b5d7a2534 100644
--- a/node_modules/cacache/lib/util/move-file.js
+++ b/node_modules/cacache/lib/util/move-file.js
@@ -5,7 +5,7 @@ const util = require('util')
const chmod = util.promisify(fs.chmod)
const unlink = util.promisify(fs.unlink)
const stat = util.promisify(fs.stat)
-const move = require('move-concurrently')
+const move = require('move-file')
const pinflight = require('promise-inflight')
module.exports = moveFile
@@ -61,7 +61,8 @@ function moveFile (src, dest) {
throw err
}
// file doesn't already exist! let's try a rename -> copy fallback
- return move(src, dest, { Promise, fs })
+ // only delete if it successfully copies
+ return move(src, dest)
})
})
})
diff --git a/node_modules/libnpmpack/node_modules/chownr/LICENSE b/node_modules/cacache/node_modules/chownr/LICENSE
index 19129e315..19129e315 100644
--- a/node_modules/libnpmpack/node_modules/chownr/LICENSE
+++ b/node_modules/cacache/node_modules/chownr/LICENSE
diff --git a/node_modules/libnpmpack/node_modules/chownr/README.md b/node_modules/cacache/node_modules/chownr/README.md
index 70e9a54a3..70e9a54a3 100644
--- a/node_modules/libnpmpack/node_modules/chownr/README.md
+++ b/node_modules/cacache/node_modules/chownr/README.md
diff --git a/node_modules/libnpmpack/node_modules/chownr/chownr.js b/node_modules/cacache/node_modules/chownr/chownr.js
index 0d4093216..0d4093216 100644
--- a/node_modules/libnpmpack/node_modules/chownr/chownr.js
+++ b/node_modules/cacache/node_modules/chownr/chownr.js
diff --git a/node_modules/libnpmpack/node_modules/chownr/package.json b/node_modules/cacache/node_modules/chownr/package.json
index f6ebbde8f..de3905cc2 100644
--- a/node_modules/libnpmpack/node_modules/chownr/package.json
+++ b/node_modules/cacache/node_modules/chownr/package.json
@@ -1,27 +1,28 @@
{
- "_from": "chownr@^1.1.4",
- "_id": "chownr@1.1.4",
+ "_from": "chownr@^2.0.0",
+ "_id": "chownr@2.0.0",
"_inBundle": false,
- "_integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==",
- "_location": "/libnpmpack/chownr",
+ "_integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==",
+ "_location": "/cacache/chownr",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
- "raw": "chownr@^1.1.4",
+ "raw": "chownr@^2.0.0",
"name": "chownr",
"escapedName": "chownr",
- "rawSpec": "^1.1.4",
+ "rawSpec": "^2.0.0",
"saveSpec": null,
- "fetchSpec": "^1.1.4"
+ "fetchSpec": "^2.0.0"
},
"_requiredBy": [
- "/libnpmpack/pacote"
+ "/cacache",
+ "/cacache/tar"
],
- "_resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
- "_shasum": "6fc9d7b42d32a583596337666e7d08084da2cc6b",
- "_spec": "chownr@^1.1.4",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libnpmpack/node_modules/pacote",
+ "_resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz",
+ "_shasum": "15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece",
+ "_spec": "chownr@^2.0.0",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/cacache",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
@@ -38,6 +39,9 @@
"rimraf": "^2.7.1",
"tap": "^14.10.6"
},
+ "engines": {
+ "node": ">=10"
+ },
"files": [
"chownr.js"
],
@@ -58,5 +62,5 @@
"tap": {
"check-coverage": true
},
- "version": "1.1.4"
+ "version": "2.0.0"
}
diff --git a/node_modules/cacache/node_modules/mkdirp/lib/opts-arg.js b/node_modules/cacache/node_modules/mkdirp/lib/opts-arg.js
index 488bd44c3..2fa4833fa 100644
--- a/node_modules/cacache/node_modules/mkdirp/lib/opts-arg.js
+++ b/node_modules/cacache/node_modules/mkdirp/lib/opts-arg.js
@@ -2,9 +2,9 @@ const { promisify } = require('util')
const fs = require('fs')
const optsArg = opts => {
if (!opts)
- opts = { mode: 0o777 & (~process.umask()), fs }
+ opts = { mode: 0o777, fs }
else if (typeof opts === 'object')
- opts = { mode: 0o777 & (~process.umask()), fs, ...opts }
+ opts = { mode: 0o777, fs, ...opts }
else if (typeof opts === 'number')
opts = { mode: opts, fs }
else if (typeof opts === 'string')
diff --git a/node_modules/cacache/node_modules/mkdirp/package.json b/node_modules/cacache/node_modules/mkdirp/package.json
index 1050f8218..bb46a363b 100644
--- a/node_modules/cacache/node_modules/mkdirp/package.json
+++ b/node_modules/cacache/node_modules/mkdirp/package.json
@@ -1,8 +1,8 @@
{
"_from": "mkdirp@^1.0.3",
- "_id": "mkdirp@1.0.3",
+ "_id": "mkdirp@1.0.4",
"_inBundle": false,
- "_integrity": "sha512-6uCP4Qc0sWsgMLy1EOqqS/3rjDHOEnsStVr/4vtAIK2Y5i2kA7lFFejYrpIyiN9w0pYf4ckeCYT9f1r1P9KX5g==",
+ "_integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
"_location": "/cacache/mkdirp",
"_phantomChildren": {},
"_requested": {
@@ -19,10 +19,10 @@
"/cacache",
"/cacache/tar"
],
- "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.3.tgz",
- "_shasum": "4cf2e30ad45959dddea53ad97d518b6c8205e1ea",
+ "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
+ "_shasum": "3eb5ed62622756d79a5f0e2a221dfebad75c2f7e",
"_spec": "mkdirp@^1.0.3",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/cacache",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/cacache",
"bin": {
"mkdirp": "bin/cmd.js"
},
@@ -34,7 +34,7 @@
"description": "Recursively mkdir, like `mkdir -p`",
"devDependencies": {
"require-inject": "^1.4.4",
- "tap": "^14.10.6"
+ "tap": "^14.10.7"
},
"engines": {
"node": ">=10"
@@ -72,5 +72,5 @@
"check-coverage": true,
"coverage-map": "map.js"
},
- "version": "1.0.3"
+ "version": "1.0.4"
}
diff --git a/node_modules/cacache/node_modules/rimraf/CHANGELOG.md b/node_modules/cacache/node_modules/rimraf/CHANGELOG.md
new file mode 100644
index 000000000..f116f1414
--- /dev/null
+++ b/node_modules/cacache/node_modules/rimraf/CHANGELOG.md
@@ -0,0 +1,65 @@
+# v3.0
+
+- Add `--preserve-root` option to executable (default true)
+- Drop support for Node.js below version 6
+
+# v2.7
+
+- Make `glob` an optional dependency
+
+# 2.6
+
+- Retry on EBUSY on non-windows platforms as well
+- Make `rimraf.sync` 10000% more reliable on Windows
+
+# 2.5
+
+- Handle Windows EPERM when lstat-ing read-only dirs
+- Add glob option to pass options to glob
+
+# 2.4
+
+- Add EPERM to delay/retry loop
+- Add `disableGlob` option
+
+# 2.3
+
+- Make maxBusyTries and emfileWait configurable
+- Handle weird SunOS unlink-dir issue
+- Glob the CLI arg for better Windows support
+
+# 2.2
+
+- Handle ENOENT properly on Windows
+- Allow overriding fs methods
+- Treat EPERM as indicative of non-empty dir
+- Remove optional graceful-fs dep
+- Consistently return null error instead of undefined on success
+- win32: Treat ENOTEMPTY the same as EBUSY
+- Add `rimraf` binary
+
+# 2.1
+
+- Fix SunOS error code for a non-empty directory
+- Try rmdir before readdir
+- Treat EISDIR like EPERM
+- Remove chmod
+- Remove lstat polyfill, node 0.7 is not supported
+
+# 2.0
+
+- Fix myGid call to check process.getgid
+- Simplify the EBUSY backoff logic.
+- Use fs.lstat in node >= 0.7.9
+- Remove gently option
+- remove fiber implementation
+- Delete files that are marked read-only
+
+# 1.0
+
+- Allow ENOENT in sync method
+- Throw when no callback is provided
+- Make opts.gently an absolute path
+- use 'stat' if 'lstat' is not available
+- Consistent error naming, and rethrow non-ENOENT stat errors
+- add fiber implementation
diff --git a/node_modules/cacache/node_modules/rimraf/bin.js b/node_modules/cacache/node_modules/rimraf/bin.js
index 0d1e17be7..023814cc9 100755
--- a/node_modules/cacache/node_modules/rimraf/bin.js
+++ b/node_modules/cacache/node_modules/rimraf/bin.js
@@ -1,11 +1,24 @@
#!/usr/bin/env node
-var rimraf = require('./')
+const rimraf = require('./')
-var help = false
-var dashdash = false
-var noglob = false
-var args = process.argv.slice(2).filter(function(arg) {
+const path = require('path')
+
+const isRoot = arg => /^(\/|[a-zA-Z]:\\)$/.test(path.resolve(arg))
+const filterOutRoot = arg => {
+ const ok = preserveRoot === false || !isRoot(arg)
+ if (!ok) {
+ console.error(`refusing to remove ${arg}`)
+ console.error('Set --no-preserve-root to allow this')
+ }
+ return ok
+}
+
+let help = false
+let dashdash = false
+let noglob = false
+let preserveRoot = true
+const args = process.argv.slice(2).filter(arg => {
if (dashdash)
return !!arg
else if (arg === '--')
@@ -16,35 +29,40 @@ var args = process.argv.slice(2).filter(function(arg) {
noglob = false
else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/))
help = true
+ else if (arg === '--preserve-root')
+ preserveRoot = true
+ else if (arg === '--no-preserve-root')
+ preserveRoot = false
else
return !!arg
-})
+}).filter(arg => !preserveRoot || filterOutRoot(arg))
+
+const go = n => {
+ if (n >= args.length)
+ return
+ const options = noglob ? { glob: false } : {}
+ rimraf(args[n], options, er => {
+ if (er)
+ throw er
+ go(n+1)
+ })
+}
if (help || args.length === 0) {
// If they didn't ask for help, then this is not a "success"
- var log = help ? console.log : console.error
+ const log = help ? console.log : console.error
log('Usage: rimraf <path> [<path> ...]')
log('')
log(' Deletes all files and folders at "path" recursively.')
log('')
log('Options:')
log('')
- log(' -h, --help Display this usage info')
- log(' -G, --no-glob Do not expand glob patterns in arguments')
- log(' -g, --glob Expand glob patterns in arguments (default)')
+ log(' -h, --help Display this usage info')
+ log(' -G, --no-glob Do not expand glob patterns in arguments')
+ log(' -g, --glob Expand glob patterns in arguments (default)')
+ log(' --preserve-root Do not remove \'/\' (default)')
+ log(' --no-preserve-root Do not treat \'/\' specially')
+ log(' -- Stop parsing flags')
process.exit(help ? 0 : 1)
} else
go(0)
-
-function go (n) {
- if (n >= args.length)
- return
- var options = {}
- if (noglob)
- options = { glob: false }
- rimraf(args[n], options, function (er) {
- if (er)
- throw er
- go(n+1)
- })
-}
diff --git a/node_modules/cacache/node_modules/rimraf/package.json b/node_modules/cacache/node_modules/rimraf/package.json
index 3c36faa09..98bb8bbfb 100644
--- a/node_modules/cacache/node_modules/rimraf/package.json
+++ b/node_modules/cacache/node_modules/rimraf/package.json
@@ -1,27 +1,27 @@
{
- "_from": "rimraf@^2.7.1",
- "_id": "rimraf@2.7.1",
+ "_from": "rimraf@^3.0.2",
+ "_id": "rimraf@3.0.2",
"_inBundle": false,
- "_integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
+ "_integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
"_location": "/cacache/rimraf",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
- "raw": "rimraf@^2.7.1",
+ "raw": "rimraf@^3.0.2",
"name": "rimraf",
"escapedName": "rimraf",
- "rawSpec": "^2.7.1",
+ "rawSpec": "^3.0.2",
"saveSpec": null,
- "fetchSpec": "^2.7.1"
+ "fetchSpec": "^3.0.2"
},
"_requiredBy": [
"/cacache"
],
- "_resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
- "_shasum": "35797f13a7fdadc566142c29d4f07ccad483e3ec",
- "_spec": "rimraf@^2.7.1",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/cacache",
+ "_resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
+ "_shasum": "f1a5402ba6220ad52cc1282bac1ae3aa49fd061a",
+ "_spec": "rimraf@^3.0.2",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/cacache",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
@@ -49,6 +49,9 @@
"bin.js",
"rimraf.js"
],
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ },
"homepage": "https://github.com/isaacs/rimraf#readme",
"license": "ISC",
"main": "rimraf.js",
@@ -58,10 +61,10 @@
"url": "git://github.com/isaacs/rimraf.git"
},
"scripts": {
- "postpublish": "git push origin --all; git push origin --tags",
+ "postpublish": "git push origin --follow-tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test/*.js"
},
- "version": "2.7.1"
+ "version": "3.0.2"
}
diff --git a/node_modules/cacache/node_modules/rimraf/rimraf.js b/node_modules/cacache/node_modules/rimraf/rimraf.js
index a90ad029f..34da4171d 100644
--- a/node_modules/cacache/node_modules/rimraf/rimraf.js
+++ b/node_modules/cacache/node_modules/rimraf/rimraf.js
@@ -1,29 +1,25 @@
-module.exports = rimraf
-rimraf.sync = rimrafSync
-
-var assert = require("assert")
-var path = require("path")
-var fs = require("fs")
-var glob = undefined
+const assert = require("assert")
+const path = require("path")
+const fs = require("fs")
+let glob = undefined
try {
glob = require("glob")
} catch (_err) {
// treat glob as optional.
}
-var _0666 = parseInt('666', 8)
-var defaultGlobOpts = {
+const defaultGlobOpts = {
nosort: true,
silent: true
}
// for EMFILE handling
-var timeout = 0
+let timeout = 0
-var isWindows = (process.platform === "win32")
+const isWindows = (process.platform === "win32")
-function defaults (options) {
- var methods = [
+const defaults = options => {
+ const methods = [
'unlink',
'chmod',
'stat',
@@ -31,7 +27,7 @@ function defaults (options) {
'rmdir',
'readdir'
]
- methods.forEach(function(m) {
+ methods.forEach(m => {
options[m] = options[m] || fs[m]
m = m + 'Sync'
options[m] = options[m] || fs[m]
@@ -49,7 +45,7 @@ function defaults (options) {
options.glob = options.glob || defaultGlobOpts
}
-function rimraf (p, options, cb) {
+const rimraf = (p, options, cb) => {
if (typeof options === 'function') {
cb = options
options = {}
@@ -63,27 +59,17 @@ function rimraf (p, options, cb) {
defaults(options)
- var busyTries = 0
- var errState = null
- var n = 0
-
- if (options.disableGlob || !glob.hasMagic(p))
- return afterGlob(null, [p])
-
- options.lstat(p, function (er, stat) {
- if (!er)
- return afterGlob(null, [p])
+ let busyTries = 0
+ let errState = null
+ let n = 0
- glob(p, options.glob, afterGlob)
- })
-
- function next (er) {
+ const next = (er) => {
errState = errState || er
if (--n === 0)
cb(errState)
}
- function afterGlob (er, results) {
+ const afterGlob = (er, results) => {
if (er)
return cb(er)
@@ -91,24 +77,19 @@ function rimraf (p, options, cb) {
if (n === 0)
return cb()
- results.forEach(function (p) {
- rimraf_(p, options, function CB (er) {
+ results.forEach(p => {
+ const CB = (er) => {
if (er) {
if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") &&
busyTries < options.maxBusyTries) {
busyTries ++
- var time = busyTries * 100
// try again, with the same exact callback as this one.
- return setTimeout(function () {
- rimraf_(p, options, CB)
- }, time)
+ return setTimeout(() => rimraf_(p, options, CB), busyTries * 100)
}
// this one won't happen if graceful-fs is used.
if (er.code === "EMFILE" && timeout < options.emfileWait) {
- return setTimeout(function () {
- rimraf_(p, options, CB)
- }, timeout ++)
+ return setTimeout(() => rimraf_(p, options, CB), timeout ++)
}
// already gone
@@ -117,9 +98,21 @@ function rimraf (p, options, cb) {
timeout = 0
next(er)
- })
+ }
+ rimraf_(p, options, CB)
})
}
+
+ if (options.disableGlob || !glob.hasMagic(p))
+ return afterGlob(null, [p])
+
+ options.lstat(p, (er, stat) => {
+ if (!er)
+ return afterGlob(null, [p])
+
+ glob(p, options.glob, afterGlob)
+ })
+
}
// Two possible strategies.
@@ -133,14 +126,14 @@ function rimraf (p, options, cb) {
//
// If anyone ever complains about this, then I guess the strategy could
// be made configurable somehow. But until then, YAGNI.
-function rimraf_ (p, options, cb) {
+const rimraf_ = (p, options, cb) => {
assert(p)
assert(options)
assert(typeof cb === 'function')
// sunos lets the root user unlink directories, which is... weird.
// so we have to lstat here and make sure it's not a dir.
- options.lstat(p, function (er, st) {
+ options.lstat(p, (er, st) => {
if (er && er.code === "ENOENT")
return cb(null)
@@ -151,7 +144,7 @@ function rimraf_ (p, options, cb) {
if (st && st.isDirectory())
return rmdir(p, options, er, cb)
- options.unlink(p, function (er) {
+ options.unlink(p, er => {
if (er) {
if (er.code === "ENOENT")
return cb(null)
@@ -167,18 +160,16 @@ function rimraf_ (p, options, cb) {
})
}
-function fixWinEPERM (p, options, er, cb) {
+const fixWinEPERM = (p, options, er, cb) => {
assert(p)
assert(options)
assert(typeof cb === 'function')
- if (er)
- assert(er instanceof Error)
- options.chmod(p, _0666, function (er2) {
+ options.chmod(p, 0o666, er2 => {
if (er2)
cb(er2.code === "ENOENT" ? null : er)
else
- options.stat(p, function(er3, stats) {
+ options.stat(p, (er3, stats) => {
if (er3)
cb(er3.code === "ENOENT" ? null : er)
else if (stats.isDirectory())
@@ -189,14 +180,12 @@ function fixWinEPERM (p, options, er, cb) {
})
}
-function fixWinEPERMSync (p, options, er) {
+const fixWinEPERMSync = (p, options, er) => {
assert(p)
assert(options)
- if (er)
- assert(er instanceof Error)
try {
- options.chmodSync(p, _0666)
+ options.chmodSync(p, 0o666)
} catch (er2) {
if (er2.code === "ENOENT")
return
@@ -204,8 +193,9 @@ function fixWinEPERMSync (p, options, er) {
throw er
}
+ let stats
try {
- var stats = options.statSync(p)
+ stats = options.statSync(p)
} catch (er3) {
if (er3.code === "ENOENT")
return
@@ -219,17 +209,15 @@ function fixWinEPERMSync (p, options, er) {
options.unlinkSync(p)
}
-function rmdir (p, options, originalEr, cb) {
+const rmdir = (p, options, originalEr, cb) => {
assert(p)
assert(options)
- if (originalEr)
- assert(originalEr instanceof Error)
assert(typeof cb === 'function')
// try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
// if we guessed wrong, and it's not a directory, then
// raise the original error.
- options.rmdir(p, function (er) {
+ options.rmdir(p, er => {
if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM"))
rmkids(p, options, cb)
else if (er && er.code === "ENOTDIR")
@@ -239,20 +227,20 @@ function rmdir (p, options, originalEr, cb) {
})
}
-function rmkids(p, options, cb) {
+const rmkids = (p, options, cb) => {
assert(p)
assert(options)
assert(typeof cb === 'function')
- options.readdir(p, function (er, files) {
+ options.readdir(p, (er, files) => {
if (er)
return cb(er)
- var n = files.length
+ let n = files.length
if (n === 0)
return options.rmdir(p, cb)
- var errState
- files.forEach(function (f) {
- rimraf(path.join(p, f), options, function (er) {
+ let errState
+ files.forEach(f => {
+ rimraf(path.join(p, f), options, er => {
if (errState)
return
if (er)
@@ -267,7 +255,7 @@ function rmkids(p, options, cb) {
// this looks simpler, and is strictly *faster*, but will
// tie up the JavaScript thread and fail on excessively
// deep directory trees.
-function rimrafSync (p, options) {
+const rimrafSync = (p, options) => {
options = options || {}
defaults(options)
@@ -276,7 +264,7 @@ function rimrafSync (p, options) {
assert(options, 'rimraf: missing options')
assert.equal(typeof options, 'object', 'rimraf: options should be object')
- var results
+ let results
if (options.disableGlob || !glob.hasMagic(p)) {
results = [p]
@@ -292,11 +280,12 @@ function rimrafSync (p, options) {
if (!results.length)
return
- for (var i = 0; i < results.length; i++) {
- var p = results[i]
+ for (let i = 0; i < results.length; i++) {
+ const p = results[i]
+ let st
try {
- var st = options.lstatSync(p)
+ st = options.lstatSync(p)
} catch (er) {
if (er.code === "ENOENT")
return
@@ -325,11 +314,9 @@ function rimrafSync (p, options) {
}
}
-function rmdirSync (p, options, originalEr) {
+const rmdirSync = (p, options, originalEr) => {
assert(p)
assert(options)
- if (originalEr)
- assert(originalEr instanceof Error)
try {
options.rmdirSync(p)
@@ -343,12 +330,10 @@ function rmdirSync (p, options, originalEr) {
}
}
-function rmkidsSync (p, options) {
+const rmkidsSync = (p, options) => {
assert(p)
assert(options)
- options.readdirSync(p).forEach(function (f) {
- rimrafSync(path.join(p, f), options)
- })
+ options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))
// We only end up here once we got ENOTEMPTY at least once, and
// at this point, we are guaranteed to have removed all the kids.
@@ -356,12 +341,12 @@ function rmkidsSync (p, options) {
// try really hard to delete stuff on windows, because it has a
// PROFOUNDLY annoying habit of not closing handles promptly when
// files are deleted, resulting in spurious ENOTEMPTY errors.
- var retries = isWindows ? 100 : 1
- var i = 0
+ const retries = isWindows ? 100 : 1
+ let i = 0
do {
- var threw = true
+ let threw = true
try {
- var ret = options.rmdirSync(p, options)
+ const ret = options.rmdirSync(p, options)
threw = false
return ret
} finally {
@@ -370,3 +355,6 @@ function rmkidsSync (p, options) {
}
} while (true)
}
+
+module.exports = rimraf
+rimraf.sync = rimrafSync
diff --git a/node_modules/cacache/node_modules/tar/CHANGELOG.md b/node_modules/cacache/node_modules/tar/CHANGELOG.md
new file mode 100644
index 000000000..7058fe253
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/CHANGELOG.md
@@ -0,0 +1,68 @@
+# Changelog
+
+## 6.0
+
+- Drop support for node 6 and 8
+- fix symlinks and hardlinks on windows being packed with `\`-style path
+ targets
+
+## 5.0
+
+- Address unpack race conditions using path reservations
+- Change large-numbers errors from TypeError to Error
+- Add `TAR_*` error codes
+- Raise `TAR_BAD_ARCHIVE` warning/error when there are no valid entries
+ found in an archive
+- do not treat ignored entries as an invalid archive
+- drop support for node v4
+- unpack: conditionally use a file mapping to write files on Windows
+- Set more portable 'mode' value in portable mode
+- Set `portable` gzip option in portable mode
+
+## 4.4
+
+- Add 'mtime' option to tar creation to force mtime
+- unpack: only reuse file fs entries if nlink = 1
+- unpack: rename before unlinking files on Windows
+- Fix encoding/decoding of base-256 numbers
+- Use `stat` instead of `lstat` when checking CWD
+- Always provide a callback to fs.close()
+
+## 4.3
+
+- Add 'transform' unpack option
+
+## 4.2
+
+- Fail when zlib fails
+
+## 4.1
+
+- Add noMtime flag for tar creation
+
+## 4.0
+
+- unpack: raise error if cwd is missing or not a dir
+- pack: don't drop dots from dotfiles when prefixing
+
+## 3.1
+
+- Support `@file.tar` as an entry argument to copy entries from one tar
+ file to another.
+- Add `noPax` option
+- `noResume` option for tar.t
+- win32: convert `>|<?:` chars to windows-friendly form
+- Exclude mtime for dirs in portable mode
+
+## 3.0
+
+- Minipass-based implementation
+- Entirely new API surface, `tar.c()`, `tar.x()` etc., much closer to
+ system tar semantics
+- Massive performance improvement
+- Require node 4.x and higher
+
+## 0.x, 1.x, 2.x - 2011-2014
+
+- fstream-based implementation
+- slow and kinda bad, but better than npm shelling out to the system `tar`
diff --git a/node_modules/libnpmpack/node_modules/rimraf/LICENSE b/node_modules/cacache/node_modules/tar/LICENSE
index 19129e315..19129e315 100644
--- a/node_modules/libnpmpack/node_modules/rimraf/LICENSE
+++ b/node_modules/cacache/node_modules/tar/LICENSE
diff --git a/node_modules/cacache/node_modules/tar/README.md b/node_modules/cacache/node_modules/tar/README.md
new file mode 100644
index 000000000..5e635e622
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/README.md
@@ -0,0 +1,1031 @@
+# node-tar
+
+[![Build Status](https://travis-ci.org/npm/node-tar.svg?branch=master)](https://travis-ci.org/npm/node-tar)
+
+[Fast](./benchmarks) and full-featured Tar for Node.js
+
+The API is designed to mimic the behavior of `tar(1)` on unix systems.
+If you are familiar with how tar works, most of this will hopefully be
+straightforward for you. If not, then hopefully this module can teach
+you useful unix skills that may come in handy someday :)
+
+## Background
+
+A "tar file" or "tarball" is an archive of file system entries
+(directories, files, links, etc.). The name comes from "tape archive".
+If you run `man tar` on almost any Unix command line, you'll learn
+quite a bit about what it can do, and its history.
+
+Tar has 5 main top-level commands:
+
+* `c` Create an archive
+* `r` Replace entries within an archive
+* `u` Update entries within an archive (ie, replace if they're newer)
+* `t` List out the contents of an archive
+* `x` Extract an archive to disk
+
+The other flags and options modify how this top level function works.
+
+## High-Level API
+
+These 5 functions are the high-level API. All of them have a
+single-character name (for unix nerds familiar with `tar(1)`) as well
+as a long name (for everyone else).
+
+All the high-level functions take the following arguments, all three
+of which are optional and may be omitted.
+
+1. `options` - An optional object specifying various options
+2. `paths` - An array of paths to add or extract
+3. `callback` - Called when the command is completed, if async. (If
+ sync or no file specified, providing a callback throws a
+ `TypeError`.)
+
+If the command is sync (ie, if `options.sync=true`), then the
+callback is not allowed, since the action will be completed immediately.
+
+If a `file` argument is specified, and the command is async, then a
+`Promise` is returned. In this case, a callback may also be provided,
+which is called when the command is completed.
+
+If a `file` option is not specified, then a stream is returned. For
+`create`, this is a readable stream of the generated archive. For
+`list` and `extract` this is a writable stream that an archive should
+be written into. If a file is not specified, then a callback is not
+allowed, because you're already getting a stream to work with.
+
+`replace` and `update` only work on existing archives, and so require
+a `file` argument.
+
+Sync commands without a file argument return a stream that acts on its
+input immediately in the same tick. For readable streams, this means
+that all of the data is immediately available by calling
+`stream.read()`. For writable streams, the input is acted upon as soon
+as it is provided, but this can be at any time.
+
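+For example, a minimal sketch of the sync-stream behavior (the
+`./files` path is an assumption for illustration):
+
+```js
+const tar = require('tar')
+
+// sync create with no `file` option: the data is ready in the same tick
+const stream = tar.c({ sync: true }, ['./files'])
+const data = stream.read() // the full tarball as a Buffer
+```
+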
+### Warnings and Errors
+
+Tar emits warnings and errors for recoverable and unrecoverable situations,
+respectively. In many cases, a warning only affects a single entry in an
+archive, or is simply informing you that it's modifying an entry to comply
+with the settings provided.
+
+Unrecoverable warnings will always raise an error (ie, emit `'error'` on
+streaming actions, throw for non-streaming sync actions, reject the
+returned Promise for non-streaming async operations, or call a provided
+callback with an `Error` as the first argument). Recoverable warnings will
+raise an error only if `strict: true` is set in the options.
+
+Respond to (recoverable) warnings by listening to the `warn` event.
+Handlers receive 3 arguments (see the sketch after this list):
+
+- `code` String. One of the error codes below. This may not match
+ `data.code`, which preserves the original error code from fs and zlib.
+- `message` String. More details about the error.
+- `data` Metadata about the error. An `Error` object for errors raised by
+  fs and zlib. All fields are attached to errors raised by tar. Typically
+ contains the following fields, as relevant:
+ - `tarCode` The tar error code.
+ - `code` Either the tar error code, or the error code set by the
+ underlying system.
+ - `file` The archive file being read or written.
+ - `cwd` Working directory for creation and extraction operations.
+ - `entry` The entry object (if it could be created) for `TAR_ENTRY_INFO`,
+ `TAR_ENTRY_INVALID`, and `TAR_ENTRY_ERROR` warnings.
+ - `header` The header object (if it could be created, and the entry could
+ not be created) for `TAR_ENTRY_INFO` and `TAR_ENTRY_INVALID` warnings.
+ - `recoverable` Boolean. If `false`, then the warning will emit an
+ `error`, even in non-strict mode.
+
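+A minimal sketch of such a handler (the archive name is a placeholder);
+note that streaming actions without a `file` option return the stream
+that emits `'warn'`:
+
+```js
+const fs = require('fs')
+const tar = require('tar')
+
+const parser = tar.t() // no `file` option, so we get the stream itself
+parser.on('warn', (code, message, data) => {
+  console.error('tar warning %s: %s (tarCode: %s)',
+    code, message, data.tarCode)
+})
+fs.createReadStream('archive.tgz').pipe(parser)
+```
+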
+#### Error Codes
+
+* `TAR_ENTRY_INFO` An informative error indicating that an entry is being
+ modified, but otherwise processed normally. For example, removing `/` or
+ `C:\` from absolute paths if `preservePaths` is not set.
+
+* `TAR_ENTRY_INVALID` An indication that a given entry is not a valid tar
+ archive entry, and will be skipped. This occurs when:
+ - a checksum fails,
+ - a `linkpath` is missing for a link type, or
+ - a `linkpath` is provided for a non-link type.
+
+  If every entry in a parsed archive raises a `TAR_ENTRY_INVALID` error,
+ then the archive is presumed to be unrecoverably broken, and
+ `TAR_BAD_ARCHIVE` will be raised.
+
+* `TAR_ENTRY_ERROR` The entry appears to be a valid tar archive entry, but
+ encountered an error which prevented it from being unpacked. This occurs
+ when:
+ - an unrecoverable fs error happens during unpacking,
+ - an entry has `..` in the path and `preservePaths` is not set, or
+ - an entry is extracting through a symbolic link, when `preservePaths` is
+ not set.
+
+* `TAR_ENTRY_UNSUPPORTED` An indication that a given entry is
+ a valid archive entry, but of a type that is unsupported, and so will be
+ skipped in archive creation or extracting.
+
+* `TAR_ABORT` When parsing gzip-encoded archives, the parser will
+  abort the parse process and raise a warning for any zlib errors encountered.
+ Aborts are considered unrecoverable for both parsing and unpacking.
+
+* `TAR_BAD_ARCHIVE` The archive file is totally hosed. This can happen for
+ a number of reasons, and always occurs at the end of a parse or extract:
+
+ - An entry body was truncated before seeing the full number of bytes.
+ - The archive contained only invalid entries, indicating that it is
+ likely not an archive, or at least, not an archive this library can
+ parse.
+
+ `TAR_BAD_ARCHIVE` is considered informative for parse operations, but
+ unrecoverable for extraction. Note that, if encountered at the end of an
+ extraction, tar WILL still have extracted as much it could from the
+ archive, so there may be some garbage files to clean up.
+
+Errors that occur deeper in the system (ie, either the filesystem or zlib)
+will have their error codes left intact, and a `tarCode` matching one of
+the above will be added to the warning metadata or the raised error object.
+
+Errors generated by tar will have one of the above codes set as the
+`error.code` field as well, but since errors originating in zlib or fs will
+have their original codes, it's better to read `error.tarCode` if you wish
+to see how tar is handling the issue.
+
+### Examples
+
+The API mimics the `tar(1)` command line functionality, with aliases
+for more human-readable option and function names. The goal is that
+if you know how to use `tar(1)` in Unix, then you know how to use
+`require('tar')` in JavaScript.
+
+To replicate `tar czf my-tarball.tgz files and folders`, you'd do:
+
+```js
+tar.c(
+  {
+    gzip: true, // or an object of gzip options
+    file: 'my-tarball.tgz'
+  },
+  ['some', 'files', 'and', 'folders']
+).then(_ => { /* tarball has been created */ })
+```
+
+To replicate `tar cz files and folders > my-tarball.tgz`, you'd do:
+
+```js
+tar.c( // or tar.create
+  {
+    gzip: true // or an object of gzip options
+  },
+  ['some', 'files', 'and', 'folders']
+).pipe(fs.createWriteStream('my-tarball.tgz'))
+```
+
+To replicate `tar xf my-tarball.tgz` you'd do:
+
+```js
+tar.x( // or tar.extract(
+  {
+    file: 'my-tarball.tgz'
+  }
+).then(_ => { /* tarball has been dumped in cwd */ })
+```
+
+To replicate `cat my-tarball.tgz | tar x -C some-dir --strip=1`:
+
+```js
+fs.createReadStream('my-tarball.tgz').pipe(
+ tar.x({
+ strip: 1,
+ C: 'some-dir' // alias for cwd:'some-dir', also ok
+ })
+)
+```
+
+To replicate `tar tf my-tarball.tgz`, do this:
+
+```js
+tar.t({
+  file: 'my-tarball.tgz',
+  onentry: entry => { /* do whatever with it */ }
+})
+```
+
+To replicate `cat my-tarball.tgz | tar t` do:
+
+```js
+fs.createReadStream('my-tarball.tgz')
+  .pipe(tar.t())
+  .on('entry', entry => { /* do whatever with it */ })
+```
+
+To do anything synchronous, add `sync: true` to the options. Note
+that sync functions don't take a callback and don't return a promise.
+When the function returns, it's already done. Sync methods without a
+file argument return a sync stream, which flushes immediately. But,
+of course, it still won't be done until you `.end()` it.
+
+To filter entries, add `filter: <function>` to the options.
+Tar-creating methods call the filter with `filter(path, stat)`.
+Tar-reading methods (including extraction) call the filter with
+`filter(path, entry)`. The filter is called in the `this`-context of
+the `Pack` or `Unpack` stream object.
+
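+For example, a minimal extraction filter (archive name assumed):
+
+```js
+// unpack only JavaScript files; everything else is skipped
+tar.x({
+  file: 'archive.tgz',
+  filter: (path, entry) => path.endsWith('.js')
+})
+```
+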
+The arguments list to `tar t` and `tar x` specifies a list of filenames
+to extract or list, so it's equivalent to a filter that tests if
+the file is in the list.
+
+For those who _aren't_ fans of tar's single-character command names:
+
+```
+tar.c === tar.create
+tar.r === tar.replace (appends to archive, file is required)
+tar.u === tar.update (appends if newer, file is required)
+tar.x === tar.extract
+tar.t === tar.list
+```
+
+Keep reading for all the command descriptions and options, as well as
+the low-level API that they are built on.
+
+### tar.c(options, fileList, callback) [alias: tar.create]
+
+Create a tarball archive.
+
+The `fileList` is an array of paths to add to the tarball. Adding a
+directory also adds its children recursively.
+
+An entry in `fileList` that starts with an `@` symbol is a tar archive
+whose entries will be added. To add a file that starts with `@`,
+prepend it with `./`.
+
+The following options are supported (a short sketch follows the option
+lists below):
+
+- `file` Write the tarball archive to the specified filename. If this
+ is specified, then the callback will be fired when the file has been
+ written, and a promise will be returned that resolves when the file
+ is written. If a filename is not specified, then a Readable Stream
+ will be returned which will emit the file data. [Alias: `f`]
+- `sync` Act synchronously. If this is set, then any provided file
+ will be fully written after the call to `tar.c`. If this is set,
+ and a file is not provided, then the resulting stream will already
+ have the data ready to `read` or `emit('data')` as soon as you
+ request it.
+- `onwarn` A function that will get called with `(code, message, data)` for
+ any warnings encountered. (See "Warnings and Errors")
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`. [Alias: `C`]
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+ object with settings for `zlib.Gzip()` [Alias: `z`]
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations. Additionally, `mode` is set to a "reasonable
+ default" for most unix systems, based on a `umask` value of `0o22`.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths. [Alias: `P`]
+- `mode` The mode to set on the created file archive
+- `noDirRecurse` Do not recursively archive the contents of
+ directories. [Alias: `n`]
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such. [Alias: `L`, `h`]
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+- `noMtime` Set to true to omit writing `mtime` values for entries.
+ Note that this prevents using other mtime-based features like
+ `tar.update` or the `keepNewer` option with the resulting tar archive.
+ [Alias: `m`, `no-mtime`]
+- `mtime` Set to a `Date` object to force a specific `mtime` for
+ everything added to the archive. Overridden by `noMtime`.
+
+
+The following options are mostly internal, but can be modified in some
+advanced use cases, such as re-using caches between runs.
+
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `readdirCache` A Map object that caches calls to `readdir`.
+- `jobs` A number specifying how many concurrent jobs to run.
+ Defaults to 4.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+
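+A minimal sketch using a few of the options above (file names are
+assumptions for illustration):
+
+```js
+// gzipped archive with system-specific metadata omitted, a forced
+// mtime, and every entry prefixed with 'package/'
+tar.c({
+  gzip: true,
+  file: 'pkg.tgz',
+  portable: true,
+  prefix: 'package/',
+  mtime: new Date('2020-01-01')
+}, ['lib', 'package.json']).then(() => console.log('written'))
+```
+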
+### tar.x(options, fileList, callback) [alias: tar.extract]
+
+Extract a tarball archive.
+
+The `fileList` is an array of paths to extract from the tarball. If
+no paths are provided, then all the entries are extracted.
+
+If the archive is gzipped, then tar will detect this and unzip it.
+
+Note that all directories that are created will be forced to be
+writable, readable, and listable by their owner, to avoid cases where
+a directory prevents extraction of child entries by virtue of its
+mode.
+
+Most extraction errors will cause a `warn` event to be emitted. If
+the `cwd` is missing, or not a directory, then the extraction will
+fail completely.
+
+The following options are supported (a short sketch follows the option
+lists below):
+
+- `cwd` Extract files relative to the specified directory. Defaults
+ to `process.cwd()`. If provided, this must exist and must be a
+ directory. [Alias: `C`]
+- `file` The archive file to extract. If not specified, then a
+ Writable stream is returned where the archive data should be
+ written. [Alias: `f`]
+- `sync` Create files and directories synchronously.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being unpacked. Return `true` to unpack the entry from the
+ archive, or `false` to skip it.
+- `newer` Set to true to keep the existing file on disk if it's newer
+ than the file in the archive. [Alias: `keep-newer`,
+ `keep-newer-files`]
+- `keep` Do not overwrite existing files. In particular, if a file
+ appears more than once in an archive, later copies will not
+ overwrite earlier copies. [Alias: `k`, `keep-existing`]
+- `preservePaths` Allow absolute paths, paths containing `..`, and
+ extracting through symbolic links. By default, `/` is stripped from
+ absolute paths, `..` paths are not extracted, and any file whose
+ location would be modified by a symbolic link is not extracted.
+ [Alias: `P`]
+- `unlink` Unlink files before creating them. Without this option,
+ tar overwrites existing files, which preserves existing hardlinks.
+ With this option, existing hardlinks will be broken, as will any
+ symlink that would affect the location of an extracted file. [Alias:
+ `U`]
+- `strip` Remove the specified number of leading path elements.
+ Pathnames with fewer elements will be silently skipped. Note that
+ the pathname is edited after applying the filter, but before
+ security checks. [Alias: `strip-components`, `stripComponents`]
+- `onwarn` A function that will get called with `(code, message, data)` for
+ any warnings encountered. (See "Warnings and Errors")
+- `preserveOwner` If true, tar will set the `uid` and `gid` of
+ extracted entries to the `uid` and `gid` fields in the archive.
+ This defaults to true when run as root, and false otherwise. If
+ false, then files and directories will be set with the owner and
+ group of the user running the process. This is similar to `-p` in
+ `tar(1)`, but ACLs and other system-specific data is never unpacked
+ in this implementation, and modes are set by default already.
+ [Alias: `p`]
+- `uid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified user id, regardless of the `uid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `gid` option.
+- `gid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified group id, regardless of the `gid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `uid` option.
+- `noMtime` Set to true to omit writing `mtime` value for extracted
+ entries. [Alias: `m`, `no-mtime`]
+- `transform` Provide a function that takes an `entry` object, and
+ returns a stream, or any falsey value. If a stream is provided,
+ then that stream's data will be written instead of the contents of
+ the archive entry. If a falsey value is provided, then the entry is
+ written to disk as normal. (To exclude items from extraction, use
+ the `filter` option described above.)
+- `onentry` A function that gets called with `(entry)` for each entry
+ that passes the filter.
+
+The following options are mostly internal, but can be modified in some
+advanced use cases, such as re-using caches between runs.
+
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `umask` Filter the modes of entries like `process.umask()`.
+- `dmode` Default mode for directories
+- `fmode` Default mode for files
+- `dirCache` A Map object of which directories exist.
+- `maxMetaEntrySize` The maximum size of meta entries that is
+ supported. Defaults to 1 MB.
+
+Note that using an asynchronous stream type with the `transform`
+option will cause undefined behavior in sync extractions.
+[MiniPass](http://npm.im/minipass)-based streams are designed for this
+use case.
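+
+For example (filenames hypothetical):
+
+```js
+const tar = require('tar')
+
+// extract everything from the archive into output-dir
+tar.x({ file: 'my-tarball.tgz', cwd: 'output-dir' }).then(() => {
+  // extraction is complete
+})
+```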
+
+### tar.t(options, fileList, callback) [alias: tar.list]
+
+List the contents of a tarball archive.
+
+The `fileList` is an array of paths to list from the tarball. If
+no paths are provided, then all the entries are listed.
+
+If the archive is gzipped, then tar will detect this and unzip it.
+
+Returns an event emitter that emits `entry` events with
+`tar.ReadEntry` objects. However, they don't emit `'data'` or `'end'`
+events. (If you want to get actual readable entries, use the
+`tar.Parse` class instead.)
+
+The following options are supported:
+
+- `cwd` Extract files relative to the specified directory. Defaults
+ to `process.cwd()`. [Alias: `C`]
+- `file` The archive file to list. If not specified, then a
+ Writable stream is returned where the archive data should be
+ written. [Alias: `f`]
+- `sync` Read the specified file synchronously. (This has no effect
+ when a file option isn't specified, because entries are emitted as
+ fast as they are parsed from the stream anyway.)
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being listed. Return `true` to emit the entry from the
+ archive, or `false` to skip it.
+- `onentry` A function that gets called with `(entry)` for each entry
+ that passes the filter. This is important for when both `file` and
+ `sync` are set, because it will be called synchronously.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noResume` By default, `entry` streams are resumed immediately after
+ the call to `onentry`. Set `noResume: true` to suppress this
+ behavior. Note that by opting into this, the stream will never
+ complete until the entry data is consumed.
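+
+For example (filename hypothetical):
+
+```js
+const tar = require('tar')
+
+// print the path of every entry in the archive
+tar.t({
+  file: 'my-tarball.tgz',
+  onentry: entry => console.log(entry.path)
+})
+```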
+
+### tar.u(options, fileList, callback) [alias: tar.update]
+
+Add files to an archive if they are newer than the entry already in
+the tarball archive.
+
+The `fileList` is an array of paths to add to the tarball. Adding a
+directory also adds its children recursively.
+
+An entry in `fileList` that starts with an `@` symbol is a tar archive
+whose entries will be added. To add a file that starts with `@`,
+prepend it with `./`.
+
+The following options are supported:
+
+- `file` Required. Write the tarball archive to the specified
+ filename. [Alias: `f`]
+- `sync` Act synchronously. If this is set, then any provided file
+  will be fully written after the call to `tar.u`.
+- `onwarn` A function that will get called with `(code, message, data)` for
+ any warnings encountered. (See "Warnings and Errors")
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for adding entries to the
+ archive. Defaults to `process.cwd()`. [Alias: `C`]
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+ object with settings for `zlib.Gzip()` [Alias: `z`]
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations. Additionally, `mode` is set to a "reasonable
+ default" for most unix systems, based on a `umask` value of `0o22`.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths. [Alias: `P`]
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noDirRecurse` Do not recursively archive the contents of
+ directories. [Alias: `n`]
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such. [Alias: `L`, `h`]
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+- `noMtime` Set to true to omit writing `mtime` values for entries.
+ Note that this prevents using other mtime-based features like
+ `tar.update` or the `keepNewer` option with the resulting tar archive.
+ [Alias: `m`, `no-mtime`]
+- `mtime` Set to a `Date` object to force a specific `mtime` for
+ everything added to the archive. Overridden by `noMtime`.
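+
+For example (filenames hypothetical):
+
+```js
+const tar = require('tar')
+
+// re-add some-file.txt only if it is newer than the copy in the archive
+tar.u({ file: 'my-tarball.tar' }, ['some-file.txt'])
+```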
+
+### tar.r(options, fileList, callback) [alias: tar.replace]
+
+Add files to an existing archive. Because later entries override
+earlier entries, this effectively replaces any existing entries.
+
+The `fileList` is an array of paths to add to the tarball. Adding a
+directory also adds its children recursively.
+
+An entry in `fileList` that starts with an `@` symbol is a tar archive
+whose entries will be added. To add a file that starts with `@`,
+prepend it with `./`.
+
+The following options are supported:
+
+- `file` Required. Write the tarball archive to the specified
+ filename. [Alias: `f`]
+- `sync` Act synchronously. If this is set, then any provided file
+  will be fully written after the call to `tar.r`.
+- `onwarn` A function that will get called with `(code, message, data)` for
+ any warnings encountered. (See "Warnings and Errors")
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for adding entries to the
+ archive. Defaults to `process.cwd()`. [Alias: `C`]
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+ object with settings for `zlib.Gzip()` [Alias: `z`]
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations. Additionally, `mode` is set to a "reasonable
+ default" for most unix systems, based on a `umask` value of `0o22`.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths. [Alias: `P`]
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noDirRecurse` Do not recursively archive the contents of
+ directories. [Alias: `n`]
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such. [Alias: `L`, `h`]
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+- `noMtime` Set to true to omit writing `mtime` values for entries.
+ Note that this prevents using other mtime-based features like
+ `tar.update` or the `keepNewer` option with the resulting tar archive.
+ [Alias: `m`, `no-mtime`]
+- `mtime` Set to a `Date` object to force a specific `mtime` for
+ everything added to the archive. Overridden by `noMtime`.
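+
+For example (filenames hypothetical):
+
+```js
+const tar = require('tar')
+
+// append a fresh copy of some-file.txt, superseding any existing entry
+tar.r({ file: 'my-tarball.tar' }, ['some-file.txt'])
+```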
+
+
+## Low-Level API
+
+### class tar.Pack
+
+A readable tar stream.
+
+Has all the standard readable stream interface stuff. `'data'` and
+`'end'` events, `read()` method, `pause()` and `resume()`, etc.
+
+#### constructor(options)
+
+The following options are supported:
+
+- `onwarn` A function that will get called with `(code, message, data)` for
+ any warnings encountered. (See "Warnings and Errors")
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`.
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+ object with settings for `zlib.Gzip()`
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations. Additionally, `mode` is set to a "reasonable
+ default" for most unix systems, based on a `umask` value of `0o22`.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `readdirCache` A Map object that caches calls to `readdir`.
+- `jobs` A number specifying how many concurrent jobs to run.
+ Defaults to 4.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noDirRecurse` Do not recursively archive the contents of
+ directories.
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such.
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+- `noMtime` Set to true to omit writing `mtime` values for entries.
+ Note that this prevents using other mtime-based features like
+ `tar.update` or the `keepNewer` option with the resulting tar archive.
+- `mtime` Set to a `Date` object to force a specific `mtime` for
+ everything added to the archive. Overridden by `noMtime`.
+
+#### add(path)
+
+Adds an entry to the archive. Returns the Pack stream.
+
+#### write(path)
+
+Adds an entry to the archive. Returns true if flushed.
+
+#### end()
+
+Finishes the archive.
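+
+For example, a minimal sketch (paths hypothetical):
+
+```js
+const tar = require('tar')
+const fs = require('fs')
+
+new tar.Pack({ cwd: 'some-dir' })
+  .add('some-file.txt')
+  .end()
+  .pipe(fs.createWriteStream('my-tarball.tar'))
+```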
+
+### class tar.Pack.Sync
+
+Synchronous version of `tar.Pack`.
+
+### class tar.Unpack
+
+A writable stream that unpacks a tar archive onto the file system.
+
+All the normal writable stream stuff is supported. `write()` and
+`end()` methods, `'drain'` events, etc.
+
+Note that all directories that are created will be forced to be
+writable, readable, and listable by their owner, to avoid cases where
+a directory prevents extraction of child entries by virtue of its
+mode.
+
+`'close'` is emitted when it's done writing stuff to the file system.
+
+Most unpack errors will cause a `warn` event to be emitted. If the
+`cwd` is missing, or not a directory, then an error will be emitted.
+
+#### constructor(options)
+
+- `cwd` Extract files relative to the specified directory. Defaults
+ to `process.cwd()`. If provided, this must exist and must be a
+ directory.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being unpacked. Return `true` to unpack the entry from the
+ archive, or `false` to skip it.
+- `newer` Set to true to keep the existing file on disk if it's newer
+ than the file in the archive.
+- `keep` Do not overwrite existing files. In particular, if a file
+ appears more than once in an archive, later copies will not
+ overwrite earlier copies.
+- `preservePaths` Allow absolute paths, paths containing `..`, and
+ extracting through symbolic links. By default, `/` is stripped from
+ absolute paths, `..` paths are not extracted, and any file whose
+ location would be modified by a symbolic link is not extracted.
+- `unlink` Unlink files before creating them. Without this option,
+ tar overwrites existing files, which preserves existing hardlinks.
+ With this option, existing hardlinks will be broken, as will any
+ symlink that would affect the location of an extracted file.
+- `strip` Remove the specified number of leading path elements.
+ Pathnames with fewer elements will be silently skipped. Note that
+ the pathname is edited after applying the filter, but before
+ security checks.
+- `onwarn` A function that will get called with `(code, message, data)` for
+ any warnings encountered. (See "Warnings and Errors")
+- `umask` Filter the modes of entries like `process.umask()`.
+- `dmode` Default mode for directories
+- `fmode` Default mode for files
+- `dirCache` A Map object of which directories exist.
+- `maxMetaEntrySize` The maximum size of meta entries that is
+ supported. Defaults to 1 MB.
+- `preserveOwner` If true, tar will set the `uid` and `gid` of
+ extracted entries to the `uid` and `gid` fields in the archive.
+ This defaults to true when run as root, and false otherwise. If
+ false, then files and directories will be set with the owner and
+ group of the user running the process. This is similar to `-p` in
+ `tar(1)`, but ACLs and other system-specific data is never unpacked
+ in this implementation, and modes are set by default already.
+- `win32` True if on a windows platform. Causes behavior where
+ filenames containing `<|>?` chars are converted to
+ windows-compatible values while being unpacked.
+- `uid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified user id, regardless of the `uid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `gid` option.
+- `gid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified group id, regardless of the `gid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `uid` option.
+- `noMtime` Set to true to omit writing `mtime` value for extracted
+ entries.
+- `transform` Provide a function that takes an `entry` object, and
+ returns a stream, or any falsey value. If a stream is provided,
+ then that stream's data will be written instead of the contents of
+ the archive entry. If a falsey value is provided, then the entry is
+ written to disk as normal. (To exclude items from extraction, use
+ the `filter` option described above.)
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `onentry` A function that gets called with `(entry)` for each entry
+ that passes the filter.
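+
+For example, a minimal sketch (paths hypothetical):
+
+```js
+const tar = require('tar')
+const fs = require('fs')
+
+fs.createReadStream('my-tarball.tar')
+  .pipe(new tar.Unpack({ cwd: 'output-dir' }))
+  .on('close', () => {
+    // all entries have been written to disk
+  })
+```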
+
+### class tar.Unpack.Sync
+
+Synchronous version of `tar.Unpack`.
+
+Note that using an asynchronous stream type with the `transform`
+option will cause undefined behavior in sync unpack streams.
+[MiniPass](http://npm.im/minipass)-based streams are designed for this
+use case.
+
+### class tar.Parse
+
+A writable stream that parses a tar archive stream. All the standard
+writable stream stuff is supported.
+
+If the archive is gzipped, then tar will detect this and unzip it.
+
+Emits `'entry'` events with `tar.ReadEntry` objects, which are
+themselves readable streams that you can pipe wherever.
+
+Each `entry` will not emit until the one before it is flushed through,
+so make sure to either consume the data (with `on('data', ...)` or
+`.pipe(...)`) or throw it away with `.resume()` to keep the stream
+flowing.
+
+#### constructor(options)
+
+Returns an event emitter that emits `entry` events with
+`tar.ReadEntry` objects.
+
+The following options are supported:
+
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being listed. Return `true` to emit the entry from the
+ archive, or `false` to skip it.
+- `onentry` A function that gets called with `(entry)` for each entry
+ that passes the filter.
+- `onwarn` A function that will get called with `(code, message, data)` for
+ any warnings encountered. (See "Warnings and Errors")
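+
+For example (filename hypothetical):
+
+```js
+const tar = require('tar')
+const fs = require('fs')
+
+fs.createReadStream('my-tarball.tgz')
+  .pipe(new tar.Parse())
+  .on('entry', entry => {
+    console.log(entry.path)
+    entry.resume() // discard the body so the stream keeps flowing
+  })
+```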
+
+#### abort(error)
+
+Stop all parsing activities. This is called when there are zlib
+errors. It also emits an unrecoverable warning with the error provided.
+
+### class tar.ReadEntry extends [MiniPass](http://npm.im/minipass)
+
+A representation of an entry that is being read out of a tar archive.
+
+It has the following fields:
+
+- `extended` The extended metadata object provided to the constructor.
+- `globalExtended` The global extended metadata object provided to the
+ constructor.
+- `remain` The number of bytes remaining to be written into the
+ stream.
+- `blockRemain` The number of 512-byte blocks remaining to be written
+ into the stream.
+- `ignore` Whether this entry should be ignored.
+- `meta` True if this represents metadata about the next entry, false
+ if it represents a filesystem object.
+- All the fields from the header, extended header, and global extended
+ header are added to the ReadEntry object. So it has `path`, `type`,
+  `size`, `mode`, and so on.
+
+#### constructor(header, extended, globalExtended)
+
+Create a new ReadEntry object with the specified header, extended
+header, and global extended header values.
+
+### class tar.WriteEntry extends [MiniPass](http://npm.im/minipass)
+
+A representation of an entry that is being written from the file
+system into a tar archive.
+
+Emits data for the Header, and for the Pax Extended Header if one is
+required, as well as any body data.
+
+Creating a WriteEntry for a directory does not also create
+WriteEntry objects for all of the directory contents.
+
+It has the following fields:
+
+- `path` The path field that will be written to the archive. By
+ default, this is also the path from the cwd to the file system
+ object.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations. Additionally, `mode` is set to a "reasonable
+ default" for most unix systems, based on a `umask` value of `0o22`.
+- `myuid` If supported, the uid of the user running the current
+ process.
+- `myuser` The `env.USER` string if set, or `''`. Set as the entry
+ `uname` field if the file's `uid` matches `this.myuid`.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 1 MB.
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`.
+- `absolute` The absolute path to the entry on the filesystem. By
+ default, this is `path.resolve(this.cwd, this.path)`, but it can be
+ overridden explicitly.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `win32` True if on a windows platform. Causes behavior where paths
+ replace `\` with `/` and filenames containing the windows-compatible
+ forms of `<|>?:` characters are converted to actual `<|>?:` characters
+ in the archive.
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+- `noMtime` Set to true to omit writing `mtime` values for entries.
+ Note that this prevents using other mtime-based features like
+ `tar.update` or the `keepNewer` option with the resulting tar archive.
+
+
+#### constructor(path, options)
+
+`path` is the path of the entry as it is written in the archive.
+
+The following options are supported:
+
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations. Additionally, `mode` is set to a "reasonable
+ default" for most unix systems, based on a `umask` value of `0o22`.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 1 MB.
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`.
+- `absolute` The absolute path to the entry on the filesystem. By
+ default, this is `path.resolve(this.cwd, this.path)`, but it can be
+ overridden explicitly.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `win32` True if on a windows platform. Causes behavior where paths
+ replace `\` with `/`.
+- `onwarn` A function that will get called with `(code, message, data)` for
+ any warnings encountered. (See "Warnings and Errors")
+- `noMtime` Set to true to omit writing `mtime` values for entries.
+ Note that this prevents using other mtime-based features like
+ `tar.update` or the `keepNewer` option with the resulting tar archive.
+- `umask` Set to restrict the modes on the entries in the archive,
+ somewhat like how umask works on file creation. Defaults to
+ `process.umask()` on unix systems, or `0o22` on Windows.
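+
+For example, a minimal sketch (paths hypothetical; note that a single
+WriteEntry is not a complete archive, since no EOF blocks are written):
+
+```js
+const tar = require('tar')
+const fs = require('fs')
+
+// emits the header (plus pax header if needed) followed by the body
+new tar.WriteEntry('some-file.txt', { cwd: 'some-dir' })
+  .pipe(fs.createWriteStream('one-entry.bin'))
+```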
+
+#### warn(message, data)
+
+If strict, emit an error with the provided message.
+
+Otherwise, emit a `'warn'` event with the provided message and data.
+
+### class tar.WriteEntry.Sync
+
+Synchronous version of tar.WriteEntry
+
+### class tar.WriteEntry.Tar
+
+A version of tar.WriteEntry that gets its data from a tar.ReadEntry
+instead of from the filesystem.
+
+#### constructor(readEntry, options)
+
+`readEntry` is the entry being read out of another archive.
+
+The following options are supported:
+
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations. Additionally, `mode` is set to a "reasonable
+ default" for most unix systems, based on a `umask` value of `0o22`.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `onwarn` A function that will get called with `(code, message, data)` for
+ any warnings encountered. (See "Warnings and Errors")
+- `noMtime` Set to true to omit writing `mtime` values for entries.
+ Note that this prevents using other mtime-based features like
+ `tar.update` or the `keepNewer` option with the resulting tar archive.
+
+### class tar.Header
+
+A class for reading and writing header blocks.
+
+It has the following fields:
+
+- `nullBlock` True if decoding a block which is entirely composed of
+ `0x00` null bytes. (Useful because tar files are terminated by
+ at least 2 null blocks.)
+- `cksumValid` True if the checksum in the header is valid, false
+ otherwise.
+- `needPax` True if the values, as encoded, will require a Pax
+ extended header.
+- `path` The path of the entry.
+- `mode` The 4 lowest-order octal digits of the file mode. That is,
+ read/write/execute permissions for world, group, and owner, and the
+ setuid, setgid, and sticky bits.
+- `uid` Numeric user id of the file owner
+- `gid` Numeric group id of the file owner
+- `size` Size of the file in bytes
+- `mtime` Modified time of the file
+- `cksum` The checksum of the header. This is generated by adding all
+ the bytes of the header block, treating the checksum field itself as
+ all ascii space characters (that is, `0x20`).
+- `type` The human-readable name of the type of entry this represents,
+ or the alphanumeric key if unknown.
+- `typeKey` The alphanumeric key for the type of entry this header
+ represents.
+- `linkpath` The target of Link and SymbolicLink entries.
+- `uname` Human-readable user name of the file owner
+- `gname` Human-readable group name of the file owner
+- `devmaj` The major portion of the device number. Always `0` for
+ files, directories, and links.
+- `devmin` The minor portion of the device number. Always `0` for
+ files, directories, and links.
+- `atime` File access time.
+- `ctime` File change time.
+
+#### constructor(data, [offset=0])
+
+`data` is optional. It is either a Buffer that should be interpreted
+as a tar Header starting at the specified offset and continuing for
+512 bytes, or a data object of keys and values to set on the header
+object, and eventually encode as a tar Header.
+
+#### decode(block, offset)
+
+Decode the provided buffer starting at the specified offset.
+
+The buffer must contain at least 512 bytes after the specified offset.
+
+#### set(data)
+
+Set the fields in the data object.
+
+#### encode(buffer, offset)
+
+Encode the header fields into the buffer at the specified offset.
+
+Returns `this.needPax` to indicate whether a Pax Extended Header is
+required to properly encode the specified data.
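+
+For example, a minimal sketch (`block` is a hypothetical Buffer of at
+least 512 bytes read from an archive):
+
+```js
+const tar = require('tar')
+
+// decode an existing header block
+const h = new tar.Header(block, 0)
+console.log(h.path, h.type, h.cksumValid)
+
+// build a header from values and encode it into a fresh buffer
+const hdr = new tar.Header({ path: 'hello.txt', type: 'File', size: 0 })
+hdr.encode() // hdr.block is now a 512-byte Buffer
+```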
+
+### class tar.Pax
+
+An object representing a set of key-value pairs in a Pax extended
+header entry.
+
+It has the following fields. Where the same name is used, they have
+the same semantics as the tar.Header field of the same name.
+
+- `global` True if this represents a global extended header, or false
+ if it is for a single entry.
+- `atime`
+- `charset`
+- `comment`
+- `ctime`
+- `gid`
+- `gname`
+- `linkpath`
+- `mtime`
+- `path`
+- `size`
+- `uid`
+- `uname`
+- `dev`
+- `ino`
+- `nlink`
+
+#### constructor(object, global)
+
+Set the fields provided in the object. `global` is a boolean that
+defaults to false.
+
+#### encode()
+
+Return a Buffer containing the header and body for the Pax extended
+header entry, or `null` if there is nothing to encode.
+
+#### encodeBody()
+
+Return a string representing the body of the pax extended header
+entry.
+
+#### encodeField(fieldName)
+
+Return a string representing the key/value encoding for the specified
+fieldName, or `''` if the field is unset.
+
+### tar.Pax.parse(string, extended, global)
+
+Return a new Pax object created by parsing the contents of the string
+provided.
+
+If the `extended` object is set, then also add the fields from that
+object. (This is necessary because multiple metadata entries can
+occur in sequence.)
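+
+For example, a minimal sketch:
+
+```js
+const tar = require('tar')
+
+const pax = new tar.Pax({ path: 'some/extremely/long/path.txt' })
+const buf = pax.encode() // header + body blocks, or null if nothing set
+```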
+
+### tar.types
+
+A translation table for the `type` field in tar headers.
+
+#### tar.types.name.get(code)
+
+Get the human-readable name for a given alphanumeric code.
+
+#### tar.types.code.get(name)
+
+Get the alphanumeric code for a given human-readable name.
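+
+For example:
+
+```js
+const tar = require('tar')
+
+tar.types.name.get('0')    // 'File'
+tar.types.code.get('File') // '0'
+```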
diff --git a/node_modules/cacache/node_modules/tar/index.js b/node_modules/cacache/node_modules/tar/index.js
new file mode 100644
index 000000000..c9ae06e79
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/index.js
@@ -0,0 +1,18 @@
+'use strict'
+
+// high-level commands
+exports.c = exports.create = require('./lib/create.js')
+exports.r = exports.replace = require('./lib/replace.js')
+exports.t = exports.list = require('./lib/list.js')
+exports.u = exports.update = require('./lib/update.js')
+exports.x = exports.extract = require('./lib/extract.js')
+
+// classes
+exports.Pack = require('./lib/pack.js')
+exports.Unpack = require('./lib/unpack.js')
+exports.Parse = require('./lib/parse.js')
+exports.ReadEntry = require('./lib/read-entry.js')
+exports.WriteEntry = require('./lib/write-entry.js')
+exports.Header = require('./lib/header.js')
+exports.Pax = require('./lib/pax.js')
+exports.types = require('./lib/types.js')
diff --git a/node_modules/cacache/node_modules/tar/lib/create.js b/node_modules/cacache/node_modules/tar/lib/create.js
new file mode 100644
index 000000000..a37aa52e6
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/create.js
@@ -0,0 +1,105 @@
+'use strict'
+
+// tar -c
+const hlo = require('./high-level-opt.js')
+
+const Pack = require('./pack.js')
+const fs = require('fs')
+const fsm = require('fs-minipass')
+const t = require('./list.js')
+const path = require('path')
+
+const c = module.exports = (opt_, files, cb) => {
+ if (typeof files === 'function')
+ cb = files
+
+ if (Array.isArray(opt_))
+ files = opt_, opt_ = {}
+
+ if (!files || !Array.isArray(files) || !files.length)
+ throw new TypeError('no files or directories specified')
+
+ files = Array.from(files)
+
+ const opt = hlo(opt_)
+
+ if (opt.sync && typeof cb === 'function')
+ throw new TypeError('callback not supported for sync tar functions')
+
+ if (!opt.file && typeof cb === 'function')
+ throw new TypeError('callback only supported with file option')
+
+ return opt.file && opt.sync ? createFileSync(opt, files)
+ : opt.file ? createFile(opt, files, cb)
+ : opt.sync ? createSync(opt, files)
+ : create(opt, files)
+}
+
+const createFileSync = (opt, files) => {
+ const p = new Pack.Sync(opt)
+ const stream = new fsm.WriteStreamSync(opt.file, {
+ mode: opt.mode || 0o666
+ })
+ p.pipe(stream)
+ addFilesSync(p, files)
+}
+
+const createFile = (opt, files, cb) => {
+ const p = new Pack(opt)
+ const stream = new fsm.WriteStream(opt.file, {
+ mode: opt.mode || 0o666
+ })
+ p.pipe(stream)
+
+ const promise = new Promise((res, rej) => {
+ stream.on('error', rej)
+ stream.on('close', res)
+ p.on('error', rej)
+ })
+
+ addFilesAsync(p, files)
+
+ return cb ? promise.then(cb, cb) : promise
+}
+
+const addFilesSync = (p, files) => {
+ files.forEach(file => {
+ if (file.charAt(0) === '@')
+ t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ sync: true,
+ noResume: true,
+ onentry: entry => p.add(entry)
+ })
+ else
+ p.add(file)
+ })
+ p.end()
+}
+
+const addFilesAsync = (p, files) => {
+ while (files.length) {
+ const file = files.shift()
+ if (file.charAt(0) === '@')
+ return t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ noResume: true,
+ onentry: entry => p.add(entry)
+ }).then(_ => addFilesAsync(p, files))
+ else
+ p.add(file)
+ }
+ p.end()
+}
+
+const createSync = (opt, files) => {
+ const p = new Pack.Sync(opt)
+ addFilesSync(p, files)
+ return p
+}
+
+const create = (opt, files) => {
+ const p = new Pack(opt)
+ addFilesAsync(p, files)
+ return p
+}
diff --git a/node_modules/cacache/node_modules/tar/lib/extract.js b/node_modules/cacache/node_modules/tar/lib/extract.js
new file mode 100644
index 000000000..cbb458a0a
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/extract.js
@@ -0,0 +1,112 @@
+'use strict'
+
+// tar -x
+const hlo = require('./high-level-opt.js')
+const Unpack = require('./unpack.js')
+const fs = require('fs')
+const fsm = require('fs-minipass')
+const path = require('path')
+
+const x = module.exports = (opt_, files, cb) => {
+ if (typeof opt_ === 'function')
+ cb = opt_, files = null, opt_ = {}
+ else if (Array.isArray(opt_))
+ files = opt_, opt_ = {}
+
+ if (typeof files === 'function')
+ cb = files, files = null
+
+ if (!files)
+ files = []
+ else
+ files = Array.from(files)
+
+ const opt = hlo(opt_)
+
+ if (opt.sync && typeof cb === 'function')
+ throw new TypeError('callback not supported for sync tar functions')
+
+ if (!opt.file && typeof cb === 'function')
+ throw new TypeError('callback only supported with file option')
+
+ if (files.length)
+ filesFilter(opt, files)
+
+ return opt.file && opt.sync ? extractFileSync(opt)
+ : opt.file ? extractFile(opt, cb)
+ : opt.sync ? extractSync(opt)
+ : extract(opt)
+}
+
+// construct a filter that limits the file entries listed
+// include child entries if a dir is included
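+// (a path matches if it, or any of its parent directories, was named)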
+const filesFilter = (opt, files) => {
+ const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
+ const filter = opt.filter
+
+ const mapHas = (file, r) => {
+ const root = r || path.parse(file).root || '.'
+ const ret = file === root ? false
+ : map.has(file) ? map.get(file)
+ : mapHas(path.dirname(file), root)
+
+ map.set(file, ret)
+ return ret
+ }
+
+ opt.filter = filter
+ ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
+ : file => mapHas(file.replace(/\/+$/, ''))
+}
+
+const extractFileSync = opt => {
+ const u = new Unpack.Sync(opt)
+
+ const file = opt.file
+ const stat = fs.statSync(file)
+ // This trades a zero-byte read() syscall for a stat
+ // However, it will usually result in less memory allocation
+ const readSize = opt.maxReadSize || 16*1024*1024
+ const stream = new fsm.ReadStreamSync(file, {
+ readSize: readSize,
+ size: stat.size
+ })
+ stream.pipe(u)
+}
+
+const extractFile = (opt, cb) => {
+ const u = new Unpack(opt)
+ const readSize = opt.maxReadSize || 16*1024*1024
+
+ const file = opt.file
+ const p = new Promise((resolve, reject) => {
+ u.on('error', reject)
+ u.on('close', resolve)
+
+ // This trades a zero-byte read() syscall for a stat
+ // However, it will usually result in less memory allocation
+ fs.stat(file, (er, stat) => {
+ if (er)
+ reject(er)
+ else {
+ const stream = new fsm.ReadStream(file, {
+ readSize: readSize,
+ size: stat.size
+ })
+ stream.on('error', reject)
+ stream.pipe(u)
+ }
+ })
+ })
+ return cb ? p.then(cb, cb) : p
+}
+
+const extractSync = opt => {
+ return new Unpack.Sync(opt)
+}
+
+const extract = opt => {
+ return new Unpack(opt)
+}
diff --git a/node_modules/cacache/node_modules/tar/lib/get-write-flag.js b/node_modules/cacache/node_modules/tar/lib/get-write-flag.js
new file mode 100644
index 000000000..e86959996
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/get-write-flag.js
@@ -0,0 +1,20 @@
+// Get the appropriate flag to use for creating files
+// We use fmap on Windows platforms for files less than
+// 512kb. This is a fairly low limit, but avoids making
+// things slower in some cases. Since most of what this
+// library is used for is extracting tarballs of many
+// relatively small files in npm packages and the like,
+// it can be a big boost on Windows platforms.
+// Only supported in Node v12.9.0 and above.
+const platform = process.env.__FAKE_PLATFORM__ || process.platform
+const isWindows = platform === 'win32'
+const fs = global.__FAKE_TESTING_FS__ || require('fs')
+
+/* istanbul ignore next */
+const { O_CREAT, O_TRUNC, O_WRONLY, UV_FS_O_FILEMAP = 0 } = fs.constants
+
+const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP
+const fMapLimit = 512 * 1024
+const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY
+module.exports = !fMapEnabled ? () => 'w'
+ : size => size < fMapLimit ? fMapFlag : 'w'
diff --git a/node_modules/cacache/node_modules/tar/lib/header.js b/node_modules/cacache/node_modules/tar/lib/header.js
new file mode 100644
index 000000000..5d88f6cf8
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/header.js
@@ -0,0 +1,288 @@
+'use strict'
+// parse a 512-byte header block to a data object, or vice-versa
+// encode returns `true` if a pax extended header is needed, because
+// the data could not be faithfully encoded in a simple header.
+// (Also, check header.needPax to see if it needs a pax header.)
+
+const types = require('./types.js')
+const pathModule = require('path').posix
+const large = require('./large-numbers.js')
+
+const SLURP = Symbol('slurp')
+const TYPE = Symbol('type')
+
+class Header {
+ constructor (data, off, ex, gex) {
+ this.cksumValid = false
+ this.needPax = false
+ this.nullBlock = false
+
+ this.block = null
+ this.path = null
+ this.mode = null
+ this.uid = null
+ this.gid = null
+ this.size = null
+ this.mtime = null
+ this.cksum = null
+ this[TYPE] = '0'
+ this.linkpath = null
+ this.uname = null
+ this.gname = null
+ this.devmaj = 0
+ this.devmin = 0
+ this.atime = null
+ this.ctime = null
+
+ if (Buffer.isBuffer(data))
+ this.decode(data, off || 0, ex, gex)
+ else if (data)
+ this.set(data)
+ }
+
+ decode (buf, off, ex, gex) {
+ if (!off)
+ off = 0
+
+ if (!buf || !(buf.length >= off + 512))
+ throw new Error('need 512 bytes for header')
+
+ this.path = decString(buf, off, 100)
+ this.mode = decNumber(buf, off + 100, 8)
+ this.uid = decNumber(buf, off + 108, 8)
+ this.gid = decNumber(buf, off + 116, 8)
+ this.size = decNumber(buf, off + 124, 12)
+ this.mtime = decDate(buf, off + 136, 12)
+ this.cksum = decNumber(buf, off + 148, 12)
+
+ // if we have extended or global extended headers, apply them now
+ // See https://github.com/npm/node-tar/pull/187
+ this[SLURP](ex)
+ this[SLURP](gex, true)
+
+ // old tar versions marked dirs as a file with a trailing /
+ this[TYPE] = decString(buf, off + 156, 1)
+ if (this[TYPE] === '')
+ this[TYPE] = '0'
+ if (this[TYPE] === '0' && this.path.substr(-1) === '/')
+ this[TYPE] = '5'
+
+ // tar implementations sometimes incorrectly put the stat(dir).size
+ // as the size in the tarball, even though Directory entries are
+ // not able to have any body at all. In the very rare chance that
+ // it actually DOES have a body, we weren't going to do anything with
+ // it anyway, and it'll just be a warning about an invalid header.
+ if (this[TYPE] === '5')
+ this.size = 0
+
+ this.linkpath = decString(buf, off + 157, 100)
+ if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') {
+ this.uname = decString(buf, off + 265, 32)
+ this.gname = decString(buf, off + 297, 32)
+ this.devmaj = decNumber(buf, off + 329, 8)
+ this.devmin = decNumber(buf, off + 337, 8)
+ if (buf[off + 475] !== 0) {
+ // definitely a prefix, definitely >130 chars.
+ const prefix = decString(buf, off + 345, 155)
+ this.path = prefix + '/' + this.path
+ } else {
+ const prefix = decString(buf, off + 345, 130)
+ if (prefix)
+ this.path = prefix + '/' + this.path
+ this.atime = decDate(buf, off + 476, 12)
+ this.ctime = decDate(buf, off + 488, 12)
+ }
+ }
+
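+    // the checksum covers the entire 512-byte block, with the 8-byte
+    // cksum field itself counted as ascii space characters (8 * 0x20)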
+ let sum = 8 * 0x20
+ for (let i = off; i < off + 148; i++) {
+ sum += buf[i]
+ }
+ for (let i = off + 156; i < off + 512; i++) {
+ sum += buf[i]
+ }
+ this.cksumValid = sum === this.cksum
+ if (this.cksum === null && sum === 8 * 0x20)
+ this.nullBlock = true
+ }
+
+ [SLURP] (ex, global) {
+ for (let k in ex) {
+ // we slurp in everything except for the path attribute in
+ // a global extended header, because that's weird.
+ if (ex[k] !== null && ex[k] !== undefined &&
+ !(global && k === 'path'))
+ this[k] = ex[k]
+ }
+ }
+
+ encode (buf, off) {
+ if (!buf) {
+ buf = this.block = Buffer.alloc(512)
+ off = 0
+ }
+
+ if (!off)
+ off = 0
+
+ if (!(buf.length >= off + 512))
+ throw new Error('need 512 bytes for header')
+
+ const prefixSize = this.ctime || this.atime ? 130 : 155
+ const split = splitPrefix(this.path || '', prefixSize)
+ const path = split[0]
+ const prefix = split[1]
+ this.needPax = split[2]
+
+ this.needPax = encString(buf, off, 100, path) || this.needPax
+ this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax
+ this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax
+ this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax
+ this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax
+ this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax
+ buf[off + 156] = this[TYPE].charCodeAt(0)
+ this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax
+ buf.write('ustar\u000000', off + 257, 8)
+ this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax
+ this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax
+ this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax
+ this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax
+ this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax
+ if (buf[off + 475] !== 0)
+ this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax
+ else {
+ this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax
+ this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax
+ this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax
+ }
+
+ let sum = 8 * 0x20
+ for (let i = off; i < off + 148; i++) {
+ sum += buf[i]
+ }
+ for (let i = off + 156; i < off + 512; i++) {
+ sum += buf[i]
+ }
+ this.cksum = sum
+ encNumber(buf, off + 148, 8, this.cksum)
+ this.cksumValid = true
+
+ return this.needPax
+ }
+
+ set (data) {
+ for (let i in data) {
+ if (data[i] !== null && data[i] !== undefined)
+ this[i] = data[i]
+ }
+ }
+
+ get type () {
+ return types.name.get(this[TYPE]) || this[TYPE]
+ }
+
+ get typeKey () {
+ return this[TYPE]
+ }
+
+ set type (type) {
+ if (types.code.has(type))
+ this[TYPE] = types.code.get(type)
+ else
+ this[TYPE] = type
+ }
+}
+
+const splitPrefix = (p, prefixSize) => {
+ const pathSize = 100
+ let pp = p
+ let prefix = ''
+ let ret
+ const root = pathModule.parse(p).root || '.'
+
+ if (Buffer.byteLength(pp) < pathSize)
+ ret = [pp, prefix, false]
+ else {
+ // first set prefix to the dir, and path to the base
+ prefix = pathModule.dirname(pp)
+ pp = pathModule.basename(pp)
+
+ do {
+ // both fit!
+ if (Buffer.byteLength(pp) <= pathSize &&
+ Buffer.byteLength(prefix) <= prefixSize)
+ ret = [pp, prefix, false]
+
+ // prefix fits in prefix, but path doesn't fit in path
+ else if (Buffer.byteLength(pp) > pathSize &&
+ Buffer.byteLength(prefix) <= prefixSize)
+ ret = [pp.substr(0, pathSize - 1), prefix, true]
+
+ else {
+ // make path take a bit from prefix
+ pp = pathModule.join(pathModule.basename(prefix), pp)
+ prefix = pathModule.dirname(prefix)
+ }
+ } while (prefix !== root && !ret)
+
+ // at this point, found no resolution, just truncate
+ if (!ret)
+ ret = [p.substr(0, pathSize - 1), '', true]
+ }
+ return ret
+}
+
+const decString = (buf, off, size) =>
+ buf.slice(off, off + size).toString('utf8').replace(/\0.*/, '')
+
+const decDate = (buf, off, size) =>
+ numToDate(decNumber(buf, off, size))
+
+const numToDate = num => num === null ? null : new Date(num * 1000)
+
+const decNumber = (buf, off, size) =>
+ buf[off] & 0x80 ? large.parse(buf.slice(off, off + size))
+ : decSmallNumber(buf, off, size)
+
+const nanNull = value => isNaN(value) ? null : value
+
+const decSmallNumber = (buf, off, size) =>
+ nanNull(parseInt(
+ buf.slice(off, off + size)
+ .toString('utf8').replace(/\0.*$/, '').trim(), 8))
+
+// the maximum encodable as a null-terminated octal, by field size
+const MAXNUM = {
+ 12: 0o77777777777,
+ 8 : 0o7777777
+}
+
+const encNumber = (buf, off, size, number) =>
+ number === null ? false :
+ number > MAXNUM[size] || number < 0
+ ? (large.encode(number, buf.slice(off, off + size)), true)
+ : (encSmallNumber(buf, off, size, number), false)
+
+const encSmallNumber = (buf, off, size, number) =>
+ buf.write(octalString(number, size), off, size, 'ascii')
+
+const octalString = (number, size) =>
+ padOctal(Math.floor(number).toString(8), size)
+
+const padOctal = (string, size) =>
+ (string.length === size - 1 ? string
+ : new Array(size - string.length - 1).join('0') + string + ' ') + '\0'
+
+const encDate = (buf, off, size, date) =>
+ date === null ? false :
+ encNumber(buf, off, size, date.getTime() / 1000)
+
+// enough to fill the longest string we've got
+const NULLS = new Array(156).join('\0')
+// pad with nulls, return true if it's longer or non-ascii
+const encString = (buf, off, size, string) =>
+ string === null ? false :
+ (buf.write(string + NULLS, off, size, 'utf8'),
+ string.length !== Buffer.byteLength(string) || string.length > size)
+
+module.exports = Header
diff --git a/node_modules/cacache/node_modules/tar/lib/high-level-opt.js b/node_modules/cacache/node_modules/tar/lib/high-level-opt.js
new file mode 100644
index 000000000..7333db915
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/high-level-opt.js
@@ -0,0 +1,29 @@
+'use strict'
+
+// turn tar(1) style args like `C` into the more verbose things like `cwd`
+
+const argmap = new Map([
+ ['C', 'cwd'],
+ ['f', 'file'],
+ ['z', 'gzip'],
+ ['P', 'preservePaths'],
+ ['U', 'unlink'],
+ ['strip-components', 'strip'],
+ ['stripComponents', 'strip'],
+ ['keep-newer', 'newer'],
+ ['keepNewer', 'newer'],
+ ['keep-newer-files', 'newer'],
+ ['keepNewerFiles', 'newer'],
+ ['k', 'keep'],
+ ['keep-existing', 'keep'],
+ ['keepExisting', 'keep'],
+ ['m', 'noMtime'],
+ ['no-mtime', 'noMtime'],
+ ['p', 'preserveOwner'],
+ ['L', 'follow'],
+ ['h', 'follow']
+])
+
+const parse = module.exports = opt => opt ? Object.keys(opt).map(k => [
+ argmap.has(k) ? argmap.get(k) : k, opt[k]
+]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {}
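+
+// e.g. parse({ C: '/tmp', z: true }) => { cwd: '/tmp', gzip: true }
+// (unrecognized keys are passed through unchanged)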
diff --git a/node_modules/cacache/node_modules/tar/lib/large-numbers.js b/node_modules/cacache/node_modules/tar/lib/large-numbers.js
new file mode 100644
index 000000000..ad30bc350
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/large-numbers.js
@@ -0,0 +1,97 @@
+'use strict'
+// Tar can encode large and negative numbers using a leading byte of
+// 0xff for negative, and 0x80 for positive.
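+// e.g. encode(-1, Buffer.alloc(8)) fills the buffer with 0xff bytes,
+// and parse() of that buffer returns -1 again.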
+
+const encode = exports.encode = (num, buf) => {
+ if (!Number.isSafeInteger(num))
+ // The number is so large that javascript cannot represent it with integer
+ // precision.
+ throw Error('cannot encode number outside of javascript safe integer range')
+ else if (num < 0)
+ encodeNegative(num, buf)
+ else
+ encodePositive(num, buf)
+ return buf
+}
+
+const encodePositive = (num, buf) => {
+ buf[0] = 0x80
+
+ for (var i = buf.length; i > 1; i--) {
+ buf[i-1] = num & 0xff
+ num = Math.floor(num / 0x100)
+ }
+}
+
+const encodeNegative = (num, buf) => {
+ buf[0] = 0xff
+ var flipped = false
+ num = num * -1
+ for (var i = buf.length; i > 1; i--) {
+ var byte = num & 0xff
+ num = Math.floor(num / 0x100)
+ if (flipped)
+ buf[i-1] = onesComp(byte)
+ else if (byte === 0)
+ buf[i-1] = 0
+ else {
+ flipped = true
+ buf[i-1] = twosComp(byte)
+ }
+ }
+}
+
+const parse = exports.parse = (buf) => {
+  var pre = buf[0]
+  var value
+ if (pre === 0x80)
+ value = pos(buf.slice(1, buf.length))
+ else if (pre === 0xff)
+ value = twos(buf)
+ else
+ throw Error('invalid base256 encoding')
+
+ if (!Number.isSafeInteger(value))
+ // The number is so large that javascript cannot represent it with integer
+ // precision.
+ throw Error('parsed number outside of javascript safe integer range')
+
+ return value
+}
+
+const twos = (buf) => {
+ var len = buf.length
+ var sum = 0
+ var flipped = false
+ for (var i = len - 1; i > -1; i--) {
+ var byte = buf[i]
+ var f
+ if (flipped)
+ f = onesComp(byte)
+ else if (byte === 0)
+ f = byte
+ else {
+ flipped = true
+ f = twosComp(byte)
+ }
+ if (f !== 0)
+ sum -= f * Math.pow(256, len - i - 1)
+ }
+ return sum
+}
+
+const pos = (buf) => {
+ var len = buf.length
+ var sum = 0
+ for (var i = len - 1; i > -1; i--) {
+ var byte = buf[i]
+ if (byte !== 0)
+ sum += byte * Math.pow(256, len - i - 1)
+ }
+ return sum
+}
+
+const onesComp = byte => (0xff ^ byte) & 0xff
+
+const twosComp = byte => ((0xff ^ byte) + 1) & 0xff
diff --git a/node_modules/cacache/node_modules/tar/lib/list.js b/node_modules/cacache/node_modules/tar/lib/list.js
new file mode 100644
index 000000000..9da3f812c
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/list.js
@@ -0,0 +1,128 @@
+'use strict'
+
+// XXX: This shares a lot in common with extract.js
+// maybe some DRY opportunity here?
+
+// tar -t
+const hlo = require('./high-level-opt.js')
+const Parser = require('./parse.js')
+const fs = require('fs')
+const fsm = require('fs-minipass')
+const path = require('path')
+
+const t = module.exports = (opt_, files, cb) => {
+ if (typeof opt_ === 'function')
+ cb = opt_, files = null, opt_ = {}
+ else if (Array.isArray(opt_))
+ files = opt_, opt_ = {}
+
+ if (typeof files === 'function')
+ cb = files, files = null
+
+ if (!files)
+ files = []
+ else
+ files = Array.from(files)
+
+ const opt = hlo(opt_)
+
+ if (opt.sync && typeof cb === 'function')
+ throw new TypeError('callback not supported for sync tar functions')
+
+ if (!opt.file && typeof cb === 'function')
+ throw new TypeError('callback only supported with file option')
+
+ if (files.length)
+ filesFilter(opt, files)
+
+ if (!opt.noResume)
+ onentryFunction(opt)
+
+ return opt.file && opt.sync ? listFileSync(opt)
+ : opt.file ? listFile(opt, cb)
+ : list(opt)
+}
+
+const onentryFunction = opt => {
+ const onentry = opt.onentry
+ opt.onentry = onentry ? e => {
+ onentry(e)
+ e.resume()
+ } : e => e.resume()
+}
+
+// construct a filter that limits the file entries listed
+// include child entries if a dir is included
+const filesFilter = (opt, files) => {
+ const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
+ const filter = opt.filter
+
+ const mapHas = (file, r) => {
+ const root = r || path.parse(file).root || '.'
+ const ret = file === root ? false
+ : map.has(file) ? map.get(file)
+ : mapHas(path.dirname(file), root)
+
+ map.set(file, ret)
+ return ret
+ }
+
+ opt.filter = filter
+ ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
+ : file => mapHas(file.replace(/\/+$/, ''))
+}
+
+const listFileSync = opt => {
+ const p = list(opt)
+ const file = opt.file
+ let threw = true
+ let fd
+ try {
+ const stat = fs.statSync(file)
+ const readSize = opt.maxReadSize || 16*1024*1024
+ if (stat.size < readSize) {
+ p.end(fs.readFileSync(file))
+ } else {
+ let pos = 0
+ const buf = Buffer.allocUnsafe(readSize)
+ fd = fs.openSync(file, 'r')
+ while (pos < stat.size) {
+ let bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
+ pos += bytesRead
+ p.write(buf.slice(0, bytesRead))
+ }
+ p.end()
+ }
+ threw = false
+ } finally {
+ if (threw && fd)
+ try { fs.closeSync(fd) } catch (er) {}
+ }
+}
+
+const listFile = (opt, cb) => {
+ const parse = new Parser(opt)
+ const readSize = opt.maxReadSize || 16*1024*1024
+
+ const file = opt.file
+ const p = new Promise((resolve, reject) => {
+ parse.on('error', reject)
+ parse.on('end', resolve)
+
+ fs.stat(file, (er, stat) => {
+ if (er)
+ reject(er)
+ else {
+ const stream = new fsm.ReadStream(file, {
+ readSize: readSize,
+ size: stat.size
+ })
+ stream.on('error', reject)
+ stream.pipe(parse)
+ }
+ })
+ })
+ return cb ? p.then(cb, cb) : p
+}
+
+const list = opt => new Parser(opt)
diff --git a/node_modules/cacache/node_modules/tar/lib/mkdir.js b/node_modules/cacache/node_modules/tar/lib/mkdir.js
new file mode 100644
index 000000000..381d0e1b3
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/mkdir.js
@@ -0,0 +1,206 @@
+'use strict'
+// wrapper around mkdirp for tar's needs.
+
+// TODO: This should probably be a class, not functionally
+// passing around state in a gazillion args.
+
+const mkdirp = require('mkdirp')
+const fs = require('fs')
+const path = require('path')
+const chownr = require('chownr')
+
+class SymlinkError extends Error {
+ constructor (symlink, path) {
+ super('Cannot extract through symbolic link')
+ this.path = path
+ this.symlink = symlink
+ }
+
+ get name () {
+    return 'SymlinkError'
+ }
+}
+
+class CwdError extends Error {
+ constructor (path, code) {
+ super(code + ': Cannot cd into \'' + path + '\'')
+ this.path = path
+ this.code = code
+ }
+
+ get name () {
+ return 'CwdError'
+ }
+}
+
+const mkdir = module.exports = (dir, opt, cb) => {
+ // if there's any overlap between mask and mode,
+ // then we'll need an explicit chmod
+ const umask = opt.umask
+ const mode = opt.mode | 0o0700
+ const needChmod = (mode & umask) !== 0
+
+ const uid = opt.uid
+ const gid = opt.gid
+ const doChown = typeof uid === 'number' &&
+ typeof gid === 'number' &&
+ ( uid !== opt.processUid || gid !== opt.processGid )
+
+ const preserve = opt.preserve
+ const unlink = opt.unlink
+ const cache = opt.cache
+ const cwd = opt.cwd
+
+ const done = (er, created) => {
+ if (er)
+ cb(er)
+ else {
+ cache.set(dir, true)
+ if (created && doChown)
+ chownr(created, uid, gid, er => done(er))
+ else if (needChmod)
+ fs.chmod(dir, mode, cb)
+ else
+ cb()
+ }
+ }
+
+ if (cache && cache.get(dir) === true)
+ return done()
+
+ if (dir === cwd)
+ return fs.stat(dir, (er, st) => {
+ if (er || !st.isDirectory())
+ er = new CwdError(dir, er && er.code || 'ENOTDIR')
+ done(er)
+ })
+
+ if (preserve)
+ return mkdirp(dir, {mode}).then(made => done(null, made), done)
+
+ const sub = path.relative(cwd, dir)
+ const parts = sub.split(/\/|\\/)
+ mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
+}
+
+const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
+ if (!parts.length)
+ return cb(null, created)
+ const p = parts.shift()
+ const part = base + '/' + p
+ if (cache.get(part))
+ return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
+}
+
+const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
+ if (er) {
+ if (er.path && path.dirname(er.path) === cwd &&
+ (er.code === 'ENOTDIR' || er.code === 'ENOENT'))
+ return cb(new CwdError(cwd, er.code))
+
+ fs.lstat(part, (statEr, st) => {
+ if (statEr)
+ cb(statEr)
+ else if (st.isDirectory())
+ mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ else if (unlink)
+ fs.unlink(part, er => {
+ if (er)
+ return cb(er)
+ fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
+ })
+ else if (st.isSymbolicLink())
+ return cb(new SymlinkError(part, part + '/' + parts.join('/')))
+ else
+ cb(er)
+ })
+ } else {
+ created = created || part
+ mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ }
+}
+
+const mkdirSync = module.exports.sync = (dir, opt) => {
+ // if there's any overlap between mask and mode,
+ // then we'll need an explicit chmod
+ const umask = opt.umask
+ const mode = opt.mode | 0o0700
+ const needChmod = (mode & umask) !== 0
+
+ const uid = opt.uid
+ const gid = opt.gid
+ const doChown = typeof uid === 'number' &&
+ typeof gid === 'number' &&
+ ( uid !== opt.processUid || gid !== opt.processGid )
+
+ const preserve = opt.preserve
+ const unlink = opt.unlink
+ const cache = opt.cache
+ const cwd = opt.cwd
+
+ const done = (created) => {
+ cache.set(dir, true)
+ if (created && doChown)
+ chownr.sync(created, uid, gid)
+ if (needChmod)
+ fs.chmodSync(dir, mode)
+ }
+
+ if (cache && cache.get(dir) === true)
+ return done()
+
+ if (dir === cwd) {
+ let ok = false
+ let code = 'ENOTDIR'
+ try {
+ ok = fs.statSync(dir).isDirectory()
+ } catch (er) {
+ code = er.code
+ } finally {
+ if (!ok)
+ throw new CwdError(dir, code)
+ }
+ done()
+ return
+ }
+
+ if (preserve)
+    return done(mkdirp.sync(dir, {mode}))
+
+ const sub = path.relative(cwd, dir)
+ const parts = sub.split(/\/|\\/)
+ let created = null
+ for (let p = parts.shift(), part = cwd;
+ p && (part += '/' + p);
+ p = parts.shift()) {
+
+ if (cache.get(part))
+ continue
+
+ try {
+ fs.mkdirSync(part, mode)
+ created = created || part
+ cache.set(part, true)
+ } catch (er) {
+ if (er.path && path.dirname(er.path) === cwd &&
+ (er.code === 'ENOTDIR' || er.code === 'ENOENT'))
+ return new CwdError(cwd, er.code)
+
+ const st = fs.lstatSync(part)
+ if (st.isDirectory()) {
+ cache.set(part, true)
+ continue
+ } else if (unlink) {
+ fs.unlinkSync(part)
+ fs.mkdirSync(part, mode)
+ created = created || part
+ cache.set(part, true)
+ continue
+ } else if (st.isSymbolicLink())
+ return new SymlinkError(part, part + '/' + parts.join('/'))
+ }
+ }
+
+ return done(created)
+}
diff --git a/node_modules/cacache/node_modules/tar/lib/mode-fix.js b/node_modules/cacache/node_modules/tar/lib/mode-fix.js
new file mode 100644
index 000000000..c3758741c
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/mode-fix.js
@@ -0,0 +1,24 @@
+'use strict'
+module.exports = (mode, isDir, portable) => {
+ mode &= 0o7777
+
+ // in portable mode, use the minimum reasonable umask
+ // if this system creates files with 0o664 by default
+ // (as some linux distros do), then we'll write the
+ // archive with 0o644 instead. Also, don't ever create
+ // a file that is not readable/writable by the owner.
+ if (portable) {
+    mode = (mode | 0o600) & ~0o22
+ }
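+  // e.g. an (assumed) input mode of 0o664 becomes
+  // (0o664 | 0o600) & ~0o22 === 0o644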
+
+ // if dirs are readable, then they should be listable
+ if (isDir) {
+ if (mode & 0o400)
+ mode |= 0o100
+ if (mode & 0o40)
+ mode |= 0o10
+ if (mode & 0o4)
+ mode |= 0o1
+ }
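+  // e.g. a directory with (assumed) mode 0o640 gains the matching
+  // execute bits: 0o640 | 0o100 | 0o10 === 0o750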
+ return mode
+}
diff --git a/node_modules/cacache/node_modules/tar/lib/pack.js b/node_modules/cacache/node_modules/tar/lib/pack.js
new file mode 100644
index 000000000..0fca4ae25
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/pack.js
@@ -0,0 +1,403 @@
+'use strict'
+
+// A readable tar stream creator
+// Technically, this is a transform stream that you write paths into,
+// and tar format comes out of.
+// The `add()` method is like `write()` but returns this,
+// and end() returns `this` as well, so you can
+// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
+// You could also do something like:
+// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
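+//
+// A hedged sketch of the first pattern above ('some-dir' is an assumed
+// example; Pack should also be reachable as require('tar').Pack):
+//
+//   const Pack = require('./pack.js')
+//   new Pack({ cwd: process.cwd() })
+//     .add('some-dir')
+//     .end()
+//     .pipe(fs.createWriteStream('out.tar'))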
+
+class PackJob {
+ constructor (path, absolute) {
+ this.path = path || './'
+ this.absolute = absolute
+ this.entry = null
+ this.stat = null
+ this.readdir = null
+ this.pending = false
+ this.ignore = false
+ this.piped = false
+ }
+}
+
+const MiniPass = require('minipass')
+const zlib = require('minizlib')
+const ReadEntry = require('./read-entry.js')
+const WriteEntry = require('./write-entry.js')
+const WriteEntrySync = WriteEntry.Sync
+const WriteEntryTar = WriteEntry.Tar
+const Yallist = require('yallist')
+const EOF = Buffer.alloc(1024)
+const ONSTAT = Symbol('onStat')
+const ENDED = Symbol('ended')
+const QUEUE = Symbol('queue')
+const CURRENT = Symbol('current')
+const PROCESS = Symbol('process')
+const PROCESSING = Symbol('processing')
+const PROCESSJOB = Symbol('processJob')
+const JOBS = Symbol('jobs')
+const JOBDONE = Symbol('jobDone')
+const ADDFSENTRY = Symbol('addFSEntry')
+const ADDTARENTRY = Symbol('addTarEntry')
+const STAT = Symbol('stat')
+const READDIR = Symbol('readdir')
+const ONREADDIR = Symbol('onreaddir')
+const PIPE = Symbol('pipe')
+const ENTRY = Symbol('entry')
+const ENTRYOPT = Symbol('entryOpt')
+const WRITEENTRYCLASS = Symbol('writeEntryClass')
+const WRITE = Symbol('write')
+const ONDRAIN = Symbol('ondrain')
+
+const fs = require('fs')
+const path = require('path')
+const warner = require('./warn-mixin.js')
+
+const Pack = warner(class Pack extends MiniPass {
+ constructor (opt) {
+ super(opt)
+ opt = opt || Object.create(null)
+ this.opt = opt
+ this.file = opt.file || ''
+ this.cwd = opt.cwd || process.cwd()
+ this.maxReadSize = opt.maxReadSize
+ this.preservePaths = !!opt.preservePaths
+ this.strict = !!opt.strict
+ this.noPax = !!opt.noPax
+ this.prefix = (opt.prefix || '').replace(/(\\|\/)+$/, '')
+ this.linkCache = opt.linkCache || new Map()
+ this.statCache = opt.statCache || new Map()
+ this.readdirCache = opt.readdirCache || new Map()
+
+ this[WRITEENTRYCLASS] = WriteEntry
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+
+ this.portable = !!opt.portable
+ this.zip = null
+ if (opt.gzip) {
+ if (typeof opt.gzip !== 'object')
+ opt.gzip = {}
+ if (this.portable)
+ opt.gzip.portable = true
+ this.zip = new zlib.Gzip(opt.gzip)
+ this.zip.on('data', chunk => super.write(chunk))
+ this.zip.on('end', _ => super.end())
+ this.zip.on('drain', _ => this[ONDRAIN]())
+ this.on('resume', _ => this.zip.resume())
+ } else
+ this.on('drain', this[ONDRAIN])
+
+ this.noDirRecurse = !!opt.noDirRecurse
+ this.follow = !!opt.follow
+ this.noMtime = !!opt.noMtime
+ this.mtime = opt.mtime || null
+
+ this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
+
+    this[QUEUE] = new Yallist()
+ this[JOBS] = 0
+ this.jobs = +opt.jobs || 4
+ this[PROCESSING] = false
+ this[ENDED] = false
+ }
+
+ [WRITE] (chunk) {
+ return super.write(chunk)
+ }
+
+ add (path) {
+ this.write(path)
+ return this
+ }
+
+ end (path) {
+ if (path)
+ this.write(path)
+ this[ENDED] = true
+ this[PROCESS]()
+ return this
+ }
+
+ write (path) {
+ if (this[ENDED])
+ throw new Error('write after end')
+
+ if (path instanceof ReadEntry)
+ this[ADDTARENTRY](path)
+ else
+ this[ADDFSENTRY](path)
+ return this.flowing
+ }
+
+ [ADDTARENTRY] (p) {
+ const absolute = path.resolve(this.cwd, p.path)
+ if (this.prefix)
+ p.path = this.prefix + '/' + p.path.replace(/^\.(\/+|$)/, '')
+
+ // in this case, we don't have to wait for the stat
+ if (!this.filter(p.path, p))
+ p.resume()
+ else {
+      const job = new PackJob(p.path, absolute)
+ job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
+ job.entry.on('end', _ => this[JOBDONE](job))
+ this[JOBS] += 1
+ this[QUEUE].push(job)
+ }
+
+ this[PROCESS]()
+ }
+
+ [ADDFSENTRY] (p) {
+ const absolute = path.resolve(this.cwd, p)
+ if (this.prefix)
+ p = this.prefix + '/' + p.replace(/^\.(\/+|$)/, '')
+
+ this[QUEUE].push(new PackJob(p, absolute))
+ this[PROCESS]()
+ }
+
+ [STAT] (job) {
+ job.pending = true
+ this[JOBS] += 1
+ const stat = this.follow ? 'stat' : 'lstat'
+ fs[stat](job.absolute, (er, stat) => {
+ job.pending = false
+ this[JOBS] -= 1
+ if (er)
+ this.emit('error', er)
+ else
+ this[ONSTAT](job, stat)
+ })
+ }
+
+ [ONSTAT] (job, stat) {
+ this.statCache.set(job.absolute, stat)
+ job.stat = stat
+
+ // now we have the stat, we can filter it.
+ if (!this.filter(job.path, stat))
+ job.ignore = true
+
+ this[PROCESS]()
+ }
+
+ [READDIR] (job) {
+ job.pending = true
+ this[JOBS] += 1
+ fs.readdir(job.absolute, (er, entries) => {
+ job.pending = false
+ this[JOBS] -= 1
+ if (er)
+ return this.emit('error', er)
+ this[ONREADDIR](job, entries)
+ })
+ }
+
+ [ONREADDIR] (job, entries) {
+ this.readdirCache.set(job.absolute, entries)
+ job.readdir = entries
+ this[PROCESS]()
+ }
+
+ [PROCESS] () {
+ if (this[PROCESSING])
+ return
+
+ this[PROCESSING] = true
+ for (let w = this[QUEUE].head;
+ w !== null && this[JOBS] < this.jobs;
+ w = w.next) {
+ this[PROCESSJOB](w.value)
+ if (w.value.ignore) {
+ const p = w.next
+ this[QUEUE].removeNode(w)
+ w.next = p
+ }
+ }
+
+ this[PROCESSING] = false
+
+ if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
+ if (this.zip)
+ this.zip.end(EOF)
+ else {
+ super.write(EOF)
+ super.end()
+ }
+ }
+ }
+
+ get [CURRENT] () {
+ return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
+ }
+
+ [JOBDONE] (job) {
+ this[QUEUE].shift()
+ this[JOBS] -= 1
+ this[PROCESS]()
+ }
+
+ [PROCESSJOB] (job) {
+ if (job.pending)
+ return
+
+ if (job.entry) {
+ if (job === this[CURRENT] && !job.piped)
+ this[PIPE](job)
+ return
+ }
+
+ if (!job.stat) {
+ if (this.statCache.has(job.absolute))
+ this[ONSTAT](job, this.statCache.get(job.absolute))
+ else
+ this[STAT](job)
+ }
+ if (!job.stat)
+ return
+
+ // filtered out!
+ if (job.ignore)
+ return
+
+ if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
+ if (this.readdirCache.has(job.absolute))
+ this[ONREADDIR](job, this.readdirCache.get(job.absolute))
+ else
+ this[READDIR](job)
+ if (!job.readdir)
+ return
+ }
+
+ // we know it doesn't have an entry, because that got checked above
+ job.entry = this[ENTRY](job)
+ if (!job.entry) {
+ job.ignore = true
+ return
+ }
+
+ if (job === this[CURRENT] && !job.piped)
+ this[PIPE](job)
+ }
+
+ [ENTRYOPT] (job) {
+ return {
+ onwarn: (code, msg, data) => this.warn(code, msg, data),
+ noPax: this.noPax,
+ cwd: this.cwd,
+ absolute: job.absolute,
+ preservePaths: this.preservePaths,
+ maxReadSize: this.maxReadSize,
+ strict: this.strict,
+ portable: this.portable,
+ linkCache: this.linkCache,
+ statCache: this.statCache,
+ noMtime: this.noMtime,
+ mtime: this.mtime
+ }
+ }
+
+ [ENTRY] (job) {
+ this[JOBS] += 1
+ try {
+ return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
+ .on('end', () => this[JOBDONE](job))
+ .on('error', er => this.emit('error', er))
+ } catch (er) {
+ this.emit('error', er)
+ }
+ }
+
+ [ONDRAIN] () {
+ if (this[CURRENT] && this[CURRENT].entry)
+ this[CURRENT].entry.resume()
+ }
+
+ // like .pipe() but using super, because our write() is special
+ [PIPE] (job) {
+ job.piped = true
+
+ if (job.readdir)
+ job.readdir.forEach(entry => {
+ const p = this.prefix ?
+ job.path.slice(this.prefix.length + 1) || './'
+ : job.path
+
+ const base = p === './' ? '' : p.replace(/\/*$/, '/')
+ this[ADDFSENTRY](base + entry)
+ })
+
+ const source = job.entry
+ const zip = this.zip
+
+ if (zip)
+ source.on('data', chunk => {
+ if (!zip.write(chunk))
+ source.pause()
+ })
+ else
+ source.on('data', chunk => {
+ if (!super.write(chunk))
+ source.pause()
+ })
+ }
+
+ pause () {
+ if (this.zip)
+ this.zip.pause()
+ return super.pause()
+ }
+})
+
+class PackSync extends Pack {
+ constructor (opt) {
+ super(opt)
+ this[WRITEENTRYCLASS] = WriteEntrySync
+ }
+
+ // pause/resume are no-ops in sync streams.
+ pause () {}
+ resume () {}
+
+ [STAT] (job) {
+ const stat = this.follow ? 'statSync' : 'lstatSync'
+ this[ONSTAT](job, fs[stat](job.absolute))
+ }
+
+ [READDIR] (job, stat) {
+ this[ONREADDIR](job, fs.readdirSync(job.absolute))
+ }
+
+ // gotta get it all in this tick
+ [PIPE] (job) {
+ const source = job.entry
+ const zip = this.zip
+
+ if (job.readdir)
+ job.readdir.forEach(entry => {
+ const p = this.prefix ?
+ job.path.slice(this.prefix.length + 1) || './'
+ : job.path
+
+ const base = p === './' ? '' : p.replace(/\/*$/, '/')
+ this[ADDFSENTRY](base + entry)
+ })
+
+ if (zip)
+ source.on('data', chunk => {
+ zip.write(chunk)
+ })
+ else
+ source.on('data', chunk => {
+ super[WRITE](chunk)
+ })
+ }
+}
+
+Pack.Sync = PackSync
+
+module.exports = Pack
diff --git a/node_modules/cacache/node_modules/tar/lib/parse.js b/node_modules/cacache/node_modules/tar/lib/parse.js
new file mode 100644
index 000000000..d9a49ad1f
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/parse.js
@@ -0,0 +1,483 @@
+'use strict'
+
+// this[BUFFER] is the remainder of a chunk if we're waiting for
+// the full 512 bytes of a header to come in. We will Buffer.concat()
+// it to the next write(), which is a mem copy, but a small one.
+//
+// this[QUEUE] is a Yallist of entries that haven't been emitted yet.
+// It can only fill up if the user keeps write()ing after a write()
+// returns false, or writes a chunk containing more than one entry.
+//
+// We don't buffer chunks, we always parse them and either create an
+// entry, or push it into the active entry. The ReadEntry class knows
+// to throw data away if .ignore=true
+//
+// Shift entry off the buffer when it emits 'end', and emit 'entry' for
+// the next one in the list.
+//
+// At any time, we're pushing body chunks into the entry at WRITEENTRY,
+// and waiting for 'end' on the entry at READENTRY
+//
+// ignored entries get .resume() called on them straight away
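+//
+// A hedged usage sketch (this Parser is the internal engine behind
+// tar.t(); each 'entry' must be consumed or resume()d for parsing to
+// proceed; 'archive.tar' is an assumed example):
+//
+//   const Parser = require('./parse.js')
+//   const p = new Parser({ onentry: entry => entry.resume() })
+//   require('fs').createReadStream('archive.tar').pipe(p)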
+
+const warner = require('./warn-mixin.js')
+const path = require('path')
+const Header = require('./header.js')
+const EE = require('events')
+const Yallist = require('yallist')
+const maxMetaEntrySize = 1024 * 1024
+const Entry = require('./read-entry.js')
+const Pax = require('./pax.js')
+const zlib = require('minizlib')
+
+const gzipHeader = Buffer.from([0x1f, 0x8b])
+const STATE = Symbol('state')
+const WRITEENTRY = Symbol('writeEntry')
+const READENTRY = Symbol('readEntry')
+const NEXTENTRY = Symbol('nextEntry')
+const PROCESSENTRY = Symbol('processEntry')
+const EX = Symbol('extendedHeader')
+const GEX = Symbol('globalExtendedHeader')
+const META = Symbol('meta')
+const EMITMETA = Symbol('emitMeta')
+const BUFFER = Symbol('buffer')
+const QUEUE = Symbol('queue')
+const ENDED = Symbol('ended')
+const EMITTEDEND = Symbol('emittedEnd')
+const EMIT = Symbol('emit')
+const UNZIP = Symbol('unzip')
+const CONSUMECHUNK = Symbol('consumeChunk')
+const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
+const CONSUMEBODY = Symbol('consumeBody')
+const CONSUMEMETA = Symbol('consumeMeta')
+const CONSUMEHEADER = Symbol('consumeHeader')
+const CONSUMING = Symbol('consuming')
+const BUFFERCONCAT = Symbol('bufferConcat')
+const MAYBEEND = Symbol('maybeEnd')
+const WRITING = Symbol('writing')
+const ABORTED = Symbol('aborted')
+const DONE = Symbol('onDone')
+const SAW_VALID_ENTRY = Symbol('sawValidEntry')
+const SAW_NULL_BLOCK = Symbol('sawNullBlock')
+const SAW_EOF = Symbol('sawEOF')
+
+const noop = _ => true
+
+module.exports = warner(class Parser extends EE {
+ constructor (opt) {
+ opt = opt || {}
+ super(opt)
+
+ this.file = opt.file || ''
+
+ // set to boolean false when an entry starts. 1024 bytes of \0
+ // is technically a valid tarball, albeit a boring one.
+ this[SAW_VALID_ENTRY] = null
+
+ // these BADARCHIVE errors can't be detected early. listen on DONE.
+ this.on(DONE, _ => {
+ if (this[STATE] === 'begin' || this[SAW_VALID_ENTRY] === false) {
+ // either less than 1 block of data, or all entries were invalid.
+ // Either way, probably not even a tarball.
+ this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format')
+ }
+ })
+
+ if (opt.ondone)
+ this.on(DONE, opt.ondone)
+ else
+ this.on(DONE, _ => {
+ this.emit('prefinish')
+ this.emit('finish')
+ this.emit('end')
+ this.emit('close')
+ })
+
+ this.strict = !!opt.strict
+ this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
+ this.filter = typeof opt.filter === 'function' ? opt.filter : noop
+
+ // have to set this so that streams are ok piping into it
+ this.writable = true
+ this.readable = false
+
+ this[QUEUE] = new Yallist()
+ this[BUFFER] = null
+ this[READENTRY] = null
+ this[WRITEENTRY] = null
+ this[STATE] = 'begin'
+ this[META] = ''
+ this[EX] = null
+ this[GEX] = null
+ this[ENDED] = false
+ this[UNZIP] = null
+ this[ABORTED] = false
+ this[SAW_NULL_BLOCK] = false
+ this[SAW_EOF] = false
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+ if (typeof opt.onentry === 'function')
+ this.on('entry', opt.onentry)
+ }
+
+ [CONSUMEHEADER] (chunk, position) {
+ if (this[SAW_VALID_ENTRY] === null)
+ this[SAW_VALID_ENTRY] = false
+ let header
+ try {
+ header = new Header(chunk, position, this[EX], this[GEX])
+ } catch (er) {
+ return this.warn('TAR_ENTRY_INVALID', er)
+ }
+
+ if (header.nullBlock) {
+ if (this[SAW_NULL_BLOCK]) {
+ this[SAW_EOF] = true
+ // ending an archive with no entries. pointless, but legal.
+ if (this[STATE] === 'begin')
+ this[STATE] = 'header'
+ this[EMIT]('eof')
+ } else {
+ this[SAW_NULL_BLOCK] = true
+ this[EMIT]('nullBlock')
+ }
+ } else {
+ this[SAW_NULL_BLOCK] = false
+ if (!header.cksumValid)
+ this.warn('TAR_ENTRY_INVALID', 'checksum failure', {header})
+ else if (!header.path)
+ this.warn('TAR_ENTRY_INVALID', 'path is required', {header})
+ else {
+ const type = header.type
+ if (/^(Symbolic)?Link$/.test(type) && !header.linkpath)
+ this.warn('TAR_ENTRY_INVALID', 'linkpath required', {header})
+ else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath)
+ this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {header})
+ else {
+ const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
+
+ // we do this for meta & ignored entries as well, because they
+ // are still valid tar, or else we wouldn't know to ignore them
+ if (!this[SAW_VALID_ENTRY]) {
+ if (entry.remain) {
+ // this might be the one!
+ const onend = () => {
+ if (!entry.invalid)
+ this[SAW_VALID_ENTRY] = true
+ }
+ entry.on('end', onend)
+ } else {
+ this[SAW_VALID_ENTRY] = true
+ }
+ }
+
+ if (entry.meta) {
+ if (entry.size > this.maxMetaEntrySize) {
+ entry.ignore = true
+ this[EMIT]('ignoredEntry', entry)
+ this[STATE] = 'ignore'
+ entry.resume()
+ } else if (entry.size > 0) {
+ this[META] = ''
+ entry.on('data', c => this[META] += c)
+ this[STATE] = 'meta'
+ }
+ } else {
+ this[EX] = null
+ entry.ignore = entry.ignore || !this.filter(entry.path, entry)
+
+ if (entry.ignore) {
+ // probably valid, just not something we care about
+ this[EMIT]('ignoredEntry', entry)
+ this[STATE] = entry.remain ? 'ignore' : 'header'
+ entry.resume()
+ } else {
+ if (entry.remain)
+ this[STATE] = 'body'
+ else {
+ this[STATE] = 'header'
+ entry.end()
+ }
+
+ if (!this[READENTRY]) {
+ this[QUEUE].push(entry)
+ this[NEXTENTRY]()
+ } else
+ this[QUEUE].push(entry)
+ }
+ }
+ }
+ }
+ }
+ }
+
+ [PROCESSENTRY] (entry) {
+ let go = true
+
+ if (!entry) {
+ this[READENTRY] = null
+ go = false
+ } else if (Array.isArray(entry))
+ this.emit.apply(this, entry)
+ else {
+ this[READENTRY] = entry
+ this.emit('entry', entry)
+ if (!entry.emittedEnd) {
+ entry.on('end', _ => this[NEXTENTRY]())
+ go = false
+ }
+ }
+
+ return go
+ }
+
+ [NEXTENTRY] () {
+ do {} while (this[PROCESSENTRY](this[QUEUE].shift()))
+
+ if (!this[QUEUE].length) {
+ // At this point, there's nothing in the queue, but we may have an
+ // entry which is being consumed (readEntry).
+ // If we don't, then we definitely can handle more data.
+ // If we do, and either it's flowing, or it has never had any data
+ // written to it, then it needs more.
+ // The only other possibility is that it has returned false from a
+ // write() call, so we wait for the next drain to continue.
+ const re = this[READENTRY]
+ const drainNow = !re || re.flowing || re.size === re.remain
+ if (drainNow) {
+ if (!this[WRITING])
+ this.emit('drain')
+ } else
+ re.once('drain', _ => this.emit('drain'))
+ }
+ }
+
+ [CONSUMEBODY] (chunk, position) {
+ // write up to but no more than writeEntry.blockRemain
+ const entry = this[WRITEENTRY]
+ const br = entry.blockRemain
+ const c = (br >= chunk.length && position === 0) ? chunk
+ : chunk.slice(position, position + br)
+
+ entry.write(c)
+
+ if (!entry.blockRemain) {
+ this[STATE] = 'header'
+ this[WRITEENTRY] = null
+ entry.end()
+ }
+
+ return c.length
+ }
+
+ [CONSUMEMETA] (chunk, position) {
+ const entry = this[WRITEENTRY]
+ const ret = this[CONSUMEBODY](chunk, position)
+
+ // if we finished, then the entry is reset
+ if (!this[WRITEENTRY])
+ this[EMITMETA](entry)
+
+ return ret
+ }
+
+ [EMIT] (ev, data, extra) {
+ if (!this[QUEUE].length && !this[READENTRY])
+ this.emit(ev, data, extra)
+ else
+ this[QUEUE].push([ev, data, extra])
+ }
+
+ [EMITMETA] (entry) {
+ this[EMIT]('meta', this[META])
+ switch (entry.type) {
+ case 'ExtendedHeader':
+ case 'OldExtendedHeader':
+ this[EX] = Pax.parse(this[META], this[EX], false)
+ break
+
+ case 'GlobalExtendedHeader':
+ this[GEX] = Pax.parse(this[META], this[GEX], true)
+ break
+
+ case 'NextFileHasLongPath':
+ case 'OldGnuLongPath':
+ this[EX] = this[EX] || Object.create(null)
+ this[EX].path = this[META].replace(/\0.*/, '')
+ break
+
+ case 'NextFileHasLongLinkpath':
+ this[EX] = this[EX] || Object.create(null)
+ this[EX].linkpath = this[META].replace(/\0.*/, '')
+ break
+
+ /* istanbul ignore next */
+ default: throw new Error('unknown meta: ' + entry.type)
+ }
+ }
+
+ abort (error) {
+ this[ABORTED] = true
+ this.emit('abort', error)
+ // always throws, even in non-strict mode
+ this.warn('TAR_ABORT', error, { recoverable: false })
+ }
+
+ write (chunk) {
+ if (this[ABORTED])
+ return
+
+ // first write, might be gzipped
+ if (this[UNZIP] === null && chunk) {
+ if (this[BUFFER]) {
+ chunk = Buffer.concat([this[BUFFER], chunk])
+ this[BUFFER] = null
+ }
+ if (chunk.length < gzipHeader.length) {
+ this[BUFFER] = chunk
+ return true
+ }
+ for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
+ if (chunk[i] !== gzipHeader[i])
+ this[UNZIP] = false
+ }
+ if (this[UNZIP] === null) {
+ const ended = this[ENDED]
+ this[ENDED] = false
+ this[UNZIP] = new zlib.Unzip()
+ this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
+ this[UNZIP].on('error', er => this.abort(er))
+ this[UNZIP].on('end', _ => {
+ this[ENDED] = true
+ this[CONSUMECHUNK]()
+ })
+ this[WRITING] = true
+      const ret = this[UNZIP][ended ? 'end' : 'write'](chunk)
+ this[WRITING] = false
+ return ret
+ }
+ }
+
+ this[WRITING] = true
+ if (this[UNZIP])
+ this[UNZIP].write(chunk)
+ else
+ this[CONSUMECHUNK](chunk)
+ this[WRITING] = false
+
+ // return false if there's a queue, or if the current entry isn't flowing
+ const ret =
+ this[QUEUE].length ? false :
+ this[READENTRY] ? this[READENTRY].flowing :
+ true
+
+ // if we have no queue, then that means a clogged READENTRY
+ if (!ret && !this[QUEUE].length)
+ this[READENTRY].once('drain', _ => this.emit('drain'))
+
+ return ret
+ }
+
+ [BUFFERCONCAT] (c) {
+ if (c && !this[ABORTED])
+ this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
+ }
+
+ [MAYBEEND] () {
+ if (this[ENDED] &&
+ !this[EMITTEDEND] &&
+ !this[ABORTED] &&
+ !this[CONSUMING]) {
+ this[EMITTEDEND] = true
+ const entry = this[WRITEENTRY]
+ if (entry && entry.blockRemain) {
+ // truncated, likely a damaged file
+ const have = this[BUFFER] ? this[BUFFER].length : 0
+ this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${
+ entry.blockRemain} more bytes, only ${have} available)`, {entry})
+ if (this[BUFFER])
+ entry.write(this[BUFFER])
+ entry.end()
+ }
+ this[EMIT](DONE)
+ }
+ }
+
+ [CONSUMECHUNK] (chunk) {
+ if (this[CONSUMING])
+ this[BUFFERCONCAT](chunk)
+ else if (!chunk && !this[BUFFER])
+ this[MAYBEEND]()
+ else {
+ this[CONSUMING] = true
+ if (this[BUFFER]) {
+ this[BUFFERCONCAT](chunk)
+ const c = this[BUFFER]
+ this[BUFFER] = null
+ this[CONSUMECHUNKSUB](c)
+ } else {
+ this[CONSUMECHUNKSUB](chunk)
+ }
+
+ while (this[BUFFER] &&
+ this[BUFFER].length >= 512 &&
+ !this[ABORTED] &&
+ !this[SAW_EOF]) {
+ const c = this[BUFFER]
+ this[BUFFER] = null
+ this[CONSUMECHUNKSUB](c)
+ }
+ this[CONSUMING] = false
+ }
+
+ if (!this[BUFFER] || this[ENDED])
+ this[MAYBEEND]()
+ }
+
+ [CONSUMECHUNKSUB] (chunk) {
+ // we know that we are in CONSUMING mode, so anything written goes into
+ // the buffer. Advance the position and put any remainder in the buffer.
+ let position = 0
+ let length = chunk.length
+ while (position + 512 <= length && !this[ABORTED] && !this[SAW_EOF]) {
+ switch (this[STATE]) {
+ case 'begin':
+ case 'header':
+ this[CONSUMEHEADER](chunk, position)
+ position += 512
+ break
+
+ case 'ignore':
+ case 'body':
+ position += this[CONSUMEBODY](chunk, position)
+ break
+
+ case 'meta':
+ position += this[CONSUMEMETA](chunk, position)
+ break
+
+ /* istanbul ignore next */
+ default:
+ throw new Error('invalid state: ' + this[STATE])
+ }
+ }
+
+ if (position < length) {
+ if (this[BUFFER])
+ this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
+ else
+ this[BUFFER] = chunk.slice(position)
+ }
+ }
+
+ end (chunk) {
+ if (!this[ABORTED]) {
+ if (this[UNZIP])
+ this[UNZIP].end(chunk)
+ else {
+ this[ENDED] = true
+ this.write(chunk)
+ }
+ }
+ }
+})
diff --git a/node_modules/cacache/node_modules/tar/lib/path-reservations.js b/node_modules/cacache/node_modules/tar/lib/path-reservations.js
new file mode 100644
index 000000000..3cf0c2c12
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/path-reservations.js
@@ -0,0 +1,125 @@
+// A path exclusive reservation system
+// reserve([list, of, paths], fn)
+// When the fn is first in line for all its paths, it
+// is called with a cb that clears the reservation.
+//
+// Used by async unpack to avoid clobbering paths in use,
+// while still allowing maximal safe parallelization.
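+//
+// A hedged usage sketch (paths are assumed examples):
+//
+//   const reservations = require('./path-reservations.js')()
+//   reservations.reserve(['a/b', 'a/c'], done => {
+//     // ... safely operate on a/b and a/c ...
+//     done() // clears the reservation, running any queued fns
+//   })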
+
+const assert = require('assert')
+
+module.exports = () => {
+ // path => [function or Set]
+ // A Set object means a directory reservation
+ // A fn is a direct reservation on that path
+ const queues = new Map()
+
+ // fn => {paths:[path,...], dirs:[path, ...]}
+ const reservations = new Map()
+
+ // return a set of parent dirs for a given path
+ const { join } = require('path')
+ const getDirs = path =>
+ join(path).split(/[\\\/]/).slice(0, -1).reduce((set, path) =>
+ set.length ? set.concat(join(set[set.length-1], path)) : [path], [])
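+  // e.g. on posix, getDirs('a/b/c') yields ['a', 'a/b']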
+
+ // functions currently running
+ const running = new Set()
+
+ // return the queues for each path the function cares about
+ // fn => {paths, dirs}
+ const getQueues = fn => {
+ const res = reservations.get(fn)
+ /* istanbul ignore if - unpossible */
+ if (!res)
+ throw new Error('function does not have any path reservations')
+ return {
+ paths: res.paths.map(path => queues.get(path)),
+ dirs: [...res.dirs].map(path => queues.get(path)),
+ }
+ }
+
+ // check if fn is first in line for all its paths, and is
+ // included in the first set for all its dir queues
+ const check = fn => {
+ const {paths, dirs} = getQueues(fn)
+ return paths.every(q => q[0] === fn) &&
+ dirs.every(q => q[0] instanceof Set && q[0].has(fn))
+ }
+
+ // run the function if it's first in line and not already running
+ const run = fn => {
+ if (running.has(fn) || !check(fn))
+ return false
+ running.add(fn)
+ fn(() => clear(fn))
+ return true
+ }
+
+ const clear = fn => {
+ if (!running.has(fn))
+ return false
+
+ const { paths, dirs } = reservations.get(fn)
+ const next = new Set()
+
+ paths.forEach(path => {
+ const q = queues.get(path)
+ assert.equal(q[0], fn)
+ if (q.length === 1)
+ queues.delete(path)
+ else {
+ q.shift()
+ if (typeof q[0] === 'function')
+ next.add(q[0])
+ else
+ q[0].forEach(fn => next.add(fn))
+ }
+ })
+
+ dirs.forEach(dir => {
+ const q = queues.get(dir)
+ assert(q[0] instanceof Set)
+ if (q[0].size === 1 && q.length === 1) {
+ queues.delete(dir)
+ } else if (q[0].size === 1) {
+ q.shift()
+
+ // must be a function or else the Set would've been reused
+ next.add(q[0])
+ } else
+ q[0].delete(fn)
+ })
+ running.delete(fn)
+
+ next.forEach(fn => run(fn))
+ return true
+ }
+
+ const reserve = (paths, fn) => {
+ const dirs = new Set(
+ paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))
+ )
+ reservations.set(fn, {dirs, paths})
+ paths.forEach(path => {
+ const q = queues.get(path)
+ if (!q)
+ queues.set(path, [fn])
+ else
+ q.push(fn)
+ })
+ dirs.forEach(dir => {
+ const q = queues.get(dir)
+ if (!q)
+ queues.set(dir, [new Set([fn])])
+ else if (q[q.length-1] instanceof Set)
+ q[q.length-1].add(fn)
+ else
+ q.push(new Set([fn]))
+ })
+
+ return run(fn)
+ }
+
+ return { check, reserve }
+}
diff --git a/node_modules/cacache/node_modules/tar/lib/pax.js b/node_modules/cacache/node_modules/tar/lib/pax.js
new file mode 100644
index 000000000..214a459f3
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/pax.js
@@ -0,0 +1,145 @@
+'use strict'
+const Header = require('./header.js')
+const path = require('path')
+
+class Pax {
+ constructor (obj, global) {
+ this.atime = obj.atime || null
+ this.charset = obj.charset || null
+ this.comment = obj.comment || null
+ this.ctime = obj.ctime || null
+ this.gid = obj.gid || null
+ this.gname = obj.gname || null
+ this.linkpath = obj.linkpath || null
+ this.mtime = obj.mtime || null
+ this.path = obj.path || null
+ this.size = obj.size || null
+ this.uid = obj.uid || null
+ this.uname = obj.uname || null
+ this.dev = obj.dev || null
+ this.ino = obj.ino || null
+ this.nlink = obj.nlink || null
+ this.global = global || false
+ }
+
+ encode () {
+ const body = this.encodeBody()
+ if (body === '')
+ return null
+
+ const bodyLen = Buffer.byteLength(body)
+ // round up to 512 bytes
+ // add 512 for header
+ const bufLen = 512 * Math.ceil(1 + bodyLen / 512)
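+    // e.g. an (assumed) 100-byte body: 512 * Math.ceil(1 + 100 / 512)
+    // === 1024, one header block plus one body block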
+ const buf = Buffer.allocUnsafe(bufLen)
+
+    // 0-fill the header section; the encode below might not hit every field
+ for (let i = 0; i < 512; i++) {
+ buf[i] = 0
+ }
+
+ new Header({
+ // XXX split the path
+ // then the path should be PaxHeader + basename, but less than 99,
+ // prepend with the dirname
+ path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99),
+ mode: this.mode || 0o644,
+ uid: this.uid || null,
+ gid: this.gid || null,
+ size: bodyLen,
+ mtime: this.mtime || null,
+ type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
+ linkpath: '',
+ uname: this.uname || '',
+ gname: this.gname || '',
+ devmaj: 0,
+ devmin: 0,
+ atime: this.atime || null,
+ ctime: this.ctime || null
+ }).encode(buf)
+
+ buf.write(body, 512, bodyLen, 'utf8')
+
+ // null pad after the body
+ for (let i = bodyLen + 512; i < buf.length; i++) {
+ buf[i] = 0
+ }
+
+ return buf
+ }
+
+ encodeBody () {
+ return (
+ this.encodeField('path') +
+ this.encodeField('ctime') +
+ this.encodeField('atime') +
+ this.encodeField('dev') +
+ this.encodeField('ino') +
+ this.encodeField('nlink') +
+ this.encodeField('charset') +
+ this.encodeField('comment') +
+ this.encodeField('gid') +
+ this.encodeField('gname') +
+ this.encodeField('linkpath') +
+ this.encodeField('mtime') +
+ this.encodeField('size') +
+ this.encodeField('uid') +
+ this.encodeField('uname')
+ )
+ }
+
+ encodeField (field) {
+ if (this[field] === null || this[field] === undefined)
+ return ''
+ const v = this[field] instanceof Date ? this[field].getTime() / 1000
+ : this[field]
+ const s = ' ' +
+ (field === 'dev' || field === 'ino' || field === 'nlink'
+ ? 'SCHILY.' : '') +
+ field + '=' + v + '\n'
+ const byteLen = Buffer.byteLength(s)
+    // the length prefix counts its own digits in ascii base-10: if the
+    // rest of the record is 9 bytes, a 1-digit prefix makes it 10, which
+    // needs 2 digits, so the final length is 11 ('11' + the 9 bytes)
+ let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
+ if (byteLen + digits >= Math.pow(10, digits))
+ digits += 1
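+    // e.g. an (assumed) 97-byte record keeps a 2-digit prefix
+    // (97 + 2 === 99 < 100), so len === 99; a 98-byte record would
+    // bump to 3 digits (98 + 2 === 100), giving len === 101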
+ const len = digits + byteLen
+ return len + s
+ }
+}
+
+Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g)
+
+const merge = (a, b) =>
+ b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a
+
+const parseKV = string =>
+ string
+ .replace(/\n$/, '')
+ .split('\n')
+ .reduce(parseKVLine, Object.create(null))
+
+const parseKVLine = (set, line) => {
+ const n = parseInt(line, 10)
+
+ // XXX Values with \n in them will fail this.
+ // Refactor to not be a naive line-by-line parse.
+ if (n !== Buffer.byteLength(line) + 1)
+ return set
+
+ line = line.substr((n + ' ').length)
+ const kv = line.split('=')
+ const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1')
+ if (!k)
+ return set
+
+ const v = kv.join('=')
+ set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k)
+ ? new Date(v * 1000)
+ : /^[0-9]+$/.test(v) ? +v
+ : v
+ return set
+}
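+
+// e.g. parseKVLine(Object.create(null), '18 path=hello.txt') yields
+// { path: 'hello.txt' }: the 18 counts its own digits, the space, the
+// key=value text, and the record's (already stripped) trailing newline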
+
+module.exports = Pax
diff --git a/node_modules/cacache/node_modules/tar/lib/read-entry.js b/node_modules/cacache/node_modules/tar/lib/read-entry.js
new file mode 100644
index 000000000..8acee94ba
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/read-entry.js
@@ -0,0 +1,98 @@
+'use strict'
+const types = require('./types.js')
+const MiniPass = require('minipass')
+
+const SLURP = Symbol('slurp')
+module.exports = class ReadEntry extends MiniPass {
+ constructor (header, ex, gex) {
+ super()
+ // read entries always start life paused. this is to avoid the
+  // situation where Minipass's auto-ending of empty streams results
+ // in an entry ending before we're ready for it.
+ this.pause()
+ this.extended = ex
+ this.globalExtended = gex
+ this.header = header
+ this.startBlockSize = 512 * Math.ceil(header.size / 512)
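+    // e.g. an (assumed) 600-byte entry occupies 1024 bytes of blocks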
+ this.blockRemain = this.startBlockSize
+ this.remain = header.size
+ this.type = header.type
+ this.meta = false
+ this.ignore = false
+ switch (this.type) {
+ case 'File':
+ case 'OldFile':
+ case 'Link':
+ case 'SymbolicLink':
+ case 'CharacterDevice':
+ case 'BlockDevice':
+ case 'Directory':
+ case 'FIFO':
+ case 'ContiguousFile':
+ case 'GNUDumpDir':
+ break
+
+ case 'NextFileHasLongLinkpath':
+ case 'NextFileHasLongPath':
+ case 'OldGnuLongPath':
+ case 'GlobalExtendedHeader':
+ case 'ExtendedHeader':
+ case 'OldExtendedHeader':
+ this.meta = true
+ break
+
+ // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
+ // it may be worth doing the same, but with a warning.
+ default:
+ this.ignore = true
+ }
+
+ this.path = header.path
+ this.mode = header.mode
+ if (this.mode)
+ this.mode = this.mode & 0o7777
+ this.uid = header.uid
+ this.gid = header.gid
+ this.uname = header.uname
+ this.gname = header.gname
+ this.size = header.size
+ this.mtime = header.mtime
+ this.atime = header.atime
+ this.ctime = header.ctime
+ this.linkpath = header.linkpath
+ this.uname = header.uname
+ this.gname = header.gname
+
+ if (ex) this[SLURP](ex)
+ if (gex) this[SLURP](gex, true)
+ }
+
+ write (data) {
+ const writeLen = data.length
+ if (writeLen > this.blockRemain)
+ throw new Error('writing more to entry than is appropriate')
+
+ const r = this.remain
+ const br = this.blockRemain
+ this.remain = Math.max(0, r - writeLen)
+ this.blockRemain = Math.max(0, br - writeLen)
+ if (this.ignore)
+ return true
+
+ if (r >= writeLen)
+ return super.write(data)
+
+ // r < writeLen
+ return super.write(data.slice(0, r))
+ }
+
+ [SLURP] (ex, global) {
+ for (let k in ex) {
+ // we slurp in everything except for the path attribute in
+ // a global extended header, because that's weird.
+ if (ex[k] !== null && ex[k] !== undefined &&
+ !(global && k === 'path'))
+ this[k] = ex[k]
+ }
+ }
+}
diff --git a/node_modules/cacache/node_modules/tar/lib/replace.js b/node_modules/cacache/node_modules/tar/lib/replace.js
new file mode 100644
index 000000000..44126d1f8
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/replace.js
@@ -0,0 +1,219 @@
+'use strict'
+
+// tar -r
+const hlo = require('./high-level-opt.js')
+const Pack = require('./pack.js')
+const Parse = require('./parse.js')
+const fs = require('fs')
+const fsm = require('fs-minipass')
+const t = require('./list.js')
+const path = require('path')
+
+// starting at the head of the file, read a Header
+// If the checksum is invalid, that's our position to start writing
+// If it is valid, jump forward by the entry's size (rounded up to a
+// 512-byte block boundary) and try again.
+// Write the new Pack stream starting there.
+
+const Header = require('./header.js')
+
+const r = module.exports = (opt_, files, cb) => {
+ const opt = hlo(opt_)
+
+ if (!opt.file)
+ throw new TypeError('file is required')
+
+ if (opt.gzip)
+ throw new TypeError('cannot append to compressed archives')
+
+ if (!files || !Array.isArray(files) || !files.length)
+ throw new TypeError('no files or directories specified')
+
+ files = Array.from(files)
+
+ return opt.sync ? replaceSync(opt, files)
+ : replace(opt, files, cb)
+}
+
+const replaceSync = (opt, files) => {
+ const p = new Pack.Sync(opt)
+
+ let threw = true
+ let fd
+ let position
+
+ try {
+ try {
+ fd = fs.openSync(opt.file, 'r+')
+ } catch (er) {
+ if (er.code === 'ENOENT')
+ fd = fs.openSync(opt.file, 'w+')
+ else
+ throw er
+ }
+
+ const st = fs.fstatSync(fd)
+ const headBuf = Buffer.alloc(512)
+
+ POSITION: for (position = 0; position < st.size; position += 512) {
+ for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
+ bytes = fs.readSync(
+ fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
+ )
+
+ if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
+ throw new Error('cannot append to compressed archives')
+
+ if (!bytes)
+ break POSITION
+ }
+
+ let h = new Header(headBuf)
+ if (!h.cksumValid)
+ break
+ let entryBlockSize = 512 * Math.ceil(h.size / 512)
+ if (position + entryBlockSize + 512 > st.size)
+ break
+ // the 512 for the header we just parsed will be added as well
+ // also jump ahead all the blocks for the body
+ position += entryBlockSize
+ if (opt.mtimeCache)
+ opt.mtimeCache.set(h.path, h.mtime)
+ }
+ threw = false
+
+ streamSync(opt, p, position, fd, files)
+ } finally {
+ if (threw)
+ try { fs.closeSync(fd) } catch (er) {}
+ }
+}
+
+const streamSync = (opt, p, position, fd, files) => {
+ const stream = new fsm.WriteStreamSync(opt.file, {
+ fd: fd,
+ start: position
+ })
+ p.pipe(stream)
+ addFilesSync(p, files)
+}
+
+const replace = (opt, files, cb) => {
+ files = Array.from(files)
+ const p = new Pack(opt)
+
+ const getPos = (fd, size, cb_) => {
+ const cb = (er, pos) => {
+ if (er)
+ fs.close(fd, _ => cb_(er))
+ else
+ cb_(null, pos)
+ }
+
+ let position = 0
+ if (size === 0)
+ return cb(null, 0)
+
+ let bufPos = 0
+ const headBuf = Buffer.alloc(512)
+ const onread = (er, bytes) => {
+ if (er)
+ return cb(er)
+ bufPos += bytes
+ if (bufPos < 512 && bytes)
+ return fs.read(
+ fd, headBuf, bufPos, headBuf.length - bufPos,
+ position + bufPos, onread
+ )
+
+ if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
+ return cb(new Error('cannot append to compressed archives'))
+
+ // truncated header
+ if (bufPos < 512)
+ return cb(null, position)
+
+ const h = new Header(headBuf)
+ if (!h.cksumValid)
+ return cb(null, position)
+
+ const entryBlockSize = 512 * Math.ceil(h.size / 512)
+ if (position + entryBlockSize + 512 > size)
+ return cb(null, position)
+
+ position += entryBlockSize + 512
+ if (position >= size)
+ return cb(null, position)
+
+ if (opt.mtimeCache)
+ opt.mtimeCache.set(h.path, h.mtime)
+ bufPos = 0
+ fs.read(fd, headBuf, 0, 512, position, onread)
+ }
+ fs.read(fd, headBuf, 0, 512, position, onread)
+ }
+
+ const promise = new Promise((resolve, reject) => {
+ p.on('error', reject)
+ let flag = 'r+'
+ const onopen = (er, fd) => {
+ if (er && er.code === 'ENOENT' && flag === 'r+') {
+ flag = 'w+'
+ return fs.open(opt.file, flag, onopen)
+ }
+
+ if (er)
+ return reject(er)
+
+ fs.fstat(fd, (er, st) => {
+ if (er)
+ return reject(er)
+ getPos(fd, st.size, (er, position) => {
+ if (er)
+ return reject(er)
+ const stream = new fsm.WriteStream(opt.file, {
+ fd: fd,
+ start: position
+ })
+ p.pipe(stream)
+ stream.on('error', reject)
+ stream.on('close', resolve)
+ addFilesAsync(p, files)
+ })
+ })
+ }
+ fs.open(opt.file, flag, onopen)
+ })
+
+ return cb ? promise.then(cb, cb) : promise
+}
+
+const addFilesSync = (p, files) => {
+ files.forEach(file => {
+ if (file.charAt(0) === '@')
+ t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ sync: true,
+ noResume: true,
+ onentry: entry => p.add(entry)
+ })
+ else
+ p.add(file)
+ })
+ p.end()
+}
+
+const addFilesAsync = (p, files) => {
+ while (files.length) {
+ const file = files.shift()
+ if (file.charAt(0) === '@')
+ return t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ noResume: true,
+ onentry: entry => p.add(entry)
+ }).then(_ => addFilesAsync(p, files))
+ else
+ p.add(file)
+ }
+ p.end()
+}
diff --git a/node_modules/cacache/node_modules/tar/lib/types.js b/node_modules/cacache/node_modules/tar/lib/types.js
new file mode 100644
index 000000000..df425652b
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/types.js
@@ -0,0 +1,44 @@
+'use strict'
+// map types from key to human-friendly name
+exports.name = new Map([
+ ['0', 'File'],
+ // same as File
+ ['', 'OldFile'],
+ ['1', 'Link'],
+ ['2', 'SymbolicLink'],
+ // Devices and FIFOs aren't fully supported
+ // they are parsed, but skipped when unpacking
+ ['3', 'CharacterDevice'],
+ ['4', 'BlockDevice'],
+ ['5', 'Directory'],
+ ['6', 'FIFO'],
+ // same as File
+ ['7', 'ContiguousFile'],
+ // pax headers
+ ['g', 'GlobalExtendedHeader'],
+ ['x', 'ExtendedHeader'],
+ // vendor-specific stuff
+ // skip
+ ['A', 'SolarisACL'],
+ // like 5, but with data, which should be skipped
+ ['D', 'GNUDumpDir'],
+ // metadata only, skip
+ ['I', 'Inode'],
+ // data = link path of next file
+ ['K', 'NextFileHasLongLinkpath'],
+ // data = path of next file
+ ['L', 'NextFileHasLongPath'],
+ // skip
+ ['M', 'ContinuationFile'],
+ // like L
+ ['N', 'OldGnuLongPath'],
+ // skip
+ ['S', 'SparseFile'],
+ // skip
+ ['V', 'TapeVolumeHeader'],
+ // like x
+ ['X', 'OldExtendedHeader']
+])
+
+// map the other direction
+exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]))
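+
+// e.g. exports.name.get('5') yields 'Directory', and
+// exports.code.get('Directory') yields '5'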
diff --git a/node_modules/cacache/node_modules/tar/lib/unpack.js b/node_modules/cacache/node_modules/tar/lib/unpack.js
new file mode 100644
index 000000000..af0e0ffa0
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/unpack.js
@@ -0,0 +1,680 @@
+'use strict'
+
+// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet,
+// but the path reservations are required to avoid race conditions where
+// parallelized unpack ops may mess with one another, due to dependencies
+// (like a Link depending on its target) or destructive operations (like
+// clobbering an fs object to create one of a different type.)
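+//
+// A hedged usage sketch (this Unpack is the engine behind tar.x();
+// the destination cwd is an assumed example and must already exist):
+//
+//   const Unpack = require('./unpack.js')
+//   require('fs').createReadStream('archive.tar')
+//     .pipe(new Unpack({ cwd: '/tmp/dest' }))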
+
+const assert = require('assert')
+const EE = require('events').EventEmitter
+const Parser = require('./parse.js')
+const fs = require('fs')
+const fsm = require('fs-minipass')
+const path = require('path')
+const mkdir = require('./mkdir.js')
+const mkdirSync = mkdir.sync
+const wc = require('./winchars.js')
+const pathReservations = require('./path-reservations.js')
+
+const ONENTRY = Symbol('onEntry')
+const CHECKFS = Symbol('checkFs')
+const CHECKFS2 = Symbol('checkFs2')
+const ISREUSABLE = Symbol('isReusable')
+const MAKEFS = Symbol('makeFs')
+const FILE = Symbol('file')
+const DIRECTORY = Symbol('directory')
+const LINK = Symbol('link')
+const SYMLINK = Symbol('symlink')
+const HARDLINK = Symbol('hardlink')
+const UNSUPPORTED = Symbol('unsupported')
+const UNKNOWN = Symbol('unknown')
+const CHECKPATH = Symbol('checkPath')
+const MKDIR = Symbol('mkdir')
+const ONERROR = Symbol('onError')
+const PENDING = Symbol('pending')
+const PEND = Symbol('pend')
+const UNPEND = Symbol('unpend')
+const ENDED = Symbol('ended')
+const MAYBECLOSE = Symbol('maybeClose')
+const SKIP = Symbol('skip')
+const DOCHOWN = Symbol('doChown')
+const UID = Symbol('uid')
+const GID = Symbol('gid')
+const crypto = require('crypto')
+const getFlag = require('./get-write-flag.js')
+
+/* istanbul ignore next */
+const neverCalled = () => {
+ throw new Error('sync function called cb somehow?!?')
+}
+
+// Unlinks on Windows are not atomic.
+//
+// This means that if you have a file entry, followed by another
+// file entry with an identical name, and you cannot re-use the file
+// (because it's a hardlink, or because unlink:true is set, or it's
+// Windows, which does not have useful nlink values), then the unlink
+// will be committed to the disk AFTER the new file has been written
+// over the old one, deleting the new file.
+//
+// To work around this, on Windows systems, we rename the file and then
+// delete the renamed file. It's a sloppy kludge, but frankly, I do not
+// know of a better way to do this, given windows' non-atomic unlink
+// semantics.
+//
+// See: https://github.com/npm/node-tar/issues/183
+/* istanbul ignore next */
+const unlinkFile = (path, cb) => {
+ if (process.platform !== 'win32')
+ return fs.unlink(path, cb)
+
+ const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
+ fs.rename(path, name, er => {
+ if (er)
+ return cb(er)
+ fs.unlink(name, cb)
+ })
+}
+
+/* istanbul ignore next */
+const unlinkFileSync = path => {
+ if (process.platform !== 'win32')
+ return fs.unlinkSync(path)
+
+ const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
+ fs.renameSync(path, name)
+ fs.unlinkSync(name)
+}
+
+// pick the first argument that is a valid uint32, e.g.
+// (this.uid, entry.uid, this.processUid), and likewise for gids
+const uint32 = (a, b, c) =>
+ a === a >>> 0 ? a
+ : b === b >>> 0 ? b
+ : c
+
+class Unpack extends Parser {
+ constructor (opt) {
+ if (!opt)
+ opt = {}
+
+ opt.ondone = _ => {
+ this[ENDED] = true
+ this[MAYBECLOSE]()
+ }
+
+ super(opt)
+
+ this.reservations = pathReservations()
+
+ this.transform = typeof opt.transform === 'function' ? opt.transform : null
+
+ this.writable = true
+ this.readable = false
+
+ this[PENDING] = 0
+ this[ENDED] = false
+
+ this.dirCache = opt.dirCache || new Map()
+
+ if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
+ // need both or neither
+ if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number')
+ throw new TypeError('cannot set owner without number uid and gid')
+ if (opt.preserveOwner)
+ throw new TypeError(
+ 'cannot preserve owner in archive and also set owner explicitly')
+ this.uid = opt.uid
+ this.gid = opt.gid
+ this.setOwner = true
+ } else {
+ this.uid = null
+ this.gid = null
+ this.setOwner = false
+ }
+
+ // default true for root
+ if (opt.preserveOwner === undefined && typeof opt.uid !== 'number')
+ this.preserveOwner = process.getuid && process.getuid() === 0
+ else
+ this.preserveOwner = !!opt.preserveOwner
+
+ this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
+ process.getuid() : null
+ this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
+ process.getgid() : null
+
+ // mostly just for testing, but useful in some cases.
+ // Forcibly trigger a chown on every entry, no matter what
+ this.forceChown = opt.forceChown === true
+
+ // turn ><?| in filenames into 0xf000-higher encoded forms
+ this.win32 = !!opt.win32 || process.platform === 'win32'
+
+ // do not unpack over files that are newer than what's in the archive
+ this.newer = !!opt.newer
+
+ // do not unpack over ANY files
+ this.keep = !!opt.keep
+
+ // do not set mtime/atime of extracted entries
+ this.noMtime = !!opt.noMtime
+
+ // allow .., absolute path entries, and unpacking through symlinks
+ // without this, warn and skip .., relativize absolutes, and error
+ // on symlinks in extraction path
+ this.preservePaths = !!opt.preservePaths
+
+ // unlink files and links before writing. This breaks existing hard
+ // links, and removes symlink directories rather than erroring
+ this.unlink = !!opt.unlink
+
+ this.cwd = path.resolve(opt.cwd || process.cwd())
+ this.strip = +opt.strip || 0
+ this.processUmask = process.umask()
+ this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
+ // default mode for dirs created as parents
+ this.dmode = opt.dmode || (0o0777 & (~this.umask))
+ this.fmode = opt.fmode || (0o0666 & (~this.umask))
+ this.on('entry', entry => this[ONENTRY](entry))
+ }
+
+ // a bad or damaged archive is a warning for Parser, but an error
+ // when extracting. Mark those errors as unrecoverable, because
+ // the Unpack contract cannot be met.
+ warn (code, msg, data = {}) {
+ if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT')
+ data.recoverable = false
+ return super.warn(code, msg, data)
+ }
+
+ [MAYBECLOSE] () {
+ if (this[ENDED] && this[PENDING] === 0) {
+ this.emit('prefinish')
+ this.emit('finish')
+ this.emit('end')
+ this.emit('close')
+ }
+ }
+
+ [CHECKPATH] (entry) {
+ if (this.strip) {
+ const parts = entry.path.split(/\/|\\/)
+ if (parts.length < this.strip)
+ return false
+ entry.path = parts.slice(this.strip).join('/')
+
+ if (entry.type === 'Link') {
+ const linkparts = entry.linkpath.split(/\/|\\/)
+ if (linkparts.length >= this.strip)
+ entry.linkpath = linkparts.slice(this.strip).join('/')
+ }
+ }
+
+ if (!this.preservePaths) {
+ const p = entry.path
+ if (p.match(/(^|\/|\\)\.\.(\\|\/|$)/)) {
+ this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
+ entry,
+ path: p,
+ })
+ return false
+ }
+
+ // absolutes on posix are also absolutes on win32
+ // so we only need to test this one to get both
+ if (path.win32.isAbsolute(p)) {
+ const parsed = path.win32.parse(p)
+ entry.path = p.substr(parsed.root.length)
+ const r = parsed.root
+ this.warn('TAR_ENTRY_INFO', `stripping ${r} from absolute path`, {
+ entry,
+ path: p,
+ })
+ }
+ }
+
+ // only encode : chars that aren't drive letter indicators
+ if (this.win32) {
+ const parsed = path.win32.parse(entry.path)
+ entry.path = parsed.root === '' ? wc.encode(entry.path)
+ : parsed.root + wc.encode(entry.path.substr(parsed.root.length))
+ }
+
+ if (path.isAbsolute(entry.path))
+ entry.absolute = entry.path
+ else
+ entry.absolute = path.resolve(this.cwd, entry.path)
+
+ return true
+ }
+
+ [ONENTRY] (entry) {
+ if (!this[CHECKPATH](entry))
+ return entry.resume()
+
+ assert.equal(typeof entry.absolute, 'string')
+
+ switch (entry.type) {
+ case 'Directory':
+ case 'GNUDumpDir':
+ if (entry.mode)
+ entry.mode = entry.mode | 0o700
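+        // fall through: Directory entries take the CHECKFS path below too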
+
+ case 'File':
+ case 'OldFile':
+ case 'ContiguousFile':
+ case 'Link':
+ case 'SymbolicLink':
+ return this[CHECKFS](entry)
+
+ case 'CharacterDevice':
+ case 'BlockDevice':
+ case 'FIFO':
+ return this[UNSUPPORTED](entry)
+ }
+ }
+
+ [ONERROR] (er, entry) {
+ // Cwd has to exist, or else nothing works. That's serious.
+ // Other errors are warnings, which raise the error in strict
+ // mode, but otherwise continue on.
+ if (er.name === 'CwdError')
+ this.emit('error', er)
+ else {
+ this.warn('TAR_ENTRY_ERROR', er, {entry})
+ this[UNPEND]()
+ entry.resume()
+ }
+ }
+
+ [MKDIR] (dir, mode, cb) {
+ mkdir(dir, {
+ uid: this.uid,
+ gid: this.gid,
+ processUid: this.processUid,
+ processGid: this.processGid,
+ umask: this.processUmask,
+ preserve: this.preservePaths,
+ unlink: this.unlink,
+ cache: this.dirCache,
+ cwd: this.cwd,
+ mode: mode
+ }, cb)
+ }
+
+ [DOCHOWN] (entry) {
+ // in preserve owner mode, chown if the entry doesn't match process
+ // in set owner mode, chown if setting doesn't match process
+ return this.forceChown ||
+ this.preserveOwner &&
+ ( typeof entry.uid === 'number' && entry.uid !== this.processUid ||
+ typeof entry.gid === 'number' && entry.gid !== this.processGid )
+ ||
+ ( typeof this.uid === 'number' && this.uid !== this.processUid ||
+ typeof this.gid === 'number' && this.gid !== this.processGid )
+ }
+
+ [UID] (entry) {
+ return uint32(this.uid, entry.uid, this.processUid)
+ }
+
+ [GID] (entry) {
+ return uint32(this.gid, entry.gid, this.processGid)
+ }
+
+ [FILE] (entry, fullyDone) {
+ const mode = entry.mode & 0o7777 || this.fmode
+ const stream = new fsm.WriteStream(entry.absolute, {
+ flags: getFlag(entry.size),
+ mode: mode,
+ autoClose: false
+ })
+ stream.on('error', er => this[ONERROR](er, entry))
+
+ let actions = 1
+ const done = er => {
+ if (er)
+ return this[ONERROR](er, entry)
+
+ if (--actions === 0) {
+ fs.close(stream.fd, er => {
+ fullyDone()
+ er ? this[ONERROR](er, entry) : this[UNPEND]()
+ })
+ }
+ }
+
+ stream.on('finish', _ => {
+ // if futimes fails, try utimes
+ // if utimes fails, fail with the original error
+ // same for fchown/chown
+ const abs = entry.absolute
+ const fd = stream.fd
+
+ if (entry.mtime && !this.noMtime) {
+ actions++
+ const atime = entry.atime || new Date()
+ const mtime = entry.mtime
+ fs.futimes(fd, atime, mtime, er =>
+ er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
+ : done())
+ }
+
+ if (this[DOCHOWN](entry)) {
+ actions++
+ const uid = this[UID](entry)
+ const gid = this[GID](entry)
+ fs.fchown(fd, uid, gid, er =>
+ er ? fs.chown(abs, uid, gid, er2 => done(er2 && er))
+ : done())
+ }
+
+ done()
+ })
+
+ const tx = this.transform ? this.transform(entry) || entry : entry
+ if (tx !== entry) {
+ tx.on('error', er => this[ONERROR](er, entry))
+ entry.pipe(tx)
+ }
+ tx.pipe(stream)
+ }
+
+ [DIRECTORY] (entry, fullyDone) {
+ const mode = entry.mode & 0o7777 || this.dmode
+ this[MKDIR](entry.absolute, mode, er => {
+ if (er) {
+ fullyDone()
+ return this[ONERROR](er, entry)
+ }
+
+ let actions = 1
+ const done = _ => {
+ if (--actions === 0) {
+ fullyDone()
+ this[UNPEND]()
+ entry.resume()
+ }
+ }
+
+ if (entry.mtime && !this.noMtime) {
+ actions++
+ fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done)
+ }
+
+ if (this[DOCHOWN](entry)) {
+ actions++
+ fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done)
+ }
+
+ done()
+ })
+ }
+
+ [UNSUPPORTED] (entry) {
+ entry.unsupported = true
+ this.warn('TAR_ENTRY_UNSUPPORTED',
+ `unsupported entry type: ${entry.type}`, {entry})
+ entry.resume()
+ }
+
+ [SYMLINK] (entry, done) {
+ this[LINK](entry, entry.linkpath, 'symlink', done)
+ }
+
+ [HARDLINK] (entry, done) {
+ this[LINK](entry, path.resolve(this.cwd, entry.linkpath), 'link', done)
+ }
+
+ [PEND] () {
+ this[PENDING]++
+ }
+
+ [UNPEND] () {
+ this[PENDING]--
+ this[MAYBECLOSE]()
+ }
+
+ [SKIP] (entry) {
+ this[UNPEND]()
+ entry.resume()
+ }
+
+ // Check if we can reuse an existing filesystem entry safely and
+ // overwrite it, rather than unlinking and recreating
+ // Windows doesn't report a useful nlink, so we just never reuse entries
+ [ISREUSABLE] (entry, st) {
+ return entry.type === 'File' &&
+ !this.unlink &&
+ st.isFile() &&
+ st.nlink <= 1 &&
+ process.platform !== 'win32'
+ }
+
+ // check if a thing is there, and if so, try to clobber it
+ [CHECKFS] (entry) {
+ this[PEND]()
+ const paths = [entry.path]
+ if (entry.linkpath)
+ paths.push(entry.linkpath)
+ this.reservations.reserve(paths, done => this[CHECKFS2](entry, done))
+ }
+ [CHECKFS2] (entry, done) {
+ this[MKDIR](path.dirname(entry.absolute), this.dmode, er => {
+ if (er) {
+ done()
+ return this[ONERROR](er, entry)
+ }
+ fs.lstat(entry.absolute, (er, st) => {
+ if (st && (this.keep || this.newer && st.mtime > entry.mtime)) {
+ this[SKIP](entry)
+ done()
+ } else if (er || this[ISREUSABLE](entry, st)) {
+ this[MAKEFS](null, entry, done)
+        } else if (st.isDirectory()) {
+ if (entry.type === 'Directory') {
+ if (!entry.mode || (st.mode & 0o7777) === entry.mode)
+ this[MAKEFS](null, entry, done)
+ else
+ fs.chmod(entry.absolute, entry.mode,
+ er => this[MAKEFS](er, entry, done))
+ } else
+ fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry, done))
+ } else
+ unlinkFile(entry.absolute, er => this[MAKEFS](er, entry, done))
+ })
+ })
+ }
+
+ [MAKEFS] (er, entry, done) {
+ if (er)
+ return this[ONERROR](er, entry)
+
+ switch (entry.type) {
+ case 'File':
+ case 'OldFile':
+ case 'ContiguousFile':
+ return this[FILE](entry, done)
+
+ case 'Link':
+ return this[HARDLINK](entry, done)
+
+ case 'SymbolicLink':
+ return this[SYMLINK](entry, done)
+
+ case 'Directory':
+ case 'GNUDumpDir':
+ return this[DIRECTORY](entry, done)
+ }
+ }
+
+ [LINK] (entry, linkpath, link, done) {
+ // XXX: get the type ('file' or 'dir') for windows
+ fs[link](linkpath, entry.absolute, er => {
+ if (er)
+ return this[ONERROR](er, entry)
+ done()
+ this[UNPEND]()
+ entry.resume()
+ })
+ }
+}
+
+class UnpackSync extends Unpack {
+ constructor (opt) {
+ super(opt)
+ }
+
+ [CHECKFS] (entry) {
+ const er = this[MKDIR](path.dirname(entry.absolute), this.dmode, neverCalled)
+ if (er)
+ return this[ONERROR](er, entry)
+ try {
+ const st = fs.lstatSync(entry.absolute)
+ if (this.keep || this.newer && st.mtime > entry.mtime)
+ return this[SKIP](entry)
+ else if (this[ISREUSABLE](entry, st))
+ return this[MAKEFS](null, entry, neverCalled)
+ else {
+ try {
+ if (st.isDirectory()) {
+ if (entry.type === 'Directory') {
+ if (entry.mode && (st.mode & 0o7777) !== entry.mode)
+ fs.chmodSync(entry.absolute, entry.mode)
+ } else
+ fs.rmdirSync(entry.absolute)
+ } else
+ unlinkFileSync(entry.absolute)
+ return this[MAKEFS](null, entry, neverCalled)
+ } catch (er) {
+ return this[ONERROR](er, entry)
+ }
+ }
+ } catch (er) {
+ return this[MAKEFS](null, entry, neverCalled)
+ }
+ }
+
+ [FILE] (entry, _) {
+ const mode = entry.mode & 0o7777 || this.fmode
+
+ const oner = er => {
+ let closeError
+ try {
+ fs.closeSync(fd)
+ } catch (e) {
+ closeError = e
+ }
+ if (er || closeError)
+ this[ONERROR](er || closeError, entry)
+ }
+
+ let stream
+ let fd
+ try {
+ fd = fs.openSync(entry.absolute, getFlag(entry.size), mode)
+ } catch (er) {
+ return oner(er)
+ }
+ const tx = this.transform ? this.transform(entry) || entry : entry
+ if (tx !== entry) {
+ tx.on('error', er => this[ONERROR](er, entry))
+ entry.pipe(tx)
+ }
+
+ tx.on('data', chunk => {
+ try {
+ fs.writeSync(fd, chunk, 0, chunk.length)
+ } catch (er) {
+ oner(er)
+ }
+ })
+
+ tx.on('end', _ => {
+ let er = null
+ // try both, falling futimes back to utimes
+ // if either fails, handle the first error
+ if (entry.mtime && !this.noMtime) {
+ const atime = entry.atime || new Date()
+ const mtime = entry.mtime
+ try {
+ fs.futimesSync(fd, atime, mtime)
+ } catch (futimeser) {
+ try {
+ fs.utimesSync(entry.absolute, atime, mtime)
+ } catch (utimeser) {
+ er = futimeser
+ }
+ }
+ }
+
+ if (this[DOCHOWN](entry)) {
+ const uid = this[UID](entry)
+ const gid = this[GID](entry)
+
+ try {
+ fs.fchownSync(fd, uid, gid)
+ } catch (fchowner) {
+ try {
+ fs.chownSync(entry.absolute, uid, gid)
+ } catch (chowner) {
+ er = er || fchowner
+ }
+ }
+ }
+
+ oner(er)
+ })
+ }
+
+ [DIRECTORY] (entry, _) {
+ const mode = entry.mode & 0o7777 || this.dmode
+ const er = this[MKDIR](entry.absolute, mode)
+ if (er)
+ return this[ONERROR](er, entry)
+ if (entry.mtime && !this.noMtime) {
+ try {
+ fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime)
+ } catch (er) {}
+ }
+ if (this[DOCHOWN](entry)) {
+ try {
+ fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))
+ } catch (er) {}
+ }
+ entry.resume()
+ }
+
+ [MKDIR] (dir, mode) {
+ try {
+ return mkdir.sync(dir, {
+ uid: this.uid,
+ gid: this.gid,
+ processUid: this.processUid,
+ processGid: this.processGid,
+ umask: this.processUmask,
+ preserve: this.preservePaths,
+ unlink: this.unlink,
+ cache: this.dirCache,
+ cwd: this.cwd,
+ mode: mode
+ })
+ } catch (er) {
+ return er
+ }
+ }
+
+ [LINK] (entry, linkpath, link, _) {
+ try {
+ fs[link + 'Sync'](linkpath, entry.absolute)
+ entry.resume()
+ } catch (er) {
+ return this[ONERROR](er, entry)
+ }
+ }
+}
+
+Unpack.Sync = UnpackSync
+module.exports = Unpack
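
A minimal usage sketch of the stream defined above (an assumption for illustration: the published `tar` package entry point, which exposes this class as `tar.Unpack`):

```js
// Sketch: extract an archive with the Unpack writable stream.
// Assumes the public 'tar' package; the underlying parser auto-detects
// gzipped input, and recoverable problems emit 'warn' unless strict.
const fs = require('fs')
const tar = require('tar')

const unpack = new tar.Unpack({
  cwd: 'dest',   // entries are written beneath this directory
  strict: false  // 'warn' instead of 'error' for recoverable issues
})
unpack.on('warn', (code, message, data) => console.error(code, message))

fs.createReadStream('archive.tgz').pipe(unpack)
```
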
diff --git a/node_modules/cacache/node_modules/tar/lib/update.js b/node_modules/cacache/node_modules/tar/lib/update.js
new file mode 100644
index 000000000..16c3e93ed
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/update.js
@@ -0,0 +1,36 @@
+'use strict'
+
+// tar -u
+
+const hlo = require('./high-level-opt.js')
+const r = require('./replace.js')
+// just call tar.r with the filter and mtimeCache
+
+const u = module.exports = (opt_, files, cb) => {
+ const opt = hlo(opt_)
+
+ if (!opt.file)
+ throw new TypeError('file is required')
+
+ if (opt.gzip)
+ throw new TypeError('cannot append to compressed archives')
+
+ if (!files || !Array.isArray(files) || !files.length)
+ throw new TypeError('no files or directories specified')
+
+ files = Array.from(files)
+
+ mtimeFilter(opt)
+ return r(opt, files, cb)
+}
+
+const mtimeFilter = opt => {
+ const filter = opt.filter
+
+ if (!opt.mtimeCache)
+ opt.mtimeCache = new Map()
+
+ opt.filter = filter ? (path, stat) =>
+ filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime)
+ : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime)
+}
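
A short usage sketch of the `tar.u` entry point defined above; per the checks in the file, the target must be an existing, uncompressed archive:

```js
// Sketch: append files to a tarball only when they are newer than the
// copies already recorded in it (the tar -u behavior).
const tar = require('tar')

tar.u({ file: 'bundle.tar' }, ['lib/index.js', 'README.md'])
  .then(() => console.log('bundle.tar updated'))
  .catch(er => console.error(er.message)) // e.g. 'cannot append to compressed archives'
```
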
diff --git a/node_modules/cacache/node_modules/tar/lib/warn-mixin.js b/node_modules/cacache/node_modules/tar/lib/warn-mixin.js
new file mode 100644
index 000000000..11eb52cc6
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/warn-mixin.js
@@ -0,0 +1,21 @@
+'use strict'
+module.exports = Base => class extends Base {
+ warn (code, message, data = {}) {
+ if (this.file)
+ data.file = this.file
+ if (this.cwd)
+ data.cwd = this.cwd
+ data.code = message instanceof Error && message.code || code
+ data.tarCode = code
+ if (!this.strict && data.recoverable !== false) {
+ if (message instanceof Error) {
+ data = Object.assign(message, data)
+ message = message.message
+ }
+ this.emit('warn', data.tarCode, message, data)
+ } else if (message instanceof Error) {
+ this.emit('error', Object.assign(message, data))
+ } else
+ this.emit('error', Object.assign(new Error(`${code}: ${message}`), data))
+ }
+}
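
From the consumer side, the effect of the mixin is roughly the following (a sketch; `tar.Parse` is one of the classes that mixes it in):

```js
// Sketch: a non-strict stream reports recoverable problems via 'warn';
// the same conditions on a { strict: true } instance emit 'error'.
const tar = require('tar')

const parser = new tar.Parse({
  onwarn: (code, message, data) => {
    // data.tarCode is the tar-level code; data.code may instead carry an
    // underlying system error code, per the mixin above.
    console.error(`[${code}] ${message}`)
  }
})
```
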
diff --git a/node_modules/cacache/node_modules/tar/lib/winchars.js b/node_modules/cacache/node_modules/tar/lib/winchars.js
new file mode 100644
index 000000000..cf6ea0606
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/winchars.js
@@ -0,0 +1,23 @@
+'use strict'
+
+// When writing files on Windows, translate the characters to their
+// 0xf000 higher-encoded versions.
+
+const raw = [
+ '|',
+ '<',
+ '>',
+ '?',
+ ':'
+]
+
+const win = raw.map(char =>
+ String.fromCharCode(0xf000 + char.charCodeAt(0)))
+
+const toWin = new Map(raw.map((char, i) => [char, win[i]]))
+const toRaw = new Map(win.map((char, i) => [char, raw[i]]))
+
+module.exports = {
+ encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s),
+ decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s)
+}
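
A round-trip sketch of the mapping (assuming the file is required directly from the tar package's lib directory, which its `files` list permits):

```js
// Sketch: reserved Windows filename characters are shifted into the
// 0xf000 private-use range on encode and restored on decode.
const winchars = require('tar/lib/winchars.js') // assumed deep-require path

const encoded = winchars.encode('a<b>:c.txt')
console.log(encoded)                  // 'a\uf03cb\uf03e\uf03ac.txt'
console.log(winchars.decode(encoded)) // 'a<b>:c.txt'
```
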
diff --git a/node_modules/cacache/node_modules/tar/lib/write-entry.js b/node_modules/cacache/node_modules/tar/lib/write-entry.js
new file mode 100644
index 000000000..0e33cb59d
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/lib/write-entry.js
@@ -0,0 +1,436 @@
+'use strict'
+const MiniPass = require('minipass')
+const Pax = require('./pax.js')
+const Header = require('./header.js')
+const ReadEntry = require('./read-entry.js')
+const fs = require('fs')
+const path = require('path')
+
+const types = require('./types.js')
+const maxReadSize = 16 * 1024 * 1024
+const PROCESS = Symbol('process')
+const FILE = Symbol('file')
+const DIRECTORY = Symbol('directory')
+const SYMLINK = Symbol('symlink')
+const HARDLINK = Symbol('hardlink')
+const HEADER = Symbol('header')
+const READ = Symbol('read')
+const LSTAT = Symbol('lstat')
+const ONLSTAT = Symbol('onlstat')
+const ONREAD = Symbol('onread')
+const ONREADLINK = Symbol('onreadlink')
+const OPENFILE = Symbol('openfile')
+const ONOPENFILE = Symbol('onopenfile')
+const CLOSE = Symbol('close')
+const MODE = Symbol('mode')
+const warner = require('./warn-mixin.js')
+const winchars = require('./winchars.js')
+
+const modeFix = require('./mode-fix.js')
+
+const WriteEntry = warner(class WriteEntry extends MiniPass {
+ constructor (p, opt) {
+ opt = opt || {}
+ super(opt)
+ if (typeof p !== 'string')
+ throw new TypeError('path is required')
+ this.path = p
+ // suppress atime, ctime, uid, gid, uname, gname
+ this.portable = !!opt.portable
+ // until node has builtin pwnam functions, this'll have to do
+ this.myuid = process.getuid && process.getuid()
+ this.myuser = process.env.USER || ''
+ this.maxReadSize = opt.maxReadSize || maxReadSize
+ this.linkCache = opt.linkCache || new Map()
+ this.statCache = opt.statCache || new Map()
+ this.preservePaths = !!opt.preservePaths
+ this.cwd = opt.cwd || process.cwd()
+ this.strict = !!opt.strict
+ this.noPax = !!opt.noPax
+ this.noMtime = !!opt.noMtime
+ this.mtime = opt.mtime || null
+
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+
+ let pathWarn = false
+ if (!this.preservePaths && path.win32.isAbsolute(p)) {
+ // absolutes on posix are also absolutes on win32
+ // so we only need to test this one to get both
+ const parsed = path.win32.parse(p)
+ this.path = p.substr(parsed.root.length)
+ pathWarn = parsed.root
+ }
+
+ this.win32 = !!opt.win32 || process.platform === 'win32'
+ if (this.win32) {
+ this.path = winchars.decode(this.path.replace(/\\/g, '/'))
+ p = p.replace(/\\/g, '/')
+ }
+
+ this.absolute = opt.absolute || path.resolve(this.cwd, p)
+
+ if (this.path === '')
+ this.path = './'
+
+ if (pathWarn) {
+ this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
+ entry: this,
+ path: pathWarn + this.path,
+ })
+ }
+
+ if (this.statCache.has(this.absolute))
+ this[ONLSTAT](this.statCache.get(this.absolute))
+ else
+ this[LSTAT]()
+ }
+
+ [LSTAT] () {
+ fs.lstat(this.absolute, (er, stat) => {
+ if (er)
+ return this.emit('error', er)
+ this[ONLSTAT](stat)
+ })
+ }
+
+ [ONLSTAT] (stat) {
+ this.statCache.set(this.absolute, stat)
+ this.stat = stat
+ if (!stat.isFile())
+ stat.size = 0
+ this.type = getType(stat)
+ this.emit('stat', stat)
+ this[PROCESS]()
+ }
+
+ [PROCESS] () {
+ switch (this.type) {
+ case 'File': return this[FILE]()
+ case 'Directory': return this[DIRECTORY]()
+ case 'SymbolicLink': return this[SYMLINK]()
+ // unsupported types are ignored.
+ default: return this.end()
+ }
+ }
+
+ [MODE] (mode) {
+ return modeFix(mode, this.type === 'Directory', this.portable)
+ }
+
+ [HEADER] () {
+ if (this.type === 'Directory' && this.portable)
+ this.noMtime = true
+
+ this.header = new Header({
+ path: this.path,
+ linkpath: this.linkpath,
+ // only the permissions and setuid/setgid/sticky bitflags
+ // not the higher-order bits that specify file type
+ mode: this[MODE](this.stat.mode),
+ uid: this.portable ? null : this.stat.uid,
+ gid: this.portable ? null : this.stat.gid,
+ size: this.stat.size,
+ mtime: this.noMtime ? null : this.mtime || this.stat.mtime,
+ type: this.type,
+ uname: this.portable ? null :
+ this.stat.uid === this.myuid ? this.myuser : '',
+ atime: this.portable ? null : this.stat.atime,
+ ctime: this.portable ? null : this.stat.ctime
+ })
+
+ if (this.header.encode() && !this.noPax)
+ this.write(new Pax({
+ atime: this.portable ? null : this.header.atime,
+ ctime: this.portable ? null : this.header.ctime,
+ gid: this.portable ? null : this.header.gid,
+ mtime: this.noMtime ? null : this.mtime || this.header.mtime,
+ path: this.path,
+ linkpath: this.linkpath,
+ size: this.header.size,
+ uid: this.portable ? null : this.header.uid,
+ uname: this.portable ? null : this.header.uname,
+ dev: this.portable ? null : this.stat.dev,
+ ino: this.portable ? null : this.stat.ino,
+ nlink: this.portable ? null : this.stat.nlink
+ }).encode())
+ this.write(this.header.block)
+ }
+
+ [DIRECTORY] () {
+ if (this.path.substr(-1) !== '/')
+ this.path += '/'
+ this.stat.size = 0
+ this[HEADER]()
+ this.end()
+ }
+
+ [SYMLINK] () {
+ fs.readlink(this.absolute, (er, linkpath) => {
+ if (er)
+ return this.emit('error', er)
+ this[ONREADLINK](linkpath)
+ })
+ }
+
+ [ONREADLINK] (linkpath) {
+ this.linkpath = linkpath.replace(/\\/g, '/')
+ this[HEADER]()
+ this.end()
+ }
+
+ [HARDLINK] (linkpath) {
+ this.type = 'Link'
+ this.linkpath = path.relative(this.cwd, linkpath).replace(/\\/g, '/')
+ this.stat.size = 0
+ this[HEADER]()
+ this.end()
+ }
+
+ [FILE] () {
+ if (this.stat.nlink > 1) {
+ const linkKey = this.stat.dev + ':' + this.stat.ino
+ if (this.linkCache.has(linkKey)) {
+ const linkpath = this.linkCache.get(linkKey)
+ if (linkpath.indexOf(this.cwd) === 0)
+ return this[HARDLINK](linkpath)
+ }
+ this.linkCache.set(linkKey, this.absolute)
+ }
+
+ this[HEADER]()
+ if (this.stat.size === 0)
+ return this.end()
+
+ this[OPENFILE]()
+ }
+
+ [OPENFILE] () {
+ fs.open(this.absolute, 'r', (er, fd) => {
+ if (er)
+ return this.emit('error', er)
+ this[ONOPENFILE](fd)
+ })
+ }
+
+ [ONOPENFILE] (fd) {
+ const blockLen = 512 * Math.ceil(this.stat.size / 512)
+ const bufLen = Math.min(blockLen, this.maxReadSize)
+ const buf = Buffer.allocUnsafe(bufLen)
+ this[READ](fd, buf, 0, buf.length, 0, this.stat.size, blockLen)
+ }
+
+ [READ] (fd, buf, offset, length, pos, remain, blockRemain) {
+ fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
+ if (er) {
+ // ignoring the error from close(2) is a bad practice, but at
+ // this point we already have an error, don't need another one
+ return this[CLOSE](fd, () => this.emit('error', er))
+ }
+ this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead)
+ })
+ }
+
+ [CLOSE] (fd, cb) {
+ fs.close(fd, cb)
+ }
+
+ [ONREAD] (fd, buf, offset, length, pos, remain, blockRemain, bytesRead) {
+ if (bytesRead <= 0 && remain > 0) {
+ const er = new Error('encountered unexpected EOF')
+ er.path = this.absolute
+ er.syscall = 'read'
+ er.code = 'EOF'
+ return this[CLOSE](fd, () => this.emit('error', er))
+ }
+
+ if (bytesRead > remain) {
+ const er = new Error('did not encounter expected EOF')
+ er.path = this.absolute
+ er.syscall = 'read'
+ er.code = 'EOF'
+ return this[CLOSE](fd, () => this.emit('error', er))
+ }
+
+ // null out the rest of the buffer, if we could fit the block padding
+ if (bytesRead === remain) {
+ for (let i = bytesRead; i < length && bytesRead < blockRemain; i++) {
+ buf[i + offset] = 0
+ bytesRead ++
+ remain ++
+ }
+ }
+
+ const writeBuf = offset === 0 && bytesRead === buf.length ?
+ buf : buf.slice(offset, offset + bytesRead)
+ remain -= bytesRead
+ blockRemain -= bytesRead
+ pos += bytesRead
+ offset += bytesRead
+
+ this.write(writeBuf)
+
+ if (!remain) {
+ if (blockRemain)
+ this.write(Buffer.alloc(blockRemain))
+ return this[CLOSE](fd, er => er ? this.emit('error', er) : this.end())
+ }
+
+ if (offset >= length) {
+ buf = Buffer.allocUnsafe(length)
+ offset = 0
+ }
+ length = buf.length - offset
+ this[READ](fd, buf, offset, length, pos, remain, blockRemain)
+ }
+})
+
+class WriteEntrySync extends WriteEntry {
+ constructor (path, opt) {
+ super(path, opt)
+ }
+
+ [LSTAT] () {
+ this[ONLSTAT](fs.lstatSync(this.absolute))
+ }
+
+ [SYMLINK] () {
+ this[ONREADLINK](fs.readlinkSync(this.absolute))
+ }
+
+ [OPENFILE] () {
+ this[ONOPENFILE](fs.openSync(this.absolute, 'r'))
+ }
+
+ [READ] (fd, buf, offset, length, pos, remain, blockRemain) {
+ let threw = true
+ try {
+ const bytesRead = fs.readSync(fd, buf, offset, length, pos)
+ this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead)
+ threw = false
+ } finally {
+ // ignoring the error from close(2) is a bad practice, but at
+ // this point we already have an error, don't need another one
+ if (threw)
+ try { this[CLOSE](fd, () => {}) } catch (er) {}
+ }
+ }
+
+ [CLOSE] (fd, cb) {
+ fs.closeSync(fd)
+ cb()
+ }
+}
+
+const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
+ constructor (readEntry, opt) {
+ opt = opt || {}
+ super(opt)
+ this.preservePaths = !!opt.preservePaths
+ this.portable = !!opt.portable
+ this.strict = !!opt.strict
+ this.noPax = !!opt.noPax
+ this.noMtime = !!opt.noMtime
+
+ this.readEntry = readEntry
+ this.type = readEntry.type
+ if (this.type === 'Directory' && this.portable)
+ this.noMtime = true
+
+ this.path = readEntry.path
+ this.mode = this[MODE](readEntry.mode)
+ this.uid = this.portable ? null : readEntry.uid
+ this.gid = this.portable ? null : readEntry.gid
+ this.uname = this.portable ? null : readEntry.uname
+ this.gname = this.portable ? null : readEntry.gname
+ this.size = readEntry.size
+ this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime
+ this.atime = this.portable ? null : readEntry.atime
+ this.ctime = this.portable ? null : readEntry.ctime
+ this.linkpath = readEntry.linkpath
+
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+
+ let pathWarn = false
+ if (path.isAbsolute(this.path) && !this.preservePaths) {
+ const parsed = path.parse(this.path)
+ pathWarn = parsed.root
+ this.path = this.path.substr(parsed.root.length)
+ }
+
+ this.remain = readEntry.size
+ this.blockRemain = readEntry.startBlockSize
+
+ this.header = new Header({
+ path: this.path,
+ linkpath: this.linkpath,
+ // only the permissions and setuid/setgid/sticky bitflags
+ // not the higher-order bits that specify file type
+ mode: this.mode,
+ uid: this.portable ? null : this.uid,
+ gid: this.portable ? null : this.gid,
+ size: this.size,
+ mtime: this.noMtime ? null : this.mtime,
+ type: this.type,
+ uname: this.portable ? null : this.uname,
+ atime: this.portable ? null : this.atime,
+ ctime: this.portable ? null : this.ctime
+ })
+
+ if (pathWarn) {
+ this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
+ entry: this,
+ path: pathWarn + this.path,
+ })
+ }
+
+ if (this.header.encode() && !this.noPax)
+ super.write(new Pax({
+ atime: this.portable ? null : this.atime,
+ ctime: this.portable ? null : this.ctime,
+ gid: this.portable ? null : this.gid,
+ mtime: this.noMtime ? null : this.mtime,
+ path: this.path,
+ linkpath: this.linkpath,
+ size: this.size,
+ uid: this.portable ? null : this.uid,
+ uname: this.portable ? null : this.uname,
+ dev: this.portable ? null : this.readEntry.dev,
+ ino: this.portable ? null : this.readEntry.ino,
+ nlink: this.portable ? null : this.readEntry.nlink
+ }).encode())
+
+ super.write(this.header.block)
+ readEntry.pipe(this)
+ }
+
+ [MODE] (mode) {
+ return modeFix(mode, this.type === 'Directory', this.portable)
+ }
+
+ write (data) {
+ const writeLen = data.length
+ if (writeLen > this.blockRemain)
+ throw new Error('writing more to entry than is appropriate')
+ this.blockRemain -= writeLen
+ return super.write(data)
+ }
+
+ end () {
+ if (this.blockRemain)
+ this.write(Buffer.alloc(this.blockRemain))
+ return super.end()
+ }
+})
+
+WriteEntry.Sync = WriteEntrySync
+WriteEntry.Tar = WriteEntryTar
+
+const getType = stat =>
+ stat.isFile() ? 'File'
+ : stat.isDirectory() ? 'Directory'
+ : stat.isSymbolicLink() ? 'SymbolicLink'
+ : 'Unsupported'
+
+module.exports = WriteEntry
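
These classes feed archive creation; a minimal sketch through the high-level `tar.c` wrapper, which (via Pack) creates a `WriteEntry` per path — the option names below match the constructor above:

```js
// Sketch: create a gzipped archive. Assumes the public 'tar' package.
// portable: true suppresses uid/gid, uname, atime and ctime in headers,
// as the WriteEntry constructor above shows.
const tar = require('tar')

tar.c(
  {
    gzip: true,
    portable: true,
    file: 'out.tgz',
    cwd: 'src'  // paths in the file list resolve against cwd
  },
  ['.']
).then(() => console.log('wrote out.tgz'))
```
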
diff --git a/node_modules/cacache/node_modules/tar/package.json b/node_modules/cacache/node_modules/tar/package.json
new file mode 100644
index 000000000..55b356790
--- /dev/null
+++ b/node_modules/cacache/node_modules/tar/package.json
@@ -0,0 +1,81 @@
+{
+ "_from": "tar@^6.0.2",
+ "_id": "tar@6.0.2",
+ "_inBundle": false,
+ "_integrity": "sha512-Glo3jkRtPcvpDlAs/0+hozav78yoXKFr+c4wgw62NNMO3oo4AaJdCo21Uu7lcwr55h39W2XD1LMERc64wtbItg==",
+ "_location": "/cacache/tar",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "tar@^6.0.2",
+ "name": "tar",
+ "escapedName": "tar",
+ "rawSpec": "^6.0.2",
+ "saveSpec": null,
+ "fetchSpec": "^6.0.2"
+ },
+ "_requiredBy": [
+ "/cacache"
+ ],
+ "_resolved": "https://registry.npmjs.org/tar/-/tar-6.0.2.tgz",
+ "_shasum": "5df17813468a6264ff14f766886c622b84ae2f39",
+ "_spec": "tar@^6.0.2",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/cacache",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/node-tar/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "chownr": "^2.0.0",
+ "fs-minipass": "^2.0.0",
+ "minipass": "^3.0.0",
+ "minizlib": "^2.1.0",
+ "mkdirp": "^1.0.3",
+ "yallist": "^4.0.0"
+ },
+ "deprecated": false,
+ "description": "tar for node",
+ "devDependencies": {
+ "chmodr": "^1.2.0",
+ "end-of-stream": "^1.4.3",
+ "events-to-array": "^1.1.2",
+ "mutate-fs": "^2.1.1",
+ "rimraf": "^2.7.1",
+ "tap": "^14.9.2",
+ "tar-fs": "^1.16.3",
+ "tar-stream": "^1.6.2"
+ },
+ "engines": {
+ "node": ">= 10"
+ },
+ "files": [
+ "index.js",
+ "lib/*.js"
+ ],
+ "homepage": "https://github.com/npm/node-tar#readme",
+ "license": "ISC",
+ "name": "tar",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/node-tar.git"
+ },
+ "scripts": {
+ "bench": "for i in benchmarks/*/*.js; do echo $i; for j in {1..5}; do node $i || break; done; done",
+ "genparse": "node scripts/generate-parse-fixtures.js",
+ "postpublish": "git push origin --follow-tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap"
+ },
+ "tap": {
+ "coverage-map": "map.js",
+ "check-coverage": true
+ },
+ "version": "6.0.2"
+}
diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json
index abe1a8114..70b741d2c 100644
--- a/node_modules/cacache/package.json
+++ b/node_modules/cacache/package.json
@@ -1,31 +1,36 @@
{
- "_from": "cacache@15.0.0",
- "_id": "cacache@15.0.0",
+ "_from": "cacache@latest",
+ "_id": "cacache@15.0.3",
"_inBundle": false,
- "_integrity": "sha512-L0JpXHhplbJSiDGzyJJnJCTL7er7NzbBgxzVqLswEb4bO91Zbv17OUMuUeu/q0ZwKn3V+1HM4wb9tO4eVE/K8g==",
+ "_integrity": "sha512-bc3jKYjqv7k4pWh7I/ixIjfcjPul4V4jme/WbjvwGS5LzoPL/GzXr4C5EgPNLO/QEZl9Oi61iGitYEdwcrwLCQ==",
"_location": "/cacache",
"_phantomChildren": {
- "chownr": "1.1.3",
- "glob": "7.1.4"
+ "fs-minipass": "2.1.0",
+ "glob": "7.1.4",
+ "minipass": "3.1.1",
+ "minizlib": "2.1.0",
+ "yallist": "4.0.0"
},
"_requested": {
- "type": "version",
+ "type": "tag",
"registry": true,
- "raw": "cacache@15.0.0",
+ "raw": "cacache@latest",
"name": "cacache",
"escapedName": "cacache",
- "rawSpec": "15.0.0",
+ "rawSpec": "latest",
"saveSpec": null,
- "fetchSpec": "15.0.0"
+ "fetchSpec": "latest"
},
"_requiredBy": [
"#USER",
- "/"
+ "/",
+ "/npm-registry-fetch/make-fetch-happen",
+ "/pacote"
],
- "_resolved": "https://registry.npmjs.org/cacache/-/cacache-15.0.0.tgz",
- "_shasum": "133b59edbd2a37ea8ef2d54964c6f247e47e5059",
- "_spec": "cacache@15.0.0",
- "_where": "/Users/claudiahdz/npm/cli",
+ "_resolved": "https://registry.npmjs.org/cacache/-/cacache-15.0.3.tgz",
+ "_shasum": "2225c2d1dd8e872339950d6a39c051e0e9334392",
+ "_spec": "cacache@latest",
+ "_where": "/Users/isaacs/dev/npm/cli",
"author": {
"name": "Kat Marchán",
"email": "kzm@sykosomatic.org"
@@ -49,7 +54,7 @@
}
],
"dependencies": {
- "chownr": "^1.1.2",
+ "chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
"glob": "^7.1.4",
"infer-owner": "^1.0.4",
@@ -59,19 +64,19 @@
"minipass-flush": "^1.0.5",
"minipass-pipeline": "^1.2.2",
"mkdirp": "^1.0.3",
- "move-concurrently": "^1.0.1",
- "p-map": "^3.0.0",
+ "move-file": "^2.0.0",
+ "p-map": "^4.0.0",
"promise-inflight": "^1.0.1",
- "rimraf": "^2.7.1",
+ "rimraf": "^3.0.2",
"ssri": "^8.0.0",
- "tar": "^6.0.1",
+ "tar": "^6.0.2",
"unique-filename": "^1.1.1"
},
"deprecated": false,
"description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
"devDependencies": {
"benchmark": "^2.1.4",
- "chalk": "^2.4.2",
+ "chalk": "^4.0.0",
"require-inject": "^1.4.4",
"standard": "^14.3.1",
"standard-version": "^7.1.0",
@@ -124,5 +129,5 @@
"100": true,
"test-regex": "test/[^/]*.js"
},
- "version": "15.0.0"
+ "version": "15.0.3"
}
diff --git a/node_modules/figgy-pudding/CHANGELOG.md b/node_modules/figgy-pudding/CHANGELOG.md
deleted file mode 100644
index 038f9c065..000000000
--- a/node_modules/figgy-pudding/CHANGELOG.md
+++ /dev/null
@@ -1,151 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="3.5.1"></a>
-## [3.5.1](https://github.com/zkat/figgy-pudding/compare/v3.5.0...v3.5.1) (2018-08-25)
-
-
-
-<a name="3.5.0"></a>
-# [3.5.0](https://github.com/zkat/figgy-pudding/compare/v3.4.1...v3.5.0) (2018-08-25)
-
-
-### Bug Fixes
-
-* **node:** get rid of Object.entries to add node6 support back ([074f779](https://github.com/zkat/figgy-pudding/commit/074f779))
-
-
-### Features
-
-* **node:** add node@10 to CI config ([78b8937](https://github.com/zkat/figgy-pudding/commit/78b8937))
-
-
-
-<a name="3.4.1"></a>
-## [3.4.1](https://github.com/zkat/figgy-pudding/compare/v3.4.0...v3.4.1) (2018-08-16)
-
-
-### Bug Fixes
-
-* **forEach:** get forEach to behave like a normal forEach ([c064755](https://github.com/zkat/figgy-pudding/commit/c064755))
-* **has:** get `in` keyword working right ([fafc5a8](https://github.com/zkat/figgy-pudding/commit/fafc5a8))
-* **iteration:** fix and test iteration of opts.other keys ([7a76217](https://github.com/zkat/figgy-pudding/commit/7a76217))
-* **iteration:** use proper args for forEach/toJSON ([974e879](https://github.com/zkat/figgy-pudding/commit/974e879))
-* **proxy:** make sure proxy corner-cases work ok ([8c66e45](https://github.com/zkat/figgy-pudding/commit/8c66e45))
-* **set:** fix and test the exceptions to writing ([206793b](https://github.com/zkat/figgy-pudding/commit/206793b))
-
-
-
-<a name="3.4.0"></a>
-# [3.4.0](https://github.com/zkat/figgy-pudding/compare/v3.3.0...v3.4.0) (2018-08-16)
-
-
-### Features
-
-* **iterator:** allow iteration over "other" keys ([3c53323](https://github.com/zkat/figgy-pudding/commit/3c53323))
-
-
-
-<a name="3.3.0"></a>
-# [3.3.0](https://github.com/zkat/figgy-pudding/compare/v3.2.1...v3.3.0) (2018-08-16)
-
-
-### Bug Fixes
-
-* **props:** allow symbols to pass through ([97b3464](https://github.com/zkat/figgy-pudding/commit/97b3464))
-
-
-### Features
-
-* **pudding:** iteration and serialization support ([0aaa50d](https://github.com/zkat/figgy-pudding/commit/0aaa50d))
-
-
-
-<a name="3.2.1"></a>
-## [3.2.1](https://github.com/zkat/figgy-pudding/compare/v3.2.0...v3.2.1) (2018-08-15)
-
-
-### Bug Fixes
-
-* **aliases:** make reverse aliases work correctly ([76a255e](https://github.com/zkat/figgy-pudding/commit/76a255e))
-
-
-
-<a name="3.2.0"></a>
-# [3.2.0](https://github.com/zkat/figgy-pudding/compare/v3.1.0...v3.2.0) (2018-07-26)
-
-
-### Bug Fixes
-
-* **concat:** have concat spit out a proxy, too ([64e3495](https://github.com/zkat/figgy-pudding/commit/64e3495))
-
-
-### Features
-
-* **default:** pass the pudding itself to default fns ([d9d9e09](https://github.com/zkat/figgy-pudding/commit/d9d9e09))
-
-
-
-<a name="3.1.0"></a>
-# [3.1.0](https://github.com/zkat/figgy-pudding/compare/v3.0.0...v3.1.0) (2018-04-08)
-
-
-### Features
-
-* **opts:** allow direct option fetching without .get() ([ca77aad](https://github.com/zkat/figgy-pudding/commit/ca77aad))
-
-
-
-<a name="3.0.0"></a>
-# [3.0.0](https://github.com/zkat/figgy-pudding/compare/v2.0.1...v3.0.0) (2018-04-06)
-
-
-### Bug Fixes
-
-* **ci:** oops -- forgot to update CI config ([7a40563](https://github.com/zkat/figgy-pudding/commit/7a40563))
-* **get:** make provider lookup order like Object.assign ([33ff89b](https://github.com/zkat/figgy-pudding/commit/33ff89b))
-
-
-### Features
-
-* **concat:** add .concat() method to opts ([d310fce](https://github.com/zkat/figgy-pudding/commit/d310fce))
-
-
-### meta
-
-* drop support for node@4 and node@7 ([9f8a61c](https://github.com/zkat/figgy-pudding/commit/9f8a61c))
-
-
-### BREAKING CHANGES
-
-* node@4 and node@7 are no longer supported
-* **get:** shadow order for properties in providers is reversed
-
-
-
-<a name="2.0.1"></a>
-## [2.0.1](https://github.com/zkat/figgy-pudding/compare/v2.0.0...v2.0.1) (2018-03-16)
-
-
-### Bug Fixes
-
-* **opts:** ignore non-object providers ([7b9c0f8](https://github.com/zkat/figgy-pudding/commit/7b9c0f8))
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/zkat/figgy-pudding/compare/v1.0.0...v2.0.0) (2018-03-16)
-
-
-### Features
-
-* **api:** overhauled API with new opt handling concept ([e6cc929](https://github.com/zkat/figgy-pudding/commit/e6cc929))
-* **license:** relicense to ISC ([87479aa](https://github.com/zkat/figgy-pudding/commit/87479aa))
-
-
-### BREAKING CHANGES
-
-* **license:** the license has been changed from CC0-1.0 to ISC.
-* **api:** this is a completely different approach than previously
-used by this library. See the readme for the new API and an explanation.
diff --git a/node_modules/figgy-pudding/README.md b/node_modules/figgy-pudding/README.md
deleted file mode 100644
index 3d0591c1e..000000000
--- a/node_modules/figgy-pudding/README.md
+++ /dev/null
@@ -1,260 +0,0 @@
-# figgy-pudding [![npm version](https://img.shields.io/npm/v/figgy-pudding.svg)](https://npm.im/figgy-pudding) [![license](https://img.shields.io/npm/l/figgy-pudding.svg)](https://npm.im/figgy-pudding) [![Travis](https://img.shields.io/travis/zkat/figgy-pudding.svg)](https://travis-ci.org/zkat/figgy-pudding) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/figgy-pudding?svg=true)](https://ci.appveyor.com/project/zkat/figgy-pudding) [![Coverage Status](https://coveralls.io/repos/github/zkat/figgy-pudding/badge.svg?branch=latest)](https://coveralls.io/github/zkat/figgy-pudding?branch=latest)
-
-[`figgy-pudding`](https://github.com/zkat/figgy-pudding) is a small JavaScript
-library for managing and composing cascading options objects -- hiding what
-needs to be hidden from each layer, without having to do a lot of manual munging
-and passing of options.
-
-### The God Object is Dead!
-### Now Bring Us Some Figgy Pudding!
-
-## Install
-
-`$ npm install figgy-pudding`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [API](#api)
- * [`figgyPudding(spec)`](#figgy-pudding)
- * [`PuddingFactory(values)`](#pudding-factory)
- * [`opts.get()`](#opts-get)
- * [`opts.concat()`](#opts-concat)
- * [`opts.toJSON()`](#opts-to-json)
- * [`opts.forEach()`](#opts-for-each)
- * [`opts[Symbol.iterator]()`](#opts-symbol-iterator)
- * [`opts.entries()`](#opts-entries)
- * [`opts.keys()`](#opts-keys)
- * [`opts.values()`](#opts-values)
-
-### Example
-
-```javascript
-// print-package.js
-const fetch = require('./fetch.js')
-const puddin = require('figgy-pudding')
-
-const PrintOpts = puddin({
- json: { default: false }
-})
-
-async function printPkg (name, opts) {
- // Expected pattern is to call this in every interface function. If `opts` is
- // not passed in, it will automatically create an (empty) object for it.
- opts = PrintOpts(opts)
- const uri = `https://registry.npmjs.com/${name}`
- const res = await fetch(uri, opts.concat({
- // Add or override any passed-in configs and pass them down.
- log: customLogger
- }))
- // The following would throw an error, because it's not in PrintOpts:
- // console.log(opts.log)
- if (opts.json) {
- return res.json()
- } else {
- return res.text()
- }
-}
-
-console.log(await printPkg('figgy', {
- // Pass in *all* configs at the toplevel, as a regular object.
- json: true,
- cache: './tmp-cache'
-}))
-```
-
-```javascript
-// fetch.js
-const puddin = require('figgy-pudding')
-
-const FetchOpts = puddin({
- log: { default: require('npmlog') },
- cache: {}
-})
-
-module.exports = async function (..., opts) {
- opts = FetchOpts(opts)
-}
-```
-
-### Features
-
-* hide options from layers that didn't ask for them
-* shared multi-layer options
-* make sure `opts` argument is available
-* transparent key access like normal keys, through a Proxy. No need for `.get()`!
-* default values
-* key aliases
-* arbitrary key filter functions
-* key/value iteration
-* serialization
-* 100% test coverage using `tap --100`
-
-### API
-
-#### <a name="figgy-pudding"></a> `> figgyPudding({ key: { default: val } | String }, [opts]) -> PuddingFactory`
-
-Defines an Options constructor that can be used to collect only the needed
-options.
-
-An optional `default` property for specs can be used to specify default values
-if nothing was passed in.
-
-If the value for a spec is a string, it will be treated as an alias to that
-other key.
-
-##### Example
-
-```javascript
-const MyAppOpts = figgyPudding({
- lg: 'log',
- log: {
- default: () => require('npmlog')
- },
- cache: {}
-})
-```
-
-#### <a name="pudding-factory"></a> `> PuddingFactory(...providers) -> FiggyPudding{}`
-
-Instantiates an options object defined by `figgyPudding()`, which uses
-`providers`, in order, to find requested properties.
-
-Each provider can be either a plain object, a `Map`-like object (that is, one
-with a `.get()` method) or another figgyPudding `Opts` object.
-
-When nesting `Opts` objects, their properties will not become available to the
-new object, but any further nested `Opts` that reference that property _will_ be
-able to read from their grandparent, as long as they define that key. Default
-values for nested `Opts` parents will be used, if found.
-
-##### Example
-
-```javascript
-const ReqOpts = figgyPudding({
- follow: {}
-})
-
-const opts = ReqOpts({
- follow: true,
- log: require('npmlog')
-})
-
-opts.follow // => true
-opts.log // => Error: ReqOpts does not define `log`
-
-const MoreOpts = figgyPudding({
- log: {}
-})
-MoreOpts(opts).log // => npmlog object (passed in from original plain obj)
-MoreOpts(opts).follow // => Error: MoreOpts does not define `follow`
-```
-
-#### <a name="opts-get"></a> `> opts.get(key) -> Value`
-
-Gets a value from the options object.
-
-##### Example
-
-```js
-const opts = MyOpts(config)
-opts.get('foo') // value of `foo`
-opts.foo // Proxy-based access through `.get()`
-```
-
-#### <a name="opts-concat"></a> `> opts.concat(...moreProviders) -> FiggyPudding{}`
-
-Creates a new opts object of the same type as `opts` with additional providers.
-Providers further to the right shadow providers to the left, with properties in
-the original `opts` being shadowed by the new providers.
-
-##### Example
-
-```js
-const opts = MyOpts({x: 1})
-opts.get('x') // 1
-opts.concat({x: 2}).get('x') // 2
-opts.get('x') // 1 (original opts object left intact)
-```
-
-#### <a name="opts-to-json"></a> `> opts.toJSON() -> Value`
-
-Converts `opts` to a plain, JSON-stringifiable JavaScript value. Used internally
-by JavaScript to get `JSON.stringify()` working.
-
-Only keys that are readable by the current pudding type will be serialized.
-
-##### Example
-
-```js
-const opts = MyOpts({x: 1})
-opts.toJSON() // {x: 1}
-JSON.stringify(opts) // '{"x":1}'
-```
-
-#### <a name="opts-for-each"></a> `> opts.forEach((value, key, opts) => {}, thisArg) -> undefined`
-
-Iterates over the values of `opts`, limited to the keys readable by the current
-pudding type. `thisArg` will be used to set the `this` argument when calling the
-`fn`.
-
-##### Example
-
-```js
-const opts = MyOpts({x: 1, y: 2})
-opts.forEach((value, key) => console.log(key, '=', value))
-```
-
-#### <a name="opts-entries"></a> `> opts.entries() -> Iterator<[[key, value], ...]>`
-
-Returns an iterator that iterates over the keys and values in `opts`, limited to
-the keys readable by the current pudding type. Each iteration returns an array
-of `[key, value]`.
-
-##### Example
-
-```js
-const opts = MyOpts({x: 1, y: 2})
-[...opts.entries()] // [['x', 1], ['y', 2]]
-```
-
-#### <a name="opts-symbol-iterator"></a> `> opts[Symbol.iterator]() -> Iterator<[[key, value], ...]>`
-
-Returns an iterator that iterates over the keys and values in `opts`, limited to
-the keys readable by the current pudding type. Each iteration returns an array
-of `[key, value]`. Makes puddings work natively with JS iteration mechanisms.
-
-##### Example
-
-```js
-const opts = MyOpts({x: 1, y: 2})
-[...opts] // [['x', 1], ['y', 2]]
-for (let [key, value] of opts) {
- console.log(key, '=', value)
-}
-```
-
-#### <a name="opts-keys"></a> `> opts.keys() -> Iterator<[key, ...]>`
-
-Returns an iterator that iterates over the keys in `opts`, limited to the keys
-readable by the current pudding type.
-
-##### Example
-
-```js
-const opts = MyOpts({x: 1, y: 2})
-[...opts.keys()] // ['x', 'y']
-```
-
-#### <a name="opts-values"></a> `> opts.values() -> Iterator<[value, ...]>`
-
-Returns an iterator that iterates over the values in `opts`, limited to the keys
-readable by the current pudding type.
-
-##### Example
-```js
-const opts = MyOpts({x: 1, y: 2})
-[...opts.values()] // [1, 2]
-```
diff --git a/node_modules/figgy-pudding/index.js b/node_modules/figgy-pudding/index.js
deleted file mode 100644
index bb7d5711b..000000000
--- a/node_modules/figgy-pudding/index.js
+++ /dev/null
@@ -1,197 +0,0 @@
-'use strict'
-
-class FiggyPudding {
- constructor (specs, opts, providers) {
- this.__specs = specs || {}
- Object.keys(this.__specs).forEach(alias => {
- if (typeof this.__specs[alias] === 'string') {
- const key = this.__specs[alias]
- const realSpec = this.__specs[key]
- if (realSpec) {
- const aliasArr = realSpec.aliases || []
- aliasArr.push(alias, key)
- realSpec.aliases = [...(new Set(aliasArr))]
- this.__specs[alias] = realSpec
- } else {
- throw new Error(`Alias refers to invalid key: ${key} -> ${alias}`)
- }
- }
- })
- this.__opts = opts || {}
- this.__providers = reverse((providers).filter(
- x => x != null && typeof x === 'object'
- ))
- this.__isFiggyPudding = true
- }
- get (key) {
- return pudGet(this, key, true)
- }
- get [Symbol.toStringTag] () { return 'FiggyPudding' }
- forEach (fn, thisArg = this) {
- for (let [key, value] of this.entries()) {
- fn.call(thisArg, value, key, this)
- }
- }
- toJSON () {
- const obj = {}
- this.forEach((val, key) => {
- obj[key] = val
- })
- return obj
- }
- * entries (_matcher) {
- for (let key of Object.keys(this.__specs)) {
- yield [key, this.get(key)]
- }
- const matcher = _matcher || this.__opts.other
- if (matcher) {
- const seen = new Set()
- for (let p of this.__providers) {
- const iter = p.entries ? p.entries(matcher) : entries(p)
- for (let [key, val] of iter) {
- if (matcher(key) && !seen.has(key)) {
- seen.add(key)
- yield [key, val]
- }
- }
- }
- }
- }
- * [Symbol.iterator] () {
- for (let [key, value] of this.entries()) {
- yield [key, value]
- }
- }
- * keys () {
- for (let [key] of this.entries()) {
- yield key
- }
- }
- * values () {
- for (let [, value] of this.entries()) {
- yield value
- }
- }
- concat (...moreConfig) {
- return new Proxy(new FiggyPudding(
- this.__specs,
- this.__opts,
- reverse(this.__providers).concat(moreConfig)
- ), proxyHandler)
- }
-}
-try {
- const util = require('util')
- FiggyPudding.prototype[util.inspect.custom] = function (depth, opts) {
- return (
- this[Symbol.toStringTag] + ' '
- ) + util.inspect(this.toJSON(), opts)
- }
-} catch (e) {}
-
-function BadKeyError (key) {
- throw Object.assign(new Error(
- `invalid config key requested: ${key}`
- ), {code: 'EBADKEY'})
-}
-
-function pudGet (pud, key, validate) {
- let spec = pud.__specs[key]
- if (validate && !spec && (!pud.__opts.other || !pud.__opts.other(key))) {
- BadKeyError(key)
- } else {
- if (!spec) { spec = {} }
- let ret
- for (let p of pud.__providers) {
- ret = tryGet(key, p)
- if (ret === undefined && spec.aliases && spec.aliases.length) {
- for (let alias of spec.aliases) {
- if (alias === key) { continue }
- ret = tryGet(alias, p)
- if (ret !== undefined) {
- break
- }
- }
- }
- if (ret !== undefined) {
- break
- }
- }
- if (ret === undefined && spec.default !== undefined) {
- if (typeof spec.default === 'function') {
- return spec.default(pud)
- } else {
- return spec.default
- }
- } else {
- return ret
- }
- }
-}
-
-function tryGet (key, p) {
- let ret
- if (p.__isFiggyPudding) {
- ret = pudGet(p, key, false)
- } else if (typeof p.get === 'function') {
- ret = p.get(key)
- } else {
- ret = p[key]
- }
- return ret
-}
-
-const proxyHandler = {
- has (obj, prop) {
- return prop in obj.__specs && pudGet(obj, prop, false) !== undefined
- },
- ownKeys (obj) {
- return Object.keys(obj.__specs)
- },
- get (obj, prop) {
- if (
- typeof prop === 'symbol' ||
- prop.slice(0, 2) === '__' ||
- prop in FiggyPudding.prototype
- ) {
- return obj[prop]
- }
- return obj.get(prop)
- },
- set (obj, prop, value) {
- if (
- typeof prop === 'symbol' ||
- prop.slice(0, 2) === '__'
- ) {
- obj[prop] = value
- return true
- } else {
- throw new Error('figgyPudding options cannot be modified. Use .concat() instead.')
- }
- },
- deleteProperty () {
- throw new Error('figgyPudding options cannot be deleted. Use .concat() and shadow them instead.')
- }
-}
-
-module.exports = figgyPudding
-function figgyPudding (specs, opts) {
- function factory (...providers) {
- return new Proxy(new FiggyPudding(
- specs,
- opts,
- providers
- ), proxyHandler)
- }
- return factory
-}
-
-function reverse (arr) {
- const ret = []
- arr.forEach(x => ret.unshift(x))
- return ret
-}
-
-function entries (obj) {
- return Object.keys(obj).map(k => [k, obj[k]])
-}
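
The proxy handler above enforces read-only access; a sketch of that behavior, grounded in the removed module's own API:

```js
// Sketch: puddings are read-only through the proxy; new values are
// layered on with .concat() rather than assignment.
const figgyPudding = require('figgy-pudding')

const Opts = figgyPudding({ retries: { default: 2 } })
const opts = Opts({ retries: 5 })
opts.retries              // 5, via the proxy 'get' trap
// opts.retries = 10      // would throw: 'use .concat() instead'
const more = opts.concat({ retries: 10 })
more.retries              // 10; the original opts is left intact
```
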
diff --git a/node_modules/figgy-pudding/package.json b/node_modules/figgy-pudding/package.json
deleted file mode 100644
index 4f268f6ff..000000000
--- a/node_modules/figgy-pudding/package.json
+++ /dev/null
@@ -1,77 +0,0 @@
-{
- "_from": "figgy-pudding@latest",
- "_id": "figgy-pudding@3.5.1",
- "_inBundle": false,
- "_integrity": "sha512-vNKxJHTEKNThjfrdJwHc7brvM6eVevuO5nTj6ez8ZQ1qbXTvGthucRF7S4vf2cr71QVnT70V34v0S1DyQsti0w==",
- "_location": "/figgy-pudding",
- "_phantomChildren": {},
- "_requested": {
- "type": "tag",
- "registry": true,
- "raw": "figgy-pudding@latest",
- "name": "figgy-pudding",
- "escapedName": "figgy-pudding",
- "rawSpec": "latest",
- "saveSpec": null,
- "fetchSpec": "latest"
- },
- "_requiredBy": [
- "#USER",
- "/",
- "/cacache",
- "/libnpmhook",
- "/libnpmorg",
- "/libnpmteam",
- "/npm-registry-fetch",
- "/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/figgy-pudding/-/figgy-pudding-3.5.1.tgz",
- "_shasum": "862470112901c727a0e495a80744bd5baa1d6790",
- "_spec": "figgy-pudding@latest",
- "_where": "/Users/zkat/Documents/code/work/npm",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/zkat/figgy-pudding/issues"
- },
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "Delicious, festive, cascading config/opts definitions",
- "devDependencies": {
- "standard": "^11.0.1",
- "standard-version": "^4.4.0",
- "tap": "^12.0.1",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "*.js",
- "lib"
- ],
- "homepage": "https://github.com/zkat/figgy-pudding#readme",
- "keywords": [
- "config",
- "options",
- "yummy"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "figgy-pudding",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/zkat/figgy-pudding.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --100 --coverage test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "3.5.1"
-}
diff --git a/node_modules/libnpmpack/node_modules/.bin/mkdirp b/node_modules/libnpmpack/node_modules/.bin/mkdirp
deleted file mode 120000
index 017896ceb..000000000
--- a/node_modules/libnpmpack/node_modules/.bin/mkdirp
+++ /dev/null
@@ -1 +0,0 @@
-../mkdirp/bin/cmd.js \ No newline at end of file
diff --git a/node_modules/libnpmpack/node_modules/.bin/pacote b/node_modules/libnpmpack/node_modules/.bin/pacote
deleted file mode 120000
index e59583143..000000000
--- a/node_modules/libnpmpack/node_modules/.bin/pacote
+++ /dev/null
@@ -1 +0,0 @@
-../pacote/lib/bin.js \ No newline at end of file
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/CHANGELOG.md b/node_modules/libnpmpack/node_modules/mkdirp/CHANGELOG.md
deleted file mode 100644
index 81458380b..000000000
--- a/node_modules/libnpmpack/node_modules/mkdirp/CHANGELOG.md
+++ /dev/null
@@ -1,15 +0,0 @@
-# Changers Lorgs!
-
-## 1.0
-
-Full rewrite. Essentially a brand new module.
-
-- Return a promise instead of taking a callback.
-- Use native `fs.mkdir(path, { recursive: true })` when available.
-- Drop support for outdated Node.js versions. (Technically still works on
- Node.js v8, but only 10 and above are officially supported.)
-
-## 0.x
-
-Original and most widely used recursive directory creation implementation
-in JavaScript, dating back to 2010.
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/LICENSE b/node_modules/libnpmpack/node_modules/mkdirp/LICENSE
deleted file mode 100644
index 13fcd15f0..000000000
--- a/node_modules/libnpmpack/node_modules/mkdirp/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-Copyright James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
-
-This project is free software released under the MIT license:
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/bin/cmd.js b/node_modules/libnpmpack/node_modules/mkdirp/bin/cmd.js
deleted file mode 100755
index 6e0aa8dc4..000000000
--- a/node_modules/libnpmpack/node_modules/mkdirp/bin/cmd.js
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env node
-
-const usage = () => `
-usage: mkdirp [DIR1,DIR2..] {OPTIONS}
-
- Create each supplied directory including any necessary parent directories
- that don't yet exist.
-
- If the directory already exists, do nothing.
-
-OPTIONS are:
-
- -m<mode> If a directory needs to be created, set the mode as an octal
- --mode=<mode> permission string.
-
- -v --version Print the mkdirp version number
-
- -h --help Print this helpful banner
-
- -p --print Print the first directories created for each path provided
-
- --manual Use manual implementation, even if native is available
-`
-
-const dirs = []
-const opts = {}
-let print = false
-let dashdash = false
-let manual = false
-for (const arg of process.argv.slice(2)) {
- if (dashdash)
- dirs.push(arg)
- else if (arg === '--')
- dashdash = true
- else if (arg === '--manual')
- manual = true
- else if (/^-h/.test(arg) || /^--help/.test(arg)) {
- console.log(usage())
- process.exit(0)
- } else if (arg === '-v' || arg === '--version') {
- console.log(require('../package.json').version)
- process.exit(0)
- } else if (arg === '-p' || arg === '--print') {
- print = true
- } else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
- const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8)
- if (isNaN(mode)) {
- console.error(`invalid mode argument: ${arg}\nMust be an octal number.`)
- process.exit(1)
- }
- opts.mode = mode
- } else
- dirs.push(arg)
-}
-
-const mkdirp = require('../')
-const impl = manual ? mkdirp.manual : mkdirp
-if (dirs.length === 0)
- console.error(usage())
-
-Promise.all(dirs.map(dir => impl(dir, opts)))
- .then(made => print ? made.forEach(m => m && console.log(m)) : null)
- .catch(er => {
- console.error(er.message)
- if (er.code)
- console.error(' code: ' + er.code)
- process.exit(1)
- })
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/index.js b/node_modules/libnpmpack/node_modules/mkdirp/index.js
deleted file mode 100644
index ad7a16c9f..000000000
--- a/node_modules/libnpmpack/node_modules/mkdirp/index.js
+++ /dev/null
@@ -1,31 +0,0 @@
-const optsArg = require('./lib/opts-arg.js')
-const pathArg = require('./lib/path-arg.js')
-
-const {mkdirpNative, mkdirpNativeSync} = require('./lib/mkdirp-native.js')
-const {mkdirpManual, mkdirpManualSync} = require('./lib/mkdirp-manual.js')
-const {useNative, useNativeSync} = require('./lib/use-native.js')
-
-
-const mkdirp = (path, opts) => {
- path = pathArg(path)
- opts = optsArg(opts)
- return useNative(opts)
- ? mkdirpNative(path, opts)
- : mkdirpManual(path, opts)
-}
-
-const mkdirpSync = (path, opts) => {
- path = pathArg(path)
- opts = optsArg(opts)
- return useNativeSync(opts)
- ? mkdirpNativeSync(path, opts)
- : mkdirpManualSync(path, opts)
-}
-
-mkdirp.sync = mkdirpSync
-mkdirp.native = (path, opts) => mkdirpNative(pathArg(path), optsArg(opts))
-mkdirp.manual = (path, opts) => mkdirpManual(pathArg(path), optsArg(opts))
-mkdirp.nativeSync = (path, opts) => mkdirpNativeSync(pathArg(path), optsArg(opts))
-mkdirp.manualSync = (path, opts) => mkdirpManualSync(pathArg(path), optsArg(opts))
-
-module.exports = mkdirp
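
A usage sketch of the dispatch surface this removed file exported:

```js
// Sketch: the default export picks the native fs.mkdir recursive mode
// when available; .manual / .native variants force one implementation.
const mkdirp = require('mkdirp')

mkdirp('/tmp/a/b/c').then(made => console.log('first dir created:', made))
mkdirp.manualSync('/tmp/x/y')  // always the JS implementation
mkdirp.nativeSync('/tmp/x/z')  // always fs.mkdirSync with recursive: true
```
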
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/lib/find-made.js b/node_modules/libnpmpack/node_modules/mkdirp/lib/find-made.js
deleted file mode 100644
index 022e492c0..000000000
--- a/node_modules/libnpmpack/node_modules/mkdirp/lib/find-made.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const {dirname} = require('path')
-
-const findMade = (opts, parent, path = undefined) => {
- // we never want the 'made' return value to be a root directory
- if (path === parent)
- return Promise.resolve()
-
- return opts.statAsync(parent).then(
- st => st.isDirectory() ? path : undefined, // will fail later
- er => er.code === 'ENOENT'
- ? findMade(opts, dirname(parent), parent)
- : undefined
- )
-}
-
-const findMadeSync = (opts, parent, path = undefined) => {
- if (path === parent)
- return undefined
-
- try {
- return opts.statSync(parent).isDirectory() ? path : undefined
- } catch (er) {
- return er.code === 'ENOENT'
- ? findMadeSync(opts, dirname(parent), parent)
- : undefined
- }
-}
-
-module.exports = {findMade, findMadeSync}
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-manual.js b/node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-manual.js
deleted file mode 100644
index 2eb18cd64..000000000
--- a/node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-manual.js
+++ /dev/null
@@ -1,64 +0,0 @@
-const {dirname} = require('path')
-
-const mkdirpManual = (path, opts, made) => {
- opts.recursive = false
- const parent = dirname(path)
- if (parent === path) {
- return opts.mkdirAsync(path, opts).catch(er => {
- // swallowed by recursive implementation on posix systems
- // any other error is a failure
- if (er.code !== 'EISDIR')
- throw er
- })
- }
-
- return opts.mkdirAsync(path, opts).then(() => made || path, er => {
- if (er.code === 'ENOENT')
- return mkdirpManual(parent, opts)
- .then(made => mkdirpManual(path, opts, made))
- if (er.code !== 'EEXIST' && er.code !== 'EROFS')
- throw er
- return opts.statAsync(path).then(st => {
- if (st.isDirectory())
- return made
- else
- throw er
- }, () => { throw er })
- })
-}
-
-const mkdirpManualSync = (path, opts, made) => {
- const parent = dirname(path)
- opts.recursive = false
-
- if (parent === path) {
- try {
- return opts.mkdirSync(path, opts)
- } catch (er) {
- // swallowed by recursive implementation on posix systems
- // any other error is a failure
- if (er.code !== 'EISDIR')
- throw er
- else
- return
- }
- }
-
- try {
- opts.mkdirSync(path, opts)
- return made || path
- } catch (er) {
- if (er.code === 'ENOENT')
- return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made))
- if (er.code !== 'EEXIST' && er.code !== 'EROFS')
- throw er
- try {
- if (!opts.statSync(path).isDirectory())
- throw er
- } catch (_) {
- throw er
- }
- }
-}
-
-module.exports = {mkdirpManual, mkdirpManualSync}
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-native.js b/node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-native.js
deleted file mode 100644
index c7a6b6980..000000000
--- a/node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-native.js
+++ /dev/null
@@ -1,39 +0,0 @@
-const {dirname} = require('path')
-const {findMade, findMadeSync} = require('./find-made.js')
-const {mkdirpManual, mkdirpManualSync} = require('./mkdirp-manual.js')
-
-const mkdirpNative = (path, opts) => {
- opts.recursive = true
- const parent = dirname(path)
- if (parent === path)
- return opts.mkdirAsync(path, opts)
-
- return findMade(opts, path).then(made =>
- opts.mkdirAsync(path, opts).then(() => made)
- .catch(er => {
- if (er.code === 'ENOENT')
- return mkdirpManual(path, opts)
- else
- throw er
- }))
-}
-
-const mkdirpNativeSync = (path, opts) => {
- opts.recursive = true
- const parent = dirname(path)
- if (parent === path)
- return opts.mkdirSync(path, opts)
-
- const made = findMadeSync(opts, path)
- try {
- opts.mkdirSync(path, opts)
- return made
- } catch (er) {
- if (er.code === 'ENOENT')
- return mkdirpManualSync(path, opts)
- else
- throw er
- }
-}
-
-module.exports = {mkdirpNative, mkdirpNativeSync}
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/lib/opts-arg.js b/node_modules/libnpmpack/node_modules/mkdirp/lib/opts-arg.js
deleted file mode 100644
index 488bd44c3..000000000
--- a/node_modules/libnpmpack/node_modules/mkdirp/lib/opts-arg.js
+++ /dev/null
@@ -1,23 +0,0 @@
-const { promisify } = require('util')
-const fs = require('fs')
-const optsArg = opts => {
- if (!opts)
- opts = { mode: 0o777 & (~process.umask()), fs }
- else if (typeof opts === 'object')
- opts = { mode: 0o777 & (~process.umask()), fs, ...opts }
- else if (typeof opts === 'number')
- opts = { mode: opts, fs }
- else if (typeof opts === 'string')
- opts = { mode: parseInt(opts, 8), fs }
- else
- throw new TypeError('invalid options argument')
-
- opts.mkdir = opts.mkdir || opts.fs.mkdir || fs.mkdir
- opts.mkdirAsync = promisify(opts.mkdir)
- opts.stat = opts.stat || opts.fs.stat || fs.stat
- opts.statAsync = promisify(opts.stat)
- opts.statSync = opts.statSync || opts.fs.statSync || fs.statSync
- opts.mkdirSync = opts.mkdirSync || opts.fs.mkdirSync || fs.mkdirSync
- return opts
-}
-module.exports = optsArg
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/lib/path-arg.js b/node_modules/libnpmpack/node_modules/mkdirp/lib/path-arg.js
deleted file mode 100644
index cc07de5a6..000000000
--- a/node_modules/libnpmpack/node_modules/mkdirp/lib/path-arg.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform
-const { resolve, parse } = require('path')
-const pathArg = path => {
- if (/\0/.test(path)) {
- // simulate same failure that node raises
- throw Object.assign(
- new TypeError('path must be a string without null bytes'),
- {
- path,
- code: 'ERR_INVALID_ARG_VALUE',
- }
- )
- }
-
- path = resolve(path)
- if (platform === 'win32') {
- const badWinChars = /[*|"<>?:]/
- const {root} = parse(path)
- if (badWinChars.test(path.substr(root.length))) {
- throw Object.assign(new Error('Illegal characters in path.'), {
- path,
- code: 'EINVAL',
- })
- }
- }
-
- return path
-}
-module.exports = pathArg
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/lib/use-native.js b/node_modules/libnpmpack/node_modules/mkdirp/lib/use-native.js
deleted file mode 100644
index 079361de1..000000000
--- a/node_modules/libnpmpack/node_modules/mkdirp/lib/use-native.js
+++ /dev/null
@@ -1,10 +0,0 @@
-const fs = require('fs')
-
-const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version
-const versArr = version.replace(/^v/, '').split('.')
-const hasNative = +versArr[0] > 10 || +versArr[0] === 10 && +versArr[1] >= 12
-
-const useNative = !hasNative ? () => false : opts => opts.mkdir === fs.mkdir
-const useNativeSync = !hasNative ? () => false : opts => opts.mkdirSync === fs.mkdirSync
-
-module.exports = {useNative, useNativeSync}
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/package.json b/node_modules/libnpmpack/node_modules/mkdirp/package.json
deleted file mode 100644
index d4c0acbb8..000000000
--- a/node_modules/libnpmpack/node_modules/mkdirp/package.json
+++ /dev/null
@@ -1,75 +0,0 @@
-{
- "_from": "mkdirp@^1.0.3",
- "_id": "mkdirp@1.0.3",
- "_inBundle": false,
- "_integrity": "sha512-6uCP4Qc0sWsgMLy1EOqqS/3rjDHOEnsStVr/4vtAIK2Y5i2kA7lFFejYrpIyiN9w0pYf4ckeCYT9f1r1P9KX5g==",
- "_location": "/libnpmpack/mkdirp",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "mkdirp@^1.0.3",
- "name": "mkdirp",
- "escapedName": "mkdirp",
- "rawSpec": "^1.0.3",
- "saveSpec": null,
- "fetchSpec": "^1.0.3"
- },
- "_requiredBy": [
- "/libnpmpack/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.3.tgz",
- "_shasum": "4cf2e30ad45959dddea53ad97d518b6c8205e1ea",
- "_spec": "mkdirp@^1.0.3",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libnpmpack/node_modules/pacote",
- "bin": {
- "mkdirp": "bin/cmd.js"
- },
- "bugs": {
- "url": "https://github.com/isaacs/node-mkdirp/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "Recursively mkdir, like `mkdir -p`",
- "devDependencies": {
- "require-inject": "^1.4.4",
- "tap": "^14.10.6"
- },
- "engines": {
- "node": ">=10"
- },
- "files": [
- "bin",
- "lib",
- "index.js"
- ],
- "homepage": "https://github.com/isaacs/node-mkdirp#readme",
- "keywords": [
- "mkdir",
- "directory",
- "make dir",
- "make",
- "dir",
- "recursive",
- "native"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "mkdirp",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/isaacs/node-mkdirp.git"
- },
- "scripts": {
- "postpublish": "git push origin --follow-tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "snap": "tap",
- "test": "tap"
- },
- "tap": {
- "check-coverage": true,
- "coverage-map": "map.js"
- },
- "version": "1.0.3"
-}
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/readme.markdown b/node_modules/libnpmpack/node_modules/mkdirp/readme.markdown
deleted file mode 100644
index 827de5905..000000000
--- a/node_modules/libnpmpack/node_modules/mkdirp/readme.markdown
+++ /dev/null
@@ -1,266 +0,0 @@
-# mkdirp
-
-Like `mkdir -p`, but in Node.js!
-
-Now with a modern API and no\* bugs!
-
-<small>\* may contain some bugs</small>
-
-# example
-
-## pow.js
-
-```js
-const mkdirp = require('mkdirp')
-
-// return value is a Promise resolving to the first directory created
-mkdirp('/tmp/foo/bar/baz').then(made =>
- console.log(`made directories, starting with ${made}`))
-```
-
-Output (where `/tmp/foo` already exists)
-
-```
-made directories, starting with /tmp/foo/bar
-```
-
-Or, if you don't have time to wait around for promises:
-
-```js
-const mkdirp = require('mkdirp')
-
-// return value is the first directory created
-const made = mkdirp.sync('/tmp/foo/bar/baz')
-console.log(`made directories, starting with ${made}`)
-```
-
-And now /tmp/foo/bar/baz exists, huzzah!
-
-# methods
-
-```js
-const mkdirp = require('mkdirp')
-```
-
-## mkdirp(dir, [opts]) -> Promise<String | undefined>
-
-Create a new directory and any necessary subdirectories at `dir` with octal
-permission string `opts.mode`. If `opts` is a string or number, it will be
-treated as the `opts.mode`.
-
-If `opts.mode` isn't specified, it defaults to `0o777 &
-(~process.umask())`.
-
-Promise resolves to first directory `made` that had to be created, or
-`undefined` if everything already exists. Promise rejects if any errors
-are encountered. Note that, in the case of promise rejection, some
-directories _may_ have been created, as recursive directory creation is not
-an atomic operation.
-
-You can optionally pass in an alternate `fs` implementation by passing in
-`opts.fs`. Your implementation should have `opts.fs.mkdir(path, opts, cb)`
-and `opts.fs.stat(path, cb)`.
-
-You can also override just one or the other of `mkdir` and `stat` by
-passing in `opts.stat` or `opts.mkdir`, or providing an `fs` option that
-only overrides one of these.
-
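For illustration, a minimal sketch of the options described above (the paths, mode, and `fs` override here are arbitrary):

```js
const mkdirp = require('mkdirp')
const fs = require('fs')

// a string or number second argument is treated as opts.mode
mkdirp('/tmp/foo/bar', 0o700)
  .then(made => console.log('first dir created:', made))

// overriding the fs implementation; any object with compatible
// mkdir(path, opts, cb) and stat(path, cb) methods will do
mkdirp('/tmp/foo/baz', { fs: { mkdir: fs.mkdir, stat: fs.stat } })
  .then(made => console.log(made || 'nothing to do'))
```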
-## mkdirp.sync(dir, [opts]) -> String|undefined
-
-Synchronously create a new directory and any necessary subdirectories at
-`dir` with octal permission string `opts.mode`. If `opts` is a string or
-number, it will be treated as the `opts.mode`.
-
-If `opts.mode` isn't specified, it defaults to `0o777 &
-(~process.umask())`.
-
-Returns the first directory that had to be created, or undefined if
-everything already exists.
-
-You can optionally pass in an alternate `fs` implementation by passing in
-`opts.fs`. Your implementation should have `opts.fs.mkdirSync(path, mode)`
-and `opts.fs.statSync(path)`.
-
-You can also override just one or the other of `mkdirSync` and `statSync`
-by passing in `opts.statSync` or `opts.mkdirSync`, or providing an `fs`
-option that only overrides one of these.
-
-## mkdirp.manual, mkdirp.manualSync
-
-Use the manual implementation (not the native one). This is the default
-when the native implementation is not available or the stat/mkdir
-implementation is overridden.
-
-## mkdirp.native, mkdirp.nativeSync
-
-Use the native implementation (not the manual one). This is the default
-when the native implementation is available and stat/mkdir are not
-overridden.
-
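A quick sketch of forcing one implementation or the other (directory names are arbitrary):

```js
const mkdirp = require('mkdirp')

// always use the manual implementation, even on Node.js v10.12.0+
mkdirp.manual('/tmp/a/b/c').then(made => console.log(made))

// always use the native recursive fs.mkdir (sync variant shown)
const made = mkdirp.nativeSync('/tmp/x/y/z')
```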
-# implementation
-
-On Node.js v10.12.0 and above, use the native `fs.mkdir(p,
-{recursive:true})` option, unless `fs.mkdir`/`fs.mkdirSync` has been
-overridden by an option.
-
-## native implementation
-
-- If the path is a root directory, then pass it to the underlying
- implementation and return the result/error. (In this case, it'll either
- succeed or fail, but we aren't actually creating any dirs.)
-- Walk up the path statting each directory, to find the first path that
- will be created, `made`.
-- Call `fs.mkdir(path, { recursive: true })` (or `fs.mkdirSync`)
-- If error, raise it to the caller.
-- Return `made`.
-
-## manual implementation
-
-- Call underlying `fs.mkdir` implementation, with `recursive: false`
-- If error:
- - If path is a root directory, raise to the caller and do not handle it
- - If ENOENT, mkdirp parent dir, store result as `made`
- - stat(path)
- - If error, raise original `mkdir` error
- - If directory, return `made`
- - Else, raise original `mkdir` error
-- else
- - return `undefined` if a root dir, or `made` if set, or `path`
-
-## windows vs unix caveat
-
-On Windows file systems, attempts to create a root directory (ie, a drive
-letter or root UNC path) will fail. If the root directory exists, then it
-will fail with `EPERM`. If the root directory does not exist, then it will
-fail with `ENOENT`.
-
-On posix file systems, attempts to create a root directory (in recursive
-mode) will succeed silently, as it is treated like just another directory
-that already exists. (In non-recursive mode, of course, it fails with
-`EEXIST`.)
-
-In order to preserve this system-specific behavior (and because it's not as
-if we can create the parent of a root directory anyway), attempts to create
-a root directory are passed directly to the `fs` implementation, and any
-errors encountered are not handled.
-
-## native error caveat
-
-The native implementation (as of at least Node.js v13.4.0) does not provide
-appropriate errors in some cases (see
-[nodejs/node#31481](https://github.com/nodejs/node/issues/31481) and
-[nodejs/node#28015](https://github.com/nodejs/node/issues/28015)).
-
-In order to work around this issue, the native implementation will fall
-back to the manual implementation if an `ENOENT` error is encountered.
-
-# choosing a recursive mkdir implementation
-
-There are a few to choose from! Use the one that suits your needs best :D
-
-## use `fs.mkdir(path, {recursive: true}, cb)` if:
-
-- You wish to optimize performance even at the expense of other factors.
-- You don't need to know the first dir created.
-- You are ok with getting `ENOENT` as the error when some other problem is
- the actual cause.
-- You can limit your platforms to Node.js v10.12 and above.
-- You're ok with using callbacks instead of promises.
-- You don't need/want a CLI.
-- You don't need to override the `fs` methods in use.
-
-## use this module (mkdirp 1.x) if:
-
-- You need to know the first directory that was created.
-- You wish to use the native implementation if available, but fall back
- when it's not.
-- You prefer promise-returning APIs to callback-taking APIs.
-- You want more useful error messages than the native recursive mkdir
- provides (at least as of Node.js v13.4), and are ok with re-trying on
- `ENOENT` to achieve this.
-- You need (or at least, are ok with) a CLI.
-- You need to override the `fs` methods in use.
-
-## use [`make-dir`](http://npm.im/make-dir) if:
-
-- You do not need to know the first dir created (and wish to save a few
- `stat` calls when using the native implementation for this reason).
-- You wish to use the native implementation if available, but fall back
- when it's not.
-- You prefer promise-returning APIs to callback-taking APIs.
-- You are ok with occasionally getting `ENOENT` errors for failures that
- are actually related to something other than a missing file system entry.
-- You don't need/want a CLI.
-- You need to override the `fs` methods in use.
-
-## use mkdirp 0.x if:
-
-- You need to know the first directory that was created.
-- You need (or at least, are ok with) a CLI.
-- You need to override the `fs` methods in use.
-- You're ok with using callbacks instead of promises.
-- You are not running on Windows, where the root-level ENOENT errors can
- lead to infinite regress.
-- You think vinyl just sounds warmer and richer for some weird reason.
-- You are supporting truly ancient Node.js versions, before even the advent
- of a `Promise` language primitive. (Please don't. You deserve better.)
-
-# cli
-
-This package also ships with a `mkdirp` command.
-
-```
-$ mkdirp -h
-
-usage: mkdirp [DIR1,DIR2..] {OPTIONS}
-
- Create each supplied directory including any necessary parent directories
- that don't yet exist.
-
- If the directory already exists, do nothing.
-
-OPTIONS are:
-
- -m<mode> If a directory needs to be created, set the mode as an octal
- --mode=<mode> permission string.
-
- -v --version Print the mkdirp version number
-
- -h --help Print this helpful banner
-
- -p --print Print the first directories created for each path provided
-
- --manual Use manual implementation, even if native is available
-```
-
-# install
-
-With [npm](http://npmjs.org) do:
-
-```
-npm install mkdirp
-```
-
-to get the library locally, or
-
-```
-npm install -g mkdirp
-```
-
-to get the command everywhere, or
-
-```
-npx mkdirp ...
-```
-
-to run the command without installing it globally.
-
-# platform support
-
-This module works on node v8, but only v10 and above are officially
-supported, as Node v8 reached its LTS end of life 2020-01-01, which is in
-the past, as of this writing.
-
-# license
-
-MIT
diff --git a/node_modules/libnpmpack/node_modules/pacote/README.md b/node_modules/libnpmpack/node_modules/pacote/README.md
deleted file mode 100644
index 49dbde6a9..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/README.md
+++ /dev/null
@@ -1,244 +0,0 @@
-# pacote
-
-JavaScript Package Handler
-
-## USAGE
-
-```js
-const pacote = require('pacote')
-
-// get a package manifest
-pacote.manifest('foo@1.x').then(manifest => console.log('got it', manifest))
-
-// extract a package into a folder
-pacote.extract('github:npm/cli', 'some/path', options)
- .then(({from, resolved, integrity}) => {
- console.log('extracted!', from, resolved, integrity)
- })
-
-pacote.tarball('https://server.com/package.tgz').then(data => {
- console.log('got ' + data.length + ' bytes of tarball data')
-})
-```
-
-Anything that you can do with one kind of package, you can do with any
-kind of package. Data that isn't relevant (like a packument for a
-tarball) will be simulated.
-
-## CLI
-
-This module exports a command line interface that can do most of what is
-described below. Run `pacote -h` to learn more.
-
-```
-Pacote - The JavaScript Package Handler, v10.1.1
-
-Usage:
-
- pacote resolve <spec>
- Resolve a specifier and output the fully resolved target
- Returns integrity and from if '--long' flag is set.
-
- pacote manifest <spec>
- Fetch a manifest and print to stdout
-
- pacote packument <spec>
- Fetch a full packument and print to stdout
-
- pacote tarball <spec> [<filename>]
- Fetch a package tarball and save to <filename>
- If <filename> is missing or '-', the tarball will be streamed to stdout.
-
- pacote extract <spec> <folder>
- Extract a package to the destination folder.
-
-Configuration values all match the names of configs passed to npm, or
-options passed to Pacote. Additional flags for this executable:
-
- --long Print an object from 'resolve', including integrity and spec.
- --json Print result objects as JSON rather than node's default.
- (This is the default if stdout is not a TTY.)
- --help -h Print this helpful text.
-
-For example '--cache=/path/to/folder' will use that folder as the cache.
-```
-
-## API
-
-The `spec` refers to any kind of package specifier that npm can install.
-If you can pass it to the npm CLI, you can pass it to pacote. (In fact,
-that's exactly what the npm CLI does.)
-
-See below for valid `opts` values.
-
-* `pacote.resolve(spec, opts)` Resolve a specifier like `foo@latest` or
- `github:user/project` all the way to a tarball url, tarball file, or git
- repo with commit hash.
-
-* `pacote.extract(spec, dest, opts)` Extract a package's tarball into a
- destination folder. Returns a promise that resolves to the
- `{from,resolved,integrity}` of the extracted package.
-
-* `pacote.manifest(spec, opts)` Fetch (or simulate) a package's manifest
- (basically, the `package.json` file, plus a bit of metadata).
- See below for more on manifests and packuments. Returns a Promise that
- resolves to the manifest object.
-
-* `pacote.packument(spec, opts)` Fetch (or simulate) a package's packument
- (basically, the top-level package document listing all the manifests that
- the registry returns). See below for more on manifests and packuments.
- Returns a Promise that resolves to the packument object.
-
-* `pacote.tarball(spec, opts)` Get a package tarball data as a buffer in
- memory. Returns a Promise that resolves to the tarball data Buffer, with
- `from`, `resolved`, and `integrity` fields attached.
-
-* `pacote.tarball.file(spec, dest, opts)` Save a package tarball data to
- a file on disk. Returns a Promise that resolves to
- `{from,integrity,resolved}` of the fetched tarball.
-
-* `pacote.tarball.stream(spec, streamHandler, opts)` Fetch a tarball and
- make the stream available to the `streamHandler` function.
-
- This is mostly an internal function, but it is exposed because it does
- provide some functionality that may be difficult to achieve otherwise.
-
- The `streamHandler` function MUST return a Promise that resolves when
- the stream (and all associated work) is ended, or rejects if the stream
- has an error.
-
- The `streamHandler` function MAY be called multiple times, as Pacote
- retries requests in some scenarios, such as cache corruption or
- retriable network failures.
-
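To make the `streamHandler` contract concrete, a minimal sketch (the spec and filename are arbitrary):

```js
const pacote = require('pacote')
const fs = require('fs')

// the handler must return a Promise that settles when all work on
// the stream is done; pacote may call it again on a retriable error
pacote.tarball.stream('npm@latest', stream =>
  new Promise((resolve, reject) => {
    const out = fs.createWriteStream('npm.tgz')
    stream.on('error', reject)
    out.on('error', reject)
    out.on('close', resolve)
    stream.pipe(out)
  })
).then(() => console.log('saved'))
```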
-### Options
-
-Options are passed to
-[`npm-registry-fetch`](http://npm.im/npm-registry-fetch) and
-[`cacache`](http://npm.im/cacache), so in addition to these, anything for
-those modules can be given to pacote as well.
-
-The options object is cloned, and mutated along the way to add integrity,
-resolved, and other properties as they are determined.
-
-* `cache` Where to store cache entries and temp files. Passed to
- [`cacache`](http://npm.im/cacache). Defaults to the same cache directory
- that npm will use by default, based on platform and environment.
-* `where` Base folder for resolving relative `file:` dependencies.
-* `resolved` Shortcut for looking up resolved values. Should be specified
- if known.
-* `integrity` Expected integrity of fetched package tarball. If specified,
- tarballs with mismatched integrity values will raise an `EINTEGRITY`
- error.
-* `umask` Permission mode mask for extracted files and directories.
- Defaults to `0o22`. See "Extracted File Modes" below.
-* `fmode` Minimum permission mode for extracted files. Defaults to
- `0o666`. See "Extracted File Modes" below.
-* `dmode` Minimum permission mode for extracted directories. Defaults to
- `0o777`. See "Extracted File Modes" below.
-* `log` A logger object with methods for various log levels. Typically,
- this will be [`npmlog`](http://npm.im/npmlog) in the npm CLI use case,
- but if not specified, the default is a logger that emits `'log'` events
- on the `process` object.
-* `preferOnline` Prefer to revalidate cache entries, even when it would not
- be strictly necessary. Default `false`.
-* `before` When picking a manifest from a packument, only consider
- packages published before the specified date. Default `null`.
-* `defaultTag` The default `dist-tag` to use when choosing a manifest from a
- packument. Defaults to `latest`.
-* `registry` The npm registry to use by default. Defaults to
- `https://registry.npmjs.org/`.
-* `fullMetadata` Fetch the full metadata from the registry for packuments,
- including information not strictly required for installation (author,
- description, etc.) Defaults to `true` when `before` is set, since the
- version publish time is part of the extended packument metadata.
-
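A sketch of passing several of these options together (all values shown are illustrative):

```js
const pacote = require('pacote')

pacote.manifest('foo@1.x', {
  registry: 'https://registry.npmjs.org/',
  cache: '/path/to/cache',         // handed through to cacache
  defaultTag: 'latest',
  before: new Date('2020-01-01'),  // also forces fullMetadata on
}).then(mani => console.log(mani.name, mani.version))
```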
-## Extracted File Modes
-
-Files are extracted with a mode matching the following formula:
-
-```
-( (tarball entry mode value) | (minimum mode option) ) & ~(umask)
-```
-
-This is in order to prevent unreadable files or unlistable directories from
-cluttering a project's `node_modules` folder, even if the package tarball
-specifies that the file should be inaccessible.
-
-It also prevents files from being group- or world-writable without explicit
-opt-in by the user, because all file and directory modes are masked against
-the `umask` value.
-
-So, a file which is `0o771` in the tarball, using the default `fmode` of
-`0o666` and `umask` of `0o22`, will result in a file mode of `0o755`:
-
-```
-(0o771 | 0o666) => 0o777
-(0o777 & ~0o22) => 0o755
-```
-
-In almost every case, the defaults are appropriate. To respect exactly
-what is in the package tarball (even if this makes an unusable system), set
-both `dmode` and `fmode` options to `0`. Otherwise, the `umask` config
-should be used in most cases where file mode modifications are required,
-and this functions more or less the same as the `umask` value in most Unix
-systems.
-
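The same arithmetic, written with JavaScript's bitwise operators:

```js
// (tarball entry mode | fmode) & ~umask
const mode = (0o771 | 0o666) & ~0o22
console.log(mode.toString(8)) // => '755'
```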
-## Extracted File Ownership
-
-When running as `root` on Unix systems, all extracted files and folders
-will have their owning `uid` and `gid` values set to match the ownership
-of the containing folder.
-
-This prevents `root`-owned files showing up in a project's `node_modules`
-folder when a user runs `sudo npm install`.
-
-## Manifests
-
-A `manifest` is similar to a `package.json` file. However, it has a few
-pieces of extra metadata, and sometimes lacks metadata that is inessential
-to package installation.
-
-In addition to the common `package.json` fields, manifests include:
-
-* `manifest._resolved` The tarball url or file path where the package
- artifact can be found.
-* `manifest._from` A normalized form of the spec passed in as an argument.
-* `manifest._integrity` The integrity value for the package artifact.
-* `manifest.dist` Registry manifests (those included in a packument) have a
- `dist` object. Only `tarball` is required, though at least one of
- `shasum` or `integrity` is almost always present.
-
- * `tarball` The url to the associated package artifact. (Copied by
- Pacote to `manifest._resolved`.)
- * `integrity` The integrity SRI string for the artifact. This may not
- be present for older packages on the npm registry. (Copied by Pacote
- to `manifest._integrity`.)
- * `shasum` Legacy integrity value. Hexadecimal-encoded sha1 hash.
- (Converted to an SRI string and copied by Pacote to
- `manifest._integrity` when `dist.integrity` is not present.)
- * `fileCount` Number of files in the tarball.
- * `unpackedSize` Size on disk of the package when unpacked.
- * `npm-signature` A signature of the package by the
- [`npmregistry`](https://keybase.io/npmregistry) Keybase account.
- (Obviously only present for packages published to
- `https://registry.npmjs.org`.)
-
-## Packuments
-
-A packument is the top-level package document that lists the set of
-manifests for available versions for a package.
-
-When a packument is fetched with `accept:
-application/vnd.npm.install-v1+json` in the HTTP headers, only the
-minimum necessary metadata is returned. Additional metadata is returned
-when fetched with only `accept: application/json`.
-
-For Pacote's purposes, the following fields are relevant:
-
-* `versions` An object where each key is a version, and each value is the
- manifest for that version.
-* `dist-tags` An object mapping dist-tags to version numbers. This is how
- `foo@latest` gets turned into `foo@1.2.3`.
-* `time` In the full packument, an object mapping version numbers to
- publication times, for the `opts.before` functionality.
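A sketch of reading those fields by hand (pacote's own `manifest()` does this for you, via `npm-pick-manifest`):

```js
const pacote = require('pacote')

pacote.packument('foo').then(packument => {
  // resolve the 'latest' dist-tag to a concrete manifest
  const version = packument['dist-tags'].latest
  const manifest = packument.versions[version]
  console.log(`foo@${version}`, manifest.dist.tarball)
})
```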
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/bin.js b/node_modules/libnpmpack/node_modules/pacote/lib/bin.js
deleted file mode 100755
index c0409be1f..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/lib/bin.js
+++ /dev/null
@@ -1,149 +0,0 @@
-#!/usr/bin/env node
-
-const run = conf => {
- const pacote = require('../')
- switch (conf._[0]) {
- case 'resolve':
- if (conf.long)
- return pacote.manifest(conf._[1], conf).then(mani => ({
- resolved: mani._resolved,
- integrity: mani._integrity,
- from: mani._from,
- }))
- case 'manifest':
- case 'packument':
- return pacote[conf._[0]](conf._[1], conf)
-
- case 'tarball':
- if (!conf._[2] || conf._[2] === '-') {
- return pacote.tarball.stream(conf._[1], stream => {
- stream.pipe(conf.testStdout ||
- /* istanbul ignore next */ process.stdout)
-        // make sure it resolves to something falsy
- return stream.promise().then(() => {})
- }, conf)
- } else
- return pacote.tarball.file(conf._[1], conf._[2], conf)
-
- case 'extract':
- return pacote.extract(conf._[1], conf._[2], conf)
-
- default: /* istanbul ignore next */ {
- throw new Error(`bad command: ${conf._[0]}`)
- }
- }
-}
-
-const version = require('../package.json').version
-const usage = () =>
-`Pacote - The JavaScript Package Handler, v${version}
-
-Usage:
-
- pacote resolve <spec>
- Resolve a specifier and output the fully resolved target
- Returns integrity and from if '--long' flag is set.
-
- pacote manifest <spec>
- Fetch a manifest and print to stdout
-
- pacote packument <spec>
- Fetch a full packument and print to stdout
-
- pacote tarball <spec> [<filename>]
- Fetch a package tarball and save to <filename>
- If <filename> is missing or '-', the tarball will be streamed to stdout.
-
- pacote extract <spec> <folder>
- Extract a package to the destination folder.
-
-Configuration values all match the names of configs passed to npm, or
-options passed to Pacote. Additional flags for this executable:
-
- --long Print an object from 'resolve', including integrity and spec.
- --json Print result objects as JSON rather than node's default.
- (This is the default if stdout is not a TTY.)
- --help -h Print this helpful text.
-
-For example '--cache=/path/to/folder' will use that folder as the cache.
-`
-
-const shouldJSON = (conf, result) =>
- conf.json ||
- !process.stdout.isTTY &&
- conf.json === undefined &&
- result &&
- typeof result === 'object'
-
-const pretty = (conf, result) =>
- shouldJSON(conf, result) ? JSON.stringify(result, 0, 2) : result
-
-let addedLogListener = false
-const main = args => {
- const conf = parse(args)
- if (conf.help || conf.h)
- return console.log(usage())
-
- if (!addedLogListener) {
- process.on('log', console.error)
- addedLogListener = true
- }
-
- try {
- return run(conf)
- .then(result => result && console.log(pretty(conf, result)))
- .catch(er => {
- console.error(er)
- process.exit(1)
- })
- } catch (er) {
- console.error(er.message)
- console.error(usage())
- }
-}
-
-const parseArg = arg => {
- const split = arg.slice(2).split('=')
- const k = split.shift()
- const v = split.join('=')
- const no = /^no-/.test(k) && !v
- const key = (no ? k.substr(3) : k)
- .replace(/^tag$/, 'defaultTag')
- .replace(/-([a-z])/g, (_, c) => c.toUpperCase())
- const value = v ? v.replace(/^~/, process.env.HOME) : !no
- return { key, value }
-}
-
-const parse = args => {
- const conf = {
- _: [],
- cache: process.env.HOME + '/.npm/_cacache',
- }
- let dashdash = false
- args.forEach(arg => {
- if (dashdash)
- conf._.push(arg)
- else if (arg === '--')
- dashdash = true
- else if (arg === '-h')
- conf.help = true
- else if (/^--/.test(arg)) {
- const {key, value} = parseArg(arg)
- conf[key] = value
- } else {
- conf._.push(arg)
- }
- })
- return conf
-}
-
-if (module === require.main)
- main(process.argv.slice(2))
-else
- module.exports = {
- main,
- run,
- usage,
- parseArg,
- parse,
- }
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/dir.js b/node_modules/libnpmpack/node_modules/pacote/lib/dir.js
deleted file mode 100644
index 44dadaa32..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/lib/dir.js
+++ /dev/null
@@ -1,98 +0,0 @@
-const Fetcher = require('./fetcher.js')
-const FileFetcher = require('./file.js')
-const cacache = require('cacache')
-const Minipass = require('minipass')
-const { promisify } = require('util')
-const readPackageJson = require('read-package-json-fast')
-const npm = require('./util/npm.js')
-const isPackageBin = require('./util/is-package-bin.js')
-const packlist = require('npm-packlist')
-const tar = require('tar')
-const _prepareDir = Symbol('_prepareDir')
-const _tarcOpts = Symbol('_tarcOpts')
-
-const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
-class DirFetcher extends Fetcher {
- constructor (spec, opts) {
- super(spec, opts)
- // just the fully resolved filename
- this.resolved = this.spec.fetchSpec
- }
-
- get types () {
- return ['directory']
- }
-
- [_prepareDir] () {
- return this.manifest().then(mani => {
- if (!mani.scripts || !mani.scripts.prepare)
- return
-
- // we *only* run prepare.
- // pre/post-pack is run by the npm CLI for publish and pack,
- // but this function is *also* run when installing git deps
- return npm(
- this.npmBin,
- [].concat(this.npmRunCmd).concat('prepare').concat(this.npmCliConfig),
- this.resolved,
- { message: 'directory preparation failed' }
- )
- })
- }
-
- [_tarballFromResolved] () {
- const stream = new Minipass()
- stream.resolved = this.resolved
- stream.integrity = this.integrity
-
- // run the prepare script, get the list of files, and tar it up
-    // pipe to the stream, and proxy errors along the chain.
- this[_prepareDir]()
- .then(() => packlist({ path: this.resolved }))
- .then(files => tar.c(this[_tarcOpts](), files)
- .on('error', er => stream.emit('error', er)).pipe(stream))
- .catch(er => stream.emit('error', er))
- return stream
- }
-
- [_tarcOpts] () {
- return {
- cwd: this.resolved,
- prefix: 'package/',
- portable: true,
- gzip: true,
-
- // ensure that package bins are always executable
-      // Note that npm-packlist is already filtering out
-      // anything that is not a regular file, as well as anything
-      // ignored by .npmignore or package.json "files", etc.
- filter: (path, stat) => {
- if (isPackageBin(this.package, path))
- stat.mode |= 0o111
- return true
- },
-
- // Provide a specific date in the 1980s for the benefit of zip,
- // which is confounded by files dated at the Unix epoch 0.
- mtime: new Date('1985-10-26T08:15:00.000Z'),
- }
- }
-
- manifest () {
- if (this.package)
- return Promise.resolve(this.package)
-
- return readPackageJson(this.resolved + '/package.json')
- .then(mani => this.package = {
- ...mani,
- _integrity: this.integrity && String(this.integrity),
- _resolved: this.resolved,
- _from: this.from,
- })
- }
-
- packument () {
- return FileFetcher.prototype.packument.apply(this)
- }
-}
-module.exports = DirFetcher
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/fetcher.js b/node_modules/libnpmpack/node_modules/pacote/lib/fetcher.js
deleted file mode 100644
index 4c5efdc01..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/lib/fetcher.js
+++ /dev/null
@@ -1,470 +0,0 @@
-// This is the base class that the other fetcher types in lib
-// all descend from.
-// It handles the unpacking and retry logic that is shared among
-// all of the other Fetcher types.
-
-const npa = require('npm-package-arg')
-const ssri = require('ssri')
-const { promisify } = require('util')
-const { basename, dirname } = require('path')
-const rimraf = promisify(require('rimraf'))
-const tar = require('tar')
-const procLog = require('./util/proc-log.js')
-const retry = require('promise-retry')
-const fsm = require('fs-minipass')
-const cacache = require('cacache')
-const isPackageBin = require('./util/is-package-bin.js')
-const getContents = require('@npmcli/installed-package-contents')
-
-// we only change ownership on unix platforms, and only if uid is 0
-const selfOwner = process.getuid && process.getuid() === 0 ? {
- uid: 0,
- gid: process.getgid(),
-} : null
-const chownr = selfOwner ? promisify(require('chownr')) : null
-const inferOwner = selfOwner ? require('infer-owner') : null
-const mkdirp = require('mkdirp')
-const cacheDir = require('./util/cache-dir.js')
-
-// Private methods.
-// Child classes should not have to override these.
-// Users should never call them.
-const _chown = Symbol('_chown')
-const _extract = Symbol('_extract')
-const _mkdir = Symbol('_mkdir')
-const _empty = Symbol('_empty')
-const _toFile = Symbol('_toFile')
-const _tarxOptions = Symbol('_tarxOptions')
-const _entryMode = Symbol('_entryMode')
-const _istream = Symbol('_istream')
-const _assertType = Symbol('_assertType')
-const _tarballFromCache = Symbol('_tarballFromCache')
-const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
-
-class FetcherBase {
- constructor (spec, opts) {
- if (!opts || typeof opts !== 'object')
- throw new TypeError('options object is required')
- this.spec = npa(spec, opts.where)
-
- // a bit redundant because presumably the caller already knows this,
- // but it makes it easier to not have to keep track of the requested
- // spec when we're dispatching thousands of these at once, and normalizing
- // is nice. saveSpec is preferred if set, because it turns stuff like
- // x/y#committish into github:x/y#committish. use name@rawSpec for
- // registry deps so that we turn xyz and xyz@ -> xyz@
- this.from = this.spec.registry
- ? `${this.spec.name}@${this.spec.rawSpec}` : this.spec.saveSpec
-
- this[_assertType]()
- // clone the opts object so that others aren't upset when we mutate it
- // by adding/modifying the integrity value.
- this.opts = {...opts}
- this.cache = opts.cache || cacheDir()
- this.resolved = opts.resolved || null
-
-    // default to caching/verifying with sha512; that's what we usually have.
-    // We'll need to change this default, or start overriding it, when sha512
-    // is no longer strong enough.
- this.defaultIntegrityAlgorithm = opts.defaultIntegrityAlgorithm || 'sha512'
-
- if (typeof opts.integrity === 'string')
- this.opts.integrity = ssri.parse(opts.integrity)
-
- this.package = null
- this.type = this.constructor.name
- this.fmode = opts.fmode || 0o666
- this.dmode = opts.dmode || 0o777
- this.umask = opts.umask || 0o022
- this.log = opts.log || procLog
-
- this.preferOnline = !!opts.preferOnline
- this.preferOffline = !!opts.preferOffline
- this.offline = !!opts.offline
-
- this.before = opts.before
- this.fullMetadata = this.before ? true : !!opts.fullMetadata
-
- this.defaultTag = opts.defaultTag || 'latest'
- this.registry = opts.registry || 'https://registry.npmjs.org'
-
- // command to run 'prepare' scripts on directories and git dirs
- // To use pacote with yarn, for example, set npmBin to 'yarn'
- // and npmRunCmd to [], and npmCliConfig with yarn's equivalents.
- this.npmBin = opts.npmBin || 'npm'
- this.npmRunCmd = opts.npmRunCmd || 'run'
-
- // command to install deps for preparing
- this.npmInstallCmd = opts.npmInstallCmd || [
- 'install',
- '--only=dev',
- '--prod',
- '--ignore-prepublish',
- '--no-progress',
- '--no-save',
- ]
-
- // XXX fill more of this in based on what we know from this.opts
- // we explicitly DO NOT fill in --tag, though, since we are often
- // going to be packing in the context of a publish, which may set
- // a dist-tag, but certainly wants to keep defaulting to latest.
- this.npmCliConfig = opts.npmCliConfig || [
- `--cache=${this.cache}`,
- `--prefer-offline=${!!this.preferOffline}`,
- `--prefer-online=${!!this.preferOnline}`,
- `--offline=${!!this.offline}`,
- `--before=${this.before ? this.before.toISOString() : ''}`,
- ]
- }
-
- get integrity () {
- return this.opts.integrity || null
- }
- set integrity (i) {
- if (!i)
- return
-
- i = ssri.parse(i)
- const current = this.opts.integrity
-
- // do not ever update an existing hash value, but do
- // merge in NEW algos and hashes that we don't already have.
- if (current)
- current.merge(i)
- else
- this.opts.integrity = i
- }
-
- get notImplementedError () {
- return new Error('not implemented in this fetcher type: ' + this.type)
- }
-
- // override in child classes
- // Returns a Promise that resolves to this.resolved string value
- resolve () {
- return this.resolved ? Promise.resolve(this.resolved)
- : Promise.reject(this.notImplementedError)
- }
-
- packument () {
- return Promise.reject(this.notImplementedError)
- }
-
- // override in child class
- // returns a manifest containing:
- // - name
- // - version
- // - _resolved
- // - _integrity
- // - plus whatever else was in there (corgi, full metadata, or pj file)
- manifest () {
- return Promise.reject(this.notImplementedError)
- }
-
- // private, should be overridden.
- // Note that they should *not* calculate or check integrity, but *just*
- // return the raw tarball data stream.
- [_tarballFromResolved] () {
- throw this.notImplementedError
- }
-
- // public, should not be overridden
- tarball () {
- return this.tarballStream(stream => new Promise((res, rej) => {
- const buf = []
- stream.on('error', er => rej(er))
- stream.on('end', () => {
- const data = Buffer.concat(buf)
- data.integrity = this.integrity && String(this.integrity)
- data.resolved = this.resolved
- data.from = this.from
- return res(data)
- })
- stream.on('data', d => buf.push(d))
- }))
- }
-
- // private
- // Note: cacache will raise a EINTEGRITY error if the integrity doesn't match
- [_tarballFromCache] () {
- return cacache.get.stream.byDigest(this.cache, this.integrity, this.opts)
- }
-
- [_istream] (stream) {
- // everyone will need one of these, either for verifying or calculating
-    // We always set it, because we might only have a weak legacy hex
- // sha1 in the packument, and this MAY upgrade it to a stronger algo.
- // If we had an integrity, and it doesn't match, then this does not
- // override that error; the istream will raise the error before it
- // gets to the point of re-setting the integrity.
- const istream = ssri.integrityStream(this.opts)
- istream.on('integrity', i => this.integrity = i)
- return stream.on('error', er => istream.emit('error', er)).pipe(istream)
- }
-
- pickIntegrityAlgorithm () {
- return this.integrity ? this.integrity.pickAlgorithm(this.opts)
- : this.defaultIntegrityAlgorithm
- }
-
- // TODO: check error class, once those are rolled out to our deps
- isDataCorruptionError (er) {
- return er.code === 'EINTEGRITY' || er.code === 'Z_DATA_ERROR'
- }
-
- // override the types getter
- get types () {}
- [_assertType] () {
- if (this.types && !this.types.includes(this.spec.type)) {
- throw new TypeError(`Wrong spec type (${
- this.spec.type
- }) for ${
- this.constructor.name
- }. Supported types: ${this.types.join(', ')}`)
- }
- }
-
- // We allow ENOENTs from cacache, but not anywhere else.
- // An ENOENT trying to read a tgz file, for example, is Right Out.
- isRetriableError (er) {
- // TODO: check error class, once those are rolled out to our deps
- return this.isDataCorruptionError(er) || er.code === 'ENOENT'
- }
-
- // Mostly internal, but has some uses
- // Pass in a function which returns a promise
- // Function will be called 1 or more times with streams that may fail.
- // Retries:
- // Function MUST handle errors on the stream by rejecting the promise,
- // so that retry logic can pick it up and either retry or fail whatever
- // promise it was making (ie, failing extraction, etc.)
- //
- // The return value of this method is a Promise that resolves the same
- // as whatever the streamHandler resolves to.
- //
- // This should never be overridden by child classes, but it is public.
- tarballStream (streamHandler) {
- // Only short-circuit via cache if we have everything else we'll need,
- // and the user has not expressed a preference for checking online.
-
- const fromCache = (
- !this.preferOnline &&
- this.integrity &&
- this.resolved
- ) ? streamHandler(this[_tarballFromCache]()).catch(er => {
- if (this.isDataCorruptionError(er)) {
- this.log.warn('tarball', `cached data for ${
- this.spec
- } (${this.integrity}) seems to be corrupted. Refreshing cache.`)
- return this.cleanupCached().then(() => { throw er })
- } else {
- throw er
- }
- }) : null
-
- const fromResolved = er => {
- if (er) {
- if (!this.isRetriableError(er))
- throw er
- this.log.silly('tarball', `no local data for ${
- this.spec
- }. Extracting by manifest.`)
- }
- return this.resolve().then(() => retry(tryAgain =>
- streamHandler(this[_istream](this[_tarballFromResolved]()))
- .catch(er => {
- // Most likely data integrity. A cache ENOENT error is unlikely
- // here, since we're definitely not reading from the cache, but it
- // IS possible that the fetch subsystem accessed the cache, and the
- // entry got blown away or something. Try one more time to be sure.
- if (this.isRetriableError(er)) {
- this.log.warn('tarball', `tarball data for ${
- this.spec
- } (${this.integrity}) seems to be corrupted. Trying again.`)
- return this.cleanupCached().then(() => tryAgain(er))
- }
- throw er
- }), { retries: 1, minTimeout: 0, maxTimeout: 0 }))
- }
-
- return fromCache ? fromCache.catch(fromResolved) : fromResolved()
- }
-
- cleanupCached () {
- return cacache.rm.content(this.cache, this.integrity, this.opts)
- }
-
- [_chown] (path, uid, gid) {
- return selfOwner && (selfOwner.gid !== gid || selfOwner.uid !== uid)
- ? chownr(path, uid, gid)
- : /* istanbul ignore next - we don't test in root-owned folders */ null
- }
-
- [_empty] (path) {
- return getContents({path, depth: 1}).then(contents => Promise.all(
- contents.map(entry => rimraf(entry))))
- }
-
- [_mkdir] (dest) {
- // if we're bothering to do owner inference, then do it.
- // otherwise just make the dir, and return an empty object.
-    // always empty the dir to start with, but do so
- // _after_ inferring the owner, in case there's an existing folder
- // there that we would want to preserve which differs from the
- // parent folder (rare, but probably happens sometimes).
- return !inferOwner
- ? this[_empty](dest).then(() => mkdirp(dest)).then(() => ({}))
- : inferOwner(dest).then(({uid, gid}) =>
- this[_empty](dest)
- .then(() => mkdirp(dest))
- .then(made => {
- // ignore the || dest part in coverage. It's there to handle
- // race conditions where the dir may be made by someone else
- // after being removed by us.
- const dir = made || /* istanbul ignore next */ dest
- return this[_chown](dir, uid, gid)
- })
- .then(() => ({uid, gid})))
- }
-
- // extraction is always the same. the only difference is where
- // the tarball comes from.
- extract (dest) {
- return this[_mkdir](dest).then(({uid, gid}) =>
- this.tarballStream(tarball => this[_extract](dest, tarball, uid, gid)))
- }
-
- [_toFile] (dest) {
- return this.tarballStream(str => new Promise((res, rej) => {
- const writer = new fsm.WriteStream(dest)
- str.on('error', er => writer.emit('error', er))
- writer.on('error', er => rej(er))
- writer.on('close', () => res({
- integrity: this.integrity && String(this.integrity),
- resolved: this.resolved,
- from: this.from,
- }))
- str.pipe(writer)
- }))
- }
-
- // don't use this[_mkdir] because we don't want to rimraf anything
- tarballFile (dest) {
- const dir = dirname(dest)
- return !inferOwner
- ? mkdirp(dir).then(() => this[_toFile](dest))
- : inferOwner(dest).then(({uid, gid}) =>
- mkdirp(dir).then(made => this[_toFile](dest)
- .then(res => this[_chown](made || dir, uid, gid)
- .then(() => res))))
- }
-
- [_extract] (dest, tarball, uid, gid) {
- const extractor = tar.x(this[_tarxOptions]({ cwd: dest, uid, gid }))
- const p = new Promise((resolve, reject) => {
- extractor.on('end', () => {
- resolve({
- resolved: this.resolved,
- integrity: this.integrity && String(this.integrity),
- from: this.from,
- })
- })
-
- extractor.on('error', er => {
- this.log.warn('tar', er.message)
- this.log.silly('tar', er)
- reject(er)
- })
-
- tarball.on('error', er => reject(er))
- })
-
- tarball.pipe(extractor)
- return p
- }
-
- // always ensure that entries are at least as permissive as our configured
- // dmode/fmode, but never more permissive than the umask allows.
- [_entryMode] (path, mode, type) {
- const m = /Directory|GNUDumpDir/.test(type) ? this.dmode
- : /File$/.test(type) ? this.fmode
- : /* istanbul ignore next - should never happen in a pkg */ 0
-
- // make sure package bins are executable
- const exe = isPackageBin(this.package, path) ? 0o111 : 0
- return ((mode | m) & ~this.umask) | exe
- }
-
- [_tarxOptions] ({ cwd, uid, gid }) {
- const sawIgnores = new Set()
- return {
- cwd,
- filter: (name, entry) => {
- if (/Link$/.test(entry.type))
- return false
- entry.mode = this[_entryMode](entry.path, entry.mode, entry.type)
- // this replicates the npm pack behavior where .gitignore files
- // are treated like .npmignore files, but only if a .npmignore
- // file is not present.
- if (/File$/.test(entry.type)) {
- const base = basename(entry.path)
- if (base === '.npmignore')
- sawIgnores.add(entry.path)
- else if (base === '.gitignore') {
- // rename, but only if there's not already a .npmignore
- const ni = entry.path.replace(/\.gitignore$/, '.npmignore')
- if (sawIgnores.has(ni))
- return false
- entry.path = ni
- }
- return true
- }
- },
- strip: 1,
- onwarn: /* istanbul ignore next - we can trust that tar logs */
- (code, msg, data) => {
- this.log.warn('tar', code, msg)
- this.log.silly('tar', code, msg, data)
- },
- uid,
- gid,
- umask: this.umask,
- }
- }
-}
-
-module.exports = FetcherBase
-
-// Child classes
-const GitFetcher = require('./git.js')
-const RegistryFetcher = require('./registry.js')
-const FileFetcher = require('./file.js')
-const DirFetcher = require('./dir.js')
-const RemoteFetcher = require('./remote.js')
-
-// Get an appropriate fetcher object from a spec and options
-FetcherBase.get = (rawSpec, opts = {}) => {
- const spec = npa(rawSpec, opts.where)
- switch (spec.type) {
- case 'git':
- return new GitFetcher(spec, opts)
-
- case 'remote':
- return new RemoteFetcher(spec, opts)
-
- case 'version':
- case 'range':
- case 'tag':
- case 'alias':
- return new RegistryFetcher(spec.subSpec || spec, opts)
-
- case 'file':
- return new FileFetcher(spec, opts)
-
- case 'directory':
- return new DirFetcher(spec, opts)
-
- default:
- throw new TypeError('Unknown spec type: ' + spec.type)
- }
-}
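For orientation, a minimal sketch of the subclass contract described in the comments above. This is hypothetical (no such fetcher exists in pacote), and the `NullFetcher` name and `'null'` type are illustrative only:

```js
const Fetcher = require('./fetcher.js')
const Minipass = require('minipass')
const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')

class NullFetcher extends Fetcher {
  // [_assertType] checks the spec type against this list
  get types () { return ['null'] }

  // resolve() must settle to the resolved string value
  resolve () { return Promise.resolve(this.resolved) }

  manifest () { return Promise.resolve(this.package) }
  packument () { return Promise.reject(this.notImplementedError) }

  // return the raw tarball stream only; integrity is calculated
  // and verified upstream by [_istream], never here
  [_tarballFromResolved] () {
    return new Minipass().end()
  }
}
module.exports = NullFetcher
```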
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/file.js b/node_modules/libnpmpack/node_modules/pacote/lib/file.js
deleted file mode 100644
index d5c601aab..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/lib/file.js
+++ /dev/null
@@ -1,93 +0,0 @@
-const Fetcher = require('./fetcher.js')
-const fsm = require('fs-minipass')
-const cacache = require('cacache')
-const { promisify } = require('util')
-const readPackageJson = require('read-package-json-fast')
-const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
-const _exeBins = Symbol('_exeBins')
-const { resolve } = require('path')
-const fs = require('fs')
-
-class FileFetcher extends Fetcher {
- constructor (spec, opts) {
- super(spec, opts)
- // just the fully resolved filename
- this.resolved = this.spec.fetchSpec
- }
-
- get types () {
- return ['file']
- }
-
- manifest () {
- if (this.package)
- return Promise.resolve(this.package)
-
- // have to unpack the tarball for this.
- return cacache.tmp.withTmp(this.cache, this.opts, dir =>
- this.extract(dir)
- .then(() => readPackageJson(dir + '/package.json'))
- .then(mani => this.package = {
- ...mani,
- _integrity: this.integrity && String(this.integrity),
- _resolved: this.resolved,
- _from: this.from,
- }))
- }
-
- [_exeBins] (pkg, dest) {
- if (!pkg.bin)
- return Promise.resolve()
-
- return Promise.all(Object.keys(pkg.bin).map(k => new Promise(res => {
- const script = resolve(dest, pkg.bin[k])
-      // Best effort. Ignore errors here; the only result is that
- // a bin script is not executable. But if it's missing or
- // something, we just leave it for a later stage to trip over
- // when we can provide a more useful contextual error.
- fs.stat(script, (er, st) => {
- if (er)
- return res()
- const mode = st.mode | 0o111
- if (mode === st.mode)
- return res()
- fs.chmod(script, mode, res)
- })
- })))
- }
-
- extract (dest) {
- // if we've already loaded the manifest, then the super got it.
- // but if not, read the unpacked manifest and chmod properly.
- return super.extract(dest)
- .then(result => this.package ? result
- : readPackageJson(dest + '/package.json').then(pkg =>
- this[_exeBins](pkg, dest)).then(() => result))
- }
-
- [_tarballFromResolved] () {
- // create a read stream and return it
- return new fsm.ReadStream(this.resolved)
- }
-
- packument () {
- // simulate based on manifest
- return this.manifest().then(mani => ({
- name: mani.name,
- 'dist-tags': {
- [this.defaultTag]: mani.version
- },
- versions: {
- [mani.version]: {
- ...mani,
- dist: {
- tarball: `file:${this.resolved}`,
- integrity: this.integrity && String(this.integrity),
- }
- }
- }
- }))
- }
-}
-
-module.exports = FileFetcher
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/git.js b/node_modules/libnpmpack/node_modules/pacote/lib/git.js
deleted file mode 100644
index 81f7ca256..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/lib/git.js
+++ /dev/null
@@ -1,272 +0,0 @@
-const Fetcher = require('./fetcher.js')
-const FileFetcher = require('./file.js')
-const RemoteFetcher = require('./remote.js')
-const DirFetcher = require('./dir.js')
-const hashre = /^[a-f0-9]{40}$/
-const git = require('@npmcli/git')
-const pickManifest = require('npm-pick-manifest')
-const npa = require('npm-package-arg')
-const url = require('url')
-const Minipass = require('minipass')
-const cacache = require('cacache')
-const { promisify } = require('util')
-const readPackageJson = require('read-package-json-fast')
-const npm = require('./util/npm.js')
-
-const _resolvedFromRepo = Symbol('_resolvedFromRepo')
-const _resolvedFromHosted = Symbol('_resolvedFromHosted')
-const _resolvedFromClone = Symbol('_resolvedFromClone')
-const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
-const _addGitSha = Symbol('_addGitSha')
-const _clone = Symbol('_clone')
-const _cloneHosted = Symbol('_cloneHosted')
-const _cloneRepo = Symbol('_cloneRepo')
-const _setResolvedWithSha = Symbol('_setResolvedWithSha')
-const _prepareDir = Symbol('_prepareDir')
-
-// get the repository url. prefer ssh, fall back to git://
-// We have to add the git+ back because npa suppresses it.
-const repoUrl = (hosted, opts) =>
- hosted.sshurl && addGitPlus(hosted.sshurl(opts)) ||
- hosted.https && addGitPlus(hosted.https(opts))
-
-const addGitPlus = url => url && `git+${url}`
-
-class GitFetcher extends Fetcher {
- constructor (spec, opts) {
- super(spec, opts)
- this.resolvedRef = null
- if (this.spec.hosted)
- this.from = this.spec.hosted.shortcut({ noCommittish: false })
-
- // shortcut: avoid full clone when we can go straight to the tgz
- // if we have the full sha and it's a hosted git platform
- if (this.spec.gitCommittish && hashre.test(this.spec.gitCommittish)) {
- this.resolvedSha = this.spec.gitCommittish
- // use hosted.tarball() when we shell to RemoteFetcher later
- this.resolved = this.spec.hosted
- ? repoUrl(this.spec.hosted, { noCommittish: false })
- : this.spec.fetchSpec + '#' + this.spec.gitCommittish
- } else
- this.resolvedSha = ''
- }
-
- get types () {
- return ['git']
- }
-
- resolve () {
- // likely a hosted git repo with a sha, so get the tarball url
- // but in general, no reason to resolve() more than necessary!
- if (this.resolved)
- return super.resolve()
-
- // fetch the git repo and then look at the current hash
- const h = this.spec.hosted
- // try to use ssh, fall back to git.
- return h ? this[_resolvedFromHosted](h)
- : this[_resolvedFromRepo](this.spec.fetchSpec)
- }
-
- // first try https, since that's faster and passphrase-less for
- // public repos. Fall back to SSH to support private repos.
- // NB: we always store the SSH url in the 'resolved' field.
- [_resolvedFromHosted] (hosted) {
- return this[_resolvedFromRepo](hosted.https && hosted.https())
- .catch(er => {
- const ssh = hosted.sshurl && hosted.sshurl()
- if (!ssh)
- throw er
- return this[_resolvedFromRepo](ssh)
- })
- }
-
- [_resolvedFromRepo] (gitRemote) {
- // XXX make this a custom error class
- if (!gitRemote)
- return Promise.reject(new Error(`No git url for ${this.spec}`))
- const gitRange = this.spec.gitRange
- const name = this.spec.name
- return git.revs(gitRemote, this.opts).then(remoteRefs => {
- return gitRange ? pickManifest({
- versions: remoteRefs.versions,
- 'dist-tags': remoteRefs['dist-tags'],
- name,
- }, gitRange, this.opts)
- : this.spec.gitCommittish ?
- remoteRefs.refs[this.spec.gitCommittish] ||
- remoteRefs.refs[remoteRefs.shas[this.spec.gitCommittish]]
- : remoteRefs.refs.HEAD // no git committish, get default head
- }).then(revDoc => {
- // the committish provided isn't in the rev list
- // things like HEAD~3 or @yesterday can land here.
- if (!revDoc || !revDoc.sha)
- return this[_resolvedFromClone]()
-
- this.resolvedRef = revDoc
- this.resolvedSha = revDoc.sha
- this[_addGitSha](revDoc.sha)
- return this.resolved
- })
- }
-
- [_setResolvedWithSha] (withSha) {
- // we haven't cloned, so a tgz download is still faster
- // of course, if it's not a known host, we can't do that.
- this.resolved = !this.spec.hosted ? withSha
- : repoUrl(npa(withSha).hosted, { noCommittish: false })
- }
-
- // when we get the git sha, we affix it to our spec to build up
- // either a git url with a hash, or a tarball download URL
- [_addGitSha] (sha) {
- if (this.spec.hosted) {
- this[_setResolvedWithSha](
- this.spec.hosted.shortcut({ noCommittish: true }) + '#' + sha
- )
- } else {
- const u = url.format(new url.URL(`#${sha}`, this.spec.rawSpec))
- this[_setResolvedWithSha](url.format(u))
- }
- }
-
- [_resolvedFromClone] () {
- // do a full or shallow clone, then look at the HEAD
- // kind of wasteful, but no other option, really
- return this[_clone](dir => this.resolved)
- }
-
- [_prepareDir] (dir) {
- return readPackageJson(dir + '/package.json').then(mani => {
- // no need if we aren't going to do any preparation.
- const scripts = mani.scripts
- if (!scripts || !(
- scripts.postinstall ||
- scripts.build ||
- scripts.preinstall ||
- scripts.install ||
- scripts.prepare))
- return
-
- // the DirFetcher will do its own preparation to run the prepare scripts
- // All we have to do is put the deps in place so that it can succeed.
- return npm(
- this.npmBin,
- [].concat(this.npmInstallCmd).concat(this.npmCliConfig),
- dir,
- { message: 'git dep preparation failed' }
- )
- })
- }
-
- [_tarballFromResolved] () {
- const stream = new Minipass()
- stream.resolved = this.resolved
- stream.integrity = this.integrity
- stream.from = this.from
-
- // check it out and then shell out to the DirFetcher tarball packer
- this[_clone](dir => this[_prepareDir](dir)
- .then(() => new Promise((res, rej) => {
- const df = new DirFetcher(`file:${dir}`, {
- ...this.opts,
- resolved: null,
- integrity: null,
- })
- const dirStream = df[_tarballFromResolved]()
- dirStream.on('error', rej)
- dirStream.on('end', res)
- dirStream.pipe(stream)
- }))).catch(
- /* istanbul ignore next: very unlikely and hard to test */
- er => stream.emit('error', er)
- )
- return stream
- }
-
- // clone a git repo into a temp folder (or fetch and unpack if possible)
- // handler accepts a directory, and returns a promise that resolves
- // when we're done with it, at which point, cacache deletes it
- //
- // TODO: after cloning, create a tarball of the folder, and add to the cache
- // with cacache.put.stream(), using a key that's deterministic based on the
- // spec and repo, so that we don't ever clone the same thing multiple times.
- [_clone] (handler, tarballOk = true) {
- const o = { tmpPrefix: 'git-clone' }
- const ref = this.resolvedSha || this.spec.gitCommittish
- const h = this.spec.hosted
- const resolved = this.resolved
-
- // can be set manually to false to fall back to actual git clone
- tarballOk = tarballOk &&
- h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball
-
- return cacache.tmp.withTmp(this.cache, o, tmp => {
- // if we're resolved, and have a tarball url, shell out to RemoteFetcher
- if (tarballOk) {
- const nameat = this.spec.name ? `${this.spec.name}@` : ''
- return new RemoteFetcher(h.tarball({ noCommittish: false }), {
- ...this.opts,
- pkgid: `git:${nameat}${this.resolved}`,
- resolved: this.resolved,
- integrity: null, // it'll always be different, if we have one
- }).extract(tmp).then(() => handler(tmp), er => {
- // fall back to ssh download if tarball fails
- if (er.constructor.name.match(/^Http/))
- return this[_clone](handler, false)
- else
- throw er
- })
- }
-
- return (
- h ? this[_cloneHosted](ref, tmp)
- : this[_cloneRepo](this.spec.fetchSpec, ref, tmp)
- ).then(sha => {
- this.resolvedSha = sha
- if (!this.resolved)
- this[_addGitSha](sha)
- })
- .then(() => handler(tmp))
- })
- }
-
- [_cloneHosted] (ref, tmp) {
- const hosted = this.spec.hosted
- const https = hosted.https()
- return this[_cloneRepo](hosted.https({ noCommittish: true }), ref, tmp)
- .catch(er => {
- const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true })
- /* istanbul ignore if - should be covered by the resolve() call */
- if (!ssh)
- throw er
- return this[_cloneRepo](ssh, ref, tmp)
- })
- }
-
- [_cloneRepo] (repo, ref, tmp) {
- const { opts, spec } = this
- return git.clone(repo, ref, tmp, { ...opts, spec })
- }
-
- manifest () {
- if (this.package)
- return Promise.resolve(this.package)
-
- return this.spec.hosted && this.resolved
- ? FileFetcher.prototype.manifest.apply(this)
- : this[_clone](dir =>
- readPackageJson(dir + '/package.json')
- .then(mani => this.package = {
- ...mani,
- _integrity: this.integrity && String(this.integrity),
- _resolved: this.resolved,
- _from: this.from,
- }))
- }
-
- packument () {
- return FileFetcher.prototype.packument.apply(this)
- }
-}
-module.exports = GitFetcher
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/index.js b/node_modules/libnpmpack/node_modules/pacote/lib/index.js
deleted file mode 100644
index 546ba960b..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/lib/index.js
+++ /dev/null
@@ -1,12 +0,0 @@
-const { get } = require('./fetcher.js')
-module.exports = {
- resolve: (spec, opts) => get(spec, opts).resolve(),
- extract: (spec, dest, opts) => get(spec, opts).extract(dest),
- manifest: (spec, opts) => get(spec, opts).manifest(),
- tarball: (spec, opts) => get(spec, opts).tarball(),
- packument: (spec, opts) => get(spec, opts).packument(),
-}
-module.exports.tarball.stream = (spec, handler, opts) =>
- get(spec, opts).tarballStream(handler)
-module.exports.tarball.file = (spec, dest, opts) =>
- get(spec, opts).tarballFile(dest)
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/registry.js b/node_modules/libnpmpack/node_modules/pacote/lib/registry.js
deleted file mode 100644
index b9df03614..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/lib/registry.js
+++ /dev/null
@@ -1,159 +0,0 @@
-const Fetcher = require('./fetcher.js')
-const RemoteFetcher = require('./remote.js')
-const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
-const pacoteVersion = require('../package.json').version
-const npa = require('npm-package-arg')
-const pickManifest = require('npm-pick-manifest')
-const ssri = require('ssri')
-const Minipass = require('minipass')
-
-// Corgis are cute. 🐕🐶
-const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
-const fullDoc = 'application/json'
-
-const fetch = require('npm-registry-fetch')
-
-// TODO: memoize reg requests, so we don't even have to check cache
-
-const _headers = Symbol('_headers')
-class RegistryFetcher extends Fetcher {
- constructor (spec, opts) {
- super(spec, opts)
-
- // handle case when npm-package-arg guesses wrong.
- if (this.spec.type === 'tag' &&
- this.spec.rawSpec === '' &&
- this.defaultTag !== 'latest')
- this.spec = npa(`${this.spec.name}@${this.defaultTag}`)
- this.registry = fetch.pickRegistry(spec, opts)
- this.packumentUrl = this.registry.replace(/\/*$/, '/') +
- this.spec.escapedName
-
- // XXX pacote <=9 has some logic to ignore opts.resolved if
- // the resolved URL doesn't go to the same registry.
- // Consider reproducing that here, to throw away this.resolved
- // in that case.
- }
-
- resolve () {
- if (this.resolved)
- return Promise.resolve(this.resolved)
-
- // fetching the manifest sets resolved and (usually) integrity
- return this.manifest().then(() => {
- if (this.resolved)
- return this.resolved
-
- throw Object.assign(
- new Error('Invalid package manifest: no `dist.tarball` field'),
- { package: this.spec.toString() }
- )
- })
- }
-
- [_headers] () {
- return {
- // npm will override UA, but ensure that we always send *something*
- 'user-agent': this.opts.userAgent ||
- `pacote/${pacoteVersion} node/${process.version}`,
- ...(this.opts.headers || {}),
- 'pacote-version': pacoteVersion,
- 'pacote-req-type': 'packument',
- 'pacote-pkg-id': `registry:${this.spec.name}`,
- accept: this.fullMetadata ? fullDoc : corgiDoc,
- }
- }
-
- packument () {
- // npm-registry-fetch the packument
- // set the appropriate header for corgis if fullMetadata isn't set
- // return the res.json() promise
- return fetch(this.packumentUrl, {
- ...this.opts,
- headers: this[_headers](),
- spec: this.spec,
- // never check integrity for packuments themselves
- integrity: null,
- }).then(res => res.json().then(packument => {
- packument._cached = res.headers.has('x-local-cache')
- packument._contentLength = +res.headers.get('content-length')
- return packument
- })).catch(er => {
- if (er.code === 'E404' && !this.fullMetadata) {
- // possible that corgis are not supported by this registry
- this.fullMetadata = true
- return this.packument()
- }
- throw er
- })
- }
-
- manifest () {
- if (this.package)
- return Promise.resolve(this.package)
-
- return this.packument()
- .then(packument => pickManifest(packument, this.spec.fetchSpec, {
- ...this.opts,
- defaultTag: this.defaultTag,
- before: this.before,
- }) /* XXX add ETARGET and E403 revalidation of cached packuments here */)
- .then(mani => {
- // add _resolved and _integrity from dist object
- const { dist } = mani
- if (dist) {
- this.resolved = mani._resolved = dist.tarball
- mani._from = this.from
- const distIntegrity = dist.integrity ? ssri.parse(dist.integrity)
- : dist.shasum ? ssri.fromHex(dist.shasum, 'sha1', {...this.opts})
- : null
- if (distIntegrity) {
- if (!this.integrity)
- this.integrity = distIntegrity
- else if (!this.integrity.match(distIntegrity)) {
- // only bork if they have algos in common.
- // otherwise we end up breaking if we have saved a sha512
- // previously for the tarball, but the manifest only
- // provides a sha1, which is possible for older publishes.
- // Otherwise, this is almost certainly a case of holding it
- // wrong, and will result in weird or insecure behavior
- // later on when building package tree.
- for (const algo of Object.keys(this.integrity)) {
- if (distIntegrity[algo]) {
- throw Object.assign(new Error(
- `Integrity checksum failed when using ${algo}: `+
- `wanted ${this.integrity} but got ${distIntegrity}.`
- ), { code: 'EINTEGRITY' })
- }
- }
- // made it this far, the integrity is worthwhile. accept it.
- // the setter here will take care of merging it into what we
- // already had.
- this.integrity = distIntegrity
- }
- }
- }
- if (this.integrity)
- mani._integrity = String(this.integrity)
- return this.package = mani
- })
- }
-
- [_tarballFromResolved] () {
- // we use a RemoteFetcher to get the actual tarball stream
- return new RemoteFetcher(this.resolved, {
- ...this.opts,
- resolved: this.resolved,
- pkgid: `registry:${this.spec.name}@${this.resolved}`,
- })[_tarballFromResolved]()
- }
-
- get types () {
- return [
- 'tag',
- 'version',
- 'range',
- ]
- }
-}
-module.exports = RegistryFetcher
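
The removed `packument()` above is the interesting part: it requests the abbreviated "corgi" install document first, then retries with full JSON if the registry 404s on that content type. A minimal standalone sketch of the same flow, assuming only `npm-registry-fetch`; the `getPackument` helper name and option plumbing here are illustrative, not pacote API:

```js
const fetch = require('npm-registry-fetch')

const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
const fullDoc = 'application/json'

const getPackument = async (name, opts = {}) => {
  const registry = fetch.pickRegistry(name, opts)
  const url = registry.replace(/\/*$/, '/') + name.replace('/', '%2f')
  try {
    const res = await fetch(url, {
      ...opts,
      headers: { accept: opts.fullMetadata ? fullDoc : corgiDoc },
      integrity: null, // never check integrity for packuments themselves
    })
    return await res.json()
  } catch (er) {
    // the registry may not serve corgis at all; retry asking for full JSON
    if (er.code === 'E404' && !opts.fullMetadata)
      return getPackument(name, { ...opts, fullMetadata: true })
    throw er
  }
}
```
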
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/remote.js b/node_modules/libnpmpack/node_modules/pacote/lib/remote.js
deleted file mode 100644
index 81f14efbc..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/lib/remote.js
+++ /dev/null
@@ -1,72 +0,0 @@
-const Fetcher = require('./fetcher.js')
-const FileFetcher = require('./file.js')
-const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
-const pacoteVersion = require('../package.json').version
-const fetch = require('npm-registry-fetch')
-const ssri = require('ssri')
-const Minipass = require('minipass')
-
-const _headers = Symbol('_headers')
-class RemoteFetcher extends Fetcher {
- constructor (spec, opts) {
- super(spec, opts)
- this.resolved = this.spec.fetchSpec
- // nam is a fermented pork sausage that is good to eat
- const nameat = this.spec.name ? `${this.spec.name}@` : ''
- this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}`
- }
-
- [_tarballFromResolved] () {
- const stream = new Minipass()
- const fetchOpts = {
- ...this.opts,
- headers: this[_headers](),
- spec: this.spec,
- integrity: this.integrity,
- algorithms: [ this.pickIntegrityAlgorithm() ],
- }
- fetch(this.resolved, fetchOpts).then(res => {
- const hash = res.headers.get('x-local-cache-hash')
- if (hash) {
- this.integrity = decodeURIComponent(hash)
- }
-
- res.body.on('error',
- /* istanbul ignore next - exceedingly rare and hard to simulate */
- er => stream.emit('error', er)
- ).pipe(stream)
- }).catch(er => stream.emit('error', er))
-
- return stream
- }
-
- [_headers] () {
- return {
- // npm will override this, but ensure that we always send *something*
- 'user-agent': this.opts.userAgent ||
- `pacote/${pacoteVersion} node/${process.version}`,
- ...(this.opts.headers || {}),
- 'pacote-version': pacoteVersion,
- 'pacote-req-type': 'tarball',
- 'pacote-pkg-id': this.pkgid,
- ...(this.integrity ? { 'pacote-integrity': String(this.integrity) }
- : {}),
- ...(this.opts.headers || {}),
- }
- }
-
- get types () {
- return ['remote']
- }
-
- // getting a packument and/or manifest is the same as with a file: spec.
- // unpack the tarball stream, and then read from the package.json file.
- packument () {
- return FileFetcher.prototype.packument.apply(this)
- }
-
- manifest () {
- return FileFetcher.prototype.manifest.apply(this)
- }
-}
-module.exports = RemoteFetcher
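
Note the shape of `[_tarballFromResolved]` above: it must return a stream synchronously, so it hands back a `Minipass` passthrough and wires the HTTP response into it once the fetch settles, forwarding errors from both stages. The pattern in isolation, a sketch minus the pacote headers and integrity handling:

```js
const fetch = require('npm-registry-fetch')
const Minipass = require('minipass')

const tarballStream = (url, opts = {}) => {
  const stream = new Minipass()
  fetch(url, opts).then(res => {
    // forward body errors, then pipe the payload through
    res.body.on('error', er => stream.emit('error', er)).pipe(stream)
  }).catch(er => stream.emit('error', er))
  return stream
}
```
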
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/util/cache-dir.js b/node_modules/libnpmpack/node_modules/pacote/lib/util/cache-dir.js
deleted file mode 100644
index d5c0bf28f..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/lib/util/cache-dir.js
+++ /dev/null
@@ -1,12 +0,0 @@
-const os = require('os')
-const {resolve} = require('path')
-
-module.exports = (fakePlatform = false) => {
- const temp = os.tmpdir()
- const uidOrPid = process.getuid ? process.getuid() : process.pid
- const home = os.homedir() || resolve(temp, 'npm-' + uidOrPid)
- const platform = fakePlatform || process.platform
- const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm'
- const cacheRoot = (platform === 'win32' && process.env.APPDATA) || home
- return resolve(cacheRoot, cacheExtra)
-}
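
For orientation, the defaults the removed helper computes look like this (sample paths; actual values depend on `$HOME`, `%APPDATA%`, and the uid):

```js
const cacheDir = require('./cache-dir.js')

cacheDir()        // e.g. '/home/alice/.npm' on Linux, '/Users/alice/.npm' on macOS
cacheDir('win32') // e.g. 'C:\\Users\\alice\\AppData\\Roaming\\npm-cache' via %APPDATA%
```
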
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/util/is-package-bin.js b/node_modules/libnpmpack/node_modules/pacote/lib/util/is-package-bin.js
deleted file mode 100644
index 35cf06427..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/lib/util/is-package-bin.js
+++ /dev/null
@@ -1,24 +0,0 @@
-// Function to determine whether a path is in the package.bin set.
-// Used to prevent issues when people publish a package from a
-// windows machine, and then install with --no-bin-links.
-//
-// Note: this is not possible in remote or file fetchers, since
-// we don't have the manifest until AFTER we've unpacked. But the
-// main use case is registry fetching with git a distant second,
-// so that's an acceptable edge case to not handle.
-
-const binObj = (name, bin) =>
- typeof bin === 'string' ? { [name]: bin } : bin
-
-const hasBin = (pkg, path) => {
- const bin = binObj(pkg.name, pkg.bin)
- const p = path.replace(/^[^\\\/]*\//, '')
- for (const [k, v] of Object.entries(bin)) {
- if (v === p)
- return true
- }
- return false
-}
-
-module.exports = (pkg, path) =>
- pkg && pkg.bin ? hasBin(pkg, path) : false
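
A quick illustration of the removed check: tarball entry paths carry a leading `package/` segment, which is stripped before comparing against the normalized `bin` object.

```js
const isPackageBin = require('./is-package-bin.js')

const pkg = { name: 'foo', bin: 'cli.js' } // binObj normalizes this to { foo: 'cli.js' }
isPackageBin(pkg, 'package/cli.js')   // true
isPackageBin(pkg, 'package/index.js') // false
isPackageBin(null, 'package/cli.js')  // false: missing pkg or bin short-circuits
```
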
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/util/npm.js b/node_modules/libnpmpack/node_modules/pacote/lib/util/npm.js
deleted file mode 100644
index 293695525..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/lib/util/npm.js
+++ /dev/null
@@ -1,9 +0,0 @@
-// run an npm command
-const spawn = require('@npmcli/promise-spawn')
-
-module.exports = (npmBin, npmCommand, cwd, extra) => {
- const isJS = npmBin.endsWith('.js')
- const cmd = isJS ? process.execPath : npmBin
- const args = (isJS ? [npmBin] : []).concat(npmCommand)
- return spawn(cmd, args, { cwd, stdioString: true }, extra)
-}
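
Hedged usage of the removed runner (arguments illustrative): when `npmBin` points at a `.js` file it is executed with the current `node` binary, otherwise it is spawned directly, and `@npmcli/promise-spawn` resolves with the string output thanks to `stdioString: true`.

```js
const runNpm = require('./npm.js')

// e.g. run `npm pack` inside a package directory
runNpm(process.env.npm_execpath || 'npm', ['pack'], '/path/to/pkg', {})
  .then(({ stdout }) => console.log(stdout.trim()))
```
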
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/util/proc-log.js b/node_modules/libnpmpack/node_modules/pacote/lib/util/proc-log.js
deleted file mode 100644
index b2bdd9dc9..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/lib/util/proc-log.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// default logger.
-// emits 'log' events on the process
-const LEVELS = [
- 'notice',
- 'error',
- 'warn',
- 'info',
- 'verbose',
- 'http',
- 'silly',
- 'pause',
- 'resume'
-]
-
-const log = level => (...args) => process.emit('log', level, ...args)
-
-const logger = {}
-for (const level of LEVELS) {
- logger[level] = log(level)
-}
-module.exports = logger
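
The removed logger only emits; nothing is printed unless a consumer subscribes on the process object. Minimal pairing:

```js
const log = require('./proc-log.js')

process.on('log', (level, ...args) => console.error(`[${level}]`, ...args))
log.warn('registry', 'falling back to full metadata')
// prints: [warn] registry falling back to full metadata
```
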
diff --git a/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/CHANGELOG.md b/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/CHANGELOG.md
deleted file mode 100644
index 390a3a3c4..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/CHANGELOG.md
+++ /dev/null
@@ -1,52 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-## [8.0.0](https://github.com/npm/npm-package-arg/compare/v7.0.0...v8.0.0) (2019-12-15)
-
-
-### ⚠ BREAKING CHANGES
-
-* Dropping support for node 6 and 8. It'll probably
-still work on those versions, but they are no longer supported or
-tested, since npm v7 is moving away from them.
-
-* drop support for node 6 and 8 ([ba85e68](https://github.com/npm/npm-package-arg/commit/ba85e68555d6270f672c3d59da17672f744d0376))
-
-<a name="7.0.0"></a>
-# [7.0.0](https://github.com/npm/npm-package-arg/compare/v6.1.1...v7.0.0) (2019-11-11)
-
-
-### deps
-
-* bump hosted-git-info to 3.0.2 ([68a4fc3](https://github.com/npm/npm-package-arg/commit/68a4fc3)), closes [npm/hosted-git-info#38 (comment)](https://github.com/npm/hosted-git-info/pull/38#issuecomment-520243803)
-
-
-### BREAKING CHANGES
-
-* this drops support for ancient node versions.
-
-
-
-<a name="6.1.1"></a>
-## [6.1.1](https://github.com/npm/npm-package-arg/compare/v6.1.0...v6.1.1) (2019-08-21)
-
-
-### Bug Fixes
-
-* preserve drive letter on windows git file:// urls ([3909203](https://github.com/npm/npm-package-arg/commit/3909203))
-
-
-
-<a name="6.1.0"></a>
-# [6.1.0](https://github.com/npm/npm-package-arg/compare/v6.0.0...v6.1.0) (2018-04-10)
-
-
-### Bug Fixes
-
-* **git:** Fix gitRange for git+ssh for private git ([#33](https://github.com/npm/npm-package-arg/issues/33)) ([647a0b3](https://github.com/npm/npm-package-arg/commit/647a0b3))
-
-
-### Features
-
-* **alias:** add `npm:` registry alias spec ([#34](https://github.com/npm/npm-package-arg/issues/34)) ([ab99f8e](https://github.com/npm/npm-package-arg/commit/ab99f8e))
diff --git a/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/README.md b/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/README.md
deleted file mode 100644
index 847341b21..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/README.md
+++ /dev/null
@@ -1,83 +0,0 @@
-# npm-package-arg
-
-[![Build Status](https://travis-ci.org/npm/npm-package-arg.svg?branch=master)](https://travis-ci.org/npm/npm-package-arg)
-
-Parses package name and specifier passed to commands like `npm install` or
-`npm cache add`, or as found in `package.json` dependency sections.
-
-## EXAMPLES
-
-```javascript
-var assert = require("assert")
-var npa = require("npm-package-arg")
-
-// Pass in the descriptor, and it'll return an object
-try {
- var parsed = npa("@bar/foo@1.2")
-} catch (ex) {
- …
-}
-```
-
-## USING
-
-`var npa = require('npm-package-arg')`
-
-### var result = npa(*arg*[, *where*])
-
-* *arg* - a string that you might pass to `npm install`, like:
-`foo@1.2`, `@bar/foo@1.2`, `foo@user/foo`, `http://x.com/foo.tgz`,
-`git+https://github.com/user/foo`, `bitbucket:user/foo`, `foo.tar.gz`,
-`../foo/bar/` or `bar`. If the *arg* you provide doesn't have a specifier
-part, e.g. `foo`, then the specifier will default to `latest`.
-* *where* - Optionally the path to resolve file paths relative to. Defaults to `process.cwd()`
-
-**Throws** if the package name is invalid, a dist-tag is invalid or a URL's protocol is not supported.
-
-### var result = npa.resolve(*name*, *spec*[, *where*])
-
-* *name* - The name of the module you want to install. For example: `foo` or `@bar/foo`.
-* *spec* - The specifier indicating where and how you can get this module. Something like:
-`1.2`, `^1.7.17`, `http://x.com/foo.tgz`, `git+https://github.com/user/foo`,
-`bitbucket:user/foo`, `file:foo.tar.gz` or `file:../foo/bar/`. If not
-included then the default is `latest`.
-* *where* - Optionally the path to resolve file paths relative to. Defaults to `process.cwd()`
-
-**Throws** if the package name is invalid, a dist-tag is invalid or a URL's protocol is not supported.
-
-## RESULT OBJECT
-
-The objects that are returned by npm-package-arg contain the following
-keys:
-
-* `type` - One of the following strings:
- * `git` - A git repo
- * `tag` - A tagged version, like `"foo@latest"`
- * `version` - A specific version number, like `"foo@1.2.3"`
- * `range` - A version range, like `"foo@2.x"`
- * `file` - A local `.tar.gz`, `.tar` or `.tgz` file.
- * `directory` - A local directory.
- * `remote` - An http url (presumably to a tgz)
-* `registry` - If true this specifier refers to a resource hosted on a
- registry. This is true for `tag`, `version` and `range` types.
-* `name` - If known, the `name` field expected in the resulting pkg.
-* `scope` - If a name is something like `@org/module` then the `scope`
- field will be set to `@org`. If it doesn't have a scoped name, then
- scope is `null`.
-* `escapedName` - A version of `name` escaped to match the npm scoped packages
- specification. Mostly used when making requests against a registry. When
- `name` is `null`, `escapedName` will also be `null`.
-* `rawSpec` - The specifier part that was parsed out in calls to `npa(arg)`,
-  or the value of `spec` in calls to `npa.resolve(name, spec)`.
-* `saveSpec` - The normalized specifier, for saving to package.json files.
- `null` for registry dependencies.
-* `fetchSpec` - The version of the specifier to be used to fetch this
- resource. `null` for shortcuts to hosted git dependencies as there isn't
- just one URL to try with them.
-* `gitRange` - If set, this is a semver specifier to match against git tags.
-* `gitCommittish` - If set, this is the specific committish to use with a git dependency.
-* `hosted` - If `from === 'hosted'` then this will be a `hosted-git-info`
- object. This property is not included when serializing the object as
- JSON.
-* `raw` - The original un-modified string that was provided. If called as
- `npa.resolve(name, spec)` then this will be `name + '@' + spec`.
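
To make the key list above concrete, a plain registry range parses to roughly the following (abridged; exact fields vary by npa version):

```js
var npa = require('npm-package-arg')

npa('foo@^1.2.3')
// => Result {
//      type: 'range',
//      registry: true,
//      name: 'foo',
//      escapedName: 'foo',
//      rawSpec: '^1.2.3',
//      saveSpec: null,
//      fetchSpec: '^1.2.3',
//      raw: 'foo@^1.2.3'
//    }
```
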
diff --git a/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/npa.js b/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/npa.js
deleted file mode 100644
index d18168b75..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/npa.js
+++ /dev/null
@@ -1,301 +0,0 @@
-'use strict'
-module.exports = npa
-module.exports.resolve = resolve
-module.exports.Result = Result
-
-let url
-let HostedGit
-let semver
-let path_
-function path () {
- if (!path_) path_ = require('path')
- return path_
-}
-let validatePackageName
-let os
-
-const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
-const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
-const isURL = /^(?:git[+])?[a-z]+:/i
-const isFilename = /[.](?:tgz|tar.gz|tar)$/i
-
-function npa (arg, where) {
- let name
- let spec
- if (typeof arg === 'object') {
- if (arg instanceof Result && (!where || where === arg.where)) {
- return arg
- } else if (arg.name && arg.rawSpec) {
- return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
- } else {
- return npa(arg.raw, where || arg.where)
- }
- }
- const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@')
- const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
- if (isURL.test(arg)) {
- spec = arg
- } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) {
- spec = arg
- } else if (nameEndsAt > 0) {
- name = namePart
- spec = arg.slice(nameEndsAt + 1)
- } else {
- if (!validatePackageName) validatePackageName = require('validate-npm-package-name')
- const valid = validatePackageName(arg)
- if (valid.validForOldPackages) {
- name = arg
- } else {
- spec = arg
- }
- }
- return resolve(name, spec, where, arg)
-}
-
-const isFilespec = isWindows ? /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
-
-function resolve (name, spec, where, arg) {
- const res = new Result({
- raw: arg,
- name: name,
- rawSpec: spec,
- fromArgument: arg != null
- })
-
- if (name) res.setName(name)
-
- if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) {
- return fromFile(res, where)
- } else if (spec && /^npm:/i.test(spec)) {
- return fromAlias(res, where)
- }
- if (!HostedGit) HostedGit = require('hosted-git-info')
- const hosted = HostedGit.fromUrl(spec, { noGitPlus: true, noCommittish: true })
- if (hosted) {
- return fromHostedGit(res, hosted)
- } else if (spec && isURL.test(spec)) {
- return fromURL(res)
- } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) {
- return fromFile(res, where)
- } else {
- return fromRegistry(res)
- }
-}
-
-function invalidPackageName (name, valid) {
- const err = new Error(`Invalid package name "${name}": ${valid.errors.join('; ')}`)
- err.code = 'EINVALIDPACKAGENAME'
- return err
-}
-function invalidTagName (name) {
- const err = new Error(`Invalid tag name "${name}": Tags may not have any characters that encodeURIComponent encodes.`)
- err.code = 'EINVALIDTAGNAME'
- return err
-}
-
-function Result (opts) {
- this.type = opts.type
- this.registry = opts.registry
- this.where = opts.where
- if (opts.raw == null) {
- this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec
- } else {
- this.raw = opts.raw
- }
- this.name = undefined
- this.escapedName = undefined
- this.scope = undefined
- this.rawSpec = opts.rawSpec == null ? '' : opts.rawSpec
- this.saveSpec = opts.saveSpec
- this.fetchSpec = opts.fetchSpec
- if (opts.name) this.setName(opts.name)
- this.gitRange = opts.gitRange
- this.gitCommittish = opts.gitCommittish
- this.hosted = opts.hosted
-}
-
-Result.prototype.setName = function (name) {
- if (!validatePackageName) validatePackageName = require('validate-npm-package-name')
- const valid = validatePackageName(name)
- if (!valid.validForOldPackages) {
- throw invalidPackageName(name, valid)
- }
- this.name = name
- this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
- // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
- this.escapedName = name.replace('/', '%2f')
- return this
-}
-
-Result.prototype.toString = function () {
- const full = []
- if (this.name != null && this.name !== '') full.push(this.name)
- const spec = this.saveSpec || this.fetchSpec || this.rawSpec
- if (spec != null && spec !== '') full.push(spec)
- return full.length ? full.join('@') : this.raw
-}
-
-Result.prototype.toJSON = function () {
- const result = Object.assign({}, this)
- delete result.hosted
- return result
-}
-
-function setGitCommittish (res, committish) {
- if (committish != null && committish.length >= 7 && committish.slice(0, 7) === 'semver:') {
- res.gitRange = decodeURIComponent(committish.slice(7))
- res.gitCommittish = null
- } else {
- res.gitCommittish = committish === '' ? null : committish
- }
- return res
-}
-
-const isAbsolutePath = /^[/]|^[A-Za-z]:/
-
-function resolvePath (where, spec) {
- if (isAbsolutePath.test(spec)) return spec
- return path().resolve(where, spec)
-}
-
-function isAbsolute (dir) {
- if (dir[0] === '/') return true
- if (/^[A-Za-z]:/.test(dir)) return true
- return false
-}
-
-function fromFile (res, where) {
- if (!where) where = process.cwd()
- res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory'
- res.where = where
-
- const spec = res.rawSpec.replace(/\\/g, '/')
- .replace(/^file:[/]*([A-Za-z]:)/, '$1') // drive name paths on windows
- .replace(/^file:(?:[/]*([~./]))?/, '$1')
- if (/^~[/]/.test(spec)) {
- // this is needed for windows and for file:~/foo/bar
- if (!os) os = require('os')
- res.fetchSpec = resolvePath(os.homedir(), spec.slice(2))
- res.saveSpec = 'file:' + spec
- } else {
- res.fetchSpec = resolvePath(where, spec)
- if (isAbsolute(spec)) {
- res.saveSpec = 'file:' + spec
- } else {
- res.saveSpec = 'file:' + path().relative(where, res.fetchSpec)
- }
- }
- return res
-}
-
-function fromHostedGit (res, hosted) {
- res.type = 'git'
- res.hosted = hosted
- res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
- res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
- return setGitCommittish(res, hosted.committish)
-}
-
-function unsupportedURLType (protocol, spec) {
- const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
- err.code = 'EUNSUPPORTEDPROTOCOL'
- return err
-}
-
-function matchGitScp (spec) {
- // git ssh specifiers are overloaded to also use scp-style git
- // specifiers, so we have to parse those out and treat them special.
- // They are NOT true URIs, so we can't hand them to `url.parse`.
- //
- // This regex looks for things that look like:
- // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
- //
- // ...and various combinations. The username in the beginning is *required*.
- const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
- return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && {
- fetchSpec: matched[1],
- gitCommittish: matched[2] == null ? null : matched[2]
- }
-}
-
-function fromURL (res) {
- if (!url) url = require('url')
- const urlparse = url.parse(res.rawSpec)
- res.saveSpec = res.rawSpec
- // check the protocol, and then see if it's git or not
- switch (urlparse.protocol) {
- case 'git:':
- case 'git+http:':
- case 'git+https:':
- case 'git+rsync:':
- case 'git+ftp:':
- case 'git+file:':
- case 'git+ssh:':
- res.type = 'git'
- const match = urlparse.protocol === 'git+ssh:' && matchGitScp(res.rawSpec)
- if (match) {
- setGitCommittish(res, match.gitCommittish)
- res.fetchSpec = match.fetchSpec
- } else {
- setGitCommittish(res, urlparse.hash != null ? urlparse.hash.slice(1) : '')
- urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '')
- if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) {
- // keep the drive letter : on windows file paths
- urlparse.host += ':'
- urlparse.hostname += ':'
- }
- delete urlparse.hash
- res.fetchSpec = url.format(urlparse)
- }
- break
- case 'http:':
- case 'https:':
- res.type = 'remote'
- res.fetchSpec = res.saveSpec
- break
-
- default:
- throw unsupportedURLType(urlparse.protocol, res.rawSpec)
- }
-
- return res
-}
-
-function fromAlias (res, where) {
- const subSpec = npa(res.rawSpec.substr(4), where)
- if (subSpec.type === 'alias') {
- throw new Error('nested aliases not supported')
- }
- if (!subSpec.registry) {
- throw new Error('aliases only work for registry deps')
- }
- res.subSpec = subSpec
- res.registry = true
- res.type = 'alias'
- res.saveSpec = null
- res.fetchSpec = null
- return res
-}
-
-function fromRegistry (res) {
- res.registry = true
- const spec = res.rawSpec === '' ? 'latest' : res.rawSpec
- // no save spec for registry components as we save based on the fetched
- // version, not on the argument so this can't compute that.
- res.saveSpec = null
- res.fetchSpec = spec
- if (!semver) semver = require('semver')
- const version = semver.valid(spec, true)
- const range = semver.validRange(spec, true)
- if (version) {
- res.type = 'version'
- } else if (range) {
- res.type = 'range'
- } else {
- if (encodeURIComponent(spec) !== spec) {
- throw invalidTagName(spec)
- }
- res.type = 'tag'
- }
- return res
-}
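
For orientation, a few dispatch outcomes of the `resolve()` flow above, one per branch:

```js
npa('../foo/bar/')           // type: 'directory'
npa('foo.tgz')               // type: 'file'
npa('github:user/foo')       // type: 'git'; hosted shortcut, so fetchSpec is null
npa('https://x.com/foo.tgz') // type: 'remote'
npa('foo@npm:bar@^1.0.0')    // type: 'alias', with a registry subSpec for bar@^1.0.0
```
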
diff --git a/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/package.json b/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/package.json
deleted file mode 100644
index 71728e253..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/package.json
+++ /dev/null
@@ -1,69 +0,0 @@
-{
- "_from": "npm-package-arg@^8.0.1",
- "_id": "npm-package-arg@8.0.1",
- "_inBundle": false,
- "_integrity": "sha512-/h5Fm6a/exByzFSTm7jAyHbgOqErl9qSNJDQF32Si/ZzgwT2TERVxRxn3Jurw1wflgyVVAxnFR4fRHPM7y1ClQ==",
- "_location": "/libnpmpack/pacote/npm-package-arg",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "npm-package-arg@^8.0.1",
- "name": "npm-package-arg",
- "escapedName": "npm-package-arg",
- "rawSpec": "^8.0.1",
- "saveSpec": null,
- "fetchSpec": "^8.0.1"
- },
- "_requiredBy": [
- "/libnpmpack/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-8.0.1.tgz",
- "_shasum": "9d76f8d7667b2373ffda60bb801a27ef71e3e270",
- "_spec": "npm-package-arg@^8.0.1",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libnpmpack/node_modules/pacote",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/npm/npm-package-arg/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "hosted-git-info": "^3.0.2",
- "semver": "^7.0.0",
- "validate-npm-package-name": "^3.0.0"
- },
- "deprecated": false,
- "description": "Parse the things that can be arguments to `npm install`",
- "devDependencies": {
- "tap": "^14.10.2"
- },
- "directories": {
- "test": "test"
- },
- "engines": {
- "node": ">=10"
- },
- "files": [
- "npa.js"
- ],
- "homepage": "https://github.com/npm/npm-package-arg",
- "license": "ISC",
- "main": "npa.js",
- "name": "npm-package-arg",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/npm-package-arg.git"
- },
- "scripts": {
- "postversion": "npm publish",
- "prepublishOnly": "git push origin --follow-tags",
- "preversion": "npm test",
- "snap": "tap",
- "test": "tap"
- },
- "version": "8.0.1"
-}
diff --git a/node_modules/libnpmpack/node_modules/pacote/package.json b/node_modules/libnpmpack/node_modules/pacote/package.json
deleted file mode 100644
index 2e53042d9..000000000
--- a/node_modules/libnpmpack/node_modules/pacote/package.json
+++ /dev/null
@@ -1,107 +0,0 @@
-{
- "_from": "pacote@^11.1.4",
- "_id": "pacote@11.1.4",
- "_inBundle": false,
- "_integrity": "sha512-eUGJvSSpWFZKn3z8gig/HgnBmUl6gIWByIIaHzSyEr3tOWX0w8tFEADXtpu8HGv5E0ShCeTP6enRq8iHKCHSvw==",
- "_location": "/libnpmpack/pacote",
- "_phantomChildren": {
- "hosted-git-info": "3.0.2",
- "semver": "7.1.3",
- "validate-npm-package-name": "3.0.0"
- },
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "pacote@^11.1.4",
- "name": "pacote",
- "escapedName": "pacote",
- "rawSpec": "^11.1.4",
- "saveSpec": null,
- "fetchSpec": "^11.1.4"
- },
- "_requiredBy": [
- "/libnpmpack"
- ],
- "_resolved": "https://registry.npmjs.org/pacote/-/pacote-11.1.4.tgz",
- "_shasum": "5529a453c59881b7f059da8af6903b0f79c124b2",
- "_spec": "pacote@^11.1.4",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libnpmpack",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "https://izs.me"
- },
- "bin": {
- "pacote": "lib/bin.js"
- },
- "bugs": {
- "url": "https://github.com/npm/pacote/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "@npmcli/git": "^2.0.1",
- "@npmcli/installed-package-contents": "^1.0.5",
- "@npmcli/promise-spawn": "^1.1.0",
- "cacache": "^15.0.0",
- "chownr": "^1.1.4",
- "fs-minipass": "^2.1.0",
- "infer-owner": "^1.0.4",
- "lru-cache": "^5.1.1",
- "minipass": "^3.0.1",
- "minipass-fetch": "^1.2.1",
- "mkdirp": "^1.0.3",
- "npm-package-arg": "^8.0.1",
- "npm-packlist": "^2.1.0",
- "npm-pick-manifest": "^6.0.0",
- "npm-registry-fetch": "^8.0.0",
- "promise-inflight": "^1.0.1",
- "promise-retry": "^1.1.1",
- "read-package-json-fast": "^1.1.3",
- "rimraf": "^2.7.1",
- "semver": "^7.1.3",
- "ssri": "^8.0.0",
- "tar": "^6.0.1",
- "which": "^2.0.2"
- },
- "deprecated": false,
- "description": "JavaScript package downloader",
- "devDependencies": {
- "mutate-fs": "^2.1.1",
- "npm-registry-mock": "^1.3.1",
- "require-inject": "^1.4.4",
- "tap": "^14.10.6"
- },
- "engines": {
- "node": ">=10"
- },
- "files": [
- "lib/**/*.js"
- ],
- "homepage": "https://github.com/npm/pacote#readme",
- "keywords": [
- "packages",
- "npm",
- "git"
- ],
- "license": "ISC",
- "main": "lib/index.js",
- "name": "pacote",
- "repository": {
- "type": "git",
- "url": "git+ssh://git@github.com/npm/pacote.git"
- },
- "scripts": {
- "postpublish": "git push origin --follow-tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "snap": "tap",
- "test": "tap"
- },
- "tap": {
- "timeout": 300,
- "check-coverage": true,
- "coverage-map": "map.js",
- "esm": false
- },
- "version": "11.1.4"
-}
diff --git a/node_modules/libnpmpack/node_modules/rimraf/bin.js b/node_modules/libnpmpack/node_modules/rimraf/bin.js
deleted file mode 100755
index 0d1e17be7..000000000
--- a/node_modules/libnpmpack/node_modules/rimraf/bin.js
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env node
-
-var rimraf = require('./')
-
-var help = false
-var dashdash = false
-var noglob = false
-var args = process.argv.slice(2).filter(function(arg) {
- if (dashdash)
- return !!arg
- else if (arg === '--')
- dashdash = true
- else if (arg === '--no-glob' || arg === '-G')
- noglob = true
- else if (arg === '--glob' || arg === '-g')
- noglob = false
- else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/))
- help = true
- else
- return !!arg
-})
-
-if (help || args.length === 0) {
- // If they didn't ask for help, then this is not a "success"
- var log = help ? console.log : console.error
- log('Usage: rimraf <path> [<path> ...]')
- log('')
- log(' Deletes all files and folders at "path" recursively.')
- log('')
- log('Options:')
- log('')
- log(' -h, --help Display this usage info')
- log(' -G, --no-glob Do not expand glob patterns in arguments')
- log(' -g, --glob Expand glob patterns in arguments (default)')
- process.exit(help ? 0 : 1)
-} else
- go(0)
-
-function go (n) {
- if (n >= args.length)
- return
- var options = {}
- if (noglob)
- options = { glob: false }
- rimraf(args[n], options, function (er) {
- if (er)
- throw er
- go(n+1)
- })
-}
diff --git a/node_modules/libnpmpack/package.json b/node_modules/libnpmpack/package.json
index fb599a81c..e21a0d85c 100644
--- a/node_modules/libnpmpack/package.json
+++ b/node_modules/libnpmpack/package.json
@@ -1,42 +1,19 @@
{
- "_from": "libnpmpack@2.0.0",
+ "_from": "libnpmpack@^2.0.0",
"_id": "libnpmpack@2.0.0",
"_inBundle": false,
"_integrity": "sha512-w4wB8ZQUceUANUEiSYqi4nHlqFxhzLXWmhVbDt3NlyZVkmblTokR4xK9VfihLXJhdARQxeILx/HxReeqas1KZQ==",
"_location": "/libnpmpack",
- "_phantomChildren": {
- "@npmcli/git": "2.0.1",
- "@npmcli/installed-package-contents": "1.0.5",
- "@npmcli/promise-spawn": "1.1.0",
- "cacache": "15.0.0",
- "fs-minipass": "2.1.0",
- "glob": "7.1.4",
- "hosted-git-info": "3.0.2",
- "infer-owner": "1.0.4",
- "lru-cache": "5.1.1",
- "minipass": "3.1.1",
- "minipass-fetch": "1.2.1",
- "npm-packlist": "2.1.0",
- "npm-pick-manifest": "6.0.0",
- "npm-registry-fetch": "8.0.0",
- "promise-inflight": "1.0.1",
- "promise-retry": "1.1.1",
- "read-package-json-fast": "1.1.3",
- "semver": "7.1.3",
- "ssri": "8.0.0",
- "tar": "6.0.1",
- "validate-npm-package-name": "3.0.0",
- "which": "2.0.2"
- },
+ "_phantomChildren": {},
"_requested": {
- "type": "version",
+ "type": "range",
"registry": true,
- "raw": "libnpmpack@2.0.0",
+ "raw": "libnpmpack@^2.0.0",
"name": "libnpmpack",
"escapedName": "libnpmpack",
- "rawSpec": "2.0.0",
+ "rawSpec": "^2.0.0",
"saveSpec": null,
- "fetchSpec": "2.0.0"
+ "fetchSpec": "^2.0.0"
},
"_requiredBy": [
"#USER",
@@ -45,8 +22,8 @@
],
"_resolved": "https://registry.npmjs.org/libnpmpack/-/libnpmpack-2.0.0.tgz",
"_shasum": "6b7f050f8dde248776c92495147f623dc711a221",
- "_spec": "libnpmpack@2.0.0",
- "_where": "/Users/claudiahdz/npm/cli",
+ "_spec": "libnpmpack@^2.0.0",
+ "_where": "/Users/isaacs/dev/npm/cli",
"author": {
"name": "npm Inc.",
"email": "support@npmjs.com"
diff --git a/node_modules/mkdirp-infer-owner/node_modules/mkdirp/lib/opts-arg.js b/node_modules/mkdirp-infer-owner/node_modules/mkdirp/lib/opts-arg.js
index 488bd44c3..2fa4833fa 100644
--- a/node_modules/mkdirp-infer-owner/node_modules/mkdirp/lib/opts-arg.js
+++ b/node_modules/mkdirp-infer-owner/node_modules/mkdirp/lib/opts-arg.js
@@ -2,9 +2,9 @@ const { promisify } = require('util')
const fs = require('fs')
const optsArg = opts => {
if (!opts)
- opts = { mode: 0o777 & (~process.umask()), fs }
+ opts = { mode: 0o777, fs }
else if (typeof opts === 'object')
- opts = { mode: 0o777 & (~process.umask()), fs, ...opts }
+ opts = { mode: 0o777, fs, ...opts }
else if (typeof opts === 'number')
opts = { mode: opts, fs }
else if (typeof opts === 'string')
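
The change above stops pre-masking the default mode with `process.umask()`: the kernel applies the umask at `mkdir(2)` time anyway, and this landed around the same time Node began deprecating the no-argument `process.umask()` call. Observable behavior, as a sketch against mkdirp@1.0.4:

```js
const mkdirp = require('mkdirp')

mkdirp.sync('/tmp/a/b/c')                  // created with mode 0o777, reduced by the OS umask
mkdirp.sync('/tmp/a/b/d', { mode: 0o700 }) // explicit modes are passed to fs as-is
```
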
diff --git a/node_modules/mkdirp-infer-owner/node_modules/mkdirp/package.json b/node_modules/mkdirp-infer-owner/node_modules/mkdirp/package.json
index ab0da9b27..4e9f70cc8 100644
--- a/node_modules/mkdirp-infer-owner/node_modules/mkdirp/package.json
+++ b/node_modules/mkdirp-infer-owner/node_modules/mkdirp/package.json
@@ -1,8 +1,8 @@
{
"_from": "mkdirp@^1.0.3",
- "_id": "mkdirp@1.0.3",
+ "_id": "mkdirp@1.0.4",
"_inBundle": false,
- "_integrity": "sha512-6uCP4Qc0sWsgMLy1EOqqS/3rjDHOEnsStVr/4vtAIK2Y5i2kA7lFFejYrpIyiN9w0pYf4ckeCYT9f1r1P9KX5g==",
+ "_integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
"_location": "/mkdirp-infer-owner/mkdirp",
"_phantomChildren": {},
"_requested": {
@@ -18,10 +18,10 @@
"_requiredBy": [
"/mkdirp-infer-owner"
],
- "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.3.tgz",
- "_shasum": "4cf2e30ad45959dddea53ad97d518b6c8205e1ea",
+ "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
+ "_shasum": "3eb5ed62622756d79a5f0e2a221dfebad75c2f7e",
"_spec": "mkdirp@^1.0.3",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/mkdirp-infer-owner",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/mkdirp-infer-owner",
"bin": {
"mkdirp": "bin/cmd.js"
},
@@ -33,7 +33,7 @@
"description": "Recursively mkdir, like `mkdir -p`",
"devDependencies": {
"require-inject": "^1.4.4",
- "tap": "^14.10.6"
+ "tap": "^14.10.7"
},
"engines": {
"node": ">=10"
@@ -71,5 +71,5 @@
"check-coverage": true,
"coverage-map": "map.js"
},
- "version": "1.0.3"
+ "version": "1.0.4"
}
diff --git a/node_modules/move-file/index.d.ts b/node_modules/move-file/index.d.ts
new file mode 100644
index 000000000..45616d0f4
--- /dev/null
+++ b/node_modules/move-file/index.d.ts
@@ -0,0 +1,41 @@
+declare namespace moveFile {
+ interface Options {
+ /**
+ Overwrite existing destination file.
+
+ @default true
+ */
+ readonly overwrite?: boolean;
+ }
+}
+
+declare const moveFile: {
+ /**
+ Move a file.
+
+ @param source - File you want to move.
+ @param destination - Where you want the file moved.
+ @returns A `Promise` that resolves when the file has been moved.
+
+ @example
+ ```
+ import moveFile = require('move-file');
+
+ (async () => {
+ await moveFile('source/unicorn.png', 'destination/unicorn.png');
+ console.log('The file has been moved');
+ })();
+ ```
+ */
+ (source: string, destination: string, options?: moveFile.Options): Promise<void>;
+
+ /**
+ Move a file synchronously.
+
+ @param source - File you want to move.
+ @param destination - Where you want the file moved.
+ */
+ sync(source: string, destination: string, options?: moveFile.Options): void;
+};
+
+export = moveFile;
diff --git a/node_modules/move-file/index.js b/node_modules/move-file/index.js
new file mode 100644
index 000000000..09e31acaa
--- /dev/null
+++ b/node_modules/move-file/index.js
@@ -0,0 +1,62 @@
+'use strict';
+const path = require('path');
+const fs = require('fs');
+const pathExists = require('path-exists');
+
+const fsP = fs.promises;
+
+module.exports = async (source, destination, options) => {
+ if (!source || !destination) {
+ throw new TypeError('`source` and `destination` file required');
+ }
+
+ options = {
+ overwrite: true,
+ ...options
+ };
+
+ if (!options.overwrite && await pathExists(destination)) {
+ throw new Error(`The destination file exists: ${destination}`);
+ }
+
+ await fsP.mkdir(path.dirname(destination), {recursive: true});
+
+ try {
+ await fsP.rename(source, destination);
+ } catch (error) {
+ if (error.code === 'EXDEV') {
+ await fsP.copyFile(source, destination);
+ await fsP.unlink(source);
+ } else {
+ throw error;
+ }
+ }
+};
+
+module.exports.sync = (source, destination, options) => {
+ if (!source || !destination) {
+ throw new TypeError('`source` and `destination` file required');
+ }
+
+ options = {
+ overwrite: true,
+ ...options
+ };
+
+ if (!options.overwrite && fs.existsSync(destination)) {
+ throw new Error(`The destination file exists: ${destination}`);
+ }
+
+ fs.mkdirSync(path.dirname(destination), {recursive: true});
+
+ try {
+ fs.renameSync(source, destination);
+ } catch (error) {
+ if (error.code === 'EXDEV') {
+ fs.copyFileSync(source, destination);
+ fs.unlinkSync(source);
+ } else {
+ throw error;
+ }
+ }
+};
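
The new module's two notable behaviors, the `overwrite` guard and the `EXDEV` copy-and-unlink fallback, in use (paths illustrative):

```js
const moveFile = require('move-file');

(async () => {
  // a cross-device move falls back to copyFile + unlink transparently
  await moveFile('/tmp/unicorn.png', '/mnt/usb/unicorn.png');

  // refuse to clobber an existing destination
  try {
    await moveFile('a.txt', 'b.txt', {overwrite: false});
  } catch (error) {
    console.error(error.message); //=> 'The destination file exists: b.txt'
  }
})();
```
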
diff --git a/node_modules/move-file/license b/node_modules/move-file/license
new file mode 100644
index 000000000..fa7ceba3e
--- /dev/null
+++ b/node_modules/move-file/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/move-file/node_modules/path-exists/index.d.ts b/node_modules/move-file/node_modules/path-exists/index.d.ts
new file mode 100644
index 000000000..54b7ab8f4
--- /dev/null
+++ b/node_modules/move-file/node_modules/path-exists/index.d.ts
@@ -0,0 +1,28 @@
+declare const pathExists: {
+ /**
+ Check if a path exists.
+
+ @returns Whether the path exists.
+
+ @example
+ ```
+ // foo.ts
+ import pathExists = require('path-exists');
+
+ (async () => {
+ console.log(await pathExists('foo.ts'));
+ //=> true
+ })();
+ ```
+ */
+ (path: string): Promise<boolean>;
+
+ /**
+ Synchronously check if a path exists.
+
+ @returns Whether the path exists.
+ */
+ sync(path: string): boolean;
+};
+
+export = pathExists;
diff --git a/node_modules/move-file/node_modules/path-exists/index.js b/node_modules/move-file/node_modules/path-exists/index.js
new file mode 100644
index 000000000..1943921b7
--- /dev/null
+++ b/node_modules/move-file/node_modules/path-exists/index.js
@@ -0,0 +1,23 @@
+'use strict';
+const fs = require('fs');
+const {promisify} = require('util');
+
+const pAccess = promisify(fs.access);
+
+module.exports = async path => {
+ try {
+ await pAccess(path);
+ return true;
+ } catch (_) {
+ return false;
+ }
+};
+
+module.exports.sync = path => {
+ try {
+ fs.accessSync(path);
+ return true;
+ } catch (_) {
+ return false;
+ }
+};
diff --git a/node_modules/move-file/node_modules/path-exists/license b/node_modules/move-file/node_modules/path-exists/license
new file mode 100644
index 000000000..e7af2f771
--- /dev/null
+++ b/node_modules/move-file/node_modules/path-exists/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/move-file/node_modules/path-exists/package.json b/node_modules/move-file/node_modules/path-exists/package.json
new file mode 100644
index 000000000..4c6d0b13d
--- /dev/null
+++ b/node_modules/move-file/node_modules/path-exists/package.json
@@ -0,0 +1,71 @@
+{
+ "_from": "path-exists@^4.0.0",
+ "_id": "path-exists@4.0.0",
+ "_inBundle": false,
+ "_integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
+ "_location": "/move-file/path-exists",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "path-exists@^4.0.0",
+ "name": "path-exists",
+ "escapedName": "path-exists",
+ "rawSpec": "^4.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^4.0.0"
+ },
+ "_requiredBy": [
+ "/move-file"
+ ],
+ "_resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+ "_shasum": "513bdbe2d3b95d7762e8c1137efa195c6c61b5b3",
+ "_spec": "path-exists@^4.0.0",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/move-file",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "sindresorhus.com"
+ },
+ "bugs": {
+ "url": "https://github.com/sindresorhus/path-exists/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "Check if a path exists",
+ "devDependencies": {
+ "ava": "^1.4.1",
+ "tsd": "^0.7.2",
+ "xo": "^0.24.0"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "files": [
+ "index.js",
+ "index.d.ts"
+ ],
+ "homepage": "https://github.com/sindresorhus/path-exists#readme",
+ "keywords": [
+ "path",
+ "exists",
+ "exist",
+ "file",
+ "filepath",
+ "fs",
+ "filesystem",
+ "file-system",
+ "access",
+ "stat"
+ ],
+ "license": "MIT",
+ "name": "path-exists",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sindresorhus/path-exists.git"
+ },
+ "scripts": {
+ "test": "xo && ava && tsd"
+ },
+ "version": "4.0.0"
+}
diff --git a/node_modules/move-file/node_modules/path-exists/readme.md b/node_modules/move-file/node_modules/path-exists/readme.md
new file mode 100644
index 000000000..81f984545
--- /dev/null
+++ b/node_modules/move-file/node_modules/path-exists/readme.md
@@ -0,0 +1,52 @@
+# path-exists [![Build Status](https://travis-ci.org/sindresorhus/path-exists.svg?branch=master)](https://travis-ci.org/sindresorhus/path-exists)
+
+> Check if a path exists
+
+NOTE: `fs.existsSync` has been un-deprecated in Node.js since 6.8.0. If you only need to check synchronously, this module is not needed.
+
+While [`fs.exists()`](https://nodejs.org/api/fs.html#fs_fs_exists_path_callback) is being [deprecated](https://github.com/iojs/io.js/issues/103), there's still a genuine use-case of being able to check if a path exists for other purposes than doing IO with it.
+
+Never use this before handling a file though:
+
+> In particular, checking if a file exists before opening it is an anti-pattern that leaves you vulnerable to race conditions: another process may remove the file between the calls to `fs.exists()` and `fs.open()`. Just open the file and handle the error when it's not there.
+
+
+## Install
+
+```
+$ npm install path-exists
+```
+
+
+## Usage
+
+```js
+// foo.js
+const pathExists = require('path-exists');
+
+(async () => {
+ console.log(await pathExists('foo.js'));
+ //=> true
+})();
+```
+
+
+## API
+
+### pathExists(path)
+
+Returns a `Promise<boolean>` of whether the path exists.
+
+### pathExists.sync(path)
+
+Returns a `boolean` of whether the path exists.
+
+
+## Related
+
+- [path-exists-cli](https://github.com/sindresorhus/path-exists-cli) - CLI for this module
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/move-file/package.json b/node_modules/move-file/package.json
new file mode 100644
index 000000000..35a25410e
--- /dev/null
+++ b/node_modules/move-file/package.json
@@ -0,0 +1,82 @@
+{
+ "_from": "move-file@^2.0.0",
+ "_id": "move-file@2.0.0",
+ "_inBundle": false,
+ "_integrity": "sha512-cdkdhNCgbP5dvS4tlGxZbD+nloio9GIimP57EjqFhwLcMjnU+XJKAZzlmg/TN/AK1LuNAdTSvm3CPPP4Xkv0iQ==",
+ "_location": "/move-file",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "move-file@^2.0.0",
+ "name": "move-file",
+ "escapedName": "move-file",
+ "rawSpec": "^2.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^2.0.0"
+ },
+ "_requiredBy": [
+ "/@npmcli/arborist/cacache"
+ ],
+ "_resolved": "https://registry.npmjs.org/move-file/-/move-file-2.0.0.tgz",
+ "_shasum": "83ffa309b5d7f69d518b28e1333e2ffadf331e3e",
+ "_spec": "move-file@^2.0.0",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/arborist/node_modules/cacache",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "https://sindresorhus.com"
+ },
+ "bugs": {
+ "url": "https://github.com/sindresorhus/move-file/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "path-exists": "^4.0.0"
+ },
+ "deprecated": false,
+ "description": "Move a file - Even works across devices",
+ "devDependencies": {
+ "ava": "^1.4.1",
+ "sinon": "^9.0.2",
+ "temp-write": "^4.0.0",
+ "tempy": "^0.5.0",
+ "tsd": "^0.11.0",
+ "xo": "^0.24.0"
+ },
+ "engines": {
+ "node": ">=10.17"
+ },
+ "files": [
+ "index.js",
+ "index.d.ts"
+ ],
+ "funding": "https://github.com/sponsors/sindresorhus",
+ "homepage": "https://github.com/sindresorhus/move-file#readme",
+ "keywords": [
+ "move",
+ "file",
+ "mv",
+ "fs",
+ "stream",
+ "file-system",
+ "ncp",
+ "fast",
+ "quick",
+ "data",
+ "content",
+ "contents",
+ "devices",
+ "partitions"
+ ],
+ "license": "MIT",
+ "name": "move-file",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/sindresorhus/move-file.git"
+ },
+ "scripts": {
+ "test": "xo && ava && tsd"
+ },
+ "version": "2.0.0"
+}
diff --git a/node_modules/move-file/readme.md b/node_modules/move-file/readme.md
new file mode 100644
index 000000000..9f7625f4d
--- /dev/null
+++ b/node_modules/move-file/readme.md
@@ -0,0 +1,67 @@
+# move-file [![Build Status](https://travis-ci.com/sindresorhus/move-file.svg?branch=master)](https://travis-ci.com/sindresorhus/move-file)
+
+> Move a file
+
+The built-in [`fs.rename()`](https://nodejs.org/api/fs.html#fs_fs_rename_oldpath_newpath_callback) is just a JavaScript wrapper for the C `rename(2)` function, which doesn't support moving files across partitions or devices. This module is what you would have expected `fs.rename()` to be.
+
+## Highlights
+
+- Promise API.
+- Supports moving a file across partitions and devices.
+- Optionally prevent overwriting an existing file.
+- Creates non-existent destination directories for you.
+
+## Install
+
+```
+$ npm install move-file
+```
+
+## Usage
+
+```js
+const moveFile = require('move-file');
+
+(async () => {
+ await moveFile('source/unicorn.png', 'destination/unicorn.png');
+ console.log('The file has been moved');
+})();
+```
+
+## API
+
+### moveFile(source, destination, options?)
+
+Returns a `Promise` that resolves when the file has been moved.
+
+### moveFile.sync(source, destination, options?)
+
+#### source
+
+Type: `string`
+
+File you want to move.
+
+#### destination
+
+Type: `string`
+
+Where you want the file moved.
+
+#### options
+
+Type: `object`
+
+##### overwrite
+
+Type: `boolean`\
+Default: `true`
+
+Overwrite existing destination file.
+
+## Related
+
+- [move-file-cli](https://github.com/sindresorhus/move-file-cli) - CLI for this module
+- [cp-file](https://github.com/sindresorhus/cp-file) - Copy a file
+- [cpy](https://github.com/sindresorhus/cpy) - Copy files
+- [make-dir](https://github.com/sindresorhus/make-dir) - Make a directory and its parents if needed
diff --git a/node_modules/ms/index.js b/node_modules/ms/index.js
index 72297501f..c4498bcc2 100644
--- a/node_modules/ms/index.js
+++ b/node_modules/ms/index.js
@@ -28,7 +28,7 @@ module.exports = function(val, options) {
var type = typeof val;
if (type === 'string' && val.length > 0) {
return parse(val);
- } else if (type === 'number' && isNaN(val) === false) {
+ } else if (type === 'number' && isFinite(val)) {
return options.long ? fmtLong(val) : fmtShort(val);
}
throw new Error(
@@ -50,7 +50,7 @@ function parse(str) {
if (str.length > 100) {
return;
}
- var match = /^((?:\d+)?\-?\d?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
+ var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
str
);
if (!match) {
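
Concretely, the two fixes above: `isFinite()` rejects `Infinity`, which `isNaN(val) === false` let through to the formatters, and moving the sign to the front of the regex stops malformed strings like `'5-.5s'` from matching and parsing as `5000`.

```js
var ms = require('ms');

ms('5-.5s');  // undefined: the sign may now only lead the number
ms(Infinity); // throws: rejected by isFinite()
ms('-.5h');   // -1800000: negative decimals still parse
```
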
diff --git a/node_modules/ms/package.json b/node_modules/ms/package.json
index 9a4424a8b..35a4127e8 100644
--- a/node_modules/ms/package.json
+++ b/node_modules/ms/package.json
@@ -1,27 +1,35 @@
{
- "_from": "ms@^2.0.0",
- "_id": "ms@2.1.1",
+ "_from": "ms@2.1.2",
+ "_id": "ms@2.1.2",
"_inBundle": false,
- "_integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==",
+ "_integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
"_location": "/ms",
"_phantomChildren": {},
"_requested": {
- "type": "range",
+ "type": "version",
"registry": true,
- "raw": "ms@^2.0.0",
+ "raw": "ms@2.1.2",
"name": "ms",
"escapedName": "ms",
- "rawSpec": "^2.0.0",
+ "rawSpec": "2.1.2",
"saveSpec": null,
- "fetchSpec": "^2.0.0"
+ "fetchSpec": "2.1.2"
},
"_requiredBy": [
- "/humanize-ms"
+ "#USER",
+ "/",
+ "/@babel/traverse/debug",
+ "/cloudant-follow/debug",
+ "/humanize-ms",
+ "/istanbul-lib-source-maps/debug",
+ "/nano/debug",
+ "/npm-registry-fetch/debug",
+ "/tap-mocha-reporter/debug"
],
- "_resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz",
- "_shasum": "30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a",
- "_spec": "ms@^2.0.0",
- "_where": "/Users/rebecca/code/npm/node_modules/humanize-ms",
+ "_resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "_shasum": "d09d1f357b443f493382a8eb3ccd183872ae6009",
+ "_spec": "ms@2.1.2",
+ "_where": "/Users/isaacs/dev/npm/cli",
"bugs": {
"url": "https://github.com/zeit/ms/issues"
},
@@ -65,5 +73,5 @@
"precommit": "lint-staged",
"test": "mocha tests.js"
},
- "version": "2.1.1"
+ "version": "2.1.2"
}
diff --git a/node_modules/ms/readme.md b/node_modules/ms/readme.md
index bb767293a..9a1996b17 100644
--- a/node_modules/ms/readme.md
+++ b/node_modules/ms/readme.md
@@ -1,7 +1,7 @@
# ms
[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms)
-[![Slack Channel](http://zeit-slackin.now.sh/badge.svg)](https://zeit.chat/)
+[![Join the community on Spectrum](https://withspectrum.github.io/badge/badge.svg)](https://spectrum.chat/zeit)
Use this package to easily convert various time formats to milliseconds.
diff --git a/node_modules/npm-package-arg/npa.js b/node_modules/npm-package-arg/npa.js
index bf2c17cfd..d18168b75 100644
--- a/node_modules/npm-package-arg/npa.js
+++ b/node_modules/npm-package-arg/npa.js
@@ -12,7 +12,7 @@ function path () {
return path_
}
let validatePackageName
-let osenv
+let os
const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
@@ -70,7 +70,7 @@ function resolve (name, spec, where, arg) {
return fromAlias(res, where)
}
if (!HostedGit) HostedGit = require('hosted-git-info')
- const hosted = HostedGit.fromUrl(spec, {noGitPlus: true, noCommittish: true})
+ const hosted = HostedGit.fromUrl(spec, { noGitPlus: true, noCommittish: true })
if (hosted) {
return fromHostedGit(res, hosted)
} else if (spec && isURL.test(spec)) {
@@ -174,8 +174,8 @@ function fromFile (res, where) {
.replace(/^file:(?:[/]*([~./]))?/, '$1')
if (/^~[/]/.test(spec)) {
// this is needed for windows and for file:~/foo/bar
- if (!osenv) osenv = require('osenv')
- res.fetchSpec = resolvePath(osenv.home(), spec.slice(2))
+ if (!os) os = require('os')
+ res.fetchSpec = resolvePath(os.homedir(), spec.slice(2))
res.saveSpec = 'file:' + spec
} else {
res.fetchSpec = resolvePath(where, spec)
@@ -191,7 +191,7 @@ function fromFile (res, where) {
function fromHostedGit (res, hosted) {
res.type = 'git'
res.hosted = hosted
- res.saveSpec = hosted.toString({noGitPlus: false, noCommittish: false})
+ res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
return setGitCommittish(res, hosted.committish)
}
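
The `osenv` removal above works because Node's built-in `os` module covers the one call npa needed for `file:~/...` specs:

```js
// before: osenv.home()  (external package wrapping the same lookup)
// after:  os.homedir()  (built into Node core)
const os = require('os')
const home = os.homedir()
```
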
diff --git a/node_modules/npm-package-arg/package.json b/node_modules/npm-package-arg/package.json
index 492cf8c80..29f30fc1d 100644
--- a/node_modules/npm-package-arg/package.json
+++ b/node_modules/npm-package-arg/package.json
@@ -1,8 +1,8 @@
{
"_from": "npm-package-arg@latest",
- "_id": "npm-package-arg@8.0.0",
+ "_id": "npm-package-arg@8.0.1",
"_inBundle": false,
- "_integrity": "sha512-JgqZHCEUKvhX7EehLNdySiuB227a0QYra9wpZOkW+jvwsRYKkce7y5Rv2axkxScJU1EP+L32jT2PLhQz7IWHlw==",
+ "_integrity": "sha512-/h5Fm6a/exByzFSTm7jAyHbgOqErl9qSNJDQF32Si/ZzgwT2TERVxRxn3Jurw1wflgyVVAxnFR4fRHPM7y1ClQ==",
"_location": "/npm-package-arg",
"_phantomChildren": {},
"_requested": {
@@ -19,13 +19,15 @@
"#USER",
"/",
"/@npmcli/arborist",
+ "/@npmcli/arborist/npm-pick-manifest",
+ "/libnpmaccess",
+ "/libnpmpack",
+ "/libnpmpublish",
"/npm-pick-manifest",
- "/npm-registry-fetch",
- "/pacote",
- "/pacote/npm-registry-fetch"
+ "/npm-registry-fetch"
],
- "_resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-8.0.0.tgz",
- "_shasum": "b519989b569efffa1fa050fb22a3c3c322d4bcc5",
+ "_resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-8.0.1.tgz",
+ "_shasum": "9d76f8d7667b2373ffda60bb801a27ef71e3e270",
"_spec": "npm-package-arg@latest",
"_where": "/Users/isaacs/dev/npm/cli",
"author": {
@@ -39,15 +41,12 @@
"bundleDependencies": false,
"dependencies": {
"hosted-git-info": "^3.0.2",
- "osenv": "^0.1.5",
"semver": "^7.0.0",
"validate-npm-package-name": "^3.0.0"
},
"deprecated": false,
"description": "Parse the things that can be arguments to `npm install`",
"devDependencies": {
- "standard": "^11.0.1",
- "standard-version": "^7.0.1",
"tap": "^14.10.2"
},
"directories": {
@@ -68,11 +67,11 @@
"url": "git+https://github.com/npm/npm-package-arg.git"
},
"scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "posttest": "standard",
- "prerelease": "npm t",
- "release": "standard-version -s",
- "test": "tap --100 -J --coverage test/*.js"
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags",
+ "preversion": "npm test",
+ "snap": "tap",
+ "test": "tap"
},
- "version": "8.0.0"
+ "version": "8.0.1"
}
diff --git a/node_modules/p-map/index.d.ts b/node_modules/p-map/index.d.ts
index 8f796295d..bcbe0afce 100644
--- a/node_modules/p-map/index.d.ts
+++ b/node_modules/p-map/index.d.ts
@@ -3,6 +3,8 @@ declare namespace pMap {
/**
Number of concurrently pending promises returned by `mapper`.
+ Must be an integer from 1 and up or `Infinity`.
+
@default Infinity
*/
readonly concurrency?: number;
@@ -21,7 +23,7 @@ declare namespace pMap {
@param element - Iterated element.
@param index - Index of the element in the source array.
*/
- type Mapper<Element = any, NewElement = any> = (
+ type Mapper<Element = any, NewElement = unknown> = (
element: Element,
index: number
) => NewElement | Promise<NewElement>;
diff --git a/node_modules/p-map/index.js b/node_modules/p-map/index.js
index 5a8aeb256..c11a28512 100644
--- a/node_modules/p-map/index.js
+++ b/node_modules/p-map/index.js
@@ -14,11 +14,11 @@ module.exports = async (
throw new TypeError('Mapper function is required');
}
- if (!(typeof concurrency === 'number' && concurrency >= 1)) {
- throw new TypeError(`Expected \`concurrency\` to be a number from 1 and up, got \`${concurrency}\` (${typeof concurrency})`);
+ if (!((Number.isSafeInteger(concurrency) || concurrency === Infinity) && concurrency >= 1)) {
+ throw new TypeError(`Expected \`concurrency\` to be an integer from 1 and up or \`Infinity\`, got \`${concurrency}\` (${typeof concurrency})`);
}
- const ret = [];
+ const result = [];
const errors = [];
const iterator = iterable[Symbol.iterator]();
let isRejected = false;
@@ -32,7 +32,7 @@ module.exports = async (
}
const nextItem = iterator.next();
- const i = currentIndex;
+ const index = currentIndex;
currentIndex++;
if (nextItem.done) {
@@ -42,7 +42,7 @@ module.exports = async (
if (!stopOnError && errors.length !== 0) {
reject(new AggregateError(errors));
} else {
- resolve(ret);
+ resolve(result);
}
}
@@ -54,7 +54,7 @@ module.exports = async (
(async () => {
try {
const element = await nextItem.value;
- ret[i] = await mapper(element, i);
+ result[index] = await mapper(element, index);
resolvingCount--;
next();
} catch (error) {
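
With the stricter guard above, fractional concurrency values now reject where they previously passed the bare `>= 1` check; `Infinity` stays explicitly allowed. A sketch, with a placeholder mapper and inputs:

```js
const pMap = require('p-map');

const double = async n => n * 2;

(async () => {
  await pMap([1, 2, 3], double, {concurrency: 2});        // ok
  await pMap([1, 2, 3], double, {concurrency: Infinity}); // ok
  try {
    await pMap([1, 2, 3], double, {concurrency: 2.5});
  } catch (error) {
    // Expected `concurrency` to be an integer from 1 and up or `Infinity`, got `2.5` (number)
    console.error(error.message);
  }
})();
```
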
diff --git a/node_modules/p-map/license b/node_modules/p-map/license
index e7af2f771..fa7ceba3e 100644
--- a/node_modules/p-map/license
+++ b/node_modules/p-map/license
@@ -1,6 +1,6 @@
MIT License
-Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
+Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
diff --git a/node_modules/p-map/package.json b/node_modules/p-map/package.json
index 21aff08b2..5d61ae6a1 100644
--- a/node_modules/p-map/package.json
+++ b/node_modules/p-map/package.json
@@ -1,31 +1,31 @@
{
- "_from": "p-map@^3.0.0",
- "_id": "p-map@3.0.0",
+ "_from": "p-map@^4.0.0",
+ "_id": "p-map@4.0.0",
"_inBundle": false,
- "_integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==",
+ "_integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==",
"_location": "/p-map",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
- "raw": "p-map@^3.0.0",
+ "raw": "p-map@^4.0.0",
"name": "p-map",
"escapedName": "p-map",
- "rawSpec": "^3.0.0",
+ "rawSpec": "^4.0.0",
"saveSpec": null,
- "fetchSpec": "^3.0.0"
+ "fetchSpec": "^4.0.0"
},
"_requiredBy": [
- "/npm-registry-fetch/cacache"
+ "/cacache"
],
- "_resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz",
- "_shasum": "d704d9af8a2ba684e2600d9a215983d4141a979d",
- "_spec": "p-map@^3.0.0",
- "_where": "/Users/mperrotte/npminc/cli/node_modules/npm-registry-fetch/node_modules/cacache",
+ "_resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz",
+ "_shasum": "bb2f95a5eda2ec168ec9274e06a747c3e2904d2b",
+ "_spec": "p-map@^4.0.0",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/cacache",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
- "url": "sindresorhus.com"
+ "url": "https://sindresorhus.com"
},
"bugs": {
"url": "https://github.com/sindresorhus/p-map/issues"
@@ -42,16 +42,17 @@
"in-range": "^2.0.0",
"random-int": "^2.0.0",
"time-span": "^3.1.0",
- "tsd": "^0.7.2",
- "xo": "^0.24.0"
+ "tsd": "^0.7.4",
+ "xo": "^0.27.2"
},
"engines": {
- "node": ">=8"
+ "node": ">=10"
},
"files": [
"index.js",
"index.d.ts"
],
+ "funding": "https://github.com/sponsors/sindresorhus",
"homepage": "https://github.com/sindresorhus/p-map#readme",
"keywords": [
"promise",
@@ -80,5 +81,5 @@
"scripts": {
"test": "xo && ava && tsd"
},
- "version": "3.0.0"
+ "version": "4.0.0"
}
diff --git a/node_modules/p-map/readme.md b/node_modules/p-map/readme.md
index 4941cb773..53a371574 100644
--- a/node_modules/p-map/readme.md
+++ b/node_modules/p-map/readme.md
@@ -4,14 +4,12 @@
Useful when you need to run promise-returning & async functions multiple times with different inputs concurrently.
-
## Install
```
$ npm install p-map
```
-
## Usage
```js
@@ -61,19 +59,24 @@ Type: `object`
##### concurrency
-Type: `number`<br>
-Default: `Infinity`<br>
+Type: `number` (Integer)\
+Default: `Infinity`\
Minimum: `1`
Number of concurrently pending promises returned by `mapper`.
##### stopOnError
-Type: `boolean`<br>
+Type: `boolean`\
Default: `true`
When set to `false`, instead of stopping when a promise rejects, it will wait for all the promises to settle and then reject with an [aggregated error](https://github.com/sindresorhus/aggregate-error) containing all the errors from the rejected promises.
+## p-map for enterprise
+
+Available as part of the Tidelift Subscription.
+
+The maintainers of p-map and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-p-map?utm_source=npm-p-map&utm_medium=referral&utm_campaign=enterprise&utm_term=repo)
## Related
@@ -84,16 +87,3 @@ When set to `false`, instead of stopping when a promise rejects, it will wait fo
- [p-map-series](https://github.com/sindresorhus/p-map-series) - Map over promises serially
- [p-queue](https://github.com/sindresorhus/p-queue) - Promise queue with concurrency control
- [More…](https://github.com/sindresorhus/promise-fun)
-
-
----
-
-<div align="center">
- <b>
- <a href="https://tidelift.com/subscription/pkg/npm-p-map?utm_source=npm-p-map&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
- </b>
- <br>
- <sub>
- Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
- </sub>
-</div>
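
For context on the `concurrency` and `stopOnError` options documented in the readme diff above, here is a usage sketch of p-map@4; the mapper and inputs are illustrative, not from the diff:

```js
const pMap = require('p-map');

const names = ['a', 'b', 'c'];

// Hypothetical mapper for illustration; any async function works.
const mapper = async (name, index) => `${index}:${name.toUpperCase()}`;

(async () => {
  // At most 2 mapper calls are pending at once. With stopOnError: false,
  // every input is attempted and any failures surface together as a
  // single AggregateError instead of rejecting on the first one.
  const result = await pMap(names, mapper, { concurrency: 2, stopOnError: false });
  console.log(result); // [ '0:A', '1:B', '2:C' ]
})();
```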
diff --git a/node_modules/pacote/lib/dir.js b/node_modules/pacote/lib/dir.js
index 1eb30df9b..16660d982 100644
--- a/node_modules/pacote/lib/dir.js
+++ b/node_modules/pacote/lib/dir.js
@@ -4,12 +4,14 @@ const cacache = require('cacache')
const Minipass = require('minipass')
const { promisify } = require('util')
const readPackageJson = require('read-package-json-fast')
-const npm = require('./util/npm.js')
const isPackageBin = require('./util/is-package-bin.js')
const packlist = require('npm-packlist')
const tar = require('tar')
const _prepareDir = Symbol('_prepareDir')
const _tarcOpts = Symbol('_tarcOpts')
+const { resolve } = require('path')
+
+const runScript = require('@npmcli/run-script')
const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
class DirFetcher extends Fetcher {
@@ -25,19 +27,24 @@ class DirFetcher extends Fetcher {
[_prepareDir] () {
return this.manifest().then(mani => {
- if (!mani.scripts || !mani.scripts.prepare)
- return
+ if (!mani.scripts || !mani.scripts.prepare)
+ return
- // we *only* run prepare.
- // pre/post-pack is run by the npm CLI for publish and pack,
- // but this function is *also* run when installing git deps
- return npm(
- this.npmBin,
- [].concat(this.npmRunCmd).concat('prepare').concat(this.npmCliConfig),
- this.resolved,
- 'directory preparation failed'
- )
+ // we *only* run prepare.
+ // pre/post-pack is run by the npm CLI for publish and pack,
+ // but this function is *also* run when installing git deps
+ return runScript({
+ pkg: mani,
+ event: 'prepare',
+ path: this.resolved,
+ stdioString: true,
+ env: {
+ npm_package_resolved: this.resolved,
+ npm_package_integrity: this.integrity,
+ npm_package_json: resolve(this.resolved, 'package.json'),
+ },
+ })
})

}
[_tarballFromResolved] () {
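
The rewritten `[_prepareDir]` above delegates to `@npmcli/run-script` instead of shelling out to `npm run prepare`. A hedged sketch of the same call shape outside pacote; the path and manifest here are made up for illustration:

```js
const runScript = require('@npmcli/run-script');
const { resolve } = require('path');

// Illustrative values; pacote passes its real manifest and resolved path.
const path = '/tmp/some-git-dep';
const pkg = {
  name: 'demo',
  version: '1.0.0',
  scripts: { prepare: 'node -e "console.log(42)"' },
};

// Runs only the `prepare` event, mirroring the hunk above: pre/post-pack
// belong to the npm CLI, while pacote runs prepare itself because the same
// path is hit when installing git deps.
runScript({
  pkg,
  event: 'prepare',
  path,
  stdioString: true,
  env: {
    npm_package_json: resolve(path, 'package.json'),
  },
}).then(({ stdout }) => console.log(stdout));
```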
diff --git a/node_modules/pacote/lib/fetcher.js b/node_modules/pacote/lib/fetcher.js
index 59e5c6de5..01058acb2 100644
--- a/node_modules/pacote/lib/fetcher.js
+++ b/node_modules/pacote/lib/fetcher.js
@@ -13,7 +13,6 @@ const procLog = require('./util/proc-log.js')
const retry = require('promise-retry')
const fsm = require('fs-minipass')
const cacache = require('cacache')
-const osenv = require('osenv')
const isPackageBin = require('./util/is-package-bin.js')
const getContents = require('@npmcli/installed-package-contents')
@@ -87,13 +86,13 @@ class FetcherBase {
this.fullMetadata = this.before ? true : !!opts.fullMetadata
this.defaultTag = opts.defaultTag || 'latest'
- this.registry = opts.registry || 'https://registry.npmjs.org'
+ this.registry = (opts.registry || 'https://registry.npmjs.org')
+ .replace(/\/+$/, '')
// command to run 'prepare' scripts on directories and git dirs
// To use pacote with yarn, for example, set npmBin to 'yarn'
- // and npmRunCmd to [], and npmCliConfig with yarn's equivalents.
+ // and npmInstallCmd to ['add'], and npmCliConfig with yarn's equivalents.
this.npmBin = opts.npmBin || 'npm'
- this.npmRunCmd = opts.npmRunCmd || 'run'
// command to install deps for preparing
this.npmInstallCmd = opts.npmInstallCmd || [
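
Two things happen in the fetcher.js hunk above: the `registry` option is now normalized to drop trailing slashes (so `this.registry + '/'` comparisons elsewhere stay consistent), and `npmRunCmd` is gone because prepare now goes through `@npmcli/run-script`, leaving only the install command configurable. A hedged sketch of the comment's own yarn recipe; the spec, destination, and config flag are illustrative:

```js
const pacote = require('pacote');

// Per the updated comment in fetcher.js: swap the binary and the install
// subcommand. npmCliConfig values would be yarn's equivalents.
pacote.extract('github:isaacs/minimatch', '/tmp/minimatch-checkout', {
  npmBin: 'yarn',
  npmInstallCmd: ['add'],
  npmCliConfig: ['--registry=https://registry.npmjs.org'],
}).catch(console.error);
```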
diff --git a/node_modules/pacote/lib/git.js b/node_modules/pacote/lib/git.js
index ade670b96..81f7ca256 100644
--- a/node_modules/pacote/lib/git.js
+++ b/node_modules/pacote/lib/git.js
@@ -3,7 +3,7 @@ const FileFetcher = require('./file.js')
const RemoteFetcher = require('./remote.js')
const DirFetcher = require('./dir.js')
const hashre = /^[a-f0-9]{40}$/
-const git = require('./util/git/')
+const git = require('@npmcli/git')
const pickManifest = require('npm-pick-manifest')
const npa = require('npm-package-arg')
const url = require('url')
@@ -154,7 +154,7 @@ class GitFetcher extends Fetcher {
this.npmBin,
[].concat(this.npmInstallCmd).concat(this.npmCliConfig),
dir,
- 'git dep preparation failed'
+ { message: 'git dep preparation failed' }
)
})
}
@@ -245,7 +245,8 @@ class GitFetcher extends Fetcher {
}
[_cloneRepo] (repo, ref, tmp) {
- return git.clone(repo, ref, tmp, this.spec, this.opts)
+ const { opts, spec } = this
+ return git.clone(repo, ref, tmp, { ...opts, spec })
}
manifest () {
diff --git a/node_modules/pacote/lib/remote.js b/node_modules/pacote/lib/remote.js
index 81f14efbc..91f6eb59d 100644
--- a/node_modules/pacote/lib/remote.js
+++ b/node_modules/pacote/lib/remote.js
@@ -5,12 +5,17 @@ const pacoteVersion = require('../package.json').version
const fetch = require('npm-registry-fetch')
const ssri = require('ssri')
const Minipass = require('minipass')
+// The default registry URL is a string of great magic.
+const magic = /^https?:\/\/registry\.npmjs\.org\//
const _headers = Symbol('_headers')
class RemoteFetcher extends Fetcher {
constructor (spec, opts) {
super(spec, opts)
this.resolved = this.spec.fetchSpec
+ if (magic.test(this.resolved) && !magic.test(this.registry + '/'))
+ this.resolved = this.resolved.replace(magic, this.registry + '/')
+
// nam is a fermented pork sausage that is good to eat
const nameat = this.spec.name ? `${this.spec.name}@` : ''
this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}`
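
The remote.js hunk above re-homes a resolved tarball URL that points at the default public registry whenever a different registry is configured. A small standalone sketch of that substitution, using the same regexp as the diff:

```js
// Mirrors the `magic` rewrite in remote.js: if the saved tarball URL is on
// registry.npmjs.org but the configured registry is elsewhere, re-home it.
const magic = /^https?:\/\/registry\.npmjs\.org\//;

const rehome = (resolved, registry) =>
  magic.test(resolved) && !magic.test(registry + '/')
    ? resolved.replace(magic, registry + '/')
    : resolved;

console.log(rehome(
  'https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz',
  'https://registry.example.com'
));
// => https://registry.example.com/p-map/-/p-map-4.0.0.tgz
```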
diff --git a/node_modules/pacote/lib/util/cache-dir.js b/node_modules/pacote/lib/util/cache-dir.js
index 890fe3165..d5c0bf28f 100644
--- a/node_modules/pacote/lib/util/cache-dir.js
+++ b/node_modules/pacote/lib/util/cache-dir.js
@@ -1,10 +1,10 @@
-const osenv = require('osenv')
+const os = require('os')
const {resolve} = require('path')
module.exports = (fakePlatform = false) => {
- const temp = osenv.tmpdir()
+ const temp = os.tmpdir()
const uidOrPid = process.getuid ? process.getuid() : process.pid
- const home = osenv.home() || resolve(temp, 'npm-' + uidOrPid)
+ const home = os.homedir() || resolve(temp, 'npm-' + uidOrPid)
const platform = fakePlatform || process.platform
const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm'
const cacheRoot = (platform === 'win32' && process.env.APPDATA) || home
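
With `osenv` dropped, the cache location above comes straight from Node's `os` module. A sketch of the same derivation as a runnable script, following the hunk line for line:

```js
const os = require('os');
const { resolve } = require('path');

// os.tmpdir() and os.homedir() replace the osenv calls one-for-one.
const temp = os.tmpdir();
const uidOrPid = process.getuid ? process.getuid() : process.pid;
const home = os.homedir() || resolve(temp, 'npm-' + uidOrPid);

const platform = process.platform;
const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm';
const cacheRoot = (platform === 'win32' && process.env.APPDATA) || home;
console.log(resolve(cacheRoot, cacheExtra)); // e.g. /home/you/.npm
```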
diff --git a/node_modules/pacote/lib/util/git/clone.js b/node_modules/pacote/lib/util/git/clone.js
deleted file mode 100644
index bab91ac72..000000000
--- a/node_modules/pacote/lib/util/git/clone.js
+++ /dev/null
@@ -1,138 +0,0 @@
-// The goal here is to minimize both git workload and
-// the number of refs we download over the network.
-//
-// Every method ends up with the checked out working dir
-// at the specified ref, and resolves with the git sha.
-
-// Only certain whitelisted hosts get shallow cloning.
-// Many hosts (including GHE) don't always support it.
-// A failed shallow fetch takes a LOT longer than a full
-// fetch in most cases, so we skip it entirely.
-// Set opts.gitShallow = true/false to force this behavior
-// one way or the other.
-const shallowHosts = new Set([
- 'github.com',
- 'gist.github.com',
- 'gitlab.com',
- 'bitbucket.com',
- 'bitbucket.org',
-])
-const { parse } = require('url')
-
-const revs = require('./revs.js')
-const spawn = require('./spawn.js')
-
-const pickManifest = require('npm-pick-manifest')
-const { promisify } = require('util')
-const fs = require('fs')
-const mkdirp = require('mkdirp')
-
-module.exports = (repo, ref = 'HEAD', target, spec, opts) =>
- revs(repo, opts).then(revs => clone(
- repo,
- revs,
- ref,
- resolveRef(revs, ref, spec, opts),
- target,
- opts
- ))
-
-const maybeShallow = (repo, opts) =>
- opts.gitShallow === false || opts.gitShallow ? opts.gitShallow
- : shallowHosts.has(parse(repo).host)
-
-const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32'
-
-const clone = (repo, revs, ref, revDoc, target, opts) =>
- !revDoc ? unresolved(repo, ref, target, opts)
- : revDoc.sha === revs.refs.HEAD.sha ? plain(repo, revDoc, target, opts)
- : revDoc.type === 'tag' || revDoc.type === 'branch'
- ? branch(repo, revDoc, target, opts)
- : other(repo, revDoc, target, opts)
-
-const resolveRef = (revs, ref, spec, opts) =>
- !revs ? /* istanbul ignore next - will fail anyway, can't pull */ null
- : !ref ? revs.refs.HEAD
- : spec.gitRange ? pickManifest(revs, spec.gitRange, opts)
- : spec.gitCommittish
- ? revs.refs[spec.gitCommittish] ||
- revs.refs[revs.shas[spec.gitCommittish]] ||
- revs.refs[spec.gitCommittish]
- : revs.refs.HEAD
-
-// pull request or some other kind of advertised ref
-const other = (repo, revDoc, target, opts) => {
- const shallow = maybeShallow(repo, opts)
-
- const fetchOrigin = [ 'fetch', 'origin', revDoc.rawRef ]
- .concat(shallow ? ['--depth=1'] : [])
-
- const git = (args) => spawn(args, { cwd: target }, opts)
- return mkdirp(target)
- .then(() => git(['init']))
- .then(() => isWindows(opts)
- ? git(['config', '--local', '--add', 'core.longpaths', 'true'])
- : null)
- .then(() => git(['remote', 'add', 'origin', repo]))
- .then(() => git(fetchOrigin))
- .then(() => git(['checkout', revDoc.sha]))
- .then(() => updateSubmodules(target, opts))
- .then(() => revDoc.sha)
-}
-
-// tag or branches. use -b
-const branch = (repo, revDoc, target, opts) => {
- const args = [
- 'clone',
- '-b',
- revDoc.ref,
- repo,
- target,
- '--recurse-submodules',
- ]
- if (maybeShallow(repo, opts))
- args.push('--depth=1')
- if (isWindows(opts))
- args.push('--config', 'core.longpaths=true')
- return spawn(args, {}, opts).then(() => revDoc.sha)
-}
-
-// just the head. clone it
-const plain = (repo, revDoc, target, opts) => {
- const args = [
- 'clone',
- repo,
- target,
- '--recurse-submodules'
- ]
- if (maybeShallow(repo, opts))
- args.push('--depth=1')
- if (isWindows(opts))
- args.push('--config', 'core.longpaths=true')
- return spawn(args, {}, opts).then(() => revDoc.sha)
-}
-
-const updateSubmodules = (target, opts) => new Promise(res =>
- fs.stat(target + '/.gitmodules', er => res(er ? null
- : spawn([
- 'submodule',
- 'update',
- '-q',
- '--init',
- '--recursive'
- ], { cwd: target }, opts))))
-
-const unresolved = (repo, ref, target, opts) => {
- // can't do this one shallowly, because the ref isn't advertised
- // but we can avoid checking out the working dir twice, at least
- const lp = isWindows(opts) ? ['--config', 'core.longpaths=true'] : []
- const cloneArgs = ['clone', '--mirror', '-q', repo, target + '/.git']
- const git = (args) => spawn(args, { cwd: target }, opts)
- return mkdirp(target)
- .then(() => git(cloneArgs.concat(lp)))
- .then(() => git(['init']))
- .then(() => git(['checkout', ref]))
- .then(() => updateSubmodules(target, opts))
- .then(() => git(['rev-parse', '--revs-only', 'HEAD']))
- .then(stdout => stdout.trim())
-}
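
The whole clone strategy deleted above (shallow-host allowlist, branch/tag vs. raw-sha checkout, submodule updates) now lives in `@npmcli/git`, which git.js calls as `git.clone(repo, ref, tmp, { ...opts, spec })`. A hedged usage sketch of the replacement; the repo URL is real, the temp-dir handling is ours:

```js
const git = require('@npmcli/git');
const os = require('os');
const { mkdtempSync } = require('fs');
const { join } = require('path');

const target = mkdtempSync(join(os.tmpdir(), 'clone-'));

// Resolves with the checked-out commit sha, as the removed helper did.
// pacote additionally passes its npm-package-arg spec in the options.
git.clone('https://github.com/npm/cli.git', 'HEAD', target)
  .then(sha => console.log('checked out', sha))
  .catch(console.error);
```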
diff --git a/node_modules/pacote/lib/util/git/env.js b/node_modules/pacote/lib/util/git/env.js
deleted file mode 100644
index 517d1deb7..000000000
--- a/node_modules/pacote/lib/util/git/env.js
+++ /dev/null
@@ -1,33 +0,0 @@
-const uniqueFilename = require('unique-filename')
-const { join } = require('path')
-const osenv = require('osenv')
-
-const goodEnvVars = new Set([
- 'GIT_ASKPASS',
- 'GIT_EXEC_PATH',
- 'GIT_PROXY_COMMAND',
- 'GIT_SSH',
- 'GIT_SSH_COMMAND',
- 'GIT_SSL_CAINFO',
- 'GIT_SSL_NO_VERIFY'
-])
-
-// memoize
-let gitEnv
-
-module.exports = () => {
- if (gitEnv)
- return gitEnv
-
- // we set the template dir to an empty folder to give git less to do
- const tmpDir = join(osenv.tmpdir(), 'pacote-git-template-tmp')
- const tmpName = uniqueFilename(tmpDir, 'git-clone')
- return gitEnv = Object.keys(process.env).reduce((gitEnv, k) => {
- if (goodEnvVars.has(k) || !k.startsWith('GIT_'))
- gitEnv[k] = process.env[k]
- return gitEnv
- }, {
- GIT_ASKPASS: 'echo',
- GIT_TEMPLATE_DIR: tmpName
- })
-}
diff --git a/node_modules/pacote/lib/util/git/index.js b/node_modules/pacote/lib/util/git/index.js
deleted file mode 100644
index 65c5fadd3..000000000
--- a/node_modules/pacote/lib/util/git/index.js
+++ /dev/null
@@ -1,5 +0,0 @@
-module.exports = {
- clone: require('./clone.js'),
- revs: require('./revs.js'),
- spawn: require('./spawn.js'),
-}
diff --git a/node_modules/pacote/lib/util/git/lines-to-revs.js b/node_modules/pacote/lib/util/git/lines-to-revs.js
deleted file mode 100644
index 524e67243..000000000
--- a/node_modules/pacote/lib/util/git/lines-to-revs.js
+++ /dev/null
@@ -1,133 +0,0 @@
-// turn an array of lines from `git ls-remote` into a thing
-// vaguely resembling a packument, where docs are a resolved ref
-
-const semver = require('semver')
-
-module.exports = lines => finish(lines.reduce(linesToRevsReducer, {
- versions: {},
- 'dist-tags': {},
- refs: {},
- shas: {},
-}))
-
-const finish = revs => distTags(shaList(peelTags(revs)))
-
-// We can check out shallow clones on specific SHAs if we have a ref
-const shaList = revs => {
- Object.keys(revs.refs).forEach(ref => {
- doc = revs.refs[ref]
- if (revs.shas[doc.sha])
- revs.shas[doc.sha].push(ref)
- else
- revs.shas[doc.sha] = [ref]
- })
- return revs
-}
-
-
-// Replace any tags with their ^{} counterparts, if those exist
-const peelTags = revs => {
- Object.keys(revs.refs).filter(ref => ref.endsWith('^{}')).forEach(ref => {
- const peeled = revs.refs[ref]
- const unpeeled = revs.refs[ref.replace(/\^\{\}$/, '')]
- if (unpeeled) {
- unpeeled.sha = peeled.sha
- delete revs.refs[ref]
- }
- })
- return revs
-}
-
-const distTags = revs => {
- // not entirely sure what situations would result in an
- // ichabod repo, but best to be careful in Sleepy Hollow anyway
- const HEAD = revs.refs.HEAD || /* istanbul ignore next */ {}
- const versions = Object.keys(revs.versions)
- versions.forEach(v => {
- // simulate a dist-tags with latest pointing at the
- // 'latest' branch if one exists and is a version,
- // or HEAD if not.
- const ver = revs.versions[v]
- if (revs.refs.latest && ver.sha === revs.refs.latest.sha)
- revs['dist-tags'].latest = v
- else if (ver.sha === HEAD.sha) {
- revs['dist-tags'].HEAD = v
- if (!revs.refs.latest)
- revs['dist-tags'].latest = v
- }
- })
- return revs
-}
-
-const refType = ref =>
- ref.startsWith('refs/tags/') ? 'tag'
- : ref.startsWith('refs/heads/') ? 'branch'
- : ref.startsWith('refs/pull/') ? 'pull'
- : ref === 'HEAD' ? 'head'
- // Could be anything, ignore for now
- : /* istanbul ignore next */ 'other'
-
-// return the doc, or null if we should ignore it.
-const lineToRevDoc = line => {
- const split = line.trim().split(/\s+/, 2)
- if (split.length < 2)
- return null
-
- const sha = split[0].trim()
- const rawRef = split[1].trim()
- const type = refType(rawRef)
-
- if (type === 'tag') {
- // refs/tags/foo^{} is the 'peeled tag', ie the commit
- // that is tagged by refs/tags/foo they resolve to the same
- // content, just different objects in git's data structure.
- // But, we care about the thing the tag POINTS to, not the tag
- // object itself, so we only look at the peeled tag refs, and
- // ignore the pointer.
- // For now, though, we have to save both, because some tags
- // don't have peels, if they were not annotated.
- const ref = rawRef.substr('refs/tags/'.length)
- return { sha, ref, rawRef, type }
- }
-
- if (type === 'branch') {
- const ref = rawRef.substr('refs/heads/'.length)
- return { sha, ref, rawRef, type }
- }
-
- if (type === 'pull') {
- // NB: merged pull requests installable with #pull/123/merge
- // for the merged pr, or #pull/123 for the PR head
- const ref = rawRef.substr('refs/'.length).replace(/\/head$/, '')
- return { sha, ref, rawRef, type }
- }
-
- if (type === 'head') {
- const ref = 'HEAD'
- return { sha, ref, rawRef, type }
- }
-
- // at this point, all we can do is leave the ref un-munged
- return { sha, ref: rawRef, rawRef, type }
-}
-
-const linesToRevsReducer = (revs, line) => {
- const doc = lineToRevDoc(line)
-
- if (!doc)
- return revs
-
- revs.refs[doc.ref] = doc
- revs.refs[doc.rawRef] = doc
-
- if (doc.type === 'tag') {
- // try to pull a semver value out of tags like `release-v1.2.3`
- // which is a pretty common pattern.
- const match = !doc.ref.endsWith('^{}') &&
- doc.ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/)
- if (match && semver.valid(match[1], true))
- revs.versions[semver.clean(match[1], true)] = doc
- }
-
- return revs
-}
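
To make the removed module's job concrete, here is roughly what a pair of `git ls-remote` lines would have reduced to (shas shortened and the `rawRef` alias entries in `refs` elided for readability; this logic now ships in `@npmcli/git`):

```js
// Two illustrative `git ls-remote` lines:
//
//   cafeb01  HEAD
//   cafeb01  refs/tags/v1.2.3
//
// The reducer above turned them into a packument-ish doc:
const revs = {
  refs: {
    HEAD: { sha: 'cafeb01', ref: 'HEAD', rawRef: 'HEAD', type: 'head' },
    'v1.2.3': { sha: 'cafeb01', ref: 'v1.2.3', rawRef: 'refs/tags/v1.2.3', type: 'tag' },
  },
  shas: { cafeb01: ['HEAD', 'v1.2.3'] },
  versions: {
    '1.2.3': { sha: 'cafeb01', ref: 'v1.2.3', rawRef: 'refs/tags/v1.2.3', type: 'tag' },
  },
  // HEAD's sha matches the v1.2.3 tag and there is no `latest` branch,
  // so both simulated dist-tags point at that version:
  'dist-tags': { HEAD: '1.2.3', latest: '1.2.3' },
};
console.log(revs['dist-tags'].latest); // 1.2.3
```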
diff --git a/node_modules/pacote/lib/util/git/opts.js b/node_modules/pacote/lib/util/git/opts.js
deleted file mode 100644
index c5659d0b8..000000000
--- a/node_modules/pacote/lib/util/git/opts.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const gitEnv = require('./env.js')
-module.exports = (_gitOpts = {}, opts = {}) => {
- const isRoot = process.getuid && process.getuid() === 0
- const gitOpts = {
- env: gitEnv()
- }
-
- if (isRoot && +opts.uid)
- gitOpts.uid = +opts.uid
-
- if (isRoot && +opts.gid)
- gitOpts.gid = +opts.gid
-
- Object.assign(gitOpts, _gitOpts)
- return gitOpts
-}
diff --git a/node_modules/pacote/lib/util/git/revs.js b/node_modules/pacote/lib/util/git/revs.js
deleted file mode 100644
index 4e4bcf205..000000000
--- a/node_modules/pacote/lib/util/git/revs.js
+++ /dev/null
@@ -1,24 +0,0 @@
-const pinflight = require('promise-inflight')
-const spawn = require('./spawn.js')
-const LRU = require('lru-cache')
-
-const revsCache = new LRU({
- max: 100,
- maxAge: 5 * 60 * 1000,
-})
-
-const linesToRevs = require('./lines-to-revs.js')
-
-module.exports = (repo, opts = {}) => {
- if (!opts.noGitRevCache) {
- const cached = revsCache.get(repo)
- if (cached)
- return Promise.resolve(cached)
- }
-
- return pinflight(`ls-remote:${repo}`, () =>
- spawn(['ls-remote', repo], {}, opts)
- .then(stdout => linesToRevs(stdout.trim().split('\n')))
- .then(revs => (revsCache.set(repo, revs), revs))
- )
-}
diff --git a/node_modules/pacote/lib/util/git/should-retry.js b/node_modules/pacote/lib/util/git/should-retry.js
deleted file mode 100644
index 8082bb5d7..000000000
--- a/node_modules/pacote/lib/util/git/should-retry.js
+++ /dev/null
@@ -1,17 +0,0 @@
-const transientErrors = [
- 'remote error: Internal Server Error',
- 'The remote end hung up unexpectedly',
- 'Connection timed out',
- 'Operation timed out',
- 'Failed to connect to .* Timed out',
- 'Connection reset by peer',
- 'SSL_ERROR_SYSCALL',
- 'The requested URL returned error: 503'
-].join('|')
-
-const transientErrorRe = new RegExp(transientErrors)
-
-const maxRetry = 3
-
-module.exports = (error, number) =>
- transientErrorRe.test(error) && (number < maxRetry)
diff --git a/node_modules/pacote/lib/util/git/spawn.js b/node_modules/pacote/lib/util/git/spawn.js
deleted file mode 100644
index 95f5b0cd2..000000000
--- a/node_modules/pacote/lib/util/git/spawn.js
+++ /dev/null
@@ -1,34 +0,0 @@
-const spawn = require('../spawn.js')
-const promiseRetry = require('promise-retry')
-const shouldRetry = require('./should-retry.js')
-const whichGit = require('./which.js')
-const makeOpts = require('./opts.js')
-const procLog = require('../proc-log.js')
-
-module.exports = (gitArgs, gitOpts, opts = {}) => {
- const gitPath = whichGit(opts)
-
- if (gitPath instanceof Error)
- return Promise.reject(gitPath)
-
- const log = opts.log || procLog
- return promiseRetry((retry, number) => {
- if (number !== 1)
- log.silly('pacote', `Retrying git command: ${
- gitArgs.join(' ')} attempt # ${number}`)
-
- return spawn(gitPath, gitArgs, makeOpts(gitOpts, opts))
- .catch(er => {
- if (shouldRetry(er.stderr, number))
- retry(er)
- else
- throw er
- })
- .then(({stdout}) => stdout)
- }, opts.retry !== null && opts.retry !== undefined ? opts.retry : {
- retries: opts.fetchRetries || 2,
- factor: opts.fetchRetryFactor || 10,
- maxTimeout: opts.fetchRetryMaxtimeout || 60000,
- minTimeout: opts.fetchRetryMintimeout || 1000,
- })
-}
diff --git a/node_modules/pacote/lib/util/git/which.js b/node_modules/pacote/lib/util/git/which.js
deleted file mode 100644
index 9e82d391a..000000000
--- a/node_modules/pacote/lib/util/git/which.js
+++ /dev/null
@@ -1,11 +0,0 @@
-const which = require('which')
-
-let gitPath
-try {
- gitPath = which.sync('git')
-} catch (e) {}
-
-module.exports = (opts = {}) =>
- opts.git ||
- opts.git !== false && gitPath ||
- Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' })
diff --git a/node_modules/pacote/lib/util/npm.js b/node_modules/pacote/lib/util/npm.js
index 0bf5093ae..293695525 100644
--- a/node_modules/pacote/lib/util/npm.js
+++ b/node_modules/pacote/lib/util/npm.js
@@ -1,9 +1,9 @@
// run an npm command
-const spawn = require('./spawn.js')
+const spawn = require('@npmcli/promise-spawn')
-module.exports = (npmBin, npmCommand, cwd, ermsg) => {
+module.exports = (npmBin, npmCommand, cwd, extra) => {
const isJS = npmBin.endsWith('.js')
const cmd = isJS ? process.execPath : npmBin
const args = (isJS ? [npmBin] : []).concat(npmCommand)
- return spawn(cmd, args, { cwd }, ermsg)
+ return spawn(cmd, args, { cwd, stdioString: true }, extra)
}
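
The helper above now rides on `@npmcli/promise-spawn`, and the old error-message string has become an `extra` object merged into any failure. Its remaining caller is the git-dep install step shown in the git.js hunk earlier; a hedged sketch of that call shape (this is a private pacote helper, deep-required here only for illustration, with made-up command args and path):

```js
const npm = require('pacote/lib/util/npm.js');

// Mirrors the git.js call site: run `npm install` plus any CLI config in
// the prepared checkout dir. On failure the rejection carries the merged
// { message: ... } along with captured stdout/stderr.
npm(
  'npm',
  ['install', '--no-audit'],   // npmInstallCmd + npmCliConfig, illustrative
  '/tmp/some-git-checkout',    // illustrative path
  { message: 'git dep preparation failed' }
).catch(er => console.error(er.message, er.stderr));
```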
diff --git a/node_modules/pacote/lib/util/spawn.js b/node_modules/pacote/lib/util/spawn.js
deleted file mode 100644
index 03af72307..000000000
--- a/node_modules/pacote/lib/util/spawn.js
+++ /dev/null
@@ -1,36 +0,0 @@
-const { spawn } = require('child_process')
-module.exports = (cmd, args, options, ermsg) => new Promise((res, rej) => {
- if (!ermsg)
- ermsg = `failed '${[cmd].concat(args).join(' ')}'`
- const proc = spawn(cmd, args, options)
- const stdout = []
- const stderr = []
- proc.on('error',
- /* istanbul ignore next: node 8 just throws from spawn() */
- er => withStdio(rej, er, stdout, stderr))
- if (proc.stdout) {
- proc.stdout.on('data', c => stdout.push(c))
- proc.stdout.on('error', er => withStdio(rej, er, stdout, stderr))
- }
- if (proc.stderr) {
- proc.stderr.on('data', c => stderr.push(c))
- proc.stderr.on('error', er => withStdio(rej, er, stdout, stderr))
- }
- proc.on('close', (code, signal) => {
- if (code || signal)
- return withStdio(rej, Object.assign(new Error(ermsg), {
- cmd,
- args,
- code,
- signal,
- }), stdout, stderr)
- withStdio(res, { cmd, args }, stdout, stderr)
- })
-})
-
-const withStdio = (resrej, obj, stdout, stderr) => {
- return resrej(Object.assign(obj, {
- stdout: Buffer.concat(stdout).toString('utf8'),
- stderr: Buffer.concat(stderr).toString('utf8'),
- }))
-}
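
What the deleted wrapper above did by hand (buffer stdout/stderr, attach both to the result or the error, reject on a non-zero exit) is exactly what `@npmcli/promise-spawn` provides, which is why pacote now depends on it directly. A hedged usage sketch:

```js
const spawn = require('@npmcli/promise-spawn');

// stdioString: true yields stdout/stderr as strings rather than Buffers,
// matching the utf8 concat the removed util/spawn.js performed itself.
spawn('git', ['--version'], { stdioString: true })
  .then(({ code, stdout }) => console.log(code, stdout.trim()))
  .catch(er => console.error(er.code, er.stderr));
```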
diff --git a/node_modules/libnpmpack/node_modules/.bin/rimraf b/node_modules/pacote/node_modules/.bin/rimraf
index 4cd49a49d..4cd49a49d 120000
--- a/node_modules/libnpmpack/node_modules/.bin/rimraf
+++ b/node_modules/pacote/node_modules/.bin/rimraf
diff --git a/node_modules/pacote/node_modules/chownr/package.json b/node_modules/pacote/node_modules/chownr/package.json
index 0317b2b2c..1dd70b753 100644
--- a/node_modules/pacote/node_modules/chownr/package.json
+++ b/node_modules/pacote/node_modules/chownr/package.json
@@ -1,26 +1,28 @@
{
- "_from": "chownr@^1.1.4",
- "_id": "chownr@1.1.4",
+ "_from": "chownr@^2.0.0",
+ "_id": "chownr@2.0.0",
"_inBundle": false,
- "_integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==",
+ "_integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==",
"_location": "/pacote/chownr",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
- "raw": "chownr@^1.1.4",
+ "raw": "chownr@^2.0.0",
"name": "chownr",
"escapedName": "chownr",
- "rawSpec": "^1.1.4",
+ "rawSpec": "^2.0.0",
"saveSpec": null,
- "fetchSpec": "^1.1.4"
+ "fetchSpec": "^2.0.0"
},
"_requiredBy": [
- "/pacote"
+ "/pacote",
+ "/pacote/cacache",
+ "/pacote/cacache/tar"
],
- "_resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
- "_shasum": "6fc9d7b42d32a583596337666e7d08084da2cc6b",
- "_spec": "chownr@^1.1.4",
+ "_resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz",
+ "_shasum": "15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece",
+ "_spec": "chownr@^2.0.0",
"_where": "/Users/isaacs/dev/npm/cli/node_modules/pacote",
"author": {
"name": "Isaac Z. Schlueter",
@@ -38,6 +40,9 @@
"rimraf": "^2.7.1",
"tap": "^14.10.6"
},
+ "engines": {
+ "node": ">=10"
+ },
"files": [
"chownr.js"
],
@@ -58,5 +63,5 @@
"tap": {
"check-coverage": true
},
- "version": "1.1.4"
+ "version": "2.0.0"
}
diff --git a/node_modules/pacote/node_modules/mkdirp/lib/opts-arg.js b/node_modules/pacote/node_modules/mkdirp/lib/opts-arg.js
index 488bd44c3..2fa4833fa 100644
--- a/node_modules/pacote/node_modules/mkdirp/lib/opts-arg.js
+++ b/node_modules/pacote/node_modules/mkdirp/lib/opts-arg.js
@@ -2,9 +2,9 @@ const { promisify } = require('util')
const fs = require('fs')
const optsArg = opts => {
if (!opts)
- opts = { mode: 0o777 & (~process.umask()), fs }
+ opts = { mode: 0o777, fs }
else if (typeof opts === 'object')
- opts = { mode: 0o777 & (~process.umask()), fs, ...opts }
+ opts = { mode: 0o777, fs, ...opts }
else if (typeof opts === 'number')
opts = { mode: opts, fs }
else if (typeof opts === 'string')
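
The mkdirp change above stops pre-masking the default mode with `~process.umask()`: the OS already applies the process umask when `fs.mkdir` creates a directory, so masking again in JS was redundant, and querying `process.umask()` with no arguments was headed for deprecation in Node. A small illustration of the kernel-side masking, assuming a typical `022` umask (the demo path is ours):

```js
const fs = require('fs');

// Request mode 0o777; with a umask of 022 the created directory ends up
// 0o755 -- the kernel applies the umask, not mkdirp.
fs.mkdir('/tmp/mkdirp-umask-demo', { mode: 0o777 }, er => {
  if (er && er.code !== 'EEXIST') throw er;
  fs.stat('/tmp/mkdirp-umask-demo', (er, st) => {
    if (er) throw er;
    console.log((st.mode & 0o777).toString(8)); // typically '755'
  });
});
```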
diff --git a/node_modules/pacote/node_modules/mkdirp/package.json b/node_modules/pacote/node_modules/mkdirp/package.json
index 602eb7ed0..be836545e 100644
--- a/node_modules/pacote/node_modules/mkdirp/package.json
+++ b/node_modules/pacote/node_modules/mkdirp/package.json
@@ -1,8 +1,8 @@
{
"_from": "mkdirp@^1.0.3",
- "_id": "mkdirp@1.0.3",
+ "_id": "mkdirp@1.0.4",
"_inBundle": false,
- "_integrity": "sha512-6uCP4Qc0sWsgMLy1EOqqS/3rjDHOEnsStVr/4vtAIK2Y5i2kA7lFFejYrpIyiN9w0pYf4ckeCYT9f1r1P9KX5g==",
+ "_integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
"_location": "/pacote/mkdirp",
"_phantomChildren": {},
"_requested": {
@@ -16,12 +16,14 @@
"fetchSpec": "^1.0.3"
},
"_requiredBy": [
- "/pacote"
+ "/pacote",
+ "/pacote/cacache",
+ "/pacote/cacache/tar"
],
- "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.3.tgz",
- "_shasum": "4cf2e30ad45959dddea53ad97d518b6c8205e1ea",
+ "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
+ "_shasum": "3eb5ed62622756d79a5f0e2a221dfebad75c2f7e",
"_spec": "mkdirp@^1.0.3",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/pacote",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/pacote",
"bin": {
"mkdirp": "bin/cmd.js"
},
@@ -33,7 +35,7 @@
"description": "Recursively mkdir, like `mkdir -p`",
"devDependencies": {
"require-inject": "^1.4.4",
- "tap": "^14.10.6"
+ "tap": "^14.10.7"
},
"engines": {
"node": ">=10"
@@ -71,5 +73,5 @@
"check-coverage": true,
"coverage-map": "map.js"
},
- "version": "1.0.3"
+ "version": "1.0.4"
}
diff --git a/node_modules/pacote/node_modules/rimraf/CHANGELOG.md b/node_modules/pacote/node_modules/rimraf/CHANGELOG.md
new file mode 100644
index 000000000..f116f1414
--- /dev/null
+++ b/node_modules/pacote/node_modules/rimraf/CHANGELOG.md
@@ -0,0 +1,65 @@
+# v3.0
+
+- Add `--preserve-root` option to executable (default true)
+- Drop support for Node.js below version 6
+
+# v2.7
+
+- Make `glob` an optional dependency
+
+# 2.6
+
+- Retry on EBUSY on non-windows platforms as well
+- Make `rimraf.sync` 10000% more reliable on Windows
+
+# 2.5
+
+- Handle Windows EPERM when lstat-ing read-only dirs
+- Add glob option to pass options to glob
+
+# 2.4
+
+- Add EPERM to delay/retry loop
+- Add `disableGlob` option
+
+# 2.3
+
+- Make maxBusyTries and emfileWait configurable
+- Handle weird SunOS unlink-dir issue
+- Glob the CLI arg for better Windows support
+
+# 2.2
+
+- Handle ENOENT properly on Windows
+- Allow overriding fs methods
+- Treat EPERM as indicative of non-empty dir
+- Remove optional graceful-fs dep
+- Consistently return null error instead of undefined on success
+- win32: Treat ENOTEMPTY the same as EBUSY
+- Add `rimraf` binary
+
+# 2.1
+
+- Fix SunOS error code for a non-empty directory
+- Try rmdir before readdir
+- Treat EISDIR like EPERM
+- Remove chmod
+- Remove lstat polyfill, node 0.7 is not supported
+
+# 2.0
+
+- Fix myGid call to check process.getgid
+- Simplify the EBUSY backoff logic.
+- Use fs.lstat in node >= 0.7.9
+- Remove gently option
+- remove fiber implementation
+- Delete files that are marked read-only
+
+# 1.0
+
+- Allow ENOENT in sync method
+- Throw when no callback is provided
+- Make opts.gently an absolute path
+- use 'stat' if 'lstat' is not available
+- Consistent error naming, and rethrow non-ENOENT stat errors
+- add fiber implementation
diff --git a/node_modules/libnpmpack/node_modules/pacote/LICENSE b/node_modules/pacote/node_modules/rimraf/LICENSE
index a03cd0ed0..19129e315 100644
--- a/node_modules/libnpmpack/node_modules/pacote/LICENSE
+++ b/node_modules/pacote/node_modules/rimraf/LICENSE
@@ -1,6 +1,6 @@
The ISC License
-Copyright (c) Isaac Z. Schlueter, Kat Marchán, npm, Inc., and Contributors
+Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
diff --git a/node_modules/libnpmpack/node_modules/rimraf/README.md b/node_modules/pacote/node_modules/rimraf/README.md
index 423b8cf85..423b8cf85 100644
--- a/node_modules/libnpmpack/node_modules/rimraf/README.md
+++ b/node_modules/pacote/node_modules/rimraf/README.md
diff --git a/node_modules/pacote/node_modules/rimraf/bin.js b/node_modules/pacote/node_modules/rimraf/bin.js
new file mode 100755
index 000000000..023814cc9
--- /dev/null
+++ b/node_modules/pacote/node_modules/rimraf/bin.js
@@ -0,0 +1,68 @@
+#!/usr/bin/env node
+
+const rimraf = require('./')
+
+const path = require('path')
+
+const isRoot = arg => /^(\/|[a-zA-Z]:\\)$/.test(path.resolve(arg))
+const filterOutRoot = arg => {
+ const ok = preserveRoot === false || !isRoot(arg)
+ if (!ok) {
+ console.error(`refusing to remove ${arg}`)
+ console.error('Set --no-preserve-root to allow this')
+ }
+ return ok
+}
+
+let help = false
+let dashdash = false
+let noglob = false
+let preserveRoot = true
+const args = process.argv.slice(2).filter(arg => {
+ if (dashdash)
+ return !!arg
+ else if (arg === '--')
+ dashdash = true
+ else if (arg === '--no-glob' || arg === '-G')
+ noglob = true
+ else if (arg === '--glob' || arg === '-g')
+ noglob = false
+ else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/))
+ help = true
+ else if (arg === '--preserve-root')
+ preserveRoot = true
+ else if (arg === '--no-preserve-root')
+ preserveRoot = false
+ else
+ return !!arg
+}).filter(arg => !preserveRoot || filterOutRoot(arg))
+
+const go = n => {
+ if (n >= args.length)
+ return
+ const options = noglob ? { glob: false } : {}
+ rimraf(args[n], options, er => {
+ if (er)
+ throw er
+ go(n+1)
+ })
+}
+
+if (help || args.length === 0) {
+ // If they didn't ask for help, then this is not a "success"
+ const log = help ? console.log : console.error
+ log('Usage: rimraf <path> [<path> ...]')
+ log('')
+ log(' Deletes all files and folders at "path" recursively.')
+ log('')
+ log('Options:')
+ log('')
+ log(' -h, --help Display this usage info')
+ log(' -G, --no-glob Do not expand glob patterns in arguments')
+ log(' -g, --glob Expand glob patterns in arguments (default)')
+ log(' --preserve-root Do not remove \'/\' (default)')
+ log(' --no-preserve-root Do not treat \'/\' specially')
+ log(' -- Stop parsing flags')
+ process.exit(help ? 0 : 1)
+} else
+ go(0)
diff --git a/node_modules/libnpmpack/node_modules/rimraf/package.json b/node_modules/pacote/node_modules/rimraf/package.json
index a61c985d5..9b0db2294 100644
--- a/node_modules/libnpmpack/node_modules/rimraf/package.json
+++ b/node_modules/pacote/node_modules/rimraf/package.json
@@ -1,27 +1,28 @@
{
- "_from": "rimraf@^2.7.1",
- "_id": "rimraf@2.7.1",
+ "_from": "rimraf@^3.0.2",
+ "_id": "rimraf@3.0.2",
"_inBundle": false,
- "_integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
- "_location": "/libnpmpack/rimraf",
+ "_integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
+ "_location": "/pacote/rimraf",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
- "raw": "rimraf@^2.7.1",
+ "raw": "rimraf@^3.0.2",
"name": "rimraf",
"escapedName": "rimraf",
- "rawSpec": "^2.7.1",
+ "rawSpec": "^3.0.2",
"saveSpec": null,
- "fetchSpec": "^2.7.1"
+ "fetchSpec": "^3.0.2"
},
"_requiredBy": [
- "/libnpmpack/pacote"
+ "/pacote",
+ "/pacote/cacache"
],
- "_resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
- "_shasum": "35797f13a7fdadc566142c29d4f07ccad483e3ec",
- "_spec": "rimraf@^2.7.1",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libnpmpack/node_modules/pacote",
+ "_resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
+ "_shasum": "f1a5402ba6220ad52cc1282bac1ae3aa49fd061a",
+ "_spec": "rimraf@^3.0.2",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/pacote",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
@@ -49,6 +50,9 @@
"bin.js",
"rimraf.js"
],
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ },
"homepage": "https://github.com/isaacs/rimraf#readme",
"license": "ISC",
"main": "rimraf.js",
@@ -58,10 +62,10 @@
"url": "git://github.com/isaacs/rimraf.git"
},
"scripts": {
- "postpublish": "git push origin --all; git push origin --tags",
+ "postpublish": "git push origin --follow-tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test/*.js"
},
- "version": "2.7.1"
+ "version": "3.0.2"
}
diff --git a/node_modules/libnpmpack/node_modules/rimraf/rimraf.js b/node_modules/pacote/node_modules/rimraf/rimraf.js
index a90ad029f..34da4171d 100644
--- a/node_modules/libnpmpack/node_modules/rimraf/rimraf.js
+++ b/node_modules/pacote/node_modules/rimraf/rimraf.js
@@ -1,29 +1,25 @@
-module.exports = rimraf
-rimraf.sync = rimrafSync
-
-var assert = require("assert")
-var path = require("path")
-var fs = require("fs")
-var glob = undefined
+const assert = require("assert")
+const path = require("path")
+const fs = require("fs")
+let glob = undefined
try {
glob = require("glob")
} catch (_err) {
// treat glob as optional.
}
-var _0666 = parseInt('666', 8)
-var defaultGlobOpts = {
+const defaultGlobOpts = {
nosort: true,
silent: true
}
// for EMFILE handling
-var timeout = 0
+let timeout = 0
-var isWindows = (process.platform === "win32")
+const isWindows = (process.platform === "win32")
-function defaults (options) {
- var methods = [
+const defaults = options => {
+ const methods = [
'unlink',
'chmod',
'stat',
@@ -31,7 +27,7 @@ function defaults (options) {
'rmdir',
'readdir'
]
- methods.forEach(function(m) {
+ methods.forEach(m => {
options[m] = options[m] || fs[m]
m = m + 'Sync'
options[m] = options[m] || fs[m]
@@ -49,7 +45,7 @@ function defaults (options) {
options.glob = options.glob || defaultGlobOpts
}
-function rimraf (p, options, cb) {
+const rimraf = (p, options, cb) => {
if (typeof options === 'function') {
cb = options
options = {}
@@ -63,27 +59,17 @@ function rimraf (p, options, cb) {
defaults(options)
- var busyTries = 0
- var errState = null
- var n = 0
-
- if (options.disableGlob || !glob.hasMagic(p))
- return afterGlob(null, [p])
-
- options.lstat(p, function (er, stat) {
- if (!er)
- return afterGlob(null, [p])
+ let busyTries = 0
+ let errState = null
+ let n = 0
- glob(p, options.glob, afterGlob)
- })
-
- function next (er) {
+ const next = (er) => {
errState = errState || er
if (--n === 0)
cb(errState)
}
- function afterGlob (er, results) {
+ const afterGlob = (er, results) => {
if (er)
return cb(er)
@@ -91,24 +77,19 @@ function rimraf (p, options, cb) {
if (n === 0)
return cb()
- results.forEach(function (p) {
- rimraf_(p, options, function CB (er) {
+ results.forEach(p => {
+ const CB = (er) => {
if (er) {
if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") &&
busyTries < options.maxBusyTries) {
busyTries ++
- var time = busyTries * 100
// try again, with the same exact callback as this one.
- return setTimeout(function () {
- rimraf_(p, options, CB)
- }, time)
+ return setTimeout(() => rimraf_(p, options, CB), busyTries * 100)
}
// this one won't happen if graceful-fs is used.
if (er.code === "EMFILE" && timeout < options.emfileWait) {
- return setTimeout(function () {
- rimraf_(p, options, CB)
- }, timeout ++)
+ return setTimeout(() => rimraf_(p, options, CB), timeout ++)
}
// already gone
@@ -117,9 +98,21 @@ function rimraf (p, options, cb) {
timeout = 0
next(er)
- })
+ }
+ rimraf_(p, options, CB)
})
}
+
+ if (options.disableGlob || !glob.hasMagic(p))
+ return afterGlob(null, [p])
+
+ options.lstat(p, (er, stat) => {
+ if (!er)
+ return afterGlob(null, [p])
+
+ glob(p, options.glob, afterGlob)
+ })
+
}
// Two possible strategies.
@@ -133,14 +126,14 @@ function rimraf (p, options, cb) {
//
// If anyone ever complains about this, then I guess the strategy could
// be made configurable somehow. But until then, YAGNI.
-function rimraf_ (p, options, cb) {
+const rimraf_ = (p, options, cb) => {
assert(p)
assert(options)
assert(typeof cb === 'function')
// sunos lets the root user unlink directories, which is... weird.
// so we have to lstat here and make sure it's not a dir.
- options.lstat(p, function (er, st) {
+ options.lstat(p, (er, st) => {
if (er && er.code === "ENOENT")
return cb(null)
@@ -151,7 +144,7 @@ function rimraf_ (p, options, cb) {
if (st && st.isDirectory())
return rmdir(p, options, er, cb)
- options.unlink(p, function (er) {
+ options.unlink(p, er => {
if (er) {
if (er.code === "ENOENT")
return cb(null)
@@ -167,18 +160,16 @@ function rimraf_ (p, options, cb) {
})
}
-function fixWinEPERM (p, options, er, cb) {
+const fixWinEPERM = (p, options, er, cb) => {
assert(p)
assert(options)
assert(typeof cb === 'function')
- if (er)
- assert(er instanceof Error)
- options.chmod(p, _0666, function (er2) {
+ options.chmod(p, 0o666, er2 => {
if (er2)
cb(er2.code === "ENOENT" ? null : er)
else
- options.stat(p, function(er3, stats) {
+ options.stat(p, (er3, stats) => {
if (er3)
cb(er3.code === "ENOENT" ? null : er)
else if (stats.isDirectory())
@@ -189,14 +180,12 @@ function fixWinEPERM (p, options, er, cb) {
})
}
-function fixWinEPERMSync (p, options, er) {
+const fixWinEPERMSync = (p, options, er) => {
assert(p)
assert(options)
- if (er)
- assert(er instanceof Error)
try {
- options.chmodSync(p, _0666)
+ options.chmodSync(p, 0o666)
} catch (er2) {
if (er2.code === "ENOENT")
return
@@ -204,8 +193,9 @@ function fixWinEPERMSync (p, options, er) {
throw er
}
+ let stats
try {
- var stats = options.statSync(p)
+ stats = options.statSync(p)
} catch (er3) {
if (er3.code === "ENOENT")
return
@@ -219,17 +209,15 @@ function fixWinEPERMSync (p, options, er) {
options.unlinkSync(p)
}
-function rmdir (p, options, originalEr, cb) {
+const rmdir = (p, options, originalEr, cb) => {
assert(p)
assert(options)
- if (originalEr)
- assert(originalEr instanceof Error)
assert(typeof cb === 'function')
// try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
// if we guessed wrong, and it's not a directory, then
// raise the original error.
- options.rmdir(p, function (er) {
+ options.rmdir(p, er => {
if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM"))
rmkids(p, options, cb)
else if (er && er.code === "ENOTDIR")
@@ -239,20 +227,20 @@ function rmdir (p, options, originalEr, cb) {
})
}
-function rmkids(p, options, cb) {
+const rmkids = (p, options, cb) => {
assert(p)
assert(options)
assert(typeof cb === 'function')
- options.readdir(p, function (er, files) {
+ options.readdir(p, (er, files) => {
if (er)
return cb(er)
- var n = files.length
+ let n = files.length
if (n === 0)
return options.rmdir(p, cb)
- var errState
- files.forEach(function (f) {
- rimraf(path.join(p, f), options, function (er) {
+ let errState
+ files.forEach(f => {
+ rimraf(path.join(p, f), options, er => {
if (errState)
return
if (er)
@@ -267,7 +255,7 @@ function rmkids(p, options, cb) {
// this looks simpler, and is strictly *faster*, but will
// tie up the JavaScript thread and fail on excessively
// deep directory trees.
-function rimrafSync (p, options) {
+const rimrafSync = (p, options) => {
options = options || {}
defaults(options)
@@ -276,7 +264,7 @@ function rimrafSync (p, options) {
assert(options, 'rimraf: missing options')
assert.equal(typeof options, 'object', 'rimraf: options should be object')
- var results
+ let results
if (options.disableGlob || !glob.hasMagic(p)) {
results = [p]
@@ -292,11 +280,12 @@ function rimrafSync (p, options) {
if (!results.length)
return
- for (var i = 0; i < results.length; i++) {
- var p = results[i]
+ for (let i = 0; i < results.length; i++) {
+ const p = results[i]
+ let st
try {
- var st = options.lstatSync(p)
+ st = options.lstatSync(p)
} catch (er) {
if (er.code === "ENOENT")
return
@@ -325,11 +314,9 @@ function rimrafSync (p, options) {
}
}
-function rmdirSync (p, options, originalEr) {
+const rmdirSync = (p, options, originalEr) => {
assert(p)
assert(options)
- if (originalEr)
- assert(originalEr instanceof Error)
try {
options.rmdirSync(p)
@@ -343,12 +330,10 @@ function rmdirSync (p, options, originalEr) {
}
}
-function rmkidsSync (p, options) {
+const rmkidsSync = (p, options) => {
assert(p)
assert(options)
- options.readdirSync(p).forEach(function (f) {
- rimrafSync(path.join(p, f), options)
- })
+ options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))
// We only end up here once we got ENOTEMPTY at least once, and
// at this point, we are guaranteed to have removed all the kids.
@@ -356,12 +341,12 @@ function rmkidsSync (p, options) {
// try really hard to delete stuff on windows, because it has a
// PROFOUNDLY annoying habit of not closing handles promptly when
// files are deleted, resulting in spurious ENOTEMPTY errors.
- var retries = isWindows ? 100 : 1
- var i = 0
+ const retries = isWindows ? 100 : 1
+ let i = 0
do {
- var threw = true
+ let threw = true
try {
- var ret = options.rmdirSync(p, options)
+ const ret = options.rmdirSync(p, options)
threw = false
return ret
} finally {
@@ -370,3 +355,6 @@ function rmkidsSync (p, options) {
}
} while (true)
}
+
+module.exports = rimraf
+rimraf.sync = rimrafSync
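
The rimraf diff above is behavior-preserving modernization: `var` becomes `const`/`let`, named functions become arrows, and the exports move to the bottom. For reference, a usage sketch of the async and sync entry points; the paths are illustrative:

```js
const rimraf = require('rimraf');

// Async form: the callback receives null on success (ENOENT counts as
// success -- the thing you wanted gone is gone).
rimraf('/tmp/some-build-dir', { glob: false }, er => {
  if (er) throw er;
  console.log('gone');
});

// Sync form, with glob expansion left enabled (the default):
rimraf.sync('/tmp/some-build-dir-*');
```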
diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json
index 619c82e37..3a26dbabb 100644
--- a/node_modules/pacote/package.json
+++ b/node_modules/pacote/package.json
@@ -1,28 +1,31 @@
{
- "_from": "pacote@11.1",
- "_id": "pacote@11.1.0",
+ "_from": "pacote@latest",
+ "_id": "pacote@11.1.8",
"_inBundle": false,
- "_integrity": "sha512-JcMmHiK6h6rcncj2HLayiyJZg28iJXJafXcmEGw2NjKH3WE8ZgSwyMZs7+f+aliPD57PDhB31IEgUtLXp0YZxA==",
+ "_integrity": "sha512-oBXbdsqA0pnk4lNmWDHMm20uSbZiYCjE0MHeSFG3esxw1mubhSJizVc1rKVI0KMkYTMz4M7OTEqY1h2lx+wbPw==",
"_location": "/pacote",
- "_phantomChildren": {},
+ "_phantomChildren": {
+ "glob": "7.1.4"
+ },
"_requested": {
- "type": "range",
+ "type": "tag",
"registry": true,
- "raw": "pacote@11.1",
+ "raw": "pacote@latest",
"name": "pacote",
"escapedName": "pacote",
- "rawSpec": "11.1",
+ "rawSpec": "latest",
"saveSpec": null,
- "fetchSpec": "11.1"
+ "fetchSpec": "latest"
},
"_requiredBy": [
"#USER",
"/",
- "/@npmcli/arborist"
+ "/@npmcli/arborist",
+ "/libnpmpack"
],
- "_resolved": "https://registry.npmjs.org/pacote/-/pacote-11.1.0.tgz",
- "_shasum": "2285e37594c42588436549148a941ca6e282d425",
- "_spec": "pacote@11.1",
+ "_resolved": "https://registry.npmjs.org/pacote/-/pacote-11.1.8.tgz",
+ "_shasum": "e8136f14d30dc7ac7b01e93b91ef74fa92e6bd7c",
+ "_spec": "pacote@latest",
"_where": "/Users/isaacs/dev/npm/cli",
"author": {
"name": "Isaac Z. Schlueter",
@@ -37,27 +40,25 @@
},
"bundleDependencies": false,
"dependencies": {
+ "@npmcli/git": "^2.0.1",
"@npmcli/installed-package-contents": "^1.0.5",
- "cacache": "^15.0.0",
- "chownr": "^1.1.4",
+ "@npmcli/promise-spawn": "^1.2.0",
+ "@npmcli/run-script": "^1.3.0",
+ "cacache": "^15.0.3",
+ "chownr": "^2.0.0",
"fs-minipass": "^2.1.0",
"infer-owner": "^1.0.4",
- "lru-cache": "^5.1.1",
"minipass": "^3.0.1",
- "minipass-fetch": "^1.2.1",
"mkdirp": "^1.0.3",
- "npm-package-arg": "^8.0.0",
+ "npm-package-arg": "^8.0.1",
"npm-packlist": "^2.1.0",
"npm-pick-manifest": "^6.0.0",
"npm-registry-fetch": "^8.0.0",
- "osenv": "^0.1.5",
- "promise-inflight": "^1.0.1",
"promise-retry": "^1.1.1",
"read-package-json-fast": "^1.1.3",
- "semver": "^7.1.3",
+ "rimraf": "^3.0.2",
"ssri": "^8.0.0",
- "tar": "^6.0.1",
- "which": "^2.0.2"
+ "tar": "^6.0.1"
},
"deprecated": false,
"description": "JavaScript package downloader",
@@ -99,5 +100,5 @@
"coverage-map": "map.js",
"esm": false
},
- "version": "11.1.0"
+ "version": "11.1.8"
}
diff --git a/node_modules/semver/CHANGELOG.md b/node_modules/semver/CHANGELOG.md
index db6b6d4d5..220af176f 100644
--- a/node_modules/semver/CHANGELOG.md
+++ b/node_modules/semver/CHANGELOG.md
@@ -1,5 +1,31 @@
# changes log
+## 7.3.0
+
+* Add `subset(r1, r2)` method to determine if `r1` range is entirely
+ contained by `r2` range.
+
+## 7.2.3
+
+* Fix handling of `includePrerelease` mode where version ranges like `1.0.0 -
+ 2.0.0` would include `3.0.0-pre` and not `1.0.0-pre`.

+
+## 7.2.2
+
+* Fix bug where `2.0.0-pre` would be included in `^1.0.0` if
+ `includePrerelease` was set to true.
+
+## 7.2.0
+
+* Add `simplifyRange` method to attempt to generate a more human-readable
+ range expression that is equivalent to a supplied range, for a given set
+ of versions.
+
+## 7.1.2
+
+* Remove fancy lazy-loading logic, as it was causing problems for webpack
+ users.
+
## 7.1.0
* Add `require('semver/preload')` to load the entire module without using
diff --git a/node_modules/semver/README.md b/node_modules/semver/README.md
index 9ba40454c..9bef045af 100644
--- a/node_modules/semver/README.md
+++ b/node_modules/semver/README.md
@@ -78,6 +78,8 @@ const semverOutside = require('semver/ranges/outside')
const semverGtr = require('semver/ranges/gtr')
const semverLtr = require('semver/ranges/ltr')
const semverIntersects = require('semver/ranges/intersects')
+const simplifyRange = require('semver/ranges/simplify')
+const rangeSubset = require('semver/ranges/subset')
```
As a command-line utility:
@@ -254,8 +256,8 @@ inclusive range, then all versions that start with the supplied parts
of the tuple are accepted, but nothing that would be greater than the
provided tuple parts.
-* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0`
-* `1.2.3 - 2` := `>=1.2.3 <3.0.0`
+* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0-0`
+* `1.2.3 - 2` := `>=1.2.3 <3.0.0-0`
#### X-Ranges `1.2.x` `1.X` `1.2.*` `*`
@@ -263,28 +265,28 @@ Any of `X`, `x`, or `*` may be used to "stand in" for one of the
numeric values in the `[major, minor, patch]` tuple.
* `*` := `>=0.0.0` (Any version satisfies)
-* `1.x` := `>=1.0.0 <2.0.0` (Matching major version)
-* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions)
+* `1.x` := `>=1.0.0 <2.0.0-0` (Matching major version)
+* `1.2.x` := `>=1.2.0 <1.3.0-0` (Matching major and minor versions)
A partial version range is treated as an X-Range, so the special
character is in fact optional.
* `""` (empty string) := `*` := `>=0.0.0`
-* `1` := `1.x.x` := `>=1.0.0 <2.0.0`
-* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0`
+* `1` := `1.x.x` := `>=1.0.0 <2.0.0-0`
+* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0-0`
#### Tilde Ranges `~1.2.3` `~1.2` `~1`
Allows patch-level changes if a minor version is specified on the
comparator. Allows minor-level changes if not.
-* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0`
-* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`)
-* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`)
-* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0`
-* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`)
-* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`)
-* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in
+* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0-0`
+* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0-0` (Same as `1.2.x`)
+* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0-0` (Same as `1.x`)
+* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0-0`
+* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0-0` (Same as `0.2.x`)
+* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0-0` (Same as `0.x`)
+* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0-0` Note that prereleases in
the `1.2.3` version will be allowed, if they are greater than or
equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
`1.2.4-beta.2` would not, because it is a prerelease of a
@@ -306,15 +308,15 @@ However, it presumes that there will *not* be breaking changes between
`0.2.4` and `0.2.5`. It allows for changes that are presumed to be
additive (but non-breaking), according to commonly observed practices.
-* `^1.2.3` := `>=1.2.3 <2.0.0`
-* `^0.2.3` := `>=0.2.3 <0.3.0`
-* `^0.0.3` := `>=0.0.3 <0.0.4`
-* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in
+* `^1.2.3` := `>=1.2.3 <2.0.0-0`
+* `^0.2.3` := `>=0.2.3 <0.3.0-0`
+* `^0.0.3` := `>=0.0.3 <0.0.4-0`
+* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0-0` Note that prereleases in
the `1.2.3` version will be allowed, if they are greater than or
equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
`1.2.4-beta.2` would not, because it is a prerelease of a
different `[major, minor, patch]` tuple.
-* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the
+* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4-0` Note that prereleases in the
`0.0.3` version *only* will be allowed, if they are greater than or
equal to `beta`. So, `0.0.3-pr.2` would be allowed.
@@ -322,16 +324,16 @@ When parsing caret ranges, a missing `patch` value desugars to the
number `0`, but will allow flexibility within that value, even if the
major and minor versions are both `0`.
-* `^1.2.x` := `>=1.2.0 <2.0.0`
-* `^0.0.x` := `>=0.0.0 <0.1.0`
-* `^0.0` := `>=0.0.0 <0.1.0`
+* `^1.2.x` := `>=1.2.0 <2.0.0-0`
+* `^0.0.x` := `>=0.0.0 <0.1.0-0`
+* `^0.0` := `>=0.0.0 <0.1.0-0`
Missing `minor` and `patch` values will desugar to zero, but also
allow flexibility within those values, even if the major version is
zero.
-* `^1.x` := `>=1.0.0 <2.0.0`
-* `^0.x` := `>=0.0.0 <1.0.0`
+* `^1.x` := `>=1.0.0 <2.0.0-0`
+* `^0.x` := `>=0.0.0 <1.0.0-0`
### Range Grammar
@@ -446,6 +448,16 @@ strings that they parse.
`hilo` argument must be either the string `'>'` or `'<'`. (This is
the function called by `gtr` and `ltr`.)
* `intersects(range)`: Return true if any of the range's comparators intersect
+* `simplifyRange(versions, range)`: Return a "simplified" range that
+ matches the same items in `versions` list as the range specified. Note
+ that it does *not* guarantee that it would match the same versions in all
+ cases, only for the set of versions provided. This is useful when
+ generating ranges by joining together multiple versions with `||`
+ programmatically, to provide the user with something a bit more
+ ergonomic. If the provided range is shorter in string-length than the
+ generated range, then that is returned.
+* `subset(subRange, superRange)`: Return `true` if the `subRange` range is
+ entirely contained by the `superRange` range.
Note that, since ranges may be non-contiguous, a version might not be
greater than a range, less than a range, *or* satisfy a range! For
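
A short usage sketch of the two additions documented above, `subset` and `simplifyRange`; the version list is illustrative:

```js
const semver = require('semver');

// subset: is every version matched by the first range also matched by
// the second?
console.log(semver.subset('^1.2.3', '1.x'));    // true
console.log(semver.subset('^1.2.3', '~1.2.0')); // false (^1.2.3 admits 1.4.0)

// simplifyRange: collapse a ||-joined range into something more readable,
// equivalent only over the supplied version list.
const versions = ['1.0.0', '1.1.0', '1.2.0', '2.0.0'];
console.log(semver.simplifyRange(versions, '1.0.0 || 1.1.0 || 1.2.0'));
// e.g. '<=1.2.0'
```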
diff --git a/node_modules/semver/classes/range.js b/node_modules/semver/classes/range.js
index 90876c389..83f896771 100644
--- a/node_modules/semver/classes/range.js
+++ b/node_modules/semver/classes/range.js
@@ -68,7 +68,7 @@ class Range {
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
- range = range.replace(hr, hyphenReplace)
+ range = range.replace(hr, hyphenReplace(this.options.includePrerelease))
debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
@@ -92,6 +92,7 @@ class Range {
.map(comp => parseComparator(comp, this.options))
.join(' ')
.split(/\s+/)
+ .map(comp => replaceGTE0(comp, this.options))
// in loose mode, throw out any that are not valid comparators
.filter(this.options.loose ? comp => !!comp.match(compRe) : () => true)
.map(comp => new Comparator(comp, this.options))
@@ -191,11 +192,11 @@ const parseComparator = (comp, options) => {
const isX = id => !id || id.toLowerCase() === 'x' || id === '*'
// ~, ~> --> * (any, kinda silly)
-// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
-// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
-// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
-// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
-// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
+// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x, ~>2.x.x --> >=2.0.0 <3.0.0-0
+// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0
+// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0
+// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0
+// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0
const replaceTildes = (comp, options) =>
comp.trim().split(/\s+/).map((comp) => {
return replaceTilde(comp, options)
@@ -210,18 +211,18 @@ const replaceTilde = (comp, options) => {
if (isX(M)) {
ret = ''
} else if (isX(m)) {
- ret = `>=${M}.0.0 <${+M + 1}.0.0`
+ ret = `>=${M}.0.0 <${+M + 1}.0.0-0`
} else if (isX(p)) {
- // ~1.2 == >=1.2.0 <1.3.0
- ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0`
+ // ~1.2 == >=1.2.0 <1.3.0-0
+ ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0`
} else if (pr) {
debug('replaceTilde pr', pr)
ret = `>=${M}.${m}.${p}-${pr
- } <${M}.${+m + 1}.0`
+ } <${M}.${+m + 1}.0-0`
} else {
- // ~1.2.3 == >=1.2.3 <1.3.0
+ // ~1.2.3 == >=1.2.3 <1.3.0-0
ret = `>=${M}.${m}.${p
- } <${M}.${+m + 1}.0`
+ } <${M}.${+m + 1}.0-0`
}
debug('tilde return', ret)
@@ -230,11 +231,11 @@ const replaceTilde = (comp, options) => {
}
// ^ --> * (any, kinda silly)
-// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
-// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
-// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
-// ^1.2.3 --> >=1.2.3 <2.0.0
-// ^1.2.0 --> >=1.2.0 <2.0.0
+// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0
+// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0
+// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0
+// ^1.2.3 --> >=1.2.3 <2.0.0-0
+// ^1.2.0 --> >=1.2.0 <2.0.0-0
const replaceCarets = (comp, options) =>
comp.trim().split(/\s+/).map((comp) => {
return replaceCaret(comp, options)
@@ -243,6 +244,7 @@ const replaceCarets = (comp, options) =>
const replaceCaret = (comp, options) => {
debug('caret', comp, options)
const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
+ const z = options.includePrerelease ? '-0' : ''
return comp.replace(r, (_, M, m, p, pr) => {
debug('caret', comp, _, M, m, p, pr)
let ret
@@ -250,40 +252,40 @@ const replaceCaret = (comp, options) => {
if (isX(M)) {
ret = ''
} else if (isX(m)) {
- ret = `>=${M}.0.0 <${+M + 1}.0.0`
+ ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0`
} else if (isX(p)) {
if (M === '0') {
- ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0`
+ ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0`
} else {
- ret = `>=${M}.${m}.0 <${+M + 1}.0.0`
+ ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0`
}
} else if (pr) {
debug('replaceCaret pr', pr)
if (M === '0') {
if (m === '0') {
ret = `>=${M}.${m}.${p}-${pr
- } <${M}.${m}.${+p + 1}`
+ } <${M}.${m}.${+p + 1}-0`
} else {
ret = `>=${M}.${m}.${p}-${pr
- } <${M}.${+m + 1}.0`
+ } <${M}.${+m + 1}.0-0`
}
} else {
ret = `>=${M}.${m}.${p}-${pr
- } <${+M + 1}.0.0`
+ } <${+M + 1}.0.0-0`
}
} else {
debug('no pr')
if (M === '0') {
if (m === '0') {
ret = `>=${M}.${m}.${p
- } <${M}.${m}.${+p + 1}`
+ }${z} <${M}.${m}.${+p + 1}-0`
} else {
ret = `>=${M}.${m}.${p
- } <${M}.${+m + 1}.0`
+ }${z} <${M}.${+m + 1}.0-0`
}
} else {
ret = `>=${M}.${m}.${p
- } <${+M + 1}.0.0`
+ } <${+M + 1}.0.0-0`
}
}
@@ -356,12 +358,15 @@ const replaceXRange = (comp, options) => {
}
}
+ if (gtlt === '<')
+ pr = '-0'
+
ret = `${gtlt + M}.${m}.${p}${pr}`
} else if (xm) {
- ret = `>=${M}.0.0${pr} <${+M + 1}.0.0${pr}`
+ ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0`
} else if (xp) {
ret = `>=${M}.${m}.0${pr
- } <${M}.${+m + 1}.0${pr}`
+ } <${M}.${+m + 1}.0-0`
}
debug('xRange return', ret)
@@ -378,32 +383,42 @@ const replaceStars = (comp, options) => {
return comp.trim().replace(re[t.STAR], '')
}
+const replaceGTE0 = (comp, options) => {
+ debug('replaceGTE0', comp, options)
+ return comp.trim()
+ .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '')
+}
+
// This function is passed to string.replace(re[t.HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
-// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
-// 1.2 - 3.4 => >=1.2.0 <3.5.0
-const hyphenReplace = ($0,
+// 1.2.3 - 3.4 => >=1.2.3 <3.5.0-0 Any 3.4.x will do
+// 1.2 - 3.4 => >=1.2.0 <3.5.0-0
+const hyphenReplace = incPr => ($0,
from, fM, fm, fp, fpr, fb,
to, tM, tm, tp, tpr, tb) => {
if (isX(fM)) {
from = ''
} else if (isX(fm)) {
- from = `>=${fM}.0.0`
+ from = `>=${fM}.0.0${incPr ? '-0' : ''}`
} else if (isX(fp)) {
- from = `>=${fM}.${fm}.0`
- } else {
+ from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}`
+ } else if (fpr) {
from = `>=${from}`
+ } else {
+ from = `>=${from}${incPr ? '-0' : ''}`
}
if (isX(tM)) {
to = ''
} else if (isX(tm)) {
- to = `<${+tM + 1}.0.0`
+ to = `<${+tM + 1}.0.0-0`
} else if (isX(tp)) {
- to = `<${tM}.${+tm + 1}.0`
+ to = `<${tM}.${+tm + 1}.0-0`
} else if (tpr) {
to = `<=${tM}.${tm}.${tp}-${tpr}`
+ } else if (incPr) {
+ to = `<${tM}.${tm}.${+tp + 1}-0`
} else {
to = `<=${to}`
}
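Taken together, the rewritten desugaring helpers produce the new `-0` upper bounds, and `hyphenReplace` (now curried on `includePrerelease`) widens both ends of a hyphen range when that option is set. A sketch of the observable effect in semver@7.3.2:

```js
const { Range } = require('semver')

new Range('~1.2.3').range  // => '>=1.2.3 <1.3.0-0'
new Range('^1.2.3').range  // => '>=1.2.3 <2.0.0-0'

new Range('1.2.3 - 2.3.4').range                               // => '>=1.2.3 <=2.3.4'
new Range('1.2.3 - 2.3.4', { includePrerelease: true }).range  // => '>=1.2.3-0 <2.3.5-0'
```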
diff --git a/node_modules/semver/index.js b/node_modules/semver/index.js
index 3a0b4e52c..57e2ae649 100644
--- a/node_modules/semver/index.js
+++ b/node_modules/semver/index.js
@@ -43,4 +43,6 @@ module.exports = {
gtr: require('./ranges/gtr'),
ltr: require('./ranges/ltr'),
intersects: require('./ranges/intersects'),
+ simplifyRange: require('./ranges/simplify'),
+ subset: require('./ranges/subset'),
}
diff --git a/node_modules/semver/internal/re.js b/node_modules/semver/internal/re.js
index 0e8fb5289..54d4176de 100644
--- a/node_modules/semver/internal/re.js
+++ b/node_modules/semver/internal/re.js
@@ -177,3 +177,6 @@ createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` +
// Star ranges basically just allow anything at all.
createToken('STAR', '(<|>)?=?\\s*\\*')
+// >=0.0.0 is like a star
+createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$')
+createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$')
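The effect of the two new tokens, fed through the `replaceGTE0` step added to `Range#parseRange` above: a `>=0.0.0` comparator collapses to the empty any-range, exactly like `*`. A small sketch:

```js
const { Range } = require('semver')

new Range('*').range                                       // => ''
new Range('>=0.0.0').range                                 // => ''
new Range('>=0.0.0-0', { includePrerelease: true }).range  // => ''
```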
diff --git a/node_modules/semver/package.json b/node_modules/semver/package.json
index 03a9c1c38..31f76d1b0 100644
--- a/node_modules/semver/package.json
+++ b/node_modules/semver/package.json
@@ -1,33 +1,35 @@
{
- "_from": "semver@7",
- "_id": "semver@7.1.3",
+ "_from": "semver@latest",
+ "_id": "semver@7.3.2",
"_inBundle": false,
- "_integrity": "sha512-ekM0zfiA9SCBlsKa2X1hxyxiI4L3B6EbVJkkdgQXnSEEaHlGdvyodMruTiulSRWMMB4NeIuYNMC9rTKTz97GxA==",
+ "_integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==",
"_location": "/semver",
"_phantomChildren": {},
"_requested": {
- "type": "range",
+ "type": "tag",
"registry": true,
- "raw": "semver@7",
+ "raw": "semver@latest",
"name": "semver",
"escapedName": "semver",
- "rawSpec": "7",
+ "rawSpec": "latest",
"saveSpec": null,
- "fetchSpec": "7"
+ "fetchSpec": "latest"
},
"_requiredBy": [
"#USER",
"/",
"/@npmcli/arborist",
"/@npmcli/arborist/npm-install-checks",
+ "/@npmcli/arborist/npm-pick-manifest",
+ "/libnpmpublish",
+ "/libnpmversion",
"/npm-package-arg",
"/npm-pick-manifest",
- "/npm-pick-manifest/npm-install-checks",
- "/pacote"
+ "/npm-pick-manifest/npm-install-checks"
],
- "_resolved": "https://registry.npmjs.org/semver/-/semver-7.1.3.tgz",
- "_shasum": "e4345ce73071c53f336445cfc19efb1c311df2a6",
- "_spec": "semver@7",
+ "_resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz",
+ "_shasum": "604962b052b81ed0786aae84389ffba70ffd3938",
+ "_spec": "semver@latest",
"_where": "/Users/isaacs/dev/npm/cli",
"bin": {
"semver": "bin/semver.js"
@@ -39,18 +41,18 @@
"deprecated": false,
"description": "The semantic version parser used by npm.",
"devDependencies": {
- "tap": "^14.10.2"
+ "tap": "^14.10.7"
},
"engines": {
"node": ">=10"
},
"files": [
- "bin",
+ "bin/**/*.js",
"range.bnf",
- "classes",
- "functions",
- "internal",
- "ranges",
+ "classes/**/*.js",
+ "functions/**/*.js",
+ "internal/**/*.js",
+ "ranges/**/*.js",
"index.js",
"preload.js"
],
@@ -73,5 +75,5 @@
"check-coverage": true,
"coverage-map": "map.js"
},
- "version": "7.1.3"
+ "version": "7.3.2"
}
diff --git a/node_modules/semver/ranges/simplify.js b/node_modules/semver/ranges/simplify.js
new file mode 100644
index 000000000..b792f9729
--- /dev/null
+++ b/node_modules/semver/ranges/simplify.js
@@ -0,0 +1,44 @@
+// given a set of versions and a range, create a "simplified" range
+// that includes the same versions that the original range does
+// If the original range is shorter than the simplified one, return that.
+const satisfies = require('../functions/satisfies.js')
+const compare = require('../functions/compare.js')
+module.exports = (versions, range, options) => {
+ const set = []
+ let min = null
+ let prev = null
+ const v = versions.sort((a, b) => compare(a, b, options))
+ for (const version of v) {
+ const included = satisfies(version, range, options)
+ if (included) {
+ prev = version
+ if (!min)
+ min = version
+ } else {
+ if (prev) {
+ set.push([min, prev])
+ }
+ prev = null
+ min = null
+ }
+ }
+ if (min)
+ set.push([min, null])
+
+ const ranges = []
+ for (const [min, max] of set) {
+ if (min === max)
+ ranges.push(min)
+ else if (!max && min === v[0])
+ ranges.push('*')
+ else if (!max)
+ ranges.push(`>=${min}`)
+ else if (min === v[0])
+ ranges.push(`<=${max}`)
+ else
+ ranges.push(`${min} - ${max}`)
+ }
+ const simplified = ranges.join(' || ')
+ const original = typeof range.raw === 'string' ? range.raw : String(range)
+ return simplified.length < original.length ? simplified : range
+}
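The loop above collects contiguous runs of satisfying versions into `[min, max]` pairs, then renders each run as the shortest comparator it can. A hypothetical list with a gap in the middle shows the run handling:

```js
const semver = require('semver')

const versions = ['1.0.0', '1.0.1', '1.0.2', '1.0.3']
semver.simplifyRange(versions, '1.0.0 || 1.0.1 || 1.0.3')
// => '<=1.0.1 || >=1.0.3'
// The run starting at the lowest known version renders as '<=max', and
// the run ending at the highest renders as '>=min'.
```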
diff --git a/node_modules/semver/ranges/subset.js b/node_modules/semver/ranges/subset.js
new file mode 100644
index 000000000..6ae29a3c9
--- /dev/null
+++ b/node_modules/semver/ranges/subset.js
@@ -0,0 +1,155 @@
+const Range = require('../classes/range.js')
+const { ANY } = require('../classes/comparator.js')
+const satisfies = require('../functions/satisfies.js')
+const compare = require('../functions/compare.js')
+
+// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff:
+// - Every simple range `r1, r2, ...` is a subset of some `R1, R2, ...`
+//
+// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff:
+// - If c is only the ANY comparator
+// - If C is only the ANY comparator, return true
+// - Else return false
+// - Let EQ be the set of = comparators in c
+// - If EQ contains more than one version, return true (null set)
+// - Let GT be the highest > or >= comparator in c
+// - Let LT be the lowest < or <= comparator in c
+// - If GT and LT, and GT.semver > LT.semver, return true (null set)
+// - If EQ
+// - If GT, and EQ does not satisfy GT, return true (null set)
+// - If LT, and EQ does not satisfy LT, return true (null set)
+// - If EQ satisfies every C, return true
+// - Else return false
+// - If GT
+// - If GT is lower than any > or >= comp in C, return false
+// - If GT is >=, and GT.semver does not satisfy every C, return false
+// - If LT
+// - If LT.semver is greater than that of any > comp in C, return false
+// - If LT is <=, and LT.semver does not satisfy every C, return false
+// - If any C is a = range, and GT or LT are set, return false
+// - Else return true
+
+const subset = (sub, dom, options) => {
+ sub = new Range(sub, options)
+ dom = new Range(dom, options)
+ let sawNonNull = false
+
+ OUTER: for (const simpleSub of sub.set) {
+ for (const simpleDom of dom.set) {
+ const isSub = simpleSubset(simpleSub, simpleDom, options)
+ sawNonNull = sawNonNull || isSub !== null
+ if (isSub)
+ continue OUTER
+ }
+ // the null set is a subset of everything, but null simple ranges in
+ // a complex range should be ignored. so if we saw a non-null range,
+ // then we know this isn't a subset, but if EVERY simple range was null,
+ // then it is a subset.
+ if (sawNonNull)
+ return false
+ }
+ return true
+}
+
+const simpleSubset = (sub, dom, options) => {
+ if (sub.length === 1 && sub[0].semver === ANY)
+ return dom.length === 1 && dom[0].semver === ANY
+
+ const eqSet = new Set()
+ let gt, lt
+ for (const c of sub) {
+ if (c.operator === '>' || c.operator === '>=')
+ gt = higherGT(gt, c, options)
+ else if (c.operator === '<' || c.operator === '<=')
+ lt = lowerLT(lt, c, options)
+ else
+ eqSet.add(c.semver)
+ }
+
+ if (eqSet.size > 1)
+ return null
+
+ let gtltComp
+ if (gt && lt) {
+ gtltComp = compare(gt.semver, lt.semver, options)
+ if (gtltComp > 0)
+ return null
+ else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<='))
+ return null
+ }
+
+ // will iterate one or zero times
+ for (const eq of eqSet) {
+ if (gt && !satisfies(eq, String(gt), options))
+ return null
+
+ if (lt && !satisfies(eq, String(lt), options))
+ return null
+
+ for (const c of dom) {
+ if (!satisfies(eq, String(c), options))
+ return false
+ }
+ return true
+ }
+
+ let higher, lower
+ let hasDomLT, hasDomGT
+ for (const c of dom) {
+ hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>='
+ hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<='
+ if (gt) {
+ if (c.operator === '>' || c.operator === '>=') {
+ higher = higherGT(gt, c, options)
+ if (higher === c)
+ return false
+ } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options))
+ return false
+ }
+ if (lt) {
+ if (c.operator === '<' || c.operator === '<=') {
+ lower = lowerLT(lt, c, options)
+ if (lower === c)
+ return false
+ } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options))
+ return false
+ }
+ if (!c.operator && (lt || gt) && gtltComp !== 0)
+ return false
+ }
+
+  // if there was a < or >, and nothing in the dom, then it must be false
+ // UNLESS it was limited by another range in the other direction.
+ // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0
+ if (gt && hasDomLT && !lt && gtltComp !== 0)
+ return false
+
+ if (lt && hasDomGT && !gt && gtltComp !== 0)
+ return false
+
+ return true
+}
+
+// >=1.2.3 is lower than >1.2.3
+const higherGT = (a, b, options) => {
+ if (!a)
+ return b
+ const comp = compare(a.semver, b.semver, options)
+ return comp > 0 ? a
+ : comp < 0 ? b
+ : b.operator === '>' && a.operator === '>=' ? b
+ : a
+}
+
+// <=1.2.3 is higher than <1.2.3
+const lowerLT = (a, b, options) => {
+ if (!a)
+ return b
+ const comp = compare(a.semver, b.semver, options)
+ return comp < 0 ? a
+ : comp > 0 ? b
+ : b.operator === '<' && a.operator === '<=' ? b
+ : a
+}
+
+module.exports = subset
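Usage of the new export, including the null-set behavior described in the comments above (an impossible range is a subset of everything):

```js
const semver = require('semver')

semver.subset('^1.2.3', '^1.0.0')        // => true
semver.subset('^1.2.3', '~1.2.3')        // => false (^1.2.3 allows 1.3.0)
semver.subset('>1.2.3 <1.2.3', '1.0.0')  // => true (null set)
```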