Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/npm/cli.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorisaacs <i@izs.me>2020-06-04 01:07:00 +0300
committerisaacs <i@izs.me>2020-06-04 01:08:48 +0300
commit1f9cfdac2eda3bd4a3b1427fc450a05811f353fe (patch)
treef4ceaaad2afb9bd35d8096356421015605002ce3 /node_modules
parentf1724f457a9e84c7f5c89b6439ef0ab6f482c3c6 (diff)
update @npmcli/arborist, pacote, cacache
Also some hand-crafted deduping of various deps that got nested as a result. Really excited to start self-installing again soon. @npmcli/arborist is a whole lot smarter about pruning unnecessary duplicate modules in the normal course of installation.
Diffstat (limited to 'node_modules')
-rw-r--r--node_modules/@npmcli/arborist/README.md6
-rw-r--r--node_modules/@npmcli/arborist/lib/arborist/audit.js2
-rw-r--r--node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js165
-rw-r--r--node_modules/@npmcli/arborist/lib/arborist/deduper.js15
-rw-r--r--node_modules/@npmcli/arborist/lib/arborist/index.js28
-rw-r--r--node_modules/@npmcli/arborist/lib/arborist/load-actual.js257
-rw-r--r--node_modules/@npmcli/arborist/lib/arborist/load-virtual.js64
-rw-r--r--node_modules/@npmcli/arborist/lib/arborist/pruner.js12
-rw-r--r--node_modules/@npmcli/arborist/lib/arborist/rebuild.js252
-rw-r--r--node_modules/@npmcli/arborist/lib/arborist/reify.js262
-rw-r--r--node_modules/@npmcli/arborist/lib/calc-dep-flags.js12
-rw-r--r--node_modules/@npmcli/arborist/lib/consistent-resolve.js7
-rw-r--r--node_modules/@npmcli/arborist/lib/dep-valid.js31
-rw-r--r--node_modules/@npmcli/arborist/lib/edge.js16
-rw-r--r--node_modules/@npmcli/arborist/lib/from-path.js13
-rw-r--r--node_modules/@npmcli/arborist/lib/inventory.js2
-rw-r--r--node_modules/@npmcli/arborist/lib/node.js176
-rw-r--r--node_modules/@npmcli/arborist/lib/shrinkwrap.js390
-rw-r--r--node_modules/@npmcli/arborist/lib/yarn-lock.js3
-rw-r--r--node_modules/@npmcli/arborist/package.json17
-rw-r--r--node_modules/@npmcli/move-file/LICENSE.md22
-rw-r--r--node_modules/@npmcli/move-file/README.md68
-rw-r--r--node_modules/@npmcli/move-file/index.js93
l---------node_modules/@npmcli/move-file/node_modules/.bin/mkdirp1
-rw-r--r--node_modules/@npmcli/move-file/node_modules/mkdirp/CHANGELOG.md15
-rw-r--r--node_modules/@npmcli/move-file/node_modules/mkdirp/LICENSE (renamed from node_modules/string_decoder/node_modules/safe-buffer/LICENSE)4
-rwxr-xr-xnode_modules/@npmcli/move-file/node_modules/mkdirp/bin/cmd.js68
-rw-r--r--node_modules/@npmcli/move-file/node_modules/mkdirp/index.js31
-rw-r--r--node_modules/@npmcli/move-file/node_modules/mkdirp/lib/find-made.js29
-rw-r--r--node_modules/@npmcli/move-file/node_modules/mkdirp/lib/mkdirp-manual.js64
-rw-r--r--node_modules/@npmcli/move-file/node_modules/mkdirp/lib/mkdirp-native.js39
-rw-r--r--node_modules/@npmcli/move-file/node_modules/mkdirp/lib/opts-arg.js23
-rw-r--r--node_modules/@npmcli/move-file/node_modules/mkdirp/lib/path-arg.js29
-rw-r--r--node_modules/@npmcli/move-file/node_modules/mkdirp/lib/use-native.js10
-rw-r--r--node_modules/@npmcli/move-file/node_modules/mkdirp/package.json75
-rw-r--r--node_modules/@npmcli/move-file/node_modules/mkdirp/readme.markdown266
-rw-r--r--node_modules/@npmcli/move-file/package.json63
-rw-r--r--node_modules/cacache/CHANGELOG.md7
-rw-r--r--node_modules/cacache/lib/util/move-file.js2
-rw-r--r--node_modules/cacache/package.json18
-rw-r--r--node_modules/common-ancestor-path/LICENSE15
-rw-r--r--node_modules/common-ancestor-path/README.md28
-rw-r--r--node_modules/common-ancestor-path/index.js17
-rw-r--r--node_modules/common-ancestor-path/package.json61
-rw-r--r--node_modules/make-fetch-happen/cache.js34
-rw-r--r--node_modules/make-fetch-happen/package.json30
-rw-r--r--node_modules/minipass/README.md9
-rw-r--r--node_modules/minipass/index.js35
-rw-r--r--node_modules/minipass/package.json15
-rw-r--r--node_modules/mkdirp/node_modules/minimist/.travis.yml8
-rw-r--r--node_modules/mkdirp/node_modules/minimist/LICENSE18
-rw-r--r--node_modules/mkdirp/node_modules/minimist/example/parse.js2
-rw-r--r--node_modules/mkdirp/node_modules/minimist/index.js245
-rw-r--r--node_modules/mkdirp/node_modules/minimist/package.json73
-rw-r--r--node_modules/mkdirp/node_modules/minimist/readme.markdown95
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/all_bool.js32
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/bool.js178
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/dash.js31
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/default_bool.js35
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/dotted.js22
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/kv_short.js16
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/long.js31
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/num.js36
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/parse.js197
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/parse_modified.js9
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/proto.js44
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/short.js67
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/stop_early.js15
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/unknown.js102
-rw-r--r--node_modules/mkdirp/node_modules/minimist/test/whitespace.js8
-rw-r--r--node_modules/move-file/index.d.ts41
-rw-r--r--node_modules/move-file/index.js62
-rw-r--r--node_modules/move-file/license9
-rw-r--r--node_modules/move-file/node_modules/path-exists/index.d.ts28
-rw-r--r--node_modules/move-file/node_modules/path-exists/index.js23
-rw-r--r--node_modules/move-file/node_modules/path-exists/license9
-rw-r--r--node_modules/move-file/node_modules/path-exists/package.json71
-rw-r--r--node_modules/move-file/node_modules/path-exists/readme.md52
-rw-r--r--node_modules/move-file/package.json82
-rw-r--r--node_modules/move-file/readme.md67
-rw-r--r--node_modules/npm-registry-fetch/CHANGELOG.md14
-rw-r--r--node_modules/npm-registry-fetch/README.md9
-rw-r--r--node_modules/npm-registry-fetch/index.js4
-rw-r--r--node_modules/npm-registry-fetch/package.json29
-rw-r--r--node_modules/pacote/package.json14
-rw-r--r--node_modules/string_decoder/node_modules/safe-buffer/README.md586
-rw-r--r--node_modules/string_decoder/node_modules/safe-buffer/index.d.ts187
-rw-r--r--node_modules/string_decoder/node_modules/safe-buffer/index.js64
-rw-r--r--node_modules/string_decoder/node_modules/safe-buffer/package.json62
-rw-r--r--node_modules/walk-up-path/LICENSE15
-rw-r--r--node_modules/walk-up-path/README.md46
-rw-r--r--node_modules/walk-up-path/index.js11
-rw-r--r--node_modules/walk-up-path/package.json61
93 files changed, 2430 insertions, 3281 deletions
diff --git a/node_modules/@npmcli/arborist/README.md b/node_modules/@npmcli/arborist/README.md
index 0a7f07afa..a760156b8 100644
--- a/node_modules/@npmcli/arborist/README.md
+++ b/node_modules/@npmcli/arborist/README.md
@@ -296,7 +296,7 @@ pruning nodes from the tree.
| | | | | | peer dependencies | |
|------------+------+-----+----------+-------------+---------------------+-------------------|
| | X | X | | X | peer dependency of | if pruning peer |
-| | | | | not in lock | dev node heirarchy | OR dev deps |
+| | | | | not in lock | dev node hierarchy | OR dev deps |
|------------+------+-----+----------+-------------+---------------------+-------------------|
| | X | | X | X | peer dependency of | if pruning peer |
| | | | | not in lock | optional nodes, or | OR optional deps |
@@ -304,11 +304,11 @@ pruning nodes from the tree.
|------------+------+-----+----------+-------------+---------------------+-------------------|
| | X | X | X | X | peer optional deps | if pruning peer |
| | | | | not in lock | of the dev dep | OR optional OR |
-| | | | | | heirarchy | dev |
+| | | | | | hierarchy | dev |
|------------+------+-----+----------+-------------+---------------------+-------------------|
| | X | | | X | BOTH a non-optional | if pruning peers |
| | | | | in lock | peer dep within the | OR: |
-| | | | | | dev heirarchy, AND | BOTH optional |
+| | | | | | dev hierarchy, AND | BOTH optional |
| | | | | | a peer optional dep | AND dev deps |
+------------+------+-----+----------+-------------+---------------------+-------------------+
```
diff --git a/node_modules/@npmcli/arborist/lib/arborist/audit.js b/node_modules/@npmcli/arborist/lib/arborist/audit.js
index 7e860845a..a008c68d3 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/audit.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/audit.js
@@ -6,7 +6,7 @@ const AuditReport = require('../audit-report.js')
// shared with reify
const _global = Symbol.for('global')
-module.exports = cls => class Auditor extends Reifier(cls) {
+module.exports = cls => class Auditor extends cls {
async audit (options = {}) {
if (this[_global])
throw Object.assign(
diff --git a/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
index a4dc0e0a7..880b698f3 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js
@@ -3,13 +3,14 @@
const rpj = require('read-package-json-fast')
const npa = require('npm-package-arg')
const pacote = require('pacote')
+const cacache = require('cacache')
const semver = require('semver')
const pickManifest = require('npm-pick-manifest')
const mapWorkspaces = require('@npmcli/map-workspaces')
+const fromPath = require('../from-path.js')
const calcDepFlags = require('../calc-dep-flags.js')
const Shrinkwrap = require('../shrinkwrap.js')
-const Tracker = require('../tracker.js')
const Node = require('../node.js')
const Link = require('../link.js')
const addRmPkgDeps = require('../add-rm-pkg-deps.js')
@@ -26,9 +27,11 @@ const KEEP = Symbol('KEEP')
// Yes, clobber the package that is already here
const REPLACE = Symbol('REPLACE')
-const {resolve} = require('path')
const relpath = require('../relpath.js')
+// note: some of these symbols are shared so we can hit
+// them with unit tests and reuse them across mixins
+const _complete = Symbol('complete')
const _depsSeen = Symbol('depsSeen')
const _depsQueue = Symbol('depsQueue')
const _currentDep = Symbol('currentDep')
@@ -50,7 +53,6 @@ const _manifests = Symbol('manifests')
const _mapWorkspaces = Symbol('mapWorkspaces')
const _linkFromSpec = Symbol('linkFromSpec')
const _loadPeerSet = Symbol('loadPeerSet')
-// shared symbols so we can hit them with unit tests
const _updateNames = Symbol.for('updateNames')
const _placeDep = Symbol.for('placeDep')
const _canPlaceDep = Symbol.for('canPlaceDep')
@@ -73,6 +75,7 @@ const _follow = Symbol('follow')
const _globalStyle = Symbol('globalStyle')
const _globalRootNode = Symbol('globalRootNode')
const _isVulnerable = Symbol.for('isVulnerable')
+const _usePackageLock = Symbol.for('usePackageLock')
// used by Reify mixin
const _force = Symbol.for('force')
@@ -80,10 +83,7 @@ const _explicitRequests = Symbol.for('explicitRequests')
const _global = Symbol.for('global')
const _idealTreePrune = Symbol.for('idealTreePrune')
-const Virtual = require('./load-virtual.js')
-const Actual = require('./load-actual.js')
-
-module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cls))) {
+module.exports = cls => class IdealTreeBuilder extends cls {
constructor (options) {
super(options)
@@ -98,6 +98,7 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
globalStyle = false,
legacyPeerDeps = false,
force = false,
+ packageLock = true,
} = options
this[_force] = !!force
@@ -105,6 +106,8 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
this.idealTree = options.idealTree
this.legacyPeerDeps = legacyPeerDeps
+ this[_usePackageLock] = packageLock
+ this[_global] = !!global
this[_globalStyle] = this[_global] || globalStyle
this[_follow] = !!follow
@@ -167,6 +170,7 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
if (update.all || !Array.isArray(update.names))
update.names = []
+ this[_complete] = !!options.complete
this[_preferDedupe] = !!options.preferDedupe
this[_legacyBundling] = !!options.legacyBundling
this[_updateNames] = update.names
@@ -201,10 +205,23 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
// to build out the full virtual tree from it, since we'll be
// reconstructing it anyway.
.then(root => this[_global] ? root
- : this[_updateAll] ? Shrinkwrap.reset({ path: this.path })
+ : !this[_usePackageLock] || this[_updateAll]
+ ? Shrinkwrap.reset({ path: this.path })
.then(meta => Object.assign(root, {meta}))
: this.loadVirtual({ root }))
+ // if we don't have a lockfile to go from, then start with the
+ // actual tree, so we only make the minimum required changes.
+ // don't do this for global installs or updates, because in those
+ // cases we don't use a lockfile anyway.
+ // Load on a new Arborist object, so the Nodes aren't the same,
+ // or else it'll get super confusing when we change them!
+ .then(async root => {
+ if (!this[_updateAll] && !this[_global] && !root.meta.loadedFromDisk)
+ await new this.constructor(this.options).loadActual({ root })
+ return root
+ })
+
.then(tree => this[_mapWorkspaces](tree))
.then(tree => {
// null the virtual tree, because we're about to hack away at it
@@ -251,7 +268,7 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
// process the add/rm requests by modifying the root node, and the
// update.names request by queueing nodes dependent on those named.
- [_applyUserRequests] (options) {
+ async [_applyUserRequests] (options) {
process.emit('time', 'idealTree:userRequests')
// If we have a list of package names to update, and we know it's
// going to update them wherever they are, add any paths into those
@@ -268,16 +285,12 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
this[_explicitRequests].add(name)
}
- // triggers a refresh of all edgesOut
- const after = () => {
- this.idealTree.package = this.idealTree.package
- process.emit('timeEnd', 'idealTree:userRequests')
- }
+ if (options.add)
+ await this[_add](options)
- // these just add and remove to/from the root node
- return (options.add)
- ? this[_add](options).then(after)
- : after()
+ // triggers a refresh of all edgesOut
+ this.idealTree.package = this.idealTree.package
+ process.emit('timeEnd', 'idealTree:userRequests')
}
// This returns a promise because we might not have the name yet,
@@ -439,7 +452,7 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
.then(() => process.emit('timeEnd', 'idealTree:buildDeps'))
}
- [_buildDepStep] () {
+ async [_buildDepStep] () {
// removes tracker of previous dependency in the queue
if (this[_currentDep]) {
const { location, name } = this[_currentDep]
@@ -464,13 +477,41 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
// satisfied by whatever's in that file anyway.
if (this[_depsSeen].has(node) ||
node.root !== this.idealTree ||
- node.hasShrinkwrap)
+ node.hasShrinkwrap && !this[_complete])
return this[_buildDepStep]()
this[_depsSeen].add(node)
this[_currentDep] = node
process.emit('time', `idealTree:${node.location || '#root'}`)
+ // if we're loading a _complete_ ideal tree, for a --package-lock-only
+ // installation for example, we have to crack open the tarball and
+ // look inside if it has bundle deps or shrinkwraps. note that this is
+ // not necessary during a reification, because we just update the
+ // ideal tree by reading bundles/shrinkwraps in place.
+ // Don't bother if the node is from the actual tree and hasn't
+ // been resolved, because we can't fetch it anyway, could be anything!
+ if (this[_complete] && node !== this.idealTree && node.resolved) {
+ const Arborist = this.constructor
+ const bd = node.package.bundleDependencies
+ const hasBundle = bd && Array.isArray(bd) && bd.length
+ const { hasShrinkwrap } = node
+ if (hasBundle || hasShrinkwrap) {
+ const opt = { ...this.options }
+ await cacache.tmp.withTmp(this.cache, opt, async path => {
+ await pacote.extract(node.resolved, path, opt)
+
+ if (hasShrinkwrap)
+ await new Arborist({ ...this.options, path })
+ .loadVirtual({ root: node })
+
+ if (hasBundle)
+ await new Arborist({ ...this.options, path })
+ .loadActual({ root: node, ignoreMissing: true })
+ })
+ }
+ }
+
// if any deps are missing or invalid, then we fetch the manifest for
// the thing we want, and build a new dep node from that.
// Then, find the ideal placement for that node. The ideal placement
@@ -512,7 +553,7 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
// Set `preferDedupe: true` in the options to replace the shallower
// dep if allowed.
- return Promise.all(
+ const tasks = await Promise.all(
// resolve all the edges into nodes using pacote.manifest
// return a {dep,edge} object so that we can track the reason
// for this node through the parallelized async operation.
@@ -523,30 +564,30 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
.then(dep => ({edge, dep})))
)
- .then(tasks =>
- tasks.sort((a, b) => a.edge.name.localeCompare(b.edge.name))
- .map(({ edge, dep }) => this[_placeDep](dep, node, edge)))
-
- .then(placed => {
- const promises = []
- for (const set of placed) {
- for (const node of set) {
- this[_mutateTree] = true
- this.addTracker('idealTree', node.name, node.location)
- this[_depsQueue].push(node)
-
- // we're certainly going to need these soon, fetch them asap
- // if it fails at this point, though, dont' worry because it
- // may well be an optional dep that has gone missing. it'll
- // fail later anyway.
- promises.push(...this[_problemEdges](node).map(e =>
- this[_fetchManifest](npa.resolve(e.name, e.spec, node.path))
- .catch(er => null)))
- }
+ const placed = tasks
+ .sort((a, b) => a.edge.name.localeCompare(b.edge.name))
+ .map(({ edge, dep }) => this[_placeDep](dep, node, edge))
+
+ const promises = []
+ for (const set of placed) {
+ for (const node of set) {
+ this[_mutateTree] = true
+ this.addTracker('idealTree', node.name, node.location)
+ this[_depsQueue].push(node)
+
+ // we're certainly going to need these soon, fetch them asap
+ // if it fails at this point, though, dont' worry because it
+ // may well be an optional dep that has gone missing. it'll
+ // fail later anyway.
+ const from = fromPath(node)
+ promises.push(...this[_problemEdges](node).map(e =>
+ this[_fetchManifest](npa.resolve(e.name, e.spec, from))
+ .catch(er => null)))
}
- return Promise.all(promises)
- })
- .then(() => this[_buildDepStep]())
+ }
+ await Promise.all(promises)
+
+ return this[_buildDepStep]()
}
// loads a node from an edge, and then loads its peer deps (and their
@@ -561,9 +602,21 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
pkg: edge.from.package,
legacyPeerDeps,
})
+
const spec = npa.resolve(edge.name, edge.spec, edge.from.path)
return this[_nodeFromSpec](edge.name, spec, parent, edge)
- .then(node => this[_loadPeerSet](node))
+ .then(node => {
+ // handle otherwise unresolvable dependency nesting loops by
+ // creating a symbolic link
+ // a1 -> b1 -> a2 -> b2 -> a1 -> ...
+ // instead of nesting forever, when the loop occurs, create
+ // a symbolic link to the earlier instance
+ for (let p = edge.from.resolveParent; p; p = p.resolveParent) {
+ if (p.matches(node))
+ return new Link({ parent, target: p })
+ }
+ return this[_loadPeerSet](node)
+ })
}
[_problemEdges] (node) {
@@ -593,21 +646,9 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
...this.options,
avoid: this[_avoidRange](spec.name),
}
- const { yarnLock } = this.idealTree.meta
- const fromYarn = yarnLock && yarnLock.entries.get(spec.raw)
- if (fromYarn && fromYarn.version) {
- // if it's the yarn or npm default registry, use the version as
- // our effective spec. if it's any other kind of thing, use that.
- const yarnRegRe = /^https?:\/\/registry.yarnpkg.com\//
- const npmRegRe = /^https?:\/\/registry.npmjs.org\//
- const {resolved, version} = fromYarn
- const isYarnReg = yarnRegRe.test(resolved)
- const isnpmReg = !isYarnReg && npmRegRe.test(resolved)
- const yspec = (isYarnReg || isnpmReg) && version || resolved
- if (yspec)
- spec = npa(`${spec.name}@${yspec}`)
- }
-
+ // get the intended spec and stored metadata from yarn.lock file,
+ // if available and valid.
+ spec = this.idealTree.meta.checkYarnLock(spec, options)
const p = pacote.manifest(spec, options)
this[_manifests].set(spec.raw, p)
return p
@@ -669,7 +710,9 @@ module.exports = cls => class IdealTreeBuilder extends Tracker(Virtual(Actual(cl
// where this dep cannot be placed, and use the one right before that.
// place dep, requested by node, to satisfy edge
[_placeDep] (dep, node, edge, peerEntryEdge = null) {
- if (edge.to && !edge.error && !this[_updateNames].includes(edge.name) &&
+ if (edge.to &&
+ !edge.error &&
+ !this[_updateNames].includes(edge.name) &&
!this[_isVulnerable](edge.to))
return []
diff --git a/node_modules/@npmcli/arborist/lib/arborist/deduper.js b/node_modules/@npmcli/arborist/lib/arborist/deduper.js
new file mode 100644
index 000000000..32db0ac26
--- /dev/null
+++ b/node_modules/@npmcli/arborist/lib/arborist/deduper.js
@@ -0,0 +1,15 @@
+module.exports = cls => class Deduper extends cls {
+ async dedupe (options = {}) {
+ const tree = await this.loadVirtual().catch(() => this.loadActual())
+ const names = []
+ for (const name of tree.inventory.query('name')) {
+ if (tree.inventory.query('name', name).size > 1)
+ names.push(name)
+ }
+ return this.reify({
+ ...options,
+ preferDedupe: true,
+ update: { names }
+ })
+ }
+}
diff --git a/node_modules/@npmcli/arborist/lib/arborist/index.js b/node_modules/@npmcli/arborist/lib/arborist/index.js
index d2215cbfe..9c35b3eb8 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/index.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/index.js
@@ -21,12 +21,30 @@
// instance. It always refers to the actual tree, but is updated (and written
// to disk) on reification.
-// reify extends buildideal, which extends actual and virtual, so that's
-// the only one to pull in. This class is just here to grab the options
-// and path, and call out to the others.
+// Each of the mixin "classes" adds functionality, but are not dependent on
+// constructor call order. So, we just load them in an array, and build up
+// the base class, so that the overall voltron class is easier to test and
+// cover, and separation of concerns can be maintained.
+
const Auditor = require('./audit.js')
const {resolve} = require('path')
-class Arborist extends Auditor(require('events')) {
+const {homedir} = require('os')
+
+const mixins = [
+ require('../tracker.js'),
+ require('./pruner.js'),
+ require('./deduper.js'),
+ require('./audit.js'),
+ require('./build-ideal-tree.js'),
+ require('./load-actual.js'),
+ require('./load-virtual.js'),
+ require('./rebuild.js'),
+ require('./reify.js'),
+]
+
+const Base = mixins.reduce((a, b) => b(a), require('events'))
+
+class Arborist extends Base {
constructor (options = {}) {
process.emit('time', 'arborist:ctor')
super(options)
@@ -34,7 +52,9 @@ class Arborist extends Auditor(require('events')) {
nodeVersion: process.version,
...options,
path: options.path || '.',
+ cache: options.cache || `${homedir()}/.npm/_cacache`,
}
+ this.cache = resolve(this.options.cache)
this.path = resolve(this.options.path)
process.emit('timeEnd', 'arborist:ctor')
}
diff --git a/node_modules/@npmcli/arborist/lib/arborist/load-actual.js b/node_modules/@npmcli/arborist/lib/arborist/load-actual.js
index 925e234b1..95ce9b2a5 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/load-actual.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/load-actual.js
@@ -1,10 +1,12 @@
// mix-in implementing the loadActual method
-const {dirname, resolve, join} = require('path')
+const {relative, dirname, resolve, join} = require('path')
const rpj = require('read-package-json-fast')
const {promisify} = require('util')
const readdir = promisify(require('readdir-scoped-modules'))
+const walkUp = require('walk-up-path')
+const ancestorPath = require('common-ancestor-path')
const Shrinkwrap = require('../shrinkwrap.js')
const calcDepFlags = require('../calc-dep-flags.js')
@@ -17,15 +19,21 @@ const _newNode = Symbol('newNode')
const _newLink = Symbol('newLink')
const _loadFSTree = Symbol('loadFSTree')
const _loadFSChildren = Symbol('loadFSChildren')
+const _findMissingEdges = Symbol('findMissingEdges')
const _findFSParents = Symbol('findFSParents')
const _actualTreeLoaded = Symbol('actualTreeLoaded')
const _rpcache = Symbol('realpathCache')
const _stcache = Symbol('statCache')
-const _linkTargets = Symbol('linkTargets')
+const _topNodes = Symbol('linkTargets')
const _cache = Symbol('nodeLoadingCache')
+const _loadActual = Symbol('loadActual')
const _loadActualVirtually = Symbol('loadActualVirtually')
const _loadActualActually = Symbol('loadActualActually')
+const _actualTreePromise = Symbol('actualTreePromise')
+const _actualTree = Symbol('actualTree')
+const _transplant = Symbol('transplant')
+const _transplantFilter = Symbol('transplantFilter')
const _filter = Symbol('filter')
const _global = Symbol.for('global')
@@ -41,7 +49,7 @@ module.exports = cls => class ActualLoader extends cls {
// ensure when walking the tree that we don't call loadTree on the
// same actual node more than one time.
- this[_actualTreeLoaded] = null
+ this[_actualTreeLoaded] = new Set()
// caches for cached realpath calls
const cwd = process.cwd()
@@ -59,81 +67,114 @@ module.exports = cls => class ActualLoader extends cls {
// And, we know that it can ONLY be relevant when the node
// is a target of a link, otherwise it'd be in a node_modules
// folder, so take advantage of that to limit the scans later.
- this[_linkTargets] = new Set()
+ this[_topNodes] = new Set()
}
// public method
- loadActual (options = {}) {
- // mostly realpath to throw if the root doesn't exist
- if (this.actualTree)
- return Promise.resolve(this.actualTree)
+ async loadActual (options = {}) {
+ // stash the promise so that we don't ever have more than one
+ // going at the same time. This is so that buildIdealTree can
+ // default to the actualTree if no shrinkwrap present, but
+ // reify() can still call buildIdealTree and loadActual in parallel
+ // safely.
+ return this.actualTree ? this.actualTree
+ : this[_actualTreePromise] ? this[_actualTreePromise]
+ : this[_actualTreePromise] = this[_loadActual](options)
+ .then(tree => this.actualTree = tree)
+ }
- const { global = false, filter = () => true } = options
+ async [_loadActual] (options) {
+ // mostly realpath to throw if the root doesn't exist
+ const {
+ global = false,
+ filter = () => true,
+ root = null,
+ transplantFilter = () => true,
+ ignoreMissing = false,
+ } = options
this[_filter] = filter
+ this[_transplantFilter] = transplantFilter
+
if (global) {
- return realpath(this.path, this[_rpcache], this[_stcache])
- .then(real => this[this.path === real ? _newNode : _newLink]({
- path: this.path,
- realpath: real,
- pkg: {},
- global,
- })).then(node => {
- this.actualTree = node
- return this[_loadActualActually]()
- }).then(tree => {
- return tree
- })
+ const real = await realpath(this.path, this[_rpcache], this[_stcache])
+ const newNodeOrLink = this.path === real ? _newNode : _newLink
+ this[_actualTree] = await this[newNodeOrLink]({
+ path: this.path,
+ realpath: real,
+ pkg: {},
+ global,
+ })
+ return this[_loadActualActually]({root, ignoreMissing})
}
- return realpath(this.path, this[_rpcache], this[_stcache])
- .then(real => this[_loadFSNode]({ path: this.path, real }))
- .then(node => {
- // XXX only rely on this if the hidden lockfile is the newest thing?
- // need some kind of heuristic, like if the package.json or sw have
- // been touched more recently, then ignore it? This is a hazard if
- // user switches back and forth between Arborist and another way of
- // mutating the node_modules folder.
- this.actualTree = node
-
- return Shrinkwrap.load({
- path: node.realpath,
- hiddenLockfile: true,
- }).then(meta => {
- if (meta.loadedFromDisk) {
- node.meta = meta
- meta.add(node)
- return this[_loadActualVirtually]()
- }
- return Shrinkwrap.load({
- path: node.realpath,
- }).then(meta => {
- node.meta = meta
- meta.add(node)
- return this[_loadActualActually]()
- })
- })
+ // not in global mode, hidden lockfile is allowed, load root pkg too
+ this[_actualTree] = await this[_loadFSNode]({
+ path: this.path,
+ real: await realpath(this.path, this[_rpcache], this[_stcache]),
+ })
+
+ // XXX only rely on this if the hidden lockfile is the newest thing?
+ // need some kind of heuristic, like if the package.json or sw have
+ // been touched more recently, then ignore it? This is a hazard if
+ // user switches back and forth between Arborist and another way of
+ // mutating the node_modules folder.
+ const meta = await Shrinkwrap.load({
+ path: this[_actualTree].path,
+ hiddenLockfile: true,
+ })
+ if (meta.loadedFromDisk) {
+ this[_actualTree].meta = meta
+ meta.add(this[_actualTree])
+ return this[_loadActualVirtually]({ root })
+ } else {
+ const meta = await Shrinkwrap.load({
+ path: this[_actualTree].path,
})
+ this[_actualTree].meta = meta
+ meta.add(this[_actualTree])
+ return this[_loadActualActually]({ root, ignoreMissing })
+ }
}
- [_loadActualVirtually] () {
+ async [_loadActualVirtually] ({ root }) {
// have to load on a new Arborist object, so we don't assign
// the virtualTree on this one! Also, the weird reference is because
// we can't easily get a ref to Arborist in this module, without
// creating a circular reference, since this class is a mixin used
// to build up the Arborist class itself.
- return new this.constructor({...this.options}).loadVirtual({
- root: this.actualTree,
+ await new this.constructor({...this.options}).loadVirtual({
+ root: this[_actualTree],
})
+ this[_transplant](root)
+ return this[_actualTree]
}
- [_loadActualActually] () {
- this[_actualTreeLoaded] = new Set()
- // did is a set of all realpaths visited on this walk
- // important when a link points at a node we end up visiting later.
- return this[_loadFSTree](this.actualTree)
- .then(() => this[_findFSParents]())
- .then(() => calcDepFlags(this.actualTree))
- .then(() => this.actualTree)
+ async [_loadActualActually] ({ root, ignoreMissing }) {
+ await this[_loadFSTree](this[_actualTree])
+ if (!ignoreMissing)
+ await this[_findMissingEdges]()
+ this[_findFSParents]()
+ this[_transplant](root)
+ // only reset root flags if we're not re-rooting, otherwise leave as-is
+ calcDepFlags(this[_actualTree], !root)
+ return this[_actualTree]
+ }
+
+ [_transplant] (root) {
+ if (!root)
+ return
+ // have to set the fsChildren first, because re-rooting a Link
+ // re-roots the target, but without updating its realpath, so
+ // we have to re-root the targets first so their location is
+ // updated appropriately.
+ for (const node of this[_actualTree].fsChildren) {
+ node.fsParent = root
+ }
+ for (const node of this[_actualTree].children.values()) {
+ if (this[_transplantFilter](node))
+ node.parent = root
+ }
+ this[_actualTree] = root
}
[_loadFSNode] ({ path, parent, real, root }) {
@@ -151,7 +192,7 @@ module.exports = cls => class ActualLoader extends cls {
// it's already loaded as a tree top, and then gets its parent loaded
// later, if a symlink points deeper in the tree.
const cached = this[_cache].get(path)
- if (cached) {
+ if (cached && !cached.dummy) {
return Promise.resolve(cached).then(node => {
node.parent = parent
return node
@@ -192,8 +233,8 @@ module.exports = cls => class ActualLoader extends cls {
// it'll get parented later, making the fsParent scan a no-op, but better
// safe than sorry, since it's cheap.
const { parent, realpath, path } = options
- if (!parent && path !== this.path)
- this[_linkTargets].add(realpath)
+ if (!parent)
+ this[_topNodes].add(realpath)
return process.env._TEST_ARBORIST_SLOW_LINK_TARGET_ === '1'
? new Promise(res => setTimeout(() => res(new Node(options)), 100))
: new Node(options)
@@ -201,22 +242,15 @@ module.exports = cls => class ActualLoader extends cls {
[_newLink] (options) {
const { realpath } = options
- this[_linkTargets].add(realpath)
+ this[_topNodes].add(realpath)
const target = this[_cache].get(realpath)
const link = new Link({ ...options, target })
if (!target) {
this[_cache].set(realpath, link.target)
- const nmParent = link.target.inNodeModules()
- // if a link points at /path/to/node_modules/foo, and we haven't
- // loaded the node at /path/to, then load that, so that we'll also
- // pick up peers of that node in the node_modules tree. This is how
- // we can read pnpm trees properly. Defer loading until this is done.
- if (nmParent) {
- return this[_loadFSNode]({ path: nmParent, root: link.root })
- .then(node => this[_loadFSTree](node))
- .then(() => link)
- }
+ // if a link target points at a node outside of the root tree's
+ // node_modules hierarchy, then load that node as well.
+ return this[_loadFSTree](link.target).then(() => link)
} else if (target.then)
target.then(node => link.target = node)
@@ -233,7 +267,7 @@ module.exports = cls => class ActualLoader extends cls {
return node.then(node => this[_loadFSTree](node))
// impossible except in pathological ELOOP cases
- /* istanbul ignore next */
+ /* istanbul ignore if */
if (did.has(node.realpath))
return Promise.resolve(node)
@@ -263,25 +297,78 @@ module.exports = cls => class ActualLoader extends cls {
() => {})
}
+ async [_findMissingEdges] () {
+ // try to resolve any missing edges by walking up the directory tree,
+ // checking for the package in each node_modules folder. stop at the
+ // root directory.
+ // The tricky move here is that we load a "dummy" node for the folder
+ // containing the node_modules folder, so that it can be assigned as
+ // the fsParent. It's a bad idea to *actually* load that full node,
+ // because people sometimes develop in ~/projects/node_modules/...
+ // so we'd end up loading a massive tree with lots of unrelated junk.
+ const nmContents = new Map()
+ const tree = this[_actualTree]
+ for (const node of tree.inventory.values()) {
+ const ancestor = ancestorPath(node.realpath, this.path)
+
+ const depPromises = []
+ for (const [name, edge] of node.edgesOut.entries()) {
+ if (!edge.missing && !(edge.to && edge.to.dummy))
+ continue
+
+ // start the walk from the dirname, because we would have found
+ // the dep in the loadFSTree step already if it was local.
+ for (const p of walkUp(dirname(node.realpath))) {
+ // only walk as far as the nearest ancestor
+ // this keeps us from going into completely unrelated
+ // places when a project is just missing something, but
+ // allows for finding the transitive deps of link targets.
+ // ie, if it has to go up and back out to get to the path
+ // from the nearest common ancestor, we've gone too far.
+ if (ancestor && /^\.\.(?:[\\\/]|$)/.test(relative(ancestor, p)))
+ break
+
+ const entries = nmContents.get(p) ||
+ await readdir(p + '/node_modules').catch(() => [])
+ nmContents.set(p, entries)
+ if (!entries.includes(name))
+ continue
+
+ const d = this[_cache].has(p) ? await this[_cache].get(p)
+ : new Node({ path: p, root: node.root, dummy: true })
+ this[_cache].set(p, d)
+ if (d.dummy) {
+ // it's a placeholder, so likely would not have loaded this dep,
+ // unless another dep in the tree also needs it.
+ const depPath = `${p}/node_modules/${name}`
+ const cached = this[_cache].get(depPath)
+ if (!cached || cached.dummy) {
+ depPromises.push(this[_loadFSNode]({
+ path: depPath,
+ root: node.root,
+ parent: d,
+ }).then(node => this[_loadFSTree](node)))
+ }
+ }
+ break
+ }
+ }
+ await Promise.all(depPromises)
+ }
+ }
+
// try to find a node that is the parent in a fs tree sense, but not a
// node_modules tree sense, of any link targets. this allows us to
// resolve deps that node will find, but a legacy npm view of the
// world would not have noticed.
[_findFSParents] () {
- for (const path of this[_linkTargets]) {
+ for (const path of this[_topNodes]) {
const node = this[_cache].get(path)
- if (!node.parent && !node.fsParent) {
- for (let p = dirname(path); p;) {
+ if (node && !node.parent && !node.fsParent) {
+ for (const p of walkUp(dirname(path))) {
if (this[_cache].has(p)) {
node.fsParent = this[_cache].get(p)
- p = null
- } else {
- // walk up the tree until p === dirname(p)
- const pp = dirname(p)
- if (pp === p)
- p = null
- else
- p = pp
+ break
}
}
}
diff --git a/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js b/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js
index 0b1d0f91a..b7e289be8 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js
@@ -1,8 +1,10 @@
// mixin providing the loadVirtual method
-const {resolve} = require('path')
+const {dirname, resolve} = require('path')
const mapWorkspaces = require('@npmcli/map-workspaces')
+const walkUp = require('walk-up-path')
+const nameFromFolder = require('@npmcli/name-from-folder')
const consistentResolve = require('../consistent-resolve.js')
const Shrinkwrap = require('../shrinkwrap.js')
const Node = require('../node.js')
@@ -109,43 +111,38 @@ module.exports = cls => class VirtualLoader extends cls {
[assignParentage] (nodes) {
for (const [location, node] of nodes) {
- if (location === '')
- continue
-
- // node_modules/foo/node_modules/bar -> node_modules/foo
- // if we have something like node_modules/foo/a/b/c,
- // however, then the foo module isn't actually the 'parent' of
- // the c module, but it is the fsparent
- const split = location.split(/(?:^|\/)node_modules\//)
- const name = split.pop()
- const ploc = split.join('/node_modules/').substr(1)
- const parent = nodes.get(ploc)
- /* istanbul ignore else - impossible unless lockfile damaged/invalid */
- if (parent) {
- // if the node location doesn't actually start with node_modules, but
- // the node name DOES match the folder it's in, like if you have a
- // link from `node_modules/app` to `./app`, then split won't contain
- // anything, but the name will still match. In that case, it is an
- // fsParent, though, not a parent.
- const parentType = name === node.name && split.length
+ const { path, name } = node
+ for (const p of walkUp(dirname(path))) {
+ const ploc = relpath(this.path, p)
+ const parent = nodes.get(ploc)
+ if (!parent)
+ continue
+
+ const locTest = `${ploc}/node_modules/${name}`.replace(/^\//, '')
+ const ptype = location === locTest
? 'parent'
: 'fsParent'
- node[ parentType ] = parent
+ node[ptype] = parent
// read inBundle from package because 'package' here is
// actually a v2 lockfile metadata entry.
- if (node.package.inBundle && parent.edgesOut.has(name)) {
+ if (ptype === 'parent' && node.package.inBundle && parent.edgesOut.has(name)) {
const ppkg = parent.package
if (!ppkg.bundleDependencies)
ppkg.bundleDependencies = [name]
else if (!ppkg.bundleDependencies.includes(name))
ppkg.bundleDependencies.push(name)
}
+
+ break
}
}
}
[loadNode] (location, sw) {
const path = resolve(this.path, location)
+ // shrinkwrap doesn't include package name unless necessary
+ if (!sw.name)
+ sw.name = nameFromFolder(path)
const node = new Node({
legacyPeerDeps: this.legacyPeerDeps,
root: this.virtualTree,
@@ -184,24 +181,11 @@ module.exports = cls => class VirtualLoader extends cls {
target,
pkg: target && target.package,
})
- if (target) {
- link.extraneous = target.extraneous
- link.devOptional = target.devOptional
- link.peer = target.peer
- link.optional = target.optional
- link.dev = target.dev
- } else {
- // probably only had a legacy shrinkwrap. assume that it
- // ought to be here.
- link.extraneous = link.target.extraneous = !!meta.extraneous
- link.optional = link.target.optional = !!meta.optional
- link.dev = link.target.dev = !!meta.dev
- link.peer = link.target.peer = !!meta.peer
- link.devOptional = link.target.devOptional =
- !!meta.devOptional ||
- !!meta.dev ||
- !!meta.optional
- }
+ link.extraneous = target.extraneous
+ link.devOptional = target.devOptional
+ link.peer = target.peer
+ link.optional = target.optional
+ link.dev = target.dev
return link
}
}
diff --git a/node_modules/@npmcli/arborist/lib/arborist/pruner.js b/node_modules/@npmcli/arborist/lib/arborist/pruner.js
new file mode 100644
index 000000000..3bd2678cc
--- /dev/null
+++ b/node_modules/@npmcli/arborist/lib/arborist/pruner.js
@@ -0,0 +1,12 @@
+module.exports = cls => class Pruner extends cls {
+ async prune (options = {}) {
+ const tree = await this.buildIdealTree(options)
+ const extraneousNodes = this.idealTree.inventory.filter(n => n.extraneous)
+
+ for (const node of extraneousNodes) {
+ node.parent = null
+ }
+
+ return this.reify(options)
+ }
+}
diff --git a/node_modules/@npmcli/arborist/lib/arborist/rebuild.js b/node_modules/@npmcli/arborist/lib/arborist/rebuild.js
new file mode 100644
index 000000000..2a0057887
--- /dev/null
+++ b/node_modules/@npmcli/arborist/lib/arborist/rebuild.js
@@ -0,0 +1,252 @@
+// Arborist.rebuild({path = this.path}) will do all the binlinks and
+// bundle building needed. Called by reify, and by `npm rebuild`.
+
+const {depth: dfwalk} = require('treeverse')
+const promiseAllRejectLate = require('promise-all-reject-late')
+const rpj = require('read-package-json-fast')
+const binLinks = require('bin-links')
+const runScript = require('@npmcli/run-script')
+const promiseCallLimit = require('promise-call-limit')
+const {resolve} = require('path')
+
+const boolEnv = b => b ? '1' : ''
+
+const _rebuildBundle = Symbol('rebuildBundle')
+const _ignoreScripts = Symbol('ignoreScripts')
+const _binLinks = Symbol('binLinks')
+const _oldMeta = Symbol('oldMeta')
+const _createBinLinks = Symbol('createBinLinks')
+const _doHandleOptionalFailure = Symbol('doHandleOptionalFailure')
+const _linkAllBins = Symbol('linkAllBins')
+const _runScripts = Symbol('runScripts')
+const _buildQueues = Symbol('buildQueues')
+const _addToBuildSet = Symbol('addToBuildSet')
+const _queues = Symbol('queues')
+const _scriptShell = Symbol('scriptShell')
+
+const _force = Symbol.for('force')
+const _global = Symbol.for('global')
+
+// defined by reify mixin
+const _handleOptionalFailure = Symbol.for('handleOptionalFailure')
+const _trashList = Symbol.for('trashList')
+
+module.exports = cls => class Builder extends cls {
+ constructor (options) {
+ super(options)
+
+ const {
+ ignoreScripts = false,
+ scriptShell,
+ binLinks = true,
+ rebuildBundle = true,
+ } = options
+
+ this[_binLinks] = binLinks
+ this[_ignoreScripts] = !!ignoreScripts
+ this[_scriptShell] = scriptShell
+ this[_rebuildBundle] = !!rebuildBundle
+ this[_queues] = {
+ preinstall: [],
+ install: [],
+ postinstall: [],
+ bin: [],
+ }
+ this[_oldMeta] = null
+ }
+
+ async rebuild ({ nodes, handleOptionalFailure = false } = {}) {
+ // nothing to do if we're not building anything!
+ if (this[_ignoreScripts] && !this[_binLinks])
+ return
+
+ // when building for the first time, as part of reify, we ignore
+ // failures in optional nodes, and just delete them. however, when
+ // running JUST a rebuild, we treat optional failures as real fails
+ this[_doHandleOptionalFailure] = handleOptionalFailure
+
+ // if we don't have a set of nodes, then just rebuild
+ // the actual tree on disk.
+ if (!nodes)
+ nodes = (await this.loadActual()).inventory.values()
+
+ process.emit('time', 'build')
+
+ await this[_buildQueues](nodes)
+ if (!this[_ignoreScripts])
+ await this[_runScripts]('preinstall')
+ if (this[_binLinks])
+ await this[_linkAllBins]()
+ if (!this[_ignoreScripts]) {
+ await this[_runScripts]('install')
+ await this[_runScripts]('postinstall')
+ }
+
+ process.emit('timeEnd', 'build')
+ }
+
+ async [_buildQueues] (nodes) {
+ process.emit('time', 'build:queue')
+ const set = new Set()
+
+ const promises = []
+ for (const node of nodes) {
+ promises.push(this[_addToBuildSet](node, set))
+
+ // if it has bundle deps, add those too, if rebuildBundle
+ if (this[_rebuildBundle] !== false) {
+ const bd = node.package.bundleDependencies
+ if (bd && bd.length) {
+ dfwalk({
+ tree: node,
+ leave: node => promises.push(this[_addToBuildSet](node, set)),
+ getChildren: node => [...node.children.values()],
+ filter: node => node.inBundle,
+ })
+ }
+ }
+ }
+ await promiseAllRejectLate(promises)
+
+ // now sort into the queues for the 4 things we have to do
+ // run in the same predictable order that buildIdealTree uses
+ // there's no particular reason for doing it in this order rather
+ // than another, but sorting *somehow* makes it consistent.
+ const queue = [...set].sort((a, b) =>
+ (a.depth - b.depth) || a.path.localeCompare(b.path))
+
+ for (const node of queue) {
+ const { package: { bin, scripts = {} } } = node
+ const { preinstall, install, postinstall } = scripts
+ const tests = { bin, preinstall, install, postinstall }
+ for (const [key, has] of Object.entries(tests)) {
+ if (has)
+ this[_queues][key].push(node)
+ }
+ }
+ process.emit('timeEnd', 'build:queue')
+ }
+
+ async [_addToBuildSet] (node, set) {
+ if (set.has(node))
+ return
+
+ if (this[_oldMeta] === null) {
+ const {root: {meta}} = node
+ this[_oldMeta] = meta && meta.loadedFromDisk &&
+ !(meta.originalLockfileVersion >= 2)
+ }
+
+ const { package: pkg, hasInstallScript } = node
+ const { bin, scripts = {} } = pkg
+
+ const { preinstall, install, postinstall } = scripts
+ const anyScript = preinstall || install || postinstall
+
+ if (!anyScript && (hasInstallScript || this[_oldMeta])) {
+ // we either have an old metadata (and thus might have scripts)
+ // or we have an indication that there's install scripts (but
+ // don't yet know what they are) so we have to load the package.json
+ // from disk to see what the deal is. Failure here just means
+ // no scripts to add, probably borked package.json.
+ // add to the set then remove while we're reading the pj, so we
+ // don't accidentally hit it multiple times.
+ set.add(node)
+ const pkg = await rpj(node.path + '/package.json').catch(() => ({}))
+ set.delete(node)
+
+ const {scripts = {}} = pkg
+ if (scripts.preinstall || scripts.install || scripts.postinstall) {
+ node.package.scripts = pkg.scripts
+ return this[_addToBuildSet](node, set)
+ }
+ }
+
+ if (bin || preinstall || install || postinstall) {
+ set.add(node)
+ }
+ }
+
+ async [_runScripts] (event) {
+ const queue = this[_queues][event]
+
+ if (!queue.length)
+ return
+
+ process.emit('time', `build:run:${event}`)
+ await promiseCallLimit(queue.map(node => async () => {
+ const {
+ path,
+ integrity,
+ resolved,
+ optional,
+ peer,
+ dev,
+ devOptional,
+ package: pkg,
+ } = node
+
+ // skip any that we know we'll be deleting
+ if (this[_trashList].has(path))
+ return
+
+ process.emit('time', `build:run:${event}:${node.location}`)
+ const p = runScript({
+ event,
+ path,
+ pkg,
+ stdioString: true,
+ env: {
+ npm_package_resolved: resolved,
+ npm_package_integrity: integrity,
+ npm_package_json: resolve(path, 'package.json'),
+ npm_package_optional: boolEnv(optional),
+ npm_package_dev: boolEnv(dev),
+ npm_package_peer: boolEnv(peer),
+ npm_package_dev_optional:
+ boolEnv(devOptional && !dev && !optional),
+ },
+ scriptShell: this[_scriptShell],
+ })
+
+ return this[_doHandleOptionalFailure]
+ ? this[_handleOptionalFailure](node, p) : p
+ }))
+ process.emit('timeEnd', `build:run:${event}`)
+ }
+
+ async [_linkAllBins] () {
+ const queue = this[_queues].bin
+ if (!queue.length)
+ return
+
+ process.emit('time', 'build:link')
+ const promises = []
+ for (const node of queue) {
+ promises.push(this[_createBinLinks](node))
+ }
+ await promiseAllRejectLate(promises)
+ process.emit('timeEnd', 'build:link')
+ }
+
+ async [_createBinLinks] (node) {
+ if (this[_trashList].has(node.path))
+ return
+
+ process.emit('time', `build:link:${node.location}`)
+
+ const p = binLinks({
+ pkg: node.package,
+ path: node.path,
+ top: !!(node.isTop || node.globalTop),
+ force: this[_force],
+ global: !!node.globalTop,
+ })
+
+ await (this[_doHandleOptionalFailure]
+ ? this[_handleOptionalFailure](node, p)
+ : p)
+
+ process.emit('timeEnd', `build:link:${node.location}`)
+ }
+}
diff --git a/node_modules/@npmcli/arborist/lib/arborist/reify.js b/node_modules/@npmcli/arborist/lib/arborist/reify.js
index 89514ae14..f1dc231d1 100644
--- a/node_modules/@npmcli/arborist/lib/arborist/reify.js
+++ b/node_modules/@npmcli/arborist/lib/arborist/reify.js
@@ -2,15 +2,11 @@
const npa = require('npm-package-arg')
const pacote = require('pacote')
-const binLinks = require('bin-links')
-const runScript = require('@npmcli/run-script')
const rpj = require('read-package-json-fast')
const {checkEngine, checkPlatform} = require('npm-install-checks')
const updateDepSpec = require('../update-dep-spec.js')
const AuditReport = require('../audit-report.js')
-const boolEnv = b => b ? '1' : ''
-
const {dirname, resolve, relative} = require('path')
const {depth: dfwalk} = require('treeverse')
const fs = require('fs')
@@ -27,7 +23,6 @@ const Diff = require('../diff.js')
const retirePath = require('../retire-path.js')
const promiseAllRejectLate = require('promise-all-reject-late')
const promiseCallLimit = require('promise-call-limit')
-const Ideal = require('./build-ideal-tree.js')
const optionalSet = require('../optional-set.js')
const _retiredPaths = Symbol('retiredPaths')
@@ -37,9 +32,10 @@ const _savePrefix = Symbol('savePrefix')
const _retireShallowNodes = Symbol('retireShallowNodes')
const _getBundlesByDepth = Symbol('getBundlesByDepth')
const _registryResolved = Symbol('registryResolved')
-const _trashList = Symbol('trashList')
const _addNodeToTrashList = Symbol('addNodeToTrashList')
-const _handleOptionalFailure = Symbol('handleOptionalFailure')
+// shared by rebuild mixin
+const _trashList = Symbol.for('trashList')
+const _handleOptionalFailure = Symbol.for('handleOptionalFailure')
const _loadTrees = Symbol.for('loadTrees')
// shared symbols for swapping out when testing
@@ -54,15 +50,13 @@ const _warnDeprecated = Symbol('warnDeprecated')
const _recheckEngineAndPlatform = Symbol('recheckEngineAndPlatform')
const _checkEngine = Symbol('checkEngine')
const _checkPlatform = Symbol('checkPlatform')
-const _binLinks = Symbol('binLinks')
const _loadBundlesAndUpdateTrees = Symbol.for('loadBundlesAndUpdateTrees')
const _submitQuickAudit = Symbol('submitQuickAudit')
const _awaitQuickAudit = Symbol('awaitQuickAudit')
const _unpackNewModules = Symbol.for('unpackNewModules')
const _moveContents = Symbol.for('moveContents')
const _moveBackRetiredUnchanged = Symbol.for('moveBackRetiredUnchanged')
-const _runLifecycleScripts = Symbol('runLifecycleScripts')
-const _runScriptQueue = Symbol('runScriptQueue')
+const _build = Symbol('build')
const _removeTrash = Symbol.for('removeTrash')
const _renamePath = Symbol.for('renamePath')
const _rollbackRetireShallowNodes = Symbol.for('rollbackRetireShallowNodes')
@@ -71,13 +65,15 @@ const _rollbackMoveBackRetiredUnchanged = Symbol.for('rollbackMoveBackRetiredUnc
const _saveIdealTree = Symbol.for('saveIdealTree')
const _copyIdealToActual = Symbol('copyIdealToActual')
const _addOmitsToTrashList = Symbol('addOmitsToTrashList')
+const _packageLockOnly = Symbol('packageLockOnly')
+const _dryRun = Symbol('dryRun')
+const _reifyPackages = Symbol('reifyPackages')
const _omitDev = Symbol('omitDev')
const _omitOptional = Symbol('omitOptional')
const _omitPeer = Symbol('omitPeer')
const _global = Symbol.for('global')
-const _ignoreScripts = Symbol('ignoreScripts')
const _scriptShell = Symbol('scriptShell')
// defined by Ideal mixin
@@ -85,24 +81,24 @@ const _force = Symbol.for('force')
const _idealTreePrune = Symbol.for('idealTreePrune')
const _explicitRequests = Symbol.for('explicitRequests')
const _resolvedAdd = Symbol.for('resolvedAdd')
+const _usePackageLock = Symbol.for('usePackageLock')
+const _formatPackageLock = Symbol.for('formatPackageLock')
-module.exports = cls => class Reifier extends Ideal(cls) {
+module.exports = cls => class Reifier extends cls {
constructor (options) {
super(options)
const {
- ignoreScripts = false,
- scriptShell,
savePrefix = '^',
- binLinks = true,
+ packageLockOnly = false,
+ dryRun = false,
+ formatPackageLock = true,
} = options
- if (!binLinks)
- this[_binLinks] = () => {}
-
- this[_ignoreScripts] = !!ignoreScripts
- this[_scriptShell] = scriptShell
+ this[_dryRun] = !!dryRun
+ this[_packageLockOnly] = !!packageLockOnly
this[_savePrefix] = savePrefix
+ this[_formatPackageLock] = !!formatPackageLock
this.diff = null
this[_retiredPaths] = {}
@@ -112,7 +108,13 @@ module.exports = cls => class Reifier extends Ideal(cls) {
}
// public method
- reify (options = {}) {
+ async reify (options = {}) {
+ if (this[_packageLockOnly] && this[_global]) {
+ const er = new Error('cannot generate lockfile for global packages')
+ er.code = 'ESHRINKWRAPGLOBAL'
+ throw er
+ }
+
const omit = new Set(options.omit || [])
this[_omitDev] = omit.has('dev')
this[_omitOptional] = omit.has('optional')
@@ -121,50 +123,78 @@ module.exports = cls => class Reifier extends Ideal(cls) {
// start tracker block
this.addTracker('reify')
process.emit('time', 'reify')
- return this[_loadTrees](options)
+ await this[_loadTrees](options)
.then(() => this[_diffTrees]())
- .then(() => this[_retireShallowNodes]())
- .then(() => this[_createSparseTree]())
- .then(() => this[_addOmitsToTrashList]())
- .then(() => this[_loadShrinkwrapsAndUpdateTrees]())
- .then(() => this[_loadBundlesAndUpdateTrees]())
- .then(() => this[_submitQuickAudit]())
- .then(() => this[_unpackNewModules]())
- .then(() => this[_moveBackRetiredUnchanged]())
- .then(() => this[_runLifecycleScripts]())
- .then(() => this[_removeTrash]())
+ .then(() => this[_reifyPackages]())
.then(() => this[_saveIdealTree](options))
.then(() => this[_copyIdealToActual]())
.then(() => this[_awaitQuickAudit]())
- .then(() => {
- this.finishTracker('reify')
- process.emit('timeEnd', 'reify')
- return this.actualTree
- })
+
+ this.finishTracker('reify')
+ process.emit('timeEnd', 'reify')
+ return this.actualTree
+ }
+
+ async [_reifyPackages] () {
+ // we don't submit the audit report or write to disk on dry runs
+ if (this[_dryRun])
+ return
+
+ if (this[_packageLockOnly]) {
+ // we already have the complete tree, so just audit it now,
+ // and that's all we have to do here.
+ return this[_submitQuickAudit]()
+ }
+
+ await this[_retireShallowNodes]()
+ await this[_createSparseTree]()
+ await this[_addOmitsToTrashList]()
+ await this[_loadShrinkwrapsAndUpdateTrees]()
+ await this[_loadBundlesAndUpdateTrees]()
+ await this[_submitQuickAudit]()
+ await this[_unpackNewModules]()
+ await this[_moveBackRetiredUnchanged]()
+ await this[_build]()
+ await this[_removeTrash]()
}
// when doing a local install, we load everything and figure it all out.
// when doing a global install, we *only* care about the explicit requests.
[_loadTrees] (options) {
process.emit('time', 'reify:loadTrees')
+ const bitOpt = {
+ ...options,
+ complete: this[_packageLockOnly] || this[_dryRun],
+ }
+
+ // if we're only writing a package lock, then it doesn't matter what's here
+ if (this[_packageLockOnly])
+ return this.buildIdealTree(bitOpt)
+ .then(() => process.emit('timeEnd', 'reify:loadTrees'))
+
+ const actualOpt = this[_global] ? {
+ ignoreMissing: true,
+ global: true,
+ filter: (node, kid) => !node.isRoot ? true
+ : this[_explicitRequests].has(kid),
+ } : { ignoreMissing: true }
+
if (!this[_global])
- return Promise.all([this.loadActual(), this.buildIdealTree(options)])
+ return Promise.all([this.loadActual(actualOpt), this.buildIdealTree(bitOpt)])
.then(() => process.emit('timeEnd', 'reify:loadTrees'))
// the global install space tends to have a lot of stuff in it. don't
// load all of it, just what we care about. we won't be saving a
// hidden lockfile in there anyway.
- const actualOpts = {
- global: true,
- filter: (node, kid) => !node.isRoot ? true
- : this[_explicitRequests].has(kid),
- }
- return this.buildIdealTree(options)
- .then(() => this.loadActual(actualOpts))
+ return this.buildIdealTree(bitOpt)
+ .then(() => this.loadActual(actualOpt))
.then(() => process.emit('timeEnd', 'reify:loadTrees'))
}
[_diffTrees] () {
+ if (this[_packageLockOnly])
+ return
+
process.emit('time', 'reify:diffTrees')
// XXX if we have an existing diff already, there should be a way
// to just invalidate the parts that changed, but avoid walking the
@@ -348,7 +378,6 @@ module.exports = cls => class Reifier extends Ideal(cls) {
.then(() => this[_extractOrLink](node))
.then(() => this[_warnDeprecated](node))
.then(() => this[_recheckEngineAndPlatform](node))
- .then(() => this[_binLinks](node))
return this[_handleOptionalFailure](node, p)
.then(() => {
@@ -367,7 +396,8 @@ module.exports = cls => class Reifier extends Ideal(cls) {
// Do the best with what we have, or else remove it from the tree
// entirely, since we can't possibly reify it.
const res = node.resolved ? this[_registryResolved](node.resolved)
- : node.package.version ? `${node.package.name || node.name}@${node.package.version}`
+ : node.package.name && node.package.version
+ ? `${node.package.name}@${node.package.version}`
: null
// no idea what this thing is. remove it from the tree.
@@ -405,6 +435,8 @@ module.exports = cls => class Reifier extends Ideal(cls) {
// did not track that useful bit of info.
const {meta} = this.idealTree
if (meta.loadedFromDisk && !(meta.originalLockfileVersion >= 2)) {
+ // XXX should have a shared location where package.json is read,
+ // so we don't ever read the same pj more than necessary.
return rpj(node.path + '/package.json').then(pkg => {
node.package.bin = pkg.bin
node.package.os = pkg.os
@@ -454,16 +486,6 @@ module.exports = cls => class Reifier extends Ideal(cls) {
}) : p).then(() => node)
}
- [_binLinks] (node) {
- return binLinks({
- pkg: node.package,
- path: node.path,
- top: node.isTop || node.globalTop,
- force: this[_force],
- global: node.globalTop,
- })
- }
-
[_registryResolved] (resolved) {
// the default registry url is a magic value meaning "the currently
// configured registry".
@@ -507,38 +529,18 @@ module.exports = cls => class Reifier extends Ideal(cls) {
return this[_loadBundlesAndUpdateTrees](depth + 1, bundlesByDepth)
}
- const Arborist = this.constructor
// extract all the nodes with bundles
return promiseAllRejectLate(set.map(node => this[_reifyNode](node)))
// then load their unpacked children and move into the ideal tree
- .then(nodes => promiseAllRejectLate(nodes.map(node =>
- new Arborist({
+ .then(nodes =>
+ promiseAllRejectLate(nodes.map(node => new this.constructor({
...this.options,
path: node.path,
- }).loadActual().then(tree => {
- for (const child of tree.children.values()) {
- // skip the empty sparse tree folders
- if (child.package._id)
- child.parent = node
- }
- return node
- }).then(node => {
- // link the bins for any bundled deps in its tree.
- // these are often required for build scripts.
- const set = new Set()
- dfwalk({
- tree: node,
- visit: node => set.add(node),
- getChildren: node => [...node.children.values()],
- filter: node => node.inBundle,
- })
- const promises = []
- for (const node of set) {
- promises.push(this[_binLinks](node))
- }
- return promiseAllRejectLate(promises)
- }).then(() => node)
- )))
+ }).loadActual({
+ root: node,
+ // don't transplant any sparse folders we created
+ transplantFilter: node => node.package._id,
+ }))))
// move onto the next level of bundled items
.then(() => this[_loadBundlesAndUpdateTrees](depth + 1, bundlesByDepth))
.catch(er => this[_rollbackCreateSparseTree](er))
@@ -689,99 +691,26 @@ module.exports = cls => class Reifier extends Ideal(cls) {
.then(() => this[_rollbackCreateSparseTree](er))
}
- [_runLifecycleScripts] () {
- if (this[_ignoreScripts])
- return
-
- process.emit('time', 'reify:runScripts')
+ [_build] () {
+ process.emit('time', 'reify:build')
// for all the things being installed, run their appropriate scripts
// run in tip->root order, so as to be more likely to build a node's
// deps before attempting to build it itself
- const installedNodes = []
+ const nodes = []
dfwalk({
tree: this.diff,
- leave: diff => installedNodes.push(diff.ideal),
+ leave: diff => nodes.push(diff.ideal),
// process adds before changes, ignore removals
getChildren: diff => diff && diff.children,
filter: diff => diff.action === 'ADD' || diff.action === 'CHANGE'
})
- const preinstall = []
- const install = []
- const postinstall = []
-
- // have to get the scripts from the actual package.json, because the
- // legacy read-package-json is too enthusiastic about adding a node-gyp
- // command, even if the gypfile is not included in the package
- const {meta} = this.idealTree
- return promiseAllRejectLate(installedNodes.map(node => {
- // ignore any from paths that we already know we'll delete
- if (this[_trashList].has(node.path))
- return
-
- // if we loaded from a v1 lockfile, then we have to check them all
- // otherwise, we only have to check the ones with hasInstallScript
- const hasInstallScript = node.hasInstallScript ||
- meta.loadedFromDisk && meta.originalLockfileVersion < 2
-
- if (!hasInstallScript)
- return
-
- return rpj(node.path + '/package.json').then(pkg => {
- if (pkg.scripts) {
- node.package.scripts = pkg.scripts
- const val = [node, pkg]
- if (pkg.scripts.preinstall)
- preinstall.push(val)
- if (pkg.scripts.install)
- install.push(val)
- if (pkg.scripts.postinstall)
- postinstall.push(val)
- }
- })
- // failure just means no scripts to add, probably just no pj
- .catch(/* istanbul ignore next */ er => {})
- }))
- .then(() => this[_runScriptQueue]('preinstall', preinstall))
- .then(() => this[_runScriptQueue]('install', install))
- .then(() => this[_runScriptQueue]('postinstall', postinstall))
- .then(() => process.emit('timeEnd', 'reify:runScripts'))
+ return this.rebuild({ nodes, handleOptionalFailure: true })
+ .then(() => process.emit('timeEnd', 'reify:build'))
.catch(er => this[_rollbackMoveBackRetiredUnchanged](er))
}
- [_runScriptQueue] (event, queue) {
- if (!queue.length)
- return
-
- process.emit('time', `reify:runScripts:${event}`)
- return promiseCallLimit(queue.map(([node, pkg]) => () => {
- const {path} = node
- // skip any that we know we'll be deleting
- if (this[_trashList].has(node.path))
- return Promise.resolve()
-
- return this[_handleOptionalFailure](node, runScript({
- event,
- path,
- pkg,
- stdioString: true,
- env: {
- npm_package_resolved: node.resolved,
- npm_package_integrity: node.integrity,
- npm_package_json: resolve(node.path, 'package.json'),
- npm_package_optional: boolEnv(node.optional),
- npm_package_dev: boolEnv(node.dev),
- npm_package_peer: boolEnv(node.peer),
- npm_package_dev_optional:
- boolEnv(node.devOptional && !node.dev && !node.optional),
- },
- scriptShell: this[_scriptShell],
- }))
- }))
- .then(() => process.emit('timeEnd', `reify:runScripts:${event}`))
- }
-
// the tree is pretty much built now, so it's cleanup time.
// remove the retired folders, and any deleted nodes
// If this fails, there isn't much we can do but tell the user about it.
@@ -812,7 +741,7 @@ module.exports = cls => class Reifier extends Ideal(cls) {
// save it first, then prune out the optional trash, and then return it.
// support save=false option
- if (options.save === false || this[_global])
+ if (options.save === false || this[_global] || this[_dryRun])
return
process.emit('time', 'reify:save')
@@ -844,11 +773,10 @@ module.exports = cls => class Reifier extends Ideal(cls) {
// XXX preserve indentation maybe?
const pj = resolve(this.idealTree.path, 'package.json')
- if (this.idealTree.meta.yarnLock)
- this.idealTree.meta.yarnLock.fromTree(this.idealTree)
+ const saveOpt = { format: this[_formatPackageLock] }
return Promise.all([
- this.idealTree.meta.save(),
+ this[_usePackageLock] && this.idealTree.meta.save(saveOpt),
writeFile(pj, JSON.stringify({
...this.idealTree.package,
_id: undefined,
diff --git a/node_modules/@npmcli/arborist/lib/calc-dep-flags.js b/node_modules/@npmcli/arborist/lib/calc-dep-flags.js
index 3d612607a..73f1e60d7 100644
--- a/node_modules/@npmcli/arborist/lib/calc-dep-flags.js
+++ b/node_modules/@npmcli/arborist/lib/calc-dep-flags.js
@@ -1,10 +1,12 @@
const { depth } = require('treeverse')
-const calcDepFlags = tree => {
- tree.dev = false
- tree.optional = false
- tree.devOptional = false
- tree.peer = false
+const calcDepFlags = (tree, resetRoot = true) => {
+ if (resetRoot) {
+ tree.dev = false
+ tree.optional = false
+ tree.devOptional = false
+ tree.peer = false
+ }
const ret = depth({
tree,
visit: node => calcDepFlagsStep(node),
diff --git a/node_modules/@npmcli/arborist/lib/consistent-resolve.js b/node_modules/@npmcli/arborist/lib/consistent-resolve.js
index 05eb2b9ed..5d648de5b 100644
--- a/node_modules/@npmcli/arborist/lib/consistent-resolve.js
+++ b/node_modules/@npmcli/arborist/lib/consistent-resolve.js
@@ -4,7 +4,7 @@
// lockfiles, and for converting hosted git repos to a consistent url type.
const npa = require('npm-package-arg')
const relpath = require('./relpath.js')
-const consistentResolve = (resolved, fromPath, toPath) => {
+const consistentResolve = (resolved, fromPath, toPath, relPaths = false) => {
if (!resolved)
return null
@@ -17,8 +17,9 @@ const consistentResolve = (resolved, fromPath, toPath) => {
rawSpec,
raw,
} = npa(resolved, fromPath)
- return type === 'file' || type === 'directory'
- ? 'file:' + (toPath ? relpath(toPath, fetchSpec) : fetchSpec)
+ const isPath = type === 'file' || type === 'directory'
+ return isPath && !relPaths ? `file:${fetchSpec}`
+ : isPath ? 'file:' + (toPath ? relpath(toPath, fetchSpec) : fetchSpec)
: hosted ? 'git+' + hosted.sshurl({ noCommittish: false })
: type === 'git' ? saveSpec
// always return something. 'foo' is interpreted as 'foo@' otherwise.
diff --git a/node_modules/@npmcli/arborist/lib/dep-valid.js b/node_modules/@npmcli/arborist/lib/dep-valid.js
index e5d6d2a6b..198d34fa9 100644
--- a/node_modules/@npmcli/arborist/lib/dep-valid.js
+++ b/node_modules/@npmcli/arborist/lib/dep-valid.js
@@ -7,15 +7,20 @@
const semver = require('semver')
const npa = require('npm-package-arg')
const {resolve, relative} = require('path')
+const fromPath = require('./from-path.js')
const depValid = (child, requested, requestor) => {
// NB: we don't do much to verify 'tag' type requests.
// Just verify that we got a remote resolution. Presumably, it
// came from a registry and was tagged at some point.
+
if (typeof requested === 'string') {
- // '' is equivalent to '*'
try {
- requested = npa.resolve(child.name, requested || '*', requestor.realpath)
+ // tarball/dir must have resolved to the same tgz on disk, but for
+ // file: deps that depend on other files/dirs, we must resolve the
+ // location based on the *requestor* file/dir, not where it ends up.
+ // '' is equivalent to '*'
+ requested = npa.resolve(child.name, requested || '*', fromPath(requestor))
} catch (er) {
// Not invalid because the child doesn't match, but because
// the spec itself is not supported. Nothing would match,
@@ -52,9 +57,7 @@ const depValid = (child, requested, requestor) => {
relative(child.realpath, requested.fetchSpec) === ''
case 'file':
- // tarball must have resolved to the same tgz on disk
- return !child.isLink && !!child.resolved &&
- npa(child.resolved, child.path).fetchSpec === requested.fetchSpec
+ return tarballValid(child, requested, requestor)
case 'alias':
// check that the alias target is valid
@@ -104,6 +107,24 @@ const depValid = (child, requested, requestor) => {
return false
}
+const tarballValid = (child, requested, requestor) => {
+ if (child.isLink)
+ return false
+
+ if (child.resolved)
+ return child.resolved === `file:${requested.fetchSpec}`
+
+ // if we have a legacy mutated package.json file. we can't be 100%
+ // sure that it resolved to the same file, but if it was the same
+ // request, that's a pretty good indicator of sameness.
+ if (child.package._requested)
+ return child.package._requested.saveSpec === requested.saveSpec
+
+ // ok, we're probably dealing with some legacy cruft here, not much
+ // we can do at this point unfortunately.
+ return false
+}
+
module.exports = (child, requested, accept, requestor) =>
depValid(child, requested, requestor) ||
(typeof accept === 'string' ? depValid(child, accept, requestor) : false)
diff --git a/node_modules/@npmcli/arborist/lib/edge.js b/node_modules/@npmcli/arborist/lib/edge.js
index a0bb7a432..a8dd882b4 100644
--- a/node_modules/@npmcli/arborist/lib/edge.js
+++ b/node_modules/@npmcli/arborist/lib/edge.js
@@ -60,6 +60,10 @@ class Edge {
return depValid(node, this.spec, this.accept, this.from)
}
+ get workspace () {
+ return this[_type] === 'workspace'
+ }
+
get dev () {
return this[_type] === 'dev'
}
@@ -92,6 +96,18 @@ class Edge {
return !this.error
}
+ get missing () {
+ return this.error === 'MISSING'
+ }
+
+ get invalid () {
+ return this.error === 'INVALID'
+ }
+
+ get peerLocal () {
+ return this.error === 'PEER LOCAL'
+ }
+
get error () {
this[_error] = this[_error] || this[_loadError]()
return this[_error] === 'OK' ? null : this[_error]
diff --git a/node_modules/@npmcli/arborist/lib/from-path.js b/node_modules/@npmcli/arborist/lib/from-path.js
new file mode 100644
index 000000000..08be583d1
--- /dev/null
+++ b/node_modules/@npmcli/arborist/lib/from-path.js
@@ -0,0 +1,13 @@
+// file dependencies need their dependencies resolved based on the
+// location where the tarball was found, not the location where they
+// end up getting installed. directory (ie, symlink) deps also need
+// to be resolved based on their targets, but that's what realpath is
+
+const {dirname} = require('path')
+const npa = require('npm-package-arg')
+
+const fromPath = (node, spec) =>
+ spec && spec.type === 'file' ? dirname(spec.fetchSpec)
+ : node.realpath
+
+module.exports = node => fromPath(node, node.resolved && npa(node.resolved))
diff --git a/node_modules/@npmcli/arborist/lib/inventory.js b/node_modules/@npmcli/arborist/lib/inventory.js
index 085780061..fff75c039 100644
--- a/node_modules/@npmcli/arborist/lib/inventory.js
+++ b/node_modules/@npmcli/arborist/lib/inventory.js
@@ -41,7 +41,7 @@ class Inventory extends Map {
for (const [key, map] of this[_index].entries()) {
const val_ = node[key] || (node.package && node.package[key])
const val = typeof val_ === 'string' ? val_
- : typeof val_ === 'object'
+ : (val_ && typeof val_ === 'object')
? ( key === 'license' ? val_.type
: key === 'funding' ? val_.url
: /* istanbul ignore next */ val_)
diff --git a/node_modules/@npmcli/arborist/lib/node.js b/node_modules/@npmcli/arborist/lib/node.js
index 5a7661686..6ed6bfe21 100644
--- a/node_modules/@npmcli/arborist/lib/node.js
+++ b/node_modules/@npmcli/arborist/lib/node.js
@@ -61,9 +61,6 @@ const _workspaces = Symbol('_workspaces')
const relpath = require('./relpath.js')
const consistentResolve = require('./consistent-resolve.js')
-// for comparing nodes to yarn.lock entries
-const mismatch = (a, b) => a && b && a !== b
-
class Node {
constructor (options) {
// NB: path can be null if it's a link target
@@ -90,6 +87,7 @@ class Node {
devOptional = true,
peer = true,
global = false,
+ dummy = false,
} = options
// true if part of a global install
@@ -122,8 +120,13 @@ class Node {
// since _location is just where the module ended up in the tree,
// and _where can be different than the actual root if it's a
// meta-dep deeper in the dependency graph.
+ //
+ // If we don't have the other oldest indicators of legacy npm, then it's
+ // probably what we're getting from pacote, which IS trustworthy.
+ //
+ // Otherwise, hopefully a shrinkwrap will help us out.
const resolved = consistentResolve(pkg._resolved)
- if (resolved && !/^file:/.test(resolved))
+ if (resolved && !(/^file:/.test(resolved) && pkg._where))
this.resolved = resolved
}
this.integrity = integrity || pkg._integrity || null
@@ -142,11 +145,24 @@ class Node {
// a 3-levels-deep dependency of a non-dev dep. If we calc the
// flags along the way, then they'll tend to be invalid by the
// time we need to look at them.
- this.dev = dev
- this.optional = optional
- this.devOptional = devOptional
- this.peer = peer
- this.extraneous = extraneous
+ if (!dummy) {
+ this.dev = dev
+ this.optional = optional
+ this.devOptional = devOptional
+ this.peer = peer
+ this.extraneous = extraneous
+ this.dummy = false
+ } else {
+ // true if this is a placeholder for the purpose of serving as a
+ // fsParent to link targets that get their deps resolved outside
+ // the root tree folder.
+ this.dummy = true
+ this.dev = false
+ this.optional = false
+ this.devOptional = false
+ this.peer = false
+ this.extraneous = false
+ }
this.edgesIn = new Set()
this.edgesOut = new Map()
@@ -340,13 +356,13 @@ class Node {
this[_refreshLocation]()
if (this.top.meta)
- this.top.meta.add(this)
+ this[_refreshTopMeta]()
if (this.target && !nullRoot)
this.target.root = root
- this.children.forEach(c => c.root = root)
this.fsChildren.forEach(c => c.root = root)
+ this.children.forEach(c => c.root = root)
/* istanbul ignore next */
dassert(this === root || this.inventory.size === 0)
}
@@ -448,17 +464,18 @@ class Node {
// Almost certainly due to being a linked workspace-style package.
this[_fsParent] = fsParent
fsParent.fsChildren.add(this)
+ // refresh the path BEFORE setting root, so meta gets updated properly
+ this[_refreshPath](fsParent, current && current.path)
this.root = fsParent.root
this[_reloadEdges](e => !e.to)
- this[_refreshPath](fsParent, current && current.path)
}
// called when we find that we have an fsParent which could account
// for some missing edges which are actually fine and not missing at all.
[_reloadEdges] (filter) {
this.edgesOut.forEach(edge => filter(edge) && edge.reload())
- this.children.forEach(c => c[_reloadEdges](filter))
this.fsChildren.forEach(c => c[_reloadEdges](filter))
+ this.children.forEach(c => c[_reloadEdges](filter))
}
// is it safe to replace one node with another? check the edges to
@@ -485,6 +502,40 @@ class Node {
return node.canReplaceWith(this)
}
+ matches (node) {
+ // if the nodes are literally the same object, obviously a match.
+ if (node === this)
+ return true
+
+ // if the names don't match, they're different things, even if
+ // the package contents are identical.
+ if (node.name !== this.name)
+ return false
+
+ // if they're links, they match if the targets match
+ if (this.isLink)
+ return node.isLink && this.target.matches(node.target)
+
+ // if they're two root nodes, they're different if the paths differ
+ if (this.isRoot && node.isRoot)
+ return this.path === node.path
+
+ // if the integrity matches, then they're the same.
+ if (this.integrity && node.integrity)
+ return this.integrity === node.integrity
+
+ // if no integrity, check resolved
+ if (this.resolved && node.resolved)
+ return this.resolved === node.resolved
+
+ // if no resolved, check both package name and version
+ // otherwise, conclude that they are different things
+ return this.package.name && node.package.name &&
+ this.package.name === node.package.name &&
+ this.package.version && node.package.version &&
+ this.package.version === node.package.version
+ }
+
// replace this node with the supplied argument
// Useful when mutating an ideal tree, so we can avoid having to call
// the parent/root setters more than necessary.
@@ -497,8 +548,8 @@ class Node {
// pretend to be in the tree, so top/etc refs are not changing for kids.
node.parent = null
node[_parent] = this[_parent]
+ this.fsChildren.forEach(c => c.fsParent = node)
this.children.forEach(c => c.parent = node)
- this.fsChildren.forEach(c=> c.fsParent = node)
// now remove the hidden reference, and call parent setter to finalize.
node[_parent] = null
node.parent = this.parent
@@ -565,8 +616,8 @@ class Node {
// we are about to change the parent, and thus the top, so we have
// to delist from the metadata now to ensure we remove it from the
// proper top node metadata if it isn't the root.
- this.children.forEach(c => c[_delistFromMeta]())
this.fsChildren.forEach(c => c[_delistFromMeta]())
+ this.children.forEach(c => c[_delistFromMeta]())
}
// remove from former parent.
@@ -597,7 +648,7 @@ class Node {
// if the root isn't changing, then this is a no-op.
// the root setter is a no-op if the root didn't change, so we have
// to manually call the method to update location and metadata
- if (this.root === newRoot)
+ if (!rootChange)
this[_refreshLocation]()
else
this.root = newRoot
@@ -625,6 +676,13 @@ class Node {
// since loading a parent can add *or change* resolutions, we also
// walk the tree from this point reloading all edges.
this[_reloadEdges](e => true)
+
+ // have to refresh the location of children and fsChildren at this point,
+ // because their paths have likely changed, and root may have been set.
+ if (!rootChange) {
+ this.children.forEach(c => c[_refreshLocation]())
+ this.fsChildren.forEach(c => c[_refreshLocation]())
+ }
}
// called after changing the parent (and thus the top), and after changing
@@ -674,9 +732,7 @@ class Node {
if (newPath === oldPath)
return
- if (this.path && this.resolved && /^file:/.test(this.resolved))
- this.resolved = consistentResolve(this.resolved, this.path, newPath)
-
+ this[_delistFromMeta]()
this.path = newPath
if (!this.isLink) {
this.realpath = this.path
@@ -687,8 +743,9 @@ class Node {
}
}
- this.children.forEach(c => c[_refreshPath](this, oldPath))
+ this[_refreshLocation]()
this.fsChildren.forEach(c => c[_refreshPath](this, oldPath))
+ this.children.forEach(c => c[_refreshPath](this, oldPath))
}
@@ -700,48 +757,8 @@ class Node {
this.location = relpath(root.realpath, this.path)
root.inventory.add(this)
-
- // try to get metadata, and write to the root's store if we can
- if (root.meta) {
- if (this.resolved === null || this.integrity === null) {
- const {
- resolved,
- integrity,
- hasShrinkwrap,
- } = root.meta.get(this.path)
- const pathFixed = !resolved ? null
- : !/^file:/.test(resolved) ? resolved
- // resolve onto the metadata path, then realpath to there from here
- : `file:${relpath(this.path,
- resolve(root.realpath, resolved.substr(5)))}`
-
- // if we have one, only set the other if it matches
- // otherwise it could be for a completely different thing.
- const resolvedOk = !resolved || !this.resolved ||
- this.resolved === pathFixed
- const integrityOk = !integrity || !this.integrity ||
- this.integrity === integrity
-
- if ((resolved || integrity) && resolvedOk && integrityOk) {
- this.resolved = this.resolved || pathFixed || null
- this.integrity = this.integrity || integrity || null
- this.hasShrinkwrap = this.hasShrinkwrap || hasShrinkwrap || false
- } else {
- // try to read off the package or node itself.
- const {
- resolved,
- integrity,
- hasShrinkwrap,
- } = Shrinkwrap.metaFromNode(this, this.path)
- this.resolved = this.resolved || resolved || null
- this.integrity = this.integrity || integrity || null
- this.hasShrinkwrap = this.hasShrinkwrap || hasShrinkwrap || false
- }
- }
-
- // add to the root meta so we don't do this dance more than once.
+ if (root.meta)
root.meta.add(this)
- }
}
addEdgeOut (edge) {
@@ -750,41 +767,20 @@ class Node {
addEdgeIn (edge) {
this.edgesIn.add(edge)
- if (!edge.valid)
- return
-
- // try to get metadata from the yarn.lock file if we need it
- const needsMeta = !this.resolved || !this.integrity
- if (!needsMeta)
- return
-
- const yarnLock = this.root.meta && this.root.meta.yarnLock
- if (!yarnLock || !yarnLock.entries || !yarnLock.entries.size)
- return
-
- const pathFixed = !this.resolved ? null
- : !/file:/.test(this.resolved) ? this.resolved
- : consistentResolve(this.resolved, this.path, this.root.meta.path)
- const entry = yarnLock.entries.get(`${this.name}@${edge.spec}`)
- // skip any entries that don't appear to be referring to this thing
- if (!entry ||
- mismatch(this.package.version, entry.version) ||
- mismatch(this.integrity, entry.integrity) ||
- mismatch(pathFixed, entry.resolved))
- return
-
- // ok, this is probably it! get what we can from it.
- this.integrity = this.integrity || entry.integrity || null
- this.resolved = this.resolved ||
- consistentResolve(entry.resolved, this.root.meta.path, this.path) ||
- null
+ // try to get metadata from the yarn.lock file
+ if (this.root.meta)
+ this.root.meta.addEdge(edge)
}
[_reloadNamedEdges] (name, root) {
// either it's the node in question, or it's going to block it anyway
- if (this.name === name && !this.isTop)
+ if (this.name === name && !this.isTop) {
+ // reload the edges in so that anything that SHOULD be blocked
+ // by this node actually will be.
+ this.edgesIn.forEach(e => e.reload())
return
+ }
const edge = this.edgesOut.get(name)
// if we don't have an edge, do nothing, but keep descending
diff --git a/node_modules/@npmcli/arborist/lib/shrinkwrap.js b/node_modules/@npmcli/arborist/lib/shrinkwrap.js
index 5954ba4eb..f7f96159e 100644
--- a/node_modules/@npmcli/arborist/lib/shrinkwrap.js
+++ b/node_modules/@npmcli/arborist/lib/shrinkwrap.js
@@ -11,19 +11,26 @@
const lockfileVersion = 2
-// don't update this.data right away when doing this.add(node) Just add the
-// node to a set of nodes needing an update. When this.get(location) or
-// this.save() is called, then update the data for the location(s) in question.
-// That way, it doesn't matter whether the node.devOptional/etc is set when
-// adding, since we'll look at it's current state when it's time to check,
-// which will be when the caller is ready to do something with the data.
-
-// It's tempting to handle yarn.lock files here as well. However, since they
-// don't capture the shape of the tree, they're not useful for most of the
-// cases we care about. We load them in this class because it's convenient
-// to do so while loading other lock files, but they're not kept automatically
-// in sync. Rather, if present, they're just used as a fallback, and updated
-// all at once at the end with the state of the fully realized tree.
+// for comparing nodes to yarn.lock entries
+const mismatch = (a, b) => a && b && a !== b
+
+// this.tree => the root node for the tree (ie, same path as this)
+// - Set the first time we do `this.add(node)` for a path matching this.path
+//
+// this.add(node) =>
+// - decorate the node with the metadata we have, if we have it, and it matches
+// - add to the map of nodes needing to be committed, so that subsequent
+// changes are captured when we commit that location's metadata.
+//
+// this.commit() =>
+// - commit all nodes awaiting update to their metadata entries
+// - re-generate this.data and this.yarnLock based on this.tree
+//
+// Note that between this.add() and this.commit(), `this.data` will be out of
+// date! Always call `commit()` before relying on it.
+//
+// After calling this.commit(), any nodes not present in the tree will have
+// been removed from the shrinkwrap data as well.
const YarnLock = require('./yarn-lock.js')
const {promisify} = require('util')
@@ -33,6 +40,7 @@ const readFile = promisify(fs.readFile)
const writeFile = promisify(fs.writeFile)
const stat = promisify(fs.stat)
const link = promisify(fs.link)
+const readdir = promisify(fs.readdir)
const { resolve, dirname, basename } = require('path')
const specFromLock = require('./spec-from-lock.js')
const versionFromTgz = require('./version-from-tgz.js')
@@ -86,7 +94,7 @@ const maybeStatFile = file => {
}
const pkgMetaKeys = [
- 'name',
+ // note: name is included if necessary, for alias packages
'version',
'dependencies',
'peerDependencies',
@@ -124,16 +132,44 @@ const metaFieldFromPkg = (pkg, key) => {
? val
: null
}
+
+// check to make sure that there are no packages newer than the hidden lockfile
+const assertNoNewer = async (path, data, lockTime, dir = path) => {
+ const base = basename(dir)
+ const isNM = dir !== path && base === 'node_modules'
+ const isScope = dir !== path && !isNM && base.charAt(0) === '@'
+ const isParent = dir === path || isNM || isScope
+
+ if (dir !== path) {
+ const dirTime = (await stat(dir)).mtime
+ const rel = relpath(path, dir)
+ if (dirTime > lockTime)
+ throw 'out of date, updated: ' + rel
+ if (!isScope && !isNM && !data.packages[rel])
+ throw 'missing from lockfile: ' + rel
+ }
+
+ const parent = isParent ? dir : resolve(dir, 'node_modules')
+ const children = dir === path
+ ? Promise.resolve([{name: 'node_modules', isDirectory: () => true }])
+ : readdir(parent, { withFileTypes: true })
+
+ return children.catch(() => [])
+ .then(ents => Promise.all(
+ ents.filter(ent => ent.isDirectory() && !/^\./.test(ent.name))
+ .map(ent => assertNoNewer(path, data, lockTime, resolve(parent, ent.name)))
+ ))
+}
+
const _awaitingUpdate = Symbol('_awaitingUpdate')
const _updateWaitingNode = Symbol('_updateWaitingNode')
const _lockFromLoc = Symbol('_lockFromLoc')
-const _createLockFromLoc = Symbol('_createLockFromLoc')
const _pathToLoc = Symbol('_pathToLoc')
const _loadAll = Symbol('_loadAll')
const _metaFromLock = Symbol('_metaFromLock')
const _resolveMetaNode = Symbol('_resolveMetaNode')
const _fixDependencies = Symbol('_fixDependencies')
-
+const _buildLegacyLockfile = Symbol('_buildLegacyLockfile')
const _filenameSet = Symbol('_filenameSet')
const _maybeRead = Symbol('_maybeRead')
const _maybeStat = Symbol('_maybeStat')
@@ -142,6 +178,10 @@ class Shrinkwrap {
return new Shrinkwrap(options).load()
}
+ static get keyOrder () {
+ return swKeyOrder
+ }
+
static reset (options) {
// still need to know if it was loaded from the disk, but don't
// bother reading it if we're gonna just throw it away.
@@ -172,6 +212,10 @@ class Shrinkwrap {
if (val)
meta[key.replace(/^_/, '')] = val
})
+ // we only include name if different from the node path name
+ const pname = node.package.name
+ if (pname && pname !== node.name)
+ meta.name = pname
if (node.isTop && node.package.devDependencies)
meta.devDependencies = node.package.devDependencies
@@ -181,9 +225,8 @@ class Shrinkwrap {
meta[key] = node[key]
})
- const res = node.resolved || node.package._resolved
- const resolved = consistentResolve(res, node.path, path)
- if (resolved && (node.resolved || !/^file:/.test(resolved)))
+ const resolved = consistentResolve(node.resolved, node.path, path, true)
+ if (resolved)
meta.resolved = resolved
if (node.extraneous)
@@ -209,6 +252,7 @@ class Shrinkwrap {
hiddenLockfile = false,
} = options
this[_awaitingUpdate] = new Map()
+ this.tree = null
this.path = resolve(path || '.')
this.filename = null
this.data = null
@@ -217,13 +261,50 @@ class Shrinkwrap {
this.type = null
this.yarnLock = null
this.hiddenLockfile = hiddenLockfile
+ this.loadingError = null
// only load npm-shrinkwrap.json in dep trees, not package-lock
this.shrinkwrapOnly = shrinkwrapOnly
}
+ // check to see if a spec is present in the yarn.lock file, and if so,
+ // if we should use it, and what it should resolve to. This is only
+ // done when we did not load a shrinkwrap from disk. Also, decorate
+ // the options object if provided with the resolved and integrity that
+ // we expect.
+ checkYarnLock (spec, options = {}) {
+ spec = npa(spec)
+ const { yarnLock, loadedFromDisk } = this
+ const useYarnLock = yarnLock && !loadedFromDisk
+ const fromYarn = useYarnLock && yarnLock.entries.get(spec.raw)
+ if (fromYarn && fromYarn.version) {
+ // if it's the yarn or npm default registry, use the version as
+ // our effective spec. if it's any other kind of thing, use that.
+ const yarnRegRe = /^https?:\/\/registry.yarnpkg.com\//
+ const npmRegRe = /^https?:\/\/registry.npmjs.org\//
+ const {resolved, version, integrity} = fromYarn
+ const isYarnReg = spec.registry && yarnRegRe.test(resolved)
+ const isnpmReg = spec.registry && !isYarnReg && npmRegRe.test(resolved)
+ const isReg = isnpmReg || isYarnReg
+ // don't use the simple version if the "registry" url is
+ // something else entirely!
+ const tgz = isReg && versionFromTgz(spec.name, resolved) || {}
+ const yspec = tgz.name === spec.name && tgz.version === version ? version
+ : isReg && tgz.name && tgz.version ? `npm:${tgz.name}@${tgz.version}`
+ : resolved
+ if (yspec) {
+ options.resolved = resolved.replace(yarnRegRe, 'https://registry.npmjs.org/')
+ options.integrity = integrity
+ return npa(`${spec.name}@${yspec}`)
+ }
+ }
+ return spec
+ }
+
// throw away the shrinkwrap data so we can start fresh
// still worth doing a load() first so we know which files to write.
reset () {
+ this.tree = null
+ this[_awaitingUpdate] = new Map()
this.data = {
lockfileVersion,
requires: true,
@@ -263,9 +344,12 @@ class Shrinkwrap {
return this[_maybeRead]().then(([sw, lock, yarn]) => {
const data = lock || sw || ''
// don't use detect-indent, just pick the first line.
- const indent = data.match(/^\{\n([\s\t]+)/)
+ // if the file starts with {" then we have an indent of '', ie, none
+ // which will default to 2 at save time.
+ const indent = data.match(/^\{\n?([\s\t]*)"/)
if (indent)
this.indent = indent[1]
+
// use shrinkwrap only for deps, otherwise prefer package-lock
// and ignore npm-shrinkwrap if both are present.
// TODO: emit a warning here or something if both are present.
@@ -273,6 +357,7 @@ class Shrinkwrap {
(this.hiddenLockfile ? 'node_modules/.package-lock'
: this.shrinkwrapOnly || sw && !lock ? 'npm-shrinkwrap'
: 'package-lock') + '.json')
+
this.type = basename(this.filename)
this.loadedFromDisk = !!data
@@ -283,7 +368,18 @@ class Shrinkwrap {
}
return data ? parseJSON(data) : {}
+ }).then(async data => {
+ if (!this.hiddenLockfile || !data.packages)
+ return data
+
+ // add a few ms just to account for jitter
+ const lockTime = +(await stat(this.filename)).mtime + 10
+ await assertNoNewer(this.path, data, lockTime)
+
+ // all good! hidden lockfile is the newest thing in here.
+ return data
}).catch(er => {
+ this.loadingError = er
this.loadedFromDisk = false
return {}
}).then(lock => {
@@ -306,6 +402,19 @@ class Shrinkwrap {
}).then(() => this)
}
+ [_loadAll] (location, name, lock) {
+ // migrate a v1 package lock to the new format.
+ const meta = this[_metaFromLock](location, name, lock)
+ // dependencies nested under a link are actually under the link target
+ if (meta.link)
+ location = meta.resolved
+ if (lock.dependencies) {
+ for (const [name, dep] of Object.entries(lock.dependencies)) {
+ const loc = location + (location ? '/' : '') + 'node_modules/' + name
+ this[_loadAll](loc, name, dep)
+ }
+ }
+ }
// v1 lockfiles track the optional/dev flags, but they don't tell us
// which thing had what kind of dep on what other thing, so we need
@@ -363,19 +472,6 @@ class Shrinkwrap {
}
- [_loadAll] (location, name, lock) {
- // migrate a v1 package lock to the new format.
- this[_metaFromLock](location, name, lock)
- if (lock.dependencies) {
- Object.keys(lock.dependencies).forEach(name =>
- this[_loadAll](
- location + (location ? '/' : '') + 'node_modules/' + name,
- name,
- lock.dependencies[name]
- ))
- }
- }
-
[_lockFromLoc] (lock, path, i = 0) {
if (!lock)
return null
@@ -392,18 +488,6 @@ class Shrinkwrap {
return this[_lockFromLoc](lock.dependencies[path[i]], path, i + 1)
}
- [_createLockFromLoc] (lock, path, i = 0) {
- if (path[i] === '')
- i++
-
- if (i === path.length)
- return lock
-
- lock.dependencies = lock.dependencies || {}
- lock.dependencies[path[i]] = lock.dependencies[path[i]] || {}
- return this[_createLockFromLoc](lock.dependencies[path[i]], path, i + 1)
- }
-
// pass in a path relative to the root path, or an absolute path,
// get back a /-normalized location based on root path.
[_pathToLoc] (path) {
@@ -454,6 +538,26 @@ class Shrinkwrap {
if (!lock)
return {}
+ // try to figure out a npm-package-arg spec from the lockfile entry
+ // This will return null if we could not get anything valid out of it.
+ const spec = specFromLock(name, lock, this.path)
+
+ if (spec.type === 'directory') {
+ // the "version" was a file: url to a non-tarball path
+ // this is a symlink dep. We don't store much metadata
+ // about symlinks, just the target.
+ const target = relpath(this.path, spec.fetchSpec)
+ this.data.packages[location] = {
+ link: true,
+ resolved: target,
+ }
+ // also save the link target, omitting version since we don't know
+ // what it is, but we know it isn't a link to itself!
+ if (!this.data.packages[target])
+ this[_metaFromLock](target, name, { ...lock, version: null })
+ return this.data.packages[location]
+ }
+
const meta = {}
// when calling loadAll we'll change these into proper dep objects
if (lock.requires && typeof lock.requires === 'object')
@@ -473,19 +577,6 @@ class Shrinkwrap {
if (lock.integrity)
meta.integrity = lock.integrity
- // try to figure out a npm-package-arg spec from the lockfile entry
- // This will return null if we could not get anything valid out of it.
- const spec = specFromLock(name, lock, this.path)
-
- if (spec.type === 'directory') {
- // the "version" was a file: url to a non-tarball path
- // this is a symlink dep. We don't store much metadata
- // about symlinks, just the target.
- meta.link = true
- meta.resolved = relpath(this.path, spec.fetchSpec)
- return this.data.packages[location] = meta
- }
-
if (lock.version && !lock.integrity) {
// this is usually going to be a git url or symlink, but it could
// also be a registry dependency that did not have integrity at
@@ -520,7 +611,7 @@ class Shrinkwrap {
if (spec.registry)
meta.resolved = lock.resolved
else if (spec.type === 'file')
- meta.resolved = consistentResolve(spec, this.path, this.path)
+ meta.resolved = consistentResolve(spec, this.path, this.path, true)
else if (spec.fetchSpec)
meta.resolved = spec.fetchSpec
}
@@ -558,32 +649,123 @@ class Shrinkwrap {
// will be actually updated on read
const loc = relpath(this.path, node.path)
+ if (node.path === this.path)
+ this.tree = node
+
+ // if we have metadata about this node, and it's a match, then
+ // try to decorate it.
+ if (node.resolved === null || node.integrity === null) {
+ const {
+ resolved,
+ integrity,
+ hasShrinkwrap,
+ } = this.get(node.path)
+
+ const pathFixed = !resolved ? null
+ : !/^file:/.test(resolved) ? resolved
+ // resolve onto the metadata path
+ : `file:${resolve(this.path, resolved.substr(5))}`
+
+ // if we have one, only set the other if it matches
+ // otherwise it could be for a completely different thing.
+ const resolvedOk = !resolved || !node.resolved ||
+ node.resolved === pathFixed
+ const integrityOk = !integrity || !node.integrity ||
+ node.integrity === integrity
+
+ if ((resolved || integrity) && resolvedOk && integrityOk) {
+ node.resolved = node.resolved || pathFixed || null
+ node.integrity = node.integrity || integrity || null
+ node.hasShrinkwrap = node.hasShrinkwrap || hasShrinkwrap || false
+ } else {
+ // try to read off the package or node itself
+ const {
+ resolved,
+ integrity,
+ hasShrinkwrap,
+ } = Shrinkwrap.metaFromNode(node, this.path)
+ node.resolved = node.resolved || resolved || null
+ node.integrity = node.integrity || integrity || null
+ node.hasShrinkwrap = node.hasShrinkwrap || hasShrinkwrap || false
+ }
+ }
this[_awaitingUpdate].set(loc, node)
}
- [_updateWaitingNode] (loc) {
- const node = this[_awaitingUpdate].get(loc)
- this[_awaitingUpdate].delete(loc)
+ addEdge (edge) {
+ if (!this.yarnLock || !edge.valid)
+ return
- const meta = Shrinkwrap.metaFromNode(node, this.path)
- this.data.packages[loc] = meta
+ const { to: node } = edge
- // hidden lockfiles don't include legacy metadata
- if (this.hiddenLockfile)
+ // if it's already set up, nothing to do
+ if (node.resolved !== null && node.integrity !== null)
return
- const path = loc.split(/(?:^|\/)node_modules\//)
+ // if the yarn lock is empty, nothing to do
+ if (!this.yarnLock.entries || !this.yarnLock.entries.size)
+ return
- // legacy shrinkwraps don't track nodes outside of node_modules
- if (!node.parent && node.path !== this.path)
+ // we relativize the path here because that's how it shows up in the lock
+ // XXX how is this different from pathFixed above??
+ const pathFixed = !node.resolved ? null
+ : !/file:/.test(node.resolved) ? node.resolved
+ : consistentResolve(node.resolved, node.path, this.path, true)
+
+ const entry = this.yarnLock.entries.get(`${node.name}@${edge.spec}`)
+
+ if (!entry ||
+ mismatch(node.package.version, entry.version) ||
+ mismatch(node.integrity, entry.integrity) ||
+ mismatch(pathFixed, entry.resolved))
return
- const lock = this[_createLockFromLoc](this.data, path)
+ node.integrity = node.integrity || entry.integrity || null
+ node.resolved = node.resolved ||
+ consistentResolve(entry.resolved, this.path, node.path) || null
+
+ this[_awaitingUpdate].set(relpath(this.path, node.path), node)
+ }
+
+ [_updateWaitingNode] (loc) {
+ const node = this[_awaitingUpdate].get(loc)
+ this[_awaitingUpdate].delete(loc)
+ this.data.packages[loc] = Shrinkwrap.metaFromNode(node, this.path)
+ }
+
+ commit () {
+ if (this.tree) {
+ if (this.yarnLock)
+ this.yarnLock.fromTree(this.tree)
+ const root = Shrinkwrap.metaFromNode(this.tree, this.path)
+ this.data.packages = {}
+ if (Object.keys(root).length)
+ this.data.packages[''] = root
+ for (const node of this.tree.inventory.values()) {
+ const loc = relpath(this.path, node.path)
+ this.data.packages[loc] = Shrinkwrap.metaFromNode(node, this.path)
+ }
+ } else if (this[_awaitingUpdate].size > 0) {
+ for (const loc of this[_awaitingUpdate].keys()) {
+ this[_updateWaitingNode](loc)
+ }
+ }
+
+ // hidden lockfiles don't include legacy metadata or a root entry
+ if (this.hiddenLockfile) {
+ delete this.data.packages['']
+ delete this.data.dependencies
+ } else if (this.tree)
+ this[_buildLegacyLockfile](this.tree, this.data)
- // set legacy shrinkwrap data
- if (node.path === this.path) {
+ return this.data
+ }
+
+ [_buildLegacyLockfile] (node, lock, path = []) {
+ if (node === this.tree) {
+ // the root node
lock.name = node.package.name || node.name
- if (node.package && node.package.version)
+ if (node.package.version)
lock.version = node.package.version
}
@@ -595,7 +777,7 @@ class Shrinkwrap {
// dep link out of the edgesIn set. Choose the edge with the fewest
// number of `node_modules` sections in the requestor path, and then
// lexically sort afterwards.
- const edge = [...node.edgesIn].filter(edge => edge.valid).sort((a, b) => {
+ const edge = [...node.edgesIn].filter(e => e.valid).sort((a, b) => {
const aloc = a.from.location.split('node_modules')
const bloc = b.from.location.split('node_modules')
/* istanbul ignore next - sort calling order is indeterminate */
@@ -603,19 +785,23 @@ class Shrinkwrap {
: bloc.length > aloc.length ? -1
: aloc[aloc.length - 1].localeCompare(bloc[bloc.length - 1])
})[0]
- // if we don't have one, just an empty object so nothing matches below
+
+ const res = consistentResolve(node.resolved, this.path, this.path, true)
+ const rSpec = specFromResolved(res)
+
+ // if we don't have anything (ie, it's extraneous) then use the resolved
+ // value as if that was where we got it from, since at least it's true.
+ // if we don't have either, just an empty object so nothing matches below.
// This will effectively just save the version and resolved, as if it's
// a standard version/range dep, which is a reasonable default.
- const spec = !edge ? {}
+ const spec = !edge ? rSpec
: npa.resolve(node.name, edge.spec, edge.from.realpath)
- const rSpec = specFromResolved(node.resolved)
-
if (node.target)
lock.version = `file:${relpath(this.path, node.realpath)}`
else if (spec && (spec.type === 'file' || spec.type === 'remote'))
lock.version = spec.saveSpec
- else if (spec && spec.type === 'git' || rSpec && rSpec.type === 'git') {
+ else if (spec && spec.type === 'git' || rSpec.type === 'git') {
lock.version = node.resolved
/* istanbul ignore else - don't think there are any cases where a git
* spec (or indeed, ANY npa spec) doesn't have a .raw member */
@@ -648,10 +834,6 @@ class Shrinkwrap {
if (node.integrity)
lock.integrity = node.integrity
- // XXX: may need to clean up old flags if lock updated multiple times
- // If we see things like "extraneous":true,"optional":true, then that'll
- // be an indication that the lock is updating multiple times, and we'll
- // have to delete keys that are no longer valid.
if (node.extraneous)
lock.extraneous = true
else if (!node.isLink) {
@@ -668,9 +850,10 @@ class Shrinkwrap {
lock.optional = true
}
- if (node.edgesOut.size > 0) {
- if (node.path !== this.path) {
- lock.requires = [...node.edgesOut.entries()].reduce((set, [k, v]) => {
+ const depender = node.target || node
+ if (depender.edgesOut.size > 0) {
+ if (node !== this.tree) {
+ lock.requires = [...depender.edgesOut.entries()].reduce((set, [k, v]) => {
set[k] = v.spec
return set
}, {})
@@ -678,27 +861,36 @@ class Shrinkwrap {
lock.requires = true
}
}
- }
- commit () {
- if (this[_awaitingUpdate].size > 0) {
- for (const loc of this[_awaitingUpdate].keys()) {
- this[_updateWaitingNode](loc)
+ // now we walk the children, putting them in the 'dependencies' object
+ const {children} = node.target || node
+ if (!children.size)
+ delete lock.dependencies
+ else {
+ const kidPath = [...path, node.realpath]
+ const dependencies = {}
+ // skip any that are already in the descent path, so cyclical link
+ // dependencies don't blow up with ELOOP.
+ let found = false
+ for (const [name, kid] of children.entries()) {
+ if (path.includes(kid.realpath))
+ continue
+ dependencies[name] = this[_buildLegacyLockfile](kid, {}, kidPath)
+ found = true
}
+ if (found)
+ lock.dependencies = dependencies
}
-
- // hidden lockfiles don't include legacy metadata
- if (this.hiddenLockfile)
- delete this.data.dependencies
-
- return this.data
+ return lock
}
- save () {
+ save (options = {}) {
if (!this.data)
throw new Error('run load() before saving data')
- const json = stringify(this.commit(), swKeyOrder, this.indent)
+ const { format = true } = options
+ const indent = format ? this.indent || 2 : 0
+ const json = stringify(this.commit(), swKeyOrder, indent)
return Promise.all([
writeFile(this.filename, json).catch(er => {
if (this.hiddenLockfile) {
diff --git a/node_modules/@npmcli/arborist/lib/yarn-lock.js b/node_modules/@npmcli/arborist/lib/yarn-lock.js
index 7c84083be..f9099ecb2 100644
--- a/node_modules/@npmcli/arborist/lib/yarn-lock.js
+++ b/node_modules/@npmcli/arborist/lib/yarn-lock.js
@@ -172,7 +172,8 @@ class YarnLock {
this.current.resolved = consistentResolve(
node.resolved,
node.isLink ? dirname(node.path) : node.path,
- node.root.path
+ node.root.path,
+ true
)
if (node.integrity)
this.current.integrity = node.integrity
diff --git a/node_modules/@npmcli/arborist/package.json b/node_modules/@npmcli/arborist/package.json
index 789fd901d..4ef884d2d 100644
--- a/node_modules/@npmcli/arborist/package.json
+++ b/node_modules/@npmcli/arborist/package.json
@@ -1,8 +1,8 @@
{
"_from": "@npmcli/arborist@latest",
- "_id": "@npmcli/arborist@0.0.0-pre.19",
+ "_id": "@npmcli/arborist@0.0.0-pre.20",
"_inBundle": false,
- "_integrity": "sha512-ddHXJyMNlrmrgvVuuca4qAXO9sfpZyMaDnePnjdezkG2snVWbQMUxV6S4zRS3aUtiQOL8E6GPROGRjBRkMDYvA==",
+ "_integrity": "sha512-ILYXVLsWmqdC9UJY238NOXkNnfM9/3dhQKwAApPRDIgmciC/rUb7KIg5R3sbDA/LmurXGZ3wgtzgn6VokJfqZQ==",
"_location": "/@npmcli/arborist",
"_phantomChildren": {
"glob": "7.1.4",
@@ -27,8 +27,8 @@
"#USER",
"/"
],
- "_resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-0.0.0-pre.19.tgz",
- "_shasum": "bb33d5bc383226c7b379832dcbf32d9004d946ac",
+ "_resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-0.0.0-pre.20.tgz",
+ "_shasum": "1083553530a57587e6aa06d3375c13a63b7be534",
"_spec": "@npmcli/arborist@latest",
"_where": "/Users/isaacs/dev/npm/cli",
"author": {
@@ -46,19 +46,22 @@
"@npmcli/name-from-folder": "^1.0.1",
"@npmcli/run-script": "^1.3.1",
"bin-links": "^2.1.2",
+ "cacache": "^15.0.3",
+ "common-ancestor-path": "^1.0.1",
"json-stringify-nice": "^1.1.1",
"mkdirp-infer-owner": "^1.0.2",
"npm-install-checks": "^4.0.0",
"npm-package-arg": "^8.0.0",
"npm-pick-manifest": "^6.1.0",
- "pacote": "^11.1.9",
+ "pacote": "^11.1.10",
"parse-conflict-json": "^1.0.0",
"promise-all-reject-late": "^1.0.0",
"promise-call-limit": "^1.0.1",
"read-package-json-fast": "^1.1.0",
"readdir-scoped-modules": "^1.1.0",
"semver": "^7.1.2",
- "treeverse": "^1.0.1"
+ "treeverse": "^1.0.1",
+ "walk-up-path": "^1.0.0"
},
"deprecated": false,
"description": "Manage node_modules trees",
@@ -94,5 +97,5 @@
"esm": false,
"timeout": "60"
},
- "version": "0.0.0-pre.19"
+ "version": "0.0.0-pre.20"
}
diff --git a/node_modules/@npmcli/move-file/LICENSE.md b/node_modules/@npmcli/move-file/LICENSE.md
new file mode 100644
index 000000000..072bf2084
--- /dev/null
+++ b/node_modules/@npmcli/move-file/LICENSE.md
@@ -0,0 +1,22 @@
+MIT License
+
+Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
+Copyright (c) npm, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/@npmcli/move-file/README.md b/node_modules/@npmcli/move-file/README.md
new file mode 100644
index 000000000..da682ebd5
--- /dev/null
+++ b/node_modules/@npmcli/move-file/README.md
@@ -0,0 +1,68 @@
+# @npmcli/move-file
+
+A fork of [move-file](https://github.com/sindresorhus/move-file) with
+compatibility with all node 10.x versions.
+
+> Move a file
+
+The built-in
+[`fs.rename()`](https://nodejs.org/api/fs.html#fs_fs_rename_oldpath_newpath_callback)
+is just a JavaScript wrapper for the C `rename(2)` function, which doesn't
+support moving files across partitions or devices. This module is what you
+would have expected `fs.rename()` to be.
+
+## Highlights
+
+- Promise API.
+- Supports moving a file across partitions and devices.
+- Optionally prevent overwriting an existing file.
+- Creates non-existent destination directories for you.
+- Support for Node versions that lack built-in recursive `fs.mkdir()`
+
+## Install
+
+```
+$ npm install @npmcli/move-file
+```
+
+## Usage
+
+```js
+const moveFile = require('@npmcli/move-file');
+
+(async () => {
+ await moveFile('source/unicorn.png', 'destination/unicorn.png');
+ console.log('The file has been moved');
+})();
+```
+
+## API
+
+### moveFile(source, destination, options?)
+
+Returns a `Promise` that resolves when the file has been moved.
+
+### moveFile.sync(source, destination, options?)
+
+#### source
+
+Type: `string`
+
+File you want to move.
+
+#### destination
+
+Type: `string`
+
+Where you want the file moved.
+
+#### options
+
+Type: `object`
+
+##### overwrite
+
+Type: `boolean`\
+Default: `true`
+
+Overwrite existing destination file.
diff --git a/node_modules/@npmcli/move-file/index.js b/node_modules/@npmcli/move-file/index.js
new file mode 100644
index 000000000..d1567d1f6
--- /dev/null
+++ b/node_modules/@npmcli/move-file/index.js
@@ -0,0 +1,93 @@
+const { dirname } = require('path')
+const { promisify } = require('util')
+const {
+ access: access_,
+ accessSync,
+ copyFile: copyFile_,
+ copyFileSync,
+ unlink: unlink_,
+ unlinkSync,
+ rename: rename_,
+ renameSync,
+} = require('fs')
+
+const access = promisify(access_)
+const copyFile = promisify(copyFile_)
+const unlink = promisify(unlink_)
+const rename = promisify(rename_)
+
+const mkdirp = require('mkdirp')
+
+const pathExists = async path => {
+ try {
+ await access(path)
+ return true
+ } catch (er) {
+ return er.code !== 'ENOENT'
+ }
+}
+
+const pathExistsSync = path => {
+ try {
+ accessSync(path)
+ return true
+ } catch (er) {
+ return er.code !== 'ENOENT'
+ }
+}
+
+module.exports = async (source, destination, options = {}) => {
+ if (!source || !destination) {
+ throw new TypeError('`source` and `destination` file required')
+ }
+
+ options = {
+ overwrite: true,
+ ...options
+ }
+
+ if (!options.overwrite && await pathExists(destination)) {
+ throw new Error(`The destination file exists: ${destination}`)
+ }
+
+ await mkdirp(dirname(destination))
+
+ try {
+ await rename(source, destination)
+ } catch (error) {
+ if (error.code === 'EXDEV') {
+ await copyFile(source, destination)
+ await unlink(source)
+ } else {
+ throw error
+ }
+ }
+}
+
+module.exports.sync = (source, destination, options = {}) => {
+ if (!source || !destination) {
+ throw new TypeError('`source` and `destination` file required')
+ }
+
+ options = {
+ overwrite: true,
+ ...options
+ }
+
+ if (!options.overwrite && pathExistsSync(destination)) {
+ throw new Error(`The destination file exists: ${destination}`)
+ }
+
+ mkdirp.sync(dirname(destination))
+
+ try {
+ renameSync(source, destination)
+ } catch (error) {
+ if (error.code === 'EXDEV') {
+ copyFileSync(source, destination)
+ unlinkSync(source)
+ } else {
+ throw error
+ }
+ }
+}
diff --git a/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp b/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp
new file mode 120000
index 000000000..017896ceb
--- /dev/null
+++ b/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp
@@ -0,0 +1 @@
+../mkdirp/bin/cmd.js \ No newline at end of file
diff --git a/node_modules/@npmcli/move-file/node_modules/mkdirp/CHANGELOG.md b/node_modules/@npmcli/move-file/node_modules/mkdirp/CHANGELOG.md
new file mode 100644
index 000000000..81458380b
--- /dev/null
+++ b/node_modules/@npmcli/move-file/node_modules/mkdirp/CHANGELOG.md
@@ -0,0 +1,15 @@
+# Changers Lorgs!
+
+## 1.0
+
+Full rewrite. Essentially a brand new module.
+
+- Return a promise instead of taking a callback.
+- Use native `fs.mkdir(path, { recursive: true })` when available.
+- Drop support for outdated Node.js versions. (Technically still works on
+ Node.js v8, but only 10 and above are officially supported.)
+
+## 0.x
+
+Original and most widely used recursive directory creation implementation
+in JavaScript, dating back to 2010.
diff --git a/node_modules/string_decoder/node_modules/safe-buffer/LICENSE b/node_modules/@npmcli/move-file/node_modules/mkdirp/LICENSE
index 0c068ceec..13fcd15f0 100644
--- a/node_modules/string_decoder/node_modules/safe-buffer/LICENSE
+++ b/node_modules/@npmcli/move-file/node_modules/mkdirp/LICENSE
@@ -1,6 +1,6 @@
-The MIT License (MIT)
+Copyright James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
-Copyright (c) Feross Aboukhadijeh
+This project is free software released under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/node_modules/@npmcli/move-file/node_modules/mkdirp/bin/cmd.js b/node_modules/@npmcli/move-file/node_modules/mkdirp/bin/cmd.js
new file mode 100755
index 000000000..6e0aa8dc4
--- /dev/null
+++ b/node_modules/@npmcli/move-file/node_modules/mkdirp/bin/cmd.js
@@ -0,0 +1,68 @@
+#!/usr/bin/env node
+
+const usage = () => `
+usage: mkdirp [DIR1,DIR2..] {OPTIONS}
+
+ Create each supplied directory including any necessary parent directories
+ that don't yet exist.
+
+ If the directory already exists, do nothing.
+
+OPTIONS are:
+
+ -m<mode> If a directory needs to be created, set the mode as an octal
+ --mode=<mode> permission string.
+
+ -v --version Print the mkdirp version number
+
+ -h --help Print this helpful banner
+
+ -p --print Print the first directories created for each path provided
+
+ --manual Use manual implementation, even if native is available
+`
+
+const dirs = []
+const opts = {}
+let print = false
+let dashdash = false
+let manual = false
+for (const arg of process.argv.slice(2)) {
+ if (dashdash)
+ dirs.push(arg)
+ else if (arg === '--')
+ dashdash = true
+ else if (arg === '--manual')
+ manual = true
+ else if (/^-h/.test(arg) || /^--help/.test(arg)) {
+ console.log(usage())
+ process.exit(0)
+ } else if (arg === '-v' || arg === '--version') {
+ console.log(require('../package.json').version)
+ process.exit(0)
+ } else if (arg === '-p' || arg === '--print') {
+ print = true
+ } else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
+ const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8)
+ if (isNaN(mode)) {
+ console.error(`invalid mode argument: ${arg}\nMust be an octal number.`)
+ process.exit(1)
+ }
+ opts.mode = mode
+ } else
+ dirs.push(arg)
+}
+
+const mkdirp = require('../')
+const impl = manual ? mkdirp.manual : mkdirp
+if (dirs.length === 0)
+ console.error(usage())
+
+Promise.all(dirs.map(dir => impl(dir, opts)))
+ .then(made => print ? made.forEach(m => m && console.log(m)) : null)
+ .catch(er => {
+ console.error(er.message)
+ if (er.code)
+ console.error(' code: ' + er.code)
+ process.exit(1)
+ })
diff --git a/node_modules/@npmcli/move-file/node_modules/mkdirp/index.js b/node_modules/@npmcli/move-file/node_modules/mkdirp/index.js
new file mode 100644
index 000000000..ad7a16c9f
--- /dev/null
+++ b/node_modules/@npmcli/move-file/node_modules/mkdirp/index.js
@@ -0,0 +1,31 @@
+const optsArg = require('./lib/opts-arg.js')
+const pathArg = require('./lib/path-arg.js')
+
+const {mkdirpNative, mkdirpNativeSync} = require('./lib/mkdirp-native.js')
+const {mkdirpManual, mkdirpManualSync} = require('./lib/mkdirp-manual.js')
+const {useNative, useNativeSync} = require('./lib/use-native.js')
+
+
+const mkdirp = (path, opts) => {
+ path = pathArg(path)
+ opts = optsArg(opts)
+ return useNative(opts)
+ ? mkdirpNative(path, opts)
+ : mkdirpManual(path, opts)
+}
+
+const mkdirpSync = (path, opts) => {
+ path = pathArg(path)
+ opts = optsArg(opts)
+ return useNativeSync(opts)
+ ? mkdirpNativeSync(path, opts)
+ : mkdirpManualSync(path, opts)
+}
+
+mkdirp.sync = mkdirpSync
+mkdirp.native = (path, opts) => mkdirpNative(pathArg(path), optsArg(opts))
+mkdirp.manual = (path, opts) => mkdirpManual(pathArg(path), optsArg(opts))
+mkdirp.nativeSync = (path, opts) => mkdirpNativeSync(pathArg(path), optsArg(opts))
+mkdirp.manualSync = (path, opts) => mkdirpManualSync(pathArg(path), optsArg(opts))
+
+module.exports = mkdirp
diff --git a/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/find-made.js b/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/find-made.js
new file mode 100644
index 000000000..022e492c0
--- /dev/null
+++ b/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/find-made.js
@@ -0,0 +1,29 @@
+const {dirname} = require('path')
+
+const findMade = (opts, parent, path = undefined) => {
+ // we never want the 'made' return value to be a root directory
+ if (path === parent)
+ return Promise.resolve()
+
+ return opts.statAsync(parent).then(
+ st => st.isDirectory() ? path : undefined, // will fail later
+ er => er.code === 'ENOENT'
+ ? findMade(opts, dirname(parent), parent)
+ : undefined
+ )
+}
+
+const findMadeSync = (opts, parent, path = undefined) => {
+ if (path === parent)
+ return undefined
+
+ try {
+ return opts.statSync(parent).isDirectory() ? path : undefined
+ } catch (er) {
+ return er.code === 'ENOENT'
+ ? findMadeSync(opts, dirname(parent), parent)
+ : undefined
+ }
+}
+
+module.exports = {findMade, findMadeSync}
diff --git a/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/mkdirp-manual.js b/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/mkdirp-manual.js
new file mode 100644
index 000000000..2eb18cd64
--- /dev/null
+++ b/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/mkdirp-manual.js
@@ -0,0 +1,64 @@
+const {dirname} = require('path')
+
+const mkdirpManual = (path, opts, made) => {
+ opts.recursive = false
+ const parent = dirname(path)
+ if (parent === path) {
+ return opts.mkdirAsync(path, opts).catch(er => {
+ // swallowed by recursive implementation on posix systems
+ // any other error is a failure
+ if (er.code !== 'EISDIR')
+ throw er
+ })
+ }
+
+ return opts.mkdirAsync(path, opts).then(() => made || path, er => {
+ if (er.code === 'ENOENT')
+ return mkdirpManual(parent, opts)
+ .then(made => mkdirpManual(path, opts, made))
+ if (er.code !== 'EEXIST' && er.code !== 'EROFS')
+ throw er
+ return opts.statAsync(path).then(st => {
+ if (st.isDirectory())
+ return made
+ else
+ throw er
+ }, () => { throw er })
+ })
+}
+
+const mkdirpManualSync = (path, opts, made) => {
+ const parent = dirname(path)
+ opts.recursive = false
+
+ if (parent === path) {
+ try {
+ return opts.mkdirSync(path, opts)
+ } catch (er) {
+ // swallowed by recursive implementation on posix systems
+ // any other error is a failure
+ if (er.code !== 'EISDIR')
+ throw er
+ else
+ return
+ }
+ }
+
+ try {
+ opts.mkdirSync(path, opts)
+ return made || path
+ } catch (er) {
+ if (er.code === 'ENOENT')
+ return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made))
+ if (er.code !== 'EEXIST' && er.code !== 'EROFS')
+ throw er
+ try {
+ if (!opts.statSync(path).isDirectory())
+ throw er
+ } catch (_) {
+ throw er
+ }
+ }
+}
+
+module.exports = {mkdirpManual, mkdirpManualSync}
diff --git a/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/mkdirp-native.js b/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/mkdirp-native.js
new file mode 100644
index 000000000..c7a6b6980
--- /dev/null
+++ b/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/mkdirp-native.js
@@ -0,0 +1,39 @@
+const {dirname} = require('path')
+const {findMade, findMadeSync} = require('./find-made.js')
+const {mkdirpManual, mkdirpManualSync} = require('./mkdirp-manual.js')
+
+const mkdirpNative = (path, opts) => {
+ opts.recursive = true
+ const parent = dirname(path)
+ if (parent === path)
+ return opts.mkdirAsync(path, opts)
+
+ return findMade(opts, path).then(made =>
+ opts.mkdirAsync(path, opts).then(() => made)
+ .catch(er => {
+ if (er.code === 'ENOENT')
+ return mkdirpManual(path, opts)
+ else
+ throw er
+ }))
+}
+
+const mkdirpNativeSync = (path, opts) => {
+ opts.recursive = true
+ const parent = dirname(path)
+ if (parent === path)
+ return opts.mkdirSync(path, opts)
+
+ const made = findMadeSync(opts, path)
+ try {
+ opts.mkdirSync(path, opts)
+ return made
+ } catch (er) {
+ if (er.code === 'ENOENT')
+ return mkdirpManualSync(path, opts)
+ else
+ throw er
+ }
+}
+
+module.exports = {mkdirpNative, mkdirpNativeSync}
diff --git a/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/opts-arg.js b/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/opts-arg.js
new file mode 100644
index 000000000..2fa4833fa
--- /dev/null
+++ b/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/opts-arg.js
@@ -0,0 +1,23 @@
+const { promisify } = require('util')
+const fs = require('fs')
+const optsArg = opts => {
+ if (!opts)
+ opts = { mode: 0o777, fs }
+ else if (typeof opts === 'object')
+ opts = { mode: 0o777, fs, ...opts }
+ else if (typeof opts === 'number')
+ opts = { mode: opts, fs }
+ else if (typeof opts === 'string')
+ opts = { mode: parseInt(opts, 8), fs }
+ else
+ throw new TypeError('invalid options argument')
+
+ opts.mkdir = opts.mkdir || opts.fs.mkdir || fs.mkdir
+ opts.mkdirAsync = promisify(opts.mkdir)
+ opts.stat = opts.stat || opts.fs.stat || fs.stat
+ opts.statAsync = promisify(opts.stat)
+ opts.statSync = opts.statSync || opts.fs.statSync || fs.statSync
+ opts.mkdirSync = opts.mkdirSync || opts.fs.mkdirSync || fs.mkdirSync
+ return opts
+}
+module.exports = optsArg
diff --git a/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/path-arg.js b/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/path-arg.js
new file mode 100644
index 000000000..cc07de5a6
--- /dev/null
+++ b/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/path-arg.js
@@ -0,0 +1,29 @@
+const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform
+const { resolve, parse } = require('path')
+const pathArg = path => {
+ if (/\0/.test(path)) {
+ // simulate same failure that node raises
+ throw Object.assign(
+ new TypeError('path must be a string without null bytes'),
+ {
+ path,
+ code: 'ERR_INVALID_ARG_VALUE',
+ }
+ )
+ }
+
+ path = resolve(path)
+ if (platform === 'win32') {
+ const badWinChars = /[*|"<>?:]/
+ const {root} = parse(path)
+ if (badWinChars.test(path.substr(root.length))) {
+ throw Object.assign(new Error('Illegal characters in path.'), {
+ path,
+ code: 'EINVAL',
+ })
+ }
+ }
+
+ return path
+}
+module.exports = pathArg
diff --git a/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/use-native.js b/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/use-native.js
new file mode 100644
index 000000000..079361de1
--- /dev/null
+++ b/node_modules/@npmcli/move-file/node_modules/mkdirp/lib/use-native.js
@@ -0,0 +1,10 @@
+const fs = require('fs')
+
+const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version
+const versArr = version.replace(/^v/, '').split('.')
+const hasNative = +versArr[0] > 10 || +versArr[0] === 10 && +versArr[1] >= 12
+
+const useNative = !hasNative ? () => false : opts => opts.mkdir === fs.mkdir
+const useNativeSync = !hasNative ? () => false : opts => opts.mkdirSync === fs.mkdirSync
+
+module.exports = {useNative, useNativeSync}
diff --git a/node_modules/@npmcli/move-file/node_modules/mkdirp/package.json b/node_modules/@npmcli/move-file/node_modules/mkdirp/package.json
new file mode 100644
index 000000000..269d165d8
--- /dev/null
+++ b/node_modules/@npmcli/move-file/node_modules/mkdirp/package.json
@@ -0,0 +1,75 @@
+{
+ "_from": "mkdirp@^1.0.4",
+ "_id": "mkdirp@1.0.4",
+ "_inBundle": false,
+ "_integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
+ "_location": "/@npmcli/move-file/mkdirp",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "mkdirp@^1.0.4",
+ "name": "mkdirp",
+ "escapedName": "mkdirp",
+ "rawSpec": "^1.0.4",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.4"
+ },
+ "_requiredBy": [
+ "/@npmcli/move-file"
+ ],
+ "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
+ "_shasum": "3eb5ed62622756d79a5f0e2a221dfebad75c2f7e",
+ "_spec": "mkdirp@^1.0.4",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/move-file",
+ "bin": {
+ "mkdirp": "bin/cmd.js"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/node-mkdirp/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "Recursively mkdir, like `mkdir -p`",
+ "devDependencies": {
+ "require-inject": "^1.4.4",
+ "tap": "^14.10.7"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "files": [
+ "bin",
+ "lib",
+ "index.js"
+ ],
+ "homepage": "https://github.com/isaacs/node-mkdirp#readme",
+ "keywords": [
+ "mkdir",
+ "directory",
+ "make dir",
+ "make",
+ "dir",
+ "recursive",
+ "native"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "mkdirp",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/node-mkdirp.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --follow-tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "snap": "tap",
+ "test": "tap"
+ },
+ "tap": {
+ "check-coverage": true,
+ "coverage-map": "map.js"
+ },
+ "version": "1.0.4"
+}
diff --git a/node_modules/@npmcli/move-file/node_modules/mkdirp/readme.markdown b/node_modules/@npmcli/move-file/node_modules/mkdirp/readme.markdown
new file mode 100644
index 000000000..827de5905
--- /dev/null
+++ b/node_modules/@npmcli/move-file/node_modules/mkdirp/readme.markdown
@@ -0,0 +1,266 @@
+# mkdirp
+
+Like `mkdir -p`, but in Node.js!
+
+Now with a modern API and no\* bugs!
+
+<small>\* may contain some bugs</small>
+
+# example
+
+## pow.js
+
+```js
+const mkdirp = require('mkdirp')
+
+// return value is a Promise resolving to the first directory created
+mkdirp('/tmp/foo/bar/baz').then(made =>
+ console.log(`made directories, starting with ${made}`))
+```
+
+Output (where `/tmp/foo` already exists)
+
+```
+made directories, starting with /tmp/foo/bar
+```
+
+Or, if you don't have time to wait around for promises:
+
+```js
+const mkdirp = require('mkdirp')
+
+// return value is the first directory created
+const made = mkdirp.sync('/tmp/foo/bar/baz')
+console.log(`made directories, starting with ${made}`)
+```
+
+And now /tmp/foo/bar/baz exists, huzzah!
+
+# methods
+
+```js
+const mkdirp = require('mkdirp')
+```
+
+## mkdirp(dir, [opts]) -> Promise<String | undefined>
+
+Create a new directory and any necessary subdirectories at `dir` with octal
+permission string `opts.mode`. If `opts` is a string or number, it will be
+treated as the `opts.mode`.
+
+If `opts.mode` isn't specified, it defaults to `0o777 &
+(~process.umask())`.
+
+Promise resolves to first directory `made` that had to be created, or
+`undefined` if everything already exists. Promise rejects if any errors
+are encountered. Note that, in the case of promise rejection, some
+directories _may_ have been created, as recursive directory creation is not
+an atomic operation.
+
+You can optionally pass in an alternate `fs` implementation by passing in
+`opts.fs`. Your implementation should have `opts.fs.mkdir(path, opts, cb)`
+and `opts.fs.stat(path, cb)`.
+
+You can also override just one or the other of `mkdir` and `stat` by
+passing in `opts.stat` or `opts.mkdir`, or providing an `fs` option that
+only overrides one of these.
+
+## mkdirp.sync(dir, opts) -> String|null
+
+Synchronously create a new directory and any necessary subdirectories at
+`dir` with octal permission string `opts.mode`. If `opts` is a string or
+number, it will be treated as the `opts.mode`.
+
+If `opts.mode` isn't specified, it defaults to `0o777 &
+(~process.umask())`.
+
+Returns the first directory that had to be created, or undefined if
+everything already exists.
+
+You can optionally pass in an alternate `fs` implementation by passing in
+`opts.fs`. Your implementation should have `opts.fs.mkdirSync(path, mode)`
+and `opts.fs.statSync(path)`.
+
+You can also override just one or the other of `mkdirSync` and `statSync`
+by passing in `opts.statSync` or `opts.mkdirSync`, or providing an `fs`
+option that only overrides one of these.
+
+## mkdirp.manual, mkdirp.manualSync
+
+Use the manual implementation (not the native one). This is the default
+when the native implementation is not available or the stat/mkdir
+implementation is overridden.
+
+## mkdirp.native, mkdirp.nativeSync
+
+Use the native implementation (not the manual one). This is the default
+when the native implementation is available and stat/mkdir are not
+overridden.
+
+# implementation
+
+On Node.js v10.12.0 and above, use the native `fs.mkdir(p,
+{recursive:true})` option, unless `fs.mkdir`/`fs.mkdirSync` has been
+overridden by an option.
+
+## native implementation
+
+- If the path is a root directory, then pass it to the underlying
+ implementation and return the result/error. (In this case, it'll either
+ succeed or fail, but we aren't actually creating any dirs.)
+- Walk up the path statting each directory, to find the first path that
+ will be created, `made`.
+- Call `fs.mkdir(path, { recursive: true })` (or `fs.mkdirSync`)
+- If error, raise it to the caller.
+- Return `made`.
+
+## manual implementation
+
+- Call underlying `fs.mkdir` implementation, with `recursive: false`
+- If error:
+ - If path is a root directory, raise to the caller and do not handle it
+ - If ENOENT, mkdirp parent dir, store result as `made`
+ - stat(path)
+ - If error, raise original `mkdir` error
+ - If directory, return `made`
+ - Else, raise original `mkdir` error
+- else
+ - return `undefined` if a root dir, or `made` if set, or `path`
+
+## windows vs unix caveat
+
+On Windows file systems, attempts to create a root directory (ie, a drive
+letter or root UNC path) will fail. If the root directory exists, then it
+will fail with `EPERM`. If the root directory does not exist, then it will
+fail with `ENOENT`.
+
+On posix file systems, attempts to create a root directory (in recursive
+mode) will succeed silently, as it is treated like just another directory
+that already exists. (In non-recursive mode, of course, it fails with
+`EEXIST`.)
+
+In order to preserve this system-specific behavior (and because it's not as
+if we can create the parent of a root directory anyway), attempts to create
+a root directory are passed directly to the `fs` implementation, and any
+errors encountered are not handled.
+
+## native error caveat
+
+The native implementation (as of at least Node.js v13.4.0) does not provide
+appropriate errors in some cases (see
+[nodejs/node#31481](https://github.com/nodejs/node/issues/31481) and
+[nodejs/node#28015](https://github.com/nodejs/node/issues/28015)).
+
+In order to work around this issue, the native implementation will fall
+back to the manual implementation if an `ENOENT` error is encountered.
+
+# choosing a recursive mkdir implementation
+
+There are a few to choose from! Use the one that suits your needs best :D
+
+## use `fs.mkdir(path, {recursive: true}, cb)` if:
+
+- You wish to optimize performance even at the expense of other factors.
+- You don't need to know the first dir created.
+- You are ok with getting `ENOENT` as the error when some other problem is
+ the actual cause.
+- You can limit your platforms to Node.js v10.12 and above.
+- You're ok with using callbacks instead of promises.
+- You don't need/want a CLI.
+- You don't need to override the `fs` methods in use.
+
+## use this module (mkdirp 1.x) if:
+
+- You need to know the first directory that was created.
+- You wish to use the native implementation if available, but fall back
+ when it's not.
+- You prefer promise-returning APIs to callback-taking APIs.
+- You want more useful error messages than the native recursive mkdir
+ provides (at least as of Node.js v13.4), and are ok with re-trying on
+ `ENOENT` to achieve this.
+- You need (or at least, are ok with) a CLI.
+- You need to override the `fs` methods in use.
+
+## use [`make-dir`](http://npm.im/make-dir) if:
+
+- You do not need to know the first dir created (and wish to save a few
+ `stat` calls when using the native implementation for this reason).
+- You wish to use the native implementation if available, but fall back
+ when it's not.
+- You prefer promise-returning APIs to callback-taking APIs.
+- You are ok with occasionally getting `ENOENT` errors for failures that
+ are actually related to something other than a missing file system entry.
+- You don't need/want a CLI.
+- You need to override the `fs` methods in use.
+
+## use mkdirp 0.x if:
+
+- You need to know the first directory that was created.
+- You need (or at least, are ok with) a CLI.
+- You need to override the `fs` methods in use.
+- You're ok with using callbacks instead of promises.
+- You are not running on Windows, where the root-level ENOENT errors can
+ lead to infinite regress.
+- You think vinyl just sounds warmer and richer for some weird reason.
+- You are supporting truly ancient Node.js versions, before even the advent
+ of a `Promise` language primitive. (Please don't. You deserve better.)
+
+# cli
+
+This package also ships with a `mkdirp` command.
+
+```
+$ mkdirp -h
+
+usage: mkdirp [DIR1,DIR2..] {OPTIONS}
+
+ Create each supplied directory including any necessary parent directories
+ that don't yet exist.
+
+ If the directory already exists, do nothing.
+
+OPTIONS are:
+
+ -m<mode> If a directory needs to be created, set the mode as an octal
+ --mode=<mode> permission string.
+
+ -v --version Print the mkdirp version number
+
+ -h --help Print this helpful banner
+
+ -p --print Print the first directories created for each path provided
+
+ --manual Use manual implementation, even if native is available
+```
+
+# install
+
+With [npm](http://npmjs.org) do:
+
+```
+npm install mkdirp
+```
+
+to get the library locally, or
+
+```
+npm install -g mkdirp
+```
+
+to get the command everywhere, or
+
+```
+npx mkdirp ...
+```
+
+to run the command without installing it globally.
+
+# platform support
+
+This module works on node v8, but only v10 and above are officially
+supported, as Node v8 reached its LTS end of life 2020-01-01, which is in
+the past, as of this writing.
+
+# license
+
+MIT
diff --git a/node_modules/@npmcli/move-file/package.json b/node_modules/@npmcli/move-file/package.json
new file mode 100644
index 000000000..dacb8b292
--- /dev/null
+++ b/node_modules/@npmcli/move-file/package.json
@@ -0,0 +1,63 @@
+{
+ "_from": "@npmcli/move-file@^1.0.1",
+ "_id": "@npmcli/move-file@1.0.1",
+ "_inBundle": false,
+ "_integrity": "sha512-Uv6h1sT+0DrblvIrolFtbvM1FgWm+/sy4B3pvLp67Zys+thcukzS5ekn7HsZFGpWP4Q3fYJCljbWQE/XivMRLw==",
+ "_location": "/@npmcli/move-file",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "@npmcli/move-file@^1.0.1",
+ "name": "@npmcli/move-file",
+ "escapedName": "@npmcli%2fmove-file",
+ "scope": "@npmcli",
+ "rawSpec": "^1.0.1",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.1"
+ },
+ "_requiredBy": [
+ "/cacache"
+ ],
+ "_resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-1.0.1.tgz",
+ "_shasum": "de103070dac0f48ce49cf6693c23af59c0f70464",
+ "_spec": "@npmcli/move-file@^1.0.1",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/cacache",
+ "bugs": {
+ "url": "https://github.com/npm/move-file/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "mkdirp": "^1.0.4"
+ },
+ "deprecated": false,
+ "description": "move a file (fork of move-file)",
+ "devDependencies": {
+ "require-inject": "^1.4.4",
+ "tap": "^14.10.7"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/npm/move-file#readme",
+ "license": "MIT",
+ "name": "@npmcli/move-file",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/move-file.git"
+ },
+ "scripts": {
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags",
+ "preversion": "npm test",
+ "snap": "tap",
+ "test": "tap"
+ },
+ "tap": {
+ "check-coverage": true
+ },
+ "version": "1.0.1"
+}
diff --git a/node_modules/cacache/CHANGELOG.md b/node_modules/cacache/CHANGELOG.md
index 80d9315a6..649db2202 100644
--- a/node_modules/cacache/CHANGELOG.md
+++ b/node_modules/cacache/CHANGELOG.md
@@ -2,6 +2,13 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+### [15.0.4](https://github.com/npm/cacache/compare/v15.0.3...v15.0.4) (2020-06-03)
+
+
+### Bug Fixes
+
+* replace move-file dep with @npmcli/move-file ([bf88af0](https://github.com/npm/cacache/commit/bf88af04e50cca9b54041151139ffc1fd415e2dc)), closes [#37](https://github.com/npm/cacache/issues/37)
+
### [15.0.3](https://github.com/npm/cacache/compare/v15.0.2...v15.0.3) (2020-04-28)
diff --git a/node_modules/cacache/lib/util/move-file.js b/node_modules/cacache/lib/util/move-file.js
index b5d7a2534..84130b2e9 100644
--- a/node_modules/cacache/lib/util/move-file.js
+++ b/node_modules/cacache/lib/util/move-file.js
@@ -5,7 +5,7 @@ const util = require('util')
const chmod = util.promisify(fs.chmod)
const unlink = util.promisify(fs.unlink)
const stat = util.promisify(fs.stat)
-const move = require('move-file')
+const move = require('@npmcli/move-file')
const pinflight = require('promise-inflight')
module.exports = moveFile
diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json
index 70b741d2c..e5e0bcc71 100644
--- a/node_modules/cacache/package.json
+++ b/node_modules/cacache/package.json
@@ -1,8 +1,8 @@
{
"_from": "cacache@latest",
- "_id": "cacache@15.0.3",
+ "_id": "cacache@15.0.4",
"_inBundle": false,
- "_integrity": "sha512-bc3jKYjqv7k4pWh7I/ixIjfcjPul4V4jme/WbjvwGS5LzoPL/GzXr4C5EgPNLO/QEZl9Oi61iGitYEdwcrwLCQ==",
+ "_integrity": "sha512-YlnKQqTbD/6iyoJvEY3KJftjrdBYroCbxxYXzhOzsFLWlp6KX4BOlEf4mTx0cMUfVaTS3ENL2QtDWeRYoGLkkw==",
"_location": "/cacache",
"_phantomChildren": {
"fs-minipass": "2.1.0",
@@ -24,11 +24,13 @@
"_requiredBy": [
"#USER",
"/",
- "/npm-registry-fetch/make-fetch-happen",
- "/pacote"
+ "/@npmcli/arborist",
+ "/make-fetch-happen",
+ "/pacote",
+ "/pacote/make-fetch-happen"
],
- "_resolved": "https://registry.npmjs.org/cacache/-/cacache-15.0.3.tgz",
- "_shasum": "2225c2d1dd8e872339950d6a39c051e0e9334392",
+ "_resolved": "https://registry.npmjs.org/cacache/-/cacache-15.0.4.tgz",
+ "_shasum": "b2c23cf4ac4f5ead004fb15a0efb0a20340741f1",
"_spec": "cacache@latest",
"_where": "/Users/isaacs/dev/npm/cli",
"author": {
@@ -54,6 +56,7 @@
}
],
"dependencies": {
+ "@npmcli/move-file": "^1.0.1",
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
"glob": "^7.1.4",
@@ -64,7 +67,6 @@
"minipass-flush": "^1.0.5",
"minipass-pipeline": "^1.2.2",
"mkdirp": "^1.0.3",
- "move-file": "^2.0.0",
"p-map": "^4.0.0",
"promise-inflight": "^1.0.1",
"rimraf": "^3.0.2",
@@ -129,5 +131,5 @@
"100": true,
"test-regex": "test/[^/]*.js"
},
- "version": "15.0.3"
+ "version": "15.0.4"
}
diff --git a/node_modules/common-ancestor-path/LICENSE b/node_modules/common-ancestor-path/LICENSE
new file mode 100644
index 000000000..05eeeb88c
--- /dev/null
+++ b/node_modules/common-ancestor-path/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/common-ancestor-path/README.md b/node_modules/common-ancestor-path/README.md
new file mode 100644
index 000000000..2e8764373
--- /dev/null
+++ b/node_modules/common-ancestor-path/README.md
@@ -0,0 +1,28 @@
+# common-ancestor-path
+
+Find the common ancestor of 2 or more paths on Windows or Unix
+
+## USAGE
+
+Give it two or more path strings, and it'll do the thing.
+
+```js
+const ancestor = require('common-ancestor-path')
+
+// output /a/b
+console.log(ancestor('/a/b/c/d', '/a/b/x/y/z', '/a/b/c/i/j/k'))
+
+// normalizes separators, but NOT cases, since it matters sometimes
+console.log(ancestor('C:\\a\\b\\c', 'C:\\a\\b\\x'))
+
+// no common ancestor on different windows drive letters
+// so, this returns null
+console.log(ancestor('c:\\a\\b\\c', 'd:\\d\\e\\f'))
+```
+
+## API
+
+`commonAncestorPath(...paths)`
+
+Returns the nearest (deepest) common ancestor path, or `null` if on
+different roots on Windows.
diff --git a/node_modules/common-ancestor-path/index.js b/node_modules/common-ancestor-path/index.js
new file mode 100644
index 000000000..09ae31782
--- /dev/null
+++ b/node_modules/common-ancestor-path/index.js
@@ -0,0 +1,17 @@
+const {parse, sep, normalize: norm} = require('path')
+
+function* commonArrayMembers (a, b) {
+ const [l, s] = a.length > b.length ? [a, b] : [b, a]
+ for (const x of s) {
+ if (x === l.shift())
+ yield x
+ else
+ break
+ }
+}
+
+const commonAncestorPath = (a, b) => a === b ? a
+ : parse(a).root !== parse(b).root ? null
+ : [...commonArrayMembers(norm(a).split(sep), norm(b).split(sep))].join(sep)
+
+module.exports = (...paths) => paths.reduce(commonAncestorPath)
diff --git a/node_modules/common-ancestor-path/package.json b/node_modules/common-ancestor-path/package.json
new file mode 100644
index 000000000..d455f3bc1
--- /dev/null
+++ b/node_modules/common-ancestor-path/package.json
@@ -0,0 +1,61 @@
+{
+ "_from": "common-ancestor-path@^1.0.1",
+ "_id": "common-ancestor-path@1.0.1",
+ "_inBundle": false,
+ "_integrity": "sha512-L3sHRo1pXXEqX8VU28kfgUY+YGsk09hPqZiZmLacNib6XNTCM8ubYeT7ryXQw8asB1sKgcU5lkB7ONug08aB8w==",
+ "_location": "/common-ancestor-path",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "common-ancestor-path@^1.0.1",
+ "name": "common-ancestor-path",
+ "escapedName": "common-ancestor-path",
+ "rawSpec": "^1.0.1",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.1"
+ },
+ "_requiredBy": [
+ "/@npmcli/arborist"
+ ],
+ "_resolved": "https://registry.npmjs.org/common-ancestor-path/-/common-ancestor-path-1.0.1.tgz",
+ "_shasum": "4f7d2d1394d91b7abdf51871c62f71eadb0182a7",
+ "_spec": "common-ancestor-path@^1.0.1",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/arborist",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "https://izs.me"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/common-ancestor-path/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "Find the common ancestor of 2 or more paths on Windows or Unix",
+ "devDependencies": {
+ "require-inject": "^1.4.4",
+ "tap": "^14.10.7"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/isaacs/common-ancestor-path#readme",
+ "license": "ISC",
+ "name": "common-ancestor-path",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/common-ancestor-path.git"
+ },
+ "scripts": {
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags",
+ "preversion": "npm test",
+ "snap": "tap",
+ "test": "tap"
+ },
+ "tap": {
+ "check-coverage": true
+ },
+ "version": "1.0.1"
+}
diff --git a/node_modules/make-fetch-happen/cache.js b/node_modules/make-fetch-happen/cache.js
index b11ddc459..1b7f0db1b 100644
--- a/node_modules/make-fetch-happen/cache.js
+++ b/node_modules/make-fetch-happen/cache.js
@@ -169,39 +169,7 @@ module.exports = class Cache {
ckey,
cacheOpts
)
-
- // See: https://github.com/npm/npm-registry-fetch/issues/23#issuecomment-623558888
- //
- // XXX why does this fix the glitch??
- //
- // Something weird is going on here. This SHOULD be fine as a simple
- // pipe(), but for some reason, backpressure from the cache stream
- // can cause the pipeline to drop the first chunk of data, resulting
- // in invalid JSON. Until that is fixed, just write into the cache
- // without any backpressure.
- //
- // The only hazard is that, if the fs is truly very slow, and the rest
- // of the consumption pipeline is very fast, then we'll back up into
- // memory and use more than we ought to, rather than pushing back on
- // the incoming stream. However, this isn't likely to ever be a problem
- // due to how npm does HTTP. Either it's fetching a JSON response,
- // or a tarball (which is also either unpacking to disk, or streaming
- // directly to a tarball file on disk). So, if the disk is slow, and
- // it's a tarball request, we're likely to get backpressure from the
- // main pipeline anyway. It can only become a problem if the JSON
- // response is large enough to span multiple chunks, and also the fs
- // is loaded enough to start slowing down. In the JSON response case,
- // we're going to load the whole thing in memory anyway, so nothing is
- // made particularly *worse* by this lack of backpressure.
- //
- // It is possible that the root cause of this bug exists either in
- // cacache, minipass-pipeline, or minipass itself. But since we don't
- // do a multi-pipe tee stream anywhere else in npm's stack, this is
- // the only spot where it can make itself known.
- tee.on('data', d => cacheStream.write(d))
- tee.on('end', () => cacheStream.end())
- // tee.pipe(cacheStream)
-
+ tee.pipe(cacheStream)
cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
newBody.unshift(tee)
}
diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json
index decec12d9..dac0309c4 100644
--- a/node_modules/make-fetch-happen/package.json
+++ b/node_modules/make-fetch-happen/package.json
@@ -1,27 +1,29 @@
{
- "_from": "make-fetch-happen@^8.0.6",
- "_id": "make-fetch-happen@8.0.6",
+ "_from": "make-fetch-happen@latest",
+ "_id": "make-fetch-happen@8.0.7",
"_inBundle": false,
- "_integrity": "sha512-QJ4pB5VBY9H9e+3t/o+fPjsVUlPULpAllxuKertRo/7ii47TfxeEEnneM6NCmhyn4MQPTYL+M+RkiU9bR+hAfg==",
+ "_integrity": "sha512-rkDA4c1nMXVqLkfOaM5RK2dxkUndjLOCrPycTDZgbkFDzhmaCO3P1dmCW//yt1I/G1EcedJqMsSjWkV79Hh4hQ==",
"_location": "/make-fetch-happen",
"_phantomChildren": {},
"_requested": {
- "type": "range",
+ "type": "tag",
"registry": true,
- "raw": "make-fetch-happen@^8.0.6",
+ "raw": "make-fetch-happen@latest",
"name": "make-fetch-happen",
"escapedName": "make-fetch-happen",
- "rawSpec": "^8.0.6",
+ "rawSpec": "latest",
"saveSpec": null,
- "fetchSpec": "^8.0.6"
+ "fetchSpec": "latest"
},
"_requiredBy": [
- "/@npmcli/arborist/npm-registry-fetch"
+ "#USER",
+ "/",
+ "/npm-registry-fetch"
],
- "_resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-8.0.6.tgz",
- "_shasum": "392726f46eba30cc61dc82dc015167e6e428ce80",
- "_spec": "make-fetch-happen@^8.0.6",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/arborist/node_modules/npm-registry-fetch",
+ "_resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-8.0.7.tgz",
+ "_shasum": "7f98e6e75784c541833d0ffe2f82c31418a87ac2",
+ "_spec": "make-fetch-happen@latest",
+ "_where": "/Users/isaacs/dev/npm/cli",
"author": {
"name": "Kat Marchán",
"email": "kzm@zkat.tech"
@@ -38,7 +40,7 @@
"https-proxy-agent": "^5.0.0",
"is-lambda": "^1.0.1",
"lru-cache": "^5.1.1",
- "minipass": "^3.0.0",
+ "minipass": "^3.1.3",
"minipass-collect": "^1.0.2",
"minipass-fetch": "^1.1.2",
"minipass-flush": "^1.0.5",
@@ -94,5 +96,5 @@
"preversion": "npm t",
"test": "tap test/*.js"
},
- "version": "8.0.6"
+ "version": "8.0.7"
}
diff --git a/node_modules/minipass/README.md b/node_modules/minipass/README.md
index 32ace2fb9..1a6ff7f5d 100644
--- a/node_modules/minipass/README.md
+++ b/node_modules/minipass/README.md
@@ -47,6 +47,13 @@ out:
- [tap-parser](http://npm.im/tap)
- [treport](http://npm.im/tap)
- [minipass-fetch](http://npm.im/minipass-fetch)
+- [pacote](http://npm.im/pacote)
+- [make-fetch-happen](http://npm.im/make-fetch-happen)
+- [cacache](http://npm.im/cacache)
+- [ssri](http://npm.im/ssri)
+- [npm-registry-fetch](http://npm.im/npm-registry-fetch)
+- [minipass-json-stream](http://npm.im/minipass-json-stream)
+- [minipass-sized](http://npm.im/minipass-sized)
## Differences from Node.js Streams
@@ -224,7 +231,7 @@ src.write('foo')
const tee = new Minipass()
tee.pipe(dest1)
tee.pipe(dest2)
-stream.pipe(tee) // tee gets 'foo', pipes to both locations
+src.pipe(tee) // tee gets 'foo', pipes to both locations
```
The same caveat applies to `on('data')` event listeners. The first one
diff --git a/node_modules/minipass/index.js b/node_modules/minipass/index.js
index 55ea0f3dd..56cbd665d 100644
--- a/node_modules/minipass/index.js
+++ b/node_modules/minipass/index.js
@@ -97,7 +97,7 @@ module.exports = class Minipass extends Stream {
}
get objectMode () { return this[OBJECTMODE] }
- set objectMode (ॐ ) { this[OBJECTMODE] = this[OBJECTMODE] || !!ॐ }
+ set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
write (chunk, encoding, cb) {
if (this[EOF])
@@ -134,12 +134,11 @@ module.exports = class Minipass extends Stream {
// this ensures at this point that the chunk is a buffer or string
// don't buffer it up or send it to the decoder
if (!this.objectMode && !chunk.length) {
- const ret = this.flowing
if (this[BUFFERLENGTH] !== 0)
this.emit('readable')
if (cb)
cb()
- return ret
+ return this.flowing
}
// fast-path writing strings of same encoding to a stream with
@@ -153,16 +152,24 @@ module.exports = class Minipass extends Stream {
if (Buffer.isBuffer(chunk) && this[ENCODING])
chunk = this[DECODER].write(chunk)
- try {
- return this.flowing
- ? (this.emit('data', chunk), this.flowing)
- : (this[BUFFERPUSH](chunk), false)
- } finally {
+ if (this.flowing) {
+ // if we somehow have something in the buffer, but we think we're
+ // flowing, then we need to flush all that out first, or we get
+ // chunks coming in out of order. Can't emit 'drain' here though,
+ // because we're mid-write, so that'd be bad.
if (this[BUFFERLENGTH] !== 0)
- this.emit('readable')
- if (cb)
- cb()
- }
+ this[FLUSH](true)
+ this.emit('data', chunk)
+ } else
+ this[BUFFERPUSH](chunk)
+
+ if (this[BUFFERLENGTH] !== 0)
+ this.emit('readable')
+
+ if (cb)
+ cb()
+
+ return this.flowing
}
read (n) {
@@ -286,10 +293,10 @@ module.exports = class Minipass extends Stream {
return this.buffer.shift()
}
- [FLUSH] () {
+ [FLUSH] (noDrain) {
do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
- if (!this.buffer.length && !this[EOF])
+ if (!noDrain && !this.buffer.length && !this[EOF])
this.emit('drain')
}
diff --git a/node_modules/minipass/package.json b/node_modules/minipass/package.json
index 0eaa37808..315022964 100644
--- a/node_modules/minipass/package.json
+++ b/node_modules/minipass/package.json
@@ -1,8 +1,8 @@
{
"_from": "minipass@latest",
- "_id": "minipass@3.1.1",
+ "_id": "minipass@3.1.3",
"_inBundle": false,
- "_integrity": "sha512-UFqVihv6PQgwj8/yTGvl9kPz7xIAY+R5z6XYjRInD3Gk3qx6QGSD6zEcpeG4Dy/lQnv1J6zv8ejV90hyYIKf3w==",
+ "_integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==",
"_location": "/minipass",
"_phantomChildren": {},
"_requested": {
@@ -19,7 +19,10 @@
"#USER",
"/",
"/cacache",
+ "/cacache/tar",
"/fs-minipass",
+ "/libnpmaccess",
+ "/make-fetch-happen",
"/minipass-collect",
"/minipass-fetch",
"/minipass-flush",
@@ -28,14 +31,12 @@
"/minipass-sized",
"/minizlib",
"/npm-registry-fetch",
- "/npm-registry-fetch/make-fetch-happen",
- "/pacote",
"/ssri",
"/tap-parser",
"/tar"
],
- "_resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.1.tgz",
- "_shasum": "7607ce778472a185ad6d89082aa2070f79cedcd5",
+ "_resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz",
+ "_shasum": "7d42ff1f39635482e15f9cdb53184deebd5815fd",
"_spec": "minipass@latest",
"_where": "/Users/isaacs/dev/npm/cli",
"author": {
@@ -84,5 +85,5 @@
"tap": {
"check-coverage": true
},
- "version": "3.1.1"
+ "version": "3.1.3"
}
diff --git a/node_modules/mkdirp/node_modules/minimist/.travis.yml b/node_modules/mkdirp/node_modules/minimist/.travis.yml
deleted file mode 100644
index 74c57bf15..000000000
--- a/node_modules/mkdirp/node_modules/minimist/.travis.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-language: node_js
-node_js:
- - "0.8"
- - "0.10"
- - "0.12"
- - "iojs"
-before_install:
- - npm install -g npm@~1.4.6
diff --git a/node_modules/mkdirp/node_modules/minimist/LICENSE b/node_modules/mkdirp/node_modules/minimist/LICENSE
deleted file mode 100644
index ee27ba4b4..000000000
--- a/node_modules/mkdirp/node_modules/minimist/LICENSE
+++ /dev/null
@@ -1,18 +0,0 @@
-This software is released under the MIT license:
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/mkdirp/node_modules/minimist/example/parse.js b/node_modules/mkdirp/node_modules/minimist/example/parse.js
deleted file mode 100644
index f7c8d4980..000000000
--- a/node_modules/mkdirp/node_modules/minimist/example/parse.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var argv = require('../')(process.argv.slice(2));
-console.log(argv);
diff --git a/node_modules/mkdirp/node_modules/minimist/index.js b/node_modules/mkdirp/node_modules/minimist/index.js
deleted file mode 100644
index d2afe5e4d..000000000
--- a/node_modules/mkdirp/node_modules/minimist/index.js
+++ /dev/null
@@ -1,245 +0,0 @@
-module.exports = function (args, opts) {
- if (!opts) opts = {};
-
- var flags = { bools : {}, strings : {}, unknownFn: null };
-
- if (typeof opts['unknown'] === 'function') {
- flags.unknownFn = opts['unknown'];
- }
-
- if (typeof opts['boolean'] === 'boolean' && opts['boolean']) {
- flags.allBools = true;
- } else {
- [].concat(opts['boolean']).filter(Boolean).forEach(function (key) {
- flags.bools[key] = true;
- });
- }
-
- var aliases = {};
- Object.keys(opts.alias || {}).forEach(function (key) {
- aliases[key] = [].concat(opts.alias[key]);
- aliases[key].forEach(function (x) {
- aliases[x] = [key].concat(aliases[key].filter(function (y) {
- return x !== y;
- }));
- });
- });
-
- [].concat(opts.string).filter(Boolean).forEach(function (key) {
- flags.strings[key] = true;
- if (aliases[key]) {
- flags.strings[aliases[key]] = true;
- }
- });
-
- var defaults = opts['default'] || {};
-
- var argv = { _ : [] };
- Object.keys(flags.bools).forEach(function (key) {
- setArg(key, defaults[key] === undefined ? false : defaults[key]);
- });
-
- var notFlags = [];
-
- if (args.indexOf('--') !== -1) {
- notFlags = args.slice(args.indexOf('--')+1);
- args = args.slice(0, args.indexOf('--'));
- }
-
- function argDefined(key, arg) {
- return (flags.allBools && /^--[^=]+$/.test(arg)) ||
- flags.strings[key] || flags.bools[key] || aliases[key];
- }
-
- function setArg (key, val, arg) {
- if (arg && flags.unknownFn && !argDefined(key, arg)) {
- if (flags.unknownFn(arg) === false) return;
- }
-
- var value = !flags.strings[key] && isNumber(val)
- ? Number(val) : val
- ;
- setKey(argv, key.split('.'), value);
-
- (aliases[key] || []).forEach(function (x) {
- setKey(argv, x.split('.'), value);
- });
- }
-
- function setKey (obj, keys, value) {
- var o = obj;
- for (var i = 0; i < keys.length-1; i++) {
- var key = keys[i];
- if (key === '__proto__') return;
- if (o[key] === undefined) o[key] = {};
- if (o[key] === Object.prototype || o[key] === Number.prototype
- || o[key] === String.prototype) o[key] = {};
- if (o[key] === Array.prototype) o[key] = [];
- o = o[key];
- }
-
- var key = keys[keys.length - 1];
- if (key === '__proto__') return;
- if (o === Object.prototype || o === Number.prototype
- || o === String.prototype) o = {};
- if (o === Array.prototype) o = [];
- if (o[key] === undefined || flags.bools[key] || typeof o[key] === 'boolean') {
- o[key] = value;
- }
- else if (Array.isArray(o[key])) {
- o[key].push(value);
- }
- else {
- o[key] = [ o[key], value ];
- }
- }
-
- function aliasIsBoolean(key) {
- return aliases[key].some(function (x) {
- return flags.bools[x];
- });
- }
-
- for (var i = 0; i < args.length; i++) {
- var arg = args[i];
-
- if (/^--.+=/.test(arg)) {
- // Using [\s\S] instead of . because js doesn't support the
- // 'dotall' regex modifier. See:
- // http://stackoverflow.com/a/1068308/13216
- var m = arg.match(/^--([^=]+)=([\s\S]*)$/);
- var key = m[1];
- var value = m[2];
- if (flags.bools[key]) {
- value = value !== 'false';
- }
- setArg(key, value, arg);
- }
- else if (/^--no-.+/.test(arg)) {
- var key = arg.match(/^--no-(.+)/)[1];
- setArg(key, false, arg);
- }
- else if (/^--.+/.test(arg)) {
- var key = arg.match(/^--(.+)/)[1];
- var next = args[i + 1];
- if (next !== undefined && !/^-/.test(next)
- && !flags.bools[key]
- && !flags.allBools
- && (aliases[key] ? !aliasIsBoolean(key) : true)) {
- setArg(key, next, arg);
- i++;
- }
- else if (/^(true|false)$/.test(next)) {
- setArg(key, next === 'true', arg);
- i++;
- }
- else {
- setArg(key, flags.strings[key] ? '' : true, arg);
- }
- }
- else if (/^-[^-]+/.test(arg)) {
- var letters = arg.slice(1,-1).split('');
-
- var broken = false;
- for (var j = 0; j < letters.length; j++) {
- var next = arg.slice(j+2);
-
- if (next === '-') {
- setArg(letters[j], next, arg)
- continue;
- }
-
- if (/[A-Za-z]/.test(letters[j]) && /=/.test(next)) {
- setArg(letters[j], next.split('=')[1], arg);
- broken = true;
- break;
- }
-
- if (/[A-Za-z]/.test(letters[j])
- && /-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) {
- setArg(letters[j], next, arg);
- broken = true;
- break;
- }
-
- if (letters[j+1] && letters[j+1].match(/\W/)) {
- setArg(letters[j], arg.slice(j+2), arg);
- broken = true;
- break;
- }
- else {
- setArg(letters[j], flags.strings[letters[j]] ? '' : true, arg);
- }
- }
-
- var key = arg.slice(-1)[0];
- if (!broken && key !== '-') {
- if (args[i+1] && !/^(-|--)[^-]/.test(args[i+1])
- && !flags.bools[key]
- && (aliases[key] ? !aliasIsBoolean(key) : true)) {
- setArg(key, args[i+1], arg);
- i++;
- }
- else if (args[i+1] && /^(true|false)$/.test(args[i+1])) {
- setArg(key, args[i+1] === 'true', arg);
- i++;
- }
- else {
- setArg(key, flags.strings[key] ? '' : true, arg);
- }
- }
- }
- else {
- if (!flags.unknownFn || flags.unknownFn(arg) !== false) {
- argv._.push(
- flags.strings['_'] || !isNumber(arg) ? arg : Number(arg)
- );
- }
- if (opts.stopEarly) {
- argv._.push.apply(argv._, args.slice(i + 1));
- break;
- }
- }
- }
-
- Object.keys(defaults).forEach(function (key) {
- if (!hasKey(argv, key.split('.'))) {
- setKey(argv, key.split('.'), defaults[key]);
-
- (aliases[key] || []).forEach(function (x) {
- setKey(argv, x.split('.'), defaults[key]);
- });
- }
- });
-
- if (opts['--']) {
- argv['--'] = new Array();
- notFlags.forEach(function(key) {
- argv['--'].push(key);
- });
- }
- else {
- notFlags.forEach(function(key) {
- argv._.push(key);
- });
- }
-
- return argv;
-};
-
-function hasKey (obj, keys) {
- var o = obj;
- keys.slice(0,-1).forEach(function (key) {
- o = (o[key] || {});
- });
-
- var key = keys[keys.length - 1];
- return key in o;
-}
-
-function isNumber (x) {
- if (typeof x === 'number') return true;
- if (/^0x[0-9a-f]+$/i.test(x)) return true;
- return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x);
-}
-
diff --git a/node_modules/mkdirp/node_modules/minimist/package.json b/node_modules/mkdirp/node_modules/minimist/package.json
deleted file mode 100644
index 2846f6885..000000000
--- a/node_modules/mkdirp/node_modules/minimist/package.json
+++ /dev/null
@@ -1,73 +0,0 @@
-{
- "_from": "minimist@^1.2.5",
- "_id": "minimist@1.2.5",
- "_inBundle": false,
- "_integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
- "_location": "/mkdirp/minimist",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "minimist@^1.2.5",
- "name": "minimist",
- "escapedName": "minimist",
- "rawSpec": "^1.2.5",
- "saveSpec": null,
- "fetchSpec": "^1.2.5"
- },
- "_requiredBy": [
- "/mkdirp"
- ],
- "_resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
- "_shasum": "67d66014b66a6a8aaa0c083c5fd58df4e4e97602",
- "_spec": "minimist@^1.2.5",
- "_where": "/Users/darcyclarke/Documents/Repos/npm/npm/cli/node_modules/mkdirp",
- "author": {
- "name": "James Halliday",
- "email": "mail@substack.net",
- "url": "http://substack.net"
- },
- "bugs": {
- "url": "https://github.com/substack/minimist/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "parse argument options",
- "devDependencies": {
- "covert": "^1.0.0",
- "tap": "~0.4.0",
- "tape": "^3.5.0"
- },
- "homepage": "https://github.com/substack/minimist",
- "keywords": [
- "argv",
- "getopt",
- "parser",
- "optimist"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "minimist",
- "repository": {
- "type": "git",
- "url": "git://github.com/substack/minimist.git"
- },
- "scripts": {
- "coverage": "covert test/*.js",
- "test": "tap test/*.js"
- },
- "testling": {
- "files": "test/*.js",
- "browsers": [
- "ie/6..latest",
- "ff/5",
- "firefox/latest",
- "chrome/10",
- "chrome/latest",
- "safari/5.1",
- "safari/latest",
- "opera/12"
- ]
- },
- "version": "1.2.5"
-}
diff --git a/node_modules/mkdirp/node_modules/minimist/readme.markdown b/node_modules/mkdirp/node_modules/minimist/readme.markdown
deleted file mode 100644
index 5fd97ab11..000000000
--- a/node_modules/mkdirp/node_modules/minimist/readme.markdown
+++ /dev/null
@@ -1,95 +0,0 @@
-# minimist
-
-parse argument options
-
-This module is the guts of optimist's argument parser without all the
-fanciful decoration.
-
-# example
-
-``` js
-var argv = require('minimist')(process.argv.slice(2));
-console.log(argv);
-```
-
-```
-$ node example/parse.js -a beep -b boop
-{ _: [], a: 'beep', b: 'boop' }
-```
-
-```
-$ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz
-{ _: [ 'foo', 'bar', 'baz' ],
- x: 3,
- y: 4,
- n: 5,
- a: true,
- b: true,
- c: true,
- beep: 'boop' }
-```
-
-# security
-
-Previous versions had a prototype pollution bug that could cause privilege
-escalation in some circumstances when handling untrusted user input.
-
-Please use version 1.2.3 or later: https://snyk.io/vuln/SNYK-JS-MINIMIST-559764
-
-# methods
-
-``` js
-var parseArgs = require('minimist')
-```
-
-## var argv = parseArgs(args, opts={})
-
-Return an argument object `argv` populated with the array arguments from `args`.
-
-`argv._` contains all the arguments that didn't have an option associated with
-them.
-
-Numeric-looking arguments will be returned as numbers unless `opts.string` or
-`opts.boolean` is set for that argument name.
-
-Any arguments after `'--'` will not be parsed and will end up in `argv._`.
-
-options can be:
-
-* `opts.string` - a string or array of strings argument names to always treat as
-strings
-* `opts.boolean` - a boolean, string or array of strings to always treat as
-booleans. if `true` will treat all double hyphenated arguments without equal signs
-as boolean (e.g. affects `--foo`, not `-f` or `--foo=bar`)
-* `opts.alias` - an object mapping string names to strings or arrays of string
-argument names to use as aliases
-* `opts.default` - an object mapping string argument names to default values
-* `opts.stopEarly` - when true, populate `argv._` with everything after the
-first non-option
-* `opts['--']` - when true, populate `argv._` with everything before the `--`
-and `argv['--']` with everything after the `--`. Here's an example:
-
- ```
- > require('./')('one two three -- four five --six'.split(' '), { '--': true })
- { _: [ 'one', 'two', 'three' ],
- '--': [ 'four', 'five', '--six' ] }
- ```
-
- Note that with `opts['--']` set, parsing for arguments still stops after the
- `--`.
-
-* `opts.unknown` - a function which is invoked with a command line parameter not
-defined in the `opts` configuration object. If the function returns `false`, the
-unknown option is not added to `argv`.
-
-# install
-
-With [npm](https://npmjs.org) do:
-
-```
-npm install minimist
-```
-
-# license
-
-MIT
diff --git a/node_modules/mkdirp/node_modules/minimist/test/all_bool.js b/node_modules/mkdirp/node_modules/minimist/test/all_bool.js
deleted file mode 100644
index ac835483d..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/all_bool.js
+++ /dev/null
@@ -1,32 +0,0 @@
-var parse = require('../');
-var test = require('tape');
-
-test('flag boolean true (default all --args to boolean)', function (t) {
- var argv = parse(['moo', '--honk', 'cow'], {
- boolean: true
- });
-
- t.deepEqual(argv, {
- honk: true,
- _: ['moo', 'cow']
- });
-
- t.deepEqual(typeof argv.honk, 'boolean');
- t.end();
-});
-
-test('flag boolean true only affects double hyphen arguments without equals signs', function (t) {
- var argv = parse(['moo', '--honk', 'cow', '-p', '55', '--tacos=good'], {
- boolean: true
- });
-
- t.deepEqual(argv, {
- honk: true,
- tacos: 'good',
- p: 55,
- _: ['moo', 'cow']
- });
-
- t.deepEqual(typeof argv.honk, 'boolean');
- t.end();
-});
diff --git a/node_modules/mkdirp/node_modules/minimist/test/bool.js b/node_modules/mkdirp/node_modules/minimist/test/bool.js
deleted file mode 100644
index 5f7dbde16..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/bool.js
+++ /dev/null
@@ -1,178 +0,0 @@
-var parse = require('../');
-var test = require('tape');
-
-test('flag boolean default false', function (t) {
- var argv = parse(['moo'], {
- boolean: ['t', 'verbose'],
- default: { verbose: false, t: false }
- });
-
- t.deepEqual(argv, {
- verbose: false,
- t: false,
- _: ['moo']
- });
-
- t.deepEqual(typeof argv.verbose, 'boolean');
- t.deepEqual(typeof argv.t, 'boolean');
- t.end();
-
-});
-
-test('boolean groups', function (t) {
- var argv = parse([ '-x', '-z', 'one', 'two', 'three' ], {
- boolean: ['x','y','z']
- });
-
- t.deepEqual(argv, {
- x : true,
- y : false,
- z : true,
- _ : [ 'one', 'two', 'three' ]
- });
-
- t.deepEqual(typeof argv.x, 'boolean');
- t.deepEqual(typeof argv.y, 'boolean');
- t.deepEqual(typeof argv.z, 'boolean');
- t.end();
-});
-test('boolean and alias with chainable api', function (t) {
- var aliased = [ '-h', 'derp' ];
- var regular = [ '--herp', 'derp' ];
- var opts = {
- herp: { alias: 'h', boolean: true }
- };
- var aliasedArgv = parse(aliased, {
- boolean: 'herp',
- alias: { h: 'herp' }
- });
- var propertyArgv = parse(regular, {
- boolean: 'herp',
- alias: { h: 'herp' }
- });
- var expected = {
- herp: true,
- h: true,
- '_': [ 'derp' ]
- };
-
- t.same(aliasedArgv, expected);
- t.same(propertyArgv, expected);
- t.end();
-});
-
-test('boolean and alias with options hash', function (t) {
- var aliased = [ '-h', 'derp' ];
- var regular = [ '--herp', 'derp' ];
- var opts = {
- alias: { 'h': 'herp' },
- boolean: 'herp'
- };
- var aliasedArgv = parse(aliased, opts);
- var propertyArgv = parse(regular, opts);
- var expected = {
- herp: true,
- h: true,
- '_': [ 'derp' ]
- };
- t.same(aliasedArgv, expected);
- t.same(propertyArgv, expected);
- t.end();
-});
-
-test('boolean and alias array with options hash', function (t) {
- var aliased = [ '-h', 'derp' ];
- var regular = [ '--herp', 'derp' ];
- var alt = [ '--harp', 'derp' ];
- var opts = {
- alias: { 'h': ['herp', 'harp'] },
- boolean: 'h'
- };
- var aliasedArgv = parse(aliased, opts);
- var propertyArgv = parse(regular, opts);
- var altPropertyArgv = parse(alt, opts);
- var expected = {
- harp: true,
- herp: true,
- h: true,
- '_': [ 'derp' ]
- };
- t.same(aliasedArgv, expected);
- t.same(propertyArgv, expected);
- t.same(altPropertyArgv, expected);
- t.end();
-});
-
-test('boolean and alias using explicit true', function (t) {
- var aliased = [ '-h', 'true' ];
- var regular = [ '--herp', 'true' ];
- var opts = {
- alias: { h: 'herp' },
- boolean: 'h'
- };
- var aliasedArgv = parse(aliased, opts);
- var propertyArgv = parse(regular, opts);
- var expected = {
- herp: true,
- h: true,
- '_': [ ]
- };
-
- t.same(aliasedArgv, expected);
- t.same(propertyArgv, expected);
- t.end();
-});
-
-// regression, see https://github.com/substack/node-optimist/issues/71
-test('boolean and --x=true', function(t) {
- var parsed = parse(['--boool', '--other=true'], {
- boolean: 'boool'
- });
-
- t.same(parsed.boool, true);
- t.same(parsed.other, 'true');
-
- parsed = parse(['--boool', '--other=false'], {
- boolean: 'boool'
- });
-
- t.same(parsed.boool, true);
- t.same(parsed.other, 'false');
- t.end();
-});
-
-test('boolean --boool=true', function (t) {
- var parsed = parse(['--boool=true'], {
- default: {
- boool: false
- },
- boolean: ['boool']
- });
-
- t.same(parsed.boool, true);
- t.end();
-});
-
-test('boolean --boool=false', function (t) {
- var parsed = parse(['--boool=false'], {
- default: {
- boool: true
- },
- boolean: ['boool']
- });
-
- t.same(parsed.boool, false);
- t.end();
-});
-
-test('boolean using something similar to true', function (t) {
- var opts = { boolean: 'h' };
- var result = parse(['-h', 'true.txt'], opts);
- var expected = {
- h: true,
- '_': ['true.txt']
- };
-
- t.same(result, expected);
- t.end();
-}); \ No newline at end of file
diff --git a/node_modules/mkdirp/node_modules/minimist/test/dash.js b/node_modules/mkdirp/node_modules/minimist/test/dash.js
deleted file mode 100644
index 5a4fa5be4..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/dash.js
+++ /dev/null
@@ -1,31 +0,0 @@
-var parse = require('../');
-var test = require('tape');
-
-test('-', function (t) {
- t.plan(5);
- t.deepEqual(parse([ '-n', '-' ]), { n: '-', _: [] });
- t.deepEqual(parse([ '-' ]), { _: [ '-' ] });
- t.deepEqual(parse([ '-f-' ]), { f: '-', _: [] });
- t.deepEqual(
- parse([ '-b', '-' ], { boolean: 'b' }),
- { b: true, _: [ '-' ] }
- );
- t.deepEqual(
- parse([ '-s', '-' ], { string: 's' }),
- { s: '-', _: [] }
- );
-});
-
-test('-a -- b', function (t) {
- t.plan(3);
- t.deepEqual(parse([ '-a', '--', 'b' ]), { a: true, _: [ 'b' ] });
- t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] });
- t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] });
-});
-
-test('move arguments after the -- into their own `--` array', function(t) {
- t.plan(1);
- t.deepEqual(
- parse([ '--name', 'John', 'before', '--', 'after' ], { '--': true }),
- { name: 'John', _: [ 'before' ], '--': [ 'after' ] });
-});
diff --git a/node_modules/mkdirp/node_modules/minimist/test/default_bool.js b/node_modules/mkdirp/node_modules/minimist/test/default_bool.js
deleted file mode 100644
index 780a31127..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/default_bool.js
+++ /dev/null
@@ -1,35 +0,0 @@
-var test = require('tape');
-var parse = require('../');
-
-test('boolean default true', function (t) {
- var argv = parse([], {
- boolean: 'sometrue',
- default: { sometrue: true }
- });
- t.equal(argv.sometrue, true);
- t.end();
-});
-
-test('boolean default false', function (t) {
- var argv = parse([], {
- boolean: 'somefalse',
- default: { somefalse: false }
- });
- t.equal(argv.somefalse, false);
- t.end();
-});
-
-test('boolean default to null', function (t) {
- var argv = parse([], {
- boolean: 'maybe',
- default: { maybe: null }
- });
- t.equal(argv.maybe, null);
- var argv = parse(['--maybe'], {
- boolean: 'maybe',
- default: { maybe: null }
- });
- t.equal(argv.maybe, true);
- t.end();
-
-})
diff --git a/node_modules/mkdirp/node_modules/minimist/test/dotted.js b/node_modules/mkdirp/node_modules/minimist/test/dotted.js
deleted file mode 100644
index d8b3e856e..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/dotted.js
+++ /dev/null
@@ -1,22 +0,0 @@
-var parse = require('../');
-var test = require('tape');
-
-test('dotted alias', function (t) {
- var argv = parse(['--a.b', '22'], {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}});
- t.equal(argv.a.b, 22);
- t.equal(argv.aa.bb, 22);
- t.end();
-});
-
-test('dotted default', function (t) {
- var argv = parse('', {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}});
- t.equal(argv.a.b, 11);
- t.equal(argv.aa.bb, 11);
- t.end();
-});
-
-test('dotted default with no alias', function (t) {
- var argv = parse('', {default: {'a.b': 11}});
- t.equal(argv.a.b, 11);
- t.end();
-});
diff --git a/node_modules/mkdirp/node_modules/minimist/test/kv_short.js b/node_modules/mkdirp/node_modules/minimist/test/kv_short.js
deleted file mode 100644
index f813b3050..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/kv_short.js
+++ /dev/null
@@ -1,16 +0,0 @@
-var parse = require('../');
-var test = require('tape');
-
-test('short -k=v' , function (t) {
- t.plan(1);
-
- var argv = parse([ '-b=123' ]);
- t.deepEqual(argv, { b: 123, _: [] });
-});
-
-test('multi short -k=v' , function (t) {
- t.plan(1);
-
- var argv = parse([ '-a=whatever', '-b=robots' ]);
- t.deepEqual(argv, { a: 'whatever', b: 'robots', _: [] });
-});
diff --git a/node_modules/mkdirp/node_modules/minimist/test/long.js b/node_modules/mkdirp/node_modules/minimist/test/long.js
deleted file mode 100644
index 5d3a1e09d..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/long.js
+++ /dev/null
@@ -1,31 +0,0 @@
-var test = require('tape');
-var parse = require('../');
-
-test('long opts', function (t) {
- t.deepEqual(
- parse([ '--bool' ]),
- { bool : true, _ : [] },
- 'long boolean'
- );
- t.deepEqual(
- parse([ '--pow', 'xixxle' ]),
- { pow : 'xixxle', _ : [] },
- 'long capture sp'
- );
- t.deepEqual(
- parse([ '--pow=xixxle' ]),
- { pow : 'xixxle', _ : [] },
- 'long capture eq'
- );
- t.deepEqual(
- parse([ '--host', 'localhost', '--port', '555' ]),
- { host : 'localhost', port : 555, _ : [] },
- 'long captures sp'
- );
- t.deepEqual(
- parse([ '--host=localhost', '--port=555' ]),
- { host : 'localhost', port : 555, _ : [] },
- 'long captures eq'
- );
- t.end();
-});
diff --git a/node_modules/mkdirp/node_modules/minimist/test/num.js b/node_modules/mkdirp/node_modules/minimist/test/num.js
deleted file mode 100644
index 2cc77f4d6..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/num.js
+++ /dev/null
@@ -1,36 +0,0 @@
-var parse = require('../');
-var test = require('tape');
-
-test('nums', function (t) {
- var argv = parse([
- '-x', '1234',
- '-y', '5.67',
- '-z', '1e7',
- '-w', '10f',
- '--hex', '0xdeadbeef',
- '789'
- ]);
- t.deepEqual(argv, {
- x : 1234,
- y : 5.67,
- z : 1e7,
- w : '10f',
- hex : 0xdeadbeef,
- _ : [ 789 ]
- });
- t.deepEqual(typeof argv.x, 'number');
- t.deepEqual(typeof argv.y, 'number');
- t.deepEqual(typeof argv.z, 'number');
- t.deepEqual(typeof argv.w, 'string');
- t.deepEqual(typeof argv.hex, 'number');
- t.deepEqual(typeof argv._[0], 'number');
- t.end();
-});
-
-test('already a number', function (t) {
- var argv = parse([ '-x', 1234, 789 ]);
- t.deepEqual(argv, { x : 1234, _ : [ 789 ] });
- t.deepEqual(typeof argv.x, 'number');
- t.deepEqual(typeof argv._[0], 'number');
- t.end();
-});
diff --git a/node_modules/mkdirp/node_modules/minimist/test/parse.js b/node_modules/mkdirp/node_modules/minimist/test/parse.js
deleted file mode 100644
index 7b4a2a17c..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/parse.js
+++ /dev/null
@@ -1,197 +0,0 @@
-var parse = require('../');
-var test = require('tape');
-
-test('parse args', function (t) {
- t.deepEqual(
- parse([ '--no-moo' ]),
- { moo : false, _ : [] },
- 'no'
- );
- t.deepEqual(
- parse([ '-v', 'a', '-v', 'b', '-v', 'c' ]),
- { v : ['a','b','c'], _ : [] },
- 'multi'
- );
- t.end();
-});
-
-test('comprehensive', function (t) {
- t.deepEqual(
- parse([
- '--name=meowmers', 'bare', '-cats', 'woo',
- '-h', 'awesome', '--multi=quux',
- '--key', 'value',
- '-b', '--bool', '--no-meep', '--multi=baz',
- '--', '--not-a-flag', 'eek'
- ]),
- {
- c : true,
- a : true,
- t : true,
- s : 'woo',
- h : 'awesome',
- b : true,
- bool : true,
- key : 'value',
- multi : [ 'quux', 'baz' ],
- meep : false,
- name : 'meowmers',
- _ : [ 'bare', '--not-a-flag', 'eek' ]
- }
- );
- t.end();
-});
-
-test('flag boolean', function (t) {
- var argv = parse([ '-t', 'moo' ], { boolean: 't' });
- t.deepEqual(argv, { t : true, _ : [ 'moo' ] });
- t.deepEqual(typeof argv.t, 'boolean');
- t.end();
-});
-
-test('flag boolean value', function (t) {
- var argv = parse(['--verbose', 'false', 'moo', '-t', 'true'], {
- boolean: [ 't', 'verbose' ],
- default: { verbose: true }
- });
-
- t.deepEqual(argv, {
- verbose: false,
- t: true,
- _: ['moo']
- });
-
- t.deepEqual(typeof argv.verbose, 'boolean');
- t.deepEqual(typeof argv.t, 'boolean');
- t.end();
-});
-
-test('newlines in params' , function (t) {
- var args = parse([ '-s', "X\nX" ])
- t.deepEqual(args, { _ : [], s : "X\nX" });
-
- // reproduce in bash:
- // VALUE="new
- // line"
- // node program.js --s="$VALUE"
- args = parse([ "--s=X\nX" ])
- t.deepEqual(args, { _ : [], s : "X\nX" });
- t.end();
-});
-
-test('strings' , function (t) {
- var s = parse([ '-s', '0001234' ], { string: 's' }).s;
- t.equal(s, '0001234');
- t.equal(typeof s, 'string');
-
- var x = parse([ '-x', '56' ], { string: 'x' }).x;
- t.equal(x, '56');
- t.equal(typeof x, 'string');
- t.end();
-});
-
-test('stringArgs', function (t) {
- var s = parse([ ' ', ' ' ], { string: '_' })._;
- t.same(s.length, 2);
- t.same(typeof s[0], 'string');
- t.same(s[0], ' ');
- t.same(typeof s[1], 'string');
- t.same(s[1], ' ');
- t.end();
-});
-
-test('empty strings', function(t) {
- var s = parse([ '-s' ], { string: 's' }).s;
- t.equal(s, '');
- t.equal(typeof s, 'string');
-
- var str = parse([ '--str' ], { string: 'str' }).str;
- t.equal(str, '');
- t.equal(typeof str, 'string');
-
- var letters = parse([ '-art' ], {
- string: [ 'a', 't' ]
- });
-
- t.equal(letters.a, '');
- t.equal(letters.r, true);
- t.equal(letters.t, '');
-
- t.end();
-});
-
-
-test('string and alias', function(t) {
- var x = parse([ '--str', '000123' ], {
- string: 's',
- alias: { s: 'str' }
- });
-
- t.equal(x.str, '000123');
- t.equal(typeof x.str, 'string');
- t.equal(x.s, '000123');
- t.equal(typeof x.s, 'string');
-
- var y = parse([ '-s', '000123' ], {
- string: 'str',
- alias: { str: 's' }
- });
-
- t.equal(y.str, '000123');
- t.equal(typeof y.str, 'string');
- t.equal(y.s, '000123');
- t.equal(typeof y.s, 'string');
- t.end();
-});
-
-test('slashBreak', function (t) {
- t.same(
- parse([ '-I/foo/bar/baz' ]),
- { I : '/foo/bar/baz', _ : [] }
- );
- t.same(
- parse([ '-xyz/foo/bar/baz' ]),
- { x : true, y : true, z : '/foo/bar/baz', _ : [] }
- );
- t.end();
-});
-
-test('alias', function (t) {
- var argv = parse([ '-f', '11', '--zoom', '55' ], {
- alias: { z: 'zoom' }
- });
- t.equal(argv.zoom, 55);
- t.equal(argv.z, argv.zoom);
- t.equal(argv.f, 11);
- t.end();
-});
-
-test('multiAlias', function (t) {
- var argv = parse([ '-f', '11', '--zoom', '55' ], {
- alias: { z: [ 'zm', 'zoom' ] }
- });
- t.equal(argv.zoom, 55);
- t.equal(argv.z, argv.zoom);
- t.equal(argv.z, argv.zm);
- t.equal(argv.f, 11);
- t.end();
-});
-
-test('nested dotted objects', function (t) {
- var argv = parse([
- '--foo.bar', '3', '--foo.baz', '4',
- '--foo.quux.quibble', '5', '--foo.quux.o_O',
- '--beep.boop'
- ]);
-
- t.same(argv.foo, {
- bar : 3,
- baz : 4,
- quux : {
- quibble : 5,
- o_O : true
- }
- });
- t.same(argv.beep, { boop : true });
- t.end();
-});
diff --git a/node_modules/mkdirp/node_modules/minimist/test/parse_modified.js b/node_modules/mkdirp/node_modules/minimist/test/parse_modified.js
deleted file mode 100644
index ab620dc5e..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/parse_modified.js
+++ /dev/null
@@ -1,9 +0,0 @@
-var parse = require('../');
-var test = require('tape');
-
-test('parse with modifier functions' , function (t) {
- t.plan(1);
-
- var argv = parse([ '-b', '123' ], { boolean: 'b' });
- t.deepEqual(argv, { b: true, _: [123] });
-});
diff --git a/node_modules/mkdirp/node_modules/minimist/test/proto.js b/node_modules/mkdirp/node_modules/minimist/test/proto.js
deleted file mode 100644
index 8649107ec..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/proto.js
+++ /dev/null
@@ -1,44 +0,0 @@
-var parse = require('../');
-var test = require('tape');
-
-test('proto pollution', function (t) {
- var argv = parse(['--__proto__.x','123']);
- t.equal({}.x, undefined);
- t.equal(argv.__proto__.x, undefined);
- t.equal(argv.x, undefined);
- t.end();
-});
-
-test('proto pollution (array)', function (t) {
- var argv = parse(['--x','4','--x','5','--x.__proto__.z','789']);
- t.equal({}.z, undefined);
- t.deepEqual(argv.x, [4,5]);
- t.equal(argv.x.z, undefined);
- t.equal(argv.x.__proto__.z, undefined);
- t.end();
-});
-
-test('proto pollution (number)', function (t) {
- var argv = parse(['--x','5','--x.__proto__.z','100']);
- t.equal({}.z, undefined);
- t.equal((4).z, undefined);
- t.equal(argv.x, 5);
- t.equal(argv.x.z, undefined);
- t.end();
-});
-
-test('proto pollution (string)', function (t) {
- var argv = parse(['--x','abc','--x.__proto__.z','def']);
- t.equal({}.z, undefined);
- t.equal('...'.z, undefined);
- t.equal(argv.x, 'abc');
- t.equal(argv.x.z, undefined);
- t.end();
-});
-
-test('proto pollution (constructor)', function (t) {
- var argv = parse(['--constructor.prototype.y','123']);
- t.equal({}.y, undefined);
- t.equal(argv.y, undefined);
- t.end();
-});
diff --git a/node_modules/mkdirp/node_modules/minimist/test/short.js b/node_modules/mkdirp/node_modules/minimist/test/short.js
deleted file mode 100644
index d513a1c25..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/short.js
+++ /dev/null
@@ -1,67 +0,0 @@
-var parse = require('../');
-var test = require('tape');
-
-test('numeric short args', function (t) {
- t.plan(2);
- t.deepEqual(parse([ '-n123' ]), { n: 123, _: [] });
- t.deepEqual(
- parse([ '-123', '456' ]),
- { 1: true, 2: true, 3: 456, _: [] }
- );
-});
-
-test('short', function (t) {
- t.deepEqual(
- parse([ '-b' ]),
- { b : true, _ : [] },
- 'short boolean'
- );
- t.deepEqual(
- parse([ 'foo', 'bar', 'baz' ]),
- { _ : [ 'foo', 'bar', 'baz' ] },
- 'bare'
- );
- t.deepEqual(
- parse([ '-cats' ]),
- { c : true, a : true, t : true, s : true, _ : [] },
- 'group'
- );
- t.deepEqual(
- parse([ '-cats', 'meow' ]),
- { c : true, a : true, t : true, s : 'meow', _ : [] },
- 'short group next'
- );
- t.deepEqual(
- parse([ '-h', 'localhost' ]),
- { h : 'localhost', _ : [] },
- 'short capture'
- );
- t.deepEqual(
- parse([ '-h', 'localhost', '-p', '555' ]),
- { h : 'localhost', p : 555, _ : [] },
- 'short captures'
- );
- t.end();
-});
-
-test('mixed short bool and capture', function (t) {
- t.same(
- parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]),
- {
- f : true, p : 555, h : 'localhost',
- _ : [ 'script.js' ]
- }
- );
- t.end();
-});
-
-test('short and long', function (t) {
- t.deepEqual(
- parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]),
- {
- f : true, p : 555, h : 'localhost',
- _ : [ 'script.js' ]
- }
- );
- t.end();
-});
diff --git a/node_modules/mkdirp/node_modules/minimist/test/stop_early.js b/node_modules/mkdirp/node_modules/minimist/test/stop_early.js
deleted file mode 100644
index bdf9fbcb0..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/stop_early.js
+++ /dev/null
@@ -1,15 +0,0 @@
-var parse = require('../');
-var test = require('tape');
-
-test('stops parsing on the first non-option when stopEarly is set', function (t) {
- var argv = parse(['--aaa', 'bbb', 'ccc', '--ddd'], {
- stopEarly: true
- });
-
- t.deepEqual(argv, {
- aaa: 'bbb',
- _: ['ccc', '--ddd']
- });
-
- t.end();
-});
diff --git a/node_modules/mkdirp/node_modules/minimist/test/unknown.js b/node_modules/mkdirp/node_modules/minimist/test/unknown.js
deleted file mode 100644
index 462a36bdd..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/unknown.js
+++ /dev/null
@@ -1,102 +0,0 @@
-var parse = require('../');
-var test = require('tape');
-
-test('boolean and alias is not unknown', function (t) {
- var unknown = [];
- function unknownFn(arg) {
- unknown.push(arg);
- return false;
- }
- var aliased = [ '-h', 'true', '--derp', 'true' ];
- var regular = [ '--herp', 'true', '-d', 'true' ];
- var opts = {
- alias: { h: 'herp' },
- boolean: 'h',
- unknown: unknownFn
- };
- var aliasedArgv = parse(aliased, opts);
- var propertyArgv = parse(regular, opts);
-
- t.same(unknown, ['--derp', '-d']);
- t.end();
-});
-
-test('flag boolean true any double hyphen argument is not unknown', function (t) {
- var unknown = [];
- function unknownFn(arg) {
- unknown.push(arg);
- return false;
- }
- var argv = parse(['--honk', '--tacos=good', 'cow', '-p', '55'], {
- boolean: true,
- unknown: unknownFn
- });
- t.same(unknown, ['--tacos=good', 'cow', '-p']);
- t.same(argv, {
- honk: true,
- _: []
- });
- t.end();
-});
-
-test('string and alias is not unknown', function (t) {
- var unknown = [];
- function unknownFn(arg) {
- unknown.push(arg);
- return false;
- }
- var aliased = [ '-h', 'hello', '--derp', 'goodbye' ];
- var regular = [ '--herp', 'hello', '-d', 'moon' ];
- var opts = {
- alias: { h: 'herp' },
- string: 'h',
- unknown: unknownFn
- };
- var aliasedArgv = parse(aliased, opts);
- var propertyArgv = parse(regular, opts);
-
- t.same(unknown, ['--derp', '-d']);
- t.end();
-});
-
-test('default and alias is not unknown', function (t) {
- var unknown = [];
- function unknownFn(arg) {
- unknown.push(arg);
- return false;
- }
- var aliased = [ '-h', 'hello' ];
- var regular = [ '--herp', 'hello' ];
- var opts = {
- default: { 'h': 'bar' },
- alias: { 'h': 'herp' },
- unknown: unknownFn
- };
- var aliasedArgv = parse(aliased, opts);
- var propertyArgv = parse(regular, opts);
-
- t.same(unknown, []);
- t.end();
- unknownFn(); // exercise fn for 100% coverage
-});
-
-test('value following -- is not unknown', function (t) {
- var unknown = [];
- function unknownFn(arg) {
- unknown.push(arg);
- return false;
- }
- var aliased = [ '--bad', '--', 'good', 'arg' ];
- var opts = {
- '--': true,
- unknown: unknownFn
- };
- var argv = parse(aliased, opts);
-
- t.same(unknown, ['--bad']);
- t.same(argv, {
- '--': ['good', 'arg'],
- '_': []
- })
- t.end();
-});
diff --git a/node_modules/mkdirp/node_modules/minimist/test/whitespace.js b/node_modules/mkdirp/node_modules/minimist/test/whitespace.js
deleted file mode 100644
index 8a52a58ce..000000000
--- a/node_modules/mkdirp/node_modules/minimist/test/whitespace.js
+++ /dev/null
@@ -1,8 +0,0 @@
-var parse = require('../');
-var test = require('tape');
-
-test('whitespace should be whitespace' , function (t) {
- t.plan(1);
- var x = parse([ '-x', '\t' ]).x;
- t.equal(x, '\t');
-});
diff --git a/node_modules/move-file/index.d.ts b/node_modules/move-file/index.d.ts
deleted file mode 100644
index 45616d0f4..000000000
--- a/node_modules/move-file/index.d.ts
+++ /dev/null
@@ -1,41 +0,0 @@
-declare namespace moveFile {
- interface Options {
- /**
- Overwrite existing destination file.
-
- @default true
- */
- readonly overwrite?: boolean;
- }
-}
-
-declare const moveFile: {
- /**
- Move a file.
-
- @param source - File you want to move.
- @param destination - Where you want the file moved.
- @returns A `Promise` that resolves when the file has been moved.
-
- @example
- ```
- import moveFile = require('move-file');
-
- (async () => {
- await moveFile('source/unicorn.png', 'destination/unicorn.png');
- console.log('The file has been moved');
- })();
- ```
- */
- (source: string, destination: string, options?: moveFile.Options): Promise<void>;
-
- /**
- Move a file synchronously.
-
- @param source - File you want to move.
- @param destination - Where you want the file moved.
- */
- sync(source: string, destination: string, options?: moveFile.Options): void;
-};
-
-export = moveFile;
diff --git a/node_modules/move-file/index.js b/node_modules/move-file/index.js
deleted file mode 100644
index 09e31acaa..000000000
--- a/node_modules/move-file/index.js
+++ /dev/null
@@ -1,62 +0,0 @@
-'use strict';
-const path = require('path');
-const fs = require('fs');
-const pathExists = require('path-exists');
-
-const fsP = fs.promises;
-
-module.exports = async (source, destination, options) => {
- if (!source || !destination) {
- throw new TypeError('`source` and `destination` file required');
- }
-
- options = {
- overwrite: true,
- ...options
- };
-
- if (!options.overwrite && await pathExists(destination)) {
- throw new Error(`The destination file exists: ${destination}`);
- }
-
- await fsP.mkdir(path.dirname(destination), {recursive: true});
-
- try {
- await fsP.rename(source, destination);
- } catch (error) {
- if (error.code === 'EXDEV') {
- await fsP.copyFile(source, destination);
- await fsP.unlink(source);
- } else {
- throw error;
- }
- }
-};
-
-module.exports.sync = (source, destination, options) => {
- if (!source || !destination) {
- throw new TypeError('`source` and `destination` file required');
- }
-
- options = {
- overwrite: true,
- ...options
- };
-
- if (!options.overwrite && fs.existsSync(destination)) {
- throw new Error(`The destination file exists: ${destination}`);
- }
-
- fs.mkdirSync(path.dirname(destination), {recursive: true});
-
- try {
- fs.renameSync(source, destination);
- } catch (error) {
- if (error.code === 'EXDEV') {
- fs.copyFileSync(source, destination);
- fs.unlinkSync(source);
- } else {
- throw error;
- }
- }
-};
diff --git a/node_modules/move-file/license b/node_modules/move-file/license
deleted file mode 100644
index fa7ceba3e..000000000
--- a/node_modules/move-file/license
+++ /dev/null
@@ -1,9 +0,0 @@
-MIT License
-
-Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/move-file/node_modules/path-exists/index.d.ts b/node_modules/move-file/node_modules/path-exists/index.d.ts
deleted file mode 100644
index 54b7ab8f4..000000000
--- a/node_modules/move-file/node_modules/path-exists/index.d.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-declare const pathExists: {
- /**
- Check if a path exists.
-
- @returns Whether the path exists.
-
- @example
- ```
- // foo.ts
- import pathExists = require('path-exists');
-
- (async () => {
- console.log(await pathExists('foo.ts'));
- //=> true
- })();
- ```
- */
- (path: string): Promise<boolean>;
-
- /**
- Synchronously check if a path exists.
-
- @returns Whether the path exists.
- */
- sync(path: string): boolean;
-};
-
-export = pathExists;
diff --git a/node_modules/move-file/node_modules/path-exists/index.js b/node_modules/move-file/node_modules/path-exists/index.js
deleted file mode 100644
index 1943921b7..000000000
--- a/node_modules/move-file/node_modules/path-exists/index.js
+++ /dev/null
@@ -1,23 +0,0 @@
-'use strict';
-const fs = require('fs');
-const {promisify} = require('util');
-
-const pAccess = promisify(fs.access);
-
-module.exports = async path => {
- try {
- await pAccess(path);
- return true;
- } catch (_) {
- return false;
- }
-};
-
-module.exports.sync = path => {
- try {
- fs.accessSync(path);
- return true;
- } catch (_) {
- return false;
- }
-};
diff --git a/node_modules/move-file/node_modules/path-exists/license b/node_modules/move-file/node_modules/path-exists/license
deleted file mode 100644
index e7af2f771..000000000
--- a/node_modules/move-file/node_modules/path-exists/license
+++ /dev/null
@@ -1,9 +0,0 @@
-MIT License
-
-Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/move-file/node_modules/path-exists/package.json b/node_modules/move-file/node_modules/path-exists/package.json
deleted file mode 100644
index 4c6d0b13d..000000000
--- a/node_modules/move-file/node_modules/path-exists/package.json
+++ /dev/null
@@ -1,71 +0,0 @@
-{
- "_from": "path-exists@^4.0.0",
- "_id": "path-exists@4.0.0",
- "_inBundle": false,
- "_integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
- "_location": "/move-file/path-exists",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "path-exists@^4.0.0",
- "name": "path-exists",
- "escapedName": "path-exists",
- "rawSpec": "^4.0.0",
- "saveSpec": null,
- "fetchSpec": "^4.0.0"
- },
- "_requiredBy": [
- "/move-file"
- ],
- "_resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
- "_shasum": "513bdbe2d3b95d7762e8c1137efa195c6c61b5b3",
- "_spec": "path-exists@^4.0.0",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/move-file",
- "author": {
- "name": "Sindre Sorhus",
- "email": "sindresorhus@gmail.com",
- "url": "sindresorhus.com"
- },
- "bugs": {
- "url": "https://github.com/sindresorhus/path-exists/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "Check if a path exists",
- "devDependencies": {
- "ava": "^1.4.1",
- "tsd": "^0.7.2",
- "xo": "^0.24.0"
- },
- "engines": {
- "node": ">=8"
- },
- "files": [
- "index.js",
- "index.d.ts"
- ],
- "homepage": "https://github.com/sindresorhus/path-exists#readme",
- "keywords": [
- "path",
- "exists",
- "exist",
- "file",
- "filepath",
- "fs",
- "filesystem",
- "file-system",
- "access",
- "stat"
- ],
- "license": "MIT",
- "name": "path-exists",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/sindresorhus/path-exists.git"
- },
- "scripts": {
- "test": "xo && ava && tsd"
- },
- "version": "4.0.0"
-}
diff --git a/node_modules/move-file/node_modules/path-exists/readme.md b/node_modules/move-file/node_modules/path-exists/readme.md
deleted file mode 100644
index 81f984545..000000000
--- a/node_modules/move-file/node_modules/path-exists/readme.md
+++ /dev/null
@@ -1,52 +0,0 @@
-# path-exists [![Build Status](https://travis-ci.org/sindresorhus/path-exists.svg?branch=master)](https://travis-ci.org/sindresorhus/path-exists)
-
-> Check if a path exists
-
-NOTE: `fs.existsSync` has been un-deprecated in Node.js since 6.8.0. If you only need to check synchronously, this module is not needed.
-
-While [`fs.exists()`](https://nodejs.org/api/fs.html#fs_fs_exists_path_callback) is being [deprecated](https://github.com/iojs/io.js/issues/103), there's still a genuine use-case of being able to check if a path exists for other purposes than doing IO with it.
-
-Never use this before handling a file though:
-
-> In particular, checking if a file exists before opening it is an anti-pattern that leaves you vulnerable to race conditions: another process may remove the file between the calls to `fs.exists()` and `fs.open()`. Just open the file and handle the error when it's not there.
-
-
-## Install
-
-```
-$ npm install path-exists
-```
-
-
-## Usage
-
-```js
-// foo.js
-const pathExists = require('path-exists');
-
-(async () => {
- console.log(await pathExists('foo.js'));
- //=> true
-})();
-```
-
-
-## API
-
-### pathExists(path)
-
-Returns a `Promise<boolean>` of whether the path exists.
-
-### pathExists.sync(path)
-
-Returns a `boolean` of whether the path exists.
-
-
-## Related
-
-- [path-exists-cli](https://github.com/sindresorhus/path-exists-cli) - CLI for this module
-
-
-## License
-
-MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/move-file/package.json b/node_modules/move-file/package.json
deleted file mode 100644
index 35a25410e..000000000
--- a/node_modules/move-file/package.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
- "_from": "move-file@^2.0.0",
- "_id": "move-file@2.0.0",
- "_inBundle": false,
- "_integrity": "sha512-cdkdhNCgbP5dvS4tlGxZbD+nloio9GIimP57EjqFhwLcMjnU+XJKAZzlmg/TN/AK1LuNAdTSvm3CPPP4Xkv0iQ==",
- "_location": "/move-file",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "move-file@^2.0.0",
- "name": "move-file",
- "escapedName": "move-file",
- "rawSpec": "^2.0.0",
- "saveSpec": null,
- "fetchSpec": "^2.0.0"
- },
- "_requiredBy": [
- "/@npmcli/arborist/cacache"
- ],
- "_resolved": "https://registry.npmjs.org/move-file/-/move-file-2.0.0.tgz",
- "_shasum": "83ffa309b5d7f69d518b28e1333e2ffadf331e3e",
- "_spec": "move-file@^2.0.0",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/arborist/node_modules/cacache",
- "author": {
- "name": "Sindre Sorhus",
- "email": "sindresorhus@gmail.com",
- "url": "https://sindresorhus.com"
- },
- "bugs": {
- "url": "https://github.com/sindresorhus/move-file/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "path-exists": "^4.0.0"
- },
- "deprecated": false,
- "description": "Move a file - Even works across devices",
- "devDependencies": {
- "ava": "^1.4.1",
- "sinon": "^9.0.2",
- "temp-write": "^4.0.0",
- "tempy": "^0.5.0",
- "tsd": "^0.11.0",
- "xo": "^0.24.0"
- },
- "engines": {
- "node": ">=10.17"
- },
- "files": [
- "index.js",
- "index.d.ts"
- ],
- "funding": "https://github.com/sponsors/sindresorhus",
- "homepage": "https://github.com/sindresorhus/move-file#readme",
- "keywords": [
- "move",
- "file",
- "mv",
- "fs",
- "stream",
- "file-system",
- "ncp",
- "fast",
- "quick",
- "data",
- "content",
- "contents",
- "devices",
- "partitions"
- ],
- "license": "MIT",
- "name": "move-file",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/sindresorhus/move-file.git"
- },
- "scripts": {
- "test": "xo && ava && tsd"
- },
- "version": "2.0.0"
-}
diff --git a/node_modules/move-file/readme.md b/node_modules/move-file/readme.md
deleted file mode 100644
index 9f7625f4d..000000000
--- a/node_modules/move-file/readme.md
+++ /dev/null
@@ -1,67 +0,0 @@
-# move-file [![Build Status](https://travis-ci.com/sindresorhus/move-file.svg?branch=master)](https://travis-ci.com/sindresorhus/move-file)
-
-> Move a file
-
-The built-in [`fs.rename()`](https://nodejs.org/api/fs.html#fs_fs_rename_oldpath_newpath_callback) is just a JavaScript wrapper for the C `rename(2)` function, which doesn't support moving files across partitions or devices. This module is what you would have expected `fs.rename()` to be.
-
-## Highlights
-
-- Promise API.
-- Supports moving a file across partitions and devices.
-- Optionally prevent overwriting an existing file.
-- Creates non-existent destination directories for you.
-
-## Install
-
-```
-$ npm install move-file
-```
-
-## Usage
-
-```js
-const moveFile = require('move-file');
-
-(async () => {
- await moveFile('source/unicorn.png', 'destination/unicorn.png');
- console.log('The file has been moved');
-})();
-```
-
-## API
-
-### moveFile(source, destination, options?)
-
-Returns a `Promise` that resolves when the file has been moved.
-
-### moveFile.sync(source, destination, options?)
-
-#### source
-
-Type: `string`
-
-File you want to move.
-
-#### destination
-
-Type: `string`
-
-Where you want the file moved.
-
-#### options
-
-Type: `object`
-
-##### overwrite
-
-Type: `boolean`\
-Default: `true`
-
-Overwrite existing destination file.
-
-## Related
-
-- [move-file-cli](https://github.com/sindresorhus/move-file-cli) - CLI for this module
-- [cp-file](https://github.com/sindresorhus/cp-file) - Copy a file
-- [cpy](https://github.com/sindresorhus/cpy) - Copy files
-- [make-dir](https://github.com/sindresorhus/make-dir) - Make a directory and its parents if needed
diff --git a/node_modules/npm-registry-fetch/CHANGELOG.md b/node_modules/npm-registry-fetch/CHANGELOG.md
index f4abc203d..003dc8b6d 100644
--- a/node_modules/npm-registry-fetch/CHANGELOG.md
+++ b/node_modules/npm-registry-fetch/CHANGELOG.md
@@ -2,6 +2,20 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+## [8.1.0](https://github.com/npm/registry-fetch/compare/v8.0.3...v8.1.0) (2020-05-20)
+
+
+### Features
+
+* add npm-command HTTP header ([1bb4eb2](https://github.com/npm/registry-fetch/commit/1bb4eb2c66ee8a0dc62558bdcff1b548e2bb9820))
+
+### [8.0.3](https://github.com/npm/registry-fetch/compare/v8.0.2...v8.0.3) (2020-05-13)
+
+
+### Bug Fixes
+
+* update minipass and make-fetch-happen to latest ([3b6c5d0](https://github.com/npm/registry-fetch/commit/3b6c5d0d8ccd4c4a97862a65acef956f19aec127)), closes [#23](https://github.com/npm/registry-fetch/issues/23)
+
### [8.0.2](https://github.com/npm/registry-fetch/compare/v8.0.1...v8.0.2) (2020-05-04)
diff --git a/node_modules/npm-registry-fetch/README.md b/node_modules/npm-registry-fetch/README.md
index 499d4f983..e660940e3 100644
--- a/node_modules/npm-registry-fetch/README.md
+++ b/node_modules/npm-registry-fetch/README.md
@@ -398,6 +398,15 @@ If provided, will be sent in the `npm-session` header. This header is used by
the npm registry to identify individual user sessions (usually individual
invocations of the CLI).
+##### <a name="opts-npmCommand"></a> `opts.npmCommand`
+
+* Type: String
+* Default: null
+
+If provided, it will be sent in the `npm-command` header. This header is
+used by the npm registry to identify the npm command that caused this
+request to be made.
+
##### <a name="opts-offline"></a> `opts.offline`
* Type: Boolean
diff --git a/node_modules/npm-registry-fetch/index.js b/node_modules/npm-registry-fetch/index.js
index 8e05f4184..eb48ba6c0 100644
--- a/node_modules/npm-registry-fetch/index.js
+++ b/node_modules/npm-registry-fetch/index.js
@@ -186,6 +186,10 @@ function getHeaders (registry, uri, opts) {
headers['npm-session'] = opts.npmSession
}
+ if (opts.npmCommand) {
+ headers['npm-command'] = opts.npmCommand
+ }
+
const auth = getAuth(registry, opts)
// If a tarball is hosted on a different place than the manifest, only send
// credentials on `alwaysAuth`
diff --git a/node_modules/npm-registry-fetch/package.json b/node_modules/npm-registry-fetch/package.json
index 0371d785d..251bfd280 100644
--- a/node_modules/npm-registry-fetch/package.json
+++ b/node_modules/npm-registry-fetch/package.json
@@ -1,19 +1,19 @@
{
- "_from": "npm-registry-fetch@8.0.2",
- "_id": "npm-registry-fetch@8.0.2",
+ "_from": "npm-registry-fetch@latest",
+ "_id": "npm-registry-fetch@8.1.0",
"_inBundle": false,
- "_integrity": "sha512-/UteT/LQ+0eegPh96w2sVjpAJk2kuphWASp1SPBFBWtSyuPoUW5gf4utzQm5A5FLrZF3eZJGH59j4X6KBUa40g==",
+ "_integrity": "sha512-RkcugRDye2j6yEiHGMyAdKQoipgp8VToSIjm+TFLhVraXOkC/WU2kjE2URcYBpcJ4hs++VFBKo6+Zg4wmrS+Qw==",
"_location": "/npm-registry-fetch",
"_phantomChildren": {},
"_requested": {
- "type": "version",
+ "type": "tag",
"registry": true,
- "raw": "npm-registry-fetch@8.0.2",
+ "raw": "npm-registry-fetch@latest",
"name": "npm-registry-fetch",
"escapedName": "npm-registry-fetch",
- "rawSpec": "8.0.2",
+ "rawSpec": "latest",
"saveSpec": null,
- "fetchSpec": "8.0.2"
+ "fetchSpec": "latest"
},
"_requiredBy": [
"#USER",
@@ -24,12 +24,11 @@
"/libnpmpublish",
"/libnpmsearch",
"/libnpmteam",
- "/npm-profile",
- "/pacote"
+ "/npm-profile"
],
- "_resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-8.0.2.tgz",
- "_shasum": "8a16e7e09b51939b6ca8763793ef171c6935d2da",
- "_spec": "npm-registry-fetch@8.0.2",
+ "_resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-8.1.0.tgz",
+ "_shasum": "1d5c229b82412414b9c63cde040b51981db76904",
+ "_spec": "npm-registry-fetch@latest",
"_where": "/Users/isaacs/dev/npm/cli",
"author": {
"name": "Kat Marchán",
@@ -42,8 +41,8 @@
"dependencies": {
"@npmcli/ci-detect": "^1.0.0",
"lru-cache": "^5.1.1",
- "make-fetch-happen": "^8.0.6",
- "minipass": "^3.0.0",
+ "make-fetch-happen": "^8.0.7",
+ "minipass": "^3.1.3",
"minipass-fetch": "^1.1.2",
"minipass-json-stream": "^1.0.1",
"minizlib": "^2.0.0",
@@ -95,5 +94,5 @@
"check-coverage": true,
"test-ignore": "test[\\\\/](util|cache)[\\\\/]"
},
- "version": "8.0.2"
+ "version": "8.1.0"
}
diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json
index 41aa27628..480536ac4 100644
--- a/node_modules/pacote/package.json
+++ b/node_modules/pacote/package.json
@@ -1,8 +1,8 @@
{
"_from": "pacote@latest",
- "_id": "pacote@11.1.9",
+ "_id": "pacote@11.1.10",
"_inBundle": false,
- "_integrity": "sha512-gnaYYFA3JUZKcbISv24Y14dbMqAheMjGZpMWt8rvrXSI8fFDT/iUlUaJ0u74CLT7WVWOBzApIBErbF4n2DbikQ==",
+ "_integrity": "sha512-xJSSSxcHLo1CZJESvIZ2hbC5/5WP2oDHeUeTV2FvVDPOZG7+ixbxnV0n3TkdaUqlbC0RwnwMQ3wkJ/YywYM0Cg==",
"_location": "/pacote",
"_phantomChildren": {
"glob": "7.1.4"
@@ -23,8 +23,8 @@
"/@npmcli/arborist",
"/libnpmpack"
],
- "_resolved": "https://registry.npmjs.org/pacote/-/pacote-11.1.9.tgz",
- "_shasum": "7d91d6f249893b95c63851dfdfe1ef58a250c751",
+ "_resolved": "https://registry.npmjs.org/pacote/-/pacote-11.1.10.tgz",
+ "_shasum": "cbd56a5a60c9cf472bc8ea7ba87112d749655dd4",
"_spec": "pacote@latest",
"_where": "/Users/isaacs/dev/npm/cli",
"author": {
@@ -48,12 +48,12 @@
"chownr": "^2.0.0",
"fs-minipass": "^2.1.0",
"infer-owner": "^1.0.4",
- "minipass": "^3.0.1",
+ "minipass": "^3.1.3",
"mkdirp": "^1.0.3",
"npm-package-arg": "^8.0.1",
"npm-packlist": "^2.1.0",
"npm-pick-manifest": "^6.0.0",
- "npm-registry-fetch": "^8.0.2",
+ "npm-registry-fetch": "^8.0.3",
"promise-retry": "^1.1.1",
"read-package-json-fast": "^1.1.3",
"rimraf": "^3.0.2",
@@ -100,5 +100,5 @@
"coverage-map": "map.js",
"esm": false
},
- "version": "11.1.9"
+ "version": "11.1.10"
}
diff --git a/node_modules/string_decoder/node_modules/safe-buffer/README.md b/node_modules/string_decoder/node_modules/safe-buffer/README.md
deleted file mode 100644
index 356e35193..000000000
--- a/node_modules/string_decoder/node_modules/safe-buffer/README.md
+++ /dev/null
@@ -1,586 +0,0 @@
-# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url]
-
-[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg
-[travis-url]: https://travis-ci.org/feross/safe-buffer
-[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg
-[npm-url]: https://npmjs.org/package/safe-buffer
-[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg
-[downloads-url]: https://npmjs.org/package/safe-buffer
-[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg
-[standard-url]: https://standardjs.com
-
-#### Safer Node.js Buffer API
-
-**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`,
-`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.**
-
-**Uses the built-in implementation when available.**
-
-## install
-
-```
-npm install safe-buffer
-```
-
-[Get supported safe-buffer with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-safe-buffer?utm_source=npm-safe-buffer&utm_medium=referral&utm_campaign=readme)
-
-## usage
-
-The goal of this package is to provide a safe replacement for the node.js `Buffer`.
-
-It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to
-the top of your node.js modules:
-
-```js
-var Buffer = require('safe-buffer').Buffer
-
-// Existing buffer code will continue to work without issues:
-
-new Buffer('hey', 'utf8')
-new Buffer([1, 2, 3], 'utf8')
-new Buffer(obj)
-new Buffer(16) // create an uninitialized buffer (potentially unsafe)
-
-// But you can use these new explicit APIs to make clear what you want:
-
-Buffer.from('hey', 'utf8') // convert from many types to a Buffer
-Buffer.alloc(16) // create a zero-filled buffer (safe)
-Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe)
-```
-
-## api
-
-### Class Method: Buffer.from(array)
-<!-- YAML
-added: v3.0.0
--->
-
-* `array` {Array}
-
-Allocates a new `Buffer` using an `array` of octets.
-
-```js
-const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]);
- // creates a new Buffer containing ASCII bytes
- // ['b','u','f','f','e','r']
-```
-
-A `TypeError` will be thrown if `array` is not an `Array`.
-
-### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]])
-<!-- YAML
-added: v5.10.0
--->
-
-* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or
- a `new ArrayBuffer()`
-* `byteOffset` {Number} Default: `0`
-* `length` {Number} Default: `arrayBuffer.length - byteOffset`
-
-When passed a reference to the `.buffer` property of a `TypedArray` instance,
-the newly created `Buffer` will share the same allocated memory as the
-TypedArray.
-
-```js
-const arr = new Uint16Array(2);
-arr[0] = 5000;
-arr[1] = 4000;
-
-const buf = Buffer.from(arr.buffer); // shares the memory with arr;
-
-console.log(buf);
- // Prints: <Buffer 88 13 a0 0f>
-
-// changing the TypedArray changes the Buffer also
-arr[1] = 6000;
-
-console.log(buf);
- // Prints: <Buffer 88 13 70 17>
-```
-
-The optional `byteOffset` and `length` arguments specify a memory range within
-the `arrayBuffer` that will be shared by the `Buffer`.
-
-```js
-const ab = new ArrayBuffer(10);
-const buf = Buffer.from(ab, 0, 2);
-console.log(buf.length);
- // Prints: 2
-```
-
-A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`.
-
-### Class Method: Buffer.from(buffer)
-<!-- YAML
-added: v3.0.0
--->
-
-* `buffer` {Buffer}
-
-Copies the passed `buffer` data onto a new `Buffer` instance.
-
-```js
-const buf1 = Buffer.from('buffer');
-const buf2 = Buffer.from(buf1);
-
-buf1[0] = 0x61;
-console.log(buf1.toString());
- // 'auffer'
-console.log(buf2.toString());
- // 'buffer' (copy is not changed)
-```
-
-A `TypeError` will be thrown if `buffer` is not a `Buffer`.
-
-### Class Method: Buffer.from(str[, encoding])
-<!-- YAML
-added: v5.10.0
--->
-
-* `str` {String} String to encode.
-* `encoding` {String} Encoding to use, Default: `'utf8'`
-
-Creates a new `Buffer` containing the given JavaScript string `str`. If
-provided, the `encoding` parameter identifies the character encoding.
-If not provided, `encoding` defaults to `'utf8'`.
-
-```js
-const buf1 = Buffer.from('this is a tést');
-console.log(buf1.toString());
- // prints: this is a tést
-console.log(buf1.toString('ascii'));
- // prints: this is a tC)st
-
-const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex');
-console.log(buf2.toString());
- // prints: this is a tést
-```
-
-A `TypeError` will be thrown if `str` is not a string.
-
-### Class Method: Buffer.alloc(size[, fill[, encoding]])
-<!-- YAML
-added: v5.10.0
--->
-
-* `size` {Number}
-* `fill` {Value} Default: `undefined`
-* `encoding` {String} Default: `utf8`
-
-Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the
-`Buffer` will be *zero-filled*.
-
-```js
-const buf = Buffer.alloc(5);
-console.log(buf);
- // <Buffer 00 00 00 00 00>
-```
-
-The `size` must be less than or equal to the value of
-`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
-`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
-be created if a `size` less than or equal to 0 is specified.
-
-If `fill` is specified, the allocated `Buffer` will be initialized by calling
-`buf.fill(fill)`. See [`buf.fill()`][] for more information.
-
-```js
-const buf = Buffer.alloc(5, 'a');
-console.log(buf);
- // <Buffer 61 61 61 61 61>
-```
-
-If both `fill` and `encoding` are specified, the allocated `Buffer` will be
-initialized by calling `buf.fill(fill, encoding)`. For example:
-
-```js
-const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
-console.log(buf);
- // <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64>
-```
-
-Calling `Buffer.alloc(size)` can be significantly slower than the alternative
-`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance
-contents will *never contain sensitive data*.
-
-A `TypeError` will be thrown if `size` is not a number.
-
-### Class Method: Buffer.allocUnsafe(size)
-<!-- YAML
-added: v5.10.0
--->
-
-* `size` {Number}
-
-Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must
-be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit
-architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is
-thrown. A zero-length Buffer will be created if a `size` less than or equal to
-0 is specified.
-
-The underlying memory for `Buffer` instances created in this way is *not
-initialized*. The contents of the newly created `Buffer` are unknown and
-*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
-`Buffer` instances to zeroes.
-
-```js
-const buf = Buffer.allocUnsafe(5);
-console.log(buf);
- // <Buffer 78 e0 82 02 01>
- // (octets will be different, every time)
-buf.fill(0);
-console.log(buf);
- // <Buffer 00 00 00 00 00>
-```
-
-A `TypeError` will be thrown if `size` is not a number.
-
-Note that the `Buffer` module pre-allocates an internal `Buffer` instance of
-size `Buffer.poolSize` that is used as a pool for the fast allocation of new
-`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated
-`new Buffer(size)` constructor) only when `size` is less than or equal to
-`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default
-value of `Buffer.poolSize` is `8192` but can be modified.
-
-Use of this pre-allocated internal memory pool is a key difference between
-calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`.
-Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer
-pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal
-Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The
-difference is subtle but can be important when an application requires the
-additional performance that `Buffer.allocUnsafe(size)` provides.
-
-### Class Method: Buffer.allocUnsafeSlow(size)
-<!-- YAML
-added: v5.10.0
--->
-
-* `size` {Number}
-
-Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The
-`size` must be less than or equal to the value of
-`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
-`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
-be created if a `size` less than or equal to 0 is specified.
-
-The underlying memory for `Buffer` instances created in this way is *not
-initialized*. The contents of the newly created `Buffer` are unknown and
-*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
-`Buffer` instances to zeroes.
-
-When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances,
-allocations under 4KB are, by default, sliced from a single pre-allocated
-`Buffer`. This allows applications to avoid the garbage collection overhead of
-creating many individually allocated Buffers. This approach improves both
-performance and memory usage by eliminating the need to track and cleanup as
-many `Persistent` objects.
-
-However, in the case where a developer may need to retain a small chunk of
-memory from a pool for an indeterminate amount of time, it may be appropriate
-to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then
-copy out the relevant bits.
-
-```js
-// need to keep around a few small chunks of memory
-const store = [];
-
-socket.on('readable', () => {
- const data = socket.read();
- // allocate for retained data
- const sb = Buffer.allocUnsafeSlow(10);
- // copy the data into the new allocation
- data.copy(sb, 0, 0, 10);
- store.push(sb);
-});
-```
-
-Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after*
-a developer has observed undue memory retention in their applications.
-
-A `TypeError` will be thrown if `size` is not a number.
-
-### All the Rest
-
-The rest of the `Buffer` API is exactly the same as in node.js.
-[See the docs](https://nodejs.org/api/buffer.html).
-
-
-## Related links
-
-- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660)
-- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4)
-
-## Why is `Buffer` unsafe?
-
-Today, the node.js `Buffer` constructor is overloaded to handle many different argument
-types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.),
-`ArrayBuffer`, and also `Number`.
-
-The API is optimized for convenience: you can throw any type at it, and it will try to do
-what you want.
-
-Because the Buffer constructor is so powerful, you often see code like this:
-
-```js
-// Convert UTF-8 strings to hex
-function toHex (str) {
- return new Buffer(str).toString('hex')
-}
-```
-
-***But what happens if `toHex` is called with a `Number` argument?***
-
-### Remote Memory Disclosure
-
-If an attacker can make your program call the `Buffer` constructor with a `Number`
-argument, then they can make it allocate uninitialized memory from the node.js process.
-This could potentially disclose TLS private keys, user data, or database passwords.
-
-When the `Buffer` constructor is passed a `Number` argument, it returns an
-**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like
-this, you **MUST** overwrite the contents before returning it to the user.
-
-From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size):
-
-> `new Buffer(size)`
->
-> - `size` Number
->
-> The underlying memory for `Buffer` instances created in this way is not initialized.
-> **The contents of a newly created `Buffer` are unknown and could contain sensitive
-> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes.
-
-(Emphasis our own.)
-
-Whenever the programmer intended to create an uninitialized `Buffer` you often see code
-like this:
-
-```js
-var buf = new Buffer(16)
-
-// Immediately overwrite the uninitialized buffer with data from another buffer
-for (var i = 0; i < buf.length; i++) {
- buf[i] = otherBuf[i]
-}
-```
-
-
-### Would this ever be a problem in real code?
-
-Yes. It's surprisingly common to forget to check the type of your variables in a
-dynamically-typed language like JavaScript.
-
-Usually the consequences of assuming the wrong type is that your program crashes with an
-uncaught exception. But the failure mode for forgetting to check the type of arguments to
-the `Buffer` constructor is more catastrophic.
-
-Here's an example of a vulnerable service that takes a JSON payload and converts it to
-hex:
-
-```js
-// Take a JSON payload {str: "some string"} and convert it to hex
-var server = http.createServer(function (req, res) {
- var data = ''
- req.setEncoding('utf8')
- req.on('data', function (chunk) {
- data += chunk
- })
- req.on('end', function () {
- var body = JSON.parse(data)
- res.end(new Buffer(body.str).toString('hex'))
- })
-})
-
-server.listen(8080)
-```
-
-In this example, an http client just has to send:
-
-```json
-{
- "str": 1000
-}
-```
-
-and it will get back 1,000 bytes of uninitialized memory from the server.
-
-This is a very serious bug. It's similar in severity to the
-[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process
-memory by remote attackers.
-
-
-### Which real-world packages were vulnerable?
-
-#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht)
-
-[Mathias Buus](https://github.com/mafintosh) and I
-([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages,
-[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow
-anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get
-them to reveal 20 bytes at a time of uninitialized memory from the node.js process.
-
-Here's
-[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8)
-that fixed it. We released a new fixed version, created a
-[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all
-vulnerable versions on npm so users will get a warning to upgrade to a newer version.
-
-#### [`ws`](https://www.npmjs.com/package/ws)
-
-That got us wondering if there were other vulnerable packages. Sure enough, within a short
-period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the
-most popular WebSocket implementation in node.js.
-
-If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as
-expected, then uninitialized server memory would be disclosed to the remote peer.
-
-These were the vulnerable methods:
-
-```js
-socket.send(number)
-socket.ping(number)
-socket.pong(number)
-```
-
-Here's a vulnerable socket server with some echo functionality:
-
-```js
-server.on('connection', function (socket) {
- socket.on('message', function (message) {
- message = JSON.parse(message)
- if (message.type === 'echo') {
- socket.send(message.data) // send back the user's message
- }
- })
-})
-```
-
-`socket.send(number)` called on the server, will disclose server memory.
-
-Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue
-was fixed, with a more detailed explanation. Props to
-[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the
-[Node Security Project disclosure](https://nodesecurity.io/advisories/67).
-
-
-### What's the solution?
-
-It's important that node.js offers a fast way to get memory otherwise performance-critical
-applications would needlessly get a lot slower.
-
-But we need a better way to *signal our intent* as programmers. **When we want
-uninitialized memory, we should request it explicitly.**
-
-Sensitive functionality should not be packed into a developer-friendly API that loosely
-accepts many different types. This type of API encourages the lazy practice of passing
-variables in without checking the type very carefully.
-
-#### A new API: `Buffer.allocUnsafe(number)`
-
-The functionality of creating buffers with uninitialized memory should be part of another
-API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that
-frequently gets user input of all sorts of different types passed into it.
-
-```js
-var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory!
-
-// Immediately overwrite the uninitialized buffer with data from another buffer
-for (var i = 0; i < buf.length; i++) {
- buf[i] = otherBuf[i]
-}
-```
-
-
-### How do we fix node.js core?
-
-We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as
-`semver-major`) which defends against one case:
-
-```js
-var str = 16
-new Buffer(str, 'utf8')
-```
-
-In this situation, it's implied that the programmer intended the first argument to be a
-string, since they passed an encoding as a second argument. Today, node.js will allocate
-uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not
-what the programmer intended.
-
-But this is only a partial solution, since if the programmer does `new Buffer(variable)`
-(without an `encoding` parameter) there's no way to know what they intended. If `variable`
-is sometimes a number, then uninitialized memory will sometimes be returned.
-
-### What's the real long-term fix?
-
-We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when
-we need uninitialized memory. But that would break 1000s of packages.
-
-~~We believe the best solution is to:~~
-
-~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~
-
-~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~
-
-#### Update
-
-We now support adding three new APIs:
-
-- `Buffer.from(value)` - convert from any type to a buffer
-- `Buffer.alloc(size)` - create a zero-filled buffer
-- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size
-
-This solves the core problem that affected `ws` and `bittorrent-dht` which is
-`Buffer(variable)` getting tricked into taking a number argument.
-
-This way, existing code continues working and the impact on the npm ecosystem will be
-minimal. Over time, npm maintainers can migrate performance-critical code to use
-`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`.
-
-
-### Conclusion
-
-We think there's a serious design issue with the `Buffer` API as it exists today. It
-promotes insecure software by putting high-risk functionality into a convenient API
-with friendly "developer ergonomics".
-
-This wasn't merely a theoretical exercise because we found the issue in some of the
-most popular npm packages.
-
-Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of
-`buffer`.
-
-```js
-var Buffer = require('safe-buffer').Buffer
-```
-
-Eventually, we hope that node.js core can switch to this new, safer behavior. We believe
-the impact on the ecosystem would be minimal since it's not a breaking change.
-Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while
-older, insecure packages would magically become safe from this attack vector.
-
-
-## links
-
-- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514)
-- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67)
-- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68)
-
-
-## credit
-
-The original issues in `bittorrent-dht`
-([disclosure](https://nodesecurity.io/advisories/68)) and
-`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by
-[Mathias Buus](https://github.com/mafintosh) and
-[Feross Aboukhadijeh](http://feross.org/).
-
-Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues
-and for his work running the [Node Security Project](https://nodesecurity.io/).
-
-Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and
-auditing the code.
-
-
-## license
-
-MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org)
diff --git a/node_modules/string_decoder/node_modules/safe-buffer/index.d.ts b/node_modules/string_decoder/node_modules/safe-buffer/index.d.ts
deleted file mode 100644
index e9fed809a..000000000
--- a/node_modules/string_decoder/node_modules/safe-buffer/index.d.ts
+++ /dev/null
@@ -1,187 +0,0 @@
-declare module "safe-buffer" {
- export class Buffer {
- length: number
- write(string: string, offset?: number, length?: number, encoding?: string): number;
- toString(encoding?: string, start?: number, end?: number): string;
- toJSON(): { type: 'Buffer', data: any[] };
- equals(otherBuffer: Buffer): boolean;
- compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number;
- copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
- slice(start?: number, end?: number): Buffer;
- writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
- writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
- writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
- writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
- readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
- readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
- readIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
- readIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
- readUInt8(offset: number, noAssert?: boolean): number;
- readUInt16LE(offset: number, noAssert?: boolean): number;
- readUInt16BE(offset: number, noAssert?: boolean): number;
- readUInt32LE(offset: number, noAssert?: boolean): number;
- readUInt32BE(offset: number, noAssert?: boolean): number;
- readInt8(offset: number, noAssert?: boolean): number;
- readInt16LE(offset: number, noAssert?: boolean): number;
- readInt16BE(offset: number, noAssert?: boolean): number;
- readInt32LE(offset: number, noAssert?: boolean): number;
- readInt32BE(offset: number, noAssert?: boolean): number;
- readFloatLE(offset: number, noAssert?: boolean): number;
- readFloatBE(offset: number, noAssert?: boolean): number;
- readDoubleLE(offset: number, noAssert?: boolean): number;
- readDoubleBE(offset: number, noAssert?: boolean): number;
- swap16(): Buffer;
- swap32(): Buffer;
- swap64(): Buffer;
- writeUInt8(value: number, offset: number, noAssert?: boolean): number;
- writeUInt16LE(value: number, offset: number, noAssert?: boolean): number;
- writeUInt16BE(value: number, offset: number, noAssert?: boolean): number;
- writeUInt32LE(value: number, offset: number, noAssert?: boolean): number;
- writeUInt32BE(value: number, offset: number, noAssert?: boolean): number;
- writeInt8(value: number, offset: number, noAssert?: boolean): number;
- writeInt16LE(value: number, offset: number, noAssert?: boolean): number;
- writeInt16BE(value: number, offset: number, noAssert?: boolean): number;
- writeInt32LE(value: number, offset: number, noAssert?: boolean): number;
- writeInt32BE(value: number, offset: number, noAssert?: boolean): number;
- writeFloatLE(value: number, offset: number, noAssert?: boolean): number;
- writeFloatBE(value: number, offset: number, noAssert?: boolean): number;
- writeDoubleLE(value: number, offset: number, noAssert?: boolean): number;
- writeDoubleBE(value: number, offset: number, noAssert?: boolean): number;
- fill(value: any, offset?: number, end?: number): this;
- indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
- lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
- includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean;
-
- /**
- * Allocates a new buffer containing the given {str}.
- *
- * @param str String to store in buffer.
- * @param encoding encoding to use, optional. Default is 'utf8'
- */
- constructor (str: string, encoding?: string);
- /**
- * Allocates a new buffer of {size} octets.
- *
- * @param size count of octets to allocate.
- */
- constructor (size: number);
- /**
- * Allocates a new buffer containing the given {array} of octets.
- *
- * @param array The octets to store.
- */
- constructor (array: Uint8Array);
- /**
- * Produces a Buffer backed by the same allocated memory as
- * the given {ArrayBuffer}.
- *
- *
- * @param arrayBuffer The ArrayBuffer with which to share memory.
- */
- constructor (arrayBuffer: ArrayBuffer);
- /**
- * Allocates a new buffer containing the given {array} of octets.
- *
- * @param array The octets to store.
- */
- constructor (array: any[]);
- /**
- * Copies the passed {buffer} data onto a new {Buffer} instance.
- *
- * @param buffer The buffer to copy.
- */
- constructor (buffer: Buffer);
- prototype: Buffer;
- /**
- * Allocates a new Buffer using an {array} of octets.
- *
- * @param array
- */
- static from(array: any[]): Buffer;
- /**
- * When passed a reference to the .buffer property of a TypedArray instance,
- * the newly created Buffer will share the same allocated memory as the TypedArray.
- * The optional {byteOffset} and {length} arguments specify a memory range
- * within the {arrayBuffer} that will be shared by the Buffer.
- *
- * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer()
- * @param byteOffset
- * @param length
- */
- static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer;
- /**
- * Copies the passed {buffer} data onto a new Buffer instance.
- *
- * @param buffer
- */
- static from(buffer: Buffer): Buffer;
- /**
- * Creates a new Buffer containing the given JavaScript string {str}.
- * If provided, the {encoding} parameter identifies the character encoding.
- * If not provided, {encoding} defaults to 'utf8'.
- *
- * @param str
- */
- static from(str: string, encoding?: string): Buffer;
- /**
- * Returns true if {obj} is a Buffer
- *
- * @param obj object to test.
- */
- static isBuffer(obj: any): obj is Buffer;
- /**
- * Returns true if {encoding} is a valid encoding argument.
- * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
- *
- * @param encoding string to test.
- */
- static isEncoding(encoding: string): boolean;
- /**
- * Gives the actual byte length of a string. encoding defaults to 'utf8'.
- * This is not the same as String.prototype.length since that returns the number of characters in a string.
- *
- * @param string string to test.
- * @param encoding encoding used to evaluate (defaults to 'utf8')
- */
- static byteLength(string: string, encoding?: string): number;
- /**
- * Returns a buffer which is the result of concatenating all the buffers in the list together.
- *
- * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer.
- * If the list has exactly one item, then the first item of the list is returned.
- * If the list has more than one item, then a new Buffer is created.
- *
- * @param list An array of Buffer objects to concatenate
- * @param totalLength Total length of the buffers when concatenated.
- * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly.
- */
- static concat(list: Buffer[], totalLength?: number): Buffer;
- /**
- * The same as buf1.compare(buf2).
- */
- static compare(buf1: Buffer, buf2: Buffer): number;
- /**
- * Allocates a new buffer of {size} octets.
- *
- * @param size count of octets to allocate.
- * @param fill if specified, buffer will be initialized by calling buf.fill(fill).
- * If parameter is omitted, buffer will be filled with zeros.
- * @param encoding encoding used for call to buf.fill while initalizing
- */
- static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer;
- /**
- * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents
- * of the newly created Buffer are unknown and may contain sensitive data.
- *
- * @param size count of octets to allocate
- */
- static allocUnsafe(size: number): Buffer;
- /**
- * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents
- * of the newly created Buffer are unknown and may contain sensitive data.
- *
- * @param size count of octets to allocate
- */
- static allocUnsafeSlow(size: number): Buffer;
- }
-} \ No newline at end of file
diff --git a/node_modules/string_decoder/node_modules/safe-buffer/index.js b/node_modules/string_decoder/node_modules/safe-buffer/index.js
deleted file mode 100644
index 054c8d30d..000000000
--- a/node_modules/string_decoder/node_modules/safe-buffer/index.js
+++ /dev/null
@@ -1,64 +0,0 @@
-/* eslint-disable node/no-deprecated-api */
-var buffer = require('buffer')
-var Buffer = buffer.Buffer
-
-// alternative to using Object.keys for old browsers
-function copyProps (src, dst) {
- for (var key in src) {
- dst[key] = src[key]
- }
-}
-if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
- module.exports = buffer
-} else {
- // Copy properties from require('buffer')
- copyProps(buffer, exports)
- exports.Buffer = SafeBuffer
-}
-
-function SafeBuffer (arg, encodingOrOffset, length) {
- return Buffer(arg, encodingOrOffset, length)
-}
-
-SafeBuffer.prototype = Object.create(Buffer.prototype)
-
-// Copy static methods from Buffer
-copyProps(Buffer, SafeBuffer)
-
-SafeBuffer.from = function (arg, encodingOrOffset, length) {
- if (typeof arg === 'number') {
- throw new TypeError('Argument must not be a number')
- }
- return Buffer(arg, encodingOrOffset, length)
-}
-
-SafeBuffer.alloc = function (size, fill, encoding) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- var buf = Buffer(size)
- if (fill !== undefined) {
- if (typeof encoding === 'string') {
- buf.fill(fill, encoding)
- } else {
- buf.fill(fill)
- }
- } else {
- buf.fill(0)
- }
- return buf
-}
-
-SafeBuffer.allocUnsafe = function (size) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- return Buffer(size)
-}
-
-SafeBuffer.allocUnsafeSlow = function (size) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- return buffer.SlowBuffer(size)
-}
diff --git a/node_modules/string_decoder/node_modules/safe-buffer/package.json b/node_modules/string_decoder/node_modules/safe-buffer/package.json
deleted file mode 100644
index e7f13cb4b..000000000
--- a/node_modules/string_decoder/node_modules/safe-buffer/package.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "_from": "safe-buffer@~5.2.0",
- "_id": "safe-buffer@5.2.0",
- "_inBundle": false,
- "_integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==",
- "_location": "/string_decoder/safe-buffer",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "safe-buffer@~5.2.0",
- "name": "safe-buffer",
- "escapedName": "safe-buffer",
- "rawSpec": "~5.2.0",
- "saveSpec": null,
- "fetchSpec": "~5.2.0"
- },
- "_requiredBy": [
- "/string_decoder"
- ],
- "_resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz",
- "_shasum": "b74daec49b1148f88c64b68d49b1e815c1f2f519",
- "_spec": "safe-buffer@~5.2.0",
- "_where": "/Users/darcyclarke/Documents/Repos/npm/cli/node_modules/string_decoder",
- "author": {
- "name": "Feross Aboukhadijeh",
- "email": "feross@feross.org",
- "url": "http://feross.org"
- },
- "bugs": {
- "url": "https://github.com/feross/safe-buffer/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "Safer Node.js Buffer API",
- "devDependencies": {
- "standard": "*",
- "tape": "^4.0.0"
- },
- "homepage": "https://github.com/feross/safe-buffer",
- "keywords": [
- "buffer",
- "buffer allocate",
- "node security",
- "safe",
- "safe-buffer",
- "security",
- "uninitialized"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "safe-buffer",
- "repository": {
- "type": "git",
- "url": "git://github.com/feross/safe-buffer.git"
- },
- "scripts": {
- "test": "standard && tape test/*.js"
- },
- "types": "index.d.ts",
- "version": "5.2.0"
-}
diff --git a/node_modules/walk-up-path/LICENSE b/node_modules/walk-up-path/LICENSE
new file mode 100644
index 000000000..05eeeb88c
--- /dev/null
+++ b/node_modules/walk-up-path/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/walk-up-path/README.md b/node_modules/walk-up-path/README.md
new file mode 100644
index 000000000..6729745f8
--- /dev/null
+++ b/node_modules/walk-up-path/README.md
@@ -0,0 +1,46 @@
+# walk-up-path
+
+Given a path string, return a generator that walks up the path, emitting
+each dirname.
+
+So, to get a platform-portable walk up, instead of doing something like
+this:
+
+```js
+for (let p = dirname(path); p;) {
+
+ // ... do stuff ...
+
+ const pp = dirname(p)
+ if (p === pp)
+ p = null
+ else
+ p = pp
+}
+```
+
+Or this:
+
+```js
+for (let p = dirname(path); !isRoot(p); p = dirname(p)) {
+ // ... do stuff ...
+}
+```
+
+You can do this:
+
+```js
+const walkUpPath = require('walk-up-path')
+for (const p of walkUpPath(path)) {
+ // ... do stuff ..
+}
+```
+
+## API
+
+```js
+const walkUpPath = require('walk-up-path')
+```
+
+Give the fn a string, it'll yield all the directories walking up to the
+root.
diff --git a/node_modules/walk-up-path/index.js b/node_modules/walk-up-path/index.js
new file mode 100644
index 000000000..05524a6c0
--- /dev/null
+++ b/node_modules/walk-up-path/index.js
@@ -0,0 +1,11 @@
+const {dirname, resolve} = require('path')
+module.exports = function* (path) {
+ for (path = resolve(path); path;) {
+ yield path
+ const pp = dirname(path)
+ if (pp === path)
+ path = null
+ else
+ path = pp
+ }
+}
diff --git a/node_modules/walk-up-path/package.json b/node_modules/walk-up-path/package.json
new file mode 100644
index 000000000..0c1da3341
--- /dev/null
+++ b/node_modules/walk-up-path/package.json
@@ -0,0 +1,61 @@
+{
+ "_from": "walk-up-path@^1.0.0",
+ "_id": "walk-up-path@1.0.0",
+ "_inBundle": false,
+ "_integrity": "sha512-hwj/qMDUEjCU5h0xr90KGCf0tg0/LgJbmOWgrWKYlcJZM7XvquvUJZ0G/HMGr7F7OQMOUuPHWP9JpriinkAlkg==",
+ "_location": "/walk-up-path",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "walk-up-path@^1.0.0",
+ "name": "walk-up-path",
+ "escapedName": "walk-up-path",
+ "rawSpec": "^1.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.0"
+ },
+ "_requiredBy": [
+ "/@npmcli/arborist"
+ ],
+ "_resolved": "https://registry.npmjs.org/walk-up-path/-/walk-up-path-1.0.0.tgz",
+ "_shasum": "d4745e893dd5fd0dbb58dd0a4c6a33d9c9fec53e",
+ "_spec": "walk-up-path@^1.0.0",
+ "_where": "/Users/isaacs/dev/npm/cli/node_modules/@npmcli/arborist",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "https://izs.me"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/walk-up-path/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "Given a path string, return a generator that walks up the path, emitting each dirname.",
+ "devDependencies": {
+ "require-inject": "^1.4.4",
+ "tap": "^14.10.7"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/isaacs/walk-up-path#readme",
+ "license": "ISC",
+ "name": "walk-up-path",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/walk-up-path.git"
+ },
+ "scripts": {
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags",
+ "preversion": "npm test",
+ "snap": "tap",
+ "test": "tap"
+ },
+ "tap": {
+ "check-coverage": true
+ },
+ "version": "1.0.0"
+}