github.com/npm/cli.git

author     claudiahdz <cghr1990@gmail.com>  2020-03-28 02:24:27 +0300
committer  isaacs <i@izs.me>                2020-05-08 04:12:57 +0300
commit     a29031f9eafd001b73abfc63b8ec3341a1e12f28 (patch)
tree       9726ebaf83bfb38bcb7e7148bb180cd4faaf538b
parent     f39e6d3dcdd58cfbfb14c66be070d0bbb77056ec (diff)
libnpmpublish@3.0.1
-rw-r--r--  node_modules/libnpmpack/CHANGELOG.md  17
-rw-r--r--  node_modules/libnpmpack/LICENSE  13
-rw-r--r--  node_modules/libnpmpack/README.md  56
-rw-r--r--  node_modules/libnpmpack/index.js  48
l---------  node_modules/libnpmpack/node_modules/.bin/mkdirp  1
l---------  node_modules/libnpmpack/node_modules/.bin/pacote  1
l---------  node_modules/libnpmpack/node_modules/.bin/rimraf  1
-rw-r--r--  node_modules/libnpmpack/node_modules/chownr/LICENSE (renamed from node_modules/libnpmpublish/node_modules/semver/LICENSE)  0
-rw-r--r--  node_modules/libnpmpack/node_modules/chownr/README.md  3
-rw-r--r--  node_modules/libnpmpack/node_modules/chownr/chownr.js  167
-rw-r--r--  node_modules/libnpmpack/node_modules/chownr/package.json  62
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/CHANGELOG.md  15
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/LICENSE (renamed from node_modules/libnpmpublish/node_modules/safe-buffer/LICENSE)  4
-rwxr-xr-x  node_modules/libnpmpack/node_modules/mkdirp/bin/cmd.js  68
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/index.js  31
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/lib/find-made.js  29
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-manual.js  64
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-native.js  39
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/lib/opts-arg.js  23
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/lib/path-arg.js  29
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/lib/use-native.js  10
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/package.json  75
-rw-r--r--  node_modules/libnpmpack/node_modules/mkdirp/readme.markdown  266
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/LICENSE  15
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/README.md  244
-rwxr-xr-x  node_modules/libnpmpack/node_modules/pacote/lib/bin.js  149
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/dir.js  98
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/fetcher.js  470
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/file.js  93
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/git.js  272
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/index.js  12
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/registry.js  159
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/remote.js  72
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/util/cache-dir.js  12
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/util/is-package-bin.js  24
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/util/npm.js  9
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/lib/util/proc-log.js  21
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/CHANGELOG.md (renamed from node_modules/libnpmpublish/node_modules/npm-package-arg/CHANGELOG.md)  28
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/LICENSE (renamed from node_modules/libnpmpublish/node_modules/npm-package-arg/LICENSE)  2
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/README.md (renamed from node_modules/libnpmpublish/node_modules/npm-package-arg/README.md)  0
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/npa.js (renamed from node_modules/libnpmpublish/node_modules/npm-package-arg/npa.js)  10
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/package.json  69
-rw-r--r--  node_modules/libnpmpack/node_modules/pacote/package.json  107
-rw-r--r--  node_modules/libnpmpack/node_modules/rimraf/LICENSE  15
-rw-r--r--  node_modules/libnpmpack/node_modules/rimraf/README.md  101
-rwxr-xr-x  node_modules/libnpmpack/node_modules/rimraf/bin.js  50
-rw-r--r--  node_modules/libnpmpack/node_modules/rimraf/package.json  67
-rw-r--r--  node_modules/libnpmpack/node_modules/rimraf/rimraf.js  372
-rw-r--r--  node_modules/libnpmpack/package.json  100
-rw-r--r--  node_modules/libnpmpublish/.travis.yml  7
-rw-r--r--  node_modules/libnpmpublish/CHANGELOG.md  36
-rw-r--r--  node_modules/libnpmpublish/CODE_OF_CONDUCT.md  151
-rw-r--r--  node_modules/libnpmpublish/CONTRIBUTING.md  256
-rw-r--r--  node_modules/libnpmpublish/PULL_REQUEST_TEMPLATE  7
-rw-r--r--  node_modules/libnpmpublish/README.md  63
-rw-r--r--  node_modules/libnpmpublish/appveyor.yml  22
l---------  node_modules/libnpmpublish/node_modules/.bin/semver  1
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/CHANGELOG.md  115
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE  13
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/README.md  133
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host-info.js  79
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host.js  156
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/index.js  125
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json  69
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-package-arg/package.json  74
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-registry-fetch/CHANGELOG.md  250
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-registry-fetch/LICENSE.md  16
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-registry-fetch/README.md  636
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-registry-fetch/auth.js  57
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-registry-fetch/check-response.js  109
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-registry-fetch/config.js  98
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-registry-fetch/errors.js  79
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-registry-fetch/index.js  203
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-registry-fetch/package.json  96
-rw-r--r--  node_modules/libnpmpublish/node_modules/npm-registry-fetch/silentlog.js  14
-rw-r--r--  node_modules/libnpmpublish/node_modules/safe-buffer/README.md  586
-rw-r--r--  node_modules/libnpmpublish/node_modules/safe-buffer/index.d.ts  187
-rw-r--r--  node_modules/libnpmpublish/node_modules/safe-buffer/index.js  64
-rw-r--r--  node_modules/libnpmpublish/node_modules/safe-buffer/package.json  62
-rw-r--r--  node_modules/libnpmpublish/node_modules/semver/CHANGELOG.md  39
-rw-r--r--  node_modules/libnpmpublish/node_modules/semver/README.md  412
-rwxr-xr-x  node_modules/libnpmpublish/node_modules/semver/bin/semver  160
-rw-r--r--  node_modules/libnpmpublish/node_modules/semver/package.json  61
-rw-r--r--  node_modules/libnpmpublish/node_modules/semver/range.bnf  16
-rw-r--r--  node_modules/libnpmpublish/node_modules/semver/semver.js  1483
-rw-r--r--  node_modules/libnpmpublish/node_modules/ssri/CHANGELOG.md  286
-rw-r--r--  node_modules/libnpmpublish/node_modules/ssri/LICENSE.md  16
-rw-r--r--  node_modules/libnpmpublish/node_modules/ssri/README.md  488
-rw-r--r--  node_modules/libnpmpublish/node_modules/ssri/index.js  395
-rw-r--r--  node_modules/libnpmpublish/node_modules/ssri/package.json  89
-rw-r--r--  node_modules/libnpmpublish/package.json  98
-rw-r--r--  node_modules/libnpmpublish/publish.js  249
-rw-r--r--  node_modules/libnpmpublish/test/publish.js  1048
-rw-r--r--  node_modules/libnpmpublish/test/unpublish.js  249
-rw-r--r--  node_modules/libnpmpublish/test/util/mock-tarball.js  47
-rw-r--r--  node_modules/libnpmpublish/test/util/tnock.js  12
-rw-r--r--  node_modules/libnpmpublish/unpublish.js  158
-rw-r--r--  package-lock.json  126
-rw-r--r--  package.json  2
99 files changed, 3968 insertions, 8823 deletions
diff --git a/node_modules/libnpmpack/CHANGELOG.md b/node_modules/libnpmpack/CHANGELOG.md
new file mode 100644
index 000000000..2c078c63b
--- /dev/null
+++ b/node_modules/libnpmpack/CHANGELOG.md
@@ -0,0 +1,17 @@
+# Change Log
+
+<a name="2.0.0"></a>
+# [2.0.0](https://github.com/npm/libnpmpublish/compare/v1.0.0...v2.0.0) (2020-03-27)
+
+### Breaking Changes
+
+* [`cb2ecf2`](https://github.com/npm/libnpmpack/commit/cb2ecf2) feat: resolve to tarball data Buffer ([@claudiahdz](https://github.com/claudiahdz))
+
+<a name="1.0.0"></a>
+# 1.0.0 (2020-03-26)
+
+### Features
+
+* [`a35c590`](https://github.com/npm/libnpmpack/commit/a35c590) feat: pack tarballs from local dir or registry spec ([@claudiahdz](https://github.com/claudiahdz))
+
+* [`6d72149`](https://github.com/npm/libnpmpack/commit/6d72149) feat: sorted tarball contents ([@eridal](https://github.com/eridal))
diff --git a/node_modules/libnpmpack/LICENSE b/node_modules/libnpmpack/LICENSE
new file mode 100644
index 000000000..209e4477f
--- /dev/null
+++ b/node_modules/libnpmpack/LICENSE
@@ -0,0 +1,13 @@
+Copyright npm, Inc
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libnpmpack/README.md b/node_modules/libnpmpack/README.md
new file mode 100644
index 000000000..74b4934b0
--- /dev/null
+++ b/node_modules/libnpmpack/README.md
@@ -0,0 +1,56 @@
+# libnpmpack
+
+[![npm version](https://img.shields.io/npm/v/libnpmpack.svg)](https://npm.im/libnpmpack)
+[![license](https://img.shields.io/npm/l/libnpmpack.svg)](https://npm.im/libnpmpack)
+[![GitHub Actions](https://github.com/npm/libnpmpack/workflows/Node%20CI/badge.svg)](https://github.com/npm/libnpmpack/actions?query=workflow%3A%22Node+CI%22)
+[![Coverage Status](https://coveralls.io/repos/github/npm/libnpmpack/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmpack?branch=latest)
+
+[`libnpmpack`](https://github.com/npm/libnpmpack) is a Node.js library for
+programmatically packing tarballs from a local directory or from a registry or github spec. If packing from a local source, `libnpmpack` will also run the `prepack` and `postpack` lifecycles.
+
+## Table of Contents
+
+* [Example](#example)
+* [Install](#install)
+* [API](#api)
+ * [`pack()`](#pack)
+
+## Example
+
+```js
+const pack = require('libnpmpack')
+```
+
+## Install
+
+`$ npm install libnpmpack`
+
+### API
+
+#### <a name="pack"></a> `> pack(spec, [opts]) -> Promise`
+
+Packs a tarball from a local directory or from a registry or github spec and returns a Promise that resolves to the tarball data Buffer, with from, resolved, and integrity fields attached.
+
+If no options are passed, the tarball file will be saved in the directory from which `pack` was called.
+
+`libnpmpack` uses [`pacote`](https://npm.im/pacote).
+Most options are passed through directly to that library, so please refer to
+[its own `opts`
+documentation](https://www.npmjs.com/package/pacote#options)
+for options that can be passed in.
+
+##### Examples
+
+```javascript
+// packs from cwd
+const tarball = await pack()
+
+// packs from a local directory
+const localTar = await pack('/Users/claudiahdz/projects/my-cool-pkg')
+
+// packs from a registry spec
+const registryTar = await pack('abbrev@1.0.3')
+
+// packs from a github spec
+const githubTar = await pack('isaacs/rimraf#PR-192')
+```
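The README above says `pack()` resolves to the tarball data Buffer with `from`, `resolved`, and `integrity` attached. A minimal sketch of reading those fields, reusing the `abbrev@1.0.3` registry spec from the example; the logged values are illustrative only:

```js
const pack = require('libnpmpack')

async function main () {
  // pack from a registry spec, as in the README example above
  const tarball = await pack('abbrev@1.0.3')

  // the resolved value is a Buffer of tarball bytes...
  console.log(tarball.length)
  // ...with extra fields describing where it came from
  console.log(tarball.from)       // normalized spec, e.g. 'abbrev@1.0.3'
  console.log(tarball.resolved)   // tarball URL or file path it resolved to
  console.log(tarball.integrity)  // SRI integrity string
}

main().catch(er => {
  console.error(er)
  process.exit(1)
})
```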
diff --git a/node_modules/libnpmpack/index.js b/node_modules/libnpmpack/index.js
new file mode 100644
index 000000000..a756ebca0
--- /dev/null
+++ b/node_modules/libnpmpack/index.js
@@ -0,0 +1,48 @@
+'use strict'
+
+const pacote = require('pacote')
+const npa = require('npm-package-arg')
+const runScript = require('@npmcli/run-script')
+
+module.exports = pack
+async function pack (spec = 'file:.', opts = {}) {
+ // gets spec
+ spec = npa(spec)
+
+ const manifest = await pacote.manifest(spec, opts)
+
+ if (spec.type === 'directory') {
+ // prepack
+ await runScript({
+ ...opts,
+ event: 'prepack',
+ path: spec.fetchSpec,
+ stdio: 'inherit',
+ pkg: manifest
+ })
+ }
+
+ // packs tarball
+ const tarball = await pacote.tarball(manifest._resolved, {
+ ...opts,
+ integrity: manifest._integrity
+ })
+
+ if (spec.type === 'directory') {
+ // postpack
+ await runScript({
+ ...opts,
+ event: 'postpack',
+ path: spec.fetchSpec,
+ stdio: 'inherit',
+ pkg: manifest,
+ env: {
+ npm_package_from: tarball.from,
+ npm_package_resolved: tarball.resolved,
+ npm_package_integrity: tarball.integrity
+ }
+ })
+ }
+
+ return tarball
+}
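When the spec is a local directory, the `index.js` above runs the `postpack` lifecycle with `npm_package_from`, `npm_package_resolved`, and `npm_package_integrity` in its environment. A hedged sketch of a `postpack` hook that reads them; the script path and the `scripts` wiring are hypothetical and not part of this commit:

```js
// scripts/postpack.js -- assumed to be wired up in package.json as
//   "scripts": { "postpack": "node scripts/postpack.js" }
// libnpmpack sets these env vars only when packing a directory spec.
console.log('packed from:', process.env.npm_package_from)
console.log('resolved to:', process.env.npm_package_resolved)
console.log('integrity:  ', process.env.npm_package_integrity)
```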
diff --git a/node_modules/libnpmpack/node_modules/.bin/mkdirp b/node_modules/libnpmpack/node_modules/.bin/mkdirp
new file mode 120000
index 000000000..017896ceb
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/.bin/mkdirp
@@ -0,0 +1 @@
+../mkdirp/bin/cmd.js
\ No newline at end of file
diff --git a/node_modules/libnpmpack/node_modules/.bin/pacote b/node_modules/libnpmpack/node_modules/.bin/pacote
new file mode 120000
index 000000000..e59583143
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/.bin/pacote
@@ -0,0 +1 @@
+../pacote/lib/bin.js
\ No newline at end of file
diff --git a/node_modules/libnpmpack/node_modules/.bin/rimraf b/node_modules/libnpmpack/node_modules/.bin/rimraf
new file mode 120000
index 000000000..4cd49a49d
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/.bin/rimraf
@@ -0,0 +1 @@
+../rimraf/bin.js
\ No newline at end of file
diff --git a/node_modules/libnpmpublish/node_modules/semver/LICENSE b/node_modules/libnpmpack/node_modules/chownr/LICENSE
index 19129e315..19129e315 100644
--- a/node_modules/libnpmpublish/node_modules/semver/LICENSE
+++ b/node_modules/libnpmpack/node_modules/chownr/LICENSE
diff --git a/node_modules/libnpmpack/node_modules/chownr/README.md b/node_modules/libnpmpack/node_modules/chownr/README.md
new file mode 100644
index 000000000..70e9a54a3
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/chownr/README.md
@@ -0,0 +1,3 @@
+Like `chown -R`.
+
+Takes the same arguments as `fs.chown()`
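The vendored chownr README stops at those two lines; a minimal usage sketch based on the `chownr.js` added just below (the path and uid/gid values are placeholders):

```js
const chownr = require('chownr')

// recursively chown everything under ./some-dir to uid 501, gid 20,
// roughly `chown -R 501:20 ./some-dir`
chownr('./some-dir', 501, 20, er => {
  if (er) throw er
  console.log('ownership updated recursively')
})

// synchronous variant exported as chownr.sync
chownr.sync('./some-dir', 501, 20)
```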
diff --git a/node_modules/libnpmpack/node_modules/chownr/chownr.js b/node_modules/libnpmpack/node_modules/chownr/chownr.js
new file mode 100644
index 000000000..0d4093216
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/chownr/chownr.js
@@ -0,0 +1,167 @@
+'use strict'
+const fs = require('fs')
+const path = require('path')
+
+/* istanbul ignore next */
+const LCHOWN = fs.lchown ? 'lchown' : 'chown'
+/* istanbul ignore next */
+const LCHOWNSYNC = fs.lchownSync ? 'lchownSync' : 'chownSync'
+
+/* istanbul ignore next */
+const needEISDIRHandled = fs.lchown &&
+ !process.version.match(/v1[1-9]+\./) &&
+ !process.version.match(/v10\.[6-9]/)
+
+const lchownSync = (path, uid, gid) => {
+ try {
+ return fs[LCHOWNSYNC](path, uid, gid)
+ } catch (er) {
+ if (er.code !== 'ENOENT')
+ throw er
+ }
+}
+
+/* istanbul ignore next */
+const chownSync = (path, uid, gid) => {
+ try {
+ return fs.chownSync(path, uid, gid)
+ } catch (er) {
+ if (er.code !== 'ENOENT')
+ throw er
+ }
+}
+
+/* istanbul ignore next */
+const handleEISDIR =
+ needEISDIRHandled ? (path, uid, gid, cb) => er => {
+ // Node prior to v10 had a very questionable implementation of
+ // fs.lchown, which would always try to call fs.open on a directory
+ // Fall back to fs.chown in those cases.
+ if (!er || er.code !== 'EISDIR')
+ cb(er)
+ else
+ fs.chown(path, uid, gid, cb)
+ }
+ : (_, __, ___, cb) => cb
+
+/* istanbul ignore next */
+const handleEISDirSync =
+ needEISDIRHandled ? (path, uid, gid) => {
+ try {
+ return lchownSync(path, uid, gid)
+ } catch (er) {
+ if (er.code !== 'EISDIR')
+ throw er
+ chownSync(path, uid, gid)
+ }
+ }
+ : (path, uid, gid) => lchownSync(path, uid, gid)
+
+// fs.readdir could only accept an options object as of node v6
+const nodeVersion = process.version
+let readdir = (path, options, cb) => fs.readdir(path, options, cb)
+let readdirSync = (path, options) => fs.readdirSync(path, options)
+/* istanbul ignore next */
+if (/^v4\./.test(nodeVersion))
+ readdir = (path, options, cb) => fs.readdir(path, cb)
+
+const chown = (cpath, uid, gid, cb) => {
+ fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => {
+ // Skip ENOENT error
+ cb(er && er.code !== 'ENOENT' ? er : null)
+ }))
+}
+
+const chownrKid = (p, child, uid, gid, cb) => {
+ if (typeof child === 'string')
+ return fs.lstat(path.resolve(p, child), (er, stats) => {
+ // Skip ENOENT error
+ if (er)
+ return cb(er.code !== 'ENOENT' ? er : null)
+ stats.name = child
+ chownrKid(p, stats, uid, gid, cb)
+ })
+
+ if (child.isDirectory()) {
+ chownr(path.resolve(p, child.name), uid, gid, er => {
+ if (er)
+ return cb(er)
+ const cpath = path.resolve(p, child.name)
+ chown(cpath, uid, gid, cb)
+ })
+ } else {
+ const cpath = path.resolve(p, child.name)
+ chown(cpath, uid, gid, cb)
+ }
+}
+
+
+const chownr = (p, uid, gid, cb) => {
+ readdir(p, { withFileTypes: true }, (er, children) => {
+ // any error other than ENOTDIR or ENOTSUP means it's not readable,
+ // or doesn't exist. give up.
+ if (er) {
+ if (er.code === 'ENOENT')
+ return cb()
+ else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
+ return cb(er)
+ }
+ if (er || !children.length)
+ return chown(p, uid, gid, cb)
+
+ let len = children.length
+ let errState = null
+ const then = er => {
+ if (errState)
+ return
+ if (er)
+ return cb(errState = er)
+ if (-- len === 0)
+ return chown(p, uid, gid, cb)
+ }
+
+ children.forEach(child => chownrKid(p, child, uid, gid, then))
+ })
+}
+
+const chownrKidSync = (p, child, uid, gid) => {
+ if (typeof child === 'string') {
+ try {
+ const stats = fs.lstatSync(path.resolve(p, child))
+ stats.name = child
+ child = stats
+ } catch (er) {
+ if (er.code === 'ENOENT')
+ return
+ else
+ throw er
+ }
+ }
+
+ if (child.isDirectory())
+ chownrSync(path.resolve(p, child.name), uid, gid)
+
+ handleEISDirSync(path.resolve(p, child.name), uid, gid)
+}
+
+const chownrSync = (p, uid, gid) => {
+ let children
+ try {
+ children = readdirSync(p, { withFileTypes: true })
+ } catch (er) {
+ if (er.code === 'ENOENT')
+ return
+ else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP')
+ return handleEISDirSync(p, uid, gid)
+ else
+ throw er
+ }
+
+ if (children && children.length)
+ children.forEach(child => chownrKidSync(p, child, uid, gid))
+
+ return handleEISDirSync(p, uid, gid)
+}
+
+module.exports = chownr
+chownr.sync = chownrSync
diff --git a/node_modules/libnpmpack/node_modules/chownr/package.json b/node_modules/libnpmpack/node_modules/chownr/package.json
new file mode 100644
index 000000000..f6ebbde8f
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/chownr/package.json
@@ -0,0 +1,62 @@
+{
+ "_from": "chownr@^1.1.4",
+ "_id": "chownr@1.1.4",
+ "_inBundle": false,
+ "_integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==",
+ "_location": "/libnpmpack/chownr",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "chownr@^1.1.4",
+ "name": "chownr",
+ "escapedName": "chownr",
+ "rawSpec": "^1.1.4",
+ "saveSpec": null,
+ "fetchSpec": "^1.1.4"
+ },
+ "_requiredBy": [
+ "/libnpmpack/pacote"
+ ],
+ "_resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
+ "_shasum": "6fc9d7b42d32a583596337666e7d08084da2cc6b",
+ "_spec": "chownr@^1.1.4",
+ "_where": "/Users/claudiahdz/npm/cli/node_modules/libnpmpack/node_modules/pacote",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/chownr/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "like `chown -R`",
+ "devDependencies": {
+ "mkdirp": "0.3",
+ "rimraf": "^2.7.1",
+ "tap": "^14.10.6"
+ },
+ "files": [
+ "chownr.js"
+ ],
+ "homepage": "https://github.com/isaacs/chownr#readme",
+ "license": "ISC",
+ "main": "chownr.js",
+ "name": "chownr",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/chownr.git"
+ },
+ "scripts": {
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags",
+ "preversion": "npm test",
+ "test": "tap"
+ },
+ "tap": {
+ "check-coverage": true
+ },
+ "version": "1.1.4"
+}
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/CHANGELOG.md b/node_modules/libnpmpack/node_modules/mkdirp/CHANGELOG.md
new file mode 100644
index 000000000..81458380b
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/mkdirp/CHANGELOG.md
@@ -0,0 +1,15 @@
+# Changers Lorgs!
+
+## 1.0
+
+Full rewrite. Essentially a brand new module.
+
+- Return a promise instead of taking a callback.
+- Use native `fs.mkdir(path, { recursive: true })` when available.
+- Drop support for outdated Node.js versions. (Technically still works on
+ Node.js v8, but only 10 and above are officially supported.)
+
+## 0.x
+
+Original and most widely used recursive directory creation implementation
+in JavaScript, dating back to 2010.
diff --git a/node_modules/libnpmpublish/node_modules/safe-buffer/LICENSE b/node_modules/libnpmpack/node_modules/mkdirp/LICENSE
index 0c068ceec..13fcd15f0 100644
--- a/node_modules/libnpmpublish/node_modules/safe-buffer/LICENSE
+++ b/node_modules/libnpmpack/node_modules/mkdirp/LICENSE
@@ -1,6 +1,6 @@
-The MIT License (MIT)
+Copyright James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
 
-Copyright (c) Feross Aboukhadijeh
+This project is free software released under the MIT license:
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/bin/cmd.js b/node_modules/libnpmpack/node_modules/mkdirp/bin/cmd.js
new file mode 100755
index 000000000..6e0aa8dc4
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/mkdirp/bin/cmd.js
@@ -0,0 +1,68 @@
+#!/usr/bin/env node
+
+const usage = () => `
+usage: mkdirp [DIR1,DIR2..] {OPTIONS}
+
+ Create each supplied directory including any necessary parent directories
+ that don't yet exist.
+
+ If the directory already exists, do nothing.
+
+OPTIONS are:
+
+ -m<mode> If a directory needs to be created, set the mode as an octal
+ --mode=<mode> permission string.
+
+ -v --version Print the mkdirp version number
+
+ -h --help Print this helpful banner
+
+ -p --print Print the first directories created for each path provided
+
+ --manual Use manual implementation, even if native is available
+`
+
+const dirs = []
+const opts = {}
+let print = false
+let dashdash = false
+let manual = false
+for (const arg of process.argv.slice(2)) {
+ if (dashdash)
+ dirs.push(arg)
+ else if (arg === '--')
+ dashdash = true
+ else if (arg === '--manual')
+ manual = true
+ else if (/^-h/.test(arg) || /^--help/.test(arg)) {
+ console.log(usage())
+ process.exit(0)
+ } else if (arg === '-v' || arg === '--version') {
+ console.log(require('../package.json').version)
+ process.exit(0)
+ } else if (arg === '-p' || arg === '--print') {
+ print = true
+ } else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
+ const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8)
+ if (isNaN(mode)) {
+ console.error(`invalid mode argument: ${arg}\nMust be an octal number.`)
+ process.exit(1)
+ }
+ opts.mode = mode
+ } else
+ dirs.push(arg)
+}
+
+const mkdirp = require('../')
+const impl = manual ? mkdirp.manual : mkdirp
+if (dirs.length === 0)
+ console.error(usage())
+
+Promise.all(dirs.map(dir => impl(dir, opts)))
+ .then(made => print ? made.forEach(m => m && console.log(m)) : null)
+ .catch(er => {
+ console.error(er.message)
+ if (er.code)
+ console.error(' code: ' + er.code)
+ process.exit(1)
+ })
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/index.js b/node_modules/libnpmpack/node_modules/mkdirp/index.js
new file mode 100644
index 000000000..ad7a16c9f
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/mkdirp/index.js
@@ -0,0 +1,31 @@
+const optsArg = require('./lib/opts-arg.js')
+const pathArg = require('./lib/path-arg.js')
+
+const {mkdirpNative, mkdirpNativeSync} = require('./lib/mkdirp-native.js')
+const {mkdirpManual, mkdirpManualSync} = require('./lib/mkdirp-manual.js')
+const {useNative, useNativeSync} = require('./lib/use-native.js')
+
+
+const mkdirp = (path, opts) => {
+ path = pathArg(path)
+ opts = optsArg(opts)
+ return useNative(opts)
+ ? mkdirpNative(path, opts)
+ : mkdirpManual(path, opts)
+}
+
+const mkdirpSync = (path, opts) => {
+ path = pathArg(path)
+ opts = optsArg(opts)
+ return useNativeSync(opts)
+ ? mkdirpNativeSync(path, opts)
+ : mkdirpManualSync(path, opts)
+}
+
+mkdirp.sync = mkdirpSync
+mkdirp.native = (path, opts) => mkdirpNative(pathArg(path), optsArg(opts))
+mkdirp.manual = (path, opts) => mkdirpManual(pathArg(path), optsArg(opts))
+mkdirp.nativeSync = (path, opts) => mkdirpNativeSync(pathArg(path), optsArg(opts))
+mkdirp.manualSync = (path, opts) => mkdirpManualSync(pathArg(path), optsArg(opts))
+
+module.exports = mkdirp
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/lib/find-made.js b/node_modules/libnpmpack/node_modules/mkdirp/lib/find-made.js
new file mode 100644
index 000000000..022e492c0
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/mkdirp/lib/find-made.js
@@ -0,0 +1,29 @@
+const {dirname} = require('path')
+
+const findMade = (opts, parent, path = undefined) => {
+ // we never want the 'made' return value to be a root directory
+ if (path === parent)
+ return Promise.resolve()
+
+ return opts.statAsync(parent).then(
+ st => st.isDirectory() ? path : undefined, // will fail later
+ er => er.code === 'ENOENT'
+ ? findMade(opts, dirname(parent), parent)
+ : undefined
+ )
+}
+
+const findMadeSync = (opts, parent, path = undefined) => {
+ if (path === parent)
+ return undefined
+
+ try {
+ return opts.statSync(parent).isDirectory() ? path : undefined
+ } catch (er) {
+ return er.code === 'ENOENT'
+ ? findMadeSync(opts, dirname(parent), parent)
+ : undefined
+ }
+}
+
+module.exports = {findMade, findMadeSync}
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-manual.js b/node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-manual.js
new file mode 100644
index 000000000..2eb18cd64
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-manual.js
@@ -0,0 +1,64 @@
+const {dirname} = require('path')
+
+const mkdirpManual = (path, opts, made) => {
+ opts.recursive = false
+ const parent = dirname(path)
+ if (parent === path) {
+ return opts.mkdirAsync(path, opts).catch(er => {
+ // swallowed by recursive implementation on posix systems
+ // any other error is a failure
+ if (er.code !== 'EISDIR')
+ throw er
+ })
+ }
+
+ return opts.mkdirAsync(path, opts).then(() => made || path, er => {
+ if (er.code === 'ENOENT')
+ return mkdirpManual(parent, opts)
+ .then(made => mkdirpManual(path, opts, made))
+ if (er.code !== 'EEXIST' && er.code !== 'EROFS')
+ throw er
+ return opts.statAsync(path).then(st => {
+ if (st.isDirectory())
+ return made
+ else
+ throw er
+ }, () => { throw er })
+ })
+}
+
+const mkdirpManualSync = (path, opts, made) => {
+ const parent = dirname(path)
+ opts.recursive = false
+
+ if (parent === path) {
+ try {
+ return opts.mkdirSync(path, opts)
+ } catch (er) {
+ // swallowed by recursive implementation on posix systems
+ // any other error is a failure
+ if (er.code !== 'EISDIR')
+ throw er
+ else
+ return
+ }
+ }
+
+ try {
+ opts.mkdirSync(path, opts)
+ return made || path
+ } catch (er) {
+ if (er.code === 'ENOENT')
+ return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made))
+ if (er.code !== 'EEXIST' && er.code !== 'EROFS')
+ throw er
+ try {
+ if (!opts.statSync(path).isDirectory())
+ throw er
+ } catch (_) {
+ throw er
+ }
+ }
+}
+
+module.exports = {mkdirpManual, mkdirpManualSync}
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-native.js b/node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-native.js
new file mode 100644
index 000000000..c7a6b6980
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/mkdirp/lib/mkdirp-native.js
@@ -0,0 +1,39 @@
+const {dirname} = require('path')
+const {findMade, findMadeSync} = require('./find-made.js')
+const {mkdirpManual, mkdirpManualSync} = require('./mkdirp-manual.js')
+
+const mkdirpNative = (path, opts) => {
+ opts.recursive = true
+ const parent = dirname(path)
+ if (parent === path)
+ return opts.mkdirAsync(path, opts)
+
+ return findMade(opts, path).then(made =>
+ opts.mkdirAsync(path, opts).then(() => made)
+ .catch(er => {
+ if (er.code === 'ENOENT')
+ return mkdirpManual(path, opts)
+ else
+ throw er
+ }))
+}
+
+const mkdirpNativeSync = (path, opts) => {
+ opts.recursive = true
+ const parent = dirname(path)
+ if (parent === path)
+ return opts.mkdirSync(path, opts)
+
+ const made = findMadeSync(opts, path)
+ try {
+ opts.mkdirSync(path, opts)
+ return made
+ } catch (er) {
+ if (er.code === 'ENOENT')
+ return mkdirpManualSync(path, opts)
+ else
+ throw er
+ }
+}
+
+module.exports = {mkdirpNative, mkdirpNativeSync}
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/lib/opts-arg.js b/node_modules/libnpmpack/node_modules/mkdirp/lib/opts-arg.js
new file mode 100644
index 000000000..488bd44c3
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/mkdirp/lib/opts-arg.js
@@ -0,0 +1,23 @@
+const { promisify } = require('util')
+const fs = require('fs')
+const optsArg = opts => {
+ if (!opts)
+ opts = { mode: 0o777 & (~process.umask()), fs }
+ else if (typeof opts === 'object')
+ opts = { mode: 0o777 & (~process.umask()), fs, ...opts }
+ else if (typeof opts === 'number')
+ opts = { mode: opts, fs }
+ else if (typeof opts === 'string')
+ opts = { mode: parseInt(opts, 8), fs }
+ else
+ throw new TypeError('invalid options argument')
+
+ opts.mkdir = opts.mkdir || opts.fs.mkdir || fs.mkdir
+ opts.mkdirAsync = promisify(opts.mkdir)
+ opts.stat = opts.stat || opts.fs.stat || fs.stat
+ opts.statAsync = promisify(opts.stat)
+ opts.statSync = opts.statSync || opts.fs.statSync || fs.statSync
+ opts.mkdirSync = opts.mkdirSync || opts.fs.mkdirSync || fs.mkdirSync
+ return opts
+}
+module.exports = optsArg
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/lib/path-arg.js b/node_modules/libnpmpack/node_modules/mkdirp/lib/path-arg.js
new file mode 100644
index 000000000..cc07de5a6
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/mkdirp/lib/path-arg.js
@@ -0,0 +1,29 @@
+const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform
+const { resolve, parse } = require('path')
+const pathArg = path => {
+ if (/\0/.test(path)) {
+ // simulate same failure that node raises
+ throw Object.assign(
+ new TypeError('path must be a string without null bytes'),
+ {
+ path,
+ code: 'ERR_INVALID_ARG_VALUE',
+ }
+ )
+ }
+
+ path = resolve(path)
+ if (platform === 'win32') {
+ const badWinChars = /[*|"<>?:]/
+ const {root} = parse(path)
+ if (badWinChars.test(path.substr(root.length))) {
+ throw Object.assign(new Error('Illegal characters in path.'), {
+ path,
+ code: 'EINVAL',
+ })
+ }
+ }
+
+ return path
+}
+module.exports = pathArg
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/lib/use-native.js b/node_modules/libnpmpack/node_modules/mkdirp/lib/use-native.js
new file mode 100644
index 000000000..079361de1
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/mkdirp/lib/use-native.js
@@ -0,0 +1,10 @@
+const fs = require('fs')
+
+const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version
+const versArr = version.replace(/^v/, '').split('.')
+const hasNative = +versArr[0] > 10 || +versArr[0] === 10 && +versArr[1] >= 12
+
+const useNative = !hasNative ? () => false : opts => opts.mkdir === fs.mkdir
+const useNativeSync = !hasNative ? () => false : opts => opts.mkdirSync === fs.mkdirSync
+
+module.exports = {useNative, useNativeSync}
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/package.json b/node_modules/libnpmpack/node_modules/mkdirp/package.json
new file mode 100644
index 000000000..d4c0acbb8
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/mkdirp/package.json
@@ -0,0 +1,75 @@
+{
+ "_from": "mkdirp@^1.0.3",
+ "_id": "mkdirp@1.0.3",
+ "_inBundle": false,
+ "_integrity": "sha512-6uCP4Qc0sWsgMLy1EOqqS/3rjDHOEnsStVr/4vtAIK2Y5i2kA7lFFejYrpIyiN9w0pYf4ckeCYT9f1r1P9KX5g==",
+ "_location": "/libnpmpack/mkdirp",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "mkdirp@^1.0.3",
+ "name": "mkdirp",
+ "escapedName": "mkdirp",
+ "rawSpec": "^1.0.3",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.3"
+ },
+ "_requiredBy": [
+ "/libnpmpack/pacote"
+ ],
+ "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.3.tgz",
+ "_shasum": "4cf2e30ad45959dddea53ad97d518b6c8205e1ea",
+ "_spec": "mkdirp@^1.0.3",
+ "_where": "/Users/claudiahdz/npm/cli/node_modules/libnpmpack/node_modules/pacote",
+ "bin": {
+ "mkdirp": "bin/cmd.js"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/node-mkdirp/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "Recursively mkdir, like `mkdir -p`",
+ "devDependencies": {
+ "require-inject": "^1.4.4",
+ "tap": "^14.10.6"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "files": [
+ "bin",
+ "lib",
+ "index.js"
+ ],
+ "homepage": "https://github.com/isaacs/node-mkdirp#readme",
+ "keywords": [
+ "mkdir",
+ "directory",
+ "make dir",
+ "make",
+ "dir",
+ "recursive",
+ "native"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "mkdirp",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/node-mkdirp.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --follow-tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "snap": "tap",
+ "test": "tap"
+ },
+ "tap": {
+ "check-coverage": true,
+ "coverage-map": "map.js"
+ },
+ "version": "1.0.3"
+}
diff --git a/node_modules/libnpmpack/node_modules/mkdirp/readme.markdown b/node_modules/libnpmpack/node_modules/mkdirp/readme.markdown
new file mode 100644
index 000000000..827de5905
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/mkdirp/readme.markdown
@@ -0,0 +1,266 @@
+# mkdirp
+
+Like `mkdir -p`, but in Node.js!
+
+Now with a modern API and no\* bugs!
+
+<small>\* may contain some bugs</small>
+
+# example
+
+## pow.js
+
+```js
+const mkdirp = require('mkdirp')
+
+// return value is a Promise resolving to the first directory created
+mkdirp('/tmp/foo/bar/baz').then(made =>
+ console.log(`made directories, starting with ${made}`))
+```
+
+Output (where `/tmp/foo` already exists)
+
+```
+made directories, starting with /tmp/foo/bar
+```
+
+Or, if you don't have time to wait around for promises:
+
+```js
+const mkdirp = require('mkdirp')
+
+// return value is the first directory created
+const made = mkdirp.sync('/tmp/foo/bar/baz')
+console.log(`made directories, starting with ${made}`)
+```
+
+And now /tmp/foo/bar/baz exists, huzzah!
+
+# methods
+
+```js
+const mkdirp = require('mkdirp')
+```
+
+## mkdirp(dir, [opts]) -> Promise<String | undefined>
+
+Create a new directory and any necessary subdirectories at `dir` with octal
+permission string `opts.mode`. If `opts` is a string or number, it will be
+treated as the `opts.mode`.
+
+If `opts.mode` isn't specified, it defaults to `0o777 &
+(~process.umask())`.
+
+Promise resolves to first directory `made` that had to be created, or
+`undefined` if everything already exists. Promise rejects if any errors
+are encountered. Note that, in the case of promise rejection, some
+directories _may_ have been created, as recursive directory creation is not
+an atomic operation.
+
+You can optionally pass in an alternate `fs` implementation by passing in
+`opts.fs`. Your implementation should have `opts.fs.mkdir(path, opts, cb)`
+and `opts.fs.stat(path, cb)`.
+
+You can also override just one or the other of `mkdir` and `stat` by
+passing in `opts.stat` or `opts.mkdir`, or providing an `fs` option that
+only overrides one of these.
+
+## mkdirp.sync(dir, opts) -> String|null
+
+Synchronously create a new directory and any necessary subdirectories at
+`dir` with octal permission string `opts.mode`. If `opts` is a string or
+number, it will be treated as the `opts.mode`.
+
+If `opts.mode` isn't specified, it defaults to `0o777 &
+(~process.umask())`.
+
+Returns the first directory that had to be created, or undefined if
+everything already exists.
+
+You can optionally pass in an alternate `fs` implementation by passing in
+`opts.fs`. Your implementation should have `opts.fs.mkdirSync(path, mode)`
+and `opts.fs.statSync(path)`.
+
+You can also override just one or the other of `mkdirSync` and `statSync`
+by passing in `opts.statSync` or `opts.mkdirSync`, or providing an `fs`
+option that only overrides one of these.
+
+## mkdirp.manual, mkdirp.manualSync
+
+Use the manual implementation (not the native one). This is the default
+when the native implementation is not available or the stat/mkdir
+implementation is overridden.
+
+## mkdirp.native, mkdirp.nativeSync
+
+Use the native implementation (not the manual one). This is the default
+when the native implementation is available and stat/mkdir are not
+overridden.
+
+# implementation
+
+On Node.js v10.12.0 and above, use the native `fs.mkdir(p,
+{recursive:true})` option, unless `fs.mkdir`/`fs.mkdirSync` has been
+overridden by an option.
+
+## native implementation
+
+- If the path is a root directory, then pass it to the underlying
+ implementation and return the result/error. (In this case, it'll either
+ succeed or fail, but we aren't actually creating any dirs.)
+- Walk up the path statting each directory, to find the first path that
+ will be created, `made`.
+- Call `fs.mkdir(path, { recursive: true })` (or `fs.mkdirSync`)
+- If error, raise it to the caller.
+- Return `made`.
+
+## manual implementation
+
+- Call underlying `fs.mkdir` implementation, with `recursive: false`
+- If error:
+ - If path is a root directory, raise to the caller and do not handle it
+ - If ENOENT, mkdirp parent dir, store result as `made`
+ - stat(path)
+ - If error, raise original `mkdir` error
+ - If directory, return `made`
+ - Else, raise original `mkdir` error
+- else
+ - return `undefined` if a root dir, or `made` if set, or `path`
+
+## windows vs unix caveat
+
+On Windows file systems, attempts to create a root directory (ie, a drive
+letter or root UNC path) will fail. If the root directory exists, then it
+will fail with `EPERM`. If the root directory does not exist, then it will
+fail with `ENOENT`.
+
+On posix file systems, attempts to create a root directory (in recursive
+mode) will succeed silently, as it is treated like just another directory
+that already exists. (In non-recursive mode, of course, it fails with
+`EEXIST`.)
+
+In order to preserve this system-specific behavior (and because it's not as
+if we can create the parent of a root directory anyway), attempts to create
+a root directory are passed directly to the `fs` implementation, and any
+errors encountered are not handled.
+
+## native error caveat
+
+The native implementation (as of at least Node.js v13.4.0) does not provide
+appropriate errors in some cases (see
+[nodejs/node#31481](https://github.com/nodejs/node/issues/31481) and
+[nodejs/node#28015](https://github.com/nodejs/node/issues/28015)).
+
+In order to work around this issue, the native implementation will fall
+back to the manual implementation if an `ENOENT` error is encountered.
+
+# choosing a recursive mkdir implementation
+
+There are a few to choose from! Use the one that suits your needs best :D
+
+## use `fs.mkdir(path, {recursive: true}, cb)` if:
+
+- You wish to optimize performance even at the expense of other factors.
+- You don't need to know the first dir created.
+- You are ok with getting `ENOENT` as the error when some other problem is
+ the actual cause.
+- You can limit your platforms to Node.js v10.12 and above.
+- You're ok with using callbacks instead of promises.
+- You don't need/want a CLI.
+- You don't need to override the `fs` methods in use.
+
+## use this module (mkdirp 1.x) if:
+
+- You need to know the first directory that was created.
+- You wish to use the native implementation if available, but fall back
+ when it's not.
+- You prefer promise-returning APIs to callback-taking APIs.
+- You want more useful error messages than the native recursive mkdir
+ provides (at least as of Node.js v13.4), and are ok with re-trying on
+ `ENOENT` to achieve this.
+- You need (or at least, are ok with) a CLI.
+- You need to override the `fs` methods in use.
+
+## use [`make-dir`](http://npm.im/make-dir) if:
+
+- You do not need to know the first dir created (and wish to save a few
+ `stat` calls when using the native implementation for this reason).
+- You wish to use the native implementation if available, but fall back
+ when it's not.
+- You prefer promise-returning APIs to callback-taking APIs.
+- You are ok with occasionally getting `ENOENT` errors for failures that
+ are actually related to something other than a missing file system entry.
+- You don't need/want a CLI.
+- You need to override the `fs` methods in use.
+
+## use mkdirp 0.x if:
+
+- You need to know the first directory that was created.
+- You need (or at least, are ok with) a CLI.
+- You need to override the `fs` methods in use.
+- You're ok with using callbacks instead of promises.
+- You are not running on Windows, where the root-level ENOENT errors can
+ lead to infinite regress.
+- You think vinyl just sounds warmer and richer for some weird reason.
+- You are supporting truly ancient Node.js versions, before even the advent
+ of a `Promise` language primitive. (Please don't. You deserve better.)
+
+# cli
+
+This package also ships with a `mkdirp` command.
+
+```
+$ mkdirp -h
+
+usage: mkdirp [DIR1,DIR2..] {OPTIONS}
+
+ Create each supplied directory including any necessary parent directories
+ that don't yet exist.
+
+ If the directory already exists, do nothing.
+
+OPTIONS are:
+
+ -m<mode> If a directory needs to be created, set the mode as an octal
+ --mode=<mode> permission string.
+
+ -v --version Print the mkdirp version number
+
+ -h --help Print this helpful banner
+
+ -p --print Print the first directories created for each path provided
+
+ --manual Use manual implementation, even if native is available
+```
+
+# install
+
+With [npm](http://npmjs.org) do:
+
+```
+npm install mkdirp
+```
+
+to get the library locally, or
+
+```
+npm install -g mkdirp
+```
+
+to get the command everywhere, or
+
+```
+npx mkdirp ...
+```
+
+to run the command without installing it globally.
+
+# platform support
+
+This module works on node v8, but only v10 and above are officially
+supported, as Node v8 reached its LTS end of life 2020-01-01, which is in
+the past, as of this writing.
+
+# license
+
+MIT
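The readme above documents overriding `fs` behaviour through `opts.fs`, `opts.mkdir`, and `opts.stat` but shows no code for it. A minimal sketch under those documented options; the logging wrapper is illustrative, and overriding `mkdir` forces the manual implementation as the readme describes:

```js
const fs = require('fs')
const mkdirp = require('mkdirp')

// wrap fs.mkdir so every directory creation is logged; because this is
// no longer fs.mkdir itself, mkdirp falls back to its manual implementation
const loggingMkdir = (path, opts, cb) => {
  console.log('mkdir', path)
  fs.mkdir(path, opts, cb)
}

mkdirp('/tmp/foo/bar/baz', { mkdir: loggingMkdir })
  .then(made => console.log(`first directory created: ${made}`))
  .catch(er => console.error(er))
```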
diff --git a/node_modules/libnpmpack/node_modules/pacote/LICENSE b/node_modules/libnpmpack/node_modules/pacote/LICENSE
new file mode 100644
index 000000000..a03cd0ed0
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter, Kat Marchán, npm, Inc., and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libnpmpack/node_modules/pacote/README.md b/node_modules/libnpmpack/node_modules/pacote/README.md
new file mode 100644
index 000000000..49dbde6a9
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/README.md
@@ -0,0 +1,244 @@
+# pacote
+
+JavaScript Package Handler
+
+## USAGE
+
+```js
+const pacote = require('pacote')
+
+// get a package manifest
+pacote.manifest('foo@1.x').then(manifest => console.log('got it', manifest))
+
+// extract a package into a folder
+pacote.extract('github:npm/cli', 'some/path', options)
+ .then(({from, resolved, integrity}) => {
+ console.log('extracted!', from, resolved, integrity)
+ })
+
+pacote.tarball('https://server.com/package.tgz').then(data => {
+ console.log('got ' + data.length + ' bytes of tarball data')
+})
+```
+
+Anything that you can do to one kind of package, you can do to any kind of
+package. Data that isn't relevant (like a packument for a tarball) will be
+simulated.
+
+## CLI
+
+This module exports a command line interface that can do most of what is
+described below. Run `pacote -h` to learn more.
+
+```
+Pacote - The JavaScript Package Handler, v10.1.1
+
+Usage:
+
+ pacote resolve <spec>
+ Resolve a specifier and output the fully resolved target
+ Returns integrity and from if '--long' flag is set.
+
+ pacote manifest <spec>
+ Fetch a manifest and print to stdout
+
+ pacote packument <spec>
+ Fetch a full packument and print to stdout
+
+ pacote tarball <spec> [<filename>]
+ Fetch a package tarball and save to <filename>
+ If <filename> is missing or '-', the tarball will be streamed to stdout.
+
+ pacote extract <spec> <folder>
+ Extract a package to the destination folder.
+
+Configuration values all match the names of configs passed to npm, or
+options passed to Pacote. Additional flags for this executable:
+
+ --long Print an object from 'resolve', including integrity and spec.
+ --json Print result objects as JSON rather than node's default.
+ (This is the default if stdout is not a TTY.)
+ --help -h Print this helpful text.
+
+For example '--cache=/path/to/folder' will use that folder as the cache.
+```
+
+## API
+
+The `spec` refers to any kind of package specifier that npm can install.
+If you can pass it to the npm CLI, you can pass it to pacote. (In fact,
+that's exactly what the npm CLI does.)
+
+See below for valid `opts` values.
+
+* `pacote.resolve(spec, opts)` Resolve a specifier like `foo@latest` or
+ `github:user/project` all the way to a tarball url, tarball file, or git
+ repo with commit hash.
+
+* `pacote.extract(spec, dest, opts)` Extract a package's tarball into a
+ destination folder. Returns a promise that resolves to the
+ `{from,resolved,integrity}` of the extracted package.
+
+* `pacote.manifest(spec, opts)` Fetch (or simulate) a package's manifest
+ (basically, the `package.json` file, plus a bit of metadata).
+ See below for more on manifests and packuments. Returns a Promise that
+ resolves to the manifest object.
+
+* `pacote.packument(spec, opts)` Fetch (or simulate) a package's packument
+ (basically, the top-level package document listing all the manifests that
+ the registry returns). See below for more on manifests and packuments.
+ Returns a Promise that resolves to the packument object.
+
+* `pacote.tarball(spec, opts)` Get a package tarball data as a buffer in
+ memory. Returns a Promise that resolves to the tarball data Buffer, with
+ `from`, `resolved`, and `integrity` fields attached.
+
+* `pacote.tarball.file(spec, dest, opts)` Save a package tarball data to
+ a file on disk. Returns a Promise that resolves to
+ `{from,integrity,resolved}` of the fetched tarball.
+
+* `pacote.tarball.stream(spec, streamHandler, opts)` Fetch a tarball and
+ make the stream available to the `streamHandler` function.
+
+ This is mostly an internal function, but it is exposed because it does
+ provide some functionality that may be difficult to achieve otherwise.
+
+ The `streamHandler` function MUST return a Promise that resolves when
+ the stream (and all associated work) is ended, or rejects if the stream
+ has an error.
+
+ The `streamHandler` function MAY be called multiple times, as Pacote
+ retries requests in some scenarios, such as cache corruption or
+ retriable network failures.
+
+### Options
+
+Options are passed to
+[`npm-registry-fetch`](http://npm.im/npm-registry-fetch) and
+[`cacache`](http://npm.im/cacache), so in addition to these, anything for
+those modules can be given to pacote as well.
+
+Options object is cloned, and mutated along the way to add integrity,
+resolved, and other properties, as they are determined.
+
+* `cache` Where to store cache entries and temp files. Passed to
+ [`cacache`](http://npm.im/cacache). Defaults to the same cache directory
+ that npm will use by default, based on platform and environment.
+* `where` Base folder for resolving relative `file:` dependencies.
+* `resolved` Shortcut for looking up resolved values. Should be specified
+ if known.
+* `integrity` Expected integrity of fetched package tarball. If specified,
+ tarballs with mismatched integrity values will raise an `EINTEGRITY`
+ error.
+* `umask` Permission mode mask for extracted files and directories.
+ Defaults to `0o22`. See "Extracted File Modes" below.
+* `fmode` Minimum permission mode for extracted files. Defaults to
+ `0o666`. See "Extracted File Modes" below.
+* `dmode` Minimum permission mode for extracted directories. Defaults to
+ `0o777`. See "Extracted File Modes" below.
+* `log` A logger object with methods for various log levels. Typically,
+ this will be [`npmlog`](http://npm.im/npmlog) in the npm CLI use case,
+ but if not specified, the default is a logger that emits `'log'` events
+ on the `process` object.
+* `preferOnline` Prefer to revalidate cache entries, even when it would not
+ be strictly necessary. Default `false`.
+* `before` When picking a manifest from a packument, only consider
+ packages published before the specified date. Default `null`.
+* `defaultTag` The default `dist-tag` to use when choosing a manifest from a
+ packument. Defaults to `latest`.
+* `registry` The npm registry to use by default. Defaults to
+ `https://registry.npmjs.org/`.
+* `fullMetadata` Fetch the full metadata from the registry for packuments,
+ including information not strictly required for installation (author,
+ description, etc.) Defaults to `true` when `before` is set, since the
+ version publish time is part of the extended packument metadata.
+
+## Extracted File Modes
+
+Files are extracted with a mode matching the following formula:
+
+```
+( (tarball entry mode value) | (minimum mode option) ) ~ (umask)
+```
+
+This is in order to prevent unreadable files or unlistable directories from
+cluttering a project's `node_modules` folder, even if the package tarball
+specifies that the file should be inaccessible.
+
+It also prevents files from being group- or world-writable without explicit
+opt-in by the user, because all file and directory modes are masked against
+the `umask` value.
+
+So, a file which is `0o771` in the tarball, using the default `fmode` of
+`0o666` and `umask` of `0o22`, will result in a file mode of `0o755`:
+
+```
+(0o771 | 0o666) => 0o777
+(0o777 ~ 0o22) => 0o755
+```
+
+In almost every case, the defaults are appropriate. To respect exactly
+what is in the package tarball (even if this makes an unusable system), set
+both `dmode` and `fmode` options to `0`. Otherwise, the `umask` config
+should be used in most cases where file mode modifications are required,
+and this functions more or less the same as the `umask` value in most Unix
+systems.
+
+## Extracted File Ownership
+
+When running as `root` on Unix systems, all extracted files and folders
+will have their owning `uid` and `gid` values set to match the ownership
+of the containing folder.
+
+This prevents `root`-owned files showing up in a project's `node_modules`
+folder when a user runs `sudo npm install`.
+
+## Manifests
+
+A `manifest` is similar to a `package.json` file. However, it has a few
+pieces of extra metadata, and sometimes lacks metadata that is inessential
+to package installation.
+
+In addition to the common `package.json` fields, manifests include:
+
+* `manifest._resolved` The tarball url or file path where the package
+ artifact can be found.
+* `manifest._from` A normalized form of the spec passed in as an argument.
+* `manifest._integrity` The integrity value for the package artifact.
+* `manifest.dist` Registry manifests (those included in a packument) have a
+ `dist` object. Only `tarball` is required, though at least one of
+ `shasum` or `integrity` is almost always present.
+
+ * `tarball` The url to the associated package artifact. (Copied by
+ Pacote to `manifest._resolved`.)
+ * `integrity` The integrity SRI string for the artifact. This may not
+ be present for older packages on the npm registry. (Copied by Pacote
+ to `manifest._integrity`.)
+ * `shasum` Legacy integrity value. Hexadecimal-encoded sha1 hash.
+ (Converted to an SRI string and copied by Pacote to
+ `manifest._integrity` when `dist.integrity` is not present.)
+ * `fileCount` Number of files in the tarball.
+ * `unpackedSize` Size on disk of the package when unpacked.
+ * `npm-signature` A signature of the package by the
+ [`npmregistry`](https://keybase.io/npmregistry) Keybase account.
+ (Obviously only present for packages published to
+ `https://registry.npmjs.org`.)
+
+## Packuments
+
+A packument is the top-level package document that lists the set of
+manifests for available versions for a package.
+
+When a packument is fetched with `accept:
+application/vnd.npm.install-v1+json` in the HTTP headers, only the most
+minimum necessary metadata is returned. Additional metadata is returned
+when fetched with only `accept: application/json`.
+
+For Pacote's purposes, the following fields are relevant:
+
+* `versions` An object where each key is a version, and each value is the
+ manifest for that version.
+* `dist-tags` An object mapping dist-tags to version numbers. This is how
+ `foo@latest` gets turned into `foo@1.2.3`.
+* `time` In the full packument, an object mapping version numbers to
+ publication times, for the `opts.before` functionality.
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/bin.js b/node_modules/libnpmpack/node_modules/pacote/lib/bin.js
new file mode 100755
index 000000000..c0409be1f
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/lib/bin.js
@@ -0,0 +1,149 @@
+#!/usr/bin/env node
+
+const run = conf => {
+ const pacote = require('../')
+ switch (conf._[0]) {
+ case 'resolve':
+ if (conf.long)
+ return pacote.manifest(conf._[1], conf).then(mani => ({
+ resolved: mani._resolved,
+ integrity: mani._integrity,
+ from: mani._from,
+ }))
+ case 'manifest':
+ case 'packument':
+ return pacote[conf._[0]](conf._[1], conf)
+
+ case 'tarball':
+ if (!conf._[2] || conf._[2] === '-') {
+ return pacote.tarball.stream(conf._[1], stream => {
+ stream.pipe(conf.testStdout ||
+ /* istanbul ignore next */ process.stdout)
+ // make sure it resolves something falsey
+ return stream.promise().then(() => {})
+ }, conf)
+ } else
+ return pacote.tarball.file(conf._[1], conf._[2], conf)
+
+ case 'extract':
+ return pacote.extract(conf._[1], conf._[2], conf)
+
+ default: /* istanbul ignore next */ {
+ throw new Error(`bad command: ${conf._[0]}`)
+ }
+ }
+}
+
+const version = require('../package.json').version
+const usage = () =>
+`Pacote - The JavaScript Package Handler, v${version}
+
+Usage:
+
+ pacote resolve <spec>
+ Resolve a specifier and output the fully resolved target
+ Returns integrity and from if '--long' flag is set.
+
+ pacote manifest <spec>
+ Fetch a manifest and print to stdout
+
+ pacote packument <spec>
+ Fetch a full packument and print to stdout
+
+ pacote tarball <spec> [<filename>]
+ Fetch a package tarball and save to <filename>
+ If <filename> is missing or '-', the tarball will be streamed to stdout.
+
+ pacote extract <spec> <folder>
+ Extract a package to the destination folder.
+
+Configuration values all match the names of configs passed to npm, or
+options passed to Pacote. Additional flags for this executable:
+
+ --long Print an object from 'resolve', including integrity and spec.
+ --json Print result objects as JSON rather than node's default.
+ (This is the default if stdout is not a TTY.)
+ --help -h Print this helpful text.
+
+For example '--cache=/path/to/folder' will use that folder as the cache.
+`
+
+const shouldJSON = (conf, result) =>
+ conf.json ||
+ !process.stdout.isTTY &&
+ conf.json === undefined &&
+ result &&
+ typeof result === 'object'
+
+const pretty = (conf, result) =>
+ shouldJSON(conf, result) ? JSON.stringify(result, 0, 2) : result
+
+let addedLogListener = false
+const main = args => {
+ const conf = parse(args)
+ if (conf.help || conf.h)
+ return console.log(usage())
+
+ if (!addedLogListener) {
+ process.on('log', console.error)
+ addedLogListener = true
+ }
+
+ try {
+ return run(conf)
+ .then(result => result && console.log(pretty(conf, result)))
+ .catch(er => {
+ console.error(er)
+ process.exit(1)
+ })
+ } catch (er) {
+ console.error(er.message)
+ console.error(usage())
+ }
+}
+
+const parseArg = arg => {
+ const split = arg.slice(2).split('=')
+ const k = split.shift()
+ const v = split.join('=')
+ const no = /^no-/.test(k) && !v
+ const key = (no ? k.substr(3) : k)
+ .replace(/^tag$/, 'defaultTag')
+ .replace(/-([a-z])/g, (_, c) => c.toUpperCase())
+ const value = v ? v.replace(/^~/, process.env.HOME) : !no
+ return { key, value }
+}
+
+const parse = args => {
+ const conf = {
+ _: [],
+ cache: process.env.HOME + '/.npm/_cacache',
+ }
+ let dashdash = false
+ args.forEach(arg => {
+ if (dashdash)
+ conf._.push(arg)
+ else if (arg === '--')
+ dashdash = true
+ else if (arg === '-h')
+ conf.help = true
+ else if (/^--/.test(arg)) {
+ const {key, value} = parseArg(arg)
+ conf[key] = value
+ } else {
+ conf._.push(arg)
+ }
+ })
+ return conf
+}
+
+if (module === require.main)
+ main(process.argv.slice(2))
+else
+ module.exports = {
+ main,
+ run,
+ usage,
+ parseArg,
+ parse,
+ }
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/dir.js b/node_modules/libnpmpack/node_modules/pacote/lib/dir.js
new file mode 100644
index 000000000..44dadaa32
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/lib/dir.js
@@ -0,0 +1,98 @@
+const Fetcher = require('./fetcher.js')
+const FileFetcher = require('./file.js')
+const cacache = require('cacache')
+const Minipass = require('minipass')
+const { promisify } = require('util')
+const readPackageJson = require('read-package-json-fast')
+const npm = require('./util/npm.js')
+const isPackageBin = require('./util/is-package-bin.js')
+const packlist = require('npm-packlist')
+const tar = require('tar')
+const _prepareDir = Symbol('_prepareDir')
+const _tarcOpts = Symbol('_tarcOpts')
+
+const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
+class DirFetcher extends Fetcher {
+ constructor (spec, opts) {
+ super(spec, opts)
+ // just the fully resolved filename
+ this.resolved = this.spec.fetchSpec
+ }
+
+ get types () {
+ return ['directory']
+ }
+
+ [_prepareDir] () {
+ return this.manifest().then(mani => {
+ if (!mani.scripts || !mani.scripts.prepare)
+ return
+
+ // we *only* run prepare.
+ // pre/post-pack is run by the npm CLI for publish and pack,
+ // but this function is *also* run when installing git deps
+ return npm(
+ this.npmBin,
+ [].concat(this.npmRunCmd).concat('prepare').concat(this.npmCliConfig),
+ this.resolved,
+ { message: 'directory preparation failed' }
+ )
+ })
+ }
+
+ [_tarballFromResolved] () {
+ const stream = new Minipass()
+ stream.resolved = this.resolved
+ stream.integrity = this.integrity
+
+ // run the prepare script, get the list of files, and tar it up
+ // pipe to the stream, and proxy errors up the chain.
+ this[_prepareDir]()
+ .then(() => packlist({ path: this.resolved }))
+ .then(files => tar.c(this[_tarcOpts](), files)
+ .on('error', er => stream.emit('error', er)).pipe(stream))
+ .catch(er => stream.emit('error', er))
+ return stream
+ }
+
+ [_tarcOpts] () {
+ return {
+ cwd: this.resolved,
+ prefix: 'package/',
+ portable: true,
+ gzip: true,
+
+ // ensure that package bins are always executable
+ // Note that npm-packlist is already filtering out
+ // anything that is not a regular file, ignored by
+ // .npmignore or package.json "files", etc.
+ filter: (path, stat) => {
+ if (isPackageBin(this.package, path))
+ stat.mode |= 0o111
+ return true
+ },
+
+ // Provide a specific date in the 1980s for the benefit of zip,
+ // which is confounded by files dated at the Unix epoch 0.
+ mtime: new Date('1985-10-26T08:15:00.000Z'),
+ }
+ }
+
+ manifest () {
+ if (this.package)
+ return Promise.resolve(this.package)
+
+ return readPackageJson(this.resolved + '/package.json')
+ .then(mani => this.package = {
+ ...mani,
+ _integrity: this.integrity && String(this.integrity),
+ _resolved: this.resolved,
+ _from: this.from,
+ })
+ }
+
+ packument () {
+ return FileFetcher.prototype.packument.apply(this)
+ }
+}
+module.exports = DirFetcher
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/fetcher.js b/node_modules/libnpmpack/node_modules/pacote/lib/fetcher.js
new file mode 100644
index 000000000..4c5efdc01
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/lib/fetcher.js
@@ -0,0 +1,470 @@
+// This is the base class that the other fetcher types in lib
+// all descend from.
+// It handles the unpacking and retry logic that is shared among
+// all of the other Fetcher types.
+
+const npa = require('npm-package-arg')
+const ssri = require('ssri')
+const { promisify } = require('util')
+const { basename, dirname } = require('path')
+const rimraf = promisify(require('rimraf'))
+const tar = require('tar')
+const procLog = require('./util/proc-log.js')
+const retry = require('promise-retry')
+const fsm = require('fs-minipass')
+const cacache = require('cacache')
+const isPackageBin = require('./util/is-package-bin.js')
+const getContents = require('@npmcli/installed-package-contents')
+
+// we only change ownership on unix platforms, and only if uid is 0
+const selfOwner = process.getuid && process.getuid() === 0 ? {
+ uid: 0,
+ gid: process.getgid(),
+} : null
+const chownr = selfOwner ? promisify(require('chownr')) : null
+const inferOwner = selfOwner ? require('infer-owner') : null
+const mkdirp = require('mkdirp')
+const cacheDir = require('./util/cache-dir.js')
+
+// Private methods.
+// Child classes should not have to override these.
+// Users should never call them.
+const _chown = Symbol('_chown')
+const _extract = Symbol('_extract')
+const _mkdir = Symbol('_mkdir')
+const _empty = Symbol('_empty')
+const _toFile = Symbol('_toFile')
+const _tarxOptions = Symbol('_tarxOptions')
+const _entryMode = Symbol('_entryMode')
+const _istream = Symbol('_istream')
+const _assertType = Symbol('_assertType')
+const _tarballFromCache = Symbol('_tarballFromCache')
+const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
+
+class FetcherBase {
+ constructor (spec, opts) {
+ if (!opts || typeof opts !== 'object')
+ throw new TypeError('options object is required')
+ this.spec = npa(spec, opts.where)
+
+ // a bit redundant because presumably the caller already knows this,
+ // but it makes it easier to not have to keep track of the requested
+ // spec when we're dispatching thousands of these at once, and normalizing
+ // is nice. saveSpec is preferred if set, because it turns stuff like
+ // x/y#committish into github:x/y#committish. use name@rawSpec for
+ // registry deps so that we turn xyz and xyz@ -> xyz@
+ this.from = this.spec.registry
+ ? `${this.spec.name}@${this.spec.rawSpec}` : this.spec.saveSpec
+
+ this[_assertType]()
+ // clone the opts object so that others aren't upset when we mutate it
+ // by adding/modifying the integrity value.
+ this.opts = {...opts}
+ this.cache = opts.cache || cacheDir()
+ this.resolved = opts.resolved || null
+
+ // default to caching/verifying with sha512, since that's what we usually
+ // have. We'll need to change this default, or start overriding it, when
+ // sha512 is no longer strong enough.
+ this.defaultIntegrityAlgorithm = opts.defaultIntegrityAlgorithm || 'sha512'
+
+ if (typeof opts.integrity === 'string')
+ this.opts.integrity = ssri.parse(opts.integrity)
+
+ this.package = null
+ this.type = this.constructor.name
+ this.fmode = opts.fmode || 0o666
+ this.dmode = opts.dmode || 0o777
+ this.umask = opts.umask || 0o022
+ this.log = opts.log || procLog
+
+ this.preferOnline = !!opts.preferOnline
+ this.preferOffline = !!opts.preferOffline
+ this.offline = !!opts.offline
+
+ this.before = opts.before
+ this.fullMetadata = this.before ? true : !!opts.fullMetadata
+
+ this.defaultTag = opts.defaultTag || 'latest'
+ this.registry = opts.registry || 'https://registry.npmjs.org'
+
+ // command to run 'prepare' scripts on directories and git dirs.
+ // To use pacote with yarn, for example, set npmBin to 'yarn',
+ // npmRunCmd to [], and npmCliConfig to yarn's equivalents.
+ this.npmBin = opts.npmBin || 'npm'
+ this.npmRunCmd = opts.npmRunCmd || 'run'
+
+ // command to install deps for preparing
+ this.npmInstallCmd = opts.npmInstallCmd || [
+ 'install',
+ '--only=dev',
+ '--prod',
+ '--ignore-prepublish',
+ '--no-progress',
+ '--no-save',
+ ]
+
+ // XXX fill more of this in based on what we know from this.opts
+ // we explicitly DO NOT fill in --tag, though, since we are often
+ // going to be packing in the context of a publish, which may set
+ // a dist-tag, but certainly wants to keep defaulting to latest.
+ this.npmCliConfig = opts.npmCliConfig || [
+ `--cache=${this.cache}`,
+ `--prefer-offline=${!!this.preferOffline}`,
+ `--prefer-online=${!!this.preferOnline}`,
+ `--offline=${!!this.offline}`,
+ `--before=${this.before ? this.before.toISOString() : ''}`,
+ ]
+ }
+
+ get integrity () {
+ return this.opts.integrity || null
+ }
+ set integrity (i) {
+ if (!i)
+ return
+
+ i = ssri.parse(i)
+ const current = this.opts.integrity
+
+ // do not ever update an existing hash value, but do
+ // merge in NEW algos and hashes that we don't already have.
+ if (current)
+ current.merge(i)
+ else
+ this.opts.integrity = i
+ }
+
+ get notImplementedError () {
+ return new Error('not implemented in this fetcher type: ' + this.type)
+ }
+
+ // override in child classes
+ // Returns a Promise that resolves to this.resolved string value
+ resolve () {
+ return this.resolved ? Promise.resolve(this.resolved)
+ : Promise.reject(this.notImplementedError)
+ }
+
+ packument () {
+ return Promise.reject(this.notImplementedError)
+ }
+
+ // override in child class
+ // returns a manifest containing:
+ // - name
+ // - version
+ // - _resolved
+ // - _integrity
+ // - plus whatever else was in there (corgi, full metadata, or pj file)
+ manifest () {
+ return Promise.reject(this.notImplementedError)
+ }
+
+ // private, should be overridden.
+ // Note that they should *not* calculate or check integrity, but *just*
+ // return the raw tarball data stream.
+ [_tarballFromResolved] () {
+ throw this.notImplementedError
+ }
+
+ // public, should not be overridden
+ tarball () {
+ return this.tarballStream(stream => new Promise((res, rej) => {
+ const buf = []
+ stream.on('error', er => rej(er))
+ stream.on('end', () => {
+ const data = Buffer.concat(buf)
+ data.integrity = this.integrity && String(this.integrity)
+ data.resolved = this.resolved
+ data.from = this.from
+ return res(data)
+ })
+ stream.on('data', d => buf.push(d))
+ }))
+ }
+
+ // private
+ // Note: cacache will raise a EINTEGRITY error if the integrity doesn't match
+ [_tarballFromCache] () {
+ return cacache.get.stream.byDigest(this.cache, this.integrity, this.opts)
+ }
+
+ [_istream] (stream) {
+ // everyone will need one of these, either for verifying or calculating.
+ // We always set it, because we might only have a weak legacy hex
+ // sha1 in the packument, and this MAY upgrade it to a stronger algo.
+ // If we had an integrity, and it doesn't match, then this does not
+ // override that error; the istream will raise the error before it
+ // gets to the point of re-setting the integrity.
+ const istream = ssri.integrityStream(this.opts)
+ istream.on('integrity', i => this.integrity = i)
+ return stream.on('error', er => istream.emit('error', er)).pipe(istream)
+ }
+
+ pickIntegrityAlgorithm () {
+ return this.integrity ? this.integrity.pickAlgorithm(this.opts)
+ : this.defaultIntegrityAlgorithm
+ }
+
+ // TODO: check error class, once those are rolled out to our deps
+ isDataCorruptionError (er) {
+ return er.code === 'EINTEGRITY' || er.code === 'Z_DATA_ERROR'
+ }
+
+ // override the types getter
+ get types () {}
+ [_assertType] () {
+ if (this.types && !this.types.includes(this.spec.type)) {
+ throw new TypeError(`Wrong spec type (${
+ this.spec.type
+ }) for ${
+ this.constructor.name
+ }. Supported types: ${this.types.join(', ')}`)
+ }
+ }
+
+ // We allow ENOENTs from cacache, but not anywhere else.
+ // An ENOENT trying to read a tgz file, for example, is Right Out.
+ isRetriableError (er) {
+ // TODO: check error class, once those are rolled out to our deps
+ return this.isDataCorruptionError(er) || er.code === 'ENOENT'
+ }
+
+ // Mostly internal, but has some uses
+ // Pass in a function which returns a promise
+ // Function will be called 1 or more times with streams that may fail.
+ // Retries:
+ // Function MUST handle errors on the stream by rejecting the promise,
+ // so that retry logic can pick it up and either retry or fail whatever
+ // promise it was making (ie, failing extraction, etc.)
+ //
+ // The return value of this method is a Promise that resolves the same
+ // as whatever the streamHandler resolves to.
+ //
+ // This should never be overridden by child classes, but it is public.
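+ //
+ // An illustrative sketch (not part of this class) of a streamHandler that
+ // buffers the tarball, the way the public tarball() method above does
+ // ('fetcher' here stands for any Fetcher instance):
+ //
+ //   fetcher.tarballStream(stream => new Promise((res, rej) => {
+ //     const chunks = []
+ //     stream.on('error', rej)
+ //     stream.on('data', c => chunks.push(c))
+ //     stream.on('end', () => res(Buffer.concat(chunks)))
+ //   }))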
+ tarballStream (streamHandler) {
+ // Only short-circuit via cache if we have everything else we'll need,
+ // and the user has not expressed a preference for checking online.
+
+ const fromCache = (
+ !this.preferOnline &&
+ this.integrity &&
+ this.resolved
+ ) ? streamHandler(this[_tarballFromCache]()).catch(er => {
+ if (this.isDataCorruptionError(er)) {
+ this.log.warn('tarball', `cached data for ${
+ this.spec
+ } (${this.integrity}) seems to be corrupted. Refreshing cache.`)
+ return this.cleanupCached().then(() => { throw er })
+ } else {
+ throw er
+ }
+ }) : null
+
+ const fromResolved = er => {
+ if (er) {
+ if (!this.isRetriableError(er))
+ throw er
+ this.log.silly('tarball', `no local data for ${
+ this.spec
+ }. Extracting by manifest.`)
+ }
+ return this.resolve().then(() => retry(tryAgain =>
+ streamHandler(this[_istream](this[_tarballFromResolved]()))
+ .catch(er => {
+ // Most likely data integrity. A cache ENOENT error is unlikely
+ // here, since we're definitely not reading from the cache, but it
+ // IS possible that the fetch subsystem accessed the cache, and the
+ // entry got blown away or something. Try one more time to be sure.
+ if (this.isRetriableError(er)) {
+ this.log.warn('tarball', `tarball data for ${
+ this.spec
+ } (${this.integrity}) seems to be corrupted. Trying again.`)
+ return this.cleanupCached().then(() => tryAgain(er))
+ }
+ throw er
+ }), { retries: 1, minTimeout: 0, maxTimeout: 0 }))
+ }
+
+ return fromCache ? fromCache.catch(fromResolved) : fromResolved()
+ }
+
+ cleanupCached () {
+ return cacache.rm.content(this.cache, this.integrity, this.opts)
+ }
+
+ [_chown] (path, uid, gid) {
+ return selfOwner && (selfOwner.gid !== gid || selfOwner.uid !== uid)
+ ? chownr(path, uid, gid)
+ : /* istanbul ignore next - we don't test in root-owned folders */ null
+ }
+
+ [_empty] (path) {
+ return getContents({path, depth: 1}).then(contents => Promise.all(
+ contents.map(entry => rimraf(entry))))
+ }
+
+ [_mkdir] (dest) {
+ // if we're bothering to do owner inference, then do it.
+ // otherwise just make the dir, and return an empty object.
+ // always empty the dir to start with, but do so
+ // _after_ inferring the owner, in case there's an existing folder
+ // there whose ownership we want to preserve and which differs from
+ // the parent folder (rare, but probably happens sometimes).
+ return !inferOwner
+ ? this[_empty](dest).then(() => mkdirp(dest)).then(() => ({}))
+ : inferOwner(dest).then(({uid, gid}) =>
+ this[_empty](dest)
+ .then(() => mkdirp(dest))
+ .then(made => {
+ // ignore the || dest part in coverage. It's there to handle
+ // race conditions where the dir may be made by someone else
+ // after being removed by us.
+ const dir = made || /* istanbul ignore next */ dest
+ return this[_chown](dir, uid, gid)
+ })
+ .then(() => ({uid, gid})))
+ }
+
+ // extraction is always the same. the only difference is where
+ // the tarball comes from.
+ extract (dest) {
+ return this[_mkdir](dest).then(({uid, gid}) =>
+ this.tarballStream(tarball => this[_extract](dest, tarball, uid, gid)))
+ }
+
+ [_toFile] (dest) {
+ return this.tarballStream(str => new Promise((res, rej) => {
+ const writer = new fsm.WriteStream(dest)
+ str.on('error', er => writer.emit('error', er))
+ writer.on('error', er => rej(er))
+ writer.on('close', () => res({
+ integrity: this.integrity && String(this.integrity),
+ resolved: this.resolved,
+ from: this.from,
+ }))
+ str.pipe(writer)
+ }))
+ }
+
+ // don't use this[_mkdir] because we don't want to rimraf anything
+ tarballFile (dest) {
+ const dir = dirname(dest)
+ return !inferOwner
+ ? mkdirp(dir).then(() => this[_toFile](dest))
+ : inferOwner(dest).then(({uid, gid}) =>
+ mkdirp(dir).then(made => this[_toFile](dest)
+ .then(res => this[_chown](made || dir, uid, gid)
+ .then(() => res))))
+ }
+
+ [_extract] (dest, tarball, uid, gid) {
+ const extractor = tar.x(this[_tarxOptions]({ cwd: dest, uid, gid }))
+ const p = new Promise((resolve, reject) => {
+ extractor.on('end', () => {
+ resolve({
+ resolved: this.resolved,
+ integrity: this.integrity && String(this.integrity),
+ from: this.from,
+ })
+ })
+
+ extractor.on('error', er => {
+ this.log.warn('tar', er.message)
+ this.log.silly('tar', er)
+ reject(er)
+ })
+
+ tarball.on('error', er => reject(er))
+ })
+
+ tarball.pipe(extractor)
+ return p
+ }
+
+ // always ensure that entries are at least as permissive as our configured
+ // dmode/fmode, but never more permissive than the umask allows.
+ [_entryMode] (path, mode, type) {
+ const m = /Directory|GNUDumpDir/.test(type) ? this.dmode
+ : /File$/.test(type) ? this.fmode
+ : /* istanbul ignore next - should never happen in a pkg */ 0
+
+ // make sure package bins are executable
+ const exe = isPackageBin(this.package, path) ? 0o111 : 0
+ return ((mode | m) & ~this.umask) | exe
+ }
+
+ [_tarxOptions] ({ cwd, uid, gid }) {
+ const sawIgnores = new Set()
+ return {
+ cwd,
+ filter: (name, entry) => {
+ if (/Link$/.test(entry.type))
+ return false
+ entry.mode = this[_entryMode](entry.path, entry.mode, entry.type)
+ // this replicates the npm pack behavior where .gitignore files
+ // are treated like .npmignore files, but only if a .npmignore
+ // file is not present.
+ if (/File$/.test(entry.type)) {
+ const base = basename(entry.path)
+ if (base === '.npmignore')
+ sawIgnores.add(entry.path)
+ else if (base === '.gitignore') {
+ // rename, but only if there's not already a .npmignore
+ const ni = entry.path.replace(/\.gitignore$/, '.npmignore')
+ if (sawIgnores.has(ni))
+ return false
+ entry.path = ni
+ }
+ return true
+ }
+ },
+ strip: 1,
+ onwarn: /* istanbul ignore next - we can trust that tar logs */
+ (code, msg, data) => {
+ this.log.warn('tar', code, msg)
+ this.log.silly('tar', code, msg, data)
+ },
+ uid,
+ gid,
+ umask: this.umask,
+ }
+ }
+}
+
+module.exports = FetcherBase
+
+// Child classes
+const GitFetcher = require('./git.js')
+const RegistryFetcher = require('./registry.js')
+const FileFetcher = require('./file.js')
+const DirFetcher = require('./dir.js')
+const RemoteFetcher = require('./remote.js')
+
+// Get an appropriate fetcher object from a spec and options
+FetcherBase.get = (rawSpec, opts = {}) => {
+ const spec = npa(rawSpec, opts.where)
+ switch (spec.type) {
+ case 'git':
+ return new GitFetcher(spec, opts)
+
+ case 'remote':
+ return new RemoteFetcher(spec, opts)
+
+ case 'version':
+ case 'range':
+ case 'tag':
+ case 'alias':
+ return new RegistryFetcher(spec.subSpec || spec, opts)
+
+ case 'file':
+ return new FileFetcher(spec, opts)
+
+ case 'directory':
+ return new DirFetcher(spec, opts)
+
+ default:
+ throw new TypeError('Unknown spec type: ' + spec.type)
+ }
+}
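+
+// An illustrative sketch (not part of this module) of what get() returns:
+// FetcherBase.get('foo@^1.2.0') yields a RegistryFetcher, get('./some/dir')
+// a DirFetcher, and get('git+https://github.com/some/repo.git') a GitFetcher.
+// ('foo', './some/dir', and the git url are all made-up examples.)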
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/file.js b/node_modules/libnpmpack/node_modules/pacote/lib/file.js
new file mode 100644
index 000000000..d5c601aab
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/lib/file.js
@@ -0,0 +1,93 @@
+const Fetcher = require('./fetcher.js')
+const fsm = require('fs-minipass')
+const cacache = require('cacache')
+const { promisify } = require('util')
+const readPackageJson = require('read-package-json-fast')
+const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
+const _exeBins = Symbol('_exeBins')
+const { resolve } = require('path')
+const fs = require('fs')
+
+class FileFetcher extends Fetcher {
+ constructor (spec, opts) {
+ super(spec, opts)
+ // just the fully resolved filename
+ this.resolved = this.spec.fetchSpec
+ }
+
+ get types () {
+ return ['file']
+ }
+
+ manifest () {
+ if (this.package)
+ return Promise.resolve(this.package)
+
+ // have to unpack the tarball for this.
+ return cacache.tmp.withTmp(this.cache, this.opts, dir =>
+ this.extract(dir)
+ .then(() => readPackageJson(dir + '/package.json'))
+ .then(mani => this.package = {
+ ...mani,
+ _integrity: this.integrity && String(this.integrity),
+ _resolved: this.resolved,
+ _from: this.from,
+ }))
+ }
+
+ [_exeBins] (pkg, dest) {
+ if (!pkg.bin)
+ return Promise.resolve()
+
+ return Promise.all(Object.keys(pkg.bin).map(k => new Promise(res => {
+ const script = resolve(dest, pkg.bin[k])
+ // Best effort. Ignore errors here, the only result is that
+ // a bin script is not executable. But if it's missing or
+ // something, we just leave it for a later stage to trip over
+ // when we can provide a more useful contextual error.
+ fs.stat(script, (er, st) => {
+ if (er)
+ return res()
+ const mode = st.mode | 0o111
+ if (mode === st.mode)
+ return res()
+ fs.chmod(script, mode, res)
+ })
+ })))
+ }
+
+ extract (dest) {
+ // if we've already loaded the manifest, then the super got it.
+ // but if not, read the unpacked manifest and chmod properly.
+ return super.extract(dest)
+ .then(result => this.package ? result
+ : readPackageJson(dest + '/package.json').then(pkg =>
+ this[_exeBins](pkg, dest)).then(() => result))
+ }
+
+ [_tarballFromResolved] () {
+ // create a read stream and return it
+ return new fsm.ReadStream(this.resolved)
+ }
+
+ packument () {
+ // simulate based on manifest
+ return this.manifest().then(mani => ({
+ name: mani.name,
+ 'dist-tags': {
+ [this.defaultTag]: mani.version
+ },
+ versions: {
+ [mani.version]: {
+ ...mani,
+ dist: {
+ tarball: `file:${this.resolved}`,
+ integrity: this.integrity && String(this.integrity),
+ }
+ }
+ }
+ }))
+ }
+}
+
+module.exports = FileFetcher
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/git.js b/node_modules/libnpmpack/node_modules/pacote/lib/git.js
new file mode 100644
index 000000000..81f7ca256
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/lib/git.js
@@ -0,0 +1,272 @@
+const Fetcher = require('./fetcher.js')
+const FileFetcher = require('./file.js')
+const RemoteFetcher = require('./remote.js')
+const DirFetcher = require('./dir.js')
+const hashre = /^[a-f0-9]{40}$/
+const git = require('@npmcli/git')
+const pickManifest = require('npm-pick-manifest')
+const npa = require('npm-package-arg')
+const url = require('url')
+const Minipass = require('minipass')
+const cacache = require('cacache')
+const { promisify } = require('util')
+const readPackageJson = require('read-package-json-fast')
+const npm = require('./util/npm.js')
+
+const _resolvedFromRepo = Symbol('_resolvedFromRepo')
+const _resolvedFromHosted = Symbol('_resolvedFromHosted')
+const _resolvedFromClone = Symbol('_resolvedFromClone')
+const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
+const _addGitSha = Symbol('_addGitSha')
+const _clone = Symbol('_clone')
+const _cloneHosted = Symbol('_cloneHosted')
+const _cloneRepo = Symbol('_cloneRepo')
+const _setResolvedWithSha = Symbol('_setResolvedWithSha')
+const _prepareDir = Symbol('_prepareDir')
+
+// get the repository url. prefer ssh, fall back to https
+// We have to add the git+ back because npa suppresses it.
+const repoUrl = (hosted, opts) =>
+ hosted.sshurl && addGitPlus(hosted.sshurl(opts)) ||
+ hosted.https && addGitPlus(hosted.https(opts))
+
+const addGitPlus = url => url && `git+${url}`
+
+class GitFetcher extends Fetcher {
+ constructor (spec, opts) {
+ super(spec, opts)
+ this.resolvedRef = null
+ if (this.spec.hosted)
+ this.from = this.spec.hosted.shortcut({ noCommittish: false })
+
+ // shortcut: avoid full clone when we can go straight to the tgz
+ // if we have the full sha and it's a hosted git platform
+ if (this.spec.gitCommittish && hashre.test(this.spec.gitCommittish)) {
+ this.resolvedSha = this.spec.gitCommittish
+ // use hosted.tarball() when we shell to RemoteFetcher later
+ this.resolved = this.spec.hosted
+ ? repoUrl(this.spec.hosted, { noCommittish: false })
+ : this.spec.fetchSpec + '#' + this.spec.gitCommittish
+ } else
+ this.resolvedSha = ''
+ }
+
+ get types () {
+ return ['git']
+ }
+
+ resolve () {
+ // likely a hosted git repo with a sha, so get the tarball url
+ // but in general, no reason to resolve() more than necessary!
+ if (this.resolved)
+ return super.resolve()
+
+ // fetch the git repo and then look at the current hash
+ const h = this.spec.hosted
+ // for hosted repos, try the https/ssh urls; otherwise use the raw fetchSpec
+ return h ? this[_resolvedFromHosted](h)
+ : this[_resolvedFromRepo](this.spec.fetchSpec)
+ }
+
+ // first try https, since that's faster and passphrase-less for
+ // public repos. Fall back to SSH to support private repos.
+ // NB: we always store the SSH url in the 'resolved' field.
+ [_resolvedFromHosted] (hosted) {
+ return this[_resolvedFromRepo](hosted.https && hosted.https())
+ .catch(er => {
+ const ssh = hosted.sshurl && hosted.sshurl()
+ if (!ssh)
+ throw er
+ return this[_resolvedFromRepo](ssh)
+ })
+ }
+
+ [_resolvedFromRepo] (gitRemote) {
+ // XXX make this a custom error class
+ if (!gitRemote)
+ return Promise.reject(new Error(`No git url for ${this.spec}`))
+ const gitRange = this.spec.gitRange
+ const name = this.spec.name
+ return git.revs(gitRemote, this.opts).then(remoteRefs => {
+ return gitRange ? pickManifest({
+ versions: remoteRefs.versions,
+ 'dist-tags': remoteRefs['dist-tags'],
+ name,
+ }, gitRange, this.opts)
+ : this.spec.gitCommittish ?
+ remoteRefs.refs[this.spec.gitCommittish] ||
+ remoteRefs.refs[remoteRefs.shas[this.spec.gitCommittish]]
+ : remoteRefs.refs.HEAD // no git committish, get default head
+ }).then(revDoc => {
+ // the committish provided isn't in the rev list
+ // things like HEAD~3 or @yesterday can land here.
+ if (!revDoc || !revDoc.sha)
+ return this[_resolvedFromClone]()
+
+ this.resolvedRef = revDoc
+ this.resolvedSha = revDoc.sha
+ this[_addGitSha](revDoc.sha)
+ return this.resolved
+ })
+ }
+
+ [_setResolvedWithSha] (withSha) {
+ // we haven't cloned, so a tgz download is still faster
+ // of course, if it's not a known host, we can't do that.
+ this.resolved = !this.spec.hosted ? withSha
+ : repoUrl(npa(withSha).hosted, { noCommittish: false })
+ }
+
+ // when we get the git sha, we affix it to our spec to build up
+ // either a git url with a hash, or a tarball download URL
+ [_addGitSha] (sha) {
+ if (this.spec.hosted) {
+ this[_setResolvedWithSha](
+ this.spec.hosted.shortcut({ noCommittish: true }) + '#' + sha
+ )
+ } else {
+ const u = url.format(new url.URL(`#${sha}`, this.spec.rawSpec))
+ this[_setResolvedWithSha](url.format(u))
+ }
+ }
+
+ [_resolvedFromClone] () {
+ // do a full or shallow clone, then look at the HEAD
+ // kind of wasteful, but no other option, really
+ return this[_clone](dir => this.resolved)
+ }
+
+ [_prepareDir] (dir) {
+ return readPackageJson(dir + '/package.json').then(mani => {
+ // no need if we aren't going to do any preparation.
+ const scripts = mani.scripts
+ if (!scripts || !(
+ scripts.postinstall ||
+ scripts.build ||
+ scripts.preinstall ||
+ scripts.install ||
+ scripts.prepare))
+ return
+
+ // the DirFetcher will do its own preparation to run the prepare scripts
+ // All we have to do is put the deps in place so that it can succeed.
+ return npm(
+ this.npmBin,
+ [].concat(this.npmInstallCmd).concat(this.npmCliConfig),
+ dir,
+ { message: 'git dep preparation failed' }
+ )
+ })
+ }
+
+ [_tarballFromResolved] () {
+ const stream = new Minipass()
+ stream.resolved = this.resolved
+ stream.integrity = this.integrity
+ stream.from = this.from
+
+ // check it out and then shell out to the DirFetcher tarball packer
+ this[_clone](dir => this[_prepareDir](dir)
+ .then(() => new Promise((res, rej) => {
+ const df = new DirFetcher(`file:${dir}`, {
+ ...this.opts,
+ resolved: null,
+ integrity: null,
+ })
+ const dirStream = df[_tarballFromResolved]()
+ dirStream.on('error', rej)
+ dirStream.on('end', res)
+ dirStream.pipe(stream)
+ }))).catch(
+ /* istanbul ignore next: very unlikely and hard to test */
+ er => stream.emit('error', er)
+ )
+ return stream
+ }
+
+ // clone a git repo into a temp folder (or fetch and unpack if possible)
+ // handler accepts a directory, and returns a promise that resolves
+ // when we're done with it, at which point, cacache deletes it
+ //
+ // TODO: after cloning, create a tarball of the folder, and add to the cache
+ // with cacache.put.stream(), using a key that's deterministic based on the
+ // spec and repo, so that we don't ever clone the same thing multiple times.
+ [_clone] (handler, tarballOk = true) {
+ const o = { tmpPrefix: 'git-clone' }
+ const ref = this.resolvedSha || this.spec.gitCommittish
+ const h = this.spec.hosted
+ const resolved = this.resolved
+
+ // can be set manually to false to fall back to actual git clone
+ tarballOk = tarballOk &&
+ h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball
+
+ return cacache.tmp.withTmp(this.cache, o, tmp => {
+ // if we're resolved, and have a tarball url, shell out to RemoteFetcher
+ if (tarballOk) {
+ const nameat = this.spec.name ? `${this.spec.name}@` : ''
+ return new RemoteFetcher(h.tarball({ noCommittish: false }), {
+ ...this.opts,
+ pkgid: `git:${nameat}${this.resolved}`,
+ resolved: this.resolved,
+ integrity: null, // it'll always be different, if we have one
+ }).extract(tmp).then(() => handler(tmp), er => {
+ // fall back to ssh download if tarball fails
+ if (er.constructor.name.match(/^Http/))
+ return this[_clone](handler, false)
+ else
+ throw er
+ })
+ }
+
+ return (
+ h ? this[_cloneHosted](ref, tmp)
+ : this[_cloneRepo](this.spec.fetchSpec, ref, tmp)
+ ).then(sha => {
+ this.resolvedSha = sha
+ if (!this.resolved)
+ this[_addGitSha](sha)
+ })
+ .then(() => handler(tmp))
+ })
+ }
+
+ [_cloneHosted] (ref, tmp) {
+ const hosted = this.spec.hosted
+ const https = hosted.https()
+ return this[_cloneRepo](hosted.https({ noCommittish: true }), ref, tmp)
+ .catch(er => {
+ const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true })
+ /* istanbul ignore if - should be covered by the resolve() call */
+ if (!ssh)
+ throw er
+ return this[_cloneRepo](ssh, ref, tmp)
+ })
+ }
+
+ [_cloneRepo] (repo, ref, tmp) {
+ const { opts, spec } = this
+ return git.clone(repo, ref, tmp, { ...opts, spec })
+ }
+
+ manifest () {
+ if (this.package)
+ return Promise.resolve(this.package)
+
+ return this.spec.hosted && this.resolved
+ ? FileFetcher.prototype.manifest.apply(this)
+ : this[_clone](dir =>
+ readPackageJson(dir + '/package.json')
+ .then(mani => this.package = {
+ ...mani,
+ _integrity: this.integrity && String(this.integrity),
+ _resolved: this.resolved,
+ _from: this.from,
+ }))
+ }
+
+ packument () {
+ return FileFetcher.prototype.packument.apply(this)
+ }
+}
+module.exports = GitFetcher
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/index.js b/node_modules/libnpmpack/node_modules/pacote/lib/index.js
new file mode 100644
index 000000000..546ba960b
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/lib/index.js
@@ -0,0 +1,12 @@
+const { get } = require('./fetcher.js')
+module.exports = {
+ resolve: (spec, opts) => get(spec, opts).resolve(),
+ extract: (spec, dest, opts) => get(spec, opts).extract(dest),
+ manifest: (spec, opts) => get(spec, opts).manifest(),
+ tarball: (spec, opts) => get(spec, opts).tarball(),
+ packument: (spec, opts) => get(spec, opts).packument(),
+}
+module.exports.tarball.stream = (spec, handler, opts) =>
+ get(spec, opts).tarballStream(handler)
+module.exports.tarball.file = (spec, dest, opts) =>
+ get(spec, opts).tarballFile(dest)
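+
+// An illustrative usage sketch (not part of this module; the cache path is
+// an assumption, any writable directory works):
+//
+//   const pacote = require('pacote')
+//   pacote.manifest('pacote@^11.1.4', { cache: '/tmp/_cacache' })
+//     .then(mani => console.log(mani.name, mani.version))
+//   pacote.extract('pacote@^11.1.4', './unpacked', { cache: '/tmp/_cacache' })
+//     .then(({ resolved, integrity }) => console.log(resolved, integrity))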
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/registry.js b/node_modules/libnpmpack/node_modules/pacote/lib/registry.js
new file mode 100644
index 000000000..b9df03614
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/lib/registry.js
@@ -0,0 +1,159 @@
+const Fetcher = require('./fetcher.js')
+const RemoteFetcher = require('./remote.js')
+const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
+const pacoteVersion = require('../package.json').version
+const npa = require('npm-package-arg')
+const pickManifest = require('npm-pick-manifest')
+const ssri = require('ssri')
+const Minipass = require('minipass')
+
+// Corgis are cute. 🐕🐶
+const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
+const fullDoc = 'application/json'
+
+const fetch = require('npm-registry-fetch')
+
+// TODO: memoize reg requests, so we don't even have to check cache
+
+const _headers = Symbol('_headers')
+class RegistryFetcher extends Fetcher {
+ constructor (spec, opts) {
+ super(spec, opts)
+
+ // handle case when npm-package-arg guesses wrong.
+ if (this.spec.type === 'tag' &&
+ this.spec.rawSpec === '' &&
+ this.defaultTag !== 'latest')
+ this.spec = npa(`${this.spec.name}@${this.defaultTag}`)
+ this.registry = fetch.pickRegistry(spec, opts)
+ this.packumentUrl = this.registry.replace(/\/*$/, '/') +
+ this.spec.escapedName
+
+ // XXX pacote <=9 has some logic to ignore opts.resolved if
+ // the resolved URL doesn't go to the same registry.
+ // Consider reproducing that here, to throw away this.resolved
+ // in that case.
+ }
+
+ resolve () {
+ if (this.resolved)
+ return Promise.resolve(this.resolved)
+
+ // fetching the manifest sets resolved and (usually) integrity
+ return this.manifest().then(() => {
+ if (this.resolved)
+ return this.resolved
+
+ throw Object.assign(
+ new Error('Invalid package manifest: no `dist.tarball` field'),
+ { package: this.spec.toString() }
+ )
+ })
+ }
+
+ [_headers] () {
+ return {
+ // npm will override UA, but ensure that we always send *something*
+ 'user-agent': this.opts.userAgent ||
+ `pacote/${pacoteVersion} node/${process.version}`,
+ ...(this.opts.headers || {}),
+ 'pacote-version': pacoteVersion,
+ 'pacote-req-type': 'packument',
+ 'pacote-pkg-id': `registry:${this.spec.name}`,
+ accept: this.fullMetadata ? fullDoc : corgiDoc,
+ }
+ }
+
+ packument () {
+ // npm-registry-fetch the packument
+ // set the appropriate header for corgis if fullMetadata isn't set
+ // return the res.json() promise
+ return fetch(this.packumentUrl, {
+ ...this.opts,
+ headers: this[_headers](),
+ spec: this.spec,
+ // never check integrity for packuments themselves
+ integrity: null,
+ }).then(res => res.json().then(packument => {
+ packument._cached = res.headers.has('x-local-cache')
+ packument._contentLength = +res.headers.get('content-length')
+ return packument
+ })).catch(er => {
+ if (er.code === 'E404' && !this.fullMetadata) {
+ // possible that corgis are not supported by this registry
+ this.fullMetadata = true
+ return this.packument()
+ }
+ throw er
+ })
+ }
+
+ manifest () {
+ if (this.package)
+ return Promise.resolve(this.package)
+
+ return this.packument()
+ .then(packument => pickManifest(packument, this.spec.fetchSpec, {
+ ...this.opts,
+ defaultTag: this.defaultTag,
+ before: this.before,
+ }) /* XXX add ETARGET and E403 revalidation of cached packuments here */)
+ .then(mani => {
+ // add _resolved and _integrity from dist object
+ const { dist } = mani
+ if (dist) {
+ this.resolved = mani._resolved = dist.tarball
+ mani._from = this.from
+ const distIntegrity = dist.integrity ? ssri.parse(dist.integrity)
+ : dist.shasum ? ssri.fromHex(dist.shasum, 'sha1', {...this.opts})
+ : null
+ if (distIntegrity) {
+ if (!this.integrity)
+ this.integrity = distIntegrity
+ else if (!this.integrity.match(distIntegrity)) {
+ // only bork if they have algos in common.
+ // otherwise we end up breaking if we have saved a sha512
+ // previously for the tarball, but the manifest only
+ // provides a sha1, which is possible for older publishes.
+ // Otherwise, this is almost certainly a case of holding it
+ // wrong, and will result in weird or insecure behavior
+ // later on when building package tree.
+ for (const algo of Object.keys(this.integrity)) {
+ if (distIntegrity[algo]) {
+ throw Object.assign(new Error(
+ `Integrity checksum failed when using ${algo}: `+
+ `wanted ${this.integrity} but got ${distIntegrity}.`
+ ), { code: 'EINTEGRITY' })
+ }
+ }
+ // made it this far, the integrity is worthwhile. accept it.
+ // the setter here will take care of merging it into what we
+ // already had.
+ this.integrity = distIntegrity
+ }
+ }
+ }
+ if (this.integrity)
+ mani._integrity = String(this.integrity)
+ return this.package = mani
+ })
+ }
+
+ [_tarballFromResolved] () {
+ // we use a RemoteFetcher to get the actual tarball stream
+ return new RemoteFetcher(this.resolved, {
+ ...this.opts,
+ resolved: this.resolved,
+ pkgid: `registry:${this.spec.name}@${this.resolved}`,
+ })[_tarballFromResolved]()
+ }
+
+ get types () {
+ return [
+ 'tag',
+ 'version',
+ 'range',
+ ]
+ }
+}
+module.exports = RegistryFetcher
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/remote.js b/node_modules/libnpmpack/node_modules/pacote/lib/remote.js
new file mode 100644
index 000000000..81f14efbc
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/lib/remote.js
@@ -0,0 +1,72 @@
+const Fetcher = require('./fetcher.js')
+const FileFetcher = require('./file.js')
+const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
+const pacoteVersion = require('../package.json').version
+const fetch = require('npm-registry-fetch')
+const ssri = require('ssri')
+const Minipass = require('minipass')
+
+const _headers = Symbol('_headers')
+class RemoteFetcher extends Fetcher {
+ constructor (spec, opts) {
+ super(spec, opts)
+ this.resolved = this.spec.fetchSpec
+ // nam is a fermented pork sausage that is good to eat
+ const nameat = this.spec.name ? `${this.spec.name}@` : ''
+ this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}`
+ }
+
+ [_tarballFromResolved] () {
+ const stream = new Minipass()
+ const fetchOpts = {
+ ...this.opts,
+ headers: this[_headers](),
+ spec: this.spec,
+ integrity: this.integrity,
+ algorithms: [ this.pickIntegrityAlgorithm() ],
+ }
+ fetch(this.resolved, fetchOpts).then(res => {
+ const hash = res.headers.get('x-local-cache-hash')
+ if (hash) {
+ this.integrity = decodeURIComponent(hash)
+ }
+
+ res.body.on('error',
+ /* istanbul ignore next - exceedingly rare and hard to simulate */
+ er => stream.emit('error', er)
+ ).pipe(stream)
+ }).catch(er => stream.emit('error', er))
+
+ return stream
+ }
+
+ [_headers] () {
+ return {
+ // npm will override this, but ensure that we always send *something*
+ 'user-agent': this.opts.userAgent ||
+ `pacote/${pacoteVersion} node/${process.version}`,
+ ...(this.opts.headers || {}),
+ 'pacote-version': pacoteVersion,
+ 'pacote-req-type': 'tarball',
+ 'pacote-pkg-id': this.pkgid,
+ ...(this.integrity ? { 'pacote-integrity': String(this.integrity) }
+ : {}),
+ ...(this.opts.headers || {}),
+ }
+ }
+
+ get types () {
+ return ['remote']
+ }
+
+ // getting a packument and/or manifest is the same as with a file: spec.
+ // unpack the tarball stream, and then read from the package.json file.
+ packument () {
+ return FileFetcher.prototype.packument.apply(this)
+ }
+
+ manifest () {
+ return FileFetcher.prototype.manifest.apply(this)
+ }
+}
+module.exports = RemoteFetcher
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/util/cache-dir.js b/node_modules/libnpmpack/node_modules/pacote/lib/util/cache-dir.js
new file mode 100644
index 000000000..d5c0bf28f
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/lib/util/cache-dir.js
@@ -0,0 +1,12 @@
+const os = require('os')
+const {resolve} = require('path')
+
+module.exports = (fakePlatform = false) => {
+ const temp = os.tmpdir()
+ const uidOrPid = process.getuid ? process.getuid() : process.pid
+ const home = os.homedir() || resolve(temp, 'npm-' + uidOrPid)
+ const platform = fakePlatform || process.platform
+ const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm'
+ const cacheRoot = (platform === 'win32' && process.env.APPDATA) || home
+ return resolve(cacheRoot, cacheExtra)
+}
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/util/is-package-bin.js b/node_modules/libnpmpack/node_modules/pacote/lib/util/is-package-bin.js
new file mode 100644
index 000000000..35cf06427
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/lib/util/is-package-bin.js
@@ -0,0 +1,24 @@
+// Function to determine whether a path is in the package.bin set.
+// Used to prevent issues when people publish a package from a
+// windows machine, and then install with --no-bin-links.
+//
+// Note: this is not possible in remote or file fetchers, since
+// we don't have the manifest until AFTER we've unpacked. But the
+// main use case is registry fetching with git a distant second,
+// so that's an acceptable edge case to not handle.
+
+const binObj = (name, bin) =>
+ typeof bin === 'string' ? { [name]: bin } : bin
+
+const hasBin = (pkg, path) => {
+ const bin = binObj(pkg.name, pkg.bin)
+ const p = path.replace(/^[^\\\/]*\//, '')
+ for (const [k, v] of Object.entries(bin)) {
+ if (v === p)
+ return true
+ }
+ return false
+}
+
+module.exports = (pkg, path) =>
+ pkg && pkg.bin ? hasBin(pkg, path) : false
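+
+// An illustrative sketch (not part of this module): given a manifest like
+// { name: 'foo', bin: { foo: 'bin/foo.js' } }, a tarball entry path of
+// 'package/bin/foo.js' is a package bin (the leading 'package/' segment is
+// stripped before comparing), so the extractor marks it executable.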
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/util/npm.js b/node_modules/libnpmpack/node_modules/pacote/lib/util/npm.js
new file mode 100644
index 000000000..293695525
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/lib/util/npm.js
@@ -0,0 +1,9 @@
+// run an npm command
+const spawn = require('@npmcli/promise-spawn')
+
+module.exports = (npmBin, npmCommand, cwd, extra) => {
+ const isJS = npmBin.endsWith('.js')
+ const cmd = isJS ? process.execPath : npmBin
+ const args = (isJS ? [npmBin] : []).concat(npmCommand)
+ return spawn(cmd, args, { cwd, stdioString: true }, extra)
+}
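+
+// An illustrative sketch (not part of this module), mirroring how dir.js
+// runs a package's 'prepare' script:
+//
+//   const npm = require('./util/npm.js')
+//   npm('npm', ['run', 'prepare'], '/path/to/pkg',
+//     { message: 'directory preparation failed' })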
diff --git a/node_modules/libnpmpack/node_modules/pacote/lib/util/proc-log.js b/node_modules/libnpmpack/node_modules/pacote/lib/util/proc-log.js
new file mode 100644
index 000000000..b2bdd9dc9
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/lib/util/proc-log.js
@@ -0,0 +1,21 @@
+// default logger.
+// emits 'log' events on the process
+const LEVELS = [
+ 'notice',
+ 'error',
+ 'warn',
+ 'info',
+ 'verbose',
+ 'http',
+ 'silly',
+ 'pause',
+ 'resume'
+]
+
+const log = level => (...args) => process.emit('log', level, ...args)
+
+const logger = {}
+for (const level of LEVELS) {
+ logger[level] = log(level)
+}
+module.exports = logger
diff --git a/node_modules/libnpmpublish/node_modules/npm-package-arg/CHANGELOG.md b/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/CHANGELOG.md
index 1b3431acc..390a3a3c4 100644
--- a/node_modules/libnpmpublish/node_modules/npm-package-arg/CHANGELOG.md
+++ b/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/CHANGELOG.md
@@ -1,7 +1,33 @@
-# Change Log
+# Changelog
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+## [8.0.0](https://github.com/npm/npm-package-arg/compare/v7.0.0...v8.0.0) (2019-12-15)
+
+
+### ⚠ BREAKING CHANGES
+
+* Dropping support for node 6 and 8. It'll probably
+still work on those versions, but they are no longer supported or
+tested, since npm v7 is moving away from them.
+
+* drop support for node 6 and 8 ([ba85e68](https://github.com/npm/npm-package-arg/commit/ba85e68555d6270f672c3d59da17672f744d0376))
+
+<a name="7.0.0"></a>
+# [7.0.0](https://github.com/npm/npm-package-arg/compare/v6.1.1...v7.0.0) (2019-11-11)
+
+
+### deps
+
+* bump hosted-git-info to 3.0.2 ([68a4fc3](https://github.com/npm/npm-package-arg/commit/68a4fc3)), closes [/github.com/npm/hosted-git-info/pull/38#issuecomment-520243803](https://github.com//github.com/npm/hosted-git-info/pull/38/issues/issuecomment-520243803)
+
+
+### BREAKING CHANGES
+
+* this drops support for ancient node versions.
+
+
+
<a name="6.1.1"></a>
## [6.1.1](https://github.com/npm/npm-package-arg/compare/v6.1.0...v6.1.1) (2019-08-21)
diff --git a/node_modules/libnpmpublish/node_modules/npm-package-arg/LICENSE b/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/LICENSE
index 05eeeb88c..19cec97b1 100644
--- a/node_modules/libnpmpublish/node_modules/npm-package-arg/LICENSE
+++ b/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/LICENSE
@@ -1,6 +1,6 @@
The ISC License
-Copyright (c) Isaac Z. Schlueter
+Copyright (c) npm, Inc.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
diff --git a/node_modules/libnpmpublish/node_modules/npm-package-arg/README.md b/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/README.md
index 847341b21..847341b21 100644
--- a/node_modules/libnpmpublish/node_modules/npm-package-arg/README.md
+++ b/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/README.md
diff --git a/node_modules/libnpmpublish/node_modules/npm-package-arg/npa.js b/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/npa.js
index bf2c17cfd..d18168b75 100644
--- a/node_modules/libnpmpublish/node_modules/npm-package-arg/npa.js
+++ b/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/npa.js
@@ -12,7 +12,7 @@ function path () {
return path_
}
let validatePackageName
-let osenv
+let os
const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
@@ -70,7 +70,7 @@ function resolve (name, spec, where, arg) {
return fromAlias(res, where)
}
if (!HostedGit) HostedGit = require('hosted-git-info')
- const hosted = HostedGit.fromUrl(spec, {noGitPlus: true, noCommittish: true})
+ const hosted = HostedGit.fromUrl(spec, { noGitPlus: true, noCommittish: true })
if (hosted) {
return fromHostedGit(res, hosted)
} else if (spec && isURL.test(spec)) {
@@ -174,8 +174,8 @@ function fromFile (res, where) {
.replace(/^file:(?:[/]*([~./]))?/, '$1')
if (/^~[/]/.test(spec)) {
// this is needed for windows and for file:~/foo/bar
- if (!osenv) osenv = require('osenv')
- res.fetchSpec = resolvePath(osenv.home(), spec.slice(2))
+ if (!os) os = require('os')
+ res.fetchSpec = resolvePath(os.homedir(), spec.slice(2))
res.saveSpec = 'file:' + spec
} else {
res.fetchSpec = resolvePath(where, spec)
@@ -191,7 +191,7 @@ function fromFile (res, where) {
function fromHostedGit (res, hosted) {
res.type = 'git'
res.hosted = hosted
- res.saveSpec = hosted.toString({noGitPlus: false, noCommittish: false})
+ res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
return setGitCommittish(res, hosted.committish)
}
diff --git a/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/package.json b/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/package.json
new file mode 100644
index 000000000..71728e253
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/node_modules/npm-package-arg/package.json
@@ -0,0 +1,69 @@
+{
+ "_from": "npm-package-arg@^8.0.1",
+ "_id": "npm-package-arg@8.0.1",
+ "_inBundle": false,
+ "_integrity": "sha512-/h5Fm6a/exByzFSTm7jAyHbgOqErl9qSNJDQF32Si/ZzgwT2TERVxRxn3Jurw1wflgyVVAxnFR4fRHPM7y1ClQ==",
+ "_location": "/libnpmpack/pacote/npm-package-arg",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "npm-package-arg@^8.0.1",
+ "name": "npm-package-arg",
+ "escapedName": "npm-package-arg",
+ "rawSpec": "^8.0.1",
+ "saveSpec": null,
+ "fetchSpec": "^8.0.1"
+ },
+ "_requiredBy": [
+ "/libnpmpack/pacote"
+ ],
+ "_resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-8.0.1.tgz",
+ "_shasum": "9d76f8d7667b2373ffda60bb801a27ef71e3e270",
+ "_spec": "npm-package-arg@^8.0.1",
+ "_where": "/Users/claudiahdz/npm/cli/node_modules/libnpmpack/node_modules/pacote",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/npm-package-arg/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "hosted-git-info": "^3.0.2",
+ "semver": "^7.0.0",
+ "validate-npm-package-name": "^3.0.0"
+ },
+ "deprecated": false,
+ "description": "Parse the things that can be arguments to `npm install`",
+ "devDependencies": {
+ "tap": "^14.10.2"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "files": [
+ "npa.js"
+ ],
+ "homepage": "https://github.com/npm/npm-package-arg",
+ "license": "ISC",
+ "main": "npa.js",
+ "name": "npm-package-arg",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/npm-package-arg.git"
+ },
+ "scripts": {
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags",
+ "preversion": "npm test",
+ "snap": "tap",
+ "test": "tap"
+ },
+ "version": "8.0.1"
+}
diff --git a/node_modules/libnpmpack/node_modules/pacote/package.json b/node_modules/libnpmpack/node_modules/pacote/package.json
new file mode 100644
index 000000000..2e53042d9
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/pacote/package.json
@@ -0,0 +1,107 @@
+{
+ "_from": "pacote@^11.1.4",
+ "_id": "pacote@11.1.4",
+ "_inBundle": false,
+ "_integrity": "sha512-eUGJvSSpWFZKn3z8gig/HgnBmUl6gIWByIIaHzSyEr3tOWX0w8tFEADXtpu8HGv5E0ShCeTP6enRq8iHKCHSvw==",
+ "_location": "/libnpmpack/pacote",
+ "_phantomChildren": {
+ "hosted-git-info": "3.0.2",
+ "semver": "7.1.3",
+ "validate-npm-package-name": "3.0.0"
+ },
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "pacote@^11.1.4",
+ "name": "pacote",
+ "escapedName": "pacote",
+ "rawSpec": "^11.1.4",
+ "saveSpec": null,
+ "fetchSpec": "^11.1.4"
+ },
+ "_requiredBy": [
+ "/libnpmpack"
+ ],
+ "_resolved": "https://registry.npmjs.org/pacote/-/pacote-11.1.4.tgz",
+ "_shasum": "5529a453c59881b7f059da8af6903b0f79c124b2",
+ "_spec": "pacote@^11.1.4",
+ "_where": "/Users/claudiahdz/npm/cli/node_modules/libnpmpack",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "https://izs.me"
+ },
+ "bin": {
+ "pacote": "lib/bin.js"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/pacote/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "@npmcli/git": "^2.0.1",
+ "@npmcli/installed-package-contents": "^1.0.5",
+ "@npmcli/promise-spawn": "^1.1.0",
+ "cacache": "^15.0.0",
+ "chownr": "^1.1.4",
+ "fs-minipass": "^2.1.0",
+ "infer-owner": "^1.0.4",
+ "lru-cache": "^5.1.1",
+ "minipass": "^3.0.1",
+ "minipass-fetch": "^1.2.1",
+ "mkdirp": "^1.0.3",
+ "npm-package-arg": "^8.0.1",
+ "npm-packlist": "^2.1.0",
+ "npm-pick-manifest": "^6.0.0",
+ "npm-registry-fetch": "^8.0.0",
+ "promise-inflight": "^1.0.1",
+ "promise-retry": "^1.1.1",
+ "read-package-json-fast": "^1.1.3",
+ "rimraf": "^2.7.1",
+ "semver": "^7.1.3",
+ "ssri": "^8.0.0",
+ "tar": "^6.0.1",
+ "which": "^2.0.2"
+ },
+ "deprecated": false,
+ "description": "JavaScript package downloader",
+ "devDependencies": {
+ "mutate-fs": "^2.1.1",
+ "npm-registry-mock": "^1.3.1",
+ "require-inject": "^1.4.4",
+ "tap": "^14.10.6"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "files": [
+ "lib/**/*.js"
+ ],
+ "homepage": "https://github.com/npm/pacote#readme",
+ "keywords": [
+ "packages",
+ "npm",
+ "git"
+ ],
+ "license": "ISC",
+ "main": "lib/index.js",
+ "name": "pacote",
+ "repository": {
+ "type": "git",
+ "url": "git+ssh://git@github.com/npm/pacote.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --follow-tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "snap": "tap",
+ "test": "tap"
+ },
+ "tap": {
+ "timeout": 300,
+ "check-coverage": true,
+ "coverage-map": "map.js",
+ "esm": false
+ },
+ "version": "11.1.4"
+}
diff --git a/node_modules/libnpmpack/node_modules/rimraf/LICENSE b/node_modules/libnpmpack/node_modules/rimraf/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/rimraf/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libnpmpack/node_modules/rimraf/README.md b/node_modules/libnpmpack/node_modules/rimraf/README.md
new file mode 100644
index 000000000..423b8cf85
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/rimraf/README.md
@@ -0,0 +1,101 @@
+[![Build Status](https://travis-ci.org/isaacs/rimraf.svg?branch=master)](https://travis-ci.org/isaacs/rimraf) [![Dependency Status](https://david-dm.org/isaacs/rimraf.svg)](https://david-dm.org/isaacs/rimraf) [![devDependency Status](https://david-dm.org/isaacs/rimraf/dev-status.svg)](https://david-dm.org/isaacs/rimraf#info=devDependencies)
+
+The [UNIX command](http://en.wikipedia.org/wiki/Rm_(Unix)) `rm -rf` for node.
+
+Install with `npm install rimraf`, or just drop rimraf.js somewhere.
+
+## API
+
+`rimraf(f, [opts], callback)`
+
+The first parameter will be interpreted as a globbing pattern for files. If you
+want to disable globbing you can do so with `opts.disableGlob` (defaults to
+`false`). This might be handy, for instance, if you have filenames that contain
+globbing wildcard characters.
+
+The callback will be called with an error if there is one. Certain
+errors are handled for you:
+
+* Windows: `EBUSY` and `ENOTEMPTY` - rimraf will back off a maximum of
+ `opts.maxBusyTries` times before giving up, adding 100ms of wait
+ between each attempt. The default `maxBusyTries` is 3.
+* `ENOENT` - If the file doesn't exist, rimraf will return
+ successfully, since your desired outcome is already the case.
+* `EMFILE` - Since `readdir` requires opening a file descriptor, it's
+ possible to hit `EMFILE` if too many file descriptors are in use.
+ In the sync case, there's nothing to be done for this. But in the
+ async case, rimraf will gradually back off with timeouts up to
+ `opts.emfileWait` ms, which defaults to 1000.
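+
+For example, a minimal async usage sketch (the `build` path is made up):
+
+```javascript
+var rimraf = require('rimraf')
+
+rimraf('build', function (er) {
+  if (er)
+    throw er
+  // 'build' and everything under it is now gone (or never existed)
+})
+```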
+
+## options
+
+* unlink, chmod, stat, lstat, rmdir, readdir,
+ unlinkSync, chmodSync, statSync, lstatSync, rmdirSync, readdirSync
+
+ In order to use a custom file system library, you can override
+ specific fs functions on the options object.
+
+ If any of these functions are present on the options object, then
+ the supplied function will be used instead of the default fs
+ method.
+
+ Sync methods are only relevant for `rimraf.sync()`, of course.
+
+ For example:
+
+ ```javascript
+ var myCustomFS = require('some-custom-fs')
+
+ rimraf('some-thing', myCustomFS, callback)
+ ```
+
+* maxBusyTries
+
+ If an `EBUSY`, `ENOTEMPTY`, or `EPERM` error code is encountered
+ on Windows systems, then rimraf will retry with a linear backoff
+ wait of 100ms longer on each try. The default maxBusyTries is 3.
+
+ Only relevant for async usage.
+
+* emfileWait
+
+ If an `EMFILE` error is encountered, then rimraf will retry
+ repeatedly with a linear backoff of 1ms longer on each try, until
+ the timeout counter hits this max. The default limit is 1000.
+
+ If you repeatedly encounter `EMFILE` errors, then consider using
+ [graceful-fs](http://npm.im/graceful-fs) in your program.
+
+ Only relevant for async usage.
+
+* glob
+
+ Set to `false` to disable [glob](http://npm.im/glob) pattern
+ matching.
+
+ Set to an object to pass options to the glob module. The default
+ glob options are `{ nosort: true, silent: true }`.
+
+  Glob version 7 is used in this module.
+
+ Relevant for both sync and async usage.
+
+* disableGlob
+
+ Set to any non-falsey value to disable globbing entirely.
+ (Equivalent to setting `glob: false`.)
+
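+A sketch combining a couple of the options above (the paths and values are
+only illustrative):
+
+```javascript
+var rimraf = require('rimraf')
+
+// pass options through to glob, e.g. to also match dotfiles
+rimraf('tmp/**/*', { glob: { nosort: true, dot: true } }, function (er) {
+  if (er)
+    throw er
+})
+
+// treat the argument as a literal path: no glob expansion at all
+rimraf('a-[literal]-path', { disableGlob: true }, function (er) {
+  if (er)
+    throw er
+})
+```
+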
+## rimraf.sync
+
+It can remove stuff synchronously, too. But that's not so good: it ties up
+the JavaScript thread while it runs. Use the async API. It's better.
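+
+A synchronous sketch (same illustrative path as above):
+
+```javascript
+var rimraf = require('rimraf')
+
+// blocks until the directory tree is gone (or an error is thrown)
+rimraf.sync('build', { disableGlob: true })
+```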
+
+## CLI
+
+If installed with `npm install rimraf -g`, it can be used as a global
+command `rimraf <path> [<path> ...]`, which is useful for cross-platform support.
+
+## mkdirp
+
+If you need to create a directory recursively, check out
+[mkdirp](https://github.com/substack/node-mkdirp).
diff --git a/node_modules/libnpmpack/node_modules/rimraf/bin.js b/node_modules/libnpmpack/node_modules/rimraf/bin.js
new file mode 100755
index 000000000..0d1e17be7
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/rimraf/bin.js
@@ -0,0 +1,50 @@
+#!/usr/bin/env node
+
+var rimraf = require('./')
+
+var help = false
+var dashdash = false
+var noglob = false
+var args = process.argv.slice(2).filter(function(arg) {
+ if (dashdash)
+ return !!arg
+ else if (arg === '--')
+ dashdash = true
+ else if (arg === '--no-glob' || arg === '-G')
+ noglob = true
+ else if (arg === '--glob' || arg === '-g')
+ noglob = false
+ else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/))
+ help = true
+ else
+ return !!arg
+})
+
+if (help || args.length === 0) {
+ // If they didn't ask for help, then this is not a "success"
+ var log = help ? console.log : console.error
+ log('Usage: rimraf <path> [<path> ...]')
+ log('')
+ log(' Deletes all files and folders at "path" recursively.')
+ log('')
+ log('Options:')
+ log('')
+ log(' -h, --help Display this usage info')
+ log(' -G, --no-glob Do not expand glob patterns in arguments')
+ log(' -g, --glob Expand glob patterns in arguments (default)')
+ process.exit(help ? 0 : 1)
+} else
+ go(0)
+
+function go (n) {
+ if (n >= args.length)
+ return
+ var options = {}
+ if (noglob)
+ options = { glob: false }
+ rimraf(args[n], options, function (er) {
+ if (er)
+ throw er
+ go(n+1)
+ })
+}
diff --git a/node_modules/libnpmpack/node_modules/rimraf/package.json b/node_modules/libnpmpack/node_modules/rimraf/package.json
new file mode 100644
index 000000000..a61c985d5
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/rimraf/package.json
@@ -0,0 +1,67 @@
+{
+ "_from": "rimraf@^2.7.1",
+ "_id": "rimraf@2.7.1",
+ "_inBundle": false,
+ "_integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
+ "_location": "/libnpmpack/rimraf",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "rimraf@^2.7.1",
+ "name": "rimraf",
+ "escapedName": "rimraf",
+ "rawSpec": "^2.7.1",
+ "saveSpec": null,
+ "fetchSpec": "^2.7.1"
+ },
+ "_requiredBy": [
+ "/libnpmpack/pacote"
+ ],
+ "_resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
+ "_shasum": "35797f13a7fdadc566142c29d4f07ccad483e3ec",
+ "_spec": "rimraf@^2.7.1",
+ "_where": "/Users/claudiahdz/npm/cli/node_modules/libnpmpack/node_modules/pacote",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bin": {
+ "rimraf": "bin.js"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/rimraf/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "glob": "^7.1.3"
+ },
+ "deprecated": false,
+ "description": "A deep deletion module for node (like `rm -rf`)",
+ "devDependencies": {
+ "mkdirp": "^0.5.1",
+ "tap": "^12.1.1"
+ },
+ "files": [
+ "LICENSE",
+ "README.md",
+ "bin.js",
+ "rimraf.js"
+ ],
+ "homepage": "https://github.com/isaacs/rimraf#readme",
+ "license": "ISC",
+ "main": "rimraf.js",
+ "name": "rimraf",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/rimraf.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js"
+ },
+ "version": "2.7.1"
+}
diff --git a/node_modules/libnpmpack/node_modules/rimraf/rimraf.js b/node_modules/libnpmpack/node_modules/rimraf/rimraf.js
new file mode 100644
index 000000000..a90ad029f
--- /dev/null
+++ b/node_modules/libnpmpack/node_modules/rimraf/rimraf.js
@@ -0,0 +1,372 @@
+module.exports = rimraf
+rimraf.sync = rimrafSync
+
+var assert = require("assert")
+var path = require("path")
+var fs = require("fs")
+var glob = undefined
+try {
+ glob = require("glob")
+} catch (_err) {
+ // treat glob as optional.
+}
+var _0666 = parseInt('666', 8)
+
+var defaultGlobOpts = {
+ nosort: true,
+ silent: true
+}
+
+// for EMFILE handling
+var timeout = 0
+
+var isWindows = (process.platform === "win32")
+
+function defaults (options) {
+ var methods = [
+ 'unlink',
+ 'chmod',
+ 'stat',
+ 'lstat',
+ 'rmdir',
+ 'readdir'
+ ]
+ methods.forEach(function(m) {
+ options[m] = options[m] || fs[m]
+ m = m + 'Sync'
+ options[m] = options[m] || fs[m]
+ })
+
+ options.maxBusyTries = options.maxBusyTries || 3
+ options.emfileWait = options.emfileWait || 1000
+ if (options.glob === false) {
+ options.disableGlob = true
+ }
+ if (options.disableGlob !== true && glob === undefined) {
+ throw Error('glob dependency not found, set `options.disableGlob = true` if intentional')
+ }
+ options.disableGlob = options.disableGlob || false
+ options.glob = options.glob || defaultGlobOpts
+}
+
+function rimraf (p, options, cb) {
+ if (typeof options === 'function') {
+ cb = options
+ options = {}
+ }
+
+ assert(p, 'rimraf: missing path')
+ assert.equal(typeof p, 'string', 'rimraf: path should be a string')
+ assert.equal(typeof cb, 'function', 'rimraf: callback function required')
+ assert(options, 'rimraf: invalid options argument provided')
+ assert.equal(typeof options, 'object', 'rimraf: options should be object')
+
+ defaults(options)
+
+ var busyTries = 0
+ var errState = null
+ var n = 0
+
+ if (options.disableGlob || !glob.hasMagic(p))
+ return afterGlob(null, [p])
+
+ options.lstat(p, function (er, stat) {
+ if (!er)
+ return afterGlob(null, [p])
+
+ glob(p, options.glob, afterGlob)
+ })
+
+ function next (er) {
+ errState = errState || er
+ if (--n === 0)
+ cb(errState)
+ }
+
+ function afterGlob (er, results) {
+ if (er)
+ return cb(er)
+
+ n = results.length
+ if (n === 0)
+ return cb()
+
+ results.forEach(function (p) {
+ rimraf_(p, options, function CB (er) {
+ if (er) {
+ if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") &&
+ busyTries < options.maxBusyTries) {
+ busyTries ++
+ var time = busyTries * 100
+ // try again, with the same exact callback as this one.
+ return setTimeout(function () {
+ rimraf_(p, options, CB)
+ }, time)
+ }
+
+ // this one won't happen if graceful-fs is used.
+ if (er.code === "EMFILE" && timeout < options.emfileWait) {
+ return setTimeout(function () {
+ rimraf_(p, options, CB)
+ }, timeout ++)
+ }
+
+ // already gone
+ if (er.code === "ENOENT") er = null
+ }
+
+ timeout = 0
+ next(er)
+ })
+ })
+ }
+}
+
+// Two possible strategies.
+// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
+// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
+//
+// Both result in an extra syscall when you guess wrong. However, there
+// are likely far more normal files in the world than directories. This
+// is based on the assumption that the average number of files per
+// directory is >= 1.
+//
+// If anyone ever complains about this, then I guess the strategy could
+// be made configurable somehow. But until then, YAGNI.
+function rimraf_ (p, options, cb) {
+ assert(p)
+ assert(options)
+ assert(typeof cb === 'function')
+
+ // sunos lets the root user unlink directories, which is... weird.
+ // so we have to lstat here and make sure it's not a dir.
+ options.lstat(p, function (er, st) {
+ if (er && er.code === "ENOENT")
+ return cb(null)
+
+ // Windows can EPERM on stat. Life is suffering.
+ if (er && er.code === "EPERM" && isWindows)
+ fixWinEPERM(p, options, er, cb)
+
+ if (st && st.isDirectory())
+ return rmdir(p, options, er, cb)
+
+ options.unlink(p, function (er) {
+ if (er) {
+ if (er.code === "ENOENT")
+ return cb(null)
+ if (er.code === "EPERM")
+ return (isWindows)
+ ? fixWinEPERM(p, options, er, cb)
+ : rmdir(p, options, er, cb)
+ if (er.code === "EISDIR")
+ return rmdir(p, options, er, cb)
+ }
+ return cb(er)
+ })
+ })
+}
+
+function fixWinEPERM (p, options, er, cb) {
+ assert(p)
+ assert(options)
+ assert(typeof cb === 'function')
+ if (er)
+ assert(er instanceof Error)
+
+ options.chmod(p, _0666, function (er2) {
+ if (er2)
+ cb(er2.code === "ENOENT" ? null : er)
+ else
+ options.stat(p, function(er3, stats) {
+ if (er3)
+ cb(er3.code === "ENOENT" ? null : er)
+ else if (stats.isDirectory())
+ rmdir(p, options, er, cb)
+ else
+ options.unlink(p, cb)
+ })
+ })
+}
+
+function fixWinEPERMSync (p, options, er) {
+ assert(p)
+ assert(options)
+ if (er)
+ assert(er instanceof Error)
+
+ try {
+ options.chmodSync(p, _0666)
+ } catch (er2) {
+ if (er2.code === "ENOENT")
+ return
+ else
+ throw er
+ }
+
+ try {
+ var stats = options.statSync(p)
+ } catch (er3) {
+ if (er3.code === "ENOENT")
+ return
+ else
+ throw er
+ }
+
+ if (stats.isDirectory())
+ rmdirSync(p, options, er)
+ else
+ options.unlinkSync(p)
+}
+
+function rmdir (p, options, originalEr, cb) {
+ assert(p)
+ assert(options)
+ if (originalEr)
+ assert(originalEr instanceof Error)
+ assert(typeof cb === 'function')
+
+ // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
+ // if we guessed wrong, and it's not a directory, then
+ // raise the original error.
+ options.rmdir(p, function (er) {
+ if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM"))
+ rmkids(p, options, cb)
+ else if (er && er.code === "ENOTDIR")
+ cb(originalEr)
+ else
+ cb(er)
+ })
+}
+
+function rmkids(p, options, cb) {
+ assert(p)
+ assert(options)
+ assert(typeof cb === 'function')
+
+ options.readdir(p, function (er, files) {
+ if (er)
+ return cb(er)
+ var n = files.length
+ if (n === 0)
+ return options.rmdir(p, cb)
+ var errState
+ files.forEach(function (f) {
+ rimraf(path.join(p, f), options, function (er) {
+ if (errState)
+ return
+ if (er)
+ return cb(errState = er)
+ if (--n === 0)
+ options.rmdir(p, cb)
+ })
+ })
+ })
+}
+
+// this looks simpler, and is strictly *faster*, but will
+// tie up the JavaScript thread and fail on excessively
+// deep directory trees.
+function rimrafSync (p, options) {
+ options = options || {}
+ defaults(options)
+
+ assert(p, 'rimraf: missing path')
+ assert.equal(typeof p, 'string', 'rimraf: path should be a string')
+ assert(options, 'rimraf: missing options')
+ assert.equal(typeof options, 'object', 'rimraf: options should be object')
+
+ var results
+
+ if (options.disableGlob || !glob.hasMagic(p)) {
+ results = [p]
+ } else {
+ try {
+ options.lstatSync(p)
+ results = [p]
+ } catch (er) {
+ results = glob.sync(p, options.glob)
+ }
+ }
+
+ if (!results.length)
+ return
+
+ for (var i = 0; i < results.length; i++) {
+ var p = results[i]
+
+ try {
+ var st = options.lstatSync(p)
+ } catch (er) {
+ if (er.code === "ENOENT")
+ return
+
+ // Windows can EPERM on stat. Life is suffering.
+ if (er.code === "EPERM" && isWindows)
+ fixWinEPERMSync(p, options, er)
+ }
+
+ try {
+ // sunos lets the root user unlink directories, which is... weird.
+ if (st && st.isDirectory())
+ rmdirSync(p, options, null)
+ else
+ options.unlinkSync(p)
+ } catch (er) {
+ if (er.code === "ENOENT")
+ return
+ if (er.code === "EPERM")
+ return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)
+ if (er.code !== "EISDIR")
+ throw er
+
+ rmdirSync(p, options, er)
+ }
+ }
+}
+
+function rmdirSync (p, options, originalEr) {
+ assert(p)
+ assert(options)
+ if (originalEr)
+ assert(originalEr instanceof Error)
+
+ try {
+ options.rmdirSync(p)
+ } catch (er) {
+ if (er.code === "ENOENT")
+ return
+ if (er.code === "ENOTDIR")
+ throw originalEr
+ if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")
+ rmkidsSync(p, options)
+ }
+}
+
+function rmkidsSync (p, options) {
+ assert(p)
+ assert(options)
+ options.readdirSync(p).forEach(function (f) {
+ rimrafSync(path.join(p, f), options)
+ })
+
+ // We only end up here once we got ENOTEMPTY at least once, and
+ // at this point, we are guaranteed to have removed all the kids.
+ // So, we know that it won't be ENOENT or ENOTDIR or anything else.
+ // try really hard to delete stuff on windows, because it has a
+ // PROFOUNDLY annoying habit of not closing handles promptly when
+ // files are deleted, resulting in spurious ENOTEMPTY errors.
+ var retries = isWindows ? 100 : 1
+ var i = 0
+ do {
+ var threw = true
+ try {
+ var ret = options.rmdirSync(p, options)
+ threw = false
+ return ret
+ } finally {
+ if (++i < retries && threw)
+ continue
+ }
+ } while (true)
+}
diff --git a/node_modules/libnpmpack/package.json b/node_modules/libnpmpack/package.json
new file mode 100644
index 000000000..6ed78d9ac
--- /dev/null
+++ b/node_modules/libnpmpack/package.json
@@ -0,0 +1,100 @@
+{
+ "_from": "libnpmpack@^2.0.0",
+ "_id": "libnpmpack@2.0.0",
+ "_inBundle": false,
+ "_integrity": "sha512-w4wB8ZQUceUANUEiSYqi4nHlqFxhzLXWmhVbDt3NlyZVkmblTokR4xK9VfihLXJhdARQxeILx/HxReeqas1KZQ==",
+ "_location": "/libnpmpack",
+ "_phantomChildren": {
+ "@npmcli/git": "2.0.1",
+ "@npmcli/installed-package-contents": "1.0.5",
+ "@npmcli/promise-spawn": "1.1.0",
+ "cacache": "15.0.0",
+ "fs-minipass": "2.1.0",
+ "glob": "7.1.4",
+ "hosted-git-info": "3.0.2",
+ "infer-owner": "1.0.4",
+ "lru-cache": "5.1.1",
+ "minipass": "3.1.1",
+ "minipass-fetch": "1.2.1",
+ "npm-packlist": "2.1.0",
+ "npm-pick-manifest": "6.0.0",
+ "npm-registry-fetch": "8.0.0",
+ "promise-inflight": "1.0.1",
+ "promise-retry": "1.1.1",
+ "read-package-json-fast": "1.1.3",
+ "semver": "7.1.3",
+ "ssri": "8.0.0",
+ "tar": "6.0.1",
+ "validate-npm-package-name": "3.0.0",
+ "which": "2.0.2"
+ },
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "libnpmpack@^2.0.0",
+ "name": "libnpmpack",
+ "escapedName": "libnpmpack",
+ "rawSpec": "^2.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^2.0.0"
+ },
+ "_requiredBy": [
+ "/libnpmpublish"
+ ],
+ "_resolved": "https://registry.npmjs.org/libnpmpack/-/libnpmpack-2.0.0.tgz",
+ "_shasum": "6b7f050f8dde248776c92495147f623dc711a221",
+ "_spec": "libnpmpack@^2.0.0",
+ "_where": "/Users/claudiahdz/npm/cli/node_modules/libnpmpublish",
+ "author": {
+ "name": "npm Inc.",
+ "email": "support@npmjs.com"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/libnpmpack/issues"
+ },
+ "bundleDependencies": false,
+ "contributors": [
+ {
+ "name": "Claudia Hernández",
+ "email": "claudia@npmjs.com"
+ }
+ ],
+ "dependencies": {
+ "@npmcli/run-script": "^1.3.0",
+ "npm-package-arg": "^8.0.0",
+ "pacote": "^11.1.4"
+ },
+ "deprecated": false,
+ "description": "Programmatic API for the bits behind npm pack",
+ "devDependencies": {
+ "nock": "^12.0.2",
+ "standard": "^14.3.1",
+ "tap": "^14.10.6"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "files": [
+ "*.js"
+ ],
+ "homepage": "https://npmjs.com/package/libnpmpack",
+ "license": "ISC",
+ "main": "index.js",
+ "name": "libnpmpack",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/libnpmpack.git"
+ },
+ "scripts": {
+ "lint": "standard",
+ "posttest": "npm run lint",
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags",
+ "preversion": "npm test",
+ "test": "tap"
+ },
+ "tap": {
+ "check-coverage": true
+ },
+ "version": "2.0.0"
+}
diff --git a/node_modules/libnpmpublish/.travis.yml b/node_modules/libnpmpublish/.travis.yml
deleted file mode 100644
index db5ea8b01..000000000
--- a/node_modules/libnpmpublish/.travis.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-language: node_js
-sudo: false
-node_js:
- - "10"
- - "9"
- - "8"
- - "6"
diff --git a/node_modules/libnpmpublish/CHANGELOG.md b/node_modules/libnpmpublish/CHANGELOG.md
index 974b3fd30..57d21f840 100644
--- a/node_modules/libnpmpublish/CHANGELOG.md
+++ b/node_modules/libnpmpublish/CHANGELOG.md
@@ -1,7 +1,43 @@
# Change Log
+<a name="3.0.1"></a>
+## [3.0.1](https://github.com/npm/libnpmpublish/compare/v3.0.0...v3.0.1) (2020-03-27)
+
+### Features
+
+* [`3e02307`](https://github.com/npm/libnpmpublish/commit/3e02307) chore: pack tarballs using libnpmpack ([@claudiahdz](https://github.com/claudiahdz))
+
+<a name="3.0.0"></a>
+# [3.0.0](https://github.com/npm/libnpmpublish/compare/v2.0.0...v3.0.0) (2020-03-09)
+
+### Breaking Changes
+
+* [`ecaeb0b`](https://github.com/npm/libnpmpublish/commit/ecaeb0b) feat: pack tarballs from source code using pacote v10 ([@claudiahdz](https://github.com/claudiahdz))
+
+* [`f6bf2b8`](https://github.com/npm/libnpmpublish/commit/f6bf2b8) feat: unpublish code refactor ([@claudiahdz](https://github.com/claudiahdz))
+
+### Miscellaneous
+
+* [`5cea10f`](https://github.com/npm/libnpmpublish/commit/5cea10f) chore: basic project updates ([@claudiahdz](https://github.com/claudiahdz))
+* [`3010b93`](https://github.com/npm/libnpmpublish/commit/3010b93) chore: cleanup badges + contributing ([@ruyadorno](https://github.com/ruyadorno))
+
+---
+
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+## [2.0.0](https://github.com/npm/libnpmpublish/compare/v1.1.3...v2.0.0) (2019-09-18)
+
+
+### ⚠ BREAKING CHANGES
+
+* This drops support for Node.js version 6.
+
+### Bug Fixes
+
+* audit warnings, drop support for Node.js v6 ([d9a1fb6](https://github.com/npm/libnpmpublish/commit/d9a1fb6))
+
+### [1.1.3](https://github.com/npm/libnpmpublish/compare/v1.1.2...v1.1.3) (2019-09-18)
+
<a name="1.1.2"></a>
## [1.1.2](https://github.com/npm/libnpmpublish/compare/v1.1.1...v1.1.2) (2019-07-16)
diff --git a/node_modules/libnpmpublish/CODE_OF_CONDUCT.md b/node_modules/libnpmpublish/CODE_OF_CONDUCT.md
deleted file mode 100644
index aeb72f598..000000000
--- a/node_modules/libnpmpublish/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,151 +0,0 @@
-# Code of Conduct
-
-## When Something Happens
-
-If you see a Code of Conduct violation, follow these steps:
-
-1. Let the person know that what they did is not appropriate and ask them to stop and/or edit their message(s) or commits.
-2. That person should immediately stop the behavior and correct the issue.
-3. If this doesn’t happen, or if you're uncomfortable speaking up, [contact the maintainers](#contacting-maintainers).
-4. As soon as available, a maintainer will look into the issue, and take [further action (see below)](#further-enforcement), starting with a warning, then temporary block, then long-term repo or organization ban.
-
-When reporting, please include any relevant details, links, screenshots, context, or other information that may be used to better understand and resolve the situation.
-
-**The maintainer team will prioritize the well-being and comfort of the recipients of the violation over the comfort of the violator.** See [some examples below](#enforcement-examples).
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as contributors and maintainers of this project pledge to making participation in our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, technical preferences, nationality, personal appearance, race, religion, or sexual identity and orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment include:
-
- * Using welcoming and inclusive language.
- * Being respectful of differing viewpoints and experiences.
- * Gracefully accepting constructive feedback.
- * Focusing on what is best for the community.
- * Showing empathy and kindness towards other community members.
- * Encouraging and raising up your peers in the project so you can all bask in hacks and glory.
-
-Examples of unacceptable behavior by participants include:
-
- * The use of sexualized language or imagery and unwelcome sexual attention or advances, including when simulated online. The only exception to sexual topics is channels/spaces specifically for topics of sexual identity.
- * Casual mention of slavery or indentured servitude and/or false comparisons of one's occupation or situation to slavery. Please consider using or asking about alternate terminology when referring to such metaphors in technology.
- * Making light of/making mocking comments about trigger warnings and content warnings.
- * Trolling, insulting/derogatory comments, and personal or political attacks.
- * Public or private harassment, deliberate intimidation, or threats.
- * Publishing others' private information, such as a physical or electronic address, without explicit permission. This includes any sort of "outing" of any aspect of someone's identity without their consent.
- * Publishing private screenshots or quotes of interactions in the context of this project without all quoted users' *explicit* consent.
- * Publishing of private communication that doesn't have to do with reporting harrassment.
- * Any of the above even when [presented as "ironic" or "joking"](https://en.wikipedia.org/wiki/Hipster_racism).
- * Any attempt to present "reverse-ism" versions of the above as violations. Examples of reverse-isms are "reverse racism", "reverse sexism", "heterophobia", and "cisphobia".
- * Unsolicited explanations under the assumption that someone doesn't already know it. Ask before you teach! Don't assume what people's knowledge gaps are.
- * [Feigning or exaggerating surprise](https://www.recurse.com/manual#no-feigned-surprise) when someone admits to not knowing something.
- * "[Well-actuallies](https://www.recurse.com/manual#no-well-actuallys)"
- * Other conduct which could reasonably be considered inappropriate in a professional or community setting.
-
-## Scope
-
-This Code of Conduct applies both within spaces involving this project and in other spaces involving community members. This includes the repository, its Pull Requests and Issue tracker, its Twitter community, private email communications in the context of the project, and any events where members of the project are participating, as well as adjacent communities and venues affecting the project's members.
-
-Depending on the violation, the maintainers may decide that violations of this code of conduct that have happened outside of the scope of the community may deem an individual unwelcome, and take appropriate action to maintain the comfort and safety of its members.
-
-### Other Community Standards
-
-As a project on GitHub, this project is additionally covered by the [GitHub Community Guidelines](https://help.github.com/articles/github-community-guidelines/).
-
-Additionally, as a project hosted on npm, is is covered by [npm, Inc's Code of Conduct](https://www.npmjs.com/policies/conduct).
-
-Enforcement of those guidelines after violations overlapping with the above are the responsibility of the entities, and enforcement may happen in any or all of the services/communities.
-
-## Maintainer Enforcement Process
-
-Once the maintainers get involved, they will follow a documented series of steps and do their best to preserve the well-being of project members. This section covers actual concrete steps.
-
-### Contacting Maintainers
-
-You may get in touch with the maintainer team through any of the following methods:
-
- * Through email:
- * [kzm@zkat.tech](mailto:kzm@zkat.tech) (Kat Marchán)
-
- * Through Twitter:
- * [@maybekatz](https://twitter.com/maybekatz) (Kat Marchán)
-
-### Further Enforcement
-
-If you've already followed the [initial enforcement steps](#enforcement), these are the steps maintainers will take for further enforcement, as needed:
-
- 1. Repeat the request to stop.
- 2. If the person doubles down, they will have offending messages removed or edited by a maintainers given an official warning. The PR or Issue may be locked.
- 3. If the behavior continues or is repeated later, the person will be blocked from participating for 24 hours.
- 4. If the behavior continues or is repeated after the temporary block, a long-term (6-12mo) ban will be used.
-
-On top of this, maintainers may remove any offending messages, images, contributions, etc, as they deem necessary.
-
-Maintainers reserve full rights to skip any of these steps, at their discretion, if the violation is considered to be a serious and/or immediate threat to the health and well-being of members of the community. These include any threats, serious physical or verbal attacks, and other such behavior that would be completely unacceptable in any social setting that puts our members at risk.
-
-Members expelled from events or venues with any sort of paid attendance will not be refunded.
-
-### Who Watches the Watchers?
-
-Maintainers and other leaders who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. These may include anything from removal from the maintainer team to a permanent ban from the community.
-
-Additionally, as a project hosted on both GitHub and npm, [their own Codes of Conducts may be applied against maintainers of this project](#other-community-standards), externally of this project's procedures.
-
-### Enforcement Examples
-
-#### The Best Case
-
-The vast majority of situations work out like this. This interaction is common, and generally positive.
-
-> Alex: "Yeah I used X and it was really crazy!"
-
-> Patt (not a maintainer): "Hey, could you not use that word? What about 'ridiculous' instead?"
-
-> Alex: "oh sorry, sure." -> edits old comment to say "it was really confusing!"
-
-#### The Maintainer Case
-
-Sometimes, though, you need to get maintainers involved. Maintainers will do their best to resolve conflicts, but people who were harmed by something **will take priority**.
-
-> Patt: "Honestly, sometimes I just really hate using $library and anyone who uses it probably sucks at their job."
-
-> Alex: "Whoa there, could you dial it back a bit? There's a CoC thing about attacking folks' tech use like that."
-
-> Patt: "I'm not attacking anyone, what's your problem?"
-
-> Alex: "@maintainers hey uh. Can someone look at this issue? Patt is getting a bit aggro. I tried to nudge them about it, but nope."
-
-> KeeperOfCommitBits: (on issue) "Hey Patt, maintainer here. Could you tone it down? This sort of attack is really not okay in this space."
-
-> Patt: "Leave me alone I haven't said anything bad wtf is wrong with you."
-
-> KeeperOfCommitBits: (deletes user's comment), "@patt I mean it. Please refer to the CoC over at (URL to this CoC) if you have questions, but you can consider this an actual warning. I'd appreciate it if you reworded your messages in this thread, since they made folks there uncomfortable. Let's try and be kind, yeah?"
-
-> Patt: "@keeperofbits Okay sorry. I'm just frustrated and I'm kinda burnt out and I guess I got carried away. I'll DM Alex a note apologizing and edit my messages. Sorry for the trouble."
-
-> KeeperOfCommitBits: "@patt Thanks for that. I hear you on the stress. Burnout sucks :/. Have a good one!"
-
-#### The Nope Case
-
-> PepeTheFrog🐸: "Hi, I am a literal actual nazi and I think white supremacists are quite fashionable."
-
-> Patt: "NOOOOPE. OH NOPE NOPE."
-
-> Alex: "JFC NO. NOPE. @keeperofbits NOPE NOPE LOOK HERE"
-
-> KeeperOfCommitBits: "👀 Nope. NOPE NOPE NOPE. 🔥"
-
-> PepeTheFrog🐸 has been banned from all organization or user repositories belonging to KeeperOfCommitBits.
-
-## Attribution
-
-This Code of Conduct was generated using [WeAllJS Code of Conduct Generator](https://npm.im/weallbehave), which is based on the [WeAllJS Code of
-Conduct](https://wealljs.org/code-of-conduct), which is itself based on
-[Contributor Covenant](http://contributor-covenant.org), version 1.4, available
-at
-[http://contributor-covenant.org/version/1/4](http://contributor-covenant.org/version/1/4),
-and the LGBTQ in Technology Slack [Code of
-Conduct](http://lgbtq.technology/coc.html).
diff --git a/node_modules/libnpmpublish/CONTRIBUTING.md b/node_modules/libnpmpublish/CONTRIBUTING.md
deleted file mode 100644
index 780044ffc..000000000
--- a/node_modules/libnpmpublish/CONTRIBUTING.md
+++ /dev/null
@@ -1,256 +0,0 @@
-# Contributing
-
-## How do I... <a name="toc"></a>
-
-* [Use This Guide](#introduction)?
-* Ask or Say Something? 🤔🐛😱
- * [Request Support](#request-support)
- * [Report an Error or Bug](#report-an-error-or-bug)
- * [Request a Feature](#request-a-feature)
-* Make Something? 🤓👩🏽‍💻📜🍳
- * [Project Setup](#project-setup)
- * [Contribute Documentation](#contribute-documentation)
- * [Contribute Code](#contribute-code)
-* Manage Something ✅🙆🏼💃👔
- * [Provide Support on Issues](#provide-support-on-issues)
- * [Label Issues](#label-issues)
- * [Clean Up Issues and PRs](#clean-up-issues-and-prs)
- * [Review Pull Requests](#review-pull-requests)
- * [Merge Pull Requests](#merge-pull-requests)
- * [Tag a Release](#tag-a-release)
- * [Join the Project Team](#join-the-project-team)
-* Add a Guide Like This One [To My Project](#attribution)? 🤖😻👻
-
-## Introduction
-
-Thank you so much for your interest in contributing!. All types of contributions are encouraged and valued. See the [table of contents](#toc) for different ways to help and details about how this project handles them!📝
-
-Please make sure to read the relevant section before making your contribution! It will make it a lot easier for us maintainers to make the most of it and smooth out the experience for all involved. 💚
-
-The [Project Team](#join-the-project-team) looks forward to your contributions. 🙌🏾✨
-
-## Request Support
-
-If you have a question about this project, how to use it, or just need clarification about something:
-
-* Open an Issue at https://github.com/npm/libnpmpublish/issues
-* Provide as much context as you can about what you're running into.
-* Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it.
-
-Once it's filed:
-
-* The project team will [label the issue](#label-issues).
-* Someone will try to have a response soon.
-* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made.
-
-## Report an Error or Bug
-
-If you run into an error or bug with the project:
-
-* Open an Issue at https://github.com/npm/libnpmpublish/issues
-* Include *reproduction steps* that someone else can follow to recreate the bug or error on their own.
-* Provide project and platform versions (nodejs, npm, etc), depending on what seems relevant. If not, please be ready to provide that information if maintainers ask for it.
-
-Once it's filed:
-
-* The project team will [label the issue](#label-issues).
-* A team member will try to reproduce the issue with your provided steps. If there are no repro steps or no obvious way to reproduce the issue, the team will ask you for those steps and mark the issue as `needs-repro`. Bugs with the `needs-repro` tag will not be addressed until they are reproduced.
-* If the team is able to reproduce the issue, it will be marked `needs-fix`, as well as possibly other tags (such as `critical`), and the issue will be left to be [implemented by someone](#contribute-code).
-* If you or the maintainers don't respond to an issue for 30 days, the [issue will be closed](#clean-up-issues-and-prs). If you want to come back to it, reply (once, please), and we'll reopen the existing issue. Please avoid filing new issues as extensions of one you already made.
-* `critical` issues may be left open, depending on perceived immediacy and severity, even past the 30 day deadline.
-
-## Request a Feature
-
-If the project doesn't do something you need or want it to do:
-
-* Open an Issue at https://github.com/npm/libnpmpublish/issues
-* Provide as much context as you can about what you're running into.
-* Please try and be clear about why existing features and alternatives would not work for you.
-
-Once it's filed:
-
-* The project team will [label the issue](#label-issues).
-* The project team will evaluate the feature request, possibly asking you more questions to understand its purpose and any relevant requirements. If the issue is closed, the team will convey their reasoning and suggest an alternative path forward.
-* If the feature request is accepted, it will be marked for implementation with `feature-accepted`, which can then be done by either by a core team member or by anyone in the community who wants to [contribute code](#contribute-code).
-
-Note: The team is unlikely to be able to accept every single feature request that is filed. Please understand if they need to say no.
-
-## Project Setup
-
-So you wanna contribute some code! That's great! This project uses GitHub Pull Requests to manage contributions, so [read up on how to fork a GitHub project and file a PR](https://guides.github.com/activities/forking) if you've never done it before.
-
-If this seems like a lot or you aren't able to do all this setup, you might also be able to [edit the files directly](https://help.github.com/articles/editing-files-in-another-user-s-repository/) without having to do any of this setup. Yes, [even code](#contribute-code).
-
-If you want to go the usual route and run the project locally, though:
-
-* [Install Node.js](https://nodejs.org/en/download/)
-* [Fork the project](https://guides.github.com/activities/forking/#fork)
-
-Then in your terminal:
-* `cd path/to/your/clone`
-* `npm install`
-* `npm test`
-
-And you should be ready to go!
-
-## Contribute Documentation
-
-Documentation is a super important, critical part of this project. Docs are how we keep track of what we're doing, how, and why. It's how we stay on the same page about our policies. And it's how we tell others everything they need in order to be able to use this project -- or contribute to it. So thank you in advance.
-
-Documentation contributions of any size are welcome! Feel free to file a PR even if you're just rewording a sentence to be more clear, or fixing a spelling mistake!
-
-To contribute documentation:
-
-* [Set up the project](#project-setup).
-* Edit or add any relevant documentation.
-* Make sure your changes are formatted correctly and consistently with the rest of the documentation.
-* Re-read what you wrote, and run a spellchecker on it to make sure you didn't miss anything.
-* In your commit message(s), begin the first line with `docs: `. For example: `docs: Adding a doc contrib section to CONTRIBUTING.md`.
-* Write clear, concise commit message(s) using [conventional-changelog format](https://github.com/conventional-changelog/conventional-changelog-angular/blob/master/convention.md). Documentation commits should use `docs(<component>): <message>`.
-* Go to https://github.com/npm/libnpmpublish/pulls and open a new pull request with your changes.
-* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing.
-
-Once you've filed the PR:
-
-* One or more maintainers will use GitHub's review feature to review your PR.
-* If the maintainer asks for any changes, edit your changes, push, and ask for another review.
-* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚
-* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release)
-
-## Contribute Code
-
-We like code commits a lot! They're super handy, and they keep the project going and doing the work it needs to do to be useful to others.
-
-Code contributions of just about any size are acceptable!
-
-The main difference between code contributions and documentation contributions is that contributing code requires inclusion of relevant tests for the code being added or changed. Contributions without accompanying tests will be held off until a test is added, unless the maintainers consider the specific tests to be either impossible, or way too much of a burden for such a contribution.
-
-To contribute code:
-
-* [Set up the project](#project-setup).
-* Make any necessary changes to the source code.
-* Include any [additional documentation](#contribute-documentation) the changes might need.
-* Write tests that verify that your contribution works as expected.
-* Write clear, concise commit message(s) using [conventional-changelog format](https://github.com/conventional-changelog/conventional-changelog-angular/blob/master/convention.md).
-* Dependency updates, additions, or removals must be in individual commits, and the message must use the format: `<prefix>(deps): PKG@VERSION`, where `<prefix>` is any of the usual `conventional-changelog` prefixes, at your discretion.
-* Go to https://github.com/npm/libnpmpublish/pulls and open a new pull request with your changes.
-* If your PR is connected to an open issue, add a line in your PR's description that says `Fixes: #123`, where `#123` is the number of the issue you're fixing.
-
-Once you've filed the PR:
-
-* Barring special circumstances, maintainers will not review PRs until all checks pass (Travis, AppVeyor, etc).
-* One or more maintainers will use GitHub's review feature to review your PR.
-* If the maintainer asks for any changes, edit your changes, push, and ask for another review. Additional tags (such as `needs-tests`) will be added depending on the review.
-* If the maintainer decides to pass on your PR, they will thank you for the contribution and explain why they won't be accepting the changes. That's ok! We still really appreciate you taking the time to do it, and we don't take that lightly. 💚
-* If your PR gets accepted, it will be marked as such, and merged into the `latest` branch soon after. Your contribution will be distributed to the masses next time the maintainers [tag a release](#tag-a-release)
-
-## Provide Support on Issues
-
-[Needs Collaborator](#join-the-project-team): none
-
-Helping out other users with their questions is a really awesome way of contributing to any community. It's not uncommon for most of the issues on an open source projects being support-related questions by users trying to understand something they ran into, or find their way around a known bug.
-
-Sometimes, the `support` label will be added to things that turn out to actually be other things, like bugs or feature requests. In that case, suss out the details with the person who filed the original issue, add a comment explaining what the bug is, and change the label from `support` to `bug` or `feature`. If you can't do this yourself, @mention a maintainer so they can do it.
-
-In order to help other folks out with their questions:
-
-* Go to the issue tracker and [filter open issues by the `support` label](https://github.com/npm/libnpmpublish/issues?q=is%3Aopen+is%3Aissue+label%3Asupport).
-* Read through the list until you find something that you're familiar enough with to give an answer to.
-* Respond to the issue with whatever details are needed to clarify the question, or get more details about what's going on.
-* Once the discussion wraps up and things are clarified, either close the issue, or ask the original issue filer (or a maintainer) to close it for you.
-
-Some notes on picking up support issues:
-
-* Avoid responding to issues you don't know you can answer accurately.
-* As much as possible, try to refer to past issues with accepted answers. Link to them from your replies with the `#123` format.
-* Be kind and patient with users -- often, folks who have run into confusing things might be upset or impatient. This is ok. Try to understand where they're coming from, and if you're too uncomfortable with the tone, feel free to stay away or withdraw from the issue. (note: if the user is outright hostile or is violating the CoC, [refer to the Code of Conduct](CODE_OF_CONDUCT.md) to resolve the conflict).
-
-## Label Issues
-
-[Needs Collaborator](#join-the-project-team): Issue Tracker
-
-One of the most important tasks in handling issues is labeling them usefully and accurately. All other tasks involving issues ultimately rely on the issue being classified in such a way that relevant parties looking to do their own tasks can find them quickly and easily.
-
-In order to label issues, [open up the list of unlabeled issues](https://github.com/npm/libnpmpublish/issues?q=is%3Aopen+is%3Aissue+no%3Alabel) and, **from newest to oldest**, read through each one and apply issue labels according to the table below. If you're unsure about what label to apply, skip the issue and try the next one: don't feel obligated to label each and every issue yourself!
-
-Label | Apply When | Notes
---- | --- | ---
-`bug` | Cases where the code (or documentation) is behaving in a way it wasn't intended to. | If something is happening that surprises the *user* but does not go against the way the code is designed, it should use the `enhancement` label.
-`critical` | Added to `bug` issues if the problem described makes the code completely unusable in a common situation. |
-`documentation` | Added to issues or pull requests that affect any of the documentation for the project. | Can be combined with other labels, such as `bug` or `enhancement`.
-`duplicate` | Added to issues or PRs that refer to the exact same issue as another one that's been previously labeled. | Duplicate issues should be marked and closed right away, with a message referencing the issue it's a duplicate of (with `#123`)
-`enhancement` | Added to [feature requests](#request-a-feature), PRs, or documentation issues that are purely additive: the code or docs currently work as expected, but a change is being requested or suggested. |
-`help wanted` | Applied by [Committers](#join-the-project-team) to issues and PRs that they would like to get outside help for. Generally, this means it's lower priority for the maintainer team to itself implement, but that the community is encouraged to pick up if they so desire | Never applied on first-pass labeling.
-`in-progress` | Applied by [Committers](#join-the-project-team) to PRs that are pending some work before they're ready for review. | The original PR submitter should @mention the team member that applied the label once the PR is complete.
-`performance` | This issue or PR is directly related to improving performance. |
-`refactor` | Added to issues or PRs that deal with cleaning up or modifying the project for the betterment of it. |
-`starter` | Applied by [Committers](#join-the-project-team) to issues that they consider good introductions to the project for people who have not contributed before. These are not necessarily "easy", but rather focused around how much context is necessary in order to understand what needs to be done for this project in particular. | Existing project members are expected to stay away from these unless they increase in priority.
-`support` | This issue is either asking a question about how to use the project, clarifying the reason for unexpected behavior, or possibly reporting a `bug` but does not have enough detail yet to determine whether it would count as such. | The label should be switched to `bug` if reliable reproduction steps are provided. Issues primarily with unintended configurations of a user's environment are not considered bugs, even if they cause crashes.
-`tests` | This issue or PR either requests or adds primarily tests to the project. | If a PR is pending tests, that will be handled through the [PR review process](#review-pull-requests)
-`wontfix` | Labelers may apply this label to issues that clearly have nothing at all to do with the project or are otherwise entirely outside of its scope/sphere of influence. [Committers](#join-the-project-team) may apply this label and close an issue or PR if they decide to pass on an otherwise relevant issue. | The issue or PR should be closed as soon as the label is applied, and a clear explanation provided of why the label was used. Contributors are free to contest the labeling, but the decision ultimately falls on committers as to whether to accept something or not.
-
-## Clean Up Issues and PRs
-
-[Needs Collaborator](#join-the-project-team): Issue Tracker
-
-Issues and PRs can go stale after a while. Maybe they're abandoned. Maybe the team will just plain not have time to address them any time soon.
-
-In these cases, they should be closed until they're brought up again or the interaction starts over.
-
-To clean up issues and PRs:
-
-* Search the issue tracker for issues or PRs, and add the term `updated:<=YYYY-MM-DD`, where the date is 30 days before today.
-* Go through each issue *from oldest to newest*, and close them if **all of the following are true**:
- * not opened by a maintainer
- * not marked as `critical`
- * not marked as `starter` or `help wanted` (these might stick around for a while, in general, as they're intended to be available)
- * no explicit messages in the comments asking for it to be left open
- * does not belong to a milestone
-* Leave a message when closing saying "Cleaning up stale issue. Please reopen or ping us if and when you're ready to resume this. See https://github.com/npm/libnpmpublish/blob/latest/CONTRIBUTING.md#clean-up-issues-and-prs for more details."
-
-## Review Pull Requests
-
-[Needs Collaborator](#join-the-project-team): Issue Tracker
-
-While anyone can comment on a PR, add feedback, etc, PRs are only *approved* by team members with Issue Tracker or higher permissions.
-
-PR reviews use [GitHub's own review feature](https://help.github.com/articles/about-pull-request-reviews/), which manages comments, approval, and review iteration.
-
-Some notes:
-
-* You may ask for minor changes ("nitpicks"), but consider whether they are really blockers to merging: try to err on the side of "approve, with comments".
-* *ALL PULL REQUESTS* should be covered by a test: either by a previously-failing test, an existing test that covers the entire functionality of the submitted code, or new tests to verify any new/changed behavior. All tests must also pass and follow established conventions. Test coverage should not drop, unless the specific case is considered reasonable by maintainers.
-* Please make sure you're familiar with the code or documentation being updated, unless it's a minor change (spellchecking, minor formatting, etc). You may @mention another project member who you think is better suited for the review, but still provide a non-approving review of your own.
-* Be extra kind: people who submit code/doc contributions are putting themselves in a pretty vulnerable position, and have put time and care into what they've done (even if that's not obvious to you!) -- always respond with respect, be understanding, but don't feel like you need to sacrifice your standards for their sake, either. Just don't be a jerk about it?
-
-## Merge Pull Requests
-
-[Needs Collaborator](#join-the-project-team): Committer
-
-TBD - need to hash out a bit more of this process.
-
-## Tag A Release
-
-[Needs Collaborator](#join-the-project-team): Committer
-
-TBD - need to hash out a bit more of this process. The most important bit here is probably that all tests must pass, and tags must use [semver](https://semver.org).
-
-## Join the Project Team
-
-### Ways to Join
-
-There are many ways to contribute! Most of them don't require any official status unless otherwise noted. That said, there's a couple of positions that grant special repository abilities, and this section describes how they're granted and what they do.
-
-All of the below positions are granted based on the project team's needs, as well as their consensus opinion about whether they would like to work with the person and think that they would fit well into that position. The process is relatively informal, and it's likely that people who express interest in participating can just be granted the permissions they'd like.
-
-You can spot a collaborator on the repo by looking for the `[Collaborator]` or `[Owner]` tags next to their names.
-
-Permission | Description
---- | ---
-Issue Tracker | Granted to contributors who express a strong interest in spending time on the project's issue tracker. These tasks are mainly [labeling issues](#label-issues), [cleaning up old ones](#clean-up-issues-and-prs), and [reviewing pull requests](#review-pull-requests), as well as all the usual things non-team-member contributors can do. Issue handlers should not merge pull requests, tag releases, or directly commit code themselves: that should still be done through the usual pull request process. Becoming an Issue Handler means the project team trusts you to understand enough of the team's process and context to implement it on the issue tracker.
-Committer | Granted to contributors who want to handle the actual pull request merges, tagging new versions, etc. Committers should have a good level of familiarity with the codebase, and enough context to understand the implications of various changes, as well as a good sense of the will and expectations of the project team.
-Admin/Owner | Granted to people ultimately responsible for the project, its community, etc.
-
-## Attribution
-
-This guide was generated using the WeAllJS `CONTRIBUTING.md` generator. [Make your own](https://npm.im/weallcontribute)!
diff --git a/node_modules/libnpmpublish/PULL_REQUEST_TEMPLATE b/node_modules/libnpmpublish/PULL_REQUEST_TEMPLATE
deleted file mode 100644
index 9471c6d32..000000000
--- a/node_modules/libnpmpublish/PULL_REQUEST_TEMPLATE
+++ /dev/null
@@ -1,7 +0,0 @@
-<!--
-⚠️🚨 BEFORE FILING A PR: 🚨⚠️
-
-👉🏼 CONTRIBUTING.md 👈🏼 (the "contribution guidelines" up there ☝🏼)
-
-I PROMISE IT'S A VERY VERY SHORT READ.🙇🏼
--->
diff --git a/node_modules/libnpmpublish/README.md b/node_modules/libnpmpublish/README.md
index 1511b7c14..7d66958c9 100644
--- a/node_modules/libnpmpublish/README.md
+++ b/node_modules/libnpmpublish/README.md
@@ -1,9 +1,22 @@
-# libnpmpublish [![npm version](https://img.shields.io/npm/v/libnpmpublish.svg)](https://npm.im/libnpmpublish) [![license](https://img.shields.io/npm/l/libnpmpublish.svg)](https://npm.im/libnpmpublish) [![Travis](https://img.shields.io/travis/npm/libnpmpublish.svg)](https://travis-ci.org/npm/libnpmpublish) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/libnpmpublish?svg=true)](https://ci.appveyor.com/project/zkat/libnpmpublish) [![Coverage Status](https://coveralls.io/repos/github/npm/libnpmpublish/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmpublish?branch=latest)
+# libnpmpublish
+
+[![npm version](https://img.shields.io/npm/v/libnpmpublish.svg)](https://npm.im/libnpmpublish)
+[![license](https://img.shields.io/npm/l/libnpmpublish.svg)](https://npm.im/libnpmpublish)
+[![GitHub Actions](https://github.com/npm/libnpmpublish/workflows/Node%20CI/badge.svg)](https://github.com/npm/libnpmpublish/actions?query=workflow%3A%22Node+CI%22)
+[![Coverage Status](https://coveralls.io/repos/github/npm/libnpmpublish/badge.svg?branch=latest)](https://coveralls.io/github/npm/libnpmpublish?branch=latest)
[`libnpmpublish`](https://github.com/npm/libnpmpublish) is a Node.js library for
-programmatically publishing and unpublishing npm packages. It does not take care
-of packing tarballs from source code, but once you have a tarball, it can take
-care of putting it up on a nice registry for you.
+programmatically publishing and unpublishing npm packages. It takes care
+of packing tarballs from source code and putting them up on a nice registry for you.
+
+## Table of Contents
+
+* [Example](#example)
+* [Install](#install)
+* [API](#api)
+ * [publish/unpublish opts](#opts)
+ * [`publish()`](#publish)
+ * [`unpublish()`](#unpublish)
## Example
@@ -16,15 +29,6 @@ const { publish, unpublish } = require('libnpmpublish')
`$ npm install libnpmpublish`
-## Table of Contents
-
-* [Example](#example)
-* [Install](#install)
-* [API](#api)
- * [publish/unpublish opts](#opts)
- * [`publish()`](#publish)
- * [`unpublish()`](#unpublish)
-
### API
#### <a name="opts"></a> `opts` for `libnpmpublish` commands
@@ -36,29 +40,17 @@ documentation](https://www.npmjs.com/package/npm-registry-fetch#fetch-options)
for options that can be passed in.
A couple of options of note for those in a hurry:
+* `opts.defaultTag` - registers the published package with the given tag, defaults to `latest`.
+
+* `opts.access` - tells the registry whether this package should be published as public or restricted. Only applies to scoped packages, which default to restricted.
* `opts.token` - can be passed in and will be used as the authentication token for the registry. For other ways to pass in auth details, see the n-r-f docs.
-* `opts.Promise` - If you pass this in, the Promises returned by `libnpmpublish` commands will use this Promise class instead. For example: `{Promise: require('bluebird')}`
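+For instance, a typical `opts` object for publishing a scoped package might
+look like this (the values are only illustrative):
+
+```javascript
+const opts = {
+  defaultTag: 'beta',           // register under the `beta` dist-tag instead of `latest`
+  access: 'public',             // scoped packages default to restricted
+  token: process.env.NPM_TOKEN  // registry auth token (see the n-r-f docs for other options)
+}
+```
+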
-#### <a name="publish"></a> `> libpub.publish(pkgJson, tarData, [opts]) -> Promise`
+#### <a name="publish"></a> `> libpub.publish(path, pkgJson, [opts]) -> Promise`
-Publishes `tarData` to the appropriate configured registry. `pkgJson` should be
+Packs a tarball from the source code located at `path` and publishes it to the appropriate configured registry. `pkgJson` should be
the parsed `package.json` for the package that is being published.
-`tarData` can be a Buffer, a base64-encoded string, or a binary stream of data.
-Note that publishing itself can't be streamed, so the entire stream will be
-consumed into RAM before publishing (and are thus limited in how big they can
-be).
-
-Since `libnpmpublish` does not generate tarballs itself, one way to build your
-own tarball for publishing is to do `npm pack` in the directory you wish to
-pack. You can then `fs.createReadStream('my-proj-1.0.0.tgz')` and pass that to
-`libnpmpublish`, along with `require('./package.json')`.
-
-`publish()` does its best to emulate legacy publish logic in the standard npm
-client, and so should generally be compatible with any registry the npm CLI has
-been able to publish to in the past.
-
If `opts.npmVersion` is passed in, it will be used as the `_npmVersion` field in
the outgoing packument. It's recommended you add your own user agent string in
there!
@@ -69,19 +61,14 @@ end up with `dist.integrity = 'sha512-deadbeefbadc0ffee'`. Any algorithm
supported by your current node version is allowed -- npm clients that do not
support those algorithms will simply ignore the unsupported hashes.
-If `opts.access` is passed in, it must be one of `public` or `restricted`.
-Unscoped packages cannot be `restricted`, and the registry may agree or disagree
-with whether you're allowed to publish a restricted package.
-
##### Example
```javascript
-const pkg = require('./dist/package.json')
-const tarball = fs.createReadStream('./dist/pkg-1.0.1.tgz')
-await libpub.publish(pkg, tarball, {
+const path = '/a/path/to/your/source/code'
+const pkgJson = require(path + '/package.json')
+await libpub.publish(path, pkgJson, {
npmVersion: 'my-pub-script@1.0.2',
token: 'my-auth-token-here'
-})
+})
// Package has been published to the npm registry.
```
diff --git a/node_modules/libnpmpublish/appveyor.yml b/node_modules/libnpmpublish/appveyor.yml
deleted file mode 100644
index 9cc64c58e..000000000
--- a/node_modules/libnpmpublish/appveyor.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-environment:
- matrix:
- - nodejs_version: "10"
- - nodejs_version: "9"
- - nodejs_version: "8"
- - nodejs_version: "6"
-
-platform:
- - x64
-
-install:
- - ps: Install-Product node $env:nodejs_version $env:platform
- - npm config set spin false
- - npm install
-
-test_script:
- - npm test
-
-matrix:
- fast_finish: true
-
-build: off
diff --git a/node_modules/libnpmpublish/node_modules/.bin/semver b/node_modules/libnpmpublish/node_modules/.bin/semver
deleted file mode 120000
index 317eb293d..000000000
--- a/node_modules/libnpmpublish/node_modules/.bin/semver
+++ /dev/null
@@ -1 +0,0 @@
-../semver/bin/semver \ No newline at end of file
diff --git a/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/CHANGELOG.md b/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/CHANGELOG.md
deleted file mode 100644
index 479f24b64..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/CHANGELOG.md
+++ /dev/null
@@ -1,115 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="2.8.5"></a>
-## [2.8.5](https://github.com/npm/hosted-git-info/compare/v2.8.4...v2.8.5) (2019-10-07)
-
-
-### Bug Fixes
-
-* updated pathmatch for gitlab ([e8325b5](https://github.com/npm/hosted-git-info/commit/e8325b5)), closes [#51](https://github.com/npm/hosted-git-info/issues/51)
-* updated pathmatch for gitlab ([ffe056f](https://github.com/npm/hosted-git-info/commit/ffe056f))
-
-
-
-<a name="2.8.4"></a>
-## [2.8.4](https://github.com/npm/hosted-git-info/compare/v2.8.3...v2.8.4) (2019-08-12)
-
-
-
-<a name="2.8.3"></a>
-## [2.8.3](https://github.com/npm/hosted-git-info/compare/v2.8.2...v2.8.3) (2019-08-12)
-
-
-
-<a name="2.8.2"></a>
-## [2.8.2](https://github.com/npm/hosted-git-info/compare/v2.8.1...v2.8.2) (2019-08-05)
-
-
-### Bug Fixes
-
-* http protocol use sshurl by default ([3b1d629](https://github.com/npm/hosted-git-info/commit/3b1d629)), closes [#48](https://github.com/npm/hosted-git-info/issues/48)
-
-
-
-<a name="2.8.1"></a>
-## [2.8.1](https://github.com/npm/hosted-git-info/compare/v2.8.0...v2.8.1) (2019-08-05)
-
-
-### Bug Fixes
-
-* ignore noCommittish on tarball url generation ([5d4a8d7](https://github.com/npm/hosted-git-info/commit/5d4a8d7))
-* use gist tarball url that works for anonymous gists ([1692435](https://github.com/npm/hosted-git-info/commit/1692435))
-
-
-
-<a name="2.8.0"></a>
-# [2.8.0](https://github.com/npm/hosted-git-info/compare/v2.7.1...v2.8.0) (2019-08-05)
-
-
-### Bug Fixes
-
-* Allow slashes in gitlab project section ([bbcf7b2](https://github.com/npm/hosted-git-info/commit/bbcf7b2)), closes [#46](https://github.com/npm/hosted-git-info/issues/46) [#43](https://github.com/npm/hosted-git-info/issues/43)
-* **git-host:** disallow URI-encoded slash (%2F) in `path` ([3776fa5](https://github.com/npm/hosted-git-info/commit/3776fa5)), closes [#44](https://github.com/npm/hosted-git-info/issues/44)
-* **gitlab:** Do not URL encode slashes in project name for GitLab https URL ([cbf04f9](https://github.com/npm/hosted-git-info/commit/cbf04f9)), closes [#47](https://github.com/npm/hosted-git-info/issues/47)
-* do not allow invalid gist urls ([d5cf830](https://github.com/npm/hosted-git-info/commit/d5cf830))
-* **cache:** Switch to lru-cache to save ourselves from unlimited memory consumption ([e518222](https://github.com/npm/hosted-git-info/commit/e518222)), closes [#38](https://github.com/npm/hosted-git-info/issues/38)
-
-
-### Features
-
-* give these objects a name ([60abaea](https://github.com/npm/hosted-git-info/commit/60abaea))
-
-
-
-<a name="2.7.1"></a>
-## [2.7.1](https://github.com/npm/hosted-git-info/compare/v2.7.0...v2.7.1) (2018-07-07)
-
-
-### Bug Fixes
-
-* **index:** Guard against non-string types ([5bc580d](https://github.com/npm/hosted-git-info/commit/5bc580d))
-* **parse:** Crash on strings that parse to having no host ([c931482](https://github.com/npm/hosted-git-info/commit/c931482)), closes [#35](https://github.com/npm/hosted-git-info/issues/35)
-
-
-
-<a name="2.7.0"></a>
-# [2.7.0](https://github.com/npm/hosted-git-info/compare/v2.6.1...v2.7.0) (2018-07-06)
-
-
-### Bug Fixes
-
-* **github tarball:** update github tarballtemplate ([6efd582](https://github.com/npm/hosted-git-info/commit/6efd582)), closes [#34](https://github.com/npm/hosted-git-info/issues/34)
-* **gitlab docs:** switched to lowercase anchors for readmes ([701bcd1](https://github.com/npm/hosted-git-info/commit/701bcd1))
-
-
-### Features
-
-* **all:** Support www. prefixes on hostnames ([3349575](https://github.com/npm/hosted-git-info/commit/3349575)), closes [#32](https://github.com/npm/hosted-git-info/issues/32)
-
-
-
-<a name="2.6.1"></a>
-## [2.6.1](https://github.com/npm/hosted-git-info/compare/v2.6.0...v2.6.1) (2018-06-25)
-
-### Bug Fixes
-
-* **Revert:** "compat: remove Object.assign fallback ([#25](https://github.com/npm/hosted-git-info/issues/25))" ([cce5a62](https://github.com/npm/hosted-git-info/commit/cce5a62))
-* **Revert:** "git-host: fix forgotten extend()" ([a815ec9](https://github.com/npm/hosted-git-info/commit/a815ec9))
-
-
-
-<a name="2.6.0"></a>
-# [2.6.0](https://github.com/npm/hosted-git-info/compare/v2.5.0...v2.6.0) (2018-03-07)
-
-
-### Bug Fixes
-
-* **compat:** remove Object.assign fallback ([#25](https://github.com/npm/hosted-git-info/issues/25)) ([627ab55](https://github.com/npm/hosted-git-info/commit/627ab55))
-* **git-host:** fix forgotten extend() ([eba1f7b](https://github.com/npm/hosted-git-info/commit/eba1f7b))
-
-
-### Features
-
-* **browse:** fragment support for browse() ([#28](https://github.com/npm/hosted-git-info/issues/28)) ([cd5e5bb](https://github.com/npm/hosted-git-info/commit/cd5e5bb))
diff --git a/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE b/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE
deleted file mode 100644
index 45055763d..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright (c) 2015, Rebecca Turner
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/README.md b/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/README.md
deleted file mode 100644
index 7b723f6b9..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/README.md
+++ /dev/null
@@ -1,133 +0,0 @@
-# hosted-git-info
-
-This will let you identify and transform various git host URLs between
-protocols. It can also tell you what the URL is for the raw path of a
-particular file, for direct access without git.
-
-## Example
-
-```javascript
-var hostedGitInfo = require("hosted-git-info")
-var info = hostedGitInfo.fromUrl("git@github.com:npm/hosted-git-info.git", opts)
-/* info looks like:
-{
- type: "github",
- domain: "github.com",
- user: "npm",
- project: "hosted-git-info"
-}
-*/
-```
-
-If the URL can't be matched with a git host, `null` will be returned. We
-can match git, ssh and https urls. Additionally, we can match ssh connect
-strings (`git@github.com:npm/hosted-git-info`) and shortcuts (eg,
-`github:npm/hosted-git-info`). GitHub, specifically, is detected in the case
-of a third, unprefixed form: `npm/hosted-git-info`.
-
-If it does match, the returned object has properties of:
-
-* info.type -- The short name of the service
-* info.domain -- The domain for git protocol use
-* info.user -- The name of the user/org on the git host
-* info.project -- The name of the project on the git host
-
-## Version Contract
-
-The major version will be bumped any time…
-
-* The constructor stops accepting URLs that it previously accepted.
-* A method is removed.
-* A method can no longer accept the number and type of arguments it previously accepted.
-* A method can return a different type than it currently returns.
-
-Implications:
-
-* I do not consider the specific format of the urls returned from, say
- `.https()` to be a part of the contract. The contract is that it will
- return a string that can be used to fetch the repo via HTTPS. But what
- that string looks like, specifically, can change.
-* Dropping support for a hosted git provider would constitute a breaking
- change.
-
-## Usage
-
-### var info = hostedGitInfo.fromUrl(gitSpecifier[, options])
-
-* *gitSpecifier* is a URL of a git repository or an SCP-style specifier of one.
-* *options* is an optional object. It can have the following properties:
- * *noCommittish* — If true then committishes won't be included in generated URLs.
- * *noGitPlus* — If true then `git+` won't be prefixed on URLs.
-
-## Methods
-
-All of the methods take the same options as the `fromUrl` factory. Options
-provided to a method override those provided to the constructor.
-
-* info.file(path, opts)
-
-Given the path of a file relative to the repository, returns a URL for
-directly fetching it from the githost. If no committish was set then
-`master` will be used as the default.
-
-For example `hostedGitInfo.fromUrl("git@github.com:npm/hosted-git-info.git#v1.0.0").file("package.json")`
-would return `https://raw.githubusercontent.com/npm/hosted-git-info/v1.0.0/package.json`
-
-* info.shortcut(opts)
-
-eg, `github:npm/hosted-git-info`
-
-* info.browse(path, fragment, opts)
-
-eg, `https://github.com/npm/hosted-git-info/tree/v1.2.0`,
-`https://github.com/npm/hosted-git-info/tree/v1.2.0/package.json`,
-`https://github.com/npm/hosted-git-info/tree/v1.2.0/README.md#supported-hosts`
-
-* info.bugs(opts)
-
-eg, `https://github.com/npm/hosted-git-info/issues`
-
-* info.docs(opts)
-
-eg, `https://github.com/npm/hosted-git-info/tree/v1.2.0#readme`
-
-* info.https(opts)
-
-eg, `git+https://github.com/npm/hosted-git-info.git`
-
-* info.sshurl(opts)
-
-eg, `git+ssh://git@github.com/npm/hosted-git-info.git`
-
-* info.ssh(opts)
-
-eg, `git@github.com:npm/hosted-git-info.git`
-
-* info.path(opts)
-
-eg, `npm/hosted-git-info`
-
-* info.tarball(opts)
-
-eg, `https://github.com/npm/hosted-git-info/archive/v1.2.0.tar.gz`
-
-* info.getDefaultRepresentation()
-
-Returns the default output type, which is based on the string you passed
-in to be parsed.
-
-* info.toString(opts)
-
-Uses `getDefaultRepresentation()` to call one of the other methods to get a URL for
-this resource. As such, `hostedGitInfo.fromUrl(url).toString()` will give
-you a normalized version of the URL that still uses the same protocol.
-
-Shortcuts will still be returned as shortcuts, but the special case github
-form of `org/project` will be normalized to `github:org/project`.
-
-SSH connect strings will be normalized into `git+ssh` URLs.
-
-## Supported hosts
-
-Currently this supports GitHub, Bitbucket and GitLab. Pull requests for
-additional hosts welcome.
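
To make the `hosted-git-info` README above easier to follow, here is a small sketch combining `fromUrl` with a few of the accessor methods it documents. The expected outputs are taken from the README's own examples and are not part of the module's formal contract:

```javascript
const hostedGitInfo = require('hosted-git-info')

// Parse an SSH connect string carrying a committish.
const info = hostedGitInfo.fromUrl('git@github.com:npm/hosted-git-info.git#v1.0.0')

if (info) {
  console.log(info.type)                 // 'github'
  console.log(info.file('package.json'))
  // 'https://raw.githubusercontent.com/npm/hosted-git-info/v1.0.0/package.json'
  console.log(info.shortcut())           // 'github:npm/hosted-git-info#v1.0.0'
  console.log(info.https({ noGitPlus: true, noCommittish: true }))
  // e.g. 'https://github.com/npm/hosted-git-info.git' (exact shape may vary)
}

// URLs that do not match a supported host yield no GitHost object.
console.log(hostedGitInfo.fromUrl('https://example.com/not-a-git-host')) // falsy – no host matched
```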
diff --git a/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host-info.js b/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host-info.js
deleted file mode 100644
index 8147e3348..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host-info.js
+++ /dev/null
@@ -1,79 +0,0 @@
-'use strict'
-
-var gitHosts = module.exports = {
- github: {
- // First two are insecure and generally shouldn't be used any more, but
- // they are still supported.
- 'protocols': [ 'git', 'http', 'git+ssh', 'git+https', 'ssh', 'https' ],
- 'domain': 'github.com',
- 'treepath': 'tree',
- 'filetemplate': 'https://{auth@}raw.githubusercontent.com/{user}/{project}/{committish}/{path}',
- 'bugstemplate': 'https://{domain}/{user}/{project}/issues',
- 'gittemplate': 'git://{auth@}{domain}/{user}/{project}.git{#committish}',
- 'tarballtemplate': 'https://codeload.{domain}/{user}/{project}/tar.gz/{committish}'
- },
- bitbucket: {
- 'protocols': [ 'git+ssh', 'git+https', 'ssh', 'https' ],
- 'domain': 'bitbucket.org',
- 'treepath': 'src',
- 'tarballtemplate': 'https://{domain}/{user}/{project}/get/{committish}.tar.gz'
- },
- gitlab: {
- 'protocols': [ 'git+ssh', 'git+https', 'ssh', 'https' ],
- 'domain': 'gitlab.com',
- 'treepath': 'tree',
- 'bugstemplate': 'https://{domain}/{user}/{project}/issues',
- 'httpstemplate': 'git+https://{auth@}{domain}/{user}/{projectPath}.git{#committish}',
- 'tarballtemplate': 'https://{domain}/{user}/{project}/repository/archive.tar.gz?ref={committish}',
- 'pathmatch': /^[/]([^/]+)[/]((?!.*(\/-\/|\/repository\/archive\.tar\.gz\?=.*|\/repository\/[^/]+\/archive.tar.gz$)).*?)(?:[.]git|[/])?$/
- },
- gist: {
- 'protocols': [ 'git', 'git+ssh', 'git+https', 'ssh', 'https' ],
- 'domain': 'gist.github.com',
- 'pathmatch': /^[/](?:([^/]+)[/])?([a-z0-9]{32,})(?:[.]git)?$/,
- 'filetemplate': 'https://gist.githubusercontent.com/{user}/{project}/raw{/committish}/{path}',
- 'bugstemplate': 'https://{domain}/{project}',
- 'gittemplate': 'git://{domain}/{project}.git{#committish}',
- 'sshtemplate': 'git@{domain}:/{project}.git{#committish}',
- 'sshurltemplate': 'git+ssh://git@{domain}/{project}.git{#committish}',
- 'browsetemplate': 'https://{domain}/{project}{/committish}',
- 'browsefiletemplate': 'https://{domain}/{project}{/committish}{#path}',
- 'docstemplate': 'https://{domain}/{project}{/committish}',
- 'httpstemplate': 'git+https://{domain}/{project}.git{#committish}',
- 'shortcuttemplate': '{type}:{project}{#committish}',
- 'pathtemplate': '{project}{#committish}',
- 'tarballtemplate': 'https://codeload.github.com/gist/{project}/tar.gz/{committish}',
- 'hashformat': function (fragment) {
- return 'file-' + formatHashFragment(fragment)
- }
- }
-}
-
-var gitHostDefaults = {
- 'sshtemplate': 'git@{domain}:{user}/{project}.git{#committish}',
- 'sshurltemplate': 'git+ssh://git@{domain}/{user}/{project}.git{#committish}',
- 'browsetemplate': 'https://{domain}/{user}/{project}{/tree/committish}',
- 'browsefiletemplate': 'https://{domain}/{user}/{project}/{treepath}/{committish}/{path}{#fragment}',
- 'docstemplate': 'https://{domain}/{user}/{project}{/tree/committish}#readme',
- 'httpstemplate': 'git+https://{auth@}{domain}/{user}/{project}.git{#committish}',
- 'filetemplate': 'https://{domain}/{user}/{project}/raw/{committish}/{path}',
- 'shortcuttemplate': '{type}:{user}/{project}{#committish}',
- 'pathtemplate': '{user}/{project}{#committish}',
- 'pathmatch': /^[/]([^/]+)[/]([^/]+?)(?:[.]git|[/])?$/,
- 'hashformat': formatHashFragment
-}
-
-Object.keys(gitHosts).forEach(function (name) {
- Object.keys(gitHostDefaults).forEach(function (key) {
- if (gitHosts[name][key]) return
- gitHosts[name][key] = gitHostDefaults[key]
- })
- gitHosts[name].protocols_re = RegExp('^(' +
- gitHosts[name].protocols.map(function (protocol) {
- return protocol.replace(/([\\+*{}()[\]$^|])/g, '\\$1')
- }).join('|') + '):$')
-})
-
-function formatHashFragment (fragment) {
- return fragment.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-')
-}
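
A brief, hypothetical check of how the defaults-merging loop above behaves once the module is loaded; the require path assumes you are loading the file shown (`./git-host-info.js`) directly:

```javascript
const gitHosts = require('./git-host-info.js')

// bitbucket defines no sshtemplate of its own, so it inherits the shared default...
console.log(gitHosts.bitbucket.sshtemplate)
// 'git@{domain}:{user}/{project}.git{#committish}'

// ...while github keeps its host-specific tarball template.
console.log(gitHosts.github.tarballtemplate)
// 'https://codeload.{domain}/{user}/{project}/tar.gz/{committish}'

// Each host also gets a protocols_re built from its escaped protocol list.
console.log(gitHosts.gitlab.protocols_re.test('git+ssh:')) // true
console.log(gitHosts.gitlab.protocols_re.test('git:'))     // false – gitlab omits plain git
```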
diff --git a/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host.js b/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host.js
deleted file mode 100644
index 9616fbaa6..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host.js
+++ /dev/null
@@ -1,156 +0,0 @@
-'use strict'
-var gitHosts = require('./git-host-info.js')
-/* eslint-disable node/no-deprecated-api */
-
-// copy-pasta util._extend from node's source, to avoid pulling
-// the whole util module into peoples' webpack bundles.
-/* istanbul ignore next */
-var extend = Object.assign || function _extend (target, source) {
- // Don't do anything if source isn't an object
- if (source === null || typeof source !== 'object') return target
-
- var keys = Object.keys(source)
- var i = keys.length
- while (i--) {
- target[keys[i]] = source[keys[i]]
- }
- return target
-}
-
-module.exports = GitHost
-function GitHost (type, user, auth, project, committish, defaultRepresentation, opts) {
- var gitHostInfo = this
- gitHostInfo.type = type
- Object.keys(gitHosts[type]).forEach(function (key) {
- gitHostInfo[key] = gitHosts[type][key]
- })
- gitHostInfo.user = user
- gitHostInfo.auth = auth
- gitHostInfo.project = project
- gitHostInfo.committish = committish
- gitHostInfo.default = defaultRepresentation
- gitHostInfo.opts = opts || {}
-}
-
-GitHost.prototype.hash = function () {
- return this.committish ? '#' + this.committish : ''
-}
-
-GitHost.prototype._fill = function (template, opts) {
- if (!template) return
- var vars = extend({}, opts)
- vars.path = vars.path ? vars.path.replace(/^[/]+/g, '') : ''
- opts = extend(extend({}, this.opts), opts)
- var self = this
- Object.keys(this).forEach(function (key) {
- if (self[key] != null && vars[key] == null) vars[key] = self[key]
- })
- var rawAuth = vars.auth
- var rawcommittish = vars.committish
- var rawFragment = vars.fragment
- var rawPath = vars.path
- var rawProject = vars.project
- Object.keys(vars).forEach(function (key) {
- var value = vars[key]
- if ((key === 'path' || key === 'project') && typeof value === 'string') {
- vars[key] = value.split('/').map(function (pathComponent) {
- return encodeURIComponent(pathComponent)
- }).join('/')
- } else {
- vars[key] = encodeURIComponent(value)
- }
- })
- vars['auth@'] = rawAuth ? rawAuth + '@' : ''
- vars['#fragment'] = rawFragment ? '#' + this.hashformat(rawFragment) : ''
- vars.fragment = vars.fragment ? vars.fragment : ''
- vars['#path'] = rawPath ? '#' + this.hashformat(rawPath) : ''
- vars['/path'] = vars.path ? '/' + vars.path : ''
- vars.projectPath = rawProject.split('/').map(encodeURIComponent).join('/')
- if (opts.noCommittish) {
- vars['#committish'] = ''
- vars['/tree/committish'] = ''
- vars['/committish'] = ''
- vars.committish = ''
- } else {
- vars['#committish'] = rawcommittish ? '#' + rawcommittish : ''
- vars['/tree/committish'] = vars.committish
- ? '/' + vars.treepath + '/' + vars.committish
- : ''
- vars['/committish'] = vars.committish ? '/' + vars.committish : ''
- vars.committish = vars.committish || 'master'
- }
- var res = template
- Object.keys(vars).forEach(function (key) {
- res = res.replace(new RegExp('[{]' + key + '[}]', 'g'), vars[key])
- })
- if (opts.noGitPlus) {
- return res.replace(/^git[+]/, '')
- } else {
- return res
- }
-}
-
-GitHost.prototype.ssh = function (opts) {
- return this._fill(this.sshtemplate, opts)
-}
-
-GitHost.prototype.sshurl = function (opts) {
- return this._fill(this.sshurltemplate, opts)
-}
-
-GitHost.prototype.browse = function (P, F, opts) {
- if (typeof P === 'string') {
- if (typeof F !== 'string') {
- opts = F
- F = null
- }
- return this._fill(this.browsefiletemplate, extend({
- fragment: F,
- path: P
- }, opts))
- } else {
- return this._fill(this.browsetemplate, P)
- }
-}
-
-GitHost.prototype.docs = function (opts) {
- return this._fill(this.docstemplate, opts)
-}
-
-GitHost.prototype.bugs = function (opts) {
- return this._fill(this.bugstemplate, opts)
-}
-
-GitHost.prototype.https = function (opts) {
- return this._fill(this.httpstemplate, opts)
-}
-
-GitHost.prototype.git = function (opts) {
- return this._fill(this.gittemplate, opts)
-}
-
-GitHost.prototype.shortcut = function (opts) {
- return this._fill(this.shortcuttemplate, opts)
-}
-
-GitHost.prototype.path = function (opts) {
- return this._fill(this.pathtemplate, opts)
-}
-
-GitHost.prototype.tarball = function (opts_) {
- var opts = extend({}, opts_, { noCommittish: false })
- return this._fill(this.tarballtemplate, opts)
-}
-
-GitHost.prototype.file = function (P, opts) {
- return this._fill(this.filetemplate, extend({ path: P }, opts))
-}
-
-GitHost.prototype.getDefaultRepresentation = function () {
- return this.default
-}
-
-GitHost.prototype.toString = function (opts) {
- if (this.default && typeof this[this.default] === 'function') return this[this.default](opts)
- return this.sshurl(opts)
-}
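
To illustrate the `_fill` templating above, here is a hedged sketch that constructs a `GitHost` by hand (normally `fromUrl` in `index.js` does this for you). The expected strings follow from the github templates in `git-host-info.js` and the README examples:

```javascript
const GitHost = require('./git-host.js')

// Arguments: type, user, auth, project, committish, defaultRepresentation
const gh = new GitHost('github', 'npm', null, 'hosted-git-info', 'v1.2.0', 'https')

console.log(gh.browse())
// 'https://github.com/npm/hosted-git-info/tree/v1.2.0'

console.log(gh.browse('package.json'))
// 'https://github.com/npm/hosted-git-info/tree/v1.2.0/package.json'

// noCommittish blanks every committish-derived placeholder before filling.
console.log(gh.https({ noCommittish: true }))
// 'git+https://github.com/npm/hosted-git-info.git'

// toString() dispatches to the default representation captured at parse time,
// so here it is equivalent to gh.https().
console.log(gh.toString())
```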
diff --git a/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/index.js b/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/index.js
deleted file mode 100644
index fc959cb04..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/index.js
+++ /dev/null
@@ -1,125 +0,0 @@
-'use strict'
-var url = require('url')
-var gitHosts = require('./git-host-info.js')
-var GitHost = module.exports = require('./git-host.js')
-
-var protocolToRepresentationMap = {
- 'git+ssh:': 'sshurl',
- 'git+https:': 'https',
- 'ssh:': 'sshurl',
- 'git:': 'git'
-}
-
-function protocolToRepresentation (protocol) {
- return protocolToRepresentationMap[protocol] || protocol.slice(0, -1)
-}
-
-var authProtocols = {
- 'git:': true,
- 'https:': true,
- 'git+https:': true,
- 'http:': true,
- 'git+http:': true
-}
-
-var cache = {}
-
-module.exports.fromUrl = function (giturl, opts) {
- if (typeof giturl !== 'string') return
- var key = giturl + JSON.stringify(opts || {})
-
- if (!(key in cache)) {
- cache[key] = fromUrl(giturl, opts)
- }
-
- return cache[key]
-}
-
-function fromUrl (giturl, opts) {
- if (giturl == null || giturl === '') return
- var url = fixupUnqualifiedGist(
- isGitHubShorthand(giturl) ? 'github:' + giturl : giturl
- )
- var parsed = parseGitUrl(url)
- var shortcutMatch = url.match(new RegExp('^([^:]+):(?:(?:[^@:]+(?:[^@]+)?@)?([^/]*))[/](.+?)(?:[.]git)?($|#)'))
- var matches = Object.keys(gitHosts).map(function (gitHostName) {
- try {
- var gitHostInfo = gitHosts[gitHostName]
- var auth = null
- if (parsed.auth && authProtocols[parsed.protocol]) {
- auth = decodeURIComponent(parsed.auth)
- }
- var committish = parsed.hash ? decodeURIComponent(parsed.hash.substr(1)) : null
- var user = null
- var project = null
- var defaultRepresentation = null
- if (shortcutMatch && shortcutMatch[1] === gitHostName) {
- user = shortcutMatch[2] && decodeURIComponent(shortcutMatch[2])
- project = decodeURIComponent(shortcutMatch[3])
- defaultRepresentation = 'shortcut'
- } else {
- if (parsed.host && parsed.host !== gitHostInfo.domain && parsed.host.replace(/^www[.]/, '') !== gitHostInfo.domain) return
- if (!gitHostInfo.protocols_re.test(parsed.protocol)) return
- if (!parsed.path) return
- var pathmatch = gitHostInfo.pathmatch
- var matched = parsed.path.match(pathmatch)
- if (!matched) return
- /* istanbul ignore else */
- if (matched[1] !== null && matched[1] !== undefined) {
- user = decodeURIComponent(matched[1].replace(/^:/, ''))
- }
- project = decodeURIComponent(matched[2])
- defaultRepresentation = protocolToRepresentation(parsed.protocol)
- }
- return new GitHost(gitHostName, user, auth, project, committish, defaultRepresentation, opts)
- } catch (ex) {
- /* istanbul ignore else */
- if (ex instanceof URIError) {
- } else throw ex
- }
- }).filter(function (gitHostInfo) { return gitHostInfo })
- if (matches.length !== 1) return
- return matches[0]
-}
-
-function isGitHubShorthand (arg) {
- // Note: This does not fully test the git ref format.
- // See https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html
- //
- // The only way to do this properly would be to shell out to
- // git-check-ref-format, and as this is a fast sync function,
- // we don't want to do that. Just let git fail if it turns
- // out that the commit-ish is invalid.
- // GH usernames cannot start with . or -
- return /^[^:@%/\s.-][^:@%/\s]*[/][^:@\s/%]+(?:#.*)?$/.test(arg)
-}
-
-function fixupUnqualifiedGist (giturl) {
- // necessary for round-tripping gists
- var parsed = url.parse(giturl)
- if (parsed.protocol === 'gist:' && parsed.host && !parsed.path) {
- return parsed.protocol + '/' + parsed.host
- } else {
- return giturl
- }
-}
-
-function parseGitUrl (giturl) {
- var matched = giturl.match(/^([^@]+)@([^:/]+):[/]?((?:[^/]+[/])?[^/]+?)(?:[.]git)?(#.*)?$/)
- if (!matched) return url.parse(giturl)
- return {
- protocol: 'git+ssh:',
- slashes: true,
- auth: matched[1],
- host: matched[2],
- port: null,
- hostname: matched[2],
- hash: matched[4],
- search: null,
- query: null,
- pathname: '/' + matched[3],
- path: '/' + matched[3],
- href: 'git+ssh://' + matched[1] + '@' + matched[2] +
- '/' + matched[3] + (matched[4] || '')
- }
-}
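
A short sketch of the shorthand and SSH handling implemented by `fromUrl`, `isGitHubShorthand`, and `parseGitUrl` above; the expected values follow from the templates and the README's normalization notes:

```javascript
const hostedGitInfo = require('./index.js')

// Bare 'org/project' is treated as a GitHub shortcut...
const short = hostedGitInfo.fromUrl('npm/hosted-git-info')
console.log(short.getDefaultRepresentation()) // 'shortcut'
console.log(short.toString())                 // 'github:npm/hosted-git-info'

// ...while SSH connect strings are parsed by parseGitUrl() and normalized to git+ssh.
const ssh = hostedGitInfo.fromUrl('git@github.com:npm/hosted-git-info.git')
console.log(ssh.getDefaultRepresentation())   // 'sshurl'
console.log(ssh.toString())                   // 'git+ssh://git@github.com/npm/hosted-git-info.git'

// Results are memoized per (url, opts) pair in the module-level cache.
console.log(hostedGitInfo.fromUrl('npm/hosted-git-info') === short) // true
```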
diff --git a/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json b/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json
deleted file mode 100644
index 956b5ca08..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json
+++ /dev/null
@@ -1,69 +0,0 @@
-{
- "_from": "hosted-git-info@^2.7.1",
- "_id": "hosted-git-info@2.8.5",
- "_inBundle": false,
- "_integrity": "sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg==",
- "_location": "/libnpmpublish/npm-package-arg/hosted-git-info",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "hosted-git-info@^2.7.1",
- "name": "hosted-git-info",
- "escapedName": "hosted-git-info",
- "rawSpec": "^2.7.1",
- "saveSpec": null,
- "fetchSpec": "^2.7.1"
- },
- "_requiredBy": [
- "/libnpmpublish/npm-package-arg"
- ],
- "_resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.5.tgz",
- "_shasum": "759cfcf2c4d156ade59b0b2dfabddc42a6b9c70c",
- "_spec": "hosted-git-info@^2.7.1",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/libnpmpublish/node_modules/npm-package-arg",
- "author": {
- "name": "Rebecca Turner",
- "email": "me@re-becca.org",
- "url": "http://re-becca.org"
- },
- "bugs": {
- "url": "https://github.com/npm/hosted-git-info/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "Provides metadata and conversions from repository urls for Github, Bitbucket and Gitlab",
- "devDependencies": {
- "standard": "^11.0.1",
- "standard-version": "^4.4.0",
- "tap": "^12.7.0"
- },
- "files": [
- "index.js",
- "git-host.js",
- "git-host-info.js"
- ],
- "homepage": "https://github.com/npm/hosted-git-info",
- "keywords": [
- "git",
- "github",
- "bitbucket",
- "gitlab"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "hosted-git-info",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/hosted-git-info.git"
- },
- "scripts": {
- "postrelease": "npm publish --tag=ancient-legacy-fixes && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --100 --no-esm test/*.js",
- "test:coverage": "tap --coverage-report=html -J --100 --no-esm test/*.js"
- },
- "version": "2.8.5"
-}
diff --git a/node_modules/libnpmpublish/node_modules/npm-package-arg/package.json b/node_modules/libnpmpublish/node_modules/npm-package-arg/package.json
deleted file mode 100644
index 0796a1754..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-package-arg/package.json
+++ /dev/null
@@ -1,74 +0,0 @@
-{
- "_from": "npm-package-arg@^6.1.0",
- "_id": "npm-package-arg@6.1.1",
- "_inBundle": false,
- "_integrity": "sha512-qBpssaL3IOZWi5vEKUKW0cO7kzLeT+EQO9W8RsLOZf76KF9E/K9+wH0C7t06HXPpaH8WH5xF1MExLuCwbTqRUg==",
- "_location": "/libnpmpublish/npm-package-arg",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "npm-package-arg@^6.1.0",
- "name": "npm-package-arg",
- "escapedName": "npm-package-arg",
- "rawSpec": "^6.1.0",
- "saveSpec": null,
- "fetchSpec": "^6.1.0"
- },
- "_requiredBy": [
- "/libnpmpublish",
- "/libnpmpublish/npm-registry-fetch"
- ],
- "_resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-6.1.1.tgz",
- "_shasum": "02168cb0a49a2b75bf988a28698de7b529df5cb7",
- "_spec": "npm-package-arg@^6.1.0",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libnpmpublish",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/npm/npm-package-arg/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "hosted-git-info": "^2.7.1",
- "osenv": "^0.1.5",
- "semver": "^5.6.0",
- "validate-npm-package-name": "^3.0.0"
- },
- "deprecated": false,
- "description": "Parse the things that can be arguments to `npm install`",
- "devDependencies": {
- "standard": "^11.0.1",
- "standard-version": "^4.4.0",
- "tap": "^12.5.0",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "directories": {
- "test": "test"
- },
- "files": [
- "npa.js"
- ],
- "homepage": "https://github.com/npm/npm-package-arg",
- "license": "ISC",
- "main": "npa.js",
- "name": "npm-package-arg",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/npm-package-arg.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap --100 -J --coverage test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "6.1.1"
-}
diff --git a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/CHANGELOG.md b/node_modules/libnpmpublish/node_modules/npm-registry-fetch/CHANGELOG.md
deleted file mode 100644
index 3599c6b2f..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/CHANGELOG.md
+++ /dev/null
@@ -1,250 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="4.0.3"></a>
-## [4.0.3](https://github.com/npm/registry-fetch/compare/v4.0.2...v4.0.3) (2020-02-13)
-
-
-### Bug Fixes
-
-* always bypass cache when ?write=true ([ba8b4fe](https://github.com/npm/registry-fetch/commit/ba8b4fe))
-* use 30s default for timeout as per README ([69c2977](https://github.com/npm/registry-fetch/commit/69c2977)), closes [#20](https://github.com/npm/registry-fetch/issues/20)
-
-
-
-<a name="4.0.2"></a>
-## [4.0.2](https://github.com/npm/registry-fetch/compare/v4.0.0...v4.0.2) (2019-10-04)
-
-
-### Bug Fixes
-
-* Add null check on body on 401 errors ([e3a0186](https://github.com/npm/registry-fetch/commit/e3a0186)), closes [#9](https://github.com/npm/registry-fetch/issues/9)
-* **deps:** Add explicit dependency on safe-buffer ([8eae5f0](https://github.com/npm/registry-fetch/commit/8eae5f0)), closes [npm/libnpmaccess#2](https://github.com/npm/libnpmaccess/issues/2) [#3](https://github.com/npm/registry-fetch/issues/3)
-
-
-
-<a name="4.0.0"></a>
-# [4.0.0](https://github.com/npm/registry-fetch/compare/v3.9.1...v4.0.0) (2019-07-15)
-
-
-* cacache@12.0.0, infer uid from cache folder ([0c4f060](https://github.com/npm/registry-fetch/commit/0c4f060))
-
-
-### BREAKING CHANGES
-
-* uid and gid are inferred from cache folder, rather than
-being passed in as options.
-
-
-
-<a name="3.9.1"></a>
-## [3.9.1](https://github.com/npm/registry-fetch/compare/v3.9.0...v3.9.1) (2019-07-02)
-
-
-
-<a name="3.9.0"></a>
-# [3.9.0](https://github.com/npm/registry-fetch/compare/v3.8.0...v3.9.0) (2019-01-24)
-
-
-### Features
-
-* **auth:** support username:password encoded legacy _auth ([a91f90c](https://github.com/npm/registry-fetch/commit/a91f90c))
-
-
-
-<a name="3.8.0"></a>
-# [3.8.0](https://github.com/npm/registry-fetch/compare/v3.7.0...v3.8.0) (2018-08-23)
-
-
-### Features
-
-* **mapJson:** add support for passing in json stream mapper ([0600986](https://github.com/npm/registry-fetch/commit/0600986))
-
-
-
-<a name="3.7.0"></a>
-# [3.7.0](https://github.com/npm/registry-fetch/compare/v3.6.0...v3.7.0) (2018-08-23)
-
-
-### Features
-
-* **json.stream:** add utility function for streamed JSON parsing ([051d969](https://github.com/npm/registry-fetch/commit/051d969))
-
-
-
-<a name="3.6.0"></a>
-# [3.6.0](https://github.com/npm/registry-fetch/compare/v3.5.0...v3.6.0) (2018-08-22)
-
-
-### Bug Fixes
-
-* **docs:** document opts.forceAuth ([40bcd65](https://github.com/npm/registry-fetch/commit/40bcd65))
-
-
-### Features
-
-* **opts.ignoreBody:** add a boolean to throw away response bodies ([6923702](https://github.com/npm/registry-fetch/commit/6923702))
-
-
-
-<a name="3.5.0"></a>
-# [3.5.0](https://github.com/npm/registry-fetch/compare/v3.4.0...v3.5.0) (2018-08-22)
-
-
-### Features
-
-* **pkgid:** heuristic pkgid calculation for errors ([2e789a5](https://github.com/npm/registry-fetch/commit/2e789a5))
-
-
-
-<a name="3.4.0"></a>
-# [3.4.0](https://github.com/npm/registry-fetch/compare/v3.3.0...v3.4.0) (2018-08-22)
-
-
-### Bug Fixes
-
-* **deps:** use new figgy-pudding with aliases fix ([0308f54](https://github.com/npm/registry-fetch/commit/0308f54))
-
-
-### Features
-
-* **auth:** add forceAuth option to force a specific auth mechanism ([4524d17](https://github.com/npm/registry-fetch/commit/4524d17))
-
-
-
-<a name="3.3.0"></a>
-# [3.3.0](https://github.com/npm/registry-fetch/compare/v3.2.1...v3.3.0) (2018-08-21)
-
-
-### Bug Fixes
-
-* **query:** stop including undefined keys ([4718b1b](https://github.com/npm/registry-fetch/commit/4718b1b))
-
-
-### Features
-
-* **otp:** use heuristic detection for malformed EOTP responses ([f035194](https://github.com/npm/registry-fetch/commit/f035194))
-
-
-
-<a name="3.2.1"></a>
-## [3.2.1](https://github.com/npm/registry-fetch/compare/v3.2.0...v3.2.1) (2018-08-16)
-
-
-### Bug Fixes
-
-* **opts:** pass through non-null opts.retry ([beba040](https://github.com/npm/registry-fetch/commit/beba040))
-
-
-
-<a name="3.2.0"></a>
-# [3.2.0](https://github.com/npm/registry-fetch/compare/v3.1.1...v3.2.0) (2018-07-27)
-
-
-### Features
-
-* **gzip:** add opts.gzip convenience opt ([340abe0](https://github.com/npm/registry-fetch/commit/340abe0))
-
-
-
-<a name="3.1.1"></a>
-## [3.1.1](https://github.com/npm/registry-fetch/compare/v3.1.0...v3.1.1) (2018-04-09)
-
-
-
-<a name="3.1.0"></a>
-# [3.1.0](https://github.com/npm/registry-fetch/compare/v3.0.0...v3.1.0) (2018-04-09)
-
-
-### Features
-
-* **config:** support no-proxy and https-proxy options ([9aa906b](https://github.com/npm/registry-fetch/commit/9aa906b))
-
-
-
-<a name="3.0.0"></a>
-# [3.0.0](https://github.com/npm/registry-fetch/compare/v2.1.0...v3.0.0) (2018-04-09)
-
-
-### Bug Fixes
-
-* **api:** pacote integration-related fixes ([a29de4f](https://github.com/npm/registry-fetch/commit/a29de4f))
-* **config:** stop caring about opts.config ([5856a6f](https://github.com/npm/registry-fetch/commit/5856a6f))
-
-
-### BREAKING CHANGES
-
-* **config:** opts.config is no longer supported. Pass the options down in opts itself.
-
-
-
-<a name="2.1.0"></a>
-# [2.1.0](https://github.com/npm/registry-fetch/compare/v2.0.0...v2.1.0) (2018-04-08)
-
-
-### Features
-
-* **token:** accept opts.token for opts._authToken ([108c9f0](https://github.com/npm/registry-fetch/commit/108c9f0))
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/npm/registry-fetch/compare/v1.1.1...v2.0.0) (2018-04-08)
-
-
-### meta
-
-* drop support for node@4 ([758536e](https://github.com/npm/registry-fetch/commit/758536e))
-
-
-### BREAKING CHANGES
-
-* node@4 is no longer supported
-
-
-
-<a name="1.1.1"></a>
-## [1.1.1](https://github.com/npm/registry-fetch/compare/v1.1.0...v1.1.1) (2018-04-06)
-
-
-
-<a name="1.1.0"></a>
-# [1.1.0](https://github.com/npm/registry-fetch/compare/v1.0.1...v1.1.0) (2018-03-16)
-
-
-### Features
-
-* **specs:** can use opts.spec to trigger pickManifest ([85c4ac9](https://github.com/npm/registry-fetch/commit/85c4ac9))
-
-
-
-<a name="1.0.1"></a>
-## [1.0.1](https://github.com/npm/registry-fetch/compare/v1.0.0...v1.0.1) (2018-03-16)
-
-
-### Bug Fixes
-
-* **query:** oops console.log ([870e4f5](https://github.com/npm/registry-fetch/commit/870e4f5))
-
-
-
-<a name="1.0.0"></a>
-# 1.0.0 (2018-03-16)
-
-
-### Bug Fixes
-
-* **auth:** get auth working with all the little details ([84b94ba](https://github.com/npm/registry-fetch/commit/84b94ba))
-* **deps:** add bluebird as an actual dep ([1286e31](https://github.com/npm/registry-fetch/commit/1286e31))
-* **errors:** Unknown auth errors use default code ([#1](https://github.com/npm/registry-fetch/issues/1)) ([3d91b93](https://github.com/npm/registry-fetch/commit/3d91b93))
-* **standard:** remove args from invocation ([9620a0a](https://github.com/npm/registry-fetch/commit/9620a0a))
-
-
-### Features
-
-* **api:** baseline kinda-working API impl ([bf91f9f](https://github.com/npm/registry-fetch/commit/bf91f9f))
-* **body:** automatic handling of different opts.body values ([f3b97db](https://github.com/npm/registry-fetch/commit/f3b97db))
-* **config:** nicer input config input handling ([b9ce21d](https://github.com/npm/registry-fetch/commit/b9ce21d))
-* **opts:** use figgy-pudding for opts handling ([0abd527](https://github.com/npm/registry-fetch/commit/0abd527))
-* **query:** add query utility support ([65ea8b1](https://github.com/npm/registry-fetch/commit/65ea8b1))
diff --git a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/libnpmpublish/node_modules/npm-registry-fetch/LICENSE.md
deleted file mode 100644
index 8d28acf86..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/README.md b/node_modules/libnpmpublish/node_modules/npm-registry-fetch/README.md
deleted file mode 100644
index 80ce64cda..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/README.md
+++ /dev/null
@@ -1,636 +0,0 @@
-# npm-registry-fetch [![npm version](https://img.shields.io/npm/v/npm-registry-fetch.svg)](https://npm.im/npm-registry-fetch) [![license](https://img.shields.io/npm/l/npm-registry-fetch.svg)](https://npm.im/npm-registry-fetch) [![Travis](https://img.shields.io/travis/npm/npm-registry-fetch/latest.svg)](https://travis-ci.org/npm/npm-registry-fetch) [![AppVeyor](https://img.shields.io/appveyor/ci/zkat/npm-registry-fetch/latest.svg)](https://ci.appveyor.com/project/npm/npm-registry-fetch) [![Coverage Status](https://coveralls.io/repos/github/npm/npm-registry-fetch/badge.svg?branch=latest)](https://coveralls.io/github/npm/npm-registry-fetch?branch=latest)
-
-[`npm-registry-fetch`](https://github.com/npm/npm-registry-fetch) is a Node.js
-library that implements a `fetch`-like API for accessing npm registry APIs
-consistently. It's able to consume npm-style configuration values and has all
-the necessary logic for picking registries, handling scopes, and dealing with
-authentication details built-in.
-
-This package is meant to replace the older
-[`npm-registry-client`](https://npm.im/npm-registry-client).
-
-## Example
-
-```javascript
-const npmFetch = require('npm-registry-fetch')
-
-console.log(
- await npmFetch.json('/-/ping')
-)
-```
-
-## Table of Contents
-
-* [Installing](#install)
-* [Example](#example)
-* [Contributing](#contributing)
-* [API](#api)
- * [`fetch`](#fetch)
- * [`fetch.json`](#fetch-json)
- * [`fetch` options](#fetch-opts)
-
-### Install
-
-`$ npm install npm-registry-fetch`
-
-### Contributing
-
-The npm team enthusiastically welcomes contributions and project participation!
-There's a bunch of things you can do if you want to contribute! The [Contributor
-Guide](CONTRIBUTING.md) has all the information you need for everything from
-reporting bugs to contributing entire new features. Please don't hesitate to
-jump in if you'd like to, or even ask us questions if something isn't clear.
-
-All participants and maintainers in this project are expected to follow the [Code of
-Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other.
-
-Please refer to the [Changelog](CHANGELOG.md) for project history details, too.
-
-Happy hacking!
-
-### API
-
-#### Caching and `write=true` query strings
-
-Before performing any PUT or DELETE operation, npm clients first make a
-GET request to the registry resource being updated, which includes
-the query string `?write=true`.
-
-The semantics of this are, effectively, "I intend to write to this thing,
-and need to know the latest current value, so that my write can land
-cleanly".
-
-The public npm registry handles these `?write=true` requests by ensuring
-that the cache is re-validated before sending a response. In order to
-maintain the same behavior on the client, and not get tripped up by an
-overeager local cache when we intend to write data to the registry, any
-request that comes through `npm-registry-fetch` that contains `write=true`
-in the query string will forcibly set the `prefer-online` option to `true`,
-and set both `prefer-offline` and `offline` to false, so that any local
-cached value will be revalidated.
-
-#### <a name="fetch"></a> `> fetch(url, [opts]) -> Promise<Response>`
-
-Performs a request to a given URL.
-
-The URL can be either a full URL, or a path to one. The appropriate registry
-will be automatically picked if only a URL path is given.
-
-For available options, please see the section on [`fetch` options](#fetch-opts).
-
-##### Example
-
-```javascript
-const res = await fetch('/-/ping')
-console.log(res.headers)
-res.on('data', d => console.log(d.toString('utf8')))
-```
-
-#### <a name="fetch-json"></a> `> fetch.json(url, [opts]) -> Promise<ResponseJSON>`
-
-Performs a request to a given registry URL, parses the body of the response as
-JSON, and returns it as its final value. This is a utility shorthand for
-`fetch(url).then(res => res.json())`.
-
-For available options, please see the section on [`fetch` options](#fetch-opts).
-
-##### Example
-
-```javascript
-const res = await fetch.json('/-/ping')
-console.log(res) // Body parsed as JSON
-```
-
-#### <a name="fetch-json-stream"></a> `> fetch.json.stream(url, jsonPath, [opts]) -> Stream`
-
-Performs a request to a given registry URL and parses the body of the response
-as JSON, with each entry being emitted through the stream.
-
-The `jsonPath` argument is a [`JSONStream.parse()`
-path](https://github.com/dominictarr/JSONStream#jsonstreamparsepath), and the
-returned stream (unlike default `JSONStream`s), has a valid
-`Symbol.asyncIterator` implementation.
-
-For available options, please see the section on [`fetch` options](#fetch-opts).
-
-##### Example
-
-```javascript
-console.log('https://npm.im/~zkat has access to the following packages:')
-for await (let {key, value} of fetch.json.stream('/-/user/zkat/package', '$*')) {
- console.log(`https://npm.im/${key} (perms: ${value})`)
-}
-```
-
-#### <a name="fetch-opts"></a> `fetch` Options
-
-Fetch options are optional, and can be passed in as either a Map-like object
-(one with a `.get()` method), a plain javascript object, or a
-[`figgy-pudding`](https://npm.im/figgy-pudding) instance.
-
-##### <a name="opts-agent"></a> `opts.agent`
-
-* Type: http.Agent
-* Default: an appropriate agent based on URL protocol and proxy settings
-
-An [`Agent`](https://nodejs.org/api/http.html#http_class_http_agent) instance to
-be shared across requests. This allows multiple concurrent `fetch` requests to
-happen on the same socket.
-
-You do _not_ need to provide this option unless you want something particularly
-specialized, since proxy configurations and http/https agents are already
-automatically managed internally when this option is not passed through.
-
-##### <a name="opts-body"></a> `opts.body`
-
-* Type: Buffer | Stream | Object
-* Default: null
-
-Request body to send through the outgoing request. Buffers and Streams will be
-passed through as-is, with a default `content-type` of
-`application/octet-stream`. Plain JavaScript objects will be `JSON.stringify`ed
-and the `content-type` will default to `application/json`.
-
-Use [`opts.headers`](#opts-headers) to set the content-type to something else.
-
-##### <a name="opts-ca"></a> `opts.ca`
-
-* Type: String, Array, or null
-* Default: null
-
-The Certificate Authority signing certificate that is trusted for SSL
-connections to the registry. Values should be in PEM format (Windows calls it
-"Base-64 encoded X.509 (.CER)") with newlines replaced by the string `'\n'`. For
-example:
-
-```
-{
- ca: '-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----'
-}
-```
-
-Set to `null` to only allow "known" registrars, or to a specific CA cert
-to trust only that specific signing authority.
-
-Multiple CAs can be trusted by specifying an array of certificates instead of a
-single string.
-
-See also [`opts.strict-ssl`](#opts-strict-ssl), [`opts.ca`](#opts-ca) and
-[`opts.key`](#opts-key)
-
-##### <a name="opts-cache"></a> `opts.cache`
-
-* Type: path
-* Default: null
-
-The location of the http cache directory. If provided, certain cachable requests
-will be cached according to [IETF RFC 7234](https://tools.ietf.org/html/rfc7234)
-rules. This will speed up future requests, as well as make the cached data
-available offline if necessary/requested.
-
-See also [`offline`](#opts-offline), [`prefer-offline`](#opts-prefer-offline),
-and [`prefer-online`](#opts-prefer-online).
-
-##### <a name="opts-cert"></a> `opts.cert`
-
-* Type: String
-* Default: null
-
-A client certificate to pass when accessing the registry. Values should be in
-PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines
-replaced by the string `'\n'`. For example:
-
-```
-{
- cert: '-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----'
-}
-```
-
-It is _not_ the path to a certificate file (and there is no "certfile" option).
-
-See also: [`opts.ca`](#opts-ca) and [`opts.key`](#opts-key)
-
-##### <a name="opts-fetch-retries"></a> `opts.fetch-retries`
-
-* Type: Number
-* Default: 2
-
-The "retries" config for [`retry`](https://npm.im/retry) to use when fetching
-packages from the registry.
-
-See also [`opts.retry`](#opts-retry) to provide all retry options as a single
-object.
-
-##### <a name="opts-fetch-retry-factor"></a> `opts.fetch-retry-factor`
-
-* Type: Number
-* Default: 10
-
-The "factor" config for [`retry`](https://npm.im/retry) to use when fetching
-packages.
-
-See also [`opts.retry`](#opts-retry) to provide all retry options as a single
-object.
-
-##### <a name="opts-fetch-retry-mintimeout"></a> `opts.fetch-retry-mintimeout`
-
-* Type: Number
-* Default: 10000 (10 seconds)
-
-The "minTimeout" config for [`retry`](https://npm.im/retry) to use when fetching
-packages.
-
-See also [`opts.retry`](#opts-retry) to provide all retry options as a single
-object.
-
-##### <a name="opts-fetch-retry-maxtimeout"></a> `opts.fetch-retry-maxtimeout`
-
-* Type: Number
-* Default: 60000 (1 minute)
-
-The "maxTimeout" config for [`retry`](https://npm.im/retry) to use when fetching
-packages.
-
-See also [`opts.retry`](#opts-retry) to provide all retry options as a single
-object.
-
-##### <a name="opts-force-auth"></a> `opts.force-auth`
-
-* Alias: `opts.forceAuth`
-* Type: Object
-* Default: null
-
-If present, other auth-related values in `opts` will be completely ignored,
-including `alwaysAuth`, `email`, and `otp`, when calculating auth for a request,
-and the auth details in `opts.forceAuth` will be used instead.
-
-##### <a name="opts-gzip"></a> `opts.gzip`
-
-* Type: Boolean
-* Default: false
-
-If true, `npm-registry-fetch` will set the `Content-Encoding` header to `gzip`
-and use `zlib.gzip()` or `zlib.createGzip()` to gzip-encode
-[`opts.body`](#opts-body).
-
-##### <a name="opts-headers"></a> `opts.headers`
-
-* Type: Object
-* Default: null
-
-Additional headers for the outgoing request. This option can also be used to
-override headers automatically generated by `npm-registry-fetch`, such as
-`Content-Type`.
-
-##### <a name="opts-ignore-body"></a> `opts.ignore-body`
-
-* Alias: `opts.ignoreBody`
-* Type: Boolean
-* Default: false
-
-If true, the **response body** will be thrown away and `res.body` set to `null`.
-This will prevent dangling response sockets for requests where you don't usually
-care what the response body is.
-
-##### <a name="opts-integrity"></a> `opts.integrity`
-
-* Type: String | [SRI object](https://npm.im/ssri)
-* Default: null
-
-If provided, the response body will be verified against this integrity string,
-using [`ssri`](https://npm.im/ssri). If verification succeeds, the response will
-complete as normal. If verification fails, the response body will error with an
-`EINTEGRITY` error.
-
-Body integrity is only verified if the body is actually consumed to completion --
-that is, if you use `res.json()`/`res.buffer()`, or if you consume the default
-`res` stream data to its end.
-
-Cached data will have its integrity automatically verified using the
-previously-generated integrity hash for the saved request information, so
-`EINTEGRITY` errors can happen if [`opts.cache`](#opts-cache) is used, even if
-`opts.integrity` is not passed in.
-
-##### <a name='opts-is-from-ci'></a> `opts.is-from-ci`
-
-* Alias: `opts.isFromCI`
-* Type: Boolean
-* Default: Based on environment variables
-
-This is used to populate the `npm-in-ci` request header sent to the registry.
-
-##### <a name="opts-key"></a> `opts.key`
-
-* Type: String
-* Default: null
-
-A client key to pass when accessing the registry. Values should be in PEM
-format with newlines replaced by the string `'\n'`. For example:
-
-```
-{
- key: '-----BEGIN PRIVATE KEY-----\nXXXX\nXXXX\n-----END PRIVATE KEY-----'
-}
-```
-
-It is _not_ the path to a key file (and there is no "keyfile" option).
-
-See also: [`opts.ca`](#opts-ca) and [`opts.cert`](#opts-cert)
-
-##### <a name="opts-local-address"></a> `opts.local-address`
-
-* Type: IP Address String
-* Default: null
-
-The IP address of the local interface to use when making connections
-to the registry.
-
-See also [`opts.proxy`](#opts-proxy)
-
-##### <a name="opts-log"></a> `opts.log`
-
-* Type: [`npmlog`](https://npm.im/npmlog)-like
-* Default: null
-
-Logger object to use for logging operation details. Must have the same methods
-as `npmlog`.
-
-##### <a name="opts-map-json"></a> `opts.map-json`
-
-* Alias: `mapJson`, `mapJSON`
-* Type: Function
-* Default: undefined
-
-When using `fetch.json.stream()` (NOT `fetch.json()`), this will be passed down
-to [`JSONStream`](https://npm.im/JSONStream) as the second argument to
-`JSONStream.parse`, and can be used to transform stream data before output.
-
-##### <a name="opts-maxsockets"></a> `opts.maxsockets`
-
-* Alias: `opts.max-sockets`
-* Type: Integer
-* Default: 12
-
-Maximum number of sockets to keep open during requests. Has no effect if
-[`opts.agent`](#opts-agent) is used.
-
-##### <a name="opts-method"></a> `opts.method`
-
-* Type: String
-* Default: 'GET'
-
-HTTP method to use for the outgoing request. Case-insensitive.
-
-##### <a name="opts-noproxy"></a> `opts.noproxy`
-
-* Type: Boolean
-* Default: process.env.NOPROXY
-
-If true, proxying will be disabled even if [`opts.proxy`](#opts-proxy) is used.
-
-##### <a name="opts-npm-session"></a> `opts.npm-session`
-
-* Alias: `opts.npmSession`
-* Type: String
-* Default: null
-
-If provided, will be sent in the `npm-session` header. This header is used by
-the npm registry to identify individual user sessions (usually individual
-invocations of the CLI).
-
-##### <a name="opts-offline"></a> `opts.offline`
-
-* Type: Boolean
-* Default: false
-
-Force offline mode: no network requests will be done during install. To allow
-`npm-registry-fetch` to fill in missing cache data, see
-[`opts.prefer-offline`](#opts-prefer-offline).
-
-This option is only really useful if you're also using
-[`opts.cache`](#opts-cache).
-
-This option is set to `true` when the request includes `write=true` in the
-query string.
-
-##### <a name="opts-otp"></a> `opts.otp`
-
-* Type: Number | String
-* Default: null
-
-This is a one-time password from a two-factor authenticator. It is required for
-certain registry interactions when two-factor auth is enabled for a user
-account.
-
-##### <a name="opts-password"></a> `opts.password`
-
-* Alias: `_password`
-* Type: String
-* Default: null
-
-Password used for basic authentication. For the more modern authentication
-method, please use the (more secure) [`opts.token`](#opts-token)
-
-Can optionally be scoped to a registry by using a "nerf dart" for that registry.
-That is:
-
-```
-{
- '//registry.npmjs.org/:password': 't0k3nH34r'
-}
-```
-
-See also [`opts.username`](#opts-username)
-
-##### <a name="opts-prefer-offline"></a> `opts.prefer-offline`
-
-* Type: Boolean
-* Default: false
-
-If true, staleness checks for cached data will be bypassed, but missing data
-will be requested from the server. To force full offline mode, use
-[`opts.offline`](#opts-offline).
-
-This option is generally only useful if you're also using
-[`opts.cache`](#opts-cache).
-
-This option is set to `false` when the request includes `write=true` in the
-query string.
-
-##### <a name="opts-prefer-online"></a> `opts.prefer-online`
-
-* Type: Boolean
-* Default: false
-
-If true, staleness checks for cached data will be forced, making the CLI look
-for updates immediately even for fresh package data.
-
-This option is generally only useful if you're also using
-[`opts.cache`](#opts-cache).
-
-This option is set to `true` when the request includes `write=true` in the
-query string.
-
-##### <a name="opts-project-scope"></a> `opts.project-scope`
-
-* Alias: `opts.projectScope`
-* Type: String
-* Default: null
-
-If provided, will be sent in the `npm-scope` header. This header is used by the
-npm registry to identify the toplevel package scope that a particular project
-installation is using.
-
-##### <a name="opts-proxy"></a> `opts.proxy`
-
-* Type: url
-* Default: null
-
-A proxy to use for outgoing http requests. If not passed in, the `HTTP(S)_PROXY`
-environment variable will be used.
-
-##### <a name="opts-query"></a> `opts.query`
-
-* Type: String | Object
-* Default: null
-
-If provided, the request URI will have a query string appended to it using this
-query. If `opts.query` is an object, it will be converted to a query string
-using
-[`querystring.stringify()`](https://nodejs.org/api/querystring.html#querystring_querystring_stringify_obj_sep_eq_options).
-
-If the request URI already has a query string, it will be merged with
-`opts.query`, preferring `opts.query` values.
-
-##### <a name="opts-refer"></a> `opts.refer`
-
-* Alias: `opts.referer`
-* Type: String
-* Default: null
-
-Value to use for the `Referer` header. The npm CLI itself uses this to serialize
-the npm command line using the given request.
-
-##### <a name="opts-registry"></a> `opts.registry`
-
-* Type: URL
-* Default: `'https://registry.npmjs.org'`
-
-Registry configuration for a request. If a request URL only includes the URL
-path, this registry setting will be prepended. This configuration is also used
-to determine authentication details, so even if the request URL references a
-completely different host, `opts.registry` will be used to find the auth details
-for that request.
-
-See also [`opts.scope`](#opts-scope), [`opts.spec`](#opts-spec), and
-[`opts.<scope>:registry`](#opts-scope-registry) which can all affect the actual
-registry URL used by the outgoing request.
-
-##### <a name="opts-retry"></a> `opts.retry`
-
-* Type: Object
-* Default: null
-
-Single-object configuration for request retry settings. If passed in, will
-override individually-passed `fetch-retry-*` settings.
-
-##### <a name="opts-scope"></a> `opts.scope`
-
-* Type: String
-* Default: null
-
-Associate an operation with a scope for a scoped registry. This option can force
-lookup of scope-specific registries and authentication.
-
-See also [`opts.<scope>:registry`](#opts-scope-registry) and
-[`opts.spec`](#opts-spec) for interactions with this option.
-
-##### <a name="opts-scope-registry"></a> `opts.<scope>:registry`
-
-* Type: String
-* Default: null
-
-This option type can be used to configure the registry used for requests
-involving a particular scope. For example, `opts['@myscope:registry'] =
-'https://scope-specific.registry/'` will make it so requests go out to this
-registry instead of [`opts.registry`](#opts-registry) when
-[`opts.scope`](#opts-scope) is used, or when [`opts.spec`](#opts-spec) is a
-scoped package spec.
-
-The `@` before the scope name is optional, but recommended.
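A sketch of the interplay between a scoped spec and a scope-specific registry; the scope, package name, and registry URL are made up:

```js
const fetch = require('npm-registry-fetch')

fetch.json('/@myscope%2fsome-package', {
  spec: '@myscope/some-package',
  '@myscope:registry': 'https://scope-specific.registry/'
})
// pickRegistry() (see index.js below) resolves the scoped registry from the
// spec's scope (or from opts.scope), falling back to opts.registry otherwise
```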
-
-##### <a name="opts-spec"></a> `opts.spec`
-
-* Type: String | [`npm-package-arg`](https://npm.im/npm-package-arg) object.
-* Default: null
-
-If provided, can be used to automatically configure [`opts.scope`](#opts-scope)
-based on a specific package name. Non-registry package specs will throw an
-error.
-
-##### <a name="opts-strict-ssl"></a> `opts.strict-ssl`
-
-* Type: Boolean
-* Default: true
-
-Whether or not to do SSL key validation when making requests to the
-registry via https.
-
-See also [`opts.ca`](#opts-ca).
-
-##### <a name="opts-timeout"></a> `opts.timeout`
-
-* Type: Milliseconds
-* Default: 30000 (30 seconds)
-
-Time before a hanging request times out.
-
-##### <a name="opts-token"></a> `opts.token`
-
-* Alias: `opts._authToken`
-* Type: String
-* Default: null
-
-Authentication token string.
-
-Can be scoped to a registry by using a "nerf dart" for that registry. That is:
-
-```
-{
- '//registry.npmjs.org/:token': 't0k3nH34r'
-}
-```
-
-##### <a name="opts-user-agent"></a> `opts.user-agent`
-
-* Type: String
-* Default: `'npm-registry-fetch@<version>/node@<node-version>+<arch> (<platform>)'`
-
-User agent string to send in the `User-Agent` header.
-
-##### <a name="opts-username"></a> `opts.username`
-
-* Type: String
-* Default: null
-
-Username used for basic authentication. For a more modern and more secure
-authentication method, please use [`opts.token`](#opts-token) instead.
-
-Can optionally be scoped to a registry by using a "nerf dart" for that registry.
-That is:
-
-```
-{
- '//registry.npmjs.org/:username': 'myusername'
-}
-```
-
-See also [`opts.password`](#opts-password).
-
-##### <a name="opts-auth"></a> `opts._auth`
-
-* Type: String
-* Default: null
-
-**DEPRECATED** This is a legacy authentication token supported only for
-compatibility. Please use [`opts.token`](#opts-token) instead.
diff --git a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/auth.js b/node_modules/libnpmpublish/node_modules/npm-registry-fetch/auth.js
deleted file mode 100644
index d583982d0..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/auth.js
+++ /dev/null
@@ -1,57 +0,0 @@
-'use strict'
-
-const config = require('./config.js')
-const url = require('url')
-
-module.exports = getAuth
-function getAuth (registry, opts) {
- if (!registry) { throw new Error('registry is required') }
- opts = config(opts)
- let AUTH = {}
- const regKey = registry && registryKey(registry)
- if (opts.forceAuth) {
- opts = opts.forceAuth
- }
- const doKey = (key, alias) => addKey(opts, AUTH, regKey, key, alias)
- doKey('token')
- doKey('_authToken', 'token')
- doKey('username')
- doKey('password')
- doKey('_password', 'password')
- doKey('email')
- doKey('_auth')
- doKey('otp')
- doKey('always-auth', 'alwaysAuth')
- if (AUTH.password) {
- AUTH.password = Buffer.from(AUTH.password, 'base64').toString('utf8')
- }
- if (AUTH._auth && !(AUTH.username && AUTH.password)) {
- let auth = Buffer.from(AUTH._auth, 'base64').toString()
- auth = auth.split(':')
- AUTH.username = auth.shift()
- AUTH.password = auth.join(':')
- }
- AUTH.alwaysAuth = AUTH.alwaysAuth === 'false' ? false : !!AUTH.alwaysAuth
- return AUTH
-}
-
-function addKey (opts, obj, scope, key, objKey) {
- if (opts[key]) {
- obj[objKey || key] = opts[key]
- }
- if (scope && opts[`${scope}:${key}`]) {
- obj[objKey || key] = opts[`${scope}:${key}`]
- }
-}
-
-// Called a nerf dart in the main codebase. Used as a "safe"
-// key when fetching registry info from config.
-function registryKey (registry) {
- const parsed = url.parse(registry)
- const formatted = url.format({
- host: parsed.host,
- pathname: parsed.pathname,
- slashes: parsed.slashes
- })
- return url.resolve(formatted, '.')
-}
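For reference, a sketch of what the deleted `getAuth()` would resolve for a nerf-darted token; the token value is fake, and the require path assumes you are inside the removed module's directory:

```js
const getAuth = require('./auth.js')

const auth = getAuth('https://registry.npmjs.org/', {
  '//registry.npmjs.org/:_authToken': 't0k3nH34r'
})
// registryKey() reduces the registry URL to '//registry.npmjs.org/', so the
// nerf-darted key matches and the alias maps _authToken onto `token`:
// => { token: 't0k3nH34r', alwaysAuth: false }
```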
diff --git a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/check-response.js b/node_modules/libnpmpublish/node_modules/npm-registry-fetch/check-response.js
deleted file mode 100644
index 14058239b..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/check-response.js
+++ /dev/null
@@ -1,109 +0,0 @@
-'use strict'
-
-const config = require('./config.js')
-const errors = require('./errors.js')
-const LRU = require('lru-cache')
-
-module.exports = checkResponse
-function checkResponse (method, res, registry, startTime, opts) {
- opts = config(opts)
- if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) {
- opts.log.notice('', res.headers.get('npm-notice'))
- }
- checkWarnings(res, registry, opts)
- if (res.status >= 400) {
- logRequest(method, res, startTime, opts)
- return checkErrors(method, res, startTime, opts)
- } else {
- res.body.on('end', () => logRequest(method, res, startTime, opts))
- if (opts.ignoreBody) {
- res.body.resume()
- res.body = null
- }
- return res
- }
-}
-
-function logRequest (method, res, startTime, opts) {
- const elapsedTime = Date.now() - startTime
- const attempt = res.headers.get('x-fetch-attempts')
- const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : ''
- const cacheStr = res.headers.get('x-local-cache') ? ' (from cache)' : ''
- opts.log.http(
- 'fetch',
- `${method.toUpperCase()} ${res.status} ${res.url} ${elapsedTime}ms${attemptStr}${cacheStr}`
- )
-}
-
-const WARNING_REGEXP = /^\s*(\d{3})\s+(\S+)\s+"(.*)"\s+"([^"]+)"/
-const BAD_HOSTS = new LRU({ max: 50 })
-
-function checkWarnings (res, registry, opts) {
- if (res.headers.has('warning') && !BAD_HOSTS.has(registry)) {
- const warnings = {}
- res.headers.raw()['warning'].forEach(w => {
- const match = w.match(WARNING_REGEXP)
- if (match) {
- warnings[match[1]] = {
- code: match[1],
- host: match[2],
- message: match[3],
- date: new Date(match[4])
- }
- }
- })
- BAD_HOSTS.set(registry, true)
- if (warnings['199']) {
- if (warnings['199'].message.match(/ENOTFOUND/)) {
- opts.log.warn('registry', `Using stale data from ${registry} because the host is inaccessible -- are you offline?`)
- } else {
- opts.log.warn('registry', `Unexpected warning for ${registry}: ${warnings['199'].message}`)
- }
- }
- if (warnings['111']) {
- // 111 Revalidation failed -- we're using stale data
- opts.log.warn(
- 'registry',
- `Using stale data from ${registry} due to a request error during revalidation.`
- )
- }
- }
-}
-
-function checkErrors (method, res, startTime, opts) {
- return res.buffer()
- .catch(() => null)
- .then(body => {
- let parsed = body
- try {
- parsed = JSON.parse(body.toString('utf8'))
- } catch (e) {}
- if (res.status === 401 && res.headers.get('www-authenticate')) {
- const auth = res.headers.get('www-authenticate')
- .split(/,\s*/)
- .map(s => s.toLowerCase())
- if (auth.indexOf('ipaddress') !== -1) {
- throw new errors.HttpErrorAuthIPAddress(
- method, res, parsed, opts.spec
- )
- } else if (auth.indexOf('otp') !== -1) {
- throw new errors.HttpErrorAuthOTP(
- method, res, parsed, opts.spec
- )
- } else {
- throw new errors.HttpErrorAuthUnknown(
- method, res, parsed, opts.spec
- )
- }
- } else if (res.status === 401 && body != null && /one-time pass/.test(body.toString('utf8'))) {
- // Heuristic for malformed OTP responses that don't include the www-authenticate header.
- throw new errors.HttpErrorAuthOTP(
- method, res, parsed, opts.spec
- )
- } else {
- throw new errors.HttpErrorGeneral(
- method, res, parsed, opts.spec
- )
- }
- })
-}
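To illustrate the `WARNING_REGEXP` above, a sketch of the registry `Warning` header shape it expects (the header value here is invented):

```js
const WARNING_REGEXP = /^\s*(\d{3})\s+(\S+)\s+"(.*)"\s+"([^"]+)"/

const header = '199 - "ENOTFOUND registry.npmjs.org" "Sat, 28 Mar 2020 00:00:00 GMT"'
const [, code, host, message, date] = header.match(WARNING_REGEXP)
// code === '199', message === 'ENOTFOUND registry.npmjs.org'
// a 199 warning containing ENOTFOUND triggers the "are you offline?" log above
```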
diff --git a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/config.js b/node_modules/libnpmpublish/node_modules/npm-registry-fetch/config.js
deleted file mode 100644
index d7be3f9b3..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/config.js
+++ /dev/null
@@ -1,98 +0,0 @@
-'use strict'
-
-const pkg = require('./package.json')
-const figgyPudding = require('figgy-pudding')
-const silentLog = require('./silentlog.js')
-
-const AUTH_REGEX = /^(?:.*:)?(token|_authToken|username|_password|password|email|always-auth|_auth|otp)$/
-const SCOPE_REGISTRY_REGEX = /@.*:registry$/gi
-module.exports = figgyPudding({
- 'agent': {},
- 'algorithms': {},
- 'body': {},
- 'ca': {},
- 'cache': {},
- 'cert': {},
- 'fetch-retries': {},
- 'fetch-retry-factor': {},
- 'fetch-retry-maxtimeout': {},
- 'fetch-retry-mintimeout': {},
- 'force-auth': {},
- forceAuth: 'force-auth',
- 'gzip': {},
- 'headers': {},
- 'https-proxy': {},
- 'ignore-body': {},
- ignoreBody: 'ignore-body',
- 'integrity': {},
- 'is-from-ci': 'isFromCI',
- 'isFromCI': {
- default () {
- return (
- process.env['CI'] === 'true' ||
- process.env['TDDIUM'] ||
- process.env['JENKINS_URL'] ||
- process.env['bamboo.buildKey'] ||
- process.env['GO_PIPELINE_NAME']
- )
- }
- },
- 'key': {},
- 'local-address': {},
- 'log': {
- default: silentLog
- },
- 'map-json': 'mapJson',
- 'mapJSON': 'mapJson',
- 'mapJson': {},
- 'max-sockets': 'maxsockets',
- 'maxsockets': {
- default: 12
- },
- 'memoize': {},
- 'method': {
- default: 'GET'
- },
- 'no-proxy': {},
- 'noproxy': {},
- 'npm-session': 'npmSession',
- 'npmSession': {},
- 'offline': {},
- 'otp': {},
- 'prefer-offline': {},
- 'prefer-online': {},
- 'projectScope': {},
- 'project-scope': 'projectScope',
- 'Promise': {default: () => Promise},
- 'proxy': {},
- 'query': {},
- 'refer': {},
- 'referer': 'refer',
- 'registry': {
- default: 'https://registry.npmjs.org/'
- },
- 'retry': {},
- 'scope': {},
- 'spec': {},
- 'strict-ssl': {},
- 'timeout': {
- default: 30 * 1000
- },
- 'user-agent': {
- default: `${
- pkg.name
- }@${
- pkg.version
- }/node@${
- process.version
- }+${
- process.arch
- } (${
- process.platform
- })`
- }
-}, {
- other (key) {
- return key.match(AUTH_REGEX) || key.match(SCOPE_REGISTRY_REGEX)
- }
-})
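A small sketch of how the figgy-pudding factory above behaves; defaults come straight from the spec, and the require path assumes the removed module's directory:

```js
const config = require('./config.js')

const opts = config({ method: 'POST' })
opts.method      // => 'POST' (caller-supplied)
opts.maxsockets  // => 12 (default)
opts.registry    // => 'https://registry.npmjs.org/' (default)
```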
diff --git a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/errors.js b/node_modules/libnpmpublish/node_modules/npm-registry-fetch/errors.js
deleted file mode 100644
index ba78735fc..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/errors.js
+++ /dev/null
@@ -1,79 +0,0 @@
-'use strict'
-
-const url = require('url')
-
-function packageName (href) {
- try {
- let basePath = url.parse(href).pathname.substr(1)
- if (!basePath.match(/^-/)) {
- basePath = basePath.split('/')
- var index = basePath.indexOf('_rewrite')
- if (index === -1) {
- index = basePath.length - 1
- } else {
- index++
- }
- return decodeURIComponent(basePath[index])
- }
- } catch (_) {
- // this is ok
- }
-}
-
-class HttpErrorBase extends Error {
- constructor (method, res, body, spec) {
- super()
- this.headers = res.headers.raw()
- this.statusCode = res.status
- this.code = `E${res.status}`
- this.method = method
- this.uri = res.url
- this.body = body
- this.pkgid = spec ? spec.toString() : packageName(res.url)
- }
-}
-module.exports.HttpErrorBase = HttpErrorBase
-
-class HttpErrorGeneral extends HttpErrorBase {
- constructor (method, res, body, spec) {
- super(method, res, body, spec)
- this.message = `${res.status} ${res.statusText} - ${
- this.method.toUpperCase()
- } ${
- this.spec || this.uri
- }${
- (body && body.error) ? ' - ' + body.error : ''
- }`
- Error.captureStackTrace(this, HttpErrorGeneral)
- }
-}
-module.exports.HttpErrorGeneral = HttpErrorGeneral
-
-class HttpErrorAuthOTP extends HttpErrorBase {
- constructor (method, res, body, spec) {
- super(method, res, body, spec)
- this.message = 'OTP required for authentication'
- this.code = 'EOTP'
- Error.captureStackTrace(this, HttpErrorAuthOTP)
- }
-}
-module.exports.HttpErrorAuthOTP = HttpErrorAuthOTP
-
-class HttpErrorAuthIPAddress extends HttpErrorBase {
- constructor (method, res, body, spec) {
- super(method, res, body, spec)
- this.message = 'Login is not allowed from your IP address'
- this.code = 'EAUTHIP'
- Error.captureStackTrace(this, HttpErrorAuthIPAddress)
- }
-}
-module.exports.HttpErrorAuthIPAddress = HttpErrorAuthIPAddress
-
-class HttpErrorAuthUnknown extends HttpErrorBase {
- constructor (method, res, body, spec) {
- super(method, res, body, spec)
- this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate')
- Error.captureStackTrace(this, HttpErrorAuthUnknown)
- }
-}
-module.exports.HttpErrorAuthUnknown = HttpErrorAuthUnknown
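A hedged sketch of how a caller might branch on the error classes above via their `code` property (the endpoint and OTP flow are illustrative only):

```js
const fetch = require('npm-registry-fetch')

fetch.json('/-/whoami', { token: 'possibly-stale-token' })
  .catch(err => {
    if (err.code === 'EOTP') {
      // HttpErrorAuthOTP: retry the same request with opts.otp set
    } else if (err.code === 'EAUTHIP') {
      // HttpErrorAuthIPAddress: login is blocked from this IP
    } else {
      throw err // HttpErrorGeneral carries statusCode, method, uri, and body
    }
  })
```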
diff --git a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/index.js b/node_modules/libnpmpublish/node_modules/npm-registry-fetch/index.js
deleted file mode 100644
index 9bd0ad32d..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/index.js
+++ /dev/null
@@ -1,203 +0,0 @@
-'use strict'
-
-const Buffer = require('safe-buffer').Buffer
-
-const checkResponse = require('./check-response.js')
-const config = require('./config.js')
-const getAuth = require('./auth.js')
-const fetch = require('make-fetch-happen')
-const JSONStream = require('JSONStream')
-const npa = require('npm-package-arg')
-const {PassThrough} = require('stream')
-const qs = require('querystring')
-const url = require('url')
-const zlib = require('zlib')
-
-module.exports = regFetch
-function regFetch (uri, opts) {
- opts = config(opts)
- const registry = (
- (opts.spec && pickRegistry(opts.spec, opts)) ||
- opts.registry ||
- 'https://registry.npmjs.org/'
- )
- uri = url.parse(uri).protocol
- ? uri
- : `${
- registry.trim().replace(/\/?$/g, '')
- }/${
- uri.trim().replace(/^\//, '')
- }`
- // through that takes into account the scope, the prefix of `uri`, etc
- const startTime = Date.now()
- const headers = getHeaders(registry, uri, opts)
- let body = opts.body
- const bodyIsStream = body &&
- typeof body === 'object' &&
- typeof body.pipe === 'function'
- if (body && !bodyIsStream && typeof body !== 'string' && !Buffer.isBuffer(body)) {
- headers['content-type'] = headers['content-type'] || 'application/json'
- body = JSON.stringify(body)
- } else if (body && !headers['content-type']) {
- headers['content-type'] = 'application/octet-stream'
- }
- if (opts.gzip) {
- headers['content-encoding'] = 'gzip'
- if (bodyIsStream) {
- const gz = zlib.createGzip()
- body.on('error', err => gz.emit('error', err))
- body = body.pipe(gz)
- } else {
- body = new opts.Promise((resolve, reject) => {
- zlib.gzip(body, (err, gz) => err ? reject(err) : resolve(gz))
- })
- }
- }
-
- let q = opts.query
- if (q) {
- if (typeof q === 'string') {
- q = qs.parse(q)
- } else if (typeof q !== 'object') {
- throw new TypeError('invalid query option, must be string or object')
- }
- Object.keys(q).forEach(key => {
- if (q[key] === undefined) {
- delete q[key]
- }
- })
- }
- const parsed = url.parse(uri)
-
- const query = parsed.query ? Object.assign(qs.parse(parsed.query), q || {})
- : Object.keys(q || {}).length ? q
- : null
-
- if (query) {
- if (String(query.write) === 'true' && opts.method === 'GET') {
- opts = opts.concat({
- offline: false,
- 'prefer-offline': false,
- 'prefer-online': true
- })
- }
- parsed.search = '?' + qs.stringify(query)
- uri = url.format(parsed)
- }
-
- return opts.Promise.resolve(body).then(body => fetch(uri, {
- agent: opts.agent,
- algorithms: opts.algorithms,
- body,
- cache: getCacheMode(opts),
- cacheManager: opts.cache,
- ca: opts.ca,
- cert: opts.cert,
- headers,
- integrity: opts.integrity,
- key: opts.key,
- localAddress: opts['local-address'],
- maxSockets: opts.maxsockets,
- memoize: opts.memoize,
- method: opts.method || 'GET',
- noProxy: opts['no-proxy'] || opts.noproxy,
- Promise: opts.Promise,
- proxy: opts['https-proxy'] || opts.proxy,
- referer: opts.refer,
- retry: opts.retry != null ? opts.retry : {
- retries: opts['fetch-retries'],
- factor: opts['fetch-retry-factor'],
- minTimeout: opts['fetch-retry-mintimeout'],
- maxTimeout: opts['fetch-retry-maxtimeout']
- },
- strictSSL: !!opts['strict-ssl'],
- timeout: opts.timeout
- }).then(res => checkResponse(
- opts.method || 'GET', res, registry, startTime, opts
- )))
-}
-
-module.exports.json = fetchJSON
-function fetchJSON (uri, opts) {
- return regFetch(uri, opts).then(res => res.json())
-}
-
-module.exports.json.stream = fetchJSONStream
-function fetchJSONStream (uri, jsonPath, opts) {
- opts = config(opts)
- const parser = JSONStream.parse(jsonPath, opts.mapJson)
- const pt = parser.pipe(new PassThrough({objectMode: true}))
- parser.on('error', err => pt.emit('error', err))
- regFetch(uri, opts).then(res => {
- res.body.on('error', err => parser.emit('error', err))
- res.body.pipe(parser)
- }, err => pt.emit('error', err))
- return pt
-}
-
-module.exports.pickRegistry = pickRegistry
-function pickRegistry (spec, opts) {
- spec = npa(spec)
- opts = config(opts)
- let registry = spec.scope &&
- opts[spec.scope.replace(/^@?/, '@') + ':registry']
-
- if (!registry && opts.scope) {
- registry = opts[opts.scope.replace(/^@?/, '@') + ':registry']
- }
-
- if (!registry) {
- registry = opts.registry || 'https://registry.npmjs.org/'
- }
-
- return registry
-}
-
-function getCacheMode (opts) {
- return opts.offline
- ? 'only-if-cached'
- : opts['prefer-offline']
- ? 'force-cache'
- : opts['prefer-online']
- ? 'no-cache'
- : 'default'
-}
-
-function getHeaders (registry, uri, opts) {
- const headers = Object.assign({
- 'npm-in-ci': !!(
- opts['is-from-ci'] ||
- process.env['CI'] === 'true' ||
- process.env['TDDIUM'] ||
- process.env['JENKINS_URL'] ||
- process.env['bamboo.buildKey'] ||
- process.env['GO_PIPELINE_NAME']
- ),
- 'npm-scope': opts['project-scope'],
- 'npm-session': opts['npm-session'],
- 'user-agent': opts['user-agent'],
- 'referer': opts.refer
- }, opts.headers)
-
- const auth = getAuth(registry, opts)
- // If a tarball is hosted on a different place than the manifest, only send
- // credentials on `alwaysAuth`
- const shouldAuth = (
- auth.alwaysAuth ||
- url.parse(uri).host === url.parse(registry).host
- )
- if (shouldAuth && auth.token) {
- headers.authorization = `Bearer ${auth.token}`
- } else if (shouldAuth && auth.username && auth.password) {
- const encoded = Buffer.from(
- `${auth.username}:${auth.password}`, 'utf8'
- ).toString('base64')
- headers.authorization = `Basic ${encoded}`
- } else if (shouldAuth && auth._auth) {
- headers.authorization = `Basic ${auth._auth}`
- }
- if (shouldAuth && auth.otp) {
- headers['npm-otp'] = auth.otp
- }
- return headers
-}
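For context, a minimal usage sketch of the module being removed here (package names and the streamed endpoint are illustrative):

```js
const fetch = require('npm-registry-fetch')

// plain fetch: returns the make-fetch-happen response after checkResponse()
fetch('/some-package').then(res => res.json()).then(console.log)

// convenience JSON helper
fetch.json('/some-package').then(packument => console.log(packument.name))

// stream each top-level value of a large document via a JSONStream path
fetch.json.stream('/-/all', '*')
  .on('data', entry => console.log(entry.name))
```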
diff --git a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/package.json b/node_modules/libnpmpublish/node_modules/npm-registry-fetch/package.json
deleted file mode 100644
index bdc04d179..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/package.json
+++ /dev/null
@@ -1,96 +0,0 @@
-{
- "_from": "npm-registry-fetch@^4.0.0",
- "_id": "npm-registry-fetch@4.0.3",
- "_inBundle": false,
- "_integrity": "sha512-WGvUx0lkKFhu9MbiGFuT9nG2NpfQ+4dCJwRwwtK2HK5izJEvwDxMeUyqbuMS7N/OkpVCqDorV6rO5E4V9F8lJw==",
- "_location": "/libnpmpublish/npm-registry-fetch",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "npm-registry-fetch@^4.0.0",
- "name": "npm-registry-fetch",
- "escapedName": "npm-registry-fetch",
- "rawSpec": "^4.0.0",
- "saveSpec": null,
- "fetchSpec": "^4.0.0"
- },
- "_requiredBy": [
- "/libnpmpublish"
- ],
- "_resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-4.0.3.tgz",
- "_shasum": "3c2179e39e04f9348b1c2979545951d36bee8766",
- "_spec": "npm-registry-fetch@^4.0.0",
- "_where": "/Users/mperrotte/npminc/cli/node_modules/libnpmpublish",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/npm/registry-fetch/issues"
- },
- "bundleDependencies": false,
- "config": {
- "nyc": {
- "exclude": [
- "node_modules/**",
- "test/**"
- ]
- }
- },
- "dependencies": {
- "JSONStream": "^1.3.4",
- "bluebird": "^3.5.1",
- "figgy-pudding": "^3.4.1",
- "lru-cache": "^5.1.1",
- "make-fetch-happen": "^5.0.0",
- "npm-package-arg": "^6.1.0",
- "safe-buffer": "^5.2.0"
- },
- "deprecated": false,
- "description": "Fetch-based http client for use with npm registry APIs",
- "devDependencies": {
- "cacache": "^12.0.0",
- "get-stream": "^4.0.0",
- "mkdirp": "^0.5.1",
- "nock": "^9.4.3",
- "npmlog": "^4.1.2",
- "rimraf": "^2.6.2",
- "ssri": "^6.0.0",
- "standard": "^11.0.1",
- "standard-version": "^4.4.0",
- "tap": "^12.0.1",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "*.js",
- "lib"
- ],
- "homepage": "https://github.com/npm/registry-fetch#readme",
- "keywords": [
- "npm",
- "registry",
- "fetch"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "npm-registry-fetch",
- "publishConfig": {
- "tag": "latest-v4"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/registry-fetch.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --coverage test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "4.0.3"
-}
diff --git a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/silentlog.js b/node_modules/libnpmpublish/node_modules/npm-registry-fetch/silentlog.js
deleted file mode 100644
index 886c5d55b..000000000
--- a/node_modules/libnpmpublish/node_modules/npm-registry-fetch/silentlog.js
+++ /dev/null
@@ -1,14 +0,0 @@
-'use strict'
-
-const noop = Function.prototype
-module.exports = {
- error: noop,
- warn: noop,
- notice: noop,
- info: noop,
- verbose: noop,
- silly: noop,
- http: noop,
- pause: noop,
- resume: noop
-}
diff --git a/node_modules/libnpmpublish/node_modules/safe-buffer/README.md b/node_modules/libnpmpublish/node_modules/safe-buffer/README.md
deleted file mode 100644
index 356e35193..000000000
--- a/node_modules/libnpmpublish/node_modules/safe-buffer/README.md
+++ /dev/null
@@ -1,586 +0,0 @@
-# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url]
-
-[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg
-[travis-url]: https://travis-ci.org/feross/safe-buffer
-[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg
-[npm-url]: https://npmjs.org/package/safe-buffer
-[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg
-[downloads-url]: https://npmjs.org/package/safe-buffer
-[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg
-[standard-url]: https://standardjs.com
-
-#### Safer Node.js Buffer API
-
-**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`,
-`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.**
-
-**Uses the built-in implementation when available.**
-
-## install
-
-```
-npm install safe-buffer
-```
-
-[Get supported safe-buffer with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-safe-buffer?utm_source=npm-safe-buffer&utm_medium=referral&utm_campaign=readme)
-
-## usage
-
-The goal of this package is to provide a safe replacement for the node.js `Buffer`.
-
-It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to
-the top of your node.js modules:
-
-```js
-var Buffer = require('safe-buffer').Buffer
-
-// Existing buffer code will continue to work without issues:
-
-new Buffer('hey', 'utf8')
-new Buffer([1, 2, 3], 'utf8')
-new Buffer(obj)
-new Buffer(16) // create an uninitialized buffer (potentially unsafe)
-
-// But you can use these new explicit APIs to make clear what you want:
-
-Buffer.from('hey', 'utf8') // convert from many types to a Buffer
-Buffer.alloc(16) // create a zero-filled buffer (safe)
-Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe)
-```
-
-## api
-
-### Class Method: Buffer.from(array)
-<!-- YAML
-added: v3.0.0
--->
-
-* `array` {Array}
-
-Allocates a new `Buffer` using an `array` of octets.
-
-```js
-const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]);
- // creates a new Buffer containing ASCII bytes
- // ['b','u','f','f','e','r']
-```
-
-A `TypeError` will be thrown if `array` is not an `Array`.
-
-### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]])
-<!-- YAML
-added: v5.10.0
--->
-
-* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or
- a `new ArrayBuffer()`
-* `byteOffset` {Number} Default: `0`
-* `length` {Number} Default: `arrayBuffer.length - byteOffset`
-
-When passed a reference to the `.buffer` property of a `TypedArray` instance,
-the newly created `Buffer` will share the same allocated memory as the
-TypedArray.
-
-```js
-const arr = new Uint16Array(2);
-arr[0] = 5000;
-arr[1] = 4000;
-
-const buf = Buffer.from(arr.buffer); // shares the memory with arr;
-
-console.log(buf);
- // Prints: <Buffer 88 13 a0 0f>
-
-// changing the TypedArray changes the Buffer also
-arr[1] = 6000;
-
-console.log(buf);
- // Prints: <Buffer 88 13 70 17>
-```
-
-The optional `byteOffset` and `length` arguments specify a memory range within
-the `arrayBuffer` that will be shared by the `Buffer`.
-
-```js
-const ab = new ArrayBuffer(10);
-const buf = Buffer.from(ab, 0, 2);
-console.log(buf.length);
- // Prints: 2
-```
-
-A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`.
-
-### Class Method: Buffer.from(buffer)
-<!-- YAML
-added: v3.0.0
--->
-
-* `buffer` {Buffer}
-
-Copies the passed `buffer` data onto a new `Buffer` instance.
-
-```js
-const buf1 = Buffer.from('buffer');
-const buf2 = Buffer.from(buf1);
-
-buf1[0] = 0x61;
-console.log(buf1.toString());
- // 'auffer'
-console.log(buf2.toString());
- // 'buffer' (copy is not changed)
-```
-
-A `TypeError` will be thrown if `buffer` is not a `Buffer`.
-
-### Class Method: Buffer.from(str[, encoding])
-<!-- YAML
-added: v5.10.0
--->
-
-* `str` {String} String to encode.
-* `encoding` {String} Encoding to use, Default: `'utf8'`
-
-Creates a new `Buffer` containing the given JavaScript string `str`. If
-provided, the `encoding` parameter identifies the character encoding.
-If not provided, `encoding` defaults to `'utf8'`.
-
-```js
-const buf1 = Buffer.from('this is a tést');
-console.log(buf1.toString());
- // prints: this is a tést
-console.log(buf1.toString('ascii'));
- // prints: this is a tC)st
-
-const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex');
-console.log(buf2.toString());
- // prints: this is a tést
-```
-
-A `TypeError` will be thrown if `str` is not a string.
-
-### Class Method: Buffer.alloc(size[, fill[, encoding]])
-<!-- YAML
-added: v5.10.0
--->
-
-* `size` {Number}
-* `fill` {Value} Default: `undefined`
-* `encoding` {String} Default: `utf8`
-
-Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the
-`Buffer` will be *zero-filled*.
-
-```js
-const buf = Buffer.alloc(5);
-console.log(buf);
- // <Buffer 00 00 00 00 00>
-```
-
-The `size` must be less than or equal to the value of
-`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
-`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
-be created if a `size` less than or equal to 0 is specified.
-
-If `fill` is specified, the allocated `Buffer` will be initialized by calling
-`buf.fill(fill)`. See [`buf.fill()`][] for more information.
-
-```js
-const buf = Buffer.alloc(5, 'a');
-console.log(buf);
- // <Buffer 61 61 61 61 61>
-```
-
-If both `fill` and `encoding` are specified, the allocated `Buffer` will be
-initialized by calling `buf.fill(fill, encoding)`. For example:
-
-```js
-const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
-console.log(buf);
- // <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64>
-```
-
-Calling `Buffer.alloc(size)` can be significantly slower than the alternative
-`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance
-contents will *never contain sensitive data*.
-
-A `TypeError` will be thrown if `size` is not a number.
-
-### Class Method: Buffer.allocUnsafe(size)
-<!-- YAML
-added: v5.10.0
--->
-
-* `size` {Number}
-
-Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must
-be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit
-architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is
-thrown. A zero-length Buffer will be created if a `size` less than or equal to
-0 is specified.
-
-The underlying memory for `Buffer` instances created in this way is *not
-initialized*. The contents of the newly created `Buffer` are unknown and
-*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
-`Buffer` instances to zeroes.
-
-```js
-const buf = Buffer.allocUnsafe(5);
-console.log(buf);
- // <Buffer 78 e0 82 02 01>
- // (octets will be different, every time)
-buf.fill(0);
-console.log(buf);
- // <Buffer 00 00 00 00 00>
-```
-
-A `TypeError` will be thrown if `size` is not a number.
-
-Note that the `Buffer` module pre-allocates an internal `Buffer` instance of
-size `Buffer.poolSize` that is used as a pool for the fast allocation of new
-`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated
-`new Buffer(size)` constructor) only when `size` is less than or equal to
-`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default
-value of `Buffer.poolSize` is `8192` but can be modified.
-
-Use of this pre-allocated internal memory pool is a key difference between
-calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`.
-Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer
-pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal
-Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The
-difference is subtle but can be important when an application requires the
-additional performance that `Buffer.allocUnsafe(size)` provides.
-
-### Class Method: Buffer.allocUnsafeSlow(size)
-<!-- YAML
-added: v5.10.0
--->
-
-* `size` {Number}
-
-Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The
-`size` must be less than or equal to the value of
-`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
-`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
-be created if a `size` less than or equal to 0 is specified.
-
-The underlying memory for `Buffer` instances created in this way is *not
-initialized*. The contents of the newly created `Buffer` are unknown and
-*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
-`Buffer` instances to zeroes.
-
-When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances,
-allocations under 4KB are, by default, sliced from a single pre-allocated
-`Buffer`. This allows applications to avoid the garbage collection overhead of
-creating many individually allocated Buffers. This approach improves both
-performance and memory usage by eliminating the need to track and cleanup as
-many `Persistent` objects.
-
-However, in the case where a developer may need to retain a small chunk of
-memory from a pool for an indeterminate amount of time, it may be appropriate
-to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then
-copy out the relevant bits.
-
-```js
-// need to keep around a few small chunks of memory
-const store = [];
-
-socket.on('readable', () => {
- const data = socket.read();
- // allocate for retained data
- const sb = Buffer.allocUnsafeSlow(10);
- // copy the data into the new allocation
- data.copy(sb, 0, 0, 10);
- store.push(sb);
-});
-```
-
-`Buffer.allocUnsafeSlow()` should be used only as a last resort *after*
-a developer has observed undue memory retention in their applications.
-
-A `TypeError` will be thrown if `size` is not a number.
-
-### All the Rest
-
-The rest of the `Buffer` API is exactly the same as in node.js.
-[See the docs](https://nodejs.org/api/buffer.html).
-
-
-## Related links
-
-- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660)
-- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4)
-
-## Why is `Buffer` unsafe?
-
-Today, the node.js `Buffer` constructor is overloaded to handle many different argument
-types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.),
-`ArrayBuffer`, and also `Number`.
-
-The API is optimized for convenience: you can throw any type at it, and it will try to do
-what you want.
-
-Because the Buffer constructor is so powerful, you often see code like this:
-
-```js
-// Convert UTF-8 strings to hex
-function toHex (str) {
- return new Buffer(str).toString('hex')
-}
-```
-
-***But what happens if `toHex` is called with a `Number` argument?***
-
-### Remote Memory Disclosure
-
-If an attacker can make your program call the `Buffer` constructor with a `Number`
-argument, then they can make it allocate uninitialized memory from the node.js process.
-This could potentially disclose TLS private keys, user data, or database passwords.
-
-When the `Buffer` constructor is passed a `Number` argument, it returns an
-**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like
-this, you **MUST** overwrite the contents before returning it to the user.
-
-From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size):
-
-> `new Buffer(size)`
->
-> - `size` Number
->
-> The underlying memory for `Buffer` instances created in this way is not initialized.
-> **The contents of a newly created `Buffer` are unknown and could contain sensitive
-> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes.
-
-(Emphasis our own.)
-
-When the programmer intends to create an uninitialized `Buffer`, you often see code
-like this:
-
-```js
-var buf = new Buffer(16)
-
-// Immediately overwrite the uninitialized buffer with data from another buffer
-for (var i = 0; i < buf.length; i++) {
- buf[i] = otherBuf[i]
-}
-```
-
-
-### Would this ever be a problem in real code?
-
-Yes. It's surprisingly common to forget to check the type of your variables in a
-dynamically-typed language like JavaScript.
-
-Usually the consequence of assuming the wrong type is that your program crashes with an
-uncaught exception. But the failure mode for forgetting to check the type of arguments to
-the `Buffer` constructor is more catastrophic.
-
-Here's an example of a vulnerable service that takes a JSON payload and converts it to
-hex:
-
-```js
-// Take a JSON payload {str: "some string"} and convert it to hex
-var server = http.createServer(function (req, res) {
- var data = ''
- req.setEncoding('utf8')
- req.on('data', function (chunk) {
- data += chunk
- })
- req.on('end', function () {
- var body = JSON.parse(data)
- res.end(new Buffer(body.str).toString('hex'))
- })
-})
-
-server.listen(8080)
-```
-
-In this example, an http client just has to send:
-
-```json
-{
- "str": 1000
-}
-```
-
-and it will get back 1,000 bytes of uninitialized memory from the server.
-
-This is a very serious bug. It's similar in severity to
-[the Heartbleed bug](http://heartbleed.com/), which allowed disclosure of OpenSSL process
-memory by remote attackers.
-
-
-### Which real-world packages were vulnerable?
-
-#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht)
-
-[Mathias Buus](https://github.com/mafintosh) and I
-([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages,
-[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow
-anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get
-them to reveal 20 bytes at a time of uninitialized memory from the node.js process.
-
-Here's
-[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8)
-that fixed it. We released a new fixed version, created a
-[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all
-vulnerable versions on npm so users will get a warning to upgrade to a newer version.
-
-#### [`ws`](https://www.npmjs.com/package/ws)
-
-That got us wondering if there were other vulnerable packages. Sure enough, within a short
-period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the
-most popular WebSocket implementation in node.js.
-
-If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as
-expected, then uninitialized server memory would be disclosed to the remote peer.
-
-These were the vulnerable methods:
-
-```js
-socket.send(number)
-socket.ping(number)
-socket.pong(number)
-```
-
-Here's a vulnerable socket server with some echo functionality:
-
-```js
-server.on('connection', function (socket) {
- socket.on('message', function (message) {
- message = JSON.parse(message)
- if (message.type === 'echo') {
- socket.send(message.data) // send back the user's message
- }
- })
-})
-```
-
-`socket.send(number)` called on the server will disclose server memory.
-
-Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue
-was fixed, with a more detailed explanation. Props to
-[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the
-[Node Security Project disclosure](https://nodesecurity.io/advisories/67).
-
-
-### What's the solution?
-
-It's important that node.js offers a fast way to get memory; otherwise, performance-critical
-applications would needlessly get a lot slower.
-
-But we need a better way to *signal our intent* as programmers. **When we want
-uninitialized memory, we should request it explicitly.**
-
-Sensitive functionality should not be packed into a developer-friendly API that loosely
-accepts many different types. This type of API encourages the lazy practice of passing
-variables in without checking the type very carefully.
-
-#### A new API: `Buffer.allocUnsafe(number)`
-
-The functionality of creating buffers with uninitialized memory should be part of another
-API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that
-frequently gets user input of all sorts of different types passed into it.
-
-```js
-var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory!
-
-// Immediately overwrite the uninitialized buffer with data from another buffer
-for (var i = 0; i < buf.length; i++) {
- buf[i] = otherBuf[i]
-}
-```
-
-
-### How do we fix node.js core?
-
-We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as
-`semver-major`) which defends against one case:
-
-```js
-var str = 16
-new Buffer(str, 'utf8')
-```
-
-In this situation, it's implied that the programmer intended the first argument to be a
-string, since they passed an encoding as a second argument. Today, node.js will allocate
-uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not
-what the programmer intended.
-
-But this is only a partial solution, since if the programmer does `new Buffer(variable)`
-(without an `encoding` parameter) there's no way to know what they intended. If `variable`
-is sometimes a number, then uninitialized memory will sometimes be returned.
-
-### What's the real long-term fix?
-
-We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when
-we need uninitialized memory. But that would break 1000s of packages.
-
-~~We believe the best solution is to:~~
-
-~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~
-
-~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~
-
-#### Update
-
-We now support adding three new APIs:
-
-- `Buffer.from(value)` - convert from any type to a buffer
-- `Buffer.alloc(size)` - create a zero-filled buffer
-- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size
-
-This solves the core problem that affected `ws` and `bittorrent-dht` which is
-`Buffer(variable)` getting tricked into taking a number argument.
-
-This way, existing code continues working and the impact on the npm ecosystem will be
-minimal. Over time, npm maintainers can migrate performance-critical code to use
-`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`.
-
-
-### Conclusion
-
-We think there's a serious design issue with the `Buffer` API as it exists today. It
-promotes insecure software by putting high-risk functionality into a convenient API
-with friendly "developer ergonomics".
-
-This wasn't merely a theoretical exercise because we found the issue in some of the
-most popular npm packages.
-
-Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of
-`buffer`.
-
-```js
-var Buffer = require('safe-buffer').Buffer
-```
-
-Eventually, we hope that node.js core can switch to this new, safer behavior. We believe
-the impact on the ecosystem would be minimal since it's not a breaking change.
-Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while
-older, insecure packages would magically become safe from this attack vector.
-
-
-## links
-
-- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514)
-- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67)
-- [Node Security Project disclosure for `bittorrent-dht`](https://nodesecurity.io/advisories/68)
-
-
-## credit
-
-The original issues in `bittorrent-dht`
-([disclosure](https://nodesecurity.io/advisories/68)) and
-`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by
-[Mathias Buus](https://github.com/mafintosh) and
-[Feross Aboukhadijeh](http://feross.org/).
-
-Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues
-and for his work running the [Node Security Project](https://nodesecurity.io/).
-
-Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and
-auditing the code.
-
-
-## license
-
-MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org)
diff --git a/node_modules/libnpmpublish/node_modules/safe-buffer/index.d.ts b/node_modules/libnpmpublish/node_modules/safe-buffer/index.d.ts
deleted file mode 100644
index e9fed809a..000000000
--- a/node_modules/libnpmpublish/node_modules/safe-buffer/index.d.ts
+++ /dev/null
@@ -1,187 +0,0 @@
-declare module "safe-buffer" {
- export class Buffer {
- length: number
- write(string: string, offset?: number, length?: number, encoding?: string): number;
- toString(encoding?: string, start?: number, end?: number): string;
- toJSON(): { type: 'Buffer', data: any[] };
- equals(otherBuffer: Buffer): boolean;
- compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number;
- copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
- slice(start?: number, end?: number): Buffer;
- writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
- writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
- writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
- writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
- readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
- readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
- readIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
- readIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
- readUInt8(offset: number, noAssert?: boolean): number;
- readUInt16LE(offset: number, noAssert?: boolean): number;
- readUInt16BE(offset: number, noAssert?: boolean): number;
- readUInt32LE(offset: number, noAssert?: boolean): number;
- readUInt32BE(offset: number, noAssert?: boolean): number;
- readInt8(offset: number, noAssert?: boolean): number;
- readInt16LE(offset: number, noAssert?: boolean): number;
- readInt16BE(offset: number, noAssert?: boolean): number;
- readInt32LE(offset: number, noAssert?: boolean): number;
- readInt32BE(offset: number, noAssert?: boolean): number;
- readFloatLE(offset: number, noAssert?: boolean): number;
- readFloatBE(offset: number, noAssert?: boolean): number;
- readDoubleLE(offset: number, noAssert?: boolean): number;
- readDoubleBE(offset: number, noAssert?: boolean): number;
- swap16(): Buffer;
- swap32(): Buffer;
- swap64(): Buffer;
- writeUInt8(value: number, offset: number, noAssert?: boolean): number;
- writeUInt16LE(value: number, offset: number, noAssert?: boolean): number;
- writeUInt16BE(value: number, offset: number, noAssert?: boolean): number;
- writeUInt32LE(value: number, offset: number, noAssert?: boolean): number;
- writeUInt32BE(value: number, offset: number, noAssert?: boolean): number;
- writeInt8(value: number, offset: number, noAssert?: boolean): number;
- writeInt16LE(value: number, offset: number, noAssert?: boolean): number;
- writeInt16BE(value: number, offset: number, noAssert?: boolean): number;
- writeInt32LE(value: number, offset: number, noAssert?: boolean): number;
- writeInt32BE(value: number, offset: number, noAssert?: boolean): number;
- writeFloatLE(value: number, offset: number, noAssert?: boolean): number;
- writeFloatBE(value: number, offset: number, noAssert?: boolean): number;
- writeDoubleLE(value: number, offset: number, noAssert?: boolean): number;
- writeDoubleBE(value: number, offset: number, noAssert?: boolean): number;
- fill(value: any, offset?: number, end?: number): this;
- indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
- lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
- includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean;
-
- /**
- * Allocates a new buffer containing the given {str}.
- *
- * @param str String to store in buffer.
- * @param encoding encoding to use, optional. Default is 'utf8'
- */
- constructor (str: string, encoding?: string);
- /**
- * Allocates a new buffer of {size} octets.
- *
- * @param size count of octets to allocate.
- */
- constructor (size: number);
- /**
- * Allocates a new buffer containing the given {array} of octets.
- *
- * @param array The octets to store.
- */
- constructor (array: Uint8Array);
- /**
- * Produces a Buffer backed by the same allocated memory as
- * the given {ArrayBuffer}.
- *
- *
- * @param arrayBuffer The ArrayBuffer with which to share memory.
- */
- constructor (arrayBuffer: ArrayBuffer);
- /**
- * Allocates a new buffer containing the given {array} of octets.
- *
- * @param array The octets to store.
- */
- constructor (array: any[]);
- /**
- * Copies the passed {buffer} data onto a new {Buffer} instance.
- *
- * @param buffer The buffer to copy.
- */
- constructor (buffer: Buffer);
- prototype: Buffer;
- /**
- * Allocates a new Buffer using an {array} of octets.
- *
- * @param array
- */
- static from(array: any[]): Buffer;
- /**
- * When passed a reference to the .buffer property of a TypedArray instance,
- * the newly created Buffer will share the same allocated memory as the TypedArray.
- * The optional {byteOffset} and {length} arguments specify a memory range
- * within the {arrayBuffer} that will be shared by the Buffer.
- *
- * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer()
- * @param byteOffset
- * @param length
- */
- static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer;
- /**
- * Copies the passed {buffer} data onto a new Buffer instance.
- *
- * @param buffer
- */
- static from(buffer: Buffer): Buffer;
- /**
- * Creates a new Buffer containing the given JavaScript string {str}.
- * If provided, the {encoding} parameter identifies the character encoding.
- * If not provided, {encoding} defaults to 'utf8'.
- *
- * @param str
- */
- static from(str: string, encoding?: string): Buffer;
- /**
- * Returns true if {obj} is a Buffer
- *
- * @param obj object to test.
- */
- static isBuffer(obj: any): obj is Buffer;
- /**
- * Returns true if {encoding} is a valid encoding argument.
- * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
- *
- * @param encoding string to test.
- */
- static isEncoding(encoding: string): boolean;
- /**
- * Gives the actual byte length of a string. encoding defaults to 'utf8'.
- * This is not the same as String.prototype.length since that returns the number of characters in a string.
- *
- * @param string string to test.
- * @param encoding encoding used to evaluate (defaults to 'utf8')
- */
- static byteLength(string: string, encoding?: string): number;
- /**
- * Returns a buffer which is the result of concatenating all the buffers in the list together.
- *
- * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer.
- * If the list has exactly one item, then the first item of the list is returned.
- * If the list has more than one item, then a new Buffer is created.
- *
- * @param list An array of Buffer objects to concatenate
- * @param totalLength Total length of the buffers when concatenated.
- * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly.
- */
- static concat(list: Buffer[], totalLength?: number): Buffer;
- /**
- * The same as buf1.compare(buf2).
- */
- static compare(buf1: Buffer, buf2: Buffer): number;
- /**
- * Allocates a new buffer of {size} octets.
- *
- * @param size count of octets to allocate.
- * @param fill if specified, buffer will be initialized by calling buf.fill(fill).
- * If parameter is omitted, buffer will be filled with zeros.
- * @param encoding encoding used for call to buf.fill while initalizing
- */
- static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer;
- /**
- * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents
- * of the newly created Buffer are unknown and may contain sensitive data.
- *
- * @param size count of octets to allocate
- */
- static allocUnsafe(size: number): Buffer;
- /**
- * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents
- * of the newly created Buffer are unknown and may contain sensitive data.
- *
- * @param size count of octets to allocate
- */
- static allocUnsafeSlow(size: number): Buffer;
- }
-} \ No newline at end of file
diff --git a/node_modules/libnpmpublish/node_modules/safe-buffer/index.js b/node_modules/libnpmpublish/node_modules/safe-buffer/index.js
deleted file mode 100644
index 054c8d30d..000000000
--- a/node_modules/libnpmpublish/node_modules/safe-buffer/index.js
+++ /dev/null
@@ -1,64 +0,0 @@
-/* eslint-disable node/no-deprecated-api */
-var buffer = require('buffer')
-var Buffer = buffer.Buffer
-
-// alternative to using Object.keys for old browsers
-function copyProps (src, dst) {
- for (var key in src) {
- dst[key] = src[key]
- }
-}
-if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
- module.exports = buffer
-} else {
- // Copy properties from require('buffer')
- copyProps(buffer, exports)
- exports.Buffer = SafeBuffer
-}
-
-function SafeBuffer (arg, encodingOrOffset, length) {
- return Buffer(arg, encodingOrOffset, length)
-}
-
-SafeBuffer.prototype = Object.create(Buffer.prototype)
-
-// Copy static methods from Buffer
-copyProps(Buffer, SafeBuffer)
-
-SafeBuffer.from = function (arg, encodingOrOffset, length) {
- if (typeof arg === 'number') {
- throw new TypeError('Argument must not be a number')
- }
- return Buffer(arg, encodingOrOffset, length)
-}
-
-SafeBuffer.alloc = function (size, fill, encoding) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- var buf = Buffer(size)
- if (fill !== undefined) {
- if (typeof encoding === 'string') {
- buf.fill(fill, encoding)
- } else {
- buf.fill(fill)
- }
- } else {
- buf.fill(0)
- }
- return buf
-}
-
-SafeBuffer.allocUnsafe = function (size) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- return Buffer(size)
-}
-
-SafeBuffer.allocUnsafeSlow = function (size) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- return buffer.SlowBuffer(size)
-}
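A short sketch of the shim's observable behavior, whichever branch above is taken (the built-in `Buffer` on modern Node, the `SafeBuffer` polyfill otherwise):

```js
const { Buffer } = require('safe-buffer')

Buffer.alloc(4)            // <Buffer 00 00 00 00>, always zero-filled
Buffer.from('hi', 'utf8')  // <Buffer 68 69>
Buffer.from(16)            // throws a TypeError in both branches: numbers are rejected
Buffer.allocUnsafe(16)     // uninitialized; the caller must overwrite it before use
```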
diff --git a/node_modules/libnpmpublish/node_modules/safe-buffer/package.json b/node_modules/libnpmpublish/node_modules/safe-buffer/package.json
deleted file mode 100644
index e737ce5d6..000000000
--- a/node_modules/libnpmpublish/node_modules/safe-buffer/package.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "_from": "safe-buffer@^5.2.0",
- "_id": "safe-buffer@5.2.0",
- "_inBundle": false,
- "_integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==",
- "_location": "/libnpmpublish/safe-buffer",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "safe-buffer@^5.2.0",
- "name": "safe-buffer",
- "escapedName": "safe-buffer",
- "rawSpec": "^5.2.0",
- "saveSpec": null,
- "fetchSpec": "^5.2.0"
- },
- "_requiredBy": [
- "/libnpmpublish/npm-registry-fetch"
- ],
- "_resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz",
- "_shasum": "b74daec49b1148f88c64b68d49b1e815c1f2f519",
- "_spec": "safe-buffer@^5.2.0",
- "_where": "/Users/mperrotte/npminc/cli/node_modules/libnpmpublish/node_modules/npm-registry-fetch",
- "author": {
- "name": "Feross Aboukhadijeh",
- "email": "feross@feross.org",
- "url": "http://feross.org"
- },
- "bugs": {
- "url": "https://github.com/feross/safe-buffer/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "Safer Node.js Buffer API",
- "devDependencies": {
- "standard": "*",
- "tape": "^4.0.0"
- },
- "homepage": "https://github.com/feross/safe-buffer",
- "keywords": [
- "buffer",
- "buffer allocate",
- "node security",
- "safe",
- "safe-buffer",
- "security",
- "uninitialized"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "safe-buffer",
- "repository": {
- "type": "git",
- "url": "git://github.com/feross/safe-buffer.git"
- },
- "scripts": {
- "test": "standard && tape test/*.js"
- },
- "types": "index.d.ts",
- "version": "5.2.0"
-}
diff --git a/node_modules/libnpmpublish/node_modules/semver/CHANGELOG.md b/node_modules/libnpmpublish/node_modules/semver/CHANGELOG.md
deleted file mode 100644
index 66304fdd2..000000000
--- a/node_modules/libnpmpublish/node_modules/semver/CHANGELOG.md
+++ /dev/null
@@ -1,39 +0,0 @@
-# changes log
-
-## 5.7
-
-* Add `minVersion` method
-
-## 5.6
-
-* Move boolean `loose` param to an options object, with
- backwards-compatibility protection.
-* Add ability to opt out of special prerelease version handling with
- the `includePrerelease` option flag.
-
-## 5.5
-
-* Add version coercion capabilities
-
-## 5.4
-
-* Add intersection checking
-
-## 5.3
-
-* Add `minSatisfying` method
-
-## 5.2
-
-* Add `prerelease(v)` that returns prerelease components
-
-## 5.1
-
-* Add Backus-Naur for ranges
-* Remove excessively cute inspection methods
-
-## 5.0
-
-* Remove AMD/Browserified build artifacts
-* Fix ltr and gtr when using the `*` range
-* Fix for range `*` with a prerelease identifier
diff --git a/node_modules/libnpmpublish/node_modules/semver/README.md b/node_modules/libnpmpublish/node_modules/semver/README.md
deleted file mode 100644
index f8dfa5a0d..000000000
--- a/node_modules/libnpmpublish/node_modules/semver/README.md
+++ /dev/null
@@ -1,412 +0,0 @@
-semver(1) -- The semantic versioner for npm
-===========================================
-
-## Install
-
-```bash
-npm install --save semver
-```
-
-## Usage
-
-As a node module:
-
-```js
-const semver = require('semver')
-
-semver.valid('1.2.3') // '1.2.3'
-semver.valid('a.b.c') // null
-semver.clean(' =v1.2.3 ') // '1.2.3'
-semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
-semver.gt('1.2.3', '9.8.7') // false
-semver.lt('1.2.3', '9.8.7') // true
-semver.minVersion('>=1.0.0') // '1.0.0'
-semver.valid(semver.coerce('v2')) // '2.0.0'
-semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7'
-```
-
-As a command-line utility:
-
-```
-$ semver -h
-
-A JavaScript implementation of the https://semver.org/ specification
-Copyright Isaac Z. Schlueter
-
-Usage: semver [options] <version> [<version> [...]]
-Prints valid versions sorted by SemVer precedence
-
-Options:
--r --range <range>
- Print versions that match the specified range.
-
--i --increment [<level>]
- Increment a version by the specified level. Level can
- be one of: major, minor, patch, premajor, preminor,
- prepatch, or prerelease. Default level is 'patch'.
- Only one version may be specified.
-
---preid <identifier>
- Identifier to be used to prefix premajor, preminor,
- prepatch or prerelease version increments.
-
--l --loose
- Interpret versions and ranges loosely
-
--p --include-prerelease
- Always include prerelease versions in range matching
-
--c --coerce
- Coerce a string into SemVer if possible
- (does not imply --loose)
-
-Program exits successfully if any valid version satisfies
-all supplied ranges, and prints all satisfying versions.
-
-If no satisfying versions are found, then exits failure.
-
-Versions are printed in ascending order, so supplying
-multiple versions to the utility will just sort them.
-```
-
-## Versions
-
-A "version" is described by the `v2.0.0` specification found at
-<https://semver.org/>.
-
-A leading `"="` or `"v"` character is stripped off and ignored.
-
-## Ranges
-
-A `version range` is a set of `comparators` which specify versions
-that satisfy the range.
-
-A `comparator` is composed of an `operator` and a `version`. The set
-of primitive `operators` is:
-
-* `<` Less than
-* `<=` Less than or equal to
-* `>` Greater than
-* `>=` Greater than or equal to
-* `=` Equal. If no operator is specified, then equality is assumed,
- so this operator is optional, but MAY be included.
-
-For example, the comparator `>=1.2.7` would match the versions
-`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`
-or `1.1.0`.
-
-Comparators can be joined by whitespace to form a `comparator set`,
-which is satisfied by the **intersection** of all of the comparators
-it includes.
-
-A range is composed of one or more comparator sets, joined by `||`. A
-version matches a range if and only if every comparator in at least
-one of the `||`-separated comparator sets is satisfied by the version.
-
-For example, the range `>=1.2.7 <1.3.0` would match the versions
-`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`,
-or `1.1.0`.
-
-The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`,
-`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`.
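-
-For example, these rules can be checked directly with `satisfies`; a quick
-sketch (the results follow from the definitions above):
-
-```js
-const semver = require('semver')
-
-// every comparator in at least one ||-separated set must be satisfied
-semver.satisfies('1.2.8', '>=1.2.7 <1.3.0')           // true
-semver.satisfies('1.3.0', '>=1.2.7 <1.3.0')           // false
-semver.satisfies('1.4.6', '1.2.7 || >=1.2.9 <2.0.0')  // true
-semver.satisfies('1.2.8', '1.2.7 || >=1.2.9 <2.0.0')  // false
-```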
-
-### Prerelease Tags
-
-If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then
-it will only be allowed to satisfy comparator sets if at least one
-comparator with the same `[major, minor, patch]` tuple also has a
-prerelease tag.
-
-For example, the range `>1.2.3-alpha.3` would be allowed to match the
-version `1.2.3-alpha.7`, but it would *not* be satisfied by
-`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater
-than" `1.2.3-alpha.3` according to the SemVer sort rules. The version
-range only accepts prerelease tags on the `1.2.3` version. The
-version `3.4.5` *would* satisfy the range, because it does not have a
-prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`.
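-
-In code, the example above plays out like this (a short sketch):
-
-```js
-const semver = require('semver')
-
-semver.satisfies('1.2.3-alpha.7', '>1.2.3-alpha.3') // true: same [major, minor, patch] tuple
-semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3') // false: prerelease on a different tuple
-semver.satisfies('3.4.5', '>1.2.3-alpha.3')         // true: no prerelease tag at all
-```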
-
-The purpose for this behavior is twofold. First, prerelease versions
-frequently are updated very quickly, and contain many breaking changes
-that are (by the author's design) not yet fit for public consumption.
-Therefore, by default, they are excluded from range matching
-semantics.
-
-Second, a user who has opted into using a prerelease version has
-clearly indicated the intent to use *that specific* set of
-alpha/beta/rc versions. By including a prerelease tag in the range,
-the user is indicating that they are aware of the risk. However, it
-is still not appropriate to assume that they have opted into taking a
-similar risk on the *next* set of prerelease versions.
-
-Note that this behavior can be suppressed (treating all prerelease
-versions as if they were normal versions, for the purpose of range
-matching) by setting the `includePrerelease` flag on the options
-object passed to any
-[functions](https://github.com/npm/node-semver#functions) that do
-range matching.
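-
-For instance, a small sketch of opting in with `includePrerelease`:
-
-```js
-const semver = require('semver')
-
-semver.satisfies('1.2.4-alpha.1', '>=1.2.3')                              // false: prereleases excluded
-semver.satisfies('1.2.4-alpha.1', '>=1.2.3', { includePrerelease: true }) // true
-```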
-
-#### Prerelease Identifiers
-
-The method `.inc` takes an additional `identifier` string argument that
-will append the value of the string as a prerelease identifier:
-
-```javascript
-semver.inc('1.2.3', 'prerelease', 'beta')
-// '1.2.4-beta.0'
-```
-
-command-line example:
-
-```bash
-$ semver 1.2.3 -i prerelease --preid beta
-1.2.4-beta.0
-```
-
-Which can then be used to increment further:
-
-```bash
-$ semver 1.2.4-beta.0 -i prerelease
-1.2.4-beta.1
-```
-
-### Advanced Range Syntax
-
-Advanced range syntax desugars to primitive comparators in
-deterministic ways.
-
-Advanced ranges may be combined in the same way as primitive
-comparators using white space or `||`.
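-
-One way to see the desugaring is `validRange`, which returns the normalized
-primitive form. A short sketch (outputs follow the rules in the subsections
-below):
-
-```js
-const semver = require('semver')
-
-semver.validRange('1.2.3 - 2.3.4') // '>=1.2.3 <=2.3.4'  (hyphen range)
-semver.validRange('1.2.x')         // '>=1.2.0 <1.3.0'   (x-range)
-semver.validRange('~1.2.3')        // '>=1.2.3 <1.3.0'   (tilde)
-semver.validRange('^0.2.3')        // '>=0.2.3 <0.3.0'   (caret)
-```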
-
-#### Hyphen Ranges `X.Y.Z - A.B.C`
-
-Specifies an inclusive set.
-
-* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
-
-If a partial version is provided as the first version in the inclusive
-range, then the missing pieces are replaced with zeroes.
-
-* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4`
-
-If a partial version is provided as the second version in the
-inclusive range, then all versions that start with the supplied parts
-of the tuple are accepted, but nothing that would be greater than the
-provided tuple parts.
-
-* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0`
-* `1.2.3 - 2` := `>=1.2.3 <3.0.0`
-
-#### X-Ranges `1.2.x` `1.X` `1.2.*` `*`
-
-Any of `X`, `x`, or `*` may be used to "stand in" for one of the
-numeric values in the `[major, minor, patch]` tuple.
-
-* `*` := `>=0.0.0` (Any version satisfies)
-* `1.x` := `>=1.0.0 <2.0.0` (Matching major version)
-* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions)
-
-A partial version range is treated as an X-Range, so the special
-character is in fact optional.
-
-* `""` (empty string) := `*` := `>=0.0.0`
-* `1` := `1.x.x` := `>=1.0.0 <2.0.0`
-* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0`
-
-#### Tilde Ranges `~1.2.3` `~1.2` `~1`
-
-Allows patch-level changes if a minor version is specified on the
-comparator. Allows minor-level changes if not.
-
-* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0`
-* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`)
-* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`)
-* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0`
-* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`)
-* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`)
-* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in
- the `1.2.3` version will be allowed, if they are greater than or
- equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
- `1.2.4-beta.2` would not, because it is a prerelease of a
- different `[major, minor, patch]` tuple.
-
-#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4`
-
-Allows changes that do not modify the left-most non-zero digit in the
-`[major, minor, patch]` tuple. In other words, this allows patch and
-minor updates for versions `1.0.0` and above, patch updates for
-versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`.
-
-Many authors treat a `0.x` version as if the `x` were the major
-"breaking-change" indicator.
-
-Caret ranges are ideal when an author may make breaking changes
-between `0.2.4` and `0.3.0` releases, which is a common practice.
-However, it presumes that there will *not* be breaking changes between
-`0.2.4` and `0.2.5`. It allows for changes that are presumed to be
-additive (but non-breaking), according to commonly observed practices.
-
-* `^1.2.3` := `>=1.2.3 <2.0.0`
-* `^0.2.3` := `>=0.2.3 <0.3.0`
-* `^0.0.3` := `>=0.0.3 <0.0.4`
-* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in
- the `1.2.3` version will be allowed, if they are greater than or
- equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
- `1.2.4-beta.2` would not, because it is a prerelease of a
- different `[major, minor, patch]` tuple.
-* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the
- `0.0.3` version *only* will be allowed, if they are greater than or
- equal to `beta`. So, `0.0.3-pr.2` would be allowed.
-
-When parsing caret ranges, a missing `patch` value desugars to the
-number `0`, but will allow flexibility within that value, even if the
-major and minor versions are both `0`.
-
-* `^1.2.x` := `>=1.2.0 <2.0.0`
-* `^0.0.x` := `>=0.0.0 <0.1.0`
-* `^0.0` := `>=0.0.0 <0.1.0`
-
-Missing `minor` and `patch` values will desugar to zero, but also
-allow flexibility within those values, even if the major version is
-zero.
-
-* `^1.x` := `>=1.0.0 <2.0.0`
-* `^0.x` := `>=0.0.0 <1.0.0`
-
-### Range Grammar
-
-Putting all this together, here is a Backus-Naur grammar for ranges,
-for the benefit of parser authors:
-
-```bnf
-range-set ::= range ( logical-or range ) *
-logical-or ::= ( ' ' ) * '||' ( ' ' ) *
-range ::= hyphen | simple ( ' ' simple ) * | ''
-hyphen ::= partial ' - ' partial
-simple ::= primitive | partial | tilde | caret
-primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
-partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
-xr ::= 'x' | 'X' | '*' | nr
-nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) *
-tilde ::= '~' partial
-caret ::= '^' partial
-qualifier ::= ( '-' pre )? ( '+' build )?
-pre ::= parts
-build ::= parts
-parts ::= part ( '.' part ) *
-part ::= nr | [-0-9A-Za-z]+
-```
-
-## Functions
-
-All methods and classes take a final `options` object argument. All
-options in this object are `false` by default. The options supported
-are:
-
-- `loose` Be more forgiving about not-quite-valid semver strings.
-  (Any resulting output will always be 100% strictly compliant, of
- course.) For backwards compatibility reasons, if the `options`
- argument is a boolean value instead of an object, it is interpreted
- to be the `loose` param.
-- `includePrerelease` Set to suppress the [default
- behavior](https://github.com/npm/node-semver#prerelease-tags) of
- excluding prerelease tagged versions from ranges unless they are
- explicitly opted into.
-
-Strict-mode Comparators and Ranges will be strict about the SemVer
-strings that they parse.
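-
-A brief sketch of the `loose` option (the boolean form shown is the
-backwards-compatible one):
-
-```js
-const semver = require('semver')
-
-semver.valid('v1.2.3')       // '1.2.3' (the leading 'v' is fine even in strict mode)
-semver.valid('=1.2.3')       // null: strict parsing rejects the '='
-semver.valid('=1.2.3', true) // '1.2.3': boolean is treated as { loose: true }
-```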
-
-* `valid(v)`: Return the parsed version, or null if it's not valid.
-* `inc(v, release)`: Return the version incremented by the release
- type (`major`, `premajor`, `minor`, `preminor`, `patch`,
- `prepatch`, or `prerelease`), or null if it's not valid
- * `premajor` in one call will bump the version up to the next major
- version and down to a prerelease of that major version.
-    `preminor` and `prepatch` work the same way.
- * If called from a non-prerelease version, the `prerelease` will work the
- same as `prepatch`. It increments the patch version, then makes a
- prerelease. If the input version is already a prerelease it simply
- increments it.
-* `prerelease(v)`: Returns an array of prerelease components, or null
- if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]`
-* `major(v)`: Return the major version number.
-* `minor(v)`: Return the minor version number.
-* `patch(v)`: Return the patch version number.
-* `intersects(r1, r2, loose)`: Return true if the two supplied ranges
- or comparators intersect.
-* `parse(v)`: Attempt to parse a string as a semantic version, returning either
- a `SemVer` object or `null`.
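-
-Taken together, a compact sketch of several of these functions:
-
-```js
-const semver = require('semver')
-
-semver.inc('1.2.3', 'premajor', 'rc') // '2.0.0-rc.0'
-semver.prerelease('1.2.3-alpha.1')    // [ 'alpha', 1 ]
-semver.major('2.4.6')                 // 2
-semver.intersects('^1.2.0', '1.4.x')  // true
-semver.parse('not a version')         // null
-```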
-
-### Comparison
-
-* `gt(v1, v2)`: `v1 > v2`
-* `gte(v1, v2)`: `v1 >= v2`
-* `lt(v1, v2)`: `v1 < v2`
-* `lte(v1, v2)`: `v1 <= v2`
-* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent,
- even if they're not the exact same string. You already know how to
- compare strings.
-* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`.
-* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call
- the corresponding function above. `"==="` and `"!=="` do simple
- string comparison, but are included for completeness. Throws if an
- invalid comparison string is provided.
-* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if
- `v2` is greater. Sorts in ascending order if passed to `Array.sort()`.
-* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions
- in descending order when passed to `Array.sort()`.
-* `diff(v1, v2)`: Returns difference between two versions by the release type
- (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),
- or null if the versions are the same.
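-
-For instance, `compare` and `rcompare` drop straight into
-`Array.prototype.sort` (a minimal sketch):
-
-```js
-const semver = require('semver')
-
-const versions = ['1.10.0', '1.2.0', '1.2.0-beta', '0.9.9']
-versions.sort(semver.compare)  // [ '0.9.9', '1.2.0-beta', '1.2.0', '1.10.0' ]
-versions.sort(semver.rcompare) // [ '1.10.0', '1.2.0', '1.2.0-beta', '0.9.9' ]
-
-semver.diff('1.2.3', '1.3.0')     // 'minor'
-semver.diff('1.2.3', '1.2.3')     // null
-semver.cmp('1.2.3', '<', '2.0.0') // true
-```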
-
-### Comparators
-
-* `intersects(comparator)`: Return true if the comparators intersect
-
-### Ranges
-
-* `validRange(range)`: Return the valid range or null if it's not valid
-* `satisfies(version, range)`: Return true if the version satisfies the
- range.
-* `maxSatisfying(versions, range)`: Return the highest version in the list
- that satisfies the range, or `null` if none of them do.
-* `minSatisfying(versions, range)`: Return the lowest version in the list
- that satisfies the range, or `null` if none of them do.
-* `minVersion(range)`: Return the lowest version that can possibly match
- the given range.
-* `gtr(version, range)`: Return `true` if version is greater than all the
- versions possible in the range.
-* `ltr(version, range)`: Return `true` if version is less than all the
- versions possible in the range.
-* `outside(version, range, hilo)`: Return true if the version is outside
- the bounds of the range in either the high or low direction. The
- `hilo` argument must be either the string `'>'` or `'<'`. (This is
- the function called by `gtr` and `ltr`.)
-* `intersects(range)`: Return true if any of the range's comparators intersect
-
-Note that, since ranges may be non-contiguous, a version might not be
-greater than a range, less than a range, *or* satisfy a range! For
-example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`
-until `2.0.0`, so the version `1.2.10` would not be greater than the
-range (because `2.0.1` satisfies, which is higher), nor less than the
-range (since `1.2.8` satisfies, which is lower), and it also does not
-satisfy the range.
-
-If you want to know if a version satisfies or does not satisfy a
-range, use the `satisfies(version, range)` function.
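-
-A short sketch with a contiguous range, where the answers are unambiguous:
-
-```js
-const semver = require('semver')
-
-semver.gtr('2.0.1', '^1.2.3')          // true: above everything in >=1.2.3 <2.0.0
-semver.ltr('1.0.0', '^1.2.3')          // true: below everything in the range
-semver.gtr('1.5.0', '^1.2.3')          // false: it satisfies the range
-semver.outside('1.0.0', '^1.2.3', '<') // true (this is what ltr calls)
-```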
-
-### Coercion
-
-* `coerce(version)`: Coerces a string to semver if possible
-
-This aims to provide a very forgiving translation of a non-semver string to
-semver. It looks for the first digit in a string, and consumes all
-remaining characters which satisfy at least a partial semver (e.g., `1`,
-`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer
-versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All
-surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes
-`3.4.0`). Only text which lacks digits will fail coercion (`version one`
-is not valid). The maximum length for any semver component considered for
-coercion is 16 characters; longer components will be ignored
-(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any
-semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value
-components are invalid (`9999999999999999.4.7.4` is likely invalid).
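-
-A few examples consistent with the description above:
-
-```js
-const semver = require('semver')
-
-semver.coerce('v3.4 replaces v3.3.1').version // '3.4.0'
-semver.coerce('4.6.3.9.2-alpha2').version     // '4.6.3'
-semver.coerce('version one')                  // null
-```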
diff --git a/node_modules/libnpmpublish/node_modules/semver/bin/semver b/node_modules/libnpmpublish/node_modules/semver/bin/semver
deleted file mode 100755
index 801e77f13..000000000
--- a/node_modules/libnpmpublish/node_modules/semver/bin/semver
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/env node
-// Standalone semver comparison program.
-// Exits successfully and prints matching version(s) if
-// any supplied version is valid and passes all tests.
-
-var argv = process.argv.slice(2)
-
-var versions = []
-
-var range = []
-
-var inc = null
-
-var version = require('../package.json').version
-
-var loose = false
-
-var includePrerelease = false
-
-var coerce = false
-
-var identifier
-
-var semver = require('../semver')
-
-var reverse = false
-
-var options = {}
-
-main()
-
-function main () {
- if (!argv.length) return help()
- while (argv.length) {
- var a = argv.shift()
- var indexOfEqualSign = a.indexOf('=')
- if (indexOfEqualSign !== -1) {
- a = a.slice(0, indexOfEqualSign)
- argv.unshift(a.slice(indexOfEqualSign + 1))
- }
- switch (a) {
- case '-rv': case '-rev': case '--rev': case '--reverse':
- reverse = true
- break
- case '-l': case '--loose':
- loose = true
- break
- case '-p': case '--include-prerelease':
- includePrerelease = true
- break
- case '-v': case '--version':
- versions.push(argv.shift())
- break
- case '-i': case '--inc': case '--increment':
- switch (argv[0]) {
- case 'major': case 'minor': case 'patch': case 'prerelease':
- case 'premajor': case 'preminor': case 'prepatch':
- inc = argv.shift()
- break
- default:
- inc = 'patch'
- break
- }
- break
- case '--preid':
- identifier = argv.shift()
- break
- case '-r': case '--range':
- range.push(argv.shift())
- break
- case '-c': case '--coerce':
- coerce = true
- break
- case '-h': case '--help': case '-?':
- return help()
- default:
- versions.push(a)
- break
- }
- }
-
- var options = { loose: loose, includePrerelease: includePrerelease }
-
- versions = versions.map(function (v) {
- return coerce ? (semver.coerce(v) || { version: v }).version : v
- }).filter(function (v) {
- return semver.valid(v)
- })
- if (!versions.length) return fail()
- if (inc && (versions.length !== 1 || range.length)) { return failInc() }
-
- for (var i = 0, l = range.length; i < l; i++) {
- versions = versions.filter(function (v) {
- return semver.satisfies(v, range[i], options)
- })
- if (!versions.length) return fail()
- }
- return success(versions)
-}
-
-function failInc () {
- console.error('--inc can only be used on a single version with no range')
- fail()
-}
-
-function fail () { process.exit(1) }
-
-function success () {
- var compare = reverse ? 'rcompare' : 'compare'
- versions.sort(function (a, b) {
- return semver[compare](a, b, options)
- }).map(function (v) {
- return semver.clean(v, options)
- }).map(function (v) {
- return inc ? semver.inc(v, inc, options, identifier) : v
- }).forEach(function (v, i, _) { console.log(v) })
-}
-
-function help () {
- console.log(['SemVer ' + version,
- '',
- 'A JavaScript implementation of the https://semver.org/ specification',
- 'Copyright Isaac Z. Schlueter',
- '',
- 'Usage: semver [options] <version> [<version> [...]]',
- 'Prints valid versions sorted by SemVer precedence',
- '',
- 'Options:',
- '-r --range <range>',
- ' Print versions that match the specified range.',
- '',
- '-i --increment [<level>]',
- ' Increment a version by the specified level. Level can',
- ' be one of: major, minor, patch, premajor, preminor,',
- " prepatch, or prerelease. Default level is 'patch'.",
- ' Only one version may be specified.',
- '',
- '--preid <identifier>',
- ' Identifier to be used to prefix premajor, preminor,',
- ' prepatch or prerelease version increments.',
- '',
- '-l --loose',
- ' Interpret versions and ranges loosely',
- '',
- '-p --include-prerelease',
- ' Always include prerelease versions in range matching',
- '',
- '-c --coerce',
- ' Coerce a string into SemVer if possible',
- ' (does not imply --loose)',
- '',
- 'Program exits successfully if any valid version satisfies',
- 'all supplied ranges, and prints all satisfying versions.',
- '',
- 'If no satisfying versions are found, then exits failure.',
- '',
- 'Versions are printed in ascending order, so supplying',
- 'multiple versions to the utility will just sort them.'
- ].join('\n'))
-}
diff --git a/node_modules/libnpmpublish/node_modules/semver/package.json b/node_modules/libnpmpublish/node_modules/semver/package.json
deleted file mode 100644
index 201d6ba04..000000000
--- a/node_modules/libnpmpublish/node_modules/semver/package.json
+++ /dev/null
@@ -1,61 +0,0 @@
-{
- "_from": "semver@^5.5.1",
- "_id": "semver@5.7.1",
- "_inBundle": false,
- "_integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
- "_location": "/libnpmpublish/semver",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "semver@^5.5.1",
- "name": "semver",
- "escapedName": "semver",
- "rawSpec": "^5.5.1",
- "saveSpec": null,
- "fetchSpec": "^5.5.1"
- },
- "_requiredBy": [
- "/libnpmpublish",
- "/libnpmpublish/npm-package-arg"
- ],
- "_resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
- "_shasum": "a954f931aeba508d307bbf069eff0c01c96116f7",
- "_spec": "semver@^5.5.1",
- "_where": "/Users/mperrotte/npminc/cli/node_modules/libnpmpublish",
- "bin": {
- "semver": "bin/semver"
- },
- "bugs": {
- "url": "https://github.com/npm/node-semver/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "The semantic version parser used by npm.",
- "devDependencies": {
- "tap": "^13.0.0-rc.18"
- },
- "files": [
- "bin",
- "range.bnf",
- "semver.js"
- ],
- "homepage": "https://github.com/npm/node-semver#readme",
- "license": "ISC",
- "main": "semver.js",
- "name": "semver",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/node-semver.git"
- },
- "scripts": {
- "postpublish": "git push origin --all; git push origin --tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "test": "tap"
- },
- "tap": {
- "check-coverage": true
- },
- "version": "5.7.1"
-}
diff --git a/node_modules/libnpmpublish/node_modules/semver/range.bnf b/node_modules/libnpmpublish/node_modules/semver/range.bnf
deleted file mode 100644
index d4c6ae0d7..000000000
--- a/node_modules/libnpmpublish/node_modules/semver/range.bnf
+++ /dev/null
@@ -1,16 +0,0 @@
-range-set ::= range ( logical-or range ) *
-logical-or ::= ( ' ' ) * '||' ( ' ' ) *
-range ::= hyphen | simple ( ' ' simple ) * | ''
-hyphen ::= partial ' - ' partial
-simple ::= primitive | partial | tilde | caret
-primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
-partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
-xr ::= 'x' | 'X' | '*' | nr
-nr ::= '0' | [1-9] ( [0-9] ) *
-tilde ::= '~' partial
-caret ::= '^' partial
-qualifier ::= ( '-' pre )? ( '+' build )?
-pre ::= parts
-build ::= parts
-parts ::= part ( '.' part ) *
-part ::= nr | [-0-9A-Za-z]+
diff --git a/node_modules/libnpmpublish/node_modules/semver/semver.js b/node_modules/libnpmpublish/node_modules/semver/semver.js
deleted file mode 100644
index d315d5d68..000000000
--- a/node_modules/libnpmpublish/node_modules/semver/semver.js
+++ /dev/null
@@ -1,1483 +0,0 @@
-exports = module.exports = SemVer
-
-var debug
-/* istanbul ignore next */
-if (typeof process === 'object' &&
- process.env &&
- process.env.NODE_DEBUG &&
- /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
- debug = function () {
- var args = Array.prototype.slice.call(arguments, 0)
- args.unshift('SEMVER')
- console.log.apply(console, args)
- }
-} else {
- debug = function () {}
-}
-
-// Note: this is the semver.org version of the spec that it implements
-// Not necessarily the package version of this code.
-exports.SEMVER_SPEC_VERSION = '2.0.0'
-
-var MAX_LENGTH = 256
-var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
- /* istanbul ignore next */ 9007199254740991
-
-// Max safe segment length for coercion.
-var MAX_SAFE_COMPONENT_LENGTH = 16
-
-// The actual regexps go on exports.re
-var re = exports.re = []
-var src = exports.src = []
-var R = 0
-
-// The following Regular Expressions can be used for tokenizing,
-// validating, and parsing SemVer version strings.
-
-// ## Numeric Identifier
-// A single `0`, or a non-zero digit followed by zero or more digits.
-
-var NUMERICIDENTIFIER = R++
-src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'
-var NUMERICIDENTIFIERLOOSE = R++
-src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'
-
-// ## Non-numeric Identifier
-// Zero or more digits, followed by a letter or hyphen, and then zero or
-// more letters, digits, or hyphens.
-
-var NONNUMERICIDENTIFIER = R++
-src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
-
-// ## Main Version
-// Three dot-separated numeric identifiers.
-
-var MAINVERSION = R++
-src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
- '(' + src[NUMERICIDENTIFIER] + ')\\.' +
- '(' + src[NUMERICIDENTIFIER] + ')'
-
-var MAINVERSIONLOOSE = R++
-src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
- '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
- '(' + src[NUMERICIDENTIFIERLOOSE] + ')'
-
-// ## Pre-release Version Identifier
-// A numeric identifier, or a non-numeric identifier.
-
-var PRERELEASEIDENTIFIER = R++
-src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
- '|' + src[NONNUMERICIDENTIFIER] + ')'
-
-var PRERELEASEIDENTIFIERLOOSE = R++
-src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
- '|' + src[NONNUMERICIDENTIFIER] + ')'
-
-// ## Pre-release Version
-// Hyphen, followed by one or more dot-separated pre-release version
-// identifiers.
-
-var PRERELEASE = R++
-src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
- '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'
-
-var PRERELEASELOOSE = R++
-src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
- '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'
-
-// ## Build Metadata Identifier
-// Any combination of digits, letters, or hyphens.
-
-var BUILDIDENTIFIER = R++
-src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
-
-// ## Build Metadata
-// Plus sign, followed by one or more period-separated build metadata
-// identifiers.
-
-var BUILD = R++
-src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
- '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'
-
-// ## Full Version String
-// A main version, followed optionally by a pre-release version and
-// build metadata.
-
-// Note that only the major, minor, patch, and pre-release sections of
-// the version string are capturing groups. The build metadata is not a
-// capturing group, because it should not ever be used in version
-// comparison.
-
-var FULL = R++
-var FULLPLAIN = 'v?' + src[MAINVERSION] +
- src[PRERELEASE] + '?' +
- src[BUILD] + '?'
-
-src[FULL] = '^' + FULLPLAIN + '$'
-
-// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
-// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
-// common in the npm registry.
-var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
- src[PRERELEASELOOSE] + '?' +
- src[BUILD] + '?'
-
-var LOOSE = R++
-src[LOOSE] = '^' + LOOSEPLAIN + '$'
-
-var GTLT = R++
-src[GTLT] = '((?:<|>)?=?)'
-
-// Something like "2.*" or "1.2.x".
-// Note that "x.x" is a valid xRange identifier, meaning "any version"
-// Only the first item is strictly required.
-var XRANGEIDENTIFIERLOOSE = R++
-src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
-var XRANGEIDENTIFIER = R++
-src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'
-
-var XRANGEPLAIN = R++
-src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:' + src[PRERELEASE] + ')?' +
- src[BUILD] + '?' +
- ')?)?'
-
-var XRANGEPLAINLOOSE = R++
-src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:' + src[PRERELEASELOOSE] + ')?' +
- src[BUILD] + '?' +
- ')?)?'
-
-var XRANGE = R++
-src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'
-var XRANGELOOSE = R++
-src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'
-
-// Coercion.
-// Extract anything that could conceivably be a part of a valid semver
-var COERCE = R++
-src[COERCE] = '(?:^|[^\\d])' +
- '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
- '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
- '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
- '(?:$|[^\\d])'
-
-// Tilde ranges.
-// Meaning is "reasonably at or greater than"
-var LONETILDE = R++
-src[LONETILDE] = '(?:~>?)'
-
-var TILDETRIM = R++
-src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'
-re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g')
-var tildeTrimReplace = '$1~'
-
-var TILDE = R++
-src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'
-var TILDELOOSE = R++
-src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'
-
-// Caret ranges.
-// Meaning is "at least and backwards compatible with"
-var LONECARET = R++
-src[LONECARET] = '(?:\\^)'
-
-var CARETTRIM = R++
-src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'
-re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g')
-var caretTrimReplace = '$1^'
-
-var CARET = R++
-src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'
-var CARETLOOSE = R++
-src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'
-
-// A simple gt/lt/eq thing, or just "" to indicate "any version"
-var COMPARATORLOOSE = R++
-src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'
-var COMPARATOR = R++
-src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'
-
-// An expression to strip any whitespace between the gtlt and the thing
-// it modifies, so that `> 1.2.3` ==> `>1.2.3`
-var COMPARATORTRIM = R++
-src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
- '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'
-
-// this one has to use the /g flag
-re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g')
-var comparatorTrimReplace = '$1$2$3'
-
-// Something like `1.2.3 - 1.2.4`
-// Note that these all use the loose form, because they'll be
-// checked against either the strict or loose comparator form
-// later.
-var HYPHENRANGE = R++
-src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
- '\\s+-\\s+' +
- '(' + src[XRANGEPLAIN] + ')' +
- '\\s*$'
-
-var HYPHENRANGELOOSE = R++
-src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
- '\\s+-\\s+' +
- '(' + src[XRANGEPLAINLOOSE] + ')' +
- '\\s*$'
-
-// Star ranges basically just allow anything at all.
-var STAR = R++
-src[STAR] = '(<|>)?=?\\s*\\*'
-
-// Compile to actual regexp objects.
-// All are flag-free, unless they were created above with a flag.
-for (var i = 0; i < R; i++) {
- debug(i, src[i])
- if (!re[i]) {
- re[i] = new RegExp(src[i])
- }
-}
-
-exports.parse = parse
-function parse (version, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- if (version instanceof SemVer) {
- return version
- }
-
- if (typeof version !== 'string') {
- return null
- }
-
- if (version.length > MAX_LENGTH) {
- return null
- }
-
- var r = options.loose ? re[LOOSE] : re[FULL]
- if (!r.test(version)) {
- return null
- }
-
- try {
- return new SemVer(version, options)
- } catch (er) {
- return null
- }
-}
-
-exports.valid = valid
-function valid (version, options) {
- var v = parse(version, options)
- return v ? v.version : null
-}
-
-exports.clean = clean
-function clean (version, options) {
- var s = parse(version.trim().replace(/^[=v]+/, ''), options)
- return s ? s.version : null
-}
-
-exports.SemVer = SemVer
-
-function SemVer (version, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
- if (version instanceof SemVer) {
- if (version.loose === options.loose) {
- return version
- } else {
- version = version.version
- }
- } else if (typeof version !== 'string') {
- throw new TypeError('Invalid Version: ' + version)
- }
-
- if (version.length > MAX_LENGTH) {
- throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
- }
-
- if (!(this instanceof SemVer)) {
- return new SemVer(version, options)
- }
-
- debug('SemVer', version, options)
- this.options = options
- this.loose = !!options.loose
-
- var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL])
-
- if (!m) {
- throw new TypeError('Invalid Version: ' + version)
- }
-
- this.raw = version
-
- // these are actually numbers
- this.major = +m[1]
- this.minor = +m[2]
- this.patch = +m[3]
-
- if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
- throw new TypeError('Invalid major version')
- }
-
- if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
- throw new TypeError('Invalid minor version')
- }
-
- if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
- throw new TypeError('Invalid patch version')
- }
-
- // numberify any prerelease numeric ids
- if (!m[4]) {
- this.prerelease = []
- } else {
- this.prerelease = m[4].split('.').map(function (id) {
- if (/^[0-9]+$/.test(id)) {
- var num = +id
- if (num >= 0 && num < MAX_SAFE_INTEGER) {
- return num
- }
- }
- return id
- })
- }
-
- this.build = m[5] ? m[5].split('.') : []
- this.format()
-}
-
-SemVer.prototype.format = function () {
- this.version = this.major + '.' + this.minor + '.' + this.patch
- if (this.prerelease.length) {
- this.version += '-' + this.prerelease.join('.')
- }
- return this.version
-}
-
-SemVer.prototype.toString = function () {
- return this.version
-}
-
-SemVer.prototype.compare = function (other) {
- debug('SemVer.compare', this.version, this.options, other)
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
- }
-
- return this.compareMain(other) || this.comparePre(other)
-}
-
-SemVer.prototype.compareMain = function (other) {
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
- }
-
- return compareIdentifiers(this.major, other.major) ||
- compareIdentifiers(this.minor, other.minor) ||
- compareIdentifiers(this.patch, other.patch)
-}
-
-SemVer.prototype.comparePre = function (other) {
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
- }
-
- // NOT having a prerelease is > having one
- if (this.prerelease.length && !other.prerelease.length) {
- return -1
- } else if (!this.prerelease.length && other.prerelease.length) {
- return 1
- } else if (!this.prerelease.length && !other.prerelease.length) {
- return 0
- }
-
- var i = 0
- do {
- var a = this.prerelease[i]
- var b = other.prerelease[i]
- debug('prerelease compare', i, a, b)
- if (a === undefined && b === undefined) {
- return 0
- } else if (b === undefined) {
- return 1
- } else if (a === undefined) {
- return -1
- } else if (a === b) {
- continue
- } else {
- return compareIdentifiers(a, b)
- }
- } while (++i)
-}
-
-// preminor will bump the version up to the next minor release, and immediately
-// down to pre-release. premajor and prepatch work the same way.
-SemVer.prototype.inc = function (release, identifier) {
- switch (release) {
- case 'premajor':
- this.prerelease.length = 0
- this.patch = 0
- this.minor = 0
- this.major++
- this.inc('pre', identifier)
- break
- case 'preminor':
- this.prerelease.length = 0
- this.patch = 0
- this.minor++
- this.inc('pre', identifier)
- break
- case 'prepatch':
-      // If this is already a prerelease, it will bump to the next version and
- // drop any prereleases that might already exist, since they are not
- // relevant at this point.
- this.prerelease.length = 0
- this.inc('patch', identifier)
- this.inc('pre', identifier)
- break
- // If the input is a non-prerelease version, this acts the same as
- // prepatch.
- case 'prerelease':
- if (this.prerelease.length === 0) {
- this.inc('patch', identifier)
- }
- this.inc('pre', identifier)
- break
-
- case 'major':
- // If this is a pre-major version, bump up to the same major version.
- // Otherwise increment major.
- // 1.0.0-5 bumps to 1.0.0
- // 1.1.0 bumps to 2.0.0
- if (this.minor !== 0 ||
- this.patch !== 0 ||
- this.prerelease.length === 0) {
- this.major++
- }
- this.minor = 0
- this.patch = 0
- this.prerelease = []
- break
- case 'minor':
- // If this is a pre-minor version, bump up to the same minor version.
- // Otherwise increment minor.
- // 1.2.0-5 bumps to 1.2.0
- // 1.2.1 bumps to 1.3.0
- if (this.patch !== 0 || this.prerelease.length === 0) {
- this.minor++
- }
- this.patch = 0
- this.prerelease = []
- break
- case 'patch':
- // If this is not a pre-release version, it will increment the patch.
- // If it is a pre-release it will bump up to the same patch version.
- // 1.2.0-5 patches to 1.2.0
- // 1.2.0 patches to 1.2.1
- if (this.prerelease.length === 0) {
- this.patch++
- }
- this.prerelease = []
- break
- // This probably shouldn't be used publicly.
- // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
- case 'pre':
- if (this.prerelease.length === 0) {
- this.prerelease = [0]
- } else {
- var i = this.prerelease.length
- while (--i >= 0) {
- if (typeof this.prerelease[i] === 'number') {
- this.prerelease[i]++
- i = -2
- }
- }
- if (i === -1) {
- // didn't increment anything
- this.prerelease.push(0)
- }
- }
- if (identifier) {
- // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
- // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
- if (this.prerelease[0] === identifier) {
- if (isNaN(this.prerelease[1])) {
- this.prerelease = [identifier, 0]
- }
- } else {
- this.prerelease = [identifier, 0]
- }
- }
- break
-
- default:
- throw new Error('invalid increment argument: ' + release)
- }
- this.format()
- this.raw = this.version
- return this
-}
-
-exports.inc = inc
-function inc (version, release, loose, identifier) {
- if (typeof (loose) === 'string') {
- identifier = loose
- loose = undefined
- }
-
- try {
- return new SemVer(version, loose).inc(release, identifier).version
- } catch (er) {
- return null
- }
-}
-
-exports.diff = diff
-function diff (version1, version2) {
- if (eq(version1, version2)) {
- return null
- } else {
- var v1 = parse(version1)
- var v2 = parse(version2)
- var prefix = ''
- if (v1.prerelease.length || v2.prerelease.length) {
- prefix = 'pre'
- var defaultResult = 'prerelease'
- }
- for (var key in v1) {
- if (key === 'major' || key === 'minor' || key === 'patch') {
- if (v1[key] !== v2[key]) {
- return prefix + key
- }
- }
- }
- return defaultResult // may be undefined
- }
-}
-
-exports.compareIdentifiers = compareIdentifiers
-
-var numeric = /^[0-9]+$/
-function compareIdentifiers (a, b) {
- var anum = numeric.test(a)
- var bnum = numeric.test(b)
-
- if (anum && bnum) {
- a = +a
- b = +b
- }
-
- return a === b ? 0
- : (anum && !bnum) ? -1
- : (bnum && !anum) ? 1
- : a < b ? -1
- : 1
-}
-
-exports.rcompareIdentifiers = rcompareIdentifiers
-function rcompareIdentifiers (a, b) {
- return compareIdentifiers(b, a)
-}
-
-exports.major = major
-function major (a, loose) {
- return new SemVer(a, loose).major
-}
-
-exports.minor = minor
-function minor (a, loose) {
- return new SemVer(a, loose).minor
-}
-
-exports.patch = patch
-function patch (a, loose) {
- return new SemVer(a, loose).patch
-}
-
-exports.compare = compare
-function compare (a, b, loose) {
- return new SemVer(a, loose).compare(new SemVer(b, loose))
-}
-
-exports.compareLoose = compareLoose
-function compareLoose (a, b) {
- return compare(a, b, true)
-}
-
-exports.rcompare = rcompare
-function rcompare (a, b, loose) {
- return compare(b, a, loose)
-}
-
-exports.sort = sort
-function sort (list, loose) {
- return list.sort(function (a, b) {
- return exports.compare(a, b, loose)
- })
-}
-
-exports.rsort = rsort
-function rsort (list, loose) {
- return list.sort(function (a, b) {
- return exports.rcompare(a, b, loose)
- })
-}
-
-exports.gt = gt
-function gt (a, b, loose) {
- return compare(a, b, loose) > 0
-}
-
-exports.lt = lt
-function lt (a, b, loose) {
- return compare(a, b, loose) < 0
-}
-
-exports.eq = eq
-function eq (a, b, loose) {
- return compare(a, b, loose) === 0
-}
-
-exports.neq = neq
-function neq (a, b, loose) {
- return compare(a, b, loose) !== 0
-}
-
-exports.gte = gte
-function gte (a, b, loose) {
- return compare(a, b, loose) >= 0
-}
-
-exports.lte = lte
-function lte (a, b, loose) {
- return compare(a, b, loose) <= 0
-}
-
-exports.cmp = cmp
-function cmp (a, op, b, loose) {
- switch (op) {
- case '===':
- if (typeof a === 'object')
- a = a.version
- if (typeof b === 'object')
- b = b.version
- return a === b
-
- case '!==':
- if (typeof a === 'object')
- a = a.version
- if (typeof b === 'object')
- b = b.version
- return a !== b
-
- case '':
- case '=':
- case '==':
- return eq(a, b, loose)
-
- case '!=':
- return neq(a, b, loose)
-
- case '>':
- return gt(a, b, loose)
-
- case '>=':
- return gte(a, b, loose)
-
- case '<':
- return lt(a, b, loose)
-
- case '<=':
- return lte(a, b, loose)
-
- default:
- throw new TypeError('Invalid operator: ' + op)
- }
-}
-
-exports.Comparator = Comparator
-function Comparator (comp, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- if (comp instanceof Comparator) {
- if (comp.loose === !!options.loose) {
- return comp
- } else {
- comp = comp.value
- }
- }
-
- if (!(this instanceof Comparator)) {
- return new Comparator(comp, options)
- }
-
- debug('comparator', comp, options)
- this.options = options
- this.loose = !!options.loose
- this.parse(comp)
-
- if (this.semver === ANY) {
- this.value = ''
- } else {
- this.value = this.operator + this.semver.version
- }
-
- debug('comp', this)
-}
-
-var ANY = {}
-Comparator.prototype.parse = function (comp) {
- var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
- var m = comp.match(r)
-
- if (!m) {
- throw new TypeError('Invalid comparator: ' + comp)
- }
-
- this.operator = m[1]
- if (this.operator === '=') {
- this.operator = ''
- }
-
- // if it literally is just '>' or '' then allow anything.
- if (!m[2]) {
- this.semver = ANY
- } else {
- this.semver = new SemVer(m[2], this.options.loose)
- }
-}
-
-Comparator.prototype.toString = function () {
- return this.value
-}
-
-Comparator.prototype.test = function (version) {
- debug('Comparator.test', version, this.options.loose)
-
- if (this.semver === ANY) {
- return true
- }
-
- if (typeof version === 'string') {
- version = new SemVer(version, this.options)
- }
-
- return cmp(version, this.operator, this.semver, this.options)
-}
-
-Comparator.prototype.intersects = function (comp, options) {
- if (!(comp instanceof Comparator)) {
- throw new TypeError('a Comparator is required')
- }
-
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- var rangeTmp
-
- if (this.operator === '') {
- rangeTmp = new Range(comp.value, options)
- return satisfies(this.value, rangeTmp, options)
- } else if (comp.operator === '') {
- rangeTmp = new Range(this.value, options)
- return satisfies(comp.semver, rangeTmp, options)
- }
-
- var sameDirectionIncreasing =
- (this.operator === '>=' || this.operator === '>') &&
- (comp.operator === '>=' || comp.operator === '>')
- var sameDirectionDecreasing =
- (this.operator === '<=' || this.operator === '<') &&
- (comp.operator === '<=' || comp.operator === '<')
- var sameSemVer = this.semver.version === comp.semver.version
- var differentDirectionsInclusive =
- (this.operator === '>=' || this.operator === '<=') &&
- (comp.operator === '>=' || comp.operator === '<=')
- var oppositeDirectionsLessThan =
- cmp(this.semver, '<', comp.semver, options) &&
- ((this.operator === '>=' || this.operator === '>') &&
- (comp.operator === '<=' || comp.operator === '<'))
- var oppositeDirectionsGreaterThan =
- cmp(this.semver, '>', comp.semver, options) &&
- ((this.operator === '<=' || this.operator === '<') &&
- (comp.operator === '>=' || comp.operator === '>'))
-
- return sameDirectionIncreasing || sameDirectionDecreasing ||
- (sameSemVer && differentDirectionsInclusive) ||
- oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
-}
-
-exports.Range = Range
-function Range (range, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- if (range instanceof Range) {
- if (range.loose === !!options.loose &&
- range.includePrerelease === !!options.includePrerelease) {
- return range
- } else {
- return new Range(range.raw, options)
- }
- }
-
- if (range instanceof Comparator) {
- return new Range(range.value, options)
- }
-
- if (!(this instanceof Range)) {
- return new Range(range, options)
- }
-
- this.options = options
- this.loose = !!options.loose
- this.includePrerelease = !!options.includePrerelease
-
- // First, split based on boolean or ||
- this.raw = range
- this.set = range.split(/\s*\|\|\s*/).map(function (range) {
- return this.parseRange(range.trim())
- }, this).filter(function (c) {
- // throw out any that are not relevant for whatever reason
- return c.length
- })
-
- if (!this.set.length) {
- throw new TypeError('Invalid SemVer Range: ' + range)
- }
-
- this.format()
-}
-
-Range.prototype.format = function () {
- this.range = this.set.map(function (comps) {
- return comps.join(' ').trim()
- }).join('||').trim()
- return this.range
-}
-
-Range.prototype.toString = function () {
- return this.range
-}
-
-Range.prototype.parseRange = function (range) {
- var loose = this.options.loose
- range = range.trim()
- // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
- var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]
- range = range.replace(hr, hyphenReplace)
- debug('hyphen replace', range)
- // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
- range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace)
- debug('comparator trim', range, re[COMPARATORTRIM])
-
- // `~ 1.2.3` => `~1.2.3`
- range = range.replace(re[TILDETRIM], tildeTrimReplace)
-
- // `^ 1.2.3` => `^1.2.3`
- range = range.replace(re[CARETTRIM], caretTrimReplace)
-
- // normalize spaces
- range = range.split(/\s+/).join(' ')
-
- // At this point, the range is completely trimmed and
- // ready to be split into comparators.
-
- var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
- var set = range.split(' ').map(function (comp) {
- return parseComparator(comp, this.options)
- }, this).join(' ').split(/\s+/)
- if (this.options.loose) {
- // in loose mode, throw out any that are not valid comparators
- set = set.filter(function (comp) {
- return !!comp.match(compRe)
- })
- }
- set = set.map(function (comp) {
- return new Comparator(comp, this.options)
- }, this)
-
- return set
-}
-
-Range.prototype.intersects = function (range, options) {
- if (!(range instanceof Range)) {
- throw new TypeError('a Range is required')
- }
-
- return this.set.some(function (thisComparators) {
- return thisComparators.every(function (thisComparator) {
- return range.set.some(function (rangeComparators) {
- return rangeComparators.every(function (rangeComparator) {
- return thisComparator.intersects(rangeComparator, options)
- })
- })
- })
- })
-}
-
-// Mostly just for testing and legacy API reasons
-exports.toComparators = toComparators
-function toComparators (range, options) {
- return new Range(range, options).set.map(function (comp) {
- return comp.map(function (c) {
- return c.value
- }).join(' ').trim().split(' ')
- })
-}
-
-// comprised of xranges, tildes, stars, and gtlt's at this point.
-// already replaced the hyphen ranges
-// turn into a set of JUST comparators.
-function parseComparator (comp, options) {
- debug('comp', comp, options)
- comp = replaceCarets(comp, options)
- debug('caret', comp)
- comp = replaceTildes(comp, options)
- debug('tildes', comp)
- comp = replaceXRanges(comp, options)
- debug('xrange', comp)
- comp = replaceStars(comp, options)
- debug('stars', comp)
- return comp
-}
-
-function isX (id) {
- return !id || id.toLowerCase() === 'x' || id === '*'
-}
-
-// ~, ~> --> * (any, kinda silly)
-// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
-// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
-// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
-// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
-// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
-function replaceTildes (comp, options) {
- return comp.trim().split(/\s+/).map(function (comp) {
- return replaceTilde(comp, options)
- }).join(' ')
-}
-
-function replaceTilde (comp, options) {
- var r = options.loose ? re[TILDELOOSE] : re[TILDE]
- return comp.replace(r, function (_, M, m, p, pr) {
- debug('tilde', comp, _, M, m, p, pr)
- var ret
-
- if (isX(M)) {
- ret = ''
- } else if (isX(m)) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (isX(p)) {
- // ~1.2 == >=1.2.0 <1.3.0
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- } else if (pr) {
- debug('replaceTilde pr', pr)
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + (+m + 1) + '.0'
- } else {
- // ~1.2.3 == >=1.2.3 <1.3.0
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + (+m + 1) + '.0'
- }
-
- debug('tilde return', ret)
- return ret
- })
-}
-
-// ^ --> * (any, kinda silly)
-// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
-// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
-// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
-// ^1.2.3 --> >=1.2.3 <2.0.0
-// ^1.2.0 --> >=1.2.0 <2.0.0
-function replaceCarets (comp, options) {
- return comp.trim().split(/\s+/).map(function (comp) {
- return replaceCaret(comp, options)
- }).join(' ')
-}
-
-function replaceCaret (comp, options) {
- debug('caret', comp, options)
- var r = options.loose ? re[CARETLOOSE] : re[CARET]
- return comp.replace(r, function (_, M, m, p, pr) {
- debug('caret', comp, _, M, m, p, pr)
- var ret
-
- if (isX(M)) {
- ret = ''
- } else if (isX(m)) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (isX(p)) {
- if (M === '0') {
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- } else {
- ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
- }
- } else if (pr) {
- debug('replaceCaret pr', pr)
- if (M === '0') {
- if (m === '0') {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + m + '.' + (+p + 1)
- } else {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + (+m + 1) + '.0'
- }
- } else {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + (+M + 1) + '.0.0'
- }
- } else {
- debug('no pr')
- if (M === '0') {
- if (m === '0') {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + m + '.' + (+p + 1)
- } else {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + (+m + 1) + '.0'
- }
- } else {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + (+M + 1) + '.0.0'
- }
- }
-
- debug('caret return', ret)
- return ret
- })
-}
-
-function replaceXRanges (comp, options) {
- debug('replaceXRanges', comp, options)
- return comp.split(/\s+/).map(function (comp) {
- return replaceXRange(comp, options)
- }).join(' ')
-}
-
-function replaceXRange (comp, options) {
- comp = comp.trim()
- var r = options.loose ? re[XRANGELOOSE] : re[XRANGE]
- return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
- debug('xRange', comp, ret, gtlt, M, m, p, pr)
- var xM = isX(M)
- var xm = xM || isX(m)
- var xp = xm || isX(p)
- var anyX = xp
-
- if (gtlt === '=' && anyX) {
- gtlt = ''
- }
-
- if (xM) {
- if (gtlt === '>' || gtlt === '<') {
- // nothing is allowed
- ret = '<0.0.0'
- } else {
- // nothing is forbidden
- ret = '*'
- }
- } else if (gtlt && anyX) {
- // we know patch is an x, because we have any x at all.
- // replace X with 0
- if (xm) {
- m = 0
- }
- p = 0
-
- if (gtlt === '>') {
- // >1 => >=2.0.0
- // >1.2 => >=1.3.0
- // >1.2.3 => >= 1.2.4
- gtlt = '>='
- if (xm) {
- M = +M + 1
- m = 0
- p = 0
- } else {
- m = +m + 1
- p = 0
- }
- } else if (gtlt === '<=') {
- // <=0.7.x is actually <0.8.0, since any 0.7.x should
- // pass. Similarly, <=7.x is actually <8.0.0, etc.
- gtlt = '<'
- if (xm) {
- M = +M + 1
- } else {
- m = +m + 1
- }
- }
-
- ret = gtlt + M + '.' + m + '.' + p
- } else if (xm) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (xp) {
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- }
-
- debug('xRange return', ret)
-
- return ret
- })
-}
-
-// Because * is AND-ed with everything else in the comparator,
-// and '' means "any version", just remove the *s entirely.
-function replaceStars (comp, options) {
- debug('replaceStars', comp, options)
- // Looseness is ignored here. star is always as loose as it gets!
- return comp.trim().replace(re[STAR], '')
-}
-
-// This function is passed to string.replace(re[HYPHENRANGE])
-// M, m, patch, prerelease, build
-// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
-// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
-// 1.2 - 3.4 => >=1.2.0 <3.5.0
-function hyphenReplace ($0,
- from, fM, fm, fp, fpr, fb,
- to, tM, tm, tp, tpr, tb) {
- if (isX(fM)) {
- from = ''
- } else if (isX(fm)) {
- from = '>=' + fM + '.0.0'
- } else if (isX(fp)) {
- from = '>=' + fM + '.' + fm + '.0'
- } else {
- from = '>=' + from
- }
-
- if (isX(tM)) {
- to = ''
- } else if (isX(tm)) {
- to = '<' + (+tM + 1) + '.0.0'
- } else if (isX(tp)) {
- to = '<' + tM + '.' + (+tm + 1) + '.0'
- } else if (tpr) {
- to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
- } else {
- to = '<=' + to
- }
-
- return (from + ' ' + to).trim()
-}
-
-// if ANY of the sets match ALL of its comparators, then pass
-Range.prototype.test = function (version) {
- if (!version) {
- return false
- }
-
- if (typeof version === 'string') {
- version = new SemVer(version, this.options)
- }
-
- for (var i = 0; i < this.set.length; i++) {
- if (testSet(this.set[i], version, this.options)) {
- return true
- }
- }
- return false
-}
-
-function testSet (set, version, options) {
- for (var i = 0; i < set.length; i++) {
- if (!set[i].test(version)) {
- return false
- }
- }
-
- if (version.prerelease.length && !options.includePrerelease) {
- // Find the set of versions that are allowed to have prereleases
- // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
- // That should allow `1.2.3-pr.2` to pass.
- // However, `1.2.4-alpha.notready` should NOT be allowed,
- // even though it's within the range set by the comparators.
- for (i = 0; i < set.length; i++) {
- debug(set[i].semver)
- if (set[i].semver === ANY) {
- continue
- }
-
- if (set[i].semver.prerelease.length > 0) {
- var allowed = set[i].semver
- if (allowed.major === version.major &&
- allowed.minor === version.minor &&
- allowed.patch === version.patch) {
- return true
- }
- }
- }
-
- // Version has a -pre, but it's not one of the ones we like.
- return false
- }
-
- return true
-}
-
-exports.satisfies = satisfies
-function satisfies (version, range, options) {
- try {
- range = new Range(range, options)
- } catch (er) {
- return false
- }
- return range.test(version)
-}
-
-exports.maxSatisfying = maxSatisfying
-function maxSatisfying (versions, range, options) {
- var max = null
- var maxSV = null
- try {
- var rangeObj = new Range(range, options)
- } catch (er) {
- return null
- }
- versions.forEach(function (v) {
- if (rangeObj.test(v)) {
- // satisfies(v, range, options)
- if (!max || maxSV.compare(v) === -1) {
- // compare(max, v, true)
- max = v
- maxSV = new SemVer(max, options)
- }
- }
- })
- return max
-}
-
-exports.minSatisfying = minSatisfying
-function minSatisfying (versions, range, options) {
- var min = null
- var minSV = null
- try {
- var rangeObj = new Range(range, options)
- } catch (er) {
- return null
- }
- versions.forEach(function (v) {
- if (rangeObj.test(v)) {
- // satisfies(v, range, options)
- if (!min || minSV.compare(v) === 1) {
- // compare(min, v, true)
- min = v
- minSV = new SemVer(min, options)
- }
- }
- })
- return min
-}
-
-exports.minVersion = minVersion
-function minVersion (range, loose) {
- range = new Range(range, loose)
-
- var minver = new SemVer('0.0.0')
- if (range.test(minver)) {
- return minver
- }
-
- minver = new SemVer('0.0.0-0')
- if (range.test(minver)) {
- return minver
- }
-
- minver = null
- for (var i = 0; i < range.set.length; ++i) {
- var comparators = range.set[i]
-
- comparators.forEach(function (comparator) {
- // Clone to avoid manipulating the comparator's semver object.
- var compver = new SemVer(comparator.semver.version)
- switch (comparator.operator) {
- case '>':
- if (compver.prerelease.length === 0) {
- compver.patch++
- } else {
- compver.prerelease.push(0)
- }
- compver.raw = compver.format()
- /* fallthrough */
- case '':
- case '>=':
- if (!minver || gt(minver, compver)) {
- minver = compver
- }
- break
- case '<':
- case '<=':
- /* Ignore maximum versions */
- break
- /* istanbul ignore next */
- default:
- throw new Error('Unexpected operation: ' + comparator.operator)
- }
- })
- }
-
- if (minver && range.test(minver)) {
- return minver
- }
-
- return null
-}
-
-exports.validRange = validRange
-function validRange (range, options) {
- try {
- // Return '*' instead of '' so that truthiness works.
- // This will throw if it's invalid anyway
- return new Range(range, options).range || '*'
- } catch (er) {
- return null
- }
-}
-
-// Determine if version is less than all the versions possible in the range
-exports.ltr = ltr
-function ltr (version, range, options) {
- return outside(version, range, '<', options)
-}
-
-// Determine if version is greater than all the versions possible in the range.
-exports.gtr = gtr
-function gtr (version, range, options) {
- return outside(version, range, '>', options)
-}
-
-exports.outside = outside
-function outside (version, range, hilo, options) {
- version = new SemVer(version, options)
- range = new Range(range, options)
-
- var gtfn, ltefn, ltfn, comp, ecomp
- switch (hilo) {
- case '>':
- gtfn = gt
- ltefn = lte
- ltfn = lt
- comp = '>'
- ecomp = '>='
- break
- case '<':
- gtfn = lt
- ltefn = gte
- ltfn = gt
- comp = '<'
- ecomp = '<='
- break
- default:
- throw new TypeError('Must provide a hilo val of "<" or ">"')
- }
-
-  // If it satisfies the range it is not outside
- if (satisfies(version, range, options)) {
- return false
- }
-
- // From now on, variable terms are as if we're in "gtr" mode.
- // but note that everything is flipped for the "ltr" function.
-
- for (var i = 0; i < range.set.length; ++i) {
- var comparators = range.set[i]
-
- var high = null
- var low = null
-
- comparators.forEach(function (comparator) {
- if (comparator.semver === ANY) {
- comparator = new Comparator('>=0.0.0')
- }
- high = high || comparator
- low = low || comparator
- if (gtfn(comparator.semver, high.semver, options)) {
- high = comparator
- } else if (ltfn(comparator.semver, low.semver, options)) {
- low = comparator
- }
- })
-
-    // If the edge version comparator has an operator, then our version
-    // isn't outside it
- if (high.operator === comp || high.operator === ecomp) {
- return false
- }
-
-    // If the lowest version comparator has an operator and our version
-    // is less than it, then it isn't higher than the range
- if ((!low.operator || low.operator === comp) &&
- ltefn(version, low.semver)) {
- return false
- } else if (low.operator === ecomp && ltfn(version, low.semver)) {
- return false
- }
- }
- return true
-}
-
-exports.prerelease = prerelease
-function prerelease (version, options) {
- var parsed = parse(version, options)
- return (parsed && parsed.prerelease.length) ? parsed.prerelease : null
-}
-
-exports.intersects = intersects
-function intersects (r1, r2, options) {
- r1 = new Range(r1, options)
- r2 = new Range(r2, options)
- return r1.intersects(r2)
-}
-
-exports.coerce = coerce
-function coerce (version) {
- if (version instanceof SemVer) {
- return version
- }
-
- if (typeof version !== 'string') {
- return null
- }
-
- var match = version.match(re[COERCE])
-
- if (match == null) {
- return null
- }
-
- return parse(match[1] +
- '.' + (match[2] || '0') +
- '.' + (match[3] || '0'))
-}
diff --git a/node_modules/libnpmpublish/node_modules/ssri/CHANGELOG.md b/node_modules/libnpmpublish/node_modules/ssri/CHANGELOG.md
deleted file mode 100644
index d4c589790..000000000
--- a/node_modules/libnpmpublish/node_modules/ssri/CHANGELOG.md
+++ /dev/null
@@ -1,286 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="6.0.1"></a>
-## [6.0.1](https://github.com/zkat/ssri/compare/v6.0.0...v6.0.1) (2018-08-27)
-
-
-### Bug Fixes
-
-* **opts:** use figgy-pudding to specify consumed opts ([cf86553](https://github.com/zkat/ssri/commit/cf86553))
-
-
-
-<a name="6.0.0"></a>
-# [6.0.0](https://github.com/zkat/ssri/compare/v5.3.0...v6.0.0) (2018-04-09)
-
-
-### Bug Fixes
-
-* **docs:** minor typo ([b71ef17](https://github.com/zkat/ssri/commit/b71ef17))
-
-
-### meta
-
-* drop support for node@4 ([d9bf359](https://github.com/zkat/ssri/commit/d9bf359))
-
-
-### BREAKING CHANGES
-
-* node@4 is no longer supported
-
-
-
-<a name="5.3.0"></a>
-# [5.3.0](https://github.com/zkat/ssri/compare/v5.2.4...v5.3.0) (2018-03-13)
-
-
-### Features
-
-* **checkData:** optionally throw when checkData fails ([bf26b84](https://github.com/zkat/ssri/commit/bf26b84))
-
-
-
-<a name="5.2.4"></a>
-## [5.2.4](https://github.com/zkat/ssri/compare/v5.2.3...v5.2.4) (2018-02-16)
-
-
-
-<a name="5.2.3"></a>
-## [5.2.3](https://github.com/zkat/ssri/compare/v5.2.2...v5.2.3) (2018-02-16)
-
-
-### Bug Fixes
-
-* **hashes:** filter hash priority list by available hashes ([2fa30b8](https://github.com/zkat/ssri/commit/2fa30b8))
-* **integrityStream:** dedupe algorithms to generate ([d56c654](https://github.com/zkat/ssri/commit/d56c654))
-
-
-
-<a name="5.2.2"></a>
-## [5.2.2](https://github.com/zkat/ssri/compare/v5.2.1...v5.2.2) (2018-02-14)
-
-
-### Bug Fixes
-
-* **security:** tweak strict SRI regex ([#10](https://github.com/zkat/ssri/issues/10)) ([d0ebcdc](https://github.com/zkat/ssri/commit/d0ebcdc))
-
-
-
-<a name="5.2.1"></a>
-## [5.2.1](https://github.com/zkat/ssri/compare/v5.2.0...v5.2.1) (2018-02-06)
-
-
-
-<a name="5.2.0"></a>
-# [5.2.0](https://github.com/zkat/ssri/compare/v5.1.0...v5.2.0) (2018-02-06)
-
-
-### Features
-
-* **match:** add integrity.match() ([3c49cc4](https://github.com/zkat/ssri/commit/3c49cc4))
-
-
-
-<a name="5.1.0"></a>
-# [5.1.0](https://github.com/zkat/ssri/compare/v5.0.0...v5.1.0) (2018-01-18)
-
-
-### Bug Fixes
-
-* **checkStream:** integrityStream now takes opts.integrity algos into account ([d262910](https://github.com/zkat/ssri/commit/d262910))
-
-
-### Features
-
-* **sha3:** do some guesswork about upcoming sha3 ([7fdd9df](https://github.com/zkat/ssri/commit/7fdd9df))
-
-
-
-<a name="5.0.0"></a>
-# [5.0.0](https://github.com/zkat/ssri/compare/v4.1.6...v5.0.0) (2017-10-23)
-
-
-### Features
-
-* **license:** relicense to ISC (#9) ([c82983a](https://github.com/zkat/ssri/commit/c82983a))
-
-
-### BREAKING CHANGES
-
-* **license:** the license has been changed from CC0-1.0 to ISC.
-
-
-
-<a name="4.1.6"></a>
-## [4.1.6](https://github.com/zkat/ssri/compare/v4.1.5...v4.1.6) (2017-06-07)
-
-
-### Bug Fixes
-
-* **checkStream:** make sure to pass all opts through ([0b1bcbe](https://github.com/zkat/ssri/commit/0b1bcbe))
-
-
-
-<a name="4.1.5"></a>
-## [4.1.5](https://github.com/zkat/ssri/compare/v4.1.4...v4.1.5) (2017-06-05)
-
-
-### Bug Fixes
-
-* **integrityStream:** stop crashing if opts.algorithms and opts.integrity have an algo mismatch ([fb1293e](https://github.com/zkat/ssri/commit/fb1293e))
-
-
-
-<a name="4.1.4"></a>
-## [4.1.4](https://github.com/zkat/ssri/compare/v4.1.3...v4.1.4) (2017-05-31)
-
-
-### Bug Fixes
-
-* **node:** older versions of node[@4](https://github.com/4) do not support base64buffer string parsing ([513df4e](https://github.com/zkat/ssri/commit/513df4e))
-
-
-
-<a name="4.1.3"></a>
-## [4.1.3](https://github.com/zkat/ssri/compare/v4.1.2...v4.1.3) (2017-05-24)
-
-
-### Bug Fixes
-
-* **check:** handle various bad hash corner cases better ([c2c262b](https://github.com/zkat/ssri/commit/c2c262b))
-
-
-
-<a name="4.1.2"></a>
-## [4.1.2](https://github.com/zkat/ssri/compare/v4.1.1...v4.1.2) (2017-04-18)
-
-
-### Bug Fixes
-
-* **stream:** _flush can be called multiple times. use on("end") ([b1c4805](https://github.com/zkat/ssri/commit/b1c4805))
-
-
-
-<a name="4.1.1"></a>
-## [4.1.1](https://github.com/zkat/ssri/compare/v4.1.0...v4.1.1) (2017-04-12)
-
-
-### Bug Fixes
-
-* **pickAlgorithm:** error if pickAlgorithm() is used in an empty Integrity ([fab470e](https://github.com/zkat/ssri/commit/fab470e))
-
-
-
-<a name="4.1.0"></a>
-# [4.1.0](https://github.com/zkat/ssri/compare/v4.0.0...v4.1.0) (2017-04-07)
-
-
-### Features
-
-* adding ssri.create for a crypto style interface (#2) ([96f52ad](https://github.com/zkat/ssri/commit/96f52ad))
-
-
-
-<a name="4.0.0"></a>
-# [4.0.0](https://github.com/zkat/ssri/compare/v3.0.2...v4.0.0) (2017-04-03)
-
-
-### Bug Fixes
-
-* **integrity:** should have changed the error code before. oops ([8381afa](https://github.com/zkat/ssri/commit/8381afa))
-
-
-### BREAKING CHANGES
-
-* **integrity:** EBADCHECKSUM -> EINTEGRITY for verification errors
-
-
-
-<a name="3.0.2"></a>
-## [3.0.2](https://github.com/zkat/ssri/compare/v3.0.1...v3.0.2) (2017-04-03)
-
-
-
-<a name="3.0.1"></a>
-## [3.0.1](https://github.com/zkat/ssri/compare/v3.0.0...v3.0.1) (2017-04-03)
-
-
-### Bug Fixes
-
-* **package.json:** really should have these in the keywords because search ([a6ac6d0](https://github.com/zkat/ssri/commit/a6ac6d0))
-
-
-
-<a name="3.0.0"></a>
-# [3.0.0](https://github.com/zkat/ssri/compare/v2.0.0...v3.0.0) (2017-04-03)
-
-
-### Bug Fixes
-
-* **hashes:** IntegrityMetadata -> Hash ([d04aa1f](https://github.com/zkat/ssri/commit/d04aa1f))
-
-
-### Features
-
-* **check:** return IntegrityMetadata on check success ([2301e74](https://github.com/zkat/ssri/commit/2301e74))
-* **fromHex:** ssri.fromHex to make it easier to generate them from hex valus ([049b89e](https://github.com/zkat/ssri/commit/049b89e))
-* **hex:** utility function for getting hex version of digest ([a9f021c](https://github.com/zkat/ssri/commit/a9f021c))
-* **hexDigest:** added hexDigest method to Integrity objects too ([85208ba](https://github.com/zkat/ssri/commit/85208ba))
-* **integrity:** add .isIntegrity and .isIntegrityMetadata ([1b29e6f](https://github.com/zkat/ssri/commit/1b29e6f))
-* **integrityStream:** new stream that can both generate and check streamed data ([fd23e1b](https://github.com/zkat/ssri/commit/fd23e1b))
-* **parse:** allow parsing straight into a single IntegrityMetadata object ([c8ddf48](https://github.com/zkat/ssri/commit/c8ddf48))
-* **pickAlgorithm:** Intergrity#pickAlgorithm() added ([b97a796](https://github.com/zkat/ssri/commit/b97a796))
-* **size:** calculate and update stream sizes ([02ed1ad](https://github.com/zkat/ssri/commit/02ed1ad))
-
-
-### BREAKING CHANGES
-
-* **hashes:** `.isIntegrityMetadata` is now `.isHash`. Also, any references to `IntegrityMetadata` now refer to `Hash`.
-* **integrityStream:** createCheckerStream has been removed and replaced with a general-purpose integrityStream.
-
-To convert existing createCheckerStream code, move the `sri` argument into `opts.integrity` in integrityStream. All other options should be the same.
-* **check:** `checkData`, `checkStream`, and `createCheckerStream` now yield a whole IntegrityMetadata instance representing the first successful hash match.
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/zkat/ssri/compare/v1.0.0...v2.0.0) (2017-03-24)
-
-
-### Bug Fixes
-
-* **strict-mode:** make regexes more rigid ([122a32c](https://github.com/zkat/ssri/commit/122a32c))
-
-
-### Features
-
-* **api:** added serialize alias for unparse ([999b421](https://github.com/zkat/ssri/commit/999b421))
-* **concat:** add Integrity#concat() ([cae12c7](https://github.com/zkat/ssri/commit/cae12c7))
-* **pickAlgo:** pick the strongest algorithm provided, by default ([58c18f7](https://github.com/zkat/ssri/commit/58c18f7))
-* **strict-mode:** strict SRI support ([3f0b64c](https://github.com/zkat/ssri/commit/3f0b64c))
-* **stringify:** replaced unparse/serialize with stringify ([4acad30](https://github.com/zkat/ssri/commit/4acad30))
-* **verification:** add opts.pickAlgorithm ([f72e658](https://github.com/zkat/ssri/commit/f72e658))
-
-
-### BREAKING CHANGES
-
-* **pickAlgo:** ssri will prioritize specific hashes now
-* **stringify:** serialize and unparse have been removed. Use ssri.stringify instead.
-* **strict-mode:** functions that accepted an optional `sep` argument now expect `opts.sep`.
-
-
-
-<a name="1.0.0"></a>
-# 1.0.0 (2017-03-23)
-
-
-### Features
-
-* **api:** implemented initial api ([4fbb16b](https://github.com/zkat/ssri/commit/4fbb16b))
-
-
-### BREAKING CHANGES
-
-* **api:** Initial API established.
diff --git a/node_modules/libnpmpublish/node_modules/ssri/LICENSE.md b/node_modules/libnpmpublish/node_modules/ssri/LICENSE.md
deleted file mode 100644
index 8d28acf86..000000000
--- a/node_modules/libnpmpublish/node_modules/ssri/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libnpmpublish/node_modules/ssri/README.md b/node_modules/libnpmpublish/node_modules/ssri/README.md
deleted file mode 100644
index c250961bd..000000000
--- a/node_modules/libnpmpublish/node_modules/ssri/README.md
+++ /dev/null
@@ -1,488 +0,0 @@
-# ssri [![npm version](https://img.shields.io/npm/v/ssri.svg)](https://npm.im/ssri) [![license](https://img.shields.io/npm/l/ssri.svg)](https://npm.im/ssri) [![Travis](https://img.shields.io/travis/zkat/ssri.svg)](https://travis-ci.org/zkat/ssri) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/ssri?svg=true)](https://ci.appveyor.com/project/zkat/ssri) [![Coverage Status](https://coveralls.io/repos/github/zkat/ssri/badge.svg?branch=latest)](https://coveralls.io/github/zkat/ssri?branch=latest)
-
-[`ssri`](https://github.com/zkat/ssri), short for Standard Subresource
-Integrity, is a Node.js utility for parsing, manipulating, serializing,
-generating, and verifying [Subresource
-Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) hashes.
-
-## Install
-
-`$ npm install --save ssri`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [Contributing](#contributing)
-* [API](#api)
- * Parsing & Serializing
- * [`parse`](#parse)
- * [`stringify`](#stringify)
- * [`Integrity#concat`](#integrity-concat)
- * [`Integrity#toString`](#integrity-to-string)
- * [`Integrity#toJSON`](#integrity-to-json)
- * [`Integrity#match`](#integrity-match)
- * [`Integrity#pickAlgorithm`](#integrity-pick-algorithm)
- * [`Integrity#hexDigest`](#integrity-hex-digest)
- * Integrity Generation
- * [`fromHex`](#from-hex)
- * [`fromData`](#from-data)
- * [`fromStream`](#from-stream)
- * [`create`](#create)
- * Integrity Verification
- * [`checkData`](#check-data)
- * [`checkStream`](#check-stream)
- * [`integrityStream`](#integrity-stream)
-
-### Example
-
-```javascript
-const ssri = require('ssri')
-
-const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-// Parsing and serializing
-const parsed = ssri.parse(integrity)
-ssri.stringify(parsed) // === integrity (works on non-Integrity objects)
-parsed.toString() // === integrity
-
-// Async stream functions
-ssri.checkStream(fs.createReadStream('./my-file'), integrity).then(...)
-ssri.fromStream(fs.createReadStream('./my-file')).then(sri => {
- sri.toString() === integrity
-})
-fs.createReadStream('./my-file').pipe(ssri.integrityStream({integrity}))
-
-// Sync data functions
-ssri.fromData(fs.readFileSync('./my-file')) // === parsed
-ssri.checkData(fs.readFileSync('./my-file'), integrity) // => 'sha512'
-```
-
-### Features
-
-* Parses and stringifies SRI strings.
-* Generates SRI strings from raw data or Streams.
-* Strict standard compliance.
-* `?foo` metadata option support.
-* Multiple entries for the same algorithm.
-* Object-based integrity hash manipulation.
-* Small footprint: no dependencies, concise implementation.
-* Full test coverage.
-* Customizable algorithm picker.
-
-### Contributing
-
-The ssri team enthusiastically welcomes contributions and project participation!
-There's a bunch of things you can do if you want to contribute! The [Contributor
-Guide](CONTRIBUTING.md) has all the information you need for everything from
-reporting bugs to contributing entire new features. Please don't hesitate to
-jump in if you'd like to, or even ask us questions if something isn't clear.
-
-### API
-
-#### <a name="parse"></a> `> ssri.parse(sri, [opts]) -> Integrity`
-
-Parses `sri` into an `Integrity` data structure. `sri` can be an integrity
-string, a `Hash`-like with `digest` and `algorithm` fields and an optional
-`options` field, or an `Integrity`-like object. The resulting object will be an
-`Integrity` instance that has this shape:
-
-```javascript
-{
- 'sha1': [{algorithm: 'sha1', digest: 'deadbeef', options: []}],
- 'sha512': [
- {algorithm: 'sha512', digest: 'c0ffee', options: []},
- {algorithm: 'sha512', digest: 'bad1dea', options: ['foo']}
- ],
-}
-```
-
-If `opts.single` is truthy, a single `Hash` object will be returned. That is, a
-single object that looks like `{algorithm, digest, options}`, as opposed to a
-larger object with multiple of these.
-
-If `opts.strict` is truthy, the resulting object will be filtered such that
-it strictly follows the Subresource Integrity spec, throwing away any entries
-with any invalid components. This also means a restricted set of algorithms
-will be used -- the spec limits them to `sha256`, `sha384`, and `sha512`.
-
-Strict mode is recommended if the integrity strings are intended for use in
-browsers, or in other situations where strict adherence to the spec is needed.
-
-##### Example
-
-```javascript
-ssri.parse('sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo') // -> Integrity object
-```
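-
-The options described above can be combined in the same call (a minimal
-sketch reusing the integrity string from the previous example):
-
-```javascript
-// opts.single: returns a single Hash ({algorithm, digest, options})
-ssri.parse('sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo', {single: true})
-
-// opts.strict: the sha1 entry is thrown away, since sha1 is not a spec algorithm
-ssri.parse('sha1-deadbeef sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo', {strict: true})
-```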
-
-#### <a name="stringify"></a> `> ssri.stringify(sri, [opts]) -> String`
-
-This function is identical to [`Integrity#toString()`](#integrity-to-string),
-except it can be used on _any_ object that [`parse`](#parse) can handle -- that
-is, a string, a `Hash`-like, or an `Integrity`-like.
-
-The `opts.sep` option defines the string to use when joining multiple entries
-together. To be spec-compliant, this _must_ be whitespace. The default is a
-single space (`' '`).
-
-If `opts.strict` is true, the integrity string will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-// Useful for cleaning up input SRI strings:
-ssri.stringify('\n\rsha512-foo\n\t\tsha384-bar')
-// -> 'sha512-foo sha384-bar'
-
-// Hash-like: only a single entry.
-ssri.stringify({
- algorithm: 'sha512',
- digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==',
- options: ['foo']
-})
-// ->
-// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-// Integrity-like: full multi-entry syntax. Similar to output of `ssri.parse`
-ssri.stringify({
- 'sha512': [
- {
- algorithm: 'sha512',
- digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==',
- options: ['foo']
- }
- ]
-})
-// ->
-// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-```
-
-#### <a name="integrity-concat"></a> `> Integrity#concat(otherIntegrity, [opts]) -> Integrity`
-
-Concatenates an `Integrity` object with another IntegrityLike, or an integrity
-string.
-
-This is functionally equivalent to concatenating the string format of both
-integrity arguments, and calling [`ssri.parse`](#ssri-parse) on the new string.
-
-If `opts.strict` is true, the new `Integrity` will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-// This will combine the integrity checks for two different versions of
-// your index.js file so you can use a single integrity string and serve
-// either of these to clients, from a single `<script>` tag.
-const desktopIntegrity = ssri.fromData(fs.readFileSync('./index.desktop.js'))
-const mobileIntegrity = ssri.fromData(fs.readFileSync('./index.mobile.js'))
-
-// Note that browsers (and ssri) will succeed as long as ONE of the entries
-// for the *prioritized* algorithm succeeds. That is, in order for this fallback
-// to work, both desktop and mobile *must* use the same `algorithm` values.
-desktopIntegrity.concat(mobileIntegrity)
-```
-
-#### <a name="integrity-to-string"></a> `> Integrity#toString([opts]) -> String`
-
-Returns the string representation of an `Integrity` object. All hash entries
-will be concatenated in the string by `opts.sep`, which defaults to `' '`.
-
-If you want to serialize an object that didn't come from an `ssri` function,
-use [`ssri.stringify()`](#stringify).
-
-If `opts.strict` is true, the integrity string will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-ssri.parse(integrity).toString() === integrity
-```
-
-#### <a name="integrity-to-json"></a> `> Integrity#toJSON() -> String`
-
-Returns the string representation of an `Integrity` object. All hash entries
-will be concatenated in the string by `' '`.
-
-This is a convenience method so you can pass an `Integrity` object directly to `JSON.stringify`.
-For more info, check out [toJSON() behavior on MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#toJSON%28%29_behavior).
-
-##### Example
-
-```javascript
-const integrity = '"sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo"'
-
-JSON.stringify(ssri.parse(integrity)) === integrity
-```
-
-#### <a name="integrity-match"></a> `> Integrity#match(sri, [opts]) -> Hash | false`
-
-Returns the matching (truthy) hash if `Integrity` matches the argument passed as
-`sri`, which can be anything that [`parse`](#parse) will accept. `opts` will be
-passed through to `parse` and [`pickAlgorithm()`](#integrity-pick-algorithm).
-
-##### Example
-
-```javascript
-const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A=='
-
-ssri.parse(integrity).match(integrity)
-// Hash {
-// digest: '9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A=='
-// algorithm: 'sha512'
-// }
-
-ssri.parse(integrity).match('sha1-deadbeef')
-// false
-```
-
-#### <a name="integrity-pick-algorithm"></a> `> Integrity#pickAlgorithm([opts]) -> String`
-
-Returns the "best" algorithm from those available in the integrity object.
-
-If `opts.pickAlgorithm` is provided, it will be passed two algorithms as
-arguments. ssri will prioritize whichever of the two algorithms is returned by
-this function. Note that the function may be called multiple times, and it
-**must** return one of the two algorithms provided. By default, ssri will make
-a best-effort to pick the strongest/most reliable of the given algorithms. It
-may intentionally deprioritize algorithms with known vulnerabilities.
-
-##### Example
-
-```javascript
-ssri.parse('sha1-WEakDigEST sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1').pickAlgorithm() // sha512
-```
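-
-A custom picker can be passed in as well. A minimal sketch that always prefers
-`sha384` when it is one of the two candidates (and otherwise keeps the first
-argument):
-
-```javascript
-const sri = 'sha384-irnCxQ0CfQhYGlVAUdwTPC9bF3+YWLxlaDGM4xbYminxpbXEq+D+2GCEBTxcjES9 sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1'
-ssri.parse(sri).pickAlgorithm({
-  pickAlgorithm: (a, b) => a === 'sha384' ? a : b === 'sha384' ? b : a
-}) // sha384
-```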
-
-#### <a name="integrity-hex-digest"></a> `> Integrity#hexDigest() -> String`
-
-`Integrity` is assumed to be either a single-hash `Integrity` instance, or a
-`Hash` instance. Returns its `digest`, converted to a hex representation of the
-base64 data.
-
-##### Example
-
-```javascript
-ssri.parse('sha1-deadbeef').hexDigest() // '75e69d6de79f'
-```
-
-#### <a name="from-hex"></a> `> ssri.fromHex(hexDigest, algorithm, [opts]) -> Integrity`
-
-Creates an `Integrity` object with a single entry, based on a hex-formatted
-hash. This is a utility function to help convert existing shasums to the
-Integrity format, and is roughly equivalent to something like:
-
-```javascript
-algorithm + '-' + Buffer.from(hexDigest, 'hex').toString('base64')
-```
-
-`opts.options` may optionally be passed in: it must be an array of option
-strings that will be added to the generated integrity hash. This is a
-loosely-specified feature of SRIs, and currently has no specified semantics
-besides being `?`-separated. Use at your own risk, and probably avoid it if
-your integrity strings are meant to be used with browsers.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-If `opts.single` is true, a single `Hash` object will be returned.
-
-##### Example
-
-```javascript
-ssri.fromHex('75e69d6de79f', 'sha1').toString() // 'sha1-deadbeef'
-```
-
-#### <a name="from-data"></a> `> ssri.fromData(data, [opts]) -> Integrity`
-
-Creates an `Integrity` object from either string or `Buffer` data, calculating
-all the requested hashes and adding any specified options to the object.
-
-`opts.algorithms` determines which algorithms to generate hashes for. All
-results will be included in a single `Integrity` object. The default value for
-`opts.algorithms` is `['sha512']`. All algorithm strings must be hashes listed
-in `crypto.getHashes()` for the host Node.js platform.
-
-`opts.options` may optionally be passed in: it must be an array of option
-strings that will be added to all integrity hashes generated by `fromData`.
-This is a loosely-specified feature of SRIs, and currently has no specified
-semantics besides being `?`-separated. Use at your own risk, and probably
-avoid it if your integrity strings are meant to be used with browsers.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrityObj = ssri.fromData('foobarbaz', {
- algorithms: ['sha256', 'sha384', 'sha512']
-})
-integrityObj.toString({sep: '\n'})
-// ->
-// sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0=
-// sha384-irnCxQ0CfQhYGlVAUdwTPC9bF3+YWLxlaDGM4xbYminxpbXEq+D+2GCEBTxcjES9
-// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==
-```
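-
-`opts.options` metadata ends up appended to every generated entry (a small
-sketch; the `foo` and `bar` option strings are arbitrary):
-
-```javascript
-ssri.fromData('foobarbaz', {options: ['foo', 'bar']}).toString()
-// ->
-// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==?foo?bar
-```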
-
-#### <a name="from-stream"></a> `> ssri.fromStream(stream, [opts]) -> Promise<Integrity>`
-
-Returns a Promise of an Integrity object calculated by reading data from
-a given `stream`.
-
-It accepts both `opts.algorithms` and `opts.options`, which are documented as
-part of [`ssri.fromData`](#from-data).
-
-Additionally, `opts.Promise` may be passed in to inject a Promise library of
-choice. By default, ssri will use Node's built-in Promises.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-ssri.fromStream(fs.createReadStream('index.js'), {
- algorithms: ['sha1', 'sha512']
-}).then(integrity => {
- return ssri.checkStream(fs.createReadStream('index.js'), integrity)
-}) // succeeds
-```
-
-#### <a name="create"></a> `> ssri.create([opts]) -> <Hash>`
-
-Returns a Hash object with `update(<Buffer or string>[,enc])` and `digest()` methods.
-
-The Hash object provides the same methods as [crypto class Hash](https://nodejs.org/dist/latest-v6.x/docs/api/crypto.html#crypto_class_hash).
-`digest()` accepts no arguments and returns an Integrity object calculated by reading data from
-calls to update.
-
-It accepts both `opts.algorithms` and `opts.options`, which are documented as
-part of [`ssri.fromData`](#from-data).
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrity = ssri.create().update('foobarbaz').digest()
-integrity.toString()
-// ->
-// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==
-```
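-
-Since `update()` returns the hasher itself, calls can be chained to feed the
-data in chunks (a small sketch splitting the same input as above):
-
-```javascript
-ssri.create().update('foobar').update('baz').digest().toString()
-// ->
-// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==
-```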
-
-#### <a name="check-data"></a> `> ssri.checkData(data, sri, [opts]) -> Hash|false`
-
-Verifies `data` integrity against an `sri` argument. `data` may be either a
-`String` or a `Buffer`, and `sri` can be any subresource integrity
-representation that [`ssri.parse`](#parse) can handle.
-
-If verification succeeds, `checkData` will return the matching `Hash` (a truthy
-value). Otherwise, it will return `false`.
-
-If `opts.pickAlgorithm` is provided, it will be used by
-[`Integrity#pickAlgorithm`](#integrity-pick-algorithm) when deciding which of
-the available digests to match against.
-
-If `opts.error` is true, and verification fails, `checkData` will throw either
-an `EBADSIZE` or an `EINTEGRITY` error, instead of just returning false.
-
-##### Example
-
-```javascript
-const data = fs.readFileSync('index.js')
-ssri.checkData(data, ssri.fromData(data)) // -> 'sha512'
-ssri.checkData(data, 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0')
-ssri.checkData(data, 'sha1-BaDDigEST') // -> false
-ssri.checkData(data, 'sha1-BaDDigEST', {error: true}) // -> Error! EINTEGRITY
-```
-
-#### <a name="check-stream"></a> `> ssri.checkStream(stream, sri, [opts]) -> Promise<Hash>`
-
-Verifies the contents of `stream` against an `sri` argument. `stream` will be
-consumed in its entirety by this process. `sri` can be any subresource integrity
-representation that [`ssri.parse`](#parse) can handle.
-
-`checkStream` will return a Promise that either resolves to the
-`Hash` that succeeded verification, or, if the verification fails
-or an error happens with `stream`, the Promise will be rejected.
-
-If the Promise is rejected because verification failed, the returned error will
-have `err.code` as `EINTEGRITY`.
-
-If `opts.size` is given, it will be matched against the stream size. An error
-with `err.code` `EBADSIZE` will be returned by a rejection if the expected size
-and actual size fail to match.
-
-If `opts.pickAlgorithm` is provided, it will be used by
-[`Integrity#pickAlgorithm`](#integrity-pick-algorithm) when deciding which of
-the available digests to match against.
-
-##### Example
-
-```javascript
-const integrity = ssri.fromData(fs.readFileSync('index.js'))
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- integrity
-)
-// ->
-// Promise<{
-// algorithm: 'sha512',
-// digest: 'sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1'
-// }>
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0'
-) // -> Promise<Hash>
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- 'sha1-BaDDigEST'
-) // -> Promise<Error<{code: 'EINTEGRITY'}>>
-```
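-
-Size checking can be combined with integrity checking, building on the example
-above (a minimal sketch; the `size` value here is deliberately wrong for the
-file, so the Promise rejects):
-
-```javascript
-ssri.checkStream(
-  fs.createReadStream('index.js'),
-  integrity,
-  {size: 1}
-) // -> Promise<Error<{code: 'EBADSIZE'}>>
-```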
-
-#### <a name="integrity-stream"></a> `> integrityStream([opts]) -> IntegrityStream`
-
-Returns a `Transform` stream that data can be piped through in order to generate
-and optionally check data integrity for piped data. When the stream completes
-successfully, it emits `size` and `integrity` events, containing the total
-number of bytes processed and a calculated `Integrity` instance based on stream
-data, respectively.
-
-If `opts.algorithms` is passed in, the listed algorithms will be calculated when
-generating the final `Integrity` instance. The default is `['sha512']`.
-
-If `opts.single` is passed in, a single `Hash` instance will be returned.
-
-If `opts.integrity` is passed in, it should be an `integrity` value understood
-by [`parse`](#parse) that the stream will check the data against. If
-verification succeeds, the integrity stream will emit a `verified` event whose
-value is a single `Hash` object that is the one that succeeded verification. If
-verification fails, the stream will error with an `EINTEGRITY` error code.
-
-If `opts.size` is given, it will be matched against the stream size. An error
-with `err.code` `EBADSIZE` will be emitted by the stream if the expected size
-and actual size fail to match.
-
-If `opts.pickAlgorithm` is provided, it will be passed two algorithms as
-arguments. ssri will prioritize whichever of the two algorithms is returned by
-this function. Note that the function may be called multiple times, and it
-**must** return one of the two algorithms provided. By default, ssri will make
-a best-effort to pick the strongest/most reliable of the given algorithms. It
-may intentionally deprioritize algorithms with known vulnerabilities.
-
-##### Example
-
-```javascript
-const integrity = ssri.fromData(fs.readFileSync('index.js'))
-fs.createReadStream('index.js')
-.pipe(ssri.integrityStream({integrity}))
-```
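-
-The emitted events can be used to collect the results as data flows through (a
-minimal sketch building on the `integrity` value from the example above):
-
-```javascript
-const check = ssri.integrityStream({integrity})
-check.on('size', size => console.log(`${size} bytes processed`))
-check.on('integrity', sri => console.log(`calculated: ${sri}`))
-check.on('verified', hash => console.log(`matched the ${hash.algorithm} digest`))
-check.on('error', err => console.error(err.code)) // EINTEGRITY or EBADSIZE
-fs.createReadStream('index.js').pipe(check).resume() // keep the stream flowing
-```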
diff --git a/node_modules/libnpmpublish/node_modules/ssri/index.js b/node_modules/libnpmpublish/node_modules/ssri/index.js
deleted file mode 100644
index e102892b0..000000000
--- a/node_modules/libnpmpublish/node_modules/ssri/index.js
+++ /dev/null
@@ -1,395 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const figgyPudding = require('figgy-pudding')
-const Transform = require('stream').Transform
-
-const SPEC_ALGORITHMS = ['sha256', 'sha384', 'sha512']
-
-const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i
-const SRI_REGEX = /^([^-]+)-([^?]+)([?\S*]*)$/
-const STRICT_SRI_REGEX = /^([^-]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)*$/
-const VCHAR_REGEX = /^[\x21-\x7E]+$/
-
-const SsriOpts = figgyPudding({
- algorithms: {default: ['sha512']},
- error: {default: false},
- integrity: {},
- options: {default: []},
- pickAlgorithm: {default: () => getPrioritizedHash},
- Promise: {default: () => Promise},
- sep: {default: ' '},
- single: {default: false},
- size: {},
- strict: {default: false}
-})
-
-class Hash {
- get isHash () { return true }
- constructor (hash, opts) {
- opts = SsriOpts(opts)
- const strict = !!opts.strict
- this.source = hash.trim()
- // 3.1. Integrity metadata (called "Hash" by ssri)
- // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description
- const match = this.source.match(
- strict
- ? STRICT_SRI_REGEX
- : SRI_REGEX
- )
- if (!match) { return }
- if (strict && !SPEC_ALGORITHMS.some(a => a === match[1])) { return }
- this.algorithm = match[1]
- this.digest = match[2]
-
- const rawOpts = match[3]
- this.options = rawOpts ? rawOpts.slice(1).split('?') : []
- }
- hexDigest () {
- return this.digest && Buffer.from(this.digest, 'base64').toString('hex')
- }
- toJSON () {
- return this.toString()
- }
- toString (opts) {
- opts = SsriOpts(opts)
- if (opts.strict) {
-      // Strict mode enforces the standard as closely to the letter as
-      // it can.
- if (!(
- // The spec has very restricted productions for algorithms.
- // https://www.w3.org/TR/CSP2/#source-list-syntax
- SPEC_ALGORITHMS.some(x => x === this.algorithm) &&
- // Usually, if someone insists on using a "different" base64, we
-        // leave it as-is, since there are multiple standards, and the
-        // specified one is not a URL-safe variant.
- // https://www.w3.org/TR/CSP2/#base64_value
- this.digest.match(BASE64_REGEX) &&
- // Option syntax is strictly visual chars.
- // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression
- // https://tools.ietf.org/html/rfc5234#appendix-B.1
- (this.options || []).every(opt => opt.match(VCHAR_REGEX))
- )) {
- return ''
- }
- }
- const options = this.options && this.options.length
- ? `?${this.options.join('?')}`
- : ''
- return `${this.algorithm}-${this.digest}${options}`
- }
-}
-
-class Integrity {
- get isIntegrity () { return true }
- toJSON () {
- return this.toString()
- }
- toString (opts) {
- opts = SsriOpts(opts)
- let sep = opts.sep || ' '
- if (opts.strict) {
- // Entries must be separated by whitespace, according to spec.
- sep = sep.replace(/\S+/g, ' ')
- }
- return Object.keys(this).map(k => {
- return this[k].map(hash => {
- return Hash.prototype.toString.call(hash, opts)
- }).filter(x => x.length).join(sep)
- }).filter(x => x.length).join(sep)
- }
- concat (integrity, opts) {
- opts = SsriOpts(opts)
- const other = typeof integrity === 'string'
- ? integrity
- : stringify(integrity, opts)
- return parse(`${this.toString(opts)} ${other}`, opts)
- }
- hexDigest () {
- return parse(this, {single: true}).hexDigest()
- }
- match (integrity, opts) {
- opts = SsriOpts(opts)
- const other = parse(integrity, opts)
- const algo = other.pickAlgorithm(opts)
- return (
- this[algo] &&
- other[algo] &&
- this[algo].find(hash =>
- other[algo].find(otherhash =>
- hash.digest === otherhash.digest
- )
- )
- ) || false
- }
- pickAlgorithm (opts) {
- opts = SsriOpts(opts)
- const pickAlgorithm = opts.pickAlgorithm
- const keys = Object.keys(this)
- if (!keys.length) {
- throw new Error(`No algorithms available for ${
- JSON.stringify(this.toString())
- }`)
- }
- return keys.reduce((acc, algo) => {
- return pickAlgorithm(acc, algo) || acc
- })
- }
-}
-
-module.exports.parse = parse
-function parse (sri, opts) {
- opts = SsriOpts(opts)
- if (typeof sri === 'string') {
- return _parse(sri, opts)
- } else if (sri.algorithm && sri.digest) {
- const fullSri = new Integrity()
- fullSri[sri.algorithm] = [sri]
- return _parse(stringify(fullSri, opts), opts)
- } else {
- return _parse(stringify(sri, opts), opts)
- }
-}
-
-function _parse (integrity, opts) {
- // 3.4.3. Parse metadata
- // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
- if (opts.single) {
- return new Hash(integrity, opts)
- }
- return integrity.trim().split(/\s+/).reduce((acc, string) => {
- const hash = new Hash(string, opts)
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-}
-
-module.exports.stringify = stringify
-function stringify (obj, opts) {
- opts = SsriOpts(opts)
- if (obj.algorithm && obj.digest) {
- return Hash.prototype.toString.call(obj, opts)
- } else if (typeof obj === 'string') {
- return stringify(parse(obj, opts), opts)
- } else {
- return Integrity.prototype.toString.call(obj, opts)
- }
-}
-
-module.exports.fromHex = fromHex
-function fromHex (hexDigest, algorithm, opts) {
- opts = SsriOpts(opts)
- const optString = opts.options && opts.options.length
- ? `?${opts.options.join('?')}`
- : ''
- return parse(
- `${algorithm}-${
- Buffer.from(hexDigest, 'hex').toString('base64')
- }${optString}`, opts
- )
-}
-
-module.exports.fromData = fromData
-function fromData (data, opts) {
- opts = SsriOpts(opts)
- const algorithms = opts.algorithms
- const optString = opts.options && opts.options.length
- ? `?${opts.options.join('?')}`
- : ''
- return algorithms.reduce((acc, algo) => {
- const digest = crypto.createHash(algo).update(data).digest('base64')
- const hash = new Hash(
- `${algo}-${digest}${optString}`,
- opts
- )
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-}
-
-module.exports.fromStream = fromStream
-function fromStream (stream, opts) {
- opts = SsriOpts(opts)
- const P = opts.Promise || Promise
- const istream = integrityStream(opts)
- return new P((resolve, reject) => {
- stream.pipe(istream)
- stream.on('error', reject)
- istream.on('error', reject)
- let sri
- istream.on('integrity', s => { sri = s })
- istream.on('end', () => resolve(sri))
- istream.on('data', () => {})
- })
-}
-
-module.exports.checkData = checkData
-function checkData (data, sri, opts) {
- opts = SsriOpts(opts)
- sri = parse(sri, opts)
- if (!Object.keys(sri).length) {
- if (opts.error) {
- throw Object.assign(
- new Error('No valid integrity hashes to check against'), {
- code: 'EINTEGRITY'
- }
- )
- } else {
- return false
- }
- }
- const algorithm = sri.pickAlgorithm(opts)
- const digest = crypto.createHash(algorithm).update(data).digest('base64')
- const newSri = parse({algorithm, digest})
- const match = newSri.match(sri, opts)
- if (match || !opts.error) {
- return match
- } else if (typeof opts.size === 'number' && (data.length !== opts.size)) {
- const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`)
- err.code = 'EBADSIZE'
- err.found = data.length
- err.expected = opts.size
- err.sri = sri
- throw err
- } else {
- const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`)
- err.code = 'EINTEGRITY'
- err.found = newSri
- err.expected = sri
- err.algorithm = algorithm
- err.sri = sri
- throw err
- }
-}
-
-module.exports.checkStream = checkStream
-function checkStream (stream, sri, opts) {
- opts = SsriOpts(opts)
- const P = opts.Promise || Promise
- const checker = integrityStream(opts.concat({
- integrity: sri
- }))
- return new P((resolve, reject) => {
- stream.pipe(checker)
- stream.on('error', reject)
- checker.on('error', reject)
- let sri
- checker.on('verified', s => { sri = s })
- checker.on('end', () => resolve(sri))
- checker.on('data', () => {})
- })
-}
-
-module.exports.integrityStream = integrityStream
-function integrityStream (opts) {
- opts = SsriOpts(opts)
- // For verification
- const sri = opts.integrity && parse(opts.integrity, opts)
- const goodSri = sri && Object.keys(sri).length
- const algorithm = goodSri && sri.pickAlgorithm(opts)
- const digests = goodSri && sri[algorithm]
- // Calculating stream
- const algorithms = Array.from(
- new Set(opts.algorithms.concat(algorithm ? [algorithm] : []))
- )
- const hashes = algorithms.map(crypto.createHash)
- let streamSize = 0
- const stream = new Transform({
- transform (chunk, enc, cb) {
- streamSize += chunk.length
- hashes.forEach(h => h.update(chunk, enc))
- cb(null, chunk, enc)
- }
- }).on('end', () => {
- const optString = (opts.options && opts.options.length)
- ? `?${opts.options.join('?')}`
- : ''
- const newSri = parse(hashes.map((h, i) => {
- return `${algorithms[i]}-${h.digest('base64')}${optString}`
- }).join(' '), opts)
- // Integrity verification mode
- const match = goodSri && newSri.match(sri, opts)
- if (typeof opts.size === 'number' && streamSize !== opts.size) {
- const err = new Error(`stream size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${streamSize}`)
- err.code = 'EBADSIZE'
- err.found = streamSize
- err.expected = opts.size
- err.sri = sri
- stream.emit('error', err)
- } else if (opts.integrity && !match) {
- const err = new Error(`${sri} integrity checksum failed when using ${algorithm}: wanted ${digests} but got ${newSri}. (${streamSize} bytes)`)
- err.code = 'EINTEGRITY'
- err.found = newSri
- err.expected = digests
- err.algorithm = algorithm
- err.sri = sri
- stream.emit('error', err)
- } else {
- stream.emit('size', streamSize)
- stream.emit('integrity', newSri)
- match && stream.emit('verified', match)
- }
- })
- return stream
-}
-
-module.exports.create = createIntegrity
-function createIntegrity (opts) {
- opts = SsriOpts(opts)
- const algorithms = opts.algorithms
- const optString = opts.options.length
- ? `?${opts.options.join('?')}`
- : ''
-
- const hashes = algorithms.map(crypto.createHash)
-
- return {
- update: function (chunk, enc) {
- hashes.forEach(h => h.update(chunk, enc))
- return this
- },
- digest: function (enc) {
- const integrity = algorithms.reduce((acc, algo) => {
- const digest = hashes.shift().digest('base64')
- const hash = new Hash(
- `${algo}-${digest}${optString}`,
- opts
- )
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-
- return integrity
- }
- }
-}
-
-const NODE_HASHES = new Set(crypto.getHashes())
-
-// This is a Best Effort™ at a reasonable priority for hash algos
-const DEFAULT_PRIORITY = [
- 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
- // TODO - it's unclear _which_ of these Node will actually use as its name
- // for the algorithm, so we guesswork it based on the OpenSSL names.
- 'sha3',
- 'sha3-256', 'sha3-384', 'sha3-512',
- 'sha3_256', 'sha3_384', 'sha3_512'
-].filter(algo => NODE_HASHES.has(algo))
-
-function getPrioritizedHash (algo1, algo2) {
- return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase())
- ? algo1
- : algo2
-}
diff --git a/node_modules/libnpmpublish/node_modules/ssri/package.json b/node_modules/libnpmpublish/node_modules/ssri/package.json
deleted file mode 100644
index 65546f2b2..000000000
--- a/node_modules/libnpmpublish/node_modules/ssri/package.json
+++ /dev/null
@@ -1,89 +0,0 @@
-{
- "_from": "ssri@^6.0.1",
- "_id": "ssri@6.0.1",
- "_inBundle": false,
- "_integrity": "sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA==",
- "_location": "/libnpmpublish/ssri",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "ssri@^6.0.1",
- "name": "ssri",
- "escapedName": "ssri",
- "rawSpec": "^6.0.1",
- "saveSpec": null,
- "fetchSpec": "^6.0.1"
- },
- "_requiredBy": [
- "/libnpmpublish"
- ],
- "_resolved": "https://registry.npmjs.org/ssri/-/ssri-6.0.1.tgz",
- "_shasum": "2a3c41b28dd45b62b63676ecb74001265ae9edd8",
- "_spec": "ssri@^6.0.1",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libnpmpublish",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/zkat/ssri/issues"
- },
- "bundleDependencies": false,
- "config": {
- "nyc": {
- "exclude": [
- "node_modules/**",
- "test/**"
- ]
- }
- },
- "dependencies": {
- "figgy-pudding": "^3.5.1"
- },
- "deprecated": false,
- "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.",
- "devDependencies": {
- "nyc": "^11.4.1",
- "standard": "^10.0.3",
- "standard-version": "^4.3.0",
- "tap": "^11.1.0",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "*.js"
- ],
- "homepage": "https://github.com/zkat/ssri#readme",
- "keywords": [
- "w3c",
- "web",
- "security",
- "integrity",
- "checksum",
- "hashing",
- "subresource integrity",
- "sri",
- "sri hash",
- "sri string",
- "sri generator",
- "html"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "ssri",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/zkat/ssri.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --coverage test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "6.0.1"
-}
diff --git a/node_modules/libnpmpublish/package.json b/node_modules/libnpmpublish/package.json
index 9810b4080..f0c8a7f85 100644
--- a/node_modules/libnpmpublish/package.json
+++ b/node_modules/libnpmpublish/package.json
@@ -1,84 +1,86 @@
{
- "_from": "libnpmpublish@1.1.2",
- "_id": "libnpmpublish@1.1.2",
+ "_from": "libnpmpublish@3.0.1",
+ "_id": "libnpmpublish@3.0.1",
"_inBundle": false,
- "_integrity": "sha512-2yIwaXrhTTcF7bkJKIKmaCV9wZOALf/gsTDxVSu/Gu/6wiG3fA8ce8YKstiWKTxSFNC0R7isPUb6tXTVFZHt2g==",
+ "_integrity": "sha512-gpd+Ql3cA73MMN8tZa1ao/bfF2/An+B3JtP7SDbJTRrfwre9C/ZZZ4+ejtqBSopnLXlWujr2MVVE4FY1S+jlFg==",
"_location": "/libnpmpublish",
- "_phantomChildren": {
- "JSONStream": "1.3.5",
- "bluebird": "3.5.5",
- "figgy-pudding": "3.5.1",
- "hosted-git-info": "2.8.5",
- "lru-cache": "5.1.1",
- "make-fetch-happen": "5.0.2",
- "osenv": "0.1.5",
- "validate-npm-package-name": "3.0.0"
- },
+ "_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
- "raw": "libnpmpublish@1.1.2",
+ "raw": "libnpmpublish@3.0.1",
"name": "libnpmpublish",
"escapedName": "libnpmpublish",
- "rawSpec": "1.1.2",
+ "rawSpec": "3.0.1",
"saveSpec": null,
- "fetchSpec": "1.1.2"
+ "fetchSpec": "3.0.1"
},
"_requiredBy": [
"#USER",
- "/",
- "/libnpm"
+ "/"
],
- "_resolved": "https://registry.npmjs.org/libnpmpublish/-/libnpmpublish-1.1.2.tgz",
- "_shasum": "4201cfc4a69c44e6f454ec548fa1cd90f10df0a0",
- "_spec": "libnpmpublish@1.1.2",
- "_where": "/Users/mperrotte/npminc/cli",
+ "_resolved": "https://registry.npmjs.org/libnpmpublish/-/libnpmpublish-3.0.1.tgz",
+ "_shasum": "f7b3094b24a68edf9a044780dc4b4158c45e774b",
+ "_spec": "libnpmpublish@3.0.1",
+ "_where": "/Users/claudiahdz/npm/cli",
"author": {
- "name": "Kat Marchán",
- "email": "kzm@zkat.tech"
+ "name": "npm Inc.",
+ "email": "support@npmjs.com"
},
"bugs": {
"url": "https://github.com/npm/libnpmpublish/issues"
},
"bundleDependencies": false,
+ "contributors": [
+ {
+ "name": "Kat Marchán",
+ "email": "kzm@zkat.tech"
+ },
+ {
+ "name": "Claudia Hernández",
+ "email": "claudia@npmjs.com"
+ }
+ ],
"dependencies": {
- "aproba": "^2.0.0",
- "figgy-pudding": "^3.5.1",
- "get-stream": "^4.0.0",
+ "libnpmpack": "^2.0.0",
"lodash.clonedeep": "^4.5.0",
- "normalize-package-data": "^2.4.0",
- "npm-package-arg": "^6.1.0",
- "npm-registry-fetch": "^4.0.0",
- "semver": "^5.5.1",
- "ssri": "^6.0.1"
+ "normalize-package-data": "^2.5.0",
+ "npm-package-arg": "^8.0.0",
+ "npm-registry-fetch": "^8.0.0",
+ "semver": "^7.1.3",
+ "ssri": "^8.0.0"
},
"deprecated": false,
"description": "Programmatic API for the bits behind npm publish and unpublish",
"devDependencies": {
- "bluebird": "^3.5.1",
- "nock": "^9.6.1",
- "standard": "*",
- "standard-version": "*",
- "tap": "*",
- "tar-stream": "^1.6.1",
- "weallbehave": "*",
- "weallcontribute": "*"
+ "nock": "^12.0.2",
+ "standard": "^14.3.1",
+ "tap": "^14.10.6"
+ },
+ "engines": {
+ "node": ">=10"
},
+ "files": [
+ "*.js"
+ ],
"homepage": "https://npmjs.com/package/libnpmpublish",
"license": "ISC",
+ "main": "index.js",
"name": "libnpmpublish",
"repository": {
"type": "git",
"url": "git+https://github.com/npm/libnpmpublish.git"
},
"scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --100 test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
+ "lint": "standard",
+ "posttest": "npm run lint",
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags",
+ "preversion": "npm test",
+ "test": "tap"
+ },
+ "tap": {
+ "check-coverage": true
},
- "version": "1.1.2"
+ "version": "3.0.1"
}
diff --git a/node_modules/libnpmpublish/publish.js b/node_modules/libnpmpublish/publish.js
index de5af4f5d..8a382e4ad 100644
--- a/node_modules/libnpmpublish/publish.js
+++ b/node_modules/libnpmpublish/publish.js
@@ -1,96 +1,98 @@
'use strict'
-const cloneDeep = require('lodash.clonedeep')
-const figgyPudding = require('figgy-pudding')
const { fixer } = require('normalize-package-data')
-const getStream = require('get-stream')
-const npa = require('npm-package-arg')
-const npmAuth = require('npm-registry-fetch/auth.js')
const npmFetch = require('npm-registry-fetch')
+const cloneDeep = require('lodash.clonedeep')
+const npa = require('npm-package-arg')
+const pack = require('libnpmpack')
const semver = require('semver')
+const { URL } = require('url')
+const util = require('util')
const ssri = require('ssri')
-const url = require('url')
-const validate = require('aproba')
-const PublishConfig = figgyPudding({
- access: {},
- algorithms: { default: ['sha512'] },
- npmVersion: {},
- tag: { default: 'latest' },
- Promise: { default: () => Promise }
-})
+const statAsync = util.promisify(require('fs').stat)
module.exports = publish
-function publish (manifest, tarball, opts) {
- opts = PublishConfig(opts)
- return new opts.Promise(resolve => resolve()).then(() => {
- validate('OSO|OOO', [manifest, tarball, opts])
- if (manifest.private) {
- throw Object.assign(new Error(
- 'This package has been marked as private\n' +
- "Remove the 'private' field from the package.json to publish it."
- ), { code: 'EPRIVATE' })
- }
- const spec = npa.resolve(manifest.name, manifest.version)
- // NOTE: spec is used to pick the appropriate registry/auth combo.
- opts = opts.concat(manifest.publishConfig, { spec })
- const reg = npmFetch.pickRegistry(spec, opts)
- const auth = npmAuth(reg, opts)
- const pubManifest = patchedManifest(spec, auth, manifest, opts)
-
- // registry-frontdoor cares about the access level, which is only
- // configurable for scoped packages
- if (!spec.scope && opts.access === 'restricted') {
- throw Object.assign(
- new Error("Can't restrict access to unscoped packages."),
- { code: 'EUNSCOPED' }
- )
- }
+async function publish (folder, manifest, opts) {
+ if (manifest.private) {
+ throw Object.assign(
+ new Error(
+ `This package has been marked as private\n
+ Remove the 'private' field from the package.json to publish it.`
+ ),
+ { code: 'EPRIVATE' }
+ )
+ }
- return slurpTarball(tarball, opts).then(tardata => {
- const metadata = buildMetadata(
- spec, auth, reg, pubManifest, tardata, opts
- )
- return npmFetch(spec.escapedName, opts.concat({
- method: 'PUT',
- body: metadata,
- ignoreBody: true
- })).catch(err => {
- if (err.code !== 'E409') { throw err }
- return npmFetch.json(spec.escapedName, opts.concat({
- query: { write: true }
- })).then(
- current => patchMetadata(current, metadata, opts)
- ).then(newMetadata => {
- return npmFetch(spec.escapedName, opts.concat({
- method: 'PUT',
- body: newMetadata,
- ignoreBody: true
- }))
- })
- })
+ // spec is used to pick the appropriate registry/auth combo
+ const spec = npa.resolve(manifest.name, manifest.version)
+ opts = {
+ defaultTag: 'latest',
+ // if scoped, restricted by default
+ access: spec.scope ? 'restricted' : 'public',
+ algorithms: ['sha512'],
+ ...opts,
+ spec
+ }
+
+ const stat = await statAsync(folder)
+ // checks if it's a dir
+ if (!stat.isDirectory()) {
+ throw Object.assign(
+ new Error('not a directory'),
+ { code: 'ENOTDIR' }
+ )
+ }
+
+ const reg = npmFetch.pickRegistry(spec, opts)
+ const pubManifest = patchManifest(manifest, opts)
+
+ // registry-frontdoor cares about the access level,
+ // which is only configurable for scoped packages
+ if (!spec.scope && opts.access === 'restricted') {
+ throw Object.assign(
+ new Error("Can't restrict access to unscoped packages."),
+ { code: 'EUNSCOPED' }
+ )
+ }
+
+ const tarballData = await pack(`file:${folder}`, { ...opts })
+ const metadata = buildMetadata(reg, pubManifest, tarballData, opts)
+
+ try {
+ return await npmFetch(spec.escapedName, {
+ ...opts,
+ method: 'PUT',
+ body: metadata,
+ ignoreBody: true
})
- }).then(() => true)
+ } catch (err) {
+ if (err.code !== 'E409') { throw err }
+ // if E409, we attempt exactly ONE retry, to protect us
+ // against malicious activity like trying to publish
+ // a bunch of new versions of a package at the same time
+ // and/or spamming the registry
+ const current = await npmFetch.json(spec.escapedName, {
+ ...opts,
+ query: { write: true }
+ })
+ const newMetadata = patchMetadata(current, metadata, opts)
+ return npmFetch(spec.escapedName, {
+ ...opts,
+ method: 'PUT',
+ body: newMetadata,
+ ignoreBody: true
+ })
+ }
}
-function patchedManifest (spec, auth, base, opts) {
- const manifest = cloneDeep(base)
+function patchManifest (_manifest, opts) {
+ const { npmVersion } = opts
+ const manifest = cloneDeep(_manifest)
+
manifest._nodeVersion = process.versions.node
- if (opts.npmVersion) {
- manifest._npmVersion = opts.npmVersion
- }
- if (auth.username || auth.email) {
- // NOTE: This is basically pointless, but reproduced because it's what
- // legacy does: tl;dr `auth.username` and `auth.email` are going to be
- // undefined in any auth situation that uses tokens instead of plain
- // auth. I can only assume some registries out there decided that
- // _npmUser would be of any use to them, but _npmUser in packuments
- // currently gets filled in by the npm registry itself, based on auth
- // information.
- manifest._npmUser = {
- name: auth.username,
- email: auth.email
- }
+ if (npmVersion) {
+ manifest._npmVersion = npmVersion
}
fixer.fixNameField(manifest, { strict: true, allowLegacyCase: true })
@@ -105,53 +107,52 @@ function patchedManifest (spec, auth, base, opts) {
return manifest
}
-function buildMetadata (spec, auth, registry, manifest, tardata, opts) {
+function buildMetadata (registry, manifest, tarballData, opts) {
+ const { access, defaultTag, algorithms } = opts
const root = {
_id: manifest.name,
name: manifest.name,
description: manifest.description,
'dist-tags': {},
versions: {},
+ access,
readme: manifest.readme || ''
}
- if (opts.access) root.access = opts.access
-
- if (!auth.token) {
- root.maintainers = [{ name: auth.username, email: auth.email }]
- manifest.maintainers = JSON.parse(JSON.stringify(root.maintainers))
- }
-
- root.versions[ manifest.version ] = manifest
- const tag = manifest.tag || opts.tag
+ root.versions[manifest.version] = manifest
+ const tag = manifest.tag || defaultTag
root['dist-tags'][tag] = manifest.version
- const tbName = manifest.name + '-' + manifest.version + '.tgz'
- const tbURI = manifest.name + '/-/' + tbName
- const integrity = ssri.fromData(tardata, {
- algorithms: [...new Set(['sha1'].concat(opts.algorithms))]
+ const tarballName = `${manifest.name}-${manifest.version}.tgz`
+ const tarballURI = `${manifest.name}/-/${tarballName}`
+ const integrity = ssri.fromData(tarballData, {
+ algorithms: [...new Set(['sha1'].concat(algorithms))]
})
- manifest._id = manifest.name + '@' + manifest.version
- manifest.dist = manifest.dist || {}
+ manifest._id = `${manifest.name}@${manifest.version}`
+ manifest.dist = { ...manifest.dist }
// Don't bother having sha1 in the actual integrity field
- manifest.dist.integrity = integrity['sha512'][0].toString()
+ manifest.dist.integrity = integrity.sha512[0].toString()
// Legacy shasum support
- manifest.dist.shasum = integrity['sha1'][0].hexDigest()
- manifest.dist.tarball = url.resolve(registry, tbURI)
+ manifest.dist.shasum = integrity.sha1[0].hexDigest()
+
+ // NB: the CLI always fetches via HTTPS if the registry is HTTPS,
+ // regardless of what's here. This makes it so that installing
+ // from an HTTP-only mirror doesn't cause problems, though.
+ manifest.dist.tarball = new URL(tarballURI, registry).href
.replace(/^https:\/\//, 'http://')
root._attachments = {}
- root._attachments[ tbName ] = {
- 'content_type': 'application/octet-stream',
- 'data': tardata.toString('base64'),
- 'length': tardata.length
+ root._attachments[tarballName] = {
+ content_type: 'application/octet-stream',
+ data: tarballData.toString('base64'),
+ length: tarballData.length
}
return root
}
-function patchMetadata (current, newData, opts) {
+function patchMetadata (current, newData) {
const curVers = Object.keys(current.versions || {}).map(v => {
return semver.clean(v, true)
}).concat(Object.keys(current.time || {}).map(v => {
@@ -161,7 +162,15 @@ function patchMetadata (current, newData, opts) {
const newVersion = Object.keys(newData.versions)[0]
if (curVers.indexOf(newVersion) !== -1) {
- throw ConflictError(newData.name, newData.version)
+ const { name: pkgid, version } = newData
+ throw Object.assign(
+ new Error(
+ `Cannot publish ${pkgid}@${version} over existing version.`
+ ), {
+ code: 'EPUBLISHCONFLICT',
+ pkgid,
+ version
+ })
}
current.versions = current.versions || {}
@@ -178,41 +187,11 @@ function patchMetadata (current, newData, opts) {
}
break
- // ignore these
- case 'maintainers':
- break
-
// copy
default:
current[i] = newData[i]
}
}
- const maint = newData.maintainers && JSON.parse(JSON.stringify(newData.maintainers))
- newData.versions[newVersion].maintainers = maint
- return current
-}
-
-function slurpTarball (tarSrc, opts) {
- if (Buffer.isBuffer(tarSrc)) {
- return opts.Promise.resolve(tarSrc)
- } else if (typeof tarSrc === 'string') {
- return opts.Promise.resolve(Buffer.from(tarSrc, 'base64'))
- } else if (typeof tarSrc.pipe === 'function') {
- return getStream.buffer(tarSrc)
- } else {
- return opts.Promise.reject(Object.assign(
- new Error('invalid tarball argument. Must be a Buffer, a base64 string, or a binary stream'), {
- code: 'EBADTAR'
- }))
- }
-}
-function ConflictError (pkgid, version) {
- return Object.assign(new Error(
- `Cannot publish ${pkgid}@${version} over existing version.`
- ), {
- code: 'EPUBLISHCONFLICT',
- pkgid,
- version
- })
+ return current
}
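The publish.js rewrite above reduces to one pattern: try the PUT, and on an E409 conflict fetch the current packument with write: true to pick up its _rev, merge the new version into it, and PUT exactly one more time; any other error, including a second conflict from the retried PUT, propagates to the caller. A minimal standalone sketch of that pattern, using only the npm-registry-fetch calls that appear in the hunk — the merge helper is a simplified stand-in for patchMetadata and omits its EPUBLISHCONFLICT check:

    'use strict'
    const npmFetch = require('npm-registry-fetch')

    // Simplified stand-in for patchMetadata: graft the new version, dist-tags
    // and attachment onto the registry's current packument, which carries the
    // _rev that the second PUT needs.
    function mergeNewVersion (current, newData) {
      return {
        ...current,
        versions: { ...current.versions, ...newData.versions },
        'dist-tags': { ...current['dist-tags'], ...newData['dist-tags'] },
        _attachments: { ...current._attachments, ...newData._attachments }
      }
    }

    async function putWithOneRetry (escapedName, metadata, opts) {
      try {
        // First attempt: PUT the freshly built packument.
        return await npmFetch(escapedName, {
          ...opts,
          method: 'PUT',
          body: metadata,
          ignoreBody: true
        })
      } catch (err) {
        // Only a 409 conflict earns the single retry.
        if (err.code !== 'E409') { throw err }
        const current = await npmFetch.json(escapedName, {
          ...opts,
          query: { write: true }
        })
        return npmFetch(escapedName, {
          ...opts,
          method: 'PUT',
          body: mergeNewVersion(current, metadata),
          ignoreBody: true
        })
      }
    }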
diff --git a/node_modules/libnpmpublish/test/publish.js b/node_modules/libnpmpublish/test/publish.js
deleted file mode 100644
index 23eef2181..000000000
--- a/node_modules/libnpmpublish/test/publish.js
+++ /dev/null
@@ -1,1048 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const cloneDeep = require('lodash.clonedeep')
-const figgyPudding = require('figgy-pudding')
-const mockTar = require('./util/mock-tarball.js')
-const { PassThrough } = require('stream')
-const ssri = require('ssri')
-const { test } = require('tap')
-const tnock = require('./util/tnock.js')
-
-const publish = require('../publish.js')
-
-const OPTS = figgyPudding({ registry: {} })({
- registry: 'https://mock.reg/'
-})
-
-const REG = OPTS.registry
-
-test('basic publish', t => {
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- const shasum = crypto.createHash('sha1').update(tarData).digest('hex')
- const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] })
- const packument = {
- name: 'libnpmpublish',
- description: 'some stuff',
- readme: '',
- _id: 'libnpmpublish',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- _id: 'libnpmpublish@1.0.0',
- _nodeVersion: process.versions.node,
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz`
- }
- }
- },
- _attachments: {
- 'libnpmpublish-1.0.0.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }
- const srv = tnock(t, REG)
- srv.put('/libnpmpublish', body => {
- t.deepEqual(body, packument, 'posted packument matches expectations')
- return true
- }, {
- authorization: 'Bearer deadbeef'
- }).reply(201, {})
-
- return publish(manifest, tarData, OPTS.concat({
- token: 'deadbeef'
- })).then(ret => {
- t.ok(ret, 'publish succeeded')
- })
- })
-})
-
-test('scoped publish', t => {
- const manifest = {
- name: '@zkat/libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- const shasum = crypto.createHash('sha1').update(tarData).digest('hex')
- const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] })
- const packument = {
- name: '@zkat/libnpmpublish',
- description: 'some stuff',
- readme: '',
- _id: '@zkat/libnpmpublish',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- _id: '@zkat/libnpmpublish@1.0.0',
- _nodeVersion: process.versions.node,
- _npmVersion: '6.9.0',
- name: '@zkat/libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/@zkat/libnpmpublish/-/@zkat/libnpmpublish-1.0.0.tgz`
- }
- }
- },
- _attachments: {
- '@zkat/libnpmpublish-1.0.0.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }
- const srv = tnock(t, REG)
- srv.put('/@zkat%2flibnpmpublish', body => {
- t.deepEqual(body, packument, 'posted packument matches expectations')
- return true
- }, {
- authorization: 'Bearer deadbeef'
- }).reply(201, {})
-
- return publish(manifest, tarData, OPTS.concat({
- npmVersion: '6.9.0',
- token: 'deadbeef'
- })).then(() => {
- t.ok(true, 'publish succeeded')
- })
- })
-})
-
-test('retry after a conflict', t => {
- const REV = '72-47f2986bfd8e8b55068b204588bbf484'
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- const shasum = crypto.createHash('sha1').update(tarData).digest('hex')
- const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] })
- const basePackument = {
- name: 'libnpmpublish',
- description: 'some stuff',
- readme: '',
- _id: 'libnpmpublish',
- 'dist-tags': {},
- versions: {},
- _attachments: {}
- }
- const currentPackument = cloneDeep(Object.assign({}, basePackument, {
- time: {
- modified: new Date().toISOString(),
- created: new Date().toISOString(),
- '1.0.1': new Date().toISOString()
- },
- 'dist-tags': { latest: '1.0.1' },
- maintainers: [{ name: 'zkat', email: 'idk@idk.tech' }],
- versions: {
- '1.0.1': {
- _id: 'libnpmpublish@1.0.1',
- _nodeVersion: process.versions.node,
- _npmVersion: '6.9.0',
- name: 'libnpmpublish',
- version: '1.0.1',
- description: 'some stuff',
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.1.tgz`
- }
- }
- },
- _attachments: {
- 'libnpmpublish-1.0.1.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }))
- const newPackument = cloneDeep(Object.assign({}, basePackument, {
- 'dist-tags': { latest: '1.0.0' },
- maintainers: [{ name: 'other', email: 'other@idk.tech' }],
- versions: {
- '1.0.0': {
- _id: 'libnpmpublish@1.0.0',
- _nodeVersion: process.versions.node,
- _npmVersion: '6.9.0',
- _npmUser: {
- name: 'other',
- email: 'other@idk.tech'
- },
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- maintainers: [{ name: 'other', email: 'other@idk.tech' }],
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz`
- }
- }
- },
- _attachments: {
- 'libnpmpublish-1.0.0.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }))
- const mergedPackument = cloneDeep(Object.assign({}, basePackument, {
- time: currentPackument.time,
- 'dist-tags': { latest: '1.0.0' },
- maintainers: currentPackument.maintainers,
- versions: Object.assign({}, currentPackument.versions, newPackument.versions),
- _attachments: Object.assign({}, currentPackument._attachments, newPackument._attachments)
- }))
- const srv = tnock(t, REG)
- srv.put('/libnpmpublish', body => {
- t.notOk(body._rev, 'no _rev in initial post')
- t.deepEqual(body, newPackument, 'got conflicting packument')
- return true
- }).reply(409, { error: 'gimme _rev plz' })
- srv.get('/libnpmpublish?write=true').reply(200, Object.assign({
- _rev: REV
- }, currentPackument))
- srv.put('/libnpmpublish', body => {
- t.deepEqual(body, Object.assign({
- _rev: REV
- }, mergedPackument), 'posted packument includes _rev and a merged version')
- return true
- }).reply(201, {})
- return publish(manifest, tarData, OPTS.concat({
- npmVersion: '6.9.0',
- username: 'other',
- email: 'other@idk.tech'
- })).then(() => {
- t.ok(true, 'publish succeeded')
- })
- })
-})
-
-test('retry after a conflict -- no versions on remote', t => {
- const REV = '72-47f2986bfd8e8b55068b204588bbf484'
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- const shasum = crypto.createHash('sha1').update(tarData).digest('hex')
- const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] })
- const basePackument = {
- name: 'libnpmpublish',
- description: 'some stuff',
- readme: '',
- _id: 'libnpmpublish'
- }
- const currentPackument = cloneDeep(Object.assign({}, basePackument, {
- maintainers: [{ name: 'zkat', email: 'idk@idk.tech' }]
- }))
- const newPackument = cloneDeep(Object.assign({}, basePackument, {
- 'dist-tags': { latest: '1.0.0' },
- maintainers: [{ name: 'other', email: 'other@idk.tech' }],
- versions: {
- '1.0.0': {
- _id: 'libnpmpublish@1.0.0',
- _nodeVersion: process.versions.node,
- _npmVersion: '6.9.0',
- _npmUser: {
- name: 'other',
- email: 'other@idk.tech'
- },
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- maintainers: [{ name: 'other', email: 'other@idk.tech' }],
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz`
- }
- }
- },
- _attachments: {
- 'libnpmpublish-1.0.0.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }))
- const mergedPackument = cloneDeep(Object.assign({}, basePackument, {
- 'dist-tags': { latest: '1.0.0' },
- maintainers: currentPackument.maintainers,
- versions: Object.assign({}, currentPackument.versions, newPackument.versions),
- _attachments: Object.assign({}, currentPackument._attachments, newPackument._attachments)
- }))
- const srv = tnock(t, REG)
- srv.put('/libnpmpublish', body => {
- t.notOk(body._rev, 'no _rev in initial post')
- t.deepEqual(body, newPackument, 'got conflicting packument')
- return true
- }).reply(409, { error: 'gimme _rev plz' })
- srv.get('/libnpmpublish?write=true').reply(200, Object.assign({
- _rev: REV
- }, currentPackument))
- srv.put('/libnpmpublish', body => {
- t.deepEqual(body, Object.assign({
- _rev: REV
- }, mergedPackument), 'posted packument includes _rev and a merged version')
- return true
- }).reply(201, {})
- return publish(manifest, tarData, OPTS.concat({
- npmVersion: '6.9.0',
- username: 'other',
- email: 'other@idk.tech'
- })).then(() => {
- t.ok(true, 'publish succeeded')
- })
- })
-})
-test('version conflict', t => {
- const REV = '72-47f2986bfd8e8b55068b204588bbf484'
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- const shasum = crypto.createHash('sha1').update(tarData).digest('hex')
- const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] })
- const basePackument = {
- name: 'libnpmpublish',
- description: 'some stuff',
- readme: '',
- _id: 'libnpmpublish',
- 'dist-tags': {},
- versions: {},
- _attachments: {}
- }
- const newPackument = cloneDeep(Object.assign({}, basePackument, {
- 'dist-tags': { latest: '1.0.0' },
- versions: {
- '1.0.0': {
- _id: 'libnpmpublish@1.0.0',
- _nodeVersion: process.versions.node,
- _npmVersion: '6.9.0',
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz`
- }
- }
- },
- _attachments: {
- 'libnpmpublish-1.0.0.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }))
- const srv = tnock(t, REG)
- srv.put('/libnpmpublish', body => {
- t.notOk(body._rev, 'no _rev in initial post')
- t.deepEqual(body, newPackument, 'got conflicting packument')
- return true
- }).reply(409, { error: 'gimme _rev plz' })
- srv.get('/libnpmpublish?write=true').reply(200, Object.assign({
- _rev: REV
- }, newPackument))
- return publish(manifest, tarData, OPTS.concat({
- npmVersion: '6.9.0',
- token: 'deadbeef'
- })).then(
- () => { throw new Error('should not succeed') },
- err => {
- t.equal(err.code, 'EPUBLISHCONFLICT', 'got publish conflict code')
- }
- )
- })
-})
-
-test('publish with basic auth', t => {
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- const shasum = crypto.createHash('sha1').update(tarData).digest('hex')
- const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] })
- const packument = {
- name: 'libnpmpublish',
- description: 'some stuff',
- readme: '',
- _id: 'libnpmpublish',
- 'dist-tags': {
- latest: '1.0.0'
- },
- maintainers: [{
- name: 'zkat',
- email: 'kat@example.tech'
- }],
- versions: {
- '1.0.0': {
- _id: 'libnpmpublish@1.0.0',
- _nodeVersion: process.versions.node,
- _npmVersion: '6.9.0',
- _npmUser: {
- name: 'zkat',
- email: 'kat@example.tech'
- },
- maintainers: [{
- name: 'zkat',
- email: 'kat@example.tech'
- }],
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz`
- }
- }
- },
- _attachments: {
- 'libnpmpublish-1.0.0.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }
- const srv = tnock(t, REG)
- srv.put('/libnpmpublish', body => {
- t.deepEqual(body, packument, 'posted packument matches expectations')
- return true
- }, {
- authorization: /^Basic /
- }).reply(201, {})
-
- return publish(manifest, tarData, OPTS.concat({
- npmVersion: '6.9.0',
- username: 'zkat',
- email: 'kat@example.tech'
- })).then(() => {
- t.ok(true, 'publish succeeded')
- })
- })
-})
-
-test('publish base64 string', t => {
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- const shasum = crypto.createHash('sha1').update(tarData).digest('hex')
- const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] })
- const packument = {
- name: 'libnpmpublish',
- description: 'some stuff',
- readme: '',
- _id: 'libnpmpublish',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- _id: 'libnpmpublish@1.0.0',
- _nodeVersion: process.versions.node,
- _npmVersion: '6.9.0',
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz`
- }
- }
- },
- _attachments: {
- 'libnpmpublish-1.0.0.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }
- const srv = tnock(t, REG)
- srv.put('/libnpmpublish', body => {
- t.deepEqual(body, packument, 'posted packument matches expectations')
- return true
- }, {
- authorization: 'Bearer deadbeef'
- }).reply(201, {})
-
- return publish(manifest, tarData.toString('base64'), OPTS.concat({
- npmVersion: '6.9.0',
- token: 'deadbeef'
- })).then(() => {
- t.ok(true, 'publish succeeded')
- })
- })
-})
-
-test('publish tar stream', t => {
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- const shasum = crypto.createHash('sha1').update(tarData).digest('hex')
- const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] })
- const packument = {
- name: 'libnpmpublish',
- description: 'some stuff',
- readme: '',
- _id: 'libnpmpublish',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- _id: 'libnpmpublish@1.0.0',
- _nodeVersion: process.versions.node,
- _npmVersion: '6.9.0',
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz`
- }
- }
- },
- _attachments: {
- 'libnpmpublish-1.0.0.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }
- const srv = tnock(t, REG)
- srv.put('/libnpmpublish', body => {
- t.deepEqual(body, packument, 'posted packument matches expectations')
- return true
- }, {
- authorization: 'Bearer deadbeef'
- }).reply(201, {})
-
- const stream = new PassThrough()
- setTimeout(() => stream.end(tarData), 0)
- return publish(manifest, stream, OPTS.concat({
- npmVersion: '6.9.0',
- token: 'deadbeef'
- })).then(() => {
- t.ok(true, 'publish succeeded')
- })
- })
-})
-
-test('refuse if package marked private', t => {
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- private: true
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- return publish(manifest, tarData, OPTS.concat({
- npmVersion: '6.9.0',
- token: 'deadbeef'
- })).then(
- () => { throw new Error('should not have succeeded') },
- err => {
- t.equal(err.code, 'EPRIVATE', 'got correct error code')
- }
- )
- })
-})
-
-test('publish includes access', t => {
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- const shasum = crypto.createHash('sha1').update(tarData).digest('hex')
- const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] })
- const packument = {
- name: 'libnpmpublish',
- description: 'some stuff',
- readme: '',
- access: 'public',
- _id: 'libnpmpublish',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- _id: 'libnpmpublish@1.0.0',
- _nodeVersion: process.versions.node,
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz`
- }
- }
- },
- _attachments: {
- 'libnpmpublish-1.0.0.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }
- const srv = tnock(t, REG)
- srv.put('/libnpmpublish', body => {
- t.deepEqual(body, packument, 'posted packument matches expectations')
- return true
- }, {
- authorization: 'Bearer deadbeef'
- }).reply(201, {})
-
- return publish(manifest, tarData, OPTS.concat({
- token: 'deadbeef',
- access: 'public'
- })).then(() => {
- t.ok(true, 'publish succeeded')
- })
- })
-})
-
-test('refuse if package is unscoped plus `restricted` access', t => {
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- return publish(manifest, tarData, OPTS.concat({
- npmVersion: '6.9.0',
- access: 'restricted'
- })).then(
- () => { throw new Error('should not have succeeded') },
- err => {
- t.equal(err.code, 'EUNSCOPED', 'got correct error code')
- }
- )
- })
-})
-
-test('refuse if tarball is wrong type', t => {
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return publish(manifest, { data: 42 }, OPTS.concat({
- npmVersion: '6.9.0',
- token: 'deadbeef'
- })).then(
- () => { throw new Error('should not have succeeded') },
- err => {
- t.equal(err.code, 'EBADTAR', 'got correct error code')
- }
- )
-})
-
-test('refuse if bad semver on manifest', t => {
- const manifest = {
- name: 'libnpmpublish',
- version: 'lmao',
- description: 'some stuff'
- }
- return publish(manifest, 'deadbeef', OPTS).then(
- () => { throw new Error('should not have succeeded') },
- err => {
- t.equal(err.code, 'EBADSEMVER', 'got correct error code')
- }
- )
-})
-
-test('other error code', t => {
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- const shasum = crypto.createHash('sha1').update(tarData).digest('hex')
- const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] })
- const packument = {
- name: 'libnpmpublish',
- description: 'some stuff',
- readme: '',
- _id: 'libnpmpublish',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- _id: 'libnpmpublish@1.0.0',
- _nodeVersion: process.versions.node,
- _npmVersion: '6.9.0',
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz`
- }
- }
- },
- _attachments: {
- 'libnpmpublish-1.0.0.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }
- const srv = tnock(t, REG)
- srv.put('/libnpmpublish', body => {
- t.deepEqual(body, packument, 'posted packument matches expectations')
- return true
- }, {
- authorization: 'Bearer deadbeef'
- }).reply(500, { error: 'go away' })
-
- return publish(manifest, tarData, OPTS.concat({
- npmVersion: '6.9.0',
- token: 'deadbeef'
- })).then(
- () => { throw new Error('should not succeed') },
- err => {
- t.match(err.message, /go away/, 'no retry on non-409')
- }
- )
- })
-})
-
-test('publish includes access', t => {
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- const shasum = crypto.createHash('sha1').update(tarData).digest('hex')
- const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] })
- const packument = {
- name: 'libnpmpublish',
- description: 'some stuff',
- readme: '',
- access: 'public',
- _id: 'libnpmpublish',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- _id: 'libnpmpublish@1.0.0',
- _nodeVersion: process.versions.node,
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz`
- }
- }
- },
- _attachments: {
- 'libnpmpublish-1.0.0.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }
- const srv = tnock(t, REG)
- srv.put('/libnpmpublish', body => {
- t.deepEqual(body, packument, 'posted packument matches expectations')
- return true
- }, {
- authorization: 'Bearer deadbeef'
- }).reply(201, {})
-
- return publish(manifest, tarData, OPTS.concat({
- token: 'deadbeef',
- access: 'public'
- })).then(() => {
- t.ok(true, 'publish succeeded')
- })
- })
-})
-
-test('publishConfig on manifest', t => {
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- publishConfig: {
- registry: REG
- }
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- const shasum = crypto.createHash('sha1').update(tarData).digest('hex')
- const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] })
- const packument = {
- name: 'libnpmpublish',
- description: 'some stuff',
- readme: '',
- _id: 'libnpmpublish',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- _id: 'libnpmpublish@1.0.0',
- _nodeVersion: process.versions.node,
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz`
- },
- publishConfig: {
- registry: REG
- }
- }
- },
- _attachments: {
- 'libnpmpublish-1.0.0.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }
- const srv = tnock(t, REG)
- srv.put('/libnpmpublish', body => {
- t.deepEqual(body, packument, 'posted packument matches expectations')
- return true
- }, {
- authorization: 'Bearer deadbeef'
- }).reply(201, {})
-
- return publish(manifest, tarData, { token: 'deadbeef' }).then(ret => {
- t.ok(ret, 'publish succeeded')
- })
- })
-})
-
-test('publish with encoded _auth', t => {
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- const shasum = crypto.createHash('sha1').update(tarData).digest('hex')
- const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] })
- const packument = {
- name: 'libnpmpublish',
- description: 'some stuff',
- readme: '',
- _id: 'libnpmpublish',
- 'dist-tags': {
- latest: '1.0.0'
- },
- maintainers: [
- { name: 'myuser', email: 'my@ema.il' }
- ],
- versions: {
- '1.0.0': {
- _id: 'libnpmpublish@1.0.0',
- _npmUser: {
- name: 'myuser',
- email: 'my@ema.il'
- },
- maintainers: [
- { name: 'myuser', email: 'my@ema.il' }
- ],
- _nodeVersion: process.versions.node,
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz`
- }
- }
- },
- _attachments: {
- 'libnpmpublish-1.0.0.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }
- const srv = tnock(t, REG)
- srv.put('/libnpmpublish', body => {
- t.deepEqual(body, packument, 'posted packument matches expectations')
- return true
- }, {
- authorization: 'Bearer deadbeef'
- }).reply(201, {})
-
- return publish(manifest, tarData, OPTS.concat({
- _auth: Buffer.from('myuser:mypassword', 'utf8').toString('base64'),
- email: 'my@ema.il'
- })).then(ret => {
- t.ok(ret, 'publish succeeded using _auth')
- })
- })
-})
-
-test('publish with 302 redirect', t => {
- const manifest = {
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff'
- }
- return mockTar({
- 'package.json': JSON.stringify(manifest),
- 'index.js': 'console.log("hello world")'
- }).then(tarData => {
- const shasum = crypto.createHash('sha1').update(tarData).digest('hex')
- const integrity = ssri.fromData(tarData, { algorithms: ['sha512'] })
- const packument = {
- name: 'libnpmpublish',
- description: 'some stuff',
- readme: '',
- _id: 'libnpmpublish',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- _id: 'libnpmpublish@1.0.0',
- _nodeVersion: process.versions.node,
- name: 'libnpmpublish',
- version: '1.0.0',
- description: 'some stuff',
- dist: {
- shasum,
- integrity: integrity.toString(),
- tarball: `http://mock.reg/libnpmpublish/-/libnpmpublish-1.0.0.tgz`
- }
- }
- },
- _attachments: {
- 'libnpmpublish-1.0.0.tgz': {
- 'content_type': 'application/octet-stream',
- data: tarData.toString('base64'),
- length: tarData.length
- }
- }
- }
- tnock(t, REG).put('/libnpmpublish').reply(302, '', {
- location: 'http://blah.net/libnpmpublish'
- })
- tnock(t, 'http://blah.net').put('/libnpmpublish', body => {
- t.deepEqual(body, packument, 'posted packument matches expectations')
- return true
- }, {
- authorization: 'Bearer deadbeef'
- }).reply(201, {})
-
- return publish(manifest, tarData, OPTS.concat({
- token: 'deadbeef'
- })).then(ret => {
- t.ok(ret, 'publish succeeded')
- })
- })
-})
diff --git a/node_modules/libnpmpublish/test/unpublish.js b/node_modules/libnpmpublish/test/unpublish.js
deleted file mode 100644
index 19ac464a3..000000000
--- a/node_modules/libnpmpublish/test/unpublish.js
+++ /dev/null
@@ -1,249 +0,0 @@
-'use strict'
-
-const figgyPudding = require('figgy-pudding')
-const test = require('tap').test
-const tnock = require('./util/tnock.js')
-
-const OPTS = figgyPudding({ registry: {} })({
- registry: 'https://mock.reg/'
-})
-
-const REG = OPTS.registry
-const REV = '72-47f2986bfd8e8b55068b204588bbf484'
-const unpub = require('../unpublish.js')
-
-test('basic test', t => {
- const doc = {
- _id: 'foo',
- _rev: REV,
- name: 'foo',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- name: 'foo',
- dist: {
- tarball: `${REG}/foo/-/foo-1.0.0.tgz`
- }
- }
- }
- }
- const srv = tnock(t, REG)
- srv.get('/foo?write=true').reply(200, doc)
- srv.delete(`/foo/-rev/${REV}`).reply(201)
- return unpub('foo', OPTS).then(ret => {
- t.ok(ret, 'foo was unpublished')
- })
-})
-
-test('scoped basic test', t => {
- const doc = {
- _id: '@foo/bar',
- _rev: REV,
- name: '@foo/bar',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- name: '@foo/bar',
- dist: {
- tarball: `${REG}/@foo/bar/-/foo-1.0.0.tgz`
- }
- }
- }
- }
- const srv = tnock(t, REG)
- srv.get('/@foo%2fbar?write=true').reply(200, doc)
- srv.delete(`/@foo%2fbar/-rev/${REV}`).reply(201)
- return unpub('@foo/bar', OPTS).then(() => {
- t.ok(true, 'foo was unpublished')
- })
-})
-
-test('unpublish specific, last version', t => {
- const doc = {
- _id: 'foo',
- _rev: REV,
- name: 'foo',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- name: 'foo',
- dist: {
- tarball: `${REG}/foo/-/foo-1.0.0.tgz`
- }
- }
- }
- }
- const srv = tnock(t, REG)
- srv.get('/foo?write=true').reply(200, doc)
- srv.delete(`/foo/-rev/${REV}`).reply(201)
- return unpub('foo@1.0.0', OPTS).then(() => {
- t.ok(true, 'foo was unpublished')
- })
-})
-
-test('unpublish specific version', t => {
- const doc = {
- _id: 'foo',
- _rev: REV,
- _revisions: [1, 2, 3],
- _attachments: [1, 2, 3],
- name: 'foo',
- 'dist-tags': {
- latest: '1.0.1'
- },
- versions: {
- '1.0.0': {
- name: 'foo',
- dist: {
- tarball: `${REG}/foo/-/foo-1.0.0.tgz`
- }
- },
- '1.0.1': {
- name: 'foo',
- dist: {
- tarball: `${REG}/foo/-/foo-1.0.1.tgz`
- }
- }
- }
- }
- const postEdit = {
- _id: 'foo',
- _rev: REV,
- name: 'foo',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- name: 'foo',
- dist: {
- tarball: `${REG}/foo/-/foo-1.0.0.tgz`
- }
- }
- }
- }
-
- const srv = tnock(t, REG)
- srv.get('/foo?write=true').reply(200, doc)
- srv.put(`/foo/-rev/${REV}`, postEdit).reply(200)
- srv.get('/foo?write=true').reply(200, postEdit)
- srv.delete(`/foo/-/foo-1.0.1.tgz/-rev/${REV}`).reply(200)
- return unpub('foo@1.0.1', OPTS).then(() => {
- t.ok(true, 'foo was unpublished')
- })
-})
-
-test('404 considered a success', t => {
- const srv = tnock(t, REG)
- srv.get('/foo?write=true').reply(404)
- return unpub('foo', OPTS).then(() => {
- t.ok(true, 'foo was unpublished')
- })
-})
-
-test('non-404 errors', t => {
- const srv = tnock(t, REG)
- srv.get('/foo?write=true').reply(500)
- return unpub('foo', OPTS).then(
- () => { throw new Error('should not have succeeded') },
- err => { t.equal(err.code, 'E500', 'got right error from server') }
- )
-})
-
-test('packument with missing versions unpublishes whole thing', t => {
- const doc = {
- _id: 'foo',
- _rev: REV,
- name: 'foo',
- 'dist-tags': {
- latest: '1.0.0'
- }
- }
- const srv = tnock(t, REG)
- srv.get('/foo?write=true').reply(200, doc)
- srv.delete(`/foo/-rev/${REV}`).reply(201)
- return unpub('foo@1.0.0', OPTS).then(() => {
- t.ok(true, 'foo was unpublished')
- })
-})
-
-test('packument with missing specific version assumed unpublished', t => {
- const doc = {
- _id: 'foo',
- _rev: REV,
- name: 'foo',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- name: 'foo',
- dist: {
- tarball: `${REG}/foo/-/foo-1.0.0.tgz`
- }
- }
- }
- }
- const srv = tnock(t, REG)
- srv.get('/foo?write=true').reply(200, doc)
- return unpub('foo@1.0.1', OPTS).then(() => {
- t.ok(true, 'foo was unpublished')
- })
-})
-
-test('unpublish specific version without dist-tag update', t => {
- const doc = {
- _id: 'foo',
- _rev: REV,
- _revisions: [1, 2, 3],
- _attachments: [1, 2, 3],
- name: 'foo',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- name: 'foo',
- dist: {
- tarball: `${REG}/foo/-/foo-1.0.0.tgz`
- }
- },
- '1.0.1': {
- name: 'foo',
- dist: {
- tarball: `${REG}/foo/-/foo-1.0.1.tgz`
- }
- }
- }
- }
- const postEdit = {
- _id: 'foo',
- _rev: REV,
- name: 'foo',
- 'dist-tags': {
- latest: '1.0.0'
- },
- versions: {
- '1.0.0': {
- name: 'foo',
- dist: {
- tarball: `${REG}/foo/-/foo-1.0.0.tgz`
- }
- }
- }
- }
- const srv = tnock(t, REG)
- srv.get('/foo?write=true').reply(200, doc)
- srv.put(`/foo/-rev/${REV}`, postEdit).reply(200)
- srv.get('/foo?write=true').reply(200, postEdit)
- srv.delete(`/foo/-/foo-1.0.1.tgz/-rev/${REV}`).reply(200)
- return unpub('foo@1.0.1', OPTS).then(() => {
- t.ok(true, 'foo was unpublished')
- })
-})
diff --git a/node_modules/libnpmpublish/test/util/mock-tarball.js b/node_modules/libnpmpublish/test/util/mock-tarball.js
deleted file mode 100644
index c6253cd21..000000000
--- a/node_modules/libnpmpublish/test/util/mock-tarball.js
+++ /dev/null
@@ -1,47 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const getStream = require('get-stream')
-const tar = require('tar-stream')
-const zlib = require('zlib')
-
-module.exports = makeTarball
-function makeTarball (files, opts) {
- opts = opts || {}
- const pack = tar.pack()
- Object.keys(files).forEach(function (filename) {
- const entry = files[filename]
- pack.entry({
- name: (opts.noPrefix ? '' : 'package/') + filename,
- type: entry.type,
- size: entry.size,
- mode: entry.mode,
- mtime: entry.mtime || new Date(0),
- linkname: entry.linkname,
- uid: entry.uid,
- gid: entry.gid,
- uname: entry.uname,
- gname: entry.gname
- }, typeof files[filename] === 'string'
- ? files[filename]
- : files[filename].data)
- })
- pack.finalize()
- return BB.try(() => {
- if (opts.stream && opts.gzip) {
- const gz = zlib.createGzip()
- pack.on('error', err => gz.emit('error', err)).pipe(gz)
- } else if (opts.stream) {
- return pack
- } else {
- return getStream.buffer(pack).then(ret => {
- if (opts.gzip) {
- return BB.fromNode(cb => zlib.gzip(ret, cb))
- } else {
- return ret
- }
- })
- }
- })
-}
diff --git a/node_modules/libnpmpublish/test/util/tnock.js b/node_modules/libnpmpublish/test/util/tnock.js
deleted file mode 100644
index 00b6e160e..000000000
--- a/node_modules/libnpmpublish/test/util/tnock.js
+++ /dev/null
@@ -1,12 +0,0 @@
-'use strict'
-
-const nock = require('nock')
-
-module.exports = tnock
-function tnock (t, host) {
- const server = nock(host)
- t.tearDown(function () {
- server.done()
- })
- return server
-}
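With the in-tree tests and their helpers (mock-tarball.js, tnock.js) removed, an equivalent check of the rewritten unpublish that follows can be stood up with nock and tap directly. A hedged sketch, assuming libnpmpublish@3 still exposes unpublish.js at the package root and accepts a plain options object; it reproduces the deleted "404 considered a success" case:

    'use strict'
    const nock = require('nock')
    const { test } = require('tap')

    // Assumed require path, mirroring the deleted ../unpublish.js tests.
    const unpublish = require('libnpmpublish/unpublish.js')

    const REG = 'https://mock.reg'

    test('404 from the registry counts as already unpublished', async t => {
      const srv = nock(REG).get('/foo?write=true').reply(404)
      t.equal(await unpublish('foo', { registry: `${REG}/` }), true,
        'resolves true instead of throwing')
      srv.done() // every mocked route was hit
    })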
diff --git a/node_modules/libnpmpublish/unpublish.js b/node_modules/libnpmpublish/unpublish.js
index d7d98243c..3fe386042 100644
--- a/node_modules/libnpmpublish/unpublish.js
+++ b/node_modules/libnpmpublish/unpublish.js
@@ -1,86 +1,100 @@
'use strict'
-const figgyPudding = require('figgy-pudding')
const npa = require('npm-package-arg')
const npmFetch = require('npm-registry-fetch')
const semver = require('semver')
-const url = require('url')
-
-const UnpublishConfig = figgyPudding({
- force: { default: false },
- Promise: { default: () => Promise }
-})
+const { URL } = require('url')
module.exports = unpublish
-function unpublish (spec, opts) {
- opts = UnpublishConfig(opts)
- return new opts.Promise(resolve => resolve()).then(() => {
- spec = npa(spec)
- // NOTE: spec is used to pick the appropriate registry/auth combo.
- opts = opts.concat({ spec })
+async function unpublish (spec, opts) {
+ spec = npa(spec)
+ // spec is used to pick the appropriate registry/auth combo.
+ opts = {
+ force: false,
+ ...opts,
+ spec
+ }
+
+ try {
const pkgUri = spec.escapedName
- return npmFetch.json(pkgUri, opts.concat({
+ const pkg = await npmFetch.json(pkgUri, {
+ ...opts,
query: { write: true }
- })).then(pkg => {
- if (!spec.rawSpec || spec.rawSpec === '*') {
- return npmFetch(`${pkgUri}/-rev/${pkg._rev}`, opts.concat({
- method: 'DELETE',
- ignoreBody: true
- }))
- } else {
- const version = spec.rawSpec
- const allVersions = pkg.versions || {}
- const versionPublic = allVersions[version]
- let dist
- if (versionPublic) {
- dist = allVersions[version].dist
- }
- delete allVersions[version]
- // if it was the only version, then delete the whole package.
- if (!Object.keys(allVersions).length) {
- return npmFetch(`${pkgUri}/-rev/${pkg._rev}`, opts.concat({
- method: 'DELETE',
- ignoreBody: true
- }))
- } else if (versionPublic) {
- const latestVer = pkg['dist-tags'].latest
- Object.keys(pkg['dist-tags']).forEach(tag => {
- if (pkg['dist-tags'][tag] === version) {
- delete pkg['dist-tags'][tag]
- }
- })
+ })
+
+ const version = spec.rawSpec
+ const allVersions = pkg.versions || {}
+ const versionData = allVersions[version]
+
+ const rawSpecs = (!spec.rawSpec || spec.rawSpec === '*')
+ const onlyVersion = Object.keys(allVersions).length === 1
+ const noVersions = !Object.keys(allVersions).length
+
+ // if the requested version is missing from the packument,
+ // assume it has already been unpublished
+ if (!versionData && !rawSpecs && !noVersions) {
+ return true
+ }
- if (latestVer === version) {
- pkg['dist-tags'].latest = Object.keys(
- allVersions
- ).sort(semver.compareLoose).pop()
- }
+ // unpublish all versions of a package:
+ // - no specs supplied "npm unpublish foo"
+ // - all specs ("*") "npm unpublish foo@*"
+ // - there was only one version
+ // - has no versions field on packument
+ if (rawSpecs || onlyVersion || noVersions) {
+ await npmFetch(`${pkgUri}/-rev/${pkg._rev}`, {
+ ...opts,
+ method: 'DELETE',
+ ignoreBody: true
+ })
+ return true
+ } else {
+ const dist = allVersions[version].dist
+ delete allVersions[version]
- delete pkg._revisions
- delete pkg._attachments
- // Update packument with removed versions
- return npmFetch(`${pkgUri}/-rev/${pkg._rev}`, opts.concat({
- method: 'PUT',
- body: pkg,
- ignoreBody: true
- })).then(() => {
- // Remove the tarball itself
- return npmFetch.json(pkgUri, opts.concat({
- query: { write: true }
- })).then(({ _rev, _id }) => {
- const tarballUrl = url.parse(dist.tarball).pathname.substr(1)
- return npmFetch(`${tarballUrl}/-rev/${_rev}`, opts.concat({
- method: 'DELETE',
- ignoreBody: true
- }))
- })
- })
+ const latestVer = pkg['dist-tags'].latest
+
+ // delete any dist-tags that point at the removed version
+ Object.keys(pkg['dist-tags']).forEach(tag => {
+ if (pkg['dist-tags'][tag] === version) {
+ delete pkg['dist-tags'][tag]
}
+ })
+
+ if (latestVer === version) {
+ pkg['dist-tags'].latest = Object.keys(
+ allVersions
+ ).sort(semver.compareLoose).pop()
}
- }, err => {
- if (err.code !== 'E404') {
- throw err
- }
- })
- }).then(() => true)
+
+ delete pkg._revisions
+ delete pkg._attachments
+
+ // Update packument with removed versions
+ await npmFetch(`${pkgUri}/-rev/${pkg._rev}`, {
+ ...opts,
+ method: 'PUT',
+ body: pkg,
+ ignoreBody: true
+ })
+
+ // Remove the tarball itself
+ const { _rev } = await npmFetch.json(pkgUri, {
+ ...opts,
+ query: { write: true }
+ })
+ const tarballUrl = new URL(dist.tarball).pathname.substr(1)
+ await npmFetch(`${tarballUrl}/-rev/${_rev}`, {
+ ...opts,
+ method: 'DELETE',
+ ignoreBody: true
+ })
+ return true
+ }
+ } catch (err) {
+ if (err.code !== 'E404') {
+ throw err
+ }
+ return true
+ }
}
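Because the unpublish.js hunks interleave the old promise chain with the new async flow, the resulting behaviour is easier to read flattened out: a specific version the registry does not have is treated as already unpublished; removing every version (or the only one, or a packument with no versions field) is a single DELETE of the packument; anything else is a PUT of the edited packument followed by a DELETE of the orphaned tarball. A condensed sketch under those assumptions — dist-tag repointing and the _revisions/_attachments cleanup are left out:

    'use strict'
    const npmFetch = require('npm-registry-fetch')
    const { URL } = require('url')

    // escapedName and version stand in for what npm-package-arg derives
    // from the spec in the real module.
    async function unpublishFlow (escapedName, version, opts) {
      const pkg = await npmFetch.json(escapedName, { ...opts, query: { write: true } })
      const versions = pkg.versions || {}
      const wholePackage = !version || version === '*'
      const noVersions = Object.keys(versions).length === 0
      const onlyVersion = Object.keys(versions).length === 1

      // A version the registry has never seen is already "unpublished".
      if (!versions[version] && !wholePackage && !noVersions) { return true }

      if (wholePackage || onlyVersion || noVersions) {
        // Everything goes: one DELETE against the packument's current _rev.
        await npmFetch(`${escapedName}/-rev/${pkg._rev}`, {
          ...opts, method: 'DELETE', ignoreBody: true
        })
        return true
      }

      // Per-version: drop it from the packument, PUT the edit, then DELETE
      // the now-orphaned tarball by its registry path.
      const dist = versions[version].dist
      delete versions[version]
      await npmFetch(`${escapedName}/-rev/${pkg._rev}`, {
        ...opts, method: 'PUT', body: pkg, ignoreBody: true
      })
      const { _rev } = await npmFetch.json(escapedName, { ...opts, query: { write: true } })
      const tarballPath = new URL(dist.tarball).pathname.substr(1)
      await npmFetch(`${tarballPath}/-rev/${_rev}`, {
        ...opts, method: 'DELETE', ignoreBody: true
      })
      return true
    }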
diff --git a/package-lock.json b/package-lock.json
index 875c46f67..61ceca812 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -3850,74 +3850,92 @@
"npm-registry-fetch": "^8.0.0"
}
},
- "libnpmpublish": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/libnpmpublish/-/libnpmpublish-1.1.2.tgz",
- "integrity": "sha512-2yIwaXrhTTcF7bkJKIKmaCV9wZOALf/gsTDxVSu/Gu/6wiG3fA8ce8YKstiWKTxSFNC0R7isPUb6tXTVFZHt2g==",
+ "libnpmpack": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/libnpmpack/-/libnpmpack-2.0.0.tgz",
+ "integrity": "sha512-w4wB8ZQUceUANUEiSYqi4nHlqFxhzLXWmhVbDt3NlyZVkmblTokR4xK9VfihLXJhdARQxeILx/HxReeqas1KZQ==",
"requires": {
- "aproba": "^2.0.0",
- "figgy-pudding": "^3.5.1",
- "get-stream": "^4.0.0",
- "lodash.clonedeep": "^4.5.0",
- "normalize-package-data": "^2.4.0",
- "npm-package-arg": "^6.1.0",
- "npm-registry-fetch": "^4.0.0",
- "semver": "^5.5.1",
- "ssri": "^6.0.1"
+ "@npmcli/run-script": "^1.3.0",
+ "npm-package-arg": "^8.0.0",
+ "pacote": "^11.1.4"
},
"dependencies": {
- "npm-package-arg": {
- "version": "6.1.1",
- "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-6.1.1.tgz",
- "integrity": "sha512-qBpssaL3IOZWi5vEKUKW0cO7kzLeT+EQO9W8RsLOZf76KF9E/K9+wH0C7t06HXPpaH8WH5xF1MExLuCwbTqRUg==",
+ "chownr": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
+ "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="
+ },
+ "mkdirp": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.3.tgz",
+ "integrity": "sha512-6uCP4Qc0sWsgMLy1EOqqS/3rjDHOEnsStVr/4vtAIK2Y5i2kA7lFFejYrpIyiN9w0pYf4ckeCYT9f1r1P9KX5g=="
+ },
+ "pacote": {
+ "version": "11.1.4",
+ "resolved": "https://registry.npmjs.org/pacote/-/pacote-11.1.4.tgz",
+ "integrity": "sha512-eUGJvSSpWFZKn3z8gig/HgnBmUl6gIWByIIaHzSyEr3tOWX0w8tFEADXtpu8HGv5E0ShCeTP6enRq8iHKCHSvw==",
"requires": {
- "hosted-git-info": "^2.7.1",
- "osenv": "^0.1.5",
- "semver": "^5.6.0",
- "validate-npm-package-name": "^3.0.0"
+ "@npmcli/git": "^2.0.1",
+ "@npmcli/installed-package-contents": "^1.0.5",
+ "@npmcli/promise-spawn": "^1.1.0",
+ "cacache": "^15.0.0",
+ "chownr": "^1.1.4",
+ "fs-minipass": "^2.1.0",
+ "infer-owner": "^1.0.4",
+ "lru-cache": "^5.1.1",
+ "minipass": "^3.0.1",
+ "minipass-fetch": "^1.2.1",
+ "mkdirp": "^1.0.3",
+ "npm-package-arg": "^8.0.1",
+ "npm-packlist": "^2.1.0",
+ "npm-pick-manifest": "^6.0.0",
+ "npm-registry-fetch": "^8.0.0",
+ "promise-inflight": "^1.0.1",
+ "promise-retry": "^1.1.1",
+ "read-package-json-fast": "^1.1.3",
+ "rimraf": "^2.7.1",
+ "semver": "^7.1.3",
+ "ssri": "^8.0.0",
+ "tar": "^6.0.1",
+ "which": "^2.0.2"
},
"dependencies": {
- "hosted-git-info": {
- "version": "2.8.5",
- "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.5.tgz",
- "integrity": "sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg=="
+ "npm-package-arg": {
+ "version": "8.0.1",
+ "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-8.0.1.tgz",
+ "integrity": "sha512-/h5Fm6a/exByzFSTm7jAyHbgOqErl9qSNJDQF32Si/ZzgwT2TERVxRxn3Jurw1wflgyVVAxnFR4fRHPM7y1ClQ==",
+ "requires": {
+ "hosted-git-info": "^3.0.2",
+ "semver": "^7.0.0",
+ "validate-npm-package-name": "^3.0.0"
+ }
}
}
},
- "npm-registry-fetch": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-4.0.3.tgz",
- "integrity": "sha512-WGvUx0lkKFhu9MbiGFuT9nG2NpfQ+4dCJwRwwtK2HK5izJEvwDxMeUyqbuMS7N/OkpVCqDorV6rO5E4V9F8lJw==",
- "requires": {
- "JSONStream": "^1.3.4",
- "bluebird": "^3.5.1",
- "figgy-pudding": "^3.4.1",
- "lru-cache": "^5.1.1",
- "make-fetch-happen": "^5.0.0",
- "npm-package-arg": "^6.1.0",
- "safe-buffer": "^5.2.0"
- }
- },
- "safe-buffer": {
- "version": "5.2.0",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz",
- "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg=="
- },
- "semver": {
- "version": "5.7.1",
- "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
- "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ=="
- },
- "ssri": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/ssri/-/ssri-6.0.1.tgz",
- "integrity": "sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA==",
+ "rimraf": {
+ "version": "2.7.1",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
+ "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
"requires": {
- "figgy-pudding": "^3.5.1"
+ "glob": "^7.1.3"
}
}
}
},
+ "libnpmpublish": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/libnpmpublish/-/libnpmpublish-3.0.1.tgz",
+ "integrity": "sha512-gpd+Ql3cA73MMN8tZa1ao/bfF2/An+B3JtP7SDbJTRrfwre9C/ZZZ4+ejtqBSopnLXlWujr2MVVE4FY1S+jlFg==",
+ "requires": {
+ "libnpmpack": "^2.0.0",
+ "lodash.clonedeep": "^4.5.0",
+ "normalize-package-data": "^2.5.0",
+ "npm-package-arg": "^8.0.0",
+ "npm-registry-fetch": "^8.0.0",
+ "semver": "^7.1.3",
+ "ssri": "^8.0.0"
+ }
+ },
"libnpmsearch": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/libnpmsearch/-/libnpmsearch-3.0.0.tgz",
diff --git a/package.json b/package.json
index ff19eaa46..1361c59bf 100644
--- a/package.json
+++ b/package.json
@@ -80,7 +80,7 @@
"libnpmaccess": "^4.0.0",
"libnpmhook": "^6.0.0",
"libnpmorg": "^2.0.0",
- "libnpmpublish": "^1.1.2",
+ "libnpmpublish": "^3.0.1",
"libnpmsearch": "^3.0.0",
"libnpmteam": "^2.0.1",
"libnpmversion": "^1.0.2",