github.com/npm/cli.git
author     Gar <gar+gh@danger.computer>   2022-03-14 23:52:55 +0300
committer  nlf <nlf@github.com>           2022-03-15 23:38:37 +0300
commit     f6b771aabece09dca2231426d4f681d3578e5ab7
tree       b672894c4fdeb0fc4f51e320696ae2109a8df898 /node_modules
parent     6d3145014861b4198c16d7772d809fd037ece289

deps: make-fetch-happen@10.0.6
Diffstat (limited to 'node_modules')
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md                            16
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js                    29
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js                   259
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js                      20
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js                  194
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js                    412
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/get.js                            251
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/index.js                           45
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js                     74
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/put.js                             87
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js                              31
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/util/disposer.js                   31
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js                 148
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js            7
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js                  69
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js                        35
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js                         291
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/LICENSE         15
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/index.js       334
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/package.json    34
-rw-r--r--  node_modules/make-fetch-happen/node_modules/cacache/package.json                           88
-rw-r--r--  node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.js                        1
-rw-r--r--  node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.mjs                       1
-rw-r--r--  node_modules/make-fetch-happen/node_modules/lru-cache/index.js                             36
-rw-r--r--  node_modules/make-fetch-happen/node_modules/lru-cache/package.json                         19
-rw-r--r--  node_modules/make-fetch-happen/package.json                                                10
-rw-r--r--  node_modules/minipass-fetch/lib/index.js                                                    8
-rw-r--r--  node_modules/minipass-fetch/package.json                                                    7

28 files changed, 2526 insertions(+), 26 deletions(-)
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md b/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md
new file mode 100644
index 000000000..8d28acf86
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js
new file mode 100644
index 000000000..ad5a76a4f
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const contentVer = require('../../package.json')['cache-version'].content
+const hashToSegments = require('../util/hash-to-segments')
+const path = require('path')
+const ssri = require('ssri')
+
+// Current format of content file path:
+//
+// sha512-BaSE64Hex= ->
+// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
+//
+module.exports = contentPath
+
+function contentPath (cache, integrity) {
+ const sri = ssri.parse(integrity, { single: true })
+ // contentPath is the *strongest* algo given
+ return path.join(
+ contentDir(cache),
+ sri.algorithm,
+ ...hashToSegments(sri.hexDigest())
+ )
+}
+
+module.exports.contentDir = contentDir
+
+function contentDir (cache) {
+ return path.join(cache, `content-v${contentVer}`)
+}
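
For orientation (not part of the diff): a minimal standalone sketch of the layout contentPath() produces, with the two-level segmenting inlined. The cache root and input data are hypothetical; only `ssri` needs to be installed.

    'use strict'
    const path = require('path')
    const ssri = require('ssri')

    // same segmenting as lib/util/hash-to-segments.js
    const hashToSegments = (hash) =>
      [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]

    // derive a real digest instead of inventing one
    const hash = ssri.fromData('hello world').sha512[0]

    console.log(path.join(
      '/tmp/my-cache',   // hypothetical cache root
      'content-v2',      // contentDir(): content-v<cache-version>
      hash.algorithm,    // strongest available algorithm (sha512 here)
      ...hashToSegments(hash.hexDigest())
    ))
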
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js
new file mode 100644
index 000000000..8bffb2af8
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js
@@ -0,0 +1,259 @@
+'use strict'
+
+const util = require('util')
+
+const fs = require('fs')
+const fsm = require('fs-minipass')
+const ssri = require('ssri')
+const contentPath = require('./path')
+const Pipeline = require('minipass-pipeline')
+
+const lstat = util.promisify(fs.lstat)
+const readFile = util.promisify(fs.readFile)
+
+module.exports = read
+
+const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
+function read (cache, integrity, opts = {}) {
+ const { size } = opts
+ return withContentSri(cache, integrity, (cpath, sri) => {
+ // get size
+ return lstat(cpath).then(stat => ({ stat, cpath, sri }))
+ }).then(({ stat, cpath, sri }) => {
+ if (typeof size === 'number' && stat.size !== size) {
+ throw sizeError(size, stat.size)
+ }
+
+ if (stat.size > MAX_SINGLE_READ_SIZE) {
+ return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
+ }
+
+ return readFile(cpath, null).then((data) => {
+ if (!ssri.checkData(data, sri)) {
+ throw integrityError(sri, cpath)
+ }
+
+ return data
+ })
+ })
+}
+
+const readPipeline = (cpath, size, sri, stream) => {
+ stream.push(
+ new fsm.ReadStream(cpath, {
+ size,
+ readSize: MAX_SINGLE_READ_SIZE,
+ }),
+ ssri.integrityStream({
+ integrity: sri,
+ size,
+ })
+ )
+ return stream
+}
+
+module.exports.sync = readSync
+
+function readSync (cache, integrity, opts = {}) {
+ const { size } = opts
+ return withContentSriSync(cache, integrity, (cpath, sri) => {
+ const data = fs.readFileSync(cpath)
+ if (typeof size === 'number' && size !== data.length) {
+ throw sizeError(size, data.length)
+ }
+
+ if (ssri.checkData(data, sri)) {
+ return data
+ }
+
+ throw integrityError(sri, cpath)
+ })
+}
+
+module.exports.stream = readStream
+module.exports.readStream = readStream
+
+function readStream (cache, integrity, opts = {}) {
+ const { size } = opts
+ const stream = new Pipeline()
+ withContentSri(cache, integrity, (cpath, sri) => {
+ // just lstat to ensure it exists
+ return lstat(cpath).then((stat) => ({ stat, cpath, sri }))
+ }).then(({ stat, cpath, sri }) => {
+ if (typeof size === 'number' && size !== stat.size) {
+ return stream.emit('error', sizeError(size, stat.size))
+ }
+
+ readPipeline(cpath, stat.size, sri, stream)
+ }, er => stream.emit('error', er))
+
+ return stream
+}
+
+let copyFile
+if (fs.copyFile) {
+ module.exports.copy = copy
+ module.exports.copy.sync = copySync
+ copyFile = util.promisify(fs.copyFile)
+}
+
+function copy (cache, integrity, dest) {
+ return withContentSri(cache, integrity, (cpath, sri) => {
+ return copyFile(cpath, dest)
+ })
+}
+
+function copySync (cache, integrity, dest) {
+ return withContentSriSync(cache, integrity, (cpath, sri) => {
+ return fs.copyFileSync(cpath, dest)
+ })
+}
+
+module.exports.hasContent = hasContent
+
+function hasContent (cache, integrity) {
+ if (!integrity) {
+ return Promise.resolve(false)
+ }
+
+ return withContentSri(cache, integrity, (cpath, sri) => {
+ return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat }))
+ }).catch((err) => {
+ if (err.code === 'ENOENT') {
+ return false
+ }
+
+ if (err.code === 'EPERM') {
+ /* istanbul ignore else */
+ if (process.platform !== 'win32') {
+ throw err
+ } else {
+ return false
+ }
+ }
+ })
+}
+
+module.exports.hasContent.sync = hasContentSync
+
+function hasContentSync (cache, integrity) {
+ if (!integrity) {
+ return false
+ }
+
+ return withContentSriSync(cache, integrity, (cpath, sri) => {
+ try {
+ const stat = fs.lstatSync(cpath)
+ return { size: stat.size, sri, stat }
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ return false
+ }
+
+ if (err.code === 'EPERM') {
+ /* istanbul ignore else */
+ if (process.platform !== 'win32') {
+ throw err
+ } else {
+ return false
+ }
+ }
+ }
+ })
+}
+
+function withContentSri (cache, integrity, fn) {
+ const tryFn = () => {
+ const sri = ssri.parse(integrity)
+ // If `integrity` has multiple entries, pick the first digest
+ // with available local data.
+ const algo = sri.pickAlgorithm()
+ const digests = sri[algo]
+
+ if (digests.length <= 1) {
+ const cpath = contentPath(cache, digests[0])
+ return fn(cpath, digests[0])
+ } else {
+ // Can't use race here because a generic error can happen before
+ // a ENOENT error, and can happen before a valid result
+ return Promise
+ .all(digests.map((meta) => {
+ return withContentSri(cache, meta, fn)
+ .catch((err) => {
+ if (err.code === 'ENOENT') {
+ return Object.assign(
+ new Error('No matching content found for ' + sri.toString()),
+ { code: 'ENOENT' }
+ )
+ }
+ return err
+ })
+ }))
+ .then((results) => {
+ // Return the first non error if it is found
+ const result = results.find((r) => !(r instanceof Error))
+ if (result) {
+ return result
+ }
+
+ // Throw the No matching content found error
+ const enoentError = results.find((r) => r.code === 'ENOENT')
+ if (enoentError) {
+ throw enoentError
+ }
+
+ // Throw generic error
+ throw results.find((r) => r instanceof Error)
+ })
+ }
+ }
+
+ return new Promise((resolve, reject) => {
+ try {
+ tryFn()
+ .then(resolve)
+ .catch(reject)
+ } catch (err) {
+ reject(err)
+ }
+ })
+}
+
+function withContentSriSync (cache, integrity, fn) {
+ const sri = ssri.parse(integrity)
+ // If `integrity` has multiple entries, pick the first digest
+ // with available local data.
+ const algo = sri.pickAlgorithm()
+ const digests = sri[algo]
+ if (digests.length <= 1) {
+ const cpath = contentPath(cache, digests[0])
+ return fn(cpath, digests[0])
+ } else {
+ let lastErr = null
+ for (const meta of digests) {
+ try {
+ return withContentSriSync(cache, meta, fn)
+ } catch (err) {
+ lastErr = err
+ }
+ }
+ throw lastErr
+ }
+}
+
+function sizeError (expected, found) {
+ /* eslint-disable-next-line max-len */
+ const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+ err.expected = expected
+ err.found = found
+ err.code = 'EBADSIZE'
+ return err
+}
+
+function integrityError (sri, path) {
+ const err = new Error(`Integrity verification failed for ${sri} (${path})`)
+ err.code = 'EINTEGRITY'
+ err.sri = sri
+ err.path = path
+ return err
+}
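
A hedged usage sketch for the module above, assuming it is required from the vendored path and that the cache root and integrity value come from elsewhere (e.g. an index entry):

    const read = require('./lib/content/read')

    async function main () {
      const cache = '/tmp/my-cache'      // hypothetical cache root
      const integrity = process.argv[2]  // e.g. 'sha512-...' from an index entry

      // hasContent() resolves to false (not a rejection) when nothing is cached
      if (await read.hasContent(cache, integrity)) {
        const data = await read(cache, integrity) // Buffer, verified against integrity
        console.log('got %d verified bytes', data.length)
      }
    }

    main().catch(console.error)
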
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js
new file mode 100644
index 000000000..50612364e
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js
@@ -0,0 +1,20 @@
+'use strict'
+
+const util = require('util')
+
+const contentPath = require('./path')
+const { hasContent } = require('./read')
+const rimraf = util.promisify(require('rimraf'))
+
+module.exports = rm
+
+function rm (cache, integrity) {
+ return hasContent(cache, integrity).then((content) => {
+ // ~pretty~ sure we can't end up with a content lacking sri, but be safe
+ if (content && content.sri) {
+ return rimraf(contentPath(cache, content.sri)).then(() => true)
+ } else {
+ return false
+ }
+ })
+}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js
new file mode 100644
index 000000000..a71e81ad5
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js
@@ -0,0 +1,194 @@
+'use strict'
+
+const util = require('util')
+
+const contentPath = require('./path')
+const fixOwner = require('../util/fix-owner')
+const fs = require('fs')
+const moveFile = require('../util/move-file')
+const Minipass = require('minipass')
+const Pipeline = require('minipass-pipeline')
+const Flush = require('minipass-flush')
+const path = require('path')
+const rimraf = util.promisify(require('rimraf'))
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+const { disposer } = require('./../util/disposer')
+const fsm = require('fs-minipass')
+
+const writeFile = util.promisify(fs.writeFile)
+
+module.exports = write
+
+function write (cache, data, opts = {}) {
+ const { algorithms, size, integrity } = opts
+ if (algorithms && algorithms.length > 1) {
+ throw new Error('opts.algorithms only supports a single algorithm for now')
+ }
+
+ if (typeof size === 'number' && data.length !== size) {
+ return Promise.reject(sizeError(size, data.length))
+ }
+
+ const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
+ if (integrity && !ssri.checkData(data, integrity, opts)) {
+ return Promise.reject(checksumError(integrity, sri))
+ }
+
+ return disposer(makeTmp(cache, opts), makeTmpDisposer,
+ (tmp) => {
+ return writeFile(tmp.target, data, { flag: 'wx' })
+ .then(() => moveToDestination(tmp, cache, sri, opts))
+ })
+ .then(() => ({ integrity: sri, size: data.length }))
+}
+
+module.exports.stream = writeStream
+
+// writes proxied to the 'inputStream' that is passed to the Promise
+// 'end' is deferred until content is handled.
+class CacacheWriteStream extends Flush {
+ constructor (cache, opts) {
+ super()
+ this.opts = opts
+ this.cache = cache
+ this.inputStream = new Minipass()
+ this.inputStream.on('error', er => this.emit('error', er))
+ this.inputStream.on('drain', () => this.emit('drain'))
+ this.handleContentP = null
+ }
+
+ write (chunk, encoding, cb) {
+ if (!this.handleContentP) {
+ this.handleContentP = handleContent(
+ this.inputStream,
+ this.cache,
+ this.opts
+ )
+ }
+ return this.inputStream.write(chunk, encoding, cb)
+ }
+
+ flush (cb) {
+ this.inputStream.end(() => {
+ if (!this.handleContentP) {
+ const e = new Error('Cache input stream was empty')
+ e.code = 'ENODATA'
+ // empty streams are probably emitting end right away.
+ // defer this one tick by rejecting a promise on it.
+ return Promise.reject(e).catch(cb)
+ }
+ this.handleContentP.then(
+ (res) => {
+ res.integrity && this.emit('integrity', res.integrity)
+ res.size !== null && this.emit('size', res.size)
+ cb()
+ },
+ (er) => cb(er)
+ )
+ })
+ }
+}
+
+function writeStream (cache, opts = {}) {
+ return new CacacheWriteStream(cache, opts)
+}
+
+function handleContent (inputStream, cache, opts) {
+ return disposer(makeTmp(cache, opts), makeTmpDisposer, (tmp) => {
+ return pipeToTmp(inputStream, cache, tmp.target, opts)
+ .then((res) => {
+ return moveToDestination(
+ tmp,
+ cache,
+ res.integrity,
+ opts
+ ).then(() => res)
+ })
+ })
+}
+
+function pipeToTmp (inputStream, cache, tmpTarget, opts) {
+ let integrity
+ let size
+ const hashStream = ssri.integrityStream({
+ integrity: opts.integrity,
+ algorithms: opts.algorithms,
+ size: opts.size,
+ })
+ hashStream.on('integrity', i => {
+ integrity = i
+ })
+ hashStream.on('size', s => {
+ size = s
+ })
+
+ const outStream = new fsm.WriteStream(tmpTarget, {
+ flags: 'wx',
+ })
+
+ // NB: this can throw if the hashStream has a problem with
+ // it, and the data is fully written. but pipeToTmp is only
+ // called in promisory contexts where that is handled.
+ const pipeline = new Pipeline(
+ inputStream,
+ hashStream,
+ outStream
+ )
+
+ return pipeline.promise()
+ .then(() => ({ integrity, size }))
+ .catch(er => rimraf(tmpTarget).then(() => {
+ throw er
+ }))
+}
+
+function makeTmp (cache, opts) {
+ const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+ return fixOwner.mkdirfix(cache, path.dirname(tmpTarget)).then(() => ({
+ target: tmpTarget,
+ moved: false,
+ }))
+}
+
+function makeTmpDisposer (tmp) {
+ if (tmp.moved) {
+ return Promise.resolve()
+ }
+
+ return rimraf(tmp.target)
+}
+
+function moveToDestination (tmp, cache, sri, opts) {
+ const destination = contentPath(cache, sri)
+ const destDir = path.dirname(destination)
+
+ return fixOwner
+ .mkdirfix(cache, destDir)
+ .then(() => {
+ return moveFile(tmp.target, destination)
+ })
+ .then(() => {
+ tmp.moved = true
+ return fixOwner.chownr(cache, destination)
+ })
+}
+
+function sizeError (expected, found) {
+ /* eslint-disable-next-line max-len */
+ const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+ err.expected = expected
+ err.found = found
+ err.code = 'EBADSIZE'
+ return err
+}
+
+function checksumError (expected, found) {
+ const err = new Error(`Integrity check failed:
+ Wanted: ${expected}
+ Found: ${found}`)
+ err.code = 'EINTEGRITY'
+ err.expected = expected
+ err.found = found
+ return err
+}
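
A hedged sketch of the two write paths above; paths are hypothetical. The buffer path resolves with the computed integrity, while the stream path reports integrity and size as events once the content has landed:

    const fs = require('fs')
    const write = require('./lib/content/write')

    async function main () {
      const cache = '/tmp/my-cache' // hypothetical cache root

      // buffer path: hashes, writes to tmp, moves into place
      const { integrity, size } = await write(cache, Buffer.from('some data'))
      console.log(integrity.toString(), size)

      // stream path: 'integrity'/'size' fire from the flush() handler above
      const stream = write.stream(cache)
        .on('integrity', (sri) => console.log('integrity', sri.toString()))
        .on('size', (s) => console.log('size', s))
      fs.createReadStream('/some/file').pipe(stream) // hypothetical source file
    }

    main().catch(console.error)
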
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js
new file mode 100644
index 000000000..426778b85
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js
@@ -0,0 +1,412 @@
+'use strict'
+
+const util = require('util')
+const crypto = require('crypto')
+const fs = require('fs')
+const Minipass = require('minipass')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+
+const { disposer } = require('./util/disposer')
+const contentPath = require('./content/path')
+const fixOwner = require('./util/fix-owner')
+const hashToSegments = require('./util/hash-to-segments')
+const indexV = require('../package.json')['cache-version'].index
+const moveFile = require('@npmcli/move-file')
+const _rimraf = require('rimraf')
+const rimraf = util.promisify(_rimraf)
+rimraf.sync = _rimraf.sync
+
+const appendFile = util.promisify(fs.appendFile)
+const readFile = util.promisify(fs.readFile)
+const readdir = util.promisify(fs.readdir)
+const writeFile = util.promisify(fs.writeFile)
+
+module.exports.NotFoundError = class NotFoundError extends Error {
+ constructor (cache, key) {
+ super(`No cache entry for ${key} found in ${cache}`)
+ this.code = 'ENOENT'
+ this.cache = cache
+ this.key = key
+ }
+}
+
+module.exports.compact = compact
+
+async function compact (cache, key, matchFn, opts = {}) {
+ const bucket = bucketPath(cache, key)
+ const entries = await bucketEntries(bucket)
+ const newEntries = []
+ // we loop backwards because the bottom-most result is the newest
+ // since we add new entries with appendFile
+ for (let i = entries.length - 1; i >= 0; --i) {
+ const entry = entries[i]
+ // a null integrity could mean either a delete was appended
+ // or the user has simply stored an index that does not map
+ // to any content. we determine if the user wants to keep the
+ // null integrity based on the validateEntry function passed in options.
+ // if the integrity is null and no validateEntry is provided, we break
+ // as we consider the null integrity to be a deletion of everything
+ // that came before it.
+ if (entry.integrity === null && !opts.validateEntry) {
+ break
+ }
+
+ // if this entry is valid, and it is either the first entry or
+ // the newEntries array doesn't already include an entry that
+ // matches this one based on the provided matchFn, then we add
+ // it to the beginning of our list
+ if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
+ (newEntries.length === 0 ||
+ !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
+ newEntries.unshift(entry)
+ }
+ }
+
+ const newIndex = '\n' + newEntries.map((entry) => {
+ const stringified = JSON.stringify(entry)
+ const hash = hashEntry(stringified)
+ return `${hash}\t${stringified}`
+ }).join('\n')
+
+ const setup = async () => {
+ const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+ await fixOwner.mkdirfix(cache, path.dirname(target))
+ return {
+ target,
+ moved: false,
+ }
+ }
+
+ const teardown = async (tmp) => {
+ if (!tmp.moved) {
+ return rimraf(tmp.target)
+ }
+ }
+
+ const write = async (tmp) => {
+ await writeFile(tmp.target, newIndex, { flag: 'wx' })
+ await fixOwner.mkdirfix(cache, path.dirname(bucket))
+ // we use @npmcli/move-file directly here because we
+ // want to overwrite the existing file
+ await moveFile(tmp.target, bucket)
+ tmp.moved = true
+ try {
+ await fixOwner.chownr(cache, bucket)
+ } catch (err) {
+ if (err.code !== 'ENOENT') {
+ throw err
+ }
+ }
+ }
+
+ // write the file atomically
+ await disposer(setup(), teardown, write)
+
+ // we reverse the list we generated such that the newest
+ // entries come first in order to make looping through them easier
+ // the true passed to formatEntry tells it to keep null
+ // integrity values, if they made it this far it's because
+ // validateEntry returned true, and as such we should return it
+ return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
+}
+
+module.exports.insert = insert
+
+function insert (cache, key, integrity, opts = {}) {
+ const { metadata, size } = opts
+ const bucket = bucketPath(cache, key)
+ const entry = {
+ key,
+ integrity: integrity && ssri.stringify(integrity),
+ time: Date.now(),
+ size,
+ metadata,
+ }
+ return fixOwner
+ .mkdirfix(cache, path.dirname(bucket))
+ .then(() => {
+ const stringified = JSON.stringify(entry)
+ // NOTE - Cleverness ahoy!
+ //
+ // This works because it's tremendously unlikely for an entry to corrupt
+ // another while still preserving the string length of the JSON in
+ // question. So, we just slap the length in there and verify it on read.
+ //
+ // Thanks to @isaacs for the whiteboarding session that ended up with
+ // this.
+ return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
+ })
+ .then(() => fixOwner.chownr(cache, bucket))
+ .catch((err) => {
+ if (err.code === 'ENOENT') {
+ return undefined
+ }
+
+ throw err
+ // There's a class of race conditions that happen when things get deleted
+ // during fixOwner, or between the two mkdirfix/chownr calls.
+ //
+ // It's perfectly fine to just not bother in those cases and lie
+ // that the index entry was written. Because it's a cache.
+ })
+ .then(() => {
+ return formatEntry(cache, entry)
+ })
+}
+
+module.exports.insert.sync = insertSync
+
+function insertSync (cache, key, integrity, opts = {}) {
+ const { metadata, size } = opts
+ const bucket = bucketPath(cache, key)
+ const entry = {
+ key,
+ integrity: integrity && ssri.stringify(integrity),
+ time: Date.now(),
+ size,
+ metadata,
+ }
+ fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
+ const stringified = JSON.stringify(entry)
+ fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
+ try {
+ fixOwner.chownr.sync(cache, bucket)
+ } catch (err) {
+ if (err.code !== 'ENOENT') {
+ throw err
+ }
+ }
+ return formatEntry(cache, entry)
+}
+
+module.exports.find = find
+
+function find (cache, key) {
+ const bucket = bucketPath(cache, key)
+ return bucketEntries(bucket)
+ .then((entries) => {
+ return entries.reduce((latest, next) => {
+ if (next && next.key === key) {
+ return formatEntry(cache, next)
+ } else {
+ return latest
+ }
+ }, null)
+ })
+ .catch((err) => {
+ if (err.code === 'ENOENT') {
+ return null
+ } else {
+ throw err
+ }
+ })
+}
+
+module.exports.find.sync = findSync
+
+function findSync (cache, key) {
+ const bucket = bucketPath(cache, key)
+ try {
+ return bucketEntriesSync(bucket).reduce((latest, next) => {
+ if (next && next.key === key) {
+ return formatEntry(cache, next)
+ } else {
+ return latest
+ }
+ }, null)
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ return null
+ } else {
+ throw err
+ }
+ }
+}
+
+module.exports.delete = del
+
+function del (cache, key, opts = {}) {
+ if (!opts.removeFully) {
+ return insert(cache, key, null, opts)
+ }
+
+ const bucket = bucketPath(cache, key)
+ return rimraf(bucket)
+}
+
+module.exports.delete.sync = delSync
+
+function delSync (cache, key, opts = {}) {
+ if (!opts.removeFully) {
+ return insertSync(cache, key, null, opts)
+ }
+
+ const bucket = bucketPath(cache, key)
+ return rimraf.sync(bucket)
+}
+
+module.exports.lsStream = lsStream
+
+function lsStream (cache) {
+ const indexDir = bucketDir(cache)
+ const stream = new Minipass({ objectMode: true })
+
+ readdirOrEmpty(indexDir).then(buckets => Promise.all(
+ buckets.map(bucket => {
+ const bucketPath = path.join(indexDir, bucket)
+ return readdirOrEmpty(bucketPath).then(subbuckets => Promise.all(
+ subbuckets.map(subbucket => {
+ const subbucketPath = path.join(bucketPath, subbucket)
+
+ // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*"
+ return readdirOrEmpty(subbucketPath).then(entries => Promise.all(
+ entries.map(entry => {
+ const entryPath = path.join(subbucketPath, entry)
+ return bucketEntries(entryPath).then(entries =>
+ // using a Map here prevents duplicate keys from
+ // showing up twice, I guess?
+ entries.reduce((acc, entry) => {
+ acc.set(entry.key, entry)
+ return acc
+ }, new Map())
+ ).then(reduced => {
+ // reduced is a map of key => entry
+ for (const entry of reduced.values()) {
+ const formatted = formatEntry(cache, entry)
+ if (formatted) {
+ stream.write(formatted)
+ }
+ }
+ }).catch(err => {
+ if (err.code === 'ENOENT') {
+ return undefined
+ }
+ throw err
+ })
+ })
+ ))
+ })
+ ))
+ })
+ ))
+ .then(
+ () => stream.end(),
+ err => stream.emit('error', err)
+ )
+
+ return stream
+}
+
+module.exports.ls = ls
+
+function ls (cache) {
+ return lsStream(cache).collect().then(entries =>
+ entries.reduce((acc, xs) => {
+ acc[xs.key] = xs
+ return acc
+ }, {})
+ )
+}
+
+module.exports.bucketEntries = bucketEntries
+
+function bucketEntries (bucket, filter) {
+ return readFile(bucket, 'utf8').then((data) => _bucketEntries(data, filter))
+}
+
+module.exports.bucketEntries.sync = bucketEntriesSync
+
+function bucketEntriesSync (bucket, filter) {
+ const data = fs.readFileSync(bucket, 'utf8')
+ return _bucketEntries(data, filter)
+}
+
+function _bucketEntries (data, filter) {
+ const entries = []
+ data.split('\n').forEach((entry) => {
+ if (!entry) {
+ return
+ }
+
+ const pieces = entry.split('\t')
+ if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
+ // Hash is no good! Corruption or malice? Doesn't matter!
+ // EJECT EJECT
+ return
+ }
+ let obj
+ try {
+ obj = JSON.parse(pieces[1])
+ } catch (e) {
+ // Entry is corrupted!
+ return
+ }
+ if (obj) {
+ entries.push(obj)
+ }
+ })
+ return entries
+}
+
+module.exports.bucketDir = bucketDir
+
+function bucketDir (cache) {
+ return path.join(cache, `index-v${indexV}`)
+}
+
+module.exports.bucketPath = bucketPath
+
+function bucketPath (cache, key) {
+ const hashed = hashKey(key)
+ return path.join.apply(
+ path,
+ [bucketDir(cache)].concat(hashToSegments(hashed))
+ )
+}
+
+module.exports.hashKey = hashKey
+
+function hashKey (key) {
+ return hash(key, 'sha256')
+}
+
+module.exports.hashEntry = hashEntry
+
+function hashEntry (str) {
+ return hash(str, 'sha1')
+}
+
+function hash (str, digest) {
+ return crypto
+ .createHash(digest)
+ .update(str)
+ .digest('hex')
+}
+
+function formatEntry (cache, entry, keepAll) {
+ // Treat null digests as deletions. They'll shadow any previous entries.
+ if (!entry.integrity && !keepAll) {
+ return null
+ }
+
+ return {
+ key: entry.key,
+ integrity: entry.integrity,
+ path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
+ size: entry.size,
+ time: entry.time,
+ metadata: entry.metadata,
+ }
+}
+
+function readdirOrEmpty (dir) {
+ return readdir(dir).catch((err) => {
+ if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
+ return []
+ }
+
+ throw err
+ })
+}
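
To make the "cleverness" above concrete, this is the bucket line format insert() appends and _bucketEntries() verifies; the entry values here are hypothetical:

    const crypto = require('crypto')

    const entry = { key: 'my-key', integrity: 'sha512-deadbeef', time: Date.now() }
    const stringified = JSON.stringify(entry)

    // hashEntry(): sha1 over the exact JSON string
    const hash = crypto.createHash('sha1').update(stringified).digest('hex')

    // one appended bucket line: "<sha1>\t<json>", newline-prefixed
    console.log(`\n${hash}\t${stringified}`)

    // on read, a line whose recomputed sha1 does not match its prefix is
    // silently dropped (see _bucketEntries above)
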
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js
new file mode 100644
index 000000000..d9d4bf4c6
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js
@@ -0,0 +1,251 @@
+'use strict'
+
+const Collect = require('minipass-collect')
+const Minipass = require('minipass')
+const Pipeline = require('minipass-pipeline')
+const fs = require('fs')
+const util = require('util')
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const read = require('./content/read')
+
+const writeFile = util.promisify(fs.writeFile)
+
+function getData (cache, key, opts = {}) {
+ const { integrity, memoize, size } = opts
+ const memoized = memo.get(cache, key, opts)
+ if (memoized && memoize !== false) {
+ return Promise.resolve({
+ metadata: memoized.entry.metadata,
+ data: memoized.data,
+ integrity: memoized.entry.integrity,
+ size: memoized.entry.size,
+ })
+ }
+
+ return index.find(cache, key, opts).then((entry) => {
+ if (!entry) {
+ throw new index.NotFoundError(cache, key)
+ }
+
+ return read(cache, entry.integrity, { integrity, size }).then((data) => {
+ if (memoize) {
+ memo.put(cache, entry, data, opts)
+ }
+
+ return {
+ data,
+ metadata: entry.metadata,
+ size: entry.size,
+ integrity: entry.integrity,
+ }
+ })
+ })
+}
+module.exports = getData
+
+function getDataByDigest (cache, key, opts = {}) {
+ const { integrity, memoize, size } = opts
+ const memoized = memo.get.byDigest(cache, key, opts)
+ if (memoized && memoize !== false) {
+ return Promise.resolve(memoized)
+ }
+
+ return read(cache, key, { integrity, size }).then((res) => {
+ if (memoize) {
+ memo.put.byDigest(cache, key, res, opts)
+ }
+ return res
+ })
+}
+module.exports.byDigest = getDataByDigest
+
+function getDataSync (cache, key, opts = {}) {
+ const { integrity, memoize, size } = opts
+ const memoized = memo.get(cache, key, opts)
+
+ if (memoized && memoize !== false) {
+ return {
+ metadata: memoized.entry.metadata,
+ data: memoized.data,
+ integrity: memoized.entry.integrity,
+ size: memoized.entry.size,
+ }
+ }
+ const entry = index.find.sync(cache, key, opts)
+ if (!entry) {
+ throw new index.NotFoundError(cache, key)
+ }
+ const data = read.sync(cache, entry.integrity, {
+ integrity: integrity,
+ size: size,
+ })
+ const res = {
+ metadata: entry.metadata,
+ data: data,
+ size: entry.size,
+ integrity: entry.integrity,
+ }
+ if (memoize) {
+ memo.put(cache, entry, res.data, opts)
+ }
+
+ return res
+}
+
+module.exports.sync = getDataSync
+
+function getDataByDigestSync (cache, digest, opts = {}) {
+ const { integrity, memoize, size } = opts
+ const memoized = memo.get.byDigest(cache, digest, opts)
+
+ if (memoized && memoize !== false) {
+ return memoized
+ }
+
+ const res = read.sync(cache, digest, {
+ integrity: integrity,
+ size: size,
+ })
+ if (memoize) {
+ memo.put.byDigest(cache, digest, res, opts)
+ }
+
+ return res
+}
+module.exports.sync.byDigest = getDataByDigestSync
+
+const getMemoizedStream = (memoized) => {
+ const stream = new Minipass()
+ stream.on('newListener', function (ev, cb) {
+ ev === 'metadata' && cb(memoized.entry.metadata)
+ ev === 'integrity' && cb(memoized.entry.integrity)
+ ev === 'size' && cb(memoized.entry.size)
+ })
+ stream.end(memoized.data)
+ return stream
+}
+
+function getStream (cache, key, opts = {}) {
+ const { memoize, size } = opts
+ const memoized = memo.get(cache, key, opts)
+ if (memoized && memoize !== false) {
+ return getMemoizedStream(memoized)
+ }
+
+ const stream = new Pipeline()
+ index
+ .find(cache, key)
+ .then((entry) => {
+ if (!entry) {
+ throw new index.NotFoundError(cache, key)
+ }
+
+ stream.emit('metadata', entry.metadata)
+ stream.emit('integrity', entry.integrity)
+ stream.emit('size', entry.size)
+ stream.on('newListener', function (ev, cb) {
+ ev === 'metadata' && cb(entry.metadata)
+ ev === 'integrity' && cb(entry.integrity)
+ ev === 'size' && cb(entry.size)
+ })
+
+ const src = read.readStream(
+ cache,
+ entry.integrity,
+ { ...opts, size: typeof size !== 'number' ? entry.size : size }
+ )
+
+ if (memoize) {
+ const memoStream = new Collect.PassThrough()
+ memoStream.on('collect', data => memo.put(cache, entry, data, opts))
+ stream.unshift(memoStream)
+ }
+ stream.unshift(src)
+ })
+ .catch((err) => stream.emit('error', err))
+
+ return stream
+}
+
+module.exports.stream = getStream
+
+function getStreamDigest (cache, integrity, opts = {}) {
+ const { memoize } = opts
+ const memoized = memo.get.byDigest(cache, integrity, opts)
+ if (memoized && memoize !== false) {
+ const stream = new Minipass()
+ stream.end(memoized)
+ return stream
+ } else {
+ const stream = read.readStream(cache, integrity, opts)
+ if (!memoize) {
+ return stream
+ }
+
+ const memoStream = new Collect.PassThrough()
+ memoStream.on('collect', data => memo.put.byDigest(
+ cache,
+ integrity,
+ data,
+ opts
+ ))
+ return new Pipeline(stream, memoStream)
+ }
+}
+
+module.exports.stream.byDigest = getStreamDigest
+
+function info (cache, key, opts = {}) {
+ const { memoize } = opts
+ const memoized = memo.get(cache, key, opts)
+ if (memoized && memoize !== false) {
+ return Promise.resolve(memoized.entry)
+ } else {
+ return index.find(cache, key)
+ }
+}
+module.exports.info = info
+
+function copy (cache, key, dest, opts = {}) {
+ if (read.copy) {
+ return index.find(cache, key, opts).then((entry) => {
+ if (!entry) {
+ throw new index.NotFoundError(cache, key)
+ }
+ return read.copy(cache, entry.integrity, dest, opts)
+ .then(() => {
+ return {
+ metadata: entry.metadata,
+ size: entry.size,
+ integrity: entry.integrity,
+ }
+ })
+ })
+ }
+
+ return getData(cache, key, opts).then((res) => {
+ return writeFile(dest, res.data).then(() => {
+ return {
+ metadata: res.metadata,
+ size: res.size,
+ integrity: res.integrity,
+ }
+ })
+ })
+}
+module.exports.copy = copy
+
+function copyByDigest (cache, key, dest, opts = {}) {
+ if (read.copy) {
+ return read.copy(cache, key, dest, opts).then(() => key)
+ }
+
+ return getDataByDigest(cache, key, opts).then((res) => {
+ return writeFile(dest, res).then(() => key)
+ })
+}
+module.exports.copy.byDigest = copyByDigest
+
+module.exports.hasContent = read.hasContent
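
A hedged usage sketch of the two retrieval styles above, with a hypothetical cache root and key:

    const get = require('./lib/get')

    async function main () {
      const cache = '/tmp/my-cache'
      const { data, metadata, integrity } = await get(cache, 'my-key')
      console.log(data.length, metadata, integrity)

      // streaming variant: metadata/integrity/size are replayed to listeners
      // attached late, via the 'newListener' trick above
      get.stream(cache, 'my-key')
        .on('metadata', (m) => console.log('metadata', m))
        .on('data', () => {})
        .on('error', console.error)
    }

    main().catch(console.error)
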
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js
new file mode 100644
index 000000000..1c56be68d
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js
@@ -0,0 +1,45 @@
+'use strict'
+
+const get = require('./get.js')
+const put = require('./put.js')
+const rm = require('./rm.js')
+const verify = require('./verify.js')
+const { clearMemoized } = require('./memoization.js')
+const tmp = require('./util/tmp.js')
+const index = require('./entry-index.js')
+
+module.exports.index = {}
+module.exports.index.compact = index.compact
+module.exports.index.insert = index.insert
+
+module.exports.ls = index.ls
+module.exports.ls.stream = index.lsStream
+
+module.exports.get = get
+module.exports.get.byDigest = get.byDigest
+module.exports.get.sync = get.sync
+module.exports.get.sync.byDigest = get.sync.byDigest
+module.exports.get.stream = get.stream
+module.exports.get.stream.byDigest = get.stream.byDigest
+module.exports.get.copy = get.copy
+module.exports.get.copy.byDigest = get.copy.byDigest
+module.exports.get.info = get.info
+module.exports.get.hasContent = get.hasContent
+module.exports.get.hasContent.sync = get.hasContent.sync
+
+module.exports.put = put
+module.exports.put.stream = put.stream
+
+module.exports.rm = rm.entry
+module.exports.rm.all = rm.all
+module.exports.rm.entry = module.exports.rm
+module.exports.rm.content = rm.content
+
+module.exports.clearMemoized = clearMemoized
+
+module.exports.tmp = {}
+module.exports.tmp.mkdir = tmp.mkdir
+module.exports.tmp.withTmp = tmp.withTmp
+
+module.exports.verify = verify
+module.exports.verify.lastRun = verify.lastRun
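
Pulling it together, a hedged end-to-end sketch of the public surface wired up here (cache root and key are hypothetical):

    const cacache = require('./lib/index.js')

    async function main () {
      const cache = '/tmp/my-cache'
      const integrity = await cacache.put(cache, 'my-key', 'hello world')
      const { data } = await cacache.get(cache, 'my-key')
      console.log(data.toString(), integrity.toString())
      await cacache.rm.entry(cache, 'my-key') // appends a null-integrity tombstone
    }

    main().catch(console.error)
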
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js
new file mode 100644
index 000000000..e1b13dd5f
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js
@@ -0,0 +1,74 @@
+'use strict'
+
+const LRU = require('lru-cache')
+
+const MAX_SIZE = 50 * 1024 * 1024 // 50MB
+const MAX_AGE = 3 * 60 * 1000
+
+const MEMOIZED = new LRU({
+ max: MAX_SIZE,
+ maxAge: MAX_AGE,
+ length: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
+})
+
+module.exports.clearMemoized = clearMemoized
+
+function clearMemoized () {
+ const old = {}
+ MEMOIZED.forEach((v, k) => {
+ old[k] = v
+ })
+ MEMOIZED.reset()
+ return old
+}
+
+module.exports.put = put
+
+function put (cache, entry, data, opts) {
+ pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
+ putDigest(cache, entry.integrity, data, opts)
+}
+
+module.exports.put.byDigest = putDigest
+
+function putDigest (cache, integrity, data, opts) {
+ pickMem(opts).set(`digest:${cache}:${integrity}`, data)
+}
+
+module.exports.get = get
+
+function get (cache, key, opts) {
+ return pickMem(opts).get(`key:${cache}:${key}`)
+}
+
+module.exports.get.byDigest = getDigest
+
+function getDigest (cache, integrity, opts) {
+ return pickMem(opts).get(`digest:${cache}:${integrity}`)
+}
+
+class ObjProxy {
+ constructor (obj) {
+ this.obj = obj
+ }
+
+ get (key) {
+ return this.obj[key]
+ }
+
+ set (key, val) {
+ this.obj[key] = val
+ }
+}
+
+function pickMem (opts) {
+ if (!opts || !opts.memoize) {
+ return MEMOIZED
+ } else if (opts.memoize.get && opts.memoize.set) {
+ return opts.memoize
+ } else if (typeof opts.memoize === 'object') {
+ return new ObjProxy(opts.memoize)
+ } else {
+ return MEMOIZED
+ }
+}
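
A hedged sketch of the three memoization targets pickMem() distinguishes; the cache path, key, and integrity are hypothetical:

    const memo = require('./lib/memoization')
    const entry = { key: 'k', integrity: 'sha512-abc' } // hypothetical entry

    // 1. memoize: true -> the shared module-level LRU (50MB max, 3min max age)
    memo.put('/tmp/my-cache', entry, Buffer.from('x'), { memoize: true })

    // 2. an object with get/set (e.g. a Map) is used as-is
    memo.put('/tmp/my-cache', entry, Buffer.from('x'), { memoize: new Map() })

    // 3. any other object is wrapped in ObjProxy; entries become properties
    const plain = {}
    memo.put('/tmp/my-cache', entry, Buffer.from('x'), { memoize: plain })
    console.log(Object.keys(plain))
    // -> [ 'key:/tmp/my-cache:k', 'digest:/tmp/my-cache:sha512-abc' ]
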
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js
new file mode 100644
index 000000000..d6904fa30
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js
@@ -0,0 +1,87 @@
+'use strict'
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const write = require('./content/write')
+const Flush = require('minipass-flush')
+const { PassThrough } = require('minipass-collect')
+const Pipeline = require('minipass-pipeline')
+
+const putOpts = (opts) => ({
+ algorithms: ['sha512'],
+ ...opts,
+})
+
+module.exports = putData
+
+function putData (cache, key, data, opts = {}) {
+ const { memoize } = opts
+ opts = putOpts(opts)
+ return write(cache, data, opts).then((res) => {
+ return index
+ .insert(cache, key, res.integrity, { ...opts, size: res.size })
+ .then((entry) => {
+ if (memoize) {
+ memo.put(cache, entry, data, opts)
+ }
+
+ return res.integrity
+ })
+ })
+}
+
+module.exports.stream = putStream
+
+function putStream (cache, key, opts = {}) {
+ const { memoize } = opts
+ opts = putOpts(opts)
+ let integrity
+ let size
+
+ let memoData
+ const pipeline = new Pipeline()
+ // first item in the pipeline is the memoizer, because we need
+ // that to end first and get the collected data.
+ if (memoize) {
+ const memoizer = new PassThrough().on('collect', data => {
+ memoData = data
+ })
+ pipeline.push(memoizer)
+ }
+
+ // contentStream is a write-only, not a passthrough
+ // no data comes out of it.
+ const contentStream = write.stream(cache, opts)
+ .on('integrity', (int) => {
+ integrity = int
+ })
+ .on('size', (s) => {
+ size = s
+ })
+
+ pipeline.push(contentStream)
+
+ // last but not least, we write the index and emit hash and size,
+ // and memoize if we're doing that
+ pipeline.push(new Flush({
+ flush () {
+ return index
+ .insert(cache, key, integrity, { ...opts, size })
+ .then((entry) => {
+ if (memoize && memoData) {
+ memo.put(cache, entry, memoData, opts)
+ }
+
+ if (integrity) {
+ pipeline.emit('integrity', integrity)
+ }
+
+ if (size) {
+ pipeline.emit('size', size)
+ }
+ })
+ },
+ }))
+
+ return pipeline
+}
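
A hedged sketch of the streaming put above; note the index entry is only written in the final Flush stage, after which 'integrity' and 'size' are re-emitted on the pipeline:

    const fs = require('fs')
    const put = require('./lib/put')

    const stream = put.stream('/tmp/my-cache', 'my-key', { memoize: true })
      .on('integrity', (sri) => console.log('stored as', sri.toString()))
      .on('error', console.error)

    fs.createReadStream('/some/file').pipe(stream) // hypothetical source file
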
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js
new file mode 100644
index 000000000..5f0007177
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js
@@ -0,0 +1,31 @@
+'use strict'
+
+const util = require('util')
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const path = require('path')
+const rimraf = util.promisify(require('rimraf'))
+const rmContent = require('./content/rm')
+
+module.exports = entry
+module.exports.entry = entry
+
+function entry (cache, key, opts) {
+ memo.clearMemoized()
+ return index.delete(cache, key, opts)
+}
+
+module.exports.content = content
+
+function content (cache, integrity) {
+ memo.clearMemoized()
+ return rmContent(cache, integrity)
+}
+
+module.exports.all = all
+
+function all (cache) {
+ memo.clearMemoized()
+ return rimraf(path.join(cache, '*(content-*|index-*)'))
+}
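
For what the extglob above matches: rm.all() removes the content-* and index-* trees but, by construction, leaves tmp/ and the _lastverified marker in place, and every rm variant drops the in-memory memoization first:

    const rm = require('./lib/rm')
    rm.all('/tmp/my-cache').catch(console.error) // hypothetical cache root
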
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/disposer.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/disposer.js
new file mode 100644
index 000000000..52d7d3edd
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/disposer.js
@@ -0,0 +1,31 @@
+'use strict'
+
+module.exports.disposer = disposer
+
+function disposer (creatorFn, disposerFn, fn) {
+ const runDisposer = (resource, result, shouldThrow = false) => {
+ return disposerFn(resource)
+ .then(
+ // disposer resolved, do something with original fn's promise
+ () => {
+ if (shouldThrow) {
+ throw result
+ }
+
+ return result
+ },
+ // Disposer fn failed, crash process
+ (err) => {
+ throw err
+ // Or process.exit?
+ })
+ }
+
+ return creatorFn
+ .then((resource) => {
+ // fn(resource) can throw, so wrap in a promise here
+ return Promise.resolve().then(() => fn(resource))
+ .then((result) => runDisposer(resource, result))
+ .catch((err) => runDisposer(resource, err, true))
+ })
+}
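
A hedged sketch of the disposer contract above: the resource is always released, and the original result (or error) propagates only after the release promise settles:

    const { disposer } = require('./lib/util/disposer')

    const acquire = Promise.resolve({ name: 'tmpdir' })  // creatorFn promise
    const release = (res) => {                           // disposerFn must return a promise
      console.log('cleanup', res.name)
      return Promise.resolve()
    }

    disposer(acquire, release, (res) => {
      console.log('using', res.name)
      return 42
    }).then((result) => console.log('result:', result))  // logs cleanup first, then 42
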
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js
new file mode 100644
index 000000000..bc14def4e
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js
@@ -0,0 +1,148 @@
+'use strict'
+
+const util = require('util')
+
+const chownr = util.promisify(require('chownr'))
+const mkdirp = require('mkdirp')
+const inflight = require('promise-inflight')
+const inferOwner = require('infer-owner')
+
+// Memoize getuid()/getgid() calls.
+// patch process.setuid/setgid to invalidate cached value on change
+const self = { uid: null, gid: null }
+const getSelf = () => {
+ if (typeof self.uid !== 'number') {
+ self.uid = process.getuid()
+ const setuid = process.setuid
+ process.setuid = (uid) => {
+ self.uid = null
+ process.setuid = setuid
+ return process.setuid(uid)
+ }
+ }
+ if (typeof self.gid !== 'number') {
+ self.gid = process.getgid()
+ const setgid = process.setgid
+ process.setgid = (gid) => {
+ self.gid = null
+ process.setgid = setgid
+ return process.setgid(gid)
+ }
+ }
+}
+
+module.exports.chownr = fixOwner
+
+function fixOwner (cache, filepath) {
+ if (!process.getuid) {
+ // This platform doesn't need ownership fixing
+ return Promise.resolve()
+ }
+
+ getSelf()
+ if (self.uid !== 0) {
+ // almost certainly can't chown anyway
+ return Promise.resolve()
+ }
+
+ return Promise.resolve(inferOwner(cache)).then((owner) => {
+ const { uid, gid } = owner
+
+ // No need to override if it's already what we used.
+ if (self.uid === uid && self.gid === gid) {
+ return
+ }
+
+ return inflight('fixOwner: fixing ownership on ' + filepath, () =>
+ chownr(
+ filepath,
+ typeof uid === 'number' ? uid : self.uid,
+ typeof gid === 'number' ? gid : self.gid
+ ).catch((err) => {
+ if (err.code === 'ENOENT') {
+ return null
+ }
+
+ throw err
+ })
+ )
+ })
+}
+
+module.exports.chownr.sync = fixOwnerSync
+
+function fixOwnerSync (cache, filepath) {
+ if (!process.getuid) {
+ // This platform doesn't need ownership fixing
+ return
+ }
+ const { uid, gid } = inferOwner.sync(cache)
+ getSelf()
+ if (self.uid !== 0) {
+ // almost certainly can't chown anyway
+ return
+ }
+
+ if (self.uid === uid && self.gid === gid) {
+ // No need to override if it's already what we used.
+ return
+ }
+ try {
+ chownr.sync(
+ filepath,
+ typeof uid === 'number' ? uid : self.uid,
+ typeof gid === 'number' ? gid : self.gid
+ )
+ } catch (err) {
+ // only catch ENOENT, any other error is a problem.
+ if (err.code === 'ENOENT') {
+ return null
+ }
+
+ throw err
+ }
+}
+
+module.exports.mkdirfix = mkdirfix
+
+function mkdirfix (cache, p, cb) {
+ // we have to infer the owner _before_ making the directory, even though
+ // we aren't going to use the results, since the cache itself might not
+ // exist yet. If we mkdirp it, then our current uid/gid will be assumed
+ // to be correct if it creates the cache folder in the process.
+ return Promise.resolve(inferOwner(cache)).then(() => {
+ return mkdirp(p)
+ .then((made) => {
+ if (made) {
+ return fixOwner(cache, made).then(() => made)
+ }
+ })
+ .catch((err) => {
+ if (err.code === 'EEXIST') {
+ return fixOwner(cache, p).then(() => null)
+ }
+
+ throw err
+ })
+ })
+}
+
+module.exports.mkdirfix.sync = mkdirfixSync
+
+function mkdirfixSync (cache, p) {
+ try {
+ inferOwner.sync(cache)
+ const made = mkdirp.sync(p)
+ if (made) {
+ fixOwnerSync(cache, made)
+ return made
+ }
+ } catch (err) {
+ if (err.code === 'EEXIST') {
+ fixOwnerSync(cache, p)
+ return null
+ } else {
+ throw err
+ }
+ }
+}
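
A hedged usage sketch: on POSIX systems running as root, mkdirfix() creates the directory and chowns it to the inferred owner of the cache root; where process.getuid is unavailable (Windows) both calls resolve immediately as no-ops. Paths are hypothetical:

    const fixOwner = require('./lib/util/fix-owner')

    fixOwner.mkdirfix('/tmp/my-cache', '/tmp/my-cache/content-v2/sha512')
      .then(() => fixOwner.chownr('/tmp/my-cache', '/tmp/my-cache/content-v2'))
      .catch(console.error)
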
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js
new file mode 100644
index 000000000..445599b50
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js
@@ -0,0 +1,7 @@
+'use strict'
+
+module.exports = hashToSegments
+
+function hashToSegments (hash) {
+ return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
+}
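
The fan-out this produces, with a hypothetical digest: two 2-character directory levels keep any single directory from accumulating too many entries:

    const hashToSegments = require('./lib/util/hash-to-segments')

    console.log(hashToSegments('badc0ffee0ddf00d'))
    // -> [ 'ba', 'dc', '0ffee0ddf00d' ]
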
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js
new file mode 100644
index 000000000..3739cea3d
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js
@@ -0,0 +1,69 @@
+'use strict'
+
+const fs = require('fs')
+const util = require('util')
+const chmod = util.promisify(fs.chmod)
+const unlink = util.promisify(fs.unlink)
+const stat = util.promisify(fs.stat)
+const move = require('@npmcli/move-file')
+const pinflight = require('promise-inflight')
+
+module.exports = moveFile
+
+function moveFile (src, dest) {
+ const isWindows = global.__CACACHE_TEST_FAKE_WINDOWS__ ||
+ process.platform === 'win32'
+
+ // This isn't quite an fs.rename -- the assumption is that
+ // if `dest` already exists, and we get certain errors while
+ // trying to move it, we should just not bother.
+ //
+ // In the case of cache corruption, users will receive an
+ // EINTEGRITY error elsewhere, and can remove the offending
+ // content their own way.
+ //
+ // Note that, as the name suggests, this strictly only supports file moves.
+ return new Promise((resolve, reject) => {
+ fs.link(src, dest, (err) => {
+ if (err) {
+ if (isWindows && err.code === 'EPERM') {
+ // XXX This is a really weird way to handle this situation, as it
+ // results in the src file being deleted even though the dest
+ // might not exist. Since we pretty much always write files to
+ // deterministic locations based on content hash, this is likely
+ // ok (or at worst, just ends in a future cache miss). But it would
+ // be worth investigating at some time in the future if this is
+ // really what we want to do here.
+ return resolve()
+ } else if (err.code === 'EEXIST' || err.code === 'EBUSY') {
+ // file already exists, so whatever
+ return resolve()
+ } else {
+ return reject(err)
+ }
+ } else {
+ return resolve()
+ }
+ })
+ })
+ .then(() => {
+ // content should never change for any reason, so make it read-only
+ return Promise.all([
+ unlink(src),
+ !isWindows && chmod(dest, '0444'),
+ ])
+ })
+ .catch(() => {
+ return pinflight('cacache-move-file:' + dest, () => {
+ return stat(dest).catch((err) => {
+ if (err.code !== 'ENOENT') {
+ // Something else is wrong here. Bail bail bail
+ throw err
+ }
+ // file doesn't already exist! let's try a rename -> copy fallback
+ // only delete if it successfully copies
+ return move(src, dest)
+ })
+ })
+ })
+}
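
A hedged usage sketch with hypothetical paths; the link-then-unlink dance above means the destination is never observed half-written, and EEXIST/EBUSY simply mean identical content already landed:

    const moveFile = require('./lib/util/move-file')

    moveFile('/tmp/my-cache/tmp/unique-123', // hypothetical tmp file
             '/tmp/my-cache/content-v2/sha512/ba/dc/0ffee')
      .catch(console.error)
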
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js
new file mode 100644
index 000000000..0a5a50eba
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js
@@ -0,0 +1,35 @@
+'use strict'
+
+const fs = require('@npmcli/fs')
+
+const fixOwner = require('./fix-owner')
+const path = require('path')
+
+module.exports.mkdir = mktmpdir
+
+function mktmpdir (cache, opts = {}) {
+ const { tmpPrefix } = opts
+ const tmpDir = path.join(cache, 'tmp')
+ return fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
+ .then(() => {
+ // do not use path.join(), it drops the trailing / if tmpPrefix is unset
+ const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
+ return fs.mkdtemp(target, { owner: 'inherit' })
+ })
+}
+
+module.exports.withTmp = withTmp
+
+function withTmp (cache, opts, cb) {
+ if (!cb) {
+ cb = opts
+ opts = {}
+ }
+ return fs.withTempDir(path.join(cache, 'tmp'), cb, opts)
+}
+
+module.exports.fix = fixtmpdir
+
+function fixtmpdir (cache) {
+ return fixOwner(cache, path.join(cache, 'tmp'))
+}
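
A hedged sketch of withTmp(): the callback receives a fresh directory under <cache>/tmp, cleaned up when the returned promise settles (via @npmcli/fs withTempDir):

    const tmp = require('./lib/util/tmp')

    tmp.withTmp('/tmp/my-cache', async (dir) => { // hypothetical cache root
      console.log('scratch space at', dir)
      // ... stage files here before moving them into the cache ...
    }).catch(console.error)
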
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js
new file mode 100644
index 000000000..300cd9f9d
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js
@@ -0,0 +1,291 @@
+'use strict'
+
+const util = require('util')
+
+const pMap = require('p-map')
+const contentPath = require('./content/path')
+const fixOwner = require('./util/fix-owner')
+const fs = require('fs')
+const fsm = require('fs-minipass')
+const glob = util.promisify(require('glob'))
+const index = require('./entry-index')
+const path = require('path')
+const rimraf = util.promisify(require('rimraf'))
+const ssri = require('ssri')
+
+const hasOwnProperty = (obj, key) =>
+ Object.prototype.hasOwnProperty.call(obj, key)
+
+const stat = util.promisify(fs.stat)
+const truncate = util.promisify(fs.truncate)
+const writeFile = util.promisify(fs.writeFile)
+const readFile = util.promisify(fs.readFile)
+
+const verifyOpts = (opts) => ({
+ concurrency: 20,
+ log: { silly () {} },
+ ...opts,
+})
+
+module.exports = verify
+
+function verify (cache, opts) {
+ opts = verifyOpts(opts)
+ opts.log.silly('verify', 'verifying cache at', cache)
+
+ const steps = [
+ markStartTime,
+ fixPerms,
+ garbageCollect,
+ rebuildIndex,
+ cleanTmp,
+ writeVerifile,
+ markEndTime,
+ ]
+
+ return steps
+ .reduce((promise, step, i) => {
+ const label = step.name
+ const start = new Date()
+ return promise.then((stats) => {
+ return step(cache, opts).then((s) => {
+ s &&
+ Object.keys(s).forEach((k) => {
+ stats[k] = s[k]
+ })
+ const end = new Date()
+ if (!stats.runTime) {
+ stats.runTime = {}
+ }
+
+ stats.runTime[label] = end - start
+ return Promise.resolve(stats)
+ })
+ })
+ }, Promise.resolve({}))
+ .then((stats) => {
+ stats.runTime.total = stats.endTime - stats.startTime
+ opts.log.silly(
+ 'verify',
+ 'verification finished for',
+ cache,
+ 'in',
+ `${stats.runTime.total}ms`
+ )
+ return stats
+ })
+}
+
+function markStartTime (cache, opts) {
+ return Promise.resolve({ startTime: new Date() })
+}
+
+function markEndTime (cache, opts) {
+ return Promise.resolve({ endTime: new Date() })
+}
+
+function fixPerms (cache, opts) {
+ opts.log.silly('verify', 'fixing cache permissions')
+ return fixOwner
+ .mkdirfix(cache, cache)
+ .then(() => {
+ // TODO - fix file permissions too
+ return fixOwner.chownr(cache, cache)
+ })
+ .then(() => null)
+}
+
+// Implements a naive mark-and-sweep tracing garbage collector.
+//
+// The algorithm is basically as follows:
+// 1. Read (and filter) all index entries ("pointers")
+// 2. Mark each integrity value as "live"
+// 3. Read entire filesystem tree in `content-vX/` dir
+// 4. If content is live, verify its checksum and delete it if it fails
+// 5. If content is not marked as live, rimraf it.
+//
+function garbageCollect (cache, opts) {
+ opts.log.silly('verify', 'garbage collecting content')
+ const indexStream = index.lsStream(cache)
+ const liveContent = new Set()
+ indexStream.on('data', (entry) => {
+ if (opts.filter && !opts.filter(entry)) {
+ return
+ }
+
+ liveContent.add(entry.integrity.toString())
+ })
+ return new Promise((resolve, reject) => {
+ indexStream.on('end', resolve).on('error', reject)
+ }).then(() => {
+ const contentDir = contentPath.contentDir(cache)
+ return glob(path.join(contentDir, '**'), {
+ follow: false,
+ nodir: true,
+ nosort: true,
+ }).then((files) => {
+ return Promise.resolve({
+ verifiedContent: 0,
+ reclaimedCount: 0,
+ reclaimedSize: 0,
+ badContentCount: 0,
+ keptSize: 0,
+ }).then((stats) =>
+ pMap(
+ files,
+ (f) => {
+ const split = f.split(/[/\\]/)
+ const digest = split.slice(split.length - 3).join('')
+ const algo = split[split.length - 4]
+ const integrity = ssri.fromHex(digest, algo)
+ if (liveContent.has(integrity.toString())) {
+ return verifyContent(f, integrity).then((info) => {
+ if (!info.valid) {
+ stats.reclaimedCount++
+ stats.badContentCount++
+ stats.reclaimedSize += info.size
+ } else {
+ stats.verifiedContent++
+ stats.keptSize += info.size
+ }
+ return stats
+ })
+ } else {
+ // No entries refer to this content. We can delete.
+ stats.reclaimedCount++
+ return stat(f).then((s) => {
+ return rimraf(f).then(() => {
+ stats.reclaimedSize += s.size
+ return stats
+ })
+ })
+ }
+ },
+ { concurrency: opts.concurrency }
+ ).then(() => stats)
+ )
+ })
+ })
+}
+
+function verifyContent (filepath, sri) {
+ return stat(filepath)
+ .then((s) => {
+ const contentInfo = {
+ size: s.size,
+ valid: true,
+ }
+ return ssri
+ .checkStream(new fsm.ReadStream(filepath), sri)
+ .catch((err) => {
+ if (err.code !== 'EINTEGRITY') {
+ throw err
+ }
+
+ return rimraf(filepath).then(() => {
+ contentInfo.valid = false
+ })
+ })
+ .then(() => contentInfo)
+ })
+ .catch((err) => {
+ if (err.code === 'ENOENT') {
+ return { size: 0, valid: false }
+ }
+
+ throw err
+ })
+}
+
+function rebuildIndex (cache, opts) {
+ opts.log.silly('verify', 'rebuilding index')
+ return index.ls(cache).then((entries) => {
+ const stats = {
+ missingContent: 0,
+ rejectedEntries: 0,
+ totalEntries: 0,
+ }
+ const buckets = {}
+ for (const k in entries) {
+ /* istanbul ignore else */
+ if (hasOwnProperty(entries, k)) {
+ const hashed = index.hashKey(k)
+ const entry = entries[k]
+ const excluded = opts.filter && !opts.filter(entry)
+ excluded && stats.rejectedEntries++
+ if (buckets[hashed] && !excluded) {
+ buckets[hashed].push(entry)
+ } else if (buckets[hashed] && excluded) {
+ // skip
+ } else if (excluded) {
+ buckets[hashed] = []
+ buckets[hashed]._path = index.bucketPath(cache, k)
+ } else {
+ buckets[hashed] = [entry]
+ buckets[hashed]._path = index.bucketPath(cache, k)
+ }
+ }
+ }
+ return pMap(
+ Object.keys(buckets),
+ (key) => {
+ return rebuildBucket(cache, buckets[key], stats, opts)
+ },
+ { concurrency: opts.concurrency }
+ ).then(() => stats)
+ })
+}
+
+function rebuildBucket (cache, bucket, stats, opts) {
+ return truncate(bucket._path).then(() => {
+ // This needs to be serialized because cacache explicitly
+ // lets very racy bucket conflicts clobber each other.
+ return bucket.reduce((promise, entry) => {
+ return promise.then(() => {
+ const content = contentPath(cache, entry.integrity)
+ return stat(content)
+ .then(() => {
+ return index
+ .insert(cache, entry.key, entry.integrity, {
+ metadata: entry.metadata,
+ size: entry.size,
+ })
+ .then(() => {
+ stats.totalEntries++
+ })
+ })
+ .catch((err) => {
+ if (err.code === 'ENOENT') {
+ stats.rejectedEntries++
+ stats.missingContent++
+ return
+ }
+ throw err
+ })
+ })
+ }, Promise.resolve())
+ })
+}
+
+function cleanTmp (cache, opts) {
+ opts.log.silly('verify', 'cleaning tmp directory')
+ return rimraf(path.join(cache, 'tmp'))
+}
+
+function writeVerifile (cache, opts) {
+ const verifile = path.join(cache, '_lastverified')
+ opts.log.silly('verify', 'writing verifile to ' + verifile)
+ try {
+ return writeFile(verifile, '' + +new Date())
+ } finally {
+ fixOwner.chownr.sync(cache, verifile)
+ }
+}
+
+module.exports.lastRun = lastRun
+
+function lastRun (cache) {
+ return readFile(path.join(cache, '_lastverified'), 'utf8').then(
+ (data) => new Date(+data)
+ )
+}
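
The verify module above is reached through cacache's public API. A minimal
usage sketch (hedged: the cache path is hypothetical, and the option names
follow this file's own opts usage):

    const cacache = require('cacache')
    // Full pass: garbage-collect unreferenced content, re-check digests,
    // rebuild index buckets, clean tmp, and write _lastverified.
    cacache.verify('/path/to/cache', { concurrency: 20 })
      .then((stats) => console.log('reclaimed', stats.reclaimedSize, 'bytes'))
    // Read back the timestamp written by writeVerifile():
    cacache.verify.lastRun('/path/to/cache')
      .then((when) => console.log('last verified', when.toISOString()))
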
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/LICENSE b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/index.js b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/index.js
new file mode 100644
index 000000000..573b6b85b
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/index.js
@@ -0,0 +1,334 @@
+'use strict'
+
+// A linked list to keep track of recently-used-ness
+const Yallist = require('yallist')
+
+const MAX = Symbol('max')
+const LENGTH = Symbol('length')
+const LENGTH_CALCULATOR = Symbol('lengthCalculator')
+const ALLOW_STALE = Symbol('allowStale')
+const MAX_AGE = Symbol('maxAge')
+const DISPOSE = Symbol('dispose')
+const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet')
+const LRU_LIST = Symbol('lruList')
+const CACHE = Symbol('cache')
+const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet')
+
+const naiveLength = () => 1
+
+// lruList is a yallist where the head is the youngest
+// item, and the tail is the oldest. the list contains the Hit
+// objects as the entries.
+// Each Hit object has a reference to its Yallist.Node. This
+// never changes.
+//
+// cache is a Map (or PseudoMap) that matches the keys to
+// the Yallist.Node object.
+class LRUCache {
+ constructor (options) {
+ if (typeof options === 'number')
+ options = { max: options }
+
+ if (!options)
+ options = {}
+
+ if (options.max && (typeof options.max !== 'number' || options.max < 0))
+ throw new TypeError('max must be a non-negative number')
+ // Kind of weird to have a default max of Infinity, but oh well.
+ const max = this[MAX] = options.max || Infinity
+
+ const lc = options.length || naiveLength
+ this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc
+ this[ALLOW_STALE] = options.stale || false
+ if (options.maxAge && typeof options.maxAge !== 'number')
+ throw new TypeError('maxAge must be a number')
+ this[MAX_AGE] = options.maxAge || 0
+ this[DISPOSE] = options.dispose
+ this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false
+ this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false
+ this.reset()
+ }
+
+ // resize the cache when the max changes.
+ set max (mL) {
+ if (typeof mL !== 'number' || mL < 0)
+ throw new TypeError('max must be a non-negative number')
+
+ this[MAX] = mL || Infinity
+ trim(this)
+ }
+ get max () {
+ return this[MAX]
+ }
+
+ set allowStale (allowStale) {
+ this[ALLOW_STALE] = !!allowStale
+ }
+ get allowStale () {
+ return this[ALLOW_STALE]
+ }
+
+ set maxAge (mA) {
+ if (typeof mA !== 'number')
+ throw new TypeError('maxAge must be a non-negative number')
+
+ this[MAX_AGE] = mA
+ trim(this)
+ }
+ get maxAge () {
+ return this[MAX_AGE]
+ }
+
+ // resize the cache when the lengthCalculator changes.
+ set lengthCalculator (lC) {
+ if (typeof lC !== 'function')
+ lC = naiveLength
+
+ if (lC !== this[LENGTH_CALCULATOR]) {
+ this[LENGTH_CALCULATOR] = lC
+ this[LENGTH] = 0
+ this[LRU_LIST].forEach(hit => {
+ hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key)
+ this[LENGTH] += hit.length
+ })
+ }
+ trim(this)
+ }
+ get lengthCalculator () { return this[LENGTH_CALCULATOR] }
+
+ get length () { return this[LENGTH] }
+ get itemCount () { return this[LRU_LIST].length }
+
+ rforEach (fn, thisp) {
+ thisp = thisp || this
+ for (let walker = this[LRU_LIST].tail; walker !== null;) {
+ const prev = walker.prev
+ forEachStep(this, fn, walker, thisp)
+ walker = prev
+ }
+ }
+
+ forEach (fn, thisp) {
+ thisp = thisp || this
+ for (let walker = this[LRU_LIST].head; walker !== null;) {
+ const next = walker.next
+ forEachStep(this, fn, walker, thisp)
+ walker = next
+ }
+ }
+
+ keys () {
+ return this[LRU_LIST].toArray().map(k => k.key)
+ }
+
+ values () {
+ return this[LRU_LIST].toArray().map(k => k.value)
+ }
+
+ reset () {
+ if (this[DISPOSE] &&
+ this[LRU_LIST] &&
+ this[LRU_LIST].length) {
+ this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value))
+ }
+
+ this[CACHE] = new Map() // hash of items by key
+ this[LRU_LIST] = new Yallist() // list of items in order of use recency
+ this[LENGTH] = 0 // length of items in the list
+ }
+
+ dump () {
+ return this[LRU_LIST].map(hit =>
+ isStale(this, hit) ? false : {
+ k: hit.key,
+ v: hit.value,
+ e: hit.now + (hit.maxAge || 0)
+ }).toArray().filter(h => h)
+ }
+
+ dumpLru () {
+ return this[LRU_LIST]
+ }
+
+ set (key, value, maxAge) {
+ maxAge = maxAge || this[MAX_AGE]
+
+ if (maxAge && typeof maxAge !== 'number')
+ throw new TypeError('maxAge must be a number')
+
+ const now = maxAge ? Date.now() : 0
+ const len = this[LENGTH_CALCULATOR](value, key)
+
+ if (this[CACHE].has(key)) {
+ if (len > this[MAX]) {
+ del(this, this[CACHE].get(key))
+ return false
+ }
+
+ const node = this[CACHE].get(key)
+ const item = node.value
+
+ // dispose of the old one before overwriting
+ // split out into 2 ifs for better coverage tracking
+ if (this[DISPOSE]) {
+ if (!this[NO_DISPOSE_ON_SET])
+ this[DISPOSE](key, item.value)
+ }
+
+ item.now = now
+ item.maxAge = maxAge
+ item.value = value
+ this[LENGTH] += len - item.length
+ item.length = len
+ this.get(key)
+ trim(this)
+ return true
+ }
+
+ const hit = new Entry(key, value, len, now, maxAge)
+
+ // oversized objects fall out of cache automatically.
+ if (hit.length > this[MAX]) {
+ if (this[DISPOSE])
+ this[DISPOSE](key, value)
+
+ return false
+ }
+
+ this[LENGTH] += hit.length
+ this[LRU_LIST].unshift(hit)
+ this[CACHE].set(key, this[LRU_LIST].head)
+ trim(this)
+ return true
+ }
+
+ has (key) {
+ if (!this[CACHE].has(key)) return false
+ const hit = this[CACHE].get(key).value
+ return !isStale(this, hit)
+ }
+
+ get (key) {
+ return get(this, key, true)
+ }
+
+ peek (key) {
+ return get(this, key, false)
+ }
+
+ pop () {
+ const node = this[LRU_LIST].tail
+ if (!node)
+ return null
+
+ del(this, node)
+ return node.value
+ }
+
+ del (key) {
+ del(this, this[CACHE].get(key))
+ }
+
+ load (arr) {
+ // reset the cache
+ this.reset()
+
+ const now = Date.now()
+ // A previous serialized cache has the most recent items first
+ for (let l = arr.length - 1; l >= 0; l--) {
+ const hit = arr[l]
+ const expiresAt = hit.e || 0
+ if (expiresAt === 0)
+        // the item was created without expiration in a non-aged cache
+ this.set(hit.k, hit.v)
+ else {
+ const maxAge = expiresAt - now
+        // don't add already-expired items
+ if (maxAge > 0) {
+ this.set(hit.k, hit.v, maxAge)
+ }
+ }
+ }
+ }
+
+ prune () {
+ this[CACHE].forEach((value, key) => get(this, key, false))
+ }
+}
+
+const get = (self, key, doUse) => {
+ const node = self[CACHE].get(key)
+ if (node) {
+ const hit = node.value
+ if (isStale(self, hit)) {
+ del(self, node)
+ if (!self[ALLOW_STALE])
+ return undefined
+ } else {
+ if (doUse) {
+ if (self[UPDATE_AGE_ON_GET])
+ node.value.now = Date.now()
+ self[LRU_LIST].unshiftNode(node)
+ }
+ }
+ return hit.value
+ }
+}
+
+const isStale = (self, hit) => {
+ if (!hit || (!hit.maxAge && !self[MAX_AGE]))
+ return false
+
+ const diff = Date.now() - hit.now
+ return hit.maxAge ? diff > hit.maxAge
+ : self[MAX_AGE] && (diff > self[MAX_AGE])
+}
+
+const trim = self => {
+ if (self[LENGTH] > self[MAX]) {
+ for (let walker = self[LRU_LIST].tail;
+ self[LENGTH] > self[MAX] && walker !== null;) {
+ // We know that we're about to delete this one, and also
+ // what the next least recently used key will be, so just
+ // go ahead and set it now.
+ const prev = walker.prev
+ del(self, walker)
+ walker = prev
+ }
+ }
+}
+
+const del = (self, node) => {
+ if (node) {
+ const hit = node.value
+ if (self[DISPOSE])
+ self[DISPOSE](hit.key, hit.value)
+
+ self[LENGTH] -= hit.length
+ self[CACHE].delete(hit.key)
+ self[LRU_LIST].removeNode(node)
+ }
+}
+
+class Entry {
+ constructor (key, value, length, now, maxAge) {
+ this.key = key
+ this.value = value
+ this.length = length
+ this.now = now
+ this.maxAge = maxAge || 0
+ }
+}
+
+const forEachStep = (self, fn, node, thisp) => {
+ let hit = node.value
+ if (isStale(self, hit)) {
+ del(self, node)
+ if (!self[ALLOW_STALE])
+ hit = undefined
+ }
+ if (hit)
+ fn.call(thisp, hit.value, hit.key, self)
+}
+
+module.exports = LRUCache
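
This vendored file is the classic lru-cache v6 interface. A brief hedged
usage sketch of the API defined above:

    const LRUCache = require('lru-cache')
    const cache = new LRUCache({
      max: 100,              // total length allowed (naiveLength counts 1 per entry)
      maxAge: 1000 * 60,     // entries go stale after one minute
      updateAgeOnGet: true,  // reads refresh an entry's age
      dispose: (key, value) => { /* release resources for evicted entries */ },
    })
    cache.set('a', 1)
    cache.get('a')  // => 1, and marks 'a' most recently used
    cache.has('a')  // => true until stale or evicted
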
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/package.json b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/package.json
new file mode 100644
index 000000000..43b7502c3
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/node_modules/lru-cache/package.json
@@ -0,0 +1,34 @@
+{
+ "name": "lru-cache",
+ "description": "A cache object that deletes the least-recently-used items.",
+ "version": "6.0.0",
+ "author": "Isaac Z. Schlueter <i@izs.me>",
+ "keywords": [
+ "mru",
+ "lru",
+ "cache"
+ ],
+ "scripts": {
+ "test": "tap",
+ "snap": "tap",
+ "preversion": "npm test",
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags"
+ },
+ "main": "index.js",
+ "repository": "git://github.com/isaacs/node-lru-cache.git",
+ "devDependencies": {
+ "benchmark": "^2.1.4",
+ "tap": "^14.10.7"
+ },
+ "license": "ISC",
+ "dependencies": {
+ "yallist": "^4.0.0"
+ },
+ "files": [
+ "index.js"
+ ],
+ "engines": {
+ "node": ">=10"
+ }
+}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/package.json b/node_modules/make-fetch-happen/node_modules/cacache/package.json
new file mode 100644
index 000000000..b9efa92d9
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/package.json
@@ -0,0 +1,88 @@
+{
+ "name": "cacache",
+ "version": "16.0.0",
+ "cache-version": {
+ "content": "2",
+ "index": "5"
+ },
+ "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
+ "main": "lib/index.js",
+ "files": [
+ "bin",
+ "lib"
+ ],
+ "scripts": {
+ "benchmarks": "node test/benchmarks",
+ "preversion": "npm test",
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags",
+ "test": "tap",
+ "snap": "tap",
+ "coverage": "tap",
+ "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
+ "lint": "eslint '**/*.js'",
+ "npmclilint": "npmcli-lint",
+ "lintfix": "npm run lint -- --fix",
+ "postsnap": "npm run lintfix --",
+ "postlint": "npm-template-check",
+ "template-copy": "npm-template-copy --force",
+ "posttest": "npm run lint"
+ },
+ "repository": "https://github.com/npm/cacache",
+ "keywords": [
+ "cache",
+ "caching",
+ "content-addressable",
+ "sri",
+ "sri hash",
+ "subresource integrity",
+ "cache",
+ "storage",
+ "store",
+ "file store",
+ "filesystem",
+ "disk cache",
+ "disk storage"
+ ],
+ "license": "ISC",
+ "dependencies": {
+ "@npmcli/fs": "^1.0.0",
+ "@npmcli/move-file": "^1.1.2",
+ "chownr": "^2.0.0",
+ "fs-minipass": "^2.1.0",
+ "glob": "^7.1.4",
+ "infer-owner": "^1.0.4",
+ "lru-cache": "^6.0.0",
+ "minipass": "^3.1.1",
+ "minipass-collect": "^1.0.2",
+ "minipass-flush": "^1.0.5",
+ "minipass-pipeline": "^1.2.4",
+ "mkdirp": "^1.0.4",
+ "p-map": "^4.0.0",
+ "promise-inflight": "^1.0.1",
+ "rimraf": "^3.0.2",
+ "ssri": "^8.0.1",
+ "tar": "^6.1.11",
+ "unique-filename": "^1.1.1"
+ },
+ "devDependencies": {
+ "@npmcli/template-oss": "^2.9.2",
+ "benchmark": "^2.1.4",
+ "chalk": "^4.0.0",
+ "require-inject": "^1.4.4",
+ "tacks": "^1.3.0",
+ "tap": "^15.0.9"
+ },
+ "tap": {
+ "100": true,
+ "test-regex": "test/[^/]*.js"
+ },
+ "engines": {
+ "node": "^12.13.0 || ^14.15.0 || >=16"
+ },
+ "templateOSS": {
+ "windowsCI": false,
+ "version": "2.9.2"
+ },
+ "author": "GitHub Inc."
+}
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.js b/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.js
deleted file mode 100644
index b9c8be2ff..000000000
--- a/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.js
+++ /dev/null
@@ -1 +0,0 @@
-(()=>{var t={10:t=>{const i="object"==typeof performance&&performance&&"function"==typeof performance.now?performance:Date,s=new Set,e=(t,i)=>{const s=`LRU_CACHE_OPTION_${t}`;l(s)&&o(s,`${t} option`,`options.${i}`,d)},h=(t,i)=>{const s=`LRU_CACHE_METHOD_${t}`;if(l(s)){const{prototype:e}=d,{get:h}=Object.getOwnPropertyDescriptor(e,t);o(s,`${t} method`,`cache.${i}()`,h)}},l=t=>"object"==typeof process&&process&&!(process.noDeprecation||s.has(t)),o=(t,i,e,h)=>{s.add(t),process.emitWarning(`The ${i} is deprecated. Please use ${e} instead.`,"DeprecationWarning",t,h)},a=t=>t&&t===Math.floor(t)&&t>0&&isFinite(t),n=t=>a(t)?t<=Math.pow(2,8)?Uint8Array:t<=Math.pow(2,16)?Uint16Array:t<=Math.pow(2,32)?Uint32Array:t<=Number.MAX_SAFE_INTEGER?r:null:null;class r extends Array{constructor(t){super(t),this.fill(0)}}class p{constructor(t){const i=n(t);this.heap=new i(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}}class d{constructor(t={}){const{max:i,ttl:s,ttlResolution:h=1,ttlAutopurge:l,updateAgeOnGet:o,allowStale:r,dispose:c,disposeAfter:f,noDisposeOnSet:u,noUpdateTTL:z,maxSize:v,sizeCalculation:y}=t,{length:S,maxAge:g,stale:m}=t instanceof d?{}:t;if(!a(i))throw new TypeError("max option must be an integer");const x=n(i);if(!x)throw new Error("invalid max value: "+i);if(this.max=i,this.maxSize=v||0,this.sizeCalculation=y||S,this.sizeCalculation){if(!this.maxSize)throw new TypeError("cannot set sizeCalculation without setting maxSize");if("function"!=typeof this.sizeCalculation)throw new TypeError("sizeCalculating set to non-function")}if(this.keyMap=new Map,this.keyList=new Array(i).fill(null),this.valList=new Array(i).fill(null),this.next=new x(i),this.prev=new x(i),this.head=0,this.tail=0,this.free=new p(i),this.initialFill=1,this.size=0,"function"==typeof c&&(this.dispose=c),"function"==typeof f?(this.disposeAfter=f,this.disposed=[]):(this.disposeAfter=null,this.disposed=null),this.noDisposeOnSet=!!u,this.noUpdateTTL=!!z,this.maxSize){if(!a(this.maxSize))throw new TypeError("maxSize must be a positive integer if specified");this.initializeSizeTracking()}if(this.allowStale=!!r||!!m,this.updateAgeOnGet=!!o,this.ttlResolution=a(h)||0===h?h:1,this.ttlAutopurge=!!l,this.ttl=s||g||0,this.ttl){if(!a(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.initializeTTLTracking()}m&&e("stale","allowStale"),g&&e("maxAge","ttl"),S&&e("length","sizeCalculation")}initializeTTLTracking(){this.ttls=new r(this.max),this.starts=new r(this.max),this.setItemTTL=(t,s)=>{if(this.starts[t]=0!==s?i.now():0,this.ttls[t]=s,0!==s&&this.ttlAutopurge){const i=setTimeout((()=>{this.isStale(t)&&this.delete(this.keyList[t])}),s+1);i.unref&&i.unref()}},this.updateItemAge=t=>{this.starts[t]=0!==this.ttls[t]?i.now():0};let t=0;const s=()=>{const s=i.now();if(this.ttlResolution>0){t=s;const i=setTimeout((()=>t=0),this.ttlResolution);i.unref&&i.unref()}return s};this.isStale=i=>0!==this.ttls[i]&&0!==this.starts[i]&&(t||s())-this.starts[i]>this.ttls[i]}updateItemAge(t){}setItemTTL(t,i){}isStale(t){return!1}initializeSizeTracking(){this.calculatedSize=0,this.sizes=new r(this.max),this.removeItemSize=t=>this.calculatedSize-=this.sizes[t],this.addItemSize=(t,i,s,e,h)=>{const l=e||(h?h(i,s):0);this.sizes[t]=a(l)?l:0;const o=this.maxSize-this.sizes[t];for(;this.calculatedSize>o;)this.evict();this.calculatedSize+=this.sizes[t]},this.delete=t=>{if(0!==this.size){const i=this.keyMap.get(t);void 0!==i&&(this.calculatedSize-=this.sizes[i])}return d.prototype.delete.call(this,t)}}removeItemSize(t){}addItemSize(t,i,s,e,h){}*indexes({allowStale:t=this.allowStale}={}){if(this.size)for(let i=this.tail;!t&&this.isStale(i)||(yield i),i!==this.head;i=this.prev[i]);}*rindexes({allowStale:t=this.allowStale}={}){if(this.size)for(let i=this.head;!t&&this.isStale(i)||(yield i),i!==this.tail;i=this.next[i]);}*entries(){for(const t of this.indexes())yield[this.keyList[t],this.valList[t]]}*keys(){for(const t of this.indexes())yield this.keyList[t]}*values(){for(const t of this.indexes())yield this.valList[t]}[Symbol.iterator](){return this.entries()}find(t,i={}){for(const s of this.indexes())if(t(this.valList[s],this.keyList[s],this))return this.get(this.keyList[s],i)}forEach(t,i=this){for(const s of this.indexes())t.call(i,this.valList[s],this.keyList[s],this)}rforEach(t,i=this){for(const s of this.rindexes())t.call(i,this.valList[s],this.keyList[s],this)}get prune(){return h("prune","purgeStale"),this.purgeStale}purgeStale(){let t=!1;for(const i of this.rindexes({allowStale:!0}))this.isStale(i)&&(this.delete(this.keyList[i]),t=!0);return t}dump(){const t=[];for(const i of this.indexes()){const s=this.keyList[i],e={value:this.valList[i]};this.ttls&&(e.ttl=this.ttls[i]),this.sizes&&(e.size=this.sizes[i]),t.unshift([s,e])}return t}load(t){this.clear();for(const[i,s]of t)this.set(i,s.value,s)}dispose(t,i,s){}set(t,i,{ttl:s=this.ttl,noDisposeOnSet:e=this.noDisposeOnSet,size:h=0,sizeCalculation:l=this.sizeCalculation,noUpdateTTL:o=this.noUpdateTTL}={}){let a=0===this.size?void 0:this.keyMap.get(t);if(void 0===a)a=this.newIndex(),this.keyList[a]=t,this.valList[a]=i,this.keyMap.set(t,a),this.next[this.tail]=a,this.prev[a]=this.tail,this.tail=a,this.size++,this.addItemSize(a,i,t,h,l),o=!1;else{const s=this.valList[a];i!==s&&(e||(this.dispose(s,t,"set"),this.disposeAfter&&this.disposed.push([s,t,"set"])),this.removeItemSize(a),this.valList[a]=i,this.addItemSize(a,i,t,h,l)),this.moveToTail(a)}if(0===s||0!==this.ttl||this.ttls||this.initializeTTLTracking(),o||this.setItemTTL(a,s),this.disposeAfter)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return this}newIndex(){return 0===this.size?this.tail:this.size===this.max?this.evict():0!==this.free.length?this.free.pop():this.initialFill++}pop(){if(this.size){const t=this.valList[this.head];return this.evict(),t}}evict(){const t=this.head,i=this.keyList[t],s=this.valList[t];return this.dispose(s,i,"evict"),this.disposeAfter&&this.disposed.push([s,i,"evict"]),this.removeItemSize(t),this.head=this.next[t],this.keyMap.delete(i),this.size--,t}has(t){return this.keyMap.has(t)&&!this.isStale(this.keyMap.get(t))}peek(t,{allowStale:i=this.allowStale}={}){const s=this.keyMap.get(t);if(void 0!==s&&(i||!this.isStale(s)))return this.valList[s]}get(t,{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet}={}){const e=this.keyMap.get(t);if(void 0!==e){if(this.isStale(e)){const s=i?this.valList[e]:void 0;return this.delete(t),s}return this.moveToTail(e),s&&this.updateItemAge(e),this.valList[e]}}connect(t,i){this.prev[i]=t,this.next[t]=i}moveToTail(t){t!==this.tail&&(t===this.head?this.head=this.next[t]:this.connect(this.prev[t],this.next[t]),this.connect(this.tail,t),this.tail=t)}get del(){return h("del","delete"),this.delete}delete(t){let i=!1;if(0!==this.size){const s=this.keyMap.get(t);void 0!==s&&(i=!0,1===this.size?this.clear():(this.removeItemSize(s),this.dispose(this.valList[s],t,"delete"),this.disposeAfter&&this.disposed.push([this.valList[s],t,"delete"]),this.keyMap.delete(t),this.keyList[s]=null,this.valList[s]=null,s===this.tail?this.tail=this.prev[s]:s===this.head?this.head=this.next[s]:(this.next[this.prev[s]]=this.next[s],this.prev[this.next[s]]=this.prev[s]),this.size--,this.free.push(s)))}if(this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return i}clear(){if(this.dispose!==d.prototype.dispose)for(const t of this.rindexes({allowStale:!0}))this.dispose(this.valList[t],this.keyList[t],"delete");if(this.disposeAfter)for(const t of this.rindexes({allowStale:!0}))this.disposed.push([this.valList[t],this.keyList[t],"delete"]);if(this.keyMap.clear(),this.valList.fill(null),this.keyList.fill(null),this.ttls&&(this.ttls.fill(0),this.starts.fill(0)),this.sizes&&this.sizes.fill(0),this.head=0,this.tail=0,this.initialFill=1,this.free.length=0,this.calculatedSize=0,this.size=0,this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift())}get reset(){return h("reset","clear"),this.clear}get length(){return((t,i)=>{const s="LRU_CACHE_PROPERTY_length";if(l(s)){const{prototype:i}=d,{get:e}=Object.getOwnPropertyDescriptor(i,t);o(s,"length property","cache.size",e)}})("length"),this.size}}t.exports=d}},i={},s=function s(e){var h=i[e];if(void 0!==h)return h.exports;var l=i[e]={exports:{}};return t[e](l,l.exports,s),l.exports}(10);module.exports=s})();
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.mjs b/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.mjs
deleted file mode 100644
index 3a4d674c0..000000000
--- a/node_modules/make-fetch-happen/node_modules/lru-cache/bundle/main.mjs
+++ /dev/null
@@ -1 +0,0 @@
-var t={10:t=>{const i="object"==typeof performance&&performance&&"function"==typeof performance.now?performance:Date,s=new Set,e=(t,i)=>{const s=`LRU_CACHE_OPTION_${t}`;l(s)&&o(s,`${t} option`,`options.${i}`,d)},h=(t,i)=>{const s=`LRU_CACHE_METHOD_${t}`;if(l(s)){const{prototype:e}=d,{get:h}=Object.getOwnPropertyDescriptor(e,t);o(s,`${t} method`,`cache.${i}()`,h)}},l=t=>!(process.noDeprecation||s.has(t)),o=(t,i,e,h)=>{s.add(t),process.emitWarning(`The ${i} is deprecated. Please use ${e} instead.`,"DeprecationWarning",t,h)},n=t=>t&&t===Math.floor(t)&&t>0&&isFinite(t),a=t=>n(t)?t<=Math.pow(2,8)?Uint8Array:t<=Math.pow(2,16)?Uint16Array:t<=Math.pow(2,32)?Uint32Array:t<=Number.MAX_SAFE_INTEGER?r:null:null;class r extends Array{constructor(t){super(t),this.fill(0)}}class p{constructor(t){const i=a(t);this.heap=new i(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}}class d{constructor(t={}){const{max:i,ttl:s,ttlResolution:h=1,ttlAutopurge:l,updateAgeOnGet:o,allowStale:r,dispose:c,disposeAfter:f,noDisposeOnSet:u,noUpdateTTL:z,maxSize:v,sizeCalculation:y}=t,{length:g,maxAge:m,stale:S}=t instanceof d?{}:t;if(!n(i))throw new TypeError("max option must be an integer");const L=a(i);if(!L)throw new Error("invalid max value: "+i);if(this.max=i,this.maxSize=v||0,this.sizeCalculation=y||g,this.sizeCalculation){if(!this.maxSize)throw new TypeError("cannot set sizeCalculation without setting maxSize");if("function"!=typeof this.sizeCalculation)throw new TypeError("sizeCalculating set to non-function")}if(this.keyMap=new Map,this.keyList=new Array(i).fill(null),this.valList=new Array(i).fill(null),this.next=new L(i),this.prev=new L(i),this.head=0,this.tail=0,this.free=new p(i),this.initialFill=1,this.size=0,"function"==typeof c&&(this.dispose=c),"function"==typeof f?(this.disposeAfter=f,this.disposed=[]):(this.disposeAfter=null,this.disposed=null),this.noDisposeOnSet=!!u,this.noUpdateTTL=!!z,this.maxSize){if(!n(this.maxSize))throw new TypeError("maxSize must be a positive integer if specified");this.initializeSizeTracking()}if(this.allowStale=!!r||!!S,this.updateAgeOnGet=!!o,this.ttlResolution=n(h)||0===h?h:1,this.ttlAutopurge=!!l,this.ttl=s||m||0,this.ttl){if(!n(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.initializeTTLTracking()}S&&e("stale","allowStale"),m&&e("maxAge","ttl"),g&&e("length","sizeCalculation")}initializeTTLTracking(){this.ttls=new r(this.max),this.starts=new r(this.max),this.setItemTTL=(t,s)=>{if(this.starts[t]=0!==s?i.now():0,this.ttls[t]=s,0!==s&&this.ttlAutopurge){const i=setTimeout((()=>{this.isStale(t)&&this.delete(this.keyList[t])}),s+1);i.unref&&i.unref()}},this.updateItemAge=t=>{this.starts[t]=0!==this.ttls[t]?i.now():0};let t=0;const s=()=>{const s=i.now();if(this.ttlResolution>0){t=s;const i=setTimeout((()=>t=0),this.ttlResolution);i.unref&&i.unref()}return s};this.isStale=i=>0!==this.ttls[i]&&0!==this.starts[i]&&(t||s())-this.starts[i]>this.ttls[i]}updateItemAge(t){}setItemTTL(t,i){}isStale(t){return!1}initializeSizeTracking(){this.calculatedSize=0,this.sizes=new r(this.max),this.removeItemSize=t=>this.calculatedSize-=this.sizes[t],this.addItemSize=(t,i,s,e,h)=>{const l=e||(h?h(i,s):0);this.sizes[t]=n(l)?l:0;const o=this.maxSize-this.sizes[t];for(;this.calculatedSize>o;)this.evict();this.calculatedSize+=this.sizes[t]},this.delete=t=>{if(0!==this.size){const i=this.keyMap.get(t);void 0!==i&&(this.calculatedSize-=this.sizes[i])}return d.prototype.delete.call(this,t)}}removeItemSize(t){}addItemSize(t,i,s,e,h){}*indexes(){if(this.size)for(let t=this.tail;this.isStale(t)||(yield t),t!==this.head;t=this.prev[t]);}*rindexes(){if(this.size)for(let t=this.head;this.isStale(t)||(yield t),t!==this.tail;t=this.next[t]);}*entries(){for(const t of this.indexes())yield[this.keyList[t],this.valList[t]]}*keys(){for(const t of this.indexes())yield this.keyList[t]}*values(){for(const t of this.indexes())yield this.valList[t]}[Symbol.iterator](){return this.entries()}find(t,i={}){for(const s of this.indexes())if(t(this.valList[s],this.keyList[s],this))return this.get(this.keyList[s],i)}forEach(t,i=this){for(const s of this.indexes())t.call(i,this.valList[s],this.keyList[s],this)}rforEach(t,i=this){for(const s of this.rindexes())t.call(i,this.valList[s],this.keyList[s],this)}get prune(){return h("prune","purgeStale"),this.purgeStale}purgeStale(){let t=!1;if(this.size)for(let i=this.head;;i=this.next[i]){const s=i===this.tail;if(this.isStale(i)&&(this.delete(this.keyList[i]),t=!0),s)break}return t}dump(){const t=[];for(const i of this.indexes()){const s=this.keyList[i],e={value:this.valList[i]};this.ttls&&(e.ttl=this.ttls[i]),this.sizes&&(e.size=this.sizes[i]),t.unshift([s,e])}return t}load(t){this.clear();for(const[i,s]of t)this.set(i,s.value,s)}dispose(t,i,s){}set(t,i,{ttl:s=this.ttl,noDisposeOnSet:e=this.noDisposeOnSet,size:h=0,sizeCalculation:l=this.sizeCalculation,noUpdateTTL:o=this.noUpdateTTL}={}){let n=0===this.size?void 0:this.keyMap.get(t);if(void 0===n)n=this.newIndex(),this.keyList[n]=t,this.valList[n]=i,this.keyMap.set(t,n),this.next[this.tail]=n,this.prev[n]=this.tail,this.tail=n,this.size++,this.addItemSize(n,i,t,h,l),o=!1;else{const s=this.valList[n];i!==s&&(e||(this.dispose(s,t,"set"),this.disposeAfter&&this.disposed.push([s,t,"set"])),this.removeItemSize(n),this.valList[n]=i,this.addItemSize(n,i,t,h,l)),this.moveToTail(n)}if(0===s||0!==this.ttl||this.ttls||this.initializeTTLTracking(),o||this.setItemTTL(n,s),this.disposeAfter)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return this}newIndex(){return 0===this.size?this.tail:this.size===this.max?this.evict():0!==this.free.length?this.free.pop():this.initialFill++}pop(){if(this.size){const t=this.valList[this.head];return this.evict(),t}}evict(){const t=this.head,i=this.keyList[t],s=this.valList[t];return this.dispose(s,i,"evict"),this.disposeAfter&&this.disposed.push([s,i,"evict"]),this.removeItemSize(t),this.head=this.next[t],this.keyMap.delete(i),this.size--,t}has(t){return this.keyMap.has(t)&&!this.isStale(this.keyMap.get(t))}peek(t,{allowStale:i=this.allowStale}={}){const s=this.keyMap.get(t);if(void 0!==s&&(i||!this.isStale(s)))return this.valList[s]}get(t,{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet}={}){const e=this.keyMap.get(t);if(void 0!==e){if(this.isStale(e)){const s=i?this.valList[e]:void 0;return this.delete(t),s}return this.moveToTail(e),s&&this.updateItemAge(e),this.valList[e]}}connect(t,i){this.prev[i]=t,this.next[t]=i}moveToTail(t){t!==this.tail&&(t===this.head?this.head=this.next[t]:this.connect(this.prev[t],this.next[t]),this.connect(this.tail,t),this.tail=t)}get del(){return h("del","delete"),this.delete}delete(t){let i=!1;if(0!==this.size){const s=this.keyMap.get(t);void 0!==s&&(i=!0,1===this.size?this.clear():(this.removeItemSize(s),this.dispose(this.valList[s],t,"delete"),this.disposeAfter&&this.disposed.push([this.valList[s],t,"delete"]),this.keyMap.delete(t),this.keyList[s]=null,this.valList[s]=null,s===this.tail?this.tail=this.prev[s]:s===this.head?this.head=this.next[s]:(this.next[this.prev[s]]=this.next[s],this.prev[this.next[s]]=this.prev[s]),this.size--,this.free.push(s)))}if(this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift());return i}clear(){if(this.dispose!==d.prototype.dispose)for(const t of this.rindexes())this.dispose(this.valList[t],this.keyList[t],"delete");if(this.disposeAfter)for(const t of this.rindexes())this.disposed.push([this.valList[t],this.keyList[t],"delete"]);if(this.keyMap.clear(),this.valList.fill(null),this.keyList.fill(null),this.ttls&&(this.ttls.fill(0),this.starts.fill(0)),this.sizes&&this.sizes.fill(0),this.head=0,this.tail=0,this.initialFill=1,this.free.length=0,this.calculatedSize=0,this.size=0,this.disposed)for(;this.disposed.length;)this.disposeAfter(...this.disposed.shift())}get reset(){return h("reset","clear"),this.clear}get length(){return((t,i)=>{const s="LRU_CACHE_PROPERTY_length";if(l(s)){const{prototype:i}=d,{get:e}=Object.getOwnPropertyDescriptor(i,t);o(s,"length property","cache.size",e)}})("length"),this.size}}t.exports=d}},i={};!function s(e){var h=i[e];if(void 0!==h)return h.exports;var l=i[e]={exports:{}};return t[e](l,l.exports,s),l.exports}(10);
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/index.js b/node_modules/make-fetch-happen/node_modules/lru-cache/index.js
index 77adaa645..e37f51616 100644
--- a/node_modules/make-fetch-happen/node_modules/lru-cache/index.js
+++ b/node_modules/make-fetch-happen/node_modules/lru-cache/index.js
@@ -243,46 +243,78 @@ class LRUCache {
*indexes ({ allowStale = this.allowStale } = {}) {
if (this.size) {
- for (let i = this.tail; true; i = this.prev[i]) {
+ for (let i = this.tail, j; true; ) {
+ if (!this.isValidIndex(i)) {
+ break
+ }
+ j = i === this.head
if (allowStale || !this.isStale(i)) {
yield i
}
if (i === this.head) {
break
+ } else {
+ i = this.prev[i]
}
}
}
}
+
*rindexes ({ allowStale = this.allowStale } = {}) {
if (this.size) {
- for (let i = this.head; true; i = this.next[i]) {
+ for (let i = this.head, j; true; ) {
+ if (!this.isValidIndex(i)) {
+ break
+ }
if (allowStale || !this.isStale(i)) {
yield i
}
+ // either the tail now, or WAS the tail, and deleted
if (i === this.tail) {
break
+ } else {
+ i = this.next[i]
}
}
}
}
+ isValidIndex (index) {
+ return this.keyMap.get(this.keyList[index]) === index
+ }
+
*entries () {
for (const i of this.indexes()) {
yield [this.keyList[i], this.valList[i]]
}
}
+ *rentries () {
+ for (const i of this.rindexes()) {
+ yield [this.keyList[i], this.valList[i]]
+ }
+ }
*keys () {
for (const i of this.indexes()) {
yield this.keyList[i]
}
}
+ *rkeys () {
+ for (const i of this.rindexes()) {
+ yield this.keyList[i]
+ }
+ }
*values () {
for (const i of this.indexes()) {
yield this.valList[i]
}
}
+ *rvalues () {
+ for (const i of this.rindexes()) {
+ yield this.valList[i]
+ }
+ }
[Symbol.iterator] () {
return this.entries()
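
The isValidIndex() guard is the substance of the lru-cache 7.5.1 bump: the
index-based iterators previously assumed every slot they visited was still
live, so deleting entries mid-iteration could walk freed or recycled slots.
With the guard, iteration stops as soon as it reaches an invalidated index,
and reverse iterators (rentries, rkeys, rvalues) are added alongside. A
hedged sketch of the case it protects:

    const LRUCache = require('lru-cache')
    const c = new LRUCache({ max: 5 })
    c.set('a', 1); c.set('b', 2); c.set('c', 3)
    for (const [key] of c.entries()) {
      // deleting while iterating is now safe; the walk ends cleanly instead
      // of yielding stale or recycled slots
      if (key === 'b') c.delete('b')
    }
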
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/package.json b/node_modules/make-fetch-happen/node_modules/lru-cache/package.json
index 313bfcbbf..a62f74c2b 100644
--- a/node_modules/make-fetch-happen/node_modules/lru-cache/package.json
+++ b/node_modules/make-fetch-happen/node_modules/lru-cache/package.json
@@ -1,7 +1,7 @@
{
"name": "lru-cache",
"description": "A cache object that deletes the least-recently-used items.",
- "version": "7.4.2",
+ "version": "7.5.1",
"author": "Isaac Z. Schlueter <i@izs.me>",
"keywords": [
"mru",
@@ -9,9 +9,7 @@
"cache"
],
"scripts": {
- "prepare": "webpack-cli -o bundle ./index.js --node-env production",
- "build": "npm run prepare",
- "presize": "npm run prepare",
+ "build": "",
"test": "tap",
"snap": "tap",
"size": "size-limit",
@@ -20,23 +18,16 @@
"prepublishOnly": "git push origin --follow-tags"
},
"main": "index.js",
- "browser": "./bundle/main.js",
- "exports": {
- ".": "./index.js",
- "./browser": "./bundle/main.js"
- },
"repository": "git://github.com/isaacs/node-lru-cache.git",
"devDependencies": {
"@size-limit/preset-small-lib": "^7.0.8",
"benchmark": "^2.1.4",
"size-limit": "^7.0.8",
- "tap": "^15.1.6",
- "webpack-cli": "^4.9.2"
+ "tap": "^15.1.6"
},
"license": "ISC",
"files": [
- "index.js",
- "bundle"
+ "index.js"
],
"engines": {
"node": ">=12"
@@ -46,7 +37,7 @@
},
"size-limit": [
{
- "path": "./bundle/main.js"
+ "path": "./index.js"
}
]
}
diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json
index c8c5842b3..e52131b8a 100644
--- a/node_modules/make-fetch-happen/package.json
+++ b/node_modules/make-fetch-happen/package.json
@@ -1,6 +1,6 @@
{
"name": "make-fetch-happen",
- "version": "10.0.5",
+ "version": "10.0.6",
"description": "Opinionated, caching, retrying fetch client",
"main": "lib/index.js",
"files": [
@@ -34,15 +34,15 @@
"license": "ISC",
"dependencies": {
"agentkeepalive": "^4.2.1",
- "cacache": "^15.3.0",
+ "cacache": "^16.0.0",
"http-cache-semantics": "^4.1.0",
"http-proxy-agent": "^5.0.0",
"https-proxy-agent": "^5.0.0",
"is-lambda": "^1.0.1",
- "lru-cache": "^7.4.1",
+ "lru-cache": "^7.5.1",
"minipass": "^3.1.6",
"minipass-collect": "^1.0.2",
- "minipass-fetch": "^2.0.2",
+ "minipass-fetch": "^2.0.3",
"minipass-flush": "^1.0.5",
"minipass-pipeline": "^1.2.4",
"negotiator": "^0.6.3",
@@ -52,7 +52,7 @@
},
"devDependencies": {
"@npmcli/template-oss": "^2.9.2",
- "eslint": "^8.10.0",
+ "eslint": "^8.11.0",
"mkdirp": "^1.0.4",
"nock": "^13.2.4",
"rimraf": "^3.0.2",
diff --git a/node_modules/minipass-fetch/lib/index.js b/node_modules/minipass-fetch/lib/index.js
index 473630e1a..22257a417 100644
--- a/node_modules/minipass-fetch/lib/index.js
+++ b/node_modules/minipass-fetch/lib/index.js
@@ -204,6 +204,14 @@ const fetch = async (url, opts) => {
timeout: request.timeout,
}
+ // if the redirect is to a new hostname, strip the authorization and cookie headers
+ const parsedOriginal = new URL(request.url)
+ const parsedRedirect = new URL(locationURL)
+ if (parsedOriginal.hostname !== parsedRedirect.hostname) {
+ requestOpts.headers.delete('authorization')
+ requestOpts.headers.delete('cookie')
+ }
+
// HTTP-redirect fetch step 11
if (res.statusCode === 303 || (
(res.statusCode === 301 || res.statusCode === 302) &&
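
This hunk is the behavioral fix in minipass-fetch 2.0.3: authorization and
cookie headers no longer follow redirects to a different hostname. A hedged
illustration (both hostnames hypothetical):

    const fetch = require('minipass-fetch')
    // If registry.example.com redirects to cdn.example.net, the follow-up
    // request is sent without the two headers below; a same-host redirect
    // would keep them.
    fetch('https://registry.example.com/pkg.tgz', {
      headers: { authorization: 'Bearer abc123', cookie: 'session=xyz' },
    })
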
diff --git a/node_modules/minipass-fetch/package.json b/node_modules/minipass-fetch/package.json
index 47e32dad6..68e1ce134 100644
--- a/node_modules/minipass-fetch/package.json
+++ b/node_modules/minipass-fetch/package.json
@@ -1,6 +1,6 @@
{
"name": "minipass-fetch",
- "version": "2.0.2",
+ "version": "2.0.3",
"description": "An implementation of window.fetch in Node.js using Minipass streams",
"license": "MIT",
"main": "lib/index.js",
@@ -22,11 +22,12 @@
"check-coverage": true
},
"devDependencies": {
- "@npmcli/template-oss": "^2.8.1",
+ "@npmcli/template-oss": "^2.9.2",
"@ungap/url-search-params": "^0.2.2",
"abort-controller": "^3.0.0",
"abortcontroller-polyfill": "~1.7.3",
"form-data": "^4.0.0",
+ "nock": "^13.2.4",
"parted": "^0.1.1",
"string-to-arraybuffer": "^1.0.2",
"tap": "^15.1.6"
@@ -58,6 +59,6 @@
},
"author": "GitHub Inc.",
"templateOSS": {
- "version": "2.8.1"
+ "version": "2.9.2"
}
}