Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/npm/cli.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGar <gar+gh@danger.computer>2021-08-26 21:20:59 +0300
committerGar <gar+gh@danger.computer>2021-08-26 21:20:59 +0300
commite63a942c685233fa546788981ed9c144220d50e1 (patch)
tree4115e9ae913ad1b688c35711418a528184c9b337
parent7af36bb9f8e5c9facaa8deb114b76368841fbc66 (diff)
cacache@15.3.0
* feat: introduce @npmcli/fs for tmp dir methods
-rw-r--r--node_modules/@gar/promisify/index.js36
-rw-r--r--node_modules/@gar/promisify/package.json32
-rw-r--r--node_modules/@npmcli/fs/LICENSE.md18
-rw-r--r--node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js17
-rw-r--r--node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js120
-rw-r--r--node_modules/@npmcli/fs/lib/common/get-options.js20
-rw-r--r--node_modules/@npmcli/fs/lib/common/node.js9
-rw-r--r--node_modules/@npmcli/fs/lib/common/owner.js92
-rw-r--r--node_modules/@npmcli/fs/lib/copy-file.js22
-rw-r--r--node_modules/@npmcli/fs/lib/fs.js8
-rw-r--r--node_modules/@npmcli/fs/lib/index.js9
-rw-r--r--node_modules/@npmcli/fs/lib/mkdir/index.js32
-rw-r--r--node_modules/@npmcli/fs/lib/mkdir/polyfill.js81
-rw-r--r--node_modules/@npmcli/fs/lib/mkdtemp.js28
-rw-r--r--node_modules/@npmcli/fs/lib/rm/index.js22
-rw-r--r--node_modules/@npmcli/fs/lib/rm/polyfill.js238
-rw-r--r--node_modules/@npmcli/fs/lib/with-temp-dir.js39
-rw-r--r--node_modules/@npmcli/fs/lib/write-file.js19
-rw-r--r--node_modules/@npmcli/fs/package.json36
-rw-r--r--node_modules/cacache/get.js240
-rw-r--r--node_modules/cacache/lib/util/tmp.js18
-rw-r--r--node_modules/cacache/package.json3
-rw-r--r--package-lock.json46
-rw-r--r--package.json2
24 files changed, 1041 insertions, 146 deletions
diff --git a/node_modules/@gar/promisify/index.js b/node_modules/@gar/promisify/index.js
new file mode 100644
index 000000000..d0be95f6f
--- /dev/null
+++ b/node_modules/@gar/promisify/index.js
@@ -0,0 +1,36 @@
+'use strict'
+
+const { promisify } = require('util')
+
+const handler = {
+ get: function (target, prop, receiver) {
+ if (typeof target[prop] !== 'function') {
+ return target[prop]
+ }
+ if (target[prop][promisify.custom]) {
+ return function () {
+ return Reflect.get(target, prop, receiver)[promisify.custom].apply(target, arguments)
+ }
+ }
+ return function () {
+ return new Promise((resolve, reject) => {
+ Reflect.get(target, prop, receiver).apply(target, [...arguments, function (err, result) {
+ if (err) {
+ return reject(err)
+ }
+ resolve(result)
+ }])
+ })
+ }
+ }
+}
+
+module.exports = function (thingToPromisify) {
+ if (typeof thingToPromisify === 'function') {
+ return promisify(thingToPromisify)
+ }
+ if (typeof thingToPromisify === 'object') {
+ return new Proxy(thingToPromisify, handler)
+ }
+ throw new TypeError('Can only promisify functions or objects')
+}
diff --git a/node_modules/@gar/promisify/package.json b/node_modules/@gar/promisify/package.json
new file mode 100644
index 000000000..b5140876c
--- /dev/null
+++ b/node_modules/@gar/promisify/package.json
@@ -0,0 +1,32 @@
+{
+ "name": "@gar/promisify",
+ "version": "1.1.2",
+ "description": "Promisify an entire class or object",
+ "main": "index.js",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/wraithgar/gar-promisify.git"
+ },
+ "scripts": {
+ "lint": "standard",
+ "lint:fix": "standard --fix",
+ "test": "lab -a @hapi/code -t 100",
+ "posttest": "npm run lint"
+ },
+ "files": [
+ "index.js"
+ ],
+ "keywords": [
+ "promisify",
+ "all",
+ "class",
+ "object"
+ ],
+ "author": "Gar <gar+npm@danger.computer>",
+ "license": "MIT",
+ "devDependencies": {
+ "@hapi/code": "^8.0.1",
+ "@hapi/lab": "^24.1.0",
+ "standard": "^16.0.3"
+ }
+}
diff --git a/node_modules/@npmcli/fs/LICENSE.md b/node_modules/@npmcli/fs/LICENSE.md
new file mode 100644
index 000000000..845be76f6
--- /dev/null
+++ b/node_modules/@npmcli/fs/LICENSE.md
@@ -0,0 +1,18 @@
+ISC License
+
+Copyright npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this
+software for any purpose with or without fee is hereby
+granted, provided that the above copyright notice and this
+permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
+EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js b/node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js
new file mode 100644
index 000000000..7755d1c10
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js
@@ -0,0 +1,17 @@
+const url = require('url')
+
+const node = require('../node.js')
+const polyfill = require('./polyfill.js')
+
+const useNative = node.satisfies('>=10.12.0')
+
+const fileURLToPath = (path) => {
+ // the polyfill is tested separately from this module, no need to hack
+ // process.version to try to trigger it just for coverage
+ // istanbul ignore next
+ return useNative
+ ? url.fileURLToPath(path)
+ : polyfill(path)
+}
+
+module.exports = fileURLToPath
diff --git a/node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js b/node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js
new file mode 100644
index 000000000..794d9bba4
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js
@@ -0,0 +1,120 @@
+const { URL, domainToUnicode } = require('url')
+
+const CHAR_LOWERCASE_A = 97
+const CHAR_LOWERCASE_Z = 122
+
+const isWindows = process.platform === 'win32'
+
+class ERR_INVALID_FILE_URL_HOST extends TypeError {
+ constructor (platform) {
+ super(`File URL host must be "localhost" or empty on ${platform}`)
+ this.code = 'ERR_INVALID_FILE_URL_HOST'
+ }
+
+ toString () {
+ return `${this.name} [${this.code}]: ${this.message}`
+ }
+}
+
+class ERR_INVALID_FILE_URL_PATH extends TypeError {
+ constructor (msg) {
+ super(`File URL path ${msg}`)
+ this.code = 'ERR_INVALID_FILE_URL_PATH'
+ }
+
+ toString () {
+ return `${this.name} [${this.code}]: ${this.message}`
+ }
+}
+
+class ERR_INVALID_ARG_TYPE extends TypeError {
+ constructor (name, actual) {
+ super(`The "${name}" argument must be one of type string or an instance of URL. Received type ${typeof actual} ${actual}`)
+ this.code = 'ERR_INVALID_ARG_TYPE'
+ }
+
+ toString () {
+ return `${this.name} [${this.code}]: ${this.message}`
+ }
+}
+
+class ERR_INVALID_URL_SCHEME extends TypeError {
+ constructor (expected) {
+ super(`The URL must be of scheme ${expected}`)
+ this.code = 'ERR_INVALID_URL_SCHEME'
+ }
+
+ toString () {
+ return `${this.name} [${this.code}]: ${this.message}`
+ }
+}
+
+const isURLInstance = (input) => {
+ return input != null && input.href && input.origin
+}
+
+const getPathFromURLWin32 = (url) => {
+ const hostname = url.hostname
+ let pathname = url.pathname
+ for (let n = 0; n < pathname.length; n++) {
+ if (pathname[n] === '%') {
+ const third = pathname.codePointAt(n + 2) | 0x20
+ if ((pathname[n + 1] === '2' && third === 102) ||
+ (pathname[n + 1] === '5' && third === 99)) {
+ throw new ERR_INVALID_FILE_URL_PATH('must not include encoded \\ or / characters')
+ }
+ }
+ }
+
+ pathname = pathname.replace(/\//g, '\\')
+ pathname = decodeURIComponent(pathname)
+ if (hostname !== '') {
+ return `\\\\${domainToUnicode(hostname)}${pathname}`
+ }
+
+ const letter = pathname.codePointAt(1) | 0x20
+ const sep = pathname[2]
+ if (letter < CHAR_LOWERCASE_A || letter > CHAR_LOWERCASE_Z ||
+ (sep !== ':')) {
+ throw new ERR_INVALID_FILE_URL_PATH('must be absolute')
+ }
+
+ return pathname.slice(1)
+}
+
+const getPathFromURLPosix = (url) => {
+ if (url.hostname !== '') {
+ throw new ERR_INVALID_FILE_URL_HOST(process.platform)
+ }
+
+ const pathname = url.pathname
+
+ for (let n = 0; n < pathname.length; n++) {
+ if (pathname[n] === '%') {
+ const third = pathname.codePointAt(n + 2) | 0x20
+ if (pathname[n + 1] === '2' && third === 102) {
+ throw new ERR_INVALID_FILE_URL_PATH('must not include encoded / characters')
+ }
+ }
+ }
+
+ return decodeURIComponent(pathname)
+}
+
+const fileURLToPath = (path) => {
+ if (typeof path === 'string') {
+ path = new URL(path)
+ } else if (!isURLInstance(path)) {
+ throw new ERR_INVALID_ARG_TYPE('path', ['string', 'URL'], path)
+ }
+
+ if (path.protocol !== 'file:') {
+ throw new ERR_INVALID_URL_SCHEME('file')
+ }
+
+ return isWindows
+ ? getPathFromURLWin32(path)
+ : getPathFromURLPosix(path)
+}
+
+module.exports = fileURLToPath
diff --git a/node_modules/@npmcli/fs/lib/common/get-options.js b/node_modules/@npmcli/fs/lib/common/get-options.js
new file mode 100644
index 000000000..cb5982f79
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/common/get-options.js
@@ -0,0 +1,20 @@
+// given an input that may or may not be an object, return an object that has
+// a copy of every defined property listed in 'copy'. if the input is not an
+// object, assign it to the property named by 'wrap'
+const getOptions = (input, { copy, wrap }) => {
+ const result = {}
+
+ if (input && typeof input === 'object') {
+ for (const prop of copy) {
+ if (input[prop] !== undefined) {
+ result[prop] = input[prop]
+ }
+ }
+ } else {
+ result[wrap] = input
+ }
+
+ return result
+}
+
+module.exports = getOptions
diff --git a/node_modules/@npmcli/fs/lib/common/node.js b/node_modules/@npmcli/fs/lib/common/node.js
new file mode 100644
index 000000000..4d13bc037
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/common/node.js
@@ -0,0 +1,9 @@
+const semver = require('semver')
+
+const satisfies = (range) => {
+ return semver.satisfies(process.version, range, { includePrerelease: true })
+}
+
+module.exports = {
+ satisfies,
+}
diff --git a/node_modules/@npmcli/fs/lib/common/owner.js b/node_modules/@npmcli/fs/lib/common/owner.js
new file mode 100644
index 000000000..e3468b077
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/common/owner.js
@@ -0,0 +1,92 @@
+const { dirname, resolve } = require('path')
+
+const fileURLToPath = require('./file-url-to-path/index.js')
+const fs = require('../fs.js')
+
+// given a path, find the owner of the nearest parent
+const find = async (path) => {
+ // if we have no getuid, permissions are irrelevant on this platform
+ if (!process.getuid) {
+ return {}
+ }
+
+ // fs methods accept URL objects with a scheme of file: so we need to unwrap
+ // those into an actual path string before we can resolve it
+ const resolved = path != null && path.href && path.origin
+ ? resolve(fileURLToPath(path))
+ : resolve(path)
+
+ let stat
+
+ try {
+ stat = await fs.lstat(resolved)
+ } finally {
+ // if we got a stat, return its contents
+ if (stat) {
+ return { uid: stat.uid, gid: stat.gid }
+ }
+
+ // try the parent directory
+ if (resolved !== dirname(resolved)) {
+ return find(dirname(resolved))
+ }
+
+ // no more parents, never got a stat, just return an empty object
+ return {}
+ }
+}
+
+// given a path, uid, and gid update the ownership of the path if necessary
+const update = async (path, uid, gid) => {
+ // nothing to update, just exit
+ if (uid === undefined && gid === undefined) {
+ return
+ }
+
+ try {
+ // see if the permissions are already the same, if they are we don't
+ // need to do anything, so return early
+ const stat = await fs.stat(path)
+ if (uid === stat.uid && gid === stat.gid) {
+ return
+ }
+ } catch (err) {}
+
+ try {
+ await fs.chown(path, uid, gid)
+ } catch (err) {}
+}
+
+// accepts a `path` and the `owner` property of an options object and normalizes
+// it into an object with numerical `uid` and `gid`
+const validate = async (path, input) => {
+ let uid
+ let gid
+
+ if (typeof input === 'string' || typeof input === 'number') {
+ uid = input
+ gid = input
+ } else if (input && typeof input === 'object') {
+ uid = input.uid
+ gid = input.gid
+ }
+
+ if (uid === 'inherit' || gid === 'inherit') {
+ const owner = await find(path)
+ if (uid === 'inherit') {
+ uid = owner.uid
+ }
+
+ if (gid === 'inherit') {
+ gid = owner.gid
+ }
+ }
+
+ return { uid, gid }
+}
+
+module.exports = {
+ find,
+ update,
+ validate,
+}
diff --git a/node_modules/@npmcli/fs/lib/copy-file.js b/node_modules/@npmcli/fs/lib/copy-file.js
new file mode 100644
index 000000000..d9875aba1
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/copy-file.js
@@ -0,0 +1,22 @@
+const fs = require('./fs.js')
+const getOptions = require('./common/get-options.js')
+const owner = require('./common/owner.js')
+
+const copyFile = async (src, dest, opts) => {
+ const options = getOptions(opts, {
+ copy: ['mode', 'owner'],
+ wrap: 'mode',
+ })
+
+ const { uid, gid } = await owner.validate(dest, options.owner)
+
+ // the node core method as of 16.5.0 does not support the mode being in an
+ // object, so we have to pass the mode value directly
+ const result = await fs.copyFile(src, dest, options.mode)
+
+ await owner.update(dest, uid, gid)
+
+ return result
+}
+
+module.exports = copyFile
diff --git a/node_modules/@npmcli/fs/lib/fs.js b/node_modules/@npmcli/fs/lib/fs.js
new file mode 100644
index 000000000..29e5fb573
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/fs.js
@@ -0,0 +1,8 @@
+const fs = require('fs')
+const promisify = require('@gar/promisify')
+
+// this module returns the core fs module wrapped in a proxy that promisifies
+// method calls within the getter. we keep it in a separate module so that the
+// overridden methods have a consistent way to get to promisified fs methods
+// without creating a circular dependency
+module.exports = promisify(fs)
diff --git a/node_modules/@npmcli/fs/lib/index.js b/node_modules/@npmcli/fs/lib/index.js
new file mode 100644
index 000000000..f669efc1a
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/index.js
@@ -0,0 +1,9 @@
+module.exports = {
+ ...require('./fs.js'),
+ copyFile: require('./copy-file.js'),
+ mkdir: require('./mkdir/index.js'),
+ mkdtemp: require('./mkdtemp.js'),
+ rm: require('./rm/index.js'),
+ withTempDir: require('./with-temp-dir.js'),
+ writeFile: require('./write-file.js'),
+}
diff --git a/node_modules/@npmcli/fs/lib/mkdir/index.js b/node_modules/@npmcli/fs/lib/mkdir/index.js
new file mode 100644
index 000000000..04ff44790
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/mkdir/index.js
@@ -0,0 +1,32 @@
+const fs = require('../fs.js')
+const getOptions = require('../common/get-options.js')
+const node = require('../common/node.js')
+const owner = require('../common/owner.js')
+
+const polyfill = require('./polyfill.js')
+
+// node 10.12.0 added the options parameter, which allows recursive and mode
+// properties to be passed
+const useNative = node.satisfies('>=10.12.0')
+
+// extends mkdir with the ability to specify an owner of the new dir
+const mkdir = async (path, opts) => {
+ const options = getOptions(opts, {
+ copy: ['mode', 'recursive', 'owner'],
+ wrap: 'mode',
+ })
+ const { uid, gid } = await owner.validate(path, options.owner)
+
+ // the polyfill is tested separately from this module, no need to hack
+ // process.version to try to trigger it just for coverage
+ // istanbul ignore next
+ const result = useNative
+ ? await fs.mkdir(path, options)
+ : await polyfill(path, options)
+
+ await owner.update(path, uid, gid)
+
+ return result
+}
+
+module.exports = mkdir
diff --git a/node_modules/@npmcli/fs/lib/mkdir/polyfill.js b/node_modules/@npmcli/fs/lib/mkdir/polyfill.js
new file mode 100644
index 000000000..4f8e6f006
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/mkdir/polyfill.js
@@ -0,0 +1,81 @@
+const { dirname } = require('path')
+
+const fileURLToPath = require('../common/file-url-to-path/index.js')
+const fs = require('../fs.js')
+
+const defaultOptions = {
+ mode: 0o777,
+ recursive: false,
+}
+
+const mkdir = async (path, opts) => {
+ const options = { ...defaultOptions, ...opts }
+
+ // if we're not in recursive mode, just call the real mkdir with the path and
+ // the mode option only
+ if (!options.recursive) {
+ return fs.mkdir(path, options.mode)
+ }
+
+ const makeDirectory = async (dir, mode) => {
+ // we can't use dirname directly since these functions support URL
+ // objects with the file: protocol as the path input, so first we get a
+ // string path, then we can call dirname on that
+ const parent = dir != null && dir.href && dir.origin
+ ? dirname(fileURLToPath(dir))
+ : dirname(dir)
+
+ // if the parent is the dir itself, try to create it. anything but EISDIR
+ // should be rethrown
+ if (parent === dir) {
+ try {
+ await fs.mkdir(dir, opts)
+ } catch (err) {
+ if (err.code !== 'EISDIR') {
+ throw err
+ }
+ }
+ return undefined
+ }
+
+ try {
+ await fs.mkdir(dir, mode)
+ return dir
+ } catch (err) {
+ // ENOENT means the parent wasn't there, so create that
+ if (err.code === 'ENOENT') {
+ const made = await makeDirectory(parent, mode)
+ await makeDirectory(dir, mode)
+ // return the shallowest path we created, i.e. the result of creating
+ // the parent
+ return made
+ }
+
+ // an EEXIST means there's already something there
+ // an EROFS means we have a read-only filesystem and can't create a dir
+ // any other error is fatal and we should give up now
+ if (err.code !== 'EEXIST' && err.code !== 'EROFS') {
+ throw err
+ }
+
+ // stat the directory, if the result is a directory, then we successfully
+ // created this one so return its path. otherwise, we reject with the
+ // original error by ignoring the error in the catch
+ try {
+ const stat = await fs.stat(dir)
+ if (stat.isDirectory()) {
+ // if it already existed, we didn't create anything so return
+ // undefined
+ return undefined
+ }
+ } catch (_) {}
+
+ // if the thing that's there isn't a directory, then just re-throw
+ throw err
+ }
+ }
+
+ return makeDirectory(path, options.mode)
+}
+
+module.exports = mkdir
diff --git a/node_modules/@npmcli/fs/lib/mkdtemp.js b/node_modules/@npmcli/fs/lib/mkdtemp.js
new file mode 100644
index 000000000..b7f078029
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/mkdtemp.js
@@ -0,0 +1,28 @@
+const { dirname, sep } = require('path')
+
+const fs = require('./fs.js')
+const getOptions = require('./common/get-options.js')
+const owner = require('./common/owner.js')
+
+const mkdtemp = async (prefix, opts) => {
+ const options = getOptions(opts, {
+ copy: ['encoding', 'owner'],
+ wrap: 'encoding',
+ })
+
+ // mkdtemp relies on the trailing path separator to indicate if it should
+ // create a directory inside of the prefix. if that's the case then the root
+ // we infer ownership from is the prefix itself, otherwise it's the dirname
+ // /tmp -> /tmpABCDEF, infers from /
+ // /tmp/ -> /tmp/ABCDEF, infers from /tmp
+ const root = prefix.endsWith(sep) ? prefix : dirname(prefix)
+ const { uid, gid } = await owner.validate(root, options.owner)
+
+ const result = await fs.mkdtemp(prefix, options)
+
+ await owner.update(result, uid, gid)
+
+ return result
+}
+
+module.exports = mkdtemp
diff --git a/node_modules/@npmcli/fs/lib/rm/index.js b/node_modules/@npmcli/fs/lib/rm/index.js
new file mode 100644
index 000000000..cb81fbdf8
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/rm/index.js
@@ -0,0 +1,22 @@
+const fs = require('../fs.js')
+const getOptions = require('../common/get-options.js')
+const node = require('../common/node.js')
+const polyfill = require('./polyfill.js')
+
+// node 14.14.0 added fs.rm, which allows both the force and recursive options
+const useNative = node.satisfies('>=14.14.0')
+
+const rm = async (path, opts) => {
+ const options = getOptions(opts, {
+ copy: ['retryDelay', 'maxRetries', 'recursive', 'force'],
+ })
+
+ // the polyfill is tested separately from this module, no need to hack
+ // process.version to try to trigger it just for coverage
+ // istanbul ignore next
+ return useNative
+ ? fs.rm(path, options)
+ : polyfill(path, options)
+}
+
+module.exports = rm
diff --git a/node_modules/@npmcli/fs/lib/rm/polyfill.js b/node_modules/@npmcli/fs/lib/rm/polyfill.js
new file mode 100644
index 000000000..77196b76b
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/rm/polyfill.js
@@ -0,0 +1,238 @@
+// this file is a modified version of the code in node core >=14.14.0
+// which is, in turn, a modified version of the rimraf module on npm
+// node core changes:
+// - Use of the assert module has been replaced with core's error system.
+// - All code related to the glob dependency has been removed.
+// - Bring your own custom fs module is not currently supported.
+// - Some basic code cleanup.
+// changes here:
+// - remove all callback related code
+// - drop sync support
+// - change assertions back to non-internal methods (see options.js)
+// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows
+const errnos = require('os').constants.errno
+const { join } = require('path')
+const fs = require('../fs.js')
+
+// error codes that mean we need to remove contents
+const notEmptyCodes = new Set([
+ 'ENOTEMPTY',
+ 'EEXIST',
+ 'EPERM',
+])
+
+// error codes we can retry later
+const retryCodes = new Set([
+ 'EBUSY',
+ 'EMFILE',
+ 'ENFILE',
+ 'ENOTEMPTY',
+ 'EPERM',
+])
+
+const isWindows = process.platform === 'win32'
+
+const defaultOptions = {
+ retryDelay: 100,
+ maxRetries: 0,
+ recursive: false,
+ force: false,
+}
+
+// this is drastically simplified, but should be roughly equivalent to what
+// node core throws
+class ERR_FS_EISDIR extends Error {
+ constructor (path) {
+ super()
+ this.info = {
+ code: 'EISDIR',
+ message: 'is a directory',
+ path,
+ syscall: 'rm',
+ errno: errnos.EISDIR,
+ }
+ this.name = 'SystemError'
+ this.code = 'ERR_FS_EISDIR'
+ this.errno = errnos.EISDIR
+ this.syscall = 'rm'
+ this.path = path
+ this.message = `Path is a directory: ${this.syscall} returned ${this.info.code} (is a directory) ${path}`
+ }
+
+ toString () {
+ return `${this.name} [${this.code}]: ${this.message}`
+ }
+}
+
+class ENOTDIR extends Error {
+ constructor (path) {
+ super()
+ this.name = 'Error'
+ this.code = 'ENOTDIR'
+ this.errno = errnos.ENOTDIR
+ this.syscall = 'rmdir'
+ this.path = path
+ this.message = `not a directory, ${this.syscall} '${this.path}'`
+ }
+
+ toString () {
+ return `${this.name}: ${this.code}: ${this.message}`
+ }
+}
+
+// force is passed separately here because we respect it for the first entry
+// into rimraf only, any further calls that are spawned as a result (i.e. to
+// delete content within the target) will ignore ENOENT errors
+const rimraf = async (path, options, isTop = false) => {
+ const force = isTop ? options.force : true
+ const stat = await fs.lstat(path)
+ .catch((err) => {
+ // we only ignore ENOENT if we're forcing this call
+ if (err.code === 'ENOENT' && force) {
+ return
+ }
+
+ if (isWindows && err.code === 'EPERM') {
+ return fixEPERM(path, options, err, isTop)
+ }
+
+ throw err
+ })
+
+ // no stat object here means either lstat threw an ENOENT, or lstat threw
+ // an EPERM and the fixEPERM function took care of things. either way, we're
+ // already done, so return early
+ if (!stat) {
+ return
+ }
+
+ if (stat.isDirectory()) {
+ return rmdir(path, options, null, isTop)
+ }
+
+ return fs.unlink(path)
+ .catch((err) => {
+ if (err.code === 'ENOENT' && force) {
+ return
+ }
+
+ if (err.code === 'EISDIR') {
+ return rmdir(path, options, err, isTop)
+ }
+
+ if (err.code === 'EPERM') {
+ // in windows, we handle this through fixEPERM which will also try to
+ // delete things again. everywhere else since deleting the target as a
+ // file didn't work we go ahead and try to delete it as a directory
+ return isWindows
+ ? fixEPERM(path, options, err, isTop)
+ : rmdir(path, options, err, isTop)
+ }
+
+ throw err
+ })
+}
+
+const fixEPERM = async (path, options, originalErr, isTop) => {
+ const force = isTop ? options.force : true
+ const targetMissing = await fs.chmod(path, 0o666)
+ .catch((err) => {
+ if (err.code === 'ENOENT' && force) {
+ return true
+ }
+
+ throw originalErr
+ })
+
+ // got an ENOENT above, return now. no file = no problem
+ if (targetMissing) {
+ return
+ }
+
+ // this function does its own lstat rather than calling rimraf again to avoid
+ // infinite recursion for a repeating EPERM
+ const stat = await fs.lstat(path)
+ .catch((err) => {
+ if (err.code === 'ENOENT' && force) {
+ return
+ }
+
+ throw originalErr
+ })
+
+ if (!stat) {
+ return
+ }
+
+ if (stat.isDirectory()) {
+ return rmdir(path, options, originalErr, isTop)
+ }
+
+ return fs.unlink(path)
+}
+
+const rmdir = async (path, options, originalErr, isTop) => {
+ if (!options.recursive && isTop) {
+ throw originalErr || new ERR_FS_EISDIR(path)
+ }
+ const force = isTop ? options.force : true
+
+ return fs.rmdir(path)
+ .catch(async (err) => {
+ // in Windows, calling rmdir on a file path will fail with ENOENT rather
+ // than ENOTDIR. to determine if that's what happened, we have to do
+ // another lstat on the path. if the path isn't actually gone, we throw
+ // away the ENOENT and replace it with our own ENOTDIR
+ if (isWindows && err.code === 'ENOENT') {
+ const stillExists = await fs.lstat(path).then(() => true, () => false)
+ if (stillExists) {
+ err = new ENOTDIR(path)
+ }
+ }
+
+ // not there, not a problem
+ if (err.code === 'ENOENT' && force) {
+ return
+ }
+
+ // we may not have originalErr if lstat tells us our target is a
+ // directory but that changes before we actually remove it, so
+ // only throw it here if it's set
+ if (originalErr && err.code === 'ENOTDIR') {
+ throw originalErr
+ }
+
+ // the directory isn't empty, remove the contents and try again
+ if (notEmptyCodes.has(err.code)) {
+ const files = await fs.readdir(path)
+ await Promise.all(files.map((file) => {
+ const target = join(path, file)
+ return rimraf(target, options)
+ }))
+ return fs.rmdir(path)
+ }
+
+ throw err
+ })
+}
+
+const rm = async (path, opts) => {
+ const options = { ...defaultOptions, ...opts }
+ let retries = 0
+
+ const errHandler = async (err) => {
+ if (retryCodes.has(err.code) && ++retries < options.maxRetries) {
+ const delay = retries * options.retryDelay
+ await promiseTimeout(delay)
+ return rimraf(path, options, true).catch(errHandler)
+ }
+
+ throw err
+ }
+
+ return rimraf(path, options, true).catch(errHandler)
+}
+
+const promiseTimeout = (ms) => new Promise((r) => setTimeout(r, ms))
+
+module.exports = rm
diff --git a/node_modules/@npmcli/fs/lib/with-temp-dir.js b/node_modules/@npmcli/fs/lib/with-temp-dir.js
new file mode 100644
index 000000000..353d5555d
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/with-temp-dir.js
@@ -0,0 +1,39 @@
+const { join, sep } = require('path')
+
+const getOptions = require('./common/get-options.js')
+const mkdir = require('./mkdir/index.js')
+const mkdtemp = require('./mkdtemp.js')
+const rm = require('./rm/index.js')
+
+// create a temp directory, ensure its permissions match its parent, then call
+// the supplied function passing it the path to the directory. clean up after
+// the function finishes, whether it throws or not
+const withTempDir = async (root, fn, opts) => {
+ const options = getOptions(opts, {
+ copy: ['tmpPrefix'],
+ })
+ // create the directory, and fix its ownership
+ await mkdir(root, { recursive: true, owner: 'inherit' })
+
+ const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || ''), { owner: 'inherit' })
+ let err
+ let result
+
+ try {
+ result = await fn(target)
+ } catch (_err) {
+ err = _err
+ }
+
+ try {
+ await rm(target, { force: true, recursive: true })
+ } catch (err) {}
+
+ if (err) {
+ throw err
+ }
+
+ return result
+}
+
+module.exports = withTempDir
diff --git a/node_modules/@npmcli/fs/lib/write-file.js b/node_modules/@npmcli/fs/lib/write-file.js
new file mode 100644
index 000000000..01de531d9
--- /dev/null
+++ b/node_modules/@npmcli/fs/lib/write-file.js
@@ -0,0 +1,19 @@
+const fs = require('./fs.js')
+const getOptions = require('./common/get-options.js')
+const owner = require('./common/owner.js')
+
+const writeFile = async (file, data, opts) => {
+ const options = getOptions(opts, {
+ copy: ['encoding', 'mode', 'flag', 'signal', 'owner'],
+ wrap: 'encoding',
+ })
+ const { uid, gid } = await owner.validate(file, options.owner)
+
+ const result = await fs.writeFile(file, data, options)
+
+ await owner.update(file, uid, gid)
+
+ return result
+}
+
+module.exports = writeFile
diff --git a/node_modules/@npmcli/fs/package.json b/node_modules/@npmcli/fs/package.json
new file mode 100644
index 000000000..b114b73d2
--- /dev/null
+++ b/node_modules/@npmcli/fs/package.json
@@ -0,0 +1,36 @@
+{
+ "name": "@npmcli/fs",
+ "version": "1.0.0",
+ "description": "filesystem utilities for the npm cli",
+ "main": "lib/index.js",
+ "files": [
+ "lib",
+ "bin"
+ ],
+ "scripts": {
+ "preversion": "npm test",
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags",
+ "snap": "tap",
+ "test": "tap",
+ "npmclilint": "npmcli-lint",
+ "lint": "npm run npmclilint -- \"lib/**/*.*js\" \"test/**/*.*js\"",
+ "lintfix": "npm run lint -- --fix",
+ "posttest": "npm run lint --",
+ "postsnap": "npm run lintfix --"
+ },
+ "keywords": [
+ "npm",
+ "oss"
+ ],
+ "author": "GitHub Inc.",
+ "license": "ISC",
+ "devDependencies": {
+ "@npmcli/lint": "^1.0.1",
+ "tap": "^15.0.9"
+ },
+ "dependencies": {
+ "@gar/promisify": "^1.0.1",
+ "semver": "^7.3.5"
+ }
+}
diff --git a/node_modules/cacache/get.js b/node_modules/cacache/get.js
index fe710bbd6..4e905e7cf 100644
--- a/node_modules/cacache/get.js
+++ b/node_modules/cacache/get.js
@@ -1,119 +1,112 @@
'use strict'
-const util = require('util')
+const Collect = require('minipass-collect')
+const Minipass = require('minipass')
+const Pipeline = require('minipass-pipeline')
const fs = require('fs')
+const util = require('util')
+
const index = require('./lib/entry-index')
const memo = require('./lib/memoization')
const read = require('./lib/content/read')
-const Minipass = require('minipass')
-const Collect = require('minipass-collect')
-const Pipeline = require('minipass-pipeline')
-
const writeFile = util.promisify(fs.writeFile)
-module.exports = function get (cache, key, opts) {
- return getData(false, cache, key, opts)
-}
-module.exports.byDigest = function getByDigest (cache, digest, opts) {
- return getData(true, cache, digest, opts)
-}
-
-function getData (byDigest, cache, key, opts = {}) {
+function getData (cache, key, opts = {}) {
const { integrity, memoize, size } = opts
- const memoized = byDigest
- ? memo.get.byDigest(cache, key, opts)
- : memo.get(cache, key, opts)
+ const memoized = memo.get(cache, key, opts)
if (memoized && memoize !== false) {
- return Promise.resolve(
- byDigest
- ? memoized
- : {
- metadata: memoized.entry.metadata,
- data: memoized.data,
- integrity: memoized.entry.integrity,
- size: memoized.entry.size,
- }
- )
+ return Promise.resolve({
+ metadata: memoized.entry.metadata,
+ data: memoized.data,
+ integrity: memoized.entry.integrity,
+ size: memoized.entry.size,
+ })
}
- return (byDigest ? Promise.resolve(null) : index.find(cache, key, opts)).then(
- (entry) => {
- if (!entry && !byDigest)
- throw new index.NotFoundError(cache, key)
- return read(cache, byDigest ? key : entry.integrity, {
- integrity,
- size,
- })
- .then((data) =>
- byDigest
- ? data
- : {
- data,
- metadata: entry.metadata,
- size: entry.size,
- integrity: entry.integrity,
- }
- )
- .then((res) => {
- if (memoize && byDigest)
- memo.put.byDigest(cache, key, res, opts)
- else if (memoize)
- memo.put(cache, entry, res.data, opts)
-
- return res
- })
- }
- )
-}
+ return index.find(cache, key, opts).then((entry) => {
+ if (!entry)
+ throw new index.NotFoundError(cache, key)
+
+ return read(cache, entry.integrity, { integrity, size }).then((data) => {
+ if (memoize)
+ memo.put(cache, entry, data, opts)
-module.exports.sync = function get (cache, key, opts) {
- return getDataSync(false, cache, key, opts)
+ return {
+ data,
+ metadata: entry.metadata,
+ size: entry.size,
+ integrity: entry.integrity,
+ }
+ })
+ })
}
-module.exports.sync.byDigest = function getByDigest (cache, digest, opts) {
- return getDataSync(true, cache, digest, opts)
+module.exports = getData
+
+function getDataByDigest (cache, key, opts = {}) {
+ const { integrity, memoize, size } = opts
+ const memoized = memo.get.byDigest(cache, key, opts)
+ if (memoized && memoize !== false)
+ return Promise.resolve(memoized)
+
+ return read(cache, key, { integrity, size }).then((res) => {
+ if (memoize)
+ memo.put.byDigest(cache, key, res, opts)
+ return res
+ })
}
+module.exports.byDigest = getDataByDigest
-function getDataSync (byDigest, cache, key, opts = {}) {
+function getDataSync (cache, key, opts = {}) {
const { integrity, memoize, size } = opts
- const memoized = byDigest
- ? memo.get.byDigest(cache, key, opts)
- : memo.get(cache, key, opts)
+ const memoized = memo.get(cache, key, opts)
+
if (memoized && memoize !== false) {
- return byDigest
- ? memoized
- : {
- metadata: memoized.entry.metadata,
- data: memoized.data,
- integrity: memoized.entry.integrity,
- size: memoized.entry.size,
- }
+ return {
+ metadata: memoized.entry.metadata,
+ data: memoized.data,
+ integrity: memoized.entry.integrity,
+ size: memoized.entry.size,
+ }
}
- const entry = !byDigest && index.find.sync(cache, key, opts)
- if (!entry && !byDigest)
+ const entry = index.find.sync(cache, key, opts)
+ if (!entry)
throw new index.NotFoundError(cache, key)
-
- const data = read.sync(cache, byDigest ? key : entry.integrity, {
+ const data = read.sync(cache, entry.integrity, {
integrity: integrity,
size: size,
})
- const res = byDigest
- ? data
- : {
- metadata: entry.metadata,
- data: data,
- size: entry.size,
- integrity: entry.integrity,
- }
- if (memoize && byDigest)
- memo.put.byDigest(cache, key, res, opts)
- else if (memoize)
+ const res = {
+ metadata: entry.metadata,
+ data: data,
+ size: entry.size,
+ integrity: entry.integrity,
+ }
+ if (memoize)
memo.put(cache, entry, res.data, opts)
return res
}
-module.exports.stream = getStream
+module.exports.sync = getDataSync
+
+function getDataByDigestSync (cache, digest, opts = {}) {
+ const { integrity, memoize, size } = opts
+ const memoized = memo.get.byDigest(cache, digest, opts)
+
+ if (memoized && memoize !== false)
+ return memoized
+
+ const res = read.sync(cache, digest, {
+ integrity: integrity,
+ size: size,
+ })
+ if (memoize)
+ memo.put.byDigest(cache, digest, res, opts)
+
+ return res
+}
+module.exports.sync.byDigest = getDataByDigestSync
const getMemoizedStream = (memoized) => {
const stream = new Minipass()
@@ -166,7 +159,7 @@ function getStream (cache, key, opts = {}) {
return stream
}
-module.exports.stream.byDigest = getStreamDigest
+module.exports.stream = getStream
function getStreamDigest (cache, integrity, opts = {}) {
const { memoize } = opts
@@ -191,7 +184,7 @@ function getStreamDigest (cache, integrity, opts = {}) {
}
}
-module.exports.info = info
+module.exports.stream.byDigest = getStreamDigest
function info (cache, key, opts = {}) {
const { memoize } = opts
@@ -201,53 +194,44 @@ function info (cache, key, opts = {}) {
else
return index.find(cache, key)
}
+module.exports.info = info
-module.exports.hasContent = read.hasContent
-
-function cp (cache, key, dest, opts) {
- return copy(false, cache, key, dest, opts)
-}
-
-module.exports.copy = cp
-
-function cpDigest (cache, digest, dest, opts) {
- return copy(true, cache, digest, dest, opts)
-}
-
-module.exports.copy.byDigest = cpDigest
-
-function copy (byDigest, cache, key, dest, opts = {}) {
+function copy (cache, key, dest, opts = {}) {
if (read.copy) {
- return (byDigest
- ? Promise.resolve(null)
- : index.find(cache, key, opts)
- ).then((entry) => {
- if (!entry && !byDigest)
+ return index.find(cache, key, opts).then((entry) => {
+ if (!entry)
throw new index.NotFoundError(cache, key)
-
- return read
- .copy(cache, byDigest ? key : entry.integrity, dest, opts)
+ return read.copy(cache, entry.integrity, dest, opts)
.then(() => {
- return byDigest
- ? key
- : {
- metadata: entry.metadata,
- size: entry.size,
- integrity: entry.integrity,
- }
+ return {
+ metadata: entry.metadata,
+ size: entry.size,
+ integrity: entry.integrity,
+ }
})
})
}
- return getData(byDigest, cache, key, opts).then((res) => {
- return writeFile(dest, byDigest ? res : res.data).then(() => {
- return byDigest
- ? key
- : {
- metadata: res.metadata,
- size: res.size,
- integrity: res.integrity,
- }
+ return getData(cache, key, opts).then((res) => {
+ return writeFile(dest, res.data).then(() => {
+ return {
+ metadata: res.metadata,
+ size: res.size,
+ integrity: res.integrity,
+ }
})
})
}
+module.exports.copy = copy
+
+function copyByDigest (cache, key, dest, opts = {}) {
+ if (read.copy)
+ return read.copy(cache, key, dest, opts).then(() => key)
+
+ return getDataByDigest(cache, key, opts).then((res) => {
+ return writeFile(dest, res).then(() => key)
+ })
+}
+module.exports.copy.byDigest = copyByDigest
+
+module.exports.hasContent = read.hasContent
diff --git a/node_modules/cacache/lib/util/tmp.js b/node_modules/cacache/lib/util/tmp.js
index fbcd2ab13..0a5a50eba 100644
--- a/node_modules/cacache/lib/util/tmp.js
+++ b/node_modules/cacache/lib/util/tmp.js
@@ -1,21 +1,21 @@
'use strict'
-const util = require('util')
+const fs = require('@npmcli/fs')
const fixOwner = require('./fix-owner')
const path = require('path')
-const rimraf = util.promisify(require('rimraf'))
-const uniqueFilename = require('unique-filename')
-const { disposer } = require('./disposer')
module.exports.mkdir = mktmpdir
function mktmpdir (cache, opts = {}) {
const { tmpPrefix } = opts
- const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), tmpPrefix)
- return fixOwner.mkdirfix(cache, tmpTarget).then(() => {
- return tmpTarget
- })
+ const tmpDir = path.join(cache, 'tmp')
+ return fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
+ .then(() => {
+ // do not use path.join(), it drops the trailing / if tmpPrefix is unset
+ const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
+ return fs.mkdtemp(target, { owner: 'inherit' })
+ })
}
module.exports.withTmp = withTmp
@@ -25,7 +25,7 @@ function withTmp (cache, opts, cb) {
cb = opts
opts = {}
}
- return disposer(mktmpdir(cache, opts), rimraf, cb)
+ return fs.withTempDir(path.join(cache, 'tmp'), cb, opts)
}
module.exports.fix = fixtmpdir
diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json
index 3c2e65c04..6cb414015 100644
--- a/node_modules/cacache/package.json
+++ b/node_modules/cacache/package.json
@@ -1,6 +1,6 @@
{
"name": "cacache",
- "version": "15.2.0",
+ "version": "15.3.0",
"cache-version": {
"content": "2",
"index": "5"
@@ -43,6 +43,7 @@
],
"license": "ISC",
"dependencies": {
+ "@npmcli/fs": "^1.0.0",
"@npmcli/move-file": "^1.0.1",
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
diff --git a/package-lock.json b/package-lock.json
index 90d57084b..d1fda9965 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -93,7 +93,7 @@
"ansicolors": "~0.3.2",
"ansistyles": "~0.1.3",
"archy": "~1.0.0",
- "cacache": "^15.2.0",
+ "cacache": "^15.3.0",
"chalk": "^4.1.2",
"chownr": "^2.0.0",
"cli-columns": "^3.1.2",
@@ -590,6 +590,12 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/@gar/promisify": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.2.tgz",
+ "integrity": "sha512-82cpyJyKRoQoRi+14ibCeGPu0CwypgtBAdBhq1WfvagpCZNKqwXbKwXllYSMG91DhmG4jt9gN8eP6lGOtozuaw==",
+ "inBundle": true
+ },
"node_modules/@humanwhocodes/config-array": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz",
@@ -833,6 +839,16 @@
"node": ">=10"
}
},
+ "node_modules/@npmcli/fs": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-1.0.0.tgz",
+ "integrity": "sha512-8ltnOpRR/oJbOp8vaGUnipOi3bqkcW+sLHFlyXIr08OGHmVJLB1Hn7QtGXbYcpVtH1gAYZTlmDXtE4YV0+AMMQ==",
+ "inBundle": true,
+ "dependencies": {
+ "@gar/promisify": "^1.0.1",
+ "semver": "^7.3.5"
+ }
+ },
"node_modules/@npmcli/git": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@npmcli/git/-/git-2.1.0.tgz",
@@ -1527,11 +1543,12 @@
"inBundle": true
},
"node_modules/cacache": {
- "version": "15.2.0",
- "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.2.0.tgz",
- "integrity": "sha512-uKoJSHmnrqXgthDFx/IU6ED/5xd+NNGe+Bb+kLZy7Ku4P+BaiWEUflAKPZ7eAzsYGcsAGASJZsybXp+quEcHTw==",
+ "version": "15.3.0",
+ "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.3.0.tgz",
+ "integrity": "sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==",
"inBundle": true,
"dependencies": {
+ "@npmcli/fs": "^1.0.0",
"@npmcli/move-file": "^1.0.1",
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
@@ -10865,6 +10882,11 @@
}
}
},
+ "@gar/promisify": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.2.tgz",
+ "integrity": "sha512-82cpyJyKRoQoRi+14ibCeGPu0CwypgtBAdBhq1WfvagpCZNKqwXbKwXllYSMG91DhmG4jt9gN8eP6lGOtozuaw=="
+ },
"@humanwhocodes/config-array": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz",
@@ -11054,6 +11076,15 @@
"ansi-styles": "^4.3.0"
}
},
+ "@npmcli/fs": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-1.0.0.tgz",
+ "integrity": "sha512-8ltnOpRR/oJbOp8vaGUnipOi3bqkcW+sLHFlyXIr08OGHmVJLB1Hn7QtGXbYcpVtH1gAYZTlmDXtE4YV0+AMMQ==",
+ "requires": {
+ "@gar/promisify": "^1.0.1",
+ "semver": "^7.3.5"
+ }
+ },
"@npmcli/git": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@npmcli/git/-/git-2.1.0.tgz",
@@ -11578,10 +11609,11 @@
"integrity": "sha1-y5T662HIaWRR2zZTThQi+U8K7og="
},
"cacache": {
- "version": "15.2.0",
- "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.2.0.tgz",
- "integrity": "sha512-uKoJSHmnrqXgthDFx/IU6ED/5xd+NNGe+Bb+kLZy7Ku4P+BaiWEUflAKPZ7eAzsYGcsAGASJZsybXp+quEcHTw==",
+ "version": "15.3.0",
+ "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.3.0.tgz",
+ "integrity": "sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==",
"requires": {
+ "@npmcli/fs": "^1.0.0",
"@npmcli/move-file": "^1.0.1",
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
diff --git a/package.json b/package.json
index fa729d026..66e1a60d0 100644
--- a/package.json
+++ b/package.json
@@ -63,7 +63,7 @@
"ansicolors": "~0.3.2",
"ansistyles": "~0.1.3",
"archy": "~1.0.0",
- "cacache": "^15.2.0",
+ "cacache": "^15.3.0",
"chalk": "^4.1.2",
"chownr": "^2.0.0",
"cli-columns": "^3.1.2",