Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/npm/cli.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
Diffstat (limited to 'node_modules/cacache')
-rw-r--r--node_modules/cacache/lib/content/read.js47
-rw-r--r--node_modules/cacache/lib/content/rm.js5
-rw-r--r--node_modules/cacache/lib/content/write.js13
-rw-r--r--node_modules/cacache/lib/entry-index.js62
-rw-r--r--node_modules/cacache/lib/get.js (renamed from node_modules/cacache/get.js)50
-rw-r--r--node_modules/cacache/lib/index.js (renamed from node_modules/cacache/index.js)11
-rw-r--r--node_modules/cacache/lib/memoization.js9
-rw-r--r--node_modules/cacache/lib/put.js (renamed from node_modules/cacache/put.js)18
-rw-r--r--node_modules/cacache/lib/rm.js (renamed from node_modules/cacache/rm.js)6
-rw-r--r--node_modules/cacache/lib/util/disposer.js3
-rw-r--r--node_modules/cacache/lib/util/fix-owner.js18
-rw-r--r--node_modules/cacache/lib/util/move-file.js6
-rw-r--r--node_modules/cacache/lib/verify.js16
-rw-r--r--node_modules/cacache/ls.js6
-rw-r--r--node_modules/cacache/package.json34
-rw-r--r--node_modules/cacache/verify.js3
16 files changed, 188 insertions, 119 deletions
diff --git a/node_modules/cacache/lib/content/read.js b/node_modules/cacache/lib/content/read.js
index 034e8eee0..8bffb2af8 100644
--- a/node_modules/cacache/lib/content/read.js
+++ b/node_modules/cacache/lib/content/read.js
@@ -20,15 +20,18 @@ function read (cache, integrity, opts = {}) {
// get size
return lstat(cpath).then(stat => ({ stat, cpath, sri }))
}).then(({ stat, cpath, sri }) => {
- if (typeof size === 'number' && stat.size !== size)
+ if (typeof size === 'number' && stat.size !== size) {
throw sizeError(size, stat.size)
+ }
- if (stat.size > MAX_SINGLE_READ_SIZE)
+ if (stat.size > MAX_SINGLE_READ_SIZE) {
return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
+ }
return readFile(cpath, null).then((data) => {
- if (!ssri.checkData(data, sri))
+ if (!ssri.checkData(data, sri)) {
throw integrityError(sri, cpath)
+ }
return data
})
@@ -55,11 +58,13 @@ function readSync (cache, integrity, opts = {}) {
const { size } = opts
return withContentSriSync(cache, integrity, (cpath, sri) => {
const data = fs.readFileSync(cpath)
- if (typeof size === 'number' && size !== data.length)
+ if (typeof size === 'number' && size !== data.length) {
throw sizeError(size, data.length)
+ }
- if (ssri.checkData(data, sri))
+ if (ssri.checkData(data, sri)) {
return data
+ }
throw integrityError(sri, cpath)
})
@@ -75,8 +80,9 @@ function readStream (cache, integrity, opts = {}) {
// just lstat to ensure it exists
return lstat(cpath).then((stat) => ({ stat, cpath, sri }))
}).then(({ stat, cpath, sri }) => {
- if (typeof size === 'number' && size !== stat.size)
+ if (typeof size === 'number' && size !== stat.size) {
return stream.emit('error', sizeError(size, stat.size))
+ }
readPipeline(cpath, stat.size, sri, stream)
}, er => stream.emit('error', er))
@@ -106,21 +112,24 @@ function copySync (cache, integrity, dest) {
module.exports.hasContent = hasContent
function hasContent (cache, integrity) {
- if (!integrity)
+ if (!integrity) {
return Promise.resolve(false)
+ }
return withContentSri(cache, integrity, (cpath, sri) => {
return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat }))
}).catch((err) => {
- if (err.code === 'ENOENT')
+ if (err.code === 'ENOENT') {
return false
+ }
if (err.code === 'EPERM') {
/* istanbul ignore else */
- if (process.platform !== 'win32')
+ if (process.platform !== 'win32') {
throw err
- else
+ } else {
return false
+ }
}
})
}
@@ -128,23 +137,26 @@ function hasContent (cache, integrity) {
module.exports.hasContent.sync = hasContentSync
function hasContentSync (cache, integrity) {
- if (!integrity)
+ if (!integrity) {
return false
+ }
return withContentSriSync(cache, integrity, (cpath, sri) => {
try {
const stat = fs.lstatSync(cpath)
return { size: stat.size, sri, stat }
} catch (err) {
- if (err.code === 'ENOENT')
+ if (err.code === 'ENOENT') {
return false
+ }
if (err.code === 'EPERM') {
/* istanbul ignore else */
- if (process.platform !== 'win32')
+ if (process.platform !== 'win32') {
throw err
- else
+ } else {
return false
+ }
}
}
})
@@ -180,13 +192,15 @@ function withContentSri (cache, integrity, fn) {
.then((results) => {
// Return the first non error if it is found
const result = results.find((r) => !(r instanceof Error))
- if (result)
+ if (result) {
return result
+ }
// Throw the No matching content found error
const enoentError = results.find((r) => r.code === 'ENOENT')
- if (enoentError)
+ if (enoentError) {
throw enoentError
+ }
// Throw generic error
throw results.find((r) => r instanceof Error)
@@ -228,6 +242,7 @@ function withContentSriSync (cache, integrity, fn) {
}
function sizeError (expected, found) {
+ /* eslint-disable-next-line max-len */
const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
err.expected = expected
err.found = found
diff --git a/node_modules/cacache/lib/content/rm.js b/node_modules/cacache/lib/content/rm.js
index 6a3d1a3d0..50612364e 100644
--- a/node_modules/cacache/lib/content/rm.js
+++ b/node_modules/cacache/lib/content/rm.js
@@ -11,9 +11,10 @@ module.exports = rm
function rm (cache, integrity) {
return hasContent(cache, integrity).then((content) => {
// ~pretty~ sure we can't end up with a content lacking sri, but be safe
- if (content && content.sri)
+ if (content && content.sri) {
return rimraf(contentPath(cache, content.sri)).then(() => true)
- else
+ } else {
return false
+ }
})
}
diff --git a/node_modules/cacache/lib/content/write.js b/node_modules/cacache/lib/content/write.js
index dde1bd1dd..a71e81ad5 100644
--- a/node_modules/cacache/lib/content/write.js
+++ b/node_modules/cacache/lib/content/write.js
@@ -22,15 +22,18 @@ module.exports = write
function write (cache, data, opts = {}) {
const { algorithms, size, integrity } = opts
- if (algorithms && algorithms.length > 1)
+ if (algorithms && algorithms.length > 1) {
throw new Error('opts.algorithms only supports a single algorithm for now')
+ }
- if (typeof size === 'number' && data.length !== size)
+ if (typeof size === 'number' && data.length !== size) {
return Promise.reject(sizeError(size, data.length))
+ }
const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
- if (integrity && !ssri.checkData(data, integrity, opts))
+ if (integrity && !ssri.checkData(data, integrity, opts)) {
return Promise.reject(checksumError(integrity, sri))
+ }
return disposer(makeTmp(cache, opts), makeTmpDisposer,
(tmp) => {
@@ -149,8 +152,9 @@ function makeTmp (cache, opts) {
}
function makeTmpDisposer (tmp) {
- if (tmp.moved)
+ if (tmp.moved) {
return Promise.resolve()
+ }
return rimraf(tmp.target)
}
@@ -171,6 +175,7 @@ function moveToDestination (tmp, cache, sri, opts) {
}
function sizeError (expected, found) {
+ /* eslint-disable-next-line max-len */
const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
err.expected = expected
err.found = found
diff --git a/node_modules/cacache/lib/entry-index.js b/node_modules/cacache/lib/entry-index.js
index 71aac5ed7..426778b85 100644
--- a/node_modules/cacache/lib/entry-index.js
+++ b/node_modules/cacache/lib/entry-index.js
@@ -49,8 +49,9 @@ async function compact (cache, key, matchFn, opts = {}) {
// if the integrity is null and no validateEntry is provided, we break
// as we consider the null integrity to be a deletion of everything
// that came before it.
- if (entry.integrity === null && !opts.validateEntry)
+ if (entry.integrity === null && !opts.validateEntry) {
break
+ }
// if this entry is valid, and it is either the first entry or
// the newEntries array doesn't already include an entry that
@@ -58,8 +59,9 @@ async function compact (cache, key, matchFn, opts = {}) {
// it to the beginning of our list
if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
(newEntries.length === 0 ||
- !newEntries.find((oldEntry) => matchFn(oldEntry, entry))))
+ !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
newEntries.unshift(entry)
+ }
}
const newIndex = '\n' + newEntries.map((entry) => {
@@ -78,8 +80,9 @@ async function compact (cache, key, matchFn, opts = {}) {
}
const teardown = async (tmp) => {
- if (!tmp.moved)
+ if (!tmp.moved) {
return rimraf(tmp.target)
+ }
}
const write = async (tmp) => {
@@ -92,8 +95,9 @@ async function compact (cache, key, matchFn, opts = {}) {
try {
await fixOwner.chownr(cache, bucket)
} catch (err) {
- if (err.code !== 'ENOENT')
+ if (err.code !== 'ENOENT') {
throw err
+ }
}
}
@@ -136,8 +140,9 @@ function insert (cache, key, integrity, opts = {}) {
})
.then(() => fixOwner.chownr(cache, bucket))
.catch((err) => {
- if (err.code === 'ENOENT')
+ if (err.code === 'ENOENT') {
return undefined
+ }
throw err
// There's a class of race conditions that happen when things get deleted
@@ -169,8 +174,9 @@ function insertSync (cache, key, integrity, opts = {}) {
try {
fixOwner.chownr.sync(cache, bucket)
} catch (err) {
- if (err.code !== 'ENOENT')
+ if (err.code !== 'ENOENT') {
throw err
+ }
}
return formatEntry(cache, entry)
}
@@ -182,17 +188,19 @@ function find (cache, key) {
return bucketEntries(bucket)
.then((entries) => {
return entries.reduce((latest, next) => {
- if (next && next.key === key)
+ if (next && next.key === key) {
return formatEntry(cache, next)
- else
+ } else {
return latest
+ }
}, null)
})
.catch((err) => {
- if (err.code === 'ENOENT')
+ if (err.code === 'ENOENT') {
return null
- else
+ } else {
throw err
+ }
})
}
@@ -202,24 +210,27 @@ function findSync (cache, key) {
const bucket = bucketPath(cache, key)
try {
return bucketEntriesSync(bucket).reduce((latest, next) => {
- if (next && next.key === key)
+ if (next && next.key === key) {
return formatEntry(cache, next)
- else
+ } else {
return latest
+ }
}, null)
} catch (err) {
- if (err.code === 'ENOENT')
+ if (err.code === 'ENOENT') {
return null
- else
+ } else {
throw err
+ }
}
}
module.exports.delete = del
function del (cache, key, opts = {}) {
- if (!opts.removeFully)
+ if (!opts.removeFully) {
return insert(cache, key, null, opts)
+ }
const bucket = bucketPath(cache, key)
return rimraf(bucket)
@@ -228,8 +239,9 @@ function del (cache, key, opts = {}) {
module.exports.delete.sync = delSync
function delSync (cache, key, opts = {}) {
- if (!opts.removeFully)
+ if (!opts.removeFully) {
return insertSync(cache, key, null, opts)
+ }
const bucket = bucketPath(cache, key)
return rimraf.sync(bucket)
@@ -263,12 +275,14 @@ function lsStream (cache) {
// reduced is a map of key => entry
for (const entry of reduced.values()) {
const formatted = formatEntry(cache, entry)
- if (formatted)
+ if (formatted) {
stream.write(formatted)
+ }
}
}).catch(err => {
- if (err.code === 'ENOENT')
+ if (err.code === 'ENOENT') {
return undefined
+ }
throw err
})
})
@@ -312,8 +326,9 @@ function bucketEntriesSync (bucket, filter) {
function _bucketEntries (data, filter) {
const entries = []
data.split('\n').forEach((entry) => {
- if (!entry)
+ if (!entry) {
return
+ }
const pieces = entry.split('\t')
if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
@@ -328,8 +343,9 @@ function _bucketEntries (data, filter) {
// Entry is corrupted!
return
}
- if (obj)
+ if (obj) {
entries.push(obj)
+ }
})
return entries
}
@@ -371,8 +387,9 @@ function hash (str, digest) {
function formatEntry (cache, entry, keepAll) {
// Treat null digests as deletions. They'll shadow any previous entries.
- if (!entry.integrity && !keepAll)
+ if (!entry.integrity && !keepAll) {
return null
+ }
return {
key: entry.key,
@@ -386,8 +403,9 @@ function formatEntry (cache, entry, keepAll) {
function readdirOrEmpty (dir) {
return readdir(dir).catch((err) => {
- if (err.code === 'ENOENT' || err.code === 'ENOTDIR')
+ if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
return []
+ }
throw err
})
diff --git a/node_modules/cacache/get.js b/node_modules/cacache/lib/get.js
index 4e905e7cf..d9d4bf4c6 100644
--- a/node_modules/cacache/get.js
+++ b/node_modules/cacache/lib/get.js
@@ -6,9 +6,9 @@ const Pipeline = require('minipass-pipeline')
const fs = require('fs')
const util = require('util')
-const index = require('./lib/entry-index')
-const memo = require('./lib/memoization')
-const read = require('./lib/content/read')
+const index = require('./entry-index')
+const memo = require('./memoization')
+const read = require('./content/read')
const writeFile = util.promisify(fs.writeFile)
@@ -25,12 +25,14 @@ function getData (cache, key, opts = {}) {
}
return index.find(cache, key, opts).then((entry) => {
- if (!entry)
+ if (!entry) {
throw new index.NotFoundError(cache, key)
+ }
return read(cache, entry.integrity, { integrity, size }).then((data) => {
- if (memoize)
+ if (memoize) {
memo.put(cache, entry, data, opts)
+ }
return {
data,
@@ -46,12 +48,14 @@ module.exports = getData
function getDataByDigest (cache, key, opts = {}) {
const { integrity, memoize, size } = opts
const memoized = memo.get.byDigest(cache, key, opts)
- if (memoized && memoize !== false)
+ if (memoized && memoize !== false) {
return Promise.resolve(memoized)
+ }
return read(cache, key, { integrity, size }).then((res) => {
- if (memoize)
+ if (memoize) {
memo.put.byDigest(cache, key, res, opts)
+ }
return res
})
}
@@ -70,8 +74,9 @@ function getDataSync (cache, key, opts = {}) {
}
}
const entry = index.find.sync(cache, key, opts)
- if (!entry)
+ if (!entry) {
throw new index.NotFoundError(cache, key)
+ }
const data = read.sync(cache, entry.integrity, {
integrity: integrity,
size: size,
@@ -82,8 +87,9 @@ function getDataSync (cache, key, opts = {}) {
size: entry.size,
integrity: entry.integrity,
}
- if (memoize)
+ if (memoize) {
memo.put(cache, entry, res.data, opts)
+ }
return res
}
@@ -94,15 +100,17 @@ function getDataByDigestSync (cache, digest, opts = {}) {
const { integrity, memoize, size } = opts
const memoized = memo.get.byDigest(cache, digest, opts)
- if (memoized && memoize !== false)
+ if (memoized && memoize !== false) {
return memoized
+ }
const res = read.sync(cache, digest, {
integrity: integrity,
size: size,
})
- if (memoize)
+ if (memoize) {
memo.put.byDigest(cache, digest, res, opts)
+ }
return res
}
@@ -122,15 +130,17 @@ const getMemoizedStream = (memoized) => {
function getStream (cache, key, opts = {}) {
const { memoize, size } = opts
const memoized = memo.get(cache, key, opts)
- if (memoized && memoize !== false)
+ if (memoized && memoize !== false) {
return getMemoizedStream(memoized)
+ }
const stream = new Pipeline()
index
.find(cache, key)
.then((entry) => {
- if (!entry)
+ if (!entry) {
throw new index.NotFoundError(cache, key)
+ }
stream.emit('metadata', entry.metadata)
stream.emit('integrity', entry.integrity)
@@ -170,8 +180,9 @@ function getStreamDigest (cache, integrity, opts = {}) {
return stream
} else {
const stream = read.readStream(cache, integrity, opts)
- if (!memoize)
+ if (!memoize) {
return stream
+ }
const memoStream = new Collect.PassThrough()
memoStream.on('collect', data => memo.put.byDigest(
@@ -189,18 +200,20 @@ module.exports.stream.byDigest = getStreamDigest
function info (cache, key, opts = {}) {
const { memoize } = opts
const memoized = memo.get(cache, key, opts)
- if (memoized && memoize !== false)
+ if (memoized && memoize !== false) {
return Promise.resolve(memoized.entry)
- else
+ } else {
return index.find(cache, key)
+ }
}
module.exports.info = info
function copy (cache, key, dest, opts = {}) {
if (read.copy) {
return index.find(cache, key, opts).then((entry) => {
- if (!entry)
+ if (!entry) {
throw new index.NotFoundError(cache, key)
+ }
return read.copy(cache, entry.integrity, dest, opts)
.then(() => {
return {
@@ -225,8 +238,9 @@ function copy (cache, key, dest, opts = {}) {
module.exports.copy = copy
function copyByDigest (cache, key, dest, opts = {}) {
- if (read.copy)
+ if (read.copy) {
return read.copy(cache, key, dest, opts).then(() => key)
+ }
return getDataByDigest(cache, key, opts).then((res) => {
return writeFile(dest, res).then(() => key)
diff --git a/node_modules/cacache/index.js b/node_modules/cacache/lib/index.js
index c8c52b041..1c56be68d 100644
--- a/node_modules/cacache/index.js
+++ b/node_modules/cacache/lib/index.js
@@ -1,20 +1,19 @@
'use strict'
-const ls = require('./ls.js')
const get = require('./get.js')
const put = require('./put.js')
const rm = require('./rm.js')
const verify = require('./verify.js')
-const { clearMemoized } = require('./lib/memoization.js')
-const tmp = require('./lib/util/tmp.js')
-const index = require('./lib/entry-index.js')
+const { clearMemoized } = require('./memoization.js')
+const tmp = require('./util/tmp.js')
+const index = require('./entry-index.js')
module.exports.index = {}
module.exports.index.compact = index.compact
module.exports.index.insert = index.insert
-module.exports.ls = ls
-module.exports.ls.stream = ls.stream
+module.exports.ls = index.ls
+module.exports.ls.stream = index.lsStream
module.exports.get = get
module.exports.get.byDigest = get.byDigest
diff --git a/node_modules/cacache/lib/memoization.js b/node_modules/cacache/lib/memoization.js
index d5465f39f..e1b13dd5f 100644
--- a/node_modules/cacache/lib/memoization.js
+++ b/node_modules/cacache/lib/memoization.js
@@ -62,12 +62,13 @@ class ObjProxy {
}
function pickMem (opts) {
- if (!opts || !opts.memoize)
+ if (!opts || !opts.memoize) {
return MEMOIZED
- else if (opts.memoize.get && opts.memoize.set)
+ } else if (opts.memoize.get && opts.memoize.set) {
return opts.memoize
- else if (typeof opts.memoize === 'object')
+ } else if (typeof opts.memoize === 'object') {
return new ObjProxy(opts.memoize)
- else
+ } else {
return MEMOIZED
+ }
}
diff --git a/node_modules/cacache/put.js b/node_modules/cacache/lib/put.js
index 84e9562bc..d6904fa30 100644
--- a/node_modules/cacache/put.js
+++ b/node_modules/cacache/lib/put.js
@@ -1,8 +1,8 @@
'use strict'
-const index = require('./lib/entry-index')
-const memo = require('./lib/memoization')
-const write = require('./lib/content/write')
+const index = require('./entry-index')
+const memo = require('./memoization')
+const write = require('./content/write')
const Flush = require('minipass-flush')
const { PassThrough } = require('minipass-collect')
const Pipeline = require('minipass-pipeline')
@@ -21,8 +21,9 @@ function putData (cache, key, data, opts = {}) {
return index
.insert(cache, key, res.integrity, { ...opts, size: res.size })
.then((entry) => {
- if (memoize)
+ if (memoize) {
memo.put(cache, entry, data, opts)
+ }
return res.integrity
})
@@ -67,14 +68,17 @@ function putStream (cache, key, opts = {}) {
return index
.insert(cache, key, integrity, { ...opts, size })
.then((entry) => {
- if (memoize && memoData)
+ if (memoize && memoData) {
memo.put(cache, entry, memoData, opts)
+ }
- if (integrity)
+ if (integrity) {
pipeline.emit('integrity', integrity)
+ }
- if (size)
+ if (size) {
pipeline.emit('size', size)
+ }
})
},
}))
diff --git a/node_modules/cacache/rm.js b/node_modules/cacache/lib/rm.js
index f2ef6b190..5f0007177 100644
--- a/node_modules/cacache/rm.js
+++ b/node_modules/cacache/lib/rm.js
@@ -2,11 +2,11 @@
const util = require('util')
-const index = require('./lib/entry-index')
-const memo = require('./lib/memoization')
+const index = require('./entry-index')
+const memo = require('./memoization')
const path = require('path')
const rimraf = util.promisify(require('rimraf'))
-const rmContent = require('./lib/content/rm')
+const rmContent = require('./content/rm')
module.exports = entry
module.exports.entry = entry
diff --git a/node_modules/cacache/lib/util/disposer.js b/node_modules/cacache/lib/util/disposer.js
index aa8aed54d..52d7d3edd 100644
--- a/node_modules/cacache/lib/util/disposer.js
+++ b/node_modules/cacache/lib/util/disposer.js
@@ -8,8 +8,9 @@ function disposer (creatorFn, disposerFn, fn) {
.then(
// disposer resolved, do something with original fn's promise
() => {
- if (shouldThrow)
+ if (shouldThrow) {
throw result
+ }
return result
},
diff --git a/node_modules/cacache/lib/util/fix-owner.js b/node_modules/cacache/lib/util/fix-owner.js
index 90ffece52..bc14def4e 100644
--- a/node_modules/cacache/lib/util/fix-owner.js
+++ b/node_modules/cacache/lib/util/fix-owner.js
@@ -49,8 +49,9 @@ function fixOwner (cache, filepath) {
const { uid, gid } = owner
// No need to override if it's already what we used.
- if (self.uid === uid && self.gid === gid)
+ if (self.uid === uid && self.gid === gid) {
return
+ }
return inflight('fixOwner: fixing ownership on ' + filepath, () =>
chownr(
@@ -58,8 +59,9 @@ function fixOwner (cache, filepath) {
typeof uid === 'number' ? uid : self.uid,
typeof gid === 'number' ? gid : self.gid
).catch((err) => {
- if (err.code === 'ENOENT')
+ if (err.code === 'ENOENT') {
return null
+ }
throw err
})
@@ -93,8 +95,9 @@ function fixOwnerSync (cache, filepath) {
)
} catch (err) {
// only catch ENOENT, any other error is a problem.
- if (err.code === 'ENOENT')
+ if (err.code === 'ENOENT') {
return null
+ }
throw err
}
@@ -110,12 +113,14 @@ function mkdirfix (cache, p, cb) {
return Promise.resolve(inferOwner(cache)).then(() => {
return mkdirp(p)
.then((made) => {
- if (made)
+ if (made) {
return fixOwner(cache, made).then(() => made)
+ }
})
.catch((err) => {
- if (err.code === 'EEXIST')
+ if (err.code === 'EEXIST') {
return fixOwner(cache, p).then(() => null)
+ }
throw err
})
@@ -136,7 +141,8 @@ function mkdirfixSync (cache, p) {
if (err.code === 'EEXIST') {
fixOwnerSync(cache, p)
return null
- } else
+ } else {
throw err
+ }
}
}
diff --git a/node_modules/cacache/lib/util/move-file.js b/node_modules/cacache/lib/util/move-file.js
index c3f9e35eb..3739cea3d 100644
--- a/node_modules/cacache/lib/util/move-file.js
+++ b/node_modules/cacache/lib/util/move-file.js
@@ -38,10 +38,12 @@ function moveFile (src, dest) {
} else if (err.code === 'EEXIST' || err.code === 'EBUSY') {
// file already exists, so whatever
return resolve()
- } else
+ } else {
return reject(err)
- } else
+ }
+ } else {
return resolve()
+ }
})
})
.then(() => {
diff --git a/node_modules/cacache/lib/verify.js b/node_modules/cacache/lib/verify.js
index e9d679ece..300cd9f9d 100644
--- a/node_modules/cacache/lib/verify.js
+++ b/node_modules/cacache/lib/verify.js
@@ -54,8 +54,9 @@ function verify (cache, opts) {
stats[k] = s[k]
})
const end = new Date()
- if (!stats.runTime)
+ if (!stats.runTime) {
stats.runTime = {}
+ }
stats.runTime[label] = end - start
return Promise.resolve(stats)
@@ -108,8 +109,9 @@ function garbageCollect (cache, opts) {
const indexStream = index.lsStream(cache)
const liveContent = new Set()
indexStream.on('data', (entry) => {
- if (opts.filter && !opts.filter(entry))
+ if (opts.filter && !opts.filter(entry)) {
return
+ }
liveContent.add(entry.integrity.toString())
})
@@ -176,8 +178,9 @@ function verifyContent (filepath, sri) {
return ssri
.checkStream(new fsm.ReadStream(filepath), sri)
.catch((err) => {
- if (err.code !== 'EINTEGRITY')
+ if (err.code !== 'EINTEGRITY') {
throw err
+ }
return rimraf(filepath).then(() => {
contentInfo.valid = false
@@ -186,8 +189,9 @@ function verifyContent (filepath, sri) {
.then(() => contentInfo)
})
.catch((err) => {
- if (err.code === 'ENOENT')
+ if (err.code === 'ENOENT') {
return { size: 0, valid: false }
+ }
throw err
})
@@ -209,9 +213,9 @@ function rebuildIndex (cache, opts) {
const entry = entries[k]
const excluded = opts.filter && !opts.filter(entry)
excluded && stats.rejectedEntries++
- if (buckets[hashed] && !excluded)
+ if (buckets[hashed] && !excluded) {
buckets[hashed].push(entry)
- else if (buckets[hashed] && excluded) {
+ } else if (buckets[hashed] && excluded) {
// skip
} else if (excluded) {
buckets[hashed] = []
diff --git a/node_modules/cacache/ls.js b/node_modules/cacache/ls.js
deleted file mode 100644
index 6006c99e3..000000000
--- a/node_modules/cacache/ls.js
+++ /dev/null
@@ -1,6 +0,0 @@
-'use strict'
-
-const index = require('./lib/entry-index')
-
-module.exports = index.ls
-module.exports.stream = index.lsStream
diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json
index 6cb414015..b9efa92d9 100644
--- a/node_modules/cacache/package.json
+++ b/node_modules/cacache/package.json
@@ -1,14 +1,14 @@
{
"name": "cacache",
- "version": "15.3.0",
+ "version": "16.0.0",
"cache-version": {
"content": "2",
"index": "5"
},
"description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
- "main": "index.js",
+ "main": "lib/index.js",
"files": [
- "*.js",
+ "bin",
"lib"
],
"scripts": {
@@ -20,10 +20,13 @@
"snap": "tap",
"coverage": "tap",
"test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
- "lint": "npm run npmclilint -- \"*.*js\" \"lib/**/*.*js\" \"test/**/*.*js\"",
+ "lint": "eslint '**/*.js'",
"npmclilint": "npmcli-lint",
"lintfix": "npm run lint -- --fix",
- "postsnap": "npm run lintfix --"
+ "postsnap": "npm run lintfix --",
+ "postlint": "npm-template-check",
+ "template-copy": "npm-template-copy --force",
+ "posttest": "npm run lint"
},
"repository": "https://github.com/npm/cacache",
"keywords": [
@@ -44,26 +47,26 @@
"license": "ISC",
"dependencies": {
"@npmcli/fs": "^1.0.0",
- "@npmcli/move-file": "^1.0.1",
+ "@npmcli/move-file": "^1.1.2",
"chownr": "^2.0.0",
- "fs-minipass": "^2.0.0",
+ "fs-minipass": "^2.1.0",
"glob": "^7.1.4",
"infer-owner": "^1.0.4",
"lru-cache": "^6.0.0",
"minipass": "^3.1.1",
"minipass-collect": "^1.0.2",
"minipass-flush": "^1.0.5",
- "minipass-pipeline": "^1.2.2",
- "mkdirp": "^1.0.3",
+ "minipass-pipeline": "^1.2.4",
+ "mkdirp": "^1.0.4",
"p-map": "^4.0.0",
"promise-inflight": "^1.0.1",
"rimraf": "^3.0.2",
"ssri": "^8.0.1",
- "tar": "^6.0.2",
+ "tar": "^6.1.11",
"unique-filename": "^1.1.1"
},
"devDependencies": {
- "@npmcli/lint": "^1.0.1",
+ "@npmcli/template-oss": "^2.9.2",
"benchmark": "^2.1.4",
"chalk": "^4.0.0",
"require-inject": "^1.4.4",
@@ -75,6 +78,11 @@
"test-regex": "test/[^/]*.js"
},
"engines": {
- "node": ">= 10"
- }
+ "node": "^12.13.0 || ^14.15.0 || >=16"
+ },
+ "templateOSS": {
+ "windowsCI": false,
+ "version": "2.9.2"
+ },
+ "author": "GitHub Inc."
}
diff --git a/node_modules/cacache/verify.js b/node_modules/cacache/verify.js
deleted file mode 100644
index db7763d7a..000000000
--- a/node_modules/cacache/verify.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./lib/verify')