Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/npm/cli.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGar <gar+gh@danger.computer>2022-08-18 18:00:46 +0300
committerLuke Karrys <luke@lukekarrys.com>2022-08-22 21:14:50 +0300
commitbd2ae5d79eb8807bfca6075e98432c545a9ededa (patch)
tree283f8cfd5e989303279331a105adfdd0c024e1be
parentd54f03132a5e8247cdd8a3c165669477e95980fb (diff)
fix: linting
In preparation for @npmcli/eslint-config@3.1.0
-rw-r--r--lib/commands/audit.js5
-rw-r--r--lib/commands/edit.js9
-rw-r--r--lib/commands/org.js135
-rw-r--r--lib/commands/outdated.js1
-rw-r--r--lib/commands/token.js45
-rw-r--r--lib/npm.js20
-rw-r--r--lib/utils/queryable.js4
-rw-r--r--scripts/dependency-graph.js2
-rw-r--r--test/lib/commands/shrinkwrap.js4
-rwxr-xr-xworkspaces/arborist/bin/index.js1
-rw-r--r--workspaces/arborist/lib/arborist/build-ideal-tree.js43
-rw-r--r--workspaces/arborist/lib/arborist/load-actual.js1
-rw-r--r--workspaces/arborist/lib/arborist/rebuild.js3
-rw-r--r--workspaces/arborist/lib/arborist/reify.js82
-rw-r--r--workspaces/arborist/lib/audit-report.js2
-rw-r--r--workspaces/arborist/lib/link.js1
-rw-r--r--workspaces/arborist/lib/node.js3
-rw-r--r--workspaces/arborist/lib/shrinkwrap.js62
-rw-r--r--workspaces/arborist/lib/signal-handling.js8
-rw-r--r--workspaces/arborist/lib/spec-from-lock.js6
-rw-r--r--workspaces/arborist/scripts/benchmark.js7
-rw-r--r--workspaces/arborist/scripts/benchmark/reify.js16
-rw-r--r--workspaces/arborist/test/arborist/build-ideal-tree.js18
-rw-r--r--workspaces/arborist/test/arborist/load-actual.js42
-rw-r--r--workspaces/arborist/test/arborist/reify.js47
-rw-r--r--workspaces/arborist/test/shrinkwrap.js404
-rw-r--r--workspaces/arborist/test/yarn-lock.js8
-rw-r--r--workspaces/libnpmversion/lib/version.js4
-rw-r--r--workspaces/libnpmversion/test/enforce-clean.js76
29 files changed, 535 insertions(+), 524 deletions(-)
diff --git a/lib/commands/audit.js b/lib/commands/audit.js
index 779bc22fc..6ec870f03 100644
--- a/lib/commands/audit.js
+++ b/lib/commands/audit.js
@@ -178,11 +178,12 @@ class VerifySignatures {
let name = edge.name
try {
name = npa(edge.spec).subSpec.name
- } catch (_) {
+ } catch {
+ // leave it as edge.name
}
try {
return npa(`${name}@${edge.spec}`)
- } catch (_) {
+ } catch {
// Skip packages with invalid spec
}
}
diff --git a/lib/commands/edit.js b/lib/commands/edit.js
index 0256f4f3a..67ac32e01 100644
--- a/lib/commands/edit.js
+++ b/lib/commands/edit.js
@@ -58,11 +58,16 @@ class Edit extends BaseCommand {
}
const [bin, ...args] = this.npm.config.get('editor').split(/\s+/)
const editor = cp.spawn(bin, [...args, dir], { stdio: 'inherit' })
- editor.on('exit', (code) => {
+ editor.on('exit', async (code) => {
if (code) {
return reject(new Error(`editor process exited with code: ${code}`))
}
- this.npm.exec('rebuild', [dir]).catch(reject).then(resolve)
+ try {
+ await this.npm.exec('rebuild', [dir])
+ } catch (err) {
+ reject(err)
+ }
+ resolve()
})
})
})
diff --git a/lib/commands/org.js b/lib/commands/org.js
index 599b4b9c8..f49556c8d 100644
--- a/lib/commands/org.js
+++ b/lib/commands/org.js
@@ -50,7 +50,7 @@ class Org extends BaseCommand {
})
}
- set (org, user, role, opts) {
+ async set (org, user, role, opts) {
role = role || 'developer'
if (!org) {
throw new Error('First argument `orgname` is required.')
@@ -67,27 +67,26 @@ class Org extends BaseCommand {
)
}
- return liborg.set(org, user, role, opts).then(memDeets => {
- if (opts.json) {
- this.npm.output(JSON.stringify(memDeets, null, 2))
- } else if (opts.parseable) {
- this.npm.output(['org', 'orgsize', 'user', 'role'].join('\t'))
- this.npm.output(
- [memDeets.org.name, memDeets.org.size, memDeets.user, memDeets.role].join('\t')
- )
- } else if (!this.npm.silent) {
- this.npm.output(
- `Added ${memDeets.user} as ${memDeets.role} to ${memDeets.org.name}. You now have ${
+ const memDeets = await liborg.set(org, user, role, opts)
+ if (opts.json) {
+ this.npm.output(JSON.stringify(memDeets, null, 2))
+ } else if (opts.parseable) {
+ this.npm.output(['org', 'orgsize', 'user', 'role'].join('\t'))
+ this.npm.output(
+ [memDeets.org.name, memDeets.org.size, memDeets.user, memDeets.role].join('\t')
+ )
+ } else if (!this.npm.silent) {
+ this.npm.output(
+ `Added ${memDeets.user} as ${memDeets.role} to ${memDeets.org.name}. You now have ${
memDeets.org.size
} member${memDeets.org.size === 1 ? '' : 's'} in this org.`
- )
- }
+ )
+ }
- return memDeets
- })
+ return memDeets
}
- rm (org, user, opts) {
+ async rm (org, user, opts) {
if (!org) {
throw new Error('First argument `orgname` is required.')
}
@@ -96,68 +95,62 @@ class Org extends BaseCommand {
throw new Error('Second argument `username` is required.')
}
- return liborg
- .rm(org, user, opts)
- .then(() => {
- return liborg.ls(org, opts)
- })
- .then(roster => {
- user = user.replace(/^[~@]?/, '')
- org = org.replace(/^[~@]?/, '')
- const userCount = Object.keys(roster).length
- if (opts.json) {
- this.npm.output(
- JSON.stringify({
- user,
- org,
- userCount,
- deleted: true,
- })
- )
- } else if (opts.parseable) {
- this.npm.output(['user', 'org', 'userCount', 'deleted'].join('\t'))
- this.npm.output([user, org, userCount, true].join('\t'))
- } else if (!this.npm.silent) {
- this.npm.output(
- `Successfully removed ${user} from ${org}. You now have ${userCount} member${
- userCount === 1 ? '' : 's'
- } in this org.`
- )
- }
- })
+ await liborg.rm(org, user, opts)
+ const roster = await liborg.ls(org, opts)
+ user = user.replace(/^[~@]?/, '')
+ org = org.replace(/^[~@]?/, '')
+ const userCount = Object.keys(roster).length
+ if (opts.json) {
+ this.npm.output(
+ JSON.stringify({
+ user,
+ org,
+ userCount,
+ deleted: true,
+ })
+ )
+ } else if (opts.parseable) {
+ this.npm.output(['user', 'org', 'userCount', 'deleted'].join('\t'))
+ this.npm.output([user, org, userCount, true].join('\t'))
+ } else if (!this.npm.silent) {
+ this.npm.output(
+ `Successfully removed ${user} from ${org}. You now have ${userCount} member${
+ userCount === 1 ? '' : 's'
+ } in this org.`
+ )
+ }
}
- ls (org, user, opts) {
+ async ls (org, user, opts) {
if (!org) {
throw new Error('First argument `orgname` is required.')
}
- return liborg.ls(org, opts).then(roster => {
- if (user) {
- const newRoster = {}
- if (roster[user]) {
- newRoster[user] = roster[user]
- }
-
- roster = newRoster
+ let roster = await liborg.ls(org, opts)
+ if (user) {
+ const newRoster = {}
+ if (roster[user]) {
+ newRoster[user] = roster[user]
}
- if (opts.json) {
- this.npm.output(JSON.stringify(roster, null, 2))
- } else if (opts.parseable) {
- this.npm.output(['user', 'role'].join('\t'))
- Object.keys(roster).forEach(user => {
- this.npm.output([user, roster[user]].join('\t'))
+
+ roster = newRoster
+ }
+ if (opts.json) {
+ this.npm.output(JSON.stringify(roster, null, 2))
+ } else if (opts.parseable) {
+ this.npm.output(['user', 'role'].join('\t'))
+ Object.keys(roster).forEach(user => {
+ this.npm.output([user, roster[user]].join('\t'))
+ })
+ } else if (!this.npm.silent) {
+ const table = new Table({ head: ['user', 'role'] })
+ Object.keys(roster)
+ .sort()
+ .forEach(user => {
+ table.push([user, roster[user]])
})
- } else if (!this.npm.silent) {
- const table = new Table({ head: ['user', 'role'] })
- Object.keys(roster)
- .sort()
- .forEach(user => {
- table.push([user, roster[user]])
- })
- this.npm.output(table.toString())
- }
- })
+ this.npm.output(table.toString())
+ }
}
}
module.exports = Org
diff --git a/lib/commands/outdated.js b/lib/commands/outdated.js
index 042b776f7..9e2060658 100644
--- a/lib/commands/outdated.js
+++ b/lib/commands/outdated.js
@@ -196,6 +196,7 @@ class Outdated extends ArboristWorkspaceCmd {
try {
alias = npa(edge.spec).subSpec
} catch (err) {
+ // ignore errors, no alias
}
const spec = npa(alias ? alias.name : edge.name)
const node = edge.to || edge
diff --git a/lib/commands/token.js b/lib/commands/token.js
index cf3b8cbee..de8e61101 100644
--- a/lib/commands/token.js
+++ b/lib/commands/token.js
@@ -140,32 +140,27 @@ class Token extends BaseCommand {
const cidr = conf.cidr
const readonly = conf.readOnly
- return readUserInfo
- .password()
- .then(password => {
- const validCIDR = this.validateCIDRList(cidr)
- log.info('token', 'creating')
- return pulseTillDone.withPromise(
- otplease(this.npm, conf, conf => {
- return profile.createToken(password, readonly, validCIDR, conf)
- })
- )
- })
- .then(result => {
- delete result.key
- delete result.updated
- if (conf.json) {
- this.npm.output(JSON.stringify(result))
- } else if (conf.parseable) {
- Object.keys(result).forEach(k => this.npm.output(k + '\t' + result[k]))
- } else {
- const table = new Table()
- for (const k of Object.keys(result)) {
- table.push({ [chalk.bold(k)]: String(result[k]) })
- }
- this.npm.output(table.toString())
- }
+ const password = await readUserInfo.password()
+ const validCIDR = this.validateCIDRList(cidr)
+ log.info('token', 'creating')
+ const result = await pulseTillDone.withPromise(
+ otplease(this.npm, conf, conf => {
+ return profile.createToken(password, readonly, validCIDR, conf)
})
+ )
+ delete result.key
+ delete result.updated
+ if (conf.json) {
+ this.npm.output(JSON.stringify(result))
+ } else if (conf.parseable) {
+ Object.keys(result).forEach(k => this.npm.output(k + '\t' + result[k]))
+ } else {
+ const table = new Table()
+ for (const k of Object.keys(result)) {
+ table.push({ [chalk.bold(k)]: String(result[k]) })
+ }
+ this.npm.output(table.toString())
+ }
}
config () {
diff --git a/lib/npm.js b/lib/npm.js
index 66111cab8..b116ec5cc 100644
--- a/lib/npm.js
+++ b/lib/npm.js
@@ -112,6 +112,7 @@ class Npm extends EventEmitter {
// this is async but we dont await it, since its ok if it doesnt
// finish before the command finishes running. it uses command and argv
// so it must be initiated here, after the command name is set
+ // eslint-disable-next-line promise/catch-or-return
updateNotifier(this).then((msg) => (this.updateNotification = msg))
// Options are prefixed by a hyphen-minus (-, \u2d).
@@ -173,16 +174,15 @@ class Npm extends EventEmitter {
async load () {
if (!this.#loadPromise) {
- this.#loadPromise = this.time('npm:load', () => this[_load]().catch(er => er).then((er) => {
- this.loadErr = er
- if (!er) {
- if (this.config.get('force')) {
- log.warn('using --force', 'Recommended protections disabled.')
- }
- } else {
+ this.#loadPromise = this.time('npm:load', async () => {
+ await this[_load]().catch((er) => {
+ this.loadErr = er
throw er
+ })
+ if (this.config.get('force')) {
+ log.warn('using --force', 'Recommended protections disabled.')
}
- }))
+ })
}
return this.#loadPromise
}
@@ -229,7 +229,9 @@ class Npm extends EventEmitter {
const node = this.time('npm:load:whichnode', () => {
try {
return which.sync(process.argv[0])
- } catch {} // TODO should we throw here?
+ } catch {
+ // TODO should we throw here?
+ }
})
if (node && node.toUpperCase() !== process.execPath.toUpperCase()) {
diff --git a/lib/utils/queryable.js b/lib/utils/queryable.js
index ceb06bdcc..7c5bb7fe8 100644
--- a/lib/utils/queryable.js
+++ b/lib/utils/queryable.js
@@ -148,7 +148,9 @@ const setter = ({ data, key, value, force }) => {
let maybeIndex = Number.NaN
try {
maybeIndex = Number(_key)
- } catch (err) {}
+ } catch {
+ // leave it NaN
+ }
if (!Number.isNaN(maybeIndex)) {
_key = maybeIndex
}
diff --git a/scripts/dependency-graph.js b/scripts/dependency-graph.js
index 318b9f39b..1f1830758 100644
--- a/scripts/dependency-graph.js
+++ b/scripts/dependency-graph.js
@@ -168,7 +168,9 @@ const iterate = function (node, dependedBy, annotations, onlyOurs) {
main().then(() => {
process.exit(0)
+ return 0
}).catch(err => {
console.error(err)
process.exit(1)
+ return 1
})
diff --git a/test/lib/commands/shrinkwrap.js b/test/lib/commands/shrinkwrap.js
index e3fc1f935..812a9e23e 100644
--- a/test/lib/commands/shrinkwrap.js
+++ b/test/lib/commands/shrinkwrap.js
@@ -13,7 +13,9 @@ t.formatSnapshot = obj =>
(k, v) => {
try {
return JSON.parse(v)
- } catch {}
+ } catch {
+ // leave invalid JSON as a string
+ }
return v
},
2
diff --git a/workspaces/arborist/bin/index.js b/workspaces/arborist/bin/index.js
index 0c1e98445..ff356fafa 100755
--- a/workspaces/arborist/bin/index.js
+++ b/workspaces/arborist/bin/index.js
@@ -99,6 +99,7 @@ for (const file of commandFiles) {
if (bin.loglevel !== 'silent') {
console[process.exitCode ? 'error' : 'log'](r)
}
+ return r
})
}
}
diff --git a/workspaces/arborist/lib/arborist/build-ideal-tree.js b/workspaces/arborist/lib/arborist/build-ideal-tree.js
index 945bae56b..31a4e8c82 100644
--- a/workspaces/arborist/lib/arborist/build-ideal-tree.js
+++ b/workspaces/arborist/lib/arborist/build-ideal-tree.js
@@ -378,6 +378,7 @@ Try using the package name instead, e.g:
this.idealTree = tree
this.virtualTree = null
process.emit('timeEnd', 'idealTree:init')
+ return tree
})
}
@@ -531,12 +532,12 @@ Try using the package name instead, e.g:
// This returns a promise because we might not have the name yet,
// and need to call pacote.manifest to find the name.
- [_add] (tree, { add, saveType = null, saveBundle = false }) {
+ async [_add] (tree, { add, saveType = null, saveBundle = false }) {
// get the name for each of the specs in the list.
// ie, doing `foo@bar` we just return foo
// but if it's a url or git, we don't know the name until we
// fetch it and look in its manifest.
- return Promise.all(add.map(async rawSpec => {
+ const resolvedAdd = await Promise.all(add.map(async rawSpec => {
// We do NOT provide the path to npa here, because user-additions
// need to be resolved relative to the CWD the user is in.
const spec = await this[_retrieveSpecName](npa(rawSpec))
@@ -544,17 +545,16 @@ Try using the package name instead, e.g:
.then(spec => this[_followSymlinkPath](spec))
spec.tree = tree
return spec
- })).then(add => {
- this[_resolvedAdd].push(...add)
- // now add is a list of spec objects with names.
- // find a home for each of them!
- addRmPkgDeps.add({
- pkg: tree.package,
- add,
- saveBundle,
- saveType,
- path: this.path,
- })
+ }))
+ this[_resolvedAdd].push(...resolvedAdd)
+ // now resolvedAdd is a list of spec objects with names.
+ // find a home for each of them!
+ addRmPkgDeps.add({
+ pkg: tree.package,
+ add: resolvedAdd,
+ saveBundle,
+ saveType,
+ path: this.path,
})
}
@@ -781,17 +781,18 @@ This is a one-time fix-up, please be patient...
const spec = npa.resolve(name, id, dirname(path))
const t = `idealTree:inflate:${location}`
this.addTracker(t)
- await pacote.manifest(spec, {
- ...this.options,
- resolved: resolved,
- integrity: integrity,
- fullMetadata: false,
- }).then(mani => {
+ try {
+ const mani = await pacote.manifest(spec, {
+ ...this.options,
+ resolved: resolved,
+ integrity: integrity,
+ fullMetadata: false,
+ })
node.package = { ...mani, _id: `${mani.name}@${mani.version}` }
- }).catch((er) => {
+ } catch (er) {
const warning = `Could not fetch metadata for ${name}@${id}`
log.warn(heading, warning, er)
- })
+ }
this.finishTracker(t)
})
}
diff --git a/workspaces/arborist/lib/arborist/load-actual.js b/workspaces/arborist/lib/arborist/load-actual.js
index bca7cef94..7ab65f5b0 100644
--- a/workspaces/arborist/lib/arborist/load-actual.js
+++ b/workspaces/arborist/lib/arborist/load-actual.js
@@ -347,6 +347,7 @@ module.exports = cls => class ActualLoader extends cls {
// node_modules hierarchy, then load that node as well.
return this[_loadFSTree](link.target).then(() => link)
} else if (target.then) {
+ // eslint-disable-next-line promise/catch-or-return
target.then(node => link.target = node)
}
diff --git a/workspaces/arborist/lib/arborist/rebuild.js b/workspaces/arborist/lib/arborist/rebuild.js
index e9b79031e..7e97984c0 100644
--- a/workspaces/arborist/lib/arborist/rebuild.js
+++ b/workspaces/arborist/lib/arborist/rebuild.js
@@ -359,6 +359,9 @@ module.exports = cls => class Builder extends cls {
pkg,
path,
event,
+ // I do not know why this needs to be on THIS line but refactoring
+ // this function would be quite a process
+ // eslint-disable-next-line promise/always-return
cmd: args && args[args.length - 1],
env,
code,
diff --git a/workspaces/arborist/lib/arborist/reify.js b/workspaces/arborist/lib/arborist/reify.js
index 7663a3a34..0c9026f5e 100644
--- a/workspaces/arborist/lib/arborist/reify.js
+++ b/workspaces/arborist/lib/arborist/reify.js
@@ -69,7 +69,6 @@ const _symlink = Symbol('symlink')
const _warnDeprecated = Symbol('warnDeprecated')
const _loadBundlesAndUpdateTrees = Symbol.for('loadBundlesAndUpdateTrees')
const _submitQuickAudit = Symbol('submitQuickAudit')
-const _awaitQuickAudit = Symbol('awaitQuickAudit')
const _unpackNewModules = Symbol.for('unpackNewModules')
const _moveContents = Symbol.for('moveContents')
const _moveBackRetiredUnchanged = Symbol.for('moveBackRetiredUnchanged')
@@ -156,7 +155,8 @@ module.exports = cls => class Reifier extends cls {
await this[_reifyPackages]()
await this[_saveIdealTree](options)
await this[_copyIdealToActual]()
- await this[_awaitQuickAudit]()
+ // This is a very bad pattern and I can't wait to stop doing it
+ this.auditReport = await this.auditReport
this.finishTracker('reify')
process.emit('timeEnd', 'reify')
@@ -531,12 +531,12 @@ module.exports = cls => class Reifier extends cls {
const targets = [...roots, ...Object.keys(this[_retiredPaths])]
const unlinks = targets
.map(path => rimraf(path).catch(er => failures.push([path, er])))
- return promiseAllRejectLate(unlinks)
- .then(() => {
- if (failures.length) {
- log.warn('cleanup', 'Failed to remove some directories', failures)
- }
- })
+ return promiseAllRejectLate(unlinks).then(() => {
+ // eslint-disable-next-line promise/always-return
+ if (failures.length) {
+ log.warn('cleanup', 'Failed to remove some directories', failures)
+ }
+ })
.then(() => process.emit('timeEnd', 'reify:rollback:createSparse'))
.then(() => this[_rollbackRetireShallowNodes](er))
}
@@ -592,21 +592,21 @@ module.exports = cls => class Reifier extends cls {
this.addTracker('reify', node.name, node.location)
const { npmVersion, nodeVersion } = this.options
- const p = Promise.resolve()
- .then(async () => {
- // when we reify an optional node, check the engine and platform
- // first. be sure to ignore the --force and --engine-strict flags,
- // since we always want to skip any optional packages we can't install.
- // these checks throwing will result in a rollback and removal
- // of the mismatches
- if (node.optional) {
- checkEngine(node.package, npmVersion, nodeVersion, false)
- checkPlatform(node.package, false)
- }
- await this[_checkBins](node)
- await this[_extractOrLink](node)
- await this[_warnDeprecated](node)
- })
+ const p = Promise.resolve().then(async () => {
+ // when we reify an optional node, check the engine and platform
+ // first. be sure to ignore the --force and --engine-strict flags,
+ // since we always want to skip any optional packages we can't install.
+ // these checks throwing will result in a rollback and removal
+ // of the mismatches
+ // eslint-disable-next-line promise/always-return
+ if (node.optional) {
+ checkEngine(node.package, npmVersion, nodeVersion, false)
+ checkPlatform(node.package, false)
+ }
+ await this[_checkBins](node)
+ await this[_extractOrLink](node)
+ await this[_warnDeprecated](node)
+ })
return this[_handleOptionalFailure](node, p)
.then(() => {
@@ -916,9 +916,10 @@ module.exports = cls => class Reifier extends cls {
}
}
- [_submitQuickAudit] () {
+ async [_submitQuickAudit] () {
if (this.options.audit === false) {
- return this.auditReport = null
+ this.auditReport = null
+ return
}
// we submit the quick audit at this point in the process, as soon as
@@ -940,16 +941,10 @@ module.exports = cls => class Reifier extends cls {
)
}
- this.auditReport = AuditReport.load(tree, options)
- .then(res => {
- process.emit('timeEnd', 'reify:audit')
- this.auditReport = res
- })
- }
-
- // return the promise if we're waiting for it, or the replaced result
- [_awaitQuickAudit] () {
- return this.auditReport
+ this.auditReport = AuditReport.load(tree, options).then(res => {
+ process.emit('timeEnd', 'reify:audit')
+ return res
+ })
}
// ok! actually unpack stuff into their target locations!
@@ -1126,7 +1121,7 @@ module.exports = cls => class Reifier extends cls {
// remove the retired folders, and any deleted nodes
// If this fails, there isn't much we can do but tell the user about it.
// Thankfully, it's pretty unlikely that it'll fail, since rimraf is a tank.
- [_removeTrash] () {
+ async [_removeTrash] () {
process.emit('time', 'reify:trash')
const promises = []
const failures = []
@@ -1136,12 +1131,11 @@ module.exports = cls => class Reifier extends cls {
promises.push(rm(path))
}
- return promiseAllRejectLate(promises).then(() => {
- if (failures.length) {
- log.warn('cleanup', 'Failed to remove some directories', failures)
- }
- })
- .then(() => process.emit('timeEnd', 'reify:trash'))
+ await promiseAllRejectLate(promises)
+ if (failures.length) {
+ log.warn('cleanup', 'Failed to remove some directories', failures)
+ }
+ process.emit('timeEnd', 'reify:trash')
}
// last but not least, we save the ideal tree metadata to the package-lock
@@ -1302,7 +1296,9 @@ module.exports = cls => class Reifier extends cls {
if (semver.subset(edge.spec, node.version)) {
return false
}
- } catch {}
+ } catch {
+ // ignore errors
+ }
}
return true
}
diff --git a/workspaces/arborist/lib/audit-report.js b/workspaces/arborist/lib/audit-report.js
index 9bef84686..387919f61 100644
--- a/workspaces/arborist/lib/audit-report.js
+++ b/workspaces/arborist/lib/audit-report.js
@@ -175,7 +175,9 @@ class AuditReport extends Map {
} else {
// calculate a metavuln, if necessary
const calc = this.calculator.calculate(dep.packageName, advisory)
+ // eslint-disable-next-line promise/always-return
p.push(calc.then(meta => {
+ // eslint-disable-next-line promise/always-return
if (meta.testVersion(dep.version, spec)) {
advisories.add(meta)
}
diff --git a/workspaces/arborist/lib/link.js b/workspaces/arborist/lib/link.js
index 6fed06377..d58c6e237 100644
--- a/workspaces/arborist/lib/link.js
+++ b/workspaces/arborist/lib/link.js
@@ -66,6 +66,7 @@ class Link extends Node {
// can set to a promise during an async tree build operation
// wait until then to assign it.
this[_target] = target
+ // eslint-disable-next-line promise/always-return, promise/catch-or-return
target.then(node => {
this[_target] = null
this.target = node
diff --git a/workspaces/arborist/lib/node.js b/workspaces/arborist/lib/node.js
index 8ec90ff3c..60ce3eda0 100644
--- a/workspaces/arborist/lib/node.js
+++ b/workspaces/arborist/lib/node.js
@@ -564,7 +564,8 @@ class Node {
// this allows us to do new Node({...}) and then set the root later.
// just make the assignment so we don't lose it, and move on.
if (!this.path || !root.realpath || !root.path) {
- return this[_root] = root
+ this[_root] = root
+ return
}
// temporarily become a root node
diff --git a/workspaces/arborist/lib/shrinkwrap.js b/workspaces/arborist/lib/shrinkwrap.js
index e2180fd4c..d5448bbcb 100644
--- a/workspaces/arborist/lib/shrinkwrap.js
+++ b/workspaces/arborist/lib/shrinkwrap.js
@@ -184,34 +184,32 @@ const assertNoNewer = async (path, data, lockTime, dir = path, seen = null) => {
? Promise.resolve([{ name: 'node_modules', isDirectory: () => true }])
: readdir(parent, { withFileTypes: true })
- return children.catch(() => [])
- .then(ents => Promise.all(ents.map(async ent => {
- const child = resolve(parent, ent.name)
- if (ent.isDirectory() && !/^\./.test(ent.name)) {
- await assertNoNewer(path, data, lockTime, child, seen)
- } else if (ent.isSymbolicLink()) {
- const target = resolve(parent, await readlink(child))
- const tstat = await stat(target).catch(
- /* istanbul ignore next - windows */ () => null)
- seen.add(relpath(path, child))
- /* istanbul ignore next - windows cannot do this */
- if (tstat && tstat.isDirectory() && !seen.has(relpath(path, target))) {
- await assertNoNewer(path, data, lockTime, target, seen)
- }
- }
- })))
- .then(() => {
- if (dir !== path) {
- return
+ const ents = await children.catch(() => [])
+ await Promise.all(ents.map(async ent => {
+ const child = resolve(parent, ent.name)
+ if (ent.isDirectory() && !/^\./.test(ent.name)) {
+ await assertNoNewer(path, data, lockTime, child, seen)
+ } else if (ent.isSymbolicLink()) {
+ const target = resolve(parent, await readlink(child))
+ const tstat = await stat(target).catch(
+ /* istanbul ignore next - windows */ () => null)
+ seen.add(relpath(path, child))
+ /* istanbul ignore next - windows cannot do this */
+ if (tstat && tstat.isDirectory() && !seen.has(relpath(path, target))) {
+ await assertNoNewer(path, data, lockTime, target, seen)
}
+ }
+ }))
+ if (dir !== path) {
+ return
+ }
- // assert that all the entries in the lockfile were seen
- for (const loc of new Set(Object.keys(data.packages))) {
- if (!seen.has(loc)) {
- throw 'missing from node_modules: ' + loc
- }
- }
- })
+ // assert that all the entries in the lockfile were seen
+ for (const loc of new Set(Object.keys(data.packages))) {
+ if (!seen.has(loc)) {
+ throw 'missing from node_modules: ' + loc
+ }
+ }
}
const _awaitingUpdate = Symbol('_awaitingUpdate')
@@ -261,7 +259,9 @@ class Shrinkwrap {
s.lockfileVersion = json.lockfileVersion
}
}
- } catch (e) {}
+ } catch {
+ // ignore errors
+ }
return s
}
@@ -442,7 +442,7 @@ class Shrinkwrap {
this.newline = newline !== undefined ? newline : this.newline
}
- load () {
+ async load () {
// we don't need to load package-lock.json except for top of tree nodes,
// only npm-shrinkwrap.json.
return this[_maybeRead]().then(([sw, lock, yarn]) => {
@@ -464,7 +464,9 @@ class Shrinkwrap {
// ignore invalid yarn data. we'll likely clobber it later anyway.
try {
this.yarnLock.parse(yarn)
- } catch (_) {}
+ } catch {
+ // ignore errors
+ }
}
return data ? parseJSON(data) : {}
@@ -515,8 +517,10 @@ class Shrinkwrap {
!(lock.lockfileVersion >= 2) && !lock.requires
// load old lockfile deps into the packages listing
+ // eslint-disable-next-line promise/always-return
if (lock.dependencies && !lock.packages) {
return rpj(this.path + '/package.json').then(pkg => pkg, er => ({}))
+ // eslint-disable-next-line promise/always-return
.then(pkg => {
this[_loadAll]('', null, this.data)
this[_fixDependencies](pkg)
diff --git a/workspaces/arborist/lib/signal-handling.js b/workspaces/arborist/lib/signal-handling.js
index 0afbb05dc..18841d944 100644
--- a/workspaces/arborist/lib/signal-handling.js
+++ b/workspaces/arborist/lib/signal-handling.js
@@ -19,7 +19,9 @@ const setup = fn => {
for (const sig of signals) {
try {
process.removeListener(sig, sigListeners[sig])
- } catch (er) {}
+ } catch {
+ // ignore errors
+ }
}
process.removeListener('beforeExit', onBeforeExit)
sigListeners.loaded = false
@@ -62,7 +64,9 @@ const setup = fn => {
process.setMaxListeners(length + 1)
}
process.on(sig, sigListeners[sig])
- } catch (er) {}
+ } catch {
+ // ignore errors
+ }
}
sigListeners.loaded = true
diff --git a/workspaces/arborist/lib/spec-from-lock.js b/workspaces/arborist/lib/spec-from-lock.js
index 789741976..49b53c8f6 100644
--- a/workspaces/arborist/lib/spec-from-lock.js
+++ b/workspaces/arborist/lib/spec-from-lock.js
@@ -21,10 +21,12 @@ const specFromLock = (name, lock, where) => {
if (lock.resolved) {
return npa.resolve(name, lock.resolved, where)
}
- } catch (_) { }
+ } catch {
+ // ignore errors
+ }
try {
return npa.resolve(name, lock.version, where)
- } catch (_) {
+ } catch {
return {}
}
}
diff --git a/workspaces/arborist/scripts/benchmark.js b/workspaces/arborist/scripts/benchmark.js
index f6b2b0294..f4d26871b 100644
--- a/workspaces/arborist/scripts/benchmark.js
+++ b/workspaces/arborist/scripts/benchmark.js
@@ -151,7 +151,7 @@ const suite = new Suite({
}
},
- onComplete () {
+ async onComplete () {
rimraf.sync(lastBenchmark)
mkdirp.sync(resolve(__dirname, 'benchmark/saved'))
// always save with sha
@@ -168,12 +168,13 @@ const suite = new Suite({
}
linkSync(saveThis, lastBenchmark)
- teardown().then(() => Promise.all([
+ await teardown()
+ await Promise.all([
registryServer.stop(),
new Promise((res, rej) => {
rimraf(this.cache, er => er ? rej(er) : res())
}),
- ]))
+ ])
},
})
diff --git a/workspaces/arborist/scripts/benchmark/reify.js b/workspaces/arborist/scripts/benchmark/reify.js
index b826533d0..f477cfbd1 100644
--- a/workspaces/arborist/scripts/benchmark/reify.js
+++ b/workspaces/arborist/scripts/benchmark/reify.js
@@ -49,10 +49,9 @@ const suite = async (suite, { registry, cache }) => {
version: '1.0.0',
dependencies,
}))
- await arb.reify().then(() => {
- // grab this so we can make setup faster
- packageLock = require(resolve(path, 'package-lock.json'))
- })
+ await arb.reify()
+ // grab this so we can make setup faster
+ packageLock = require(resolve(path, 'package-lock.json'))
}
// just reify them all fast. we'll remove the bits we don't want later.
@@ -96,14 +95,13 @@ const suite = async (suite, { registry, cache }) => {
rimraf.sync(resolve(path, 'cache'))
}
},
- fn (d) {
- new Arborist({
+ async fn (d) {
+ await new Arborist({
path,
registry,
cache: /no-cache/.test(path) ? resolve(path, 'cache') : cache,
- }).reify().then(() => d.resolve(), er => {
- throw er
- })
+ }).reify()
+ d.resolve()
},
})
}
diff --git a/workspaces/arborist/test/arborist/build-ideal-tree.js b/workspaces/arborist/test/arborist/build-ideal-tree.js
index 0aadd7adf..87783086b 100644
--- a/workspaces/arborist/test/arborist/build-ideal-tree.js
+++ b/workspaces/arborist/test/arborist/build-ideal-tree.js
@@ -1080,7 +1080,7 @@ t.test('pathologically nested dependency cycle', async t => {
resolve(fixtures, 'pathological-dep-nesting-cycle')))
})
-t.test('resolve file deps from cwd', t => {
+t.test('resolve file deps from cwd', async t => {
const cwd = process.cwd()
t.teardown(() => process.chdir(cwd))
const path = t.testdir({
@@ -1094,17 +1094,16 @@ t.test('resolve file deps from cwd', t => {
path: resolve(path, 'global'),
...OPT,
})
- return arb.buildIdealTree({
+ const tree = await arb.buildIdealTree({
path: `${path}/local`,
add: ['child-1.2.3.tgz'],
global: true,
- }).then(tree => {
- const resolved = `file:${resolve(fixturedir, 'child-1.2.3.tgz')}`
- t.equal(normalizePath(tree.children.get('child').resolved), normalizePath(resolved))
})
+ const resolved = `file:${resolve(fixturedir, 'child-1.2.3.tgz')}`
+ t.equal(normalizePath(tree.children.get('child').resolved), normalizePath(resolved))
})
-t.test('resolve links in global mode', t => {
+t.test('resolve links in global mode', async t => {
const cwd = process.cwd()
t.teardown(() => process.chdir(cwd))
const path = t.testdir({
@@ -1127,13 +1126,12 @@ t.test('resolve links in global mode', t => {
global: true,
path: resolve(path, 'global'),
})
- return arb.buildIdealTree({
+ const tree = await arb.buildIdealTree({
add: ['file:../../linked-dep'],
global: true,
- }).then(tree => {
- const resolved = 'file:../../linked-dep'
- t.equal(tree.children.get('linked-dep').resolved, resolved)
})
+ const resolved = 'file:../../linked-dep'
+ t.equal(tree.children.get('linked-dep').resolved, resolved)
})
t.test('dont get confused if root matches duped metadep', async t => {
diff --git a/workspaces/arborist/test/arborist/load-actual.js b/workspaces/arborist/test/arborist/load-actual.js
index 72ab5e60f..9c2c8bf1c 100644
--- a/workspaces/arborist/test/arborist/load-actual.js
+++ b/workspaces/arborist/test/arborist/load-actual.js
@@ -170,34 +170,34 @@ t.test('shake out Link target timing issue', t => {
t.matchSnapshot(tree, 'loaded tree'))
})
-t.test('broken json', t =>
- loadActual(resolve(fixtures, 'bad')).then(d => {
- t.ok(d.errors.length, 'Got an error object')
- t.equal(d.errors[0] && d.errors[0].code, 'EJSONPARSE')
- t.ok(d, 'Got a tree')
- }))
+t.test('broken json', async t => {
+ const d = await loadActual(resolve(fixtures, 'bad'))
+ t.ok(d.errors.length, 'Got an error object')
+ t.equal(d.errors[0] && d.errors[0].code, 'EJSONPARSE')
+ t.ok(d, 'Got a tree')
+})
-t.test('missing json does not obscure deeper errors', t =>
- loadActual(resolve(fixtures, 'empty')).then(d => {
- t.match(d, { errors: [{ code: 'ENOENT' }] },
- 'Error reading json of top level')
- t.match(d.children.get('foo'), { errors: [{ code: 'EJSONPARSE' }] },
- 'Error parsing JSON of child node')
- }))
+t.test('missing json does not obscure deeper errors', async t => {
+ const d = await loadActual(resolve(fixtures, 'empty'))
+ t.match(d, { errors: [{ code: 'ENOENT' }] },
+ 'Error reading json of top level')
+ t.match(d.children.get('foo'), { errors: [{ code: 'EJSONPARSE' }] },
+ 'Error parsing JSON of child node')
+})
t.test('missing folder', t =>
t.rejects(loadActual(resolve(fixtures, 'does-not-exist')), {
code: 'ENOENT',
}))
-t.test('missing symlinks', t =>
- loadActual(resolve(fixtures, 'badlink')).then(d => {
- t.equal(d.children.size, 2, 'both broken children are included')
- t.match(d.children.get('foo'), { errors: [{ code: 'ELOOP' }] },
- 'foo has error')
- t.match(d.children.get('bar'), { errors: [{ code: 'ENOENT' }] },
- 'bar has error')
- }))
+t.test('missing symlinks', async t => {
+ const d = await loadActual(resolve(fixtures, 'badlink'))
+ t.equal(d.children.size, 2, 'both broken children are included')
+ t.match(d.children.get('foo'), { errors: [{ code: 'ELOOP' }] },
+ 'foo has error')
+ t.match(d.children.get('bar'), { errors: [{ code: 'ENOENT' }] },
+ 'bar has error')
+})
t.test('load from a hidden lockfile', async (t) => {
const tree = await loadActual(resolve(fixtures, 'hidden-lockfile'))
diff --git a/workspaces/arborist/test/arborist/reify.js b/workspaces/arborist/test/arborist/reify.js
index db5a9c1fe..01945f713 100644
--- a/workspaces/arborist/test/arborist/reify.js
+++ b/workspaces/arborist/test/arborist/reify.js
@@ -232,6 +232,7 @@ t.test('omit peer deps', t => {
}
const lock = require(tree.path + '/package-lock.json')
+ // eslint-disable-next-line promise/always-return
for (const [loc, meta] of Object.entries(lock.packages)) {
if (meta.peer) {
t.throws(() => fs.statSync(resolve(path, loc)), 'peer not reified')
@@ -240,6 +241,7 @@ t.test('omit peer deps', t => {
}
}
})
+ // eslint-disable-next-line promise/always-return
.then(() => {
process.removeListener('time', onTime)
process.removeListener('timeEnd', onTimeEnd)
@@ -335,15 +337,15 @@ t.test('omit optional dep', t => {
const ignoreScripts = true
const arb = newArb({ path, ignoreScripts })
- return arb.reify({ omit: ['optional'] })
- .then(tree => {
- t.equal(tree.children.get('fsevents'), undefined, 'no fsevents in tree')
- t.throws(() => fs.statSync(path + '/node_modules/fsevents'), 'no fsevents unpacked')
- t.match(require(path + '/package-lock.json').dependencies.fsevents, {
- dev: true,
- optional: true,
- }, 'fsevents present in lockfile')
- })
+ // eslint-disable-next-line promise/always-return
+ return arb.reify({ omit: ['optional'] }).then(tree => {
+ t.equal(tree.children.get('fsevents'), undefined, 'no fsevents in tree')
+ t.throws(() => fs.statSync(path + '/node_modules/fsevents'), 'no fsevents unpacked')
+ t.match(require(path + '/package-lock.json').dependencies.fsevents, {
+ dev: true,
+ optional: true,
+ }, 'fsevents present in lockfile')
+ })
.then(() => t.ok(arb.diff, 'has a diff tree'))
})
@@ -737,6 +739,7 @@ t.test('rollbacks', { buffered: false }, t => {
return t.rejects(a.reify({
update: ['@isaacs/testing-bundledeps-parent'],
}).then(tree => 'it worked'), new Error('poop'))
+ // eslint-disable-next-line promise/always-return
.then(() => {
const warnings = check()
t.equal(warnings.length, 2)
@@ -858,6 +861,7 @@ t.test('rollbacks', { buffered: false }, t => {
return t.resolveMatchSnapshot(a.reify({
update: ['@isaacs/testing-bundledeps-parent'],
save: false,
+ // eslint-disable-next-line promise/always-return
}).then(tree => printTree(tree))).then(() => {
const warnings = check()
t.equal(warnings.length, 2)
@@ -1019,6 +1023,7 @@ t.test('saving the ideal tree', t => {
// NB: these are all going to be marked as extraneous, because we're
// skipping the actual buildIdealTree step that flags them properly
return a[kSaveIdealTree]({})
+ // eslint-disable-next-line promise/always-return
}).then(saved => {
t.ok(saved, 'true, because it was saved')
t.matchSnapshot(require(path + '/package-lock.json'), 'lock after save')
@@ -1159,12 +1164,10 @@ t.test('workspaces', t => {
t.test('reify simple-workspaces', t =>
t.resolveMatchSnapshot(printReified(fixture(t, 'workspaces-simple')), 'should reify simple workspaces'))
- t.test('reify workspaces lockfile', t => {
+ t.test('reify workspaces lockfile', async t => {
const path = fixture(t, 'workspaces-simple')
- reify(path).then(() => {
- t.matchSnapshot(require(path + '/package-lock.json'), 'should lock workspaces config')
- t.end()
- })
+ await reify(path)
+ t.matchSnapshot(require(path + '/package-lock.json'), 'should lock workspaces config')
})
t.test('reify workspaces bin files', t => {
@@ -1195,20 +1198,16 @@ t.test('workspaces', t => {
'should not clean up entire nm folder for no reason'
))
- t.test('add new workspaces dep', t => {
+ t.test('add new workspaces dep', async t => {
const path = fixture(t, 'workspaces-add-new-dep')
- reify(path).then(() => {
- t.matchSnapshot(require(path + '/package-lock.json'), 'should update package-lock with new added dep')
- t.end()
- })
+ await reify(path)
+ t.matchSnapshot(require(path + '/package-lock.json'), 'should update package-lock with new added dep')
})
- t.test('root as-a-workspace', t => {
+ t.test('root as-a-workspace', async t => {
const path = fixture(t, 'workspaces-root-linked')
- reify(path).then(() => {
- t.matchSnapshot(require(path + '/package-lock.json'), 'should produce expected package-lock file')
- t.end()
- })
+ await reify(path)
+ t.matchSnapshot(require(path + '/package-lock.json'), 'should produce expected package-lock file')
})
t.end()
diff --git a/workspaces/arborist/test/shrinkwrap.js b/workspaces/arborist/test/shrinkwrap.js
index 07178062e..46f8ceccf 100644
--- a/workspaces/arborist/test/shrinkwrap.js
+++ b/workspaces/arborist/test/shrinkwrap.js
@@ -54,31 +54,31 @@ t.test('load and change lockfileVersion', async t => {
t.equal(v3Data.dependencies, undefined, 'v3 data does not have dependencies')
})
-t.test('load and then reset gets empty lockfile', t =>
- Shrinkwrap.load({ path: fixture }).then(sw => {
- sw.reset()
- t.strictSame(sw.data, {
- lockfileVersion: 2,
- requires: true,
- dependencies: {},
- packages: {},
- })
- t.equal(sw.loadedFromDisk, true)
- t.equal(sw.filename, resolve(fixture, 'package-lock.json'))
- }))
+t.test('load and then reset gets empty lockfile', async t => {
+ const sw = await Shrinkwrap.load({ path: fixture })
+ sw.reset()
+ t.strictSame(sw.data, {
+ lockfileVersion: 2,
+ requires: true,
+ dependencies: {},
+ packages: {},
+ })
+ t.equal(sw.loadedFromDisk, true)
+ t.equal(sw.filename, resolve(fixture, 'package-lock.json'))
+})
-t.test('starting out with a reset lockfile is an empty lockfile', t =>
- Shrinkwrap.reset({ path: fixture }).then(sw => {
- t.strictSame(sw.data, {
- lockfileVersion: 2,
- requires: true,
- dependencies: {},
- packages: {},
- })
- t.equal(sw.originalLockfileVersion, 2)
- t.equal(sw.loadedFromDisk, true)
- t.equal(sw.filename, resolve(fixture, 'package-lock.json'))
- }))
+t.test('starting out with a reset lockfile is an empty lockfile', async t => {
+ const sw = await Shrinkwrap.reset({ path: fixture })
+ t.strictSame(sw.data, {
+ lockfileVersion: 2,
+ requires: true,
+ dependencies: {},
+ packages: {},
+ })
+ t.equal(sw.originalLockfileVersion, 2)
+ t.equal(sw.loadedFromDisk, true)
+ t.equal(sw.filename, resolve(fixture, 'package-lock.json'))
+})
t.test('reset in a bad dir gets an empty lockfile with no lockfile version', async t => {
const nullLockDir = t.testdir({
@@ -109,103 +109,103 @@ t.test('reset in a bad dir gets an empty lockfile with no lockfile version', asy
t.equal(swNullLock.loadedFromDisk, true)
})
-t.test('loading in bad dir gets empty lockfile', t =>
- Shrinkwrap.load({ path: 'path/which/does/not/exist' }).then(sw => {
- t.strictSame(sw.data, {
- lockfileVersion: 2,
- requires: true,
- dependencies: {},
- packages: {},
- })
- t.equal(sw.loadedFromDisk, false)
- }))
+t.test('loading in bad dir gets empty lockfile', async t => {
+ const sw = await Shrinkwrap.load({ path: 'path/which/does/not/exist' })
+ t.strictSame(sw.data, {
+ lockfileVersion: 2,
+ requires: true,
+ dependencies: {},
+ packages: {},
+ })
+ t.equal(sw.loadedFromDisk, false)
+})
-t.test('failure to parse json gets empty lockfile', t =>
- Shrinkwrap.load({ path: badJsonFixture }).then(sw => {
- t.strictSame(sw.data, {
- lockfileVersion: 2,
- requires: true,
- dependencies: {},
- packages: {},
- })
- t.equal(sw.loadedFromDisk, false)
- }))
+t.test('failure to parse json gets empty lockfile', async t => {
+ const sw = await Shrinkwrap.load({ path: badJsonFixture })
+ t.strictSame(sw.data, {
+ lockfileVersion: 2,
+ requires: true,
+ dependencies: {},
+ packages: {},
+ })
+ t.equal(sw.loadedFromDisk, false)
+})
-t.test('loading in empty dir gets empty lockfile', t =>
- Shrinkwrap.load({ path: emptyFixture }).then(sw => {
- t.strictSame(sw.data, {
- lockfileVersion: 2,
- requires: true,
- dependencies: {},
- packages: {},
- })
- t.equal(sw.loadedFromDisk, false)
- // update with an empty node, set name to node name, not package name
- const root = new Node({
- path: emptyFixture,
- realpath: emptyFixture,
- })
- root.peer = false
- root.dev = false
- root.devOptional = false
- root.optional = false
- root.extraneous = false
- sw.add(root)
- t.strictSame(sw.commit(), {
- name: 'empty',
- lockfileVersion: 2,
- requires: true,
- packages: {},
- })
- }))
+t.test('loading in empty dir gets empty lockfile', async t => {
+ const sw = await Shrinkwrap.load({ path: emptyFixture })
+ t.strictSame(sw.data, {
+ lockfileVersion: 2,
+ requires: true,
+ dependencies: {},
+ packages: {},
+ })
+ t.equal(sw.loadedFromDisk, false)
+ // update with an empty node, set name to node name, not package name
+ const root = new Node({
+ path: emptyFixture,
+ realpath: emptyFixture,
+ })
+ root.peer = false
+ root.dev = false
+ root.devOptional = false
+ root.optional = false
+ root.extraneous = false
+ sw.add(root)
+ t.strictSame(sw.commit(), {
+ name: 'empty',
+ lockfileVersion: 2,
+ requires: true,
+ packages: {},
+ })
+})
-t.test('look up from locks and such', t =>
- new Shrinkwrap({ path: fixture }).load().then(m => {
- t.strictSame(m.get(''), {
- name: 'a',
- version: '1.2.3',
- dependencies: {
- abbrev: '^1.1.1',
- 'full-git-url': 'git+https://github.com/isaacs/abbrev-js.git',
- ghshort: 'github:isaacs/abbrev-js',
- old: 'npm:abbrev@^1.0.3',
- pinned: 'npm:abbrev@^1.1.1',
- reg: 'npm:abbrev@^1.1.1',
- remote: 'https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz',
- symlink: 'file:./abbrev-link-target',
- tarball: 'file:abbrev-1.1.1.tgz',
- bundler: '1.2.3',
- },
- }, 'root metadata')
- t.match(m.data, {
- lockfileVersion: 2,
- requires: true,
- dependencies: Object,
- packages: Object,
- })
- t.equal(m.loadedFromDisk, true)
- t.matchSnapshot(m.get('node_modules/abbrev'), 'basic package')
+t.test('look up from locks and such', async t => {
+ const m = await new Shrinkwrap({ path: fixture }).load()
+ t.strictSame(m.get(''), {
+ name: 'a',
+ version: '1.2.3',
+ dependencies: {
+ abbrev: '^1.1.1',
+ 'full-git-url': 'git+https://github.com/isaacs/abbrev-js.git',
+ ghshort: 'github:isaacs/abbrev-js',
+ old: 'npm:abbrev@^1.0.3',
+ pinned: 'npm:abbrev@^1.1.1',
+ reg: 'npm:abbrev@^1.1.1',
+ remote: 'https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz',
+ symlink: 'file:./abbrev-link-target',
+ tarball: 'file:abbrev-1.1.1.tgz',
+ bundler: '1.2.3',
+ },
+ }, 'root metadata')
+ t.match(m.data, {
+ lockfileVersion: 2,
+ requires: true,
+ dependencies: Object,
+ packages: Object,
+ })
+ t.equal(m.loadedFromDisk, true)
+ t.matchSnapshot(m.get('node_modules/abbrev'), 'basic package')
+ t.matchSnapshot(m.get(
+ 'node_modules/abbrev/node_modules/@scope/name/node_modules/@otherscope/othername', 'scoped package'))
+ t.matchSnapshot(m.get('package/not/found'), 'not found')
+
+ t.matchSnapshot(m.get('node_modules/old/node_modules/notfound'), 'fall off the dep tree')
+
+ t.test('lockfile', t => {
+ const p = m.data.packages
+ m.data.packages = {}
+ t.matchSnapshot(m.get('node_modules/abbrev'), 'basic pkg, from lock')
+ t.matchSnapshot(m.data.packages, 'saved fetched metadata back to packages section')
t.matchSnapshot(m.get(
'node_modules/abbrev/node_modules/@scope/name/node_modules/@otherscope/othername', 'scoped package'))
t.matchSnapshot(m.get('package/not/found'), 'not found')
-
- t.matchSnapshot(m.get('node_modules/old/node_modules/notfound'), 'fall off the dep tree')
-
- t.test('lockfile', t => {
- const p = m.data.packages
- m.data.packages = {}
- t.matchSnapshot(m.get('node_modules/abbrev'), 'basic pkg, from lock')
- t.matchSnapshot(m.data.packages, 'saved fetched metadata back to packages section')
- t.matchSnapshot(m.get(
- 'node_modules/abbrev/node_modules/@scope/name/node_modules/@otherscope/othername', 'scoped package'))
- t.matchSnapshot(m.get('package/not/found'), 'not found')
- t.matchSnapshot(m.get('node_modules/full-git-url'), 'full git')
- t.matchSnapshot(m.get('node_modules/symlink'), 'symlink')
- t.matchSnapshot(m.get('node_modules/unhosted-git'), 'unhosted git')
- m.data.packages = p
- t.end()
- })
- }))
+ t.matchSnapshot(m.get('node_modules/full-git-url'), 'full git')
+ t.matchSnapshot(m.get('node_modules/symlink'), 'symlink')
+ t.matchSnapshot(m.get('node_modules/unhosted-git'), 'unhosted git')
+ m.data.packages = p
+ t.end()
+ })
+})
t.test('load a shrinkwrap with some dev and optional flags', t =>
Shrinkwrap.load({ path: depTypesFixture }).then(m =>
@@ -616,39 +616,38 @@ t.test('saving dependency-free shrinkwrap object', t => {
t.test('write the shrinkwrap back to disk', t => {
const dir = t.testdir({})
- t.test('just read and write back', t =>
- Shrinkwrap.load({ path: fixture }).then(s => {
- s.filename = dir + '/test-shrinkwrap.json'
- return s.save().then(() =>
- t.strictSame(require(s.filename), s.data, 'saved json matches data'))
- }))
- t.test('write back with pending changes', t =>
- Shrinkwrap.load({ path: fixture }).then(s => {
- const dir = t.testdir({})
- s.filename = dir + '/test-shrinkwrap-with-changes.json'
- const node = new Node({
- path: fixture + '/node_modules/newthing',
- pkg: {
- name: 'newthing',
- version: '1.2.3',
- },
- })
- s.add(node)
- const preCommit = JSON.parse(JSON.stringify(s.data))
- const postCommit = s.commit()
- t.notSame(postCommit, preCommit, 'committing changes the data')
- // delete and re-add to put us back in the pre-commit state
- s.delete(node.location)
- s.add(node)
- return s.save().then(() => {
- t.strictSame(s.data, postCommit, 'committed changes to data')
- t.strictSame(require(s.filename), s.data, 'saved json matches data')
- })
- }))
+ t.test('just read and write back', async t => {
+ const s = await Shrinkwrap.load({ path: fixture })
+ s.filename = dir + '/test-shrinkwrap.json'
+ await s.save()
+ t.strictSame(require(s.filename), s.data, 'saved json matches data')
+ })
+ t.test('write back with pending changes', async t => {
+ const s = await Shrinkwrap.load({ path: fixture })
+ const dir = t.testdir({})
+ s.filename = dir + '/test-shrinkwrap-with-changes.json'
+ const node = new Node({
+ path: fixture + '/node_modules/newthing',
+ pkg: {
+ name: 'newthing',
+ version: '1.2.3',
+ },
+ })
+ s.add(node)
+ const preCommit = JSON.parse(JSON.stringify(s.data))
+ const postCommit = s.commit()
+ t.notSame(postCommit, preCommit, 'committing changes the data')
+ // delete and re-add to put us back in the pre-commit state
+ s.delete(node.location)
+ s.add(node)
+ await s.save()
+ t.strictSame(s.data, postCommit, 'committed changes to data')
+ t.strictSame(require(s.filename), s.data, 'saved json matches data')
+ })
t.end()
})
-t.test('load shrinkwrap if no package-lock.json present', t => {
+t.test('load shrinkwrap if no package-lock.json present', async t => {
const dir = t.testdir({
'npm-shrinkwrap.json': JSON.stringify({
lockfileVersion: 1,
@@ -656,41 +655,39 @@ t.test('load shrinkwrap if no package-lock.json present', t => {
version: '1.2.3',
}),
})
- return Promise.all([
- Shrinkwrap.load({ path: dir, shrinkwrapOnly: true }).then(s =>
- t.equal(s.type, 'npm-shrinkwrap.json', 'loaded with swonly')),
- Shrinkwrap.reset({ path: dir, shrinkwrapOnly: true }).then(s =>
- t.equal(s.type, 'npm-shrinkwrap.json', 'loaded fresh')),
- Shrinkwrap.load({ path: dir }).then(s =>
- t.equal(s.type, 'npm-shrinkwrap.json', 'loaded without swonly')),
- Shrinkwrap.reset({ path: dir }).then(s =>
- t.equal(s.type, 'npm-shrinkwrap.json', 'loaded fresh without swonly')),
- ])
+ let s
+ s = await Shrinkwrap.load({ path: dir, shrinkwrapOnly: true })
+ t.equal(s.type, 'npm-shrinkwrap.json', 'loaded with swonly')
+ s = await Shrinkwrap.reset({ path: dir, shrinkwrapOnly: true })
+ t.equal(s.type, 'npm-shrinkwrap.json', 'loaded fresh')
+ s = await Shrinkwrap.load({ path: dir })
+ t.equal(s.type, 'npm-shrinkwrap.json', 'loaded without swonly')
+ s = await Shrinkwrap.reset({ path: dir })
+ t.equal(s.type, 'npm-shrinkwrap.json', 'loaded fresh without swonly')
})
-t.test('load yarn.lock file if present', t =>
- Shrinkwrap.load({ path: yarnFixture }).then(s => {
- t.type(s.yarnLock, YarnLock, 'loaded a yarn lock file')
- t.not(s.yarnLock.entries.size, 0, 'got some entries')
- }))
+t.test('load yarn.lock file if present', async t => {
+ const s = await Shrinkwrap.load({ path: yarnFixture })
+ t.type(s.yarnLock, YarnLock, 'loaded a yarn lock file')
+ t.not(s.yarnLock.entries.size, 0, 'got some entries')
+})
-t.test('save yarn lock if loaded', t =>
- Shrinkwrap.load({ path: yarnFixture }).then(s => {
- s.path = t.testdir()
- s.filename = s.path + '/package-lock.json'
- return s.save()
- .then(() => Shrinkwrap.load({ path: s.path }))
- .then(ss => t.strictSame(s.yarnLock, ss.yarnLock))
- }))
+t.test('save yarn lock if loaded', async t => {
+ const s = await Shrinkwrap.load({ path: yarnFixture })
+ s.path = t.testdir()
+ s.filename = s.path + '/package-lock.json'
+ await s.save()
+ const ss = await Shrinkwrap.load({ path: s.path })
+ t.strictSame(s.yarnLock, ss.yarnLock)
+})
-t.test('ignore yarn lock file parse errors', t => {
+t.test('ignore yarn lock file parse errors', async t => {
const dir = t.testdir({
'yarn.lock': 'this is not a yarn lock file!',
})
- return Shrinkwrap.load({ path: dir }).then(s => {
- t.type(s.yarnLock, YarnLock, 'got a yarn lock object because a yarn lock exists')
- t.equal(s.yarnLock.entries.size, 0, 'did not get any entries out of it')
- })
+ const s = await Shrinkwrap.load({ path: dir })
+ t.type(s.yarnLock, YarnLock, 'got a yarn lock object because a yarn lock exists')
+ t.equal(s.yarnLock.entries.size, 0, 'did not get any entries out of it')
})
t.test('load a resolution from yarn.lock if we dont have our own', async t => {
@@ -811,38 +808,38 @@ t.test('handle missing dependencies object without borking', t => {
t.end()
})
-t.test('load a hidden lockfile', t => {
+t.test('load a hidden lockfile', async t => {
// ensure the hidden lockfile is newer than the contents
// otherwise this can fail on a fresh checkout.
fs.utimesSync(resolve(hiddenLockfileFixture, hidden), new Date(), new Date())
- return Shrinkwrap.load({
+ const s = await Shrinkwrap.load({
path: hiddenLockfileFixture,
hiddenLockfile: true,
- }).then(s => {
- t.matchSnapshot(s.data)
- // make sure it does not add to the dependencies block when a new
- // node is added.
- s.data.dependencies = {}
- s.add(new Node({
- path: hiddenLockfileFixture + '/node_modules/foo',
- pkg: {
- name: 'foo',
- version: '1.2.3',
- _integrity: 'sha512-deadbeef',
- _resolved: 'https://registry.npmjs.org/foo/-/foo-1.2.3.tgz',
- },
- }))
- t.strictSame(s.data.dependencies, {}, 'did not add to legacy data')
- const data = s.commit()
- t.equal(data.packages[''], undefined, 'no root entry')
- t.equal(data.dependencies, undefined, 'deleted legacy metadata')
})
+ t.matchSnapshot(s.data)
+ // make sure it does not add to the dependencies block when a new
+ // node is added.
+ s.data.dependencies = {}
+ s.add(new Node({
+ path: hiddenLockfileFixture + '/node_modules/foo',
+ pkg: {
+ name: 'foo',
+ version: '1.2.3',
+ _integrity: 'sha512-deadbeef',
+ _resolved: 'https://registry.npmjs.org/foo/-/foo-1.2.3.tgz',
+ },
+ }))
+ t.strictSame(s.data.dependencies, {}, 'did not add to legacy data')
+ const data = s.commit()
+ t.equal(data.packages[''], undefined, 'no root entry')
+ t.equal(data.dependencies, undefined, 'deleted legacy metadata')
})
-t.test('load a fresh hidden lockfile', t => Shrinkwrap.reset({
- path: hiddenLockfileFixture,
- hiddenLockfile: true,
-}).then(sw => {
+t.test('load a fresh hidden lockfile', async t => {
+ const sw = await Shrinkwrap.reset({
+ path: hiddenLockfileFixture,
+ hiddenLockfile: true,
+ })
t.strictSame(sw.data, {
lockfileVersion: 3,
requires: true,
@@ -851,7 +848,7 @@ t.test('load a fresh hidden lockfile', t => Shrinkwrap.reset({
})
t.equal(sw.loadedFromDisk, true)
t.equal(sw.filename, resolve(hiddenLockfileFixture, hidden))
-}))
+})
t.test('hidden lockfile only used if up to date', async t => {
const lockdata = require(resolve(hiddenLockfileFixture, hidden))
@@ -1188,18 +1185,19 @@ t.test('loadActual tests', t => {
roots.push('tap-with-yarn-lock')
- t.plan(roots.length)
- roots.forEach(root => {
+ for (const root of roots) {
const path = resolve(fixtures, root)
- t.test(root, t => new Arborist({ path }).loadActual().then(tree => {
+ t.test(root, async t => {
+ const tree = await new Arborist({ path }).loadActual()
const shrinkwrap = tree.meta.commit()
t.matchSnapshot(shrinkwrap, 'shrinkwrap data')
if (tree.meta.yarnLock) {
const yarnLock = tree.meta.yarnLock.toString()
t.matchSnapshot(yarnLock, 'yarn.lock data')
}
- }))
- })
+ })
+ }
+ t.end()
})
t.test('set integrity because location and resolved match', async t => {
diff --git a/workspaces/arborist/test/yarn-lock.js b/workspaces/arborist/test/yarn-lock.js
index 06c16556c..402afb717 100644
--- a/workspaces/arborist/test/yarn-lock.js
+++ b/workspaces/arborist/test/yarn-lock.js
@@ -86,11 +86,13 @@ t.test('load a yarn lock from an actual tree', t => {
resolve(__dirname, 'fixtures/install-types'),
resolve(__dirname, 'fixtures/links-all-over'),
]
- fixtures.forEach(fixture => t.test(basename(fixture), t =>
- new Arborist({ path: fixture }).loadActual().then(tree => {
+ for (const fixture of fixtures) {
+ t.test(basename(fixture), async t => {
+ const tree = await new Arborist({ path: fixture }).loadActual()
const y = YarnLock.fromTree(tree)
t.matchSnapshot(y.toString(), 'yarn.lock from a package tree')
- })))
+ })
+ }
t.end()
})
diff --git a/workspaces/libnpmversion/lib/version.js b/workspaces/libnpmversion/lib/version.js
index 12be89b04..f14b95e32 100644
--- a/workspaces/libnpmversion/lib/version.js
+++ b/workspaces/libnpmversion/lib/version.js
@@ -90,7 +90,9 @@ module.exports = async (newversion, opts) => {
}
await writeJson(lock, sw)
haveLocks.push(lock)
- } catch (er) {}
+ } catch {
+ // ignore errors
+ }
}
if (!ignoreScripts) {
diff --git a/workspaces/libnpmversion/test/enforce-clean.js b/workspaces/libnpmversion/test/enforce-clean.js
index d96fb09ff..3badf47ea 100644
--- a/workspaces/libnpmversion/test/enforce-clean.js
+++ b/workspaces/libnpmversion/test/enforce-clean.js
@@ -21,48 +21,42 @@ const enforceClean = requireInject('../lib/enforce-clean.js', {
const warnings = []
-t.test('clean, ok', t =>
- t.resolveMatch(enforceClean({ cwd: 'clean' }), true)
- .then(() => t.strictSame(warnings, []))
- .then(() => {
- warnings.length = 0
- }))
+t.afterEach(() => {
+ warnings.length = 0
+})
-t.test('unclean, no force, throws', t =>
- t.rejects(enforceClean({ cwd: 'unclean' }))
- .then(() => t.strictSame(warnings, []))
- .then(() => {
- warnings.length = 0
- }))
+t.test('clean, ok', async t => {
+ await t.resolveMatch(enforceClean({ cwd: 'clean' }), true)
+ t.strictSame(warnings, [])
+})
-t.test('unclean, forced, no throw', t =>
- t.resolveMatch(enforceClean({ cwd: 'unclean', force: true }), true)
- .then(() => t.strictSame(warnings, [
- [
- 'version',
- 'Git working directory not clean, proceeding forcefully.',
- ],
- ]))
- .then(() => {
- warnings.length = 0
- }))
+t.test('unclean, no force, throws', async t => {
+ await t.rejects(enforceClean({ cwd: 'unclean' }))
+ t.strictSame(warnings, [])
+})
-t.test('nogit, return false, no throw', t =>
- t.resolveMatch(enforceClean({ cwd: 'nogit' }), false)
- .then(() => t.strictSame(warnings, [
- [
- 'version',
- 'This is a Git checkout, but the git command was not found.',
- 'npm could not create a Git tag for this release!',
- ],
- ]))
- .then(() => {
- warnings.length = 0
- }))
+t.test('unclean, forced, no throw', async t => {
+ await t.resolveMatch(enforceClean({ cwd: 'unclean', force: true }), true)
+ t.strictSame(warnings, [
+ [
+ 'version',
+ 'Git working directory not clean, proceeding forcefully.',
+ ],
+ ])
+})
-t.test('other error, throw it', t =>
- t.rejects(enforceClean({ cwd: 'error' }), new Error('poop'))
- .then(() => t.strictSame(warnings, []))
- .then(() => {
- warnings.length = 0
- }))
+t.test('nogit, return false, no throw', async t => {
+ await t.resolveMatch(enforceClean({ cwd: 'nogit' }), false)
+ t.strictSame(warnings, [
+ [
+ 'version',
+ 'This is a Git checkout, but the git command was not found.',
+ 'npm could not create a Git tag for this release!',
+ ],
+ ])
+})
+
+t.test('other error, throw it', async t => {
+ await t.rejects(enforceClean({ cwd: 'error' }), new Error('poop'))
+ t.strictSame(warnings, [])
+})