Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/npm/cli.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
author Gar <gar+gh@danger.computer> 2022-08-18 18:00:46 +0300
committer Luke Karrys <luke@lukekarrys.com> 2022-08-22 21:14:50 +0300
commit bd2ae5d79eb8807bfca6075e98432c545a9ededa (patch)
tree 283f8cfd5e989303279331a105adfdd0c024e1be /workspaces/arborist/test
parent d54f03132a5e8247cdd8a3c165669477e95980fb (diff)
fix: linting
In preparation for @npmcli/eslint-config@3.1.0
Diffstat (limited to 'workspaces/arborist/test')
-rw-r--r-- workspaces/arborist/test/arborist/build-ideal-tree.js | 18
-rw-r--r-- workspaces/arborist/test/arborist/load-actual.js | 42
-rw-r--r-- workspaces/arborist/test/arborist/reify.js | 47
-rw-r--r-- workspaces/arborist/test/shrinkwrap.js | 404
-rw-r--r-- workspaces/arborist/test/yarn-lock.js | 8
5 files changed, 258 insertions(+), 261 deletions(-)
diff --git a/workspaces/arborist/test/arborist/build-ideal-tree.js b/workspaces/arborist/test/arborist/build-ideal-tree.js
index 0aadd7adf..87783086b 100644
--- a/workspaces/arborist/test/arborist/build-ideal-tree.js
+++ b/workspaces/arborist/test/arborist/build-ideal-tree.js
@@ -1080,7 +1080,7 @@ t.test('pathologically nested dependency cycle', async t => {
resolve(fixtures, 'pathological-dep-nesting-cycle')))
})
-t.test('resolve file deps from cwd', t => {
+t.test('resolve file deps from cwd', async t => {
const cwd = process.cwd()
t.teardown(() => process.chdir(cwd))
const path = t.testdir({
@@ -1094,17 +1094,16 @@ t.test('resolve file deps from cwd', t => {
path: resolve(path, 'global'),
...OPT,
})
- return arb.buildIdealTree({
+ const tree = await arb.buildIdealTree({
path: `${path}/local`,
add: ['child-1.2.3.tgz'],
global: true,
- }).then(tree => {
- const resolved = `file:${resolve(fixturedir, 'child-1.2.3.tgz')}`
- t.equal(normalizePath(tree.children.get('child').resolved), normalizePath(resolved))
})
+ const resolved = `file:${resolve(fixturedir, 'child-1.2.3.tgz')}`
+ t.equal(normalizePath(tree.children.get('child').resolved), normalizePath(resolved))
})
-t.test('resolve links in global mode', t => {
+t.test('resolve links in global mode', async t => {
const cwd = process.cwd()
t.teardown(() => process.chdir(cwd))
const path = t.testdir({
@@ -1127,13 +1126,12 @@ t.test('resolve links in global mode', t => {
global: true,
path: resolve(path, 'global'),
})
- return arb.buildIdealTree({
+ const tree = await arb.buildIdealTree({
add: ['file:../../linked-dep'],
global: true,
- }).then(tree => {
- const resolved = 'file:../../linked-dep'
- t.equal(tree.children.get('linked-dep').resolved, resolved)
})
+ const resolved = 'file:../../linked-dep'
+ t.equal(tree.children.get('linked-dep').resolved, resolved)
})
t.test('dont get confused if root matches duped metadep', async t => {
diff --git a/workspaces/arborist/test/arborist/load-actual.js b/workspaces/arborist/test/arborist/load-actual.js
index 72ab5e60f..9c2c8bf1c 100644
--- a/workspaces/arborist/test/arborist/load-actual.js
+++ b/workspaces/arborist/test/arborist/load-actual.js
@@ -170,34 +170,34 @@ t.test('shake out Link target timing issue', t => {
t.matchSnapshot(tree, 'loaded tree'))
})
-t.test('broken json', t =>
- loadActual(resolve(fixtures, 'bad')).then(d => {
- t.ok(d.errors.length, 'Got an error object')
- t.equal(d.errors[0] && d.errors[0].code, 'EJSONPARSE')
- t.ok(d, 'Got a tree')
- }))
+t.test('broken json', async t => {
+ const d = await loadActual(resolve(fixtures, 'bad'))
+ t.ok(d.errors.length, 'Got an error object')
+ t.equal(d.errors[0] && d.errors[0].code, 'EJSONPARSE')
+ t.ok(d, 'Got a tree')
+})
-t.test('missing json does not obscure deeper errors', t =>
- loadActual(resolve(fixtures, 'empty')).then(d => {
- t.match(d, { errors: [{ code: 'ENOENT' }] },
- 'Error reading json of top level')
- t.match(d.children.get('foo'), { errors: [{ code: 'EJSONPARSE' }] },
- 'Error parsing JSON of child node')
- }))
+t.test('missing json does not obscure deeper errors', async t => {
+ const d = await loadActual(resolve(fixtures, 'empty'))
+ t.match(d, { errors: [{ code: 'ENOENT' }] },
+ 'Error reading json of top level')
+ t.match(d.children.get('foo'), { errors: [{ code: 'EJSONPARSE' }] },
+ 'Error parsing JSON of child node')
+})
t.test('missing folder', t =>
t.rejects(loadActual(resolve(fixtures, 'does-not-exist')), {
code: 'ENOENT',
}))
-t.test('missing symlinks', t =>
- loadActual(resolve(fixtures, 'badlink')).then(d => {
- t.equal(d.children.size, 2, 'both broken children are included')
- t.match(d.children.get('foo'), { errors: [{ code: 'ELOOP' }] },
- 'foo has error')
- t.match(d.children.get('bar'), { errors: [{ code: 'ENOENT' }] },
- 'bar has error')
- }))
+t.test('missing symlinks', async t => {
+ const d = await loadActual(resolve(fixtures, 'badlink'))
+ t.equal(d.children.size, 2, 'both broken children are included')
+ t.match(d.children.get('foo'), { errors: [{ code: 'ELOOP' }] },
+ 'foo has error')
+ t.match(d.children.get('bar'), { errors: [{ code: 'ENOENT' }] },
+ 'bar has error')
+})
t.test('load from a hidden lockfile', async (t) => {
const tree = await loadActual(resolve(fixtures, 'hidden-lockfile'))
diff --git a/workspaces/arborist/test/arborist/reify.js b/workspaces/arborist/test/arborist/reify.js
index db5a9c1fe..01945f713 100644
--- a/workspaces/arborist/test/arborist/reify.js
+++ b/workspaces/arborist/test/arborist/reify.js
@@ -232,6 +232,7 @@ t.test('omit peer deps', t => {
}
const lock = require(tree.path + '/package-lock.json')
+ // eslint-disable-next-line promise/always-return
for (const [loc, meta] of Object.entries(lock.packages)) {
if (meta.peer) {
t.throws(() => fs.statSync(resolve(path, loc)), 'peer not reified')
@@ -240,6 +241,7 @@ t.test('omit peer deps', t => {
}
}
})
+ // eslint-disable-next-line promise/always-return
.then(() => {
process.removeListener('time', onTime)
process.removeListener('timeEnd', onTimeEnd)
@@ -335,15 +337,15 @@ t.test('omit optional dep', t => {
const ignoreScripts = true
const arb = newArb({ path, ignoreScripts })
- return arb.reify({ omit: ['optional'] })
- .then(tree => {
- t.equal(tree.children.get('fsevents'), undefined, 'no fsevents in tree')
- t.throws(() => fs.statSync(path + '/node_modules/fsevents'), 'no fsevents unpacked')
- t.match(require(path + '/package-lock.json').dependencies.fsevents, {
- dev: true,
- optional: true,
- }, 'fsevents present in lockfile')
- })
+ // eslint-disable-next-line promise/always-return
+ return arb.reify({ omit: ['optional'] }).then(tree => {
+ t.equal(tree.children.get('fsevents'), undefined, 'no fsevents in tree')
+ t.throws(() => fs.statSync(path + '/node_modules/fsevents'), 'no fsevents unpacked')
+ t.match(require(path + '/package-lock.json').dependencies.fsevents, {
+ dev: true,
+ optional: true,
+ }, 'fsevents present in lockfile')
+ })
.then(() => t.ok(arb.diff, 'has a diff tree'))
})
@@ -737,6 +739,7 @@ t.test('rollbacks', { buffered: false }, t => {
return t.rejects(a.reify({
update: ['@isaacs/testing-bundledeps-parent'],
}).then(tree => 'it worked'), new Error('poop'))
+ // eslint-disable-next-line promise/always-return
.then(() => {
const warnings = check()
t.equal(warnings.length, 2)
@@ -858,6 +861,7 @@ t.test('rollbacks', { buffered: false }, t => {
return t.resolveMatchSnapshot(a.reify({
update: ['@isaacs/testing-bundledeps-parent'],
save: false,
+ // eslint-disable-next-line promise/always-return
}).then(tree => printTree(tree))).then(() => {
const warnings = check()
t.equal(warnings.length, 2)
@@ -1019,6 +1023,7 @@ t.test('saving the ideal tree', t => {
// NB: these are all going to be marked as extraneous, because we're
// skipping the actual buildIdealTree step that flags them properly
return a[kSaveIdealTree]({})
+ // eslint-disable-next-line promise/always-return
}).then(saved => {
t.ok(saved, 'true, because it was saved')
t.matchSnapshot(require(path + '/package-lock.json'), 'lock after save')
@@ -1159,12 +1164,10 @@ t.test('workspaces', t => {
t.test('reify simple-workspaces', t =>
t.resolveMatchSnapshot(printReified(fixture(t, 'workspaces-simple')), 'should reify simple workspaces'))
- t.test('reify workspaces lockfile', t => {
+ t.test('reify workspaces lockfile', async t => {
const path = fixture(t, 'workspaces-simple')
- reify(path).then(() => {
- t.matchSnapshot(require(path + '/package-lock.json'), 'should lock workspaces config')
- t.end()
- })
+ await reify(path)
+ t.matchSnapshot(require(path + '/package-lock.json'), 'should lock workspaces config')
})
t.test('reify workspaces bin files', t => {
@@ -1195,20 +1198,16 @@ t.test('workspaces', t => {
'should not clean up entire nm folder for no reason'
))
- t.test('add new workspaces dep', t => {
+ t.test('add new workspaces dep', async t => {
const path = fixture(t, 'workspaces-add-new-dep')
- reify(path).then(() => {
- t.matchSnapshot(require(path + '/package-lock.json'), 'should update package-lock with new added dep')
- t.end()
- })
+ await reify(path)
+ t.matchSnapshot(require(path + '/package-lock.json'), 'should update package-lock with new added dep')
})
- t.test('root as-a-workspace', t => {
+ t.test('root as-a-workspace', async t => {
const path = fixture(t, 'workspaces-root-linked')
- reify(path).then(() => {
- t.matchSnapshot(require(path + '/package-lock.json'), 'should produce expected package-lock file')
- t.end()
- })
+ await reify(path)
+ t.matchSnapshot(require(path + '/package-lock.json'), 'should produce expected package-lock file')
})
t.end()
diff --git a/workspaces/arborist/test/shrinkwrap.js b/workspaces/arborist/test/shrinkwrap.js
index 07178062e..46f8ceccf 100644
--- a/workspaces/arborist/test/shrinkwrap.js
+++ b/workspaces/arborist/test/shrinkwrap.js
@@ -54,31 +54,31 @@ t.test('load and change lockfileVersion', async t => {
t.equal(v3Data.dependencies, undefined, 'v3 data does not have dependencies')
})
-t.test('load and then reset gets empty lockfile', t =>
- Shrinkwrap.load({ path: fixture }).then(sw => {
- sw.reset()
- t.strictSame(sw.data, {
- lockfileVersion: 2,
- requires: true,
- dependencies: {},
- packages: {},
- })
- t.equal(sw.loadedFromDisk, true)
- t.equal(sw.filename, resolve(fixture, 'package-lock.json'))
- }))
+t.test('load and then reset gets empty lockfile', async t => {
+ const sw = await Shrinkwrap.load({ path: fixture })
+ sw.reset()
+ t.strictSame(sw.data, {
+ lockfileVersion: 2,
+ requires: true,
+ dependencies: {},
+ packages: {},
+ })
+ t.equal(sw.loadedFromDisk, true)
+ t.equal(sw.filename, resolve(fixture, 'package-lock.json'))
+})
-t.test('starting out with a reset lockfile is an empty lockfile', t =>
- Shrinkwrap.reset({ path: fixture }).then(sw => {
- t.strictSame(sw.data, {
- lockfileVersion: 2,
- requires: true,
- dependencies: {},
- packages: {},
- })
- t.equal(sw.originalLockfileVersion, 2)
- t.equal(sw.loadedFromDisk, true)
- t.equal(sw.filename, resolve(fixture, 'package-lock.json'))
- }))
+t.test('starting out with a reset lockfile is an empty lockfile', async t => {
+ const sw = await Shrinkwrap.reset({ path: fixture })
+ t.strictSame(sw.data, {
+ lockfileVersion: 2,
+ requires: true,
+ dependencies: {},
+ packages: {},
+ })
+ t.equal(sw.originalLockfileVersion, 2)
+ t.equal(sw.loadedFromDisk, true)
+ t.equal(sw.filename, resolve(fixture, 'package-lock.json'))
+})
t.test('reset in a bad dir gets an empty lockfile with no lockfile version', async t => {
const nullLockDir = t.testdir({
@@ -109,103 +109,103 @@ t.test('reset in a bad dir gets an empty lockfile with no lockfile version', asy
t.equal(swNullLock.loadedFromDisk, true)
})
-t.test('loading in bad dir gets empty lockfile', t =>
- Shrinkwrap.load({ path: 'path/which/does/not/exist' }).then(sw => {
- t.strictSame(sw.data, {
- lockfileVersion: 2,
- requires: true,
- dependencies: {},
- packages: {},
- })
- t.equal(sw.loadedFromDisk, false)
- }))
+t.test('loading in bad dir gets empty lockfile', async t => {
+ const sw = await Shrinkwrap.load({ path: 'path/which/does/not/exist' })
+ t.strictSame(sw.data, {
+ lockfileVersion: 2,
+ requires: true,
+ dependencies: {},
+ packages: {},
+ })
+ t.equal(sw.loadedFromDisk, false)
+})
-t.test('failure to parse json gets empty lockfile', t =>
- Shrinkwrap.load({ path: badJsonFixture }).then(sw => {
- t.strictSame(sw.data, {
- lockfileVersion: 2,
- requires: true,
- dependencies: {},
- packages: {},
- })
- t.equal(sw.loadedFromDisk, false)
- }))
+t.test('failure to parse json gets empty lockfile', async t => {
+ const sw = await Shrinkwrap.load({ path: badJsonFixture })
+ t.strictSame(sw.data, {
+ lockfileVersion: 2,
+ requires: true,
+ dependencies: {},
+ packages: {},
+ })
+ t.equal(sw.loadedFromDisk, false)
+})
-t.test('loading in empty dir gets empty lockfile', t =>
- Shrinkwrap.load({ path: emptyFixture }).then(sw => {
- t.strictSame(sw.data, {
- lockfileVersion: 2,
- requires: true,
- dependencies: {},
- packages: {},
- })
- t.equal(sw.loadedFromDisk, false)
- // update with an empty node, set name to node name, not package name
- const root = new Node({
- path: emptyFixture,
- realpath: emptyFixture,
- })
- root.peer = false
- root.dev = false
- root.devOptional = false
- root.optional = false
- root.extraneous = false
- sw.add(root)
- t.strictSame(sw.commit(), {
- name: 'empty',
- lockfileVersion: 2,
- requires: true,
- packages: {},
- })
- }))
+t.test('loading in empty dir gets empty lockfile', async t => {
+ const sw = await Shrinkwrap.load({ path: emptyFixture })
+ t.strictSame(sw.data, {
+ lockfileVersion: 2,
+ requires: true,
+ dependencies: {},
+ packages: {},
+ })
+ t.equal(sw.loadedFromDisk, false)
+ // update with an empty node, set name to node name, not package name
+ const root = new Node({
+ path: emptyFixture,
+ realpath: emptyFixture,
+ })
+ root.peer = false
+ root.dev = false
+ root.devOptional = false
+ root.optional = false
+ root.extraneous = false
+ sw.add(root)
+ t.strictSame(sw.commit(), {
+ name: 'empty',
+ lockfileVersion: 2,
+ requires: true,
+ packages: {},
+ })
+})
-t.test('look up from locks and such', t =>
- new Shrinkwrap({ path: fixture }).load().then(m => {
- t.strictSame(m.get(''), {
- name: 'a',
- version: '1.2.3',
- dependencies: {
- abbrev: '^1.1.1',
- 'full-git-url': 'git+https://github.com/isaacs/abbrev-js.git',
- ghshort: 'github:isaacs/abbrev-js',
- old: 'npm:abbrev@^1.0.3',
- pinned: 'npm:abbrev@^1.1.1',
- reg: 'npm:abbrev@^1.1.1',
- remote: 'https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz',
- symlink: 'file:./abbrev-link-target',
- tarball: 'file:abbrev-1.1.1.tgz',
- bundler: '1.2.3',
- },
- }, 'root metadata')
- t.match(m.data, {
- lockfileVersion: 2,
- requires: true,
- dependencies: Object,
- packages: Object,
- })
- t.equal(m.loadedFromDisk, true)
- t.matchSnapshot(m.get('node_modules/abbrev'), 'basic package')
+t.test('look up from locks and such', async t => {
+ const m = await new Shrinkwrap({ path: fixture }).load()
+ t.strictSame(m.get(''), {
+ name: 'a',
+ version: '1.2.3',
+ dependencies: {
+ abbrev: '^1.1.1',
+ 'full-git-url': 'git+https://github.com/isaacs/abbrev-js.git',
+ ghshort: 'github:isaacs/abbrev-js',
+ old: 'npm:abbrev@^1.0.3',
+ pinned: 'npm:abbrev@^1.1.1',
+ reg: 'npm:abbrev@^1.1.1',
+ remote: 'https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz',
+ symlink: 'file:./abbrev-link-target',
+ tarball: 'file:abbrev-1.1.1.tgz',
+ bundler: '1.2.3',
+ },
+ }, 'root metadata')
+ t.match(m.data, {
+ lockfileVersion: 2,
+ requires: true,
+ dependencies: Object,
+ packages: Object,
+ })
+ t.equal(m.loadedFromDisk, true)
+ t.matchSnapshot(m.get('node_modules/abbrev'), 'basic package')
+ t.matchSnapshot(m.get(
+ 'node_modules/abbrev/node_modules/@scope/name/node_modules/@otherscope/othername', 'scoped package'))
+ t.matchSnapshot(m.get('package/not/found'), 'not found')
+
+ t.matchSnapshot(m.get('node_modules/old/node_modules/notfound'), 'fall off the dep tree')
+
+ t.test('lockfile', t => {
+ const p = m.data.packages
+ m.data.packages = {}
+ t.matchSnapshot(m.get('node_modules/abbrev'), 'basic pkg, from lock')
+ t.matchSnapshot(m.data.packages, 'saved fetched metadata back to packages section')
t.matchSnapshot(m.get(
'node_modules/abbrev/node_modules/@scope/name/node_modules/@otherscope/othername', 'scoped package'))
t.matchSnapshot(m.get('package/not/found'), 'not found')
-
- t.matchSnapshot(m.get('node_modules/old/node_modules/notfound'), 'fall off the dep tree')
-
- t.test('lockfile', t => {
- const p = m.data.packages
- m.data.packages = {}
- t.matchSnapshot(m.get('node_modules/abbrev'), 'basic pkg, from lock')
- t.matchSnapshot(m.data.packages, 'saved fetched metadata back to packages section')
- t.matchSnapshot(m.get(
- 'node_modules/abbrev/node_modules/@scope/name/node_modules/@otherscope/othername', 'scoped package'))
- t.matchSnapshot(m.get('package/not/found'), 'not found')
- t.matchSnapshot(m.get('node_modules/full-git-url'), 'full git')
- t.matchSnapshot(m.get('node_modules/symlink'), 'symlink')
- t.matchSnapshot(m.get('node_modules/unhosted-git'), 'unhosted git')
- m.data.packages = p
- t.end()
- })
- }))
+ t.matchSnapshot(m.get('node_modules/full-git-url'), 'full git')
+ t.matchSnapshot(m.get('node_modules/symlink'), 'symlink')
+ t.matchSnapshot(m.get('node_modules/unhosted-git'), 'unhosted git')
+ m.data.packages = p
+ t.end()
+ })
+})
t.test('load a shrinkwrap with some dev and optional flags', t =>
Shrinkwrap.load({ path: depTypesFixture }).then(m =>
@@ -616,39 +616,38 @@ t.test('saving dependency-free shrinkwrap object', t => {
t.test('write the shrinkwrap back to disk', t => {
const dir = t.testdir({})
- t.test('just read and write back', t =>
- Shrinkwrap.load({ path: fixture }).then(s => {
- s.filename = dir + '/test-shrinkwrap.json'
- return s.save().then(() =>
- t.strictSame(require(s.filename), s.data, 'saved json matches data'))
- }))
- t.test('write back with pending changes', t =>
- Shrinkwrap.load({ path: fixture }).then(s => {
- const dir = t.testdir({})
- s.filename = dir + '/test-shrinkwrap-with-changes.json'
- const node = new Node({
- path: fixture + '/node_modules/newthing',
- pkg: {
- name: 'newthing',
- version: '1.2.3',
- },
- })
- s.add(node)
- const preCommit = JSON.parse(JSON.stringify(s.data))
- const postCommit = s.commit()
- t.notSame(postCommit, preCommit, 'committing changes the data')
- // delete and re-add to put us back in the pre-commit state
- s.delete(node.location)
- s.add(node)
- return s.save().then(() => {
- t.strictSame(s.data, postCommit, 'committed changes to data')
- t.strictSame(require(s.filename), s.data, 'saved json matches data')
- })
- }))
+ t.test('just read and write back', async t => {
+ const s = await Shrinkwrap.load({ path: fixture })
+ s.filename = dir + '/test-shrinkwrap.json'
+ await s.save()
+ t.strictSame(require(s.filename), s.data, 'saved json matches data')
+ })
+ t.test('write back with pending changes', async t => {
+ const s = await Shrinkwrap.load({ path: fixture })
+ const dir = t.testdir({})
+ s.filename = dir + '/test-shrinkwrap-with-changes.json'
+ const node = new Node({
+ path: fixture + '/node_modules/newthing',
+ pkg: {
+ name: 'newthing',
+ version: '1.2.3',
+ },
+ })
+ s.add(node)
+ const preCommit = JSON.parse(JSON.stringify(s.data))
+ const postCommit = s.commit()
+ t.notSame(postCommit, preCommit, 'committing changes the data')
+ // delete and re-add to put us back in the pre-commit state
+ s.delete(node.location)
+ s.add(node)
+ await s.save()
+ t.strictSame(s.data, postCommit, 'committed changes to data')
+ t.strictSame(require(s.filename), s.data, 'saved json matches data')
+ })
t.end()
})
-t.test('load shrinkwrap if no package-lock.json present', t => {
+t.test('load shrinkwrap if no package-lock.json present', async t => {
const dir = t.testdir({
'npm-shrinkwrap.json': JSON.stringify({
lockfileVersion: 1,
@@ -656,41 +655,39 @@ t.test('load shrinkwrap if no package-lock.json present', t => {
version: '1.2.3',
}),
})
- return Promise.all([
- Shrinkwrap.load({ path: dir, shrinkwrapOnly: true }).then(s =>
- t.equal(s.type, 'npm-shrinkwrap.json', 'loaded with swonly')),
- Shrinkwrap.reset({ path: dir, shrinkwrapOnly: true }).then(s =>
- t.equal(s.type, 'npm-shrinkwrap.json', 'loaded fresh')),
- Shrinkwrap.load({ path: dir }).then(s =>
- t.equal(s.type, 'npm-shrinkwrap.json', 'loaded without swonly')),
- Shrinkwrap.reset({ path: dir }).then(s =>
- t.equal(s.type, 'npm-shrinkwrap.json', 'loaded fresh without swonly')),
- ])
+ let s
+ s = await Shrinkwrap.load({ path: dir, shrinkwrapOnly: true })
+ t.equal(s.type, 'npm-shrinkwrap.json', 'loaded with swonly')
+ s = await Shrinkwrap.reset({ path: dir, shrinkwrapOnly: true })
+ t.equal(s.type, 'npm-shrinkwrap.json', 'loaded fresh')
+ s = await Shrinkwrap.load({ path: dir })
+ t.equal(s.type, 'npm-shrinkwrap.json', 'loaded without swonly')
+ s = await Shrinkwrap.reset({ path: dir })
+ t.equal(s.type, 'npm-shrinkwrap.json', 'loaded fresh without swonly')
})
-t.test('load yarn.lock file if present', t =>
- Shrinkwrap.load({ path: yarnFixture }).then(s => {
- t.type(s.yarnLock, YarnLock, 'loaded a yarn lock file')
- t.not(s.yarnLock.entries.size, 0, 'got some entries')
- }))
+t.test('load yarn.lock file if present', async t => {
+ const s = await Shrinkwrap.load({ path: yarnFixture })
+ t.type(s.yarnLock, YarnLock, 'loaded a yarn lock file')
+ t.not(s.yarnLock.entries.size, 0, 'got some entries')
+})
-t.test('save yarn lock if loaded', t =>
- Shrinkwrap.load({ path: yarnFixture }).then(s => {
- s.path = t.testdir()
- s.filename = s.path + '/package-lock.json'
- return s.save()
- .then(() => Shrinkwrap.load({ path: s.path }))
- .then(ss => t.strictSame(s.yarnLock, ss.yarnLock))
- }))
+t.test('save yarn lock if loaded', async t => {
+ const s = await Shrinkwrap.load({ path: yarnFixture })
+ s.path = t.testdir()
+ s.filename = s.path + '/package-lock.json'
+ await s.save()
+ const ss = await Shrinkwrap.load({ path: s.path })
+ t.strictSame(s.yarnLock, ss.yarnLock)
+})
-t.test('ignore yarn lock file parse errors', t => {
+t.test('ignore yarn lock file parse errors', async t => {
const dir = t.testdir({
'yarn.lock': 'this is not a yarn lock file!',
})
- return Shrinkwrap.load({ path: dir }).then(s => {
- t.type(s.yarnLock, YarnLock, 'got a yarn lock object because a yarn lock exists')
- t.equal(s.yarnLock.entries.size, 0, 'did not get any entries out of it')
- })
+ const s = await Shrinkwrap.load({ path: dir })
+ t.type(s.yarnLock, YarnLock, 'got a yarn lock object because a yarn lock exists')
+ t.equal(s.yarnLock.entries.size, 0, 'did not get any entries out of it')
})
t.test('load a resolution from yarn.lock if we dont have our own', async t => {
@@ -811,38 +808,38 @@ t.test('handle missing dependencies object without borking', t => {
t.end()
})
-t.test('load a hidden lockfile', t => {
+t.test('load a hidden lockfile', async t => {
// ensure the hidden lockfile is newer than the contents
// otherwise this can fail on a fresh checkout.
fs.utimesSync(resolve(hiddenLockfileFixture, hidden), new Date(), new Date())
- return Shrinkwrap.load({
+ const s = await Shrinkwrap.load({
path: hiddenLockfileFixture,
hiddenLockfile: true,
- }).then(s => {
- t.matchSnapshot(s.data)
- // make sure it does not add to the dependencies block when a new
- // node is added.
- s.data.dependencies = {}
- s.add(new Node({
- path: hiddenLockfileFixture + '/node_modules/foo',
- pkg: {
- name: 'foo',
- version: '1.2.3',
- _integrity: 'sha512-deadbeef',
- _resolved: 'https://registry.npmjs.org/foo/-/foo-1.2.3.tgz',
- },
- }))
- t.strictSame(s.data.dependencies, {}, 'did not add to legacy data')
- const data = s.commit()
- t.equal(data.packages[''], undefined, 'no root entry')
- t.equal(data.dependencies, undefined, 'deleted legacy metadata')
})
+ t.matchSnapshot(s.data)
+ // make sure it does not add to the dependencies block when a new
+ // node is added.
+ s.data.dependencies = {}
+ s.add(new Node({
+ path: hiddenLockfileFixture + '/node_modules/foo',
+ pkg: {
+ name: 'foo',
+ version: '1.2.3',
+ _integrity: 'sha512-deadbeef',
+ _resolved: 'https://registry.npmjs.org/foo/-/foo-1.2.3.tgz',
+ },
+ }))
+ t.strictSame(s.data.dependencies, {}, 'did not add to legacy data')
+ const data = s.commit()
+ t.equal(data.packages[''], undefined, 'no root entry')
+ t.equal(data.dependencies, undefined, 'deleted legacy metadata')
})
-t.test('load a fresh hidden lockfile', t => Shrinkwrap.reset({
- path: hiddenLockfileFixture,
- hiddenLockfile: true,
-}).then(sw => {
+t.test('load a fresh hidden lockfile', async t => {
+ const sw = await Shrinkwrap.reset({
+ path: hiddenLockfileFixture,
+ hiddenLockfile: true,
+ })
t.strictSame(sw.data, {
lockfileVersion: 3,
requires: true,
@@ -851,7 +848,7 @@ t.test('load a fresh hidden lockfile', t => Shrinkwrap.reset({
})
t.equal(sw.loadedFromDisk, true)
t.equal(sw.filename, resolve(hiddenLockfileFixture, hidden))
-}))
+})
t.test('hidden lockfile only used if up to date', async t => {
const lockdata = require(resolve(hiddenLockfileFixture, hidden))
@@ -1188,18 +1185,19 @@ t.test('loadActual tests', t => {
roots.push('tap-with-yarn-lock')
- t.plan(roots.length)
- roots.forEach(root => {
+ for (const root of roots) {
const path = resolve(fixtures, root)
- t.test(root, t => new Arborist({ path }).loadActual().then(tree => {
+ t.test(root, async t => {
+ const tree = await new Arborist({ path }).loadActual()
const shrinkwrap = tree.meta.commit()
t.matchSnapshot(shrinkwrap, 'shrinkwrap data')
if (tree.meta.yarnLock) {
const yarnLock = tree.meta.yarnLock.toString()
t.matchSnapshot(yarnLock, 'yarn.lock data')
}
- }))
- })
+ })
+ }
+ t.end()
})
t.test('set integrity because location and resolved match', async t => {
diff --git a/workspaces/arborist/test/yarn-lock.js b/workspaces/arborist/test/yarn-lock.js
index 06c16556c..402afb717 100644
--- a/workspaces/arborist/test/yarn-lock.js
+++ b/workspaces/arborist/test/yarn-lock.js
@@ -86,11 +86,13 @@ t.test('load a yarn lock from an actual tree', t => {
resolve(__dirname, 'fixtures/install-types'),
resolve(__dirname, 'fixtures/links-all-over'),
]
- fixtures.forEach(fixture => t.test(basename(fixture), t =>
- new Arborist({ path: fixture }).loadActual().then(tree => {
+ for (const fixture of fixtures) {
+ t.test(basename(fixture), async t => {
+ const tree = await new Arborist({ path: fixture }).loadActual()
const y = YarnLock.fromTree(tree)
t.matchSnapshot(y.toString(), 'yarn.lock from a package tree')
- })))
+ })
+ }
t.end()
})