github.com/npm/cli.git
author     Gar <gar+gh@danger.computer>   2021-06-04 17:11:41 +0300
committer  Gar <gar+gh@danger.computer>   2021-06-10 20:09:03 +0300
commit     f130a81d62bf4f540ab252a09ff5a618827f9265 (patch)
tree       ae41e2ddd68a09bf486c96375ee5c80e2d842934
parent     26d00c47785dfb300eab6a926f9d7c4d566776b1 (diff)

fix(linting): add scripts, docs, smoke-tests
There is code in all of these places we control so it should be linted

PR-URL: https://github.com/npm/cli/pull/3367
Credit: @wraithgar
Close: #3367
Reviewed-by: @isaacs
-rw-r--r--  docs/dockhand.js                      | 525
-rw-r--r--  package.json                          |   2
-rw-r--r--  scripts/bundle-and-gitignore-deps.js  |   2
-rw-r--r--  scripts/changelog.js                  |  59
-rw-r--r--  scripts/config-doc-command.js         |   2
-rw-r--r--  scripts/config-doc.js                 |   6
-rw-r--r--  scripts/docs-build.js                 |  13
-rw-r--r--  scripts/update-dist-tags.js           |   8
-rw-r--r--  smoke-tests/index.js                  |   4
9 files changed, 303 insertions(+), 318 deletions(-)
diff --git a/docs/dockhand.js b/docs/dockhand.js
index 7f2c90dae..77a20f7de 100644
--- a/docs/dockhand.js
+++ b/docs/dockhand.js
@@ -1,341 +1,328 @@
#!/usr/bin/env node
-const path = require('path');
-const fs = require('fs');
-const yaml = require('yaml');
-const cmark = require('cmark-gfm');
-const mdx = require('@mdx-js/mdx');
-const mkdirp = require('mkdirp');
-const jsdom = require('jsdom');
+const path = require('path')
+const fs = require('fs')
+const yaml = require('yaml')
+const cmark = require('cmark-gfm')
+const mdx = require('@mdx-js/mdx')
+const mkdirp = require('mkdirp')
+const jsdom = require('jsdom')
const npm = require('../lib/npm.js')
-const config = require('./config.json');
+const config = require('./config.json')
-const docsRoot = __dirname;
-const inputRoot = path.join(docsRoot, 'content');
-const outputRoot = path.join(docsRoot, 'output');
+const docsRoot = __dirname
+const inputRoot = path.join(docsRoot, 'content')
+const outputRoot = path.join(docsRoot, 'output')
-const template = fs.readFileSync('template.html').toString();
+const template = fs.readFileSync('template.html').toString()
-const run = async function() {
- try {
- const navPaths = await getNavigationPaths();
- const fsPaths = await renderFilesystemPaths();
+const run = async function () {
+ try {
+ const navPaths = await getNavigationPaths()
+ const fsPaths = await renderFilesystemPaths()
- if (!ensureNavigationComplete(navPaths, fsPaths)) {
- process.exit(1);
- }
- }
- catch (error) {
- console.error(error);
- }
+ if (!ensureNavigationComplete(navPaths, fsPaths))
+ process.exit(1)
+ } catch (error) {
+ console.error(error)
+ }
}
-run();
+run()
-function ensureNavigationComplete(navPaths, fsPaths) {
- const unmatchedNav = { }, unmatchedFs = { };
+function ensureNavigationComplete (navPaths, fsPaths) {
+ const unmatchedNav = { }; const unmatchedFs = { }
- for (const navPath of navPaths) {
- unmatchedNav[navPath] = true;
- }
+ for (const navPath of navPaths)
+ unmatchedNav[navPath] = true
- for (let fsPath of fsPaths) {
- fsPath = '/' + fsPath.replace(/\.md$/, "");
+ for (let fsPath of fsPaths) {
+ fsPath = '/' + fsPath.replace(/\.md$/, '')
- if (unmatchedNav[fsPath]) {
- delete unmatchedNav[fsPath];
- }
- else {
- unmatchedFs[fsPath] = true;
- }
- }
+ if (unmatchedNav[fsPath])
+ delete unmatchedNav[fsPath]
- const missingNav = Object.keys(unmatchedNav).sort();
- const missingFs = Object.keys(unmatchedFs).sort()
+ else
+ unmatchedFs[fsPath] = true
+ }
- if (missingNav.length > 0 || missingFs.length > 0) {
- let message = "Error: documentation navigation (nav.yml) does not match filesystem.\n";
+ const missingNav = Object.keys(unmatchedNav).sort()
+ const missingFs = Object.keys(unmatchedFs).sort()
- if (missingNav.length > 0) {
- message += "\nThe following path(s) exist on disk but are not present in nav.yml:\n\n";
+ if (missingNav.length > 0 || missingFs.length > 0) {
+ let message = 'Error: documentation navigation (nav.yml) does not match filesystem.\n'
- for (const nav of missingNav) {
- message += ` ${nav}\n`;
- }
- }
+ if (missingNav.length > 0) {
+ message += '\nThe following path(s) exist on disk but are not present in nav.yml:\n\n'
- if (missingNav.length > 0 && missingFs.length > 0) {
- message += "\nThe following path(s) exist in nav.yml but are not present on disk:\n\n";
+ for (const nav of missingNav)
+ message += ` ${nav}\n`
+ }
- for (const fs of missingFs) {
- message += ` ${fs}\n`;
- }
- }
+ if (missingNav.length > 0 && missingFs.length > 0) {
+ message += '\nThe following path(s) exist in nav.yml but are not present on disk:\n\n'
- message += "\nUpdate nav.yml to ensure that all files are listed in the appropriate place.";
+ for (const fs of missingFs)
+ message += ` ${fs}\n`
+ }
- console.error(message);
+ message += '\nUpdate nav.yml to ensure that all files are listed in the appropriate place.'
- return false;
- }
+ console.error(message)
- return true;
+ return false
+ }
+
+ return true
}
-function getNavigationPaths() {
- const navFilename = path.join(docsRoot, 'nav.yml');
- const nav = yaml.parse(fs.readFileSync(navFilename).toString(), 'utf8');
+function getNavigationPaths () {
+ const navFilename = path.join(docsRoot, 'nav.yml')
+ const nav = yaml.parse(fs.readFileSync(navFilename).toString(), 'utf8')
- return walkNavigation(nav);
+ return walkNavigation(nav)
}
-function walkNavigation(entries) {
- const paths = [ ]
+function walkNavigation (entries) {
+ const paths = []
- for (const entry of entries) {
- if (entry.children) {
- paths.push(... walkNavigation(entry.children));
- }
- else {
- paths.push(entry.url);
- }
- }
+ for (const entry of entries) {
+ if (entry.children)
+ paths.push(...walkNavigation(entry.children))
- return paths;
+ else
+ paths.push(entry.url)
+ }
+
+ return paths
}
-async function renderFilesystemPaths() {
- return await walkFilesystem(inputRoot);
+async function renderFilesystemPaths () {
+ return await walkFilesystem(inputRoot)
}
-async function walkFilesystem(root, dirRelative) {
- const paths = [ ]
+async function walkFilesystem (root, dirRelative) {
+ const paths = []
+
+ const dirPath = dirRelative ? path.join(root, dirRelative) : root
+ const children = fs.readdirSync(dirPath)
- const dirPath = dirRelative ? path.join(root, dirRelative) : root;
- const children = fs.readdirSync(dirPath);
+ for (const childFilename of children) {
+ const childRelative = dirRelative ?
+ path.join(dirRelative, childFilename) :
+ childFilename
+ const childPath = path.join(root, childRelative)
- for (const childFilename of children) {
- const childRelative = dirRelative ? path.join(dirRelative, childFilename) : childFilename;
- const childPath = path.join(root, childRelative);
+ if (fs.lstatSync(childPath).isDirectory())
+ paths.push(...await walkFilesystem(root, childRelative))
- if (fs.lstatSync(childPath).isDirectory()) {
- paths.push(... await walkFilesystem(root, childRelative));
- }
- else {
- await renderFile(childRelative);
- paths.push(childRelative);
- }
+ else {
+ await renderFile(childRelative)
+ paths.push(childRelative)
}
+ }
- return paths;
+ return paths
}
-async function renderFile(childPath) {
- const inputPath = path.join(inputRoot, childPath);
-
- if (!inputPath.match(/\.md$/)) {
- console.log(`warning: unknown file type ${inputPath}, ignored`);
- return;
+async function renderFile (childPath) {
+ const inputPath = path.join(inputRoot, childPath)
+
+ if (!inputPath.match(/\.md$/)) {
+ console.log(`warning: unknown file type ${inputPath}, ignored`)
+ return
+ }
+
+ const outputPath = path.join(outputRoot, childPath.replace(/\.md$/, '.html'))
+
+ let md = fs.readFileSync(inputPath).toString()
+ let frontmatter = { }
+
+ // Take the leading frontmatter out of the markdown
+ md = md.replace(/^---\n([\s\S]+)\n---\n/, (header, fm) => {
+ frontmatter = yaml.parse(fm, 'utf8')
+ return ''
+ })
+
+ // Replace any tokens in the source
+ md = md.replace(/@VERSION@/, npm.version)
+
+ // Render the markdown into an HTML snippet using a GFM renderer.
+ const content = cmark.renderHtmlSync(md, {
+ smart: true,
+ githubPreLang: true,
+ strikethroughDoubleTilde: true,
+ unsafe: false,
+ extensions: {
+ table: true,
+ strikethrough: true,
+ tagfilter: true,
+ autolink: true,
+ },
+ })
+
+ // Test that mdx can parse this markdown file. We don't actually
+ // use the output, it's just to ensure that the upstream docs
+ // site (docs.npmjs.com) can parse it when this file gets there.
+ try {
+ await mdx(md, { skipExport: true })
+ } catch (error) {
+ throw new MarkdownError(childPath, error)
+ }
+
+ // Inject this data into the template, using a mustache-like
+ // replacement scheme.
+ const html = template.replace(/{{\s*([\w.]+)\s*}}/g, (token, key) => {
+ switch (key) {
+ case 'content':
+ return `<div id="_content">${content}</div>`
+ case 'path':
+ return childPath
+ case 'url_path':
+ return encodeURI(childPath)
+
+ case 'toc':
+ return '<div id="_table_of_contents"></div>'
+
+ case 'title':
+ case 'section':
+ case 'description':
+ return frontmatter[key]
+
+ case 'config.github_repo':
+ case 'config.github_branch':
+ case 'config.github_path':
+ return config[key.replace(/^config\./, '')]
+
+ default:
+ console.log(`warning: unknown token '${token}' in ${inputPath}`)
+ return ''
}
+ })
- const outputPath = path.join(outputRoot, childPath.replace(/\.md$/, '.html'));
-
- let md = fs.readFileSync(inputPath).toString();
- let frontmatter = { };
-
- // Take the leading frontmatter out of the markdown
- md = md.replace(/^---\n([\s\S]+)\n---\n/, (header, fm) => {
- frontmatter = yaml.parse(fm, 'utf8');
- return '';
- });
-
- // Replace any tokens in the source
- md = md.replace(/@VERSION@/, npm.version);
-
- // Render the markdown into an HTML snippet using a GFM renderer.
- const content = cmark.renderHtmlSync(md, {
- 'smart': true,
- 'githubPreLang': true,
- 'strikethroughDoubleTilde': true,
- 'unsafe': false,
- extensions: {
- 'table': true,
- 'strikethrough': true,
- 'tagfilter': true,
- 'autolink': true
- }
- });
-
- // Test that mdx can parse this markdown file. We don't actually
- // use the output, it's just to ensure that the upstream docs
- // site (docs.npmjs.com) can parse it when this file gets there.
- try {
- await mdx(md, { skipExport: true });
- }
- catch (error) {
- throw new MarkdownError(childPath, error);
- }
+ const dom = new jsdom.JSDOM(html)
+ const document = dom.window.document
- // Inject this data into the template, using a mustache-like
- // replacement scheme.
- const html = template.replace(/\{\{\s*([\w\.]+)\s*\}\}/g, (token, key) => {
- switch (key) {
- case 'content':
- return `<div id="_content">${content}</div>`;
- case 'path':
- return childPath;
- case 'url_path':
- return encodeURI(childPath);
-
- case 'toc':
- return '<div id="_table_of_contents"></div>';
-
- case 'title':
- case 'section':
- case 'description':
- return frontmatter[key];
-
- case 'config.github_repo':
- case 'config.github_branch':
- case 'config.github_path':
- return config[key.replace(/^config\./, '')];
-
- default:
- console.log(`warning: unknown token '${token}' in ${inputPath}`);
- return '';
- }
- return key;
- });
-
- const dom = new jsdom.JSDOM(html);
- const document = dom.window.document;
-
- // Rewrite relative URLs in links and image sources to be relative to
- // this file; this is for supporting `file://` links. HTML pages need
- // suffix appended.
- const links = [
- { tag: 'a', attr: 'href', suffix: '.html' },
- { tag: 'img', attr: 'src' }
- ];
-
- for (let linktype of links) {
- for (let tag of document.querySelectorAll(linktype.tag)) {
- let url = tag.getAttribute(linktype.attr);
-
- if (url.startsWith('/')) {
- const childDepth = childPath.split('/').length - 1;
- const prefix = childDepth > 0 ? '../'.repeat(childDepth) : './';
-
- url = url.replace(/^\//, prefix);
-
- if (linktype.suffix) {
- url += linktype.suffix;
- }
-
- tag.setAttribute(linktype.attr, url);
- }
- }
- }
-
- // Give headers a unique id so that they can be linked within the doc
- const headerIds = [ ];
- for (let header of document.querySelectorAll('h1, h2, h3, h4, h5, h6')) {
- if (header.getAttribute('id')) {
- headerIds.push(header.getAttribute('id'));
- continue;
- }
+ // Rewrite relative URLs in links and image sources to be relative to
+ // this file; this is for supporting `file://` links. HTML pages need
+ // suffix appended.
+ const links = [
+ { tag: 'a', attr: 'href', suffix: '.html' },
+ { tag: 'img', attr: 'src' },
+ ]
- const headerText = header.textContent.replace(/[A-Z]/g, x => x.toLowerCase()).replace(/ /g, '-').replace(/[^a-z0-9\-]/g, '');
- let headerId = headerText;
- let headerIncrement = 1;
+ for (const linktype of links) {
+ for (const tag of document.querySelectorAll(linktype.tag)) {
+ let url = tag.getAttribute(linktype.attr)
- while (document.getElementById(headerId) !== null) {
- headerId = headerText + (++headerIncrement);
- }
+ if (url.startsWith('/')) {
+ const childDepth = childPath.split('/').length - 1
+ const prefix = childDepth > 0 ? '../'.repeat(childDepth) : './'
- headerIds.push(headerId);
- header.setAttribute('id', headerId);
- }
+ url = url.replace(/^\//, prefix)
- // Walk the dom and build a table of contents
- const toc = document.getElementById('_table_of_contents');
+ if (linktype.suffix)
+ url += linktype.suffix
- if (toc) {
- toc.appendChild(generateTableOfContents(document));
+ tag.setAttribute(linktype.attr, url)
+ }
+ }
+ }
+
+ // Give headers a unique id so that they can be linked within the doc
+ const headerIds = []
+ for (const header of document.querySelectorAll('h1, h2, h3, h4, h5, h6')) {
+ if (header.getAttribute('id')) {
+ headerIds.push(header.getAttribute('id'))
+ continue
}
- // Write the final output
- const output = dom.serialize();
-
- mkdirp.sync(path.dirname(outputPath));
- fs.writeFileSync(outputPath, output);
-}
+ const headerText = header.textContent.replace(/[A-Z]/g, x => x.toLowerCase()).replace(/ /g, '-').replace(/[^a-z0-9-]/g, '')
+ let headerId = headerText
+ let headerIncrement = 1
-function generateTableOfContents(document) {
- const headers = [ ];
- walkHeaders(document.getElementById('_content'), headers);
+ while (document.getElementById(headerId) !== null)
+ headerId = headerText + (++headerIncrement)
- let parent = null;
+ headerIds.push(headerId)
+ header.setAttribute('id', headerId)
+ }
- // The nesting depth of headers are not necessarily the header level.
- // (eg, h1 > h3 > h5 is a depth of three even though there's an h5.)
- const hierarchy = [ ];
- for (let header of headers) {
- const level = headerLevel(header);
+ // Walk the dom and build a table of contents
+ const toc = document.getElementById('_table_of_contents')
- while (hierarchy.length && hierarchy[hierarchy.length - 1].headerLevel > level) {
- hierarchy.pop();
- }
+ if (toc)
+ toc.appendChild(generateTableOfContents(document))
- if (!hierarchy.length || hierarchy[hierarchy.length - 1].headerLevel < level) {
- const newList = document.createElement('ul');
- newList.headerLevel = level;
+ // Write the final output
+ const output = dom.serialize()
- if (hierarchy.length) {
- hierarchy[hierarchy.length - 1].appendChild(newList);
- }
+ mkdirp.sync(path.dirname(outputPath))
+ fs.writeFileSync(outputPath, output)
+}
- hierarchy.push(newList);
- }
+function generateTableOfContents (document) {
+ const headers = []
+ walkHeaders(document.getElementById('_content'), headers)
+
+ // The nesting depth of headers are not necessarily the header level.
+ // (eg, h1 > h3 > h5 is a depth of three even though there's an h5.)
+ const hierarchy = []
+ for (const header of headers) {
+ const level = headerLevel(header)
+
+ while (
+ hierarchy.length &&
+ hierarchy[hierarchy.length - 1].headerLevel > level
+ )
+ hierarchy.pop()
+
+ if (
+ !hierarchy.length ||
+ hierarchy[hierarchy.length - 1].headerLevel < level
+ ) {
+ const newList = document.createElement('ul')
+ newList.headerLevel = level
+
+ if (hierarchy.length)
+ hierarchy[hierarchy.length - 1].appendChild(newList)
+
+ hierarchy.push(newList)
+ }
- const element = document.createElement('li');
+ const element = document.createElement('li')
- const link = document.createElement('a');
- link.setAttribute('href', `#${header.getAttribute('id')}`);
- link.innerHTML = header.innerHTML;
- element.appendChild(link);
+ const link = document.createElement('a')
+ link.setAttribute('href', `#${header.getAttribute('id')}`)
+ link.innerHTML = header.innerHTML
+ element.appendChild(link)
- const list = hierarchy[hierarchy.length - 1];
- list.appendChild(element);
- }
+ const list = hierarchy[hierarchy.length - 1]
+ list.appendChild(element)
+ }
- return hierarchy[0];
+ return hierarchy[0]
}
-function walkHeaders(element, headers) {
- for (let child of element.childNodes) {
- if (headerLevel(child)) {
- headers.push(child);
- }
-
- walkHeaders(child, headers);
- }
-}
+function walkHeaders (element, headers) {
+ for (const child of element.childNodes) {
+ if (headerLevel(child))
+ headers.push(child)
-function headerLevel(node) {
- const level = node.tagName ? node.tagName.match(/^[Hh]([123456])$/) : null;
- return level ? level[1] : 0;
+ walkHeaders(child, headers)
+ }
}
-function debug(str) {
- console.log(str);
+function headerLevel (node) {
+ const level = node.tagName ? node.tagName.match(/^[Hh]([123456])$/) : null
+ return level ? level[1] : 0
}
class MarkdownError extends Error {
- constructor(file, inner) {
- super(`failed to parse ${file}`);
- this.file = file;
- this.inner = inner;
- }
+ constructor (file, inner) {
+ super(`failed to parse ${file}`)
+ this.file = file
+ this.inner = inner
+ }
}
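
Aside from brace and semicolon style, the reflowed dockhand.js keeps its header-id scheme unchanged: lowercase the heading text, dash-separate it, strip punctuation, then append 2, 3, ... until the id is unique in the document. A minimal standalone sketch of that scheme (illustrative only, not part of the diff; it assumes jsdom is installed, as dockhand.js itself requires):

const jsdom = require('jsdom')

// Same slug/dedupe logic as the header loop in renderFile(), pulled out for clarity.
function slugify (text, document) {
  const base = text
    .replace(/[A-Z]/g, x => x.toLowerCase())
    .replace(/ /g, '-')
    .replace(/[^a-z0-9-]/g, '')
  let id = base
  let increment = 1
  // Bump a numeric suffix until no element in the document already uses the id.
  while (document.getElementById(id) !== null)
    id = base + (++increment)
  return id
}

const { document } = new jsdom.JSDOM('<h2 id="npm-install">npm install</h2>').window
console.log(slugify('npm install', document)) // => 'npm-install2' (plain id already taken)
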
diff --git a/package.json b/package.json
index 3f54979cb..822bbb7d6 100644
--- a/package.json
+++ b/package.json
@@ -204,7 +204,7 @@
"sudotest:nocleanup": "sudo NO_TEST_CLEANUP=1 npm run test --",
"posttest": "npm run lint",
"eslint": "eslint",
- "lint": "npm run eslint -- test/lib test/bin lib",
+ "lint": "npm run eslint -- test/lib test/bin lib scripts docs smoke-tests",
"lintfix": "npm run lint -- --fix",
"prelint": "rimraf test/npm_cache*",
"resetdeps": "bash scripts/resetdeps.sh",
diff --git a/scripts/bundle-and-gitignore-deps.js b/scripts/bundle-and-gitignore-deps.js
index b15720c03..1c7f0eb36 100644
--- a/scripts/bundle-and-gitignore-deps.js
+++ b/scripts/bundle-and-gitignore-deps.js
@@ -9,7 +9,7 @@ const arb = new Arborist({ path: resolve(__dirname, '..') })
const shouldIgnore = []
arb.loadVirtual().then(tree => {
- for (const [name, node] of tree.children.entries()) {
+ for (const node of tree.children.values()) {
if (node.dev || node.isLink) {
console.error('ignore', node.name)
shouldIgnore.push(node.name)
diff --git a/scripts/changelog.js b/scripts/changelog.js
index f36ad56c9..0951bd027 100644
--- a/scripts/changelog.js
+++ b/scripts/changelog.js
@@ -18,40 +18,38 @@ const log = execSync(`git log --reverse --pretty='format:%h %H%d %s (%aN)%n%b%n-
main()
function shortname (url) {
- let matched = url.match(/https:\/\/github\.com\/([^/]+\/[^/]+)\/(?:pull|issues)\/(\d+)/) ||
+ const matched = url.match(/https:\/\/github\.com\/([^/]+\/[^/]+)\/(?:pull|issues)\/(\d+)/) ||
url.match(/https:\/\/(npm\.community)\/t\/(?:[^/]+\/)(\d+)/)
- if (!matched) return false
- let repo = matched[1]
- let id = matched[2]
- if (repo !== 'npm/cli') {
+ if (!matched)
+ return false
+ const repo = matched[1]
+ const id = matched[2]
+ if (repo !== 'npm/cli')
return `${repo}#${id}`
- } else {
+ else
return `#${id}`
- }
}
function printCommit (c) {
console.log(`* [\`${c.shortid}\`](https://github.com/npm/cli/commit/${c.fullid})`)
if (c.fixes.length) {
for (const fix of c.fixes) {
- let label = shortname(fix)
- if (label) {
+ const label = shortname(fix)
+ if (label)
console.log(` [${label}](${fix})`)
- }
}
} else if (c.prurl) {
- let label = shortname(c.prurl)
- if (label) {
+ const label = shortname(c.prurl)
+ if (label)
console.log(` [${label}](${c.prurl})`)
- } else {
+ else
console.log(` [#](${c.prurl})`)
- }
}
- let msg = c.message
+ const msg = c.message
.replace(/^\s+/mg, '')
.replace(/^[-a-z]+: /, '')
.replace(/^/mg, ' ')
- .replace(/^ Reviewed-by: @.*/mg, '')
+ .replace(/^ {2}Reviewed-by: @.*/mg, '')
.replace(/\n$/, '')
// backtickify package@version
.replace(/^(\s*@?[^@\s]+@\d+[.]\d+[.]\d+)\b(\s*\S)/g, '$1:$2')
@@ -60,14 +58,13 @@ function printCommit (c) {
.replace(/\b([a-f0-9]{7,8})\b/g, '[`$1`](https://github.com/npm/cli/commit/$1)')
console.log(msg)
// don't assign credit for dep updates
- if (!/^ `[^`]+@\d+\.\d+\.\d+[^`]*`:?$/m.test(msg)) {
+ if (!/^ {2}`[^`]+@\d+\.\d+\.\d+[^`]*`:?$/m.test(msg)) {
if (c.credit) {
c.credit.forEach(function (credit) {
console.log(` ([@${credit}](https://github.com/${credit}))`)
})
- } else {
+ } else
console.log(` ([@${c.author}](https://github.com/${c.author}))`)
- }
}
}
@@ -77,9 +74,9 @@ function main () {
line = line.replace(/\r/g, '')
let m
/* eslint no-cond-assign:0 */
- if (/^---$/.test(line)) {
+ if (/^---$/.test(line))
printCommit(commit)
- } else if (m = line.match(/^([a-f0-9]{7,10}) ([a-f0-9]+) (?:[(]([^)]+)[)] )?(.*?) [(](.*?)[)]/)) {
+ else if (m = line.match(/^([a-f0-9]{7,10}) ([a-f0-9]+) (?:[(]([^)]+)[)] )?(.*?) [(](.*?)[)]/)) {
commit = {
shortid: m[1],
fullid: m[2],
@@ -88,23 +85,23 @@ function main () {
author: m[5],
prurl: null,
fixes: [],
- credit: null
+ credit: null,
}
- } else if (m = line.match(/^PR-URL: (.*)/)) {
+ } else if (m = line.match(/^PR-URL: (.*)/))
commit.prurl = m[1]
- } else if (m = line.match(/^Credit: @(.*)/)) {
- if (!commit.credit) commit.credit = []
+ else if (m = line.match(/^Credit: @(.*)/)) {
+ if (!commit.credit)
+ commit.credit = []
commit.credit.push(m[1])
- } else if (m = line.match(/^(?:Fix(?:es)|Closes?): #?([0-9]+)/)) {
+ } else if (m = line.match(/^(?:Fix(?:es)|Closes?): #?([0-9]+)/))
commit.fixes.push(`https://github.com/npm/cli/issues/${m[1]}`)
- } else if (m = line.match(/^(?:Fix(?:es)|Closes?): ([^#]+)#([0-9]*)/)) {
+ else if (m = line.match(/^(?:Fix(?:es)|Closes?): ([^#]+)#([0-9]*)/))
commit.fixes.push(`https://github.com/${m[1]}/issues/${m[2]}`)
- } else if (m = line.match(/^(?:Fix(?:es)|Closes?): (https?:\/\/.*)/)) {
+ else if (m = line.match(/^(?:Fix(?:es)|Closes?): (https?:\/\/.*)/))
commit.fixes.push(m[1])
- } else if (m = line.match(/^Reviewed-By: @(.*)/)) {
+ else if (m = line.match(/^Reviewed-By: @(.*)/))
commit.reviewed = m[1]
- } else if (/\S/.test(line)) {
+ else if (/\S/.test(line))
commit.message += `\n${line}`
- }
})
}
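
The rewritten shortname() in the hunk above maps a PR or issue URL to the short label the changelog prints. Restated as a standalone sketch with sample inputs (the first URL is this commit's PR; the second is illustrative only):

// Mirror of the shortname() shown in the diff: turn an issue/PR URL into
// the short label used in changelog entries.
function shortname (url) {
  const matched = url.match(/https:\/\/github\.com\/([^/]+\/[^/]+)\/(?:pull|issues)\/(\d+)/) ||
    url.match(/https:\/\/(npm\.community)\/t\/(?:[^/]+\/)(\d+)/)
  if (!matched)
    return false
  const repo = matched[1]
  const id = matched[2]
  return repo === 'npm/cli' ? `#${id}` : `${repo}#${id}`
}

console.log(shortname('https://github.com/npm/cli/pull/3367'))       // '#3367'
console.log(shortname('https://github.com/npm/arborist/issues/100')) // 'npm/arborist#100'
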
diff --git a/scripts/config-doc-command.js b/scripts/config-doc-command.js
index d7f734ed5..48bc02654 100644
--- a/scripts/config-doc-command.js
+++ b/scripts/config-doc-command.js
@@ -1,6 +1,6 @@
const { definitions } = require('../lib/utils/config/index.js')
const { writeFileSync, readFileSync } = require('fs')
-const { resolve, basename, relative } = require('path')
+const { resolve } = require('path')
const configDoc = process.argv[2]
const commandFile = process.argv[3]
diff --git a/scripts/config-doc.js b/scripts/config-doc.js
index 8d8294906..5014bcdc4 100644
--- a/scripts/config-doc.js
+++ b/scripts/config-doc.js
@@ -39,9 +39,9 @@ const addShorthands = doc => {
shorta.localeCompare(shortb, 'en')
})
.map(([short, expansion]) => {
- const dash = short.length === 1 ? '-' : '--'
- return `* \`${dash}${short}\`: \`${expansion.join(' ')}\``
- }).join('\n')
+ const dash = short.length === 1 ? '-' : '--'
+ return `* \`${dash}${short}\`: \`${expansion.join(' ')}\``
+ }).join('\n')
return addBetweenTags(doc, startTag, endTag, body)
}
diff --git a/scripts/docs-build.js b/scripts/docs-build.js
index a1540ebb9..8e217d225 100644
--- a/scripts/docs-build.js
+++ b/scripts/docs-build.js
@@ -8,19 +8,19 @@ var src = args[0]
var dest = args[1] || src
fs.readFile(src, 'utf8', function (err, data) {
- if (err) return console.log(err)
+ if (err)
+ return console.log(err)
function frontmatter (match, p1) {
const fm = { }
p1.split(/\r?\n/).forEach((kv) => {
- let result = kv.match(/^([^\s:]+):\s*(.*)/)
- if (result) {
+ const result = kv.match(/^([^\s:]+):\s*(.*)/)
+ if (result)
fm[result[1]] = result[2]
- }
})
- return `# ${fm['title']}(${fm['section']}) - ${fm['description']}`
+ return `# ${fm.title}(${fm.section}) - ${fm.description}`
}
function replacer (match, p1) {
@@ -35,6 +35,7 @@ fs.readFile(src, 'utf8', function (err, data) {
.trim()
fs.writeFile(dest, marked(result), 'utf8', function (err) {
- if (err) return console.log(err)
+ if (err)
+ return console.log(err)
})
})
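
The docs-build.js hunk only restyles the frontmatter handling; the behavior of turning `key: value` frontmatter into a man-page-style heading is unchanged. Restated as a standalone helper for illustration (the sample frontmatter values are assumptions, not taken from the commit):

// Parse simple `key: value` frontmatter lines into an object, then build the
// heading line docs-build.js emits — same shape as the replacement in the diff.
function heading (frontmatterBlock) {
  const fm = {}
  frontmatterBlock.split(/\r?\n/).forEach((kv) => {
    const result = kv.match(/^([^\s:]+):\s*(.*)/)
    if (result)
      fm[result[1]] = result[2]
  })
  return `# ${fm.title}(${fm.section}) - ${fm.description}`
}

console.log(heading('title: npm-install\nsection: 1\ndescription: Install a package'))
// => '# npm-install(1) - Install a package'
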
diff --git a/scripts/update-dist-tags.js b/scripts/update-dist-tags.js
index f28bfd0b9..371d0c03a 100644
--- a/scripts/update-dist-tags.js
+++ b/scripts/update-dist-tags.js
@@ -79,9 +79,9 @@ function parseOTP (args) {
}
case 1: {
// --otp=123456 or --otp123456
- if (otp) {
+ if (otp)
return otp
- }
+
console.error('Invalid otp value supplied. [CASE 1]')
process.exit(1)
}
@@ -89,9 +89,9 @@ function parseOTP (args) {
// --otp 123456
// INFO: validating the second argument is an otp code
const isValidOtp = PARSE_OTP_VALUE.test(args[1])
- if (isValidOtp) {
+ if (isValidOtp)
return args[1]
- }
+
console.error('Invalid otp value supplied. [CASE 2]')
process.exit(1)
}
diff --git a/smoke-tests/index.js b/smoke-tests/index.js
index d16f49872..c7b2d2a1c 100644
--- a/smoke-tests/index.js
+++ b/smoke-tests/index.js
@@ -12,8 +12,8 @@ t.cleanSnapshot = s => s.split(cwd).join('{CWD}')
.split(process.cwd()).join('{CWD}')
.replace(/\\+/g, '/')
.replace(/\r\n/g, '\n')
- .replace(/\ \(in a browser\)/g, '')
- .replace(/^npm@.*\ /mg, 'npm ')
+ .replace(/ \(in a browser\)/g, '')
+ .replace(/^npm@.* /mg, 'npm ')
// setup server
const { start, stop, registry } = require('./server.js')