Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/npm/cli.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
authorclaudiahdz <cghr1990@gmail.com>2020-03-10 22:38:54 +0300
committerisaacs <i@izs.me>2020-05-08 04:12:57 +0300
commitc922372b4d673b19d0f5d0e317da764f77888923 (patch)
tree3426f086ad6f8076ef8f84edcd92885821fa8292 /lib
parent0cc10795347ec5f9f32d0fa098bdb8ca884d27e7 (diff)
feat: publish only from local directory
Diffstat (limited to 'lib')
-rw-r--r--lib/pack.js107
-rw-r--r--lib/publish.js207
-rw-r--r--lib/utils/tar.js112
3 files changed, 183 insertions, 243 deletions
diff --git a/lib/pack.js b/lib/pack.js
index 8189cb6ce..92345d95d 100644
--- a/lib/pack.js
+++ b/lib/pack.js
@@ -9,15 +9,14 @@ module.exports = pack
const BB = require('bluebird')
-const byteSize = require('byte-size')
const cacache = require('cacache')
-const columnify = require('columnify')
const cp = require('child_process')
const deprCheck = require('./utils/depr-check')
const fpm = require('./fetch-package-metadata')
const fs = require('graceful-fs')
const install = require('./install')
const lifecycle = BB.promisify(require('./utils/lifecycle'))
+const { getTarContents, logTarContents } = require('./utils/tar-contents')
const log = require('npmlog')
const move = require('move-concurrently')
const npm = require('./npm')
@@ -33,7 +32,6 @@ const pinflight = require('promise-inflight')
const readJson = BB.promisify(require('read-package-json'))
const tar = require('tar')
const packlist = require('npm-packlist')
-const ssri = require('ssri')
pack.usage = 'npm pack [[<@scope>/]<pkg>...] [--dry-run]'
@@ -55,7 +53,7 @@ function pack (args, silent, cb) {
if (!silent && npm.config.get('json')) {
output(JSON.stringify(tarballs, null, 2))
} else if (!silent) {
- tarballs.forEach(logContents)
+ tarballs.forEach(logTarContents)
output(tarballs.map((f) => path.relative(cwd, f.filename)).join('\n'))
}
return tarballs
@@ -97,7 +95,7 @@ function packFromPackage (arg, target, filename) {
return pacote.extract(arg, tmpTarget, opts)
.then(() => readJson(path.join(tmpTarget, 'package.json')))
}))
- .then((pkg) => getContents(pkg, target, filename))
+ .then((pkg) => getTarContents(pkg, target, filename))
}
module.exports.prepareDirectory = prepareDirectory
@@ -156,7 +154,7 @@ function packDirectory (mani, dir, target, filename, logIt, dryRun) {
// specifically with @ signs, so we just neutralize that one
// and any such future "features" by prepending `./`
.then((files) => tar.create(tarOpt, files.map((f) => `./${f}`)))
- .then(() => getContents(pkg, tmpTarget, filename, logIt))
+ .then(() => getTarContents(pkg, tmpTarget, filename, logIt))
// thread the content info through
.tap(() => {
if (dryRun) {
@@ -170,103 +168,6 @@ function packDirectory (mani, dir, target, filename, logIt, dryRun) {
})
}
-module.exports.logContents = logContents
-function logContents (tarball) {
- log.notice('')
- log.notice('', `${npm.config.get('unicode') ? '📦 ' : 'package:'} ${tarball.name}@${tarball.version}`)
- log.notice('=== Tarball Contents ===')
- if (tarball.files.length) {
- log.notice('', columnify(tarball.files.map((f) => {
- const bytes = byteSize(f.size)
- return {path: f.path, size: `${bytes.value}${bytes.unit}`}
- }), {
- include: ['size', 'path'],
- showHeaders: false
- }))
- }
- if (tarball.bundled.length) {
- log.notice('=== Bundled Dependencies ===')
- tarball.bundled.forEach((name) => log.notice('', name))
- }
- log.notice('=== Tarball Details ===')
- log.notice('', columnify([
- {name: 'name:', value: tarball.name},
- {name: 'version:', value: tarball.version},
- tarball.filename && {name: 'filename:', value: tarball.filename},
- {name: 'package size:', value: byteSize(tarball.size)},
- {name: 'unpacked size:', value: byteSize(tarball.unpackedSize)},
- {name: 'shasum:', value: tarball.shasum},
- {
- name: 'integrity:',
- value: tarball.integrity.toString().substr(0, 20) + '[...]' + tarball.integrity.toString().substr(80)},
- tarball.bundled.length && {name: 'bundled deps:', value: tarball.bundled.length},
- tarball.bundled.length && {name: 'bundled files:', value: tarball.entryCount - tarball.files.length},
- tarball.bundled.length && {name: 'own files:', value: tarball.files.length},
- {name: 'total files:', value: tarball.entryCount}
- ].filter((x) => x), {
- include: ['name', 'value'],
- showHeaders: false
- }))
- log.notice('', '')
-}
-
-module.exports.getContents = getContents
-function getContents (pkg, target, filename, silent) {
- const bundledWanted = new Set(
- pkg.bundleDependencies ||
- pkg.bundledDependencies ||
- []
- )
- const files = []
- const bundled = new Set()
- let totalEntries = 0
- let totalEntrySize = 0
- return tar.t({
- file: target,
- onentry (entry) {
- totalEntries++
- totalEntrySize += entry.size
- const p = entry.path
- if (p.startsWith('package/node_modules/')) {
- const name = p.match(/^package\/node_modules\/((?:@[^/]+\/)?[^/]+)/)[1]
- if (bundledWanted.has(name)) {
- bundled.add(name)
- }
- } else {
- files.push({
- path: entry.path.replace(/^package\//, ''),
- size: entry.size,
- mode: entry.mode
- })
- }
- },
- strip: 1
- })
- .then(() => BB.all([
- BB.fromNode((cb) => fs.stat(target, cb)),
- ssri.fromStream(fs.createReadStream(target), {
- algorithms: ['sha1', 'sha512']
- })
- ]))
- .then(([stat, integrity]) => {
- const shasum = integrity['sha1'][0].hexDigest()
- return {
- id: pkg._id,
- name: pkg.name,
- version: pkg.version,
- from: pkg._from,
- size: stat.size,
- unpackedSize: totalEntrySize,
- shasum,
- integrity: ssri.parse(integrity['sha512'][0]),
- filename,
- files,
- entryCount: totalEntries,
- bundled: Array.from(bundled)
- }
- })
-}
-
const PASSTHROUGH_OPTS = [
'always-auth',
'auth-type',
diff --git a/lib/publish.js b/lib/publish.js
index 0d3aef678..4c496a3e4 100644
--- a/lib/publish.js
+++ b/lib/publish.js
@@ -1,167 +1,94 @@
'use strict'
-const BB = require('bluebird')
-
-const cacache = require('cacache')
-const figgyPudding = require('figgy-pudding')
-const libpub = require('libnpmpublish').publish
-const libunpub = require('libnpmpublish').unpublish
-const lifecycle = BB.promisify(require('./utils/lifecycle.js'))
+const util = require('util')
const log = require('npmlog')
-const npa = require('npm-package-arg')
-const npmConfig = require('./config/figgy-config.js')
+const semver = require('semver')
+const pack = require('libnpmpack')
+const libpub = require('libnpmpublish').publish
+const runScript = require('@npmcli/run-script')
+
+const npm = require('./npm.js')
const output = require('./utils/output.js')
const otplease = require('./utils/otplease.js')
-const pack = require('./pack')
-const { tarball, extract } = require('pacote')
-const path = require('path')
-const readFileAsync = BB.promisify(require('graceful-fs').readFile)
-const readJson = BB.promisify(require('read-package-json'))
-const semver = require('semver')
-const statAsync = BB.promisify(require('graceful-fs').stat)
+const { getContents, logTar } = require('./utils/tar.js')
+
+const readJson = util.promisify(require('read-package-json'))
-publish.usage = 'npm publish [<tarball>|<folder>] [--tag <tag>] [--access <public|restricted>] [--dry-run]' +
+cmd.usage = 'npm publish [<folder>] [--tag <tag>] [--access <public|restricted>] [--dry-run]' +
"\n\nPublishes '.' if no argument supplied" +
'\n\nSets tag `latest` if no --tag specified'
-publish.completion = function (opts, cb) {
- // publish can complete to a folder with a package.json
- // or a tarball, or a tarball url.
- // for now, not yet implemented.
- return cb()
+module.exports = cmd
+function cmd (args, cb) {
+ publish(args, cb)
+ .then(() => cb())
+ .catch(cb)
}
-const PublishConfig = figgyPudding({
- dryRun: 'dry-run',
- 'dry-run': { default: false },
- force: { default: false },
- json: { default: false },
- Promise: { default: () => Promise },
- tag: { default: 'latest' },
- tmp: {}
-})
-
-module.exports = publish
-function publish (args, isRetry, cb) {
- if (typeof cb !== 'function') {
- cb = isRetry
- isRetry = false
- }
+async function publish (args, cb) {
if (args.length === 0) args = ['.']
-if (args.length !== 1) return cb(publish.usage)
+if (args.length !== 1) return cb(cmd.usage)
- log.verbose('publish', args)
+ log.verbose('publish', args)
- const opts = PublishConfig(npmConfig())
- const t = opts.tag.trim()
- if (semver.validRange(t)) {
+ const opts = { ...npm.flatOptions }
+ const { json, defaultTag } = opts
+ if (semver.validRange(defaultTag)) {
-return cb(new Error('Tag name must not be a valid SemVer range: ' + t))
+return cb(new Error('Tag name must not be a valid SemVer range: ' + defaultTag))
}
- return publish_(args[0], opts)
- .then((tarball) => {
- const silent = log.level === 'silent'
- if (!silent && opts.json) {
- output(JSON.stringify(tarball, null, 2))
- } else if (!silent) {
- output(`+ ${tarball.id}`)
- }
- })
- .nodeify(cb)
+ const tarball = await publish_(args[0], opts)
+ const silent = log.level === 'silent'
+ if (!silent && json) {
+ output(JSON.stringify(tarball, null, 2))
+ } else if (!silent) {
+ output(`+ ${tarball.id}`)
+ }
+
+ return tarball
}
-function publish_ (arg, opts) {
- return statAsync(arg).then((stat) => {
- if (stat.isDirectory()) {
- return stat
- } else {
- const err = new Error('not a directory')
- err.code = 'ENOTDIR'
- throw err
- }
- }).then(() => {
- return publishFromDirectory(arg, opts)
- }, (err) => {
- if (err.code !== 'ENOENT' && err.code !== 'ENOTDIR') {
- throw err
- } else {
- return publishFromPackage(arg, opts)
- }
+async function publish_ (arg, opts) {
+ const { unicode, dryRun, json } = opts
+ let manifest = await readJson(`${arg}/package.json`)
+
+ // prepublishOnly
+ await runScript({
+ event: 'prepublishOnly',
+ path: arg,
+ stdio: 'inherit',
+ pkg: manifest
})
-}
+
+ const tarballData = await pack(arg)
+ const pkgContents = await getContents(manifest, tarballData)
-function publishFromDirectory (arg, opts) {
- // All this readJson is because any of the given scripts might modify the
- // package.json in question, so we need to refresh after every step.
- let contents
- return pack.prepareDirectory(arg).then(() => {
- return readJson(path.join(arg, 'package.json'))
- }).then((pkg) => {
- return lifecycle(pkg, 'prepublishOnly', arg)
- }).then(() => {
- return readJson(path.join(arg, 'package.json'))
- }).then((pkg) => {
- return cacache.tmp.withTmp(opts.tmp, {tmpPrefix: 'fromDir'}, (tmpDir) => {
- const target = path.join(tmpDir, 'package.tgz')
- return pack.packDirectory(pkg, arg, target, null, true)
- .tap((c) => { contents = c })
- .then((c) => !opts.json && pack.logContents(c))
- .then(() => upload(pkg, false, target, opts))
- })
- }).then(() => {
- return readJson(path.join(arg, 'package.json'))
- }).tap((pkg) => {
- return lifecycle(pkg, 'publish', arg)
- }).tap((pkg) => {
- return lifecycle(pkg, 'postpublish', arg)
+ if (!json) {
+ logTar(pkgContents, { log, unicode })
+ }
+
+ if (!dryRun) {
+ // The purpose of re-reading the manifest is in case it changed,
+ // so that we send the latest and greatest thing to the registry
+ manifest = await readJson(`${arg}/package.json`)
+ await otplease(opts, opts => libpub(arg, manifest, opts))
+ }
+
+ // publish
+ await runScript({
+ event: 'publish',
+ path: arg,
+ stdio: 'inherit',
+ pkg: manifest
})
- .then(() => contents)
-}
-function publishFromPackage (arg, opts) {
- return cacache.tmp.withTmp(opts.tmp, {tmpPrefix: 'fromPackage'}, tmp => {
- const extracted = path.join(tmp, 'package')
- const target = path.join(tmp, 'package.json')
- return tarball.toFile(arg, target, opts)
- .then(() => extract(arg, extracted, opts))
- .then(() => readJson(path.join(extracted, 'package.json')))
- .then((pkg) => {
- return BB.resolve(pack.getContents(pkg, target))
- .tap((c) => !opts.json && pack.logContents(c))
- .tap(() => upload(pkg, false, target, opts))
- })
+ // postpublish
+ await runScript({
+ event: 'postpublish',
+ path: arg,
+ stdio: 'inherit',
+ pkg: manifest
})
-}
-function upload (pkg, isRetry, cached, opts) {
- if (!opts.dryRun) {
- return readFileAsync(cached).then(tarball => {
- return otplease(opts, opts => {
- return libpub(pkg, tarball, opts)
- }).catch(err => {
- if (
- err.code === 'EPUBLISHCONFLICT' &&
- opts.force &&
- !isRetry
- ) {
- log.warn('publish', 'Forced publish over ' + pkg._id)
- return otplease(opts, opts => libunpub(
- npa.resolve(pkg.name, pkg.version), opts
- )).finally(() => {
- // ignore errors. Use the force. Reach out with your feelings.
- return otplease(opts, opts => {
- return upload(pkg, true, tarball, opts)
- }).catch(() => {
- // but if it fails again, then report the first error.
- throw err
- })
- })
- } else {
- throw err
- }
- })
- })
- } else {
- return opts.Promise.resolve(true)
- }
+ return pkgContents
}
diff --git a/lib/utils/tar.js b/lib/utils/tar.js
new file mode 100644
index 000000000..c1cd7e714
--- /dev/null
+++ b/lib/utils/tar.js
@@ -0,0 +1,112 @@
+'use strict'
+
+const tar = require('tar')
+const ssri = require('ssri')
+const byteSize = require('byte-size')
+const columnify = require('columnify')
+
+module.exports = { logTar, getContents }
+
+function logTar (tarball, opts = {}) {
+ const { unicode, log } = opts
+ log.notice('')
+ log.notice('', `${unicode ? '📦 ' : 'package:'} ${tarball.name}@${tarball.version}`)
+ log.notice('=== Tarball Contents ===')
+ if (tarball.files.length) {
+ log.notice('', columnify(tarball.files.map((f) => {
+ const bytes = byteSize(f.size)
+ return { path: f.path, size: `${bytes.value}${bytes.unit}` }
+ }), {
+ include: ['size', 'path'],
+ showHeaders: false
+ }))
+ }
+ if (tarball.bundled.length) {
+ log.notice('=== Bundled Dependencies ===')
+ tarball.bundled.forEach((name) => log.notice('', name))
+ }
+ log.notice('=== Tarball Details ===')
+ log.notice('', columnify([
+ { name: 'name:', value: tarball.name },
+ { name: 'version:', value: tarball.version },
+ tarball.filename && { name: 'filename:', value: tarball.filename },
+ { name: 'package size:', value: byteSize(tarball.size) },
+ { name: 'unpacked size:', value: byteSize(tarball.unpackedSize) },
+ { name: 'shasum:', value: tarball.shasum },
+ {
+ name: 'integrity:',
+ value: tarball.integrity.toString().substr(0, 20) + '[...]' + tarball.integrity.toString().substr(80)
+ },
+ tarball.bundled.length && { name: 'bundled deps:', value: tarball.bundled.length },
+ tarball.bundled.length && { name: 'bundled files:', value: tarball.entryCount - tarball.files.length },
+ tarball.bundled.length && { name: 'own files:', value: tarball.files.length },
+ { name: 'total files:', value: tarball.entryCount }
+ ].filter((x) => x), {
+ include: ['name', 'value'],
+ showHeaders: false
+ }))
+ log.notice('', '')
+}
+
+async function getContents (manifest, tarball) {
+ const files = []
+ const bundled = new Set()
+ let totalEntries = 0
+ let totalEntrySize = 0
+
+ // reads contents of tarball
+ const stream = tar.t({
+ onentry (entry) {
+ totalEntries++
+ totalEntrySize += entry.size
+ const p = entry.path
+ if (p.startsWith('package/node_modules/')) {
+ const name = p.match(/^package\/node_modules\/((?:@[^/]+\/)?[^/]+)/)[1]
+ bundled.add(name)
+ }
+ files.push({
+ path: entry.path.replace(/^package\//, ''),
+ size: entry.size,
+ mode: entry.mode
+ })
+ }
+ })
+ stream.end(tarball)
+
+ const integrity = await ssri.fromData(tarball, {
+ algorithms: ['sha1', 'sha512']
+ })
+
+ const comparator = (a, b) => {
+ return a.path.localeCompare(b.path, undefined, {
+ sensitivity: 'case',
+ numeric: true
+ })
+ }
+
+ const isUpper = (str) => {
+ const ch = str.charAt(0)
+ return ch >= 'A' && ch <= 'Z'
+ }
+
+ const uppers = files.filter(file => isUpper(file.path))
+ const others = files.filter(file => !isUpper(file.path))
+
+ uppers.sort(comparator)
+ others.sort(comparator)
+
+ const shasum = integrity.sha1[0].hexDigest()
+ return {
+ id: manifest._id || `${manifest.name}@${manifest.version}`,
+ name: manifest.name,
+ version: manifest.version,
+ size: tarball.length,
+ unpackedSize: totalEntrySize,
+ shasum,
+ integrity: ssri.parse(integrity.sha512[0]),
+ filename: `${manifest.name}-${manifest.version}.tgz`,
+ files: uppers.concat(others),
+ entryCount: totalEntries,
+ bundled: Array.from(bundled)
+ }
+}