Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/npm/cli.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--lib/build.js4
-rw-r--r--lib/cache/add-local.js34
-rw-r--r--lib/cache/add-named.js2
-rw-r--r--lib/cache/add-remote-git.js2
-rw-r--r--lib/config/defaults.js1
-rw-r--r--lib/dedupe.js420
-rw-r--r--lib/fetch-package-metadata.js5
-rw-r--r--lib/install.js1417
-rw-r--r--lib/install/action/build.js12
-rw-r--r--lib/install/action/extract.js17
-rw-r--r--lib/install/action/fetch.js27
-rw-r--r--lib/install/action/finalize.js77
-rw-r--r--lib/install/action/install.js7
-rw-r--r--lib/install/action/move.js32
-rw-r--r--lib/install/action/postinstall.js7
-rw-r--r--lib/install/action/preinstall.js7
-rw-r--r--lib/install/action/prepublish.js7
-rw-r--r--lib/install/action/remove.js64
-rw-r--r--lib/install/action/test.js7
-rw-r--r--lib/install/actions.js78
-rw-r--r--lib/install/and-add-parent-to-errors.js13
-rw-r--r--lib/install/and-finish-tracker.js16
-rw-r--r--lib/install/decompose-actions.js37
-rw-r--r--lib/install/deps.js423
-rw-r--r--lib/install/diff-trees.js91
-rw-r--r--lib/install/flatten-tree.js25
-rw-r--r--lib/install/inflate-shrinkwrap.js27
-rw-r--r--lib/install/logical-tree.js69
-rw-r--r--lib/install/node.js40
-rw-r--r--lib/install/prune-tree.js31
-rw-r--r--lib/install/save.js197
-rw-r--r--lib/install/update-package-json.js30
-rw-r--r--lib/install/validate-tree.js38
-rw-r--r--lib/link.js4
-rw-r--r--lib/ls.js58
-rw-r--r--lib/outdated.js313
-rw-r--r--lib/rebuild.js2
-rw-r--r--lib/unbuild.js3
-rw-r--r--lib/uninstall.js139
-rw-r--r--lib/update.js69
-rw-r--r--lib/utils/error-handler.js9
-rw-r--r--lib/utils/locker.js4
-rw-r--r--lib/utils/tar.js148
-rw-r--r--test/tap/404-parent.js5
-rw-r--r--test/tap/dedupe.js5
-rw-r--r--test/tap/hosted-shortcut.js (renamed from test/tap/github-shortcut.js)12
-rw-r--r--test/tap/ls-l-depth-0.js2
47 files changed, 2245 insertions, 1792 deletions
diff --git a/lib/build.js b/lib/build.js
index 62f411226..1c683da35 100644
--- a/lib/build.js
+++ b/lib/build.js
@@ -85,7 +85,7 @@ var linkStuff = build.linkStuff = function (pkg, folder, global, didRB, cb) {
// if it's global, and folder is in {prefix}/node_modules,
// then bins are in {prefix}/bin
// otherwise, then bins are in folder/../.bin
- var parent = pkg.name[0] === '@' ? path.dirname(path.dirname(folder)) : path.dirname(folder)
+ var parent = pkg.name && pkg.name[0] === "@" ? path.dirname(path.dirname(folder)) : path.dirname(folder)
var gnm = global && npm.globalDir
var gtop = parent === gnm
@@ -199,7 +199,9 @@ function linkBins (pkg, folder, parent, gtop, cb) {
, out = npm.config.get("parseable")
? dest + "::" + src + ":BINFILE"
: dest + " -> " + src
+ log.clearProgress()
console.log(out)
+ log.showProgress()
cb()
})
})
diff --git a/lib/cache/add-local.js b/lib/cache/add-local.js
index e7d286e4f..dbe55ddc2 100644
--- a/lib/cache/add-local.js
+++ b/lib/cache/add-local.js
@@ -13,6 +13,8 @@ var assert = require("assert")
, addLocalTarball = require("./add-local-tarball.js")
, sha = require("sha")
, inflight = require("inflight")
+ , lifecycle = require("../utils/lifecycle.js")
+ , iferr = require("iferr")
module.exports = addLocal
@@ -27,7 +29,7 @@ function addLocal (p, pkgData, cb_) {
log.error("addLocal", "Could not install %s", p.spec)
return cb_(er)
}
- if (data && !data._fromGithub) {
+ if (data && !data._fromHosted) {
data._from = path.relative(npm.prefix, p.spec) || "."
var resolved = path.relative(npm.prefix, p.spec)
if (resolved) data._resolved = "file:"+resolved
@@ -90,17 +92,25 @@ function addLocalDirectory (p, pkgData, shasum, cb) {
getCacheStat(function (er, cs) {
mkdir(path.dirname(pj), function (er, made) {
if (er) return cb(er)
- var fancy = !pathIsInside(p, npm.tmp)
- tar.pack(tgz, p, data, fancy, function (er) {
- if (er) {
- log.error("addLocalDirectory", "Could not pack", p, "to", tgz)
- return cb(er)
- }
-
- if (!cs || isNaN(cs.uid) || isNaN(cs.gid)) wrapped()
-
- chownr(made || tgz, cs.uid, cs.gid, wrapped)
- })
+ var doPrePublish = !pathIsInside(p, npm.tmp)
+ if (doPrePublish) {
+ lifecycle(data, "prepublish", p, iferr(cb, thenPack))
+ }
+ else {
+ thenPack()
+ }
+ function thenPack () {
+ tar.pack(tgz, p, data, function (er) {
+ if (er) {
+ log.error("addLocalDirectory", "Could not pack", p, "to", tgz)
+ return cb(er)
+ }
+
+ if (!cs || isNaN(cs.uid) || isNaN(cs.gid)) wrapped()
+
+ chownr(made || tgz, cs.uid, cs.gid, wrapped)
+ })
+ }
})
})
diff --git a/lib/cache/add-named.js b/lib/cache/add-named.js
index cd06aa288..fb973b59e 100644
--- a/lib/cache/add-named.js
+++ b/lib/cache/add-named.js
@@ -49,7 +49,7 @@ function addNamed (name, version, data, cb_) {
log.silly("addNamed", key)
function cb (er, data) {
- if (data && !data._fromGithub) data._from = key
+ if (data && !data._fromHosted) data._from = key
cb_(er, data)
}
diff --git a/lib/cache/add-remote-git.js b/lib/cache/add-remote-git.js
index f0438c9bc..5a136190b 100644
--- a/lib/cache/add-remote-git.js
+++ b/lib/cache/add-remote-git.js
@@ -76,7 +76,7 @@ function addRemoteGit (uri, _cb) {
function tryGitProto (from, hostedInfo, cb) {
var gitURL = hostedInfo.git()
- if (!gitURL) return trySSH(from, hostedInfo, cb)
+ if (!gitURL) return tryHTTPS(from, hostedInfo, cb)
log.silly('tryGitProto', 'attempting to clone', gitURL)
tryClone(from, gitURL, true, function (er) {
diff --git a/lib/config/defaults.js b/lib/config/defaults.js
index b2d3f96e6..de505f95c 100644
--- a/lib/config/defaults.js
+++ b/lib/config/defaults.js
@@ -131,6 +131,7 @@ Object.defineProperty(exports, "defaults", {get: function () {
, depth: Infinity
, description : true
, dev : false
+ , "dry-run" : false
, editor : osenv.editor()
, "engine-strict": false
, force : false
diff --git a/lib/dedupe.js b/lib/dedupe.js
index c63705e18..a51df149a 100644
--- a/lib/dedupe.js
+++ b/lib/dedupe.js
@@ -1,375 +1,79 @@
-// traverse the node_modules/package.json tree
-// looking for duplicates. If any duplicates are found,
-// then move them up to the highest level necessary
-// in order to make them no longer duplicated.
-//
-// This is kind of ugly, and really highlights the need for
-// much better "put pkg X at folder Y" abstraction. Oh well,
-// whatever. Perfect enemy of the good, and all that.
-
-var fs = require("fs")
-var asyncMap = require("slide").asyncMap
-var path = require("path")
-var readJson = require("read-package-json")
-var semver = require("semver")
-var rm = require("./utils/gently-rm.js")
-var log = require("npmlog")
-var npm = require("./npm.js")
-var mapToRegistry = require("./utils/map-to-registry.js")
+var util = require('util')
+var path = require('path')
+var validate = require('aproba')
+var without = require('lodash.without')
+var asyncMap = require('slide').asyncMap
+var chain = require('slide').chain
+var npm = require('./npm.js')
+var Installer = require('./install.js').Installer
+var findRequirement = require('./install/deps.js').findRequirement
+var earliestInstallable = require('./install/deps.js').earliestInstallable
+var decomposeActions = require('./install/decompose-actions.js')
+var npa = require('npm-package-arg')
+var recalculateMetadata = require('./install/deps.js').recalculateMetadata
+var log = require('npmlog')
module.exports = dedupe
+module.exports.Deduper = Deduper
-dedupe.usage = "npm dedupe [pkg pkg...]"
+dedupe.usage = 'npm dedupe'
-function dedupe (args, silent, cb) {
- if (typeof silent === "function") cb = silent, silent = false
+function dedupe (args, cb) {
+ validate('AF', arguments)
+ // the /path/to/node_modules/..
+ var where = path.resolve(npm.dir, '..')
var dryrun = false
if (npm.command.match(/^find/)) dryrun = true
- return dedupe_(npm.prefix, args, {}, dryrun, silent, cb)
-}
-
-function dedupe_ (dir, filter, unavoidable, dryrun, silent, cb) {
- readInstalled(path.resolve(dir), {}, null, function (er, data, counter) {
- if (er) {
- return cb(er)
- }
-
- if (!data) {
- return cb()
- }
-
- // find out which things are dupes
- var dupes = Object.keys(counter || {}).filter(function (k) {
- if (filter.length && -1 === filter.indexOf(k)) return false
- return counter[k] > 1 && !unavoidable[k]
- }).reduce(function (s, k) {
- s[k] = []
- return s
- }, {})
-
- // any that are unavoidable need to remain as they are. don't even
- // try to touch them or figure it out. Maybe some day, we can do
- // something a bit more clever here, but for now, just skip over it,
- // and all its children.
- ;(function U (obj) {
- if (unavoidable[obj.name]) {
- obj.unavoidable = true
- }
- if (obj.parent && obj.parent.unavoidable) {
- obj.unavoidable = true
- }
- Object.keys(obj.children).forEach(function (k) {
- U(obj.children[k])
- })
- })(data)
-
- // then collect them up and figure out who needs them
- ;(function C (obj) {
- if (dupes[obj.name] && !obj.unavoidable) {
- dupes[obj.name].push(obj)
- obj.duplicate = true
- }
- obj.dependents = whoDepends(obj)
- Object.keys(obj.children).forEach(function (k) {
- C(obj.children[k])
- })
- })(data)
-
- if (dryrun) {
- var k = Object.keys(dupes)
- if (!k.length) return cb()
- return npm.commands.ls(k, silent, cb)
- }
-
- var summary = Object.keys(dupes).map(function (n) {
- return [n, dupes[n].filter(function (d) {
- return d && d.parent && !d.parent.duplicate && !d.unavoidable
- }).map(function M (d) {
- return [d.path, d.version, d.dependents.map(function (k) {
- return [k.path, k.version, k.dependencies[d.name] || ""]
- })]
- })]
- }).map(function (item) {
- var set = item[1]
-
- var ranges = set.map(function (i) {
- return i[2].map(function (d) {
- return d[2]
- })
- }).reduce(function (l, r) {
- return l.concat(r)
- }, []).map(function (v, i, set) {
- if (set.indexOf(v) !== i) return false
- return v
- }).filter(function (v) {
- return v !== false
- })
-
- var locs = set.map(function (i) {
- return i[0]
- })
-
- var versions = set.map(function (i) {
- return i[1]
- }).filter(function (v, i, set) {
- return set.indexOf(v) === i
- })
-
- var has = set.map(function (i) {
- return [i[0], i[1]]
- }).reduce(function (set, kv) {
- set[kv[0]] = kv[1]
- return set
- }, {})
+ if (npm.config.get('dry-run')) dryrun = true
- var loc = locs.length ? locs.reduce(function (a, b) {
- // a=/path/to/node_modules/foo/node_modules/bar
- // b=/path/to/node_modules/elk/node_modules/bar
- // ==/path/to/node_modules/bar
- var nmReg = new RegExp("\\" + path.sep + "node_modules\\" + path.sep)
- a = a.split(nmReg)
- b = b.split(nmReg)
- var name = a.pop()
- b.pop()
- // find the longest chain that both A and B share.
- // then push the name back on it, and join by /node_modules/
- for (var i = 0, al = a.length, bl = b.length; i < al && i < bl && a[i] === b[i]; i++);
- return a.slice(0, i).concat(name).join(path.sep + "node_modules" + path.sep)
- }) : undefined
-
- return [item[0], { item: item
- , ranges: ranges
- , locs: locs
- , loc: loc
- , has: has
- , versions: versions
- }]
- }).filter(function (i) {
- return i[1].loc
- })
-
- findVersions(npm, summary, function (er, set) {
- if (er) return cb(er)
- if (!set.length) return cb()
- installAndRetest(set, filter, dir, unavoidable, silent, cb)
- })
- })
+ new Deduper(where, dryrun).run(cb)
}
-function installAndRetest (set, filter, dir, unavoidable, silent, cb) {
- //return cb(null, set)
- var remove = []
-
- asyncMap(set, function (item, cb) {
- // [name, has, loc, locMatch, regMatch, others]
- var name = item[0]
- var has = item[1]
- var where = item[2]
- var locMatch = item[3]
- var regMatch = item[4]
- var others = item[5]
-
- // nothing to be done here. oh well. just a conflict.
- if (!locMatch && !regMatch) {
- log.warn("unavoidable conflict", item[0], item[1])
- log.warn("unavoidable conflict", "Not de-duplicating")
- unavoidable[item[0]] = true
- return cb()
- }
-
- // nothing to do except to clean up the extraneous deps
- if (locMatch && has[where] === locMatch) {
- remove.push.apply(remove, others)
- return cb()
- }
-
- if (regMatch) {
- var what = name + "@" + regMatch
- // where is /path/to/node_modules/foo/node_modules/bar
- // for package "bar", but we need it to be just
- // /path/to/node_modules/foo
- var nmReg = new RegExp("\\" + path.sep + "node_modules\\" + path.sep)
- where = where.split(nmReg)
- where.pop()
- where = where.join(path.sep + "node_modules" + path.sep)
- remove.push.apply(remove, others)
-
- return npm.commands.install(where, what, cb)
- }
-
- // hrm?
- return cb(new Error("danger zone\n" + name + " " +
- regMatch + " " + locMatch))
-
- }, function (er) {
- if (er) return cb(er)
- asyncMap(remove, rm, function (er) {
- if (er) return cb(er)
- remove.forEach(function (r) {
- log.info("rm", r)
- })
- dedupe_(dir, filter, unavoidable, false, silent, cb)
- })
- })
+function Deduper (where, dryrun) {
+ validate('SB', arguments)
+ Installer.call(this, where, dryrun, [])
+ this.noPackageJsonOk = true
}
-
-function findVersions (npm, summary, cb) {
- // now, for each item in the summary, try to find the maximum version
- // that will satisfy all the ranges. next step is to install it at
- // the specified location.
- asyncMap(summary, function (item, cb) {
- var name = item[0]
- var data = item[1]
- var loc = data.loc
- var locs = data.locs.filter(function (l) {
- return l !== loc
- })
-
- // not actually a dupe, or perhaps all the other copies were
- // children of a dupe, so this'll maybe be picked up later.
- if (locs.length === 0) {
- return cb(null, [])
- }
-
- // { <folder>: <version> }
- var has = data.has
-
- // the versions that we already have.
- // if one of these is ok, then prefer to use that.
- // otherwise, try fetching from the registry.
- var versions = data.versions
-
- var ranges = data.ranges
- mapToRegistry(name, npm.config, function (er, uri, auth) {
- if (er) return cb(er)
-
- npm.registry.get(uri, { auth : auth }, next)
- })
-
- function next (er, data) {
- var regVersions = er ? [] : Object.keys(data.versions)
- var locMatch = bestMatch(versions, ranges)
- var tag = npm.config.get("tag")
- var distTag = data["dist-tags"] && data["dist-tags"][tag]
-
- var regMatch
- if (distTag && data.versions[distTag] && matches(distTag, ranges)) {
- regMatch = distTag
- } else {
- regMatch = bestMatch(regVersions, ranges)
- }
-
- cb(null, [[name, has, loc, locMatch, regMatch, locs]])
- }
- }, cb)
-}
-
-function matches (version, ranges) {
- return !ranges.some(function (r) {
- return !semver.satisfies(version, r, true)
+util.inherits(Deduper, Installer)
+Deduper.prototype.loadAllDepsIntoIdealTree = function (cb) {
+ validate('F', arguments)
+ var idealTree = this.idealTree
+ var differences = this.differences
+ Installer.prototype.loadAllDepsIntoIdealTree.call(this, function (er) {
+ if (er) return cb(er)
+ hoistChildren(idealTree, differences, cb)
})
}
-function bestMatch (versions, ranges) {
- return versions.filter(function (v) {
- return matches(v, ranges)
- }).sort(semver.compareLoose).pop()
+Deduper.prototype.generateActionsToTake = function (cb) {
+ validate('F', arguments)
+ decomposeActions(this.differences, this.todo, this.progress.generateActionsToTake, cb)
}
-
-function readInstalled (dir, counter, parent, cb) {
- var pkg, children, realpath
-
- fs.realpath(dir, function (er, rp) {
- realpath = rp
- next()
- })
-
- readJson(path.resolve(dir, "package.json"), function (er, data) {
- if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
- if (er) return cb() // not a package, probably.
- counter[data.name] = counter[data.name] || 0
- counter[data.name]++
- pkg =
- { _id: data._id
- , name: data.name
- , version: data.version
- , dependencies: data.dependencies || {}
- , optionalDependencies: data.optionalDependencies || {}
- , devDependencies: data.devDependencies || {}
- , bundledDependencies: data.bundledDependencies || []
- , path: dir
- , realPath: dir
- , children: {}
- , parent: parent
- , family: Object.create(parent ? parent.family : null)
- , unavoidable: false
- , duplicate: false
- }
- if (parent) {
- parent.children[data.name] = pkg
- parent.family[data.name] = pkg
+function hoistChildren (tree, diff, next) {
+ validate('OAF', arguments)
+ asyncMap(tree.children, function (child, done) {
+ if (!tree.parent) return hoistChildren(child, diff, done)
+ var better = findRequirement(tree.parent, child.package.name, child.package._requested || npa(child.package.name + '@' + child.package.version))
+ if (better) {
+ tree.children = without(tree.children, child)
+ diff.push(['remove', child])
+ return recalculateMetadata(tree, log, done)
}
- next()
- })
-
- fs.readdir(path.resolve(dir, "node_modules"), function (er, c) {
- children = children || [] // error is ok, just means no children.
- // check if there are scoped packages.
- asyncMap(c || [], function (child, cb) {
- if (child.indexOf('@') === 0) {
- fs.readdir(path.resolve(dir, "node_modules", child), function (er, scopedChildren) {
- // error is ok, just means no children.
- (scopedChildren || []).forEach(function (sc) {
- children.push(path.join(child, sc))
- })
- cb()
- })
- } else {
- children.push(child)
- cb()
- }
- }, function (er) {
- if (er) return cb(er)
- children = children.filter(function (p) {
- return !p.match(/^[\._-]/)
- })
- next();
- });
- })
-
- function next () {
- if (!children || !pkg || !realpath) return
-
- // ignore devDependencies. Just leave them where they are.
- children = children.filter(function (c) {
- return !pkg.devDependencies.hasOwnProperty(c)
- })
-
- pkg.realPath = realpath
- if (pkg.realPath !== pkg.path) children = []
- var d = path.resolve(dir, "node_modules")
- asyncMap(children, function (child, cb) {
- readInstalled(path.resolve(d, child), counter, pkg, cb)
- }, function (er) {
- cb(er, pkg, counter)
- })
- }
-}
-
-function whoDepends (pkg) {
- var start = pkg.parent || pkg
- return whoDepends_(pkg, [], start)
-}
-
-function whoDepends_ (pkg, who, test) {
- if (test !== pkg &&
- test.dependencies[pkg.name] &&
- test.family[pkg.name] === pkg) {
- who.push(test)
- }
- Object.keys(test.children).forEach(function (n) {
- whoDepends_(pkg, who, test.children[n])
- })
- return who
+ var hoistTo = earliestInstallable(tree, tree.parent, child.package)
+ if (hoistTo) {
+ tree.children = without(tree.children, child)
+ hoistTo.children.push(child)
+ child.fromPath = child.path
+ child.path = path.resolve(hoistTo.path, 'node_modules', child.package.name)
+ child.parent = hoistTo
+ diff.push(['move', child])
+ chain([
+ [recalculateMetadata, hoistTo, log],
+ [hoistChildren, child, diff]
+ ], done)
+ } else {
+ done()
+ }
+ }, next)
}
diff --git a/lib/fetch-package-metadata.js b/lib/fetch-package-metadata.js
index f5feae3b9..fb41feea5 100644
--- a/lib/fetch-package-metadata.js
+++ b/lib/fetch-package-metadata.js
@@ -187,7 +187,10 @@ module.exports.addShrinkwrap = function addShrinkwrap (pkg, next) {
entry.resume()
})
untar.on('end', function () {
- if (!foundShrinkwrap) next(null, pkg)
+ if (!foundShrinkwrap) {
+ pkg._shrinkwrap = null
+ next(null, pkg)
+ }
})
}
diff --git a/lib/install.js b/lib/install.js
index 4ab248a49..3bd86f2b2 100644
--- a/lib/install.js
+++ b/lib/install.js
@@ -1,3 +1,4 @@
+'use strict'
// npm install <pkg> <pkg> <pkg>
//
// See doc/install.md for more description
@@ -12,23 +13,25 @@
// added, and then that's passed to the next generation of installation.
module.exports = install
-
-install.usage = "npm install"
- + "\nnpm install <pkg>"
- + "\nnpm install <pkg>@<tag>"
- + "\nnpm install <pkg>@<version>"
- + "\nnpm install <pkg>@<version range>"
- + "\nnpm install <folder>"
- + "\nnpm install <tarball file>"
- + "\nnpm install <tarball url>"
- + "\nnpm install <git:// url>"
- + "\nnpm install <github username>/<github project>"
- + "\n\nCan specify one or more: npm install ./foo.tgz bar@stable /some/folder"
- + "\nIf no argument is supplied and ./npm-shrinkwrap.json is "
- + "\npresent, installs dependencies specified in the shrinkwrap."
- + "\nOtherwise, installs dependencies from ./package.json."
+module.exports.Installer = Installer
+
+install.usage = 'npm install' +
+ '\nnpm install <pkg>' +
+ '\nnpm install <pkg>@<tag>' +
+ '\nnpm install <pkg>@<version>' +
+ '\nnpm install <pkg>@<version range>' +
+ '\nnpm install <folder>' +
+ '\nnpm install <tarball file>' +
+ '\nnpm install <tarball url>' +
+ '\nnpm install <git:// url>' +
+ '\nnpm install <github username>/<github project>' +
+ '\n\nCan specify one or more: npm install ./foo.tgz bar@stable /some/folder' +
+ '\nIf no argument is supplied and ./npm-shrinkwrap.json is ' +
+ '\npresent, installs dependencies specified in the shrinkwrap.' +
+ '\nOtherwise, installs dependencies from ./package.json.'
install.completion = function (opts, cb) {
+ validate('OF', arguments)
// install can complete to a folder with a package.json, or any package.
// if it has a slash, then it's gotta be a folder
// if it starts with https?://, then just give up, because it's a url
@@ -42,12 +45,12 @@ install.completion = function (opts, cb) {
// is a folder containing a package.json file. If that is not the
// case we return 0 matches, which will trigger the default bash
// complete.
- var lastSlashIdx = opts.partialWord.lastIndexOf("/")
+ var lastSlashIdx = opts.partialWord.lastIndexOf('/')
var partialName = opts.partialWord.slice(lastSlashIdx + 1)
var partialPath = opts.partialWord.slice(0, lastSlashIdx)
- if (partialPath === "") partialPath = "/"
+ if (partialPath === '') partialPath = '/'
- function annotatePackageDirMatch (sibling, cb) {
+ var annotatePackageDirMatch = function (sibling, cb) {
var fullPath = path.join(partialPath, sibling)
if (sibling.slice(0, partialName.length) !== partialName) {
return cb(null, null) // not name match
@@ -59,7 +62,7 @@ install.completion = function (opts, cb) {
null,
{
fullPath: fullPath,
- isPackage: contents.indexOf("package.json") !== -1
+ isPackage: contents.indexOf('package.json') !== -1
}
)
})
@@ -86,1110 +89,396 @@ install.completion = function (opts, cb) {
cb()
}
-var npm = require("./npm.js")
- , semver = require("semver")
- , readJson = require("read-package-json")
- , readInstalled = require("read-installed")
- , log = require("npmlog")
- , path = require("path")
- , fs = require("graceful-fs")
- , writeFileAtomic = require("write-file-atomic")
- , cache = require("./cache.js")
- , asyncMap = require("slide").asyncMap
- , chain = require("slide").chain
- , url = require("url")
- , mkdir = require("mkdirp")
- , lifecycle = require("./utils/lifecycle.js")
- , archy = require("archy")
- , npmInstallChecks = require("npm-install-checks")
- , sortedObject = require("sorted-object")
- , mapToRegistry = require("./utils/map-to-registry.js")
- , npa = require("npm-package-arg")
- , inflight = require("inflight")
- , locker = require("./utils/locker.js")
- , lock = locker.lock
- , unlock = locker.unlock
- , warnStrict = require("./utils/warn-deprecated.js")("engineStrict")
- , warnPeers = require("./utils/warn-deprecated.js")("peerDependencies")
-
-function install (args, cb_) {
- var hasArguments = !!args.length
-
- function cb (er, installed) {
- if (er) return cb_(er)
-
- validateInstall(where, function (er, problem) {
- if (er) return cb_(er)
-
- if (problem) {
- var peerInvalidError = new Error("The package " + problem.name +
- " does not satisfy its siblings' peerDependencies requirements!")
- peerInvalidError.code = "EPEERINVALID"
- peerInvalidError.packageName = problem.name
- peerInvalidError.peersDepending = problem.peersDepending
- return cb(peerInvalidError)
+// system packages
+var fs = require('fs')
+var path = require('path')
+
+// dependencies
+var log = require('npmlog')
+var readPackageTree = require('read-package-tree')
+var chain = require('slide').chain
+var asyncMap = require('slide').asyncMap
+var archy = require('archy')
+var mkdirp = require('mkdirp')
+var rimraf = require('rimraf')
+var clone = require('lodash.clonedeep')
+var iferr = require('iferr')
+var validate = require('aproba')
+
+// npm internal utils
+var npm = require('./npm.js')
+var locker = require('./utils/locker.js')
+var lock = locker.lock
+var unlock = locker.unlock
+
+// install specific libraries
+var inflateShrinkwrap = require('./install/inflate-shrinkwrap.js')
+var recalculateMetadata = require('./install/deps.js').recalculateMetadata
+var loadDeps = require('./install/deps.js').loadDeps
+var loadDevDeps = require('./install/deps.js').loadDevDeps
+var loadRequestedDeps = require('./install/deps.js').loadRequestedDeps
+var loadExtraneous = require('./install/deps.js').loadExtraneous
+var pruneTree = require('./install/prune-tree.js')
+var diffTrees = require('./install/diff-trees.js')
+var decomposeActions = require('./install/decompose-actions.js')
+var validateTree = require('./install/validate-tree.js')
+var saveRequested = require('./install/save.js').saveRequested
+var getSaveType = require('./install/save.js').getSaveType
+var doSerialActions = require('./install/actions.js').doSerial
+var doParallelActions = require('./install/actions.js').doParallel
+var doOneAction = require('./install/actions.js').doOne
+
+function unlockCB (lockPath, name, cb) {
+ validate('SSF', arguments)
+ return function (installEr) {
+ var args = arguments
+ try {
+ unlock(lockPath, name, reportErrorAndReturn)
+ } catch (unlockEx) {
+ process.nextTick(function () {
+ reportErrorAndReturn(unlockEx)
+ })
+ }
+ function reportErrorAndReturn (unlockEr) {
+ if (installEr) {
+ if (unlockEr && unlockEr.code !== 'ENOTLOCKED') {
+ log.warn('unlock' + name, unlockEr)
+ }
+ return cb.apply(null, args)
}
-
- var tree = treeify(installed || [])
- , pretty = prettify(tree, installed).trim()
-
- if (pretty) console.log(pretty)
- save(where, installed, tree, pretty, hasArguments, cb_)
- })
+ if (unlockEr) return cb(unlockEr)
+ return cb.apply(null, args)
+ }
}
+}
- // the /path/to/node_modules/..
- var where = path.resolve(npm.dir, "..")
-
- // internal api: install(where, what, cb)
- if (arguments.length === 3) {
- where = args
- args = [].concat(cb_) // pass in [] to do default dep-install
- cb_ = arguments[2]
- log.verbose("install", "where, what", [where, args])
+function install (where, args, cb) {
+ if (!cb) {
+ cb = args
+ args = where
+ where = null
+ }
+ if (!where) {
+ where = npm.config.get('global')
+ ? path.resolve(npm.globalDir, '..')
+ : npm.prefix
}
+ validate('SAF', [where, args, cb])
+ // the /path/to/node_modules/..
+ var dryrun = !!npm.config.get('dry-run')
- if (!npm.config.get("global")) {
+ if (!npm.config.get('global')) {
args = args.filter(function (a) {
- return path.resolve(a) !== where
+ return path.resolve(a) !== npm.prefix
})
}
- mkdir(where, function (er) {
- if (er) return cb(er)
- // install dependencies locally by default,
- // or install current folder globally
- if (!args.length) {
- var opt = { dev: npm.config.get("dev") || !npm.config.get("production") }
-
- if (npm.config.get("global")) args = ["."]
- else return readDependencies(null, where, opt, function (er, data) {
- if (er) {
- log.error("install", "Couldn't read dependencies")
- return cb(er)
- }
- var deps = Object.keys(data.dependencies || {})
- log.verbose("install", "where, deps", [where, deps])
-
- // FIXME: Install peerDependencies as direct dependencies, but only at
- // the top level. Should only last until peerDependencies are nerfed to
- // no longer implicitly install themselves.
- var peers = []
- Object.keys(data.peerDependencies || {}).forEach(function (dep) {
- if (!data.dependencies[dep]) {
- log.verbose(
- "install",
- "peerDependency", dep, "wasn't going to be installed; adding"
- )
- warnPeers([
- "The peer dependency "+dep+" included from "+data.name+" will no",
- "longer be automatically installed to fulfill the peerDependency ",
- "in npm 3+. Your application will need to depend on it explicitly."
- ], dep+","+data.name)
- peers.push(dep)
- }
- })
- log.verbose("install", "where, peers", [where, peers])
-
- var context = { family: {}
- , ancestors: {}
- , explicit: false
- , parent: data
- , root: true
- , wrap: null }
-
- if (data.name === path.basename(where) &&
- path.basename(path.dirname(where)) === "node_modules") {
- // Only include in ancestry if it can actually be required.
- // Otherwise, it does not count.
- context.family[data.name] =
- context.ancestors[data.name] = data.version
- }
-
- installManyTop(deps.map(function (dep) {
- var target = data.dependencies[dep]
- return dep + "@" + target
- }).concat(peers.map(function (dep) {
- var target = data.peerDependencies[dep]
- return dep + "@" + target
- })), where, context, function(er, results) {
- if (er || npm.config.get("production")) return cb(er, results)
- lifecycle(data, "prepublish", where, function(er) {
- return cb(er, results)
- })
- })
- })
- }
-
- // initial "family" is the name:version of the root, if it's got
- // a package.json file.
- var jsonPath = path.resolve(where, "package.json")
- log.verbose('install', 'initial load of', jsonPath)
- readJson(jsonPath, log.warn, function (er, data) {
- if (er
- && er.code !== "ENOENT"
- && er.code !== "ENOTDIR") return cb(er)
- if (er) data = null
- var context = { family: {}
- , ancestors: {}
- , explicit: true
- , parent: data
- , root: true
- , wrap: null }
- if (data && data.name === path.basename(where) &&
- path.basename(path.dirname(where)) === "node_modules") {
- context.family[data.name] = context.ancestors[data.name] = data.version
- }
- var fn = npm.config.get("global") ? installMany : installManyTop
- fn(args, where, context, cb)
- })
- })
+ new Installer(where, dryrun, args).run(cb)
}
-function validateInstall (where, cb) {
- var jsonPath = path.resolve(where, 'package.json')
- log.verbose('validateInstall', 'loading', jsonPath, 'for validation')
- readJson(jsonPath, log.warn, function (er, data) {
- if (er
- && er.code !== 'ENOENT'
- && er.code !== 'ENOTDIR') return cb(er)
-
- if (data && data.engineStrict) {
- warnStrict([
- "Per-package engineStrict (found in this package's package.json) ",
- "won't be used in npm 3+. Use the config setting `engine-strict` instead."
- ], data.name)
- }
-
- readInstalled(where, { log: log.warn, dev: true }, function (er, data) {
- if (er) return cb(er)
-
- cb(null, findPeerInvalid_(data.dependencies, []))
- })
- })
+function Installer (where, dryrun, args) {
+ validate('SBA', arguments)
+ this.where = where
+ this.dryrun = dryrun
+ this.args = args
+ this.currentTree = null
+ this.idealTree = null
+ this.differences = []
+ this.todo = []
+ this.progress = {}
+ this.noPackageJsonOk = !!args.length
+ this.topLevelLifecycles = !args.length
+ this.npat = npm.config.get('npat')
+ this.dev = !npm.config.get('production')
}
-
-function findPeerInvalid_ (packageMap, fpiList) {
- if (fpiList.indexOf(packageMap) !== -1)
- return undefined
-
- fpiList.push(packageMap)
-
- for (var packageName in packageMap) {
- var pkg = packageMap[packageName]
-
- if (pkg.peerInvalid) {
- var peersDepending = {}
- for (var peerName in packageMap) {
- var peer = packageMap[peerName]
- if (peer.peerDependencies && peer.peerDependencies[packageName]) {
- peersDepending[peer.name + "@" + peer.version] =
- peer.peerDependencies[packageName]
- }
- }
- return { name: pkg.name, peersDepending: peersDepending }
- }
-
- if (pkg.dependencies) {
- var invalid = findPeerInvalid_(pkg.dependencies, fpiList)
- if (invalid)
- return invalid
+Installer.prototype = {}
+
+Installer.prototype.run = function (cb) {
+ validate('F', arguments)
+ this.newTracker(log, 'loadCurrentTree', 4)
+ this.newTracker(log, 'loadIdealTree', 12)
+ this.newTracker(log, 'generateActionsToTake')
+ this.newTracker(log, 'executeActions', 8)
+ this.newTracker(log, 'runTopLevelLifecycles', 2)
+
+ var steps = []
+ steps.push(
+ [this, this.loadCurrentTree],
+ [this, this.finishTracker, 'loadCurrentTree'],
+
+ [this, this.loadIdealTree],
+ [this, this.finishTracker, 'loadIdealTree'],
+
+ [this, this.debugTree, 'currentTree', 'currentTree'],
+ [this, this.debugTree, 'idealTree', 'idealTree'],
+
+ [this, this.generateActionsToTake],
+ [this, this.finishTracker, 'generateActionsToTake'],
+
+ [this, this.debugActions, 'diffTrees', 'differences'],
+ [this, this.debugActions, 'decomposeActions', 'todo'])
+ if (!this.dryrun) {
+ steps.push(
+ [this, this.executeActions],
+ [this, this.finishTracker, 'executeActions'],
+
+ [this, this.runTopLevelLifecycles],
+ [this, this.finishTracker, 'runTopLevelLifecycles'])
+
+ if (getSaveType(this.args)) {
+ steps.push(
+ [this, this.saveToDependencies])
}
}
+ steps.push(
+ [this, this.printInstalled])
- return null
+ chain(steps, cb)
}
-// reads dependencies for the package at "where". There are several cases,
-// depending on our current state and the package's configuration:
-//
-// 1. If "context" is specified, then we examine the context to see if there's a
-// shrinkwrap there. In that case, dependencies are read from the shrinkwrap.
-// 2. Otherwise, if an npm-shrinkwrap.json file is present, dependencies are
-// read from there.
-// 3. Otherwise, dependencies come from package.json.
-//
-// Regardless of which case we fall into, "cb" is invoked with a first argument
-// describing the full package (as though readJson had been used) but with
-// "dependencies" read as described above. The second argument to "cb" is the
-// shrinkwrap to use in processing this package's dependencies, which may be
-// "wrap" (in case 1) or a new shrinkwrap (in case 2).
-function readDependencies (context, where, opts, cb) {
- var wrap = context ? context.wrap : null
-
- var jsonPath = path.resolve(where, 'package.json')
- log.verbose('readDependencies', 'loading dependencies from', jsonPath)
- readJson(jsonPath, log.warn, function (er, data) {
- if (er && er.code === "ENOENT") er.code = "ENOPACKAGEJSON"
- if (er) return cb(er)
-
- if (opts && opts.dev) {
- if (!data.dependencies) data.dependencies = {}
- Object.keys(data.devDependencies || {}).forEach(function (k) {
- if (data.dependencies[k]) {
- log.warn("package.json", "Dependency '%s' exists in both dependencies " +
- "and devDependencies, using '%s@%s' from dependencies",
- k, k, data.dependencies[k])
- } else {
- data.dependencies[k] = data.devDependencies[k]
- }
- })
- }
-
- if (!npm.config.get("optional") && data.optionalDependencies) {
- Object.keys(data.optionalDependencies).forEach(function (d) {
- delete data.dependencies[d]
- })
- }
-
- // User has opted out of shrinkwraps entirely
- if (npm.config.get("shrinkwrap") === false)
- return cb(null, data, null)
-
- if (wrap) {
- log.verbose("readDependencies: using existing wrap", [where, wrap])
- var rv = {}
- Object.keys(data).forEach(function (key) {
- rv[key] = data[key]
- })
- rv.dependencies = {}
- Object.keys(wrap).forEach(function (key) {
- log.verbose("from wrap", [key, wrap[key]])
- rv.dependencies[key] = readWrap(wrap[key])
- })
- log.verbose("readDependencies returned deps", rv.dependencies)
- return cb(null, rv, wrap)
- }
-
- var wrapfile = path.resolve(where, "npm-shrinkwrap.json")
-
- fs.readFile(wrapfile, "utf8", function (er, wrapjson) {
- if (er) return cb(null, data, null)
-
- log.verbose("readDependencies", "npm-shrinkwrap.json is overriding dependencies")
- var newwrap
- try {
- newwrap = JSON.parse(wrapjson)
- } catch (ex) {
- return cb(ex)
- }
-
- log.info("shrinkwrap", "file %j", wrapfile)
- var rv = {}
- Object.keys(data).forEach(function (key) {
- rv[key] = data[key]
- })
- rv.dependencies = {}
- Object.keys(newwrap.dependencies || {}).forEach(function (key) {
- rv.dependencies[key] = readWrap(newwrap.dependencies[key])
- })
-
- // fold in devDependencies if not already present, at top level
- if (opts && opts.dev) {
- Object.keys(data.devDependencies || {}).forEach(function (k) {
- rv.dependencies[k] = rv.dependencies[k] || data.devDependencies[k]
- })
- }
-
- log.verbose("readDependencies returned deps", rv.dependencies)
- return cb(null, rv, newwrap.dependencies)
- })
- })
+Installer.prototype.newTracker = function (tracker, name, size) {
+ validate('OS', [tracker, name])
+ if (size) validate('N', [size])
+ this.progress[name] = tracker.newGroup(name, size)
}
-function readWrap (w) {
- return (w.resolved) ? w.resolved
- : (w.from && url.parse(w.from).protocol) ? w.from
- : w.version
+Installer.prototype.finishTracker = function (tracker, cb) {
+ validate('SF', arguments)
+ this.progress[tracker].finish()
+ cb()
}
-// if the -S|--save option is specified, then write installed packages
-// as dependencies to a package.json file.
-function save (where, installed, tree, pretty, hasArguments, cb) {
- if (!hasArguments ||
- !npm.config.get("save") &&
- !npm.config.get("save-dev") &&
- !npm.config.get("save-optional") ||
- npm.config.get("global")) {
- return cb(null, installed, tree, pretty)
- }
-
- var saveBundle = npm.config.get("save-bundle")
- var savePrefix = npm.config.get("save-prefix")
-
- // each item in the tree is a top-level thing that should be saved
- // to the package.json file.
- // The relevant tree shape is { <folder>: {what:<pkg>} }
- var saveTarget = path.resolve(where, "package.json")
-
- asyncMap(Object.keys(tree), function (k, cb) {
- // if "from" is remote, git, or hosted, then save that instead.
- var t = tree[k]
- , f = npa(t.from)
- , a = npa(t.what)
- , w = [a.name, a.spec]
-
-
- fs.stat(t.from, function (er){
- if (!er) {
- w[1] = "file:" + t.from
- } else if (['hosted', 'git', 'remote'].indexOf(f.type) !== -1) {
- w[1] = t.from
- }
- cb(null, [w])
- })
- }
- , function (er, arr) {
- var things = arr.reduce(function (set, k) {
- var rangeDescriptor = semver.valid(k[1], true) &&
- semver.gte(k[1], "0.1.0", true) &&
- !npm.config.get("save-exact")
- ? savePrefix : ""
- set[k[0]] = rangeDescriptor + k[1]
- return set
- }, {})
-
-
- // don't use readJson, because we don't want to do all the other
- // tricky npm-specific stuff that's in there.
- fs.readFile(saveTarget, function (er, data) {
- // ignore errors here, just don't save it.
- try {
- data = JSON.parse(data.toString("utf8"))
- } catch (ex) {
- er = ex
- }
-
- if (er) {
- return cb(null, installed, tree, pretty)
- }
-
- var deps = npm.config.get("save-optional") ? "optionalDependencies"
- : npm.config.get("save-dev") ? "devDependencies"
- : "dependencies"
-
- if (saveBundle) {
- var bundle = data.bundleDependencies || data.bundledDependencies
- delete data.bundledDependencies
- if (!Array.isArray(bundle)) bundle = []
- data.bundleDependencies = bundle.sort()
- }
-
- log.verbose("save", "saving", things)
- data[deps] = data[deps] || {}
- Object.keys(things).forEach(function (t) {
- data[deps][t] = things[t]
- if (saveBundle) {
- var i = bundle.indexOf(t)
- if (i === -1) bundle.push(t)
- data.bundleDependencies = bundle.sort()
- }
- })
-
- data[deps] = sortedObject(data[deps])
-
- log.silly("save", "writing", saveTarget)
- data = JSON.stringify(data, null, 2) + "\n"
- writeFileAtomic(saveTarget, data, function (er) {
- cb(er, installed, tree, pretty)
- })
- })
- })
+Installer.prototype.loadCurrentTree = function (cb) {
+ validate('F', arguments)
+ chain([
+ [this, this.readLocalPackageData],
+ [this, this.normalizeTree, log.newGroup('normalizeTree')]
+ ], cb)
}
+Installer.prototype.loadIdealTree = function (cb) {
+ validate('F', arguments)
+ this.newTracker(this.progress.loadIdealTree, 'cloneCurrentTree')
+ this.newTracker(this.progress.loadIdealTree, 'loadShrinkwrap')
+ this.newTracker(this.progress.loadIdealTree, 'loadAllDepsIntoIdealTree', 10)
+ chain([
+ [this, this.cloneCurrentTreeToIdealTree],
+ [this, this.finishTracker, 'cloneCurrentTree'],
+ [this, this.loadShrinkwrap],
+ [this, this.finishTracker, 'loadShrinkwrap'],
+ [this, this.loadAllDepsIntoIdealTree],
+ [this, function (next) { next(pruneTree(this.idealTree)) } ]
+ ], cb)
+}
-// Outputting *all* the installed modules is a bit confusing,
-// because the length of the path does not make it clear
-// that the submodules are not immediately require()able.
-// TODO: Show the complete tree, ls-style, but only if --long is provided
-function prettify (tree, installed) {
- function red (set, kv) {
- set[kv[0]] = kv[1]
- return set
- }
-
- if (npm.config.get("json")) {
- tree = Object.keys(tree).map(function (p) {
- if (!tree[p]) return null
- var what = npa(tree[p].what)
- , name = what.name
- , version = what.spec
- , o = { name: name, version: version, from: tree[p].from }
- o.dependencies = tree[p].children.map(function P (dep) {
- var what = npa(dep.what)
- , name = what.name
- , version = what.spec
- , o = { version: version, from: dep.from }
- o.dependencies = dep.children.map(P).reduce(red, {})
- return [name, o]
- }).reduce(red, {})
- return o
- })
-
- return JSON.stringify(tree, null, 2)
+Installer.prototype.loadAllDepsIntoIdealTree = function (cb) {
+ validate('F', arguments)
+ var saveDeps = getSaveType(this.args)
+
+ var dev = npm.config.get('dev') || !npm.config.get('production')
+
+ var cg = this.progress.loadAllDepsIntoIdealTree
+ var installNewModules = !!this.args.length
+ var steps = []
+
+ if (installNewModules) {
+ steps.push(
+ [loadRequestedDeps, this.args, this.idealTree, saveDeps, cg.newGroup('loadRequestedDeps')])
+ } else {
+ steps.push(
+ [loadDeps, this.idealTree, cg.newGroup('loadDeps')])
+ if (dev) {
+ steps.push(
+ [loadDevDeps, this.idealTree, cg.newGroup('loadDevDeps')])
+ }
}
- if (npm.config.get("parseable")) return parseable(installed)
-
- return Object.keys(tree).map(function (p) {
- return archy({ label: tree[p].what + " " + p
- , nodes: (tree[p].children || []).map(function P (c) {
- if (npm.config.get("long")) {
- return { label: c.what, nodes: c.children.map(P) }
- }
- var g = c.children.map(function (g) {
- return g.what
- }).join(", ")
- if (g) g = " (" + g + ")"
- return c.what + g
- })
- }, "", { unicode: npm.config.get("unicode") })
- }).join("\n")
+ steps.push(
+ [loadExtraneous, this.idealTree, cg.newGroup('loadExtraneous')])
+ chain(steps, cb)
}
-function parseable (installed) {
- var long = npm.config.get("long")
- , cwd = process.cwd()
- return installed.map(function (item) {
- return path.resolve(cwd, item[1]) +
- ( long ? ":" + item[0] : "" )
- }).join("\n")
+Installer.prototype.generateActionsToTake = function (cb) {
+ validate('F', arguments)
+ var cg = this.progress.generateActionsToTake
+ chain([
+ [validateTree, this.idealTree, cg.newGroup('validateTree')],
+ [diffTrees, this.currentTree, this.idealTree, this.differences, cg.newGroup('diffTrees')],
+ [decomposeActions, this.differences, this.todo, cg.newGroup('decomposeActions')]
+ ], cb)
}
-function treeify (installed) {
- // each item is [what, where, parent, parentDir]
- // If no parent, then report it.
- // otherwise, tack it into the parent's children list.
- // If the parent isn't a top-level then ignore it.
- var whatWhere = installed.reduce(function (l, r) {
- var parentDir = r[3]
- , parent = r[2]
- , where = r[1]
- , what = r[0]
- , from = r[4]
- l[where] = { parentDir: parentDir
- , parent: parent
- , children: []
- , where: where
- , what: what
- , from: from }
- return l
- }, {})
+Installer.prototype.executeActions = function (cb) {
+ validate('F', arguments)
+ var todo = this.todo
+ var cg = this.progress.executeActions
+
+ var node_modules = path.resolve(this.where, 'node_modules')
+ var staging = path.resolve(node_modules, '.staging')
+ var steps = []
+ var trackLifecycle = cg.newGroup('lifecycle')
+
+ cb = unlockCB(node_modules, '.staging', cb)
+
+ steps.push(
+ [doParallelActions, 'fetch', staging, todo, cg.newGroup('fetch', 10)],
+ [lock, node_modules, '.staging'],
+ [rimraf, staging],
+ [mkdirp, staging],
+ [doParallelActions, 'extract', staging, todo, cg.newGroup('extract', 10)],
+ [doParallelActions, 'preinstall', staging, todo, trackLifecycle.newGroup('preinstall')],
+ [doSerialActions, 'remove', staging, todo, cg.newGroup('remove')],
+ [doSerialActions, 'move', staging, todo, cg.newGroup('move')],
+ [doSerialActions, 'finalize', staging, todo, cg.newGroup('finalize')],
+ [doParallelActions, 'build', staging, todo, trackLifecycle.newGroup('build')],
+ [doSerialActions, 'install', staging, todo, trackLifecycle.newGroup('install')],
+ [doSerialActions, 'postinstall', staging, todo, trackLifecycle.newGroup('postinstall')])
+ if (this.npat) {
+ steps.push(
+ [doParallelActions, 'test', staging, todo, trackLifecycle.newGroup('npat')])
+ }
+ steps.push(
+ // TODO add check that .staging is empty? DUBIOUS
+ [rimraf, staging])
- // log.warn("install", whatWhere, "whatWhere")
- return Object.keys(whatWhere).reduce(function (l, r) {
- var ww = whatWhere[r]
- //log.warn("r, ww", [r, ww])
- if (!ww.parent) {
- l[r] = ww
- } else {
- var p = whatWhere[ww.parentDir]
- if (p) p.children.push(ww)
- else l[r] = ww
- }
- return l
- }, {})
+ chain(steps, cb)
}
-
-// just like installMany, but also add the existing packages in
-// where/node_modules to the family object.
-function installManyTop (what, where, context, cb_) {
- function cb (er, d) {
- if (context.explicit || er) return cb_(er, d)
- // since this wasn't an explicit install, let's build the top
- // folder, so that `npm install` also runs the lifecycle scripts.
- npm.commands.build([where], false, true, function (er) {
- return cb_(er, d)
- })
+Installer.prototype.runTopLevelLifecycles = function (cb) {
+ validate('F', arguments)
+ var steps = []
+ var trackLifecycle = this.progress.runTopLevelLifecycles
+ if (!this.topLevelLifecycles) {
+ trackLifecycle.finish()
+ return cb()
}
- if (context.explicit) return next()
-
- var jsonPath = path.join(where, 'package.json')
- log.verbose('installManyTop', 'reading for lifecycle', jsonPath)
- readJson(jsonPath, log.warn, function (er, data) {
- if (er) return next(er)
- lifecycle(data, "preinstall", where, next)
- })
-
- function next (er) {
- if (er) return cb(er)
- installManyTop_(what, where, context, cb)
+ steps.push(
+ [doOneAction, 'preinstall', this.idealTree.path, this.idealTree, trackLifecycle.newGroup('preinstall:.')],
+ [doOneAction, 'build', this.idealTree.path, this.idealTree, trackLifecycle.newGroup('build:.')],
+ [doOneAction, 'postinstall', this.idealTree.path, this.idealTree, trackLifecycle.newGroup('postinstall:.')])
+ if (this.npat) {
+ steps.push(
+ [doOneAction, 'test', this.idealTree.path, this.idealTree, trackLifecycle.newGroup('npat:.')])
}
+ if (this.dev) {
+ steps.push(
+ [doOneAction, 'prepublish', this.idealTree.path, this.idealTree, trackLifecycle.newGroup('prepublish')])
+ }
+ chain(steps, cb)
}
-function installManyTop_ (what, where, context, cb) {
- var nm = path.resolve(where, "node_modules")
-
- fs.readdir(nm, function (er, pkgs) {
- if (er) return installMany(what, where, context, cb)
+Installer.prototype.saveToDependencies = function (cb) {
+ validate('F', arguments)
+ saveRequested(this.args, this.idealTree, cb)
+}
- var scopes = [], unscoped = []
- pkgs.filter(function (p) {
- return !p.match(/^[\._-]/)
- }).forEach(function (p) {
- // @names deserve deeper investigation
- if (p[0] === "@") {
- scopes.push(p)
- }
- else {
- unscoped.push(p)
+Installer.prototype.readLocalPackageData = function (cb) {
+ validate('F', arguments)
+ var self = this
+ readPackageTree(this.where, iferr(cb, function (currentTree) {
+ self.currentTree = currentTree
+ if (!self.noPackageJsonOk && !currentTree.package) {
+ log.error('install', "Couldn't read dependencies")
+ var er = new Error("ENOENT, open '" + path.join(self.where, 'package.json') + "'")
+ er.code = 'ENOPACKAGEJSON'
+ er.errno = 34
+ return cb(er)
+ }
+ if (!currentTree.package) currentTree.package = {}
+ if (currentTree.package._shrinkwrap) return cb()
+ fs.readFile(path.join(self.where, 'npm-shrinkwrap.json'), {encoding: 'utf8'}, function (er, data) {
+ if (er) return cb()
+ try {
+ currentTree.package._shrinkwrap = JSON.parse(data)
+ } catch (ex) {
+ return cb(ex)
}
+ return cb()
})
+ }))
- maybeScoped(scopes, nm, function (er, scoped) {
- if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
- // recombine unscoped with @scope/package packages
- asyncMap(unscoped.concat(scoped).map(function (p) {
- return path.resolve(nm, p, "package.json")
- }), function (jsonPath, cb) {
- log.verbose('installManyTop', 'reading scoped package data from', jsonPath)
- readJson(jsonPath, log.info, function (er, data) {
- if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
- if (er) return cb(null, [])
- cb(null, [[data.name, data.version]])
- })
- }, function (er, packages) {
- // if there's nothing in node_modules, then don't freak out.
- if (er) packages = []
- // add all the existing packages to the family list.
- // however, do not add to the ancestors list.
- packages.forEach(function (p) {
- context.family[p[0]] = p[1]
- })
- installMany(what, where, context, cb)
- })
- })
- })
}
-function maybeScoped (scopes, where, cb) {
- // find packages in scopes
- asyncMap(scopes, function (scope, cb) {
- fs.readdir(path.resolve(where, scope), function (er, scoped) {
- if (er) return cb(er)
- var paths = scoped.map(function (p) {
- return path.join(scope, p)
- })
- cb(null, paths)
- })
- }, cb)
+Installer.prototype.cloneCurrentTreeToIdealTree = function (cb) {
+ validate('F', arguments)
+ this.idealTree = clone(this.currentTree)
+ cb()
}
-function installMany (what, where, context, cb) {
- // readDependencies takes care of figuring out whether the list of
- // dependencies we'll iterate below comes from an existing shrinkwrap from a
- // parent level, a new shrinkwrap at this level, or package.json at this
- // level, as well as which shrinkwrap (if any) our dependencies should use.
- var opt = { dev: npm.config.get("dev") }
- readDependencies(context, where, opt, function (er, data, wrap) {
- if (er) data = {}
-
- var parent = data
-
- // if we're explicitly installing "what" into "where", then the shrinkwrap
- // for "where" doesn't apply. This would be the case if someone were adding
- // a new package to a shrinkwrapped package. (data.dependencies will not be
- // used here except to indicate what packages are already present, so
- // there's no harm in using that.)
- if (context.explicit) wrap = null
-
- var deps = data.dependencies || {}
- var devDeps = data.devDependencies || {}
-
- // what is a list of things.
- // resolve each one.
- asyncMap( what
- , targetResolver(where, context, deps, devDeps)
- , function (er, targets) {
-
- if (er) return cb(er)
-
- var bundled = data.bundleDependencies || data.bundledDependencies || []
- // only take the hit for readInstalled if there are probably bundled
- // dependencies to read
- if (bundled.length) {
- readInstalled(where, { dev: true }, andBuildResolvedTree)
- } else {
- andBuildResolvedTree()
- }
-
- function andBuildResolvedTree (er, current) {
- if (er) return cb(er)
-
- // each target will be a data object corresponding
- // to a package, folder, or whatever that is in the cache now.
- var newPrev = Object.create(context.family)
- , newAnc = Object.create(context.ancestors)
-
- if (!context.root) {
- newAnc[data.name] = data.version
- }
- bundled.forEach(function (bundle) {
- var bundleData = current.dependencies[bundle]
- if ((!bundleData || !bundleData.version) && current.devDependencies) {
- log.verbose(
- 'installMany', bundle, 'was bundled with',
- data.name + '@' + data.version +
- ", but wasn't found in dependencies. Trying devDependencies"
- )
- bundleData = current.devDependencies[bundle]
- }
-
- if (!bundleData || !bundleData.version) {
- log.warn(
- 'installMany', bundle, 'was bundled with',
- data.name + '@' + data.version +
- ", but bundled package wasn't found in unpacked tree"
- )
- } else {
- log.verbose(
- 'installMany', bundle + '@' + bundleData.version,
- 'was bundled with', data.name + '@' + data.version
- )
- newPrev[bundle] = bundleData.version
- }
- })
- targets.forEach(function (t) {
- newPrev[t.name] = t.version
- })
- log.silly("install resolved", targets)
- targets.filter(function (t) { return t }).forEach(function (t) {
- log.info("install", "%s into %s", t._id, where)
- })
- asyncMap(targets, function (target, cb) {
- log.info("installOne", target._id)
- var wrapData = wrap ? wrap[target.name] : null
- var newWrap = wrapData && wrapData.dependencies
- ? wrap[target.name].dependencies || {}
- : null
- var newContext = { family: newPrev
- , ancestors: newAnc
- , parent: parent
- , explicit: false
- , wrap: newWrap }
- installOne(target, where, newContext, cb)
- }, cb)
- }
- })
- })
+Installer.prototype.loadShrinkwrap = function (cb) {
+ validate('F', arguments)
+ if (!this.idealTree.package._shrinkwrap || this.idealTree.package._shrinkwrap.dependencies) return cb()
+ inflateShrinkwrap(this.idealTree, this.idealTree.package._shrinkwrap.dependencies, cb)
}
-function targetResolver (where, context, deps, devDeps) {
- var alreadyInstalledManually = []
- , resolveLeft = 0
- , nm = path.resolve(where, "node_modules")
- , parent = context.parent
- , wrap = context.wrap
-
- if (!context.explicit) readdir(nm)
-
- function readdir(name) {
- resolveLeft++
- fs.readdir(name, function (er, inst) {
- if (er) return resolveLeft--
-
- // don't even mess with non-package looking things
- inst = inst.filter(function (p) {
- if (!p.match(/^[@\._-]/)) return true
- // scoped packages
- readdir(path.join(name, p))
- })
-
- asyncMap(inst, function (pkg, cb) {
- var jsonPath = path.resolve(name, pkg, 'package.json')
- log.verbose('targetResolver', 'reading package data from', jsonPath)
- readJson(jsonPath, log.info, function (er, d) {
- if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
- // error means it's not a package, most likely.
- if (er) return cb(null, [])
-
- // if it's a bundled dep, then assume that anything there is valid.
- // otherwise, make sure that it's a semver match with what we want.
- var bd = parent.bundleDependencies
- var isBundled = bd && bd.indexOf(d.name) !== -1
- var expectedVersion = deps[d.name] || (devDeps && devDeps[d.name]) || "*"
- var currentIsSatisfactory = semver.satisfies(d.version, expectedVersion, true)
- if (isBundled || currentIsSatisfactory || deps[d.name] === d._resolved) {
- return cb(null, d.name)
- }
-
- // see if the package had been previously linked
- fs.lstat(path.resolve(nm, pkg), function(err, s) {
- if (err) return cb(null, [])
- if (s.isSymbolicLink()) {
- return cb(null, d.name)
- }
-
- // something is there, but it's not satisfactory. Clobber it.
- return cb(null, [])
- })
- })
- }, function (er, inst) {
- // this is the list of things that are valid and should be ignored.
- alreadyInstalledManually = alreadyInstalledManually.concat(inst)
- resolveLeft--
- })
- })
- }
-
- var to = 0
- return function resolver (what, cb) {
- if (resolveLeft) return setTimeout(function () {
- resolver(what, cb)
- }, to++)
-
- // now we know what's been installed here manually,
- // or tampered with in some way that npm doesn't want to overwrite.
- if (alreadyInstalledManually.indexOf(npa(what).name) !== -1) {
- log.verbose("already installed", "skipping %s %s", what, where)
- return cb(null, [])
- }
-
- // check for a version installed higher in the tree.
- // If installing from a shrinkwrap, it must match exactly.
- if (context.family[what]) {
- log.verbose('install', what, 'is installed as', context.family[what])
- if (wrap && wrap[what].version === context.family[what]) {
- log.verbose("shrinkwrap", "use existing", what)
- return cb(null, [])
- }
- }
-
- // if it's identical to its parent, then it's probably someone
- // doing `npm install foo` inside of the foo project. Print
- // a warning, and skip it.
- if (parent && parent.name === what && !npm.config.get("force")) {
- log.warn("install", "Refusing to install %s as a dependency of itself"
- , what)
- return cb(null, [])
- }
-
- if (wrap) {
- var name = npa(what).name
- if (wrap[name]) {
- var wrapTarget = readWrap(wrap[name])
- what = name + "@" + wrapTarget
- } else {
- log.verbose("shrinkwrap", "skipping %s (not in shrinkwrap)", what)
- }
- } else if (deps[what]) {
- what = what + "@" + deps[what]
- }
-
- // This is where we actually fetch the package, if it's not already
- // in the cache.
- // If it's a git repo, then we want to install it, even if the parent
- // already has a matching copy.
- // If it's not a git repo, and the parent already has that pkg, then
- // we can skip installing it again.
- var pkgroot = path.resolve(npm.prefix, (parent && parent._from) || "")
- cache.add(what, null, pkgroot, false, function (er, data) {
- if (er && parent && parent.optionalDependencies &&
- parent.optionalDependencies.hasOwnProperty(npa(what).name)) {
- log.warn("optional dep failed, continuing", what)
- log.verbose("optional dep failed, continuing", [what, er])
- return cb(null, [])
+Installer.prototype.normalizeTree = function (log, cb) {
+ validate('OF', arguments)
+ recalculateMetadata(this.currentTree, log, iferr(cb, function (tree) {
+ tree.children.forEach(function (child) {
+ if (child.package._requiredBy.length === 0) {
+ child.package._requiredBy.push('#EXISTING')
}
-
- var type = npa(what).type
- var isGit = type === "git" || type === "hosted"
-
- if (!er &&
- data &&
- !context.explicit &&
- context.family[data.name] === data.version &&
- !npm.config.get("force") &&
- !isGit) {
- log.info("already installed", data.name + "@" + data.version)
- return cb(null, [])
- }
-
-
- if (data && !data._from) data._from = what
- if (er && parent && parent.name) er.parent = parent.name
- return cb(er, data || [])
})
- }
+ cb(null, tree)
+ }))
}
-// we've already decided to install this. if anything's in the way,
-// then uninstall it first.
-function installOne (target, where, context, cb) {
- // the --link flag makes this a "link" command if it's at the
- // the top level.
- var isGit = false
- var type = npa(target._from).type
- if (target && target._from) isGit = type === 'git' || type === 'hosted'
-
- if (where === npm.prefix && npm.config.get("link")
- && !npm.config.get("global") && !isGit) {
- return localLink(target, where, context, cb)
- }
- installOne_(target, where, context, function (er, installedWhat) {
-
- // check if this one is optional to its parent.
- if (er && context.parent && context.parent.optionalDependencies &&
- context.parent.optionalDependencies.hasOwnProperty(target.name)) {
- log.warn("optional dep failed, continuing", target._id)
- log.verbose("optional dep failed, continuing", [target._id, er])
- er = null
- }
-
- cb(er, installedWhat)
+Installer.prototype.printInstalled = function (cb) {
+ validate('F', arguments)
+ log.clearProgress()
+ /*
+ TODO: What we actually want to do here is build a tree of installed modules.
+ Tricky due to the fact that we can have empty layers. Need to scan up to find the next installed module.
+ Since actions include actual link to the point in the tree that we need, we can flag modules
+ as installed.
+ */
+ var self = this
+ this.differences.forEach(function (action) {
+ var mutation = action[0]
+ if (mutation === 'add' || mutation === 'update') mutation = '+'
+ else if (mutation === 'remove') mutation = '-'
+ else if (mutation === 'move') mutation = '>'
+ var child = action[1]
+ var name = child.package.name + '@' + child.package.version
+ console.log(mutation + ' ' + name + ' ' + path.relative(self.where, child.path))
})
-
+ log.showProgress()
+ cb()
}
-function localLink (target, where, context, cb) {
- log.verbose("localLink", target._id)
- var jsonPath = path.resolve(npm.globalDir, target.name , 'package.json')
- var parent = context.parent
-
- log.verbose('localLink', 'reading data to link', target.name, 'from', jsonPath)
- readJson(jsonPath, log.warn, function (er, data) {
- function thenLink () {
- npm.commands.link([target.name], function (er, d) {
- log.silly("localLink", "back from link", [er, d])
- cb(er, [resultList(target, where, parent && parent._id)])
- })
- }
-
- if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
- if (er || data._id === target._id) {
- if (er) {
- install( path.resolve(npm.globalDir, "..")
- , target._id
- , function (er) {
- if (er) return cb(er, [])
- thenLink()
- })
- } else thenLink()
- } else {
- log.verbose("localLink", "install locally (no link)", target._id)
- installOne_(target, where, context, cb)
- }
+Installer.prototype.debugActions = function (name, actionListName, cb) {
+ validate('SSF', arguments)
+ var actionsToLog = this[actionListName]
+ log.silly(name, 'action count', actionsToLog.length)
+ actionsToLog.forEach(function (action) {
+ log.silly(name, action.map(function (value) {
+ return (value && value.package) ? value.package.name + '@' + value.package.version : value
+ }).join(' '))
})
+ cb()
}
-function resultList (target, where, parentId) {
- var nm = path.resolve(where, "node_modules")
- , targetFolder = path.resolve(nm, target.name)
- , prettyWhere = where
-
- if (!npm.config.get("global")) {
- prettyWhere = path.relative(process.cwd(), where)
- }
-
- if (prettyWhere === ".") prettyWhere = null
-
- if (!npm.config.get("global")) {
- // print out the folder relative to where we are right now.
- targetFolder = path.relative(process.cwd(), targetFolder)
- }
-
- return [ target._id
- , targetFolder
- , prettyWhere && parentId
- , parentId && prettyWhere
- , target._from ]
-}
-
-var installed = Object.create(null)
-
-function installOne_ (target, where, context, cb_) {
- var nm = path.resolve(where, "node_modules")
- , targetFolder = path.resolve(nm, target.name)
- , prettyWhere = path.relative(process.cwd(), where)
- , parent = context.parent
-
- if (prettyWhere === ".") prettyWhere = null
-
- cb_ = inflight(target.name + ":" + where, cb_)
- if (!cb_) {
- return log.verbose("installOne", "of", target.name, "to", where, "already in flight; waiting")
- }
- else {
- log.verbose("installOne", "of", target.name, "to", where, "not in flight; installing")
- }
-
- function cb(er, data) {
- unlock(nm, target.name, function () { cb_(er, data) })
- }
-
- lock(nm, target.name, function (er) {
- if (er) return cb(er)
-
- if (targetFolder in installed) {
- log.error("install", "trying to install", target.version, "to", targetFolder)
- log.error("install", "but already installed versions", installed[targetFolder])
- installed[targetFolder].push(target.version)
- }
- else {
- installed[targetFolder] = [target.version]
- }
-
- var force = npm.config.get("force")
- , nodeVersion = npm.config.get("node-version")
- , strict = npm.config.get("engine-strict")
- , c = npmInstallChecks
-
- chain(
- [ [c.checkEngine, target, npm.version, nodeVersion, force, strict]
- , [c.checkPlatform, target, force]
- , [c.checkCycle, target, context.ancestors]
- , [c.checkGit, targetFolder]
- , [write, target, targetFolder, context] ]
- , function (er, d) {
- if (er) return cb(er)
-
- d.push(resultList(target, where, parent && parent._id))
- cb(er, d)
- }
- )
- })
+// This takes an object and a property name instead of a value to allow us
+// to define the arguments for use by chain before the property exists yet.
+Installer.prototype.debugTree = function (name, treeName, cb) {
+ validate('SSF', arguments)
+ log.silly(name, this.prettify(this[treeName]).trim())
+ cb()
}
-function write (target, targetFolder, context, cb_) {
- var up = npm.config.get("unsafe-perm")
- , user = up ? null : npm.config.get("user")
- , group = up ? null : npm.config.get("group")
- , family = context.family
-
- function cb (er, data) {
- // cache.unpack returns the data object, and all we care about
- // is the list of installed packages from that last thing.
- if (!er) return cb_(er, data)
-
- if (npm.config.get("rollback") === false) return cb_(er)
- npm.rollbacks.push(targetFolder)
- cb_(er, data)
+Installer.prototype.prettify = function (tree) {
+ validate('O', arguments)
+ function byName (aa, bb) {
+ return aa.package.name.localeCompare(bb)
}
-
- var bundled = []
-
- log.silly("install write", "writing", target.name, target.version, "to", targetFolder)
- chain(
- [ [ cache.unpack, target.name, target.version, targetFolder, null, null, user, group ],
- function writePackageJSON (cb) {
- var jsonPath = path.resolve(targetFolder, 'package.json')
- log.verbose('write', 'writing to', jsonPath)
- writeFileAtomic(jsonPath, JSON.stringify(target, null, 2) + '\n', cb)
- },
- [ lifecycle, target, "preinstall", targetFolder ],
- function collectBundled (cb) {
- if (!target.bundleDependencies) return cb()
-
- var bd = path.resolve(targetFolder, "node_modules")
- fs.readdir(bd, function (er, b) {
- // nothing bundled, maybe
- if (er) return cb()
- bundled = b || []
- cb()
- })
- } ]
-
- // nest the chain so that we can throw away the results returned
- // up until this point, since we really don't care about it.
- , function X (er) {
- if (er) return cb(er)
-
- // before continuing to installing dependencies, check for a shrinkwrap.
- var opt = { dev: npm.config.get("dev") }
- readDependencies(context, targetFolder, opt, function (er, data, wrap) {
- if (er) return cb(er);
- var deps = prepareForInstallMany(data, "dependencies", bundled, wrap,
- family)
- var depsTargetFolder = targetFolder
- var depsContext = { family: family
- , ancestors: context.ancestors
- , parent: target
- , explicit: false
- , wrap: wrap }
-
- var actions =
- [ [ installManyAndBuild, deps, depsTargetFolder, depsContext ] ]
-
- // FIXME: This is an accident waiting to happen!
- //
- // 1. If multiple children at the same level of the tree share a
- // peerDependency that's not in the parent's dependencies, because
- // the peerDeps don't get added to the family, they will keep
- // getting reinstalled (worked around by inflighting installOne).
- // 2. The installer can't safely build at the parent level because
- // that's already being done by the parent's installAndBuild. This
- // runs the risk of the peerDependency never getting built.
- //
- // The fix: Don't install peerDependencies; require them to be
- // included as explicit dependencies / devDependencies, and warn
- // or error when they're missing. See #5080 for more arguments in
- // favor of killing implicit peerDependency installs with fire.
- var peerDeps = prepareForInstallMany(data, "peerDependencies", bundled,
- wrap, family)
- peerDeps.forEach(function (pd) {
- warnPeers([
- "The peer dependency "+pd+" included from "+data.name+" will no",
- "longer be automatically installed to fulfill the peerDependency ",
- "in npm 3+. Your application will need to depend on it explicitly."
- ], pd+","+data.name)
- })
-
- // Package scopes cause an addditional tree level which needs to be
- // considered when resolving a peerDependency's target folder.
- var pdTargetFolder
- if (npa(target.name).scope) {
- pdTargetFolder = path.resolve(targetFolder, '../../..')
- } else {
- pdTargetFolder = path.resolve(targetFolder, '../..')
- }
-
- var pdContext = context
- if (peerDeps.length > 0) {
- actions.push(
- [ installMany, peerDeps, pdTargetFolder, pdContext ]
- )
- }
-
- chain(actions, cb)
- })
- })
-}
-
-function installManyAndBuild (deps, targetFolder, context, cb) {
- installMany(deps, targetFolder, context, function (er, d) {
- log.verbose("about to build", targetFolder)
- if (er) return cb(er)
- npm.commands.build( [targetFolder]
- , npm.config.get("global")
- , true
- , function (er) { return cb(er, d) })
- })
-}
-
-function prepareForInstallMany (packageData, depsKey, bundled, wrap, family) {
- var deps = Object.keys(packageData[depsKey] || {})
-
- // don't install bundleDependencies, unless they're missing.
- if (packageData.bundleDependencies) {
- deps = deps.filter(function (d) {
- return packageData.bundleDependencies.indexOf(d) === -1 ||
- bundled.indexOf(d) === -1
+ return archy({
+ label: tree.package.name + '@' + tree.package.version +
+ ' ' + tree.path,
+ nodes: (tree.children || []).sort(byName).map(function expandChild (child) {
+ return {
+ label: child.package.name + '@' + child.package.version,
+ nodes: child.children.sort(byName).map(expandChild)
+ }
})
- }
-
- return deps.filter(function (d) {
- // prefer to not install things that are satisfied by
- // something in the "family" list, unless we're installing
- // from a shrinkwrap.
- if (wrap) return wrap
- if (semver.validRange(family[d], true)) {
- return !semver.satisfies(family[d], packageData[depsKey][d], true)
- }
- return true
- }).map(function (d) {
- var v = packageData[depsKey][d]
- var t = d + "@" + v
- log.silly("prepareForInstallMany", "adding", t, "from", packageData.name, depsKey)
- return t
- })
+ }, '', { unicode: npm.config.get('unicode') })
}
diff --git a/lib/install/action/build.js b/lib/install/action/build.js
new file mode 100644
index 000000000..0feb3a0fb
--- /dev/null
+++ b/lib/install/action/build.js
@@ -0,0 +1,12 @@
+'use strict'
+var chain = require('slide').chain
+var build = require('../../build.js')
+var npm = require('../../npm.js')
+
+module.exports = function (top, buildpath, pkg, log, next) {
+ log.silly('build', pkg.package.name)
+ chain([
+ [build.linkStuff, pkg.package, pkg.path, npm.config.get('global'), false],
+ [build.writeBuiltinConf, pkg.package, pkg.path]
+ ], next)
+}
diff --git a/lib/install/action/extract.js b/lib/install/action/extract.js
new file mode 100644
index 000000000..f9a4f8b2f
--- /dev/null
+++ b/lib/install/action/extract.js
@@ -0,0 +1,17 @@
+'use strict'
+var updatePackageJson = require('../update-package-json')
+var npm = require('../../npm.js')
+var cache = require('../../cache.js')
+
+module.exports = function (top, buildpath, pkg, log, next) {
+ log.silly('extract', pkg.package.name)
+ var up = npm.config.get('unsafe-perm')
+ var user = up ? null : npm.config.get('user')
+ var group = up ? null : npm.config.get('group')
+ cache.unpack(pkg.package.name, pkg.package.version
+ , buildpath
+ , null, null, user, group, function (er) {
+ if (er) return next(er)
+ updatePackageJson(pkg, buildpath, next)
+ })
+}
diff --git a/lib/install/action/fetch.js b/lib/install/action/fetch.js
new file mode 100644
index 000000000..16a94244b
--- /dev/null
+++ b/lib/install/action/fetch.js
@@ -0,0 +1,27 @@
+'use strict'
+// var cache = require('../../cache.js')
+
+module.exports = function (top, buildpath, pkg, log, next) {
+ next()
+/*
+// FIXME: Unnecessary as long as we have to have the tarball to resolve all deps, which
+// is progressively seeming to be likely for the indefinite future.
+// ALSO fails for local deps specified with relative URLs outside of the top level.
+
+ var name = pkg.package.name
+ var version
+ switch (pkg.package._requested.type) {
+ case 'version':
+ case 'range':
+ version = pkg.package.version
+ break
+ case 'hosted':
+ name = name + '@' + pkg.package._requested.spec
+ break
+ default:
+ name = pkg.package._requested.raw
+ }
+ log.silly('fetch', name, version)
+ cache.add(name, version, top, false, next)
+*/
+}
diff --git a/lib/install/action/finalize.js b/lib/install/action/finalize.js
new file mode 100644
index 000000000..321c254e1
--- /dev/null
+++ b/lib/install/action/finalize.js
@@ -0,0 +1,77 @@
+'use strict'
+var path = require('path')
+var rimraf = require('rimraf')
+var fs = require('graceful-fs')
+var mkdirp = require('mkdirp')
+var asyncMap = require('slide').asyncMap
+
+module.exports = function (top, buildpath, pkg, log, next) {
+ log.silly('finalize', pkg.path)
+
+ var delpath = path.join(path.dirname(pkg.path), '.' + path.basename(pkg.path) + '.DELETE')
+
+ mkdirp(path.resolve(pkg.path, '..'), whenParentExists)
+
+ function whenParentExists (mkdirEr) {
+ if (mkdirEr) return next(mkdirEr)
+ // We stat first, because we can't rely on ENOTEMPTY from Windows.
+ // Windows, by contrast, gives the generic EPERM of a folder already exists.
+ fs.lstat(pkg.path, destStated)
+ }
+
+ function destStated (doesNotExist) {
+ if (doesNotExist) {
+ fs.rename(buildpath, pkg.path, whenMoved)
+ } else {
+ moveAway()
+ }
+ }
+
+ function whenMoved (renameEr) {
+ if (!renameEr) return next()
+ if (renameEr.code !== 'ENOTEMPTY') return next(renameEr)
+ moveAway()
+ }
+
+ function moveAway () {
+ fs.rename(pkg.path, delpath, whenOldMovedAway)
+ }
+
+ function whenOldMovedAway (renameEr) {
+ if (renameEr) return next(renameEr)
+ fs.rename(buildpath, pkg.path, whenConflictMoved)
+ }
+
+ function whenConflictMoved (renameEr) {
+ // if we got an error we'll try to put back the original module back,
+ // succeed or fail though we want the original error that caused this
+ if (renameEr) return fs.rename(delpath, pkg.path, function () { next(renameEr) })
+ fs.readdir(path.join(delpath, 'node_modules'), makeTarget)
+ }
+
+ function makeTarget (readdirEr, files) {
+ if (readdirEr) return cleanup()
+ if (!files.length) return cleanup()
+ mkdirp(path.join(pkg.path, 'node_modules'), function (mkdirEr) { moveModules(mkdirEr, files) })
+ }
+
+ function moveModules (mkdirEr, files) {
+ if (mkdirEr) return next(mkdirEr)
+ asyncMap(files, function (file, done) {
+ var from = path.join(delpath, 'node_modules', file)
+ var to = path.join(pkg.path, 'node_modules', file)
+ // we ignore errors here, because they can legitimately happen, for instance,
+ // bundled modules will be in both node_modules folders
+ fs.rename(from, to, function () { done() })
+ }, cleanup)
+ }
+
+ function cleanup () {
+ rimraf(delpath, afterCleanup)
+ }
+
+ function afterCleanup (rimrafEr) {
+ if (rimrafEr) log.warn('finalize', rimrafEr)
+ next()
+ }
+}
diff --git a/lib/install/action/install.js b/lib/install/action/install.js
new file mode 100644
index 000000000..f6c8d56e0
--- /dev/null
+++ b/lib/install/action/install.js
@@ -0,0 +1,7 @@
+'use strict'
+var lifecycle = require('../../utils/lifecycle.js')
+
+module.exports = function (top, buildpath, pkg, log, next) {
+ log.silly('install', pkg.package.name, buildpath)
+ lifecycle(pkg.package, 'install', pkg.path, false, false, next)
+}
diff --git a/lib/install/action/move.js b/lib/install/action/move.js
new file mode 100644
index 000000000..030bcf7ba
--- /dev/null
+++ b/lib/install/action/move.js
@@ -0,0 +1,32 @@
+'use strict'
+var fs = require('fs')
+var path = require('path')
+var chain = require('slide').chain
+var rmStuff = require('../../unbuild.js').rmStuff
+var lifecycle = require('../../utils/lifecycle.js')
+var finalize = require('./finalize.js')
+var updatePackageJson = require('../update-package-json')
+
+module.exports = function (top, buildpath, pkg, log, next) {
+ log.warn('move', pkg.fromPath, pkg.path)
+ chain([
+ [lifecycle, pkg.package, 'preuninstall', pkg.path, false, true],
+ [lifecycle, pkg.package, 'uninstall', pkg.path, false, true],
+ [rmStuff, pkg.package, pkg.path],
+ [lifecycle, pkg.package, 'postuninstall', pkg.path, false, true],
+ [finalize, top, pkg.fromPath, pkg, log],
+ [removeEmptyParents, path.resolve(pkg.fromPath, '..')],
+ [updatePackageJson, pkg, pkg.path]
+ ], next)
+}
+
+function removeEmptyParents (pkgdir, next) {
+ _removeEmptyParents()
+ next()
+ function _removeEmptyParents () {
+ fs.rmdir(pkgdir, function (er) {
+ if (er) return
+ _removeEmptyParents(path.resolve(pkgdir, '..'))
+ })
+ }
+}
diff --git a/lib/install/action/postinstall.js b/lib/install/action/postinstall.js
new file mode 100644
index 000000000..5460c8364
--- /dev/null
+++ b/lib/install/action/postinstall.js
@@ -0,0 +1,7 @@
+'use strict'
+var lifecycle = require('../../utils/lifecycle.js')
+
+module.exports = function (top, buildpath, pkg, log, next) {
+ log.silly('postinstall', pkg.package.name, buildpath)
+ lifecycle(pkg.package, 'postinstall', pkg.path, false, false, next)
+}
diff --git a/lib/install/action/preinstall.js b/lib/install/action/preinstall.js
new file mode 100644
index 000000000..989519c26
--- /dev/null
+++ b/lib/install/action/preinstall.js
@@ -0,0 +1,7 @@
+'use strict'
+var lifecycle = require('../../utils/lifecycle.js')
+
+module.exports = function (top, buildpath, pkg, log, next) {
+ log.silly('preinstall', pkg.package.name, buildpath)
+ lifecycle(pkg.package, 'preinstall', buildpath, false, false, next)
+}
diff --git a/lib/install/action/prepublish.js b/lib/install/action/prepublish.js
new file mode 100644
index 000000000..95ff22b66
--- /dev/null
+++ b/lib/install/action/prepublish.js
@@ -0,0 +1,7 @@
+'use strict'
+var lifecycle = require('../../utils/lifecycle.js')
+
+module.exports = function (top, buildpath, pkg, log, next) {
+ log.silly('prepublish', pkg.package.name, buildpath)
+ lifecycle(pkg.package, 'prepublish', buildpath, false, false, next)
+}
diff --git a/lib/install/action/remove.js b/lib/install/action/remove.js
new file mode 100644
index 000000000..8375490a8
--- /dev/null
+++ b/lib/install/action/remove.js
@@ -0,0 +1,64 @@
+'use strict'
+var path = require('path')
+var fs = require('graceful-fs')
+var rimraf = require('rimraf')
+var asyncMap = require('slide').asyncMap
+var mkdirp = require('mkdirp')
+var npm = require('../../npm.js')
+
+// This is weird because we want to remove the module but not it's node_modules folder
+// allowing for this allows us to not worry about the order of operations
+module.exports = function (top, buildpath, pkg, log, next) {
+ log.silly('remove', pkg.path)
+ var modpath = path.join(path.dirname(pkg.path), '.' + path.basename(pkg.path) + '.MODULES')
+
+ fs.rename(path.join(pkg.path, 'node_modules'), modpath, unbuildPackage)
+
+ function unbuildPackage (renameEr) {
+ npm.commands.unbuild(pkg.path, renameEr ? andRemoveEmptyParents(pkg.path) : moveModulesBack)
+ }
+
+ function andRemoveEmptyParents (path) {
+ return function (er) {
+ if (er) return next(er)
+ removeEmptyParents(pkg.path)
+ }
+ }
+
+ function moveModulesBack () {
+ fs.readdir(modpath, makeTarget)
+ }
+
+ function makeTarget (readdirEr, files) {
+ if (readdirEr) return cleanup()
+ if (!files.length) return cleanup()
+ mkdirp(path.join(pkg.path, 'node_modules'), function (mkdirEr) { moveModules(mkdirEr, files) })
+ }
+
+ function moveModules (mkdirEr, files) {
+ if (mkdirEr) return next(mkdirEr)
+ asyncMap(files, function (file, done) {
+ var from = path.join(modpath, file)
+ var to = path.join(pkg.path, 'node_modules', file)
+ // we ignore errors here, because they can legitimately happen, for instance,
+ // bundled modules will be in both node_modules folders
+ fs.rename(from, to, function () { done() })
+ }, cleanup)
+ }
+
+ function cleanup () {
+ rimraf(modpath, afterCleanup)
+ }
+
+ function afterCleanup (rimrafEr) {
+ if (rimrafEr) log.warn('finalize', rimrafEr)
+ removeEmptyParents(path.resolve(pkg.path, '..'))
+ }
+
+ function removeEmptyParents (pkgdir) {
+ fs.rmdir(pkgdir, function (er) {
+ if (er) return next()
+ removeEmptyParents(path.resolve(pkgdir, '..'))
+ })
+ }
+}
diff --git a/lib/install/action/test.js b/lib/install/action/test.js
new file mode 100644
index 000000000..354519d06
--- /dev/null
+++ b/lib/install/action/test.js
@@ -0,0 +1,7 @@
+'use strict'
+var lifecycle = require('../../utils/lifecycle.js')
+
+module.exports = function (top, buildpath, pkg, log, next) {
+ log.silly('test', pkg.package.name, buildpath)
+ lifecycle(pkg.package, 'test', buildpath, false, false, next)
+}
diff --git a/lib/install/actions.js b/lib/install/actions.js
new file mode 100644
index 000000000..caf7b7a45
--- /dev/null
+++ b/lib/install/actions.js
@@ -0,0 +1,78 @@
+'use strict'
+var path = require('path')
+var validate = require('aproba')
+var chain = require('slide').chain
+var asyncMap = require('slide').asyncMap
+var andFinishTracker = require('./and-finish-tracker.js')
+var andAddParentToErrors = require('./and-add-parent-to-errors.js')
+var uniqueFilename = require('../utils/get-name.js').uniqueFilename
+
+var actions = {}
+
+actions.fetch = require('./action/fetch.js')
+actions.extract = require('./action/extract.js')
+actions.build = require('./action/build.js')
+actions.test = require('./action/test.js')
+actions.preinstall = require('./action/preinstall.js')
+actions.install = require('./action/install.js')
+actions.postinstall = require('./action/postinstall.js')
+actions.prepublish = require('./action/prepublish.js')
+actions.finalize = require('./action/finalize.js')
+actions.remove = require('./action/remove.js')
+actions.move = require('./action/move.js')
+
+Object.keys(actions).forEach(function (actionName) {
+ var action = actions[actionName]
+ actions[actionName] = function (top, buildpath, pkg, log, next) {
+ validate('SSOOF', arguments)
+ return action(top, buildpath, pkg, log, andFinishTracker(log, andAddParentToErrors(pkg.parent, next)))
+ }
+})
+
+function prepareAction (staging, log) {
+ validate('SO', arguments)
+ return function (action) {
+ validate('SO', action)
+ var cmd = action[0]
+ var pkg = action[1]
+ if (!actions[cmd]) throw new Error('Unknown decomposed command "' + cmd + '" (is it new?)')
+ var buildpath = uniqueFilename(staging, pkg.package.name, pkg.realpath)
+ var top = path.resolve(staging, '../..')
+ return [actions[cmd], top, buildpath, pkg, log.newGroup(cmd + ':' + pkg.package.name)]
+ }
+}
+
+exports.actions = actions
+
+function execAction (todo, done) {
+ validate('AF', arguments)
+ var cmd = todo.shift()
+ todo.push(done)
+ cmd.apply(null, todo)
+}
+
+exports.doOne = function (cmd, staging, pkg, log, next) {
+ validate('SSOOF', arguments)
+ execAction(prepareAction(staging, log)([cmd, pkg]), next)
+}
+
+exports.doSerial = function (type, staging, actionsToRun, log, next) {
+ validate('SSAOF', arguments)
+ actionsToRun = actionsToRun
+ .filter(function (value) { return value[0] === type })
+ .sort(function (aa, bb) {
+ var aapath = aa[1].fromPath || aa[1].path
+ var bbpath = bb[1].fromPath || bb[1].path
+ return bbpath.length - aapath.length || aapath.localeCompare(bbpath)
+ })
+ log.silly('doSerial', '%s %d', type, actionsToRun.length)
+ chain(actionsToRun.map(prepareAction(staging, log)), andFinishTracker(log, next))
+}
+
+exports.doParallel = function (type, staging, actionsToRun, log, next) {
+ validate('SSAOF', arguments)
+ actionsToRun = actionsToRun.filter(function (value) { return value[0] === type })
+ log.silly('doParallel', type + ' ' + actionsToRun.length)
+
+ asyncMap(actionsToRun.map(prepareAction(staging, log)), execAction, andFinishTracker(log, next))
+}
diff --git a/lib/install/and-add-parent-to-errors.js b/lib/install/and-add-parent-to-errors.js
new file mode 100644
index 000000000..e549afac1
--- /dev/null
+++ b/lib/install/and-add-parent-to-errors.js
@@ -0,0 +1,13 @@
+'use strict'
+var validate = require('aproba')
+
+module.exports = function (parent, cb) {
+ validate('F', [cb])
+ return function (er) {
+ if (!er) return cb.apply(null, arguments)
+ if (parent && parent.package && parent.package.name) {
+ er.parent = parent.package.name
+ }
+ cb(er)
+ }
+}
diff --git a/lib/install/and-finish-tracker.js b/lib/install/and-finish-tracker.js
new file mode 100644
index 000000000..2bab60ddc
--- /dev/null
+++ b/lib/install/and-finish-tracker.js
@@ -0,0 +1,16 @@
+'use strict'
+var validate = require('aproba')
+
+module.exports = function (tracker, cb) {
+ validate('OF', [tracker, cb])
+ return function () {
+ tracker.finish()
+ cb.apply(null, arguments)
+ }
+}
+
+module.exports.now = function (tracker, cb) {
+ validate('OF', [tracker, cb])
+ tracker.finish()
+ cb.apply(null, Array.prototype.slice.call(arguments, 2))
+}
diff --git a/lib/install/decompose-actions.js b/lib/install/decompose-actions.js
new file mode 100644
index 000000000..746fac829
--- /dev/null
+++ b/lib/install/decompose-actions.js
@@ -0,0 +1,37 @@
+'use strict'
+var validate = require('aproba')
+
+module.exports = function (differences, decomposed, log, next) {
+ validate('AAOF', arguments)
+ differences.forEach(function (action) {
+ var cmd = action[0]
+ var pkg = action[1]
+ switch (cmd) {
+ case 'add':
+ case 'update':
+ decomposed.push(['fetch', pkg])
+ decomposed.push(['extract', pkg])
+ decomposed.push(['preinstall', pkg])
+ decomposed.push(['build', pkg])
+ decomposed.push(['install', pkg])
+ decomposed.push(['postinstall', pkg])
+ decomposed.push(['test', pkg])
+ decomposed.push(['finalize', pkg])
+ break
+ case 'move':
+ decomposed.push(['move', pkg])
+ decomposed.push(['preinstall', pkg])
+ decomposed.push(['build', pkg])
+ decomposed.push(['install', pkg])
+ decomposed.push(['postinstall', pkg])
+ decomposed.push(['test', pkg])
+ break
+ case 'remove':
+ // todo
+ default:
+ decomposed.push([cmd, pkg])
+ }
+ })
+ log.finish()
+ next()
+}
diff --git a/lib/install/deps.js b/lib/install/deps.js
new file mode 100644
index 000000000..bfcc2071f
--- /dev/null
+++ b/lib/install/deps.js
@@ -0,0 +1,423 @@
+'use strict'
+var assert = require('assert')
+var fs = require('fs')
+var path = require('path')
+var semver = require('semver')
+var asyncMap = require('slide').asyncMap
+var chain = require('slide').chain
+var union = require('lodash.union')
+var iferr = require('iferr')
+var npa = require('npm-package-arg')
+var validate = require('aproba')
+var realizePackageSpecifier = require('realize-package-specifier')
+var fetchPackageMetadata = require('../fetch-package-metadata.js')
+var andAddParentToErrors = require('./and-add-parent-to-errors.js')
+var addShrinkwrap = require('../fetch-package-metadata.js').addShrinkwrap
+var addBundled = require('../fetch-package-metadata.js').addBundled
+var inflateShrinkwrap = require('./inflate-shrinkwrap.js')
+var andFinishTracker = require('./and-finish-tracker.js')
+var npm = require('../npm.js')
+var flatName = require('./flatten-tree.js').flatName
+var createChild = require('./node.js').create
+var resetMetadata = require('./node.js').reset
+
+// The export functions in this module mutate a dependency tree, adding
+// items to them.
+
+function isDep (tree, child) {
+ var deps = tree.package.dependencies || {}
+ var devDeps = tree.package.devDependencies || {}
+ var reqVer = deps[child.package.name] || devDeps[child.package.name]
+ if (reqVer == null) return false
+ var requested = npa(child.package.name + "@" + reqVer)
+ return doesChildVersionMatch(child, requested)
+}
+
+function doesChildVersionMatch(child, requested) {
+ var childReq = child.package._requested
+ if (childReq && childReq.rawSpec == requested.rawSpec) return true
+ if (childReq && childReq.type === requested.type && childReq.spec === requested.spec) return true
+ if (!childReq && (requested.type === 'directory' || requested.type === 'local') && requested.rawSpec === child.package._from) return true
+ if (requested.type !== 'range' && requested.type !== 'version') return false
+ return semver.satisfies(child.package.version, requested.spec)
+}
+
+var recalculateMetadata = exports.recalculateMetadata = function (tree, log, next) {
+ validate('OOF', arguments)
+ if (tree.parent == null) resetMetadata(tree)
+ function markDeps (spec, done) {
+ validate('SF', arguments)
+ realizePackageSpecifier(spec, tree.path, function (er, req) {
+ if (er) return done()
+ var child = findRequirement(tree, req.name, req)
+ if (child) {
+ resolveWithExistingModule(child, child.package, tree, log, function () { done() })
+ } else {
+ tree.missingDeps[req.name] = req.rawSpec
+ done()
+ }
+ })
+ }
+ function deptospec (deps) {
+ return function (depname) {
+ return depname + '@' + deps[depname]
+ }
+ }
+ var tomark = union(
+ Object.keys(tree.package.dependencies).map(deptospec(tree.package.dependencies)),
+ tree.parent == null
+ ? Object.keys(tree.package.devDependencies).map(deptospec(tree.package.devDependencies))
+ : []
+ )
+ chain([
+ [asyncMap, tomark, markDeps],
+ [asyncMap, tree.children, function (child, done) { recalculateMetadata(child, log, done) }]
+ ], function () { next(null, tree) })
+}
+
+// Add a list of args to tree's top level dependencies
+exports.loadRequestedDeps = function (args, tree, saveToDependencies, log, next) {
+ validate('AOOF', [args, tree, log, next])
+ asyncMap(args, function (spec, done) {
+ replaceDependency(spec, tree, log.newGroup('loadRequestedDeps'), iferr(done, function (child, tracker) {
+ validate('OO', arguments)
+ if (npm.config.get('global')) {
+ child.isGlobal = true
+ }
+ if (saveToDependencies) {
+ tree.package[saveToDependencies][child.package.name] = child.package._requested.spec
+ }
+ if (saveToDependencies && saveToDependencies !== 'devDependencies') {
+ tree.package.dependencies[child.package.name] = child.package._requested.spec
+ }
+ child.directlyRequested = true
+ child.save = saveToDependencies
+
+ // For things the user asked to install, that aren't a dependency (or
+ // won't be when we're done), flag it as "depending" on the user
+ // themselves, so we don't remove it as a dep that no longer exists
+ if (isDep(tree, child)) {
+ child.package._requiredBy = union(child.package._requiredBy || [], [flatNameFromTree(tree)])
+ } else {
+ child.package._requiredBy = union(child.package._requiredBy, ['#USER'])
+ }
+ recalculateMetadata(tree, log, function () {
+ done(null, child, tracker)
+ })
+ }))
+ }, andForEachChild(loadDeps, andFinishTracker(log, next)))
+}
+
+exports.removeDeps = function (args, tree, saveToDependencies, log, next) {
+ validate('AOOF', [args, tree, log, next])
+ asyncMap(args, function (name, done) {
+ var toRemove = tree.children.filter(function (child) { return child.package.name === name })
+ tree.removed = union(tree.removed || [], toRemove)
+ toRemove.forEach(function (child) {
+ child.save = saveToDependencies
+ })
+ tree.children = tree.children.filter(function (child) { return child.package.name !== name })
+ done()
+ }, andFinishTracker(log, next))
+}
+
+function andForEachChild (load, next) {
+ validate('F', [next])
+ return function (er, children, logs) {
+ // when children is empty, logs won't be passed in at all (asyncMap is weird)
+ // so shortcircuit before arg validation
+ if (!er && (!children || children.length === 0)) return next()
+ validate('EAA', arguments)
+ if (er) return next(er)
+ assert(children.length === logs.length)
+ var cmds = []
+ for (var ii = 0; ii < children.length; ++ii) {
+ cmds.push([load, children[ii], logs[ii]])
+ }
+ var sortedCmds = cmds.sort(function installOrder (aa, bb) {
+ return aa[1].package.name.localeCompare(bb[1].package.name)
+ })
+ chain(sortedCmds, next)
+ }
+}
+
+function depAdded (done) {
+ validate('F', arguments)
+ return function () {
+ validate('EOO', arguments)
+ done.apply(null, arguments)
+ }
+}
+
+// Load any missing dependencies in the given tree
+exports.loadDeps = loadDeps
+function loadDeps (tree, log, next) {
+ validate('OOF', arguments)
+ if (tree.loaded) return andFinishTracker.now(log, next)
+ tree.loaded = true
+ if (!tree.package.dependencies) tree.package.dependencies = {}
+ asyncMap(Object.keys(tree.package.dependencies), function (dep, done) {
+ var version = tree.package.dependencies[dep]
+ if (tree.package.optionalDependencies &&
+ tree.package.optionalDependencies[dep]) {
+ done = andWarnOnError(log, done)
+ }
+ var spec = dep + '@' + version
+ addDependency(spec, tree, log.newGroup('loadDep:' + dep), depAdded(done))
+ }, andForEachChild(loadDeps, andFinishTracker(log, next)))
+}
+
+function andWarnOnError (log, next) {
+ validate('OF', arguments)
+ return function (er, child, childLog) {
+ validate('EOO', arguments)
+ if (er) {
+ log.warn('install', "Couldn't install optional dependency:", er.message)
+ log.verbose('install', er.stack)
+ }
+ next(null, child, childLog)
+ }
+}
+
+// Load development dependencies into the given tree
+exports.loadDevDeps = function (tree, log, next) {
+ validate('OOF', arguments)
+ if (!tree.package.devDependencies) return andFinishTracker.now(log, next)
+ asyncMap(Object.keys(tree.package.devDependencies), function (dep, done) {
+ // things defined as both dev dependencies and regular dependencies are treated
+ // as the former
+ if (tree.package.dependencies[dep]) return done()
+
+ var spec = dep + '@' + tree.package.devDependencies[dep]
+ var logGroup = log.newGroup('loadDevDep:' + dep)
+ addDependency(spec, tree, logGroup, iferr(done, function (child, tracker) {
+ validate('OO', arguments)
+ child.devDependency = true
+ done(null, child, tracker)
+ }))
+ }, andForEachChild(loadDeps, andFinishTracker(log, next)))
+}
+
+exports.loadExtraneous = function (tree, log, next) {
+ validate('OOF', arguments)
+ asyncMap(tree.children.filter(function (child) { return !child.loaded }), function (child, done) {
+ resolveWithExistingModule(child, child.package, tree, log, done)
+ }, andForEachChild(loadDeps, andFinishTracker(log, next)))
+}
+
+function replaceDependency (spec, tree, log, cb) {
+ validate('SOOF', arguments)
+ var next = andAddParentToErrors(tree, cb)
+ fetchPackageMetadata(spec, tree.path, log.newItem('fetchMetadata'), iferr(next, function (pkg) {
+ tree.children = tree.children.filter(function (child) {
+ return child.package.name !== pkg.name
+ })
+ resolveRequirement(pkg, tree, log, next)
+ }))
+}
+
+function addDependency (spec, tree, log, done) {
+ validate('SOOF', arguments)
+ var cb = function (er, child, log) {
+ validate('EOO', arguments)
+ if (er) return done(er)
+ done(null, child, log)
+ }
+ var next = andAddParentToErrors(tree, cb)
+ fetchPackageMetadata(spec, tree.path, log.newItem('fetchMetadata'), iferr(next, function (pkg) {
+ var child = findRequirement(tree, pkg.name, npa(spec))
+ if (child) {
+ resolveWithExistingModule(child, pkg, tree, log, next)
+ } else {
+ resolveRequirement(pkg, tree, log, next)
+ }
+ }))
+}
+
+function resolveWithExistingModule (child, pkg, tree, log, next) {
+ validate('OOOOF', arguments)
+ if (!child.package._requested) {
+ child.package._requested = pkg._requested
+ }
+ if (child.package._requested && child.package._requested.spec !== pkg._requested.spec && (child.package._requested.type === "version" || child.package._requested.type === "range")) {
+ child.package._requested.spec += ' ' + pkg._requested.spec
+ child.package._requested.type = 'range'
+ }
+ if (isDep(tree, child)) {
+ child.package._requiredBy = union(child.package._requiredBy || [], [flatNameFromTree(tree)])
+ }
+
+ tree.requires = union(tree.requires || [], [child])
+
+ if (tree.parent && child.parent !== tree) updatePhantomChildren(tree.parent, child)
+
+ if (!child.loaded && pkg._shrinkwrap === undefined) {
+ fs.readFile(path.join(child.path, 'npm-shrinkwrap.json'), function (er, data) {
+ if (er) {
+ pkg._shrinkwrap = null
+ return next(null, child, log)
+ }
+ try {
+ pkg._shrinkwrap = JSON.parse(data)
+ } catch (ex) {
+ return next(null, child, log)
+ }
+ if (pkg._shrinkwrap && pkg._shrinkwrap.dependencies) {
+ return inflateShrinkwrap(child, pkg._shrinkwrap.dependencies, iferr(next, function () {
+ next(null, child, log)
+ }))
+ } else {
+ return next(null, child, log)
+ }
+ })
+ } else {
+ return next(null, child, log)
+ }
+}
+
+var updatePhantomChildren = exports.updatePhantomChildren = function (current, child) {
+ validate('OO', arguments)
+ while (current && current !== child.parent) {
+ // FIXME: phantomChildren doesn't actually belong in the package.json
+ if (!current.package._phantomChildren) current.package._phantomChildren = {}
+ current.package._phantomChildren[child.package.name] = child.package.version
+ current = current.parent
+ }
+}
+
+function flatNameFromTree (tree) {
+ validate('O', arguments)
+ if (!tree.parent) return '/'
+ var path = flatNameFromTree(tree.parent)
+ if (path !== '/') path += '/'
+ return flatName(path, tree)
+}
+
+function resolveRequirement (pkg, tree, log, next) {
+ validate('OOOF', arguments)
+ pkg._from = pkg._requested.name + '@' + pkg._requested.spec
+ addShrinkwrap(pkg, iferr(next, function () {
+ addBundled(pkg, iferr(next, function () {
+ var parent = earliestInstallable(tree, tree, pkg) || tree
+ var child = createChild({
+ package: pkg,
+ parent: parent,
+ path: path.join(parent.path, 'node_modules', pkg.name),
+ realpath: path.resolve(parent.realpath, 'node_modules', pkg.name),
+ children: pkg._bundled || []
+ })
+
+ parent.children.push(child)
+ if (isDep(tree, child)) {
+ child.package._requiredBy = union(child.package._requiredBy || [], [flatNameFromTree(tree)])
+ }
+
+ if (tree.parent && parent !== tree) updatePhantomChildren(tree.parent, child)
+
+ tree.requires = union(tree.requires || [], [child])
+
+ if (pkg._bundled) {
+ inflateBundled(child, child.children)
+ }
+
+ if (pkg._shrinkwrap && pkg._shrinkwrap.dependencies) {
+ return inflateShrinkwrap(child, pkg._shrinkwrap.dependencies, function () {
+ next(null, child, log)
+ })
+ }
+
+ next(null, child, log)
+ }))
+ }))
+}
+
+function inflateBundled (parent, children) {
+ validate('OA', arguments)
+ children.forEach(function (child) {
+ child.fromBundle = true
+ child.parent = parent
+ child.path = path.join(parent.path, child.package.name)
+ child.realpath = path.resolve(parent.path, child.package.name)
+ inflateBundled(child, child.children)
+ })
+}
+
+exports.validatePeerDeps = function validatePeerDeps (tree, log) {
+ validate('OO', arguments)
+ if (tree.package.peerDependencies) {
+ Object.keys(tree.package.peerDependencies).forEach(function (pkgname) {
+ var version = tree.package.peerDependencies[pkgname]
+ var match = findRequirement(tree, pkgname, npa(pkgname + "@" + version))
+ if (!match) {
+ log.warn('validatePeerDeps', tree.package.name + '@' + tree.package.version +
+ ' requires a peer of ' + pkgname + '@' + version + ' but none was installed.')
+ }
+ })
+ }
+ tree.children.forEach(function (child) { validatePeerDeps(child, log) })
+}
+
+// Determine if a module requirement is already met by the tree at or above
+// our current location in the tree.
+var findRequirement = exports.findRequirement = function (tree, name, requested) {
+ validate('OSO', arguments)
+ var nameMatch = function (child) {
+ return child.package.name === name && child.parent
+ }
+ var versionMatch = function (child) {
+ return doesChildVersionMatch(child, requested)
+ }
+ if (nameMatch(tree)) {
+ // this *is* the module, but it doesn't match the version, so a
+ // new copy will have to be installed
+ return versionMatch(tree) ? tree : null
+ }
+
+ var matches = tree.children.filter(nameMatch)
+ if (matches.length) {
+ matches = matches.filter(versionMatch)
+ // the module exists as a dependent, but the version doesn't match, so
+ // a new copy will have to be installed above here
+ if (matches.length) return matches[0]
+ return null
+ }
+ if (!tree.parent) return null
+ return findRequirement(tree.parent, name, requested)
+}
+
+// Find the highest level in the tree that we can install this module in.
+// If the module isn't installed above us yet, that'd be the very top.
+// If it is, then it's the level below where its installed.
+var earliestInstallable = exports.earliestInstallable = function (requiredBy, tree, pkg) {
+ validate('OOO', arguments)
+ var nameMatch = function (child) {
+ return child.package.name === pkg.name
+ }
+
+ var nameMatches = tree.children.filter(nameMatch)
+ if (nameMatches.length) return null
+
+ // If any of the children of this tree have conflicting
+ // binaries then we need to decline to install this package here.
+ var binaryMatches = tree.children.filter(function (child) {
+ return Object.keys(child.package.bin || {}).filter(function (bin) {
+ return pkg.bin && pkg.bin[bin]
+ }).length
+ })
+ if (binaryMatches.length) return null
+
+ // if this tree location requested the same module then we KNOW it
+ // isn't compatible because if it were findRequirement would have
+ // found that version.
+ if (requiredBy !== tree && tree.package.dependencies && tree.package.dependencies[pkg.name]) {
+ return null
+ }
+
+ // FIXME: phantomChildren doesn't actually belong in the package.json
+ if (tree.package._phantomChildren && tree.package._phantomChildren[pkg.name]) return null
+
+ if (!tree.parent) return tree
+ if (tree.isGlobal) return tree
+
+ return (earliestInstallable(requiredBy, tree.parent, pkg) || tree)
+}
diff --git a/lib/install/diff-trees.js b/lib/install/diff-trees.js
new file mode 100644
index 000000000..e9cfac2ef
--- /dev/null
+++ b/lib/install/diff-trees.js
@@ -0,0 +1,91 @@
+'use strict'
+var validate = require('aproba')
+var npa = require('npm-package-arg')
+var flattenTree = require('./flatten-tree.js')
+
+function nonRegistrySource (pkg) {
+ validate('O', arguments)
+ var requested = pkg._requested || (pkg._from && npa(pkg._from))
+ if (!requested) return false
+
+ if (requested.type === 'hosted') return true
+ if (requested.type === 'local') return true
+ return false
+}
+
+function pkgAreEquiv (aa, bb) {
+ var aaSha = (aa.dist && aa.dist.shasum) || aa._shasum
+ var bbSha = (bb.dist && bb.dist.shasum) || bb._shasum
+ if (aaSha === bbSha) return true
+ if (aaSha || bbSha) return false
+ if (nonRegistrySource(aa) || nonRegistrySource(bb)) return false
+ if (aa.version === bb.version) return true
+ return false
+}
+
+function getNameAndVersion (pkg) {
+ var versionspec = pkg._shasum
+
+ if (!versionspec && nonRegistrySource(pkg)) {
+ if (pkg._requested) {
+ versionspec = pkg._requested.spec
+ } else if (pkg._from) {
+ versionspec = npa(pkg._from).spec
+ }
+ }
+ if (!versionspec) {
+ versionspec = pkg.version
+ }
+ return pkg.name + '@' + versionspec
+}
+
+function pushAll (aa, bb) {
+ Array.prototype.push.apply(aa, bb)
+}
+
+module.exports = function (oldTree, newTree, differences, log, next) {
+ validate('OOAOF', arguments)
+ pushAll(differences, diffTrees(oldTree, newTree))
+ log.finish()
+ next()
+}
+
+function diffTrees (oldTree, newTree) {
+ validate('OO', arguments)
+ var differences = []
+ var flatOldTree = flattenTree(oldTree)
+ var flatNewTree = flattenTree(newTree)
+ var toRemove = {}
+ var toRemoveByNameAndVer = {}
+ // find differences
+ Object.keys(flatOldTree).forEach(function (flatname) {
+ if (flatNewTree[flatname]) return
+ var pkg = flatOldTree[flatname]
+ toRemove[flatname] = pkg
+ var namever = getNameAndVersion(pkg.package)
+ if (!toRemoveByNameAndVer[namever]) toRemoveByNameAndVer[namever] = []
+ toRemoveByNameAndVer[namever].push(flatname)
+ })
+ Object.keys(flatNewTree).forEach(function (path) {
+ if (flatNewTree[path].fromBundle) return
+ if (flatOldTree[path]) {
+ if (!flatNewTree[path].directlyRequested && pkgAreEquiv(flatOldTree[path].package, flatNewTree[path].package)) return
+ differences.push(['update', flatNewTree[path]])
+ } else {
+ var pkg = flatNewTree[path]
+ var vername = getNameAndVersion(pkg.package)
+ if (toRemoveByNameAndVer[vername] && toRemoveByNameAndVer[vername].length) {
+ var flatname = toRemoveByNameAndVer[vername].shift()
+ pkg.fromPath = toRemove[flatname].path
+ differences.push(['move', pkg])
+ delete toRemove[flatname]
+ } else {
+ differences.push(['add', pkg])
+ }
+ }
+ })
+ Object.keys(toRemove).forEach(function (path) {
+ differences.push(['remove', toRemove[path]])
+ })
+ return differences
+}
diff --git a/lib/install/flatten-tree.js b/lib/install/flatten-tree.js
new file mode 100644
index 000000000..d35b3f243
--- /dev/null
+++ b/lib/install/flatten-tree.js
@@ -0,0 +1,25 @@
+'use strict'
+var validate = require('aproba')
+
+module.exports = function (tree) {
+ validate('O', arguments)
+ var flat = {}
+ var todo = [[tree, '/']]
+ while (todo.length) {
+ var next = todo.shift()
+ var pkg = next[0]
+ var path = next[1]
+ flat[path] = pkg
+ if (path !== '/') path += '/'
+ for (var ii = 0; ii < pkg.children.length; ++ii) {
+ var child = pkg.children[ii]
+ todo.push([child, flatName(path, child)])
+ }
+ }
+ return flat
+}
+
+var flatName = module.exports.flatName = function (path, child) {
+ validate('SO', arguments)
+ return path + (child.package.name || 'TOP')
+}
diff --git a/lib/install/inflate-shrinkwrap.js b/lib/install/inflate-shrinkwrap.js
new file mode 100644
index 000000000..f6d85569b
--- /dev/null
+++ b/lib/install/inflate-shrinkwrap.js
@@ -0,0 +1,27 @@
+'use strict'
+var asyncMap = require('slide').asyncMap
+var path = require('path')
+var validate = require('aproba')
+var fetchPackageMetadata = require('../fetch-package-metadata.js')
+var createChild = require('./node.js').create
+
+var inflateShrinkwrap = module.exports = function (tree, swdeps, finishInflating) {
+ validate('OOF', arguments)
+ tree.children = []
+ asyncMap(Object.keys(swdeps), function (name, next) {
+ var sw = swdeps[name]
+ var spec = sw.resolved || name + '@' + sw.version
+ fetchPackageMetadata(spec, tree.path, function (er, pkg) {
+ if (er) return next(er)
+ var child = createChild({
+ package: pkg,
+ loaded: false,
+ parent: tree,
+ path: path.join(tree.path, 'node_modules', pkg.name),
+ realpath: path.resolve(tree.realpath, 'node_modules', pkg.name)
+ })
+ tree.children.push(child)
+ inflateShrinkwrap(child, sw.dependencies || {}, next)
+ })
+ }, finishInflating)
+}
diff --git a/lib/install/logical-tree.js b/lib/install/logical-tree.js
new file mode 100644
index 000000000..7fc826b95
--- /dev/null
+++ b/lib/install/logical-tree.js
@@ -0,0 +1,69 @@
+'use strict'
+var clone = require('lodash.clonedeep')
+var union = require('lodash.union')
+var without = require('lodash.without')
+var validate = require('aproba')
+var flattenTree = require('./flatten-tree.js')
+var npm = require('../npm.js')
+
+var logicalTree = module.exports = function (tree) {
+ validate('O', arguments)
+ var newTree = clone(tree)
+ var flat = flattenTree(newTree)
+ function getNode (flatname) { return flat[flatname] }
+ Object.keys(flat).sort().forEach(function (flatname) {
+ var node = flat[flatname]
+ var requiredBy = node.package._requiredBy || []
+ var requiredByNames = requiredBy.filter(function (parentFlatname) {
+ var parentNode = getNode(parentFlatname)
+ if (!parentNode) return false
+ return parentNode.package.dependencies[node.package.name] ||
+ parentNode.package.devDependencies[node.package.name]
+ })
+ requiredBy = requiredByNames.map(getNode)
+
+ node.requiredBy = requiredBy
+
+ if (!requiredBy.length) return
+
+ if (node.parent) node.parent.children = without(node.parent.children, node)
+
+ requiredBy.forEach(function (parentNode) {
+ parentNode.children = union(parentNode.children, [node])
+ })
+ })
+ return newTree
+}
+
+module.exports.asReadInstalled = function (tree) {
+ return translateTree(logicalTree(tree))
+}
+
+function translateTree (tree) {
+ var pkg = tree.package
+ if (pkg._dependencies) return pkg
+ pkg._dependencies = pkg.dependencies
+ pkg.dependencies = {}
+ tree.children.forEach(function (child) {
+ pkg.dependencies[child.package.name] = translateTree(child)
+ })
+ Object.keys(tree.missingDeps).forEach(function (name) {
+ if (pkg.dependencies[name]) {
+ pkg.dependencies[name].invalid = true
+ pkg.dependencies[name].realName = name
+ pkg.dependencies[name].extraneous = false
+ } else {
+ pkg.dependencies[name] = tree.missingDeps[name]
+ }
+ })
+ pkg.path = tree.path
+
+ // All package-only requiredBys (exclude #USER and #EXISTING)
+ var requiredBy = pkg._requiredBy.filter(function (req) { return req[0] !== '#' })
+
+ var isTopLevel = tree.parent == null
+ var isChildOfTop = !isTopLevel && tree.parent.parent == null
+ var topHasNoPackageJson = isChildOfTop && tree.parent.package.name === undefined && tree.parent.package.version === undefined && tree.parent.package._id === undefined
+ pkg.extraneous = !isTopLevel && (!isChildOfTop || !topHasNoPackageJson) && requiredBy.length === 0
+ return pkg
+}
diff --git a/lib/install/node.js b/lib/install/node.js
new file mode 100644
index 000000000..2f1422f7c
--- /dev/null
+++ b/lib/install/node.js
@@ -0,0 +1,40 @@
+'use strict'
+
+var defaultTemplate = {
+ package: {
+ dependencies: {},
+ devDependencies: {},
+ _requiredBy: [],
+ _phantomChildren: {}
+ },
+ loaded: false,
+ children: [],
+ requires: [],
+ missingDeps: {},
+ path: null,
+ realpath: null
+}
+
+var create = exports.create = function (node, template) {
+ if (!template) template = defaultTemplate
+ Object.keys(template).forEach(function (key) {
+ if (template[key] != null && typeof template[key] === 'object' && !(template[key] instanceof Array)) {
+ if (!node[key]) node[key] = {}
+ return create(node[key], template[key])
+ }
+ if (node[key] != null) return
+ node[key] = template[key]
+ })
+ return node
+}
+var reset = exports.reset = function (node) {
+ if (node.parent && !node.parent.parent && node.package && !node.package._requiredBy) node.package._requiredBy = ['#EXISTING']
+ var child = create(node)
+ child.package._requiredBy = child.package._requiredBy.filter(function (req) {
+ return req[0] === '#'
+ })
+ child.requires = []
+ child.package._phantomChildren = {}
+ child.missingDeps = {}
+ child.children.forEach(reset)
+}
diff --git a/lib/install/prune-tree.js b/lib/install/prune-tree.js
new file mode 100644
index 000000000..9a744d031
--- /dev/null
+++ b/lib/install/prune-tree.js
@@ -0,0 +1,31 @@
+'use strict'
+var validate = require('aproba')
+var flattenTree = require('./flatten-tree.js')
+
+function isNotPackage (mod) {
+ return function (parentMod) { return mod !== parentMod }
+}
+
+module.exports = function pruneTree (tree) {
+ validate('O', arguments)
+ var flat = flattenTree(tree)
+ // we just do this repeatedly until there are no more orphaned packages
+ // which isn't as efficient as it could be on a REALLY big tree
+ // but we'll face that if it proves to be an issue
+ var removedPackage
+ do {
+ removedPackage = false
+ Object.keys(flat).forEach(function (flatname) {
+ var child = flat[flatname]
+ if (!child.parent) return
+ child.package._requiredBy = (child.package._requiredBy || []).filter(function (req) {
+ return req[0] === '#' || flat[req]
+ })
+ if (!child.package._requiredBy.length) {
+ removedPackage = true
+ delete flat[flatname]
+ child.parent.children = child.parent.children.filter(isNotPackage(child))
+ }
+ })
+ } while (removedPackage)
+}
diff --git a/lib/install/save.js b/lib/install/save.js
new file mode 100644
index 000000000..a73702c09
--- /dev/null
+++ b/lib/install/save.js
@@ -0,0 +1,197 @@
+'use strict'
+var fs = require('fs')
+var path = require('path')
+var url = require('url')
+var writeFileAtomic = require('write-file-atomic')
+var log = require('npmlog')
+var semver = require('semver')
+var iferr = require('iferr')
+var sortedObject = require('sorted-object')
+var validate = require('aproba')
+var without = require('lodash.without')
+var npm = require('../npm.js')
+
+// if the -S|--save option is specified, then write installed packages
+// as dependencies to a package.json file.
+
+exports.saveRequested = function (args, tree, andReturn) {
+ validate('AOF', arguments)
+ savePackageJson(args, tree, andWarnErrors(andSaveShrinkwrap(tree, andReturn)))
+}
+
+function andSaveShrinkwrap (tree, andReturn) {
+ validate('OF', arguments)
+ return function (er) {
+ validate('E', arguments)
+ saveShrinkwrap(tree, andWarnErrors(andReturn))
+ }
+}
+
+function andWarnErrors (cb) {
+ validate('F', arguments)
+ return function (er) {
+ if (er) log.warn('saveError', er.message)
+ arguments[0] = null
+ cb.apply(null, arguments)
+ }
+}
+
+function saveShrinkwrap (tree, next) {
+ validate('OF', arguments)
+ var saveTarget = path.resolve(tree.path, 'npm-shrinkwrap.json')
+ fs.stat(saveTarget, function (er, stat) {
+ if (er) return next()
+ var save = npm.config.get('save')
+ var saveDev = npm.config.get('save-dev')
+ var saveOptional = npm.config.get('save-optional')
+ if (!saveOptional && saveDev) return next()
+ if (saveOptional || !save) return next()
+ npm.commands.shrinkwrap([], true, next)
+ })
+}
+
+function savePackageJson (args, tree, next) {
+ validate('AOF', arguments)
+ var saveBundle = npm.config.get('save-bundle')
+
+ // each item in the tree is a top-level thing that should be saved
+ // to the package.json file.
+ // The relevant tree shape is { <folder>: {what:<pkg>} }
+ var saveTarget = path.resolve(tree.path, 'package.json')
+ // don't use readJson, because we don't want to do all the other
+ // tricky npm-specific stuff that's in there.
+ fs.readFile(saveTarget, iferr(next, function (packagejson) {
+ try {
+ packagejson = JSON.parse(packagejson.toString('utf8'))
+ } catch (ex) {
+ return next(ex)
+ }
+
+ // If we're saving bundled deps, normalize the key before we start
+ if (saveBundle) {
+ var bundle = packagejson.bundleDependencies || packagejson.bundledDependencies
+ delete packagejson.bundledDependencies
+ if (!Array.isArray(bundle)) bundle = []
+ packagejson.bundleDependencies = bundle.sort()
+ }
+
+ var toSave = getThingsToSave(tree)
+ var toRemove = getThingsToRemove(args, tree)
+ var savingTo = {}
+ toSave.forEach(function (pkg) { savingTo[pkg.save] = true })
+ toRemove.forEach(function (pkg) { savingTo[pkg.save] = true })
+
+ Object.keys(savingTo).forEach(function (save) {
+ if (!packagejson[save]) packagejson[save] = {}
+ })
+
+ log.verbose('saving', toSave)
+ toSave.forEach(function (pkg) {
+ packagejson[pkg.save][pkg.name] = pkg.spec
+ if (saveBundle) {
+ var ii = bundle.indexOf(pkg.name)
+ if (ii === -1) bundle.push(pkg.name)
+ }
+ })
+
+ toRemove.forEach(function (pkg) {
+ delete packagejson[pkg.save][pkg.name]
+ if (saveBundle) {
+ bundle = without(bundle, pkg.name)
+ }
+ })
+
+ Object.keys(savingTo).forEach(function (save) {
+ packagejson[save] = sortedObject(packagejson[save])
+ })
+
+ if (saveBundle) {
+ packagejson.bundleDependencies = bundle.sort()
+ }
+
+ var json = JSON.stringify(packagejson, null, 2) + '\n'
+ writeFileAtomic(saveTarget, json, next)
+ }))
+}
+
+var getSaveType = exports.getSaveType = function (args) {
+ validate('A', arguments)
+ var nothingToSave = !args.length
+ var globalInstall = npm.config.get('global')
+ var noSaveFlags = !npm.config.get('save')
+ && !npm.config.get('save-dev')
+ && !npm.config.get('save-optional')
+ if (nothingToSave || globalInstall || noSaveFlags) return null
+
+ if (npm.config.get('save-optional')) return 'optionalDependencies'
+ else if (npm.config.get('save-dev')) return 'devDependencies'
+ else return 'dependencies'
+}
+
+function computeVersionSpec (child) {
+ validate('O', arguments)
+ var requested = child.package._requested || {
+ type: 'version',
+ spec: child.package.version
+ }
+ if (requested.type === 'version' || requested.type === 'range') {
+ var version = child.package.version
+ var rangeDescriptor = ''
+ if (semver.valid(version, true) &&
+ semver.gte(version, '0.1.0', true) &&
+ !npm.config.get('save-exact')) {
+ rangeDescriptor = npm.config.get('save-prefix')
+ }
+ return rangeDescriptor + version
+ } else if (requested.type === 'directory' || requested.type === 'local') {
+ var relativePath = path.relative(child.parent.path, requested.spec)
+ if (/^[.][.]/.test(relativePath)) {
+ return url.format({
+ protocol: 'file',
+ slashes: true,
+ pathname: requested.spec
+ })
+ } else {
+ return url.format({
+ protocol: 'file',
+ slashes: false,
+ pathname: relativePath
+ })
+ }
+ } else {
+ return requested.spec
+ }
+}
+
+function getThingsToSave (tree) {
+ validate('O', arguments)
+ var toSave = tree.children.filter(function (child) {
+ return child.save
+ }).map(function (child) {
+ return {
+ name: child.package.name,
+ spec: computeVersionSpec(child),
+ save: child.save
+ }
+ })
+ return toSave
+}
+
+function getThingsToRemove (args, tree) {
+ validate('AO', arguments)
+ if (!tree.removed) return []
+ var toRemove = tree.removed.map(function (child) {
+ return {
+ name: child.package.name,
+ save: child.save
+ }
+ })
+ var saveType = getSaveType(args)
+ args.forEach(function (arg) {
+ toRemove.push({
+ name: arg,
+ save: saveType
+ })
+ })
+ return toRemove
+}
diff --git a/lib/install/update-package-json.js b/lib/install/update-package-json.js
new file mode 100644
index 000000000..914155909
--- /dev/null
+++ b/lib/install/update-package-json.js
@@ -0,0 +1,30 @@
+'use strict'
+var path = require('path')
+var writeFileAtomic = require('write-file-atomic')
+var sortedObject = require('sorted-object')
+
+var deepSortObject = function (obj, sortBy) {
+ if (!obj || typeof obj !== 'object') return obj
+ if (obj instanceof Array) {
+ return obj.sort(sortBy)
+ }
+ obj = sortedObject(obj)
+ Object.keys(obj).forEach(function (key) {
+ obj[key] = deepSortObject(obj[key])
+ })
+ return obj
+}
+
+module.exports = function (pkg, buildpath, next) {
+ // FIXME: This bundled dance is because we're sticking a big tree of bundled
+ // deps into the parsed package.json – it probably doesn't belong there =/
+ // But the real reason we don't just dump it out is that it's the result
+ // of npm-read-tree, which produces circular data structures, due to the
+ // parent and children keys.
+ var bundled = pkg.package._bundled
+ delete pkg.package._bundled // FIXME
+ var packagejson = deepSortObject(pkg.package)
+ var data = JSON.stringify(packagejson, null, 2) + '\n'
+ pkg.package._bundled = bundled
+ writeFileAtomic(path.resolve(buildpath, 'package.json'), data, next)
+}
diff --git a/lib/install/validate-tree.js b/lib/install/validate-tree.js
new file mode 100644
index 000000000..ec6f279d5
--- /dev/null
+++ b/lib/install/validate-tree.js
@@ -0,0 +1,38 @@
+'use strict'
+var validate = require('aproba')
+var npmInstallChecks = require('npm-install-checks')
+var checkEngine = npmInstallChecks.checkEngine
+var checkPlatform = npmInstallChecks.checkPlatform
+var checkGit = npmInstallChecks.checkGit
+var asyncMap = require('slide').asyncMap
+var chain = require('slide').chain
+var npm = require('../npm.js')
+var andFinishTracker = require('./and-finish-tracker.js')
+var flattenTree = require('./flatten-tree.js')
+var validatePeerDeps = require('./deps.js').validatePeerDeps
+
+module.exports = function (idealTree, log, next) {
+ validate('OOF', arguments)
+ var moduleMap = flattenTree(idealTree)
+ var force = npm.config.get('force')
+ var nodeVersion = npm.config.get('node-version')
+ var strict = npm.config.get('engine-strict')
+
+ var modules = Object.keys(moduleMap).map(function (name) { return moduleMap[name] })
+
+ asyncMap(modules, function (mod, done) {
+ chain([
+ [checkEngine, mod, npm.version, nodeVersion, force, strict],
+ [checkPlatform, mod, force],
+ mod.parent && [checkGit, mod.realpath]
+ ], done)
+ }, andValidatePeerDeps(idealTree, log, andFinishTracker(log, next)))
+}
+
+function andValidatePeerDeps (idealTree, log, next) {
+ validate('OOF', arguments)
+ return function (er) {
+ validatePeerDeps(idealTree, log)
+ next(er)
+ }
+}
diff --git a/lib/link.js b/lib/link.js
index 916ebd6af..4c5622aab 100644
--- a/lib/link.js
+++ b/lib/link.js
@@ -154,7 +154,9 @@ function resultPrinter (pkg, src, dest, rp, cb) {
return parseableOutput(dest, rp || src, cb)
}
if (rp === src) rp = null
+ log.clearProgress()
console.log(where + " -> " + src + (rp ? " -> " + rp: ""))
+ log.showProgress()
cb()
}
@@ -166,6 +168,8 @@ function parseableOutput (dest, rp, cb) {
// *just* print the target folder.
// However, we don't actually ever read the version number, so
// the second field is always blank.
+ log.clearProgress()
console.log(dest + "::" + rp)
+ log.showProgress()
cb()
}
diff --git a/lib/ls.js b/lib/ls.js
index 583c3f0c7..414031ed3 100644
--- a/lib/ls.js
+++ b/lib/ls.js
@@ -6,20 +6,30 @@
module.exports = exports = ls
-var npm = require('./npm.js')
-var readInstalled = require('read-installed')
-var log = require('npmlog')
var path = require('path')
+var url = require('url')
+var readPackageTree = require('read-package-tree')
+var log = require('npmlog')
var archy = require('archy')
var semver = require('semver')
-var url = require('url')
var color = require('ansicolors')
var npa = require('npm-package-arg')
+var iferr = require('iferr')
+var npm = require('./npm.js')
+var logicalTree = require('./install/logical-tree.js')
+var recalculateMetadata = require('./install/deps.js').recalculateMetadata
ls.usage = 'npm ls'
ls.completion = require('./utils/completion/installed-deep.js')
+function andRecalculateMetadata (next) {
+ return function (er, tree) {
+ if (er) return next(er)
+ recalculateMetadata(tree, log, next)
+ }
+}
+
function ls (args, silent, cb) {
if (typeof cb !== 'function') {
cb = silent
@@ -41,15 +51,14 @@ function ls (args, silent, cb) {
})
}
- var depth = npm.config.get('depth')
- var opt = { depth: depth, log: log.warn, dev: true }
- readInstalled(dir, opt, function (er, data) {
+ readPackageTree(dir, andRecalculateMetadata(iferr(cb, function (physicalTree) {
+ var data = logicalTree.asReadInstalled(physicalTree)
pruneNestedExtraneous(data)
filterByEnv(data)
var bfs = bfsify(data, args)
var lite = getLite(bfs)
- if (er || silent) return cb(er, data, lite)
+ if (silent) return cb(null, data, lite)
var long = npm.config.get('long')
var json = npm.config.get('json')
@@ -74,12 +83,23 @@ function ls (args, silent, cb) {
if (args.length && !data._found) process.exitCode = 1
+ var er
// if any errors were found, then complain and exit status 1
if (lite.problems && lite.problems.length) {
er = lite.problems.join('\n')
}
cb(er, data, lite)
+ })))
+}
+
+function translateTree (tree) {
+ var pkg = tree.package || {}
+ pkg._dependencies = pkg.dependencies
+ pkg.dependencies = {}
+ tree.children.forEach(function (child) {
+ pkg.dependencies[child.package.name] = translateTree(child)
})
+ return pkg
}
function pruneNestedExtraneous (data, visited) {
@@ -96,14 +116,14 @@ function pruneNestedExtraneous (data, visited) {
function filterByEnv (data) {
var dev = npm.config.get('dev')
- var production = npm.config.get('production')
- if (dev === production) return
+ var production = npm.config.get('production')
var dependencies = {}
var devDependencies = data.devDependencies || []
Object.keys(data.dependencies).forEach(function (name) {
var keys = Object.keys(devDependencies)
- if (production && keys.indexOf(name) !== -1) return
- if (dev && keys.indexOf(name) === -1) return
+ if (production && !dev && keys.indexOf(name) !== -1) return
+ if (dev && !production && keys.indexOf(name) === -1) return
+ if (!dev && keys.indexOf(name) !== -1 && typeof data.dependencies[name] === 'string') return
dependencies[name] = data.dependencies[name]
})
data.dependencies = dependencies
@@ -155,7 +175,7 @@ function getLite (data, noname) {
}
var deps = (data.dependencies && Object.keys(data.dependencies)) || []
- if (data.length) {
+ if (deps.length) {
lite.dependencies = deps.map(function (d) {
var dep = data.dependencies[d]
if (typeof dep === 'string') {
@@ -302,10 +322,14 @@ function makeArchy_ (data, long, dir, depth, parent, d) {
// add giturl to name@version
if (data._resolved) {
- var type = npa(data._resolved).type
- var isGit = type === 'git' || type === 'hosted'
- if (isGit) {
- out.label += ' (' + data._resolved + ')'
+ try {
+ var type = npa(data._resolved).type
+ var isGit = type === 'git' || type === 'hosted'
+ if (isGit) {
+ out.label += ' (' + data._resolved + ')'
+ }
+ } catch (ex) {
+ // npa threw an exception then it ain't git so whatev
}
}
diff --git a/lib/outdated.js b/lib/outdated.js
index fb48ebfd5..3dcfab69d 100644
--- a/lib/outdated.js
+++ b/lib/outdated.js
@@ -20,21 +20,38 @@ outdated.usage = 'npm outdated [<pkg> [<pkg> ...]]'
outdated.completion = require('./utils/completion/installed-deep.js')
+var os = require('os')
+var url = require('url')
var path = require('path')
-var readJson = require('read-package-json')
-var cache = require('./cache.js')
+var log = require('npmlog')
+var readPackageTree = require('read-package-tree')
var asyncMap = require('slide').asyncMap
-var npm = require('./npm.js')
-var url = require('url')
var color = require('ansicolors')
var styles = require('ansistyles')
var table = require('text-table')
var semver = require('semver')
-var os = require('os')
-var mapToRegistry = require('./utils/map-to-registry.js')
var npa = require('npm-package-arg')
-var readInstalled = require('read-installed')
+var logicalTree = require('./install/logical-tree.js')
+var cache = require('./cache.js')
+var npm = require('./npm.js')
var long = npm.config.get('long')
+var mapToRegistry = require('./utils/map-to-registry.js')
+
+function uniqName (item) {
+ return item[0].path + '|' + item[7]
+}
+
+function uniq (list) {
+ var uniqed = []
+ var seen = {}
+ list.forEach(function (item) {
+ var name = uniqName(item)
+ if (seen[name]) return
+ seen[name] = true
+ uniqed.push(item)
+ })
+ return uniqed
+}
function outdated (args, silent, cb) {
if (typeof cb !== 'function') {
@@ -46,65 +63,66 @@ function outdated (args, silent, cb) {
// default depth for `outdated` is 0 (cf. `ls`)
if (npm.config.get('depth') === Infinity) npm.config.set('depth', 0)
- outdated_(args, dir, {}, 0, function (er, list) {
- if (!list) list = []
- if (er || silent || list.length === 0) return cb(er, list)
- list.sort(function(a, b) {
- var aa = a[1].toLowerCase()
- , bb = b[1].toLowerCase()
- return aa === bb ? 0
- : aa < bb ? -1 : 1
- })
- if (npm.config.get('json')) {
- console.log(makeJSON(list))
- } else if (npm.config.get('parseable')) {
- console.log(makeParseable(list))
- } else {
- var outList = list.map(makePretty)
- var outHead = [
- 'Package',
- 'Current',
- 'Wanted',
- 'Latest',
- 'Location'
- ]
- if (long) outHead.push('Package Type')
- var outTable = [outHead].concat(outList)
-
- if (npm.color) {
- outTable[0] = outTable[0].map(function (heading) {
- return styles.underline(heading)
- })
- }
+ readPackageTree(dir, function (er, physicalTree) {
+ var tree = logicalTree(physicalTree)
+ outdated_(args, '', tree, {}, 0, function (er, list) {
+ list = uniq(list || []).sort(function (aa, bb) {
+ return aa[0].path.localeCompare(bb[0].path)
+ })
+ if (er || silent || list.length === 0) return cb(er, list)
+ log.disableProgress()
+ if (npm.config.get('json')) {
+ console.log(makeJSON(list))
+ } else if (npm.config.get('parseable')) {
+ console.log(makeParseable(list))
+ } else {
+ var outList = list.map(makePretty)
+ var outHead = [ 'Package',
+ 'Current',
+ 'Wanted',
+ 'Latest',
+ 'Location'
+ ]
+ if (long) outHead.push('Package Type')
+ var outTable = [outHead].concat(outList)
+
+ if (npm.color) {
+ outTable[0] = outTable[0].map(function (heading) {
+ return styles.underline(heading)
+ })
+ }
- var tableOpts = {
- align: ['l', 'r', 'r', 'r', 'l'],
- stringLength: function (s) { return ansiTrim(s).length }
+ var tableOpts = {
+ align: ['l', 'r', 'r', 'r', 'l'],
+ stringLength: function (s) { return ansiTrim(s).length }
+ }
+ console.log(table(outTable, tableOpts))
}
- console.log(table(outTable, tableOpts))
- }
- cb(null, list)
+ cb(null, list)
+ })
})
}
// [[ dir, dep, has, want, latest, type ]]
function makePretty (p) {
- var dep = p[1]
- var dir = path.resolve(p[0], 'node_modules', dep)
+ var dep = p[0]
+ var depname = p[1]
+ var dir = dep.path
var has = p[2]
var want = p[3]
var latest = p[4]
var type = p[6]
+ var deppath = p[7]
if (!npm.config.get('global')) {
dir = path.relative(process.cwd(), dir)
}
- var columns = [ dep,
+ var columns = [ depname,
has || 'MISSING',
want,
latest,
- dirToPrettyLocation(dir)
+ deppath
]
if (long) columns[5] = type
@@ -125,15 +143,25 @@ function ansiTrim (str) {
return str.replace(r, '')
}
-function dirToPrettyLocation (dir) {
- return dir.replace(/^node_modules[/\\]/, '')
- .replace(/[[/\\]node_modules[/\\]/g, ' > ')
+function depToPrettyLocation (dep) {
+ var depname = dep.package.name
+ var parentLocation
+
+ if (dep.requiredBy && dep.requiredBy.length > 1) {
+ parentLocation = '[ ' + dep.requiredBy.map(function (parent) {
+ return depToPrettyLocation(parent)
+ }).join(', ') + ' ]'
+ } else if (dep.requiredBy && dep.requiredBy.length) {
+ parentLocation = depToPrettyLocation(dep.requiredBy[0])
+ }
+ return parentLocation ? parentLocation + ' > ' + depname : depname
}
function makeParseable (list) {
return list.map(function (p) {
- var dep = p[1]
- var dir = path.resolve(p[0], 'node_modules', dep)
+ var dep = p[0]
+ var depname = p[1]
+ var dir = dep.path
var has = p[2]
var want = p[3]
var latest = p[4]
@@ -141,9 +169,9 @@ function makeParseable (list) {
var out = [
dir,
- dep + '@' + want,
- (has ? (dep + '@' + has) : 'MISSING'),
- dep + '@' + latest
+ depname + '@' + want,
+ (has ? (depname + '@' + has) : 'MISSING'),
+ depname + '@' + latest
]
if (long) out.push(type)
@@ -154,137 +182,87 @@ function makeParseable (list) {
function makeJSON (list) {
var out = {}
list.forEach(function (p) {
- var dir = path.resolve(p[0], 'node_modules', p[1])
+ var dep = p[0]
+ var depname = p[1]
+ var dir = dep.path
+ var has = p[2]
+ var want = p[3]
+ var latest = p[4]
+ var type = p[6]
if (!npm.config.get('global')) {
dir = path.relative(process.cwd(), dir)
}
- out[p[1]] = { current: p[2],
- wanted: p[3],
- latest: p[4],
+ out[depname] = { current: has,
+ wanted: want,
+ latest: latest,
location: dir
}
- if (long) out[p[1]].type = p[6]
+ if (long) out[depname].type = type
})
return JSON.stringify(out, null, 2)
}
-function outdated_ (args, dir, parentHas, depth, cb) {
- // get the deps from package.json, or {<dir/node_modules/*>:'*'}
- // asyncMap over deps:
- // shouldHave = cache.add(dep, req).version
- // if has === shouldHave then
- // return outdated(args, dir/node_modules/dep, parentHas + has)
- // else if dep in args or args is empty
- // return [dir, dep, has, shouldHave]
-
+function outdated_ (args, path, tree, parentHas, depth, cb) {
+ if (!tree.package) tree.package = {}
+ if (path && tree.package.name) path += ' > ' + tree.package.name
+ if (!path && tree.package.name) path = tree.package.name
if (depth > npm.config.get('depth')) {
return cb(null, [])
}
- var deps = null
var types = {}
- readJson(path.resolve(dir, 'package.json'), function (er, d) {
- d = d || {}
- if (er && er.code !== 'ENOENT' && er.code !== 'ENOTDIR') return cb(er)
- deps = (er) ? true : (d.dependencies || {})
- if (!er) {
- Object.keys(deps).forEach(function (k) {
- types[k] = 'dependencies'
- })
- }
-
- if (npm.config.get('save-dev')) {
- deps = d.devDependencies || {}
- Object.keys(deps).forEach(function (k) {
- types[k] = 'devDependencies'
- })
-
- return next()
- }
-
- if (npm.config.get('save')) {
- // remove optional dependencies from dependencies during --save.
- Object.keys(d.optionalDependencies || {}).forEach(function (k) {
- delete deps[k]
- })
- return next()
- }
-
- if (npm.config.get('save-optional')) {
- deps = d.optionalDependencies || {}
- Object.keys(deps).forEach(function (k) {
- types[k] = 'optionalDependencies'
- })
- return next()
- }
-
- var doUpdate = npm.config.get('dev') ||
- (!npm.config.get('production') &&
- !Object.keys(parentHas).length &&
- !npm.config.get('global'))
-
- if (!er && d && doUpdate) {
- Object.keys(d.devDependencies || {}).forEach(function (k) {
- if (!(k in parentHas)) {
- deps[k] = d.devDependencies[k]
- types[k] = 'devDependencies'
- }
- })
- }
- return next()
+ var pkg = tree.package || {}
+ var deps = tree.children || []
+ deps.forEach(function (dep) {
+ types[dep.package.name] = 'dependencies'
})
-
- var has = null
- readInstalled(path.resolve(dir), { dev: true }, function (er, data) {
- if (er) {
- has = Object.create(parentHas)
- return next()
- }
- var pkgs = Object.keys(data.dependencies)
- pkgs = pkgs.filter(function (p) {
- return !p.match(/^[\._-]/)
+ if (npm.config.get('save-dev')) {
+ deps = deps.filter(function (dep) { return pkg.devDependencies[dep.package.name] })
+ deps.forEach(function (dep) {
+ types[dep.package.name] = 'devDependencies'
})
- asyncMap(pkgs, function (pkg, cb) {
- var jsonFile = path.resolve(dir, 'node_modules', pkg, 'package.json')
- readJson(jsonFile, function (er, d) {
- if (er && er.code !== 'ENOENT' && er.code !== 'ENOTDIR') return cb(er)
- if (d && d.name && d.private) delete deps[d.name]
- cb(null, er ? [] : [[d.name, d.version, d._from]])
- })
- }, function (er, pvs) {
- if (er) return cb(er)
- has = Object.create(parentHas)
- pvs.forEach(function (pv) {
- has[pv[0]] = {
- version: pv[1],
- from: pv[2]
- }
- })
-
- next()
+ } else if (npm.config.get('save')) {
+ // remove optional dependencies from dependencies during --save.
+ deps = deps.filter(function (dep) { return !pkg.optionalDependencies[dep.package.name] })
+ } else if (npm.config.get('save-optional')) {
+ deps = deps.filter(function (dep) { return pkg.optionalDependencies[dep.package.name] })
+ deps.forEach(function (dep) {
+ types[dep.package.name] = 'optionalDependencies'
})
- })
+ }
- function next () {
- if (!has || !deps) return
- if (deps === true) {
- deps = Object.keys(has).reduce(function (l, r) {
- l[r] = 'latest'
- return l
- }, {})
+ var has = Object.create(parentHas)
+ tree.children.forEach(function (child) {
+ if (child.package.name && child.package.private) delete deps[child.package.name]
+ has[child.package.name] = {
+ version: child.package.version,
+ from: child.package._from
}
+ })
- // now get what we should have, based on the dep.
- // if has[dep] !== shouldHave[dep], then cb with the data
- // otherwise dive into the folder
- asyncMap(Object.keys(deps), function (dep, cb) {
- if (!long) return shouldUpdate(args, dir, dep, has, deps[dep], depth, cb)
-
- shouldUpdate(args, dir, dep, has, deps[dep], depth, cb, types[dep])
- }, cb)
+ if (deps === true) {
+ deps = Object.keys(has).reduce(function (l, r) {
+ l[r] = 'latest'
+ return l
+ }, {})
}
+
+ // now get what we should have, based on the dep.
+ // if has[dep] !== shouldHave[dep], then cb with the data
+ // otherwise dive into the folder
+ asyncMap(deps, function (dep, cb) {
+ var name = dep.package.name
+ var required = (tree.package.dependencies || {})[name] ||
+ (tree.package.optionalDependencies || {})[name] ||
+ (tree.package.devDependencies || {})[name] ||
+ dep.package._requested && dep.package._requested.spec ||
+ '*'
+ if (!long) return shouldUpdate(args, dep, name, has, required, depth, path, cb)
+
+ shouldUpdate(args, dep, name, has, required, depth, path, cb, types[name])
+ }, cb)
}
-function shouldUpdate (args, dir, dep, has, req, depth, cb, type) {
+function shouldUpdate (args, tree, dep, has, req, depth, path, cb, type) {
// look up the most recent version.
// if that's what we already have, or if it's not on the args list,
// then dive into it. Otherwise, cb() with the data.
@@ -296,7 +274,8 @@ function shouldUpdate (args, dir, dep, has, req, depth, cb, type) {
// show user that no viable version can be found
if (er) return cb(er)
outdated_(args,
- path.resolve(dir, 'node_modules', dep),
+ path,
+ tree,
has,
depth + 1,
cb)
@@ -304,9 +283,9 @@ function shouldUpdate (args, dir, dep, has, req, depth, cb, type) {
function doIt (wanted, latest) {
if (!long) {
- return cb(null, [[ dir, dep, curr && curr.version, wanted, latest, req]])
+ return cb(null, [[ tree, dep, curr && curr.version, wanted, latest, req, null, path]])
}
- cb(null, [[ dir, dep, curr && curr.version, wanted, latest, req, type]])
+ cb(null, [[ tree, dep, curr && curr.version, wanted, latest, req, type, path]])
}
if (args.length && args.indexOf(dep) === -1) return skip()
diff --git a/lib/rebuild.js b/lib/rebuild.js
index ab372c6ec..70c33e91e 100644
--- a/lib/rebuild.js
+++ b/lib/rebuild.js
@@ -29,9 +29,11 @@ function rebuild (args, cb) {
function cleanBuild (folders, set, cb) {
npm.commands.build(folders, function (er) {
if (er) return cb(er)
+ log.clearProgress()
console.log(folders.map(function (f) {
return set[f] + " " + f
}).join("\n"))
+ log.showProgress()
cb()
})
}
diff --git a/lib/unbuild.js b/lib/unbuild.js
index d5fe0e6a0..998d6e39f 100644
--- a/lib/unbuild.js
+++ b/lib/unbuild.js
@@ -1,4 +1,5 @@
module.exports = unbuild
+module.exports.rmStuff = rmStuff
unbuild.usage = "npm unbuild <folder>\n(this is plumbing)"
var readJson = require("read-package-json")
@@ -34,7 +35,9 @@ function unbuild_ (silent) { return function (folder, cb_) {
( [ [lifecycle, pkg, "preuninstall", folder, false, true]
, [lifecycle, pkg, "uninstall", folder, false, true]
, !silent && function(cb) {
+ log.clearProgress()
console.log("unbuild " + pkg._id)
+ log.showProgress()
cb()
}
, [rmStuff, pkg, folder]
diff --git a/lib/uninstall.js b/lib/uninstall.js
index 868037ac8..500a73242 100644
--- a/lib/uninstall.js
+++ b/lib/uninstall.js
@@ -1,127 +1,54 @@
+'use strict'
// remove a package.
module.exports = uninstall
+module.exports.Uninstaller = Uninstaller
uninstall.usage = 'npm uninstall <name>[@<version> [<name>[@<version>] ...]' +
'\nnpm rm <name>[@<version> [<name>[@<version>] ...]'
-uninstall.completion = require('./utils/completion/installed-shallow.js')
-
-var fs = require('graceful-fs')
-var writeFileAtomic = require('write-file-atomic')
-var log = require('npmlog')
-var readJson = require('read-package-json')
+var util = require('util')
var path = require('path')
+var validate = require('aproba')
+var chain = require('slide').chain
var npm = require('./npm.js')
-var asyncMap = require('slide').asyncMap
+var Installer = require('./install.js').Installer
+var getSaveType = require('./install/save.js').getSaveType
+var removeDeps = require('./install/deps.js').removeDeps
+var loadExtraneous = require('./install/deps.js').loadExtraneous
-function uninstall (args, cb) {
- // this is super easy
- // get the list of args that correspond to package names in either
- // the global npm.dir,
- // then call unbuild on all those folders to pull out their bins
- // and mans and whatnot, and then delete the folder.
+uninstall.completion = require('./utils/completion/installed-shallow.js')
- var nm = npm.dir
- if (args.length === 1 && args[0] === '.') args = []
- if (args.length) return uninstall_(args, nm, cb)
+function uninstall (args, cb) {
+ validate('AF', arguments)
+ // the /path/to/node_modules/..
+ var where = path.resolve(npm.dir, '..')
+ var dryrun = !!npm.config.get('dry-run')
- // remove this package from the global space, if it's installed there
- readJson(path.resolve(npm.localPrefix, 'package.json'), function (er, pkg) {
- if (er && er.code !== 'ENOENT' && er.code !== 'ENOTDIR') return cb(er)
- if (er) return cb(uninstall.usage)
- uninstall_([pkg.name],
- npm.globalDir,
- cb)
+ args = args.filter(function (a) {
+ return path.resolve(a) !== where
})
-}
-
-function uninstall_ (args, nm, cb) {
- // if we've been asked to --save or --save-dev or --save-optional,
- // then also remove it from the associated dependencies hash.
- var s = npm.config.get('save')
- var d = npm.config.get('save-dev')
- var o = npm.config.get('save-optional')
- if (s || d || o) {
- cb = saver(args, nm, cb)
- }
+ if (!args.length) return cb(uninstall.usage)
- asyncMap(args, function (arg, cb) {
- // uninstall .. should not delete /usr/local/lib/node_modules/..
- var p = path.join(path.resolve(nm), path.join('/', arg))
- if (path.resolve(p) === nm) {
- log.warn('uninstall', 'invalid argument: %j', arg)
- return cb(null, [])
- }
- fs.lstat(p, function (er) {
- if (er) {
- log.warn('uninstall', 'not installed in %s: %j', nm, arg)
- return cb(null, [])
- }
- cb(null, p)
- })
- }, function (er, folders) {
- if (er) return cb(er)
- asyncMap(folders, npm.commands.unbuild, cb)
- })
+ new Uninstaller(where, dryrun, args).run(cb)
}
-function saver (args, nm, cb_) {
- return cb
- function cb (er, data) {
- var s = npm.config.get('save')
- var d = npm.config.get('save-dev')
- var o = npm.config.get('save-optional')
- if (er || !(s || d || o)) return cb_(er, data)
- var pj = path.resolve(nm, '..', 'package.json')
- // don't use readJson here, because we don't want all the defaults
- // filled in, for mans and other bs.
- fs.readFile(pj, 'utf8', function (er, json) {
- var pkg
- try {
- pkg = JSON.parse(json)
- } catch (_) {}
- if (!pkg) return cb_(null, data)
-
- var bundle
- if (npm.config.get('save-bundle')) {
- bundle = pkg.bundleDependencies || pkg.bundledDependencies
- if (!Array.isArray(bundle)) bundle = undefined
- }
-
- var changed = false
- args.forEach(function (a) {
- ; [ [s, 'dependencies'],
- [o, 'optionalDependencies'],
- [d, 'devDependencies'] ].forEach(function (f) {
- var flag = f[0]
- var field = f[1]
- if (!flag || !pkg[field] || !pkg[field].hasOwnProperty(a)) return
- changed = true
-
- if (bundle) {
- var i = bundle.indexOf(a)
- if (i !== -1) bundle.splice(i, 1)
- }
+function Uninstaller (where, dryrun, args) {
+ validate('SBA', arguments)
+ Installer.call(this, where, dryrun, args)
+}
+util.inherits(Uninstaller, Installer)
- delete pkg[field][a]
- })
- })
- if (!changed) return cb_(null, data)
+Uninstaller.prototype.loadAllDepsIntoIdealTree = function (cb) {
+ validate('F', arguments)
+ var saveDeps = getSaveType(this.args)
- if (bundle) {
- delete pkg.bundledDependencies
- if (bundle.length) {
- pkg.bundleDependencies = bundle
- } else {
- delete pkg.bundleDependencies
- }
- }
+ var cg = this.progress.loadAllDepsIntoIdealTree
+ var steps = []
- writeFileAtomic(pj, JSON.stringify(pkg, null, 2) + '\n', function (er) {
- return cb_(er, data)
- })
- })
- }
+ steps.push(
+ [removeDeps, this.args, this.idealTree, saveDeps, cg.newGroup('removeDeps')],
+ [loadExtraneous, this.idealTree, cg.newGroup('loadExtraneous')])
+ chain(steps, cb)
}
diff --git a/lib/update.js b/lib/update.js
index f9514c658..ead9945ed 100644
--- a/lib/update.js
+++ b/lib/update.js
@@ -1,58 +1,61 @@
-/*
-for each pkg in prefix that isn't a git repo
- look for a new version of pkg that satisfies dep
- if so, install it.
- if not, then update it
-*/
-
module.exports = update
update.usage = 'npm update [pkg]'
-var npm = require('./npm.js')
-var asyncMap = require('slide').asyncMap
+var url = require('url')
var log = require('npmlog')
-
- // load these, just so that we know that they'll be available, in case
- // npm itself is getting overwritten.
-var install = require('./install.js')
-var build = require('./build.js')
+var chain = require('slide').chain
+var npm = require('./npm.js')
+var Installer = require('./install.js').Installer
update.completion = npm.commands.outdated.completion
function update (args, cb) {
- npm.commands.outdated(args, true, function (er, outdated) {
+ var dryrun = false
+ if (npm.config.get('dry-run')) dryrun = true
+
+ npm.commands.outdated(args, true, function (er, rawOutdated) {
if (er) return cb(er)
+ var outdated = rawOutdated.map(function (ww) {
+ return {
+ dep: ww[0],
+ depname: ww[1],
+ current: ww[2],
+ wanted: ww[3],
+ latest: ww[4],
+ req: ww[5],
+ what: ww[1] + '@' + ww[3]
+ }
+ })
var wanted = outdated.filter(function (ww) {
- var dep = ww[1]
- var current = ww[2]
- var wanted = ww[3]
- var latest = ww[4]
- if (current === wanted && wanted !== latest) {
+ if (ww.current === ww.wanted && ww.wanted !== ww.latest) {
log.verbose(
'outdated',
- 'not updating', dep,
+ 'not updating', ww.depname,
"because it's currently at the maximum version that matches its specified semver range"
)
}
- return current !== wanted
+ return ww.current !== ww.wanted
})
if (wanted.length === 0) return cb()
log.info('outdated', 'updating', wanted)
- asyncMap(wanted, function (ww, cb) {
- // [[ dir, dep, has, want, req ]]
- var where = ww[0]
- var dep = ww[1]
- var want = ww[3]
- var what = dep + '@' + want
- var req = ww[5]
- var url = require('url')
+ var toInstall = {}
+ wanted.forEach(function (ww) {
+ if (ww.current === ww.wanted) return
// use the initial installation method (repo, tar, git) for updating
- if (url.parse(req).protocol) what = req
- npm.commands.install(where, what, cb)
- }, cb)
+ if (url.parse(ww.req).protocol) ww.what = ww.req
+
+ if (toInstall[ww.dep.path]) {
+ toInstall[ww.dep.path].push(ww.what)
+ } else {
+ toInstall[ww.dep.path] = [ww.what]
+ }
+ })
+ chain(Object.keys(toInstall).map(function (where) {
+ return [new Installer(where, dryrun, toInstall[where]), 'run']
+ }), cb)
})
}
diff --git a/lib/utils/error-handler.js b/lib/utils/error-handler.js
index 6848a3239..f5a7df082 100644
--- a/lib/utils/error-handler.js
+++ b/lib/utils/error-handler.js
@@ -367,6 +367,15 @@ function errorHandler (er) {
].join("\n"))
break
+ case "EMISSINGARG":
+ case "EUNKNOWNTYPE":
+ case "EINVALIDTYPE":
+ case "ETOOMANYARGS":
+ log.error("typeerror", [er.stack
+ ,"This is an error with npm itself. Please report this error at:"
+ ," <http://github.com/npm/npm/issues>"
+ ].join("\n"))
+ break
default:
log.error("", er.message || er)
log.error("", ["", "If you need help, you may report this error at:"
diff --git a/lib/utils/locker.js b/lib/utils/locker.js
index 293d2da05..4a8f37271 100644
--- a/lib/utils/locker.js
+++ b/lib/utils/locker.js
@@ -61,9 +61,11 @@ function unlock (base, name, cb) {
})
}
else {
- throw new Error(
+ var notLocked = new Error(
"Attempt to unlock " + resolve(base, name) + ", which hasn't been locked"
)
+ notLocked.code = "ENOTLOCKED"
+ throw notLocked
}
}
diff --git a/lib/utils/tar.js b/lib/utils/tar.js
index b148bc86b..3d0557e68 100644
--- a/lib/utils/tar.js
+++ b/lib/utils/tar.js
@@ -1,22 +1,25 @@
// commands for packing and unpacking tarballs
// this file is used by lib/cache.js
-var npm = require('../npm.js')
var fs = require('graceful-fs')
+var path = require('path')
var writeFileAtomic = require('write-file-atomic')
var writeStreamAtomic = require('fs-write-stream-atomic')
-var path = require('path')
var log = require('npmlog')
var uidNumber = require('uid-number')
-var rm = require('./gently-rm.js')
var readJson = require('read-package-json')
-var myUid = process.getuid && process.getuid()
-var myGid = process.getgid && process.getgid()
var tar = require('tar')
var zlib = require('zlib')
var fstream = require('fstream')
var Packer = require('fstream-npm')
-var lifecycle = require('./lifecycle.js')
+var iferr = require('iferr')
+var inherits = require('inherits')
+var npm = require('../npm.js')
+var rm = require('./gently-rm.js')
+var myUid = process.getuid && process.getuid()
+var myGid = process.getgid && process.getgid()
+var readPackageTree = require('read-package-tree')
+var union = require('lodash.union')
if (process.env.SUDO_UID && myUid === 0) {
if (!isNaN(process.env.SUDO_UID)) myUid = +process.env.SUDO_UID
@@ -26,29 +29,132 @@ if (process.env.SUDO_UID && myUid === 0) {
exports.pack = pack
exports.unpack = unpack
-function pack (tarball, folder, pkg, dfc, cb) {
+function pack (tarball, folder, pkg, cb) {
log.verbose('tar pack', [tarball, folder])
- if (typeof cb !== 'function') {
- cb = dfc
- dfc = false
- }
log.verbose('tarball', tarball)
log.verbose('folder', folder)
- if (dfc) {
- // do fancy crap
- return lifecycle(pkg, 'prepublish', folder, function (er) {
- if (er) return cb(er)
- pack_(tarball, folder, pkg, cb)
- })
- } else {
- pack_(tarball, folder, pkg, cb)
+ var recalculateMetadata = require('../install/deps.js').recalculateMetadata
+ readPackageTree(folder, iferr(cb, function (tree) {
+ recalculateMetadata(tree, log.newGroup('pack:' + pkg), iferr(cb, function () {
+ pack_(tarball, folder, tree, pkg, cb)
+ }))
+ }))
+}
+
+function BundledPacker (props) {
+ Packer.call(this, props)
+ this.tree = props.tree
+ var flattenTree = require('../install/flatten-tree.js')
+ this.flatTree = props.flatTree || flattenTree(props.tree)
+}
+inherits(BundledPacker, Packer)
+
+BundledPacker.prototype.getChildProps = function (stat) {
+ var props = Packer.prototype.getChildProps.call(this, stat)
+ props.tree = this.tree
+ props.flatTree = this.flatTree
+ return props
+}
+
+BundledPacker.prototype.applyIgnores = function (entry, partial, entryObj) {
+ // package.json files can never be ignored.
+ if (entry === 'package.json') return true
+
+ // readme files should never be ignored.
+ if (entry.match(/^readme(\.[^\.]*)$/i)) return true
+
+ // license files should never be ignored.
+ if (entry.match(/^(license|licence)(\.[^\.]*)?$/i)) return true
+
+ // changelogs should never be ignored.
+ if (entry.match(/^(changes|changelog|history)(\.[^\.]*)?$/i)) return true
+
+ // special rules. see below.
+ if (entry === 'node_modules' && this.packageRoot) return true
+
+ // some files are *never* allowed under any circumstances
+ if (entry === '.git' ||
+ entry === '.lock-wscript' ||
+ entry.match(/^\.wafpickle-[0-9]+$/) ||
+ entry === 'CVS' ||
+ entry === '.svn' ||
+ entry === '.hg' ||
+ entry.match(/^\..*\.swp$/) ||
+ entry === '.DS_Store' ||
+ entry.match(/^\._/) ||
+ entry === 'npm-debug.log'
+ ) {
+ return false
+ }
+
+ // in a node_modules folder, we only include bundled dependencies
+ // also, prevent packages in node_modules from being affected
+ // by rules set in the containing package, so that
+ // bundles don't get busted.
+ // Also, once in a bundle, everything is installed as-is
+ // To prevent infinite cycles in the case of cyclic deps that are
+ // linked with npm link, even in a bundle, deps are only bundled
+ // if they're not already present at a higher level.
+ if (this.bundleMagic) {
+ // bubbling up. stop here and allow anything the bundled pkg allows
+ if (entry.indexOf('/') !== -1) return true
+
+ // never include the .bin. It's typically full of platform-specific
+ // stuff like symlinks and .cmd files anyway.
+ if (entry === '.bin') return false
+
+ // the package root.
+ var p = this.parent
+ // the package before this one.
+ var pp = p && p.parent
+
+ // if this entry has already been bundled, and is a symlink,
+ // and it is the *same* symlink as this one, then exclude it.
+ if (pp && pp.bundleLinks && this.bundleLinks &&
+ pp.bundleLinks[entry] &&
+ pp.bundleLinks[entry] === this.bundleLinks[entry]) {
+ return false
+ }
+
+ // since it's *not* a symbolic link, if we're *already* in a bundle,
+ // then we should include everything.
+ if (pp && pp.package && pp.basename === 'node_modules') {
+ return true
+ }
+
+ // only include it at this point if it's a bundleDependency
+ return this.isBundled(entry)
+ }
+ // if (this.bundled) return true
+
+ return Packer.prototype.applyIgnores.call(this, entry, partial, entryObj)
+}
+
+function nameMatch (name) { return function (other) { return name === other.package.name } }
+
+BundledPacker.prototype.isBundled = function (name) {
+ var bd = this.package && this.package.bundleDependencies
+ if (!bd) return false
+ if (bd.indexOf(name) !== -1) return true
+ var pkg = this.tree.children.filter(nameMatch(name))[0]
+ if (!pkg) return false
+ var requiredBy = union([], pkg.package._requiredBy)
+ while (requiredBy.length) {
+ var req = requiredBy.shift()
+ var reqPkg = this.flatTree[req]
+ if (!reqPkg) continue
+ if (reqPkg.parent === this.tree && bd.indexOf(reqPkg.package.name) !== -1) {
+ return true
+ }
+ requiredBy = union(requiredBy, reqPkg.package._requiredBy)
}
+ return false
}
-function pack_ (tarball, folder, pkg, cb) {
- new Packer({ path: folder, type: 'Directory', isDirectory: true })
+function pack_ (tarball, folder, tree, pkg, cb) {
+ new BundledPacker({ path: folder, tree: tree, type: 'Directory', isDirectory: true })
.on('error', function (er) {
if (er) log.error('tar pack', 'Error reading ' + folder)
return cb(er)
diff --git a/test/tap/404-parent.js b/test/tap/404-parent.js
index eb8ae9bb1..e3f49f7ba 100644
--- a/test/tap/404-parent.js
+++ b/test/tap/404-parent.js
@@ -48,7 +48,10 @@ function plugin (server) {
function performInstall (cb) {
mr({port : common.port, plugin : plugin}, function (er, s) { // create mock registry.
npm.load({registry: common.registry}, function () {
- npm.commands.install(pkg, [], function (err) {
+ var pwd = process.cwd()
+ process.chdir(pkg)
+ npm.commands.install([], function (err) {
+ process.chdir(pwd)
cb(err)
s.close() // shutdown mock npm server.
})
diff --git a/test/tap/dedupe.js b/test/tap/dedupe.js
index 7b80102b3..d9dcb0c41 100644
--- a/test/tap/dedupe.js
+++ b/test/tap/dedupe.js
@@ -50,9 +50,8 @@ test('dedupe finds the common module and moves it up one level', function (t) {
t.ifError(err, 'successfully deduped against previous install')
t.notOk(code, 'npm dedupe exited with code')
- t.ok(existsSync(path.join(pkg, 'node_modules', 'minimist')))
- t.notOk(existsSync(path.join(pkg, 'node_modules', 'checker')))
-
+ t.ok(existsSync(path.join(pkg, 'node_modules', 'minimist')), 'minimist module exists')
+ t.notOk(existsSync(path.join(pkg, 'node_modules', 'checker')), 'checker module does not')
t.end()
}
)
diff --git a/test/tap/github-shortcut.js b/test/tap/hosted-shortcut.js
index 598aa6864..a4e83b9c9 100644
--- a/test/tap/github-shortcut.js
+++ b/test/tap/hosted-shortcut.js
@@ -25,8 +25,10 @@ test('setup', function (t) {
test('github-shortcut', function (t) {
var cloneUrls = [
['git://github.com/foo/private.git', 'GitHub shortcuts try git URLs first'],
- ['https://github.com/foo/private.git', 'GitHub shortcuts try HTTPS URLs third'],
- ['git@github.com:foo/private.git', 'GitHub shortcuts try SSH second']
+ ['https://github.com/foo/private.git', 'GitHub shortcuts try HTTPS URLs second'],
+ ['git@github.com:foo/private.git', 'GitHub shortcuts try SSH third'],
+ ['https://bitbucket.org/foo/private.git', 'bitbucket shortcuts try HTTPS URLs first'],
+ ['git@bitbucket.org:foo/private.git', 'bitbucket shortcuts try SSH second']
]
var npm = requireInject.installGlobally('../../lib/npm.js', {
'child_process': {
@@ -51,11 +53,13 @@ test('github-shortcut', function (t) {
registry: common.registry,
loglevel: 'silent'
}
+ t.plan(1 + cloneUrls.length)
npm.load(opts, function (er) {
t.ifError(er, 'npm loaded without error')
npm.commands.install(['foo/private'], function (er, result) {
- t.ok(er, 'mocked install failed as expected')
- t.end()
+ npm.commands.install(['bitbucket:foo/private'], function (er, result) {
+ t.end()
+ })
})
})
})
diff --git a/test/tap/ls-l-depth-0.js b/test/tap/ls-l-depth-0.js
index 3b5ae4d20..5d301c73f 100644
--- a/test/tap/ls-l-depth-0.js
+++ b/test/tap/ls-l-depth-0.js
@@ -62,7 +62,7 @@ test('#6311: npm ll --depth=0 duplicates listing', function (t) {
t.notOk(stderr, 'npm install ran silently')
t.equal(
stdout.trim(),
- 'glock@1.8.7 node_modules/glock\n└── underscore@1.5.1',
+ '+ underscore@1.5.1 node_modules/underscore\n+ glock@1.8.7 node_modules/glock',
'got expected install output'
)