Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/npm/cli.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
authorForrest L Norvell <forrest@npmjs.com>2014-04-30 04:32:48 +0400
committerForrest L Norvell <forrest@npmjs.com>2014-05-08 05:00:50 +0400
commit355bb7e0f3bc92fb9bc6ed47f5ce3b059e5ff87f (patch)
tree9b4016a9ad171405dda4c1491e77cd1a22387c5b /lib
parentcc8dd4e63294443be8798bae881ef9ebb4b728c7 (diff)
refactor add* into separate files
Diffstat (limited to 'lib')
-rw-r--r--lib/cache.js1043
-rw-r--r--lib/cache/add-local-tarball.js304
-rw-r--r--lib/cache/add-local.js121
-rw-r--r--lib/cache/add-named.js318
-rw-r--r--lib/cache/add-remote-git.js288
-rw-r--r--lib/cache/add-remote-tarball.js106
-rw-r--r--lib/cache/get-stat.js10
-rw-r--r--lib/cache/maybe-github.js32
-rw-r--r--lib/utils/depr-check.js15
9 files changed, 1207 insertions, 1030 deletions
diff --git a/lib/cache.js b/lib/cache.js
index 88aff8038..c6e2a4865 100644
--- a/lib/cache.js
+++ b/lib/cache.js
@@ -1,5 +1,4 @@
'use strict';
-/* jshint node: true */
// XXX lib/utils/tar.js and this file need to be rewritten.
@@ -58,37 +57,22 @@ cache.read = read
cache.clean = clean
cache.unpack = unpack
-var mkdir = require("mkdirp")
- , spawn = require("child_process").spawn
- , exec = require("child_process").execFile
- , once = require("once")
- , fetch = require("./utils/fetch.js")
- , npm = require("./npm.js")
+var npm = require("./npm.js")
, fs = require("graceful-fs")
, rm = require("./utils/gently-rm.js")
, readJson = require("read-package-json")
- , registry = npm.registry
, log = require("npmlog")
, path = require("path")
- , sha = require("sha")
+ , url = require("url")
, asyncMap = require("slide").asyncMap
- , semver = require("semver")
, tar = require("./utils/tar.js")
, fileCompletion = require("./utils/completion/file-completion.js")
- , url = require("url")
- , chownr = require("chownr")
- , crypto = require("crypto")
- , retry = require("retry")
- , zlib = require("zlib")
- , chmodr = require("chmodr")
- , which = require("which")
, isGitUrl = require("./utils/is-git-url.js")
- , pathIsInside = require("path-is-inside")
- , http = require("http")
- , getCacheStat = require("./cache/get-stat.js")
- , locker = require("./utils/locker.js")
- , lock = locker.lock
- , unlock = locker.unlock
+ , deprCheck = require("./utils/depr-check.js")
+ , addNamed = require("./cache/add-named.js")
+ , addLocal = require("./cache/add-local.js")
+ , addRemoteTarball = require("./cache/add-remote-tarball.js")
+ , addRemoteGit = require("./cache/add-remote-git.js")
cache.usage = "npm cache add <tarball file>"
+ "\nnpm cache add <folder>"
@@ -131,6 +115,10 @@ function cache (args, cb) {
}
}
+// Only have a single download action at once for a given url
+// additional calls stack the callbacks.
+var inFlightURLs = {}
+
// if the pkg and ver are in the cache, then
// just do a readJson and return.
// if they're not, then fetch them from the registry.
@@ -144,14 +132,14 @@ function read (name, ver, forceBypass, cb) {
if (forceBypass && npm.config.get("force")) {
log.verbose("using force", "skipping cache")
- return addNamed(name, ver, c)
+ return addNamed(name, ver, null, inFlightURLs, c)
}
readJson(jsonFile, function (er, data) {
er = needName(er, data)
er = needVersion(er, data)
if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR") return cb(er)
- if (er) return addNamed(name, ver, c)
+ if (er) return addNamed(name, ver, null, inFlightURLs, c)
deprCheck(data)
c(er, data)
})
@@ -292,13 +280,13 @@ function maybeFile (spec, p, cb) {
fs.stat(spec, function (er, stat) {
if (!er) {
// definitely a local thing
- addLocal(spec, cb)
+ addLocal(spec, "", inFlightURLs, cb)
} else if (er && spec.indexOf("@") !== -1) {
// bar@baz/loofa
maybeAt(spec, cb)
} else {
// Already know it's not a url, so must be local
- addLocal(spec, cb)
+ addLocal(spec, "", inFlightURLs, cb)
}
})
}
@@ -317,1000 +305,19 @@ function add_ (name, spec, p, cb) {
switch (p.protocol) {
case "http:":
case "https:":
- return addRemoteTarball(spec, null, name, cb)
+ return addRemoteTarball(spec, null, name, "", inFlightURLs, cb)
default:
if (isGitUrl(p))
- return addRemoteGit(spec, p, name, false, cb)
+ return addRemoteGit(spec, p, name, false, inFlightURLs, cb)
// if we have a name and a spec, then try name@spec
// if not, then try just spec (which may try name@"" if not found)
if (name) {
- addNamed(name, spec, cb)
- } else {
- addLocal(spec, cb)
- }
- }
-}
-
-function fetchAndShaCheck (u, tmp, shasum, cb) {
- fetch(u, tmp, function (er, response) {
- if (er) {
- log.error("fetch failed", u)
- return cb(er, response)
- }
-
- if (!shasum) {
- // Well, we weren't given a shasum, so at least sha what we have
- // in case we want to compare it to something else later
- return sha.get(tmp, function (er, shasum) {
- cb(er, response, shasum)
- })
- }
-
- // validate that the url we just downloaded matches the expected shasum.
- sha.check(tmp, shasum, function (er) {
- if (er && er.message) {
- // add original filename for better debuggability
- er.message = er.message + '\n' + 'From: ' + u
- }
- return cb(er, response, shasum)
- })
- })
-}
-
-// Only have a single download action at once for a given url
-// additional calls stack the callbacks.
-var inFlightURLs = {}
-function addRemoteTarball (u, shasum, name, version, cb_) {
- if (typeof cb_ !== "function") cb_ = version, version = ""
- if (typeof cb_ !== "function") cb_ = name, name = ""
- if (typeof cb_ !== "function") cb_ = shasum, shasum = null
-
- if (!inFlightURLs[u]) inFlightURLs[u] = []
- var iF = inFlightURLs[u]
- iF.push(cb_)
- if (iF.length > 1) return
-
- function cb (er, data) {
- if (data) {
- data._from = u
- data._shasum = data._shasum || shasum
- data._resolved = u
- }
- unlock(u, function () {
- var c
- while (c = iF.shift()) c(er, data)
- delete inFlightURLs[u]
- })
- }
-
- var tmp = path.join(npm.tmp, Date.now()+"-"+Math.random(), "tmp.tgz")
-
- lock(u, function (er) {
- if (er) return cb(er)
-
- log.verbose("addRemoteTarball", [u, shasum])
- mkdir(path.dirname(tmp), function (er) {
- if (er) return cb(er)
- addRemoteTarball_(u, tmp, shasum, done)
- })
- })
-
- function done (er, resp, shasum) {
- if (er) return cb(er)
- addLocalTarball(tmp, name, version, shasum, cb)
- }
-}
-
-function addRemoteTarball_(u, tmp, shasum, cb) {
- // Tuned to spread 3 attempts over about a minute.
- // See formula at <https://github.com/tim-kos/node-retry>.
- var operation = retry.operation
- ( { retries: npm.config.get("fetch-retries")
- , factor: npm.config.get("fetch-retry-factor")
- , minTimeout: npm.config.get("fetch-retry-mintimeout")
- , maxTimeout: npm.config.get("fetch-retry-maxtimeout") })
-
- operation.attempt(function (currentAttempt) {
- log.info("retry", "fetch attempt " + currentAttempt
- + " at " + (new Date()).toLocaleTimeString())
- fetchAndShaCheck(u, tmp, shasum, function (er, response, shasum) {
- // Only retry on 408, 5xx or no `response`.
- var sc = response && response.statusCode
- var statusRetry = !sc || (sc === 408 || sc >= 500)
- if (er && statusRetry && operation.retry(er)) {
- log.info("retry", "will retry, error on last attempt: " + er)
- return
- }
- cb(er, response, shasum)
- })
- })
-}
-
-// 1. cacheDir = path.join(cache,'_git-remotes',sha1(u))
-// 2. checkGitDir(cacheDir) ? 4. : 3. (rm cacheDir if necessary)
-// 3. git clone --mirror u cacheDir
-// 4. cd cacheDir && git fetch -a origin
-// 5. git archive /tmp/random.tgz
-// 6. addLocalTarball(/tmp/random.tgz) <gitref> --format=tar --prefix=package/
-// silent flag is used if this should error quietly
-function addRemoteGit (u, parsed, name, silent, cb_) {
- if (typeof cb_ !== "function") cb_ = name, name = null
-
- if (!inFlightURLs[u]) inFlightURLs[u] = []
- var iF = inFlightURLs[u]
- iF.push(cb_)
- if (iF.length > 1) return
-
- // git is so tricky!
- // if the path is like ssh://foo:22/some/path then it works, but
- // it needs the ssh://
- // If the path is like ssh://foo:some/path then it works, but
- // only if you remove the ssh://
- var origUrl = u
- u = u.replace(/^git\+/, "")
- .replace(/#.*$/, "")
-
- // ssh paths that are scp-style urls don't need the ssh://
- if (parsed.pathname.match(/^\/?:/)) {
- u = u.replace(/^ssh:\/\//, "")
- }
-
- function cb (er, data) {
- unlock(u, function () {
- var c
- while (c = iF.shift()) c(er, data)
- delete inFlightURLs[origUrl]
- })
- }
-
- lock(u, function (er) {
- if (er) return cb(er)
-
- // figure out what we should check out.
- var co = parsed.hash && parsed.hash.substr(1) || "master"
-
- var v = crypto.createHash("sha1").update(u).digest("hex").slice(0, 8)
- v = u.replace(/[^a-zA-Z0-9]+/g, '-') + '-' + v
-
- log.verbose("addRemoteGit", [u, co])
-
- var p = path.join(npm.config.get("cache"), "_git-remotes", v)
-
- checkGitDir(p, u, co, origUrl, silent, function(er, data) {
- chmodr(p, npm.modes.file, function(erChmod) {
- if (er) return cb(er, data)
- return cb(erChmod, data)
- })
- })
- })
-}
-
-function checkGitDir (p, u, co, origUrl, silent, cb) {
- fs.stat(p, function (er, s) {
- if (er) return cloneGitRemote(p, u, co, origUrl, silent, cb)
- if (!s.isDirectory()) return rm(p, function (er){
- if (er) return cb(er)
- cloneGitRemote(p, u, co, origUrl, silent, cb)
- })
-
- var git = npm.config.get("git")
- var args = [ "config", "--get", "remote.origin.url" ]
- var env = gitEnv()
-
- // check for git
- which(git, function (err) {
- if (err) {
- err.code = "ENOGIT"
- return cb(err)
- }
- exec(git, args, {cwd: p, env: env}, function (er, stdout, stderr) {
- var stdoutTrimmed = (stdout + "\n" + stderr).trim()
- if (er || u !== stdout.trim()) {
- log.warn( "`git config --get remote.origin.url` returned "
- + "wrong result ("+u+")", stdoutTrimmed )
- return rm(p, function (er){
- if (er) return cb(er)
- cloneGitRemote(p, u, co, origUrl, silent, cb)
- })
- }
- log.verbose("git remote.origin.url", stdoutTrimmed)
- archiveGitRemote(p, u, co, origUrl, cb)
- })
- })
- })
-}
-
-function cloneGitRemote (p, u, co, origUrl, silent, cb) {
- mkdir(p, function (er) {
- if (er) return cb(er)
-
- var git = npm.config.get("git")
- var args = [ "clone", "--mirror", u, p ]
- var env = gitEnv()
-
- // check for git
- which(git, function (err) {
- if (err) {
- err.code = "ENOGIT"
- return cb(err)
- }
- exec(git, args, {cwd: p, env: env}, function (er, stdout, stderr) {
- stdout = (stdout + "\n" + stderr).trim()
- if (er) {
- if (silent) {
- log.verbose("git clone " + u, stdout)
- } else {
- log.error("git clone " + u, stdout)
- }
- return cb(er)
- }
- log.verbose("git clone " + u, stdout)
- archiveGitRemote(p, u, co, origUrl, cb)
- })
- })
- })
-}
-
-function archiveGitRemote (p, u, co, origUrl, cb) {
- var git = npm.config.get("git")
- var archive = [ "fetch", "-a", "origin" ]
- var resolve = [ "rev-list", "-n1", co ]
- var env = gitEnv()
-
- var errState = null
- var n = 0
- var resolved = null
- var tmp
-
- exec(git, archive, {cwd: p, env: env}, function (er, stdout, stderr) {
- stdout = (stdout + "\n" + stderr).trim()
- if (er) {
- log.error("git fetch -a origin ("+u+")", stdout)
- return cb(er)
- }
- log.verbose("git fetch -a origin ("+u+")", stdout)
- tmp = path.join(npm.tmp, Date.now()+"-"+Math.random(), "tmp.tgz")
- verifyOwnership()
- })
-
- function verifyOwnership() {
- if (process.platform === "win32") {
- log.silly("verifyOwnership", "skipping for windows")
- resolveHead()
- } else {
- getCacheStat(function(er, cs) {
- if (er) {
- log.error("Could not get cache stat")
- return cb(er)
- }
- chownr(p, cs.uid, cs.gid, function(er) {
- if (er) {
- log.error("Failed to change folder ownership under npm cache for %s", p)
- return cb(er)
- }
- resolveHead()
- })
- })
- }
- }
-
- function resolveHead () {
- exec(git, resolve, {cwd: p, env: env}, function (er, stdout, stderr) {
- stdout = (stdout + "\n" + stderr).trim()
- if (er) {
- log.error("Failed resolving git HEAD (" + u + ")", stderr)
- return cb(er)
- }
- log.verbose("git rev-list -n1 " + co, stdout)
- var parsed = url.parse(origUrl)
- parsed.hash = stdout
- resolved = url.format(parsed)
-
- // https://github.com/npm/npm/issues/3224
- // node incorrectly sticks a / at the start of the path
- // We know that the host won't change, so split and detect this
- var spo = origUrl.split(parsed.host)
- var spr = resolved.split(parsed.host)
- if (spo[1].charAt(0) === ':' && spr[1].charAt(0) === '/')
- spr[1] = spr[1].slice(1)
- resolved = spr.join(parsed.host)
-
- log.verbose('resolved git url', resolved)
- next()
- })
- }
-
- function next () {
- mkdir(path.dirname(tmp), function (er) {
- if (er) return cb(er)
- var gzip = zlib.createGzip({ level: 9 })
- var git = npm.config.get("git")
- var args = ["archive", co, "--format=tar", "--prefix=package/"]
- var out = fs.createWriteStream(tmp)
- var env = gitEnv()
- cb = once(cb)
- var cp = spawn(git, args, { env: env, cwd: p })
- cp.on("error", cb)
- cp.stderr.on("data", function(chunk) {
- log.silly(chunk.toString(), "git archive")
- })
-
- cp.stdout.pipe(gzip).pipe(out).on("close", function() {
- addLocalTarball(tmp, function(er, data) {
- if (data) data._resolved = resolved
- cb(er, data)
- })
- })
- })
- }
-}
-
-var gitEnv_
-function gitEnv () {
- // git responds to env vars in some weird ways in post-receive hooks
- // so don't carry those along.
- if (gitEnv_) return gitEnv_
- gitEnv_ = {}
- for (var k in process.env) {
- if (!~['GIT_PROXY_COMMAND','GIT_SSH','GIT_SSL_NO_VERIFY'].indexOf(k) && k.match(/^GIT/)) continue
- gitEnv_[k] = process.env[k]
- }
- return gitEnv_
-}
-
-
-// only have one request in flight for a given
-// name@blah thing.
-var inFlightNames = {}
-function addNamed (name, x, data, cb_) {
- if (typeof cb_ !== "function") cb_ = data, data = null
- log.verbose("addNamed", [name, x])
-
- var k = name + "@" + x
- if (!inFlightNames[k]) inFlightNames[k] = []
- var iF = inFlightNames[k]
- iF.push(cb_)
- if (iF.length > 1) return
-
- function cb (er, data) {
- if (data && !data._fromGithub) data._from = k
- unlock(k, function () {
- var c
- while (c = iF.shift()) c(er, data)
- delete inFlightNames[k]
- })
- }
-
- log.verbose("addNamed", [semver.valid(x), semver.validRange(x)])
- lock(k, function (er, fd) {
- if (er) return cb(er)
-
- var fn = ( semver.valid(x, true) ? addNameVersion
- : semver.validRange(x, true) ? addNameRange
- : addNameTag
- )
- fn(name, x, data, cb)
- })
-}
-
-function addNameTag (name, tag, data, cb_) {
- if (typeof cb_ !== "function") cb_ = data, data = null
- log.info("addNameTag", [name, tag])
- var explicit = true
- if (!tag) {
- explicit = false
- tag = npm.config.get("tag")
- }
-
- function cb(er, data) {
- // might be username/project
- // in that case, try it as a github url.
- if (er && tag.split("/").length === 2) {
- return maybeGithub(tag, name, er, cb_)
- }
- return cb_(er, data)
- }
-
- registry.get(name, function (er, data, json, response) {
- if (!er) {
- er = errorResponse(name, response)
- }
- if (er) return cb(er)
- engineFilter(data)
- if (data["dist-tags"] && data["dist-tags"][tag]
- && data.versions[data["dist-tags"][tag]]) {
- var ver = data["dist-tags"][tag]
- return addNamed(name, ver, data.versions[ver], cb)
- }
- if (!explicit && Object.keys(data.versions).length) {
- return addNamed(name, "*", data, cb)
- }
-
- er = installTargetsError(tag, data)
- return cb(er)
- })
-}
-
-
-function engineFilter (data) {
- var npmv = npm.version
- , nodev = npm.config.get("node-version")
- , strict = npm.config.get("engine-strict")
-
- if (!nodev || npm.config.get("force")) return data
-
- Object.keys(data.versions || {}).forEach(function (v) {
- var eng = data.versions[v].engines
- if (!eng) return
- if (!strict && !data.versions[v].engineStrict) return
- if (eng.node && !semver.satisfies(nodev, eng.node, true)
- || eng.npm && !semver.satisfies(npmv, eng.npm, true)) {
- delete data.versions[v]
- }
- })
-}
-
-function errorResponse (name, response) {
- if (response.statusCode >= 400) {
- var er = new Error(http.STATUS_CODES[response.statusCode])
- er.statusCode = response.statusCode
- er.code = "E" + er.statusCode
- er.pkgid = name
- }
- return er
-}
-
-function addNameRange (name, range, data, cb) {
- if (typeof cb !== "function") cb = data, data = null
-
- range = semver.validRange(range, true)
- if (range === null) return cb(new Error(
- "Invalid version range: "+range))
-
- log.silly("addNameRange", {name:name, range:range, hasData:!!data})
-
- if (data) return next()
- registry.get(name, function (er, d, json, response) {
- if (!er) {
- er = errorResponse(name, response)
- }
- if (er) return cb(er)
- data = d
- next()
- })
-
- function next () {
- log.silly( "addNameRange", "number 2"
- , {name:name, range:range, hasData:!!data})
- engineFilter(data)
-
- log.silly("addNameRange", "versions"
- , [data.name, Object.keys(data.versions || {})])
-
- // if the tagged version satisfies, then use that.
- var tagged = data["dist-tags"][npm.config.get("tag")]
- if (tagged
- && data.versions[tagged]
- && semver.satisfies(tagged, range, true)) {
- return addNamed(name, tagged, data.versions[tagged], cb)
- }
-
- // find the max satisfying version.
- var versions = Object.keys(data.versions || {})
- var ms = semver.maxSatisfying(versions, range, true)
- if (!ms) {
- return cb(installTargetsError(range, data))
- }
-
- // if we don't have a registry connection, try to see if
- // there's a cached copy that will be ok.
- addNamed(name, ms, data.versions[ms], cb)
- }
-}
-
-function installTargetsError (requested, data) {
- var targets = Object.keys(data["dist-tags"]).filter(function (f) {
- return (data.versions || {}).hasOwnProperty(f)
- }).concat(Object.keys(data.versions || {}))
-
- requested = data.name + (requested ? "@'" + requested + "'" : "")
-
- targets = targets.length
- ? "Valid install targets:\n" + JSON.stringify(targets) + "\n"
- : "No valid targets found.\n"
- + "Perhaps not compatible with your version of node?"
-
- var er = new Error( "No compatible version found: "
- + requested + "\n" + targets)
- er.code = "ETARGET"
- return er
-}
-
-function addNameVersion (name, v, data, cb) {
- if (typeof cb !== "function") cb = data, data = null
-
- var ver = semver.valid(v, true)
- if (!ver) return cb(new Error("Invalid version: "+v))
-
- var response
-
- if (data) {
- response = null
- return next()
- }
- registry.get(name, function (er, d, json, resp) {
- if (!er) {
- er = errorResponse(name, resp)
- }
- if (er) return cb(er)
- data = d && d.versions[ver]
- if (!data) {
- er = new Error('version not found: ' + name + '@' + ver)
- er.package = name
- er.statusCode = 404
- return cb(er)
- }
- response = resp
- next()
- })
-
- function next () {
- deprCheck(data)
- var dist = data.dist
-
- if (!dist) return cb(new Error("No dist in "+data._id+" package"))
-
- if (!dist.tarball) return cb(new Error(
- "No dist.tarball in " + data._id + " package"))
-
- if ((response && response.statusCode !== 304) || npm.config.get("force")) {
- return fetchit()
- }
-
- // we got cached data, so let's see if we have a tarball.
- var pkgroot = path.join(npm.cache, name, ver)
- var pkgtgz = path.join(pkgroot, "package.tgz")
- var pkgjson = path.join(pkgroot, "package", "package.json")
- fs.stat(pkgtgz, function (er, s) {
- if (!er) {
- readJson(pkgjson, function (er, data) {
- er = needName(er, data)
- er = needVersion(er, data)
- if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR")
- return cb(er)
- if (er) return fetchit()
- // check the SHA of the package we have, to ensure it wasn't installed
- // from somewhere other than the registry (eg, a fork)
- if (data._shasum && dist.shasum && data._shasum !== dist.shasum)
- return fetchit()
- return cb(null, data)
- })
- } else return fetchit()
- })
-
- function fetchit () {
- if (!npm.config.get("registry")) {
- return cb(new Error("Cannot fetch: "+dist.tarball))
- }
-
- // use the same protocol as the registry.
- // https registry --> https tarballs, but
- // only if they're the same hostname, or else
- // detached tarballs may not work.
- var tb = url.parse(dist.tarball)
- var rp = url.parse(npm.config.get("registry"))
- if (tb.hostname === rp.hostname
- && tb.protocol !== rp.protocol) {
- tb.protocol = url.parse(npm.config.get("registry")).protocol
- delete tb.href
- }
- tb = url.format(tb)
-
- // only add non-shasum'ed packages if --forced.
- // only ancient things would lack this for good reasons nowadays.
- if (!dist.shasum && !npm.config.get("force")) {
- return cb(new Error("package lacks shasum: " + data._id))
- }
- return addRemoteTarball( tb
- , dist.shasum
- , name
- , ver
- , cb )
- }
- }
-}
-
-function addLocal (p, name, cb_) {
- if (typeof cb_ !== "function") cb_ = name, name = ""
-
- function cb (er, data) {
- unlock(p, function () {
- if (er) {
- // if it doesn't have a / in it, it might be a
- // remote thing.
- if (p.indexOf("/") === -1 && p.charAt(0) !== "."
- && (process.platform !== "win32" || p.indexOf("\\") === -1)) {
- return addNamed(p, "", cb_)
- }
- log.error("addLocal", "Could not install %s", p)
- return cb_(er)
- }
- if (data && !data._fromGithub) data._from = p
- return cb_(er, data)
- })
- }
-
- lock(p, function (er) {
- if (er) return cb(er)
- // figure out if this is a folder or file.
- fs.stat(p, function (er, s) {
- if (er) {
- // might be username/project
- // in that case, try it as a github url.
- if (p.split("/").length === 2) {
- return maybeGithub(p, name, er, cb)
- }
- return cb(er)
- }
- if (s.isDirectory()) addLocalDirectory(p, name, cb)
- else addLocalTarball(p, name, cb)
- })
- })
-}
-
-function maybeGithub (p, name, er, cb) {
- var u = "git://github.com/" + p
- , up = url.parse(u)
- log.info("maybeGithub", "Attempting %s from %s", p, u)
-
- return addRemoteGit(u, up, name, true, function (er2, data) {
- if (er2) {
- var upriv = "git+ssh://git@github.com:" + p
- , uppriv = url.parse(upriv)
-
- log.info("maybeGithub", "Attempting %s from %s", p, upriv)
-
- return addRemoteGit(upriv, uppriv, false, name, function (er3, data) {
- if (er3) return cb(er)
- success(upriv, data)
- })
- }
- success(u, data)
- })
-
- function success (u, data) {
- data._from = u
- data._fromGithub = true
- return cb(null, data)
- }
-}
-
-function addLocalTarball (p, name, version, shasum, cb_) {
- if (typeof cb_ !== "function") cb_ = shasum, shasum = null
- if (typeof cb_ !== "function") cb_ = version, version = ""
- if (typeof cb_ !== "function") cb_ = name, name = ""
-
- // If we don't have a shasum yet, then get the shasum now.
- if (!shasum) {
- return sha.get(p, function (er, shasum) {
- if (er) return cb_(er)
- addLocalTarball(p, name, version, shasum, cb_)
- })
- }
-
- // if it's a tar, and not in place,
- // then unzip to .tmp, add the tmp folder, and clean up tmp
- if (pathIsInside(p, npm.tmp))
- return addTmpTarball(p, name, version, shasum, cb_)
-
- if (pathIsInside(p, npm.cache)) {
- if (path.basename(p) !== "package.tgz") return cb_(new Error(
- "Not a valid cache tarball name: "+p))
- return addPlacedTarball(p, name, shasum, cb_)
- }
-
- function cb (er, data) {
- if (data) {
- data._resolved = p
- data._shasum = data._shasum || shasum
- }
- return cb_(er, data)
- }
-
- // just copy it over and then add the temp tarball file.
- var tmp = path.join(npm.tmp, name + Date.now()
- + "-" + Math.random(), "tmp.tgz")
- mkdir(path.dirname(tmp), function (er) {
- if (er) return cb(er)
- var from = fs.createReadStream(p)
- , to = fs.createWriteStream(tmp)
- , errState = null
- function errHandler (er) {
- if (errState) return
- return cb(errState = er)
- }
- from.on("error", errHandler)
- to.on("error", errHandler)
- to.on("close", function () {
- if (errState) return
- log.verbose("chmod", tmp, npm.modes.file.toString(8))
- fs.chmod(tmp, npm.modes.file, function (er) {
- if (er) return cb(er)
- addTmpTarball(tmp, name, null, shasum, cb)
- })
- })
- from.pipe(to)
- })
-}
-
-
-
-
-function addPlacedTarball (p, name, shasum, cb) {
- if (!cb) cb = name, name = ""
- getCacheStat(function (er, cs) {
- if (er) return cb(er)
- return addPlacedTarball_(p, name, cs.uid, cs.gid, shasum, cb)
- })
-}
-
-// Resolved sum is the shasum from the registry dist object, but
-// *not* necessarily the shasum of this tarball, because for stupid
-// historical reasons, npm re-packs each package an extra time through
-// a temp directory, so all installed packages are actually built with
-// *this* version of npm, on this machine.
-//
-// Once upon a time, this meant that we could change package formats
-// around and fix junk that might be added by incompatible tar
-// implementations. Then, for a while, it was a way to correct bs
-// added by bugs in our own tar implementation. Now, it's just
-// garbage, but cleaning it up is a pain, and likely to cause issues
-// if anything is overlooked, so it's not high priority.
-//
-// If you're bored, and looking to make npm go faster, and you've
-// already made it this far in this file, here's a better methodology:
-//
-// cache.add should really be cache.place. That is, it should take
-// a set of arguments like it does now, but then also a destination
-// folder.
-//
-// cache.add('foo@bar', '/path/node_modules/foo', cb)
-//
-// 1. Resolve 'foo@bar' to some specific:
-// - git url
-// - local folder
-// - local tarball
-// - tarball url
-// 2. If resolved through the registry, then pick up the dist.shasum
-// along the way.
-// 3. Acquire request() stream fetching bytes: FETCH
-// 4. FETCH.pipe(tar unpack stream to dest)
-// 5. FETCH.pipe(shasum generator)
-// When the tar and shasum streams both finish, make sure that the
-// shasum matches dist.shasum, and if not, clean up and bail.
-//
-// publish(cb)
-//
-// 1. read package.json
-// 2. get root package object (for rev, and versions)
-// 3. update root package doc with version info
-// 4. remove _attachments object
-// 5. remove versions object
-// 5. jsonify, remove last }
-// 6. get stream: registry.put(/package)
-// 7. write trailing-}-less JSON
-// 8. write "_attachments":
-// 9. JSON.stringify(attachments), remove trailing }
-// 10. Write start of attachments (stubs)
-// 11. JSON(filename)+':{"type":"application/octet-stream","data":"'
-// 12. acquire tar packing stream, PACK
-// 13. PACK.pipe(PUT)
-// 14. PACK.pipe(shasum generator)
-// 15. when PACK finishes, get shasum
-// 16. PUT.write('"}},') (finish _attachments
-// 17. update "versions" object with current package version
-// (including dist.shasum and dist.tarball)
-// 18. write '"versions":' + JSON(versions)
-// 19. write '}}' (versions, close main doc)
-
-function addPlacedTarball_ (p, name, uid, gid, resolvedSum, cb) {
- // now we know it's in place already as .cache/name/ver/package.tgz
- // unpack to .cache/name/ver/package/, read the package.json,
- // and fire cb with the json data.
- var target = path.dirname(p)
- , folder = path.join(target, "package")
-
- lock(folder, function (er) {
- if (er) return cb(er)
- rmUnpack()
- })
-
- function rmUnpack () {
- rm(folder, function (er) {
- unlock(folder, function () {
- if (er) {
- log.error("addPlacedTarball", "Could not remove %j", folder)
- return cb(er)
- }
- thenUnpack()
- })
- })
- }
-
- function thenUnpack () {
- tar.unpack(p, folder, null, null, uid, gid, function (er) {
- if (er) {
- log.error("addPlacedTarball", "Could not unpack %j to %j", p, target)
- return cb(er)
- }
- // calculate the sha of the file that we just unpacked.
- // this is so that the data is available when publishing.
- sha.get(p, function (er, shasum) {
- if (er) {
- log.error("addPlacedTarball", "shasum fail", p)
- return cb(er)
- }
- readJson(path.join(folder, "package.json"), function (er, data) {
- er = needName(er, data)
- er = needVersion(er, data)
- if (er) {
- log.error("addPlacedTarball", "Couldn't read json in %j"
- , folder)
- return cb(er)
- }
-
- data.dist = data.dist || {}
- data.dist.shasum = shasum
- deprCheck(data)
- asyncMap([p], function (f, cb) {
- log.verbose("chmod", f, npm.modes.file.toString(8))
- fs.chmod(f, npm.modes.file, cb)
- }, function (f, cb) {
- if (process.platform === "win32") {
- log.silly("chown", "skipping for windows", f)
- cb()
- } else if (typeof uid === "number"
- && typeof gid === "number"
- && parseInt(uid, 10) === uid
- && parseInt(gid, 10) === gid) {
- log.verbose("chown", f, [uid, gid])
- fs.chown(f, uid, gid, cb)
- } else {
- log.verbose("chown", "skip for invalid uid/gid", [f, uid, gid])
- cb()
- }
- }, function (er) {
- cb(er, data)
- })
- })
- })
- })
- }
-}
-
-// At this point, if shasum is set, it's something that we've already
-// read and checked. Just stashing it in the data at this point.
-function addLocalDirectory (p, name, shasum, cb) {
- if (typeof cb !== "function") cb = shasum, shasum = ""
- if (typeof cb !== "function") cb = name, name = ""
- // if it's a folder, then read the package.json,
- // tar it to the proper place, and add the cache tar
- if (pathIsInside(p, npm.cache)) return cb(new Error(
- "Adding a cache directory to the cache will make the world implode."))
- readJson(path.join(p, "package.json"), false, function (er, data) {
- er = needName(er, data)
- er = needVersion(er, data)
- if (er) return cb(er)
- deprCheck(data)
- var random = Date.now() + "-" + Math.random()
- , tmp = path.join(npm.tmp, random)
- , tmptgz = path.resolve(tmp, "tmp.tgz")
- , placed = path.resolve( npm.cache, data.name
- , data.version, "package.tgz" )
- , placeDirect = path.basename(p) === "package"
- , tgz = placeDirect ? placed : tmptgz
- , version = data.version
-
- name = data.name
-
- getCacheStat(function (er, cs) {
- mkdir(path.dirname(tgz), function (er, made) {
- if (er) return cb(er)
-
- var fancy = !pathIsInside(p, npm.tmp)
- && !pathIsInside(p, npm.cache)
- tar.pack(tgz, p, data, fancy, function (er) {
- if (er) {
- log.error( "addLocalDirectory", "Could not pack %j to %j"
- , p, tgz )
- return cb(er)
- }
-
- // if we don't get a cache stat, or if the gid/uid is not
- // a number, then just move on. chown would fail anyway.
- if (!cs || isNaN(cs.uid) || isNaN(cs.gid)) return cb()
-
- chownr(made || tgz, cs.uid, cs.gid, function (er) {
- if (er) return cb(er)
- addLocalTarball(tgz, name, version, shasum, cb)
- })
- })
- })
- })
- })
-}
-
-// XXX This is where it should be fixed
-// Right now it's unpacking to a "package" folder, and then
-// adding that local folder, for historical reasons.
-// Instead, unpack to the *cache* folder, and then copy the
-// tgz into place in the cache, so the shasum doesn't change.
-function addTmpTarball (tgz, name, version, shasum, cb) {
- // Just have a placeholder here so we can move it into place after.
- var tmp = false
- if (!version) {
- tmp = true
- version = 'tmp_' + crypto.randomBytes(6).toString('hex')
- }
- if (!name) {
- tmp = true
- name = 'tmp_' + crypto.randomBytes(6).toString('hex')
- }
- var pdir
- if (!tmp) {
- pdir = path.resolve(npm.cache, name, version, "package")
- } else {
- pdir = path.resolve(npm.cache, name + version + "package")
- }
-
- getCacheStat(function (er, cs) {
- if (er) return cb(er)
- tar.unpack(tgz, pdir, null, null, cs.uid, cs.gid, next)
- })
-
- function next (er) {
- if (er) return cb(er)
- // it MUST be able to get a version now!
- var pj = path.resolve(pdir, "package.json")
- readJson(pj, function (er, data) {
- if (er) return cb(er)
- if (version === data.version && name === data.name && !tmp) {
- addTmpTarball_(tgz, data, name, version, shasum, cb)
+ addNamed(name, spec, null, inFlightURLs, cb)
} else {
- var old = pdir
- name = data.name
- version = data.version
- pdir = path.resolve(npm.cache, name, version, "package")
- mkdir(path.dirname(pdir), function(er) {
- if (er) return cb(er)
- rm(pdir, function(er) {
- if (er) return cb(er)
- fs.rename(old, pdir, function(er) {
- if (er) return cb(er)
- rm(old, function(er) {
- if (er) return cb(er)
- addTmpTarball_(tgz, data, name, version, shasum, cb)
- })
- })
- })
- })
+ addLocal(spec, "", inFlightURLs, cb)
}
- })
- }
-}
-
-function addTmpTarball_ (tgz, data, name, version, shasum, cb) {
- cb = once(cb)
- var target = path.resolve(npm.cache, name, version, "package.tgz")
- var read = fs.createReadStream(tgz)
- var write = fs.createWriteStream(target)
- read.on("error", cb).pipe(write).on("error", cb).on("close", done)
-
- function done() {
- data._shasum = data._shasum || shasum
- cb(null, data)
}
}
@@ -1336,18 +343,6 @@ function unpack (pkg, ver, unpackTarget, dMode, fMode, uid, gid, cb) {
})
}
-var deprecated = {}
- , deprWarned = {}
-function deprCheck (data) {
- if (deprecated[data._id]) data.deprecated = deprecated[data._id]
- if (data.deprecated) deprecated[data._id] = data.deprecated
- else return
- if (!deprWarned[data._id]) {
- deprWarned[data._id] = true
- log.warn("deprecated", "%s: %s", data._id, data.deprecated)
- }
-}
-
function needName(er, data) {
return er ? er
: (data && !data.name) ? new Error("No name provided")
diff --git a/lib/cache/add-local-tarball.js b/lib/cache/add-local-tarball.js
new file mode 100644
index 000000000..f345744cf
--- /dev/null
+++ b/lib/cache/add-local-tarball.js
@@ -0,0 +1,304 @@
+'use strict';
+
+var mkdir = require("mkdirp")
+ , fs = require("graceful-fs")
+ , readJson = require("read-package-json")
+ , log = require("npmlog")
+ , path = require("path")
+ , crypto = require("crypto")
+ , once = require("once")
+ , sha = require("sha")
+ , asyncMap = require("slide").asyncMap
+ , npm = require("../npm.js")
+ , tar = require("../utils/tar.js")
+ , pathIsInside = require("path-is-inside")
+ , rm = require("../utils/gently-rm.js")
+ , deprCheck = require("../utils/depr-check.js")
+ , locker = require("../utils/locker.js")
+ , lock = locker.lock
+ , unlock = locker.unlock
+ , getCacheStat = require("./get-stat.js")
+
+module.exports = function addLocalTarball (p, name, version, shasum, cb_) {
+ if (typeof cb_ !== "function") cb_ = shasum, shasum = null
+ if (typeof cb_ !== "function") cb_ = version, version = ""
+ if (typeof cb_ !== "function") cb_ = name, name = ""
+
+ // If we don't have a shasum yet, then get the shasum now.
+ if (!shasum) {
+ return sha.get(p, function (er, shasum) {
+ if (er) return cb_(er)
+ addLocalTarball(p, name, version, shasum, cb_)
+ })
+ }
+
+ // if it's a tar, and not in place,
+ // then unzip to .tmp, add the tmp folder, and clean up tmp
+ if (pathIsInside(p, npm.tmp))
+ return addTmpTarball(p, name, version, shasum, cb_)
+
+ if (pathIsInside(p, npm.cache)) {
+ if (path.basename(p) !== "package.tgz") return cb_(new Error(
+ "Not a valid cache tarball name: "+p))
+ return addPlacedTarball(p, name, shasum, cb_)
+ }
+
+ function cb (er, data) {
+ if (data) {
+ data._resolved = p
+ data._shasum = data._shasum || shasum
+ }
+ return cb_(er, data)
+ }
+
+ // just copy it over and then add the temp tarball file.
+ var tmp = path.join(npm.tmp, name + Date.now()
+ + "-" + Math.random(), "tmp.tgz")
+ mkdir(path.dirname(tmp), function (er) {
+ if (er) return cb(er)
+ var from = fs.createReadStream(p)
+ , to = fs.createWriteStream(tmp)
+ , errState = null
+ function errHandler (er) {
+ if (errState) return
+ return cb(errState = er)
+ }
+ from.on("error", errHandler)
+ to.on("error", errHandler)
+ to.on("close", function () {
+ if (errState) return
+ log.verbose("chmod", tmp, npm.modes.file.toString(8))
+ fs.chmod(tmp, npm.modes.file, function (er) {
+ if (er) return cb(er)
+ addTmpTarball(tmp, name, null, shasum, cb)
+ })
+ })
+ from.pipe(to)
+ })
+}
+
+function addPlacedTarball (p, name, shasum, cb) {
+ if (!cb) cb = name, name = ""
+ getCacheStat(function (er, cs) {
+ if (er) return cb(er)
+ return addPlacedTarball_(p, name, cs.uid, cs.gid, shasum, cb)
+ })
+}
+
+// Resolved sum is the shasum from the registry dist object, but
+// *not* necessarily the shasum of this tarball, because for stupid
+// historical reasons, npm re-packs each package an extra time through
+// a temp directory, so all installed packages are actually built with
+// *this* version of npm, on this machine.
+//
+// Once upon a time, this meant that we could change package formats
+// around and fix junk that might be added by incompatible tar
+// implementations. Then, for a while, it was a way to correct bs
+// added by bugs in our own tar implementation. Now, it's just
+// garbage, but cleaning it up is a pain, and likely to cause issues
+// if anything is overlooked, so it's not high priority.
+//
+// If you're bored, and looking to make npm go faster, and you've
+// already made it this far in this file, here's a better methodology:
+//
+// cache.add should really be cache.place. That is, it should take
+// a set of arguments like it does now, but then also a destination
+// folder.
+//
+// cache.add('foo@bar', '/path/node_modules/foo', cb)
+//
+// 1. Resolve 'foo@bar' to some specific:
+// - git url
+// - local folder
+// - local tarball
+// - tarball url
+// 2. If resolved through the registry, then pick up the dist.shasum
+// along the way.
+// 3. Acquire request() stream fetching bytes: FETCH
+// 4. FETCH.pipe(tar unpack stream to dest)
+// 5. FETCH.pipe(shasum generator)
+// When the tar and shasum streams both finish, make sure that the
+// shasum matches dist.shasum, and if not, clean up and bail.
+//
+// publish(cb)
+//
+// 1. read package.json
+// 2. get root package object (for rev, and versions)
+// 3. update root package doc with version info
+// 4. remove _attachments object
+// 5. remove versions object
+// 5. jsonify, remove last }
+// 6. get stream: registry.put(/package)
+// 7. write trailing-}-less JSON
+// 8. write "_attachments":
+// 9. JSON.stringify(attachments), remove trailing }
+// 10. Write start of attachments (stubs)
+// 11. JSON(filename)+':{"type":"application/octet-stream","data":"'
+// 12. acquire tar packing stream, PACK
+// 13. PACK.pipe(PUT)
+// 14. PACK.pipe(shasum generator)
+// 15. when PACK finishes, get shasum
+// 16. PUT.write('"}},') (finish _attachments
+// 17. update "versions" object with current package version
+// (including dist.shasum and dist.tarball)
+// 18. write '"versions":' + JSON(versions)
+// 19. write '}}' (versions, close main doc)
+
+function addPlacedTarball_ (p, name, uid, gid, resolvedSum, cb) {
+ // now we know it's in place already as .cache/name/ver/package.tgz
+ // unpack to .cache/name/ver/package/, read the package.json,
+ // and fire cb with the json data.
+ var target = path.dirname(p)
+ , folder = path.join(target, "package")
+
+ lock(folder, function (er) {
+ if (er) return cb(er)
+ rmUnpack()
+ })
+
+ function rmUnpack () {
+ rm(folder, function (er) {
+ unlock(folder, function () {
+ if (er) {
+ log.error("addPlacedTarball", "Could not remove %j", folder)
+ return cb(er)
+ }
+ thenUnpack()
+ })
+ })
+ }
+
+ function thenUnpack () {
+ tar.unpack(p, folder, null, null, uid, gid, function (er) {
+ if (er) {
+ log.error("addPlacedTarball", "Could not unpack %j to %j", p, target)
+ return cb(er)
+ }
+ // calculate the sha of the file that we just unpacked.
+ // this is so that the data is available when publishing.
+ sha.get(p, function (er, shasum) {
+ if (er) {
+ log.error("addPlacedTarball", "shasum fail", p)
+ return cb(er)
+ }
+ readJson(path.join(folder, "package.json"), function (er, data) {
+ er = needName(er, data)
+ er = needVersion(er, data)
+ if (er) {
+ log.error("addPlacedTarball", "Couldn't read json in %j"
+ , folder)
+ return cb(er)
+ }
+
+ data.dist = data.dist || {}
+ data.dist.shasum = shasum
+ deprCheck(data)
+ asyncMap([p], function (f, cb) {
+ log.verbose("chmod", f, npm.modes.file.toString(8))
+ fs.chmod(f, npm.modes.file, cb)
+ }, function (f, cb) {
+ if (process.platform === "win32") {
+ log.silly("chown", "skipping for windows", f)
+ cb()
+ } else if (typeof uid === "number"
+ && typeof gid === "number"
+ && parseInt(uid, 10) === uid
+ && parseInt(gid, 10) === gid) {
+ log.verbose("chown", f, [uid, gid])
+ fs.chown(f, uid, gid, cb)
+ } else {
+ log.verbose("chown", "skip for invalid uid/gid", [f, uid, gid])
+ cb()
+ }
+ }, function (er) {
+ cb(er, data)
+ })
+ })
+ })
+ })
+ }
+}
+
+// XXX This is where it should be fixed
+// Right now it's unpacking to a "package" folder, and then
+// adding that local folder, for historical reasons.
+// Instead, unpack to the *cache* folder, and then copy the
+// tgz into place in the cache, so the shasum doesn't change.
+function addTmpTarball (tgz, name, version, shasum, cb) {
+ // Just have a placeholder here so we can move it into place after.
+ var tmp = false
+ if (!version) {
+ tmp = true
+ version = 'tmp_' + crypto.randomBytes(6).toString('hex')
+ }
+ if (!name) {
+ tmp = true
+ name = 'tmp_' + crypto.randomBytes(6).toString('hex')
+ }
+ var pdir
+ if (!tmp) {
+ pdir = path.resolve(npm.cache, name, version, "package")
+ } else {
+ pdir = path.resolve(npm.cache, name + version + "package")
+ }
+
+ getCacheStat(function (er, cs) {
+ if (er) return cb(er)
+ tar.unpack(tgz, pdir, null, null, cs.uid, cs.gid, next)
+ })
+
+ function next (er) {
+ if (er) return cb(er)
+ // it MUST be able to get a version now!
+ var pj = path.resolve(pdir, "package.json")
+ readJson(pj, function (er, data) {
+ if (er) return cb(er)
+ if (version === data.version && name === data.name && !tmp) {
+ addTmpTarball_(tgz, data, name, version, shasum, cb)
+ } else {
+ var old = pdir
+ name = data.name
+ version = data.version
+ pdir = path.resolve(npm.cache, name, version, "package")
+ mkdir(path.dirname(pdir), function(er) {
+ if (er) return cb(er)
+ rm(pdir, function(er) {
+ if (er) return cb(er)
+ fs.rename(old, pdir, function(er) {
+ if (er) return cb(er)
+ rm(old, function(er) {
+ if (er) return cb(er)
+ addTmpTarball_(tgz, data, name, version, shasum, cb)
+ })
+ })
+ })
+ })
+ }
+ })
+ }
+}
+
+function addTmpTarball_ (tgz, data, name, version, shasum, cb) {
+ cb = once(cb)
+ var target = path.resolve(npm.cache, name, version, "package.tgz")
+ var read = fs.createReadStream(tgz)
+ var write = fs.createWriteStream(target)
+ read.on("error", cb).pipe(write).on("error", cb).on("close", done)
+
+ function done() {
+ data._shasum = data._shasum || shasum
+ cb(null, data)
+ }
+}
+
+function needName(er, data) {
+ return er ? er
+ : (data && !data.name) ? new Error("No name provided")
+ : null
+}
+
+function needVersion(er, data) {
+ return er ? er
+ : (data && !data.version) ? new Error("No version provided")
+ : null
+}
diff --git a/lib/cache/add-local.js b/lib/cache/add-local.js
new file mode 100644
index 000000000..03c3ff98c
--- /dev/null
+++ b/lib/cache/add-local.js
@@ -0,0 +1,121 @@
+'use strict';
+
+var fs = require("graceful-fs")
+ , path = require("path")
+ , mkdir = require("mkdirp")
+ , chownr = require("chownr")
+ , pathIsInside = require("path-is-inside")
+ , readJson = require("read-package-json")
+ , log = require("npmlog")
+ , npm = require("../npm.js")
+ , tar = require("../utils/tar.js")
+ , deprCheck = require("../utils/depr-check.js")
+ , locker = require("../utils/locker.js")
+ , lock = locker.lock
+ , unlock = locker.unlock
+ , getCacheStat = require("./get-stat.js")
+ , addNamed = require("./add-named.js")
+ , addLocalTarball = require("./add-local-tarball.js")
+ , maybeGithub = require("./maybe-github.js")
+
+module.exports = function addLocal (p, name, inFlightURLs, cb_) {
+ if (typeof cb_ !== "function") cb_ = name, name = ""
+
+ function cb (er, data) {
+ unlock(p, function () {
+ if (er) {
+ // if it doesn't have a / in it, it might be a
+ // remote thing.
+ if (p.indexOf("/") === -1 && p.charAt(0) !== "."
+ && (process.platform !== "win32" || p.indexOf("\\") === -1)) {
+ return addNamed(p, "", null, inFlightURLs, cb_)
+ }
+ log.error("addLocal", "Could not install %s", p)
+ return cb_(er)
+ }
+ if (data && !data._fromGithub) data._from = p
+ return cb_(er, data)
+ })
+ }
+
+ lock(p, function (er) {
+ if (er) return cb(er)
+ // figure out if this is a folder or file.
+ fs.stat(p, function (er, s) {
+ if (er) {
+ // might be username/project
+ // in that case, try it as a github url.
+ if (p.split("/").length === 2) {
+ return maybeGithub(p, name, er, cb)
+ }
+ return cb(er)
+ }
+ if (s.isDirectory()) addLocalDirectory(p, name, cb)
+ else addLocalTarball(p, name, cb)
+ })
+ })
+}
+
+// At this point, if shasum is set, it's something that we've already
+// read and checked. Just stashing it in the data at this point.
+function addLocalDirectory (p, name, shasum, cb) {
+ if (typeof cb !== "function") cb = shasum, shasum = ""
+ if (typeof cb !== "function") cb = name, name = ""
+ // if it's a folder, then read the package.json,
+ // tar it to the proper place, and add the cache tar
+ if (pathIsInside(p, npm.cache)) return cb(new Error(
+ "Adding a cache directory to the cache will make the world implode."))
+ readJson(path.join(p, "package.json"), false, function (er, data) {
+ er = needName(er, data)
+ er = needVersion(er, data)
+ if (er) return cb(er)
+ deprCheck(data)
+ var random = Date.now() + "-" + Math.random()
+ , tmp = path.join(npm.tmp, random)
+ , tmptgz = path.resolve(tmp, "tmp.tgz")
+ , placed = path.resolve( npm.cache, data.name
+ , data.version, "package.tgz" )
+ , placeDirect = path.basename(p) === "package"
+ , tgz = placeDirect ? placed : tmptgz
+ , version = data.version
+
+ name = data.name
+
+ getCacheStat(function (er, cs) {
+ mkdir(path.dirname(tgz), function (er, made) {
+ if (er) return cb(er)
+
+ var fancy = !pathIsInside(p, npm.tmp)
+ && !pathIsInside(p, npm.cache)
+ tar.pack(tgz, p, data, fancy, function (er) {
+ if (er) {
+ log.error( "addLocalDirectory", "Could not pack %j to %j"
+ , p, tgz )
+ return cb(er)
+ }
+
+ // if we don't get a cache stat, or if the gid/uid is not
+ // a number, then just move on. chown would fail anyway.
+ if (!cs || isNaN(cs.uid) || isNaN(cs.gid)) return cb()
+
+ chownr(made || tgz, cs.uid, cs.gid, function (er) {
+ if (er) return cb(er)
+ addLocalTarball(tgz, name, version, shasum, cb)
+ })
+ })
+ })
+ })
+ })
+}
+
+function needName(er, data) {
+ return er ? er
+ : (data && !data.name) ? new Error("No name provided")
+ : null
+}
+
+function needVersion(er, data) {
+ return er ? er
+ : (data && !data.version) ? new Error("No version provided")
+ : null
+}
diff --git a/lib/cache/add-named.js b/lib/cache/add-named.js
new file mode 100644
index 000000000..c231388c6
--- /dev/null
+++ b/lib/cache/add-named.js
@@ -0,0 +1,318 @@
+'use strict';
+
+var path = require("path")
+ , fs = require("graceful-fs")
+ , http = require("http")
+ , log = require("npmlog")
+ , semver = require("semver")
+ , readJson = require("read-package-json")
+ , url = require("url")
+ , npm = require("../npm.js")
+ , registry = npm.registry
+ , deprCheck = require("../utils/depr-check.js")
+ , locker = require("../utils/locker.js")
+ , lock = locker.lock
+ , unlock = locker.unlock
+ , addRemoteTarball = require("./add-remote-tarball.js")
+ , addRemoteGit = require("./add-remote-git.js")
+
+
+module.exports = addNamed
+
+// only have one request in flight for a given
+// name@blah thing.
+var inFlightNames = {}
+function addNamed (name, x, data, inFlightURLs, cb_) {
+ if (typeof cb_ !== "function") cb_ = inFlightURLs, inFlightURLs = {}
+ if (typeof cb_ !== "function") cb_ = data, data = null
+ log.verbose("addNamed", [name, x])
+
+ var k = name + "@" + x
+ if (!inFlightNames[k]) inFlightNames[k] = []
+ var iF = inFlightNames[k]
+ iF.push(cb_)
+ if (iF.length > 1) return
+
+ function cb (er, data) {
+ if (data && !data._fromGithub) data._from = k
+ unlock(k, function () {
+ var c
+ while (c = iF.shift()) c(er, data)
+ delete inFlightNames[k]
+ })
+ }
+
+ log.verbose("addNamed", [semver.valid(x), semver.validRange(x)])
+ lock(k, function (er) {
+ if (er) return cb(er)
+
+ var fn = ( semver.valid(x, true) ? addNameVersion
+ : semver.validRange(x, true) ? addNameRange
+ : addNameTag
+ )
+ fn(name, x, data, inFlightURLs, cb)
+ })
+}
+
+function addNameTag (name, tag, data, inFlightURLs, cb_) {
+ if (typeof cb_ !== "function") cb_ = data, data = null
+ log.info("addNameTag", [name, tag])
+ var explicit = true
+ if (!tag) {
+ explicit = false
+ tag = npm.config.get("tag")
+ }
+
+ function cb(er, data) {
+ // might be username/project
+ // in that case, try it as a github url.
+ if (er && tag.split("/").length === 2) {
+ return maybeGithub(tag, name, er, cb_)
+ }
+ return cb_(er, data)
+ }
+
+ registry.get(name, function (er, data, json, response) {
+ if (!er) {
+ er = errorResponse(name, response)
+ }
+ if (er) return cb(er)
+ engineFilter(data)
+ if (data["dist-tags"] && data["dist-tags"][tag]
+ && data.versions[data["dist-tags"][tag]]) {
+ var ver = data["dist-tags"][tag]
+ return addNamed(name, ver, data.versions[ver], inFlightURLs, cb)
+ }
+ if (!explicit && Object.keys(data.versions).length) {
+ return addNamed(name, "*", data, inFlightURLs, cb)
+ }
+
+ er = installTargetsError(tag, data)
+ return cb(er)
+ })
+}
+
+function engineFilter (data) {
+ var npmv = npm.version
+ , nodev = npm.config.get("node-version")
+ , strict = npm.config.get("engine-strict")
+
+ if (!nodev || npm.config.get("force")) return data
+
+ Object.keys(data.versions || {}).forEach(function (v) {
+ var eng = data.versions[v].engines
+ if (!eng) return
+ if (!strict && !data.versions[v].engineStrict) return
+ if (eng.node && !semver.satisfies(nodev, eng.node, true)
+ || eng.npm && !semver.satisfies(npmv, eng.npm, true)) {
+ delete data.versions[v]
+ }
+ })
+}
+
+function addNameVersion (name, v, data, inFlightURLs, cb) {
+ if (typeof cb !== "function") cb = data, data = null
+
+ var ver = semver.valid(v, true)
+ if (!ver) return cb(new Error("Invalid version: "+v))
+
+ var response
+
+ if (data) {
+ response = null
+ return next()
+ }
+ registry.get(name, function (er, d, json, resp) {
+ if (!er) {
+ er = errorResponse(name, resp)
+ }
+ if (er) return cb(er)
+ data = d && d.versions[ver]
+ if (!data) {
+ er = new Error('version not found: ' + name + '@' + ver)
+ er.package = name
+ er.statusCode = 404
+ return cb(er)
+ }
+ response = resp
+ next()
+ })
+
+ function next () {
+ deprCheck(data)
+ var dist = data.dist
+
+ if (!dist) return cb(new Error("No dist in "+data._id+" package"))
+
+ if (!dist.tarball) return cb(new Error(
+ "No dist.tarball in " + data._id + " package"))
+
+ if ((response && response.statusCode !== 304) || npm.config.get("force")) {
+ return fetchit()
+ }
+
+ // we got cached data, so let's see if we have a tarball.
+ var pkgroot = path.join(npm.cache, name, ver)
+ var pkgtgz = path.join(pkgroot, "package.tgz")
+ var pkgjson = path.join(pkgroot, "package", "package.json")
+ fs.stat(pkgtgz, function (er) {
+ if (!er) {
+ readJson(pkgjson, function (er, data) {
+ er = needName(er, data)
+ er = needVersion(er, data)
+ if (er && er.code !== "ENOENT" && er.code !== "ENOTDIR")
+ return cb(er)
+ if (er) return fetchit()
+ return cb(null, data)
+ })
+ } else return fetchit()
+ })
+
+ function fetchit () {
+ if (!npm.config.get("registry")) {
+ return cb(new Error("Cannot fetch: "+dist.tarball))
+ }
+
+ // use the same protocol as the registry.
+ // https registry --> https tarballs, but
+ // only if they're the same hostname, or else
+ // detached tarballs may not work.
+ var tb = url.parse(dist.tarball)
+ var rp = url.parse(npm.config.get("registry"))
+ if (tb.hostname === rp.hostname
+ && tb.protocol !== rp.protocol) {
+ tb.protocol = url.parse(npm.config.get("registry")).protocol
+ delete tb.href
+ }
+ tb = url.format(tb)
+
+ // only add non-shasum'ed packages if --forced.
+ // only ancient things would lack this for good reasons nowadays.
+ if (!dist.shasum && !npm.config.get("force")) {
+ return cb(new Error("package lacks shasum: " + data._id))
+ }
+ return addRemoteTarball( tb
+ , dist.shasum
+ , name
+ , ver
+ , inFlightURLs
+ , cb )
+ }
+ }
+}
+
+function addNameRange (name, range, data, inFlightURLs, cb) {
+ if (typeof cb !== "function") cb = data, data = null
+
+ range = semver.validRange(range, true)
+ if (range === null) return cb(new Error(
+ "Invalid version range: "+range))
+
+ log.silly("addNameRange", {name:name, range:range, hasData:!!data})
+
+ if (data) return next()
+ registry.get(name, function (er, d, json, response) {
+ if (!er) {
+ er = errorResponse(name, response)
+ }
+ if (er) return cb(er)
+ data = d
+ next()
+ })
+
+ function next () {
+ log.silly( "addNameRange", "number 2"
+ , {name:name, range:range, hasData:!!data})
+ engineFilter(data)
+
+ log.silly("addNameRange", "versions"
+ , [data.name, Object.keys(data.versions || {})])
+
+ // if the tagged version satisfies, then use that.
+ var tagged = data["dist-tags"][npm.config.get("tag")]
+ if (tagged
+ && data.versions[tagged]
+ && semver.satisfies(tagged, range, true)) {
+ return addNamed(name, tagged, data.versions[tagged], inFlightURLs, cb)
+ }
+
+ // find the max satisfying version.
+ var versions = Object.keys(data.versions || {})
+ var ms = semver.maxSatisfying(versions, range, true)
+ if (!ms) {
+ return cb(installTargetsError(range, data))
+ }
+
+ // if we don't have a registry connection, try to see if
+ // there's a cached copy that will be ok.
+ addNamed(name, ms, data.versions[ms], inFlightURLs, cb)
+ }
+}
+
+function maybeGithub (p, name, er, cb) {
+ var u = "git://github.com/" + p
+ , up = url.parse(u)
+ log.info("maybeGithub", "Attempting %s from %s", p, u)
+
+ return addRemoteGit(u, up, name, true, function (er2, data) {
+ if (er2) {
+ var upriv = "git+ssh://git@github.com:" + p
+ , uppriv = url.parse(upriv)
+
+ log.info("maybeGithub", "Attempting %s from %s", p, upriv)
+
+ return addRemoteGit(upriv, uppriv, false, name, function (er3, data) {
+ if (er3) return cb(er)
+ success(upriv, data)
+ })
+ }
+ success(u, data)
+ })
+
+ function success (u, data) {
+ data._from = u
+ data._fromGithub = true
+ return cb(null, data)
+ }
+}
+
+function installTargetsError (requested, data) {
+ var targets = Object.keys(data["dist-tags"]).filter(function (f) {
+ return (data.versions || {}).hasOwnProperty(f)
+ }).concat(Object.keys(data.versions || {}))
+
+ requested = data.name + (requested ? "@'" + requested + "'" : "")
+
+ targets = targets.length
+ ? "Valid install targets:\n" + JSON.stringify(targets) + "\n"
+ : "No valid targets found.\n"
+ + "Perhaps not compatible with your version of node?"
+
+ var er = new Error( "No compatible version found: "
+ + requested + "\n" + targets)
+ er.code = "ETARGET"
+ return er
+}
+
+function errorResponse (name, response) {
+ var er
+ if (response.statusCode >= 400) {
+ er = new Error(http.STATUS_CODES[response.statusCode])
+ er.statusCode = response.statusCode
+ er.code = "E" + er.statusCode
+ er.pkgid = name
+ }
+ return er
+}
+
+function needName(er, data) {
+ return er ? er
+ : (data && !data.name) ? new Error("No name provided")
+ : null
+}
+
+function needVersion(er, data) {
+ return er ? er
+ : (data && !data.version) ? new Error("No version provided")
+ : null
+}
diff --git a/lib/cache/add-remote-git.js b/lib/cache/add-remote-git.js
new file mode 100644
index 000000000..a7f928d1b
--- /dev/null
+++ b/lib/cache/add-remote-git.js
@@ -0,0 +1,288 @@
+'use strict';
+
+var mkdir = require("mkdirp")
+ , spawn = require("child_process").spawn
+ , exec = require("child_process").execFile
+ , once = require("once")
+ , fs = require("graceful-fs")
+ , log = require("npmlog")
+ , path = require("path")
+ , url = require("url")
+ , chownr = require("chownr")
+ , zlib = require("zlib")
+ , which = require("which")
+ , crypto = require("crypto")
+ , chmodr = require("chmodr")
+ , npm = require("../npm.js")
+ , rm = require("../utils/gently-rm.js")
+ , locker = require("../utils/locker.js")
+ , lock = locker.lock
+ , unlock = locker.unlock
+ , getCacheStat = require("./get-stat.js")
+ , addLocalTarball = require("./add-local-tarball.js")
+
+
+// 1. cacheDir = path.join(cache,'_git-remotes',sha1(u))
+// 2. checkGitDir(cacheDir) ? 4. : 3. (rm cacheDir if necessary)
+// 3. git clone --mirror u cacheDir
+// 4. cd cacheDir && git fetch -a origin
+// 5. git archive /tmp/random.tgz
+// 6. addLocalTarball(/tmp/random.tgz) <gitref> --format=tar --prefix=package/
+// silent flag is used if this should error quietly
+module.exports = function addRemoteGit (u, parsed, name, silent, inFlightURLs, cb_) {
+ if (typeof cb_ !== "function") cb_ = name, name = null
+
+ if (!inFlightURLs[u]) inFlightURLs[u] = []
+ var iF = inFlightURLs[u]
+ iF.push(cb_)
+ if (iF.length > 1) return
+
+ // git is so tricky!
+ // if the path is like ssh://foo:22/some/path then it works, but
+ // it needs the ssh://
+ // If the path is like ssh://foo:some/path then it works, but
+ // only if you remove the ssh://
+ var origUrl = u
+ u = u.replace(/^git\+/, "")
+ .replace(/#.*$/, "")
+
+ // ssh paths that are scp-style urls don't need the ssh://
+ if (parsed.pathname.match(/^\/?:/)) {
+ u = u.replace(/^ssh:\/\//, "")
+ }
+
+ function cb (er, data) {
+ unlock(u, function () {
+ var c
+ while (c = iF.shift()) c(er, data)
+ delete inFlightURLs[origUrl]
+ })
+ }
+
+ lock(u, function (er) {
+ if (er) return cb(er)
+
+ // figure out what we should check out.
+ var co = parsed.hash && parsed.hash.substr(1) || "master"
+
+ var v = crypto.createHash("sha1").update(u).digest("hex").slice(0, 8)
+ v = u.replace(/[^a-zA-Z0-9]+/g, '-') + '-' + v
+
+ log.verbose("addRemoteGit", [u, co])
+
+ var p = path.join(npm.config.get("cache"), "_git-remotes", v)
+
+ checkGitDir(p, u, co, origUrl, silent, function(er, data) {
+ chmodr(p, npm.modes.file, function(erChmod) {
+ if (er) return cb(er, data)
+ return cb(erChmod, data)
+ })
+ })
+ })
+}
+
+function checkGitDir (p, u, co, origUrl, silent, cb) {
+ fs.stat(p, function (er, s) {
+ if (er) return cloneGitRemote(p, u, co, origUrl, silent, cb)
+ if (!s.isDirectory()) return rm(p, function (er){
+ if (er) return cb(er)
+ cloneGitRemote(p, u, co, origUrl, silent, cb)
+ })
+
+ var git = npm.config.get("git")
+ var args = [ "config", "--get", "remote.origin.url" ]
+ var env = gitEnv()
+
+ // check for git
+ which(git, function (err) {
+ if (err) {
+ err.code = "ENOGIT"
+ return cb(err)
+ }
+ exec(git, args, {cwd: p, env: env}, function (er, stdout, stderr) {
+ var stdoutTrimmed = (stdout + "\n" + stderr).trim()
+ if (er || u !== stdout.trim()) {
+ log.warn( "`git config --get remote.origin.url` returned "
+ + "wrong result ("+u+")", stdoutTrimmed )
+ return rm(p, function (er){
+ if (er) return cb(er)
+ cloneGitRemote(p, u, co, origUrl, silent, cb)
+ })
+ }
+ log.verbose("git remote.origin.url", stdoutTrimmed)
+ archiveGitRemote(p, u, co, origUrl, cb)
+ })
+ })
+ })
+}
+
+
+function cloneGitRemote (p, u, co, origUrl, silent, cb) {
+ mkdir(p, function (er) {
+ if (er) return cb(er)
+
+ var git = npm.config.get("git")
+ var args = [ "clone", "--mirror", u, p ]
+ var env = gitEnv()
+
+ // check for git
+ which(git, function (err) {
+ if (err) {
+ err.code = "ENOGIT"
+ return cb(err)
+ }
+ exec(git, args, {cwd: p, env: env}, function (er, stdout, stderr) {
+ stdout = (stdout + "\n" + stderr).trim()
+ if (er) {
+ if (silent) {
+ log.verbose("git clone " + u, stdout)
+ } else {
+ log.error("git clone " + u, stdout)
+ }
+ return cb(er)
+ }
+ log.verbose("git clone " + u, stdout)
+ archiveGitRemote(p, u, co, origUrl, cb)
+ })
+ })
+ })
+}
+
+function archiveGitRemote (p, u, co, origUrl, cb) {
+ var git = npm.config.get("git")
+ var archive = [ "fetch", "-a", "origin" ]
+ var resolve = [ "rev-list", "-n1", co ]
+ var env = gitEnv()
+
+ var resolved = null
+ var tmp
+
+ exec(git, archive, {cwd: p, env: env}, function (er, stdout, stderr) {
+ stdout = (stdout + "\n" + stderr).trim()
+ if (er) {
+ log.error("git fetch -a origin ("+u+")", stdout)
+ return cb(er)
+ }
+ log.verbose("git fetch -a origin ("+u+")", stdout)
+ tmp = path.join(npm.tmp, Date.now()+"-"+Math.random(), "tmp.tgz")
+ verifyOwnership()
+ })
+
+ function verifyOwnership() {
+ if (process.platform === "win32") {
+ log.silly("verifyOwnership", "skipping for windows")
+ resolveHead()
+ } else {
+ getCacheStat(function(er, cs) {
+ if (er) {
+ log.error("Could not get cache stat")
+ return cb(er)
+ }
+ chownr(p, cs.uid, cs.gid, function(er) {
+ if (er) {
+ log.error("Failed to change folder ownership under npm cache for %s", p)
+ return cb(er)
+ }
+ resolveHead()
+ })
+ })
+ }
+ }
+
+ function resolveHead () {
+ exec(git, resolve, {cwd: p, env: env}, function (er, stdout, stderr) {
+ stdout = (stdout + "\n" + stderr).trim()
+ if (er) {
+ log.error("Failed resolving git HEAD (" + u + ")", stderr)
+ return cb(er)
+ }
+ log.verbose("git rev-list -n1 " + co, stdout)
+ var parsed = url.parse(origUrl)
+ parsed.hash = stdout
+ resolved = url.format(parsed)
+
+ // https://github.com/npm/npm/issues/3224
+ // node incorrectly sticks a / at the start of the path
+ // We know that the host won't change, so split and detect this
+ var spo = origUrl.split(parsed.host)
+ var spr = resolved.split(parsed.host)
+ if (spo[1].charAt(0) === ':' && spr[1].charAt(0) === '/')
+ spr[1] = spr[1].slice(1)
+ resolved = spr.join(parsed.host)
+
+ log.verbose('resolved git url', resolved)
+ next()
+ })
+ }
+
+ function next () {
+ mkdir(path.dirname(tmp), function (er) {
+ if (er) return cb(er)
+ var gzip = zlib.createGzip({ level: 9 })
+ var git = npm.config.get("git")
+ var args = ["archive", co, "--format=tar", "--prefix=package/"]
+ var out = fs.createWriteStream(tmp)
+ var env = gitEnv()
+ cb = once(cb)
+ var cp = spawn(git, args, { env: env, cwd: p })
+ cp.on("error", cb)
+ cp.stderr.on("data", function(chunk) {
+ log.silly(chunk.toString(), "git archive")
+ })
+
+ cp.stdout.pipe(gzip).pipe(out).on("close", function() {
+ addLocalTarball(tmp, function(er, data) {
+ if (data) data._resolved = resolved
+ cb(er, data)
+ })
+ })
+ })
+ }
+}
+
+var gitEnv_
+function gitEnv () {
+ // git responds to env vars in some weird ways in post-receive hooks
+ // so don't carry those along.
+ if (gitEnv_) return gitEnv_
+ gitEnv_ = {}
+ for (var k in process.env) {
+ if (!~['GIT_PROXY_COMMAND','GIT_SSH','GIT_SSL_NO_VERIFY'].indexOf(k) && k.match(/^GIT/)) continue
+ gitEnv_[k] = process.env[k]
+ }
+ return gitEnv_
+}
diff --git a/lib/cache/add-remote-tarball.js b/lib/cache/add-remote-tarball.js
new file mode 100644
index 000000000..f6550dd74
--- /dev/null
+++ b/lib/cache/add-remote-tarball.js
@@ -0,0 +1,106 @@
+'use strict';
+
+var mkdir = require("mkdirp")
+ , log = require("npmlog")
+ , path = require("path")
+ , sha = require("sha")
+ , retry = require("retry")
+ , npm = require("../npm.js")
+ , fetch = require("../utils/fetch.js")
+ , locker = require("../utils/locker.js")
+ , lock = locker.lock
+ , unlock = locker.unlock
+ , addLocalTarball = require("./add-local-tarball.js")
+
+module.exports = function addRemoteTarball (u, shasum, name, version, inFlightURLs, cb_) {
+ if (typeof cb_ !== "function") cb_ = inFlightURLs, inFlightURLs = {}
+ if (typeof cb_ !== "function") cb_ = version, version = ""
+ if (typeof cb_ !== "function") cb_ = name, name = ""
+ if (typeof cb_ !== "function") cb_ = shasum, shasum = null
+
+ if (!inFlightURLs[u]) inFlightURLs[u] = []
+ var iF = inFlightURLs[u]
+ iF.push(cb_)
+ if (iF.length > 1) return
+
+ function cb (er, data) {
+ if (data) {
+ data._from = u
+ data._shasum = data._shasum || shasum
+ data._resolved = u
+ }
+ unlock(u, function () {
+ var c
+ while (c = iF.shift()) c(er, data)
+ delete inFlightURLs[u]
+ })
+ }
+
+ var tmp = path.join(npm.tmp, Date.now()+"-"+Math.random(), "tmp.tgz")
+
+ lock(u, function (er) {
+ if (er) return cb(er)
+
+ log.verbose("addRemoteTarball", [u, shasum])
+ mkdir(path.dirname(tmp), function (er) {
+ if (er) return cb(er)
+ addRemoteTarball_(u, tmp, shasum, done)
+ })
+ })
+
+ function done (er, resp, shasum) {
+ if (er) return cb(er)
+ addLocalTarball(tmp, name, version, shasum, cb)
+ }
+}
+
// Download with retries, verifying the shasum on each attempt.
function addRemoteTarball_ (u, tmp, shasum, cb) {
  // Tuned to spread 3 attempts over about a minute.
  // See formula at <https://github.com/tim-kos/node-retry>.
  var operation = retry.operation({
    retries    : npm.config.get("fetch-retries"),
    factor     : npm.config.get("fetch-retry-factor"),
    minTimeout : npm.config.get("fetch-retry-mintimeout"),
    maxTimeout : npm.config.get("fetch-retry-maxtimeout")
  })

  // Only retry on 408, 5xx or no `response`.
  function shouldRetry (er, response) {
    if (!er) return false
    var sc = response && response.statusCode
    return !sc || sc === 408 || sc >= 500
  }

  operation.attempt(function (currentAttempt) {
    log.info("retry", "fetch attempt " + currentAttempt
      + " at " + (new Date()).toLocaleTimeString())

    fetchAndShaCheck(u, tmp, shasum, function (er, response, shasum) {
      if (shouldRetry(er, response) && operation.retry(er)) {
        log.info("retry", "will retry, error on last attempt: " + er)
        return
      }
      cb(er, response, shasum)
    })
  })
}
+
// Download `u` to `tmp`, then either verify the expected shasum or
// compute one for later comparison.
function fetchAndShaCheck (u, tmp, shasum, cb) {
  fetch(u, tmp, function (er, response) {
    if (er) {
      log.error("fetch failed", u)
      return cb(er, response)
    }

    if (shasum) {
      // validate that the url we just downloaded matches the expected shasum.
      return sha.check(tmp, shasum, function (er) {
        if (er && er.message) {
          // add original filename for better debuggability
          er.message = er.message + '\n' + 'From: ' + u
        }
        cb(er, response, shasum)
      })
    }

    // Well, we weren't given a shasum, so at least sha what we have
    // in case we want to compare it to something else later
    sha.get(tmp, function (er, shasum) {
      cb(er, response, shasum)
    })
  })
}
diff --git a/lib/cache/get-stat.js b/lib/cache/get-stat.js
index 5e9709731..a144508af 100644
--- a/lib/cache/get-stat.js
+++ b/lib/cache/get-stat.js
@@ -1,12 +1,10 @@
'use strict';
-/* jshint node: true */
-var fs = require("fs")
-
-var npm = require("../npm.js")
-var log = require("npmlog")
var mkdir = require("mkdirp")
-var chownr = require("chownr")
+ , fs = require("graceful-fs")
+ , log = require("npmlog")
+ , chownr = require("chownr")
+ , npm = require("../npm.js")
function makeCacheDir (cb) {
if (!process.getuid) return mkdir(npm.cache, cb)
diff --git a/lib/cache/maybe-github.js b/lib/cache/maybe-github.js
new file mode 100644
index 000000000..198683468
--- /dev/null
+++ b/lib/cache/maybe-github.js
@@ -0,0 +1,32 @@
+'use strict';
+
+var log = require("npmlog")
+ , url = require("url")
+ , addRemoteGit = require("./add-remote-git.js")
+
+module.exports = function maybeGithub (p, name, er, cb) {
+ var u = "git://github.com/" + p
+ , up = url.parse(u)
+ log.info("maybeGithub", "Attempting %s from %s", p, u)
+
+ return addRemoteGit(u, up, name, true, function (er2, data) {
+ if (er2) {
+ var upriv = "git+ssh://git@github.com:" + p
+ , uppriv = url.parse(upriv)
+
+ log.info("maybeGithub", "Attempting %s from %s", p, upriv)
+
+ return addRemoteGit(upriv, uppriv, false, name, function (er3, data) {
+ if (er3) return cb(er)
+ success(upriv, data)
+ })
+ }
+ success(u, data)
+ })
+
+ function success (u, data) {
+ data._from = u
+ data._fromGithub = true
+ return cb(null, data)
+ }
+}
diff --git a/lib/utils/depr-check.js b/lib/utils/depr-check.js
new file mode 100644
index 000000000..6252b33c2
--- /dev/null
+++ b/lib/utils/depr-check.js
@@ -0,0 +1,15 @@
+'use strict';
+
+var log = require("npmlog")
+
+var deprecated = {}
+ , deprWarned = {}
+module.exports = function deprCheck (data) {
+ if (deprecated[data._id]) data.deprecated = deprecated[data._id]
+ if (data.deprecated) deprecated[data._id] = data.deprecated
+ else return
+ if (!deprWarned[data._id]) {
+ deprWarned[data._id] = true
+ log.warn("deprecated", "%s: %s", data._id, data.deprecated)
+ }
+}