Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/npm/cli.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorisaacs <i@izs.me>2012-01-13 20:33:27 +0400
committerisaacs <i@izs.me>2012-01-13 20:41:06 +0400
commitc18fe966ddbeb66c8a9b9aa8ab4899166e6986f5 (patch)
tree8650e7575caff9d05e04352b78669dd81d21230d /node_modules
parentb7ff884870958221eb52c5a600c101fab4c26781 (diff)
check in node_modules
Diffstat (limited to 'node_modules')
-rw-r--r--node_modules/abbrev/LICENSE23
-rw-r--r--node_modules/abbrev/README.md23
-rw-r--r--node_modules/abbrev/lib/abbrev.js106
-rw-r--r--node_modules/abbrev/package.json11
-rw-r--r--node_modules/block-stream/README.md14
-rw-r--r--node_modules/block-stream/block-stream.js209
-rw-r--r--node_modules/block-stream/package.json23
-rw-r--r--node_modules/fast-list/.npmignore1
-rw-r--r--node_modules/fast-list/.travis.yml4
-rw-r--r--node_modules/fast-list/README.md116
-rw-r--r--node_modules/fast-list/fast-list.js144
-rw-r--r--node_modules/fast-list/package.json20
-rw-r--r--node_modules/fstream/.npmignore3
-rw-r--r--node_modules/fstream/.travis.yml3
-rw-r--r--node_modules/fstream/README.md76
-rw-r--r--node_modules/fstream/fstream.js31
-rw-r--r--node_modules/fstream/lib/abstract.js82
-rw-r--r--node_modules/fstream/lib/collect.js67
-rw-r--r--node_modules/fstream/lib/dir-reader.js192
-rw-r--r--node_modules/fstream/lib/dir-writer.js165
-rw-r--r--node_modules/fstream/lib/file-reader.js147
-rw-r--r--node_modules/fstream/lib/file-writer.js95
-rw-r--r--node_modules/fstream/lib/get-type.js32
-rw-r--r--node_modules/fstream/lib/link-reader.js54
-rw-r--r--node_modules/fstream/lib/link-writer.js96
-rw-r--r--node_modules/fstream/lib/proxy-reader.js89
-rw-r--r--node_modules/fstream/lib/proxy-writer.js109
-rw-r--r--node_modules/fstream/lib/reader.js240
-rw-r--r--node_modules/fstream/lib/socket-reader.js38
-rw-r--r--node_modules/fstream/lib/writer.js316
-rw-r--r--node_modules/fstream/package.json26
-rw-r--r--node_modules/graceful-fs/.npmignore1
-rw-r--r--node_modules/graceful-fs/LICENSE23
-rw-r--r--node_modules/graceful-fs/README.md5
-rw-r--r--node_modules/graceful-fs/graceful-fs.js212
-rw-r--r--node_modules/graceful-fs/package.json18
-rw-r--r--node_modules/inherits/LICENSE26
-rw-r--r--node_modules/inherits/README.md51
-rw-r--r--node_modules/inherits/inherits-old.js40
-rw-r--r--node_modules/inherits/inherits.js29
-rw-r--r--node_modules/inherits/package.json8
-rw-r--r--node_modules/ini/LICENSE23
-rw-r--r--node_modules/ini/README.md71
-rw-r--r--node_modules/ini/ini.js102
-rw-r--r--node_modules/ini/package.json24
-rw-r--r--node_modules/lru-cache/.npmignore1
-rw-r--r--node_modules/lru-cache/LICENSE23
-rw-r--r--node_modules/lru-cache/README.md12
-rw-r--r--node_modules/lru-cache/lib/lru-cache.js100
-rw-r--r--node_modules/lru-cache/package.json13
-rw-r--r--node_modules/minimatch/.travis.yml4
-rw-r--r--node_modules/minimatch/LICENSE23
-rw-r--r--node_modules/minimatch/README.md212
-rw-r--r--node_modules/minimatch/minimatch.js1021
-rw-r--r--node_modules/minimatch/package.json29
-rw-r--r--node_modules/mkdirp/LICENSE21
-rw-r--r--node_modules/mkdirp/README.markdown21
-rw-r--r--node_modules/mkdirp/index.js36
-rw-r--r--node_modules/mkdirp/package.json23
-rw-r--r--node_modules/node-uuid/.npmignore2
-rw-r--r--node_modules/node-uuid/LICENSE.md3
-rw-r--r--node_modules/node-uuid/README.md199
-rw-r--r--node_modules/node-uuid/package.json14
-rw-r--r--node_modules/node-uuid/uuid.js249
-rw-r--r--node_modules/nopt/.npmignore0
-rw-r--r--node_modules/nopt/LICENSE23
-rw-r--r--node_modules/nopt/README.md210
-rwxr-xr-xnode_modules/nopt/bin/nopt.js44
-rw-r--r--node_modules/nopt/lib/nopt.js552
-rw-r--r--node_modules/nopt/package.json12
-rw-r--r--node_modules/proto-list/LICENSE23
-rw-r--r--node_modules/proto-list/README.md3
-rw-r--r--node_modules/proto-list/package.json9
-rw-r--r--node_modules/proto-list/proto-list.js94
-rw-r--r--node_modules/read/README.md43
-rw-r--r--node_modules/read/lib/read.js151
-rw-r--r--node_modules/read/package.json16
-rw-r--r--node_modules/request/LICENSE55
-rw-r--r--node_modules/request/README.md286
-rw-r--r--node_modules/request/forever.js84
-rw-r--r--node_modules/request/main.js652
-rw-r--r--node_modules/request/mimetypes.js146
-rw-r--r--node_modules/request/oauth.js34
-rw-r--r--node_modules/request/package.json15
-rw-r--r--node_modules/request/uuid.js19
-rw-r--r--node_modules/request/vendor/cookie/index.js60
-rw-r--r--node_modules/request/vendor/cookie/jar.js72
-rw-r--r--node_modules/rimraf/AUTHORS5
-rw-r--r--node_modules/rimraf/LICENSE23
-rw-r--r--node_modules/rimraf/README.md32
-rw-r--r--node_modules/rimraf/fiber.js86
-rw-r--r--node_modules/rimraf/package.json9
-rw-r--r--node_modules/rimraf/rimraf.js145
-rw-r--r--node_modules/semver/LICENSE23
-rw-r--r--node_modules/semver/README.md119
-rwxr-xr-xnode_modules/semver/bin/semver71
-rw-r--r--node_modules/semver/package.json11
-rw-r--r--node_modules/semver/semver.js305
-rw-r--r--node_modules/slide/.npmignore1
-rw-r--r--node_modules/slide/LICENSE23
-rw-r--r--node_modules/slide/README.md32
-rw-r--r--node_modules/slide/index.js1
-rw-r--r--node_modules/slide/lib/async-map-ordered.js65
-rw-r--r--node_modules/slide/lib/async-map.js56
-rw-r--r--node_modules/slide/lib/bind-actor.js16
-rw-r--r--node_modules/slide/lib/chain.js20
-rw-r--r--node_modules/slide/lib/slide.js3
-rw-r--r--node_modules/slide/nodejs-controlling-flow.pdfbin0 -> 167502 bytes
-rw-r--r--node_modules/slide/package.json19
-rw-r--r--node_modules/tar/.npmignore5
-rw-r--r--node_modules/tar/.travis.yml3
-rw-r--r--node_modules/tar/README.md50
-rw-r--r--node_modules/tar/lib/buffer-entry.js30
-rw-r--r--node_modules/tar/lib/entry-writer.js169
-rw-r--r--node_modules/tar/lib/entry.js212
-rw-r--r--node_modules/tar/lib/extended-header-writer.js192
-rw-r--r--node_modules/tar/lib/extended-header.js139
-rw-r--r--node_modules/tar/lib/extract.js64
-rw-r--r--node_modules/tar/lib/global-header-writer.js14
-rw-r--r--node_modules/tar/lib/header.js385
-rw-r--r--node_modules/tar/lib/pack.js226
-rw-r--r--node_modules/tar/lib/parse.js270
-rw-r--r--node_modules/tar/package.json26
-rw-r--r--node_modules/tar/tar.js172
-rw-r--r--node_modules/which/LICENSE23
-rw-r--r--node_modules/which/README.md5
-rwxr-xr-xnode_modules/which/bin/which14
-rw-r--r--node_modules/which/package.json17
-rw-r--r--node_modules/which/which.js67
129 files changed, 10945 insertions, 0 deletions
diff --git a/node_modules/abbrev/LICENSE b/node_modules/abbrev/LICENSE
new file mode 100644
index 000000000..05a401094
--- /dev/null
+++ b/node_modules/abbrev/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/abbrev/README.md b/node_modules/abbrev/README.md
new file mode 100644
index 000000000..99746fe67
--- /dev/null
+++ b/node_modules/abbrev/README.md
@@ -0,0 +1,23 @@
+# abbrev-js
+
+Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev).
+
+Usage:
+
+ var abbrev = require("abbrev");
+ abbrev("foo", "fool", "folding", "flop");
+
+ // returns:
+ { fl: 'flop'
+ , flo: 'flop'
+ , flop: 'flop'
+ , fol: 'folding'
+ , fold: 'folding'
+ , foldi: 'folding'
+ , foldin: 'folding'
+ , folding: 'folding'
+ , foo: 'foo'
+ , fool: 'fool'
+ }
+
+This is handy for command-line scripts, or other cases where you want to be able to accept shorthands.
diff --git a/node_modules/abbrev/lib/abbrev.js b/node_modules/abbrev/lib/abbrev.js
new file mode 100644
index 000000000..037de2d8d
--- /dev/null
+++ b/node_modules/abbrev/lib/abbrev.js
@@ -0,0 +1,106 @@
+
+module.exports = exports = abbrev.abbrev = abbrev
+
+abbrev.monkeyPatch = monkeyPatch
+
+function monkeyPatch () {
+ Array.prototype.abbrev = function () { return abbrev(this) }
+ Object.prototype.abbrev = function () { return abbrev(Object.keys(this)) }
+}
+
+function abbrev (list) {
+ if (arguments.length !== 1 || !Array.isArray(list)) {
+ list = Array.prototype.slice.call(arguments, 0)
+ }
+ for (var i = 0, l = list.length, args = [] ; i < l ; i ++) {
+ args[i] = typeof list[i] === "string" ? list[i] : String(list[i])
+ }
+
+ // sort them lexicographically, so that they're next to their nearest kin
+ args = args.sort(lexSort)
+
+ // walk through each, seeing how much it has in common with the next and previous
+ var abbrevs = {}
+ , prev = ""
+ for (var i = 0, l = args.length ; i < l ; i ++) {
+ var current = args[i]
+ , next = args[i + 1] || ""
+ , nextMatches = true
+ , prevMatches = true
+ if (current === next) continue
+ for (var j = 0, cl = current.length ; j < cl ; j ++) {
+ var curChar = current.charAt(j)
+ nextMatches = nextMatches && curChar === next.charAt(j)
+ prevMatches = prevMatches && curChar === prev.charAt(j)
+ if (nextMatches || prevMatches) continue
+ else {
+ j ++
+ break
+ }
+ }
+ prev = current
+ if (j === cl) {
+ abbrevs[current] = current
+ continue
+ }
+ for (var a = current.substr(0, j) ; j <= cl ; j ++) {
+ abbrevs[a] = current
+ a += current.charAt(j)
+ }
+ }
+ return abbrevs
+}
+
+function lexSort (a, b) {
+ return a === b ? 0 : a > b ? 1 : -1
+}
+
+
+// tests
+if (module === require.main) {
+
+var assert = require("assert")
+ , sys
+sys = require("util")
+
+console.log("running tests")
+function test (list, expect) {
+ var actual = abbrev(list)
+ assert.deepEqual(actual, expect,
+ "abbrev("+sys.inspect(list)+") === " + sys.inspect(expect) + "\n"+
+ "actual: "+sys.inspect(actual))
+ actual = abbrev.apply(exports, list)
+ assert.deepEqual(abbrev.apply(exports, list), expect,
+ "abbrev("+list.map(JSON.stringify).join(",")+") === " + sys.inspect(expect) + "\n"+
+ "actual: "+sys.inspect(actual))
+}
+
+test([ "ruby", "ruby", "rules", "rules", "rules" ],
+{ rub: 'ruby'
+, ruby: 'ruby'
+, rul: 'rules'
+, rule: 'rules'
+, rules: 'rules'
+})
+test(["fool", "foom", "pool", "pope"],
+{ fool: 'fool'
+, foom: 'foom'
+, poo: 'pool'
+, pool: 'pool'
+, pop: 'pope'
+, pope: 'pope'
+})
+test(["a", "ab", "abc", "abcd", "abcde", "acde"],
+{ a: 'a'
+, ab: 'ab'
+, abc: 'abc'
+, abcd: 'abcd'
+, abcde: 'abcde'
+, ac: 'acde'
+, acd: 'acde'
+, acde: 'acde'
+})
+
+console.log("pass")
+
+}
diff --git a/node_modules/abbrev/package.json b/node_modules/abbrev/package.json
new file mode 100644
index 000000000..045cbd4b2
--- /dev/null
+++ b/node_modules/abbrev/package.json
@@ -0,0 +1,11 @@
+{ "name" : "abbrev"
+, "version" : "1.0.3"
+, "description" : "Like ruby's abbrev module, but in js"
+, "author" : "Isaac Z. Schlueter <i@izs.me>"
+, "main" : "./lib/abbrev.js"
+, "scripts" : { "test" : "node lib/abbrev.js" }
+, "repository" : "http://github.com/isaacs/abbrev-js"
+, "license" :
+ { "type" : "MIT"
+ , "url" : "https://github.com/isaacs/abbrev-js/raw/master/LICENSE" }
+}
diff --git a/node_modules/block-stream/README.md b/node_modules/block-stream/README.md
new file mode 100644
index 000000000..c16e9c468
--- /dev/null
+++ b/node_modules/block-stream/README.md
@@ -0,0 +1,14 @@
+# block-stream
+
+A stream of blocks.
+
+Write data into it, and it'll output data in buffer blocks the size you
+specify, padding with zeroes if necessary.
+
+```javascript
+var block = new BlockStream(512)
+fs.createReadStream("some-file").pipe(block)
+block.pipe(fs.createWriteStream("block-file"))
+```
+
+When `.end()` or `.flush()` is called, it'll pad the block with zeroes.
diff --git a/node_modules/block-stream/block-stream.js b/node_modules/block-stream/block-stream.js
new file mode 100644
index 000000000..008de035c
--- /dev/null
+++ b/node_modules/block-stream/block-stream.js
@@ -0,0 +1,209 @@
+// write data to it, and it'll emit data in 512 byte blocks.
+// if you .end() or .flush(), it'll emit whatever it's got,
+// padded with nulls to 512 bytes.
+
+module.exports = BlockStream
+
+var Stream = require("stream").Stream
+ , inherits = require("inherits")
+ , assert = require("assert").ok
+ , debug = process.env.DEBUG ? console.error : function () {}
+
+function BlockStream (size, opt) {
+ this.writable = this.readable = true
+ this._opt = opt || {}
+ this._chunkSize = size || 512
+ this._offset = 0
+ this._buffer = []
+ this._bufferLength = 0
+ if (this._opt.nopad) this._zeroes = false
+ else {
+ this._zeroes = new Buffer(this._chunkSize)
+ for (var i = 0; i < this._chunkSize; i ++) {
+ this._zeroes[i] = 0
+ }
+ }
+}
+
+inherits(BlockStream, Stream)
+
+BlockStream.prototype.write = function (c) {
+ // debug(" BS write", c)
+ if (this._ended) throw new Error("BlockStream: write after end")
+ if (c && !Buffer.isBuffer(c)) c = new Buffer(c + "")
+ if (c.length) {
+ this._buffer.push(c)
+ this._bufferLength += c.length
+ }
+ // debug("pushed onto buffer", this._bufferLength)
+ if (this._bufferLength >= this._chunkSize) {
+ if (this._paused) {
+ // debug(" BS paused, return false, need drain")
+ this._needDrain = true
+ return false
+ }
+ this._emitChunk()
+ }
+ return true
+}
+
+BlockStream.prototype.pause = function () {
+ // debug(" BS pausing")
+ this._paused = true
+}
+
+BlockStream.prototype.resume = function () {
+ // debug(" BS resume")
+ this._paused = false
+ return this._emitChunk()
+}
+
+BlockStream.prototype.end = function (chunk) {
+ // debug("end", chunk)
+ if (typeof chunk === "function") cb = chunk, chunk = null
+ if (chunk) this.write(chunk)
+ this._ended = true
+ this.flush()
+}
+
+BlockStream.prototype.flush = function () {
+ this._emitChunk(true)
+}
+
+BlockStream.prototype._emitChunk = function (flush) {
+ // debug("emitChunk flush=%j emitting=%j paused=%j", flush, this._emitting, this._paused)
+
+ // emit a <chunkSize> chunk
+ if (flush && this._zeroes) {
+ // debug(" BS push zeroes", this._bufferLength)
+ // push a chunk of zeroes
+ var padBytes = (this._bufferLength % this._chunkSize)
+ if (padBytes !== 0) padBytes = this._chunkSize - padBytes
+ if (padBytes > 0) {
+ // debug("padBytes", padBytes, this._zeroes.slice(0, padBytes))
+ this._buffer.push(this._zeroes.slice(0, padBytes))
+ this._bufferLength += padBytes
+ // debug(this._buffer[this._buffer.length - 1].length, this._bufferLength)
+ }
+ }
+
+ if (this._emitting || this._paused) return
+ this._emitting = true
+
+ // debug(" BS entering loops")
+ var bufferIndex = 0
+ while (this._bufferLength >= this._chunkSize &&
+ (flush || !this._paused)) {
+ // debug(" BS data emission loop", this._bufferLength)
+
+ var out
+ , outOffset = 0
+ , outHas = this._chunkSize
+
+ while (outHas > 0 && (flush || !this._paused) ) {
+ // debug(" BS data inner emit loop", this._bufferLength)
+ var cur = this._buffer[bufferIndex]
+ , curHas = cur.length - this._offset
+ // debug("cur=", cur)
+ // debug("curHas=%j", curHas)
+ // If it's not big enough to fill the whole thing, then we'll need
+ // to copy multiple buffers into one. However, if it is big enough,
+ // then just slice out the part we want, to save unnecessary copying.
+ // Also, need to copy if we've already done some copying, since buffers
+ // can't be joined like cons strings.
+ if (out || curHas < outHas) {
+ out = out || new Buffer(this._chunkSize)
+ cur.copy(out, outOffset,
+ this._offset, this._offset + Math.min(curHas, outHas))
+ } else if (cur.length === outHas && this._offset === 0) {
+ // shortcut -- cur is exactly long enough, and no offset.
+ out = cur
+ } else {
+ // slice out the piece of cur that we need.
+ out = cur.slice(this._offset, this._offset + outHas)
+ }
+
+ if (curHas > outHas) {
+ // means that the current buffer couldn't be completely output
+ // update this._offset to reflect how much WAS written
+ this._offset += outHas
+ outHas = 0
+ } else {
+ // output the entire current chunk.
+ // toss it away
+ outHas -= curHas
+ outOffset += curHas
+ bufferIndex ++
+ this._offset = 0
+ }
+ }
+
+ this._bufferLength -= this._chunkSize
+ assert(out.length === this._chunkSize)
+ // debug("emitting data", out)
+ // debug(" BS emitting, paused=%j", this._paused, this._bufferLength)
+ this.emit("data", out)
+ out = null
+ }
+ // debug(" BS out of loops", this._bufferLength)
+
+ // whatever is left, it's not enough to fill up a block, or we're paused
+ this._buffer = this._buffer.slice(bufferIndex)
+ if (this._paused) {
+ // debug(" BS paused, leaving", this._bufferLength)
+ this._needsDrain = true
+ this._emitting = false
+ return
+ }
+
+ // if flushing, and not using null-padding, then need to emit the last
+ // chunk(s) sitting in the queue. We know that it's not enough to
+ // fill up a whole block, because otherwise it would have been emitted
+ // above, but there may be some offset.
+ var l = this._buffer.length
+ if (flush && !this._zeroes && l) {
+ if (l === 1) {
+ if (this._offset) {
+ this.emit("data", this._buffer[0].slice(this._offset))
+ } else {
+ this.emit("data", this._buffer[0])
+ }
+ } else {
+ var outHas = this._bufferLength
+ , out = new Buffer(outHas)
+ , outOffset = 0
+ for (var i = 0; i < l; i ++) {
+ var cur = this._buffer[i]
+ , curHas = cur.length - this._offset
+ cur.copy(out, outOffset, this._offset)
+ this._offset = 0
+ outOffset += curHas
+ this._bufferLength -= curHas
+ }
+ this.emit("data", out)
+ }
+ // truncate
+ this._buffer.length = 0
+ this._bufferLength = 0
+ this._offset = 0
+ }
+
+ // now either drained or ended
+ // debug("either draining, or ended", this._bufferLength, this._ended)
+ // means that we've flushed out all that we can so far.
+ if (this._needDrain) {
+ // debug("emitting drain", this._bufferLength)
+ this._needDrain = false
+ this.emit("drain")
+ }
+
+ if ((this._bufferLength === 0) && this._ended && !this._endEmitted) {
+ // debug("emitting end", this._bufferLength)
+ this._endEmitted = true
+ this.emit("end")
+ }
+
+ this._emitting = false
+
+ // debug(" BS no longer emitting", flush, this._paused, this._emitting, this._bufferLength, this._chunkSize)
+}
diff --git a/node_modules/block-stream/package.json b/node_modules/block-stream/package.json
new file mode 100644
index 000000000..203961a14
--- /dev/null
+++ b/node_modules/block-stream/package.json
@@ -0,0 +1,23 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
+ "name": "block-stream",
+ "description": "a stream of blocks",
+ "version": "0.0.4",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/block-stream.git"
+ },
+ "engines": {
+ "node": "0.4 || ~0.5.8 || 0.6"
+ },
+ "main": "block-stream.js",
+ "dependencies": {
+ "inherits": "~1.0.0"
+ },
+ "devDependencies": {
+ "tap": "0.x"
+ },
+ "scripts": {
+ "test": "tap test/"
+ }
+}
diff --git a/node_modules/fast-list/.npmignore b/node_modules/fast-list/.npmignore
new file mode 100644
index 000000000..c2658d7d1
--- /dev/null
+++ b/node_modules/fast-list/.npmignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/node_modules/fast-list/.travis.yml b/node_modules/fast-list/.travis.yml
new file mode 100644
index 000000000..f1d0f13c8
--- /dev/null
+++ b/node_modules/fast-list/.travis.yml
@@ -0,0 +1,4 @@
+language: node_js
+node_js:
+ - 0.4
+ - 0.6
diff --git a/node_modules/fast-list/README.md b/node_modules/fast-list/README.md
new file mode 100644
index 000000000..88a842ef0
--- /dev/null
+++ b/node_modules/fast-list/README.md
@@ -0,0 +1,116 @@
+# The Problem
+
+You've got some thing where you need to push a bunch of stuff into a
+queue and then shift it out. Or, maybe it's a stack, and you're just
+pushing and popping it.
+
+Arrays work for this, but are a bit costly performance-wise.
+
+# The Solution
+
+A linked-list implementation that takes advantage of what v8 is good at:
+creating objects with a known shape.
+
+This is faster for this use case. How much faster? About 50%.
+
+ $ node bench.js
+ benchmarking /Users/isaacs/dev-src/js/fast-list/bench.js
+ Please be patient.
+ { node: '0.6.2-pre',
+ v8: '3.6.6.8',
+ ares: '1.7.5-DEV',
+ uv: '0.1',
+ openssl: '0.9.8l' }
+ Scores: (bigger is better)
+
+ new FastList()
+ Raw:
+ > 22556.39097744361
+ > 23054.755043227666
+ > 22770.398481973436
+ > 23414.634146341465
+ > 23099.133782483157
+ Average (mean) 22979.062486293868
+
+ []
+ Raw:
+ > 12195.121951219513
+ > 12184.508268059182
+ > 12173.91304347826
+ > 12216.404886561955
+ > 12184.508268059182
+ Average (mean) 12190.891283475617
+
+ new Array()
+ Raw:
+ > 12131.715771230503
+ > 12184.508268059182
+ > 12216.404886561955
+ > 12195.121951219513
+ > 11940.298507462687
+ Average (mean) 12133.609876906768
+
+ Winner: new FastList()
+ Compared with next highest ([]), it's:
+ 46.95% faster
+ 1.88 times as fast
+ 0.28 order(s) of magnitude faster
+
+ Compared with the slowest (new Array()), it's:
+ 47.2% faster
+ 1.89 times as fast
+ 0.28 order(s) of magnitude faster
+
+This lacks a lot of features that arrays have:
+
+1. You can't specify the size at the outset.
+2. It's not indexable.
+3. There's no join, concat, etc.
+
+If any of this matters for your use case, you're probably better off
+using an Array object.
+
+## Installing
+
+```
+npm install fast-list
+```
+
+## API
+
+```javascript
+var FastList = require("fast-list")
+var list = new FastList()
+list.push("foo")
+list.unshift("bar")
+list.push("baz")
+console.log(list.length) // 2
+console.log(list.pop()) // baz
+console.log(list.shift()) // bar
+console.log(list.shift()) // foo
+```
+
+### Methods
+
+* `push`: Just like Array.push, but only can take a single entry
+* `pop`: Just like Array.pop
+* `shift`: Just like Array.shift
+* `unshift`: Just like Array.unshift, but only can take a single entry
+* `drop`: Drop all entries
+* `item(n)`: Retrieve the nth item in the list. This involves a walk
+ every time. It's very slow. If you find yourself using this,
+ consider using a normal Array instead.
+* `map(fn, thisp)`: Like `Array.prototype.map`. Returns a new FastList.
+* `reduce(fn, startValue, thisp)`: Like `Array.prototype.reduce`
+* `forEach(fn, this)`: Like `Array.prototype.forEach`
+* `filter(fn, thisp)`: Like `Array.prototype.filter`. Returns a new
+ FastList.
+* `slice(start, end)`: Retrieve an array of the items at this position.
+ This involves a walk every time. It's very slow. If you find
+ yourself using this, consider using a normal Array instead.
+
+### Members
+
+* `length`: The number of things in the list. Note that, unlike
+ Array.length, this is not a getter/setter, but rather a counter that
+ is internally managed. Setting it can only cause harm.
diff --git a/node_modules/fast-list/fast-list.js b/node_modules/fast-list/fast-list.js
new file mode 100644
index 000000000..692db0df8
--- /dev/null
+++ b/node_modules/fast-list/fast-list.js
@@ -0,0 +1,144 @@
+;(function() { // closure for web browsers
+
+function Item (data, prev, next) {
+ this.next = next
+ if (next) next.prev = this
+ this.prev = prev
+ if (prev) prev.next = this
+ this.data = data
+}
+
+function FastList () {
+ if (!(this instanceof FastList)) return new FastList
+ this._head = null
+ this._tail = null
+ this.length = 0
+}
+
+FastList.prototype =
+{ push: function (data) {
+ this._tail = new Item(data, this._tail, null)
+ if (!this._head) this._head = this._tail
+ this.length ++
+ }
+
+, pop: function () {
+ if (this.length === 0) return undefined
+ var t = this._tail
+ this._tail = t.prev
+ if (t.prev) {
+ t.prev = this._tail.next = null
+ }
+ this.length --
+ if (this.length === 1) this._head = this._tail
+ else if (this.length === 0) this._head = this._tail = null
+ return t.data
+ }
+
+, unshift: function (data) {
+ this._head = new Item(data, null, this._head)
+ if (!this._tail) this._tail = this._head
+ this.length ++
+ }
+
+, shift: function () {
+ if (this.length === 0) return undefined
+ var h = this._head
+ this._head = h.next
+ if (h.next) {
+ h.next = this._head.prev = null
+ }
+ this.length --
+ if (this.length === 1) this._tail = this._head
+ else if (this.length === 0) this._head = this._tail = null
+ return h.data
+ }
+
+, item: function (n) {
+ if (n < 0) n = this.length + n
+ var h = this._head
+ while (n-- > 0 && h) h = h.next
+ return h ? h.data : undefined
+ }
+
+, slice: function (n, m) {
+ if (!n) n = 0
+ if (!m) m = this.length
+ if (m < 0) m = this.length + m
+ if (n < 0) n = this.length + n
+
+ if (m === n) {
+ return []
+ }
+
+ if (m < n) {
+ throw new Error("invalid offset: "+n+","+m+" (length="+this.length+")")
+ }
+
+ var len = m - n
+ , ret = new Array(len)
+ , i = 0
+ , h = this._head
+ while (n-- > 0 && h) h = h.next
+ while (i < len && h) {
+ ret[i++] = h.data
+ h = h.next
+ }
+ return ret
+ }
+
+, drop: function () {
+ FastList.call(this)
+ }
+
+, forEach: function (fn, thisp) {
+ var p = this._head
+ , i = 0
+ , len = this.length
+ while (i < len && p) {
+ fn.call(thisp || this, p.data, i, this)
+ p = p.next
+ i ++
+ }
+ }
+
+, map: function (fn, thisp) {
+ var n = new FastList()
+ this.forEach(function (v, i, me) {
+ n.push(fn.call(thisp || me, v, i, me))
+ })
+ return n
+ }
+
+, filter: function (fn, thisp) {
+ var n = new FastList()
+ this.forEach(function (v, i, me) {
+ if (fn.call(thisp || me, v, i, me)) n.push(v)
+ })
+ return n
+ }
+
+, reduce: function (fn, val, thisp) {
+ var i = 0
+ , p = this._head
+ , len = this.length
+ if (!val) {
+ i = 1
+ val = p && p.data
+ p = p && p.next
+ }
+ while (i < len && p) {
+ val = fn.call(thisp || this, val, p.data, this)
+ i ++
+ p = p.next
+ }
+ return val
+ }
+}
+
+if ("undefined" !== typeof(exports)) module.exports = FastList
+else if ("function" === typeof(define) && define.amd) {
+ define("FastList", function() { return FastList })
+} else (function () { return this })().FastList = FastList
+
+})()
diff --git a/node_modules/fast-list/package.json b/node_modules/fast-list/package.json
new file mode 100644
index 000000000..9bcc6b413
--- /dev/null
+++ b/node_modules/fast-list/package.json
@@ -0,0 +1,20 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
+ "name": "fast-list",
+ "description": "A fast linked list (good for queues, stacks, etc.)",
+ "version": "1.0.2",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/fast-list.git"
+ },
+ "main": "fast-list.js",
+ "dependencies": {},
+ "devDependencies": {
+ "bench": "~0.3.2",
+ "tap": "~0.1.0"
+ },
+ "scripts": {
+ "test": "tap test.js",
+ "bench": "node bench.js"
+ }
+}
diff --git a/node_modules/fstream/.npmignore b/node_modules/fstream/.npmignore
new file mode 100644
index 000000000..66880db1a
--- /dev/null
+++ b/node_modules/fstream/.npmignore
@@ -0,0 +1,3 @@
+.*.swp
+examples/deep-copy
+node_modules/
diff --git a/node_modules/fstream/.travis.yml b/node_modules/fstream/.travis.yml
new file mode 100644
index 000000000..2d26206d5
--- /dev/null
+++ b/node_modules/fstream/.travis.yml
@@ -0,0 +1,3 @@
+language: node_js
+node_js:
+ - 0.6
diff --git a/node_modules/fstream/README.md b/node_modules/fstream/README.md
new file mode 100644
index 000000000..9d8cb77e5
--- /dev/null
+++ b/node_modules/fstream/README.md
@@ -0,0 +1,76 @@
+Like FS streams, but with stat on them, and supporting directories and
+symbolic links, as well as normal files. Also, you can use this to set
+the stats on a file, even if you don't change its contents, or to create
+a symlink, etc.
+
+So, for example, you can "write" a directory, and it'll call `mkdir`. You
+can specify a uid and gid, and it'll call `chown`. You can specify a
+`mtime` and `atime`, and it'll call `utimes`. You can call it a symlink
+and provide a `linkpath` and it'll call `symlink`.
+
+Note that it won't automatically resolve symbolic links. So, if you
+call `fstream.Reader('/some/symlink')` then you'll get an object
+that stats and then ends immediately (since it has no data). To follow
+symbolic links, do this: `fstream.Reader({path:'/some/symlink', follow:
+true })`.
+
+There are various checks to make sure that the bytes emitted are the
+same as the intended size, if the size is set.
+
+## Examples
+
+```javascript
+fstream
+ .Writer({ path: "path/to/file"
+ , mode: 0755
+ , size: 6
+ })
+ .write("hello\n")
+ .end()
+```
+
+This will create the directories if they're missing, and then write
+`hello\n` into the file, chmod it to 0755, and assert that 6 bytes have
+been written when it's done.
+
+```javascript
+fstream
+ .Writer({ path: "path/to/file"
+ , mode: 0755
+ , size: 6
+ , flags: "a"
+ })
+ .write("hello\n")
+ .end()
+```
+
+You can pass flags in, if you want to append to a file.
+
+```javascript
+fstream
+ .Writer({ path: "path/to/symlink"
+ , linkpath: "./file"
+ , SymbolicLink: true
+ , mode: "0755" // octal strings supported
+ })
+ .end()
+```
+
+If isSymbolicLink is a function, it'll be called, and if it returns
+true, then it'll treat it as a symlink. If it's not a function, then
+any truish value will make a symlink, or you can set `type:
+'SymbolicLink'`, which does the same thing.
+
+Note that the linkpath is relative to the symbolic link location, not
+the parent dir or cwd.
+
+```javascript
+fstream
+ .Reader("path/to/dir")
+ .pipe(fstream.Writer("path/to/other/dir"))
+```
+
+This will do like `cp -Rp path/to/dir path/to/other/dir`. If the other
+dir exists and isn't a directory, then it'll emit an error. It'll also
+set the uid, gid, mode, etc. to be identical. In this way, it's more
+like `rsync -a` than simply a copy.
diff --git a/node_modules/fstream/fstream.js b/node_modules/fstream/fstream.js
new file mode 100644
index 000000000..c66d26f51
--- /dev/null
+++ b/node_modules/fstream/fstream.js
@@ -0,0 +1,31 @@
+exports.Abstract = require("./lib/abstract.js")
+exports.Reader = require("./lib/reader.js")
+exports.Writer = require("./lib/writer.js")
+
+exports.File =
+ { Reader: require("./lib/file-reader.js")
+ , Writer: require("./lib/file-writer.js") }
+
+exports.Dir =
+ { Reader : require("./lib/dir-reader.js")
+ , Writer : require("./lib/dir-writer.js") }
+
+exports.Link =
+ { Reader : require("./lib/link-reader.js")
+ , Writer : require("./lib/link-writer.js") }
+
+exports.Proxy =
+ { Reader : require("./lib/proxy-reader.js")
+ , Writer : require("./lib/proxy-writer.js") }
+
+exports.Reader.Dir = exports.DirReader = exports.Dir.Reader
+exports.Reader.File = exports.FileReader = exports.File.Reader
+exports.Reader.Link = exports.LinkReader = exports.Link.Reader
+exports.Reader.Proxy = exports.ProxyReader = exports.Proxy.Reader
+
+exports.Writer.Dir = exports.DirWriter = exports.Dir.Writer
+exports.Writer.File = exports.FileWriter = exports.File.Writer
+exports.Writer.Link = exports.LinkWriter = exports.Link.Writer
+exports.Writer.Proxy = exports.ProxyWriter = exports.Proxy.Writer
+
+exports.collect = require("./lib/collect.js")
diff --git a/node_modules/fstream/lib/abstract.js b/node_modules/fstream/lib/abstract.js
new file mode 100644
index 000000000..add48b945
--- /dev/null
+++ b/node_modules/fstream/lib/abstract.js
@@ -0,0 +1,82 @@
+// the parent class for all fstreams.
+
+module.exports = Abstract
+
+var Stream = require("stream").Stream
+ , inherits = require("inherits")
+
+function Abstract () {
+ Stream.call(this)
+}
+
+inherits(Abstract, Stream)
+
+Abstract.prototype.on = function (ev, fn) {
+ if (ev === "ready" && this.ready) {
+ process.nextTick(fn.bind(this))
+ } else {
+ Stream.prototype.on.call(this, ev, fn)
+ }
+ return this
+}
+
+Abstract.prototype.destroy = function () {}
+
+Abstract.prototype.warn = function (msg, code) {
+ var me = this
+ , er = decorate(msg, code, me)
+ if (!me.listeners("warn")) {
+ console.error("%s %s\n" +
+ "path = %s\n" +
+ "syscall = %s\n" +
+ "fstream_type = %s\n" +
+ "fstream_path = %s\n" +
+ "fstream_unc_path = %s\n" +
+ "fstream_class = %s\n" +
+ "fstream_stack =\n%s\n",
+ code || "UNKNOWN",
+ er.stack,
+ er.path,
+ er.syscall,
+ er.fstream_type,
+ er.fstream_path,
+ er.fstream_unc_path,
+ er.fstream_class,
+ er.fstream_stack.join("\n"))
+ } else {
+ me.emit("warn", er)
+ }
+}
+
+Abstract.prototype.info = function (msg, code) {
+ var me = this
+ if (!me.listeners("info")) return
+ me.emit("info", msg, code)
+}
+
+Abstract.prototype.error = function (msg, code, th) {
+ var er = decorate(msg, code, this)
+ if (th) throw er
+ else this.emit("error", er)
+}
+
+function decorate (er, code, me) {
+ if (!(er instanceof Error)) er = new Error(er)
+ er.code = er.code || code
+ er.path = er.path || me.path
+ er.fstream_type = er.fstream_type || me.type
+ er.fstream_path = er.fstream_path || me.path
+ if (me._path !== me.path) {
+ er.fstream_unc_path = er.fstream_unc_path || me._path
+ }
+ if (me.linkpath) {
+ er.fstream_linkpath = er.fstream_linkpath || me.linkpath
+ }
+ er.fstream_class = er.fstream_class || me.constructor.name
+ er.fstream_stack = er.fstream_stack ||
+ new Error().stack.split(/\n/).slice(3).map(function (s) {
+ return s.replace(/^ at /, "")
+ })
+
+ return er
+}
diff --git a/node_modules/fstream/lib/collect.js b/node_modules/fstream/lib/collect.js
new file mode 100644
index 000000000..a36f780eb
--- /dev/null
+++ b/node_modules/fstream/lib/collect.js
@@ -0,0 +1,67 @@
+module.exports = collect
+
+function collect (stream) {
+ if (stream._collected) return
+
+ stream._collected = true
+ stream.pause()
+
+ stream.on("data", save)
+ stream.on("end", save)
+ var buf = []
+ function save (b) {
+ if (typeof b === "string") b = new Buffer(b)
+ if (Buffer.isBuffer(b) && !b.length) return
+ buf.push(b)
+ }
+
+ stream.on("entry", saveEntry)
+ var entryBuffer = []
+ function saveEntry (e) {
+ collect(e)
+ entryBuffer.push(e)
+ }
+
+ stream.on("proxy", proxyPause)
+ function proxyPause (p) {
+ p.pause()
+ }
+
+
+ // replace the pipe method with a new version that will
+ // unlock the buffered stuff. if you just call .pipe()
+ // without a destination, then it'll re-play the events.
+ stream.pipe = (function (orig) { return function (dest) {
+ // console.error(" === open the pipes", dest && dest.path)
+
+ // let the entries flow through one at a time.
+ // Once they're all done, then we can resume completely.
+ var e = 0
+ ;(function unblockEntry () {
+ var entry = entryBuffer[e++]
+ // console.error(" ==== unblock entry", entry && entry.path)
+ if (!entry) return resume()
+ entry.on("end", unblockEntry)
+ if (dest) dest.add(entry)
+ else stream.emit("entry", entry)
+ })()
+
+ function resume () {
+ stream.removeListener("entry", saveEntry)
+ stream.removeListener("data", save)
+ stream.removeListener("end", save)
+
+ stream.pipe = orig
+ if (dest) stream.pipe(dest)
+
+ buf.forEach(function (b) {
+ if (b) stream.emit("data", b)
+ else stream.emit("end")
+ })
+
+ stream.resume()
+ }
+
+ return dest
+ }})(stream.pipe)
+}
diff --git a/node_modules/fstream/lib/dir-reader.js b/node_modules/fstream/lib/dir-reader.js
new file mode 100644
index 000000000..ab990d150
--- /dev/null
+++ b/node_modules/fstream/lib/dir-reader.js
@@ -0,0 +1,192 @@
+// A thing that emits "entry" events with Reader objects
+// Pausing it causes it to stop emitting entry events, and also
+// pauses the current entry if there is one.
+
+module.exports = DirReader
+
+var fs = require("graceful-fs")
+ , fstream = require("../fstream.js")
+ , Reader = fstream.Reader
+ , inherits = require("inherits")
+ , mkdir = require("mkdirp")
+ , path = require("path")
+ , Reader = require("./reader.js")
+
+inherits(DirReader, Reader)
+
+function DirReader (props) {
+ var me = this
+ if (!(me instanceof DirReader)) throw new Error(
+ "DirReader must be called as constructor.")
+
+ // should already be established as a Directory type
+ if (props.type !== "Directory" || !props.Directory) {
+ throw new Error("Non-directory type "+ props.type)
+ }
+
+ me._entries = null
+ me._index = -1
+ me._paused = false
+ me._length = -1
+
+ Reader.call(this, props)
+}
+
+DirReader.prototype._getEntries = function () {
+ var me = this
+ fs.readdir(me._path, function (er, entries) {
+ if (er) return me.error(er)
+ me._entries = entries
+ me._length = entries.length
+ // console.error("DR %s sort =", me.path, me.props.sort)
+ if (typeof me.props.sort === "function") {
+ me._entries.sort(me.props.sort)
+ }
+ me._read()
+ })
+}
+
+// start walking the dir, and emit an "entry" event for each one.
+DirReader.prototype._read = function () {
+ var me = this
+
+ if (!me._entries) return me._getEntries()
+
+ if (me._paused || me._currentEntry || me._aborted) {
+ // console.error("DR paused=%j, current=%j, aborted=%j", me._paused, !!me._currentEntry, me._aborted)
+ return
+ }
+
+ me._index ++
+ if (me._index >= me._length) {
+ if (!me._ended) {
+ me._ended = true
+ me.emit("end")
+ me.emit("close")
+ }
+ return
+ }
+
+ // ok, handle this one, then.
+
+ // save creating a proxy, by stat'ing the thing now.
+ var p = path.resolve(me._path, me._entries[me._index])
+ // set this to prevent trying to _read() again in the stat time.
+ me._currentEntry = p
+ fs[ me.props.follow ? "stat" : "lstat" ](p, function (er, stat) {
+ if (er) return me.error(er)
+
+ var entry = Reader({ path: p
+ , depth: me.depth + 1
+ , root: me.root || me._proxy || me
+ , parent: me._proxy || me
+ , follow: me.follow
+ , filter: me.filter
+ , sort: me.props.sort
+ }, stat)
+
+ // console.error("DR Entry", p, stat.size)
+
+ me._currentEntry = entry
+
+ // "entry" events are for direct entries in a specific dir.
+ // "child" events are for any and all children at all levels.
+ // This nomenclature is not completely final.
+
+ entry.on("pause", function (who) {
+ if (!me._paused) {
+ me.pause(who)
+ }
+ })
+
+ entry.on("resume", function (who) {
+ if (me._paused) {
+ me.resume(who)
+ }
+ })
+
+ entry.on("ready", function EMITCHILD () {
+ // console.error("DR emit child", entry._path)
+ if (me._paused) {
+ // console.error(" DR emit child - try again later")
+ // pause the child, and emit the "entry" event once we drain.
+ // console.error("DR pausing child entry")
+ entry.pause(me)
+ return me.once("resume", EMITCHILD)
+ }
+
+ // skip over sockets. they can't be piped around properly,
+ // so there's really no sense even acknowledging them.
+ // if someone really wants to see them, they can listen to
+ // the "socket" events.
+ if (entry.type === "Socket") {
+ me.emit("socket", entry)
+ } else {
+ me.emit("entry", entry)
+ me.emit("child", entry)
+ }
+ })
+
+ var ended = false
+ entry.on("close", onend)
+ function onend () {
+ if (ended) return
+ ended = true
+ me.emit("childEnd", entry)
+ me.emit("entryEnd", entry)
+ me._currentEntry = null
+ me._read()
+ }
+
+ // XXX Make this work in node.
+ // Long filenames should not break stuff.
+ entry.on("error", function (er) {
+ if (entry._swallowErrors) {
+ me.warn(er)
+ entry.emit("end")
+ entry.emit("close")
+ } else {
+ me.emit("error", er)
+ }
+ })
+
+ // proxy up some events.
+ ; [ "child"
+ , "childEnd"
+ , "warn"
+ ].forEach(function (ev) {
+ entry.on(ev, me.emit.bind(me, ev))
+ })
+ })
+}
+
+DirReader.prototype.pause = function (who) {
+ var me = this
+ if (me._paused) return
+ who = who || me
+ me._paused = true
+ if (me._currentEntry && me._currentEntry.pause) {
+ me._currentEntry.pause(who)
+ }
+ me.emit("pause", who)
+}
+
+DirReader.prototype.resume = function (who) {
+ var me = this
+ if (!me._paused) return
+ who = who || me
+
+ me._paused = false
+ // console.error("DR Emit Resume", me._path)
+ me.emit("resume", who)
+ if (me._paused) {
+ // console.error("DR Re-paused", me._path)
+ return
+ }
+
+ if (me._currentEntry) {
+ if (me._currentEntry.resume) {
+ me._currentEntry.resume(who)
+ }
+ } else me._read()
+}
diff --git a/node_modules/fstream/lib/dir-writer.js b/node_modules/fstream/lib/dir-writer.js
new file mode 100644
index 000000000..01920244c
--- /dev/null
+++ b/node_modules/fstream/lib/dir-writer.js
@@ -0,0 +1,165 @@
+// It is expected that, when .add() returns false, the consumer
+// of the DirWriter will pause until a "drain" event occurs. Note
+// that this is *almost always going to be the case*, unless the
+// thing being written is some sort of unsupported type, and thus
+// skipped over.
+
+module.exports = DirWriter
+
+var fs = require("graceful-fs")
+ , fstream = require("../fstream.js")
+ , Writer = require("./writer.js")
+ , inherits = require("inherits")
+ , mkdir = require("mkdirp")
+ , path = require("path")
+ , collect = require("./collect.js")
+
+inherits(DirWriter, Writer)
+
+function DirWriter (props) {
+ var me = this
+ if (!(me instanceof DirWriter)) me.error(
+ "DirWriter must be called as constructor.", null, true)
+
+ // should already be established as a Directory type
+ if (props.type !== "Directory" || !props.Directory) {
+ me.error("Non-directory type "+ props.type + " " +
+ JSON.stringify(props), null, true)
+ }
+
+ Writer.call(this, props)
+}
+
+DirWriter.prototype._create = function () {
+ var me = this
+ mkdir(me._path, Writer.dirmode, function (er) {
+ if (er) return me.error(er)
+ // ready to start getting entries!
+ me.ready = true
+ me.emit("ready")
+ })
+}
+
+// a DirWriter has an add(entry) method, but its .write() doesn't
+// do anything. Why a no-op rather than a throw? Because this
+// leaves open the door for writing directory metadata for
+// gnu/solaris style dumpdirs.
+DirWriter.prototype.write = function () {
+ return true
+}
+
+DirWriter.prototype.end = function () {
+ this._ended = true
+ this._process()
+}
+
+DirWriter.prototype.add = function (entry) {
+ var me = this
+
+ // console.error("\tadd", entry._path, "->", me._path)
+ collect(entry)
+ if (!me.ready || me._currentEntry) {
+ me._buffer.push(entry)
+ return false
+ }
+
+ // create a new writer, and pipe the incoming entry into it.
+ if (me._ended) {
+ return me.error("add after end")
+ }
+
+ me._buffer.push(entry)
+ me._process()
+
+ return 0 === this._buffer.length
+}
+
+DirWriter.prototype._process = function () {
+ var me = this
+
+ // console.error("DW Process p=%j", me._processing, me.basename)
+
+ if (me._processing) return
+
+ var entry = me._buffer.shift()
+ if (!entry) {
+ // console.error("DW Drain")
+ me.emit("drain")
+ if (me._ended) me._finish()
+ return
+ }
+
+ me._processing = true
+ // console.error("DW Entry", entry._path)
+
+ me.emit("entry", entry)
+
+ // ok, add this entry
+ //
+ // don't allow recursive copying
+ var p = entry
+ do {
+ if (p._path === me.root._path || p._path === me._path) {
+ // console.error("DW Exit (recursive)", entry.basename, me._path)
+ me._processing = false
+ if (entry._collected) entry.pipe()
+ return me._process()
+ }
+ } while (p = p.parent)
+
+ // console.error("DW not recursive")
+
+ // chop off the entry's root dir, replace with ours
+ var props = { parent: me
+ , root: me.root || me
+ , type: entry.type
+ , depth: me.depth + 1 }
+
+ var p = entry._path || entry.path || entry.props.path
+ if (entry.parent) {
+ p = p.substr(entry.parent._path.length + 1)
+ }
+ // get rid of any ../../ shenanigans
+ props.path = path.join(me.path, path.join("/", p))
+
+ // all the rest of the stuff, copy over from the source.
+ Object.keys(entry.props).forEach(function (k) {
+ if (!props.hasOwnProperty(k)) {
+ props[k] = entry.props[k]
+ }
+ })
+
+ // not sure at this point what kind of writer this is.
+ var child = me._currentChild = new Writer(props)
+ child.on("ready", function () {
+ // console.error("DW Child Ready", child.type, child._path)
+ // console.error(" resuming", entry._path)
+ entry.pipe(child)
+ entry.resume()
+ })
+
+ // XXX Make this work in node.
+ // Long filenames should not break stuff.
+ child.on("error", function (er) {
+ if (child._swallowErrors) {
+ me.warn(er)
+ child.emit("end")
+ child.emit("close")
+ } else {
+ me.emit("error", er)
+ }
+ })
+
+ // we fire _end internally *after* end, so that we don't move on
+ // until any "end" listeners have had their chance to do stuff.
+ child.on("close", onend)
+ var ended = false
+ function onend () {
+ if (ended) return
+ ended = true
+ // console.error("* DW Child end", child.basename)
+ me._currentChild = null
+ me._processing = false
+ me._process()
+ }
+}
diff --git a/node_modules/fstream/lib/file-reader.js b/node_modules/fstream/lib/file-reader.js
new file mode 100644
index 000000000..b1f986183
--- /dev/null
+++ b/node_modules/fstream/lib/file-reader.js
@@ -0,0 +1,147 @@
+// Basically just a wrapper around an fs.ReadStream
+
+module.exports = FileReader
+
+var fs = require("graceful-fs")
+ , fstream = require("../fstream.js")
+ , Reader = fstream.Reader
+ , inherits = require("inherits")
+ , mkdir = require("mkdirp")
+ , Reader = require("./reader.js")
+ , EOF = {EOF: true}
+ , CLOSE = {CLOSE: true}
+
+inherits(FileReader, Reader)
+
+function FileReader (props) {
+ // console.error(" FR create", props.path, props.size, new Error().stack)
+ var me = this
+ if (!(me instanceof FileReader)) throw new Error(
+ "FileReader must be called as constructor.")
+
+ // should already be established as a File type
+ // XXX Todo: preserve hardlinks by tracking dev+inode+nlink,
+ // with a HardLinkReader class.
+ if (!((props.type === "Link" && props.Link) ||
+ (props.type === "File" && props.File))) {
+ throw new Error("Non-file type "+ props.type)
+ }
+
+ me._buffer = []
+ me._bytesEmitted = 0
+ Reader.call(me, props)
+}
+
+FileReader.prototype._getStream = function () {
+ var me = this
+ , stream = me._stream = fs.createReadStream(me._path, me.props)
+
+ if (me.props.blksize) {
+ stream.bufferSize = me.props.blksize
+ }
+
+ stream.on("open", me.emit.bind(me, "open"))
+
+ stream.on("data", function (c) {
+ // console.error("\t\t%d %s", c.length, me.basename)
+ me._bytesEmitted += c.length
+ // no point saving empty chunks
+ if (!c.length) return
+ else if (me._paused || me._buffer.length) {
+ me._buffer.push(c)
+ me._read()
+ } else me.emit("data", c)
+ })
+
+ stream.on("end", function () {
+ if (me._paused || me._buffer.length) {
+ // console.error("FR Buffering End", me._path)
+ me._buffer.push(EOF)
+ me._read()
+ } else {
+ me.emit("end")
+ }
+
+ if (me._bytesEmitted !== me.props.size) {
+ me.error("Didn't get expected byte count\n"+
+ "expect: "+me.props.size + "\n" +
+ "actual: "+me._bytesEmitted)
+ }
+ })
+
+ stream.on("close", function () {
+ if (me._paused || me._buffer.length) {
+ // console.error("FR Buffering Close", me._path)
+ me._buffer.push(CLOSE)
+ me._read()
+ } else {
+ // console.error("FR close 1", me._path)
+ me.emit("close")
+ }
+ })
+
+ me._read()
+}
+
+FileReader.prototype._read = function () {
+ var me = this
+ // console.error("FR _read", me._path)
+ if (me._paused) {
+ // console.error("FR _read paused", me._path)
+ return
+ }
+
+ if (!me._stream) {
+ // console.error("FR _getStream calling", me._path)
+ return me._getStream()
+ }
+
+ // clear out the buffer, if there is one.
+ if (me._buffer.length) {
+ // console.error("FR _read has buffer", me._buffer.length, me._path)
+ var buf = me._buffer
+ for (var i = 0, l = buf.length; i < l; i ++) {
+ var c = buf[i]
+ if (c === EOF) {
+ // console.error("FR Read emitting buffered end", me._path)
+ me.emit("end")
+ } else if (c === CLOSE) {
+ // console.error("FR Read emitting buffered close", me._path)
+ me.emit("close")
+ } else {
+ // console.error("FR Read emitting buffered data", me._path)
+ me.emit("data", c)
+ }
+
+ if (me._paused) {
+ // console.error("FR Read Re-pausing at "+i, me._path)
+ me._buffer = buf.slice(i)
+ return
+ }
+ }
+ me._buffer.length = 0
+ }
+ // console.error("FR _read done")
+ // that's about all there is to it.
+}
+
+FileReader.prototype.pause = function (who) {
+ var me = this
+ // console.error("FR Pause", me._path)
+ if (me._paused) return
+ who = who || me
+ me._paused = true
+ if (me._stream) me._stream.pause()
+ me.emit("pause", who)
+}
+
+FileReader.prototype.resume = function (who) {
+ var me = this
+ // console.error("FR Resume", me._path)
+ if (!me._paused) return
+ who = who || me
+ me.emit("resume", who)
+ me._paused = false
+ if (me._stream) me._stream.resume()
+ me._read()
+}
diff --git a/node_modules/fstream/lib/file-writer.js b/node_modules/fstream/lib/file-writer.js
new file mode 100644
index 000000000..4ed0c9c6c
--- /dev/null
+++ b/node_modules/fstream/lib/file-writer.js
@@ -0,0 +1,95 @@
+module.exports = FileWriter
+
+var fs = require("graceful-fs")
+ , mkdir = require("mkdirp")
+ , Writer = require("./writer.js")
+ , inherits = require("inherits")
+ , EOF = {}
+
+inherits(FileWriter, Writer)
+
+function FileWriter (props) {
+ var me = this
+ if (!(me instanceof FileWriter)) throw new Error(
+ "FileWriter must be called as constructor.")
+
+ // should already be established as a File type
+ if (props.type !== "File" || !props.File) {
+ throw new Error("Non-file type "+ props.type)
+ }
+
+ me._buffer = []
+ me._bytesWritten = 0
+
+ Writer.call(this, props)
+}
+
+FileWriter.prototype._create = function () {
+ var me = this
+ if (me._stream) return
+
+ var so = {}
+ if (me.props.flags) so.flags = me.props.flags
+ so.mode = Writer.filemode
+ if (me._old && me._old.blksize) so.bufferSize = me._old.blksize
+
+ me._stream = fs.createWriteStream(me._path, so)
+
+ me._stream.on("open", function (fd) {
+ me.ready = true
+ me._buffer.forEach(function (c) {
+ if (c === EOF) me._stream.end()
+ else me._stream.write(c)
+ })
+ me.emit("ready")
+ })
+
+ me._stream.on("drain", function () { me.emit("drain") })
+
+ me._stream.on("close", function () {
+ // console.error("\n\nFW Stream Close", me._path, me.size)
+ me._finish()
+ })
+}
+
+FileWriter.prototype.write = function (c) {
+ var me = this
+
+ me._bytesWritten += c.length
+
+ if (!me.ready) {
+ me._buffer.push(c)
+ return false
+ }
+
+ var ret = me._stream.write(c)
+ // console.error("\t-- fw wrote, _stream says", ret, me._stream._queue.length)
+
+ // allow 2 buffered writes, because otherwise there's just too
+ // much stop and go bs.
+ return ret || (me._stream._queue && me._stream._queue.length <= 2)
+}
+
+FileWriter.prototype.end = function (c) {
+ var me = this
+
+ if (c) me.write(c)
+
+ if (!me.ready) {
+ me._buffer.push(EOF)
+ return false
+ }
+
+ return me._stream.end()
+}
+
+FileWriter.prototype._finish = function () {
+ var me = this
+ if (typeof me.size === "number" && me._bytesWritten != me.size) {
+ me.error(
+ "Did not get expected byte count.\n" +
+ "expect: " + me.size + "\n" +
+ "actual: " + me._bytesWritten)
+ }
+ Writer.prototype._finish.call(me)
+}
diff --git a/node_modules/fstream/lib/get-type.js b/node_modules/fstream/lib/get-type.js
new file mode 100644
index 000000000..cd65c41d8
--- /dev/null
+++ b/node_modules/fstream/lib/get-type.js
@@ -0,0 +1,32 @@
+module.exports = getType
+
+function getType (st) {
+ var types =
+ [ "Directory"
+ , "File"
+ , "SymbolicLink"
+ , "Link" // special for hardlinks from tarballs
+ , "BlockDevice"
+ , "CharacterDevice"
+ , "FIFO"
+ , "Socket" ]
+ , type
+
+ if (st.type && -1 !== types.indexOf(st.type)) {
+ st[st.type] = true
+ return st.type
+ }
+
+ for (var i = 0, l = types.length; i < l; i ++) {
+ type = types[i]
+ var is = st[type] || st["is" + type]
+ if (typeof is === "function") is = is.call(st)
+ if (is) {
+ st[type] = true
+ st.type = type
+ return type
+ }
+ }
+
+ return null
+}
diff --git a/node_modules/fstream/lib/link-reader.js b/node_modules/fstream/lib/link-reader.js
new file mode 100644
index 000000000..7e7ab6ce5
--- /dev/null
+++ b/node_modules/fstream/lib/link-reader.js
@@ -0,0 +1,54 @@
+// Basically just a wrapper around an fs.readlink
+//
+// XXX: Enhance this to support the Link type, by keeping
+// a lookup table of {<dev+inode>:<path>}, so that hardlinks
+// can be preserved in tarballs.
+
+module.exports = LinkReader
+
+var fs = require("graceful-fs")
+ , fstream = require("../fstream.js")
+ , inherits = require("inherits")
+ , mkdir = require("mkdirp")
+ , Reader = require("./reader.js")
+
+inherits(LinkReader, Reader)
+
+function LinkReader (props) {
+ var me = this
+ if (!(me instanceof LinkReader)) throw new Error(
+ "LinkReader must be called as constructor.")
+
+ if (!((props.type === "Link" && props.Link) ||
+ (props.type === "SymbolicLink" && props.SymbolicLink))) {
+ throw new Error("Non-link type "+ props.type)
+ }
+
+ Reader.call(me, props)
+}
+
+// When piping a LinkReader into a LinkWriter, we have to
+// already have the linkpath property set, so that has to
+// happen *before* the "ready" event, which means we need to
+// override the _stat method.
+LinkReader.prototype._stat = function (currentStat) {
+ var me = this
+ fs.readlink(me._path, function (er, linkpath) {
+ if (er) return me.error(er)
+ me.linkpath = me.props.linkpath = linkpath
+ me.emit("linkpath", linkpath)
+ Reader.prototype._stat.call(me, currentStat)
+ })
+}
+
+LinkReader.prototype._read = function () {
+ var me = this
+ if (me._paused) return
+ // basically just a no-op, since we got all the info we need
+ // from the _stat method
+ if (!me._ended) {
+ me.emit("end")
+ me.emit("close")
+ me._ended = true
+ }
+}
diff --git a/node_modules/fstream/lib/link-writer.js b/node_modules/fstream/lib/link-writer.js
new file mode 100644
index 000000000..8a9816380
--- /dev/null
+++ b/node_modules/fstream/lib/link-writer.js
@@ -0,0 +1,96 @@
+
+module.exports = LinkWriter
+
+var fs = require("graceful-fs")
+ , Writer = require("./writer.js")
+ , inherits = require("inherits")
+ , collect = require("./collect.js")
+ , path = require("path")
+ , rimraf = require("rimraf")
+
+inherits(LinkWriter, Writer)
+
+function LinkWriter (props) {
+ var me = this
+ if (!(me instanceof LinkWriter)) throw new Error(
+ "LinkWriter must be called as constructor.")
+
+ // should already be established as a Link type
+ if (!((props.type === "Link" && props.Link) ||
+ (props.type === "SymbolicLink" && props.SymbolicLink))) {
+ throw new Error("Non-link type "+ props.type)
+ }
+
+ if (props.linkpath === "") props.linkpath = "."
+ if (!props.linkpath) {
+ me.error("Need linkpath property to create " + props.type)
+ }
+
+ Writer.call(this, props)
+}
+
+LinkWriter.prototype._create = function () {
+ // console.error(" LW _create")
+ var me = this
+ , hard = me.type === "Link" || process.platform === "win32"
+ , link = hard ? "link" : "symlink"
+ , lp = hard ? path.resolve(me.dirname, me.linkpath) : me.linkpath
+
+ // can only change the link path by clobbering
+ // For hard links, let's just assume that's always the case, since
+ // there's no good way to read them if we don't already know.
+ if (hard) return clobber(me, lp, link)
+
+ fs.readlink(me._path, function (er, p) {
+ // only skip creation if it's exactly the same link
+ if (p && p === lp) return finish(me)
+ clobber(me, lp, link)
+ })
+}
+
+function clobber (me, lp, link) {
+ rimraf(me._path, function (er) {
+ if (er) return me.error(er)
+ create(me, lp, link)
+ })
+}
+
+function create (me, lp, link) {
+ fs[link](lp, me._path, function (er) {
+ // if this is a hard link, and we're in the process of writing out a
+ // directory, it's very possible that the thing we're linking to
+ // doesn't exist yet (especially if it was intended as a symlink),
+ // so swallow ENOENT errors here and just soldier in.
+ // Additionally, an EPERM or EACCES can happen on win32 if it's trying
+ // to make a link to a directory. Again, just skip it.
+ // A better solution would be to have fs.symlink be supported on
+ // windows in some nice fashion.
+ if (er) {
+ if ((er.code === "ENOENT" ||
+ er.code === "EACCES" ||
+ er.code === "EPERM" ) && process.platform === "win32") {
+ me.ready = true
+ me.emit("ready")
+ me.emit("end")
+ me.emit("close")
+ me.end = me._finish = function () {}
+ } else return me.error(er)
+ }
+ finish(me)
+ })
+}
+
+function finish (me) {
+ me.ready = true
+ me.emit("ready")
+ if (me._ended && !me._finished) me._finish()
+}
+
+LinkWriter.prototype.end = function () {
+ // console.error("LW finish in end")
+ this._ended = true
+ if (this.ready) {
+ this._finished = true
+ this._finish()
+ }
+}
diff --git a/node_modules/fstream/lib/proxy-reader.js b/node_modules/fstream/lib/proxy-reader.js
new file mode 100644
index 000000000..f99b28fe5
--- /dev/null
+++ b/node_modules/fstream/lib/proxy-reader.js
@@ -0,0 +1,89 @@
+// A reader for when we don't yet know what kind of thing
+// the thing is.
+
+module.exports = ProxyReader
+
+var Reader = require("./reader.js")
+ , getType = require("./get-type.js")
+ , inherits = require("inherits")
+ , fs = require("graceful-fs")
+
+inherits(ProxyReader, Reader)
+
+function ProxyReader (props) {
+ var me = this
+ if (!(me instanceof ProxyReader)) throw new Error(
+ "ProxyReader must be called as constructor.")
+
+ me.props = props
+ me._buffer = []
+ me.ready = false
+
+ Reader.call(me, props)
+}
+
+ProxyReader.prototype._stat = function () {
+ var me = this
+ , props = me.props
+ // stat the thing to see what the proxy should be.
+ , stat = props.follow ? "stat" : "lstat"
+
+ fs[stat](props.path, function (er, current) {
+ var type
+ if (er || !current) {
+ type = "File"
+ } else {
+ type = getType(current)
+ }
+
+ props[type] = true
+ props.type = me.type = type
+
+ me._old = current
+ me._addProxy(Reader(props, current))
+ })
+}
+
+ProxyReader.prototype._addProxy = function (proxy) {
+ var me = this
+ if (me._proxyTarget) {
+ return me.error("proxy already set")
+ }
+
+ me._proxyTarget = proxy
+ proxy._proxy = me
+
+ ; [ "error"
+ , "data"
+ , "end"
+ , "close"
+ , "linkpath"
+ , "entry"
+ , "warn"
+ ].forEach(function (ev) {
+ // console.error("~~ proxy event", ev, me.path)
+ proxy.on(ev, me.emit.bind(me, ev))
+ })
+
+ me.emit("proxy", proxy)
+
+ proxy.on("ready", function () {
+ // console.error("~~ proxy is ready!", me.path)
+ me.ready = true
+ me.emit("ready")
+ })
+
+ var calls = me._buffer
+ me._buffer.length = 0
+ calls.forEach(function (c) {
+ proxy[c[0]].apply(proxy, c[1])
+ })
+}
+
+ProxyReader.prototype.pause = function () {
+ return this._proxyTarget ? this._proxyTarget.pause() : false
+}
+
+ProxyReader.prototype.resume = function () {
+ return this._proxyTarget ? this._proxyTarget.resume() : false
+}
diff --git a/node_modules/fstream/lib/proxy-writer.js b/node_modules/fstream/lib/proxy-writer.js
new file mode 100644
index 000000000..2c78fc673
--- /dev/null
+++ b/node_modules/fstream/lib/proxy-writer.js
@@ -0,0 +1,109 @@
+// A writer for when we don't know what kind of thing
+// the thing is. That is, it's not explicitly set,
+// so we're going to make it whatever the thing already
+// is, or "File"
+//
+// Until then, collect all events.
+
+module.exports = ProxyWriter
+
+var Writer = require("./writer.js")
+ , getType = require("./get-type.js")
+ , inherits = require("inherits")
+ , collect = require("./collect.js")
+ , fs = require("fs")
+
+inherits(ProxyWriter, Writer)
+
+function ProxyWriter (props) {
+ var me = this
+ if (!(me instanceof ProxyWriter)) throw new Error(
+ "ProxyWriter must be called as constructor.")
+
+ me.props = props
+ me._needDrain = false
+
+ Writer.call(me, props)
+}
+
+ProxyWriter.prototype._stat = function () {
+ var me = this
+ , props = me.props
+ // stat the thing to see what the proxy should be.
+ , stat = props.follow ? "stat" : "lstat"
+
+ fs[stat](props.path, function (er, current) {
+ var type
+ if (er || !current) {
+ type = "File"
+ } else {
+ type = getType(current)
+ }
+
+ props[type] = true
+ props.type = me.type = type
+
+ me._old = current
+ me._addProxy(Writer(props, current))
+ })
+}
+
+ProxyWriter.prototype._addProxy = function (proxy) {
+ // console.error("~~ set proxy", this.path)
+ var me = this
+ if (me._proxy) {
+ return me.error("proxy already set")
+ }
+
+ me._proxy = proxy
+ ; [ "ready"
+ , "error"
+ , "close"
+ , "pipe"
+ , "drain"
+ , "warn"
+ ].forEach(function (ev) {
+ proxy.on(ev, me.emit.bind(me, ev))
+ })
+
+ me.emit("proxy", proxy)
+
+ var calls = me._buffer
+ calls.forEach(function (c) {
+ // console.error("~~ ~~ proxy buffered call", c[0], c[1])
+ proxy[c[0]].call(proxy, c[1])
+ })
+ me._buffer.length = 0
+ if (me._needsDrain) me.emit("drain")
+}
+
+ProxyWriter.prototype.add = function (entry) {
+ // console.error("~~ proxy add")
+ collect(entry)
+
+ if (!this._proxy) {
+ this._buffer.push(["add", [entry]])
+ this._needDrain = true
+ return false
+ }
+ return this._proxy.add(entry)
+}
+
+ProxyWriter.prototype.write = function (c) {
+ // console.error("~~ proxy write")
+ if (!this._proxy) {
+ this._buffer.push(["write", [c]])
+ this._needDrain = true
+ return false
+ }
+ return this._proxy.write(c)
+}
+
+ProxyWriter.prototype.end = function (c) {
+ // console.error("~~ proxy end")
+ if (!this._proxy) {
+ this._buffer.push(["end", c])
+ return false
+ }
+ return this._proxy.end(c)
+}
diff --git a/node_modules/fstream/lib/reader.js b/node_modules/fstream/lib/reader.js
new file mode 100644
index 000000000..6aa67ada7
--- /dev/null
+++ b/node_modules/fstream/lib/reader.js
@@ -0,0 +1,240 @@
+
+module.exports = Reader
+
+var fs = require("graceful-fs")
+ , Stream = require("stream").Stream
+ , inherits = require("inherits")
+ , path = require("path")
+ , getType = require("./get-type.js")
+ , hardLinks = Reader.hardLinks = {}
+ , Abstract = require("./abstract.js")
+
+// Must do this *before* loading the child classes
+inherits(Reader, Abstract)
+
+var DirReader = require("./dir-reader.js")
+ , FileReader = require("./file-reader.js")
+ , LinkReader = require("./link-reader.js")
+ , SocketReader = require("./socket-reader.js")
+ , ProxyReader = require("./proxy-reader.js")
+
+function Reader (props, currentStat) {
+ var me = this
+ if (!(me instanceof Reader)) return new Reader(props, currentStat)
+
+ if (typeof props === "string") {
+ props = { path: props }
+ }
+
+ if (!props.path) {
+ me.error("Must provide a path", null, true)
+ }
+
+ // polymorphism.
+ // call fstream.Reader(dir) to get a DirReader object, etc.
+ // Note that, unlike in the Writer case, ProxyReader is going
+ // to be the *normal* state of affairs, since we rarely know
+ // the type of a file prior to reading it.
+
+
+ var type
+ , ClassType
+
+ if (props.type && typeof props.type === "function") {
+ type = props.type
+ ClassType = type
+ } else {
+ type = getType(props)
+ ClassType = Reader
+ }
+
+ if (currentStat && !type) {
+ type = getType(currentStat)
+ props[type] = true
+ props.type = type
+ }
+
+ switch (type) {
+ case "Directory":
+ ClassType = DirReader
+ break
+
+ case "Link":
+ // XXX hard links are just files.
+ // However, it would be good to keep track of files' dev+inode
+ // and nlink values, and create a HardLinkReader that emits
+ // a linkpath value of the original copy, so that the tar
+ // writer can preserve them.
+ // ClassType = HardLinkReader
+ // break
+
+ case "File":
+ ClassType = FileReader
+ break
+
+ case "SymbolicLink":
+ ClassType = LinkReader
+ break
+
+ case "Socket":
+ ClassType = SocketReader
+ break
+
+ case null:
+ ClassType = ProxyReader
+ break
+ }
+
+ if (!(me instanceof ClassType)) {
+ return new ClassType(props)
+ }
+
+ Abstract.call(me)
+
+ me.readable = true
+ me.writable = false
+
+ me.type = type
+ me.props = props
+ me.depth = props.depth = props.depth || 0
+ me.parent = props.parent || null
+ me.root = props.root || (props.parent && props.parent.root) || me
+
+ me._path = me.path = path.resolve(props.path)
+ if (process.platform === "win32") {
+ me.path = me._path = me.path.replace(/\?/g, "_")
+ if (me._path.length >= 260) {
+ // how DOES one create files on the moon?
+ // if the path has spaces in it, then UNC will fail.
+ me._swallowErrors = true
+ //if (me._path.indexOf(" ") === -1) {
+ me._path = "\\\\?\\" + me.path.replace(/\//g, "\\")
+ //}
+ }
+ }
+ me.basename = props.basename = path.basename(me.path)
+ me.dirname = props.dirname = path.dirname(me.path)
+
+ // these have served their purpose, and are now just noisy clutter
+ props.parent = props.root = null
+
+ // console.error("\n\n\n%s setting size to", props.path, props.size)
+ me.size = props.size
+ me.filter = typeof props.filter === "function" ? props.filter : null
+ if (props.sort === "alpha") props.sort = alphasort
+
+ // start the ball rolling.
+ // this will stat the thing, and then call me._read()
+ // to start reading whatever it is.
+ // console.error("calling stat", props.path, currentStat)
+ me._stat(currentStat)
+}
+
+function alphasort (a, b) {
+ return a === b ? 0
+ : a.toLowerCase() > b.toLowerCase() ? 1
+ : a.toLowerCase() < b.toLowerCase() ? -1
+ : a > b ? 1
+ : -1
+}
+
+Reader.prototype._stat = function (currentStat) {
+ var me = this
+ , props = me.props
+ , stat = props.follow ? "stat" : "lstat"
+
+ // console.error("Reader._stat", me._path, currentStat)
+ if (currentStat) process.nextTick(statCb.bind(null, null, currentStat))
+ else fs[stat](me._path, statCb)
+
+
+ function statCb (er, props_) {
+ // console.error("Reader._stat, statCb", me._path, props_, props_.nlink)
+ if (er) return me.error(er)
+
+ Object.keys(props_).forEach(function (k) {
+ props[k] = props_[k]
+ })
+
+ // if it's not the expected size, then abort here.
+ if (undefined !== me.size && props.size !== me.size) {
+ return me.error("incorrect size")
+ }
+ me.size = props.size
+
+ var type = getType(props)
+ // special little thing for handling hardlinks.
+ if (type !== "Directory" && props.nlink && props.nlink > 1) {
+ var k = props.dev + ":" + props.ino
+ // console.error("Reader has nlink", me._path, k)
+ if (hardLinks[k] === me._path || !hardLinks[k]) hardLinks[k] = me._path
+ else {
+ // switch into hardlink mode.
+ type = me.type = me.props.type = "Link"
+ me.Link = me.props.Link = true
+ me.linkpath = me.props.linkpath = hardLinks[k]
+ // console.error("Hardlink detected, switching mode", me._path, me.linkpath)
+ // Setting __proto__ would arguably be the "correct"
+ // approach here, but that just seems too wrong.
+ me._stat = me._read = LinkReader.prototype._read
+ }
+ }
+
+ if (me.type && me.type !== type) {
+ me.error("Unexpected type: " + type)
+ }
+
+ // if the filter doesn't pass, then just skip over this one.
+ // still have to emit end so that dir-walking can move on.
+ if (me.filter) {
+ // special handling for ProxyReaders
+ if (!me.filter.call(me._proxy || me)) {
+ me._aborted = true
+ me.emit("end")
+ me.emit("close")
+ return
+ }
+ }
+
+ me.emit("ready", props)
+
+ // if it's a directory, then we'll be emitting "entry" events.
+ me._read()
+ }
+}
+
+Reader.prototype.pipe = function (dest, opts) {
+ var me = this
+ if (typeof dest.add === "function") {
+ // piping to a multi-compatible, and we've got directory entries.
+ me.on("entry", function (entry) {
+ var ret = dest.add(entry)
+ if (false === ret) {
+ me.pause()
+ }
+ })
+ }
+
+ // console.error("R Pipe apply Stream Pipe")
+ return Stream.prototype.pipe.apply(this, arguments)
+}
+
+Reader.prototype.pause = function (who) {
+ this._paused = true
+ who = who || this
+ this.emit("pause", who)
+ if (this._stream) this._stream.pause(who)
+}
+
+Reader.prototype.resume = function (who) {
+ this._paused = false
+ who = who || this
+ this.emit("resume", who)
+ if (this._stream) this._stream.resume(who)
+ this._read()
+}
+
+Reader.prototype._read = function () {
+ this.error("Cannot read unknown type: "+this.type)
+}
+
diff --git a/node_modules/fstream/lib/socket-reader.js b/node_modules/fstream/lib/socket-reader.js
new file mode 100644
index 000000000..e89c1731a
--- /dev/null
+++ b/node_modules/fstream/lib/socket-reader.js
@@ -0,0 +1,38 @@
+// Just get the stats, and then don't do anything.
+// You can't really "read" from a socket. You "connect" to it.
+// Mostly, this is here so that reading a dir with a socket in it
+// doesn't blow up.
+
+module.exports = SocketReader
+
+var fs = require("graceful-fs")
+ , fstream = require("../fstream.js")
+ , inherits = require("inherits")
+ , mkdir = require("mkdirp")
+ , Reader = require("./reader.js")
+
+inherits(SocketReader, Reader)
+
+function SocketReader (props) {
+ var me = this
+ if (!(me instanceof SocketReader)) throw new Error(
+ "SocketReader must be called as constructor.")
+
+ if (!(props.type === "Socket" && props.Socket)) {
+ throw new Error("Non-socket type "+ props.type)
+ }
+
+ Reader.call(me, props)
+}
+
+SocketReader.prototype._read = function () {
+ var me = this
+ if (me._paused) return
+ // basically just a no-op, since we got all the info we have
+ // from the _stat method
+ if (!me._ended) {
+ me.emit("end")
+ me.emit("close")
+ me._ended = true
+ }
+}
diff --git a/node_modules/fstream/lib/writer.js b/node_modules/fstream/lib/writer.js
new file mode 100644
index 000000000..dde29fd7b
--- /dev/null
+++ b/node_modules/fstream/lib/writer.js
@@ -0,0 +1,316 @@
+
+module.exports = Writer
+
+var fs = require("graceful-fs")
+ , inherits = require("inherits")
+ , rimraf = require("rimraf")
+ , mkdir = require("mkdirp")
+ , path = require("path")
+ , umask = process.platform === "win32" ? 0 : process.umask()
+ , getType = require("./get-type.js")
+ , Abstract = require("./abstract.js")
+
+// Must do this *before* loading the child classes
+inherits(Writer, Abstract)
+
+Writer.dirmode = 0777 & (~umask)
+Writer.filemode = 0666 & (~umask)
+
+var DirWriter = require("./dir-writer.js")
+ , LinkWriter = require("./link-writer.js")
+ , FileWriter = require("./file-writer.js")
+ , ProxyWriter = require("./proxy-writer.js")
+
+// props is the desired state. current is optionally the current stat,
+// provided here so that subclasses can avoid statting the target
+// more than necessary.
+function Writer (props, current) {
+ var me = this
+
+ if (typeof props === "string") {
+ props = { path: props }
+ }
+
+ if (!props.path) me.error("Must provide a path", null, true)
+
+ // polymorphism.
+ // call fstream.Writer(dir) to get a DirWriter object, etc.
+ var type = getType(props)
+ , ClassType = Writer
+
+ switch (type) {
+ case "Directory":
+ ClassType = DirWriter
+ break
+ case "File":
+ ClassType = FileWriter
+ break
+ case "Link":
+ case "SymbolicLink":
+ ClassType = LinkWriter
+ break
+ case null:
+ // Don't know yet what type to create, so we wrap in a proxy.
+ ClassType = ProxyWriter
+ break
+ }
+
+ if (!(me instanceof ClassType)) return new ClassType(props)
+
+ // now get down to business.
+
+ Abstract.call(me)
+
+ // props is what we want to set.
+ // set some convenience properties as well.
+ me.type = props.type
+ me.props = props
+ me.depth = props.depth || 0
+ me.clobber = false === props.clobber ? props.clobber : true
+ me.parent = props.parent || null
+ me.root = props.root || (props.parent && props.parent.root) || me
+
+ me._path = me.path = path.resolve(props.path)
+ if (process.platform === "win32") {
+ me.path = me._path = me.path.replace(/\?/g, "_")
+ if (me._path.length >= 260) {
+ me._swallowErrors = true
+ me._path = "\\\\?\\" + me.path.replace(/\//g, "\\")
+ }
+ }
+ me.basename = path.basename(props.path)
+ me.dirname = path.dirname(props.path)
+ me.linkpath = props.linkpath || null
+
+ props.parent = props.root = null
+
+ // console.error("\n\n\n%s setting size to", props.path, props.size)
+ me.size = props.size
+
+ if (typeof props.mode === "string") {
+ props.mode = parseInt(props.mode, 8)
+ }
+
+ me.readable = false
+ me.writable = true
+
+ // buffer until ready, or while handling another entry
+ me._buffer = []
+ me.ready = false
+
+ // start the ball rolling.
+ // this checks what's there already, and then calls
+ // me._create() to call the impl-specific creation stuff.
+ me._stat(current)
+}
+
+// Calling this means that it's something we can't create.
+// Just assert that it's already there, otherwise raise a warning.
+Writer.prototype._create = function () {
+ var me = this
+ fs[me.props.follow ? "stat" : "lstat"](me._path, function (er, current) {
+ if (er) {
+ return me.warn("Cannot create " + me._path + "\n" +
+ "Unsupported type: "+me.type, "ENOTSUP")
+ }
+ me._finish()
+ })
+}
+
+Writer.prototype._stat = function (current) {
+ var me = this
+ , props = me.props
+ , stat = props.follow ? "stat" : "lstat"
+
+ if (current) statCb(null, current)
+ else fs[stat](me._path, statCb)
+
+ function statCb (er, current) {
+ // if it's not there, great. We'll just create it.
+ // if it is there, then we'll need to change whatever differs
+ if (er || !current) {
+ return create(me)
+ }
+
+ me._old = current
+ var currentType = getType(current)
+
+ // if it's a type change, then we need to clobber or error.
+ // if it's not a type change, then let the impl take care of it.
+ if (currentType !== me.type) {
+ return rimraf(me._path, function (er) {
+ if (er) return me.error(er)
+ me._old = null
+ create(me)
+ })
+ }
+
+ // otherwise, just handle in the app-specific way
+ // this creates a fs.WriteStream, or mkdir's, or whatever
+ create(me)
+ }
+}
+
+function create (me) {
+ // console.error("W create", me._path, Writer.dirmode)
+
+ // XXX Need to clobber non-dirs that are in the way,
+ // unless { clobber: false } in the props.
+ mkdir(path.dirname(me._path), Writer.dirmode, function (er) {
+ // console.error("W created", path.dirname(me._path), er)
+ if (er) return me.error(er)
+ me._create()
+ })
+}
+
+Writer.prototype._finish = function () {
+ var me = this
+
+ // console.error(" W Finish", me._path, me.size)
+
+ // set up all the things.
+ // At this point, we're already done writing whatever we've gotta write,
+ // adding files to the dir, etc.
+ var todo = 0
+ var errState = null
+ var done = false
+
+ if (me._old) {
+ // the times will almost *certainly* have changed.
+ // adds the utimes syscall, but remove another stat.
+ me._old.atime = new Date(0)
+ me._old.mtime = new Date(0)
+ // console.error(" W Finish Stale Stat", me._path, me.size)
+ setProps(me._old)
+ } else {
+ var stat = me.props.follow ? "stat" : "lstat"
+ // console.error(" W Finish Stating", me._path, me.size)
+ fs[stat](me._path, function (er, current) {
+ // console.error(" W Finish Stated", me._path, me.size, current)
+ if (er) {
+ // if we're in the process of writing out a
+ // directory, it's very possible that the thing we're linking to
+ // doesn't exist yet (especially if it was intended as a symlink),
+ // so swallow ENOENT errors here and just soldier on.
+ if (er.code === "ENOENT" &&
+ (me.type === "Link" || me.type === "SymbolicLink") &&
+ process.platform === "win32") {
+ me.ready = true
+ me.emit("ready")
+ me.emit("end")
+ me.emit("close")
+ me.end = me._finish = function () {}
+ return
+ } else return me.error(er)
+ }
+ setProps(me._old = current)
+ })
+ }
+
+ return
+
+ function setProps (current) {
+ // console.error(" W setprops", me._path)
+ // mode
+ var wantMode = me.props.mode
+ , chmod = me.props.follow || me.type !== "SymbolicLink"
+ ? "chmod" : "lchmod"
+
+ if (fs[chmod] && typeof wantMode === "number") {
+ wantMode = wantMode & 0777
+ todo ++
+ // console.error(" W chmod", wantMode.toString(8), me.basename, "\r")
+ fs[chmod](me._path, wantMode, next(chmod))
+ }
+
+ // uid, gid
+ // Don't even try it unless root. Too easy to EPERM.
+ if (process.platform !== "win32" &&
+ process.getuid && process.getuid() === 0 &&
+ ( typeof me.props.uid === "number" ||
+ typeof me.props.gid === "number" )) {
+ var chown = (me.props.follow || me.type !== "SymbolicLink")
+ ? "chown" : "lchown"
+ if (fs[chown]) {
+ if (typeof me.props.uid !== "number") me.props.uid = current.uid
+ if (typeof me.props.gid !== "number") me.props.gid = current.gid
+ if (me.props.uid !== current.uid || me.props.gid !== current.gid) {
+ todo ++
+ // console.error(" W chown", me.props.uid, me.props.gid, me.basename)
+ fs[chown](me._path, me.props.uid, me.props.gid, next("chown"))
+ }
+ }
+ }
+
+ // atime, mtime.
+ if (fs.utimes && process.platform !== "win32") {
+ var utimes = (me.props.follow || me.type !== "SymbolicLink")
+ ? "utimes" : "lutimes"
+
+ if (utimes === "lutimes" && !fs[utimes]) {
+ utimes = "utimes"
+ }
+
+ var curA = current.atime
+ , curM = current.mtime
+ , meA = me.props.atime
+ , meM = me.props.mtime
+
+ if (meA === undefined) meA = curA
+ if (meM === undefined) meM = curM
+
+ if (!isDate(meA)) meA = new Date(meA)
+ if (!isDate(meM)) meA = new Date(meM)
+
+ if (meA.getTime() !== curA.getTime() ||
+ meM.getTime() !== curM.getTime()) {
+ todo ++
+ // console.error(" W utimes", meA, meM, me.basename)
+ fs[utimes](me._path, meA, meM, next("utimes"))
+ }
+ }
+
+ // finally, handle the case if there was nothing to do.
+ if (todo === 0) {
+ // console.error(" W nothing to do", me.basename)
+ next("nothing to do")()
+ }
+ }
+
+ function next (what) { return function (er) {
+ // console.error(" W Finish", what, todo)
+ if (errState) return
+ if (er) {
+ er.fstream_finish_call = what
+ return me.error(errState = er)
+ }
+ if (--todo > 0) return
+ if (done) return
+ done = true
+
+ // all the props have been set, so we're completely done.
+ me.emit("end")
+ me.emit("close")
+ }}
+}
+
+Writer.prototype.pipe = function () {
+ this.error("Can't pipe from writable stream")
+}
+
+Writer.prototype.add = function () {
+ this.error("Cannot add to non-Directory type")
+}
+
+Writer.prototype.write = function () {
+ return true
+}
+
+function objectToString (d) {
+ return Object.prototype.toString.call(d)
+}
+
+function isDate(d) {
+ return typeof d === 'object' && objectToString(d) === '[object Date]';
+}
+
diff --git a/node_modules/fstream/package.json b/node_modules/fstream/package.json
new file mode 100644
index 000000000..eb8561532
--- /dev/null
+++ b/node_modules/fstream/package.json
@@ -0,0 +1,26 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
+ "name": "fstream",
+ "description": "Advanced file system stream things",
+ "version": "0.1.11",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/fstream.git"
+ },
+ "main": "fstream.js",
+ "engines": {
+ "node": "0.5 || 0.6 || 0.7"
+ },
+ "dependencies": {
+ "rimraf": "~1.0.8",
+ "mkdirp": "~0.1.0",
+ "graceful-fs": "~1.1.2",
+ "inherits": "~1.0.0"
+ },
+ "devDependencies": {
+ "tap": "0.1"
+ },
+ "scripts": {
+ "test": "tap examples/*.js"
+ }
+}
diff --git a/node_modules/graceful-fs/.npmignore b/node_modules/graceful-fs/.npmignore
new file mode 100644
index 000000000..c2658d7d1
--- /dev/null
+++ b/node_modules/graceful-fs/.npmignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/node_modules/graceful-fs/LICENSE b/node_modules/graceful-fs/LICENSE
new file mode 100644
index 000000000..05a401094
--- /dev/null
+++ b/node_modules/graceful-fs/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/graceful-fs/README.md b/node_modules/graceful-fs/README.md
new file mode 100644
index 000000000..7d2e681e3
--- /dev/null
+++ b/node_modules/graceful-fs/README.md
@@ -0,0 +1,5 @@
+Just like node's `fs` module, but it does an incremental back-off when
+EMFILE is encountered.
+
+Useful in asynchronous situations where one needs to try to open lots
+and lots of files.
diff --git a/node_modules/graceful-fs/graceful-fs.js b/node_modules/graceful-fs/graceful-fs.js
new file mode 100644
index 000000000..7467f304a
--- /dev/null
+++ b/node_modules/graceful-fs/graceful-fs.js
@@ -0,0 +1,212 @@
+// this keeps a queue of opened file descriptors, and will make
+// fs operations wait until some have closed before trying to open more.
+
+var fs = require("fs")
+
+// there is such a thing as TOO graceful.
+if (fs.open === gracefulOpen) return
+
+var FastList = require("fast-list")
+ , queue = new FastList()
+ , curOpen = 0
+ , constants = require("constants")
+
+
+exports = module.exports = fs
+
+
+fs.MIN_MAX_OPEN = 64
+fs.MAX_OPEN = 1024
+
+var originalOpen = fs.open
+ , originalOpenSync = fs.openSync
+ , originalClose = fs.close
+ , originalCloseSync = fs.closeSync
+
+
+// prevent EMFILE errors
+function OpenReq (path, flags, mode, cb) {
+ this.path = path
+ this.flags = flags
+ this.mode = mode
+ this.cb = cb
+}
+
+function noop () {}
+
+fs.open = gracefulOpen
+
+function gracefulOpen (path, flags, mode, cb) {
+ if (typeof mode === "function") cb = mode, mode = null
+ if (typeof cb !== "function") cb = noop
+
+ if (curOpen >= fs.MAX_OPEN) {
+ queue.push(new OpenReq(path, flags, mode, cb))
+ setTimeout(flush)
+ return
+ }
+ open(path, flags, mode, function (er, fd) {
+ if (er && er.code === "EMFILE" && curOpen > fs.MIN_MAX_OPEN) {
+ // that was too many. reduce max, get back in queue.
+ // this should only happen once in a great while, and only
+ // if the ulimit -n is set lower than 1024.
+ fs.MAX_OPEN = curOpen - 1
+ return fs.open(path, flags, mode, cb)
+ }
+ cb(er, fd)
+ })
+}
+
+function open (path, flags, mode, cb) {
+ cb = cb || noop
+ curOpen ++
+ originalOpen.call(fs, path, flags, mode, function (er, fd) {
+ if (er) {
+ onclose()
+ }
+
+ cb(er, fd)
+ })
+}
+
+fs.openSync = function (path, flags, mode) {
+ curOpen ++
+ return originalOpenSync.call(fs, path, flags, mode)
+}
+
+function onclose () {
+ curOpen --
+ flush()
+}
+
+function flush () {
+ while (curOpen < fs.MAX_OPEN) {
+ var req = queue.shift()
+ if (!req) break
+ open(req.path, req.flags || "r", req.mode || 0777, req.cb)
+ }
+ if (queue.length === 0) return
+}
+
+fs.close = function (fd, cb) {
+ cb = cb || noop
+ originalClose.call(fs, fd, function (er) {
+ onclose()
+ cb(er)
+ })
+}
+
+fs.closeSync = function (fd) {
+ onclose()
+ return originalCloseSync.call(fs, fd)
+}
+
+
+// (re-)implement some things that are known busted or missing.
+
+var constants = require("constants")
+
+// lchmod, broken prior to 0.6.2
+// back-port the fix here.
+if (constants.hasOwnProperty('O_SYMLINK') &&
+ process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+ fs.lchmod = function (path, mode, callback) {
+ callback = callback || noop
+ fs.open( path
+ , constants.O_WRONLY | constants.O_SYMLINK
+ , mode
+ , function (err, fd) {
+ if (err) {
+ callback(err)
+ return
+ }
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ fs.fchmod(fd, mode, function (err) {
+ fs.close(fd, function(err2) {
+ callback(err || err2)
+ })
+ })
+ })
+ }
+
+ fs.lchmodSync = function (path, mode) {
+ var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
+
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ var err, err2
+ try {
+ var ret = fs.fchmodSync(fd, mode)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+}
+
+
+// lstat on windows, missing from early 0.5 versions
+// replacing with stat isn't quite perfect, but good enough to get by.
+if (process.platform === "win32" && !process.binding("fs").lstat) {
+ fs.lstat = fs.stat
+ fs.lstatSync = fs.statSync
+}
+
+
+// lutimes implementation, or no-op
+if (!fs.lutimes) {
+ if (constants.hasOwnProperty("O_SYMLINK")) {
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.open(path, constants.O_SYMLINK, function (er, fd) {
+ cb = cb || noop
+ if (er) return cb(er)
+ fs.futimes(fd, at, mt, function (er) {
+ fs.close(fd, function (er2) {
+ return cb(er || er2)
+ })
+ })
+ })
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ var fd = fs.openSync(path, constants.O_SYMLINK)
+ , err
+ , err2
+ , ret
+
+ try {
+ var ret = fs.futimesSync(fd, at, mt)
+ } catch (er) {
+ err = er
+ }
+ try {
+ fs.closeSync(fd)
+ } catch (er) {
+ err2 = er
+ }
+ if (err || err2) throw (err || err2)
+ return ret
+ }
+
+ } else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
+ // maybe utimensat will be bound soonish?
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
+ }
+
+ } else {
+ fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
+ fs.lutimesSync = function () {}
+ }
+}
diff --git a/node_modules/graceful-fs/package.json b/node_modules/graceful-fs/package.json
new file mode 100644
index 000000000..ec72affa1
--- /dev/null
+++ b/node_modules/graceful-fs/package.json
@@ -0,0 +1,18 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
+ "name": "graceful-fs",
+ "description": "fs monkey-patching to avoid EMFILE and other problems",
+ "version": "1.1.2",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-graceful-fs.git"
+ },
+ "main": "graceful-fs.js",
+ "engines": {
+ "node": "0.4 || 0.5 || 0.6"
+ },
+ "dependencies": {
+ "fast-list": "1"
+ },
+ "devDependencies": {}
+}
diff --git a/node_modules/inherits/LICENSE b/node_modules/inherits/LICENSE
new file mode 100644
index 000000000..c78c4f661
--- /dev/null
+++ b/node_modules/inherits/LICENSE
@@ -0,0 +1,26 @@
+Copyright 2011 Isaac Z. Schlueter (the "Author")
+All rights reserved.
+
+General Public Obviousness License
+
+The Author asserts that this software and associated documentation
+files (the "Software"), while the Author's original creation, is
+nonetheless obvious, trivial, unpatentable, and implied by the
+context in which the software was created. If you sat down and
+thought about the problem for an hour or less, you'd probably
+come up with exactly this solution.
+
+Permission is granted to use this software in any way
+whatsoever, with the following restriction:
+
+You may not release the Software under a more restrictive license
+than this one.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/inherits/README.md b/node_modules/inherits/README.md
new file mode 100644
index 000000000..b2beaed93
--- /dev/null
+++ b/node_modules/inherits/README.md
@@ -0,0 +1,51 @@
+A dead simple way to do inheritance in JS.
+
+ var inherits = require("inherits")
+
+ function Animal () {
+ this.alive = true
+ }
+ Animal.prototype.say = function (what) {
+ console.log(what)
+ }
+
+ inherits(Dog, Animal)
+ function Dog () {
+ Dog.super.apply(this)
+ }
+ Dog.prototype.sniff = function () {
+ this.say("sniff sniff")
+ }
+ Dog.prototype.bark = function () {
+ this.say("woof woof")
+ }
+
+ inherits(Chihuahua, Dog)
+ function Chihuahua () {
+ Chihuahua.super.apply(this)
+ }
+ Chihuahua.prototype.bark = function () {
+ this.say("yip yip")
+ }
+
+ // also works
+ function Cat () {
+ Cat.super.apply(this)
+ }
+ Cat.prototype.hiss = function () {
+ this.say("CHSKKSS!!")
+ }
+ inherits(Cat, Animal, {
+ meow: function () { this.say("miao miao") }
+ })
+ Cat.prototype.purr = function () {
+ this.say("purr purr")
+ }
+
+
+ var c = new Chihuahua
+ assert(c instanceof Chihuahua)
+ assert(c instanceof Dog)
+ assert(c instanceof Animal)
+
+The actual function is laughably small. 10-lines small.
diff --git a/node_modules/inherits/inherits-old.js b/node_modules/inherits/inherits-old.js
new file mode 100644
index 000000000..ef39252dd
--- /dev/null
+++ b/node_modules/inherits/inherits-old.js
@@ -0,0 +1,40 @@
+// This is a less perfect implementation of the inherits function,
+// designed to work in cases where ES5 is not available.
+//
+// Note that it is a bit longer, and doesn't properly deal with
+// getter/setters or property descriptor flags (enumerable, etc.)
+
+module.exports = inheritsOld
+
+function inheritsOld (c, p, proto) {
+ function F () { this.constructor = c }
+ F.prototype = p.prototype
+ var e = {}
+ for (var i in c.prototype) if (c.prototype.hasOwnProperty(i)) {
+ e[i] = c.prototype[i]
+ }
+ if (proto) for (var i in proto) if (proto.hasOwnProperty(i)) {
+ e[i] = proto[i]
+ }
+ c.prototype = new F()
+ for (var i in e) if (e.hasOwnProperty(i)) {
+ c.prototype[i] = e[i]
+ }
+ c.super = p
+}
+
+// function Child () {
+// Child.super.call(this)
+// console.error([this
+// ,this.constructor
+// ,this.constructor === Child
+// ,this.constructor.super === Parent
+// ,Object.getPrototypeOf(this) === Child.prototype
+// ,Object.getPrototypeOf(Object.getPrototypeOf(this))
+// === Parent.prototype
+// ,this instanceof Child
+// ,this instanceof Parent])
+// }
+// function Parent () {}
+// inheritsOld(Child, Parent)
+// new Child
diff --git a/node_modules/inherits/inherits.js b/node_modules/inherits/inherits.js
new file mode 100644
index 000000000..061b39620
--- /dev/null
+++ b/node_modules/inherits/inherits.js
@@ -0,0 +1,29 @@
+module.exports = inherits
+
+function inherits (c, p, proto) {
+ proto = proto || {}
+ var e = {}
+ ;[c.prototype, proto].forEach(function (s) {
+ Object.getOwnPropertyNames(s).forEach(function (k) {
+ e[k] = Object.getOwnPropertyDescriptor(s, k)
+ })
+ })
+ c.prototype = Object.create(p.prototype, e)
+ c.super = p
+}
+
+//function Child () {
+// Child.super.call(this)
+// console.error([this
+// ,this.constructor
+// ,this.constructor === Child
+// ,this.constructor.super === Parent
+// ,Object.getPrototypeOf(this) === Child.prototype
+// ,Object.getPrototypeOf(Object.getPrototypeOf(this))
+// === Parent.prototype
+// ,this instanceof Child
+// ,this instanceof Parent])
+//}
+//function Parent () {}
+//inherits(Child, Parent)
+//new Child
diff --git a/node_modules/inherits/package.json b/node_modules/inherits/package.json
new file mode 100644
index 000000000..7dc32771b
--- /dev/null
+++ b/node_modules/inherits/package.json
@@ -0,0 +1,8 @@
+{ "name" : "inherits"
+, "description": "A tiny simple way to do classic inheritance in js"
+, "version" : "1.0.0"
+, "keywords" : ["inheritance", "class", "klass", "oop", "object-oriented"]
+, "main" : "./inherits.js"
+, "repository" : "https://github.com/isaacs/inherits"
+, "license": { "type": "GPOL", "url": "https://raw.github.com/isaacs/inherits/master/LICENSE" }
+, "author" : "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)" }
diff --git a/node_modules/ini/LICENSE b/node_modules/ini/LICENSE
new file mode 100644
index 000000000..05a401094
--- /dev/null
+++ b/node_modules/ini/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/ini/README.md b/node_modules/ini/README.md
new file mode 100644
index 000000000..9f82a765d
--- /dev/null
+++ b/node_modules/ini/README.md
@@ -0,0 +1,71 @@
+An ini format parser and serializer for node.
+
+Sections are treated as nested objects. Items before the first heading
+are saved on the object directly.
+
+## Usage
+
+Consider an ini-file `config.ini` that looks like this:
+
+ ; this comment is being ignored
+ scope = global
+
+ [database]
+ user = dbuser
+ password = dbpassword
+ database = use_this_database
+
+ [paths.default]
+ datadir = /var/lib/data
+
+You can read, manipulate and write the ini-file like so:
+
+ var fs = require('fs')
+ , ini = require('ini')
+
+ var config = ini.parse(fs.readFileSync('./config.ini', 'utf-8'))
+
+ config.scope = 'local'
+ config.database.database = 'use_another_database'
+ config.paths.default.tmpdir = '/tmp'
+ delete config.paths.default.datadir
+
+ fs.writeFileSync('./config_modified.ini', ini.stringify(config, 'section'))
+
+This will result in a file called `config_modified.ini` being written to the filesystem with the following content:
+
+ [section]
+ scope = local
+ [section.database]
+ user = dbuser
+ password = dbpassword
+ database = use_another_database
+ [section.paths.default]
+ tmpdir = /tmp
+
+## API
+
+### decode(inistring)
+Decode the ini-style formatted `inistring` into a nested object.
+
+### parse(inistring)
+Alias for `decode(inistring)`
+
+### encode(object, [section])
+Encode the object `object` into an ini-style formatted string. If the optional parameter `section` is given, then all top-level properties of the object are put into this section and the `section`-string is prepended to all sub-sections, see the usage example above.
+
+### stringify(object, [section])
+Alias for `encode(object, [section])`
+
+### safe(val)
+Escapes the string `val` such that it is safe to be used as a key or value in an ini-file. Basically escapes quotes. For example
+
+ ini.safe('"unsafe string"')
+
+would result in
+
+ "\"unsafe string\""
+
+### unsafe(val)
+Unescapes the string `val`
+
diff --git a/node_modules/ini/ini.js b/node_modules/ini/ini.js
new file mode 100644
index 000000000..e8a949f94
--- /dev/null
+++ b/node_modules/ini/ini.js
@@ -0,0 +1,101 @@
+
+exports.parse = exports.decode = decode
+exports.stringify = exports.encode = encode
+
+exports.safe = safe
+exports.unsafe = unsafe
+
+function encode (obj, section) {
+ var children = []
+ , out = ""
+
+ Object.keys(obj).forEach(function (k, _, __) {
+ var val = obj[k]
+ if (val && typeof val === "object") {
+ children.push(k)
+ } else {
+ out += safe(k) + " = " + safe(val) + "\n"
+ }
+ })
+
+ if (section && out.length) {
+ out = "[" + safe(section) + "]" + "\n" + out
+ }
+
+ children.forEach(function (k, _, __) {
+ var child = encode(obj[k], (section ? section + "." : "") + k)
+ if (out.length && child.length) {
+ out += "\n"
+ }
+ out += child
+ })
+
+ return out
+}
+
+function decode (str) {
+ var out = {}
+ , p = out
+ , section = null
+ , state = "START"
+ // section |key = value
+ , re = /^\[([^\]]*)\]$|^([^=]+)(=(.*))?$/i
+ , lines = str.split(/[\r\n]+/g)
+
+ lines.forEach(function (line, _, __) {
+ //line = line
+ var rem = line.indexOf(";")
+ if (rem !== -1) line = line.substr(0, rem)//.trim()
+ if (!line) return
+ var match = line.match(re)
+ if (!match) return
+ if (match[1] !== undefined) {
+ section = unsafe(match[1])
+ p = out[section] = out[section] || {}
+ return
+ }
+ var key = unsafe(match[2])
+ , value = match[3] ? unsafe((match[4] || "")) : true
+ p[key] = value
+ })
+
+ // {a:{y:1},"a.b":{x:2}} --> {a:{y:1,b:{x:2}}}
+ // use a filter to return the keys that have to be deleted.
+ Object.keys(out).filter(function (k, _, __) {
+ if (!out[k] || typeof out[k] !== "object") return false
+ // see if the parent section is also an object.
+ // if so, add it to that, and mark this one for deletion
+ var parts = k.split(".")
+ , p = out
+ , l = parts.pop()
+ parts.forEach(function (part, _, __) {
+ if (!p[part] || typeof p[part] !== "object") p[part] = {}
+ p = p[part]
+ })
+ if (p === out) return false
+ p[l] = out[k]
+ return true
+ }).forEach(function (del, _, __) {
+ delete out[del]
+ })
+
+ return out
+}
+
+function safe (val) {
+ return ( typeof val !== "string"
+ || val.match(/[\r\n]/)
+ || val.match(/^\[/)
+ || (val.length > 1
+ && val.charAt(0) === "\""
+ && val.slice(-1) === "\"")
+ || val !== val.trim() ) ? JSON.stringify(val) : val
+}
+
+function unsafe (val) {
+ val = (val || "").trim()
+ if (val.charAt(0) === "\"" && val.slice(-1) === "\"") {
+ try { val = JSON.parse(val) } catch (_) {}
+ }
+ return val
+}
diff --git a/node_modules/ini/package.json b/node_modules/ini/package.json
new file mode 100644
index 000000000..a022b598f
--- /dev/null
+++ b/node_modules/ini/package.json
@@ -0,0 +1,21 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
+ "name": "ini",
+ "description": "An ini encoder/decoder for node",
+ "version": "1.0.2",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/ini.git"
+ },
+ "main": "ini.js",
+ "engines": {
+ "node": "*"
+ },
+ "dependencies": {},
+ "devDependencies": {
+ "tap": "~0.0.9"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ }
+}
diff --git a/node_modules/lru-cache/.npmignore b/node_modules/lru-cache/.npmignore
new file mode 100644
index 000000000..07e6e472c
--- /dev/null
+++ b/node_modules/lru-cache/.npmignore
@@ -0,0 +1 @@
+/node_modules
diff --git a/node_modules/lru-cache/LICENSE b/node_modules/lru-cache/LICENSE
new file mode 100644
index 000000000..05a401094
--- /dev/null
+++ b/node_modules/lru-cache/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/lru-cache/README.md b/node_modules/lru-cache/README.md
new file mode 100644
index 000000000..1f5f155b7
--- /dev/null
+++ b/node_modules/lru-cache/README.md
@@ -0,0 +1,12 @@
+# lru cache
+
+A cache object that deletes the least-recently-used items.
+
+Usage:
+
+ var LRU = require("lru-cache")
+ , cache = LRU(10) // max 10 items. default = Infinity
+ cache.set("key", "value")
+ cache.get("key") // "value"
+
+RTFS for more info.
diff --git a/node_modules/lru-cache/lib/lru-cache.js b/node_modules/lru-cache/lib/lru-cache.js
new file mode 100644
index 000000000..ca7a2b3c9
--- /dev/null
+++ b/node_modules/lru-cache/lib/lru-cache.js
@@ -0,0 +1,100 @@
+;(function () { // closure for web browsers
+
+if (typeof module === "object" && module.exports) {
+ module.exports = LRUCache
+} else {
+ // just set the global for non-node platforms.
+ ;(function () { return this })().LRUCache = LRUCache
+}
+
+function hOP (obj, key) {
+ return Object.prototype.hasOwnProperty.call(obj, key)
+}
+
+function LRUCache (maxLength) {
+ if (!(this instanceof LRUCache)) {
+ return new LRUCache(maxLength)
+ }
+ var cache = {} // hash of items by key
+ , lruList = {} // list of items in order of use recency
+ , lru = 0 // least recently used
+ , mru = 0 // most recently used
+ , length = 0 // number of items in the list
+
+ // resize the cache when the maxLength changes.
+ Object.defineProperty(this, "maxLength",
+ { set : function (mL) {
+ if (!mL || !(typeof mL === "number") || mL <= 0 ) mL = Infinity
+ maxLength = mL
+ // if it gets above double maxLength, trim right away.
+ // otherwise, do it whenever it's convenient.
+ if (length > maxLength) trim()
+ }
+ , get : function () { return maxLength }
+ , enumerable : true
+ })
+
+ this.maxLength = maxLength
+
+ Object.defineProperty(this, "length",
+ { get : function () { return length }
+ , enumerable : true
+ })
+
+ this.reset = function () {
+ cache = {}
+ lruList = {}
+ lru = 0
+ mru = 0
+ length = 0
+ }
+
+ this.set = function (key, value) {
+ if (hOP(cache, key)) {
+ this.get(key)
+ cache[key].value = value
+ return undefined
+ }
+ var hit = {key:key, value:value, lu:mru++}
+ lruList[hit.lu] = cache[key] = hit
+ length ++
+ if (length > maxLength) trim()
+ }
+
+ this.get = function (key) {
+ if (!hOP(cache, key)) return undefined
+ var hit = cache[key]
+ delete lruList[hit.lu]
+ if (hit.lu === lru) lruWalk()
+ hit.lu = mru ++
+ lruList[hit.lu] = hit
+ return hit.value
+ }
+
+ this.del = function (key) {
+ if (!hOP(cache, key)) return undefined
+ var hit = cache[key]
+ delete cache[key]
+ delete lruList[hit.lu]
+ if (hit.lu === lru) lruWalk()
+ length --
+ }
+
+ function lruWalk () {
+ // lru has been deleted, hop up to the next hit.
+ lru = Number(Object.keys(lruList).shift())
+ }
+
+ function trim () {
+ if (length <= maxLength) return undefined
+ var prune = Object.keys(lruList).slice(0, length - maxLength)
+ for (var i = 0, l = (length - maxLength); i < l; i ++) {
+ delete cache[ lruList[prune[i]].key ]
+ delete lruList[prune[i]]
+ }
+ length = maxLength
+ lruWalk()
+ }
+}
+
+})()
diff --git a/node_modules/lru-cache/package.json b/node_modules/lru-cache/package.json
new file mode 100644
index 000000000..676ec3a0e
--- /dev/null
+++ b/node_modules/lru-cache/package.json
@@ -0,0 +1,13 @@
+{ "name": "lru-cache"
+, "description": "A cache object that deletes the least-recently-used items."
+, "version": "1.0.5"
+, "author": "Isaac Z. Schlueter <i@izs.me>"
+, "scripts": { "test": "tap test" }
+, "main": "lib/lru-cache.js"
+, "repository": "git://github.com/isaacs/node-lru-cache.git"
+, "devDependencies": { "tap": "0.1" }
+, "license":
+ { "type": "MIT"
+ , "url": "http://github.com/isaacs/node-lru-cache/raw/master/LICENSE"
+ }
+}
diff --git a/node_modules/minimatch/.travis.yml b/node_modules/minimatch/.travis.yml
new file mode 100644
index 000000000..f1d0f13c8
--- /dev/null
+++ b/node_modules/minimatch/.travis.yml
@@ -0,0 +1,4 @@
+language: node_js
+node_js:
+ - 0.4
+ - 0.6
diff --git a/node_modules/minimatch/LICENSE b/node_modules/minimatch/LICENSE
new file mode 100644
index 000000000..05a401094
--- /dev/null
+++ b/node_modules/minimatch/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/minimatch/README.md b/node_modules/minimatch/README.md
new file mode 100644
index 000000000..d5f97234c
--- /dev/null
+++ b/node_modules/minimatch/README.md
@@ -0,0 +1,212 @@
+# minimatch
+
+A minimal matching utility.
+
+[![Build Status](https://secure.travis-ci.org/isaacs/minimatch.png)](http://travis-ci.org/isaacs/minimatch)
+
+
+This is the matching library used internally by npm.
+
+Eventually, it will replace the C binding in node-glob.
+
+It works by converting glob expressions into JavaScript `RegExp`
+objects.
+
+## Usage
+
+```javascript
+var minimatch = require("minimatch")
+
+minimatch("bar.foo", "*.foo") // true!
+minimatch("bar.foo", "*.bar") // false!
+```
+
+## Features
+
+Supports these glob features:
+
+* Brace Expansion
+* Extended glob matching
+* "Globstar" `**` matching
+
+See:
+
+* `man sh`
+* `man bash`
+* `man 3 fnmatch`
+* `man 5 gitignore`
+
+### Comparisons to other fnmatch/glob implementations
+
+While strict compliance with the existing standards is a worthwhile
+goal, some discrepancies exist between minimatch and other
+implementations, and are intentional.
+
+If the pattern starts with a `!` character, then it is negated. Set the
+`nonegate` flag to suppress this behavior, and treat leading `!`
+characters normally. This is perhaps relevant if you wish to start the
+pattern with a negative extglob pattern like `!(a|B)`. Multiple `!`
+characters at the start of a pattern will negate the pattern multiple
+times.
+
+If a pattern starts with `#`, then it is treated as a comment, and
+will not match anything. Use `\#` to match a literal `#` at the
+start of a line, or set the `nocomment` flag to suppress this behavior.
+
+The double-star character `**` is supported by default, unless the
+`noglobstar` flag is set. This is supported in the manner of bsdglob
+and bash 4.1, where `**` only has special significance if it is the only
+thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
+`a/**b` will not. **Note that this is different from the way that `**` is
+handled by ruby's `Dir` class.**
+
+If an escaped pattern has no matches, and the `null` flag is not set,
+then minimatch.match returns the pattern as-provided, rather than
+interpreting the character escapes. For example,
+`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
+`"*a?"`.
+
+If brace expansion is not disabled, then it is performed before any
+other interpretation of the glob pattern. Thus, a pattern like
+`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
+**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
+checked for validity. Since those two are valid, matching proceeds.
+
+
+## Minimatch Class
+
+Create a minimatch object by instantiating the `minimatch.Minimatch` class.
+
+```javascript
+var Minimatch = require("minimatch").Minimatch
+var mm = new Minimatch(pattern, options)
+```
+
+### Properties
+
+* `pattern` The original pattern the minimatch object represents.
+* `options` The options supplied to the constructor.
+* `set` A 2-dimensional array of regexp or string expressions.
+ Each row in the
+ array corresponds to a brace-expanded pattern. Each item in the row
+ corresponds to a single path-part. For example, the pattern
+ `{a,b/c}/d` would expand to a set of patterns like:
+
+ [ [ a, d ]
+ , [ b, c, d ] ]
+
+ If a portion of the pattern doesn't have any "magic" in it
+ (that is, it's something like `"foo"` rather than `fo*o?`), then it
+ will be left as a string rather than converted to a regular
+ expression.
+
+* `regexp` Created by the `makeRe` method. A single regular expression
+ expressing the entire pattern. This is useful in cases where you wish
+ to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled.
+* `negate` True if the pattern is negated.
+* `comment` True if the pattern is a comment.
+* `empty` True if the pattern is `""`.
+
+### Methods
+
+* `makeRe` Generate the `regexp` member if necessary, and return it.
+ Will return `false` if the pattern is invalid.
+* `match(fname)` Return true if the filename matches the pattern, or
+ false otherwise.
+* `matchOne(fileArray, patternArray, partial)` Take a `/`-split
+ filename, and match it against a single row in the `regExpSet`. This
+ method is mainly for internal use, but is exposed so that it can be
+ used by a glob-walker that needs to avoid excessive filesystem calls.
+
+All other methods are internal, and will be called as necessary.
+
+## Functions
+
+The top-level exported function has a `cache` property, which is an LRU
+cache set to store 100 items. So, calling these methods repeatedly
+with the same pattern and options will use the same Minimatch object,
+saving the cost of parsing it multiple times.
+
+### minimatch(path, pattern, options)
+
+Main export. Tests a path against the pattern using the options.
+
+```javascript
+var isJS = minimatch(file, "*.js", { matchBase: true })
+```
+
+### minimatch.filter(pattern, options)
+
+Returns a function that tests its
+supplied argument, suitable for use with `Array.filter`. Example:
+
+```javascript
+var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true}))
+```
+
+### minimatch.match(list, pattern, options)
+
+Match against the list of
+files, in the style of fnmatch or glob. If nothing is matched, then
+return the pattern (unless `{ null: true }` in the options.)
+
+```javascript
+var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})
+```
+
+### minimatch.makeRe(pattern, options)
+
+Make a regular expression object from the pattern.
+
+## Options
+
+All options are `false` by default.
+
+### debug
+
+Dump a ton of stuff to stderr.
+
+### nobrace
+
+Do not expand `{a,b}` and `{1..3}` brace sets.
+
+### noglobstar
+
+Disable `**` matching against multiple folder names.
+
+### dot
+
+Allow patterns to match filenames starting with a period, even if
+the pattern does not explicitly have a period in that spot.
+
+Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`
+is set.
+
+### noext
+
+Disable "extglob" style patterns like `+(a|b)`.
+
+### nocase
+
+Perform a case-insensitive match.
+
+### nonull
+
+When a match is not found by `minimatch.match`, return a list containing
+the pattern itself. When set, an empty list is returned if there are
+no matches.
+
+### matchBase
+
+If set, then patterns without slashes will be matched
+against the basename of the path if it contains slashes. For example,
+`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
+
+### nocomment
+
+Suppress the behavior of treating `#` at the start of a pattern as a
+comment.
+
+### nonegate
+
+Suppress the behavior of treating a leading `!` character as negation.
diff --git a/node_modules/minimatch/minimatch.js b/node_modules/minimatch/minimatch.js
new file mode 100644
index 000000000..768c8ebac
--- /dev/null
+++ b/node_modules/minimatch/minimatch.js
@@ -0,0 +1,1021 @@
+module.exports = minimatch
+minimatch.Minimatch = Minimatch
+
+var LRU = require("lru-cache")
+ , cache = minimatch.cache = new LRU(100)
+ , GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
+ , pathSplit = process.platform === "win32" ? /\\|\// : "/"
+
+var path = require("path")
+ // any single thing other than /
+ // don't need to escape / when using new RegExp()
+ , qmark = "[^/]"
+
+ // * => any number of characters
+ , star = qmark + "*?"
+
+ // ** when dots are allowed. Anything goes, except .. and .
+ // not (^ or / followed by one or two dots followed by $ or /),
+ // followed by anything, any number of times.
+ , twoStarDot = "(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?"
+
+ // not a ^ or / followed by a dot,
+ // followed by anything, any number of times.
+ , twoStarNoDot = "(?:(?!(?:\\\/|^)\\.).)*?"
+
+ // characters that need to be escaped in RegExp.
+ , reSpecials = charSet("().*{}+?[]^$\\!")
+
+// "abc" -> { a:true, b:true, c:true }
+function charSet (s) {
+ return s.split("").reduce(function (set, c) {
+ set[c] = true
+ return set
+ }, {})
+}
+
+// normalizes slashes.
+var slashSplit = /\/+/
+
+minimatch.monkeyPatch = monkeyPatch
+function monkeyPatch () {
+ var desc = Object.getOwnPropertyDescriptor(String.prototype, "match")
+ var orig = desc.value
+ desc.value = function (p) {
+ if (p instanceof Minimatch) return p.match(this)
+ return orig.call(this, p)
+ }
+ Object.defineProperty(String.prototype, "match", desc)
+}
+
+minimatch.filter = filter
+function filter (pattern, options) {
+ options = options || {}
+ return function (p, i, list) {
+ return minimatch(p, pattern, options)
+ }
+}
+
+function minimatch (p, pattern, options) {
+ if (typeof pattern !== "string") {
+ throw new TypeError("glob pattern string required")
+ }
+
+ if (!options) options = {}
+
+ // shortcut: comments match nothing.
+ if (!options.nocomment && pattern.charAt(0) === "#") {
+ return false
+ }
+
+ // "" only matches ""
+ if (pattern.trim() === "") return p === ""
+
+ return new Minimatch(pattern, options).match(p)
+}
+
+function Minimatch (pattern, options) {
+ if (!(this instanceof Minimatch)) {
+ return new Minimatch(pattern, options, cache)
+ }
+
+ if (typeof pattern !== "string") {
+ throw new TypeError("glob pattern string required")
+ }
+
+ if (!options) options = {}
+ pattern = pattern.trim()
+
+ // lru storage.
+ // these things aren't particularly big, but walking down the string
+ // and turning it into a regexp can get pretty costly.
+ var cacheKey = pattern + "\n" + Object.keys(options).filter(function (k) {
+ return options[k]
+ }).join(":")
+ var cached = minimatch.cache.get(cacheKey)
+ if (cached) return cached
+ minimatch.cache.set(cacheKey, this)
+
+ this.options = options
+ this.set = []
+ this.pattern = pattern
+ this.regexp = null
+ this.negate = false
+ this.comment = false
+ this.empty = false
+
+ // make the set of regexps etc.
+ this.make()
+}
+
+Minimatch.prototype.make = make
+function make () {
+ // don't do it more than once.
+ if (this._made) return
+
+ var pattern = this.pattern
+ var options = this.options
+
+ // empty patterns and comments match nothing.
+ if (!options.nocomment && pattern.charAt(0) === "#") {
+ this.comment = true
+ return
+ }
+ if (!pattern) {
+ this.empty = true
+ return
+ }
+
+ // step 1: figure out negation, etc.
+ this.parseNegate()
+
+ // step 2: expand braces
+ var set = this.braceExpand()
+
+ if (options.debug) console.error(this.pattern, set)
+
+ // step 3: now we have a set, so turn each one into a series of path-portion
+ // matching patterns.
+ // These will be regexps, except in the case of "**", which is
+ // set to the GLOBSTAR object for globstar behavior,
+ // and will not contain any / characters
+ set = set.map(function (s) {
+ return s.split(slashSplit)
+ })
+
+ // step 4: if we have a defined root, then patterns starting with ""
+ // get attached to that. If we have a defined cwd, then patterns
+ // *not* starting with "" get attached to that.
+ // Exception 1: on windows, a pattern like //\?/c:/ or c:/ will
+ // not get anything prefixed to it.
+ // Exception 2: If matchBase is set, and it's just a filename,
+ // then don't prefix anything onto it, since it'll only match
+ // files with that basename anyhow.
+ set = set.map(function (p) {
+ if (process.platform === "win32" &&
+ ( (p[0] === "" && p[1] === "" && p[2] === "\\?") // unc
+ || (p[0].match(/^[a-zA-Z]:$/)) )) {
+ return p
+ }
+ if (options.matchBase && p.length === 1) return p
+ // do prefixing.
+ if (options.root && p[0] === "") {
+ return options.root.split(pathSplit).concat(p)
+ }
+ if (options.cwd && p[0] !== "") {
+ return options.cwd.split(pathSplit).concat(p)
+ }
+ return p
+ })
+
+
+ if (options.debug) console.error(this.pattern, set)
+
+ // glob --> regexps
+ set = set.map(function (s, si, set) {
+ return s.map(this.parse, this)
+ }, this)
+
+ if (options.debug) console.error(this.pattern, set)
+
+ // filter out everything that didn't compile properly.
+ set = set.filter(function (s) {
+ return -1 === s.indexOf(false)
+ })
+
+ if (options.debug) console.error(this.pattern, set)
+
+ this.set = set
+}
+
+Minimatch.prototype.parseNegate = parseNegate
+function parseNegate () {
+ var pattern = this.pattern
+ , negate = false
+ , options = this.options
+ , negateOffset = 0
+
+ if (options.nonegate) return
+
+ for ( var i = 0, l = pattern.length
+ ; i < l && pattern.charAt(i) === "!"
+ ; i ++) {
+ negate = !negate
+ negateOffset ++
+ }
+
+ if (negateOffset) this.pattern = pattern.substr(negateOffset)
+ this.negate = negate
+}
+
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+minimatch.braceExpand = function (pattern, options) {
+ return new Minimatch(pattern, options).braceExpand()
+}
+
+Minimatch.prototype.braceExpand = braceExpand
+function braceExpand (pattern, options) {
+ options = options || this.options
+ pattern = typeof pattern === "undefined"
+ ? this.pattern : pattern
+
+ if (typeof pattern === "undefined") {
+ throw new Error("undefined pattern")
+ }
+
+ if (options.nobrace ||
+ !pattern.match(/\{.*\}/)) {
+ // shortcut. no need to expand.
+ return [pattern]
+ }
+
+ var escaping = false
+
+ // examples and comments refer to this crazy pattern:
+ // a{b,c{d,e},{f,g}h}x{y,z}
+ // expected:
+ // abxy
+ // abxz
+ // acdxy
+ // acdxz
+ // acexy
+ // acexz
+ // afhxy
+ // afhxz
+ // aghxy
+ // aghxz
+
+ // everything before the first \{ is just a prefix.
+ // So, we pluck that off, and work with the rest,
+ // and then prepend it to everything we find.
+ if (pattern.charAt(0) !== "{") {
+ // console.error(pattern)
+ var prefix = null
+ for (var i = 0, l = pattern.length; i < l; i ++) {
+ var c = pattern.charAt(i)
+ // console.error(i, c)
+ if (c === "\\") {
+ escaping = !escaping
+ } else if (c === "{" && !escaping) {
+ prefix = pattern.substr(0, i)
+ break
+ }
+ }
+
+ // actually no sets, all { were escaped.
+ if (prefix === null) {
+ // console.error("no sets")
+ return [pattern]
+ }
+
+ var tail = braceExpand(pattern.substr(i), options)
+ return tail.map(function (t) {
+ return prefix + t
+ })
+ }
+
+ // now we have something like:
+ // {b,c{d,e},{f,g}h}x{y,z}
+ // walk through the set, expanding each part, until
+ // the set ends. then, we'll expand the suffix.
+ // If the set only has a single member, then'll put the {} back
+
+ // first, handle numeric sets, since they're easier
+ var numset = pattern.match(/^\{(-?[0-9]+)\.\.(-?[0-9]+)\}/)
+ if (numset) {
+ // console.error("numset", numset[1], numset[2])
+ var suf = braceExpand(pattern.substr(numset[0].length), options)
+ , start = +numset[1]
+ , end = +numset[2]
+ , inc = start > end ? -1 : 1
+ , set = []
+ for (var i = start; i != (end + inc); i += inc) {
+ // append all the suffixes
+ for (var ii = 0, ll = suf.length; ii < ll; ii ++) {
+ set.push(i + suf[ii])
+ }
+ }
+ return set
+ }
+
+ // ok, walk through the set
+ // We hope, somewhat optimistically, that there
+ // will be a } at the end.
+ // If the closing brace isn't found, then the pattern is
+ // interpreted as braceExpand("\\" + pattern) so that
+ // the leading \{ will be interpreted literally.
+ var i = 1 // skip the \{
+ , depth = 1
+ , set = []
+ , member = ""
+ , sawEnd = false
+ , escaping = false
+
+ function addMember () {
+ set.push(member)
+ member = ""
+ }
+
+ // console.error("Entering for")
+ FOR: for (i = 1, l = pattern.length; i < l; i ++) {
+ var c = pattern.charAt(i)
+ // console.error("", i, c)
+
+ if (escaping) {
+ escaping = false
+ member += "\\" + c
+ } else {
+ switch (c) {
+ case "\\":
+ escaping = true
+ continue
+
+ case "{":
+ depth ++
+ member += "{"
+ continue
+
+ case "}":
+ depth --
+ // if this closes the actual set, then we're done
+ if (depth === 0) {
+ addMember()
+ // pluck off the close-brace
+ i ++
+ break FOR
+ } else {
+ member += c
+ continue
+ }
+
+ case ",":
+ if (depth === 1) {
+ addMember()
+ } else {
+ member += c
+ }
+ continue
+
+ default:
+ member += c
+ continue
+ } // switch
+ } // else
+ } // for
+
+ // now we've either finished the set, and the suffix is
+ // pattern.substr(i), or we have *not* closed the set,
+ // and need to escape the leading brace
+ if (depth !== 0) {
+ // console.error("didn't close", pattern)
+ return braceExpand("\\" + pattern, options)
+ }
+
+ // x{y,z} -> ["xy", "xz"]
+ // console.error("set", set)
+ // console.error("suffix", pattern.substr(i))
+ var suf = braceExpand(pattern.substr(i), options)
+ // ["b", "c{d,e}","{f,g}h"] ->
+ // [["b"], ["cd", "ce"], ["fh", "gh"]]
+ var addBraces = set.length === 1
+ // console.error("set pre-expanded", set)
+ set = set.map(function (p) {
+ return braceExpand(p, options)
+ })
+ // console.error("set expanded", set)
+
+
+ // [["b"], ["cd", "ce"], ["fh", "gh"]] ->
+ // ["b", "cd", "ce", "fh", "gh"]
+ set = set.reduce(function (l, r) {
+ return l.concat(r)
+ })
+
+ if (addBraces) {
+ set = set.map(function (s) {
+ return "{" + s + "}"
+ })
+ }
+
+ // now attach the suffixes.
+ var ret = []
+ for (var i = 0, l = set.length; i < l; i ++) {
+ for (var ii = 0, ll = suf.length; ii < ll; ii ++) {
+ ret.push(set[i] + suf[ii])
+ }
+ }
+ return ret
+}
+
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion. Otherwise, any series
+// of * is equivalent to a single *. Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+Minimatch.prototype.parse = parse
+var SUBPARSE = {}
+function parse (pattern, isSub) {
+ var options = this.options
+
+ // shortcuts
+ if (!options.noglobstar && pattern === "**") return GLOBSTAR
+ if (pattern === "") return ""
+
+ var re = ""
+ , hasMagic = false
+ , escaping = false
+ // ? => one single character
+ , patternListStack = []
+ , plType
+ , stateChar
+ , inClass = false
+ , reClassStart = -1
+ , classStart = -1
+ // . and .. never match anything that doesn't start with .,
+ // even when options.dot is set.
+ , patternStart = pattern.charAt(0) === "." ? "" // anything
+ // not (start or / followed by . or .. followed by / or end)
+ : options.dot ? "(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))"
+ : "(?!\\.)"
+
+ function clearStateChar () {
+ if (stateChar) {
+ // we had some state-tracking character
+ // that wasn't consumed by this pass.
+ switch (stateChar) {
+ case "*":
+ re += star
+ hasMagic = true
+ break
+ case "?":
+ re += qmark
+ hasMagic = true
+ break
+ default:
+ re += "\\"+stateChar
+ break
+ }
+ stateChar = false
+ }
+ }
+
+ for ( var i = 0, len = pattern.length, c
+ ; (i < len) && (c = pattern.charAt(i))
+ ; i ++ ) {
+
+ if (options.debug) {
+ console.error("%s\t%s %s %j", pattern, i, re, c)
+ }
+
+ // skip over any that are escaped.
+ if (escaping && reSpecials[c]) {
+ re += "\\" + c
+ escaping = false
+ continue
+ }
+
+ SWITCH: switch (c) {
+ case "/":
+ // completely not allowed, even escaped.
+ // Should already be path-split by now.
+ return false
+
+ case "\\":
+ clearStateChar()
+ escaping = true
+ continue
+
+ // the various stateChar values
+ // for the "extglob" stuff.
+ case "?":
+ case "*":
+ case "+":
+ case "@":
+ case "!":
+ if (options.debug) {
+ console.error("%s\t%s %s %j <-- stateChar", pattern, i, re, c)
+ }
+
+ // all of those are literals inside a class, except that
+ // the glob [!a] means [^a] in regexp
+ if (inClass) {
+ if (c === "!" && i === classStart + 1) c = "^"
+ re += c
+ continue
+ }
+
+ // if we already have a stateChar, then it means
+ // that there was something like ** or +? in there.
+ // Handle the stateChar, then proceed with this one.
+ clearStateChar()
+ stateChar = c
+ // if extglob is disabled, then +(asdf|foo) isn't a thing.
+ // just clear the statechar *now*, rather than even diving into
+ // the patternList stuff.
+ if (options.noext) clearStateChar()
+ continue
+
+ case "(":
+ if (inClass) {
+ re += "("
+ continue
+ }
+
+ if (!stateChar) {
+ re += "\\("
+ continue
+ }
+
+ plType = stateChar
+ patternListStack.push({ type: plType
+ , start: i - 1
+ , reStart: re.length })
+ re += stateChar === "!" ? "(?!" : "(?:"
+ stateChar = false
+ continue
+
+ case ")":
+ if (inClass || !patternListStack.length) {
+ re += "\\)"
+ continue
+ }
+
+ hasMagic = true
+ re += ")"
+ plType = patternListStack.pop().type
+ switch (plType) {
+ case "?":
+ case "+":
+ case "*": re += plType
+ case "!": // already handled by the start
+ case "@": break // the default anyway
+ }
+ continue
+
+ case "|":
+ if (inClass || !patternListStack.length || escaping) {
+ re += "\\|"
+ escaping = false
+ continue
+ }
+
+ re += "|"
+ continue
+
+ // these are mostly the same in regexp and glob
+ case "[":
+ // swallow any state-tracking char before the [
+ clearStateChar()
+
+ if (inClass) {
+ re += "\\" + c
+ continue
+ }
+
+ inClass = true
+ classStart = i
+ reClassStart = re.length
+ re += c
+ continue
+
+ case "]":
+ // a right bracket shall lose its special
+ // meaning and represent itself in
+ // a bracket expression if it occurs
+ // first in the list. -- POSIX.2 2.8.3.2
+ if (i === classStart + 1 || !inClass) {
+ re += "\\" + c
+ escaping = false
+ continue
+ }
+
+ // finish up the class.
+ hasMagic = true
+ inClass = false
+ re += c
+ continue
+
+ default:
+ // swallow any state char that wasn't consumed
+ clearStateChar()
+
+ if (escaping) {
+ // no need
+ escaping = false
+ } else if (reSpecials[c]
+ && !(c === "^" && inClass)) {
+ re += "\\"
+ }
+
+ re += c
+
+ } // switch
+ } // for
+
+
+ // handle the case where we left a class open.
+ // "[abc" is valid, equivalent to "\[abc"
+ if (inClass) {
+ // split where the last [ was, and escape it
+ // this is a huge pita. We now have to re-walk
+ // the contents of the would-be class to re-translate
+ // any characters that were passed through as-is
+ var cs = pattern.substr(classStart + 1)
+ , sp = this.parse(cs, SUBPARSE)
+ re = re.substr(0, reClassStart) + "\\[" + sp[0]
+ hasMagic = hasMagic || sp[1]
+ }
+
+ // handle the case where we had a +( thing at the *end*
+ // of the pattern.
+ // each pattern list stack adds 3 chars, and we need to go through
+ // and escape any | chars that were passed through as-is for the regexp.
+ // Go through and escape them, taking care not to double-escape any
+ // | chars that were already escaped.
+ var pl
+ while (pl = patternListStack.pop()) {
+ var tail = re.slice(pl.reStart + 3)
+ // maybe some even number of \, then maybe 1 \, followed by a |
+ tail = tail.replace(/((?:\\{2})*)(\\?)\|/g, function (_, $1, $2) {
+ if (!$2) {
+ // the | isn't already escaped, so escape it.
+ $2 = "\\"
+ }
+
+ // need to escape all those slashes *again*, without escaping the
+ // one that we need for escaping the | character. As it works out,
+ // escaping an even number of slashes can be done by simply repeating
+ // it exactly after itself. That's why this trick works.
+ //
+ // I am sorry that you have to see this.
+ return $1 + $1 + $2 + "|"
+ })
+
+ // console.error("tail=%j\n %s", tail, tail)
+ var t = pl.type === "*" ? star
+ : pl.type === "?" ? qmark
+ : "\\" + pl.type
+
+ hasMagic = true
+ re = re.slice(0, pl.reStart)
+ + t + "\\("
+ + tail
+ }
+
+ // handle trailing things that only matter at the very end.
+ clearStateChar()
+ if (escaping) {
+ // trailing \\
+ re += "\\\\"
+ }
+
+ // only need to apply the nodot start if the re starts with
+ // something that could conceivably capture a dot
+ var addPatternStart = false
+ switch (re.charAt(0)) {
+ case ".":
+ case "[":
+ case "(": addPatternStart = true
+ }
+
+ // if the re is not "" at this point, then we need to make sure
+ // it doesn't match against an empty path part.
+ // Otherwise a/* will match a/, which it should not.
+ if (re !== "" && hasMagic) re = "(?=.)" + re
+
+ if (addPatternStart) re = patternStart + re
+
+ // parsing just a piece of a larger pattern.
+ if (isSub === SUBPARSE) {
+ return [ re, hasMagic ]
+ }
+
+ // skip the regexp for non-magical patterns
+ // unescape anything in it, though, so that it'll be
+ // an exact match against a file etc.
+ if (!hasMagic) {
+ return globUnescape(pattern)
+ }
+
+ var flags = options.nocase ? "i" : ""
+ , regExp = new RegExp("^" + re + "$", flags)
+
+ regExp._glob = pattern
+ regExp._src = re
+
+ return regExp
+}
+
+minimatch.makeRe = function (pattern, options) {
+ return new Minimatch(pattern, options || {}).makeRe()
+}
+
+Minimatch.prototype.makeRe = makeRe
+function makeRe () {
+ if (this.regexp || this.regexp === false) return this.regexp
+
+ // at this point, this.set is a 2d array of partial
+ // pattern strings, or "**".
+ //
+ // It's better to use .match(). This function shouldn't
+ // be used, really, but it's pretty convenient sometimes,
+ // when you just want to work with a regex.
+ var set = this.set
+
+ if (!set.length) return this.regexp = false
+ var options = this.options
+
+ var twoStar = options.noglobstar ? star
+ : options.dot ? twoStarDot
+ : twoStarNoDot
+ , flags = options.nocase ? "i" : ""
+
+ var re = set.map(function (pattern) {
+ return pattern.map(function (p) {
+ return (p === GLOBSTAR) ? twoStar
+ : (typeof p === "string") ? regExpEscape(p)
+ : p._src
+ }).join("\\\/")
+ }).join("|")
+
+ // must match entire pattern
+ // ending in a * or ** will make it less strict.
+ re = "^" + re + "$"
+
+ // can match anything, as long as it's not this.
+ if (this.negate) re = "^(?!" + re + ").*$"
+
+ try {
+ return this.regexp = new RegExp(re, flags)
+ } catch (ex) {
+ return this.regexp = false
+ }
+}
+
+minimatch.match = function (list, pattern, options) {
+ var mm = new Minimatch(pattern, options)
+ list = list.filter(function (f) {
+ return mm.match(f)
+ })
+ if (options.nonull && !list.length) {
+ list.push(pattern)
+ }
+ return list
+}
+
+Minimatch.prototype.match = match
+function match (f, partial) {
+ // console.error("match", f, this.pattern)
+ // short-circuit in the case of busted things.
+ // comments, etc.
+ if (this.comment) return false
+ if (this.empty) return f === ""
+
+ var options = this.options
+
+ // first, normalize any slash-separated path parts.
+ // f = path.normalize(f)
+ var absolute = isAbsolute(f)
+
+ // console.error(this.pattern, f, absolute)
+
+ // windows: need to use /, not \
+ // On other platforms, \ is a valid (albeit bad) filename char.
+ if (process.platform === "win32") {
+ f = f.split("\\").join("/")
+ }
+
+ // treat the test path as a set of pathparts.
+ f = f.split(slashSplit)
+ // console.error(this.pattern, "split", f)
+
+ // just ONE of the pattern sets in this.set needs to match
+ // in order for it to be valid. If negating, then just one
+ // match means that we have failed.
+ // Either way, return on the first hit.
+
+ var set = this.set
+ // console.error(this.pattern, "set", set)
+
+ for (var i = 0, l = set.length; i < l; i ++) {
+ var pattern = set[i]
+ var hit = this.matchOne(f, pattern, partial)
+ if (hit) {
+ return !this.negate
+ }
+ }
+
+ // didn't get any hits. this is success if it's a negative
+ // pattern, failure otherwise.
+ return this.negate
+}
+
+// set partial to true to test if, for example,
+// "/a/b" matches the start of "/*/b/*/d"
+// Partial means, if you run out of file before you run
+// out of pattern, then that's fine, as long as all
+// the parts match.
+Minimatch.prototype.matchOne = function (file, pattern, partial) {
+ var options = this.options
+
+ if (options.debug) {
+ console.error("matchOne",
+ { "this": this
+ , file: file
+ , pattern: pattern })
+ }
+
+ if (options.matchBase && pattern.length === 1) {
+ file = path.basename(file.join("/")).split("/")
+ }
+
+ if (options.debug) {
+ console.error("matchOne", file.length, pattern.length)
+ }
+
+ for ( var fi = 0
+ , pi = 0
+ , fl = file.length
+ , pl = pattern.length
+ ; (fi < fl) && (pi < pl)
+ ; fi ++, pi ++ ) {
+
+ if (options.debug) {
+ console.error("matchOne loop")
+ }
+ var p = pattern[pi]
+ , f = file[fi]
+
+ if (options.debug) {
+ console.error(pattern, p, f)
+ }
+
+ // should be impossible.
+ // some invalid regexp stuff in the set.
+ if (p === false) return false
+
+ if (p === GLOBSTAR) {
+ // "**"
+ // a/**/b/**/c would match the following:
+ // a/b/x/y/z/c
+ // a/x/y/z/b/c
+ // a/b/x/b/x/c
+ // a/b/c
+ // To do this, take the rest of the pattern after
+ // the **, and see if it would match the file remainder.
+ // If so, return success.
+ // If not, the ** "swallows" a segment, and try again.
+ // This is recursively awful.
+ // a/b/x/y/z/c
+ // - a matches a
+ // - doublestar
+ // - matchOne(b/x/y/z/c, b/**/c)
+ // - b matches b
+ // - doublestar
+ // - matchOne(x/y/z/c, c) -> no
+ // - matchOne(y/z/c, c) -> no
+ // - matchOne(z/c, c) -> no
+ // - matchOne(c, c) yes, hit
+ var fr = fi
+ , pr = pi + 1
+ if (pr === pl) {
+ // a ** at the end will just swallow the rest.
+ // We have found a match.
+ // however, it will not swallow /.x, unless
+ // options.dot is set.
+ // . and .. are *never* matched by **, for explosively
+ // exponential reasons.
+ for ( ; fi < fl; fi ++) {
+ if (file[fi] === "." || file[fi] === ".." ||
+ (!options.dot && file[fi].charAt(0) === ".")) return false
+ }
+ return true
+ }
+
+ // ok, let's see if we can swallow whatever we can.
+ WHILE: while (fr < fl) {
+ var swallowee = file[fr]
+ if (swallowee === "." || swallowee === ".." ||
+ (!options.dot && swallowee.charAt(0) === ".")) {
+ // console.error("dot detected!")
+ break WHILE
+ }
+
+ // XXX remove this slice. Just pass the start index.
+ if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+ // found a match.
+ return true
+ } else {
+ // ** swallows a segment, and continue.
+ fr ++
+ }
+ }
+ // no match was found.
+ // However, in partial mode, we can't say this is necessarily over.
+ // If there's more *pattern* left, then
+ if (partial) {
+ // ran out of file
+ // console.error("\n>>> no match, partial?", file, fr, pattern, pr)
+ if (fr === fl) return true
+ }
+ return false
+ }
+
+ // something other than **
+ // non-magic patterns just have to match exactly
+ // patterns with magic have been turned into regexps.
+ var hit
+ if (typeof p === "string") {
+ if (options.nocase) {
+ hit = f.toLowerCase() === p.toLowerCase()
+ } else {
+ hit = f === p
+ }
+ if (options.debug) {
+ console.error("string match", p, f, hit)
+ }
+ } else {
+ hit = f.match(p)
+ if (options.debug) {
+ console.error("pattern match", p, f, hit)
+ }
+ }
+
+ if (!hit) return false
+ }
+
+ // Note: ending in / means that we'll get a final ""
+ // at the end of the pattern. This can only match a
+ // corresponding "" at the end of the file.
+ // If the file ends in /, then it can only match a
+ // a pattern that ends in /, unless the pattern just
+ // doesn't have any more for it. But, a/b/ should *not*
+ // match "a/b/*", even though "" matches against the
+ // [^/]*? pattern, except in partial mode, where it might
+ // simply not be reached yet.
+ // However, a/b/ should still satisfy a/*
+
+ // now either we fell off the end of the pattern, or we're done.
+ if (fi === fl && pi === pl) {
+ // ran out of pattern and filename at the same time.
+ // an exact hit!
+ return true
+ } else if (fi === fl) {
+ // ran out of file, but still had pattern left.
+ // this is ok if we're doing the match as part of
+ // a glob fs traversal.
+ return partial
+ } else if (pi === pl) {
+ // ran out of pattern, still have file left.
+ // this is only acceptable if we're on the very last
+ // empty segment of a file with a trailing slash.
+ // a/* should match a/b/
+ var emptyFileEnd = (fi === fl - 1) && (file[fi] === "")
+ return emptyFileEnd
+ }
+
+ // should be unreachable.
+ throw new Error("wtf?")
+}
+
+
+// replace stuff like \* with *
+function globUnescape (s) {
+ return s.replace(/\\(.)/g, "$1")
+}
+
+
+function regExpEscape (s) {
+ return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&")
+}
+
+
+function isAbsolute (p) {
+ if (process.platform !== "win32") return p.charAt(0) === "/"
+
+ // yanked from node/lib/path.js
+ var splitDeviceRe =
+ /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?([\\\/])?([\s\S]*?)$/
+
+ var result = p.match(splitDeviceRe)
+ , device = result[1] || ""
+ , isUnc = device && device.charAt(1) !== ":"
+ , isAbs = !!result[2] || isUnc // UNC always absolute
+
+ return isAbs
+}
diff --git a/node_modules/minimatch/package.json b/node_modules/minimatch/package.json
new file mode 100644
index 000000000..92ccac5fb
--- /dev/null
+++ b/node_modules/minimatch/package.json
@@ -0,0 +1,29 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
+ "name": "minimatch",
+ "description": "a glob matcher in javascript",
+ "version": "0.1.3",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/minimatch.git"
+ },
+ "main": "minimatch.js",
+ "scripts": {
+ "test": "tap test"
+ },
+ "engines": {
+ "node": "*"
+ },
+ "dependencies": {
+ "lru-cache": "~1.0.5"
+ },
+ "devDependencies": {
+ "tap": "~0.1.3"
+ },
+ "licenses" : [
+ {
+ "type" : "MIT",
+ "url" : "http://github.com/isaacs/minimatch/raw/master/LICENSE"
+ }
+ ]
+}
diff --git a/node_modules/mkdirp/LICENSE b/node_modules/mkdirp/LICENSE
new file mode 100644
index 000000000..432d1aeb0
--- /dev/null
+++ b/node_modules/mkdirp/LICENSE
@@ -0,0 +1,21 @@
+Copyright 2010 James Halliday (mail@substack.net)
+
+This project is free software released under the MIT/X11 license:
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/mkdirp/README.markdown b/node_modules/mkdirp/README.markdown
new file mode 100644
index 000000000..0393c4ea5
--- /dev/null
+++ b/node_modules/mkdirp/README.markdown
@@ -0,0 +1,21 @@
+mkdirp
+======
+
+Like `mkdir -p`, but in node.js!
+
+Example
+=======
+
+pow.js
+------
+ var mkdirp = require('mkdirp');
+
+ mkdirp('/tmp/foo/bar/baz', 0755, function (err) {
+ if (err) console.error(err)
+ else console.log('pow!')
+ });
+
+Output
+ pow!
+
+And now /tmp/foo/bar/baz exists, huzzah!
diff --git a/node_modules/mkdirp/index.js b/node_modules/mkdirp/index.js
new file mode 100644
index 000000000..660280146
--- /dev/null
+++ b/node_modules/mkdirp/index.js
@@ -0,0 +1,36 @@
+var path = require('path');
+var fs = require('fs');
+
+module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP;
+
+function mkdirP (p, mode, f) {
+ var cb = f || function () {};
+ if (typeof mode === 'string') mode = parseInt(mode, 8);
+ p = path.resolve(p);
+
+ fs.mkdir(p, mode, function (er) {
+ if (!er) return cb();
+ switch (er.code) {
+ case 'ENOENT':
+ mkdirP(path.dirname(p), mode, function (er) {
+ if (er) cb(er);
+ else mkdirP(p, mode, cb);
+ });
+ break;
+
+ case 'EEXIST':
+ fs.stat(p, function (er2, stat) {
+ // if the stat fails, then that's super weird.
+ // let the original EEXIST be the failure reason.
+ if (er2 || !stat.isDirectory()) cb(er)
+ else if ((stat.mode & 0777) !== mode) fs.chmod(p, mode, cb);
+ else cb();
+ });
+ break;
+
+ default:
+ cb(er);
+ break;
+ }
+ });
+}
diff --git a/node_modules/mkdirp/package.json b/node_modules/mkdirp/package.json
new file mode 100644
index 000000000..99149f747
--- /dev/null
+++ b/node_modules/mkdirp/package.json
@@ -0,0 +1,23 @@
+{
+ "name" : "mkdirp",
+ "description" : "Recursively mkdir, like `mkdir -p`",
+ "version" : "0.1.0",
+ "author" : "James Halliday <mail@substack.net> (http://substack.net)",
+ "main" : "./index",
+ "keywords" : [
+ "mkdir",
+ "directory"
+ ],
+ "repository" : {
+ "type" : "git",
+ "url" : "http://github.com/substack/node-mkdirp.git"
+ },
+ "scripts" : {
+ "test" : "tap test/*.js"
+ },
+ "devDependencies" : {
+ "tap" : "0.0.x"
+ },
+ "license" : "MIT/X11",
+ "engines": { "node": "*" }
+}
diff --git a/node_modules/node-uuid/.npmignore b/node_modules/node-uuid/.npmignore
new file mode 100644
index 000000000..fd4f2b066
--- /dev/null
+++ b/node_modules/node-uuid/.npmignore
@@ -0,0 +1,2 @@
+node_modules
+.DS_Store
diff --git a/node_modules/node-uuid/LICENSE.md b/node_modules/node-uuid/LICENSE.md
new file mode 100644
index 000000000..bcdddf9a0
--- /dev/null
+++ b/node_modules/node-uuid/LICENSE.md
@@ -0,0 +1,3 @@
+Copyright (c) 2010 Robert Kieffer
+
+Dual licensed under the [MIT](http://en.wikipedia.org/wiki/MIT_License) and [GPL](http://en.wikipedia.org/wiki/GNU_General_Public_License) licenses.
diff --git a/node_modules/node-uuid/README.md b/node_modules/node-uuid/README.md
new file mode 100644
index 000000000..a44d9a761
--- /dev/null
+++ b/node_modules/node-uuid/README.md
@@ -0,0 +1,199 @@
+# node-uuid
+
+Simple, fast generation of [RFC4122](http://www.ietf.org/rfc/rfc4122.txt) UUIDS.
+
+Features:
+
+* Generate RFC4122 version 1 or version 4 UUIDs
+* Runs in node.js and all browsers.
+* Cryptographically strong random # generation on supporting platforms
+* 1.1K minified and gzip'ed (Want something smaller? Check this [crazy shit](https://gist.github.com/982883) out! )
+* [Annotated source code](http://broofa.github.com/node-uuid/docs/uuid.html)
+
+## Getting Started
+
+Install it in your browser:
+
+```html
+<script src="uuid.js"></script>
+```
+
+Or in node.js:
+
+```
+npm install node-uuid
+```
+
+```javascript
+var uuid = require('node-uuid');
+```
+
+Then create some ids ...
+
+```javascript
+// Generate a v1 (time-based) id
+uuid.v1(); // -> '6c84fb90-12c4-11e1-840d-7b25c5ee775a'
+
+// Generate a v4 (random) id
+uuid.v4(); // -> '110ec58a-a0f2-4ac4-8393-c866d813b8d1'
+```
+
+## API
+
+### uuid.v1([`options` [, `buffer` [, `offset`]]])
+
+Generate and return a RFC4122 v1 (timestamp-based) UUID.
+
+* `options` - (Object) Optional uuid state to apply. Properties may include:
+
+  * `node` - (Array) Node id as Array of 6 bytes (per 4.1.6). Default: Randomly generated ID. See note 1.
+ * `clockseq` - (Number between 0 - 0x3fff) RFC clock sequence. Default: An internally maintained clockseq is used.
+ * `msecs` - (Number | Date) Time in milliseconds since unix Epoch. Default: The current time is used.
+ * `nsecs` - (Number between 0-9999) additional time, in 100-nanosecond units. Ignored if `msecs` is unspecified. Default: internal uuid counter is used, as per 4.2.1.2.
+
+* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written.
+* `offset` - (Number) Starting index in `buffer` at which to begin writing.
+
+Returns `buffer`, if specified, otherwise the string form of the UUID
+
+Notes:
+
+1. The randomly generated node id is only guaranteed to stay constant for the lifetime of the current JS runtime. (Future versions of this module may use persistent storage mechanisms to extend this guarantee.)
+
+Example: Generate string UUID with fully-specified options
+
+```javascript
+uuid.v1({
+ node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab],
+ clockseq: 0x1234,
+ msecs: new Date('2011-11-01').getTime(),
+ nsecs: 5678
+}); // -> "710b962e-041c-11e1-9234-0123456789ab"
+```
+
+Example: In-place generation of two binary IDs
+
+```javascript
+// Generate two ids in an array
+var arr = new Array(32); // -> []
+uuid.v1(null, arr, 0); // -> [02 a2 ce 90 14 32 11 e1 85 58 0b 48 8e 4f c1 15]
+uuid.v1(null, arr, 16); // -> [02 a2 ce 90 14 32 11 e1 85 58 0b 48 8e 4f c1 15 02 a3 1c b0 14 32 11 e1 85 58 0b 48 8e 4f c1 15]
+
+// Optionally use uuid.unparse() to stringify the ids
+uuid.unparse(buffer); // -> '02a2ce90-1432-11e1-8558-0b488e4fc115'
+uuid.unparse(buffer, 16) // -> '02a31cb0-1432-11e1-8558-0b488e4fc115'
+```
+
+### uuid.v4([`options` [, `buffer` [, `offset`]]])
+
+Generate and return a RFC4122 v4 UUID.
+
+* `options` - (Object) Optional uuid state to apply. Properties may include:
+
+ * `random` - (Number[16]) Array of 16 numbers (0-255) to use in place of randomly generated values
+ * `rng` - (Function) Random # generator to use. Set to one of the built-in generators - `uuid.mathRNG` (all platforms), `uuid.nodeRNG` (node.js only), `uuid.whatwgRNG` (WebKit only) - or a custom function that returns an array[16] of byte values.
+
+* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written.
+* `offset` - (Number) Starting index in `buffer` at which to begin writing.
+
+Returns `buffer`, if specified, otherwise the string form of the UUID
+
+Example: Generate string UUID with fully-specified options
+
+```javascript
+uuid.v4({
+ random: [
+ 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea,
+ 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36
+ ]
+});
+// -> "109156be-c4fb-41ea-b1b4-efe1671c5836"
+```
+
+Example: Generate two IDs in a single buffer
+
+```javascript
+var buffer = new Array(32); // (or 'new Buffer' in node.js)
+uuid.v4(null, buffer, 0);
+uuid.v4(null, buffer, 16);
+```
+
+### uuid.parse(id[, buffer[, offset]])
+### uuid.unparse(buffer[, offset])
+
+Parse and unparse UUIDs
+
+ * `id` - (String) UUID(-like) string
+ * `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. Default: A new Array or Buffer is used
+ * `offset` - (Number) Starting index in `buffer` at which to begin writing. Default: 0
+
+Example parsing and unparsing a UUID string
+
+```javascript
+var bytes = uuid.parse('797ff043-11eb-11e1-80d6-510998755d10'); // -> <Buffer 79 7f f0 43 11 eb 11 e1 80 d6 51 09 98 75 5d 10>
+var string = uuid.unparse(bytes); // -> '797ff043-11eb-11e1-80d6-510998755d10'
+```
+
+### uuid.noConflict()
+
+(Browsers only) Set `uuid` property back to its previous value.
+
+Returns the node-uuid object.
+
+Example:
+
+```javascript
+var myUuid = uuid.noConflict();
+myUuid.v1(); // -> '6c84fb90-12c4-11e1-840d-7b25c5ee775a'
+```
+
+## Deprecated APIs
+
+Support for the following v1.2 APIs is available in v1.3, but is deprecated and will be removed in the next major version.
+
+### uuid([format [, buffer [, offset]]])
+
+uuid() has become uuid.v4(), and the `format` argument is now implicit in the `buffer` argument. (i.e. if you specify a buffer, the format is assumed to be binary).
+
+### uuid.BufferClass
+
+The class of container created when generating binary uuid data if no buffer argument is specified. This is expected to go away, with no replacement API.
+
+## Testing
+
+In node.js
+
+```
+> cd test
+> node uuid.js
+```
+
+In Browser
+
+```
+open test/test.html
+```
+
+### Benchmarking
+
+Requires node.js
+
+```
+npm install uuid uuid-js
+node test/benchmark.js
+```
+
+For a more complete discussion of node-uuid performance, please see the `benchmark/README.md` file, and the [benchmark wiki](https://github.com/broofa/node-uuid/wiki/Benchmark)
+
+For browser performance [checkout the JSPerf tests](http://jsperf.com/node-uuid-performance).
+
+## Release notes
+
+v1.3.2:
+* Improve tests and handling of v1() options (Issue #24)
+* Expose RNG option to allow for perf testing with different generators
+
+v1.3:
+* Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)!
+* Support for node.js crypto API
+* De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code
diff --git a/node_modules/node-uuid/package.json b/node_modules/node-uuid/package.json
new file mode 100644
index 000000000..9df0da985
--- /dev/null
+++ b/node_modules/node-uuid/package.json
@@ -0,0 +1,14 @@
+{
+ "name" : "node-uuid",
+ "description" : "Rigorous implementation of RFC4122 (v1 and v4) UUIDs.",
+ "url" : "http://github.com/broofa/node-uuid",
+ "keywords" : ["uuid", "guid", "rfc4122"],
+ "author" : "Robert Kieffer <robert@broofa.com>",
+ "contributors" : [
+ {"name": "Christoph Tavan <dev@tavan.de>", "github": "https://github.com/ctavan"}
+ ],
+ "dependencies" : {},
+ "lib" : ".",
+ "main" : "./uuid.js",
+ "version" : "1.3.3"
+}
diff --git a/node_modules/node-uuid/uuid.js b/node_modules/node-uuid/uuid.js
new file mode 100644
index 000000000..27f1d1272
--- /dev/null
+++ b/node_modules/node-uuid/uuid.js
@@ -0,0 +1,249 @@
+// node-uuid/uuid.js
+//
+// Copyright (c) 2010 Robert Kieffer
+// Dual licensed under the MIT and GPL licenses.
+// Documentation and details at https://github.com/broofa/node-uuid
+(function() {
+ var _global = this;
+
+ // Unique ID creation requires a high quality random # generator, but
+ // Math.random() does not guarantee "cryptographic quality". So we feature
+ // detect for more robust APIs, normalizing each method to return 128-bits
+ // (16 bytes) of random data.
+ var mathRNG, nodeRNG, whatwgRNG;
+
+ // Math.random()-based RNG. All platforms, very fast, unknown quality
+ var _rndBytes = new Array(16);
+ mathRNG = function() {
+ var r, b = _rndBytes, i = 0;
+
+ for (var i = 0, r; i < 16; i++) {
+ if ((i & 0x03) == 0) r = Math.random() * 0x100000000;
+ b[i] = r >>> ((i & 0x03) << 3) & 0xff;
+ }
+
+ return b;
+ }
+
+ // WHATWG crypto-based RNG - http://wiki.whatwg.org/wiki/Crypto
+ // WebKit only (currently), moderately fast, high quality
+ if (_global.crypto && crypto.getRandomValues) {
+ var _rnds = new Uint32Array(4);
+ whatwgRNG = function() {
+ crypto.getRandomValues(_rnds);
+
+ for (var c = 0 ; c < 16; c++) {
+ _rndBytes[c] = _rnds[c >> 2] >>> ((c & 0x03) * 8) & 0xff;
+ }
+ return _rndBytes;
+ }
+ }
+
+ // Node.js crypto-based RNG - http://nodejs.org/docs/v0.6.2/api/crypto.html
+ // Node.js only, moderately fast, high quality
+ try {
+ var _rb = require('crypto').randomBytes;
+ nodeRNG = _rb && function() {
+ return _rb(16);
+ };
+ } catch (e) {}
+
+ // Select RNG with best quality
+ var _rng = nodeRNG || whatwgRNG || mathRNG;
+
+ // Buffer class to use
+ var BufferClass = typeof(Buffer) == 'function' ? Buffer : Array;
+
+ // Maps for number <-> hex string conversion
+ var _byteToHex = [];
+ var _hexToByte = {};
+ for (var i = 0; i < 256; i++) {
+ _byteToHex[i] = (i + 0x100).toString(16).substr(1);
+ _hexToByte[_byteToHex[i]] = i;
+ }
+
+ // **`parse()` - Parse a UUID into it's component bytes**
+ function parse(s, buf, offset) {
+ var i = (buf && offset) || 0, ii = 0;
+
+ buf = buf || [];
+ s.toLowerCase().replace(/[0-9a-f]{2}/g, function(byte) {
+ if (ii < 16) { // Don't overflow!
+ buf[i + ii++] = _hexToByte[byte];
+ }
+ });
+
+ // Zero out remaining bytes if string was short
+ while (ii < 16) {
+ buf[i + ii++] = 0;
+ }
+
+ return buf;
+ }
+
+ // **`unparse()` - Convert UUID byte array (ala parse()) into a string**
+ function unparse(buf, offset) {
+ var i = offset || 0, bth = _byteToHex;
+ return bth[buf[i++]] + bth[buf[i++]] +
+ bth[buf[i++]] + bth[buf[i++]] + '-' +
+ bth[buf[i++]] + bth[buf[i++]] + '-' +
+ bth[buf[i++]] + bth[buf[i++]] + '-' +
+ bth[buf[i++]] + bth[buf[i++]] + '-' +
+ bth[buf[i++]] + bth[buf[i++]] +
+ bth[buf[i++]] + bth[buf[i++]] +
+ bth[buf[i++]] + bth[buf[i++]];
+ }
+
+ // **`v1()` - Generate time-based UUID**
+ //
+ // Inspired by https://github.com/LiosK/UUID.js
+ // and http://docs.python.org/library/uuid.html
+
+ // random #'s we need to init node and clockseq
+ var _seedBytes = _rng();
+
+ // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
+ var _nodeId = [
+ _seedBytes[0] | 0x01,
+ _seedBytes[1], _seedBytes[2], _seedBytes[3], _seedBytes[4], _seedBytes[5]
+ ];
+
+ // Per 4.2.2, randomize (14 bit) clockseq
+ var _clockseq = (_seedBytes[6] << 8 | _seedBytes[7]) & 0x3fff;
+
+ // Previous uuid creation time
+ var _lastMSecs = 0, _lastNSecs = 0;
+
+ // See https://github.com/broofa/node-uuid for API details
+ function v1(options, buf, offset) {
+ var i = buf && offset || 0;
+ var b = buf || [];
+
+ options = options || {};
+
+ var clockseq = options.clockseq != null ? options.clockseq : _clockseq;
+
+ // UUID timestamps are 100 nano-second units since the Gregorian epoch,
+ // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
+ // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
+ // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
+ var msecs = options.msecs != null ? options.msecs : new Date().getTime();
+
+ // Per 4.2.1.2, use count of uuid's generated during the current clock
+ // cycle to simulate higher resolution clock
+ var nsecs = options.nsecs != null ? options.nsecs : _lastNSecs + 1;
+
+ // Time since last uuid creation (in msecs)
+ var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000;
+
+ // Per 4.2.1.2, Bump clockseq on clock regression
+ if (dt < 0 && options.clockseq == null) {
+ clockseq = clockseq + 1 & 0x3fff;
+ }
+
+ // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
+ // time interval
+ if ((dt < 0 || msecs > _lastMSecs) && options.nsecs == null) {
+ nsecs = 0;
+ }
+
+ // Per 4.2.1.2 Throw error if too many uuids are requested
+ if (nsecs >= 10000) {
+ throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec');
+ }
+
+ _lastMSecs = msecs;
+ _lastNSecs = nsecs;
+ _clockseq = clockseq;
+
+ // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
+ msecs += 12219292800000;
+
+ // `time_low`
+ var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
+ b[i++] = tl >>> 24 & 0xff;
+ b[i++] = tl >>> 16 & 0xff;
+ b[i++] = tl >>> 8 & 0xff;
+ b[i++] = tl & 0xff;
+
+ // `time_mid`
+ var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff;
+ b[i++] = tmh >>> 8 & 0xff;
+ b[i++] = tmh & 0xff;
+
+ // `time_high_and_version`
+ b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
+ b[i++] = tmh >>> 16 & 0xff;
+
+ // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
+ b[i++] = clockseq >>> 8 | 0x80;
+
+ // `clock_seq_low`
+ b[i++] = clockseq & 0xff;
+
+ // `node`
+ var node = options.node || _nodeId;
+ for (var n = 0; n < 6; n++) {
+ b[i + n] = node[n];
+ }
+
+ return buf ? buf : unparse(b);
+ }
+
+ // **`v4()` - Generate random UUID**
+
+ // See https://github.com/broofa/node-uuid for API details
+ function v4(options, buf, offset) {
+ // Deprecated - 'format' argument, as supported in v1.2
+ var i = buf && offset || 0;
+
+ if (typeof(options) == 'string') {
+ buf = options == 'binary' ? new BufferClass(16) : null;
+ options = null;
+ }
+ options = options || {};
+
+ var rnds = options.random || (options.rng || _rng)();
+
+ // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
+ rnds[6] = (rnds[6] & 0x0f) | 0x40;
+ rnds[8] = (rnds[8] & 0x3f) | 0x80;
+
+ // Copy bytes to buffer, if provided
+ if (buf) {
+ for (var ii = 0; ii < 16; ii++) {
+ buf[i + ii] = rnds[ii];
+ }
+ }
+
+ return buf || unparse(rnds);
+ }
+
+ // Export public API
+ var uuid = v4;
+ uuid.v1 = v1;
+ uuid.v4 = v4;
+ uuid.parse = parse;
+ uuid.unparse = unparse;
+ uuid.BufferClass = BufferClass;
+
+ // Export RNG options
+ uuid.mathRNG = mathRNG;
+ uuid.nodeRNG = nodeRNG;
+ uuid.whatwgRNG = whatwgRNG;
+
+ if (typeof(module) != 'undefined') {
+ // Play nice with node.js
+ module.exports = uuid;
+ } else {
+ // Play nice with browsers
+ var _previousRoot = _global.uuid;
+
+ // **`noConflict()` - (browser only) to reset global 'uuid' var**
+ uuid.noConflict = function() {
+ _global.uuid = _previousRoot;
+ return uuid;
+ }
+ _global.uuid = uuid;
+ }
+}());
diff --git a/node_modules/nopt/.npmignore b/node_modules/nopt/.npmignore
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/node_modules/nopt/.npmignore
diff --git a/node_modules/nopt/LICENSE b/node_modules/nopt/LICENSE
new file mode 100644
index 000000000..05a401094
--- /dev/null
+++ b/node_modules/nopt/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/nopt/README.md b/node_modules/nopt/README.md
new file mode 100644
index 000000000..f290da8f4
--- /dev/null
+++ b/node_modules/nopt/README.md
@@ -0,0 +1,210 @@
+If you want to write an option parser, and have it be good, there are
+two ways to do it. The Right Way, and the Wrong Way.
+
+The Wrong Way is to sit down and write an option parser. We've all done
+that.
+
+The Right Way is to write some complex configurable program with so many
+options that you go half-insane just trying to manage them all, and put
+it off with duct-tape solutions until you see exactly to the core of the
+problem, and finally snap and write an awesome option parser.
+
+If you want to write an option parser, don't write an option parser.
+Write a package manager, or a source control system, or a service
+restarter, or an operating system. You probably won't end up with a
+good one of those, but if you don't give up, and you are relentless and
+diligent enough in your procrastination, you may just end up with a very
+nice option parser.
+
+## USAGE
+
+ // my-program.js
+ var nopt = require("nopt")
+ , Stream = require("stream").Stream
+ , path = require("path")
+ , knownOpts = { "foo" : [String, null]
+ , "bar" : [Stream, Number]
+ , "baz" : path
+ , "bloo" : [ "big", "medium", "small" ]
+ , "flag" : Boolean
+ , "pick" : Boolean
+ , "many" : [String, Array]
+ }
+ , shortHands = { "foofoo" : ["--foo", "Mr. Foo"]
+ , "b7" : ["--bar", "7"]
+ , "m" : ["--bloo", "medium"]
+ , "p" : ["--pick"]
+ , "f" : ["--flag"]
+ }
+ // everything is optional.
+ // knownOpts and shorthands default to {}
+ // arg list defaults to process.argv
+ // slice defaults to 2
+ , parsed = nopt(knownOpts, shortHands, process.argv, 2)
+ console.log(parsed)
+
+This would give you support for any of the following:
+
+```bash
+$ node my-program.js --foo "blerp" --no-flag
+{ "foo" : "blerp", "flag" : false }
+
+$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag
+{ bar: 7, foo: "Mr. Hand", flag: true }
+
+$ node my-program.js --foo "blerp" -f -----p
+{ foo: "blerp", flag: true, pick: true }
+
+$ node my-program.js -fp --foofoo
+{ foo: "Mr. Foo", flag: true, pick: true }
+
+$ node my-program.js --foofoo -- -fp # -- stops the flag parsing.
+{ foo: "Mr. Foo", argv: { remain: ["-fp"] } }
+
+$ node my-program.js --blatzk 1000 -fp # unknown opts are ok.
+{ blatzk: 1000, flag: true, pick: true }
+
+$ node my-program.js --blatzk true -fp # but they need a value
+{ blatzk: true, flag: true, pick: true }
+
+$ node my-program.js --no-blatzk -fp # unless they start with "no-"
+{ blatzk: false, flag: true, pick: true }
+
+$ node my-program.js --baz b/a/z # known paths are resolved.
+{ baz: "/Users/isaacs/b/a/z" }
+
+# if Array is one of the types, then it can take many
+# values, and will always be an array. The other types provided
+# specify what types are allowed in the list.
+
+$ node my-program.js --many 1 --many null --many foo
+{ many: ["1", "null", "foo"] }
+
+$ node my-program.js --many foo
+{ many: ["foo"] }
+```
+
+Read the tests at the bottom of `lib/nopt.js` for more examples of
+what this puppy can do.
+
+## Types
+
+The following types are supported, and defined on `nopt.typeDefs`
+
+* String: A normal string. No parsing is done.
+* path: A file system path. Gets resolved against cwd if not absolute.
+* url: A url. If it doesn't parse, it isn't accepted.
+* Number: Must be numeric.
+* Date: Must parse as a date. If it does, and `Date` is one of the options,
+ then it will return a Date object, not a string.
+* Boolean: Must be either `true` or `false`. If an option is a boolean,
+ then it does not need a value, and its presence will imply `true` as
+ the value. To negate boolean flags, do `--no-whatever` or `--whatever
+ false`
+* NaN: Means that the option is strictly not allowed. Any value will
+ fail.
+* Stream: An object matching the "Stream" class in node. Valuable
+ for use when validating programmatically. (npm uses this to let you
+ supply any WriteStream on the `outfd` and `logfd` config options.)
+* Array: If `Array` is specified as one of the types, then the value
+ will be parsed as a list of options. This means that multiple values
+ can be specified, and that the value will always be an array.
+
+If a type is an array of values not on this list, then those are
+considered valid values. For instance, in the example above, the
+`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`,
+and any other value will be rejected.
+
+When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be
+interpreted as their JavaScript equivalents, and numeric values will be
+interpreted as a number.
+
+You can also mix types and values, or multiple types, in a list. For
+instance `{ blah: [Number, null] }` would allow a value to be set to
+either a Number or null. When types are ordered, this implies a
+preference, and the first type that can be used to properly interpret
+the value will be used.
+
+To define a new type, add it to `nopt.typeDefs`. Each item in that
+hash is an object with a `type` member and a `validate` method. The
+`type` member is an object that matches what goes in the type list. The
+`validate` method is a function that gets called with `validate(data,
+key, val)`. Validate methods should assign `data[key]` to the valid
+value of `val` if it can be handled properly, or return boolean
+`false` if it cannot.
+
+You can also call `nopt.clean(data, types, typeDefs)` to clean up a
+config object and remove its invalid properties.
+
+## Error Handling
+
+By default, nopt outputs a warning to standard error when invalid
+options are found. You can change this behavior by assigning a method
+to `nopt.invalidHandler`. This method will be called as
+`nopt.invalidHandler(key, val, types)`, passing the offending key, the
+value, and the list of valid types.
+
+If no `nopt.invalidHandler` is assigned, then it will console.error
+its whining. If it is assigned to boolean `false` then the warning is
+suppressed.
+
+## Abbreviations
+
+Yes, they are supported. If you define options like this:
+
+```javascript
+{ "foolhardyelephants" : Boolean
+, "pileofmonkeys" : Boolean }
+```
+
+Then this will work:
+
+```bash
+node program.js --foolhar --pil
+node program.js --no-f --pileofmon
+# etc.
+```
+
+## Shorthands
+
+Shorthands are a hash of shorter option names to a snippet of args that
+they expand to.
+
+If multiple one-character shorthands are all combined, and the
+combination does not unambiguously match any other option or shorthand,
+then they will be broken up into their constituent parts. For example:
+
+```json
+{ "s" : ["--loglevel", "silent"]
+, "g" : "--global"
+, "f" : "--force"
+, "p" : "--parseable"
+, "l" : "--long"
+}
+```
+
+```bash
+npm ls -sgflp
+# just like doing this:
+npm ls --loglevel silent --global --force --long --parseable
+```
+
+## The Rest of the args
+
+The config object returned by nopt is given a special member called
+`argv`, which is an object with the following fields:
+
+* `remain`: The remaining args after all the parsing has occurred.
+* `original`: The args as they originally appeared.
+* `cooked`: The args after flags and shorthands are expanded.
+
+## Slicing
+
+Node programs are called with more or less the exact argv as it appears
+in C land, after the v8 and node-specific options have been plucked off.
+As such, `argv[0]` is always `node` and `argv[1]` is always the
+JavaScript program being run.
+
+That's usually not very useful to you. So they're sliced off by
+default. If you want them, then you can pass in `0` as the last
+argument, or any other number that you'd like to slice off the start of
+the list.
diff --git a/node_modules/nopt/bin/nopt.js b/node_modules/nopt/bin/nopt.js
new file mode 100755
index 000000000..df90c729a
--- /dev/null
+++ b/node_modules/nopt/bin/nopt.js
@@ -0,0 +1,44 @@
#!/usr/bin/env node
// Tiny CLI harness for manually exercising the nopt parser: parses
// process.argv against a sample spec, prints the result, and dumps the
// spec itself when --help is given.
var nopt = require("../lib/nopt")
  , types = { num: Number
            , bool: Boolean
            , help: Boolean
            , list: Array
            , "num-list": [Number, Array]
            , "str-list": [String, Array]
            , "bool-list": [Boolean, Array]
            , str: String }
  , shorthands = { s: [ "--str", "astring" ]
                 , b: [ "--bool" ]
                 , nb: [ "--no-bool" ]
                 , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ]
                 , "?": ["--help"]
                 , h: ["--help"]
                 , H: ["--help"]
                 , n: [ "--num", "125" ] }
  , parsed = nopt( types
                 , shorthands
                 , process.argv
                 , 2 )

console.log("parsed", parsed)

if (parsed.help) {
  console.log("")
  console.log("nopt cli tester")
  console.log("")
  console.log("types")
  // Render the type spec as {name: typeName} / {name: [typeNames]} so the
  // constructor functions print readably.
  console.log(Object.keys(types).map(function M (t) {
    var type = types[t]
    if (Array.isArray(type)) {
      return [t, type.map(function (type) { return type.name })]
    }
    return [t, type && type.name]
  }).reduce(function (s, i) {
    s[i[0]] = i[1]
    return s
  }, {}))
  console.log("")
  console.log("shorthands")
  console.log(shorthands)
}
diff --git a/node_modules/nopt/lib/nopt.js b/node_modules/nopt/lib/nopt.js
new file mode 100644
index 000000000..ff802dafe
--- /dev/null
+++ b/node_modules/nopt/lib/nopt.js
@@ -0,0 +1,552 @@
// info about each config option.

// Debug logging is enabled by setting DEBUG_NOPT or NOPT_DEBUG in the
// environment; otherwise debug() is a no-op.
var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG
  ? function () { console.error.apply(console, arguments) }
  : function () {}

var url = require("url")
  , path = require("path")
  , Stream = require("stream").Stream
  , abbrev = require("abbrev")

module.exports = exports = nopt
exports.clean = clean

// Built-in type definitions. Each entry pairs the `type` marker that
// callers put in their type specs with the validate function that
// coerces/checks a raw value (see validate() below for the contract).
exports.typeDefs =
  { String : { type: String, validate: validateString }
  , Boolean : { type: Boolean, validate: validateBoolean }
  , url : { type: url, validate: validateUrl }
  , Number : { type: Number, validate: validateNumber }
  , path : { type: path, validate: validatePath }
  , Stream : { type: Stream, validate: validateStream }
  , Date : { type: Date, validate: validateDate }
  }
+
// Parse an argument list against a spec.
//
// types      - map of option name -> type marker (see exports.typeDefs),
//              a list of type markers, or a list of literal allowed values.
// shorthands - map of short name -> expansion (array of args, or a
//              whitespace-separated string).
// args       - argument list; defaults to process.argv.
// slice      - how many leading entries to drop; defaults to 2
//              (the node binary and the script path).
//
// Returns the parsed data object, with an `argv` member holding
// {remain, cooked, original}.
function nopt (types, shorthands, args, slice) {
  args = args || process.argv
  types = types || {}
  shorthands = shorthands || {}
  if (typeof slice !== "number") slice = 2

  debug(types, shorthands, args, slice)

  args = args.slice(slice)
  var data = {}
    , remain = []
    , cooked = args
    , original = args.slice(0)

  parse(args, data, remain, types, shorthands)
  // now data is full
  clean(data, types, exports.typeDefs)
  // `cooked` aliases the array parse() expanded in place, so it reflects
  // shorthand/abbreviation expansion; `original` is the pre-parse copy.
  data.argv = {remain:remain,cooked:cooked,original:original}
  data.argv.toString = function () {
    return this.original.map(JSON.stringify).join(" ")
  }
  return data
}
+
// Coerce and validate every key of `data` in place. String values are
// interpreted (null/true/false/numbers/dates), each value is checked
// against its declared type(s), and invalid entries are reported via
// exports.invalidHandler and removed from `data`.
function clean (data, types, typeDefs) {
  typeDefs = typeDefs || exports.typeDefs
  // `remove` is a unique sentinel object: the map() below returns it for
  // values that failed validation so the filter() can drop them.
  var remove = {}
    , typeDefault = [false, true, null, String, Number]

  Object.keys(data).forEach(function (k) {
    if (k === "argv") return
    var val = data[k]
      , isArray = Array.isArray(val)
      , type = types[k]
    // Normalize: treat every value as a list, and every type as a list.
    if (!isArray) val = [val]
    if (!type) type = typeDefault
    if (type === Array) type = typeDefault.concat(Array)
    if (!Array.isArray(type)) type = [type]

    debug("val=%j", val)
    debug("types=", type)
    val = val.map(function (val) {
      // if it's an unknown value, then parse false/true/null/numbers/dates
      if (typeof val === "string") {
        debug("string %j", val)
        val = val.trim()
        if ((val === "null" && ~type.indexOf(null))
            || (val === "true" &&
               (~type.indexOf(true) || ~type.indexOf(Boolean)))
            || (val === "false" &&
               (~type.indexOf(false) || ~type.indexOf(Boolean)))) {
          val = JSON.parse(val)
          debug("jsonable %j", val)
        } else if (~type.indexOf(Number) && !isNaN(val)) {
          debug("convert to number", val)
          val = +val
        } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) {
          debug("convert to date", val)
          val = new Date(val)
        }
      }

      // Unknown keys only get the interpretation above, no validation.
      if (!types.hasOwnProperty(k)) {
        return val
      }

      // allow `--no-blah` to set 'blah' to null if null is allowed
      if (val === false && ~type.indexOf(null) &&
          !(~type.indexOf(false) || ~type.indexOf(Boolean))) {
        val = null
      }

      // Validate into a scratch object so a failure leaves `data` untouched
      // until we decide what to do with it.
      var d = {}
      d[k] = val
      debug("prevalidated val", d, val, types[k])
      if (!validate(d, k, val, types[k], typeDefs)) {
        if (exports.invalidHandler) {
          exports.invalidHandler(k, val, types[k], data)
        } else if (exports.invalidHandler !== false) {
          debug("invalid: "+k+"="+val, types[k])
        }
        return remove
      }
      debug("validated val", d, val, types[k])
      return d[k]
    }).filter(function (val) { return val !== remove })

    // Drop keys whose every value was invalid; unwrap single values.
    if (!val.length) delete data[k]
    else if (isArray) {
      debug(isArray, data[k], val)
      data[k] = val
    } else data[k] = val[0]

    debug("k=%s val=%j", k, val, data[k])
  })
}
+
// Coerce any value to its string form. This can never fail; the explicit
// `return true` makes that obvious and matches validatePath below
// (validate() treats any non-false return as success, so callers are
// unaffected).
function validateString (data, k, val) {
  data[k] = String(val)
  return true
}
+
// Resolve `val` to an absolute filesystem path. Relative paths are
// resolved against the current working directory; always succeeds.
function validatePath (data, k, val) {
  var resolved = path.resolve(String(val))
  data[k] = resolved
  return true
}
+
// Accept anything that coerces to a number and store the numeric value.
// Note: this is deliberately the coercing global isNaN, so numeric
// strings like "7" pass. Returns false for non-numeric input.
function validateNumber (data, k, val) {
  debug("validate Number %j %j %j", k, val, isNaN(val))
  if (isNaN(val)) return false
  data[k] = +val
}
+
// Accept anything Date.parse understands and store it as a Date object.
// Returns false when the value does not parse as a date.
function validateDate (data, k, val) {
  debug("validate Date %j %j %j", k, val, Date.parse(val))
  var s = Date.parse(val)
  if (isNaN(s)) return false
  data[k] = new Date(val)
}
+
// Coerce the raw value into a primitive boolean and store it. Never fails.
// Strings get special handling: numeric strings are truthy iff nonzero
// (the empty string counts as 0), and the words "null"/"false" are false;
// every other word is true.
function validateBoolean (data, k, val) {
  var result
  if (val instanceof Boolean) {
    // unwrap boxed Boolean objects
    result = val.valueOf()
  } else if (typeof val !== "string") {
    result = !!val
  } else if (!isNaN(val)) {
    // numeric string (coercing isNaN): nonzero means true
    result = !!(+val)
  } else {
    result = !(val === "null" || val === "false")
  }
  data[k] = result
}
+
// Accept only strings that parse to a URL with a host; stores the
// normalized href. Returns false for anything url.parse can't make
// sense of.
function validateUrl (data, k, val) {
  var parsed = url.parse(String(val))
  if (!parsed.host) return false
  data[k] = parsed.href
}
+
// Accept only genuine Stream instances (used for options like outfd that
// may be given a WriteStream directly); anything else is rejected.
function validateStream (data, k, val) {
  if (val instanceof Stream) {
    data[k] = val
    return
  }
  return false
}
+
// Check `val` against `type`, writing the (possibly coerced) value into
// data[k] on success. Returns true/false; on failure data[k] is deleted.
// `type` may be a list of types (first match wins), the Array marker,
// a literal allowed value, NaN ("nothing allowed"), or one of the
// typeDefs type markers.
function validate (data, k, val, type, typeDefs) {
  // arrays are lists of types.
  if (Array.isArray(type)) {
    for (var i = 0, l = type.length; i < l; i ++) {
      // the Array marker only means "many values allowed", not a type
      if (type[i] === Array) continue
      if (validate(data, k, val, type[i], typeDefs)) return true
    }
    delete data[k]
    return false
  }

  // an array of anything?
  if (type === Array) return true

  // NaN is poisonous. Means that something is not allowed.
  // (NaN is the only value not equal to itself, hence type !== type.)
  if (type !== type) {
    debug("Poison NaN", k, val, type)
    delete data[k]
    return false
  }

  // explicit list of values
  if (val === type) {
    debug("Explicitly allowed %j", val)
    data[k] = val
    return true
  }

  // now go through the list of typeDefs, validate against each one.
  // A validator signals failure by returning exactly false; any other
  // return value (usually undefined) counts as success.
  var ok = false
    , types = Object.keys(typeDefs)
  for (var i = 0, l = types.length; i < l; i ++) {
    debug("test type %j %j %j", k, val, types[i])
    var t = typeDefs[types[i]]
    if (t && type === t.type) {
      // validate into a scratch object, then copy the coerced value over
      var d = {}
      ok = false !== t.validate(d, k, val)
      val = d[k]
      if (ok) {
        data[k] = val
        break
      }
    }
  }
  debug("OK? %j (%j %j %j)", ok, k, val, types[i])

  if (!ok) delete data[k]
  return ok
}
+
// The core tokenizer: walk `args`, filling `data` with parsed option
// values and pushing positional arguments onto `remain`. Mutates `args`
// in place (splicing in shorthand expansions and splitting --key=val
// pairs), which is how nopt() exposes the "cooked" arg list.
function parse (args, data, remain, types, shorthands) {
  debug("parse", args, data, remain)

  // NOTE(review): `key` is never used in this function.
  var key = null
    , abbrevs = abbrev(Object.keys(types))
    , shortAbbr = abbrev(Object.keys(shorthands))

  for (var i = 0; i < args.length; i ++) {
    var arg = args[i]
    debug("arg", arg)

    // A bare run of dashes ("--", "---", ...) ends option parsing.
    if (arg.match(/^-{2,}$/)) {
      // done with keys.
      // the rest are args.
      remain.push.apply(remain, args.slice(i + 1))
      args[i] = "--"
      break
    }
    if (arg.charAt(0) === "-") {
      // split "--key=val" into two entries so both paths below see them
      if (arg.indexOf("=") !== -1) {
        var v = arg.split("=")
        arg = v.shift()
        v = v.join("=")
        args.splice.apply(args, [i, 1].concat([arg, v]))
      }
      // see if it's a shorthand
      // if so, splice and back up to re-parse it.
      var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs)
      debug("arg=%j shRes=%j", arg, shRes)
      if (shRes) {
        debug(arg, shRes)
        args.splice.apply(args, [i, 1].concat(shRes))
        if (arg !== shRes[0]) {
          i --
          continue
        }
      }
      // strip leading dashes, then peel off any number of "no-"
      // prefixes, toggling negation each time ("no-no-foo" === "foo")
      arg = arg.replace(/^-+/, "")
      var no = false
      while (arg.toLowerCase().indexOf("no-") === 0) {
        no = !no
        arg = arg.substr(3)
      }

      if (abbrevs[arg]) arg = abbrevs[arg]

      var isArray = types[arg] === Array ||
        Array.isArray(types[arg]) && types[arg].indexOf(Array) !== -1

      var val
        , la = args[i + 1]

      // boolean if negated, declared Boolean, or literal "false"
      // following an option whose type allows null
      var isBool = no ||
        types[arg] === Boolean ||
        Array.isArray(types[arg]) && types[arg].indexOf(Boolean) !== -1 ||
        (la === "false" &&
         (types[arg] === null ||
          Array.isArray(types[arg]) && ~types[arg].indexOf(null)))

      if (isBool) {
        // just set and move along
        val = !no
        // however, also support --bool true or --bool false
        if (la === "true" || la === "false") {
          val = JSON.parse(la)
          la = null
          if (no) val = !val
          i ++
        }

        // also support "foo":[Boolean, "bar"] and "--foo bar"
        if (Array.isArray(types[arg]) && la) {
          if (~types[arg].indexOf(la)) {
            // an explicit type
            val = la
            i ++
          } else if ( la === "null" && ~types[arg].indexOf(null) ) {
            // null allowed
            val = null
            i ++
          } else if ( !la.match(/^-{2,}[^-]/) &&
                      !isNaN(la) &&
                      ~types[arg].indexOf(Number) ) {
            // number
            val = +la
            i ++
          } else if ( !la.match(/^-[^-]/) && ~types[arg].indexOf(String) ) {
            // string
            val = la
            i ++
          }
        }

        if (isArray) (data[arg] = data[arg] || []).push(val)
        else data[arg] = val

        continue
      }

      // a "--" lookahead means the option has no value; back up so the
      // outer loop sees the terminator itself
      if (la && la.match(/^-{2,}$/)) {
        la = undefined
        i --
      }

      val = la === undefined ? true : la
      if (isArray) (data[arg] = data[arg] || []).push(val)
      else data[arg] = val

      i ++
      continue
    }
    remain.push(arg)
  }
}
+
// Expand a (possibly dash-prefixed) shorthand into its argument list.
// Glommed single-char shorthands like "-sgf" are split apart, but only
// when the whole string is made of single-char shorthand names and it is
// not itself an abbreviation of some real option. Returns the expansion
// array, or null when the arg should be treated as a regular option.
function resolveShort (arg, shorthands, shortAbbr, abbrevs) {
  arg = arg.replace(/^-+/, '')

  // An abbreviation of a known option wins over shorthand expansion.
  if (abbrevs[arg] && !shorthands[arg]) {
    return null
  }

  if (shortAbbr[arg]) {
    // unambiguous abbreviation of a shorthand name
    arg = shortAbbr[arg]
  } else {
    // Lazily build (and cache on the shorthands object itself) the set
    // of single-character shorthand names.
    var singles = shorthands.___singles
    if (!singles) {
      singles = {}
      Object.keys(shorthands).forEach(function (s) {
        if (s.length === 1) singles[s] = true
      })
      shorthands.___singles = singles
    }
    var parts = arg.split("").filter(function (c) {
      return singles[c]
    })
    if (parts.join("") === arg) {
      // every char is a single-char shorthand: expand them all in order
      return parts.map(function (c) {
        return shorthands[c]
      }).reduce(function (l, r) {
        return l.concat(r)
      }, [])
    }
  }

  // Normalize string-valued shorthands into arrays, caching the result.
  if (shorthands[arg] && !Array.isArray(shorthands[arg])) {
    shorthands[arg] = shorthands[arg].split(/\s+/)
  }
  return shorthands[arg]
}
+
if (module === require.main) {
// Inline smoke tests: run via `node lib/nopt.js` (the package "test"
// script). Each case is [argv string, expected data subset, expected
// remaining args].
var assert = require("assert")
  , util = require("util")

  // a realistic shorthand table (borrowed from npm's own config)
  , shorthands =
    { s : ["--loglevel", "silent"]
    , d : ["--loglevel", "info"]
    , dd : ["--loglevel", "verbose"]
    , ddd : ["--loglevel", "silly"]
    , noreg : ["--no-registry"]
    , reg : ["--registry"]
    , "no-reg" : ["--no-registry"]
    , silent : ["--loglevel", "silent"]
    , verbose : ["--loglevel", "verbose"]
    , h : ["--usage"]
    , H : ["--usage"]
    , "?" : ["--usage"]
    , help : ["--usage"]
    , v : ["--version"]
    , f : ["--force"]
    , desc : ["--description"]
    , "no-desc" : ["--no-description"]
    , "local" : ["--no-global"]
    , l : ["--long"]
    , p : ["--parseable"]
    , porcelain : ["--parseable"]
    , g : ["--global"]
    }

  // a realistic type spec exercising every typeDef and combination
  , types =
    { aoa: Array
    , nullstream: [null, Stream]
    , date: Date
    , str: String
    , browser : String
    , cache : path
    , color : ["always", Boolean]
    , depth : Number
    , description : Boolean
    , dev : Boolean
    , editor : path
    , force : Boolean
    , global : Boolean
    , globalconfig : path
    , group : [String, Number]
    , gzipbin : String
    , logfd : [Number, Stream]
    , loglevel : ["silent","win","error","warn","info","verbose","silly"]
    , long : Boolean
    , "node-version" : [false, String]
    , npaturl : url
    , npat : Boolean
    , "onload-script" : [false, String]
    , outfd : [Number, Stream]
    , parseable : Boolean
    , pre: Boolean
    , prefix: path
    , proxy : url
    , "rebuild-bundle" : Boolean
    , registry : url
    , searchopts : String
    , searchexclude: [null, String]
    , shell : path
    , t: [Array, String]
    , tag : String
    , tar : String
    , tmp : path
    , "unsafe-perm" : Boolean
    , usage : Boolean
    , user : String
    , username : String
    , userconfig : path
    , version : Boolean
    , viewer: path
    , _exit : Boolean
    }

; [["-v", {version:true}, []]
  ,["---v", {version:true}, []]
  ,["ls -s --no-reg connect -d",
    {loglevel:"info",registry:null},["ls","connect"]]
  ,["ls ---s foo",{loglevel:"silent"},["ls","foo"]]
  ,["ls --registry blargle", {}, ["ls"]]
  ,["--no-registry", {registry:null}, []]
  ,["--no-color true", {color:false}, []]
  ,["--no-color false", {color:true}, []]
  ,["--no-color", {color:false}, []]
  ,["--color false", {color:false}, []]
  ,["--color --logfd 7", {logfd:7,color:true}, []]
  ,["--color=true", {color:true}, []]
  ,["--logfd=10", {logfd:10}, []]
  ,["--tmp=/tmp -tar=gtar",{tmp:"/tmp",tar:"gtar"},[]]
  ,["--tmp=tmp -tar=gtar",
    {tmp:path.resolve(process.cwd(), "tmp"),tar:"gtar"},[]]
  ,["--logfd x", {}, []]
  ,["a -true -- -no-false", {true:true},["a","-no-false"]]
  ,["a -no-false", {false:false},["a"]]
  ,["a -no-no-true", {true:true}, ["a"]]
  ,["a -no-no-no-false", {false:false}, ["a"]]
  ,["---NO-no-No-no-no-no-nO-no-no"+
    "-No-no-no-no-no-no-no-no-no"+
    "-no-no-no-no-NO-NO-no-no-no-no-no-no"+
    "-no-body-can-do-the-boogaloo-like-I-do"
    ,{"body-can-do-the-boogaloo-like-I-do":false}, []]
  ,["we are -no-strangers-to-love "+
    "--you-know the-rules --and so-do-i "+
    "---im-thinking-of=a-full-commitment "+
    "--no-you-would-get-this-from-any-other-guy "+
    "--no-gonna-give-you-up "+
    "-no-gonna-let-you-down=true "+
    "--no-no-gonna-run-around false "+
    "--desert-you=false "+
    "--make-you-cry false "+
    "--no-tell-a-lie "+
    "--no-no-and-hurt-you false"
    ,{"strangers-to-love":false
     ,"you-know":"the-rules"
     ,"and":"so-do-i"
     ,"you-would-get-this-from-any-other-guy":false
     ,"gonna-give-you-up":false
     ,"gonna-let-you-down":false
     ,"gonna-run-around":false
     ,"desert-you":false
     ,"make-you-cry":false
     ,"tell-a-lie":false
     ,"and-hurt-you":false
     },["we", "are"]]
  ,["-t one -t two -t three"
    ,{t: ["one", "two", "three"]}
    ,[]]
  ,["-t one -t null -t three four five null"
    ,{t: ["one", "null", "three"]}
    ,["four", "five", "null"]]
  ,["-t foo"
    ,{t:["foo"]}
    ,[]]
  ,["--no-t"
    ,{t:["false"]}
    ,[]]
  ,["-no-no-t"
    ,{t:["true"]}
    ,[]]
  ,["-aoa one -aoa null -aoa 100"
    ,{aoa:["one", null, 100]}
    ,[]]
  ,["-str 100"
    ,{str:"100"}
    ,[]]
  ,["--color always"
    ,{color:"always"}
    ,[]]
  ,["--no-nullstream"
    ,{nullstream:null}
    ,[]]
  ,["--nullstream false"
    ,{nullstream:null}
    ,[]]
  ,["--notadate 2011-01-25"
    ,{notadate: "2011-01-25"}
    ,[]]
  ,["--date 2011-01-25"
    ,{date: new Date("2011-01-25")}
    ,[]]
  ].forEach(function (test) {
  var argv = test[0].split(/\s+/)
    , opts = test[1]
    , rem = test[2]
    , actual = nopt(types, shorthands, argv, 0)
    , parsed = actual.argv
  delete actual.argv
  console.log(util.inspect(actual, false, 2, true), parsed.remain)
  for (var i in opts) {
    // NOTE(review): `e` comes from JSON.stringify, so it is always a
    // string (or undefined) and the deepEqual branch below never runs.
    var e = JSON.stringify(opts[i])
      , a = JSON.stringify(actual[i] === undefined ? null : actual[i])
    if (e && typeof e === "object") {
      assert.deepEqual(e, a)
    } else {
      assert.equal(e, a)
    }
  }
  assert.deepEqual(rem, parsed.remain)
  })
}
diff --git a/node_modules/nopt/package.json b/node_modules/nopt/package.json
new file mode 100644
index 000000000..d1118e399
--- /dev/null
+++ b/node_modules/nopt/package.json
@@ -0,0 +1,12 @@
+{ "name" : "nopt"
+, "version" : "1.0.10"
+, "description" : "Option parsing for Node, supporting types, shorthands, etc. Used by npm."
+, "author" : "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)"
+, "main" : "lib/nopt.js"
+, "scripts" : { "test" : "node lib/nopt.js" }
+, "repository" : "http://github.com/isaacs/nopt"
+, "bin" : "./bin/nopt.js"
+, "license" :
+ { "type" : "MIT"
+ , "url" : "https://github.com/isaacs/nopt/raw/master/LICENSE" }
+, "dependencies" : { "abbrev" : "1" }}
diff --git a/node_modules/proto-list/LICENSE b/node_modules/proto-list/LICENSE
new file mode 100644
index 000000000..05a401094
--- /dev/null
+++ b/node_modules/proto-list/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/proto-list/README.md b/node_modules/proto-list/README.md
new file mode 100644
index 000000000..43cfa3589
--- /dev/null
+++ b/node_modules/proto-list/README.md
@@ -0,0 +1,3 @@
+A list of objects, bound by their prototype chain.
+
+Used in npm's config stuff.
diff --git a/node_modules/proto-list/package.json b/node_modules/proto-list/package.json
new file mode 100644
index 000000000..5cab34bef
--- /dev/null
+++ b/node_modules/proto-list/package.json
@@ -0,0 +1,9 @@
+{ "name" : "proto-list"
+, "version" : "1.0.0"
+, "description" : "A utility for managing a prototype chain"
+, "main" : "./proto-list.js"
+, "author" : "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)"
+, "scripts" : { "test" : "node proto-list.js" }
+, "repository": { "type": "git", "url": "https://github.com/isaacs/proto-list" }
+, "license": { "type": "MIT", "url": "https://github.com/isaacs/proto-list/blob/master/LICENSE" }
+, "devDependencies" : { "tap" : "0" } }
diff --git a/node_modules/proto-list/proto-list.js b/node_modules/proto-list/proto-list.js
new file mode 100644
index 000000000..759d82738
--- /dev/null
+++ b/node_modules/proto-list/proto-list.js
@@ -0,0 +1,94 @@
+
+module.exports = ProtoList
+
// A stack of plain objects linked together through their prototype
// chains: list[0] delegates to list[1], which delegates to list[2], and
// so on. Reads resolve from the front of the list, falling through to
// later entries; writes always land on the front object.
function ProtoList () { this.list = [] }

ProtoList.prototype =
  { // number of objects currently in the chain
    get length () { return this.list.length }

    // own + inherited enumerable keys visible from the front object
  , get keys () {
      var names = []
      for (var name in this.list[0]) names.push(name)
      return names
    }

    // flatten the whole chain into a single plain object
  , get snapshot () {
      var self = this
      var flat = {}
      this.keys.forEach(function (key) { flat[key] = self.get(key) })
      return flat
    }

    // append obj to the end of the chain (lowest read priority)
  , push : function (obj) {
      // box non-objects so they can take part in a prototype chain
      if (typeof obj !== "object") obj = {valueOf:obj}
      var tail = this.list[this.list.length - 1]
      if (tail) tail.__proto__ = obj
      obj.__proto__ = Object.prototype
      return this.list.push(obj)
    }

    // remove and return the last (lowest priority) object
  , pop : function () {
      var n = this.list.length
      if (n >= 2) this.list[n - 2].__proto__ = Object.prototype
      return this.list.pop()
    }

    // insert obj at the front (highest read priority)
  , unshift : function (obj) {
      obj.__proto__ = this.list[0] || Object.prototype
      return this.list.unshift(obj)
    }

    // remove and return the front object, unhooking it from the chain
  , shift : function () {
      if (this.list.length >= 1) {
        this.list[0].__proto__ = Object.prototype
      }
      return this.list.shift()
    }

    // read a key through the whole chain
  , get : function (key) {
      return this.list[0][key]
    }

    // write a key on the front object. With `save`, an extra empty object
    // is pushed (onto the END of the chain) first when the key already
    // exists on the front object — NOTE(review): push appends rather than
    // shadows, which looks odd, but the original behavior is preserved.
  , set : function (key, val, save) {
      if (!this.length) this.push({})
      if (save && this.list[0].hasOwnProperty(key)) this.push({})
      return this.list[0][key] = val
    }

    // iterate (key, value) pairs visible from the front object
  , forEach : function (fn, thisp) {
      var front = this.list[0]
      for (var key in front) fn.call(thisp, key, front[key])
    }

    // delegate slice/splice straight to the backing array
  , slice : function () {
      return this.list.slice.apply(this.list, arguments)
    }
  , splice : function () {
      return this.list.splice.apply(this.list, arguments)
    }
  }
+
if (module === require.main) {
// Inline self-test: run via `node proto-list.js` (the package "test"
// script). Requires the third-party `tap` framework (a devDependency).

var tap = require("tap")
  , test = tap.test

tap.plan(1)

tap.test("protoList tests", function (t) {
  // set() on a multi-object list writes to the front object
  var p = new ProtoList
  p.push({foo:"bar"})
  p.push({})
  p.set("foo", "baz")
  t.equal(p.get("foo"), "baz")

  // push/pop/shift keep the chain and length consistent
  var p = new ProtoList
  p.push({foo:"bar"})
  p.set("foo", "baz")
  t.equal(p.get("foo"), "baz")
  t.equal(p.length, 1)
  p.pop()
  t.equal(p.length, 0)
  p.set("foo", "asdf")
  t.equal(p.length, 1)
  t.equal(p.get("foo"), "asdf")
  p.push({bar:"baz"})
  t.equal(p.length, 2)
  t.equal(p.get("foo"), "asdf")
  p.shift()
  t.equal(p.length, 1)
  t.equal(p.get("foo"), undefined)
  t.end()
})


}
diff --git a/node_modules/read/README.md b/node_modules/read/README.md
new file mode 100644
index 000000000..9913b4df0
--- /dev/null
+++ b/node_modules/read/README.md
@@ -0,0 +1,43 @@
+For reading user input from stdin.
+
+## USAGE
+
+```javascript
+var read = require("read")
+read(options, callback)
+```
+
+The callback gets called with either the user input, or the default
+specified, or an error, in the traditional `callback(error, result)`
+node style.
+
+## OPTIONS
+
+Every option is optional.
+
+* `prompt` What to write to stdout before reading input.
+* `silent` Don't echo the output as the user types it.
+* `num` Max number of chars to read from terminal.
+* `delim` The char that means we're done. Default: `"\n"`
+* `timeout` Number of ms to wait for user input before giving up.
+* `default` The default value if the user enters nothing.
+
+If silent is true, or num is set, or delim is something other than
+`"\n"`, then read will set raw mode, and read character by character.
+
+At this time, backspace and arrow keys are not supported in raw mode.
+It's probably not too hard to add support for this, perhaps using node's
+built-in readline module.
+
+## CONTRIBUTING
+
+Patches welcome.
+
+## BUGS
+
+In node 0.6.0 through 0.6.5, you must explicitly call
+`process.stdin.destroy()` or `process.exit()` when you know that your
+program is done reading, or else it will keep the event loop running
+forever.
+
+See: <https://github.com/joyent/node/issues/2257>
diff --git a/node_modules/read/lib/read.js b/node_modules/read/lib/read.js
new file mode 100644
index 000000000..246044bcd
--- /dev/null
+++ b/node_modules/read/lib/read.js
@@ -0,0 +1,151 @@
+
+module.exports = read
+
+var buffer = ""
+ , tty = require("tty")
+ , StringDecoder = require("string_decoder").StringDecoder
+
+function read (opts, cb) {
+ if (!cb) cb = opts, opts = {}
+
+ var p = opts.prompt || ""
+ , def = opts.default
+ , silent = opts.silent
+ , timeout = opts.timeout
+ , num = opts.num || null
+ , delim = opts.delim || "\n"
+
+ if (p && def) p += "("+(silent ? "<default hidden>" : def)+") "
+
+ // switching into raw mode is a little bit painful.
+ // avoid if possible.
+ var r = silent || num || delim !== "\n" ? rawRead : normalRead
+
+ if (timeout) {
+ cb = (function (cb) {
+ var called = false
+ var t = setTimeout(function () {
+ tty.setRawMode(false)
+ process.stdout.write("\n")
+ if (def) done(null, def)
+ else done(new Error("timeout"))
+ }, timeout)
+
+ function done (er, data) {
+ clearTimeout(t)
+ if (called) return
+ // stop reading!
+ stdin.pause()
+ called = true
+ cb(er, data)
+ }
+
+ return done
+ })(cb)
+ }
+
+ if (p && !process.stdout.write(p)) {
+ process.stdout.on("drain", function D () {
+ process.stdout.removeListener("drain", D)
+ r(def, timeout, delim, silent, num, cb)
+ })
+ } else {
+ process.nextTick(function () {
+ r(def, timeout, delim, silent, num, cb)
+ })
+ }
+}
+
+function normalRead (def, timeout, delim, silent, num, cb) {
+ var stdin = process.openStdin()
+ , val = ""
+ , decoder = new StringDecoder("utf8")
+
+ stdin.resume()
+ stdin.on("error", cb)
+ stdin.on("data", function D (chunk) {
+ // get the characters that are completed.
+ val += buffer + decoder.write(chunk)
+ buffer = ""
+
+ // \r has no place here.
+ // XXX But what if \r is the delim or something dumb like that?
+ // Meh. If anyone complains about this, deal with it.
+ val = val.replace(/\r/g, "")
+
+ // TODO Make delim configurable
+ if (val.indexOf(delim) !== -1) {
+ // pluck off any delims at the beginning.
+ if (val !== delim) {
+ var i, l
+ for (i = 0, l = val.length; i < l; i ++) {
+ if (val.charAt(i) !== delim) break
+ }
+ if (i !== 0) val = val.substr(i)
+ }
+
+      // buffer whatever might have come *after* the delimiter
+ var delimIndex = val.indexOf(delim)
+ if (delimIndex !== -1) {
+ buffer = val.substr(delimIndex)
+ val = val.substr(0, delimIndex)
+ } else {
+ buffer = ""
+ }
+
+ stdin.pause()
+ stdin.removeListener("data", D)
+ stdin.removeListener("error", cb)
+
+ // read(1) trims
+ val = val.trim() || def
+ cb(null, val)
+ }
+ })
+}
+
+function rawRead (def, timeout, delim, silent, num, cb) {
+ var stdin = process.openStdin()
+ , val = ""
+ , decoder = new StringDecoder
+
+ tty.setRawMode(true)
+ stdin.resume()
+ stdin.on("error", cb)
+ stdin.on("data", function D (c) {
+ // \r is my enemy.
+ c = decoder.write(c).replace(/\r/g, "\n")
+
+ switch (c) {
+ case "": // probably just a \r that was ignored.
+ break
+
+ case "\u0004": // EOF
+ case delim:
+ tty.setRawMode(false)
+ stdin.removeListener("data", D)
+ stdin.removeListener("error", cb)
+ val = val.trim() || def
+ process.stdout.write("\n")
+ stdin.pause()
+ return cb(null, val)
+
+ case "\u0003": case "\0": // ^C or other signal abort
+ tty.setRawMode(false)
+ stdin.removeListener("data", D)
+ stdin.removeListener("error", cb)
+ stdin.pause()
+ return cb(new Error("cancelled"))
+ break
+
+ default: // just a normal char
+ val += buffer + c
+ buffer = ""
+ if (!silent) process.stdout.write(c)
+
+ // explicitly process a delim if we have enough chars.
+ if (num && val.length >= num) D(delim)
+ break
+ }
+ })
+}
diff --git a/node_modules/read/package.json b/node_modules/read/package.json
new file mode 100644
index 000000000..bc05577aa
--- /dev/null
+++ b/node_modules/read/package.json
@@ -0,0 +1,16 @@
+{
+ "name": "read",
+ "version": "0.0.1",
+ "main": "lib/read.js",
+ "dependencies": {},
+ "devDependencies": {},
+ "engines": {
+ "node": ">=0.6"
+ },
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
+ "description": "read(1) for node programs",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/read.git"
+ }
+}
diff --git a/node_modules/request/LICENSE b/node_modules/request/LICENSE
new file mode 100644
index 000000000..a4a9aee0c
--- /dev/null
+++ b/node_modules/request/LICENSE
@@ -0,0 +1,55 @@
+Apache License
+
+Version 2.0, January 2004
+
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+You must give any other recipients of the Work or Derivative Works a copy of this License; and
+
+You must cause any modified files to carry prominent notices stating that You changed the files; and
+
+You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+
+If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS \ No newline at end of file
diff --git a/node_modules/request/README.md b/node_modules/request/README.md
new file mode 100644
index 000000000..4ea89f794
--- /dev/null
+++ b/node_modules/request/README.md
@@ -0,0 +1,286 @@
+# Request -- Simplified HTTP request method
+
+## Install
+
+<pre>
+ npm install request
+</pre>
+
+Or from source:
+
+<pre>
+ git clone git://github.com/mikeal/request.git
+ cd request
+ npm link
+</pre>
+
+## Super simple to use
+
+Request is designed to be the simplest way possible to make http calls. It supports HTTPS and follows redirects by default.
+
+```javascript
+var request = require('request');
+request('http://www.google.com', function (error, response, body) {
+ if (!error && response.statusCode == 200) {
+ console.log(body) // Print the google web page.
+ }
+})
+```
+
+## Streaming
+
+You can stream any response to a file stream.
+
+```javascript
+request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'))
+```
+
+You can also stream a file to a PUT or POST request. This method will also check the file extension against a mapping of file extensions to content-types, in this case `application/json`, and use the proper content-type in the PUT request if one is not already provided in the headers.
+
+```javascript
+fs.createReadStream('file.json').pipe(request.put('http://mysite.com/obj.json'))
+```
+
+Request can also pipe to itself. When doing so the content-type and content-length will be preserved in the PUT headers.
+
+```javascript
+request.get('http://google.com/img.png').pipe(request.put('http://mysite.com/img.png'))
+```
+
+Now let's get fancy.
+
+```javascript
+http.createServer(function (req, resp) {
+ if (req.url === '/doodle.png') {
+ if (req.method === 'PUT') {
+ req.pipe(request.put('http://mysite.com/doodle.png'))
+ } else if (req.method === 'GET' || req.method === 'HEAD') {
+ request.get('http://mysite.com/doodle.png').pipe(resp)
+ }
+ }
+})
+```
+
+You can also pipe() from a http.ServerRequest instance and to a http.ServerResponse instance. The HTTP method and headers will be sent as well as the entity-body data. Which means that, if you don't really care about security, you can do:
+
+```javascript
+http.createServer(function (req, resp) {
+ if (req.url === '/doodle.png') {
+ var x = request('http://mysite.com/doodle.png')
+ req.pipe(x)
+ x.pipe(resp)
+ }
+})
+```
+
+And since pipe() returns the destination stream in node 0.5.x you can do one line proxying :)
+
+```javascript
+req.pipe(request('http://mysite.com/doodle.png')).pipe(resp)
+```
+
+Also, none of this new functionality conflicts with request's previous features, it just expands them.
+
+```javascript
+var r = request.defaults({'proxy':'http://localproxy.com'})
+
+http.createServer(function (req, resp) {
+ if (req.url === '/doodle.png') {
+ r.get('http://google.com/doodle.png').pipe(resp)
+ }
+})
+```
+
+You can still use intermediate proxies, the requests will still follow HTTP forwards, etc.
+
+## OAuth Signing
+
+```javascript
+// Twitter OAuth
+var qs = require('querystring')
+ , oauth =
+ { callback: 'http://mysite.com/callback/'
+ , consumer_key: CONSUMER_KEY
+ , consumer_secret: CONSUMER_SECRET
+ }
+ , url = 'https://api.twitter.com/oauth/request_token'
+ ;
+request.post({url:url, oauth:oauth}, function (e, r, body) {
+  // Assume by some stretch of magic you acquired the verifier
+ var access_token = qs.parse(body)
+ , oauth =
+ { consumer_key: CONSUMER_KEY
+ , consumer_secret: CONSUMER_SECRET
+ , token: access_token.oauth_token
+ , verifier: VERIFIER
+ , token_secret: access_token.oauth_token_secret
+ }
+ , url = 'https://api.twitter.com/oauth/access_token'
+ ;
+ request.post({url:url, oauth:oauth}, function (e, r, body) {
+ var perm_token = qs.parse(body)
+ , oauth =
+ { consumer_key: CONSUMER_KEY
+ , consumer_secret: CONSUMER_SECRET
+ , token: perm_token.oauth_token
+ , token_secret: perm_token.oauth_token_secret
+ }
+ , url = 'https://api.twitter.com/1/users/show.json?'
+ , params =
+ { screen_name: perm_token.screen_name
+ , user_id: perm_token.user_id
+ }
+ ;
+ url += qs.stringify(params)
+ request.get({url:url, oauth:oauth, json:true}, function (e, r, user) {
+ console.log(user)
+ })
+ })
+})
+```
+
+
+
+### request(options, callback)
+
+The first argument can be either a url or an options object. The only required option is uri, all others are optional.
+
+* `uri` || `url` - fully qualified uri or a parsed url object from url.parse()
+* `method` - http method, defaults to GET
+* `headers` - http headers, defaults to {}
+* `body` - entity body for POST and PUT requests. Must be buffer or string.
+* `form` - sets `body` but to querystring representation of value and adds `Content-type: application/x-www-form-urlencoded; charset=utf-8` header.
+* `json` - sets `body` but to JSON representation of value and adds `Content-type: application/json` header.
+* `multipart` - (experimental) array of objects which contains their own headers and `body` attribute. Sends `multipart/related` request. See example below.
+* `followRedirect` - follow HTTP 3xx responses as redirects. defaults to true.
+* `maxRedirects` - the maximum number of redirects to follow, defaults to 10.
+* `onResponse` - If true the callback will be fired on the "response" event instead of "end". If a function it will be called on "response" and not affect the regular semantics of the main callback on "end".
+* `encoding` - Encoding to be used on `setEncoding` of response data. If set to `null`, the body is returned as a Buffer.
+* `pool` - A hash object containing the agents for these requests. If omitted this request will use the global pool which is set to node's default maxSockets.
+* `pool.maxSockets` - Integer containing the maximum amount of sockets in the pool.
+* `timeout` - Integer containing the number of milliseconds to wait for a request to respond before aborting the request
+* `proxy` - An HTTP proxy to be used. Support proxy Auth with Basic Auth the same way it's supported with the `url` parameter by embedding the auth info in the uri.
+* `oauth` - Options for OAuth HMAC-SHA1 signing, see documentation above.
+* `strictSSL` - Set to `true` to require that SSL certificates be valid. Note: to use your own certificate authority, you need to specify an agent that was created with that ca as an option.
+* `jar` - Set to `false` if you don't want cookies to be remembered for future use or define your custom cookie jar (see examples section)
+
+
+The callback argument gets 3 arguments. The first is an error when applicable (usually from the http.Client option not the http.ClientRequest object). The second is an http.ClientResponse object. The third is the response body String or Buffer.
+
+## Convenience methods
+
+There are also shorthand methods for different HTTP METHODs and some other conveniences.
+
+### request.defaults(options)
+
+This method returns a wrapper around the normal request API that defaults to whatever options you pass in to it.
+
+### request.put
+
+Same as request() but defaults to `method: "PUT"`.
+
+```javascript
+request.put(url)
+```
+
+### request.post
+
+Same as request() but defaults to `method: "POST"`.
+
+```javascript
+request.post(url)
+```
+
+### request.head
+
+Same as request() but defaults to `method: "HEAD"`.
+
+```javascript
+request.head(url)
+```
+
+### request.del
+
+Same as request() but defaults to `method: "DELETE"`.
+
+```javascript
+request.del(url)
+```
+
+### request.get
+
+Alias to normal request method for uniformity.
+
+```javascript
+request.get(url)
+```
+### request.cookie
+
+Function that creates a new cookie.
+
+```javascript
+request.cookie('cookie_string_here')
+```
+### request.jar
+
+Function that creates a new cookie jar.
+
+```javascript
+request.jar()
+```
+
+
+## Examples:
+
+```javascript
+ var request = require('request')
+ , rand = Math.floor(Math.random()*100000000).toString()
+ ;
+ request(
+ { method: 'PUT'
+ , uri: 'http://mikeal.iriscouch.com/testjs/' + rand
+ , multipart:
+ [ { 'content-type': 'application/json'
+ , body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})
+ }
+ , { body: 'I am an attachment' }
+ ]
+ }
+ , function (error, response, body) {
+ if(response.statusCode == 201){
+ console.log('document saved as: http://mikeal.iriscouch.com/testjs/'+ rand)
+ } else {
+ console.log('error: '+ response.statusCode)
+ console.log(body)
+ }
+ }
+ )
+```
+Cookies are enabled by default (so they can be used in subsequent requests). To disable cookies set jar to false (either in defaults or in the options sent).
+
+```javascript
+var request = request.defaults({jar: false})
+request('http://www.google.com', function () {
+ request('http://images.google.com')
+})
+```
+
+If you want to use a custom cookie jar (instead of letting request use its own global cookie jar) you do so by setting the jar default or by specifying it as an option:
+
+```javascript
+var j = request.jar()
+var request = request.defaults({jar:j})
+request('http://www.google.com', function () {
+ request('http://images.google.com')
+})
+```
+OR
+
+```javascript
+var j = request.jar()
+var cookie = request.cookie('your_cookie_here')
+j.add(cookie)
+request({url: 'http://www.google.com', jar: j}, function () {
+ request('http://images.google.com')
+})
+```
diff --git a/node_modules/request/forever.js b/node_modules/request/forever.js
new file mode 100644
index 000000000..e6531a21b
--- /dev/null
+++ b/node_modules/request/forever.js
@@ -0,0 +1,84 @@
+module.exports = ForeverAgent
+
+var util = require('util')
+ , Agent = require('http').Agent
+ , net = require('net')
+
+function ForeverAgent(options) {
+ var self = this
+ self.options = options || {}
+ self.requests = {}
+ self.sockets = {}
+ self.freeSockets = {}
+ self.maxSockets = self.options.maxSockets || Agent.defaultMaxSockets
+ self.minSockets = self.options.minSockets || ForeverAgent.defaultMinSockets
+ self.on('free', function(socket, host, port) {
+ var name = host + ':' + port
+ if (self.requests[name] && self.requests[name].length) {
+ self.requests[name].shift().onSocket(socket)
+ } else if (self.sockets[name].length < self.minSockets) {
+ if (!self.freeSockets[name]) self.freeSockets[name] = []
+ self.freeSockets[name].push(socket)
+
+ // if an error happens while we don't use the socket anyway, meh, throw the socket away
+ function onIdleError() {
+ socket.destroy()
+ }
+ socket._onIdleError = onIdleError
+ socket.on('error', onIdleError)
+ } else {
+ // If there are no pending requests just destroy the
+ // socket and it will get removed from the pool. This
+ // gets us out of timeout issues and allows us to
+ // default to Connection:keep-alive.
+ socket.destroy();
+ }
+ })
+ self.createConnection = net.createConnection
+}
+util.inherits(ForeverAgent, Agent)
+
+ForeverAgent.defaultMinSockets = 5
+
+ForeverAgent.prototype.addRequestNoreuse = Agent.prototype.addRequest
+ForeverAgent.prototype.addRequest = function(req, host, port) {
+ var name = host + ':' + port
+ if (this.freeSockets[name] && this.freeSockets[name].length > 0 && !req.useChunkedEncodingByDefault) {
+ var idleSocket = this.freeSockets[name].pop()
+ idleSocket.removeListener('error', idleSocket._onIdleError)
+ delete idleSocket._onIdleError
+ req._reusedSocket = true
+ req.onSocket(idleSocket)
+ } else {
+ this.addRequestNoreuse(req, host, port)
+ }
+}
+
+ForeverAgent.prototype.removeSocket = function(s, name, host, port) {
+ if (this.sockets[name]) {
+ var index = this.sockets[name].indexOf(s);
+ if (index !== -1) {
+ this.sockets[name].splice(index, 1);
+ }
+ } else if (this.sockets[name] && this.sockets[name].length === 0) {
+ // don't leak
+ delete this.sockets[name];
+ delete this.requests[name];
+ }
+
+ if (this.freeSockets[name]) {
+ var index = this.freeSockets[name].indexOf(s)
+ if (index !== -1) {
+ this.freeSockets[name].splice(index, 1)
+ if (this.freeSockets[name].length === 0) {
+ delete this.freeSockets[name]
+ }
+ }
+ }
+
+ if (this.requests[name] && this.requests[name].length) {
+ // If we have pending requests and a socket gets closed a new one
+ // needs to be created to take over in the pool for the one that closed.
+ this.createSocket(name, host, port).emit('free');
+ }
+}
diff --git a/node_modules/request/main.js b/node_modules/request/main.js
new file mode 100644
index 000000000..a25393ec3
--- /dev/null
+++ b/node_modules/request/main.js
@@ -0,0 +1,652 @@
+// Copyright 2010-2011 Mikeal Rogers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+var http = require('http')
+ , https = false
+ , tls = false
+ , url = require('url')
+ , util = require('util')
+ , stream = require('stream')
+ , qs = require('querystring')
+ , mimetypes = require('./mimetypes')
+ , oauth = require('./oauth')
+ , uuid = require('./uuid')
+ , ForeverAgent = require('./forever')
+ , Cookie = require('./vendor/cookie')
+ , CookieJar = require('./vendor/cookie/jar')
+ , cookieJar = new CookieJar
+ ;
+
+if (process.logging) {
+ var log = process.logging('request')
+}
+
+try {
+ https = require('https')
+} catch (e) {}
+
+try {
+ tls = require('tls')
+} catch (e) {}
+
+function toBase64 (str) {
+ return (new Buffer(str || "", "ascii")).toString("base64")
+}
+
+// Hacky fix for pre-0.4.4 https
+if (https && !https.Agent) {
+ https.Agent = function (options) {
+ http.Agent.call(this, options)
+ }
+ util.inherits(https.Agent, http.Agent)
+ https.Agent.prototype._getConnection = function(host, port, cb) {
+ var s = tls.connect(port, host, this.options, function() {
+ // do other checks here?
+ if (cb) cb()
+ })
+ return s
+ }
+}
+
+function isReadStream (rs) {
+ if (rs.readable && rs.path && rs.mode) {
+ return true
+ }
+}
+
+function copy (obj) {
+ var o = {}
+ for (var i in obj) o[i] = obj[i]
+ return o
+}
+
+var isUrl = /^https?:/
+
+var globalPool = {}
+
+function Request (options) {
+ stream.Stream.call(this)
+ this.readable = true
+ this.writable = true
+
+ if (typeof options === 'string') {
+ options = {uri:options}
+ }
+
+ for (var i in options) {
+ this[i] = options[i]
+ }
+ if (!this.pool) this.pool = globalPool
+ this.dests = []
+ this.__isRequestRequest = true
+}
+util.inherits(Request, stream.Stream)
+Request.prototype.getAgent = function (host, port) {
+ if (!this.pool[host+':'+port]) {
+ this.pool[host+':'+port] = new this.httpModule.Agent({host:host, port:port})
+ }
+ return this.pool[host+':'+port]
+}
+Request.prototype.request = function () {
+ var self = this
+
+ // Protect against double callback
+ if (!self._callback && self.callback) {
+ self._callback = self.callback
+ self.callback = function () {
+ if (self._callbackCalled) return // Print a warning maybe?
+ self._callback.apply(self, arguments)
+ self._callbackCalled = true
+ }
+ }
+
+ if (self.url) {
+ // People use this property instead all the time so why not just support it.
+ self.uri = self.url
+ delete self.url
+ }
+
+ if (!self.uri) {
+ throw new Error("options.uri is a required argument")
+ } else {
+ if (typeof self.uri == "string") self.uri = url.parse(self.uri)
+ }
+ if (self.proxy) {
+ if (typeof self.proxy == 'string') self.proxy = url.parse(self.proxy)
+ }
+
+ self._redirectsFollowed = self._redirectsFollowed || 0
+ self.maxRedirects = (self.maxRedirects !== undefined) ? self.maxRedirects : 10
+ self.followRedirect = (self.followRedirect !== undefined) ? self.followRedirect : true
+ if (self.followRedirect)
+ self.redirects = self.redirects || []
+
+ self.headers = self.headers ? copy(self.headers) : {}
+
+ var setHost = false
+ if (!self.headers.host) {
+ self.headers.host = self.uri.hostname
+ if (self.uri.port) {
+ if ( !(self.uri.port === 80 && self.uri.protocol === 'http:') &&
+ !(self.uri.port === 443 && self.uri.protocol === 'https:') )
+ self.headers.host += (':'+self.uri.port)
+ }
+ setHost = true
+ }
+
+ if (self.jar === false) {
+ // disable cookies
+ var cookies = false;
+ self._disableCookies = true;
+ } else if (self.jar) {
+ // fetch cookie from the user defined cookie jar
+ var cookies = self.jar.get({ url: self.uri.href })
+ } else {
+ // fetch cookie from the global cookie jar
+ var cookies = cookieJar.get({ url: self.uri.href })
+ }
+ if (cookies) {
+ var cookieString = cookies.map(function (c) {
+ return c.name + "=" + c.value;
+ }).join("; ");
+
+ self.headers.Cookie = cookieString;
+ }
+
+ if (!self.uri.pathname) {self.uri.pathname = '/'}
+ if (!self.uri.port) {
+ if (self.uri.protocol == 'http:') {self.uri.port = 80}
+ else if (self.uri.protocol == 'https:') {self.uri.port = 443}
+ }
+
+ if (self.proxy) {
+ self.port = self.proxy.port
+ self.host = self.proxy.hostname
+ } else {
+ self.port = self.uri.port
+ self.host = self.uri.hostname
+ }
+
+ if (self.onResponse === true) {
+ self.onResponse = self.callback
+ delete self.callback
+ }
+
+ var clientErrorHandler = function (error) {
+ if (setHost) delete self.headers.host
+ if (self.req._reusedSocket && error.code === 'ECONNRESET') {
+ self.agent = {addRequest: ForeverAgent.prototype.addRequestNoreuse.bind(self.agent)}
+ self.start()
+ self.req.end()
+ return
+ }
+ if (self.timeout && self.timeoutTimer) clearTimeout(self.timeoutTimer)
+ self.emit('error', error)
+ }
+ if (self.onResponse) self.on('error', function (e) {self.onResponse(e)})
+ if (self.callback) self.on('error', function (e) {self.callback(e)})
+
+ if (self.form) {
+ self.headers['content-type'] = 'application/x-www-form-urlencoded; charset=utf-8'
+ self.body = qs.stringify(self.form).toString('utf8')
+ }
+
+ if (self.oauth) {
+ var form
+ if (self.headers['content-type'] &&
+ self.headers['content-type'].slice(0, 'application/x-www-form-urlencoded'.length) ===
+ 'application/x-www-form-urlencoded'
+ ) {
+ form = qs.parse(self.body)
+ }
+ if (self.uri.query) {
+ form = qs.parse(self.uri.query)
+ }
+ if (!form) form = {}
+ var oa = {}
+ for (var i in form) oa[i] = form[i]
+ for (var i in self.oauth) oa['oauth_'+i] = self.oauth[i]
+ if (!oa.oauth_version) oa.oauth_version = '1.0'
+ if (!oa.oauth_timestamp) oa.oauth_timestamp = Math.floor( (new Date()).getTime() / 1000 ).toString()
+ if (!oa.oauth_nonce) oa.oauth_nonce = uuid().replace(/-/g, '')
+
+ oa.oauth_signature_method = 'HMAC-SHA1'
+
+ var consumer_secret = oa.oauth_consumer_secret
+ delete oa.oauth_consumer_secret
+ var token_secret = oa.oauth_token_secret
+ delete oa.oauth_token_secret
+
+ var baseurl = self.uri.protocol + '//' + self.uri.host + self.uri.pathname
+ var signature = oauth.hmacsign(self.method, baseurl, oa, consumer_secret, token_secret)
+
+ // oa.oauth_signature = signature
+ for (var i in form) {
+ if ( i.slice(0, 'oauth_') in self.oauth) {
+ // skip
+ } else {
+ delete oa['oauth_'+i]
+ }
+ }
+ self.headers.authorization =
+ 'OAuth '+Object.keys(oa).sort().map(function (i) {return i+'="'+oauth.rfc3986(oa[i])+'"'}).join(',')
+ self.headers.authorization += ',oauth_signature="'+oauth.rfc3986(signature)+'"'
+ }
+
+ if (self.uri.auth && !self.headers.authorization) {
+ self.headers.authorization = "Basic " + toBase64(self.uri.auth.split(':').map(function(item){ return qs.unescape(item)}).join(':'))
+ }
+ if (self.proxy && self.proxy.auth && !self.headers['proxy-authorization']) {
+ self.headers['proxy-authorization'] = "Basic " + toBase64(self.proxy.auth.split(':').map(function(item){ return qs.unescape(item)}).join(':'))
+ }
+
+ if (self.uri.path) {
+ self.path = self.uri.path
+ } else {
+ self.path = self.uri.pathname + (self.uri.search || "")
+ }
+
+ if (self.path.length === 0) self.path = '/'
+
+ if (self.proxy) self.path = (self.uri.protocol + '//' + self.uri.host + self.path)
+
+ if (self.json) {
+ self.headers['content-type'] = 'application/json'
+ if (typeof self.json === 'boolean') {
+ if (typeof self.body === 'object') self.body = JSON.stringify(self.body)
+ } else {
+ self.body = JSON.stringify(self.json)
+ }
+
+ } else if (self.multipart) {
+ self.body = []
+
+ if (!self.headers['content-type']) {
+ self.headers['content-type'] = 'multipart/related;boundary="frontier"';
+ } else {
+ self.headers['content-type'] = self.headers['content-type'].split(';')[0] + ';boundary="frontier"';
+ }
+
+ if (!self.multipart.forEach) throw new Error('Argument error, options.multipart.')
+
+ self.multipart.forEach(function (part) {
+ var body = part.body
+ if(!body) throw Error('Body attribute missing in multipart.')
+ delete part.body
+ var preamble = '--frontier\r\n'
+ Object.keys(part).forEach(function(key){
+ preamble += key + ': ' + part[key] + '\r\n'
+ })
+ preamble += '\r\n'
+ self.body.push(new Buffer(preamble))
+ self.body.push(new Buffer(body))
+ self.body.push(new Buffer('\r\n'))
+ })
+ self.body.push(new Buffer('--frontier--'))
+ }
+
+ if (self.body) {
+ var length = 0
+ if (!Buffer.isBuffer(self.body)) {
+ if (Array.isArray(self.body)) {
+ for (var i = 0; i < self.body.length; i++) {
+ length += self.body[i].length
+ }
+ } else {
+ self.body = new Buffer(self.body)
+ length = self.body.length
+ }
+ } else {
+ length = self.body.length
+ }
+ if (length) {
+ self.headers['content-length'] = length
+ } else {
+ throw new Error('Argument error, options.body.')
+ }
+ }
+
+ var protocol = self.proxy ? self.proxy.protocol : self.uri.protocol
+ , defaultModules = {'http:':http, 'https:':https}
+ , httpModules = self.httpModules || {}
+ ;
+ self.httpModule = httpModules[protocol] || defaultModules[protocol]
+
+ if (!self.httpModule) throw new Error("Invalid protocol")
+
+ if (self.pool === false) {
+ self.agent = false
+ } else {
+ if (self.maxSockets) {
+ // Don't use our pooling if node has the refactored client
+ self.agent = self.agent || self.httpModule.globalAgent || self.getAgent(self.host, self.port)
+ self.agent.maxSockets = self.maxSockets
+ }
+ if (self.pool.maxSockets) {
+ // Don't use our pooling if node has the refactored client
+ self.agent = self.agent || self.httpModule.globalAgent || self.getAgent(self.host, self.port)
+ self.agent.maxSockets = self.pool.maxSockets
+ }
+ }
+
+ self.start = function () {
+ self._started = true
+ self.method = self.method || 'GET'
+ self.href = self.uri.href
+ if (log) log('%method %href', self)
+ self.req = self.httpModule.request(self, function (response) {
+ self.response = response
+ response.request = self
+
+ if (self.httpModule === https &&
+ self.strictSSL &&
+ !response.client.authorized) {
+ var sslErr = response.client.authorizationError
+ self.emit('error', new Error('SSL Error: '+ sslErr))
+ return
+ }
+
+ if (setHost) delete self.headers.host
+ if (self.timeout && self.timeoutTimer) clearTimeout(self.timeoutTimer)
+
+ if (response.headers['set-cookie'] && (!self._disableCookies)) {
+ response.headers['set-cookie'].forEach(function(cookie) {
+ if (self.jar) self.jar.add(new Cookie(cookie))
+ else cookieJar.add(new Cookie(cookie))
+ })
+ }
+
+ if (response.statusCode >= 300 &&
+ response.statusCode < 400 &&
+ self.followRedirect &&
+ self.method !== 'PUT' &&
+ self.method !== 'POST' &&
+ response.headers.location) {
+ if (self._redirectsFollowed >= self.maxRedirects) {
+ self.emit('error', new Error("Exceeded maxRedirects. Probably stuck in a redirect loop."))
+ return
+ }
+ self._redirectsFollowed += 1
+
+ if (!isUrl.test(response.headers.location)) {
+ response.headers.location = url.resolve(self.uri.href, response.headers.location)
+ }
+ self.uri = response.headers.location
+ self.redirects.push(
+ { statusCode : response.statusCode
+ , redirectUri: response.headers.location
+ }
+ )
+ delete self.req
+ delete self.agent
+ delete self._started
+ if (self.headers) {
+ delete self.headers.host
+ }
+ if (log) log('Redirect to %uri', self)
+ request(self, self.callback)
+ return // Ignore the rest of the response
+ } else {
+ self._redirectsFollowed = self._redirectsFollowed || 0
+ // Be a good stream and emit end when the response is finished.
+ // Hack to emit end on close because of a core bug that never fires end
+ response.on('close', function () {
+ if (!self._ended) self.response.emit('end')
+ })
+
+ if (self.encoding) {
+ if (self.dests.length !== 0) {
+ console.error("Ingoring encoding parameter as this stream is being piped to another stream which makes the encoding option invalid.")
+ } else {
+ response.setEncoding(self.encoding)
+ }
+ }
+
+ self.pipeDest = function (dest) {
+ if (dest.headers) {
+ dest.headers['content-type'] = response.headers['content-type']
+ if (response.headers['content-length']) {
+ dest.headers['content-length'] = response.headers['content-length']
+ }
+ }
+ if (dest.setHeader) {
+ for (var i in response.headers) {
+ dest.setHeader(i, response.headers[i])
+ }
+ dest.statusCode = response.statusCode
+ }
+ if (self.pipefilter) self.pipefilter(response, dest)
+ }
+
+ self.dests.forEach(function (dest) {
+ self.pipeDest(dest)
+ })
+
+ response.on("data", function (chunk) {
+ self._destdata = true
+ self.emit("data", chunk)
+ })
+ response.on("end", function (chunk) {
+ self._ended = true
+ self.emit("end", chunk)
+ })
+ response.on("close", function () {self.emit("close")})
+
+ self.emit('response', response)
+
+ if (self.onResponse) {
+ self.onResponse(null, response)
+ }
+ if (self.callback) {
+ var buffer = []
+ var bodyLen = 0
+ self.on("data", function (chunk) {
+ buffer.push(chunk)
+ bodyLen += chunk.length
+ })
+ self.on("end", function () {
+ if (buffer.length && Buffer.isBuffer(buffer[0])) {
+ var body = new Buffer(bodyLen)
+ var i = 0
+ buffer.forEach(function (chunk) {
+ chunk.copy(body, i, 0, chunk.length)
+ i += chunk.length
+ })
+ if (self.encoding === null) {
+ response.body = body
+ } else {
+ response.body = body.toString()
+ }
+ } else if (buffer.length) {
+ response.body = buffer.join('')
+ }
+
+ if (self.json) {
+ try {
+ response.body = JSON.parse(response.body)
+ } catch (e) {}
+ }
+
+ self.callback(null, response, response.body)
+ })
+ }
+ }
+ })
+
+ if (self.timeout && !self.timeoutTimer) {
+ self.timeoutTimer = setTimeout(function() {
+ self.req.abort()
+ var e = new Error("ETIMEDOUT")
+ e.code = "ETIMEDOUT"
+ self.emit("error", e)
+ }, self.timeout)
+ }
+
+ self.req.on('error', clientErrorHandler)
+ }
+
+ self.once('pipe', function (src) {
+ if (self.ntick) throw new Error("You cannot pipe to this stream after the first nextTick() after creation of the request stream.")
+ self.src = src
+ if (isReadStream(src)) {
+ if (!self.headers['content-type'] && !self.headers['Content-Type'])
+ self.headers['content-type'] = mimetypes.lookup(src.path.slice(src.path.lastIndexOf('.')+1))
+ } else {
+ if (src.headers) {
+ for (var i in src.headers) {
+ if (!self.headers[i]) {
+ self.headers[i] = src.headers[i]
+ }
+ }
+ }
+ if (src.method && !self.method) {
+ self.method = src.method
+ }
+ }
+
+ self.on('pipe', function () {
+ console.error("You have already piped to this stream. Pipeing twice is likely to break the request.")
+ })
+ })
+
+ process.nextTick(function () {
+ if (self.body) {
+ if (Array.isArray(self.body)) {
+ self.body.forEach(function(part) {
+ self.write(part)
+ })
+ } else {
+ self.write(self.body)
+ }
+ self.end()
+ } else if (self.requestBodyStream) {
+ console.warn("options.requestBodyStream is deprecated, please pass the request object to stream.pipe.")
+ self.requestBodyStream.pipe(self)
+ } else if (!self.src) {
+ self.headers['content-length'] = 0
+ self.end()
+ }
+ self.ntick = true
+ })
+}
+Request.prototype.pipe = function (dest) {
+ if (this.response) {
+ if (this._destdata) {
+ throw new Error("You cannot pipe after data has been emitted from the response.")
+ } else if (this._ended) {
+ throw new Error("You cannot pipe after the response has been ended.")
+ } else {
+ stream.Stream.prototype.pipe.call(this, dest)
+ this.pipeDest(dest)
+ return dest
+ }
+ } else {
+ this.dests.push(dest)
+ stream.Stream.prototype.pipe.call(this, dest)
+ return dest
+ }
+}
+Request.prototype.write = function () {
+ if (!this._started) this.start()
+ if (!this.req) throw new Error("This request has been piped before http.request() was called.")
+ this.req.write.apply(this.req, arguments)
+}
+Request.prototype.end = function () {
+ if (!this._started) this.start()
+ if (!this.req) throw new Error("This request has been piped before http.request() was called.")
+ this.req.end.apply(this.req, arguments)
+}
+Request.prototype.pause = function () {
+ if (!this.response) throw new Error("This request has been piped before http.request() was called.")
+ this.response.pause.apply(this.response, arguments)
+}
+Request.prototype.resume = function () {
+ if (!this.response) throw new Error("This request has been piped before http.request() was called.")
+ this.response.resume.apply(this.response, arguments)
+}
+
+function request (options, callback) {
+ if (typeof options === 'string') options = {uri:options}
+ if (callback) options.callback = callback
+ var r = new Request(options)
+ r.request()
+ return r
+}
+
+module.exports = request
+
+request.defaults = function (options) {
+ var def = function (method) {
+ var d = function (opts, callback) {
+ if (typeof opts === 'string') opts = {uri:opts}
+ for (var i in options) {
+ if (opts[i] === undefined) opts[i] = options[i]
+ }
+ return method(opts, callback)
+ }
+ return d
+ }
+ var de = def(request)
+ de.get = def(request.get)
+ de.post = def(request.post)
+ de.put = def(request.put)
+ de.head = def(request.head)
+ de.del = def(request.del)
+ de.cookie = def(request.cookie)
+ de.jar = def(request.jar)
+ return de
+}
+
+request.forever = function (agentOptions, optionsArg) {
+ var options = {}
+ if (agentOptions) {
+ for (option in optionsArg) {
+ options[option] = optionsArg[option]
+ }
+ }
+ options.agent = new ForeverAgent(agentOptions)
+ return request.defaults(options)
+}
+
+request.get = request
+request.post = function (options, callback) {
+ if (typeof options === 'string') options = {uri:options}
+ options.method = 'POST'
+ return request(options, callback)
+}
+request.put = function (options, callback) {
+ if (typeof options === 'string') options = {uri:options}
+ options.method = 'PUT'
+ return request(options, callback)
+}
+request.head = function (options, callback) {
+ if (typeof options === 'string') options = {uri:options}
+ options.method = 'HEAD'
+ if (options.body || options.requestBodyStream || options.json || options.multipart) {
+ throw new Error("HTTP HEAD requests MUST NOT include a request body.")
+ }
+ return request(options, callback)
+}
+request.del = function (options, callback) {
+ if (typeof options === 'string') options = {uri:options}
+ options.method = 'DELETE'
+ return request(options, callback)
+}
+request.jar = function () {
+ return new CookieJar
+}
+request.cookie = function (str) {
+ if (typeof str !== 'string') throw new Error("The cookie function only accepts STRING as param")
+ return new Cookie(str)
+}
diff --git a/node_modules/request/mimetypes.js b/node_modules/request/mimetypes.js
new file mode 100644
index 000000000..86910064c
--- /dev/null
+++ b/node_modules/request/mimetypes.js
@@ -0,0 +1,146 @@
+// from http://github.com/felixge/node-paperboy
+exports.types = {
+ "aiff":"audio/x-aiff",
+ "arj":"application/x-arj-compressed",
+ "asf":"video/x-ms-asf",
+ "asx":"video/x-ms-asx",
+ "au":"audio/ulaw",
+ "avi":"video/x-msvideo",
+ "bcpio":"application/x-bcpio",
+ "ccad":"application/clariscad",
+ "cod":"application/vnd.rim.cod",
+ "com":"application/x-msdos-program",
+ "cpio":"application/x-cpio",
+ "cpt":"application/mac-compactpro",
+ "csh":"application/x-csh",
+ "css":"text/css",
+ "deb":"application/x-debian-package",
+ "dl":"video/dl",
+ "doc":"application/msword",
+ "drw":"application/drafting",
+ "dvi":"application/x-dvi",
+ "dwg":"application/acad",
+ "dxf":"application/dxf",
+ "dxr":"application/x-director",
+ "etx":"text/x-setext",
+ "ez":"application/andrew-inset",
+ "fli":"video/x-fli",
+ "flv":"video/x-flv",
+ "gif":"image/gif",
+ "gl":"video/gl",
+ "gtar":"application/x-gtar",
+ "gz":"application/x-gzip",
+ "hdf":"application/x-hdf",
+ "hqx":"application/mac-binhex40",
+ "html":"text/html",
+ "ice":"x-conference/x-cooltalk",
+ "ico":"image/x-icon",
+ "ief":"image/ief",
+ "igs":"model/iges",
+ "ips":"application/x-ipscript",
+ "ipx":"application/x-ipix",
+ "jad":"text/vnd.sun.j2me.app-descriptor",
+ "jar":"application/java-archive",
+ "jpeg":"image/jpeg",
+ "jpg":"image/jpeg",
+ "js":"text/javascript",
+ "json":"application/json",
+ "latex":"application/x-latex",
+ "lsp":"application/x-lisp",
+ "lzh":"application/octet-stream",
+ "m":"text/plain",
+ "m3u":"audio/x-mpegurl",
+ "man":"application/x-troff-man",
+ "me":"application/x-troff-me",
+ "midi":"audio/midi",
+ "mif":"application/x-mif",
+ "mime":"www/mime",
+ "movie":"video/x-sgi-movie",
+ "mustache":"text/plain",
+ "mp4":"video/mp4",
+ "mpg":"video/mpeg",
+ "mpga":"audio/mpeg",
+ "ms":"application/x-troff-ms",
+ "nc":"application/x-netcdf",
+ "oda":"application/oda",
+ "ogm":"application/ogg",
+ "pbm":"image/x-portable-bitmap",
+ "pdf":"application/pdf",
+ "pgm":"image/x-portable-graymap",
+ "pgn":"application/x-chess-pgn",
+ "pgp":"application/pgp",
+ "pm":"application/x-perl",
+ "png":"image/png",
+ "pnm":"image/x-portable-anymap",
+ "ppm":"image/x-portable-pixmap",
+ "ppz":"application/vnd.ms-powerpoint",
+ "pre":"application/x-freelance",
+ "prt":"application/pro_eng",
+ "ps":"application/postscript",
+ "qt":"video/quicktime",
+ "ra":"audio/x-realaudio",
+ "rar":"application/x-rar-compressed",
+ "ras":"image/x-cmu-raster",
+ "rgb":"image/x-rgb",
+ "rm":"audio/x-pn-realaudio",
+ "rpm":"audio/x-pn-realaudio-plugin",
+ "rtf":"text/rtf",
+ "rtx":"text/richtext",
+ "scm":"application/x-lotusscreencam",
+ "set":"application/set",
+ "sgml":"text/sgml",
+ "sh":"application/x-sh",
+ "shar":"application/x-shar",
+ "silo":"model/mesh",
+ "sit":"application/x-stuffit",
+ "skt":"application/x-koan",
+ "smil":"application/smil",
+ "snd":"audio/basic",
+ "sol":"application/solids",
+ "spl":"application/x-futuresplash",
+ "src":"application/x-wais-source",
+ "stl":"application/SLA",
+ "stp":"application/STEP",
+ "sv4cpio":"application/x-sv4cpio",
+ "sv4crc":"application/x-sv4crc",
+ "svg":"image/svg+xml",
+ "swf":"application/x-shockwave-flash",
+ "tar":"application/x-tar",
+ "tcl":"application/x-tcl",
+ "tex":"application/x-tex",
+ "texinfo":"application/x-texinfo",
+ "tgz":"application/x-tar-gz",
+ "tiff":"image/tiff",
+ "tr":"application/x-troff",
+ "tsi":"audio/TSP-audio",
+ "tsp":"application/dsptype",
+ "tsv":"text/tab-separated-values",
+ "unv":"application/i-deas",
+ "ustar":"application/x-ustar",
+ "vcd":"application/x-cdlink",
+ "vda":"application/vda",
+ "vivo":"video/vnd.vivo",
+ "vrm":"x-world/x-vrml",
+ "wav":"audio/x-wav",
+ "wax":"audio/x-ms-wax",
+ "wma":"audio/x-ms-wma",
+ "wmv":"video/x-ms-wmv",
+ "wmx":"video/x-ms-wmx",
+ "wrl":"model/vrml",
+ "wvx":"video/x-ms-wvx",
+ "xbm":"image/x-xbitmap",
+ "xlw":"application/vnd.ms-excel",
+ "xml":"text/xml",
+ "xpm":"image/x-xpixmap",
+ "xwd":"image/x-xwindowdump",
+ "xyz":"chemical/x-pdb",
+ "zip":"application/zip",
+};
+
+exports.lookup = function(ext, defaultType) {
+ defaultType = defaultType || 'application/octet-stream';
+
+ return (ext in exports.types)
+ ? exports.types[ext]
+ : defaultType;
+}; \ No newline at end of file
diff --git a/node_modules/request/oauth.js b/node_modules/request/oauth.js
new file mode 100644
index 000000000..25db66977
--- /dev/null
+++ b/node_modules/request/oauth.js
@@ -0,0 +1,34 @@
+var crypto = require('crypto')
+ , qs = require('querystring')
+ ;
+
+function sha1 (key, body) {
+ return crypto.createHmac('sha1', key).update(body).digest('base64')
+}
+
+function rfc3986 (str) {
+ return encodeURIComponent(str)
+ .replace('!','%21')
+ .replace('*','%2A')
+ .replace('(','%28')
+ .replace(')','%29')
+ .replace("'",'%27')
+ ;
+}
+
+function hmacsign (httpMethod, base_uri, params, consumer_secret, token_secret, body) {
+ // adapted from https://dev.twitter.com/docs/auth/oauth
+ var base =
+ httpMethod + "&" +
+ encodeURIComponent( base_uri ) + "&" +
+ Object.keys(params).sort().map(function (i) {
+ // big WTF here with the escape + encoding but it's what twitter wants
+ return escape(rfc3986(i)) + "%3D" + escape(rfc3986(params[i]))
+ }).join("%26")
+ var key = consumer_secret + '&'
+ if (token_secret) key += token_secret
+ return sha1(key, base)
+}
+
+exports.hmacsign = hmacsign
+exports.rfc3986 = rfc3986 \ No newline at end of file
diff --git a/node_modules/request/package.json b/node_modules/request/package.json
new file mode 100644
index 000000000..e7b899a8b
--- /dev/null
+++ b/node_modules/request/package.json
@@ -0,0 +1,15 @@
+{ "name" : "request"
+, "description" : "Simplified HTTP request client."
+, "tags" : ["http", "simple", "util", "utility"]
+, "version" : "2.9.3"
+, "author" : "Mikeal Rogers <mikeal.rogers@gmail.com>"
+, "repository" :
+ { "type" : "git"
+ , "url" : "http://github.com/mikeal/request.git"
+ }
+, "bugs" :
+ { "url" : "http://github.com/mikeal/request/issues" }
+, "engines" : ["node >= 0.3.6"]
+, "main" : "./main"
+, "scripts": { "test": "bash tests/run.sh" }
+}
diff --git a/node_modules/request/uuid.js b/node_modules/request/uuid.js
new file mode 100644
index 000000000..1d83bd50a
--- /dev/null
+++ b/node_modules/request/uuid.js
@@ -0,0 +1,19 @@
+module.exports = function () {
+ var s = [], itoh = '0123456789ABCDEF';
+
+ // Make array of random hex digits. The UUID only has 32 digits in it, but we
+ // allocate an extra items to make room for the '-'s we'll be inserting.
+ for (var i = 0; i <36; i++) s[i] = Math.floor(Math.random()*0x10);
+
+ // Conform to RFC-4122, section 4.4
+ s[14] = 4; // Set 4 high bits of time_high field to version
+ s[19] = (s[19] & 0x3) | 0x8; // Specify 2 high bits of clock sequence
+
+ // Convert to hex chars
+ for (var i = 0; i <36; i++) s[i] = itoh[s[i]];
+
+ // Insert '-'s
+ s[8] = s[13] = s[18] = s[23] = '-';
+
+ return s.join('');
+}
diff --git a/node_modules/request/vendor/cookie/index.js b/node_modules/request/vendor/cookie/index.js
new file mode 100644
index 000000000..1eb2eaa22
--- /dev/null
+++ b/node_modules/request/vendor/cookie/index.js
@@ -0,0 +1,60 @@
+/*!
+ * Tobi - Cookie
+ * Copyright(c) 2010 LearnBoost <dev@learnboost.com>
+ * MIT Licensed
+ */
+
+/**
+ * Module dependencies.
+ */
+
+var url = require('url');
+
+/**
+ * Initialize a new `Cookie` with the given cookie `str` and `req`.
+ *
+ * @param {String} str
+ * @param {IncomingRequest} req
+ * @api private
+ */
+
+var Cookie = exports = module.exports = function Cookie(str, req) {
+ this.str = str;
+
+ // First key is the name
+ this.name = str.substr(0, str.indexOf('=')).trim();
+
+ // Map the key/val pairs
+ str.split(/ *; */).reduce(function(obj, pair){
+ var p = pair.indexOf('=');
+ if(p > 0)
+ obj[pair.substring(0, p).trim()] = pair.substring(p + 1).trim();
+ else
+ obj[pair.trim()] = true;
+ return obj;
+ }, this);
+
+ // Assign value
+ this.value = this[this.name];
+
+ // Expires
+ this.expires = this.expires
+ ? new Date(this.expires)
+ : Infinity;
+
+ // Default or trim path
+ this.path = this.path
+ ? this.path.trim(): req
+ ? url.parse(req.url).pathname: '/';
+};
+
+/**
+ * Return the original cookie string.
+ *
+ * @return {String}
+ * @api public
+ */
+
+Cookie.prototype.toString = function(){
+ return this.str;
+};
diff --git a/node_modules/request/vendor/cookie/jar.js b/node_modules/request/vendor/cookie/jar.js
new file mode 100644
index 000000000..34920e062
--- /dev/null
+++ b/node_modules/request/vendor/cookie/jar.js
@@ -0,0 +1,72 @@
+/*!
+* Tobi - CookieJar
+* Copyright(c) 2010 LearnBoost <dev@learnboost.com>
+* MIT Licensed
+*/
+
+/**
+* Module dependencies.
+*/
+
+var url = require('url');
+
+/**
+* Initialize a new `CookieJar`.
+*
+* @api private
+*/
+
+var CookieJar = exports = module.exports = function CookieJar() {
+ this.cookies = [];
+};
+
+/**
+* Add the given `cookie` to the jar.
+*
+* @param {Cookie} cookie
+* @api private
+*/
+
+CookieJar.prototype.add = function(cookie){
+ this.cookies = this.cookies.filter(function(c){
+ // Avoid duplication (same path, same name)
+ return !(c.name == cookie.name && c.path == cookie.path);
+ });
+ this.cookies.push(cookie);
+};
+
+/**
+* Get cookies for the given `req`.
+*
+* @param {IncomingRequest} req
+* @return {Array}
+* @api private
+*/
+
+CookieJar.prototype.get = function(req){
+ var path = url.parse(req.url).pathname
+ , now = new Date
+ , specificity = {};
+ return this.cookies.filter(function(cookie){
+ if (0 == path.indexOf(cookie.path) && now < cookie.expires
+ && cookie.path.length > (specificity[cookie.name] || 0))
+ return specificity[cookie.name] = cookie.path.length;
+ });
+};
+
+/**
+* Return Cookie string for the given `req`.
+*
+* @param {IncomingRequest} req
+* @return {String}
+* @api private
+*/
+
+CookieJar.prototype.cookieString = function(req){
+ var cookies = this.get(req);
+ if (cookies.length) {
+ return cookies.map(function(cookie){
+ return cookie.name + '=' + cookie.value;
+ }).join('; ');
+ }
+};
diff --git a/node_modules/rimraf/AUTHORS b/node_modules/rimraf/AUTHORS
new file mode 100644
index 000000000..008cbe7dd
--- /dev/null
+++ b/node_modules/rimraf/AUTHORS
@@ -0,0 +1,5 @@
+# Authors sorted by whether or not they're me.
+Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)
+Wayne Larsen <wayne@larsen.st> (http://github.com/wvl)
+ritch <skawful@gmail.com>
+Marcel Laverdet
diff --git a/node_modules/rimraf/LICENSE b/node_modules/rimraf/LICENSE
new file mode 100644
index 000000000..05a401094
--- /dev/null
+++ b/node_modules/rimraf/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/rimraf/README.md b/node_modules/rimraf/README.md
new file mode 100644
index 000000000..99983dc43
--- /dev/null
+++ b/node_modules/rimraf/README.md
@@ -0,0 +1,32 @@
+A `rm -rf` for node.
+
+Install with `npm install rimraf`, or just drop rimraf.js somewhere.
+
+## API
+
+`rimraf(f, [options,] callback)`
+
+The callback will be called with an error if there is one. Certain
+errors are handled for you:
+
+* `EBUSY` - rimraf will back off a maximum of opts.maxBusyTries times
+ before giving up.
+* `EMFILE` - If too many file descriptors get opened, rimraf will
+ patiently wait until more become available.
+
+## Options
+
+The options object is optional. These fields are respected:
+
+* `maxBusyTries` - The number of times to retry a file or folder in the
+ event of an `EBUSY` error. The default is 3.
+* `gently` - If provided a `gently` path, then rimraf will only delete
+ files and folders that are beneath this path, and only delete symbolic
+ links that point to a place within this path. (This is very important
+ to npm's use-case, and shows rimraf's pedigree.)
+
+
+## rimraf.sync
+
+It can remove stuff synchronously, too. But that's not so good. Use
+the async API. It's better.
diff --git a/node_modules/rimraf/fiber.js b/node_modules/rimraf/fiber.js
new file mode 100644
index 000000000..8812a6b44
--- /dev/null
+++ b/node_modules/rimraf/fiber.js
@@ -0,0 +1,86 @@
+// fiber/future port originally written by Marcel Laverdet
+// https://gist.github.com/1131093
+// I updated it to bring it to feature parity with the cb version.
+// The bugs are probably mine, not Marcel's.
+// -- isaacs
+
+var path = require('path')
+ , fs = require('fs')
+ , Future = require('fibers/future')
+
+// Create future-returning fs functions
+var fs2 = {}
+for (var ii in fs) {
+ fs2[ii] = Future.wrap(fs[ii])
+}
+
+// Return a future which just pauses for a certain amount of time
+
+function timer (ms) {
+ var future = new Future
+ setTimeout(function () {
+ future.return()
+ }, ms)
+ return future
+}
+
+function realish (p) {
+  return path.resolve(path.dirname(p), fs2.readlink(p).wait())
+}
+
+// for EMFILE backoff.
+var timeout = 0
+ , EMFILE_MAX = 1000
+
+function rimraf_ (p, opts) {
+ opts = opts || {}
+ opts.maxBusyTries = opts.maxBusyTries || 3
+ if (opts.gently) opts.gently = path.resolve(opts.gently)
+ var busyTries = 0
+
+ // exits by throwing or returning.
+ // loops on handled errors.
+ while (true) {
+ try {
+ var stat = fs2.lstat(p).wait()
+
+ // check to make sure that symlinks are ours.
+ if (opts.gently) {
+ var rp = stat.isSymbolicLink() ? realish(p) : path.resolve(p)
+ if (rp.indexOf(opts.gently) !== 0) {
+ var er = new Error("Refusing to delete: "+p+" not in "+opts.gently)
+ er.errno = require("constants").EEXIST
+ er.code = "EEXIST"
+ er.path = p
+ throw er
+ }
+ }
+
+ if (!stat.isDirectory()) return fs2.unlink(p).wait()
+
+ var rimrafs = fs2.readdir(p).wait().map(function (file) {
+ return rimraf(path.join(p, file), opts)
+ })
+
+ Future.wait(rimrafs)
+ fs2.rmdir(p).wait()
+ timeout = 0
+ return
+
+ } catch (er) {
+ if (er.message.match(/^EMFILE/) && timeout < EMFILE_MAX) {
+ timer(timeout++).wait()
+ } else if (er.message.match(/^EBUSY/)
+                 && busyTries < opts.maxBusyTries) {
+ timer(++busyTries * 100).wait()
+ } else if (er.message.match(/^ENOENT/)) {
+ // already gone
+ return
+ } else {
+ throw er
+ }
+ }
+ }
+}
+
+var rimraf = module.exports = rimraf_.future()
diff --git a/node_modules/rimraf/package.json b/node_modules/rimraf/package.json
new file mode 100644
index 000000000..2b69536ef
--- /dev/null
+++ b/node_modules/rimraf/package.json
@@ -0,0 +1,9 @@
+{"name":"rimraf"
+,"version":"1.0.9"
+,"main":"rimraf.js"
+,"description":"A deep deletion module for node (like `rm -rf`)"
+,"author":"Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)"
+,"license":
+ {"type":"MIT", "url": "https://github.com/isaacs/rimraf/raw/master/LICENSE"}
+,"repository":"git://github.com/isaacs/rimraf.git"
+,"scripts":{"test":"cd test && bash run.sh"}}
diff --git a/node_modules/rimraf/rimraf.js b/node_modules/rimraf/rimraf.js
new file mode 100644
index 000000000..e8104e9e4
--- /dev/null
+++ b/node_modules/rimraf/rimraf.js
@@ -0,0 +1,145 @@
+module.exports = rimraf
+rimraf.sync = rimrafSync
+
+var path = require("path")
+ , fs
+
+try {
+ // optional dependency
+ fs = require("graceful-fs")
+} catch (er) {
+ fs = require("fs")
+}
+
+var lstat = process.platform === "win32" ? "stat" : "lstat"
+ , lstatSync = lstat + "Sync"
+
+// for EMFILE handling
+var timeout = 0
+ , EMFILE_MAX = 1000
+
+function rimraf (p, opts, cb) {
+ if (typeof opts === "function") cb = opts, opts = {}
+
+ if (!cb) throw new Error("No callback passed to rimraf()")
+ if (!opts) opts = {}
+
+ var busyTries = 0
+ opts.maxBusyTries = opts.maxBusyTries || 3
+
+ if (opts.gently) opts.gently = path.resolve(opts.gently)
+
+ rimraf_(p, opts, function CB (er) {
+ if (er) {
+ if (er.code === "EBUSY" && busyTries < opts.maxBusyTries) {
+ var time = (opts.maxBusyTries - busyTries) * 100
+ busyTries ++
+ // try again, with the same exact callback as this one.
+ return setTimeout(function () {
+ rimraf_(p, opts, CB)
+        }, time)
+ }
+
+ // this one won't happen if graceful-fs is used.
+ if (er.code === "EMFILE" && timeout < EMFILE_MAX) {
+ return setTimeout(function () {
+ rimraf_(p, opts, CB)
+ }, timeout ++)
+ }
+
+ // already gone
+ if (er.code === "ENOENT") er = null
+ }
+
+ timeout = 0
+ cb(er)
+ })
+}
+
+function rimraf_ (p, opts, cb) {
+ fs[lstat](p, function (er, s) {
+ // if the stat fails, then assume it's already gone.
+ if (er) {
+ // already gone
+ if (er.code === "ENOENT") return cb()
+ // some other kind of error, permissions, etc.
+ return cb(er)
+ }
+
+    // don't delete things that don't actually live in the "gently" path
+ if (opts.gently) return clobberTest(p, s, opts, cb)
+ return rm_(p, s, opts, cb)
+ })
+}
+
+function rm_ (p, s, opts, cb) {
+ if (!s.isDirectory()) return fs.unlink(p, cb)
+ fs.readdir(p, function (er, files) {
+ if (er) return cb(er)
+ asyncForEach(files.map(function (f) {
+ return path.join(p, f)
+ }), function (file, cb) {
+ rimraf(file, opts, cb)
+ }, function (er) {
+ if (er) return cb(er)
+ fs.rmdir(p, cb)
+ })
+ })
+}
+
+function clobberTest (p, s, opts, cb) {
+ var gently = opts.gently
+ if (!s.isSymbolicLink()) next(null, path.resolve(p))
+ else realish(p, next)
+
+ function next (er, rp) {
+    if (er) return rm_(p, s, opts, cb)
+ if (rp.indexOf(gently) !== 0) return clobberFail(p, gently, cb)
+ else return rm_(p, s, opts, cb)
+ }
+}
+
+function realish (p, cb) {
+ fs.readlink(p, function (er, r) {
+ if (er) return cb(er)
+ return cb(null, path.resolve(path.dirname(p), r))
+ })
+}
+
+function clobberFail (p, g, cb) {
+ var er = new Error("Refusing to delete: "+p+" not in "+g)
+ , constants = require("constants")
+ er.errno = constants.EEXIST
+ er.code = "EEXIST"
+ er.path = p
+ return cb(er)
+}
+
+function asyncForEach (list, fn, cb) {
+ if (!list.length) cb()
+ var c = list.length
+ , errState = null
+ list.forEach(function (item, i, list) {
+ fn(item, function (er) {
+ if (errState) return
+ if (er) return cb(errState = er)
+ if (-- c === 0) return cb()
+ })
+ })
+}
+
+// this looks simpler, but it will fail with big directory trees,
+// or on slow stupid awful cygwin filesystems
+function rimrafSync (p) {
+ try {
+ var s = fs[lstatSync](p)
+ } catch (er) {
+ if (er.code === "ENOENT") return
+ throw er
+ }
+ if (!s.isDirectory()) return fs.unlinkSync(p)
+ fs.readdirSync(p).forEach(function (f) {
+ rimrafSync(path.join(p, f))
+ })
+ fs.rmdirSync(p)
+}
diff --git a/node_modules/semver/LICENSE b/node_modules/semver/LICENSE
new file mode 100644
index 000000000..05a401094
--- /dev/null
+++ b/node_modules/semver/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/semver/README.md b/node_modules/semver/README.md
new file mode 100644
index 000000000..6fa37a3d8
--- /dev/null
+++ b/node_modules/semver/README.md
@@ -0,0 +1,119 @@
+semver(1) -- The semantic versioner for npm
+===========================================
+
+## Usage
+
+ $ npm install semver
+
+ semver.valid('1.2.3') // true
+ semver.valid('a.b.c') // false
+ semver.clean(' =v1.2.3 ') // '1.2.3'
+ semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
+ semver.gt('1.2.3', '9.8.7') // false
+ semver.lt('1.2.3', '9.8.7') // true
+
+As a command-line utility:
+
+ $ semver -h
+
+ Usage: semver -v <version> [-r <range>]
+ Test if version(s) satisfy the supplied range(s),
+ and sort them.
+
+ Multiple versions or ranges may be supplied.
+
+ Program exits successfully if any valid version satisfies
+ all supplied ranges, and prints all satisfying versions.
+
+ If no versions are valid, or ranges are not satisfied,
+ then exits failure.
+
+ Versions are printed in ascending order, so supplying
+ multiple versions to the utility will just sort them.
+
+## Versions
+
+A version is the following things, in this order:
+
+* a number (Major)
+* a period
+* a number (minor)
+* a period
+* a number (patch)
+* OPTIONAL: a hyphen, followed by a number (build)
+* OPTIONAL: a collection of pretty much any non-whitespace characters
+ (tag)
+
+A leading `"="` or `"v"` character is stripped off and ignored.
+
+## Comparisons
+
+The ordering of versions is done using the following algorithm, given
+two versions and asked to find the greater of the two:
+
+* If the majors are numerically different, then take the one
+ with a bigger major number. `2.3.4 > 1.3.4`
+* If the minors are numerically different, then take the one
+ with the bigger minor number. `2.3.4 > 2.2.4`
+* If the patches are numerically different, then take the one with the
+ bigger patch number. `2.3.4 > 2.3.3`
+* If only one of them has a build number, then take the one with the
+ build number. `2.3.4-0 > 2.3.4`
+* If they both have build numbers, and the build numbers are numerically
+ different, then take the one with the bigger build number.
+ `2.3.4-10 > 2.3.4-9`
+* If only one of them has a tag, then take the one without the tag.
+ `2.3.4 > 2.3.4-beta`
+* If they both have tags, then take the one with the lexicographically
+ larger tag. `2.3.4-beta > 2.3.4-alpha`
+* At this point, they're equal.
+
+## Ranges
+
+The following range styles are supported:
+
+* `>1.2.3` Greater than a specific version.
+* `<1.2.3` Less than
+* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
+* `~1.2.3` := `>=1.2.3 <1.3.0`
+* `~1.2` := `>=1.2.0 <2.0.0`
+* `~1` := `>=1.0.0 <2.0.0`
+* `1.2.x` := `>=1.2.0 <1.3.0`
+* `1.x` := `>=1.0.0 <2.0.0`
+
+Ranges can be joined with either a space (which implies "and") or a
+`||` (which implies "or").
+
+## Functions
+
+* valid(v): Return the parsed version, or null if it's not valid.
+* inc(v, release): Return the version incremented by the release type
+ (major, minor, patch, or build), or null if it's not valid.
+
+### Comparison
+
+* gt(v1, v2): `v1 > v2`
+* gte(v1, v2): `v1 >= v2`
+* lt(v1, v2): `v1 < v2`
+* lte(v1, v2): `v1 <= v2`
+* eq(v1, v2): `v1 == v2` This is true if they're logically equivalent,
+ even if they're not the exact same string. You already know how to
+ compare strings.
+* neq(v1, v2): `v1 != v2` The opposite of eq.
+* cmp(v1, comparator, v2): Pass in a comparison string, and it'll call
+ the corresponding function above. `"==="` and `"!=="` do simple
+ string comparison, but are included for completeness. Throws if an
+ invalid comparison string is provided.
+* compare(v1, v2): Return 0 if v1 == v2, or 1 if v1 is greater, or -1 if
+ v2 is greater. Sorts in ascending order if passed to Array.sort().
+* rcompare(v1, v2): The reverse of compare. Sorts an array of versions
+ in descending order when passed to Array.sort().
+
+
+### Ranges
+
+* validRange(range): Return the valid range or null if it's not valid
+* satisfies(version, range): Return true if the version satisfies the
+ range.
+* maxSatisfying(versions, range): Return the highest version in the list
+ that satisfies the range, or null if none of them do.
diff --git a/node_modules/semver/bin/semver b/node_modules/semver/bin/semver
new file mode 100755
index 000000000..3e6afb40d
--- /dev/null
+++ b/node_modules/semver/bin/semver
@@ -0,0 +1,71 @@
+#!/usr/bin/env node
+// Standalone semver comparison program.
+// Exits successfully and prints matching version(s) if
+// any supplied version is valid and passes all tests.
+
+var argv = process.argv.slice(2)
+ , versions = []
+ , range = []
+ , gt = []
+ , lt = []
+ , eq = []
+ , semver = require("../semver")
+
+main()
+
+function main () {
+ if (!argv.length) return help()
+ while (argv.length) {
+ var a
+ switch (a = argv.shift()) {
+ case "-v": case "--version":
+ versions.push(argv.shift())
+ break
+ case "-r": case "--range":
+ range.push(argv.shift())
+ break
+ case "-h": case "--help": case "-?":
+ return help()
+ default:
+ versions.push(a)
+ break
+ }
+ }
+
+ versions = versions.filter(semver.valid)
+ for (var i = 0, l = range.length; i < l ; i ++) {
+ versions = versions.filter(function (v) {
+ return semver.satisfies(v, range[i])
+ })
+ if (!versions.length) return fail()
+ }
+ return success(versions)
+}
+
+function fail () { process.exit(1) }
+
+function success () {
+ versions.sort(semver.compare)
+ .map(semver.clean)
+ .forEach(function (v,i,_) { console.log(v) })
+}
+
+function help () {
+ console.log(["Usage: semver -v <version> [-r <range>]"
+ ,"Test if version(s) satisfy the supplied range(s),"
+ ,"and sort them."
+ ,""
+ ,"Multiple versions or ranges may be supplied."
+ ,""
+ ,"Program exits successfully if any valid version satisfies"
+ ,"all supplied ranges, and prints all satisfying versions."
+ ,""
+ ,"If no versions are valid, or ranges are not satisfied,"
+ ,"then exits failure."
+ ,""
+ ,"Versions are printed in ascending order, so supplying"
+ ,"multiple versions to the utility will just sort them."
+ ].join("\n"))
+}
+
+
diff --git a/node_modules/semver/package.json b/node_modules/semver/package.json
new file mode 100644
index 000000000..563549ffe
--- /dev/null
+++ b/node_modules/semver/package.json
@@ -0,0 +1,11 @@
+{ "name" : "semver"
+, "version" : "1.0.13"
+, "description" : "The semantic version parser used by npm."
+, "main" : "semver.js"
+, "scripts" : { "test" : "tap test.js" }
+, "devDependencies": { "tap" : "0.x >=0.0.4" }
+, "license" :
+ { "type" : "MIT"
+ , "url" : "https://github.com/isaacs/semver/raw/master/LICENSE" }
+, "repository" : "git://github.com/isaacs/node-semver.git"
+, "bin" : { "semver" : "./bin/semver" } }
diff --git a/node_modules/semver/semver.js b/node_modules/semver/semver.js
new file mode 100644
index 000000000..d1266736e
--- /dev/null
+++ b/node_modules/semver/semver.js
@@ -0,0 +1,305 @@
+;(function (exports) { // nothing in here is node-specific.
+
+// See http://semver.org/
+// This implementation is a *hair* less strict in that it allows
+// v1.2.3 things, and also tags that don't begin with a char.
+
+var semver = "\\s*[v=]*\\s*([0-9]+)" // major
+ + "\\.([0-9]+)" // minor
+ + "\\.([0-9]+)" // patch
+ + "(-[0-9]+-?)?" // build
+ + "([a-zA-Z-][a-zA-Z0-9-\.:]*)?" // tag
+  , exprComparator = "^((<|>)?=?)\\s*("+semver+")$|^$"
+ , xRangePlain = "[v=]*([0-9]+|x|X|\\*)"
+ + "(?:\\.([0-9]+|x|X|\\*)"
+ + "(?:\\.([0-9]+|x|X|\\*)"
+ + "([a-zA-Z-][a-zA-Z0-9-\.:]*)?)?)?"
+ , xRange = "((?:<|>)?=?)?\\s*" + xRangePlain
+ , exprSpermy = "(?:~>?)"+xRange
+ , expressions = exports.expressions =
+ { parse : new RegExp("^\\s*"+semver+"\\s*$")
+ , parsePackage : new RegExp("^\\s*([^\/]+)[-@](" +semver+")\\s*$")
+ , parseRange : new RegExp(
+ "^\\s*(" + semver + ")\\s+-\\s+(" + semver + ")\\s*$")
+ , validComparator : new RegExp("^"+exprComparator+"$")
+ , parseXRange : new RegExp("^"+xRange+"$")
+ , parseSpermy : new RegExp("^"+exprSpermy+"$")
+ }
+
+
+Object.getOwnPropertyNames(expressions).forEach(function (i) {
+ exports[i] = function (str) {
+ return ("" + (str || "")).match(expressions[i])
+ }
+})
+
+exports.rangeReplace = ">=$1 <=$7"
+exports.clean = clean
+exports.compare = compare
+exports.rcompare = rcompare
+exports.satisfies = satisfies
+exports.gt = gt
+exports.gte = gte
+exports.lt = lt
+exports.lte = lte
+exports.eq = eq
+exports.neq = neq
+exports.cmp = cmp
+exports.inc = inc
+
+exports.valid = valid
+exports.validPackage = validPackage
+exports.validRange = validRange
+exports.maxSatisfying = maxSatisfying
+
+exports.replaceStars = replaceStars
+exports.toComparators = toComparators
+
+function stringify (version) {
+ var v = version
+ return [v[1]||'', v[2]||'', v[3]||''].join(".") + (v[4]||'') + (v[5]||'')
+}
+
+function clean (version) {
+ version = exports.parse(version)
+ if (!version) return version
+ return stringify(version)
+}
+
+function valid (version) {
+ if (typeof version !== "string") return null
+ return exports.parse(version) && version.trim().replace(/^[v=]+/, '')
+}
+
+function validPackage (version) {
+ if (typeof version !== "string") return null
+ return version.match(expressions.parsePackage) && version.trim()
+}
+
+// range can be one of:
+// "1.0.3 - 2.0.0" range, inclusive, like ">=1.0.3 <=2.0.0"
+// ">1.0.2" like 1.0.3 - 9999.9999.9999
+// ">=1.0.2" like 1.0.2 - 9999.9999.9999
+// "<2.0.0" like 0.0.0 - 1.9999.9999
+// ">1.0.2 <2.0.0" like 1.0.3 - 1.9999.9999
+var starExpression = /(<|>)?=?\s*\*/g
+ , starReplace = ""
+ , compTrimExpression = new RegExp("((<|>)?=?)\\s*("
+ +semver+"|"+xRangePlain+")", "g")
+ , compTrimReplace = "$1$3"
+
+function toComparators (range) {
+ var ret = (range || "").trim()
+ .replace(expressions.parseRange, exports.rangeReplace)
+ .replace(compTrimExpression, compTrimReplace)
+ .split(/\s+/)
+ .join(" ")
+ .split("||")
+ .map(function (orchunk) {
+ return orchunk
+ .split(" ")
+ .map(replaceXRanges)
+ .map(replaceSpermies)
+ .map(replaceStars)
+ .join(" ").trim()
+ })
+ .map(function (orchunk) {
+ return orchunk
+ .trim()
+ .split(/\s+/)
+ .filter(function (c) { return c.match(expressions.validComparator) })
+ })
+ .filter(function (c) { return c.length })
+ return ret
+}
+
+function replaceStars (stars) {
+ return stars.trim().replace(starExpression, starReplace)
+}
+
+// "2.x","2.x.x" --> ">=2.0.0- <2.1.0-"
+// "2.3.x" --> ">=2.3.0- <2.4.0-"
+function replaceXRanges (ranges) {
+ return ranges.split(/\s+/)
+ .map(replaceXRange)
+ .join(" ")
+}
+
+function replaceXRange (version) {
+ return version.trim().replace(expressions.parseXRange,
+ function (v, gtlt, M, m, p, t) {
+ var anyX = !M || M.toLowerCase() === "x" || M === "*"
+ || !m || m.toLowerCase() === "x" || m === "*"
+ || !p || p.toLowerCase() === "x" || p === "*"
+ , ret = v
+
+ if (gtlt && anyX) {
+ // just replace x'es with zeroes
+ ;(!M || M === "*" || M.toLowerCase() === "x") && (M = 0)
+ ;(!m || m === "*" || m.toLowerCase() === "x") && (m = 0)
+ ;(!p || p === "*" || p.toLowerCase() === "x") && (p = 0)
+ ret = gtlt + M+"."+m+"."+p+"-"
+ } else if (!M || M === "*" || M.toLowerCase() === "x") {
+ ret = "*" // allow any
+ } else if (!m || m === "*" || m.toLowerCase() === "x") {
+ // append "-" onto the version, otherwise
+ // "1.x.x" matches "2.0.0beta", since the tag
+ // *lowers* the version value
+ ret = ">="+M+".0.0- <"+(+M+1)+".0.0-"
+ } else if (!p || p === "*" || p.toLowerCase() === "x") {
+ ret = ">="+M+"."+m+".0- <"+M+"."+(+m+1)+".0-"
+ }
+ //console.error("parseXRange", [].slice.call(arguments), ret)
+ return ret
+ })
+}
+
+// ~, ~> --> * (any, kinda silly)
+// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
+// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
+// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
+// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
+// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
+function replaceSpermies (version) {
+ return version.trim().replace(expressions.parseSpermy,
+ function (v, gtlt, M, m, p, t) {
+ if (gtlt) throw new Error(
+ "Using '"+gtlt+"' with ~ makes no sense. Don't do it.")
+
+ if (!M || M.toLowerCase() === "x") {
+ return ""
+ }
+ // ~1 == >=1.0.0- <2.0.0-
+ if (!m || m.toLowerCase() === "x") {
+ return ">="+M+".0.0- <"+(+M+1)+".0.0-"
+ }
+ // ~1.2 == >=1.2.0- <1.3.0-
+ if (!p || p.toLowerCase() === "x") {
+ return ">="+M+"."+m+".0- <"+M+"."+(+m+1)+".0-"
+ }
+ // ~1.2.3 == >=1.2.3- <1.3.0-
+ t = t || "-"
+ return ">="+M+"."+m+"."+p+t+" <"+M+"."+(+m+1)+".0-"
+ })
+}
+
+function validRange (range) {
+ range = replaceStars(range)
+ var c = toComparators(range)
+ return (c.length === 0)
+ ? null
+ : c.map(function (c) { return c.join(" ") }).join("||")
+}
+
+// returns the highest satisfying version in the list, or undefined
+function maxSatisfying (versions, range) {
+ return versions
+ .filter(function (v) { return satisfies(v, range) })
+ .sort(compare)
+ .pop()
+}
+function satisfies (version, range) {
+ version = valid(version)
+ if (!version) return false
+ range = toComparators(range)
+ for (var i = 0, l = range.length ; i < l ; i ++) {
+ var ok = false
+ for (var j = 0, ll = range[i].length ; j < ll ; j ++) {
+ var r = range[i][j]
+ , gtlt = r.charAt(0) === ">" ? gt
+ : r.charAt(0) === "<" ? lt
+ : false
+ , eq = r.charAt(!!gtlt) === "="
+ , sub = (!!eq) + (!!gtlt)
+ if (!gtlt) eq = true
+ r = r.substr(sub)
+ r = (r === "") ? r : valid(r)
+ ok = (r === "") || (eq && r === version) || (gtlt && gtlt(version, r))
+ if (!ok) break
+ }
+ if (ok) return true
+ }
+ return false
+}
+
+// return v1 > v2 ? 1 : -1
+function compare (v1, v2) {
+ var g = gt(v1, v2)
+ return g === null ? 0 : g ? 1 : -1
+}
+
+function rcompare (v1, v2) {
+ return compare(v2, v1)
+}
+
+function lt (v1, v2) { return gt(v2, v1) }
+function gte (v1, v2) { return !lt(v1, v2) }
+function lte (v1, v2) { return !gt(v1, v2) }
+function eq (v1, v2) { return gt(v1, v2) === null }
+function neq (v1, v2) { return gt(v1, v2) !== null }
+function cmp (v1, c, v2) {
+ switch (c) {
+ case ">": return gt(v1, v2)
+ case "<": return lt(v1, v2)
+ case ">=": return gte(v1, v2)
+ case "<=": return lte(v1, v2)
+ case "==": return eq(v1, v2)
+ case "!=": return neq(v1, v2)
+ case "===": return v1 === v2
+ case "!==": return v1 !== v2
+ default: throw new Error("Y U NO USE VALID COMPARATOR!? "+c)
+ }
+}
+
+// return v1 > v2
+function num (v) {
+ return v === undefined ? -1 : parseInt((v||"0").replace(/[^0-9]+/g, ''), 10)
+}
+function gt (v1, v2) {
+ v1 = exports.parse(v1)
+ v2 = exports.parse(v2)
+ if (!v1 || !v2) return false
+
+ for (var i = 1; i < 5; i ++) {
+ v1[i] = num(v1[i])
+ v2[i] = num(v2[i])
+ if (v1[i] > v2[i]) return true
+ else if (v1[i] !== v2[i]) return false
+ }
+ // no tag is > than any tag, or use lexicographical order.
+ var tag1 = v1[5] || ""
+ , tag2 = v2[5] || ""
+
+ // kludge: null means they were equal. falsey, and detectable.
+ // embarrassingly overclever, though, I know.
+ return tag1 === tag2 ? null
+ : !tag1 ? true
+ : !tag2 ? false
+ : tag1 > tag2
+}
+
+function inc (version, release) {
+ version = exports.parse(version)
+ if (!version) return null
+
+ var parsedIndexLookup =
+ { 'major': 1
+ , 'minor': 2
+ , 'patch': 3
+ , 'build': 4 }
+ var incIndex = parsedIndexLookup[release]
+ if (incIndex === undefined) return null
+
+ var current = num(version[incIndex])
+ version[incIndex] = current === -1 ? 1 : current + 1
+
+ for (var i = incIndex + 1; i < 5; i ++) {
+ if (num(version[i]) !== -1) version[i] = "0"
+ }
+
+ if (version[4]) version[4] = "-" + version[4]
+ version[5] = ""
+
+ return stringify(version)
+}
+})(typeof exports === "object" ? exports : semver = {})
diff --git a/node_modules/slide/.npmignore b/node_modules/slide/.npmignore
new file mode 100644
index 000000000..a13633799
--- /dev/null
+++ b/node_modules/slide/.npmignore
@@ -0,0 +1 @@
+*.pdf
diff --git a/node_modules/slide/LICENSE b/node_modules/slide/LICENSE
new file mode 100644
index 000000000..05a401094
--- /dev/null
+++ b/node_modules/slide/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/slide/README.md b/node_modules/slide/README.md
new file mode 100644
index 000000000..6e4be2f94
--- /dev/null
+++ b/node_modules/slide/README.md
@@ -0,0 +1,32 @@
+# Slide - a tiny flow control library
+
+Callbacks are simple and easy if you keep the pattern consistent.
+
+Check out the [slide
+presentation](http://github.com/isaacs/slide-flow-control/raw/master/nodejs-controlling-flow.pdf),
+or the [blog post](http://howto.no.de/flow-control-in-npm).
+
+You'll laugh when you see how little code is actually in this thing.
+It's so not-enterprisey, you won't believe it. It does almost nothing,
+but it's super handy.
+
+I use this util in [a real world program](http://npmjs.org/).
+
+You should use it as an example of how to write your own flow control
+utilities. You'll never fully appreciate a flow control lib that you
+didn't write yourself.
+
+## Installation
+
+Just copy the files into your project, and use them that way, or
+you can do this:
+
+ npm install slide
+
+and then:
+
+ var asyncMap = require("slide").asyncMap
+ , chain = require("slide").chain
+ // use the power!
+
+Enjoy!
diff --git a/node_modules/slide/index.js b/node_modules/slide/index.js
new file mode 100644
index 000000000..0a9277f6e
--- /dev/null
+++ b/node_modules/slide/index.js
@@ -0,0 +1 @@
+module.exports=require("./lib/slide")
diff --git a/node_modules/slide/lib/async-map-ordered.js b/node_modules/slide/lib/async-map-ordered.js
new file mode 100644
index 000000000..5cca79a82
--- /dev/null
+++ b/node_modules/slide/lib/async-map-ordered.js
@@ -0,0 +1,65 @@
+
+throw new Error("TODO: Not yet implemented.")
+
+/*
+usage:
+
+Like asyncMap, but only can take a single cb, and guarantees
+the order of the results.
+*/
+
+module.exports = asyncMapOrdered
+
+function asyncMapOrdered (list, fn, cb_) {
+ if (typeof cb_ !== "function") throw new Error(
+ "No callback provided to asyncMapOrdered")
+
+ if (typeof fn !== "function") throw new Error(
+ "No map function provided to asyncMapOrdered")
+
+ if (list === undefined || list === null) return cb_(null, [])
+ if (!Array.isArray(list)) list = [list]
+ if (!list.length) return cb_(null, [])
+
+ var errState = null
+ , l = list.length
+ , a = l
+ , res = []
+ , resCount = 0
+ , maxArgLen = 0
+
+ function cb (index) { return function () {
+ if (errState) return
+ var er = arguments[0]
+ var argLen = arguments.length
+ maxArgLen = Math.max(maxArgLen, argLen)
+ res[index] = argLen === 1 ? [er] : Array.apply(null, arguments)
+
+ // see if any new things have been added.
+ if (list.length > l) {
+ var newList = list.slice(l)
+ a += (list.length - l)
+ var oldLen = l
+ l = list.length
+ process.nextTick(function () {
+ newList.forEach(function (ar, i) { fn(ar, cb(i + oldLen)) })
+ })
+ }
+
+ if (er || --a === 0) {
+ errState = er
+ cb_.apply(null, [errState].concat(flip(res, resCount, maxArgLen)))
+ }
+ }}
+ // expect the supplied cb function to be called
+ // "n" times for each thing in the array.
+ list.forEach(function (ar) {
+ steps.forEach(function (fn, i) { fn(ar, cb(i)) })
+ })
+}
+
+function flip (res, resCount, argLen) {
+ var flat = []
+ // res = [[er, x, y], [er, x1, y1], [er, x2, y2, z2]]
+ // return [[x, x1, x2], [y, y1, y2], [undefined, undefined, z2]]
+
diff --git a/node_modules/slide/lib/async-map.js b/node_modules/slide/lib/async-map.js
new file mode 100644
index 000000000..1ced158e0
--- /dev/null
+++ b/node_modules/slide/lib/async-map.js
@@ -0,0 +1,56 @@
+
+/*
+usage:
+
+// do something to a list of things
+asyncMap(myListOfStuff, function (thing, cb) { doSomething(thing.foo, cb) }, cb)
+// do more than one thing to each item
+asyncMap(list, fooFn, barFn, cb)
+
+*/
+
+module.exports = asyncMap
+
+function asyncMap () {
+ var steps = Array.prototype.slice.call(arguments)
+ , list = steps.shift() || []
+ , cb_ = steps.pop()
+ if (typeof cb_ !== "function") throw new Error(
+ "No callback provided to asyncMap")
+ if (!list) return cb_(null, [])
+ if (!Array.isArray(list)) list = [list]
+ var n = steps.length
+ , data = [] // 2d array
+ , errState = null
+ , l = list.length
+ , a = l * n
+ if (!a) return cb_(null, [])
+ function cb (er) {
+ if (errState) return
+ var argLen = arguments.length
+ for (var i = 1; i < argLen; i ++) if (arguments[i] !== undefined) {
+ data[i - 1] = (data[i - 1] || []).concat(arguments[i])
+ }
+ // see if any new things have been added.
+ if (list.length > l) {
+ var newList = list.slice(l)
+ a += (list.length - l) * n
+ l = list.length
+ process.nextTick(function () {
+ newList.forEach(function (ar) {
+ steps.forEach(function (fn) { fn(ar, cb) })
+ })
+ })
+ }
+
+ if (er || --a === 0) {
+ errState = er
+ cb_.apply(null, [errState].concat(data))
+ }
+ }
+ // expect the supplied cb function to be called
+ // "n" times for each thing in the array.
+ list.forEach(function (ar) {
+ steps.forEach(function (fn) { fn(ar, cb) })
+ })
+}
diff --git a/node_modules/slide/lib/bind-actor.js b/node_modules/slide/lib/bind-actor.js
new file mode 100644
index 000000000..6a3707274
--- /dev/null
+++ b/node_modules/slide/lib/bind-actor.js
@@ -0,0 +1,16 @@
+module.exports = bindActor
+function bindActor () {
+ var args =
+ Array.prototype.slice.call
+ (arguments) // jswtf.
+ , obj = null
+ , fn
+ if (typeof args[0] === "object") {
+ obj = args.shift()
+ fn = args.shift()
+ if (typeof fn === "string")
+ fn = obj[ fn ]
+ } else fn = args.shift()
+ return function (cb) {
+ fn.apply(obj, args.concat(cb)) }
+}
diff --git a/node_modules/slide/lib/chain.js b/node_modules/slide/lib/chain.js
new file mode 100644
index 000000000..17b371149
--- /dev/null
+++ b/node_modules/slide/lib/chain.js
@@ -0,0 +1,20 @@
+module.exports = chain
+var bindActor = require("./bind-actor.js")
+chain.first = {} ; chain.last = {}
+function chain (things, cb) {
+ var res = []
+ ;(function LOOP (i, len) {
+ if (i >= len) return cb(null,res)
+ if (Array.isArray(things[i]))
+ things[i] = bindActor.apply(null,
+ things[i].map(function(i){
+ return (i===chain.first) ? res[0]
+ : (i===chain.last)
+ ? res[res.length - 1] : i }))
+ if (!things[i]) return LOOP(i + 1, len)
+ things[i](function (er, data) {
+ if (er) return cb(er, res)
+ if (data !== undefined) res = res.concat(data)
+ LOOP(i + 1, len)
+ })
+ })(0, things.length) }
diff --git a/node_modules/slide/lib/slide.js b/node_modules/slide/lib/slide.js
new file mode 100644
index 000000000..6e9ec2327
--- /dev/null
+++ b/node_modules/slide/lib/slide.js
@@ -0,0 +1,3 @@
+exports.asyncMap = require("./async-map")
+exports.bindActor = require("./bind-actor")
+exports.chain = require("./chain")
diff --git a/node_modules/slide/nodejs-controlling-flow.pdf b/node_modules/slide/nodejs-controlling-flow.pdf
new file mode 100644
index 000000000..ca12d60cb
--- /dev/null
+++ b/node_modules/slide/nodejs-controlling-flow.pdf
Binary files differ
diff --git a/node_modules/slide/package.json b/node_modules/slide/package.json
new file mode 100644
index 000000000..5cc2689e6
--- /dev/null
+++ b/node_modules/slide/package.json
@@ -0,0 +1,19 @@
+{
+ "name": "slide",
+ "version": "1.1.3",
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
+ "contributors": [
+ "S. Sriram <ssriram@gmail.com> (http://www.565labs.com)"
+ ],
+ "description": "A flow control lib small enough to fit on in a slide presentation. Derived live at Oak.JS",
+ "main": "./lib/slide.js",
+ "dependencies": {},
+ "devDependencies": {},
+ "engines": {
+ "node": "*"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/slide-flow-control.git"
+ }
+}
diff --git a/node_modules/tar/.npmignore b/node_modules/tar/.npmignore
new file mode 100644
index 000000000..856407ebe
--- /dev/null
+++ b/node_modules/tar/.npmignore
@@ -0,0 +1,5 @@
+.*.swp
+node_modules
+examples/extract/
+test/tmp/
+test/fixtures/symlink
diff --git a/node_modules/tar/.travis.yml b/node_modules/tar/.travis.yml
new file mode 100644
index 000000000..2d26206d5
--- /dev/null
+++ b/node_modules/tar/.travis.yml
@@ -0,0 +1,3 @@
+language: node_js
+node_js:
+ - 0.6
diff --git a/node_modules/tar/README.md b/node_modules/tar/README.md
new file mode 100644
index 000000000..7cfe3bbca
--- /dev/null
+++ b/node_modules/tar/README.md
@@ -0,0 +1,50 @@
+# node-tar
+
+Tar for Node.js.
+
+## Goals of this project
+
+1. Be able to parse and reasonably extract the contents of any tar file
+ created by any program that creates tar files, period.
+
+ At least, this includes every version of:
+
+ * bsdtar
+ * gnutar
+ * solaris posix tar
+ * Joerg Schilling's star ("Schilly tar")
+
+2. Create tar files that can be extracted by any of the following tar
+ programs:
+
+ * bsdtar/libarchive version 2.6.2
+ * gnutar 1.15 and above
+ * SunOS Posix tar
+ * Joerg Schilling's star ("Schilly tar")
+
+3. 100% test coverage. Speed is important. Correctness is slightly
+ more important.
+
+4. Create the kind of tar interface that Node users would want to use.
+
+5. Satisfy npm's needs for a portable tar implementation with a
+ JavaScript interface.
+
+6. No excuses. No complaining. No tolerance for failure.
+
+## But isn't there already a tar.js?
+
+Yes, there are a few. This one is going to be better, and it will be
+fanatically maintained, because npm will depend on it.
+
+That's why I need to write it from scratch. Creating and extracting
+tarballs is such a large part of what npm does, I simply can't have it
+be a black box any longer.
+
+## Didn't you have something already? Where'd it go?
+
+It's in the "old" folder. It's not functional. Don't use it.
+
+It was a useful exploration to learn the issues involved, but like most
+software of any reasonable complexity, node-tar won't be useful until
+it's been written at least 3 times.
diff --git a/node_modules/tar/lib/buffer-entry.js b/node_modules/tar/lib/buffer-entry.js
new file mode 100644
index 000000000..5322a28ab
--- /dev/null
+++ b/node_modules/tar/lib/buffer-entry.js
@@ -0,0 +1,30 @@
+// just like the Entry class, but it buffers the contents
+//
+// XXX It would be good to set a maximum BufferEntry filesize,
+// since it eats up memory. In normal operation,
+// these are only for long filenames or link names, which are
+// rarely very big.
+
+module.exports = BufferEntry
+
+var inherits = require("inherits")
+ , Entry = require("./entry.js")
+
// An Entry that accumulates its whole contents into `this.body`.
// Used for long-name/long-link entries, which are typically small.
function BufferEntry () {
  Entry.apply(this, arguments)

  var me = this
  me._buffer = new Buffer(me.props.size)
  me._offset = 0
  me.body = ""

  // Once the stream ends, expose the collected bytes as a string,
  // dropping the trailing NUL terminator.
  me.on("end", function () {
    me.body = me._buffer.toString().slice(0, -1)
  })
}
+
// Collect the incoming chunk into the internal buffer, then delegate to
// Entry's write so the normal pass-through "data" events still fire.
BufferEntry.prototype.write = function (c) {
  c.copy(this._buffer, this._offset)
  this._offset += c.length
  Entry.prototype.write.call(this, c)
}
+
+inherits(BufferEntry, Entry)
diff --git a/node_modules/tar/lib/entry-writer.js b/node_modules/tar/lib/entry-writer.js
new file mode 100644
index 000000000..8e09042d0
--- /dev/null
+++ b/node_modules/tar/lib/entry-writer.js
@@ -0,0 +1,169 @@
+module.exports = EntryWriter
+
+var tar = require("../tar.js")
+ , TarHeader = require("./header.js")
+ , Entry = require("./entry.js")
+ , inherits = require("inherits")
+ , BlockStream = require("block-stream")
+ , ExtendedHeaderWriter
+ , Stream = require("stream").Stream
+ , EOF = {}
+
+inherits(EntryWriter, Stream)
+
// Writable/readable stream for a single tar entry. Incoming body bytes
// flow through a 512-byte BlockStream so the tarball stays
// block-aligned, and are re-emitted as "data".
function EntryWriter (props) {
  var self = this

  if (!(self instanceof EntryWriter)) {
    return new EntryWriter(props)
  }

  Stream.apply(self)

  self.writable = true
  self.readable = true

  self._stream = new BlockStream(512)

  self._stream.on("data", function (chunk) {
    self.emit("data", chunk)
  })

  self._stream.on("drain", function () {
    self.emit("drain")
  })

  self._stream.on("end", function () {
    self.emit("end")
    self.emit("close")
  })

  self.props = props
  // directories carry no body
  if (props.type === "Directory") {
    props.size = 0
  }
  props.ustar = "ustar\0"
  props.ustarver = "00"
  self.path = props.path

  self._buffer = []
  self._didHeader = false
  self._meta = false

  // start flushing as soon as something pipes into us
  self.on("pipe", function () {
    self._process()
  })
}
+
// Buffer the chunk and flush what the block stream will accept.
// Returns false when data is still queued, signaling the writer to
// wait for "drain".
EntryWriter.prototype.write = function (chunk) {
  if (this._ended) {
    return this.emit("error", new Error("write after end"))
  }
  this._buffer.push(chunk)
  this._process()
  this._needDrain = this._buffer.length > 0
  return !this._needDrain
}
+
// Finish the entry: queue an optional final chunk plus the EOF
// sentinel, then flush.
EntryWriter.prototype.end = function (chunk) {
  if (chunk) this._buffer.push(chunk)
  this._buffer.push(EOF)
  this._ended = true
  this._process()
  this._needDrain = this._buffer.length > 0
}
+
// Stop emitting data until resume() is called.
EntryWriter.prototype.pause = function () {
  this._paused = true
  this.emit("pause")
}

// Re-enable emission and flush anything buffered while paused.
EntryWriter.prototype.resume = function () {
  this._paused = false
  this.emit("resume")
  this._process()
}

// Delegate a new entry to the parent Pack stream, finishing this entry
// first so its header and trailing blocks are fully emitted.
EntryWriter.prototype.add = function (entry) {
  if (!this.parent) return this.emit("error", new Error("no parent"))

  // make sure that the _header and such is emitted, and clear out
  // the _currentEntry link on the parent.
  if (!this._ended) this.end()

  return this.parent.add(entry)
}
+
// Emit the 512-byte ustar header (exactly once). When the props don't
// fit the plain ustar header, a pax extended header entry is emitted
// ahead of it — except for meta entries, which ARE extended headers.
EntryWriter.prototype._header = function () {
  if (this._didHeader) return
  this._didHeader = true

  var headerBlock = TarHeader.encode(this.props)

  if (this.props.needExtended && !this._meta) {
    var self = this

    // required lazily to break the circular dependency between this
    // module and extended-header-writer.js
    ExtendedHeaderWriter = ExtendedHeaderWriter ||
      require("./extended-header-writer.js")

    ExtendedHeaderWriter(this.props)
      .on("data", function (chunk) {
        self.emit("data", chunk)
      })
      .on("error", function (er) {
        self.emit("error", er)
      })
      .end()
  }

  this.emit("data", headerBlock)
  this.emit("header")
}
+
// Drain the internal buffer into the block stream. Guarantees the
// header goes out before any body bytes, and stops early if pause() is
// called mid-emission (the unsent remainder is kept for next time).
EntryWriter.prototype._process = function () {
  // meta entries (pax headers) write their own header in end()
  if (!this._didHeader && !this._meta) {
    this._header()
  }

  if (this._paused || this._processing) {
    return
  }

  // guard against re-entrant calls from "data" handlers
  this._processing = true

  var buf = this._buffer
  for (var i = 0; i < buf.length; i ++) {
    var c = buf[i]

    // EOF is a sentinel object, not data: it closes the block stream
    if (c === EOF) this._stream.end()
    else this._stream.write(c)

    if (this._paused) {
      // paused mid-emission: keep whatever we haven't sent yet
      this._processing = false
      if (i < buf.length) {
        this._needDrain = true
        this._buffer = buf.slice(i + 1)
      }
      return
    }
  }

  // everything was emitted
  this._buffer.length = 0
  this._processing = false

  this.emit("drain")
}
+
+EntryWriter.prototype.destroy = function () {}
diff --git a/node_modules/tar/lib/entry.js b/node_modules/tar/lib/entry.js
new file mode 100644
index 000000000..4fc331eb9
--- /dev/null
+++ b/node_modules/tar/lib/entry.js
@@ -0,0 +1,212 @@
+// A passthrough read/write stream that sets its properties
+// based on a header, extendedHeader, and globalHeader
+//
+// Can be either a file system object of some sort, or
+// a pax/ustar metadata entry.
+
+module.exports = Entry
+
+var TarHeader = require("./header.js")
+ , tar = require("../tar")
+ , assert = require("assert").ok
+ , Stream = require("stream").Stream
+ , inherits = require("inherits")
+ , fstream = require("fstream").Abstract
+
// Build an Entry from a parsed ustar header, plus any extended (pax)
// and global headers that apply to it. Acts as a pass-through
// read/write stream for the entry's body bytes.
function Entry (header, extended, global) {
  Stream.call(this)
  this.readable = true
  this.writable = true

  // pass-through buffering state
  this._needDrain = false
  this._paused = false
  this._reading = false
  this._ending = false
  this._ended = false
  this._remaining = 0
  this._queue = []
  this._index = 0
  this._queueLen = 0

  // bound so it can be handed around as an event handler
  this._read = this._read.bind(this)

  this.props = {}
  this._header = header
  this._extended = extended || {}

  // Global pax headers can mutate as the parse proceeds, so snapshot
  // the current state rather than keeping a live reference.
  var me = this
  this._global = {}
  Object.keys(global || {}).forEach(function (key) {
    me._global[key] = global[key]
  })

  this._setProps()
}
+
// Instance methods, mixed in via inherits' third argument.
inherits(Entry, Stream,
{ write: function (c) {
    if (this._ending) this.error("write() after end()", null, true)
    if (this._remaining === 0) {
      this.error("invalid bytes past eof")
    }

    // often we'll get a bunch of \0 at the end of the last write,
    // since chunks will always be 512 bytes when reading a tarball.
    if (c.length > this._remaining) {
      c = c.slice(0, this._remaining)
    }
    this._remaining -= c.length

    // put it on the stack.
    var ql = this._queueLen
    this._queue.push(c)
    this._queueLen ++

    this._read()

    // either paused, or buffered
    if (this._paused || ql > 0) {
      this._needDrain = true
      return false
    }

    return true
  }

, end: function (c) {
    if (c) this.write(c)
    this._ending = true
    this._read()
  }

, pause: function () {
    this._paused = true
    this.emit("pause")
  }

, resume: function () {
    this.emit("resume")
    this._paused = false
    this._read()
    // truthy when there is still buffered data left to flush
    return this._queueLen - this._index > 1
  }

  // This is bound to the instance in the constructor.
, _read: function () {
    if (this._paused || this._reading || this._ended) return

    // set this flag so that event handlers don't inadvertently
    // get multiple _read() calls running.
    this._reading = true

    // have any data to emit?
    if (this._index < this._queueLen) {
      var chunk = this._queue[this._index ++]
      this.emit("data", chunk)
    }

    // check if we're drained
    if (this._index >= this._queueLen) {
      this._queue.length = this._queueLen = this._index = 0
      if (this._needDrain) {
        this._needDrain = false
        this.emit("drain")
      }
      if (this._ending) {
        this._ended = true
        this.emit("end")
      }
    }

    // if the queue gets too big, then pluck off whatever we can.
    // this should be fairly rare.
    // NOTE(review): _maxQueueLen is never assigned in the visible
    // constructor, so this comparison is against undefined and the
    // trim appears to be dead code — confirm whether something else
    // sets it.
    var mql = this._maxQueueLen
    if (this._queueLen > mql && this._index > 0) {
      mql = Math.min(this._index, mql)
      this._index -= mql
      this._queueLen -= mql
      this._queue = this._queue.slice(mql)
    }

    this._reading = false
  }

, _setProps: function () {
    // precedence: props = extended -> global -> header -> {}
    var header = this._header
      , extended = this._extended
      , global = this._global
      , props = this.props

    // first get the values from the normal header.
    // (tar.fields is a null-terminated index->name list)
    var fields = tar.fields
    for (var f = 0; fields[f] !== null; f ++) {
      var field = fields[f]
        , val = header[field]
      if (typeof val !== "undefined") props[field] = val
    }

    // next, the global header for this file.
    // numeric values, etc, will have already been parsed.
    ;[global, extended].forEach(function (p) {
      Object.keys(p).forEach(function (f) {
        if (typeof p[f] !== "undefined") props[f] = p[f]
      })
    })

    // no nulls allowed in path or linkpath
    ;["path", "linkpath"].forEach(function (p) {
      if (props.hasOwnProperty(p)) {
        props[p] = props[p].split("\0")[0]
      }
    })


    // set date fields to be a proper date (raw values are epoch secs)
    ;["mtime", "ctime", "atime"].forEach(function (p) {
      if (props.hasOwnProperty(p)) {
        props[p] = new Date(props[p] * 1000)
      }
    })

    // set the type so that we know what kind of file to create
    var type
    switch (tar.types[props.type]) {
      case "OldFile":
      case "ContiguousFile":
        type = "File"
        break

      case "GNUDumpDir":
        type = "Directory"
        break

      case undefined:
        type = "Unknown"
        break

      case "Link":
      case "SymbolicLink":
      case "CharacterDevice":
      case "BlockDevice":
      case "Directory":
      case "FIFO":
      default:
        type = tar.types[props.type]
    }

    this.type = type
    this.path = props.path
    this.size = props.size

    // size is special, since it signals when the file needs to end.
    this._remaining = props.size
  }
  // error-reporting helpers shared with fstream
, warn: fstream.warn
, error: fstream.error
})
diff --git a/node_modules/tar/lib/extended-header-writer.js b/node_modules/tar/lib/extended-header-writer.js
new file mode 100644
index 000000000..a130c5b16
--- /dev/null
+++ b/node_modules/tar/lib/extended-header-writer.js
@@ -0,0 +1,192 @@
+
+module.exports = ExtendedHeaderWriter
+
+var inherits = require("inherits")
+ , EntryWriter = require("./entry-writer.js")
+
+inherits(ExtendedHeaderWriter, EntryWriter)
+
+var tar = require("../tar.js")
+ , path = require("path")
+ , inherits = require("inherits")
+ , TarHeader = require("./header.js")
+
+// props is the props of the thing we need to write an
+// extended header for.
+// Don't be shy with it. Just encode everything.
// Writes a pax extended header entry (type "x") that encodes every
// field of `props` which doesn't fit, or can't be expressed, in a
// plain ustar header block.
function ExtendedHeaderWriter (props) {
  var self = this

  if (!(self instanceof ExtendedHeaderWriter)) {
    return new ExtendedHeaderWriter(props)
  }

  // the fields to be encoded into the pax body
  self.fields = props

  // Header props for the pax entry itself; size is filled in later by
  // _encodeFields, once the body has been rendered.
  var paxProps =
    { path : ("PaxHeader" + path.join("/", props.path || ""))
        .replace(/\\/g, "/").substr(0, 100)
    , mode : props.mode || 438 // 0666
    , uid : props.uid || 0
    , gid : props.gid || 0
    , size : 0 // will be set later
    , mtime : props.mtime || Date.now() / 1000
    , type : "x"
    , linkpath : ""
    , ustar : "ustar\0"
    , ustarver : "00"
    , uname : props.uname || ""
    , gname : props.gname || ""
    , devmaj : props.devmaj || 0
    , devmin : props.devmin || 0
    }

  EntryWriter.call(self, paxProps)
  self.props = paxProps

  // meta entries write their own header in end()
  self._meta = true
}
+
// Meta entries bypass the normal _header path: encode the pax records,
// then write our own header block followed by the records themselves.
ExtendedHeaderWriter.prototype.end = function () {
  var me = this

  if (me._ended) return
  me._ended = true

  // renders me.body and accumulates me.props.size
  me._encodeFields()

  if (me.props.size === 0) {
    // nothing to write!
    me._ready = true
    me._stream.end()
    return
  }

  // header block first, then each "length key=value\n" record
  me._stream.write(TarHeader.encode(me.props))
  me.body.forEach(function (l) {
    me._stream.write(l)
  })
  me._ready = true

  this._stream.end()
}
+
// Render this.fields into pax "length key=value\n" records (stored in
// this.body) and add their total byte length to the entry size.
ExtendedHeaderWriter.prototype._encodeFields = function () {
  this.body = []

  // A prefix is just an extension of the path; fold it back in before
  // encoding so the pax record carries the full path.
  var fields = this.fields
  if (fields.prefix) {
    fields.path = fields.prefix + "/" + fields.path
    fields.prefix = ""
  }

  encodeFields(fields, "", this.body, fields.noProprietary)

  this.props.size = this.body.reduce(function (total, line) {
    return total + line.length
  }, this.props.size)
}
+
// Recursively render `fields` into pax records, appending [len, data]
// buffer pairs to `body`. Nested objects get dotted key prefixes;
// `nop` (no-proprietary) suppresses non-POSIX keys.
function encodeFields (fields, prefix, body, nop) {
  // "%d %s=%s\n", <length>, <keyword>, <value>
  // The length is a decimal number, and includes itself and the \n
  // Numeric values are decimal strings.

  Object.keys(fields).forEach(function (k) {
    var val = fields[k]
      , numeric = tar.numeric[k]

    if (prefix) k = prefix + "." + k

    // already including NODETAR.type, don't need File=true also
    // (skips boolean flags whose key merely repeats the type value)
    if (k === fields.type && val === true) return

    switch (k) {
      // don't include anything that's always handled just fine
      // in the normal header, or only meaningful in the context
      // of nodetar
      case "mode":
      case "cksum":
      case "ustar":
      case "ustarver":
      case "prefix":
      case "basename":
      case "dirname":
      case "needExtended":
      case "block":
      case "filter":
        return

      case "rdev":
        if (val === 0) return
        break

      // SCHILY.* vendor keys originate from star and are widely read
      case "nlink":
      case "dev": // Truly a hero among men, Creator of Star!
      case "ino": // Speak his name with reverent awe!  It is:
        k = "SCHILY." + k
        break

      default: break
    }

    // recurse into plain objects; skip null/undefined; encode the rest
    if (val && typeof val === "object" &&
        !Buffer.isBuffer(val)) encodeFields(val, k, body, nop)
    else if (val === null || val === undefined) return
    else body.push.apply(body, encodeField(k, val, nop))
  })

  return body
}
+
// Render one pax record: "%d %s=%s\n", where the leading decimal
// length counts the entire record including the length digits and the
// trailing newline. Returns [lengthBuffer, recordBuffer], or [] when
// the field is proprietary and `nop` (no-proprietary) is set.
function encodeField (k, v, nop) {
  // lowercase keys must be valid, otherwise prefix with
  // "NODETAR."
  if (k.charAt(0) === k.charAt(0).toLowerCase()) {
    var m = k.split(".")[0]
    if (!tar.knownExtended[m]) k = "NODETAR." + k
  }

  // no proprietary
  if (nop && k.charAt(0) !== k.charAt(0).toLowerCase()) {
    return []
  }

  // Normalize numbers to decimal strings. (The original tested
  // `typeof val` — an undeclared variable — so this conversion never
  // actually ran; the parameter is `v`.)
  if (typeof v === "number") v = v.toString(10)

  var s = new Buffer(" " + k + "=" + v + "\n")
    , digits = Math.floor(Math.log(s.length) / Math.log(10)) + 1

  // if adding that many digits will make it go over that length,
  // then add one to it. For example, if the string is:
  // " foo=bar\n"
  // then that's 9 characters.  With the "9", that bumps the length
  // up to 10.  However, this is invalid:
  // "10 foo=bar\n"
  // but, since that's actually 11 characters, since 10 adds another
  // character to the length, and the length includes the number
  // itself.  In that case, just bump it up again.
  if (s.length + digits >= Math.pow(10, digits)) digits += 1

  var len = digits + s.length
  var lenBuf = new Buffer("" + len)

  // sanity check: the rendered length prefix must account for itself
  if (lenBuf.length + s.length !== len) {
    throw new Error("Bad length calculation\n"+
                    "len="+len+"\n"+
                    "lenBuf="+JSON.stringify(lenBuf.toString())+"\n"+
                    "lenBuf.length="+lenBuf.length+"\n"+
                    "digits="+digits+"\n"+
                    "s="+JSON.stringify(s.toString())+"\n"+
                    "s.length="+s.length)
  }

  return [lenBuf, s]
}
diff --git a/node_modules/tar/lib/extended-header.js b/node_modules/tar/lib/extended-header.js
new file mode 100644
index 000000000..4346d6c59
--- /dev/null
+++ b/node_modules/tar/lib/extended-header.js
@@ -0,0 +1,139 @@
+// An Entry consisting of:
+//
+// "%d %s=%s\n", <length>, <keyword>, <value>
+//
+// The length is a decimal number, and includes itself and the \n
+// \0 does not terminate anything. Only the length terminates the string.
+// Numeric values are decimal strings.
+
+module.exports = ExtendedHeader
+
+var Entry = require("./entry.js")
+ , inherits = require("inherits")
+ , tar = require("../tar.js")
+ , numeric = tar.numeric
+ , keyTrans = { "SCHILY.dev": "dev"
+ , "SCHILY.ino": "ino"
+ , "SCHILY.nlink": "nlink" }
+
// Parses a pax extended header body ("length key=value\n" records)
// into this.fields as the entry's data is written through it.
function ExtendedHeader () {
  Entry.apply(this, arguments)
  this.on("data", this._parse)
  this.fields = {}
  // parser state for the SIZE -> KEY -> VAL machine in _parse
  this._position = 0   // absolute offset within the body
  this._fieldPos = 0   // offset within the current record
  this._state = SIZE
  this._sizeBuf = []
  this._keyBuf = []
  this._valBuf = []
  this._size = -1      // declared length of the current record
  this._key = ""
}

inherits(ExtendedHeader, Entry, { _parse: parse })

// parser states, as consecutive small integers
var s = 0
  , states = ExtendedHeader.states = {}
  , SIZE = states.SIZE = s++
  , KEY = states.KEY = s++
  , VAL = states.VAL = s++
  , ERR = states.ERR = s++

// map each numeric value onto itself as well, so lookups by value work
Object.keys(states).forEach(function (s) {
  states[states[s]] = states[s]
})

// terminator for iteration over the numeric state values
states[s] = null
+
// char code values for comparison
var _0 = "0".charCodeAt(0)
  , _9 = "9".charCodeAt(0)
  , point = ".".charCodeAt(0)
  , A = "A".charCodeAt(0)   // was a duplicate `a`; A..Z pairs with a..z
  , Z = "Z".charCodeAt(0)
  , a = "a".charCodeAt(0)
  , z = "z".charCodeAt(0)
  , space = " ".charCodeAt(0)
  , eq = "=".charCodeAt(0)
  , cr = "\n".charCodeAt(0)  // NB: LF (newline), despite the name
+
// State-machine parser for pax records, fed one "data" chunk at a
// time. Records look like "<len> <key>=<value>\n", where <len> counts
// the whole record including itself and the newline. States advance
// SIZE -> KEY -> VAL and back; ERR is terminal.
function parse (c) {
  if (this._state === ERR) return

  for ( var i = 0, l = c.length
      ; i < l
      ; this._position++, this._fieldPos++, i++) {

    var b = c[i]

    // the declared length bounds the entire record
    if (this._size >= 0 && this._fieldPos > this._size) {
      error(this, "field exceeds length="+this._size)
      return
    }

    switch (this._state) {
      case ERR: return

      case SIZE:
        // accumulate decimal digits until the space separator
        if (b === space) {
          this._state = KEY
          this._size = parseInt(new Buffer(this._sizeBuf).toString(), 10)
          this._sizeBuf.length = 0
          continue
        }
        if (b < _0 || b > _9) {
          error(this, "expected [" + _0 + ".." + _9 + "], got " + b)
          return
        }
        this._sizeBuf.push(b)
        continue

      case KEY:
        // can be any char except =, not > size.
        if (b === eq) {
          this._state = VAL
          this._key = new Buffer(this._keyBuf).toString()
          // translate SCHILY.* vendor names to their plain equivalents
          if (keyTrans[this._key]) this._key = keyTrans[this._key]
          this._keyBuf.length = 0
          continue
        }
        this._keyBuf.push(b)
        continue

      case VAL:
        // field must end with cr (i.e. \n)
        if (this._fieldPos === this._size - 1) {
          if (b !== cr) {
            error(this, "expected \\n at end of field")
            return
          }
          var val = new Buffer(this._valBuf).toString()
          if (numeric[this._key]) {
            val = parseFloat(val)
          }
          this.fields[this._key] = val

          // reset for the next record
          this._valBuf.length = 0
          this._state = SIZE
          this._size = -1
          this._fieldPos = -1
          continue
        }
        this._valBuf.push(b)
        continue
    }
  }
}
+
// Report a malformed pax header and halt the parser. Must set the
// internal `_state` flag that parse() checks — the original assigned
// the unread `state` property, so parsing silently continued after an
// error was reported.
function error (me, msg) {
  msg = "invalid header: " + msg
      + "\nposition=" + me._position
      + "\nfield position=" + me._fieldPos

  me.error(msg)
  me._state = ERR
}
diff --git a/node_modules/tar/lib/extract.js b/node_modules/tar/lib/extract.js
new file mode 100644
index 000000000..e45974c72
--- /dev/null
+++ b/node_modules/tar/lib/extract.js
@@ -0,0 +1,64 @@
+// give it a tarball and a path, and it'll dump the contents
+
+module.exports = Extract
+
+var tar = require("../tar.js")
+ , fstream = require("fstream")
+ , inherits = require("inherits")
+ , path = require("path")
+
// Stream that parses a tarball piped into it and extracts the entries
// under a target directory via fstream.
function Extract (opts) {
  if (!(this instanceof Extract)) return new Extract(opts)
  tar.Parse.apply(this)

  // Allow Extract("some/path") shorthand. This normalization must
  // happen before any properties are assigned: setting properties on a
  // string primitive is silently ignored (and throws in strict mode).
  // The original assigned opts.type/opts.Directory before this check
  // and then again after it.
  if (typeof opts !== "object") {
    opts = { path: opts }
  }

  // better to drop in cwd?  seems more standard.
  opts.path = opts.path || path.resolve("node-tar-extract")

  // have to dump into a directory
  opts.type = "Directory"
  opts.Directory = true

  this._fst = fstream.Writer(opts)

  this.pause()
  var me = this

  // Hardlinks in tarballs are relative to the root
  // of the tarball.  So, they need to be resolved against
  // the target directory in order to be created properly.
  me.on("entry", function (entry) {
    if (entry.type !== "Link") return
    entry.linkpath = entry.props.linkpath =
      path.join(opts.path, path.join("/", entry.props.linkpath))
  })

  // don't start extracting until the target directory exists
  this._fst.on("ready", function () {
    me.pipe(me._fst, { end: false })
    me.resume()
  })

  // the fstream writer closing marks the end of the whole extraction
  this._fst.on("close", function () {
    me.emit("end")
    me.emit("close")
  })
}
+
+inherits(Extract, tar.Parse)
+
// Called when the parse stream hits EOF. The tarball must have emitted
// its closing null blocks by now; otherwise the archive was truncated.
Extract.prototype._streamEnd = function () {
  var me = this
  if (!me._ended) me.error("unexpected eof")
  me._fst.end()
  // my .end() is coming later.
}
diff --git a/node_modules/tar/lib/global-header-writer.js b/node_modules/tar/lib/global-header-writer.js
new file mode 100644
index 000000000..0bfc7b80a
--- /dev/null
+++ b/node_modules/tar/lib/global-header-writer.js
@@ -0,0 +1,14 @@
+module.exports = GlobalHeaderWriter
+
+var ExtendedHeaderWriter = require("./extended-header-writer.js")
+ , inherits = require("inherits")
+
+inherits(GlobalHeaderWriter, ExtendedHeaderWriter)
+
// Identical to an ExtendedHeaderWriter, except the entry type is "g":
// its fields apply globally, to every entry that follows in the
// archive.
function GlobalHeaderWriter (props) {
  if (!(this instanceof GlobalHeaderWriter)) {
    return new GlobalHeaderWriter(props)
  }
  ExtendedHeaderWriter.call(this, props)
  // override the "x" type set by the parent constructor
  this.props.type = "g"
}
diff --git a/node_modules/tar/lib/header.js b/node_modules/tar/lib/header.js
new file mode 100644
index 000000000..05b237c0c
--- /dev/null
+++ b/node_modules/tar/lib/header.js
@@ -0,0 +1,385 @@
+// parse a 512-byte header block to a data object, or vice-versa
+// If the data won't fit nicely in a simple header, then generate
+// the appropriate extended header file, and return that.
+
+module.exports = TarHeader
+
+var tar = require("../tar.js")
+ , fields = tar.fields
+ , fieldOffs = tar.fieldOffs
+ , fieldEnds = tar.fieldEnds
+ , fieldSize = tar.fieldSize
+ , numeric = tar.numeric
+ , assert = require("assert").ok
+ , space = " ".charCodeAt(0)
+ , slash = "/".charCodeAt(0)
+ , bslash = process.platform === "win32" ? "\\".charCodeAt(0) : null
+
// A TarHeader wraps one 512-byte ustar header block. Constructing
// with a block decodes it immediately; constructing empty yields an
// object that can be populated and then encode()d.
function TarHeader (block) {
  if (!(this instanceof TarHeader)) return new TarHeader(block)
  if (block) this.decode(block)
}

TarHeader.prototype =
  { decode : decode
  , encode: encode
  , calcSum: calcSum
  , checkSum: checkSum
  }

// static forms, usable without an instance
TarHeader.parseNumeric = parseNumeric
TarHeader.encode = encode
TarHeader.decode = decode
+
+// note that this will only do the normal ustar header, not any kind
+// of extended posix header file. If something doesn't fit comfortably,
+// then it will set obj.needExtended = true, and set the block to
+// the closest approximation.
// Encode `obj`'s properties into a 512-byte ustar header block (stored
// on obj.block and returned). Anything that doesn't fit — overlong
// paths, non-ascii text, out-of-range numbers — is written as the
// closest approximation and obj.needExtended is set, so the caller can
// emit a pax extended header.
function encode (obj) {
  if (!obj && !(this instanceof TarHeader)) throw new Error(
    "encode must be called on a TarHeader, or supplied an object")

  obj = obj || this
  var block = obj.block = new Buffer(512)

  // if the object has a "prefix", then that's actually an extension of
  // the path field.
  if (obj.prefix) {
    obj.path = obj.prefix + "/" + obj.path
    obj.prefix = ""
  }

  obj.needExtended = false

  // permission bits only; the file type lives in the "type" field
  // (511 === 0777, written in decimal for strict-mode compatibility)
  if (obj.mode) {
    if (typeof obj.mode === "string") obj.mode = parseInt(obj.mode, 8)
    obj.mode = obj.mode & 511
  }

  for (var f = 0; fields[f] !== null; f ++) {
    var field = fields[f]
      , off = fieldOffs[f]
      , end = fieldEnds[f]
      , ret

    switch (field) {
      case "cksum":
        // special, done below, after all the others
        break

      case "prefix":
        // special, this is an extension of the "path" field.
        break

      case "type":
        // convert from long name to a single char.
        var type = obj.type || "0"
        if (type.length > 1) {
          type = tar.types[obj.type]
          if (!type) type = "0"
        }
        writeText(block, off, end, type)
        break

      case "path":
        // uses the "prefix" field if > 100 bytes, but <= 255
        var pathLen = Buffer.byteLength(obj.path)
          , pathFSize = fieldSize[fields.path]
          , prefFSize = fieldSize[fields.prefix]

        // paths between 100 and 255 should use the prefix field.
        // longer than 255
        if (pathLen > pathFSize &&
            pathLen <= pathFSize + prefFSize) {
          // need to find a slash somewhere in the middle so that
          // path and prefix both fit in their respective fields
          var searchStart = pathLen - 1 - pathFSize
            , searchEnd = prefFSize
            , found = false
            , pathBuf = new Buffer(obj.path)

          for ( var s = searchStart
              ; (s <= searchEnd)
              ; s ++ ) {
            if (pathBuf[s] === slash || pathBuf[s] === bslash) {
              found = s
              break
            }
          }

          if (found !== false) {
            // declared with var — these were implicit globals before,
            // which leaks state across calls and throws in strict mode
            var prefix = pathBuf.slice(0, found).toString("utf8")
              , path = pathBuf.slice(found + 1).toString("utf8")

            ret = writeText(block, off, end, path)
            off = fieldOffs[fields.prefix]
            end = fieldEnds[fields.prefix]
            ret = writeText(block, off, end, prefix) || ret
            break
          }
        }

        // paths less than 100 chars don't need a prefix
        // and paths longer than 255 need an extended header and will fail
        // on old implementations no matter what we do here.
        // Null out the prefix, and fallthrough to default.
        var poff = fieldOffs[fields.prefix]
          , pend = fieldEnds[fields.prefix]
        writeText(block, poff, pend, "")
        // fallthrough

      // all other fields are numeric or text
      default:
        ret = numeric[field]
            ? writeNumeric(block, off, end, obj[field])
            : writeText(block, off, end, obj[field] || "")
        break
    }
    // any field that didn't fit flags the need for a pax header
    obj.needExtended = obj.needExtended || ret
  }

  // finally, the checksum over the assembled block
  var off = fieldOffs[fields.cksum]
    , end = fieldEnds[fields.cksum]

  writeNumeric(block, off, end, calcSum.call(this, block))

  return block
}
+
// Largest octal value that fits in a numeric field of a given width
// (one slot is reserved for the trailing NUL). Decimal literals for
// strict-mode compatibility; octal equivalents in comments. Anything
// negative or larger is stored in base-256 binary form instead.
var MAXNUM = { 12: 8589934591   // 077777777777
             , 11: 1073741823   // 07777777777
             , 8 : 2097151      // 07777777
             , 7 : 262143 }     // 0777777

// Write `num` into block[off..end) as a NUL-terminated, zero-padded
// octal string, or in base-256 when it doesn't fit. Returns true when
// the base-256 fallback was used (callers then need a pax header).
function writeNumeric (block, off, end, num) {
  var width = end - off
    , limit = MAXNUM[width] || 0

  num = num || 0

  // Dates are stored as epoch seconds.
  if (num instanceof Date ||
      Object.prototype.toString.call(num) === "[object Date]") {
    num = num.getTime() / 1000
  }

  // need an extended header if negative or too big.
  if (num > limit || num < 0) {
    write256(block, off, end, num)
    return true
  }

  // Octal rendering. Historical tar quirk: if the number is small
  // enough, a space goes between the octal digits and the NUL;
  // otherwise the digits run right up against it.
  var octal = Math.floor(num).toString(8)
  if (num < MAXNUM[width - 1]) octal += " "

  // left-pad with "0" characters up to width - 1
  if (octal.length < width) {
    octal = (new Array(width - octal.length).join("0")) + octal
  }

  if (octal.length !== width - 1) {
    throw new Error("invalid length: " + JSON.stringify(octal) + "\n" +
                    "expected: " + width)
  }

  block.write(octal, off, width, "utf8")
  block[end - 1] = 0
}
+
// Write `num` into block[off..end) in base-256 binary form (GNU tar
// extension), used for values that are negative or too large for
// octal. The first byte is a marker: 0x80 = positive, 0xFF = negative
// (payload in two's complement). The payload is big-endian, filling
// the bytes after the marker.
function write256 (block, off, end, num) {
  var buf = block.slice(off, end)
  var positive = num >= 0
  buf[0] = positive ? 0x80 : 0xFF

  // get the number as a base-256 tuple, least significant byte first
  if (!positive) num *= -1
  var tuple = []
  do {
    var n = num % 256
    tuple.push(n)
    num = (num - n) / 256
  } while (num)

  var bytes = tuple.length

  // pad between the marker and the payload: 0x00 for positive values,
  // 0xFF (sign extension) for negative ones
  var fill = buf.length - bytes
  for (var i = 1; i < fill; i ++) {
    buf[i] = positive ? 0 : 0xFF
  }

  // tuple is a base256 number, with [0] as the *least* significant byte
  // if it's negative, then we need to flip all the bits once we hit the
  // first non-zero bit.  The 2's-complement is (0x100 - n), and the 1's-
  // complement is (0xFF - n).
  //
  // The payload occupies buf[fill .. buf.length-1], so the target index
  // is fill + i - 1. (The original wrote buf[fill + i], which pushed
  // the least significant byte past the end of the buffer — silently
  // dropped — and shifted every payload byte one slot too high,
  // disagreeing with parse256's mirrored read loop.)
  var zero = true
  for (i = bytes; i > 0; i --) {
    var byte = tuple[bytes - i]
    if (positive) buf[fill + i - 1] = byte
    else if (zero && byte === 0) buf[fill + i - 1] = 0
    else if (zero) {
      zero = false
      buf[fill + i - 1] = 0x100 - byte
    } else buf[fill + i - 1] = 0xFF - byte
  }
}
+
// Write `str` into block[off..end) as UTF-8, NUL-padding the rest of
// the field. Returns true when the value needs a pax extended header:
// either it was truncated, or it contains non-ascii characters.
function writeText (block, off, end, str) {
  var byteLen = Buffer.byteLength(str)
    , room = end - off
    , writeLen = Math.min(byteLen, room)
    // multibyte characters make byteLength differ from str.length
    , needExtended = byteLen !== str.length || byteLen > writeLen

  if (writeLen > 0) block.write(str, off, writeLen, "utf8")

  // null-pad the remainder of the field
  for (var i = off + writeLen; i < end; i ++) {
    block[i] = 0
  }

  return needExtended
}
+
// Compute the header checksum: the byte sum of the whole 512-byte
// block, with the cksum field itself counted as spaces (per the ustar
// spec, since the checksum cannot include its own value).
function calcSum (block) {
  block = block || this.block
  assert(Buffer.isBuffer(block) && block.length === 512)

  if (!block) throw new Error("Need block to checksum")

  // now figure out what it would be if the cksum was "        "
  var sum = 0
    , start = fieldOffs[fields.cksum]
    , end = fieldEnds[fields.cksum]

  // bytes before the cksum field
  for (var i = 0; i < fieldOffs[fields.cksum]; i ++) {
    sum += block[i]
  }

  // the cksum field itself counts as spaces
  for (var i = start; i < end; i ++) {
    sum += space
  }

  // bytes after the cksum field
  for (var i = end; i < 512; i ++) {
    sum += block[i]
  }

  return sum
}


// Verify a block's stored checksum against the freshly computed one.
function checkSum (block) {
  var sum = calcSum.call(this, block)
  block = block || this.block

  var cksum = block.slice(fieldOffs[fields.cksum], fieldEnds[fields.cksum])
  cksum = parseNumeric(cksum)

  return cksum === sum
}
+
// Populate this TarHeader's fields from a 512-byte block. Also detects
// the xstar variant, which packs atime/ctime inside the prefix field.
function decode (block) {
  block = block || this.block
  assert(Buffer.isBuffer(block) && block.length === 512)

  this.block = block
  this.cksumValid = this.checkSum()

  var prefix = null

  // slice off each field.
  for (var f = 0; fields[f] !== null; f ++) {
    var field = fields[f]
      , val = block.slice(fieldOffs[f], fieldEnds[f])

    switch (field) {
      case "ustar":
        // if not ustar, then everything after that is just padding.
        if (val.toString() !== "ustar\0") {
          this.ustar = false
          return
        } else {
          this.ustar = val.toString()
        }
        break

      // prefix is special, since it might signal the xstar header
      case "prefix":
        // xstar layout inside the prefix field: 130-byte prefix, then
        // 12-byte atime and 12-byte ctime, each space-terminated
        var atime = parseNumeric(val.slice(131, 131 + 12))
          , ctime = parseNumeric(val.slice(131 + 12, 131 + 12 + 12))
        if ((val[130] === 0 || val[130] === space) &&
            typeof atime === "number" &&
            typeof ctime === "number" &&
            val[131 + 12] === space &&
            val[131 + 12 + 12] === space) {
          this.atime = atime
          this.ctime = ctime
          val = val.slice(0, 130)
        }
        prefix = val.toString("utf8").replace(/\0+$/, "")
        break

      // all other fields are null-padding text
      // or a number.
      default:
        if (numeric[field]) {
          this[field] = parseNumeric(val)
        } else {
          this[field] = val.toString("utf8").replace(/\0+$/, "")
        }
        break
    }
  }

  // if we got a prefix, then prepend it to the path.
  if (prefix) {
    this.path = prefix + "/" + this.path
  }
}
+
// Decode a base-256 encoded numeric field (used for values that don't
// fit in octal). buf[0] is the marker: 0x80 for positive, 0xFF for
// negative in two's complement. Returns null for any other marker.
function parse256 (buf) {
  var positive
  if (buf[0] === 0x80) positive = true
  else if (buf[0] === 0xFF) positive = false
  else return null

  // Build a base-256 tuple, least significant byte first, walking the
  // buffer from the end toward the marker byte.
  //
  // Negative values are two's complement: trailing zero bytes stay
  // zero, the first non-zero byte (from the least significant end) is
  // subtracted from 0x100, and the remaining bytes are
  // ones-complemented. `zero` must start out TRUE — it tracks "still
  // in the trailing zeros", mirroring write256. (It was initialized
  // to false, so the 0x100 branch never ran and every negative value
  // decoded one too small in magnitude, e.g. 0xFF...FF gave -0.)
  var zero = true
    , tuple = []
  for (var i = buf.length - 1; i > 0; i --) {
    var byte = buf[i]
    if (positive) tuple.push(byte)
    else if (zero && byte === 0) tuple.push(0)
    else if (zero) {
      zero = false
      tuple.push(0x100 - byte)
    } else tuple.push(0xFF - byte)
  }

  var sum = 0
  for (var i = 0, l = tuple.length; i < l; i ++) {
    sum += tuple[i] * Math.pow(256, i)
  }

  return positive ? sum : -1 * sum
}
+
// Parse a numeric header field: base-256 binary when the high bit of
// the first byte is set, otherwise a NUL/space-delimited octal string.
// Returns null when the field holds nothing parseable.
function parseNumeric (f) {
  if (f[0] & 0x80) return parse256(f)

  var text = f.toString("utf8").split("\0")[0].trim()
  var parsed = parseInt(text, 8)

  return isNaN(parsed) ? null : parsed
}
+
diff --git a/node_modules/tar/lib/pack.js b/node_modules/tar/lib/pack.js
new file mode 100644
index 000000000..ed44686c8
--- /dev/null
+++ b/node_modules/tar/lib/pack.js
@@ -0,0 +1,226 @@
+// pipe in an fstream, and it'll make a tarball.
+// key-value pair argument is global extended header props.
+
+module.exports = Pack
+
+var EntryWriter = require("./entry-writer.js")
+ , Stream = require("stream").Stream
+ , path = require("path")
+ , inherits = require("inherits")
+ , GlobalHeaderWriter = require("./global-header-writer.js")
+ , collect = require("fstream").collect
+ , eof = new Buffer(512)
+
+for (var i = 0; i < 512; i ++) eof[i] = 0
+
+inherits(Pack, Stream)
+
+function Pack (props) {
+ // console.error("-- p ctor")
+ var me = this
+ if (!(me instanceof Pack)) return new Pack(props)
+
+ if (props) me._noProprietary = props.noProprietary
+ else me._noProprietary = false
+
+ me._global = props
+
+ me.readable = true
+ me.writable = true
+ me._buffer = []
+ // console.error("-- -- set current to null in ctor")
+ me._currentEntry = null
+ me._processing = false
+
+ me._pipeRoot = null
+ me.on("pipe", function (src) {
+ if (src.root === me._pipeRoot) return
+ me._pipeRoot = src
+ src.on("end", function () {
+ me._pipeRoot = null
+ })
+ me.add(src)
+ })
+}
+
+Pack.prototype.addGlobal = function (props) {
+ // console.error("-- p addGlobal")
+ if (this._didGlobal) return
+ this._didGlobal = true
+
+ var me = this
+ GlobalHeaderWriter(props)
+ .on("data", function (c) {
+ me.emit("data", c)
+ })
+ .end()
+}
+
+Pack.prototype.add = function (stream) {
+ if (this._global && !this._didGlobal) this.addGlobal(this._global)
+
+ if (this._ended) return this.emit("error", new Error("add after end"))
+
+ collect(stream)
+ this._buffer.push(stream)
+ this._process()
+ this._needDrain = this._buffer.length > 0
+ return !this._needDrain
+}
+
+Pack.prototype.pause = function () {
+ this._paused = true
+ if (this._currentEntry) this._currentEntry.pause()
+ this.emit("pause")
+}
+
+Pack.prototype.resume = function () {
+ this._paused = false
+ if (this._currentEntry) this._currentEntry.resume()
+ this.emit("resume")
+ this._process()
+}
+
+Pack.prototype.end = function () {
+ this._ended = true
+ this._buffer.push(eof)
+ this._process()
+}
+
+Pack.prototype._process = function () {
+ var me = this
+ if (me._paused || me._processing) {
+ return
+ }
+
+ var entry = me._buffer.shift()
+
+ if (!entry) {
+ if (me._needDrain) {
+ me.emit("drain")
+ }
+ return
+ }
+
+ if (entry.ready === false) {
+ // console.error("-- entry is not ready", entry)
+ me._buffer.unshift(entry)
+ entry.on("ready", function () {
+ // console.error("-- -- ready!", entry)
+ me._process()
+ })
+ return
+ }
+
+ me._processing = true
+
+ if (entry === eof) {
+ // need 2 ending null blocks.
+ me.emit("data", eof)
+ me.emit("data", eof)
+ me.emit("end")
+ me.emit("close")
+ return
+ }
+
+ // Change the path to be relative to the root dir that was
+ // added to the tarball.
+ //
+ // XXX This should be more like how -C works, so you can
+ // explicitly set a root dir, and also explicitly set a pathname
+ // in the tarball to use. That way we can skip a lot of extra
+ // work when resolving symlinks for bundled dependencies in npm.
+
+ var root = path.dirname((entry.root || entry).path)
+ var wprops = {}
+
+ Object.keys(entry.props).forEach(function (k) {
+ wprops[k] = entry.props[k]
+ })
+
+ if (me._noProprietary) wprops.noProprietary = true
+
+ wprops.path = path.relative(root, entry.path)
+
+ // actually not a matter of opinion or taste.
+ if (process.platform === "win32") {
+ wprops.path = wprops.path.replace(/\\/g, "/")
+ }
+
+ switch (wprops.type) {
+ // sockets not supported
+ case "Socket":
+ return
+
+ case "Directory":
+ wprops.path += "/"
+ wprops.size = 0
+ break
+ case "Link":
+ var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
+ wprops.linkpath = path.relative(root, lp) || "."
+ wprops.size = 0
+ break
+ case "SymbolicLink":
+ var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
+ wprops.linkpath = path.relative(path.dirname(entry.path), lp) || "."
+ wprops.size = 0
+ break
+ }
+
+ // console.error("-- new writer", wprops)
+ // if (!wprops.type) {
+ // // console.error("-- no type?", entry.constructor.name, entry)
+ // }
+
+ // console.error("-- -- set current to new writer", wprops.path)
+ var writer = me._currentEntry = EntryWriter(wprops)
+
+ writer.parent = me
+
+ // writer.on("end", function () {
+ // // console.error("-- -- writer end", writer.path)
+ // })
+
+ writer.on("data", function (c) {
+ me.emit("data", c)
+ })
+
+ writer.on("header", function () {
+ Buffer.prototype.toJSON = function () {
+ return this.toString().split(/\0/).join(".")
+ }
+ // console.error("-- -- writer header %j", writer.props)
+ if (writer.props.size === 0) nextEntry()
+ })
+ writer.on("close", nextEntry)
+
+ var ended = false
+ function nextEntry () {
+ if (ended) return
+ ended = true
+
+ // console.error("-- -- writer close", writer.path)
+ // console.error("-- -- set current to null", wprops.path)
+ me._currentEntry = null
+ me._processing = false
+ me._process()
+ }
+
+ writer.on("error", function (er) {
+ // console.error("-- -- writer error", writer.path)
+ me.emit("error", er)
+ })
+
+ // if it's the root, then there's no need to add its entries,
+ // or data, since they'll be added directly.
+ if (entry === me._pipeRoot) {
+ // console.error("-- is the root, don't auto-add")
+ writer.add = null
+ }
+
+ entry.pipe(writer)
+}
+
+Pack.prototype.destroy = function () {}
+Pack.prototype.write = function () {}
diff --git a/node_modules/tar/lib/parse.js b/node_modules/tar/lib/parse.js
new file mode 100644
index 000000000..884e73dba
--- /dev/null
+++ b/node_modules/tar/lib/parse.js
@@ -0,0 +1,270 @@
+
+// A writable stream.
+// It emits "entry" events, which provide a readable stream that has
+// header info attached.
+
+module.exports = Parse.create = Parse
+
+var stream = require("stream")
+ , Stream = stream.Stream
+ , BlockStream = require("block-stream")
+ , tar = require("../tar.js")
+ , TarHeader = require("./header.js")
+ , Entry = require("./entry.js")
+ , BufferEntry = require("./buffer-entry.js")
+ , ExtendedHeader = require("./extended-header.js")
+ , assert = require("assert").ok
+ , inherits = require("inherits")
+ , fstream = require("fstream")
+
+// reading a tar is a lot like reading a directory
+// However, we're actually not going to run the ctor,
+// since it does a stat and various other stuff.
+// This inheritance gives us the pause/resume/pipe
+// behavior that is desired.
+inherits(Parse, fstream.Reader)
+
+function Parse () {
+ var me = this
+ if (!(me instanceof Parse)) return new Parse()
+
+ // doesn't apply fstream.Reader ctor?
+  // no, because we don't want to stat/etc, we just
+ // want to get the entry/add logic from .pipe()
+ Stream.apply(me)
+
+ me.writable = true
+ me.readable = true
+ me._stream = new BlockStream(512)
+ me.position = 0
+
+ me._stream.on("error", function (e) {
+ me.emit("error", e)
+ })
+
+ me._stream.on("data", function (c) {
+ me._process(c)
+ })
+
+ me._stream.on("end", function () {
+ me._streamEnd()
+ })
+
+ me._stream.on("drain", function () {
+ me.emit("drain")
+ })
+}
+
+// overridden in Extract class, since it needs to
+// wait for its DirWriter part to finish before
+// emitting "end"
+Parse.prototype._streamEnd = function () {
+ var me = this
+ if (!me._ended) me.error("unexpected eof")
+ me.emit("end")
+}
+
+// a tar reader is actually a filter, not just a readable stream.
+// So, you should pipe a tarball stream into it, and it needs these
+// write/end methods to do that.
+Parse.prototype.write = function (c) {
+ if (this._ended) {
+ // gnutar puts a LOT of nulls at the end.
+ // you can keep writing these things forever.
+ // Just ignore them.
+ for (var i = 0, l = c.length; i > l; i ++) {
+ if (c[i] !== 0) return this.error("write() after end()")
+ }
+ return
+ }
+ return this._stream.write(c)
+}
+
+Parse.prototype.end = function (c) {
+ this._ended = true
+ return this._stream.end(c)
+}
+
+// don't need to do anything, since we're just
+// proxying the data up from the _stream.
+// Just need to override the parent's "Not Implemented"
+// error-thrower.
+Parse.prototype._read = function () {}
+
+Parse.prototype._process = function (c) {
+ assert(c && c.length === 512, "block size should be 512")
+
+ // one of three cases.
+ // 1. A new header
+ // 2. A part of a file/extended header
+ // 3. One of two or more EOF null blocks
+
+ if (this._entry) {
+ var entry = this._entry
+ entry.write(c)
+ if (entry._remaining === 0) {
+ entry.end()
+ this._entry = null
+ }
+ } else {
+ // either zeroes or a header
+ var zero = true
+ for (var i = 0; i < 512 && zero; i ++) {
+ zero = c[i] === 0
+ }
+
+ // eof is *at least* 2 blocks of nulls, and then the end of the
+ // file. you can put blocks of nulls between entries anywhere,
+ // so appending one tarball to another is technically valid.
+ // ending without the eof null blocks is not allowed, however.
+ if (zero) {
+ this._ended = this._eofStarted
+ this._eofStarted = true
+ } else {
+ this._ended = this._eofStarted = false
+ this._startEntry(c)
+ }
+
+ }
+
+ this.position += 512
+}
+
+// take a header chunk, start the right kind of entry.
+Parse.prototype._startEntry = function (c) {
+ var header = new TarHeader(c)
+ , self = this
+ , entry
+ , ev
+ , EntryType
+ , onend
+ , meta = false
+
+ if (null === header.size || !header.cksumValid) {
+ var e = new Error("invalid tar file")
+ e.header = header
+ e.tar_file_offset = this.position
+ e.tar_block = this.position / 512
+ this.emit("error", e)
+ }
+
+ switch (tar.types[header.type]) {
+ case "File":
+ case "OldFile":
+ case "Link":
+ case "SymbolicLink":
+ case "CharacterDevice":
+ case "BlockDevice":
+ case "Directory":
+ case "FIFO":
+ case "ContiguousFile":
+ case "GNUDumpDir":
+ // start a file.
+ // pass in any extended headers
+    // These are the ones consumers are typically most interested in.
+ EntryType = Entry
+ ev = "entry"
+ break
+
+ case "GlobalExtendedHeader":
+ // extended headers that apply to the rest of the tarball
+ EntryType = ExtendedHeader
+ onend = function () {
+ self._global = self._global || {}
+ Object.keys(entry.fields).forEach(function (k) {
+ self._global[k] = entry.fields[k]
+ })
+ }
+ ev = "globalExtendedHeader"
+ meta = true
+ break
+
+ case "ExtendedHeader":
+ case "OldExtendedHeader":
+ // extended headers that apply to the next entry
+ EntryType = ExtendedHeader
+ onend = function () {
+ self._extended = entry.fields
+ }
+ ev = "extendedHeader"
+ meta = true
+ break
+
+ case "NextFileHasLongLinkpath":
+ // set linkpath=<contents> in extended header
+ EntryType = BufferEntry
+ onend = function () {
+ self._extended = self._extended || {}
+ self._extended.linkpath = entry.body
+ }
+ ev = "longLinkpath"
+ meta = true
+ break
+
+ case "NextFileHasLongPath":
+ case "OldGnuLongPath":
+ // set path=<contents> in file-extended header
+ EntryType = BufferEntry
+ onend = function () {
+ self._extended = self._extended || {}
+ self._extended.path = entry.body
+ }
+ ev = "longPath"
+ meta = true
+ break
+
+ default:
+ // all the rest we skip, but still set the _entry
+ // member, so that we can skip over their data appropriately.
+ // emit an event to say that this is an ignored entry type?
+ EntryType = Entry
+ ev = "ignoredEntry"
+ break
+ }
+
+ var global, extended
+ if (meta) {
+ global = extended = null
+ } else {
+ var global = this._global
+ var extended = this._extended
+
+ // extendedHeader only applies to one entry, so once we start
+ // an entry, it's over.
+ this._extended = null
+ }
+ entry = new EntryType(header, extended, global)
+ entry.meta = meta
+
+ // only proxy data events of normal files.
+ if (!meta) {
+ entry.on("data", function (c) {
+ me.emit("data", c)
+ })
+ }
+
+ if (onend) entry.on("end", onend)
+
+ this._entry = entry
+ var me = this
+
+ entry.on("pause", function () {
+ me.pause()
+ })
+
+ entry.on("resume", function () {
+ me.resume()
+ })
+
+ if (this.listeners("*").length) {
+ this.emit("*", ev, entry)
+ }
+
+ this.emit(ev, entry)
+
+ // Zero-byte entry. End immediately.
+ if (entry.props.size === 0) {
+ entry.end()
+ this._entry = null
+ }
+}
diff --git a/node_modules/tar/package.json b/node_modules/tar/package.json
new file mode 100644
index 000000000..0836977a1
--- /dev/null
+++ b/node_modules/tar/package.json
@@ -0,0 +1,26 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
+ "name": "tar",
+ "description": "tar for node",
+ "version": "0.1.11",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-tar.git"
+ },
+ "main": "tar.js",
+ "scripts": {
+ "test": "rm -rf test/tmp; tap test/*.js"
+ },
+ "engines": {
+ "node": "~0.5.9 || 0.6 || 0.7 || 0.8"
+ },
+ "dependencies": {
+ "inherits": "1.x",
+ "block-stream": "*",
+ "fstream": "~0.1.8"
+ },
+ "devDependencies": {
+ "tap": "0.x",
+ "rimraf": "1.x"
+ }
+}
diff --git a/node_modules/tar/tar.js b/node_modules/tar/tar.js
new file mode 100644
index 000000000..b9dbca489
--- /dev/null
+++ b/node_modules/tar/tar.js
@@ -0,0 +1,172 @@
+// field paths that every tar file must have.
+// header is padded to 512 bytes.
+var f = 0
+ , fields = {}
+ , path = fields.path = f++
+ , mode = fields.mode = f++
+ , uid = fields.uid = f++
+ , gid = fields.gid = f++
+ , size = fields.size = f++
+ , mtime = fields.mtime = f++
+ , cksum = fields.cksum = f++
+ , type = fields.type = f++
+ , linkpath = fields.linkpath = f++
+ , headerSize = 512
+ , blockSize = 512
+ , fieldSize = []
+
+fieldSize[path] = 100
+fieldSize[mode] = 8
+fieldSize[uid] = 8
+fieldSize[gid] = 8
+fieldSize[size] = 12
+fieldSize[mtime] = 12
+fieldSize[cksum] = 8
+fieldSize[type] = 1
+fieldSize[linkpath] = 100
+
+// "ustar\0" may introduce another bunch of headers.
+// these are optional, and will be nulled out if not present.
+
+var ustar = fields.ustar = f++
+ , ustarver = fields.ustarver = f++
+ , uname = fields.uname = f++
+ , gname = fields.gname = f++
+ , devmaj = fields.devmaj = f++
+ , devmin = fields.devmin = f++
+ , prefix = fields.prefix = f++
+ , fill = fields.fill = f++
+
+// terminate fields.
+fields[f] = null
+
+fieldSize[ustar] = 6
+fieldSize[ustarver] = 2
+fieldSize[uname] = 32
+fieldSize[gname] = 32
+fieldSize[devmaj] = 8
+fieldSize[devmin] = 8
+fieldSize[prefix] = 155
+fieldSize[fill] = 12
+
+// nb: prefix field may in fact be 130 bytes of prefix,
+// a null char, 12 bytes for atime, 12 bytes for ctime.
+//
+// To recognize this format:
+// 1. prefix[130] === ' ' or '\0'
+// 2. atime and ctime are octal numeric values
+// 3. atime and ctime have ' ' in their last byte
+
+var fieldEnds = {}
+ , fieldOffs = {}
+ , fe = 0
+for (var i = 0; i < f; i ++) {
+ fieldOffs[i] = fe
+ fieldEnds[i] = (fe += fieldSize[i])
+}
+
+// build a translation table of field paths.
+Object.keys(fields).forEach(function (f) {
+ if (fields[f] !== null) fields[fields[f]] = f
+})
+
+// different values of the 'type' field
+// paths match the values of Stats.isX() functions, where appropriate
+var types =
+ { 0: "File"
+ , "\0": "OldFile" // like 0
+ , 1: "Link"
+ , 2: "SymbolicLink"
+ , 3: "CharacterDevice"
+ , 4: "BlockDevice"
+ , 5: "Directory"
+ , 6: "FIFO"
+ , 7: "ContiguousFile" // like 0
+ // posix headers
+ , g: "GlobalExtendedHeader" // k=v for the rest of the archive
+ , x: "ExtendedHeader" // k=v for the next file
+ // vendor-specific stuff
+ , A: "SolarisACL" // skip
+ , D: "GNUDumpDir" // like 5, but with data, which should be skipped
+ , I: "Inode" // metadata only, skip
+ , K: "NextFileHasLongLinkpath" // data = link path of next file
+ , L: "NextFileHasLongPath" // data = path of next file
+ , M: "ContinuationFile" // skip
+ , N: "OldGnuLongPath" // like L
+ , S: "SparseFile" // skip
+ , V: "TapeVolumeHeader" // skip
+ , X: "OldExtendedHeader" // like x
+ }
+
+Object.keys(types).forEach(function (t) {
+ types[types[t]] = types[types[t]] || t
+})
+
+// values for the mode field
+var modes =
+ { suid: 04000 // set uid on extraction
+ , sgid: 02000 // set gid on extraction
+ , svtx: 01000 // set restricted deletion flag on dirs on extraction
+ , uread: 0400
+ , uwrite: 0200
+ , uexec: 0100
+ , gread: 040
+ , gwrite: 020
+ , gexec: 010
+ , oread: 4
+ , owrite: 2
+ , oexec: 1
+ , all: 07777
+ }
+
+var numeric =
+ { mode: true
+ , uid: true
+ , gid: true
+ , size: true
+ , mtime: true
+ , devmaj: true
+ , devmin: true
+ , cksum: true
+ , atime: true
+ , ctime: true
+ , dev: true
+ , ino: true
+ , nlink: true
+ }
+
+Object.keys(modes).forEach(function (t) {
+ modes[modes[t]] = modes[modes[t]] || t
+})
+
+var knownExtended =
+ { atime: true
+ , charset: true
+ , comment: true
+ , ctime: true
+ , gid: true
+ , gname: true
+ , linkpath: true
+ , mtime: true
+ , path: true
+ , realtime: true
+ , security: true
+ , size: true
+ , uid: true
+ , uname: true }
+
+
+exports.fields = fields
+exports.fieldSize = fieldSize
+exports.fieldOffs = fieldOffs
+exports.fieldEnds = fieldEnds
+exports.types = types
+exports.modes = modes
+exports.numeric = numeric
+exports.headerSize = headerSize
+exports.blockSize = blockSize
+exports.knownExtended = knownExtended
+
+exports.Pack = require("./lib/pack.js")
+exports.Parse = require("./lib/parse.js")
+exports.Extract = require("./lib/extract.js")
diff --git a/node_modules/which/LICENSE b/node_modules/which/LICENSE
new file mode 100644
index 000000000..05a401094
--- /dev/null
+++ b/node_modules/which/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/which/README.md b/node_modules/which/README.md
new file mode 100644
index 000000000..ff1eb531a
--- /dev/null
+++ b/node_modules/which/README.md
@@ -0,0 +1,5 @@
+The "which" util from npm's guts.
+
+Finds the first instance of a specified executable in the PATH
+environment variable. Does not cache the results, so `hash -r` is not
+needed when the PATH changes.
diff --git a/node_modules/which/bin/which b/node_modules/which/bin/which
new file mode 100755
index 000000000..8432ce2f6
--- /dev/null
+++ b/node_modules/which/bin/which
@@ -0,0 +1,14 @@
+#!/usr/bin/env node
+var which = require("../")
+if (process.argv.length < 3) {
+ console.error("Usage: which <thing>")
+ process.exit(1)
+}
+
+which(process.argv[2], function (er, thing) {
+ if (er) {
+ console.error(er.message)
+ process.exit(er.errno || 127)
+ }
+ console.log(thing)
+})
diff --git a/node_modules/which/package.json b/node_modules/which/package.json
new file mode 100644
index 000000000..02990697f
--- /dev/null
+++ b/node_modules/which/package.json
@@ -0,0 +1,17 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
+ "name": "which",
+ "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
+ "version": "1.0.2",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-which.git"
+ },
+ "main": "which.js",
+ "bin": "./bin/which",
+ "engines": {
+ "node": "*"
+ },
+ "dependencies": {},
+ "devDependencies": {}
+}
diff --git a/node_modules/which/which.js b/node_modules/which/which.js
new file mode 100644
index 000000000..b124ead67
--- /dev/null
+++ b/node_modules/which/which.js
@@ -0,0 +1,67 @@
+module.exports = which
+which.sync = whichSync
+
+var path = require("path")
+ , fs
+ , COLON = process.platform === "win32" ? ";" : ":"
+
+try {
+ fs = require("graceful-fs")
+} catch (ex) {
+ fs = require("fs")
+}
+
+// console.log(process.execPath)
+// console.log(process.argv)
+
+function isExe (mod, uid, gid) {
+ //console.error("isExe?", (mod & 0111).toString(8))
+ var ret = (mod & 0001)
+ || (mod & 0010) && process.getgid && gid === process.getgid()
+ || (mod & 0100) && process.getuid && uid === process.getuid()
+ //console.error("isExe?", ret)
+ return ret
+}
+function which (cmd, cb) {
+ if (cmd.charAt(0) === "/") return cb(null, cmd)
+ var pathEnv = (process.env.PATH || "").split(COLON)
+ , pathExt = [""]
+ if (process.platform === "win32") {
+ pathEnv.push(process.cwd())
+ pathExt = (process.env.PATHEXT || ".EXE").split(COLON)
+ }
+ //console.error("pathEnv", pathEnv)
+ ;(function F (i, l) {
+ if (i === l) return cb(new Error("not found: "+cmd))
+ var p = path.resolve(pathEnv[i], cmd)
+ ;(function E (ii, ll) {
+ if (ii === ll) return F(i + 1, l)
+ var ext = pathExt[ii]
+ //console.error(p + ext)
+ fs.stat(p + ext, function (er, stat) {
+ if (!er &&
+ stat &&
+ stat.isFile() &&
+ isExe(stat.mode, stat.uid, stat.gid)) {
+ //console.error("yes, exe!", p + ext)
+ return cb(null, p + ext)
+ }
+ return E(ii + 1, ll)
+ })
+ })(0, pathExt.length)
+ })(0, pathEnv.length)
+}
+
+
+function whichSync (cmd) {
+ if (cmd.charAt(0) === "/") return cmd
+ var pathEnv = (process.env.PATH || "").split(COLON)
+ for (var i = 0, l = pathEnv.length; i < l; i ++) {
+ var p = path.join(pathEnv[i], cmd)
+ if (p === process.execPath) return p
+ var stat
+ try { stat = fs.statSync(p) } catch (ex) {}
+ if (stat && isExe(stat.mode, stat.uid, stat.gid)) return p
+ }
+ throw new Error("not found: "+cmd)
+}