github.com/npm/cli.git
author    Forrest L Norvell <forrest@npmjs.com>  2014-10-17 09:34:11 +0400
committer Forrest L Norvell <forrest@npmjs.com>  2014-10-17 09:34:11 +0400
commit    d7dee3f3f7d9e7c2061a4ecb4dd93e3e4bfe4f2e
tree      18f67bb1fa27c82d4300269b20aa7f06546f41eb /node_modules/request
parent    3d75180c2cc79fa3adfa0e4cb783a27192189a65

request@2.45.0

Dependency updates, better proxy support, better compressed response handling, lots of 'use strict'.
Diffstat (limited to 'node_modules/request')
-rw-r--r--  node_modules/request/README.md | 126
-rwxr-xr-x  node_modules/request/index.js | 27
-rw-r--r--  node_modules/request/lib/cookies.js | 26
-rw-r--r--  node_modules/request/lib/copy.js | 4
-rw-r--r--  node_modules/request/lib/debug.js | 2
-rw-r--r--  node_modules/request/lib/helpers.js | 31
-rw-r--r--  node_modules/request/lib/optional.js | 2
-rw-r--r--  node_modules/request/node_modules/aws-sign2/package.json | 20
-rw-r--r--  node_modules/request/node_modules/bl/package.json | 22
-rw-r--r--  node_modules/request/node_modules/caseless/package.json | 21
-rw-r--r--  node_modules/request/node_modules/forever-agent/package.json | 21
-rw-r--r--  node_modules/request/node_modules/form-data/node_modules/async/package.json | 23
-rw-r--r--  node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json | 22
-rw-r--r--  node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json | 30
-rw-r--r--  node_modules/request/node_modules/form-data/node_modules/mime/package.json | 24
-rw-r--r--  node_modules/request/node_modules/form-data/package.json | 38
-rwxr-xr-x  node_modules/request/node_modules/hawk/node_modules/boom/package.json | 29
-rwxr-xr-x  node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json | 25
-rwxr-xr-x  node_modules/request/node_modules/hawk/node_modules/hoek/package.json | 33
-rwxr-xr-x  node_modules/request/node_modules/hawk/node_modules/sntp/package.json | 29
-rw-r--r--  node_modules/request/node_modules/http-signature/node_modules/asn1/package.json | 30
-rw-r--r--  node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json | 22
-rw-r--r--  node_modules/request/node_modules/http-signature/node_modules/ctype/package.json | 21
-rw-r--r--  node_modules/request/node_modules/http-signature/package.json | 26
-rw-r--r--  node_modules/request/node_modules/json-stringify-safe/package.json | 25
-rw-r--r--  node_modules/request/node_modules/mime-types/package.json | 30
-rw-r--r--  node_modules/request/node_modules/node-uuid/package.json | 20
-rw-r--r--  node_modules/request/node_modules/oauth-sign/package.json | 22
-rwxr-xr-x  node_modules/request/node_modules/qs/package.json | 26
-rw-r--r--  node_modules/request/node_modules/stringstream/package.json | 24
-rw-r--r--  node_modules/request/node_modules/tough-cookie/node_modules/punycode/package.json | 24
-rw-r--r--  node_modules/request/node_modules/tough-cookie/package.json | 22
-rw-r--r--  node_modules/request/node_modules/tunnel-agent/package.json | 21
-rwxr-xr-x  node_modules/request/package.json | 40
-rwxr-xr-x  node_modules/request/release.sh | 3
-rw-r--r--  node_modules/request/request.js | 313
36 files changed, 949 insertions, 275 deletions
diff --git a/node_modules/request/README.md b/node_modules/request/README.md
index fef1fea14..279faf228 100644
--- a/node_modules/request/README.md
+++ b/node_modules/request/README.md
@@ -168,6 +168,25 @@ header is *never* sent to the endpoint server, but only to the proxy
server. All other headers are sent as-is over the established
connection.
+### Controlling proxy behaviour using environment variables
+
+The following environment variables are respected by `request`:
+
+ * `HTTP_PROXY` / `http_proxy`
+ * `HTTPS_PROXY` / `https_proxy`
+ * `NO_PROXY` / `no_proxy`
+
+When `HTTP_PROXY` / `http_proxy` are set, they will be used to proxy non-SSL requests that do not have an explicit `proxy` configuration option present. Similarly, `HTTPS_PROXY` / `https_proxy` will be respected for SSL requests that do not have an explicit `proxy` configuration option. It is valid to define a proxy in one of the environment variables, but then override it for a specific request, using the `proxy` configuration option. Furthermore, the `proxy` configuration option can be explicitly set to false / null to opt out of proxying altogether for that request.
+
+`request` is also aware of the `NO_PROXY`/`no_proxy` environment variables. These variables provide a granular way to opt out of proxying on a per-host basis. The variable should contain a comma-separated list of hosts for which proxying is skipped. It is also possible to opt out of proxying when a particular destination port is used. Finally, the variable may be set to `*` to opt out of the implicit proxy configuration of the other environment variables.
+
+Here are some examples of valid `no_proxy` values:
+
+ * `google.com` - don't proxy HTTP/HTTPS requests to Google.
+ * `google.com:443` - don't proxy HTTPS requests to Google, but *do* proxy HTTP requests to Google.
+ * `google.com:443, yahoo.com:80` - don't proxy HTTPS requests to Google, and don't proxy HTTP requests to Yahoo!
+ * `*` - ignore `https_proxy`/`http_proxy` environment variables altogether.
+
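To make the proxy rules above concrete, here is a minimal sketch (an editorial illustration, not part of this commit; the URLs and proxy address are placeholders) showing how an explicit `proxy` option overrides the environment and how `proxy: false` opts a single request out of proxying:

```javascript
var request = require('request')

// With HTTP_PROXY / http_proxy exported in the environment,
// plain requests are proxied automatically:
request('http://example.com/', function (err, res, body) {
  // sent via the proxy taken from the environment
})

// An explicit `proxy` option overrides the environment for this request:
request({ uri: 'http://example.com/', proxy: 'http://proxy.internal:8080' },
  function (err, res, body) {
    // sent via proxy.internal:8080
  })

// Setting `proxy` to false opts this request out of proxying altogether:
request({ uri: 'http://example.com/', proxy: false },
  function (err, res, body) {
    // direct connection, environment variables ignored
  })
```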
## UNIX Socket
`request` supports the `unix://` protocol for all requests. The path is assumed to be absolute to the root of the host file system.
@@ -192,25 +211,68 @@ request.post('http://service.com/upload', {form:{key:'value'}})
request.post('http://service.com/upload').form({key:'value'})
```
-For `multipart/form-data` we use the [form-data](https://github.com/felixge/node-form-data) library by [@felixge](https://github.com/felixge). You don’t need to worry about piping the form object or setting the headers, `request` will handle that for you.
+For `multipart/form-data` we use the [form-data](https://github.com/felixge/node-form-data) library by [@felixge](https://github.com/felixge). For the most basic case, you can pass your upload form data via the `formData` option.
+
```javascript
-var r = request.post('http://service.com/upload', function optionalCallback (err, httpResponse, body) {
+var formData = {
+ my_field: 'my_value',
+ my_buffer: new Buffer([1, 2, 3]),
+ my_file: fs.createReadStream(__dirname + '/unicycle.jpg'),
+ remote_file: request(remoteFile)
+};
+request.post({url:'http://service.com/upload', formData: formData}, function optionalCallback(err, httpResponse, body) {
+ if (err) {
+ return console.error('upload failed:', err);
+ }
+ console.log('Upload successful! Server responded with:', body);
+});
+```
+
+For more advanced cases (like appending form data options) you'll need access to the form itself.
+
+```javascript
+var r = request.post('http://service.com/upload', function optionalCallback(err, httpResponse, body) {
if (err) {
return console.error('upload failed:', err);
}
console.log('Upload successful! Server responded with:', body);
})
-var form = r.form()
-form.append('my_field', 'my_value')
-form.append('my_buffer', new Buffer([1, 2, 3]))
-form.append('my_file', fs.createReadStream(path.join(__dirname, 'doodle.png')))
-form.append('remote_file', request('http://google.com/doodle.png'))
// Just like always, `r` is a writable stream, and can be used as such (you have until nextTick to pipe it, etc.)
// Alternatively, you can provide a callback (that's what this example does — see `optionalCallback` above).
+var form = r.form();
+form.append('my_field', 'my_value');
+form.append('my_buffer', new Buffer([1, 2, 3]));
+form.append('my_buffer', fs.createReadStream(__dirname + '/unicycle.jpg'), {filename: 'unicycle.jpg'});
+```
+See the [form-data](https://github.com/felixge/node-form-data) README for more information & examples.
+
+Some HTTP implementations require a newline/CRLF before, after, or both before and after the boundary of a `multipart/form-data` request (this has been observed in .NET WebAPI version 4.0). You can enable a boundary preamble and/or postamble CRLF by setting the `preambleCRLF` and/or `postambleCRLF` request options to `true`.
+
+```javascript
+ request(
+ { method: 'PUT'
+ , preambleCRLF: true
+ , postambleCRLF: true
+ , uri: 'http://service.com/upload'
+ , multipart:
+ [ { 'content-type': 'application/json'
+ , body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})
+ }
+ , { body: 'I am an attachment' }
+ ]
+ }
+ , function (error, response, body) {
+      if (error) {
+        return console.error('upload failed:', error);
+ }
+ console.log('Upload successful! Server responded with:', body);
+ }
+ )
```
+
## HTTP Authentication
```javascript
@@ -343,10 +405,12 @@ The first argument can be either a `url` or an `options` object. The only requir
* `auth` - A hash containing values `user` || `username`, `pass` || `password`, and `sendImmediately` (optional). See documentation above.
* `json` - sets `body` to a JSON representation of the value and adds `Content-type: application/json` header. Additionally, parses the response body as JSON.
* `multipart` - (experimental) array of objects which contains their own headers and `body` attribute. Sends `multipart/related` request. See example below.
+* `preambleCRLF` - append a newline/CRLF before the boundary of your `multipart/form-data` request.
+* `postambleCRLF` - append a newline/CRLF at the end of the boundary of your `multipart/form-data` request.
* `followRedirect` - follow HTTP 3xx responses as redirects (default: `true`). This property can also be implemented as function which gets `response` object as a single argument and should return `true` if redirects should continue or `false` otherwise.
* `followAllRedirects` - follow non-GET HTTP 3xx responses as redirects (default: `false`)
* `maxRedirects` - the maximum number of redirects to follow (default: `10`)
-* `encoding` - Encoding to be used on `setEncoding` of response data. If `null`, the `body` is returned as a `Buffer`.
+* `encoding` - Encoding to be used on `setEncoding` of response data. If `null`, the `body` is returned as a `Buffer`. Anything else **(including the default value of `undefined`)** will be passed as the [encoding](http://nodejs.org/api/buffer.html#buffer_buffer) parameter to `toString()` (meaning this is effectively `utf8` by default).
* `pool` - A hash object containing the agents for these requests. If omitted, the request will use the global pool (which is set to node's default `maxSockets`)
* `pool.maxSockets` - Integer containing the maximum amount of sockets in the pool.
* `timeout` - Integer containing the number of milliseconds to wait for a request to respond before aborting the request
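As an aside on the `encoding` option listed above, the following sketch (an editorial illustration, not part of this commit; the URL is a placeholder) contrasts the default string decoding with `encoding: null`, which hands the body back as a raw `Buffer` and is what you want for binary downloads:

```javascript
var request = require('request')

// Default behaviour: the body arrives as a utf8-decoded string.
request('http://example.com/image.png', function (err, res, body) {
  console.log(typeof body) // 'string' (binary data gets mangled by decoding)
})

// With encoding: null, the body is a raw Buffer, safe for binary payloads.
request({ uri: 'http://example.com/image.png', encoding: null },
  function (err, res, body) {
    console.log(Buffer.isBuffer(body)) // true
  })
```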
@@ -358,7 +422,7 @@ The first argument can be either a `url` or an `options` object. The only requir
* `aws` - `object` containing AWS signing information. Should have the properties `key`, `secret`. Also requires the property `bucket`, unless you’re specifying your `bucket` as part of the path, or the request doesn’t use a bucket (i.e. GET Services)
* `httpSignature` - Options for the [HTTP Signature Scheme](https://github.com/joyent/node-http-signature/blob/master/http_signing.md) using [Joyent's library](https://github.com/joyent/node-http-signature). The `keyId` and `key` properties must be specified. See the docs for other options.
* `localAddress` - Local interface to bind for network connections.
-* `gzip` - If `true`, add an `Accept-Encoding` header to request compressed content encodings from the server (if not already present) and decode supported content encodings in the response.
+* `gzip` - If `true`, add an `Accept-Encoding` header to request compressed content encodings from the server (if not already present) and decode supported content encodings in the response. **Note:** Automatic decoding of the response content is performed on the body data returned through `request` (both through the `request` stream and passed to the callback function) but is not performed on the `response` stream (available from the `response` event) which is the unmodified `http.IncomingMessage` object which may contain compressed data. See example below.
* `tunnel` - If `true`, then *always* use a tunneling proxy. If
`false` (default), then tunneling will only be used if the
destination is `https`, or if a previous request in the redirect
@@ -487,6 +551,37 @@ request.jar()
)
```
+For backwards-compatibility, response compression is not supported by default.
+To accept gzip-compressed responses, set the `gzip` option to `true`. Note
+that the body data passed through `request` is automatically decompressed
+while the response object is unmodified and will contain compressed data if
+the server sent a compressed response.
+
+```javascript
+ var request = require('request')
+ request(
+ { method: 'GET'
+ , uri: 'http://www.google.com'
+ , gzip: true
+ }
+ , function (error, response, body) {
+ // body is the decompressed response body
+ console.log('server encoded the data as: ' + (response.headers['content-encoding'] || 'identity'))
+ console.log('the decoded data is: ' + body)
+ }
+ ).on('data', function(data) {
+ // decompressed data as it is received
+ console.log('decoded chunk: ' + data)
+ })
+ .on('response', function(response) {
+ // unmodified http.IncomingMessage object
+ response.on('data', function(data) {
+ // compressed data as it is received
+ console.log('received ' + data.length + ' bytes of compressed data')
+ })
+ })
+```
+
Cookies are disabled by default (else, they would be used in subsequent requests). To enable cookies, set `jar` to `true` (either in `defaults` or `options`) and install `tough-cookie`.
```javascript
@@ -510,10 +605,11 @@ OR
```javascript
// `npm install --save tough-cookie` before this works
-var j = request.jar()
-var cookie = request.cookie('your_cookie_here')
-j.setCookie(cookie, uri);
-request({url: 'http://www.google.com', jar: j}, function () {
+var j = request.jar();
+var cookie = request.cookie('key1=value1');
+var url = 'http://www.google.com';
+j.setCookieSync(cookie, url);
+request({url: url, jar: j}, function () {
request('http://images.google.com')
})
```
@@ -523,8 +619,8 @@ To inspect your cookie jar after a request
```javascript
var j = request.jar()
request({url: 'http://www.google.com', jar: j}, function () {
- var cookie_string = j.getCookieStringSync(uri); // "key1=value1; key2=value2; ..."
- var cookies = j.getCookiesSync(uri);
+ var cookie_string = j.getCookieString(uri); // "key1=value1; key2=value2; ..."
+ var cookies = j.getCookies(uri);
// [{key: 'key1', value: 'value1', domain: "www.google.com", ...}, ...]
})
```
diff --git a/node_modules/request/index.js b/node_modules/request/index.js
index ced8bd955..ad81073a4 100755
--- a/node_modules/request/index.js
+++ b/node_modules/request/index.js
@@ -12,9 +12,10 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+'use strict';
+
var extend = require('util')._extend
, cookies = require('./lib/cookies')
- , copy = require('./lib/copy')
, helpers = require('./lib/helpers')
, isFunction = helpers.isFunction
, constructObject = helpers.constructObject
@@ -102,7 +103,7 @@ request.cookie = function (str) {
}
request.defaults = function (options, requester) {
-
+ var self = this
var wrap = function (method) {
var headerlessOptions = function (options) {
options = extend({}, options)
@@ -125,7 +126,7 @@ request.defaults = function (options, requester) {
params.options.headers = getHeaders(params, options)
if (isFunction(requester)) {
- if (method === request) {
+ if (method === self) {
method = requester
} else {
params.options._requester = requester
@@ -136,16 +137,16 @@ request.defaults = function (options, requester) {
}
}
- defaults = wrap(this)
- defaults.get = wrap(this.get)
- defaults.patch = wrap(this.patch)
- defaults.post = wrap(this.post)
- defaults.put = wrap(this.put)
- defaults.head = wrap(this.head)
- defaults.del = wrap(this.del)
- defaults.cookie = wrap(this.cookie)
- defaults.jar = this.jar
- defaults.defaults = this.defaults
+ var defaults = wrap(self)
+ defaults.get = wrap(self.get)
+ defaults.patch = wrap(self.patch)
+ defaults.post = wrap(self.post)
+ defaults.put = wrap(self.put)
+ defaults.head = wrap(self.head)
+ defaults.del = wrap(self.del)
+ defaults.cookie = wrap(self.cookie)
+ defaults.jar = self.jar
+ defaults.defaults = self.defaults
return defaults
}
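Because the rewired `defaults` above still exposes the verb helpers and `defaults.defaults`, defaulted requesters can be layered. A rough usage sketch (an editorial illustration, not part of this commit; the header value and URL are placeholders):

```javascript
var request = require('request')

// First layer of defaults: always send/parse JSON.
var api = request.defaults({ json: true })

// A second layer can then build on the first.
var authed = api.defaults({ headers: { authorization: 'token abc123' } })

authed.get('http://example.com/resource', function (err, res, body) {
  // the json default from `api` is still in effect for this request
})
```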
diff --git a/node_modules/request/lib/cookies.js b/node_modules/request/lib/cookies.js
index 07a9f36e3..2e2bcab6b 100644
--- a/node_modules/request/lib/cookies.js
+++ b/node_modules/request/lib/cookies.js
@@ -1,3 +1,5 @@
+'use strict';
+
var optional = require('./optional')
, tough = optional('tough-cookie')
, Cookie = tough && tough.Cookie
@@ -13,16 +15,28 @@ exports.parse = function(str) {
return Cookie.parse(str)
};
+// Adapt the sometimes-Async api of tough.CookieJar to our requirements
+function RequestJar() {
+ this._jar = new CookieJar();
+}
+RequestJar.prototype.setCookie = function(cookieOrStr, uri, options) {
+ return this._jar.setCookieSync(cookieOrStr, uri, options || {});
+};
+RequestJar.prototype.getCookieString = function(uri) {
+ return this._jar.getCookieStringSync(uri);
+};
+RequestJar.prototype.getCookies = function(uri) {
+ return this._jar.getCookiesSync(uri);
+};
+
exports.jar = function() {
if (!CookieJar) {
// tough-cookie not loaded, return a stub object:
return {
- setCookieSync: function(){},
- getCookieStringSync: function(){},
- getCookiesSync: function(){}
+ setCookie: function(){},
+ getCookieString: function(){},
+ getCookies: function(){}
};
}
- var jar = new CookieJar();
- jar._jar = jar; // For backwards compatibility
- return jar;
+ return new RequestJar();
};
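For orientation, here is how the synchronous facade defined above is driven (an editorial illustration, not part of this commit; it assumes `tough-cookie` is installed so `exports.jar()` returns a `RequestJar` rather than the stub, and the URL and cookie string are placeholders):

```javascript
// `cookies` stands for the module defined above (lib/cookies.js).
var cookies = require('./lib/cookies')

var jar = cookies.jar()

// RequestJar hides tough-cookie's *Sync suffixes behind plain method names:
jar.setCookie('key1=value1', 'http://www.example.com')

console.log(jar.getCookieString('http://www.example.com')) // "key1=value1"
console.log(jar.getCookies('http://www.example.com'))      // array of Cookie objects
```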
diff --git a/node_modules/request/lib/copy.js b/node_modules/request/lib/copy.js
index 56831ff80..2f55ac3cf 100644
--- a/node_modules/request/lib/copy.js
+++ b/node_modules/request/lib/copy.js
@@ -1,3 +1,5 @@
+'use strict';
+
module.exports =
function copy (obj) {
var o = {}
@@ -5,4 +7,4 @@ function copy (obj) {
o[i] = obj[i]
})
return o
-}
\ No newline at end of file
+}
diff --git a/node_modules/request/lib/debug.js b/node_modules/request/lib/debug.js
index d61ec88d7..119217594 100644
--- a/node_modules/request/lib/debug.js
+++ b/node_modules/request/lib/debug.js
@@ -1,3 +1,5 @@
+'use strict';
+
var util = require('util')
, request = require('../index')
;
diff --git a/node_modules/request/lib/helpers.js b/node_modules/request/lib/helpers.js
index 27a38f4a8..2ee455025 100644
--- a/node_modules/request/lib/helpers.js
+++ b/node_modules/request/lib/helpers.js
@@ -1,4 +1,9 @@
+'use strict';
+
var extend = require('util')._extend
+ , jsonSafeStringify = require('json-stringify-safe')
+ , crypto = require('crypto')
+ ;
function constructObject(initialObject) {
initialObject = initialObject || {}
@@ -43,8 +48,34 @@ function paramsHaveRequestBody(params) {
)
}
+function safeStringify (obj) {
+ var ret
+ try {
+ ret = JSON.stringify(obj)
+ } catch (e) {
+ ret = jsonSafeStringify(obj)
+ }
+ return ret
+}
+
+function md5 (str) {
+ return crypto.createHash('md5').update(str).digest('hex')
+}
+
+function isReadStream (rs) {
+ return rs.readable && rs.path && rs.mode;
+}
+
+function toBase64 (str) {
+ return (new Buffer(str || "", "ascii")).toString("base64")
+}
+
exports.isFunction = isFunction
exports.constructObject = constructObject
exports.constructOptionsFrom = constructOptionsFrom
exports.filterForCallback = filterForCallback
exports.paramsHaveRequestBody = paramsHaveRequestBody
+exports.safeStringify = safeStringify
+exports.md5 = md5
+exports.isReadStream = isReadStream
+exports.toBase64 = toBase64
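A quick sketch of the new helpers in use (an editorial illustration, not part of this commit; `lib/helpers` is internal to `request`, so the require path below is only indicative):

```javascript
var helpers = require('./lib/helpers')

// safeStringify falls back to json-stringify-safe when JSON.stringify
// throws, e.g. on circular structures:
var circular = { name: 'loop' }
circular.self = circular
console.log(helpers.safeStringify(circular)) // circular reference replaced instead of throwing

console.log(helpers.md5('abc'))      // hex-encoded md5 digest of 'abc'
console.log(helpers.toBase64('a:b')) // 'YTpi'

var fs = require('fs')
console.log(helpers.isReadStream(fs.createReadStream(__filename))) // truthy for fs read streams
```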
diff --git a/node_modules/request/lib/optional.js b/node_modules/request/lib/optional.js
index af0cc15f8..5fe427068 100644
--- a/node_modules/request/lib/optional.js
+++ b/node_modules/request/lib/optional.js
@@ -1,3 +1,5 @@
+'use strict';
+
module.exports = function(moduleName) {
try {
return module.parent.require(moduleName);
diff --git a/node_modules/request/node_modules/aws-sign2/package.json b/node_modules/request/node_modules/aws-sign2/package.json
index dd0dc5ea2..9104550c8 100644
--- a/node_modules/request/node_modules/aws-sign2/package.json
+++ b/node_modules/request/node_modules/aws-sign2/package.json
@@ -22,10 +22,26 @@
"bugs": {
"url": "https://github.com/mikeal/aws-sign/issues"
},
- "homepage": "https://github.com/mikeal/aws-sign",
"_id": "aws-sign2@0.5.0",
- "_shasum": "c57103f7a17fc037f02d7c2e64b602ea223f7d63",
+ "dist": {
+ "shasum": "c57103f7a17fc037f02d7c2e64b602ea223f7d63",
+ "tarball": "http://registry.npmjs.org/aws-sign2/-/aws-sign2-0.5.0.tgz"
+ },
"_from": "aws-sign2@>=0.5.0 <0.6.0",
+ "_npmVersion": "1.3.2",
+ "_npmUser": {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_shasum": "c57103f7a17fc037f02d7c2e64b602ea223f7d63",
"_resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.5.0.tgz",
+ "homepage": "https://github.com/mikeal/aws-sign",
"scripts": {}
}
diff --git a/node_modules/request/node_modules/bl/package.json b/node_modules/request/node_modules/bl/package.json
index f9f40c43b..a5692e03c 100644
--- a/node_modules/request/node_modules/bl/package.json
+++ b/node_modules/request/node_modules/bl/package.json
@@ -33,13 +33,29 @@
"faucet": "~0.0.1",
"brtapsauce": "~0.3.0"
},
- "readme": "# bl *(BufferList)*\n\n**A Node.js Buffer list collector, reader and streamer thingy.**\n\n[![NPM](https://nodei.co/npm/bl.png?downloads=true&downloadRank=true)](https://nodei.co/npm/bl/)\n[![NPM](https://nodei.co/npm-dl/bl.png?months=6&height=3)](https://nodei.co/npm/bl/)\n\n**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them!\n\nThe original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently.\n\n```js\nconst BufferList = require('bl')\n\nvar bl = new BufferList()\nbl.append(new Buffer('abcd'))\nbl.append(new Buffer('efg'))\nbl.append('hi') // bl will also accept & convert Strings\nbl.append(new Buffer('j'))\nbl.append(new Buffer([ 0x3, 0x4 ]))\n\nconsole.log(bl.length) // 12\n\nconsole.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij'\nconsole.log(bl.slice(3, 10).toString('ascii')) // 'defghij'\nconsole.log(bl.slice(3, 6).toString('ascii')) // 'def'\nconsole.log(bl.slice(3, 8).toString('ascii')) // 'defgh'\nconsole.log(bl.slice(5, 10).toString('ascii')) // 'fghij'\n\n// or just use toString!\nconsole.log(bl.toString()) // 'abcdefghij\\u0003\\u0004'\nconsole.log(bl.toString('ascii', 3, 8)) // 'defgh'\nconsole.log(bl.toString('ascii', 5, 10)) // 'fghij'\n\n// other standard Buffer readables\nconsole.log(bl.readUInt16BE(10)) // 0x0304\nconsole.log(bl.readUInt16LE(10)) // 0x0403\n```\n\nGive it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**:\n\n```js\nconst bl = require('bl')\n , fs = require('fs')\n\nfs.createReadStream('README.md')\n .pipe(bl(function (err, data) { // note 'new' isn't strictly required\n // `data` is a complete Buffer object containing the full data\n console.log(data.toString())\n }))\n```\n\nNote that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. 
If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream.\n\nOr to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!):\n```js\nconst hyperquest = require('hyperquest')\n , bl = require('bl')\n , url = 'https://raw.github.com/rvagg/bl/master/README.md'\n\nhyperquest(url).pipe(bl(function (err, data) {\n console.log(data.toString())\n}))\n```\n\nOr, use it as a readable stream to recompose a list of Buffers to an output source:\n\n```js\nconst BufferList = require('bl')\n , fs = require('fs')\n\nvar bl = new BufferList()\nbl.append(new Buffer('abcd'))\nbl.append(new Buffer('efg'))\nbl.append(new Buffer('hi'))\nbl.append(new Buffer('j'))\n\nbl.pipe(fs.createWriteStream('gibberish.txt'))\n```\n\n## API\n\n * <a href=\"#ctor\"><code><b>new BufferList([ callback ])</b></code></a>\n * <a href=\"#length\"><code>bl.<b>length</b></code></a>\n * <a href=\"#append\"><code>bl.<b>append(buffer)</b></code></a>\n * <a href=\"#get\"><code>bl.<b>get(index)</b></code></a>\n * <a href=\"#slice\"><code>bl.<b>slice([ start[, end ] ])</b></code></a>\n * <a href=\"#copy\"><code>bl.<b>copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])</b></code></a>\n * <a href=\"#duplicate\"><code>bl.<b>duplicate()</b></code></a>\n * <a href=\"#consume\"><code>bl.<b>consume(bytes)</b></code></a>\n * <a href=\"#toString\"><code>bl.<b>toString([encoding, [ start, [ end ]]])</b></code></a>\n * <a href=\"#readXX\"><code>bl.<b>readDoubleBE()</b></code>, <code>bl.<b>readDoubleLE()</b></code>, <code>bl.<b>readFloatBE()</b></code>, <code>bl.<b>readFloatLE()</b></code>, <code>bl.<b>readInt32BE()</b></code>, <code>bl.<b>readInt32LE()</b></code>, <code>bl.<b>readUInt32BE()</b></code>, <code>bl.<b>readUInt32LE()</b></code>, <code>bl.<b>readInt16BE()</b></code>, <code>bl.<b>readInt16LE()</b></code>, <code>bl.<b>readUInt16BE()</b></code>, <code>bl.<b>readUInt16LE()</b></code>, <code>bl.<b>readInt8()</b></code>, <code>bl.<b>readUInt8()</b></code></a>\n * <a href=\"#streams\">Streams</a>\n\n--------------------------------------------------------\n<a name=\"ctor\"></a>\n### new BufferList([ callback | buffer | buffer array ])\nThe constructor takes an optional callback, if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance, when `bl.end()` is called (i.e. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream.\n\nNormally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` object.\n\n`new` is not strictly required, if you don't instantiate a new object, it will be done automatically for you so you can create a new instance simply with:\n\n```js\nvar bl = require('bl')\nvar myinstance = bl()\n\n// equivilant to:\n\nvar BufferList = require('bl')\nvar myinstance = new BufferList()\n```\n\n--------------------------------------------------------\n<a name=\"length\"></a>\n### bl.length\nGet the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. 
Should accurately represent the total number of bytes that can be read from the list.\n\n--------------------------------------------------------\n<a name=\"append\"></a>\n### bl.append(buffer)\n`append(buffer)` adds an additional buffer or BufferList to the internal list.\n\n--------------------------------------------------------\n<a name=\"get\"></a>\n### bl.get(index)\n`get()` will return the byte at the specified index.\n\n--------------------------------------------------------\n<a name=\"slice\"></a>\n### bl.slice([ start, [ end ] ])\n`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.\n\nIf the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer.\n\n--------------------------------------------------------\n<a name=\"copy\"></a>\n### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])\n`copy()` copies the content of the list in the `dest` buffer, starting from `destStart` and containing the bytes within the range specified with `srcStart` to `srcEnd`. `destStart`, `start` and `end` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively.\n\n--------------------------------------------------------\n<a name=\"duplicate\"></a>\n### bl.duplicate()\n`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remains the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list.Example:\n\n```js\nvar bl = new BufferList()\n\nbl.append('hello')\nbl.append(' world')\nbl.append('\\n')\n\nbl.duplicate().pipe(process.stdout, { end: false })\n\nconsole.log(bl.toString())\n```\n\n--------------------------------------------------------\n<a name=\"consume\"></a>\n### bl.consume(bytes)\n`consume()` will shift bytes *off the start of the list*. The number of bytes consumed don't need to line up with the sizes of the internal Buffers&mdash;initial offsets will be calculated accordingly in order to give you a consistent view of the data.\n\n--------------------------------------------------------\n<a name=\"toString\"></a>\n### bl.toString([encoding, [ start, [ end ]]])\n`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. 
See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information.\n\n--------------------------------------------------------\n<a name=\"readXX\"></a>\n### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8()\n\nAll of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently.\n\nSee the <b><code>[Buffer](http://nodejs.org/docs/latest/api/buffer.html)</code></b> documentation for how these work.\n\n--------------------------------------------------------\n<a name=\"streams\"></a>\n### Streams\n**bl** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **bl** instance.\n\n--------------------------------------------------------\n\n## Contributors\n\n**bl** is brought to you by the following hackers:\n\n * [Rod Vagg](https://github.com/rvagg)\n * [Matteo Collina](https://github.com/mcollina)\n * [Jarett Cruger](https://github.com/jcrugzz)\n\n=======\n\n## License\n\n**bl** is Copyright (c) 2013 Rod Vagg [@rvagg](https://twitter.com/rvagg) and licenced under the MIT licence. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details.\n",
- "readmeFilename": "README.md",
+ "gitHead": "4987a76bf6bafd7616e62c7023c955e62f3a9461",
"bugs": {
"url": "https://github.com/rvagg/bl/issues"
},
"_id": "bl@0.9.3",
"_shasum": "c41eff3e7cb31bde107c8f10076d274eff7f7d44",
"_from": "bl@>=0.9.0 <0.10.0",
- "_resolved": "https://registry.npmjs.org/bl/-/bl-0.9.3.tgz"
+ "_npmVersion": "1.4.27",
+ "_npmUser": {
+ "name": "rvagg",
+ "email": "rod@vagg.org"
+ },
+ "maintainers": [
+ {
+ "name": "rvagg",
+ "email": "rod@vagg.org"
+ }
+ ],
+ "dist": {
+ "shasum": "c41eff3e7cb31bde107c8f10076d274eff7f7d44",
+ "tarball": "http://registry.npmjs.org/bl/-/bl-0.9.3.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/bl/-/bl-0.9.3.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/node_modules/request/node_modules/caseless/package.json b/node_modules/request/node_modules/caseless/package.json
index 9554229f6..3725c1026 100644
--- a/node_modules/request/node_modules/caseless/package.json
+++ b/node_modules/request/node_modules/caseless/package.json
@@ -27,11 +27,26 @@
"devDependencies": {
"tape": "^2.10.2"
},
- "readme": "## Caseless -- wrap an object to set and get property with caseless semantics but also preserve caseing.\n\nThis library is incredibly useful when working with HTTP headers. It allows you to get/set/check for headers in a caseless manor while also preserving the caseing of headers the first time they are set.\n\n## Usage\n\n```javascript\nvar headers = {}\n , c = caseless(headers)\n ;\nc.set('a-Header', 'asdf')\nc.get('a-header') === 'asdf'\n```\n\n## has(key)\n\nHas takes a name and if it finds a matching header will return that header name with the preserved caseing it was set with.\n\n```javascript\nc.has('a-header') === 'a-Header'\n```\n\n## set(key, value[, clobber=true])\n\nSet is fairly straight forward except that if the header exists and clobber is disabled it will add `','+value` to the existing header.\n\n```javascript\nc.set('a-Header', 'fdas')\nc.set('a-HEADER', 'more', false)\nc.get('a-header') === 'fdsa,more'\n```\n\n## swap(key)\n\nSwaps the casing of a header with the new one that is passed in.\n\n```javascript\nvar headers = {}\n , c = caseless(headers)\n ;\nc.set('a-Header', 'fdas')\nc.swap('a-HEADER')\nc.has('a-header') === 'a-HEADER'\nheaders === {'a-HEADER': 'fdas'}\n```\n",
- "readmeFilename": "README.md",
"homepage": "https://github.com/mikeal/caseless",
"_id": "caseless@0.6.0",
"_shasum": "8167c1ab8397fb5bb95f96d28e5a81c50f247ac4",
"_from": "caseless@>=0.6.0 <0.7.0",
- "_resolved": "https://registry.npmjs.org/caseless/-/caseless-0.6.0.tgz"
+ "_npmVersion": "1.4.9",
+ "_npmUser": {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ }
+ ],
+ "dist": {
+ "shasum": "8167c1ab8397fb5bb95f96d28e5a81c50f247ac4",
+ "tarball": "http://registry.npmjs.org/caseless/-/caseless-0.6.0.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/caseless/-/caseless-0.6.0.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/node_modules/request/node_modules/forever-agent/package.json b/node_modules/request/node_modules/forever-agent/package.json
index cc5d6e32c..1bb444193 100644
--- a/node_modules/request/node_modules/forever-agent/package.json
+++ b/node_modules/request/node_modules/forever-agent/package.json
@@ -17,15 +17,30 @@
"engines": {
"node": "*"
},
- "readme": "forever-agent\n=============\n\nHTTP Agent that keeps socket connections alive between keep-alive requests. Formerly part of mikeal/request, now a standalone module.\n",
- "readmeFilename": "README.md",
"bugs": {
"url": "https://github.com/mikeal/forever-agent/issues"
},
"homepage": "https://github.com/mikeal/forever-agent",
"_id": "forever-agent@0.5.2",
- "_shasum": "6d0e09c4921f94a27f63d3b49c5feff1ea4c5130",
+ "dist": {
+ "shasum": "6d0e09c4921f94a27f63d3b49c5feff1ea4c5130",
+ "tarball": "http://registry.npmjs.org/forever-agent/-/forever-agent-0.5.2.tgz"
+ },
"_from": "forever-agent@>=0.5.0 <0.6.0",
+ "_npmVersion": "1.3.21",
+ "_npmUser": {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_shasum": "6d0e09c4921f94a27f63d3b49c5feff1ea4c5130",
"_resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.5.2.tgz",
+ "readme": "ERROR: No README data found!",
"scripts": {}
}
diff --git a/node_modules/request/node_modules/form-data/node_modules/async/package.json b/node_modules/request/node_modules/form-data/node_modules/async/package.json
index e1993095e..e8f9ed81b 100644
--- a/node_modules/request/node_modules/form-data/node_modules/async/package.json
+++ b/node_modules/request/node_modules/form-data/node_modules/async/package.json
@@ -35,11 +35,26 @@
"scripts": {
"test": "nodeunit test/test-async.js"
},
- "readme": "# Async.js\n\n[![Build Status via Travis CI](https://travis-ci.org/caolan/async.svg?branch=master)](https://travis-ci.org/caolan/async)\n\n\nAsync is a utility module which provides straight-forward, powerful functions\nfor working with asynchronous JavaScript. Although originally designed for\nuse with [Node.js](http://nodejs.org), it can also be used directly in the\nbrowser. Also supports [component](https://github.com/component/component).\n\nAsync provides around 20 functions that include the usual 'functional'\nsuspects (`map`, `reduce`, `filter`, `each`…) as well as some common patterns\nfor asynchronous control flow (`parallel`, `series`, `waterfall`…). All these\nfunctions assume you follow the Node.js convention of providing a single\ncallback as the last argument of your `async` function.\n\n\n## Quick Examples\n\n```javascript\nasync.map(['file1','file2','file3'], fs.stat, function(err, results){\n // results is now an array of stats for each file\n});\n\nasync.filter(['file1','file2','file3'], fs.exists, function(results){\n // results now equals an array of the existing files\n});\n\nasync.parallel([\n function(){ ... },\n function(){ ... }\n], callback);\n\nasync.series([\n function(){ ... },\n function(){ ... }\n]);\n```\n\nThere are many more functions available so take a look at the docs below for a\nfull list. This module aims to be comprehensive, so if you feel anything is\nmissing please create a GitHub issue for it.\n\n## Common Pitfalls\n\n### Binding a context to an iterator\n\nThis section is really about `bind`, not about `async`. If you are wondering how to\nmake `async` execute your iterators in a given context, or are confused as to why\na method of another library isn't working as an iterator, study this example:\n\n```js\n// Here is a simple object with an (unnecessarily roundabout) squaring method\nvar AsyncSquaringLibrary = {\n squareExponent: 2,\n square: function(number, callback){ \n var result = Math.pow(number, this.squareExponent);\n setTimeout(function(){\n callback(null, result);\n }, 200);\n }\n};\n\nasync.map([1, 2, 3], AsyncSquaringLibrary.square, function(err, result){\n // result is [NaN, NaN, NaN]\n // This fails because the `this.squareExponent` expression in the square\n // function is not evaluated in the context of AsyncSquaringLibrary, and is\n // therefore undefined.\n});\n\nasync.map([1, 2, 3], AsyncSquaringLibrary.square.bind(AsyncSquaringLibrary), function(err, result){\n // result is [1, 4, 9]\n // With the help of bind we can attach a context to the iterator before\n // passing it to async. Now the square function will be executed in its \n // 'home' AsyncSquaringLibrary context and the value of `this.squareExponent`\n // will be as expected.\n});\n```\n\n## Download\n\nThe source is available for download from\n[GitHub](http://github.com/caolan/async).\nAlternatively, you can install using Node Package Manager (`npm`):\n\n npm install async\n\n__Development:__ [async.js](https://github.com/caolan/async/raw/master/lib/async.js) - 29.6kb Uncompressed\n\n## In the Browser\n\nSo far it's been tested in IE6, IE7, IE8, FF3.6 and Chrome 5. 
\n\nUsage:\n\n```html\n<script type=\"text/javascript\" src=\"async.js\"></script>\n<script type=\"text/javascript\">\n\n async.map(data, asyncProcess, function(err, results){\n alert(results);\n });\n\n</script>\n```\n\n## Documentation\n\n### Collections\n\n* [`each`](#each)\n* [`eachSeries`](#eachSeries)\n* [`eachLimit`](#eachLimit)\n* [`map`](#map)\n* [`mapSeries`](#mapSeries)\n* [`mapLimit`](#mapLimit)\n* [`filter`](#filter)\n* [`filterSeries`](#filterSeries)\n* [`reject`](#reject)\n* [`rejectSeries`](#rejectSeries)\n* [`reduce`](#reduce)\n* [`reduceRight`](#reduceRight)\n* [`detect`](#detect)\n* [`detectSeries`](#detectSeries)\n* [`sortBy`](#sortBy)\n* [`some`](#some)\n* [`every`](#every)\n* [`concat`](#concat)\n* [`concatSeries`](#concatSeries)\n\n### Control Flow\n\n* [`series`](#seriestasks-callback)\n* [`parallel`](#parallel)\n* [`parallelLimit`](#parallellimittasks-limit-callback)\n* [`whilst`](#whilst)\n* [`doWhilst`](#doWhilst)\n* [`until`](#until)\n* [`doUntil`](#doUntil)\n* [`forever`](#forever)\n* [`waterfall`](#waterfall)\n* [`compose`](#compose)\n* [`seq`](#seq)\n* [`applyEach`](#applyEach)\n* [`applyEachSeries`](#applyEachSeries)\n* [`queue`](#queue)\n* [`priorityQueue`](#priorityQueue)\n* [`cargo`](#cargo)\n* [`auto`](#auto)\n* [`retry`](#retry)\n* [`iterator`](#iterator)\n* [`apply`](#apply)\n* [`nextTick`](#nextTick)\n* [`times`](#times)\n* [`timesSeries`](#timesSeries)\n\n### Utils\n\n* [`memoize`](#memoize)\n* [`unmemoize`](#unmemoize)\n* [`log`](#log)\n* [`dir`](#dir)\n* [`noConflict`](#noConflict)\n\n\n## Collections\n\n<a name=\"forEach\" />\n<a name=\"each\" />\n### each(arr, iterator, callback)\n\nApplies the function `iterator` to each item in `arr`, in parallel.\nThe `iterator` is called with an item from the list, and a callback for when it\nhas finished. If the `iterator` passes an error to its `callback`, the main\n`callback` (for the `each` function) is immediately called with the error.\n\nNote, that since this function applies `iterator` to each item in parallel,\nthere is no guarantee that the iterator functions will complete in order.\n\n__Arguments__\n\n* `arr` - An array to iterate over.\n* `iterator(item, callback)` - A function to apply to each item in `arr`.\n The iterator is passed a `callback(err)` which must be called once it has \n completed. 
If no error has occured, the `callback` should be run without \n arguments or with an explicit `null` argument.\n* `callback(err)` - A callback which is called when all `iterator` functions\n have finished, or an error occurs.\n\n__Examples__\n\n\n```js\n// assuming openFiles is an array of file names and saveFile is a function\n// to save the modified contents of that file:\n\nasync.each(openFiles, saveFile, function(err){\n // if any of the saves produced an error, err would equal that error\n});\n```\n\n```js\n// assuming openFiles is an array of file names \n\nasync.each(openFiles, function( file, callback) {\n \n // Perform operation on file here.\n console.log('Processing file ' + file);\n \n if( file.length > 32 ) {\n console.log('This file name is too long');\n callback('File name too long');\n } else {\n // Do work to process file here\n console.log('File processed');\n callback();\n }\n}, function(err){\n // if any of the file processing produced an error, err would equal that error\n if( err ) {\n // One of the iterations produced an error.\n // All processing will now stop.\n console.log('A file failed to process');\n } else {\n console.log('All files have been processed successfully');\n }\n});\n```\n\n---------------------------------------\n\n<a name=\"forEachSeries\" />\n<a name=\"eachSeries\" />\n### eachSeries(arr, iterator, callback)\n\nThe same as [`each`](#each), only `iterator` is applied to each item in `arr` in\nseries. The next `iterator` is only called once the current one has completed. \nThis means the `iterator` functions will complete in order.\n\n\n---------------------------------------\n\n<a name=\"forEachLimit\" />\n<a name=\"eachLimit\" />\n### eachLimit(arr, limit, iterator, callback)\n\nThe same as [`each`](#each), only no more than `limit` `iterator`s will be simultaneously \nrunning at any time.\n\nNote that the items in `arr` are not processed in batches, so there is no guarantee that \nthe first `limit` `iterator` functions will complete before any others are started.\n\n__Arguments__\n\n* `arr` - An array to iterate over.\n* `limit` - The maximum number of `iterator`s to run at any time.\n* `iterator(item, callback)` - A function to apply to each item in `arr`.\n The iterator is passed a `callback(err)` which must be called once it has \n completed. If no error has occured, the callback should be run without \n arguments or with an explicit `null` argument.\n* `callback(err)` - A callback which is called when all `iterator` functions\n have finished, or an error occurs.\n\n__Example__\n\n```js\n// Assume documents is an array of JSON objects and requestApi is a\n// function that interacts with a rate-limited REST api.\n\nasync.eachLimit(documents, 20, requestApi, function(err){\n // if any of the saves produced an error, err would equal that error\n});\n```\n\n---------------------------------------\n\n<a name=\"map\" />\n### map(arr, iterator, callback)\n\nProduces a new array of values by mapping each value in `arr` through\nthe `iterator` function. The `iterator` is called with an item from `arr` and a\ncallback for when it has finished processing. Each of these callback takes 2 arguments: \nan `error`, and the transformed item from `arr`. If `iterator` passes an error to this \ncallback, the main `callback` (for the `map` function) is immediately called with the error.\n\nNote, that since this function applies the `iterator` to each item in parallel,\nthere is no guarantee that the `iterator` functions will complete in order. 
\nHowever, the results array will be in the same order as the original `arr`.\n\n__Arguments__\n\n* `arr` - An array to iterate over.\n* `iterator(item, callback)` - A function to apply to each item in `arr`.\n The iterator is passed a `callback(err, transformed)` which must be called once \n it has completed with an error (which can be `null`) and a transformed item.\n* `callback(err, results)` - A callback which is called when all `iterator`\n functions have finished, or an error occurs. Results is an array of the\n transformed items from the `arr`.\n\n__Example__\n\n```js\nasync.map(['file1','file2','file3'], fs.stat, function(err, results){\n // results is now an array of stats for each file\n});\n```\n\n---------------------------------------\n\n<a name=\"mapSeries\" />\n### mapSeries(arr, iterator, callback)\n\nThe same as [`map`](#map), only the `iterator` is applied to each item in `arr` in\nseries. The next `iterator` is only called once the current one has completed. \nThe results array will be in the same order as the original.\n\n\n---------------------------------------\n\n<a name=\"mapLimit\" />\n### mapLimit(arr, limit, iterator, callback)\n\nThe same as [`map`](#map), only no more than `limit` `iterator`s will be simultaneously \nrunning at any time.\n\nNote that the items are not processed in batches, so there is no guarantee that \nthe first `limit` `iterator` functions will complete before any others are started.\n\n__Arguments__\n\n* `arr` - An array to iterate over.\n* `limit` - The maximum number of `iterator`s to run at any time.\n* `iterator(item, callback)` - A function to apply to each item in `arr`.\n The iterator is passed a `callback(err, transformed)` which must be called once \n it has completed with an error (which can be `null`) and a transformed item.\n* `callback(err, results)` - A callback which is called when all `iterator`\n calls have finished, or an error occurs. The result is an array of the\n transformed items from the original `arr`.\n\n__Example__\n\n```js\nasync.mapLimit(['file1','file2','file3'], 1, fs.stat, function(err, results){\n // results is now an array of stats for each file\n});\n```\n\n---------------------------------------\n\n<a name=\"select\" />\n<a name=\"filter\" />\n### filter(arr, iterator, callback)\n\n__Alias:__ `select`\n\nReturns a new array of all the values in `arr` which pass an async truth test.\n_The callback for each `iterator` call only accepts a single argument of `true` or\n`false`; it does not accept an error argument first!_ This is in-line with the\nway node libraries work with truth tests like `fs.exists`. This operation is\nperformed in parallel, but the results array will be in the same order as the\noriginal.\n\n__Arguments__\n\n* `arr` - An array to iterate over.\n* `iterator(item, callback)` - A truth test to apply to each item in `arr`.\n The `iterator` is passed a `callback(truthValue)`, which must be called with a \n boolean argument once it has completed.\n* `callback(results)` - A callback which is called after all the `iterator`\n functions have finished.\n\n__Example__\n\n```js\nasync.filter(['file1','file2','file3'], fs.exists, function(results){\n // results now equals an array of the existing files\n});\n```\n\n---------------------------------------\n\n<a name=\"selectSeries\" />\n<a name=\"filterSeries\" />\n### filterSeries(arr, iterator, callback)\n\n__Alias:__ `selectSeries`\n\nThe same as [`filter`](#filter) only the `iterator` is applied to each item in `arr` in\nseries. 
The next `iterator` is only called once the current one has completed. \nThe results array will be in the same order as the original.\n\n---------------------------------------\n\n<a name=\"reject\" />\n### reject(arr, iterator, callback)\n\nThe opposite of [`filter`](#filter). Removes values that pass an `async` truth test.\n\n---------------------------------------\n\n<a name=\"rejectSeries\" />\n### rejectSeries(arr, iterator, callback)\n\nThe same as [`reject`](#reject), only the `iterator` is applied to each item in `arr`\nin series.\n\n\n---------------------------------------\n\n<a name=\"reduce\" />\n### reduce(arr, memo, iterator, callback)\n\n__Aliases:__ `inject`, `foldl`\n\nReduces `arr` into a single value using an async `iterator` to return\neach successive step. `memo` is the initial state of the reduction. \nThis function only operates in series. \n\nFor performance reasons, it may make sense to split a call to this function into \na parallel map, and then use the normal `Array.prototype.reduce` on the results. \nThis function is for situations where each step in the reduction needs to be async; \nif you can get the data before reducing it, then it's probably a good idea to do so.\n\n__Arguments__\n\n* `arr` - An array to iterate over.\n* `memo` - The initial state of the reduction.\n* `iterator(memo, item, callback)` - A function applied to each item in the\n array to produce the next step in the reduction. The `iterator` is passed a\n `callback(err, reduction)` which accepts an optional error as its first \n argument, and the state of the reduction as the second. If an error is \n passed to the callback, the reduction is stopped and the main `callback` is \n immediately called with the error.\n* `callback(err, result)` - A callback which is called after all the `iterator`\n functions have finished. Result is the reduced value.\n\n__Example__\n\n```js\nasync.reduce([1,2,3], 0, function(memo, item, callback){\n // pointless async:\n process.nextTick(function(){\n callback(null, memo + item)\n });\n}, function(err, result){\n // result is now equal to the last value of memo, which is 6\n});\n```\n\n---------------------------------------\n\n<a name=\"reduceRight\" />\n### reduceRight(arr, memo, iterator, callback)\n\n__Alias:__ `foldr`\n\nSame as [`reduce`](#reduce), only operates on `arr` in reverse order.\n\n\n---------------------------------------\n\n<a name=\"detect\" />\n### detect(arr, iterator, callback)\n\nReturns the first value in `arr` that passes an async truth test. The\n`iterator` is applied in parallel, meaning the first iterator to return `true` will\nfire the detect `callback` with that result. That means the result might not be\nthe first item in the original `arr` (in terms of order) that passes the test.\n\nIf order within the original `arr` is important, then look at [`detectSeries`](#detectSeries).\n\n__Arguments__\n\n* `arr` - An array to iterate over.\n* `iterator(item, callback)` - A truth test to apply to each item in `arr`.\n The iterator is passed a `callback(truthValue)` which must be called with a \n boolean argument once it has completed.\n* `callback(result)` - A callback which is called as soon as any iterator returns\n `true`, or after all the `iterator` functions have finished. 
Result will be\n the first item in the array that passes the truth test (iterator) or the\n value `undefined` if none passed.\n\n__Example__\n\n```js\nasync.detect(['file1','file2','file3'], fs.exists, function(result){\n // result now equals the first file in the list that exists\n});\n```\n\n---------------------------------------\n\n<a name=\"detectSeries\" />\n### detectSeries(arr, iterator, callback)\n\nThe same as [`detect`](#detect), only the `iterator` is applied to each item in `arr`\nin series. This means the result is always the first in the original `arr` (in\nterms of array order) that passes the truth test.\n\n\n---------------------------------------\n\n<a name=\"sortBy\" />\n### sortBy(arr, iterator, callback)\n\nSorts a list by the results of running each `arr` value through an async `iterator`.\n\n__Arguments__\n\n* `arr` - An array to iterate over.\n* `iterator(item, callback)` - A function to apply to each item in `arr`.\n The iterator is passed a `callback(err, sortValue)` which must be called once it\n has completed with an error (which can be `null`) and a value to use as the sort\n criteria.\n* `callback(err, results)` - A callback which is called after all the `iterator`\n functions have finished, or an error occurs. Results is the items from\n the original `arr` sorted by the values returned by the `iterator` calls.\n\n__Example__\n\n```js\nasync.sortBy(['file1','file2','file3'], function(file, callback){\n fs.stat(file, function(err, stats){\n callback(err, stats.mtime);\n });\n}, function(err, results){\n // results is now the original array of files sorted by\n // modified date\n});\n```\n\n__Sort Order__\n\nBy modifying the callback parameter the sorting order can be influenced:\n\n```js\n//ascending order\nasync.sortBy([1,9,3,5], function(x, callback){\n callback(err, x);\n}, function(err,result){\n //result callback\n} );\n\n//descending order\nasync.sortBy([1,9,3,5], function(x, callback){\n callback(err, x*-1); //<- x*-1 instead of x, turns the order around\n}, function(err,result){\n //result callback\n} );\n```\n\n---------------------------------------\n\n<a name=\"some\" />\n### some(arr, iterator, callback)\n\n__Alias:__ `any`\n\nReturns `true` if at least one element in the `arr` satisfies an async test.\n_The callback for each iterator call only accepts a single argument of `true` or\n`false`; it does not accept an error argument first!_ This is in-line with the\nway node libraries work with truth tests like `fs.exists`. Once any iterator\ncall returns `true`, the main `callback` is immediately called.\n\n__Arguments__\n\n* `arr` - An array to iterate over.\n* `iterator(item, callback)` - A truth test to apply to each item in the array\n in parallel. The iterator is passed a callback(truthValue) which must be \n called with a boolean argument once it has completed.\n* `callback(result)` - A callback which is called as soon as any iterator returns\n `true`, or after all the iterator functions have finished. 
Result will be\n either `true` or `false` depending on the values of the async tests.\n\n__Example__\n\n```js\nasync.some(['file1','file2','file3'], fs.exists, function(result){\n // if result is true then at least one of the files exists\n});\n```\n\n---------------------------------------\n\n<a name=\"every\" />\n### every(arr, iterator, callback)\n\n__Alias:__ `all`\n\nReturns `true` if every element in `arr` satisfies an async test.\n_The callback for each `iterator` call only accepts a single argument of `true` or\n`false`; it does not accept an error argument first!_ This is in-line with the\nway node libraries work with truth tests like `fs.exists`.\n\n__Arguments__\n\n* `arr` - An array to iterate over.\n* `iterator(item, callback)` - A truth test to apply to each item in the array\n in parallel. The iterator is passed a callback(truthValue) which must be \n called with a boolean argument once it has completed.\n* `callback(result)` - A callback which is called after all the `iterator`\n functions have finished. Result will be either `true` or `false` depending on\n the values of the async tests.\n\n__Example__\n\n```js\nasync.every(['file1','file2','file3'], fs.exists, function(result){\n // if result is true then every file exists\n});\n```\n\n---------------------------------------\n\n<a name=\"concat\" />\n### concat(arr, iterator, callback)\n\nApplies `iterator` to each item in `arr`, concatenating the results. Returns the\nconcatenated list. The `iterator`s are called in parallel, and the results are\nconcatenated as they return. There is no guarantee that the results array will\nbe returned in the original order of `arr` passed to the `iterator` function.\n\n__Arguments__\n\n* `arr` - An array to iterate over.\n* `iterator(item, callback)` - A function to apply to each item in `arr`.\n The iterator is passed a `callback(err, results)` which must be called once it \n has completed with an error (which can be `null`) and an array of results.\n* `callback(err, results)` - A callback which is called after all the `iterator`\n functions have finished, or an error occurs. Results is an array containing\n the concatenated results of the `iterator` function.\n\n__Example__\n\n```js\nasync.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files){\n // files is now a list of filenames that exist in the 3 directories\n});\n```\n\n---------------------------------------\n\n<a name=\"concatSeries\" />\n### concatSeries(arr, iterator, callback)\n\nSame as [`concat`](#concat), but executes in series instead of parallel.\n\n\n## Control Flow\n\n<a name=\"series\" />\n### series(tasks, [callback])\n\nRun the functions in the `tasks` array in series, each one running once the previous\nfunction has completed. If any functions in the series pass an error to its\ncallback, no more functions are run, and `callback` is immediately called with the value of the error. \nOtherwise, `callback` receives an array of results when `tasks` have completed.\n\nIt is also possible to use an object instead of an array. Each property will be\nrun as a function, and the results will be passed to the final `callback` as an object\ninstead of an array. 
This can be a more readable way of handling results from\n[`series`](#series).\n\n**Note** that while many implementations preserve the order of object properties, the\n[ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) \nexplicitly states that\n\n> The mechanics and order of enumerating the properties is not specified.\n\nSo if you rely on the order in which your series of functions are executed, and want\nthis to work on all platforms, consider using an array. \n\n__Arguments__\n\n* `tasks` - An array or object containing functions to run, each function is passed\n a `callback(err, result)` it must call on completion with an error `err` (which can\n be `null`) and an optional `result` value.\n* `callback(err, results)` - An optional callback to run once all the functions\n have completed. This function gets a results array (or object) containing all \n the result arguments passed to the `task` callbacks.\n\n__Example__\n\n```js\nasync.series([\n function(callback){\n // do some stuff ...\n callback(null, 'one');\n },\n function(callback){\n // do some more stuff ...\n callback(null, 'two');\n }\n],\n// optional callback\nfunction(err, results){\n // results is now equal to ['one', 'two']\n});\n\n\n// an example using an object instead of an array\nasync.series({\n one: function(callback){\n setTimeout(function(){\n callback(null, 1);\n }, 200);\n },\n two: function(callback){\n setTimeout(function(){\n callback(null, 2);\n }, 100);\n }\n},\nfunction(err, results) {\n // results is now equal to: {one: 1, two: 2}\n});\n```\n\n---------------------------------------\n\n<a name=\"parallel\" />\n### parallel(tasks, [callback])\n\nRun the `tasks` array of functions in parallel, without waiting until the previous\nfunction has completed. If any of the functions pass an error to its\ncallback, the main `callback` is immediately called with the value of the error.\nOnce the `tasks` have completed, the results are passed to the final `callback` as an\narray.\n\nIt is also possible to use an object instead of an array. Each property will be\nrun as a function and the results will be passed to the final `callback` as an object\ninstead of an array. This can be a more readable way of handling results from\n[`parallel`](#parallel).\n\n\n__Arguments__\n\n* `tasks` - An array or object containing functions to run. Each function is passed \n a `callback(err, result)` which it must call on completion with an error `err` \n (which can be `null`) and an optional `result` value.\n* `callback(err, results)` - An optional callback to run once all the functions\n have completed. 
This function gets a results array (or object) containing all \n the result arguments passed to the task callbacks.\n\n__Example__\n\n```js\nasync.parallel([\n function(callback){\n setTimeout(function(){\n callback(null, 'one');\n }, 200);\n },\n function(callback){\n setTimeout(function(){\n callback(null, 'two');\n }, 100);\n }\n],\n// optional callback\nfunction(err, results){\n // the results array will equal ['one','two'] even though\n // the second function had a shorter timeout.\n});\n\n\n// an example using an object instead of an array\nasync.parallel({\n one: function(callback){\n setTimeout(function(){\n callback(null, 1);\n }, 200);\n },\n two: function(callback){\n setTimeout(function(){\n callback(null, 2);\n }, 100);\n }\n},\nfunction(err, results) {\n // results is now equals to: {one: 1, two: 2}\n});\n```\n\n---------------------------------------\n\n<a name=\"parallelLimit\" />\n### parallelLimit(tasks, limit, [callback])\n\nThe same as [`parallel`](#parallel), only `tasks` are executed in parallel \nwith a maximum of `limit` tasks executing at any time.\n\nNote that the `tasks` are not executed in batches, so there is no guarantee that \nthe first `limit` tasks will complete before any others are started.\n\n__Arguments__\n\n* `tasks` - An array or object containing functions to run, each function is passed \n a `callback(err, result)` it must call on completion with an error `err` (which can\n be `null`) and an optional `result` value.\n* `limit` - The maximum number of `tasks` to run at any time.\n* `callback(err, results)` - An optional callback to run once all the functions\n have completed. This function gets a results array (or object) containing all \n the result arguments passed to the `task` callbacks.\n\n---------------------------------------\n\n<a name=\"whilst\" />\n### whilst(test, fn, callback)\n\nRepeatedly call `fn`, while `test` returns `true`. Calls `callback` when stopped,\nor an error occurs.\n\n__Arguments__\n\n* `test()` - synchronous truth test to perform before each execution of `fn`.\n* `fn(callback)` - A function which is called each time `test` passes. The function is\n passed a `callback(err)`, which must be called once it has completed with an \n optional `err` argument.\n* `callback(err)` - A callback which is called after the test fails and repeated\n execution of `fn` has stopped.\n\n__Example__\n\n```js\nvar count = 0;\n\nasync.whilst(\n function () { return count < 5; },\n function (callback) {\n count++;\n setTimeout(callback, 1000);\n },\n function (err) {\n // 5 seconds have passed\n }\n);\n```\n\n---------------------------------------\n\n<a name=\"doWhilst\" />\n### doWhilst(fn, test, callback)\n\nThe post-check version of [`whilst`](#whilst). To reflect the difference in \nthe order of operations, the arguments `test` and `fn` are switched. \n\n`doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript.\n\n---------------------------------------\n\n<a name=\"until\" />\n### until(test, fn, callback)\n\nRepeatedly call `fn` until `test` returns `true`. Calls `callback` when stopped,\nor an error occurs.\n\nThe inverse of [`whilst`](#whilst).\n\n---------------------------------------\n\n<a name=\"doUntil\" />\n### doUntil(fn, test, callback)\n\nLike [`doWhilst`](#doWhilst), except the `test` is inverted. 
Note the argument ordering differs from `until`.\n\n---------------------------------------\n\n<a name=\"forever\" />\n### forever(fn, errback)\n\nCalls the asynchronous function `fn` with a callback parameter that allows it to\ncall itself again, in series, indefinitely.\n\nIf an error is passed to the callback then `errback` is called with the\nerror, and execution stops, otherwise it will never be called.\n\n```js\nasync.forever(\n function(next) {\n // next is suitable for passing to things that need a callback(err [, whatever]);\n // it will result in this function being called again.\n },\n function(err) {\n // if next is called with a value in its first parameter, it will appear\n // in here as 'err', and execution will stop.\n }\n);\n```\n\n---------------------------------------\n\n<a name=\"waterfall\" />\n### waterfall(tasks, [callback])\n\nRuns the `tasks` array of functions in series, each passing their results to the next in\nthe array. However, if any of the `tasks` pass an error to their own callback, the\nnext function is not executed, and the main `callback` is immediately called with\nthe error.\n\n__Arguments__\n\n* `tasks` - An array of functions to run, each function is passed a \n `callback(err, result1, result2, ...)` it must call on completion. The first\n argument is an error (which can be `null`) and any further arguments will be \n passed as arguments in order to the next task.\n* `callback(err, [results])` - An optional callback to run once all the functions\n have completed. This will be passed the results of the last task's callback.\n\n\n\n__Example__\n\n```js\nasync.waterfall([\n function(callback){\n callback(null, 'one', 'two');\n },\n function(arg1, arg2, callback){\n // arg1 now equals 'one' and arg2 now equals 'two'\n callback(null, 'three');\n },\n function(arg1, callback){\n // arg1 now equals 'three'\n callback(null, 'done');\n }\n], function (err, result) {\n // result now equals 'done' \n});\n```\n\n---------------------------------------\n<a name=\"compose\" />\n### compose(fn1, fn2...)\n\nCreates a function which is a composition of the passed asynchronous\nfunctions. Each function consumes the return value of the function that\nfollows. Composing functions `f()`, `g()`, and `h()` would produce the result of\n`f(g(h()))`, only this version uses callbacks to obtain the return values.\n\nEach function is executed with the `this` binding of the composed function.\n\n__Arguments__\n\n* `functions...` - the asynchronous functions to compose\n\n\n__Example__\n\n```js\nfunction add1(n, callback) {\n setTimeout(function () {\n callback(null, n + 1);\n }, 10);\n}\n\nfunction mul3(n, callback) {\n setTimeout(function () {\n callback(null, n * 3);\n }, 10);\n}\n\nvar add1mul3 = async.compose(mul3, add1);\n\nadd1mul3(4, function (err, result) {\n // result now equals 15\n});\n```\n\n---------------------------------------\n<a name=\"seq\" />\n### seq(fn1, fn2...)\n\nVersion of the compose function that is more natural to read.\nEach following function consumes the return value of the latter function. \n\nEach function is executed with the `this` binding of the composed function.\n\n__Arguments__\n\n* functions... 
- the asynchronous functions to compose\n\n\n__Example__\n\n```js\n// Requires lodash (or underscore), express3 and dresende's orm2.\n// Part of an app, that fetches cats of the logged user.\n// This example uses `seq` function to avoid overnesting and error \n// handling clutter.\napp.get('/cats', function(request, response) {\n function handleError(err, data, callback) {\n if (err) {\n console.error(err);\n response.json({ status: 'error', message: err.message });\n }\n else {\n callback(data);\n }\n }\n var User = request.models.User;\n async.seq(\n _.bind(User.get, User), // 'User.get' has signature (id, callback(err, data))\n handleError,\n function(user, fn) {\n user.getCats(fn); // 'getCats' has signature (callback(err, data))\n },\n handleError,\n function(cats) {\n response.json({ status: 'ok', message: 'Cats found', data: cats });\n }\n )(req.session.user_id);\n }\n});\n```\n\n---------------------------------------\n<a name=\"applyEach\" />\n### applyEach(fns, args..., callback)\n\nApplies the provided arguments to each function in the array, calling \n`callback` after all functions have completed. If you only provide the first\nargument, then it will return a function which lets you pass in the\narguments as if it were a single function call.\n\n__Arguments__\n\n* `fns` - the asynchronous functions to all call with the same arguments\n* `args...` - any number of separate arguments to pass to the function\n* `callback` - the final argument should be the callback, called when all\n functions have completed processing\n\n\n__Example__\n\n```js\nasync.applyEach([enableSearch, updateSchema], 'bucket', callback);\n\n// partial application example:\nasync.each(\n buckets,\n async.applyEach([enableSearch, updateSchema]),\n callback\n);\n```\n\n---------------------------------------\n\n<a name=\"applyEachSeries\" />\n### applyEachSeries(arr, iterator, callback)\n\nThe same as [`applyEach`](#applyEach) only the functions are applied in series.\n\n---------------------------------------\n\n<a name=\"queue\" />\n### queue(worker, concurrency)\n\nCreates a `queue` object with the specified `concurrency`. Tasks added to the\n`queue` are processed in parallel (up to the `concurrency` limit). If all\n`worker`s are in progress, the task is queued until one becomes available. \nOnce a `worker` completes a `task`, that `task`'s callback is called.\n\n__Arguments__\n\n* `worker(task, callback)` - An asynchronous function for processing a queued\n task, which must call its `callback(err)` argument when finished, with an \n optional `error` as an argument.\n* `concurrency` - An `integer` for determining how many `worker` functions should be\n run in parallel.\n\n__Queue objects__\n\nThe `queue` object returned by this function has the following properties and\nmethods:\n\n* `length()` - a function returning the number of items waiting to be processed.\n* `started` - a function returning whether or not any items have been pushed and processed by the queue\n* `running()` - a function returning the number of items currently being processed.\n* `idle()` - a function returning false if there are items waiting or being processed, or true if not.\n* `concurrency` - an integer for determining how many `worker` functions should be\n run in parallel. This property can be changed after a `queue` is created to\n alter the concurrency on-the-fly.\n* `push(task, [callback])` - add a new task to the `queue`. Calls `callback` once \n the `worker` has finished processing the task. 
Instead of a single task, a `tasks` array\n can be submitted. The respective callback is used for every task in the list.\n* `unshift(task, [callback])` - add a new task to the front of the `queue`.\n* `saturated` - a callback that is called when the `queue` length hits the `concurrency` limit, \n and further tasks will be queued.\n* `empty` - a callback that is called when the last item from the `queue` is given to a `worker`.\n* `drain` - a callback that is called when the last item from the `queue` has returned from the `worker`.\n* `paused` - a boolean for determining whether the queue is in a paused state\n* `pause()` - a function that pauses the processing of tasks until `resume()` is called.\n* `resume()` - a function that resumes the processing of queued tasks when the queue is paused.\n* `kill()` - a function that empties remaining tasks from the queue forcing it to go idle.\n\n__Example__\n\n```js\n// create a queue object with concurrency 2\n\nvar q = async.queue(function (task, callback) {\n console.log('hello ' + task.name);\n callback();\n}, 2);\n\n\n// assign a callback\nq.drain = function() {\n console.log('all items have been processed');\n}\n\n// add some items to the queue\n\nq.push({name: 'foo'}, function (err) {\n console.log('finished processing foo');\n});\nq.push({name: 'bar'}, function (err) {\n console.log('finished processing bar');\n});\n\n// add some items to the queue (batch-wise)\n\nq.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function (err) {\n console.log('finished processing bar');\n});\n\n// add some items to the front of the queue\n\nq.unshift({name: 'bar'}, function (err) {\n console.log('finished processing bar');\n});\n```\n\n\n---------------------------------------\n\n<a name=\"priorityQueue\" />\n### priorityQueue(worker, concurrency)\n\nThe same as [`queue`](#queue) only tasks are assigned a priority and completed in ascending priority order. There are two differences between `queue` and `priorityQueue` objects:\n\n* `push(task, priority, [callback])` - `priority` should be a number. If an array of\n `tasks` is given, all tasks will be assigned the same priority.\n* The `unshift` method was removed.\n\n---------------------------------------\n\n<a name=\"cargo\" />\n### cargo(worker, [payload])\n\nCreates a `cargo` object with the specified payload. Tasks added to the\ncargo will be processed altogether (up to the `payload` limit). If the\n`worker` is in progress, the task is queued until it becomes available. 
Once\nthe `worker` has completed some tasks, each callback of those tasks is called.\nCheck out [this animation](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) for how `cargo` and `queue` work.\n\nWhile [queue](#queue) passes only one task to one of a group of workers\nat a time, cargo passes an array of tasks to a single worker, repeating\nwhen the worker is finished.\n\n__Arguments__\n\n* `worker(tasks, callback)` - An asynchronous function for processing an array of\n queued tasks, which must call its `callback(err)` argument when finished, with \n an optional `err` argument.\n* `payload` - An optional `integer` for determining how many tasks should be\n processed per round; if omitted, the default is unlimited.\n\n__Cargo objects__\n\nThe `cargo` object returned by this function has the following properties and\nmethods:\n\n* `length()` - A function returning the number of items waiting to be processed.\n* `payload` - An `integer` for determining how many tasks should be\n process per round. This property can be changed after a `cargo` is created to\n alter the payload on-the-fly.\n* `push(task, [callback])` - Adds `task` to the `queue`. The callback is called\n once the `worker` has finished processing the task. Instead of a single task, an array of `tasks` \n can be submitted. The respective callback is used for every task in the list.\n* `saturated` - A callback that is called when the `queue.length()` hits the concurrency and further tasks will be queued.\n* `empty` - A callback that is called when the last item from the `queue` is given to a `worker`.\n* `drain` - A callback that is called when the last item from the `queue` has returned from the `worker`.\n\n__Example__\n\n```js\n// create a cargo object with payload 2\n\nvar cargo = async.cargo(function (tasks, callback) {\n for(var i=0; i<tasks.length; i++){\n console.log('hello ' + tasks[i].name);\n }\n callback();\n}, 2);\n\n\n// add some items\n\ncargo.push({name: 'foo'}, function (err) {\n console.log('finished processing foo');\n});\ncargo.push({name: 'bar'}, function (err) {\n console.log('finished processing bar');\n});\ncargo.push({name: 'baz'}, function (err) {\n console.log('finished processing baz');\n});\n```\n\n---------------------------------------\n\n<a name=\"auto\" />\n### auto(tasks, [callback])\n\nDetermines the best order for running the functions in `tasks`, based on their \nrequirements. Each function can optionally depend on other functions being completed \nfirst, and each function is run as soon as its requirements are satisfied. \n\nIf any of the functions pass an error to their callback, it will not \ncomplete (so any other functions depending on it will not run), and the main \n`callback` is immediately called with the error. Functions also receive an \nobject containing the results of functions which have completed so far.\n\nNote, all functions are called with a `results` object as a second argument, \nso it is unsafe to pass functions in the `tasks` object which cannot handle the\nextra argument. 
\n\nFor example, this snippet of code:\n\n```js\nasync.auto({\n readData: async.apply(fs.readFile, 'data.txt', 'utf-8')\n}, callback);\n```\n\nwill have the effect of calling `readFile` with the results object as the last\nargument, which will fail:\n\n```js\nfs.readFile('data.txt', 'utf-8', cb, {});\n```\n\nInstead, wrap the call to `readFile` in a function which does not forward the \n`results` object:\n\n```js\nasync.auto({\n readData: function(cb, results){\n fs.readFile('data.txt', 'utf-8', cb);\n }\n}, callback);\n```\n\n__Arguments__\n\n* `tasks` - An object. Each of its properties is either a function or an array of\n requirements, with the function itself the last item in the array. The object's key\n of a property serves as the name of the task defined by that property,\n i.e. can be used when specifying requirements for other tasks.\n The function receives two arguments: (1) a `callback(err, result)` which must be \n called when finished, passing an `error` (which can be `null`) and the result of \n the function's execution, and (2) a `results` object, containing the results of\n the previously executed functions.\n* `callback(err, results)` - An optional callback which is called when all the\n tasks have been completed. It receives the `err` argument if any `tasks` \n pass an error to their callback. Results are always returned; however, if \n an error occurs, no further `tasks` will be performed, and the results\n object will only contain partial results.\n\n\n__Example__\n\n```js\nasync.auto({\n get_data: function(callback){\n console.log('in get_data');\n // async code to get some data\n callback(null, 'data', 'converted to array');\n },\n make_folder: function(callback){\n console.log('in make_folder');\n // async code to create a directory to store a file in\n // this is run at the same time as getting the data\n callback(null, 'folder');\n },\n write_file: ['get_data', 'make_folder', function(callback, results){\n console.log('in write_file', JSON.stringify(results));\n // once there is some data and the directory exists,\n // write the data to a file in the directory\n callback(null, 'filename');\n }],\n email_link: ['write_file', function(callback, results){\n console.log('in email_link', JSON.stringify(results));\n // once the file is written let's email a link to it...\n // results.write_file contains the filename returned by write_file.\n callback(null, {'file':results.write_file, 'email':'user@example.com'});\n }]\n}, function(err, results) {\n console.log('err = ', err);\n console.log('results = ', results);\n});\n```\n\nThis is a fairly trivial example, but to do this using the basic parallel and\nseries functions would look like this:\n\n```js\nasync.parallel([\n function(callback){\n console.log('in get_data');\n // async code to get some data\n callback(null, 'data', 'converted to array');\n },\n function(callback){\n console.log('in make_folder');\n // async code to create a directory to store a file in\n // this is run at the same time as getting the data\n callback(null, 'folder');\n }\n],\nfunction(err, results){\n async.series([\n function(callback){\n console.log('in write_file', JSON.stringify(results));\n // once there is some data and the directory exists,\n // write the data to a file in the directory\n results.push('filename');\n callback(null);\n },\n function(callback){\n console.log('in email_link', JSON.stringify(results));\n // once the file is written let's email a link to it...\n callback(null, {'file':results.pop(), 
'email':'user@example.com'});\n }\n ]);\n});\n```\n\nFor a complicated series of `async` tasks, using the [`auto`](#auto) function makes adding\nnew tasks much easier (and the code more readable).\n\n\n---------------------------------------\n\n<a name=\"retry\" />\n### retry([times = 5], task, [callback])\n\nAttempts to get a successful response from `task` no more than `times` times before\nreturning an error. If the task is successful, the `callback` will be passed the result\nof the successful task. If all attempts fail, the callback will be passed the error and\nresult (if any) of the final attempt.\n\n__Arguments__\n\n* `times` - An integer indicating how many times to attempt the `task` before giving up. Defaults to 5.\n* `task(callback, results)` - A function which receives two arguments: (1) a `callback(err, result)`\n which must be called when finished, passing `err` (which can be `null`) and the `result` of \n the function's execution, and (2) a `results` object, containing the results of\n the previously executed functions (if nested inside another control flow).\n* `callback(err, results)` - An optional callback which is called when the\n task has succeeded, or after the final failed attempt. It receives the `err` and `result` arguments of the last attempt at completing the `task`.\n\nThe [`retry`](#retry) function can be used as a stand-alone control flow by passing a\ncallback, as shown below:\n\n```js\nasync.retry(3, apiMethod, function(err, result) {\n // do something with the result\n});\n```\n\nIt can also be embedded within other control flow functions to retry individual methods\nthat are not as reliable, like this:\n\n```js\nasync.auto({\n users: api.getUsers.bind(api),\n payments: async.retry(3, api.getPayments.bind(api))\n}, function(err, results) {\n // do something with the results\n});\n```\n\n\n---------------------------------------\n\n<a name=\"iterator\" />\n### iterator(tasks)\n\nCreates an iterator function which calls the next function in the `tasks` array,\nreturning a continuation to call the next one after that. It's also possible to\n“peek” at the next iterator with `iterator.next()`.\n\nThis function is used internally by the `async` module, but can be useful when\nyou want to manually control the flow of functions in series.\n\n__Arguments__\n\n* `tasks` - An array of functions to run.\n\n__Example__\n\n```js\nvar iterator = async.iterator([\n function(){ sys.p('one'); },\n function(){ sys.p('two'); },\n function(){ sys.p('three'); }\n]);\n\nnode> var iterator2 = iterator();\n'one'\nnode> var iterator3 = iterator2();\n'two'\nnode> iterator3();\n'three'\nnode> var nextfn = iterator2.next();\nnode> nextfn();\n'three'\n```\n\n---------------------------------------\n\n<a name=\"apply\" />\n### apply(function, arguments..)\n\nCreates a continuation function with some arguments already applied. \n\nUseful as a shorthand when combined with other control flow functions. 
Any arguments\npassed to the returned function are added to the arguments originally passed\nto apply.\n\n__Arguments__\n\n* `function` - The function you want to eventually apply all arguments to.\n* `arguments...` - Any number of arguments to automatically apply when the\n continuation is called.\n\n__Example__\n\n```js\n// using apply\n\nasync.parallel([\n async.apply(fs.writeFile, 'testfile1', 'test1'),\n async.apply(fs.writeFile, 'testfile2', 'test2'),\n]);\n\n\n// the same process without using apply\n\nasync.parallel([\n function(callback){\n fs.writeFile('testfile1', 'test1', callback);\n },\n function(callback){\n fs.writeFile('testfile2', 'test2', callback);\n }\n]);\n```\n\nIt's possible to pass any number of additional arguments when calling the\ncontinuation:\n\n```js\nnode> var fn = async.apply(sys.puts, 'one');\nnode> fn('two', 'three');\none\ntwo\nthree\n```\n\n---------------------------------------\n\n<a name=\"nextTick\" />\n### nextTick(callback)\n\nCalls `callback` on a later loop around the event loop. In Node.js this just\ncalls `process.nextTick`; in the browser it falls back to `setImmediate(callback)`\nif available, otherwise `setTimeout(callback, 0)`, which means other higher priority\nevents may precede the execution of `callback`.\n\nThis is used internally for browser-compatibility purposes.\n\n__Arguments__\n\n* `callback` - The function to call on a later loop around the event loop.\n\n__Example__\n\n```js\nvar call_order = [];\nasync.nextTick(function(){\n call_order.push('two');\n // call_order now equals ['one','two']\n});\ncall_order.push('one')\n```\n\n<a name=\"times\" />\n### times(n, callback)\n\nCalls the `callback` function `n` times, and accumulates results in the same manner\nyou would use with [`map`](#map).\n\n__Arguments__\n\n* `n` - The number of times to run the function.\n* `callback` - The function to call `n` times.\n\n__Example__\n\n```js\n// Pretend this is some complicated async factory\nvar createUser = function(id, callback) {\n callback(null, {\n id: 'user' + id\n })\n}\n// generate 5 users\nasync.times(5, function(n, next){\n createUser(n, function(err, user) {\n next(err, user)\n })\n}, function(err, users) {\n // we should now have 5 users\n});\n```\n\n<a name=\"timesSeries\" />\n### timesSeries(n, callback)\n\nThe same as [`times`](#times), only the iterator is applied to each item in `arr` in\nseries. The next `iterator` is only called once the current one has completed. \nThe results array will be in the same order as the original.\n\n\n## Utils\n\n<a name=\"memoize\" />\n### memoize(fn, [hasher])\n\nCaches the results of an `async` function. When creating a hash to store function\nresults against, the callback is omitted from the hash and an optional hash\nfunction can be used.\n\nThe cache of results is exposed as the `memo` property of the function returned\nby `memoize`.\n\n__Arguments__\n\n* `fn` - The function to proxy and cache results from.\n* `hasher` - An optional function for generating a custom hash for storing\n results. It has all the arguments applied to it apart from the callback, and\n must be synchronous.\n\n__Example__\n\n```js\nvar slow_fn = function (name, callback) {\n // do something\n callback(null, result);\n};\nvar fn = async.memoize(slow_fn);\n\n// fn can now be used as if it were slow_fn\nfn('some name', function () {\n // callback\n});\n```\n\n<a name=\"unmemoize\" />\n### unmemoize(fn)\n\nUndoes a [`memoize`](#memoize)d function, reverting it to the original, unmemoized\nform. 
Handy for testing.\n\n__Arguments__\n\n* `fn` - the memoized function\n\n<a name=\"log\" />\n### log(function, arguments)\n\nLogs the result of an `async` function to the `console`. Only works in Node.js or\nin browsers that support `console.log` and `console.error` (such as FF and Chrome).\nIf multiple arguments are returned from the async function, `console.log` is\ncalled on each argument in order.\n\n__Arguments__\n\n* `function` - The function you want to eventually apply all arguments to.\n* `arguments...` - Any number of arguments to apply to the function.\n\n__Example__\n\n```js\nvar hello = function(name, callback){\n setTimeout(function(){\n callback(null, 'hello ' + name);\n }, 1000);\n};\n```\n```js\nnode> async.log(hello, 'world');\n'hello world'\n```\n\n---------------------------------------\n\n<a name=\"dir\" />\n### dir(function, arguments)\n\nLogs the result of an `async` function to the `console` using `console.dir` to\ndisplay the properties of the resulting object. Only works in Node.js or\nin browsers that support `console.dir` and `console.error` (such as FF and Chrome).\nIf multiple arguments are returned from the async function, `console.dir` is\ncalled on each argument in order.\n\n__Arguments__\n\n* `function` - The function you want to eventually apply all arguments to.\n* `arguments...` - Any number of arguments to apply to the function.\n\n__Example__\n\n```js\nvar hello = function(name, callback){\n setTimeout(function(){\n callback(null, {hello: name});\n }, 1000);\n};\n```\n```js\nnode> async.dir(hello, 'world');\n{hello: 'world'}\n```\n\n---------------------------------------\n\n<a name=\"noConflict\" />\n### noConflict()\n\nChanges the value of `async` back to its original value, returning a reference to the\n`async` object.\n",
- "readmeFilename": "README.md",
"homepage": "https://github.com/caolan/async",
"_id": "async@0.9.0",
- "_shasum": "ac3613b1da9bed1b47510bb4651b8931e47146c7",
+ "dist": {
+ "shasum": "ac3613b1da9bed1b47510bb4651b8931e47146c7",
+ "tarball": "http://registry.npmjs.org/async/-/async-0.9.0.tgz"
+ },
"_from": "async@>=0.9.0 <0.10.0",
- "_resolved": "https://registry.npmjs.org/async/-/async-0.9.0.tgz"
+ "_npmVersion": "1.4.3",
+ "_npmUser": {
+ "name": "caolan",
+ "email": "caolan.mcmahon@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "caolan",
+ "email": "caolan@caolanmcmahon.com"
+ }
+ ],
+ "directories": {},
+ "_shasum": "ac3613b1da9bed1b47510bb4651b8931e47146c7",
+ "_resolved": "https://registry.npmjs.org/async/-/async-0.9.0.tgz",
+ "readme": "ERROR: No README data found!"
}
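The async readme quoted in the hunk above documents `parallelLimit` without a usage example. A minimal sketch under the same callback conventions (the file names are placeholders, not from the original):

```js
var async = require('async');
var fs = require('fs');

// Hypothetical file list; at most two fs.stat calls run at any time.
var files = ['file1', 'file2', 'file3', 'file4'];

async.parallelLimit(
  files.map(function (file) {
    return function (callback) {
      fs.stat(file, callback); // callback(err, stats)
    };
  }),
  2, // limit: no more than two tasks in flight at once
  function (err, results) {
    // results holds the fs.Stats objects in the same order as `files`
  }
);
```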
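The same readme also describes `priorityQueue` without an example. A sketch assuming the worker and callback conventions it gives for `queue` (task names and priority values are illustrative):

```js
var async = require('async');

// Lower numbers run first: tasks complete in ascending priority order.
var q = async.priorityQueue(function (task, callback) {
  console.log('processing ' + task.name);
  callback();
}, 1);

q.drain = function () {
  console.log('all items have been processed');
};

q.push({name: 'routine'}, 10, function (err) {
  console.log('finished routine task');
});
q.push({name: 'urgent'}, 1, function (err) {
  console.log('finished urgent task');
});
```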
diff --git a/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json b/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json
index c226e9a7e..3324a13e9 100644
--- a/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json
+++ b/node_modules/request/node_modules/form-data/node_modules/combined-stream/node_modules/delayed-stream/package.json
@@ -21,14 +21,22 @@
"fake": "0.2.0",
"far": "0.0.1"
},
- "readme": "# delayed-stream\n\nBuffers events from a stream until you are ready to handle them.\n\n## Installation\n\n``` bash\nnpm install delayed-stream\n```\n\n## Usage\n\nThe following example shows how to write a http echo server that delays its\nresponse by 1000 ms.\n\n``` javascript\nvar DelayedStream = require('delayed-stream');\nvar http = require('http');\n\nhttp.createServer(function(req, res) {\n var delayed = DelayedStream.create(req);\n\n setTimeout(function() {\n res.writeHead(200);\n delayed.pipe(res);\n }, 1000);\n});\n```\n\nIf you are not using `Stream#pipe`, you can also manually release the buffered\nevents by calling `delayedStream.resume()`:\n\n``` javascript\nvar delayed = DelayedStream.create(req);\n\nsetTimeout(function() {\n // Emit all buffered events and resume underlaying source\n delayed.resume();\n}, 1000);\n```\n\n## Implementation\n\nIn order to use this meta stream properly, here are a few things you should\nknow about the implementation.\n\n### Event Buffering / Proxying\n\nAll events of the `source` stream are hijacked by overwriting the `source.emit`\nmethod. Until node implements a catch-all event listener, this is the only way.\n\nHowever, delayed-stream still continues to emit all events it captures on the\n`source`, regardless of whether you have released the delayed stream yet or\nnot.\n\nUpon creation, delayed-stream captures all `source` events and stores them in\nan internal event buffer. Once `delayedStream.release()` is called, all\nbuffered events are emitted on the `delayedStream`, and the event buffer is\ncleared. After that, delayed-stream merely acts as a proxy for the underlaying\nsource.\n\n### Error handling\n\nError events on `source` are buffered / proxied just like any other events.\nHowever, `delayedStream.create` attaches a no-op `'error'` listener to the\n`source`. This way you only have to handle errors on the `delayedStream`\nobject, rather than in two places.\n\n### Buffer limits\n\ndelayed-stream provides a `maxDataSize` property that can be used to limit\nthe amount of data being buffered. In order to protect you from bad `source`\nstreams that don't react to `source.pause()`, this feature is enabled by\ndefault.\n\n## API\n\n### DelayedStream.create(source, [options])\n\nReturns a new `delayedStream`. Available options are:\n\n* `pauseStream`\n* `maxDataSize`\n\nThe description for those properties can be found below.\n\n### delayedStream.source\n\nThe `source` stream managed by this object. This is useful if you are\npassing your `delayedStream` around, and you still want to access properties\non the `source` object.\n\n### delayedStream.pauseStream = true\n\nWhether to pause the underlaying `source` when calling\n`DelayedStream.create()`. Modifying this property afterwards has no effect.\n\n### delayedStream.maxDataSize = 1024 * 1024\n\nThe amount of data to buffer before emitting an `error`.\n\nIf the underlaying source is emitting `Buffer` objects, the `maxDataSize`\nrefers to bytes.\n\nIf the underlaying source is emitting JavaScript strings, the size refers to\ncharacters.\n\nIf you know what you are doing, you can set this property to `Infinity` to\ndisable this feature. 
You can also modify this property during runtime.\n\n### delayedStream.maxDataSize = 1024 * 1024\n\nThe amount of data to buffer before emitting an `error`.\n\nIf the underlaying source is emitting `Buffer` objects, the `maxDataSize`\nrefers to bytes.\n\nIf the underlaying source is emitting JavaScript strings, the size refers to\ncharacters.\n\nIf you know what you are doing, you can set this property to `Infinity` to\ndisable this feature.\n\n### delayedStream.dataSize = 0\n\nThe amount of data buffered so far.\n\n### delayedStream.readable\n\nAn ECMA5 getter that returns the value of `source.readable`.\n\n### delayedStream.resume()\n\nIf the `delayedStream` has not been released so far, `delayedStream.release()`\nis called.\n\nIn either case, `source.resume()` is called.\n\n### delayedStream.pause()\n\nCalls `source.pause()`.\n\n### delayedStream.pipe(dest)\n\nCalls `delayedStream.resume()` and then proxies the arguments to `source.pipe`.\n\n### delayedStream.release()\n\nEmits and clears all events that have been buffered up so far. This does not\nresume the underlaying source, use `delayedStream.resume()` instead.\n\n## License\n\ndelayed-stream is licensed under the MIT license.\n",
- "readmeFilename": "Readme.md",
- "bugs": {
- "url": "https://github.com/felixge/node-delayed-stream/issues"
- },
"_id": "delayed-stream@0.0.5",
+ "_engineSupported": true,
+ "_npmVersion": "1.0.3",
+ "_nodeVersion": "v0.4.9-pre",
+ "_defaultsLoaded": true,
+ "dist": {
+ "shasum": "d4b1f43a93e8296dfe02694f4680bc37a313c73f",
+ "tarball": "http://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz"
+ },
+ "scripts": {},
+ "directories": {},
"_shasum": "d4b1f43a93e8296dfe02694f4680bc37a313c73f",
- "_from": "delayed-stream@0.0.5",
"_resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz",
- "scripts": {}
+ "_from": "delayed-stream@0.0.5",
+ "bugs": {
+ "url": "https://github.com/felixge/node-delayed-stream/issues"
+ },
+ "readme": "ERROR: No README data found!"
}
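The delayed-stream readme in the hunk above documents the `maxDataSize` limit and the buffered-release behaviour in separate snippets. A minimal sketch combining them, assuming an echo-style server; the 1 MB limit, the delay, and the port are illustrative:

```js
var DelayedStream = require('delayed-stream');
var http = require('http');

http.createServer(function (req, res) {
  // Buffer the request events, but refuse to hold more than ~1 MB of data.
  var delayed = DelayedStream.create(req, {maxDataSize: 1024 * 1024});

  delayed.on('error', function (err) {
    // Emitted once the buffered data exceeds maxDataSize (or the source errors).
    res.writeHead(413);
    res.end();
  });

  setTimeout(function () {
    res.writeHead(200);
    delayed.pipe(res); // pipe() releases the buffered events and resumes the source
  }, 1000);
}).listen(8080);
```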
diff --git a/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json b/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json
index 3a2e261b8..080953f16 100644
--- a/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json
+++ b/node_modules/request/node_modules/form-data/node_modules/combined-stream/package.json
@@ -25,13 +25,37 @@
"devDependencies": {
"far": "~0.0.7"
},
- "readme": "# combined-stream [![Build Status](https://travis-ci.org/felixge/node-combined-stream.svg?branch=master)](https://travis-ci.org/felixge/node-combined-stream)\n\nA stream that emits multiple other streams one after another.\n\n## Installation\n\n``` bash\nnpm install combined-stream\n```\n\n## Usage\n\nHere is a simple example that shows how you can use combined-stream to combine\ntwo files into one:\n\n``` javascript\nvar CombinedStream = require('combined-stream');\nvar fs = require('fs');\n\nvar combinedStream = CombinedStream.create();\ncombinedStream.append(fs.createReadStream('file1.txt'));\ncombinedStream.append(fs.createReadStream('file2.txt'));\n\ncombinedStream.pipe(fs.createWriteStream('combined.txt'));\n```\n\nWhile the example above works great, it will pause all source streams until\nthey are needed. If you don't want that to happen, you can set `pauseStreams`\nto `false`:\n\n``` javascript\nvar CombinedStream = require('combined-stream');\nvar fs = require('fs');\n\nvar combinedStream = CombinedStream.create({pauseStreams: false});\ncombinedStream.append(fs.createReadStream('file1.txt'));\ncombinedStream.append(fs.createReadStream('file2.txt'));\n\ncombinedStream.pipe(fs.createWriteStream('combined.txt'));\n```\n\nHowever, what if you don't have all the source streams yet, or you don't want\nto allocate the resources (file descriptors, memory, etc.) for them right away?\nWell, in that case you can simply provide a callback that supplies the stream\nby calling a `next()` function:\n\n``` javascript\nvar CombinedStream = require('combined-stream');\nvar fs = require('fs');\n\nvar combinedStream = CombinedStream.create();\ncombinedStream.append(function(next) {\n next(fs.createReadStream('file1.txt'));\n});\ncombinedStream.append(function(next) {\n next(fs.createReadStream('file2.txt'));\n});\n\ncombinedStream.pipe(fs.createWriteStream('combined.txt'));\n```\n\n## API\n\n### CombinedStream.create([options])\n\nReturns a new combined stream object. Available options are:\n\n* `maxDataSize`\n* `pauseStreams`\n\nThe effect of those options is described below.\n\n### combinedStream.pauseStreams = `true`\n\nWhether to apply back pressure to the underlaying streams. If set to `false`,\nthe underlaying streams will never be paused. If set to `true`, the\nunderlaying streams will be paused right after being appended, as well as when\n`delayedStream.pipe()` wants to throttle.\n\n### combinedStream.maxDataSize = `2 * 1024 * 1024`\n\nThe maximum amount of bytes (or characters) to buffer for all source streams.\nIf this value is exceeded, `combinedStream` emits an `'error'` event.\n\n### combinedStream.dataSize = `0`\n\nThe amount of bytes (or characters) currently buffered by `combinedStream`.\n\n### combinedStream.append(stream)\n\nAppends the given `stream` to the combinedStream object. If `pauseStreams` is\nset to `true, this stream will also be paused right away.\n\n`streams` can also be a function that takes one parameter called `next`. 
`next`\nis a function that must be invoked in order to provide the `next` stream, see\nexample above.\n\nRegardless of how the `stream` is appended, combined-stream always attaches an\n`'error'` listener to it, so you don't have to do that manually.\n\nSpecial case: `stream` can also be a String or Buffer.\n\n### combinedStream.write(data)\n\nYou should not call this, `combinedStream` takes care of piping the appended\nstreams into itself for you.\n\n### combinedStream.resume()\n\nCauses `combinedStream` to start drain the streams it manages. The function is\nidempotent, and also emits a `'resume'` event each time which usually goes to\nthe stream that is currently being drained.\n\n### combinedStream.pause();\n\nIf `combinedStream.pauseStreams` is set to `false`, this does nothing.\nOtherwise a `'pause'` event is emitted, this goes to the stream that is\ncurrently being drained, so you can use it to apply back pressure.\n\n### combinedStream.end();\n\nSets `combinedStream.writable` to false, emits an `'end'` event, and removes\nall streams from the queue.\n\n### combinedStream.destroy();\n\nSame as `combinedStream.end()`, except it emits a `'close'` event instead of\n`'end'`.\n\n## License\n\ncombined-stream is licensed under the MIT license.\n",
- "readmeFilename": "Readme.md",
+ "gitHead": "19d9bdd4c20f6806c2ae8adb00a53fb6fd154740",
"bugs": {
"url": "https://github.com/felixge/node-combined-stream/issues"
},
"_id": "combined-stream@0.0.5",
"_shasum": "29ed76e5c9aad07c4acf9ca3d32601cce28697a2",
"_from": "combined-stream@>=0.0.4 <0.1.0",
- "_resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.5.tgz"
+ "_npmVersion": "1.4.14",
+ "_npmUser": {
+ "name": "alexindigo",
+ "email": "iam@alexindigo.com"
+ },
+ "maintainers": [
+ {
+ "name": "felixge",
+ "email": "felix@debuggable.com"
+ },
+ {
+ "name": "celer",
+ "email": "celer@scrypt.net"
+ },
+ {
+ "name": "alexindigo",
+ "email": "iam@alexindigo.com"
+ }
+ ],
+ "dist": {
+ "shasum": "29ed76e5c9aad07c4acf9ca3d32601cce28697a2",
+ "tarball": "http://registry.npmjs.org/combined-stream/-/combined-stream-0.0.5.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.5.tgz",
+ "readme": "ERROR: No README data found!"
}
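The combined-stream readme above notes that `append()` accepts plain strings and Buffers as well as streams. A minimal sketch interleaving string separators with file contents (the file names are placeholders):

```js
var CombinedStream = require('combined-stream');
var fs = require('fs');

var combined = CombinedStream.create({maxDataSize: 2 * 1024 * 1024});

// Strings and Buffers are appended directly, not just streams.
combined.append('--- file1.txt ---\n');
combined.append(fs.createReadStream('file1.txt'));
combined.append('\n--- file2.txt ---\n');
combined.append(fs.createReadStream('file2.txt'));

combined.on('error', function (err) {
  // Emitted if the buffered data exceeds maxDataSize or an appended stream errors.
  console.error(err);
});

combined.pipe(fs.createWriteStream('combined.txt'));
```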
diff --git a/node_modules/request/node_modules/form-data/node_modules/mime/package.json b/node_modules/request/node_modules/form-data/node_modules/mime/package.json
index dfa8a2186..b666b72a2 100644
--- a/node_modules/request/node_modules/form-data/node_modules/mime/package.json
+++ b/node_modules/request/node_modules/form-data/node_modules/mime/package.json
@@ -30,10 +30,30 @@
"bugs": {
"url": "https://github.com/broofa/node-mime/issues"
},
- "homepage": "https://github.com/broofa/node-mime",
"_id": "mime@1.2.11",
- "_shasum": "58203eed86e3a5ef17aed2b7d9ebd47f0a60dd10",
+ "dist": {
+ "shasum": "58203eed86e3a5ef17aed2b7d9ebd47f0a60dd10",
+ "tarball": "http://registry.npmjs.org/mime/-/mime-1.2.11.tgz"
+ },
"_from": "mime@>=1.2.11 <1.3.0",
+ "_npmVersion": "1.3.6",
+ "_npmUser": {
+ "name": "broofa",
+ "email": "robert@broofa.com"
+ },
+ "maintainers": [
+ {
+ "name": "broofa",
+ "email": "robert@broofa.com"
+ },
+ {
+ "name": "bentomas",
+ "email": "benjamin@benjaminthomas.org"
+ }
+ ],
+ "directories": {},
+ "_shasum": "58203eed86e3a5ef17aed2b7d9ebd47f0a60dd10",
"_resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz",
+ "homepage": "https://github.com/broofa/node-mime",
"scripts": {}
}
diff --git a/node_modules/request/node_modules/form-data/package.json b/node_modules/request/node_modules/form-data/package.json
index 0d513e732..7700d9992 100644
--- a/node_modules/request/node_modules/form-data/package.json
+++ b/node_modules/request/node_modules/form-data/package.json
@@ -35,8 +35,7 @@
"formidable": "~1.0.14",
"request": "~2.36.0"
},
- "readme": "# Form-Data [![Build Status](https://travis-ci.org/felixge/node-form-data.png?branch=master)](https://travis-ci.org/felixge/node-form-data) [![Dependency Status](https://gemnasium.com/felixge/node-form-data.png)](https://gemnasium.com/felixge/node-form-data)\n\nA module to create readable ```\"multipart/form-data\"``` streams. Can be used to submit forms and file uploads to other web applications.\n\nThe API of this module is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd].\n\n[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface\n[streams2-thing]: http://nodejs.org/api/stream.html#stream_compatibility_with_older_node_versions\n\n## Install\n\n```\nnpm install form-data\n```\n\n## Usage\n\nIn this example we are constructing a form with 3 fields that contain a string,\na buffer and a file stream.\n\n``` javascript\nvar FormData = require('form-data');\nvar fs = require('fs');\n\nvar form = new FormData();\nform.append('my_field', 'my value');\nform.append('my_buffer', new Buffer(10));\nform.append('my_file', fs.createReadStream('/foo/bar.jpg'));\n```\n\nAlso you can use http-response stream:\n\n``` javascript\nvar FormData = require('form-data');\nvar http = require('http');\n\nvar form = new FormData();\n\nhttp.request('http://nodejs.org/images/logo.png', function(response) {\n form.append('my_field', 'my value');\n form.append('my_buffer', new Buffer(10));\n form.append('my_logo', response);\n});\n```\n\nOr @mikeal's request stream:\n\n``` javascript\nvar FormData = require('form-data');\nvar request = require('request');\n\nvar form = new FormData();\n\nform.append('my_field', 'my value');\nform.append('my_buffer', new Buffer(10));\nform.append('my_logo', request('http://nodejs.org/images/logo.png'));\n```\n\nIn order to submit this form to a web application, call ```submit(url, [callback])``` method:\n\n``` javascript\nform.submit('http://example.org/', function(err, res) {\n // res – response object (http.IncomingMessage) //\n res.resume(); // for node-0.10.x\n});\n\n```\n\nFor more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods.\n\n### Alternative submission methods\n\nYou can use node's http client interface:\n\n``` javascript\nvar http = require('http');\n\nvar request = http.request({\n method: 'post',\n host: 'example.org',\n path: '/upload',\n headers: form.getHeaders()\n});\n\nform.pipe(request);\n\nrequest.on('response', function(res) {\n console.log(res.statusCode);\n});\n```\n\nOr if you would prefer the `'Content-Length'` header to be set for you:\n\n``` javascript\nform.submit('example.org/upload', function(err, res) {\n console.log(res.statusCode);\n});\n```\n\nTo use custom headers and pre-known length in parts:\n\n``` javascript\nvar CRLF = '\\r\\n';\nvar form = new FormData();\n\nvar options = {\n header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF,\n knownLength: 1\n};\n\nform.append('my_buffer', buffer, options);\n\nform.submit('http://example.com/', function(err, res) {\n if (err) throw err;\n console.log('Done');\n});\n```\n\nForm-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide \"file\"-related information manually:\n\n``` javascript\nsomeModule.stream(function(err, stdout, stderr) {\n if 
(err) throw err;\n\n var form = new FormData();\n\n form.append('file', stdout, {\n filename: 'unicycle.jpg',\n contentType: 'image/jpg',\n knownLength: 19806\n });\n\n form.submit('http://example.com/', function(err, res) {\n if (err) throw err;\n console.log('Done');\n });\n});\n```\n\nFor edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter:\n\n``` javascript\nform.submit({\n host: 'example.com',\n path: '/probably.php?extra=params',\n auth: 'username:password'\n}, function(err, res) {\n console.log(res.statusCode);\n});\n```\n\nIn case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`:\n\n``` javascript\nform.submit({\n host: 'example.com',\n path: '/surelynot.php',\n headers: {'x-test-header': 'test-header-value'}\n}, function(err, res) {\n console.log(res.statusCode);\n});\n```\n\n## Notes\n\n- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround.\n- If it feels like FormData hangs after submit and you're on ```node-0.10```, please check [Compatibility with Older Node Versions][streams2-thing]\n\n## TODO\n\n- Add new streams (0.10) support and try really hard not to break it for 0.8.x.\n\n## License\n\nForm-Data is licensed under the MIT license.\n",
- "readmeFilename": "Readme.md",
+ "gitHead": "5f5f4809ea685f32658809fa0f13d7eface0e45a",
"bugs": {
"url": "https://github.com/felixge/node-form-data/issues"
},
@@ -44,5 +43,38 @@
"_id": "form-data@0.1.4",
"_shasum": "91abd788aba9702b1aabfa8bc01031a2ac9e3b12",
"_from": "form-data@>=0.1.0 <0.2.0",
- "_resolved": "https://registry.npmjs.org/form-data/-/form-data-0.1.4.tgz"
+ "_npmVersion": "1.4.14",
+ "_npmUser": {
+ "name": "alexindigo",
+ "email": "iam@alexindigo.com"
+ },
+ "maintainers": [
+ {
+ "name": "felixge",
+ "email": "felix@debuggable.com"
+ },
+ {
+ "name": "idralyuk",
+ "email": "igor@buran.us"
+ },
+ {
+ "name": "alexindigo",
+ "email": "iam@alexindigo.com"
+ },
+ {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ },
+ {
+ "name": "celer",
+ "email": "dtyree77@gmail.com"
+ }
+ ],
+ "dist": {
+ "shasum": "91abd788aba9702b1aabfa8bc01031a2ac9e3b12",
+ "tarball": "http://registry.npmjs.org/form-data/-/form-data-0.1.4.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/form-data/-/form-data-0.1.4.tgz",
+ "readme": "ERROR: No README data found!"
}
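The form-data readme above shows the `knownLength` option and `submit()` with custom headers in separate snippets. A minimal sketch combining the two; the host, path, header, and length values are placeholders:

```js
var FormData = require('form-data');
var fs = require('fs');

var form = new FormData();
form.append('my_field', 'my value');
// getLengthSync() cannot measure arbitrary streams, so supply knownLength explicitly.
form.append('my_file', fs.createReadStream('/foo/bar.jpg'), {
  filename: 'bar.jpg',
  contentType: 'image/jpg',
  knownLength: 19806
});

form.submit({
  host: 'example.com',
  path: '/upload',
  headers: {'x-test-header': 'test-header-value'}
}, function (err, res) {
  if (err) throw err;
  console.log(res.statusCode);
  res.resume(); // drain the response on node 0.10.x
});
```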
diff --git a/node_modules/request/node_modules/hawk/node_modules/boom/package.json b/node_modules/request/node_modules/hawk/node_modules/boom/package.json
index eceac038c..c7875b4cb 100755
--- a/node_modules/request/node_modules/hawk/node_modules/boom/package.json
+++ b/node_modules/request/node_modules/hawk/node_modules/boom/package.json
@@ -36,14 +36,29 @@
"url": "http://github.com/spumko/boom/raw/master/LICENSE"
}
],
- "readme": "<a href=\"https://github.com/spumko\"><img src=\"https://raw.github.com/spumko/spumko/master/images/from.png\" align=\"right\" /></a>\n![boom Logo](https://raw.github.com/spumko/boom/master/images/boom.png)\n\nHTTP-friendly error objects\n\n[![Build Status](https://secure.travis-ci.org/spumko/boom.png)](http://travis-ci.org/spumko/boom)\n",
- "readmeFilename": "README.md",
+ "_id": "boom@0.4.2",
+ "dist": {
+ "shasum": "7a636e9ded4efcefb19cef4947a3c67dfaee911b",
+ "tarball": "http://registry.npmjs.org/boom/-/boom-0.4.2.tgz"
+ },
+ "_from": "boom@>=0.4.0 <0.5.0",
+ "_npmVersion": "1.2.18",
+ "_npmUser": {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ },
+ "maintainers": [
+ {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ }
+ ],
+ "directories": {},
+ "_shasum": "7a636e9ded4efcefb19cef4947a3c67dfaee911b",
+ "_resolved": "https://registry.npmjs.org/boom/-/boom-0.4.2.tgz",
"bugs": {
"url": "https://github.com/spumko/boom/issues"
},
- "homepage": "https://github.com/spumko/boom",
- "_id": "boom@0.4.2",
- "_shasum": "7a636e9ded4efcefb19cef4947a3c67dfaee911b",
- "_from": "boom@>=0.4.0 <0.5.0",
- "_resolved": "https://registry.npmjs.org/boom/-/boom-0.4.2.tgz"
+ "readme": "ERROR: No README data found!",
+ "homepage": "https://github.com/spumko/boom"
}
diff --git a/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json b/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json
index 69dd3d911..124861335 100755
--- a/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json
+++ b/node_modules/request/node_modules/hawk/node_modules/cryptiles/package.json
@@ -37,14 +37,29 @@
"url": "http://github.com/hueniverse/cryptiles/raw/master/LICENSE"
}
],
- "readme": "cryptiles\n=========\n\nGeneral purpose crypto utilities\n\n[![Build Status](https://secure.travis-ci.org/hueniverse/cryptiles.png)](http://travis-ci.org/hueniverse/cryptiles)\n",
- "readmeFilename": "README.md",
"bugs": {
"url": "https://github.com/hueniverse/cryptiles/issues"
},
- "homepage": "https://github.com/hueniverse/cryptiles",
"_id": "cryptiles@0.2.2",
- "_shasum": "ed91ff1f17ad13d3748288594f8a48a0d26f325c",
+ "dist": {
+ "shasum": "ed91ff1f17ad13d3748288594f8a48a0d26f325c",
+ "tarball": "http://registry.npmjs.org/cryptiles/-/cryptiles-0.2.2.tgz"
+ },
"_from": "cryptiles@>=0.2.0 <0.3.0",
- "_resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.2.2.tgz"
+ "_npmVersion": "1.2.24",
+ "_npmUser": {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ },
+ "maintainers": [
+ {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ }
+ ],
+ "directories": {},
+ "_shasum": "ed91ff1f17ad13d3748288594f8a48a0d26f325c",
+ "_resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.2.2.tgz",
+ "readme": "ERROR: No README data found!",
+ "homepage": "https://github.com/hueniverse/cryptiles"
}
diff --git a/node_modules/request/node_modules/hawk/node_modules/hoek/package.json b/node_modules/request/node_modules/hawk/node_modules/hoek/package.json
index 3954bfacd..789de2adb 100755
--- a/node_modules/request/node_modules/hawk/node_modules/hoek/package.json
+++ b/node_modules/request/node_modules/hawk/node_modules/hoek/package.json
@@ -38,14 +38,33 @@
"url": "http://github.com/spumko/hoek/raw/master/LICENSE"
}
],
- "readme": "<a href=\"https://github.com/spumko\"><img src=\"https://raw.github.com/spumko/spumko/master/images/from.png\" align=\"right\" /></a>\r\n![hoek Logo](https://raw.github.com/spumko/hoek/master/images/hoek.png)\r\n\r\nGeneral purpose node utilities\r\n\r\n[![Build Status](https://secure.travis-ci.org/spumko/hoek.png)](http://travis-ci.org/spumko/hoek)\r\n\r\n# Table of Contents\r\n\r\n* [Introduction](#introduction \"Introduction\")\r\n* [Object](#object \"Object\")\r\n * [clone](#cloneobj \"clone\")\r\n * [merge](#mergetarget-source-isnulloverride-ismergearrays \"merge\")\r\n * [applyToDefaults](#applytodefaultsdefaults-options \"applyToDefaults\")\r\n * [unique](#uniquearray-key \"unique\")\r\n * [mapToObject](#maptoobjectarray-key \"mapToObject\")\r\n * [intersect](#intersectarray1-array2 \"intersect\")\r\n * [matchKeys](#matchkeysobj-keys \"matchKeys\")\r\n * [flatten](#flattenarray-target \"flatten\")\r\n * [removeKeys](#removekeysobject-keys \"removeKeys\")\r\n * [reach](#reachobj-chain \"reach\")\r\n * [inheritAsync](#inheritasyncself-obj-keys \"inheritAsync\")\r\n * [rename](#renameobj-from-to \"rename\")\r\n* [Timer](#timer \"Timer\")\r\n* [Binary Encoding/Decoding](#binary \"Binary Encoding/Decoding\")\r\n * [base64urlEncode](#binary64urlEncodevalue \"binary64urlEncode\")\r\n * [base64urlDecode](#binary64urlDecodevalue \"binary64urlDecode\")\r\n* [Escaping Characters](#escaped \"Escaping Characters\")\r\n * [escapeHtml](#escapeHtmlstring \"escapeHtml\")\r\n * [escapeHeaderAttribute](#escapeHeaderAttributeattribute \"escapeHeaderAttribute\")\r\n * [escapeRegex](#escapeRegexstring \"escapeRegex\")\r\n* [Errors](#errors \"Errors\")\r\n * [assert](#assertmessage \"assert\")\r\n * [abort](#abortmessage \"abort\")\r\n * [displayStack](#displayStackslice \"displayStack\")\r\n * [callStack](#callStackslice \"callStack\")\r\n * [toss](#tosscondition \"toss\")\r\n* [Load files](#load-files \"Load Files\")\r\n * [loadPackage](#loadPackagedir \"loadpackage\")\r\n * [loadDirModules](#loadDirModulespath-excludefiles-target \"loaddirmodules\")\r\n\r\n\r\n\r\n# Introduction\r\n\r\nThe *Hoek* general purpose node utilities library is used to aid in a variety of manners. It comes with useful methods for Arrays (clone, merge, applyToDefaults), Objects (removeKeys, copy), Asserting and more. \r\n\r\nFor example, to use Hoek to set configuration with default options:\r\n```javascript\r\nvar Hoek = require('hoek');\r\n\r\nvar default = {url : \"www.github.com\", port : \"8000\", debug : true}\r\n\r\nvar config = Hoek.applyToDefaults(default, {port : \"3000\", admin : true});\r\n\r\n// In this case, config would be { url: 'www.github.com', port: '3000', debug: true, admin: true }\r\n```\r\n\r\nUnder each of the sections (such as Array), there are subsections which correspond to Hoek methods. Each subsection will explain how to use the corresponding method. In each js excerpt below, the var Hoek = require('hoek') is omitted for brevity.\r\n\r\n## Object\r\n\r\nHoek provides several helpful methods for objects and arrays.\r\n\r\n### clone(obj)\r\n\r\nThis method is used to clone an object or an array. A *deep copy* is made (duplicates everything, including values that are objects). 
\r\n\r\n```javascript\r\n\r\nvar nestedObj = {\r\n w: /^something$/ig,\r\n x: {\r\n a: [1, 2, 3],\r\n b: 123456,\r\n c: new Date()\r\n },\r\n y: 'y',\r\n z: new Date()\r\n };\r\n\r\nvar copy = Hoek.clone(nestedObj);\r\n\r\ncopy.x.b = 100;\r\n\r\nconsole.log(copy.y) // results in 'y'\r\nconsole.log(nestedObj.x.b) // results in 123456\r\nconsole.log(copy.x.b) // results in 100\r\n```\r\n\r\n### merge(target, source, isNullOverride, isMergeArrays)\r\nisNullOverride, isMergeArrays default to true\r\n\r\nMerge all the properties of source into target, source wins in conflic, and by default null and undefined from source are applied\r\n\r\n\r\n```javascript\r\n\r\nvar target = {a: 1, b : 2}\r\nvar source = {a: 0, c: 5}\r\nvar source2 = {a: null, c: 5}\r\n\r\nvar targetArray = [1, 2, 3];\r\nvar sourceArray = [4, 5];\r\n\r\nvar newTarget = Hoek.merge(target, source); // results in {a: 0, b: 2, c: 5}\r\nnewTarget = Hoek.merge(target, source2); // results in {a: null, b: 2, c: 5}\r\nnewTarget = Hoek.merge(target, source2, false); // results in {a: 1, b: 2, c: 5}\r\n\r\nnewTarget = Hoek.merge(targetArray, sourceArray) // results in [1, 2, 3, 4, 5]\r\nnewTarget = Hoek.merge(targetArray, sourceArray, true, false) // results in [4, 5]\r\n\r\n\r\n\r\n\r\n```\r\n\r\n### applyToDefaults(defaults, options)\r\n\r\nApply options to a copy of the defaults\r\n\r\n```javascript\r\n\r\nvar defaults = {host: \"localhost\", port: 8000};\r\nvar options = {port: 8080};\r\n\r\nvar config = Hoek.applyToDefaults(defaults, options); // results in {host: \"localhost\", port: 8080};\r\n\r\n\r\n```\r\n\r\n### unique(array, key)\r\n\r\nRemove duplicate items from Array\r\n\r\n```javascript\r\n\r\nvar array = [1, 2, 2, 3, 3, 4, 5, 6];\r\n\r\nvar newArray = Hoek.unique(array); // results in [1,2,3,4,5,6];\r\n\r\narray = [{id: 1}, {id: 1}, {id: 2}];\r\n\r\nnewArray = Hoek.unique(array, \"id\") // results in [{id: 1}, {id: 2}]\r\n\r\n```\r\n\r\n### mapToObject(array, key)\r\n\r\nConvert an Array into an Object\r\n\r\n```javascript\r\n\r\nvar array = [1,2,3];\r\nvar newObject = Hoek.mapToObject(array); // results in [{\"1\": true}, {\"2\": true}, {\"3\": true}]\r\n\r\narray = [{id: 1}, {id: 2}];\r\nnewObject = Hoek.mapToObject(array, \"id\") // results in [{\"id\": 1}, {\"id\": 2}]\r\n\r\n```\r\n### intersect(array1, array2)\r\n\r\nFind the common unique items in two arrays\r\n\r\n```javascript\r\n\r\nvar array1 = [1, 2, 3];\r\nvar array2 = [1, 4, 5];\r\n\r\nvar newArray = Hoek.intersect(array1, array2) // results in [1]\r\n\r\n```\r\n\r\n### matchKeys(obj, keys) \r\n\r\nFind which keys are present\r\n\r\n```javascript\r\n\r\nvar obj = {a: 1, b: 2, c: 3};\r\nvar keys = [\"a\", \"e\"];\r\n\r\nHoek.matchKeys(obj, keys) // returns [\"a\"]\r\n\r\n```\r\n\r\n### flatten(array, target)\r\n\r\nFlatten an array\r\n\r\n```javascript\r\n\r\nvar array = [1, 2, 3];\r\nvar target = [4, 5]; \r\n\r\nvar flattenedArray = Hoek.flatten(array, target) // results in [4, 5, 1, 2, 3];\r\n\r\n```\r\n\r\n### removeKeys(object, keys)\r\n\r\nRemove keys\r\n\r\n```javascript\r\n\r\nvar object = {a: 1, b: 2, c: 3, d: 4};\r\n\r\nvar keys = [\"a\", \"b\"];\r\n\r\nHoek.removeKeys(object, keys) // object is now {c: 3, d: 4}\r\n\r\n```\r\n\r\n### reach(obj, chain)\r\n\r\nConverts an object key chain string to reference\r\n\r\n```javascript\r\n\r\nvar chain = 'a.b.c';\r\nvar obj = {a : {b : { c : 1}}};\r\n\r\nHoek.reach(obj, chain) // returns 1\r\n\r\n```\r\n\r\n### inheritAsync(self, obj, keys) \r\n\r\nInherits a selected set of methods from an object, wrapping 
functions in asynchronous syntax and catching errors\r\n\r\n```javascript\r\n\r\nvar targetFunc = function () { };\r\n\r\nvar proto = {\r\n a: function () {\r\n return 'a!';\r\n },\r\n b: function () {\r\n return 'b!';\r\n },\r\n c: function () {\r\n throw new Error('c!');\r\n }\r\n };\r\n\r\nvar keys = ['a', 'c'];\r\n\r\nHoek.inheritAsync(targetFunc, proto, ['a', 'c']);\r\n\r\nvar target = new targetFunc();\r\n\r\ntarget.a(function(err, result){console.log(result)} // returns 'a!' \r\n\r\ntarget.c(function(err, result){console.log(result)} // returns undefined\r\n\r\ntarget.b(function(err, result){console.log(result)} // gives error: Object [object Object] has no method 'b'\r\n\r\n```\r\n\r\n### rename(obj, from, to)\r\n\r\nRename a key of an object\r\n\r\n```javascript\r\n\r\nvar obj = {a : 1, b : 2};\r\n\r\nHoek.rename(obj, \"a\", \"c\"); // obj is now {c : 1, b : 2}\r\n\r\n```\r\n\r\n\r\n# Timer\r\n\r\nA Timer object. Initializing a new timer object sets the ts to the number of milliseconds elapsed since 1 January 1970 00:00:00 UTC.\r\n\r\n```javascript\r\n\r\n\r\nexample : \r\n\r\n\r\nvar timerObj = new Hoek.Timer();\r\nconsole.log(\"Time is now: \" + timerObj.ts)\r\nconsole.log(\"Elapsed time from initialization: \" + timerObj.elapsed() + 'milliseconds')\r\n\r\n```\r\n\r\n# Binary Encoding/Decoding\r\n\r\n### base64urlEncode(value)\r\n\r\nEncodes value in Base64 or URL encoding\r\n\r\n### base64urlDecode(value)\r\n\r\nDecodes data in Base64 or URL encoding.\r\n# Escaping Characters\r\n\r\nHoek provides convenient methods for escaping html characters. The escaped characters are as followed:\r\n\r\n```javascript\r\n\r\ninternals.htmlEscaped = {\r\n '&': '&amp;',\r\n '<': '&lt;',\r\n '>': '&gt;',\r\n '\"': '&quot;',\r\n \"'\": '&#x27;',\r\n '`': '&#x60;'\r\n};\r\n\r\n```\r\n\r\n### escapeHtml(string)\r\n\r\n```javascript\r\n\r\nvar string = '<html> hey </html>';\r\nvar escapedString = Hoek.escapeHtml(string); // returns &lt;html&gt; hey &lt;/html&gt;\r\n\r\n```\r\n\r\n### escapeHeaderAttribute(attribute)\r\n\r\nEscape attribute value for use in HTTP header\r\n\r\n```javascript\r\n\r\nvar a = Hoek.escapeHeaderAttribute('I said \"go w\\\\o me\"'); //returns I said \\\"go w\\\\o me\\\"\r\n\r\n\r\n```\r\n\r\n\r\n### escapeRegex(string)\r\n\r\nEscape string for Regex construction\r\n\r\n```javascript\r\n\r\nvar a = Hoek.escapeRegex('4^f$s.4*5+-_?%=#!:@|~\\\\/`\"(>)[<]d{}s,'); // returns 4\\^f\\$s\\.4\\*5\\+\\-_\\?%\\=#\\!\\:@\\|~\\\\\\/`\"\\(>\\)\\[<\\]d\\{\\}s\\,\r\n\r\n\r\n\r\n```\r\n\r\n# Errors\r\n\r\n### assert(message)\r\n\r\n```javascript\r\n\r\nvar a = 1, b =2;\r\n\r\nHoek.assert(a === b, 'a should equal b'); // ABORT: a should equal b\r\n\r\n```\r\n\r\n### abort(message)\r\n\r\nFirst checks if process.env.NODE_ENV === 'test', and if so, throws error message. 
Otherwise,\r\ndisplays most recent stack and then exits process.\r\n\r\n\r\n\r\n### displayStack(slice)\r\n\r\nDisplays the trace stack\r\n\r\n```javascript\r\n\r\nvar stack = Hoek.displayStack();\r\nconsole.log(stack) // returns something like:\r\n\r\n[ 'null (/Users/user/Desktop/hoek/test.js:4:18)',\r\n 'Module._compile (module.js:449:26)',\r\n 'Module._extensions..js (module.js:467:10)',\r\n 'Module.load (module.js:356:32)',\r\n 'Module._load (module.js:312:12)',\r\n 'Module.runMain (module.js:492:10)',\r\n 'startup.processNextTick.process._tickCallback (node.js:244:9)' ]\r\n\r\n```\r\n\r\n### callStack(slice)\r\n\r\nReturns a trace stack array.\r\n\r\n```javascript\r\n\r\nvar stack = Hoek.callStack();\r\nconsole.log(stack) // returns something like:\r\n\r\n[ [ '/Users/user/Desktop/hoek/test.js', 4, 18, null, false ],\r\n [ 'module.js', 449, 26, 'Module._compile', false ],\r\n [ 'module.js', 467, 10, 'Module._extensions..js', false ],\r\n [ 'module.js', 356, 32, 'Module.load', false ],\r\n [ 'module.js', 312, 12, 'Module._load', false ],\r\n [ 'module.js', 492, 10, 'Module.runMain', false ],\r\n [ 'node.js',\r\n 244,\r\n 9,\r\n 'startup.processNextTick.process._tickCallback',\r\n false ] ]\r\n\r\n\r\n```\r\n\r\n### toss(condition)\r\n\r\ntoss(condition /*, [message], callback */)\r\n\r\nReturn an error as first argument of a callback\r\n\r\n\r\n# Load Files\r\n\r\n### loadPackage(dir)\r\n\r\nLoad and parse package.json process root or given directory\r\n\r\n```javascript\r\n\r\nvar pack = Hoek.loadPackage(); // pack.name === 'hoek'\r\n\r\n```\r\n\r\n### loadDirModules(path, excludeFiles, target) \r\n\r\nLoads modules from a given path; option to exclude files (array).\r\n\r\n\r\n\r\n\r\n",
- "readmeFilename": "README.md",
+ "_id": "hoek@0.9.1",
+ "dist": {
+ "shasum": "3d322462badf07716ea7eb85baf88079cddce505",
+ "tarball": "http://registry.npmjs.org/hoek/-/hoek-0.9.1.tgz"
+ },
+ "_from": "hoek@>=0.9.0 <0.10.0",
+ "_npmVersion": "1.2.18",
+ "_npmUser": {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ },
+ "maintainers": [
+ {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ },
+ {
+ "name": "thegoleffect",
+ "email": "thegoleffect@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_shasum": "3d322462badf07716ea7eb85baf88079cddce505",
+ "_resolved": "https://registry.npmjs.org/hoek/-/hoek-0.9.1.tgz",
"bugs": {
"url": "https://github.com/spumko/hoek/issues"
},
- "homepage": "https://github.com/spumko/hoek",
- "_id": "hoek@0.9.1",
- "_shasum": "3d322462badf07716ea7eb85baf88079cddce505",
- "_from": "hoek@>=0.9.0 <0.10.0",
- "_resolved": "https://registry.npmjs.org/hoek/-/hoek-0.9.1.tgz"
+ "readme": "ERROR: No README data found!",
+ "homepage": "https://github.com/spumko/hoek"
}
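
For orientation, a minimal sketch of the hoek@0.9.x helpers described in the README text this hunk drops from the package metadata (applyToDefaults, clone and reach, as documented there); the concrete values are illustrative only, not part of the commit.

```javascript
var Hoek = require('hoek');

// Apply caller options over a copy of the defaults.
var defaults = { host: 'localhost', port: 8000, debug: true };
var config = Hoek.applyToDefaults(defaults, { port: 8080 });
// config => { host: 'localhost', port: 8080, debug: true }

// Deep-copy an object, then follow a key chain into it.
var nested = { a: { b: { c: 1 } } };
var copy = Hoek.clone(nested);           // copy.a is a new object, not a shared reference
var value = Hoek.reach(nested, 'a.b.c'); // 1
```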
diff --git a/node_modules/request/node_modules/hawk/node_modules/sntp/package.json b/node_modules/request/node_modules/hawk/node_modules/sntp/package.json
index c7a1b6464..0656c84e1 100755
--- a/node_modules/request/node_modules/hawk/node_modules/sntp/package.json
+++ b/node_modules/request/node_modules/hawk/node_modules/sntp/package.json
@@ -37,14 +37,29 @@
"url": "http://github.com/hueniverse/sntp/raw/master/LICENSE"
}
],
- "readme": "# sntp\n\nAn SNTP v4 client (RFC4330) for node. Simpy connects to the NTP or SNTP server requested and returns the server time\nalong with the roundtrip duration and clock offset. To adjust the local time to the NTP time, add the returned `t` offset\nto the local time.\n\n[![Build Status](https://secure.travis-ci.org/hueniverse/sntp.png)](http://travis-ci.org/hueniverse/sntp)\n\n# Usage\n\n```javascript\nvar Sntp = require('sntp');\n\n// All options are optional\n\nvar options = {\n host: 'nist1-sj.ustiming.org', // Defaults to pool.ntp.org\n port: 123, // Defaults to 123 (NTP)\n resolveReference: true, // Default to false (not resolving)\n timeout: 1000 // Defaults to zero (no timeout)\n};\n\n// Request server time\n\nSntp.time(options, function (err, time) {\n\n if (err) {\n console.log('Failed: ' + err.message);\n process.exit(1);\n }\n\n console.log('Local clock is off by: ' + time.t + ' milliseconds');\n process.exit(0);\n});\n```\n\nIf an application needs to maintain continuous time synchronization, the module provides a stateful method for\nquerying the current offset only when the last one is too old (defaults to daily).\n\n```javascript\n// Request offset once\n\nSntp.offset(function (err, offset) {\n\n console.log(offset); // New (served fresh)\n\n // Request offset again\n\n Sntp.offset(function (err, offset) {\n\n console.log(offset); // Identical (served from cache)\n });\n});\n```\n\nTo set a background offset refresh, start the interval and use the provided now() method. If for any reason the\nclient fails to obtain an up-to-date offset, the current system clock is used.\n\n```javascript\nvar before = Sntp.now(); // System time without offset\n\nSntp.start(function () {\n\n var now = Sntp.now(); // With offset\n Sntp.stop();\n});\n```\n\n",
- "readmeFilename": "README.md",
+ "_id": "sntp@0.2.4",
+ "dist": {
+ "shasum": "fb885f18b0f3aad189f824862536bceeec750900",
+ "tarball": "http://registry.npmjs.org/sntp/-/sntp-0.2.4.tgz"
+ },
+ "_from": "sntp@>=0.2.0 <0.3.0",
+ "_npmVersion": "1.2.18",
+ "_npmUser": {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ },
+ "maintainers": [
+ {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ }
+ ],
+ "directories": {},
+ "_shasum": "fb885f18b0f3aad189f824862536bceeec750900",
+ "_resolved": "https://registry.npmjs.org/sntp/-/sntp-0.2.4.tgz",
"bugs": {
"url": "https://github.com/hueniverse/sntp/issues"
},
- "homepage": "https://github.com/hueniverse/sntp",
- "_id": "sntp@0.2.4",
- "_shasum": "fb885f18b0f3aad189f824862536bceeec750900",
- "_from": "sntp@>=0.2.0 <0.3.0",
- "_resolved": "https://registry.npmjs.org/sntp/-/sntp-0.2.4.tgz"
+ "readme": "ERROR: No README data found!",
+ "homepage": "https://github.com/hueniverse/sntp"
}
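
Likewise, a minimal sketch of the sntp@0.2.x client call shown in the README removed above; the host and timeout values are illustrative.

```javascript
var Sntp = require('sntp');

// All options are optional; host defaults to pool.ntp.org, port to 123.
var options = {
  host: 'pool.ntp.org',
  port: 123,
  timeout: 1000
};

Sntp.time(options, function (err, time) {
  if (err) {
    console.log('Failed: ' + err.message);
    return;
  }
  // time.t is the clock offset to add to local time.
  console.log('Local clock is off by: ' + time.t + ' milliseconds');
});
```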
diff --git a/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json b/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json
index d42d3ecb9..8c68193cd 100644
--- a/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json
+++ b/node_modules/request/node_modules/http-signature/node_modules/asn1/package.json
@@ -32,14 +32,32 @@
"pretest": "which gjslint; if [[ \"$?\" = 0 ]] ; then gjslint --nojsdoc -r lib -r tst; else echo \"Missing gjslint. Skipping lint\"; fi",
"test": "tap ./tst"
},
- "readme": "node-asn1 is a library for encoding and decoding ASN.1 datatypes in pure JS.\nCurrently BER encoding is supported; at some point I'll likely have to do DER.\n\n## Usage\n\nMostly, if you're *actually* needing to read and write ASN.1, you probably don't\nneed this readme to explain what and why. If you have no idea what ASN.1 is,\nsee this: ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc\n\nThe source is pretty much self-explanatory, and has read/write methods for the\ncommon types out there.\n\n### Decoding\n\nThe following reads an ASN.1 sequence with a boolean.\n\n var Ber = require('asn1').Ber;\n\n var reader = new Ber.Reader(new Buffer([0x30, 0x03, 0x01, 0x01, 0xff]));\n\n reader.readSequence();\n console.log('Sequence len: ' + reader.length);\n if (reader.peek() === Ber.Boolean)\n console.log(reader.readBoolean());\n\n### Encoding\n\nThe following generates the same payload as above.\n\n var Ber = require('asn1').Ber;\n\n var writer = new Ber.Writer();\n\n writer.startSequence();\n writer.writeBoolean(true);\n writer.endSequence();\n\n console.log(writer.buffer);\n\n## Installation\n\n npm install asn1\n\n## License\n\nMIT.\n\n## Bugs\n\nSee <https://github.com/mcavage/node-asn1/issues>.\n",
- "readmeFilename": "README.md",
- "bugs": {
- "url": "https://github.com/mcavage/node-asn1/issues"
+ "_npmUser": {
+ "name": "mcavage",
+ "email": "mcavage@gmail.com"
},
- "homepage": "https://github.com/mcavage/node-asn1",
"_id": "asn1@0.1.11",
+ "_engineSupported": true,
+ "_npmVersion": "1.1.0-beta-4",
+ "_nodeVersion": "v0.6.6",
+ "_defaultsLoaded": true,
+ "dist": {
+ "shasum": "559be18376d08a4ec4dbe80877d27818639b2df7",
+ "tarball": "http://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "mcavage",
+ "email": "mcavage@gmail.com"
+ }
+ ],
+ "directories": {},
"_shasum": "559be18376d08a4ec4dbe80877d27818639b2df7",
+ "_resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz",
"_from": "asn1@0.1.11",
- "_resolved": "https://registry.npmjs.org/asn1/-/asn1-0.1.11.tgz"
+ "bugs": {
+ "url": "https://github.com/mcavage/node-asn1/issues"
+ },
+ "readme": "ERROR: No README data found!",
+ "homepage": "https://github.com/mcavage/node-asn1"
}
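
A minimal sketch of the asn1 BER reader/writer pair from the README text removed above; it encodes a one-boolean sequence and reads it back.

```javascript
var Ber = require('asn1').Ber;

// Encode a sequence containing a single boolean.
var writer = new Ber.Writer();
writer.startSequence();
writer.writeBoolean(true);
writer.endSequence();

// Decode the same payload back out of the resulting buffer.
var reader = new Ber.Reader(writer.buffer);
reader.readSequence();
console.log('Sequence len: ' + reader.length);
if (reader.peek() === Ber.Boolean) {
  console.log(reader.readBoolean()); // true
}
```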
diff --git a/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json b/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json
index 633e3e68f..b4cfa49be 100644
--- a/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json
+++ b/node_modules/request/node_modules/http-signature/node_modules/assert-plus/package.json
@@ -13,11 +13,25 @@
"engines": {
"node": ">=0.6"
},
- "readme": "# node-assert-plus\n\nThis library is a super small wrapper over node's assert module that has two\nthings: (1) the ability to disable assertions with the environment variable\nNODE_NDEBUG, and (2) some API wrappers for argument testing. Like\n`assert.string(myArg, 'myArg')`. As a simple example, most of my code looks\nlike this:\n\n var assert = require('assert-plus');\n\n function fooAccount(options, callback) {\n\t assert.object(options, 'options');\n\t\tassert.number(options.id, 'options.id);\n\t\tassert.bool(options.isManager, 'options.isManager');\n\t\tassert.string(options.name, 'options.name');\n\t\tassert.arrayOfString(options.email, 'options.email');\n\t\tassert.func(callback, 'callback');\n\n // Do stuff\n\t\tcallback(null, {});\n }\n\n# API\n\nAll methods that *aren't* part of node's core assert API are simply assumed to\ntake an argument, and then a string 'name' that's not a message; `AssertionError`\nwill be thrown if the assertion fails with a message like:\n\n AssertionError: foo (string) is required\n\tat test (/home/mark/work/foo/foo.js:3:9)\n\tat Object.<anonymous> (/home/mark/work/foo/foo.js:15:1)\n\tat Module._compile (module.js:446:26)\n\tat Object..js (module.js:464:10)\n\tat Module.load (module.js:353:31)\n\tat Function._load (module.js:311:12)\n\tat Array.0 (module.js:484:10)\n\tat EventEmitter._tickCallback (node.js:190:38)\n\nfrom:\n\n function test(foo) {\n\t assert.string(foo, 'foo');\n }\n\nThere you go. You can check that arrays are of a homogenous type with `Arrayof$Type`:\n\n function test(foo) {\n\t assert.arrayOfString(foo, 'foo');\n }\n\nYou can assert IFF an argument is not `undefined` (i.e., an optional arg):\n\n assert.optionalString(foo, 'foo');\n\nLastly, you can opt-out of assertion checking altogether by setting the\nenvironment variable `NODE_NDEBUG=1`. 
This is pseudo-useful if you have\nlots of assertions, and don't want to pay `typeof ()` taxes to v8 in\nproduction.\n\nThe complete list of APIs is:\n\n* assert.bool\n* assert.buffer\n* assert.func\n* assert.number\n* assert.object\n* assert.string\n* assert.arrayOfBool\n* assert.arrayOfFunc\n* assert.arrayOfNumber\n* assert.arrayOfObject\n* assert.arrayOfString\n* assert.optionalBool\n* assert.optionalBuffer\n* assert.optionalFunc\n* assert.optionalNumber\n* assert.optionalObject\n* assert.optionalString\n* assert.optionalArrayOfBool\n* assert.optionalArrayOfFunc\n* assert.optionalArrayOfNumber\n* assert.optionalArrayOfObject\n* assert.optionalArrayOfString\n* assert.AssertionError\n* assert.fail\n* assert.ok\n* assert.equal\n* assert.notEqual\n* assert.deepEqual\n* assert.notDeepEqual\n* assert.strictEqual\n* assert.notStrictEqual\n* assert.throws\n* assert.doesNotThrow\n* assert.ifError\n\n# Installation\n\n npm install assert-plus\n\n## License\n\nThe MIT License (MIT)\nCopyright (c) 2012 Mark Cavage\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software is furnished to do so,\nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n\n## Bugs\n\nSee <https://github.com/mcavage/node-assert-plus/issues>.\n",
- "readmeFilename": "README.md",
"_id": "assert-plus@0.1.2",
+ "dist": {
+ "shasum": "d93ffdbb67ac5507779be316a7d65146417beef8",
+ "tarball": "http://registry.npmjs.org/assert-plus/-/assert-plus-0.1.2.tgz"
+ },
+ "_npmVersion": "1.1.59",
+ "_npmUser": {
+ "name": "mcavage",
+ "email": "mcavage@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "mcavage",
+ "email": "mcavage@gmail.com"
+ }
+ ],
+ "directories": {},
"_shasum": "d93ffdbb67ac5507779be316a7d65146417beef8",
- "_from": "assert-plus@0.1.2",
"_resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.2.tgz",
- "scripts": {}
+ "_from": "assert-plus@0.1.2",
+ "readme": "ERROR: No README data found!"
}
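
A minimal sketch of the assert-plus argument checks described in the README removed above; the function and property names here are illustrative.

```javascript
var assert = require('assert-plus');

function createAccount(options, callback) {
  assert.object(options, 'options');
  assert.number(options.id, 'options.id');
  assert.string(options.name, 'options.name');
  assert.func(callback, 'callback');

  // Throws AssertionError ("options.id (number) is required") on bad input,
  // unless assertions are disabled with NODE_NDEBUG=1.
  callback(null, {});
}

createAccount({ id: 1, name: 'example' }, function (err, account) {
  console.log(err, account);
});
```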
diff --git a/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json b/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json
index a7e8a0445..5840d050c 100644
--- a/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json
+++ b/node_modules/request/node_modules/http-signature/node_modules/ctype/package.json
@@ -11,11 +11,26 @@
"node": ">= 0.4"
},
"main": "ctype.js",
- "readme": "Node-CType is a way to read and write binary data in structured and easy to use\nformat. Its name comes from the C header file.\n\nTo get started, simply clone the repository or use npm to install it. Once it is\nthere, simply require it.\n\ngit clone git://github.com/rmustacc/node-ctype\nnpm install ctype\nvar mod_ctype = require('ctype')\n\n\nThere are two APIs that you can use, depending on what abstraction you'd like.\nThe low level API let's you read and write individual integers and floats from\nbuffers. The higher level API let's you read and write structures of these. To\nillustrate this, let's looks look at how we would read and write a binary\nencoded x,y point.\n\nIn C we would define this structure as follows:\n\ntypedef struct point {\n\tuint16_t\tp_x;\n\tuint16_t\tp_y;\n} point_t;\n\nTo read a binary encoded point from a Buffer, we first need to create a CType\nparser (where we specify the endian and other options) and add the typedef.\n\nvar parser = new mod_ctype.Parser({ endian: 'big' });\nparser.typedef('point_t', [\n\t{ x: { type: 'uint16_t' } },\n\t{ y: { type: 'uint16_t' } }\n]);\n\nFrom here, given a buffer buf and an offset into it, we can read a point.\n\nvar out = parser.readData([ { point: { type: 'point_t' } } ], buffer, 0);\nconsole.log(out);\n{ point: { x: 23, y: 42 } }\n\nAnother way to get the same information would be to use the low level methods.\nNote that these require you to manually deal with the offset. Here's how we'd\nget the same values of x and y from the buffer.\n\nvar x = mod_ctype.ruint16(buf, 'big', 0);\nvar y = mod_ctype.ruint16(buf, 'big', 2);\nconsole.log(x + ', ' + y);\n23, 42\n\nThe true power of this API comes from the ability to define and nest typedefs,\njust as you would in C. By default, the following types are defined by default.\nNote that they return a Number, unless indicated otherwise.\n\n * int8_t\n * int16_t\n * int32_t\n * int64_t (returns an array where val[0] << 32 + val[1] would be the value)\n * uint8_t\n * uint16_t\n * uint32_t\n * uint64_t (returns an array where val[0] << 32 + val[1] would be the value)\n * float\n * double\n * char (either returns a buffer with that character or a uint8_t)\n * char[] (returns an object with the buffer and the number of characters read which is either the total amount requested or until the first 0)\n\n\nctf2json integration:\n\nNode-CType supports consuming the output of ctf2json. Once you read in a JSON file,\nall you have to do to add all the definitions it contains is:\n\nvar data, parser;\ndata = JSON.parse(parsedJSONData);\nparser = mod_ctype.parseCTF(data, { endian: 'big' });\n\nFor more documentation, see the file README.old. Full documentation is in the\nprocess of being rewritten as a series of manual pages which will be available\nin the repository and online for viewing.\n\nTo read the ctio manual page simple run, from the root of the workspace:\n\nman -Mman -s 3ctype ctio\n",
- "readmeFilename": "README",
"_id": "ctype@0.5.2",
+ "dist": {
+ "shasum": "fe8091d468a373a0b0c9ff8bbfb3425c00973a1d",
+ "tarball": "http://registry.npmjs.org/ctype/-/ctype-0.5.2.tgz"
+ },
+ "_npmVersion": "1.1.59",
+ "_npmUser": {
+ "name": "rm",
+ "email": "rm@fingolfin.org"
+ },
+ "maintainers": [
+ {
+ "name": "rm",
+ "email": "rm@fingolfin.org"
+ }
+ ],
+ "directories": {},
"_shasum": "fe8091d468a373a0b0c9ff8bbfb3425c00973a1d",
- "_from": "ctype@0.5.2",
"_resolved": "https://registry.npmjs.org/ctype/-/ctype-0.5.2.tgz",
+ "_from": "ctype@0.5.2",
+ "readme": "ERROR: No README data found!",
"scripts": {}
}
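
A minimal sketch of the ctype struct parsing walked through in the README removed above; the buffer contents are illustrative.

```javascript
var mod_ctype = require('ctype');

// Describe a big-endian struct { uint16_t x; uint16_t y; } as in the README.
var parser = new mod_ctype.Parser({ endian: 'big' });
parser.typedef('point_t', [
  { x: { type: 'uint16_t' } },
  { y: { type: 'uint16_t' } }
]);

var buf = new Buffer([0x00, 0x17, 0x00, 0x2a]); // x = 23, y = 42
var out = parser.readData([ { point: { type: 'point_t' } } ], buf, 0);
console.log(out); // { point: { x: 23, y: 42 } }
```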
diff --git a/node_modules/request/node_modules/http-signature/package.json b/node_modules/request/node_modules/http-signature/package.json
index 3307a8a20..c6bfef975 100644
--- a/node_modules/request/node_modules/http-signature/package.json
+++ b/node_modules/request/node_modules/http-signature/package.json
@@ -27,12 +27,28 @@
},
"readme": "# node-http-signature\n\nnode-http-signature is a node.js library that has client and server components\nfor Joyent's [HTTP Signature Scheme](http_signing.md).\n\n## Usage\n\nNote the example below signs a request with the same key/cert used to start an\nHTTP server. This is almost certainly not what you actaully want, but is just\nused to illustrate the API calls; you will need to provide your own key\nmanagement in addition to this library.\n\n### Client\n\n var fs = require('fs');\n var https = require('https');\n var httpSignature = require('http-signature');\n\n var key = fs.readFileSync('./key.pem', 'ascii');\n\n var options = {\n host: 'localhost',\n port: 8443,\n path: '/',\n method: 'GET',\n headers: {}\n };\n\n // Adds a 'Date' header in, signs it, and adds the\n // 'Authorization' header in.\n var req = https.request(options, function(res) {\n console.log(res.statusCode);\n });\n\n\n httpSignature.sign(req, {\n key: key,\n keyId: './cert.pem'\n });\n\n req.end();\n\n### Server\n\n var fs = require('fs');\n var https = require('https');\n var httpSignature = require('http-signature');\n\n var options = {\n key: fs.readFileSync('./key.pem'),\n cert: fs.readFileSync('./cert.pem')\n };\n\n https.createServer(options, function (req, res) {\n var rc = 200;\n var parsed = httpSignature.parseRequest(req);\n var pub = fs.readFileSync(parsed.keyId, 'ascii');\n if (!httpSignature.verifySignature(parsed, pub))\n rc = 401;\n\n res.writeHead(rc);\n res.end();\n }).listen(8443);\n\n## Installation\n\n npm install http-signature\n\n## License\n\nMIT.\n\n## Bugs\n\nSee <https://github.com/joyent/node-http-signature/issues>.\n",
"readmeFilename": "README.md",
+ "_id": "http-signature@0.10.0",
+ "dist": {
+ "shasum": "1494e4f5000a83c0f11bcc12d6007c530cb99582",
+ "tarball": "http://registry.npmjs.org/http-signature/-/http-signature-0.10.0.tgz"
+ },
+ "_from": "http-signature@>=0.10.0 <0.11.0",
+ "_npmVersion": "1.2.18",
+ "_npmUser": {
+ "name": "mcavage",
+ "email": "mcavage@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "mcavage",
+ "email": "mcavage@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_shasum": "1494e4f5000a83c0f11bcc12d6007c530cb99582",
+ "_resolved": "https://registry.npmjs.org/http-signature/-/http-signature-0.10.0.tgz",
"bugs": {
"url": "https://github.com/joyent/node-http-signature/issues"
},
- "homepage": "https://github.com/joyent/node-http-signature",
- "_id": "http-signature@0.10.0",
- "_shasum": "1494e4f5000a83c0f11bcc12d6007c530cb99582",
- "_from": "http-signature@>=0.10.0 <0.11.0",
- "_resolved": "https://registry.npmjs.org/http-signature/-/http-signature-0.10.0.tgz"
+ "homepage": "https://github.com/joyent/node-http-signature"
}
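
A minimal client-side sketch of http-signature request signing, following the README that remains in place in this hunk; key.pem and cert.pem are placeholder paths you would supply yourself, and a server must be listening on the target port for the request to complete.

```javascript
var fs = require('fs');
var https = require('https');
var httpSignature = require('http-signature');

var key = fs.readFileSync('./key.pem', 'ascii'); // placeholder key file

var req = https.request({
  host: 'localhost',
  port: 8443,
  path: '/',
  method: 'GET',
  headers: {}
}, function (res) {
  console.log(res.statusCode);
});

// Adds a Date header and signs it into an Authorization header.
httpSignature.sign(req, { key: key, keyId: './cert.pem' });
req.end();
```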
diff --git a/node_modules/request/node_modules/json-stringify-safe/package.json b/node_modules/request/node_modules/json-stringify-safe/package.json
index 311f7cc9d..90549cb6a 100644
--- a/node_modules/request/node_modules/json-stringify-safe/package.json
+++ b/node_modules/request/node_modules/json-stringify-safe/package.json
@@ -22,14 +22,29 @@
"url": "http://blog.izs.me"
},
"license": "BSD",
- "readmeFilename": "README.md",
- "readme": "# json-stringify-safe\n\nLike JSON.stringify, but doesn't throw on circular references.\n\n## Usage\n\nTakes the same arguments as `JSON.stringify`.\n\n```javascript\nvar stringify = require('json-stringify-safe');\nvar circularObj = {};\ncircularObj.circularRef = circularObj;\ncircularObj.list = [ circularObj, circularObj ];\nconsole.log(stringify(circularObj, null, 2));\n```\n\nOutput:\n\n```json\n{\n \"circularRef\": \"[Circular]\",\n \"list\": [\n \"[Circular]\",\n \"[Circular]\"\n ]\n}\n```\n\n## Details\n\n```\nstringify(obj, serializer, indent, decycler)\n```\n\nThe first three arguments are the same as to JSON.stringify. The last\nis an argument that's only used when the object has been seen already.\n\nThe default `decycler` function returns the string `'[Circular]'`.\nIf, for example, you pass in `function(k,v){}` (return nothing) then it\nwill prune cycles. If you pass in `function(k,v){ return {foo: 'bar'}}`,\nthen cyclical objects will always be represented as `{\"foo\":\"bar\"}` in\nthe result.\n\n```\nstringify.getSerialize(serializer, decycler)\n```\n\nReturns a serializer that can be used elsewhere. This is the actual\nfunction that's passed to JSON.stringify.\n",
"bugs": {
"url": "https://github.com/isaacs/json-stringify-safe/issues"
},
- "homepage": "https://github.com/isaacs/json-stringify-safe",
"_id": "json-stringify-safe@5.0.0",
- "_shasum": "4c1f228b5050837eba9d21f50c2e6e320624566e",
+ "dist": {
+ "shasum": "4c1f228b5050837eba9d21f50c2e6e320624566e",
+ "tarball": "http://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.0.tgz"
+ },
"_from": "json-stringify-safe@>=5.0.0 <5.1.0",
- "_resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.0.tgz"
+ "_npmVersion": "1.3.6",
+ "_npmUser": {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "directories": {},
+ "_shasum": "4c1f228b5050837eba9d21f50c2e6e320624566e",
+ "_resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.0.tgz",
+ "readme": "ERROR: No README data found!",
+ "homepage": "https://github.com/isaacs/json-stringify-safe"
}
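
A minimal sketch of json-stringify-safe on a circular structure, per the README removed above.

```javascript
var stringify = require('json-stringify-safe');

var circularObj = {};
circularObj.circularRef = circularObj;
circularObj.list = [circularObj, circularObj];

// Unlike JSON.stringify, this does not throw on the circular references;
// cycles are rendered as the string "[Circular]" by the default decycler.
console.log(stringify(circularObj, null, 2));
```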
diff --git a/node_modules/request/node_modules/mime-types/package.json b/node_modules/request/node_modules/mime-types/package.json
index 0a9b09b01..67432028b 100644
--- a/node_modules/request/node_modules/mime-types/package.json
+++ b/node_modules/request/node_modules/mime-types/package.json
@@ -32,8 +32,7 @@
"scripts": {
"test": "make test"
},
- "readme": "# mime-types\n[![NPM version](https://badge.fury.io/js/mime-types.svg)](https://badge.fury.io/js/mime-types) [![Build Status](https://travis-ci.org/expressjs/mime-types.svg?branch=master)](https://travis-ci.org/expressjs/mime-types)\n\nThe ultimate javascript content-type utility.\n\n### Install\n\n```sh\n$ npm install mime-types\n```\n\n#### Similar to [node-mime](https://github.com/broofa/node-mime), except:\n\n- __No fallbacks.__ Instead of naively returning the first available type, `mime-types` simply returns `false`, so do `var type = mime.lookup('unrecognized') || 'application/octet-stream'`.\n- No `new Mime()` business, so you could do `var lookup = require('mime-types').lookup`.\n- Additional mime types are added such as jade and stylus. Feel free to add more!\n- Browser support via Browserify and Component by converting lists to JSON files.\n\nOtherwise, the API is compatible.\n\n### Adding Types\n\nIf you'd like to add additional types,\nsimply create a PR adding the type to `custom.json` and\na reference link to the [sources](SOURCES.md).\n\nDo __NOT__ edit `mime.json` or `node.json`.\nThose are pulled using `build.js`.\nYou should only touch `custom.json`.\n\n## API\n\n```js\nvar mime = require('mime-types')\n```\n\nAll functions return `false` if input is invalid or not found.\n\n### mime.lookup(path)\n\nLookup the content-type associated with a file.\n\n```js\nmime.lookup('json') // 'application/json'\nmime.lookup('.md') // 'text/x-markdown'\nmime.lookup('file.html') // 'text/html'\nmime.lookup('folder/file.js') // 'application/javascript'\n\nmime.lookup('cats') // false\n```\n\n### mime.contentType(type)\n\nCreate a full content-type header given a content-type or extension.\n\n```js\nmime.contentType('markdown') // 'text/x-markdown; charset=utf-8'\nmime.contentType('file.json') // 'application/json; charset=utf-8'\n```\n\n### mime.extension(type)\n\nGet the default extension for a content-type.\n\n```js\nmime.extension('application/octet-stream') // 'bin'\n```\n\n### mime.charset(type)\n\nLookup the implied default charset of a content-type.\n\n```js\nmime.charset('text/x-markdown') // 'UTF-8'\n```\n\n### mime.types[extension] = type\n\nA map of content-types by extension.\n\n### mime.extensions[type] = [extensions]\n\nA map of extensions by content-type.\n\n### mime.define(types)\n\nGlobally add definitions.\n`types` must be an object of the form:\n\n```js\n{\n \"<content-type>\": [extensions...],\n \"<content-type>\": [extensions...]\n}\n```\n\nSee the `.json` files in `lib/` for examples.\n\n## License\n\n[MIT](LICENSE)\n",
- "readmeFilename": "README.md",
+ "gitHead": "e82b23836eb42003b8346fb31769da2fb7eb54e8",
"bugs": {
"url": "https://github.com/expressjs/mime-types/issues"
},
@@ -41,5 +40,30 @@
"_id": "mime-types@1.0.2",
"_shasum": "995ae1392ab8affcbfcb2641dd054e943c0d5dce",
"_from": "mime-types@>=1.0.1 <1.1.0",
- "_resolved": "https://registry.npmjs.org/mime-types/-/mime-types-1.0.2.tgz"
+ "_npmVersion": "1.4.21",
+ "_npmUser": {
+ "name": "dougwilson",
+ "email": "doug@somethingdoug.com"
+ },
+ "maintainers": [
+ {
+ "name": "jongleberry",
+ "email": "jonathanrichardong@gmail.com"
+ },
+ {
+ "name": "fishrock123",
+ "email": "fishrock123@rocketmail.com"
+ },
+ {
+ "name": "dougwilson",
+ "email": "doug@somethingdoug.com"
+ }
+ ],
+ "dist": {
+ "shasum": "995ae1392ab8affcbfcb2641dd054e943c0d5dce",
+ "tarball": "http://registry.npmjs.org/mime-types/-/mime-types-1.0.2.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/mime-types/-/mime-types-1.0.2.tgz",
+ "readme": "ERROR: No README data found!"
}
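
A minimal sketch of the mime-types 1.0.x lookups listed in the README removed above; note that unknown inputs return false rather than a fallback type.

```javascript
var mime = require('mime-types');

console.log(mime.lookup('json'));                         // 'application/json'
console.log(mime.lookup('file.html'));                    // 'text/html'
console.log(mime.lookup('cats'));                         // false (no fallback)
console.log(mime.contentType('file.json'));               // 'application/json; charset=utf-8'
console.log(mime.extension('application/octet-stream'));  // 'bin'
```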
diff --git a/node_modules/request/node_modules/node-uuid/package.json b/node_modules/request/node_modules/node-uuid/package.json
index 2dc06c68a..bead110cc 100644
--- a/node_modules/request/node_modules/node-uuid/package.json
+++ b/node_modules/request/node_modules/node-uuid/package.json
@@ -29,10 +29,26 @@
"bugs": {
"url": "https://github.com/broofa/node-uuid/issues"
},
- "homepage": "https://github.com/broofa/node-uuid",
"_id": "node-uuid@1.4.1",
- "_shasum": "39aef510e5889a3dca9c895b506c73aae1bac048",
+ "dist": {
+ "shasum": "39aef510e5889a3dca9c895b506c73aae1bac048",
+ "tarball": "http://registry.npmjs.org/node-uuid/-/node-uuid-1.4.1.tgz"
+ },
"_from": "node-uuid@>=1.4.0 <1.5.0",
+ "_npmVersion": "1.3.6",
+ "_npmUser": {
+ "name": "broofa",
+ "email": "robert@broofa.com"
+ },
+ "maintainers": [
+ {
+ "name": "broofa",
+ "email": "robert@broofa.com"
+ }
+ ],
+ "directories": {},
+ "_shasum": "39aef510e5889a3dca9c895b506c73aae1bac048",
"_resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.1.tgz",
+ "homepage": "https://github.com/broofa/node-uuid",
"scripts": {}
}
diff --git a/node_modules/request/node_modules/oauth-sign/package.json b/node_modules/request/node_modules/oauth-sign/package.json
index f6664d0fa..d8765b6e9 100644
--- a/node_modules/request/node_modules/oauth-sign/package.json
+++ b/node_modules/request/node_modules/oauth-sign/package.json
@@ -25,9 +25,25 @@
"bugs": {
"url": "https://github.com/mikeal/oauth-sign/issues"
},
- "homepage": "https://github.com/mikeal/oauth-sign",
"_id": "oauth-sign@0.4.0",
- "_shasum": "f22956f31ea7151a821e5f2fb32c113cad8b9f69",
+ "dist": {
+ "shasum": "f22956f31ea7151a821e5f2fb32c113cad8b9f69",
+ "tarball": "http://registry.npmjs.org/oauth-sign/-/oauth-sign-0.4.0.tgz"
+ },
"_from": "oauth-sign@>=0.4.0 <0.5.0",
- "_resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.4.0.tgz"
+ "_npmVersion": "1.3.2",
+ "_npmUser": {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_shasum": "f22956f31ea7151a821e5f2fb32c113cad8b9f69",
+ "_resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.4.0.tgz",
+ "homepage": "https://github.com/mikeal/oauth-sign"
}
diff --git a/node_modules/request/node_modules/qs/package.json b/node_modules/request/node_modules/qs/package.json
index 83bd36dea..d65387274 100755
--- a/node_modules/request/node_modules/qs/package.json
+++ b/node_modules/request/node_modules/qs/package.json
@@ -29,13 +29,33 @@
"url": "http://github.com/hapijs/qs/raw/master/LICENSE"
}
],
- "readme": "# qs\n\nA querystring parsing and stringifying library with some added security.\n\n[![Build Status](https://secure.travis-ci.org/hapijs/qs.svg)](http://travis-ci.org/hapijs/qs)\n\nLead Maintainer: [Nathan LaFreniere](https://github.com/nlf)\n\nThe **qs** module was originally created and maintained by [TJ Holowaychuk](https://github.com/visionmedia/node-querystring).\n\n## Usage\n\n```javascript\nvar Qs = require('qs');\n\nvar obj = Qs.parse('a=c'); // { a: 'c' }\nvar str = Qs.stringify(obj); // 'a=c'\n```\n\n### Parsing Objects\n\n```javascript\nQs.parse(string, [depth], [delimiter]);\n```\n\n**qs** allows you to create nested objects within your query strings, by surrounding the name of sub-keys with square brackets `[]`.\nFor example, the string `'foo[bar]=baz'` converts to:\n\n```javascript\n{\n foo: {\n bar: 'baz'\n }\n}\n```\n\nURI encoded strings work too:\n\n```javascript\nQs.parse('a%5Bb%5D=c');\n// { a: { b: 'c' } }\n```\n\nYou can also nest your objects, like `'foo[bar][baz]=foobarbaz'`:\n\n```javascript\n{\n foo: {\n bar: {\n baz: 'foobarbaz'\n }\n }\n}\n```\n\nBy default, when nesting objects **qs** will only parse up to 5 children deep. This means if you attempt to parse a string like\n`'a[b][c][d][e][f][g][h][i]=j'` your resulting object will be:\n\n```javascript\n{\n a: {\n b: {\n c: {\n d: {\n e: {\n f: {\n '[g][h][i]': 'j'\n }\n }\n }\n }\n }\n }\n}\n```\n\nThis depth can be overridden by passing a `depth` option to `Qs.parse(string, depth)`:\n\n```javascript\nQs.parse('a[b][c][d][e][f][g][h][i]=j', 1);\n// { a: { b: { '[c][d][e][f][g][h][i]': 'j' } } }\n```\n\nThe depth limit mitigate abuse when **qs** is used to parse user input, and it is recommended to keep it a reasonably small number.\n\nAn optional delimiter can also be passed:\n\n```javascript\nQs.parse('a=b;c=d', ';');\n// { a: 'b', c: 'd' }\n```\n\n### Parsing Arrays\n\n**qs** can also parse arrays using a similar `[]` notation:\n\n```javascript\nQs.parse('a[]=b&a[]=c');\n// { a: ['b', 'c'] }\n```\n\nYou may specify an index as well:\n\n```javascript\nQs.parse('a[1]=c&a[0]=b');\n// { a: ['b', 'c'] }\n```\n\nNote that the only difference between an index in an array and a key in an object is that the value between the brackets must be a number\nto create an array. When creating arrays with specific indices, **qs** will compact a sparse array to only the existing values preserving\ntheir order:\n\n```javascript\nQs.parse('a[1]=b&a[15]=c');\n// { a: ['b', 'c'] }\n```\n\nNote that an empty string is also a value, and will be preserved:\n\n```javascript\nQs.parse('a[]=&a[]=b');\n// { a: ['', 'b'] }\nQs.parse('a[0]=b&a[1]=&a[2]=c');\n// { a: ['b', '', 'c'] }\n```\n\n**qs** will also limit specifying indices in an array to a maximum index of `20`. Any array members with an index of greater than `20` will\ninstead be converted to an object with the index as the key:\n\n```javascript\nQs.parse('a[100]=b');\n// { a: { '100': 'b' } }\n```\n\nIf you mix notations, **qs** will merge the two items into an object:\n\n```javascript\nQs.parse('a[0]=b&a[b]=c');\n// { a: { '0': 'b', b: 'c' } }\n```\n\nYou can also create arrays of objects:\n\n```javascript\nQs.parse('a[][b]=c');\n// { a: [{ b: 'c' }] }\n```\n\n### Stringifying\n\n```javascript\nQs.stringify(object, [delimiter]);\n```\n\nWhen stringifying, **qs** always URI encodes output. 
Objects are stringified as you would expect:\n\n```javascript\nQs.stringify({ a: 'b' });\n// 'a=b'\nQs.stringify({ a: { b: 'c' } });\n// 'a%5Bb%5D=c'\n```\n\nExamples beyond this point will be shown as though the output is not URI encoded for clarity. Please note that the return values in these cases *will* be URI encoded during real usage.\n\nWhen arrays are stringified, they are always given explicit indices:\n\n```javascript\nQs.stringify({ a: ['b', 'c', 'd'] });\n// 'a[0]=b&a[1]=c&a[2]=d'\n```\n\nEmpty strings and null values will omit the value, but the equals sign (=) remains in place:\n\n```javascript\nQs.stringify({ a: '' });\n// 'a='\n```\n\nProperties that are set to `undefined` will be omitted entirely:\n\n```javascript\nQs.stringify({ a: null, b: undefined });\n// 'a='\n```\n\nThe delimiter may be overridden with stringify as well:\n\n```javascript\nQs.stringify({ a: 'b', c: 'd' }, ';');\n// 'a=b;c=d'\n```\n",
- "readmeFilename": "README.md",
+ "gitHead": "bd9455fea88d1c51a80dbf57ef0f99b4e553177d",
"bugs": {
"url": "https://github.com/hapijs/qs/issues"
},
"_id": "qs@1.2.2",
"_shasum": "19b57ff24dc2a99ce1f8bdf6afcda59f8ef61f88",
"_from": "qs@>=1.2.0 <1.3.0",
- "_resolved": "https://registry.npmjs.org/qs/-/qs-1.2.2.tgz"
+ "_npmVersion": "1.4.21",
+ "_npmUser": {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ },
+ "maintainers": [
+ {
+ "name": "nlf",
+ "email": "quitlahok@gmail.com"
+ },
+ {
+ "name": "hueniverse",
+ "email": "eran@hueniverse.com"
+ }
+ ],
+ "dist": {
+ "shasum": "19b57ff24dc2a99ce1f8bdf6afcda59f8ef61f88",
+ "tarball": "http://registry.npmjs.org/qs/-/qs-1.2.2.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/qs/-/qs-1.2.2.tgz",
+ "readme": "ERROR: No README data found!"
}
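
A minimal sketch of qs@1.2.x parsing and stringifying from the README removed above; the custom-delimiter call uses the second-argument form shown there.

```javascript
var Qs = require('qs');

console.log(Qs.parse('a=c'));           // { a: 'c' }
console.log(Qs.parse('foo[bar]=baz'));  // { foo: { bar: 'baz' } }
console.log(Qs.parse('a[]=b&a[]=c'));   // { a: ['b', 'c'] }
console.log(Qs.parse('a=b;c=d', ';'));  // { a: 'b', c: 'd' }

// Stringified output is always URI encoded.
console.log(Qs.stringify({ a: ['b', 'c'] })); // 'a%5B0%5D=b&a%5B1%5D=c'
```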
diff --git a/node_modules/request/node_modules/stringstream/package.json b/node_modules/request/node_modules/stringstream/package.json
index bd1e23d97..b71cf2859 100644
--- a/node_modules/request/node_modules/stringstream/package.json
+++ b/node_modules/request/node_modules/stringstream/package.json
@@ -21,13 +21,29 @@
"license": "MIT",
"readme": "# Decode streams into strings The Right Way(tm)\n\n```javascript\nvar fs = require('fs')\nvar zlib = require('zlib')\nvar strs = require('stringstream')\n\nvar utf8Stream = fs.createReadStream('massiveLogFile.gz')\n .pipe(zlib.createGunzip())\n .pipe(strs('utf8'))\n```\n\nNo need to deal with `setEncoding()` weirdness, just compose streams\nlike they were supposed to be!\n\nHandles input and output encoding:\n\n```javascript\n// Stream from utf8 to hex to base64... Why not, ay.\nvar hex64Stream = fs.createReadStream('myFile')\n .pipe(strs('utf8', 'hex'))\n .pipe(strs('hex', 'base64'))\n```\n\nAlso deals with `base64` output correctly by aligning each emitted data\nchunk so that there are no dangling `=` characters:\n\n```javascript\nvar stream = fs.createReadStream('myFile').pipe(strs('base64'))\n\nvar base64Str = ''\n\nstream.on('data', function(data) { base64Str += data })\nstream.on('end', function() {\n console.log('My base64 encoded file is: ' + base64Str) // Wouldn't work with setEncoding()\n console.log('Original file is: ' + new Buffer(base64Str, 'base64'))\n})\n```\n",
"readmeFilename": "README.md",
+ "_id": "stringstream@0.0.4",
+ "dist": {
+ "shasum": "0f0e3423f942960b5692ac324a57dd093bc41a92",
+ "tarball": "http://registry.npmjs.org/stringstream/-/stringstream-0.0.4.tgz"
+ },
+ "_npmVersion": "1.2.0",
+ "_npmUser": {
+ "name": "hichaelmart",
+ "email": "michael.hart.au@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "hichaelmart",
+ "email": "michael.hart.au@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_shasum": "0f0e3423f942960b5692ac324a57dd093bc41a92",
+ "_resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.4.tgz",
+ "_from": "stringstream@>=0.0.4 <0.1.0",
"bugs": {
"url": "https://github.com/mhart/StringStream/issues"
},
"homepage": "https://github.com/mhart/StringStream",
- "_id": "stringstream@0.0.4",
- "_shasum": "0f0e3423f942960b5692ac324a57dd093bc41a92",
- "_from": "stringstream@>=0.0.4 <0.1.0",
- "_resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.4.tgz",
"scripts": {}
}
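
A minimal sketch of the stringstream usage from its (unchanged) README above; massiveLogFile.gz is a placeholder path.

```javascript
var fs = require('fs');
var zlib = require('zlib');
var strs = require('stringstream');

// Decode a gzipped file straight into utf8 strings, no setEncoding() needed.
var utf8Stream = fs.createReadStream('massiveLogFile.gz') // placeholder file
  .pipe(zlib.createGunzip())
  .pipe(strs('utf8'));

utf8Stream.on('data', function (chunk) {
  console.log(typeof chunk); // 'string'
});
```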
diff --git a/node_modules/request/node_modules/tough-cookie/node_modules/punycode/package.json b/node_modules/request/node_modules/tough-cookie/node_modules/punycode/package.json
index 311294252..ab13fc836 100644
--- a/node_modules/request/node_modules/tough-cookie/node_modules/punycode/package.json
+++ b/node_modules/request/node_modules/tough-cookie/node_modules/punycode/package.json
@@ -60,10 +60,28 @@
"qunitjs": "~1.11.0",
"requirejs": "^2.1.14"
},
- "readme": "# Punycode.js [![Build status](https://travis-ci.org/bestiejs/punycode.js.svg?branch=master)](https://travis-ci.org/bestiejs/punycode.js) [![Code coverage status](http://img.shields.io/coveralls/bestiejs/punycode.js/master.svg)](https://coveralls.io/r/bestiejs/punycode.js) [![Dependency status](https://gemnasium.com/bestiejs/punycode.js.svg)](https://gemnasium.com/bestiejs/punycode.js)\n\nA robust Punycode converter that fully complies to [RFC 3492](http://tools.ietf.org/html/rfc3492) and [RFC 5891](http://tools.ietf.org/html/rfc5891), and works on nearly all JavaScript platforms.\n\nThis JavaScript library is the result of comparing, optimizing and documenting different open-source implementations of the Punycode algorithm:\n\n* [The C example code from RFC 3492](http://tools.ietf.org/html/rfc3492#appendix-C)\n* [`punycode.c` by _Markus W. Scherer_ (IBM)](http://opensource.apple.com/source/ICU/ICU-400.42/icuSources/common/punycode.c)\n* [`punycode.c` by _Ben Noordhuis_](https://github.com/bnoordhuis/punycode/blob/master/punycode.c)\n* [JavaScript implementation by _some_](http://stackoverflow.com/questions/183485/can-anyone-recommend-a-good-free-javascript-for-punycode-to-unicode-conversion/301287#301287)\n* [`punycode.js` by _Ben Noordhuis_](https://github.com/joyent/node/blob/426298c8c1c0d5b5224ac3658c41e7c2a3fe9377/lib/punycode.js) (note: [not fully compliant](https://github.com/joyent/node/issues/2072))\n\nThis project is [bundled](https://github.com/joyent/node/blob/master/lib/punycode.js) with [Node.js v0.6.2+](https://github.com/joyent/node/compare/975f1930b1...61e796decc).\n\n## Installation\n\nVia [npm](http://npmjs.org/) (only required for Node.js releases older than v0.6.2):\n\n```bash\nnpm install punycode\n```\n\nVia [Bower](http://bower.io/):\n\n```bash\nbower install punycode\n```\n\nVia [Component](https://github.com/component/component):\n\n```bash\ncomponent install bestiejs/punycode.js\n```\n\nIn a browser:\n\n```html\n<script src=\"punycode.js\"></script>\n```\n\nIn [Narwhal](http://narwhaljs.org/), [Node.js](http://nodejs.org/), and [RingoJS](http://ringojs.org/):\n\n```js\nvar punycode = require('punycode');\n```\n\nIn [Rhino](http://www.mozilla.org/rhino/):\n\n```js\nload('punycode.js');\n```\n\nUsing an AMD loader like [RequireJS](http://requirejs.org/):\n\n```js\nrequire(\n {\n 'paths': {\n 'punycode': 'path/to/punycode'\n }\n },\n ['punycode'],\n function(punycode) {\n console.log(punycode);\n }\n);\n```\n\n## API\n\n### `punycode.decode(string)`\n\nConverts a Punycode string of ASCII symbols to a string of Unicode symbols.\n\n```js\n// decode domain name parts\npunycode.decode('maana-pta'); // 'mañana'\npunycode.decode('--dqo34k'); // '☃-⌘'\n```\n\n### `punycode.encode(string)`\n\nConverts a string of Unicode symbols to a Punycode string of ASCII symbols.\n\n```js\n// encode domain name parts\npunycode.encode('mañana'); // 'maana-pta'\npunycode.encode('☃-⌘'); // '--dqo34k'\n```\n\n### `punycode.toUnicode(input)`\n\nConverts a Punycode string representing a domain name or an email address to Unicode. Only the Punycoded parts of the input will be converted, i.e. 
it doesn’t matter if you call it on a string that has already been converted to Unicode.\n\n```js\n// decode domain names\npunycode.toUnicode('xn--maana-pta.com');\n// → 'mañana.com'\npunycode.toUnicode('xn----dqo34k.com');\n// → '☃-⌘.com'\n\n// decode email addresses\npunycode.toUnicode('джумла@xn--p-8sbkgc5ag7bhce.xn--ba-lmcq');\n// → 'джумла@джpумлатест.bрфa'\n```\n\n### `punycode.toASCII(input)`\n\nConverts a Unicode string representing a domain name or an email address to Punycode. Only the non-ASCII parts of the input will be converted, i.e. it doesn’t matter if you call it with a domain that's already in ASCII.\n\n```js\n// encode domain names\npunycode.toASCII('mañana.com');\n// → 'xn--maana-pta.com'\npunycode.toASCII('☃-⌘.com');\n// → 'xn----dqo34k.com'\n\n// encode email addresses\npunycode.toASCII('джумла@джpумлатест.bрфa');\n// → 'джумла@xn--p-8sbkgc5ag7bhce.xn--ba-lmcq'\n```\n\n### `punycode.ucs2`\n\n#### `punycode.ucs2.decode(string)`\n\nCreates an array containing the numeric code point values of each Unicode symbol in the string. While [JavaScript uses UCS-2 internally](http://mathiasbynens.be/notes/javascript-encoding), this function will convert a pair of surrogate halves (each of which UCS-2 exposes as separate characters) into a single code point, matching UTF-16.\n\n```js\npunycode.ucs2.decode('abc');\n// → [0x61, 0x62, 0x63]\n// surrogate pair for U+1D306 TETRAGRAM FOR CENTRE:\npunycode.ucs2.decode('\\uD834\\uDF06');\n// → [0x1D306]\n```\n\n#### `punycode.ucs2.encode(codePoints)`\n\nCreates a string based on an array of numeric code point values.\n\n```js\npunycode.ucs2.encode([0x61, 0x62, 0x63]);\n// → 'abc'\npunycode.ucs2.encode([0x1D306]);\n// → '\\uD834\\uDF06'\n```\n\n### `punycode.version`\n\nA string representing the current Punycode.js version number.\n\n## Unit tests & code coverage\n\nAfter cloning this repository, run `npm install --dev` to install the dependencies needed for Punycode.js development and testing. You may want to install Istanbul _globally_ using `npm install istanbul -g`.\n\nOnce that’s done, you can run the unit tests in Node using `npm test` or `node tests/tests.js`. To run the tests in Rhino, Ringo, Narwhal, PhantomJS, and web browsers as well, use `grunt test`.\n\nTo generate the code coverage report, use `grunt cover`.\n\nFeel free to fork if you see possible improvements!\n\n## Author\n\n| [![twitter/mathias](https://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias \"Follow @mathias on Twitter\") |\n|---|\n| [Mathias Bynens](http://mathiasbynens.be/) |\n\n## Contributors\n\n| [![twitter/jdalton](https://gravatar.com/avatar/299a3d891ff1920b69c364d061007043?s=70)](https://twitter.com/jdalton \"Follow @jdalton on Twitter\") |\n|---|\n| [John-David Dalton](http://allyoucanleet.com/) |\n\n## License\n\nPunycode.js is available under the [MIT](http://mths.be/mit) license.\n",
- "readmeFilename": "README.md",
"_id": "punycode@1.3.1",
"_shasum": "710afe5123c20a1530b712e3e682b9118fe8058e",
"_from": "punycode@>=0.2.0",
- "_resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.1.tgz"
+ "_npmVersion": "1.4.9",
+ "_npmUser": {
+ "name": "mathias",
+ "email": "mathias@qiwi.be"
+ },
+ "maintainers": [
+ {
+ "name": "mathias",
+ "email": "mathias@qiwi.be"
+ },
+ {
+ "name": "reconbot",
+ "email": "wizard@roborooter.com"
+ }
+ ],
+ "dist": {
+ "shasum": "710afe5123c20a1530b712e3e682b9118fe8058e",
+ "tarball": "http://registry.npmjs.org/punycode/-/punycode-1.3.1.tgz"
+ },
+ "_resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.1.tgz",
+ "readme": "ERROR: No README data found!"
}
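
A minimal sketch of the punycode conversions listed in the README removed above.

```javascript
var punycode = require('punycode');

console.log(punycode.decode('maana-pta'));             // 'mañana'
console.log(punycode.encode('mañana'));                // 'maana-pta'
console.log(punycode.toASCII('mañana.com'));           // 'xn--maana-pta.com'
console.log(punycode.toUnicode('xn--maana-pta.com'));  // 'mañana.com'
console.log(punycode.ucs2.decode('abc'));              // [ 97, 98, 99 ]
```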
diff --git a/node_modules/request/node_modules/tough-cookie/package.json b/node_modules/request/node_modules/tough-cookie/package.json
index 037187169..f4ede6689 100644
--- a/node_modules/request/node_modules/tough-cookie/package.json
+++ b/node_modules/request/node_modules/tough-cookie/package.json
@@ -41,7 +41,27 @@
"readme": "[RFC6265](http://tools.ietf.org/html/rfc6265) Cookies and CookieJar for Node.js\n\n![Tough Cookie](http://www.goinstant.com.s3.amazonaws.com/tough-cookie.jpg)\n\n[![Build Status](https://travis-ci.org/goinstant/node-cookie.png?branch=master)](https://travis-ci.org/goinstant/node-cookie)\n\n[![NPM Stats](https://nodei.co/npm/tough-cookie.png?downloads=true&stars=true)](https://npmjs.org/package/tough-cookie)\n![NPM Downloads](https://nodei.co/npm-dl/tough-cookie.png?months=9)\n\n# Synopsis\n\n``` javascript\nvar tough = require('tough-cookie'); // note: not 'cookie', 'cookies' or 'node-cookie'\nvar Cookie = tough.Cookie;\nvar cookie = Cookie.parse(header);\ncookie.value = 'somethingdifferent';\nheader = cookie.toString();\n\nvar cookiejar = new tough.CookieJar();\ncookiejar.setCookie(cookie, 'http://currentdomain.example.com/path', cb);\n// ...\ncookiejar.getCookies('http://example.com/otherpath',function(err,cookies) {\n res.headers['cookie'] = cookies.join('; ');\n});\n```\n\n# Installation\n\nIt's _so_ easy!\n\n`npm install tough-cookie`\n\nRequires `punycode`, which should get installed automatically for you. Note that node.js v0.6.2+ bundles punycode by default.\n\nWhy the name? NPM modules `cookie`, `cookies` and `cookiejar` were already taken.\n\n# API\n\ntough\n=====\n\nFunctions on the module you get from `require('tough-cookie')`. All can be used as pure functions and don't need to be \"bound\".\n\nparseDate(string[,strict])\n-----------------\n\nParse a cookie date string into a `Date`. Parses according to RFC6265 Section 5.1.1, not `Date.parse()`. If strict is set to true then leading/trailing non-seperator characters around the time part will cause the parsing to fail (e.g. \"Thu, 01 Jan 1970 00:00:010 GMT\" has an extra trailing zero but Chrome, an assumedly RFC-compliant browser, treats this as valid).\n\nformatDate(date)\n----------------\n\nFormat a Date into a RFC1123 string (the RFC6265-recommended format).\n\ncanonicalDomain(str)\n--------------------\n\nTransforms a domain-name into a canonical domain-name. The canonical domain-name is a trimmed, lowercased, stripped-of-leading-dot and optionally punycode-encoded domain-name (Section 5.1.2 of RFC6265). For the most part, this function is idempotent (can be run again on its output without ill effects).\n\ndomainMatch(str,domStr[,canonicalize=true])\n-------------------------------------------\n\nAnswers \"does this real domain match the domain in a cookie?\". The `str` is the \"current\" domain-name and the `domStr` is the \"cookie\" domain-name. Matches according to RFC6265 Section 5.1.3, but it helps to think of it as a \"suffix match\".\n\nThe `canonicalize` parameter will run the other two paramters through `canonicalDomain` or not.\n\ndefaultPath(path)\n-----------------\n\nGiven a current request/response path, gives the Path apropriate for storing in a cookie. This is basically the \"directory\" of a \"file\" in the path, but is specified by Section 5.1.4 of the RFC.\n\nThe `path` parameter MUST be _only_ the pathname part of a URI (i.e. excludes the hostname, query, fragment, etc.). This is the `.pathname` property of node's `uri.parse()` output.\n\npathMatch(reqPath,cookiePath)\n-----------------------------\n\nAnswers \"does the request-path path-match a given cookie-path?\" as per RFC6265 Section 5.1.4. 
Returns a boolean.\n\nThis is essentially a prefix-match where `cookiePath` is a prefix of `reqPath`.\n\nparse(header[,strict=false])\n----------------------------\n\nalias for `Cookie.parse(header[,strict])`\n\nfromJSON(string)\n----------------\n\nalias for `Cookie.fromJSON(string)`\n\ngetPublicSuffix(hostname)\n-------------------------\n\nReturns the public suffix of this hostname. The public suffix is the shortest domain-name upon which a cookie can be set. Returns `null` if the hostname cannot have cookies set for it.\n\nFor example: `www.example.com` and `www.subdomain.example.com` both have public suffix `example.com`.\n\nFor further information, see http://publicsuffix.org/. This module derives its list from that site.\n\ncookieCompare(a,b)\n------------------\n\nFor use with `.sort()`, sorts a list of cookies into the recommended order given in the RFC (Section 5.4 step 2). Longest `.path`s go first, then sorted oldest to youngest.\n\n``` javascript\nvar cookies = [ /* unsorted array of Cookie objects */ ];\ncookies = cookies.sort(cookieCompare);\n```\n\npermuteDomain(domain)\n---------------------\n\nGenerates a list of all possible domains that `domainMatch()` the parameter. May be handy for implementing cookie stores.\n\n\npermutePath(path)\n-----------------\n\nGenerates a list of all possible paths that `pathMatch()` the parameter. May be handy for implementing cookie stores.\n\nCookie\n======\n\nCookie.parse(header[,strict=false])\n-----------------------------------\n\nParses a single Cookie or Set-Cookie HTTP header into a `Cookie` object. Returns `undefined` if the string can't be parsed. If in strict mode, returns `undefined` if the cookie doesn't follow the guidelines in section 4 of RFC6265. Generally speaking, strict mode can be used to validate your own generated Set-Cookie headers, but acting as a client you want to be lenient and leave strict mode off.\n\nHere's how to process the Set-Cookie header(s) on a node HTTP/HTTPS response:\n\n``` javascript\nif (res.headers['set-cookie'] instanceof Array)\n cookies = res.headers['set-cookie'].map(function (c) { return (Cookie.parse(c)); });\nelse\n cookies = [Cookie.parse(res.headers['set-cookie'])];\n```\n\nCookie.fromJSON(string)\n-----------------------\n\nConvert a JSON string to a `Cookie` object. Does a `JSON.parse()` and converts the `.created`, `.lastAccessed` and `.expires` properties into `Date` objects.\n\nProperties\n==========\n\n * _key_ - string - the name or key of the cookie (default \"\")\n * _value_ - string - the value of the cookie (default \"\")\n * _expires_ - `Date` - if set, the `Expires=` attribute of the cookie (defaults to the string `\"Infinity\"`). See `setExpires()`\n * _maxAge_ - seconds - if set, the `Max-Age=` attribute _in seconds_ of the cookie. May also be set to strings `\"Infinity\"` and `\"-Infinity\"` for non-expiry and immediate-expiry, respectively. See `setMaxAge()`\n * _domain_ - string - the `Domain=` attribute of the cookie\n * _path_ - string - the `Path=` of the cookie\n * _secure_ - boolean - the `Secure` cookie flag\n * _httpOnly_ - boolean - the `HttpOnly` cookie flag\n * _extensions_ - `Array` - any unrecognized cookie attributes as strings (even if equal-signs inside)\n\nAfter a cookie has been passed through `CookieJar.setCookie()` it will have the following additional attributes:\n\n * _hostOnly_ - boolean - is this a host-only cookie (i.e. 
no Domain field was set, but was instead implied)\n * _pathIsDefault_ - boolean - if true, there was no Path field on the cookie and `defaultPath()` was used to derive one.\n * _created_ - `Date` - when this cookie was added to the jar\n * _lastAccessed_ - `Date` - last time the cookie got accessed. Will affect cookie cleaning once implemented. Using `cookiejar.getCookies(...)` will update this attribute.\n\nConstruction([{options}])\n------------\n\nReceives an options object that can contain any Cookie properties, uses the default for unspecified properties.\n\n.toString()\n-----------\n\nencode to a Set-Cookie header value. The Expires cookie field is set using `formatDate()`, but is omitted entirely if `.expires` is `Infinity`.\n\n.cookieString()\n---------------\n\nencode to a Cookie header value (i.e. the `.key` and `.value` properties joined with '=').\n\n.setExpires(String)\n-------------------\n\nsets the expiry based on a date-string passed through `parseDate()`. If parseDate returns `null` (i.e. can't parse this date string), `.expires` is set to `\"Infinity\"` (a string) is set.\n\n.setMaxAge(number)\n-------------------\n\nsets the maxAge in seconds. Coerces `-Infinity` to `\"-Infinity\"` and `Infinity` to `\"Infinity\"` so it JSON serializes correctly.\n\n.expiryTime([now=Date.now()])\n-----------------------------\n\n.expiryDate([now=Date.now()])\n-----------------------------\n\nexpiryTime() Computes the absolute unix-epoch milliseconds that this cookie expires. expiryDate() works similarly, except it returns a `Date` object. Note that in both cases the `now` parameter should be milliseconds.\n\nMax-Age takes precedence over Expires (as per the RFC). The `.created` attribute -- or, by default, the `now` paramter -- is used to offset the `.maxAge` attribute.\n\nIf Expires (`.expires`) is set, that's returned.\n\nOtherwise, `expiryTime()` returns `Infinity` and `expiryDate()` returns a `Date` object for \"Tue, 19 Jan 2038 03:14:07 GMT\" (latest date that can be expressed by a 32-bit `time_t`; the common limit for most user-agents).\n\n.TTL([now=Date.now()])\n---------\n\ncompute the TTL relative to `now` (milliseconds). The same precedence rules as for `expiryTime`/`expiryDate` apply.\n\nThe \"number\" `Infinity` is returned for cookies without an explicit expiry and `0` is returned if the cookie is expired. Otherwise a time-to-live in milliseconds is returned.\n\n.canonicalizedDoman()\n---------------------\n\n.cdomain()\n----------\n\nreturn the canonicalized `.domain` field. This is lower-cased and punycode (RFC3490) encoded if the domain has any non-ASCII characters.\n\n.validate()\n-----------\n\nStatus: *IN PROGRESS*. Works for a few things, but is by no means comprehensive.\n\nvalidates cookie attributes for semantic correctness. Useful for \"lint\" checking any Set-Cookie headers you generate. For now, it returns a boolean, but eventually could return a reason string -- you can future-proof with this construct:\n\n``` javascript\nif (cookie.validate() === true) {\n // it's tasty\n} else {\n // yuck!\n}\n```\n\nCookieJar\n=========\n\nConstruction([store = new MemoryCookieStore()][, rejectPublicSuffixes])\n------------\n\nSimply use `new CookieJar()`. 
If you'd like to use a custom store, pass that to the constructor otherwise a `MemoryCookieStore` will be created and used.\n\n\nAttributes\n----------\n\n * _rejectPublicSuffixes_ - boolean - reject cookies with domains like \"com\" and \"co.uk\" (default: `true`)\n\nSince eventually this module would like to support database/remote/etc. CookieJars, continuation passing style is used for CookieJar methods.\n\n.setCookie(cookieOrString, currentUrl, [{options},] cb(err,cookie))\n-------------------------------------------------------------------\n\nAttempt to set the cookie in the cookie jar. If the operation fails, an error will be given to the callback `cb`, otherwise the cookie is passed through. The cookie will have updated `.created`, `.lastAccessed` and `.hostOnly` properties.\n\nThe `options` object can be omitted and can have the following properties:\n\n * _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies.\n * _secure_ - boolean - autodetect from url - indicates if this is a \"Secure\" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`.\n * _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies\n * _strict_ - boolean - default `false` - perform extra checks\n * _ignoreError_ - boolean - default `false` - silently ignore things like parse errors and invalid domains. CookieStore errors aren't ignored by this option.\n\nAs per the RFC, the `.hostOnly` property is set if there was no \"Domain=\" parameter in the cookie string (or `.domain` was null on the Cookie object). The `.domain` property is set to the fully-qualified hostname of `currentUrl` in this case. Matching this cookie requires an exact hostname match (not a `domainMatch` as per usual).\n\n.setCookieSync(cookieOrString, currentUrl, [{options}])\n-------------------------------------------------------\n\nSynchronous version of `setCookie`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).\n\n.storeCookie(cookie, [{options},] cb(err,cookie))\n-------------------------------------------------\n\n__REMOVED__ removed in lieu of the CookieStore API below\n\n.getCookies(currentUrl, [{options},] cb(err,cookies))\n-----------------------------------------------------\n\nRetrieve the list of cookies that can be sent in a Cookie header for the current url.\n\nIf an error is encountered, that's passed as `err` to the callback, otherwise an `Array` of `Cookie` objects is passed. The array is sorted with `cookieCompare()` unless the `{sort:false}` option is given.\n\nThe `options` object can be omitted and can have the following properties:\n\n * _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies.\n * _secure_ - boolean - autodetect from url - indicates if this is a \"Secure\" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`.\n * _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies\n * _expire_ - boolean - default `true` - perform expiry-time checking of cookies and asynchronously remove expired cookies from the store. Using `false` will return expired cookies and **not** remove them from the store (which is useful for replaying Set-Cookie headers, potentially).\n * _allPaths_ - boolean - default `false` - if `true`, do not scope cookies by path. The default uses RFC-compliant path scoping. 
**Note**: may not be supported by the CookieStore `fetchCookies` function (the default MemoryCookieStore supports it).\n\nThe `.lastAccessed` property of the returned cookies will have been updated.\n\n.getCookiesSync(currentUrl, [{options}])\n----------------------------------------\n\nSynchronous version of `getCookies`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).\n\n.getCookieString(...)\n---------------------\n\nAccepts the same options as `.getCookies()` but passes a string suitable for a Cookie header rather than an array to the callback. Simply maps the `Cookie` array via `.cookieString()`.\n\n.getCookieStringSync(...)\n-------------------------\n\nSynchronous version of `getCookieString`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).\n\n.getSetCookieStrings(...)\n-------------------------\n\nReturns an array of strings suitable for **Set-Cookie** headers. Accepts the same options as `.getCookies()`. Simply maps the cookie array via `.toString()`.\n\n.getSetCookieStringsSync(...)\n-----------------------------\n\nSynchronous version of `getSetCookieStrings`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).\n\nStore\n=====\n\nBase class for CookieJar stores.\n\n# CookieStore API\n\nThe storage model for each `CookieJar` instance can be replaced with a custom implementation. The default is `MemoryCookieStore` which can be found in the `lib/memstore.js` file. The API uses continuation-passing-style to allow for asynchronous stores.\n\nStores should inherit from the base `Store` class, which is available as `require('tough-cookie').Store`. Stores are asynchronous by default, but if `store.synchronous` is set, then the `*Sync` methods on the CookieJar can be used.\n\nAll `domain` parameters will have been normalized before calling.\n\nThe Cookie store must have all of the following methods.\n\nstore.findCookie(domain, path, key, cb(err,cookie))\n---------------------------------------------------\n\nRetrieve a cookie with the given domain, path and key (a.k.a. name). The RFC maintains that exactly one of these cookies should exist in a store. If the store is using versioning, this means that the latest/newest such cookie should be returned.\n\nCallback takes an error and the resulting `Cookie` object. If no cookie is found then `null` MUST be passed instead (i.e. not an error).\n\nstore.findCookies(domain, path, cb(err,cookies))\n------------------------------------------------\n\nLocates cookies matching the given domain and path. This is most often called in the context of `cookiejar.getCookies()` above.\n\nIf no cookies are found, the callback MUST be passed an empty array.\n\nThe resulting list will be checked for applicability to the current request according to the RFC (domain-match, path-match, http-only-flag, secure-flag, expiry, etc.), so it's OK to use an optimistic search algorithm when implementing this method. However, the search algorithm used SHOULD try to find cookies that `domainMatch()` the domain and `pathMatch()` the path in order to limit the amount of checking that needs to be done.\n\nAs of version 0.9.12, the `allPaths` option to `cookiejar.getCookies()` above will cause the path here to be `null`. If the path is `null`, path-matching MUST NOT be performed (i.e. domain-matching only).\n\nstore.putCookie(cookie, cb(err))\n--------------------------------\n\nAdds a new cookie to the store. 
The implementation SHOULD replace any existing cookie with the same `.domain`, `.path`, and `.key` properties -- depending on the nature of the implementation, it's possible that between the call to `fetchCookie` and `putCookie` that a duplicate `putCookie` can occur.\n\nThe `cookie` object MUST NOT be modified; the caller will have already updated the `.creation` and `.lastAccessed` properties.\n\nPass an error if the cookie cannot be stored.\n\nstore.updateCookie(oldCookie, newCookie, cb(err))\n-------------------------------------------------\n\nUpdate an existing cookie. The implementation MUST update the `.value` for a cookie with the same `domain`, `.path` and `.key`. The implementation SHOULD check that the old value in the store is equivalent to `oldCookie` - how the conflict is resolved is up to the store.\n\nThe `.lastAccessed` property will always be different between the two objects and `.created` will always be the same. Stores MAY ignore or defer the `.lastAccessed` change at the cost of affecting how cookies are sorted (or selected for deletion).\n\nStores may wish to optimize changing the `.value` of the cookie in the store versus storing a new cookie. If the implementation doesn't define this method a stub that calls `putCookie(newCookie,cb)` will be added to the store object.\n\nThe `newCookie` and `oldCookie` objects MUST NOT be modified.\n\nPass an error if the newCookie cannot be stored.\n\nstore.removeCookie(domain, path, key, cb(err))\n----------------------------------------------\n\nRemove a cookie from the store (see notes on `findCookie` about the uniqueness constraint).\n\nThe implementation MUST NOT pass an error if the cookie doesn't exist; only pass an error due to the failure to remove an existing cookie.\n\nstore.removeCookies(domain, path, cb(err))\n------------------------------------------\n\nRemoves matching cookies from the store. The `path` paramter is optional, and if missing means all paths in a domain should be removed.\n\nPass an error ONLY if removing any existing cookies failed.\n\n# TODO\n\n * _full_ RFC5890/RFC5891 canonicalization for domains in `cdomain()`\n * the optional `punycode` requirement implements RFC3492, but RFC6265 requires RFC5891\n * better tests for `validate()`?\n\n# Copyright and License\n\n(tl;dr: MIT with some MPL/1.1)\n\nCopyright 2012- GoInstant, Inc. and other contributors. All rights reserved.\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to\ndeal in the Software without restriction, including without limitation the\nrights to use, copy, modify, merge, publish, distribute, sublicense, and/or\nsell copies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\nIN THE SOFTWARE.\n\nPortions may be licensed under different licenses (in particular public-suffix.txt is MPL/1.1); please read the LICENSE file for full details.\n",
"readmeFilename": "README.md",
"_id": "tough-cookie@0.12.1",
- "_shasum": "8220c7e21abd5b13d96804254bd5a81ebf2c7d62",
+ "dist": {
+ "shasum": "8220c7e21abd5b13d96804254bd5a81ebf2c7d62",
+ "tarball": "http://registry.npmjs.org/tough-cookie/-/tough-cookie-0.12.1.tgz"
+ },
"_from": "tough-cookie@>=0.12.0",
+ "_npmVersion": "1.3.11",
+ "_npmUser": {
+ "name": "goinstant",
+ "email": "support@goinstant.com"
+ },
+ "maintainers": [
+ {
+ "name": "jstash",
+ "email": "jeremy@goinstant.com"
+ },
+ {
+ "name": "goinstant",
+ "email": "services@goinstant.com"
+ }
+ ],
+ "directories": {},
+ "_shasum": "8220c7e21abd5b13d96804254bd5a81ebf2c7d62",
"_resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-0.12.1.tgz"
}
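
As a quick orientation to the API described in the bundled tough-cookie README above, here is a minimal sketch of the asynchronous CookieJar calls (`setCookie`, `getCookieString`). The domain and paths are hypothetical; everything else follows the README's synopsis.

``` javascript
var tough = require('tough-cookie');

var jar = new tough.CookieJar();

// store a Set-Cookie header value against a (hypothetical) origin
jar.setCookie('session=abc123; Domain=example.com; Path=/', 'http://example.com/', function (err, cookie) {
  if (err) throw err;

  // later, build a Cookie header value for a request to the same site
  jar.getCookieString('http://example.com/other/path', function (err, cookies) {
    if (err) throw err;
    console.log(cookies); // "session=abc123"
  });
});
```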
diff --git a/node_modules/request/node_modules/tunnel-agent/package.json b/node_modules/request/node_modules/tunnel-agent/package.json
index 89cc9e2db..5b1ebba15 100644
--- a/node_modules/request/node_modules/tunnel-agent/package.json
+++ b/node_modules/request/node_modules/tunnel-agent/package.json
@@ -17,15 +17,30 @@
"engines": {
"node": "*"
},
- "readme": "tunnel-agent\n============\n\nHTTP proxy tunneling agent. Formerly part of mikeal/request, now a standalone module.\n",
- "readmeFilename": "README.md",
"bugs": {
"url": "https://github.com/mikeal/tunnel-agent/issues"
},
"homepage": "https://github.com/mikeal/tunnel-agent",
"_id": "tunnel-agent@0.4.0",
- "_shasum": "b1184e312ffbcf70b3b4c78e8c219de7ebb1c550",
+ "dist": {
+ "shasum": "b1184e312ffbcf70b3b4c78e8c219de7ebb1c550",
+ "tarball": "http://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.0.tgz"
+ },
"_from": "tunnel-agent@>=0.4.0 <0.5.0",
+ "_npmVersion": "1.3.21",
+ "_npmUser": {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "mikeal",
+ "email": "mikeal.rogers@gmail.com"
+ }
+ ],
+ "directories": {},
+ "_shasum": "b1184e312ffbcf70b3b4c78e8c219de7ebb1c550",
"_resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.0.tgz",
+ "readme": "ERROR: No README data found!",
"scripts": {}
}
diff --git a/node_modules/request/package.json b/node_modules/request/package.json
index 057e8e97b..253ae5a99 100755
--- a/node_modules/request/package.json
+++ b/node_modules/request/package.json
@@ -7,7 +7,7 @@
"util",
"utility"
],
- "version": "2.44.0",
+ "version": "2.45.0",
"author": {
"name": "Mikeal Rogers",
"email": "mikeal.rogers@gmail.com"
@@ -20,9 +20,9 @@
"url": "http://github.com/mikeal/request/issues"
},
"license": "Apache-2.0",
- "engines": [
- "node >= 0.8.0"
- ],
+ "engines": {
+ "node": ">=0.8.0"
+ },
"main": "index.js",
"dependencies": {
"bl": "~0.9.0",
@@ -33,8 +33,8 @@
"mime-types": "~1.0.1",
"node-uuid": "~1.4.0",
"tunnel-agent": "~0.4.0",
- "tough-cookie": ">=0.12.0",
"form-data": "~0.1.0",
+ "tough-cookie": ">=0.12.0",
"http-signature": "~0.10.0",
"oauth-sign": "~0.4.0",
"hawk": "1.1.1",
@@ -43,7 +43,6 @@
},
"optionalDependencies": {
"tough-cookie": ">=0.12.0",
- "form-data": "~0.1.0",
"http-signature": "~0.10.0",
"oauth-sign": "~0.4.0",
"hawk": "1.1.1",
@@ -51,31 +50,38 @@
"stringstream": "~0.0.4"
},
"scripts": {
- "test": "node tests/run.js"
+ "test": "npm run lint && node tests/run.js",
+ "lint": "./node_modules/eslint/bin/eslint.js lib/ *.js"
},
"devDependencies": {
- "rimraf": "~2.2.8"
+ "rimraf": "~2.2.8",
+ "eslint": "0.5.1"
},
+ "gitHead": "fff5c951778859dc1f3d17f38f7d4426cbb75918",
"homepage": "https://github.com/mikeal/request",
- "_id": "request@2.44.0",
- "_shasum": "78d62454d68853cadfb07ad31f58b9ec98072ea8",
- "_from": "request@>=2.44.0 <2.45.0",
- "_npmVersion": "1.4.9",
+ "_id": "request@2.45.0",
+ "_shasum": "29d713a0a07f17fb2e7b61815d2010681718e93c",
+ "_from": "request@>=2.45.0 <2.46.0",
+ "_npmVersion": "1.4.14",
"_npmUser": {
- "name": "mikeal",
- "email": "mikeal.rogers@gmail.com"
+ "name": "nylen",
+ "email": "jnylen@gmail.com"
},
"maintainers": [
{
"name": "mikeal",
"email": "mikeal.rogers@gmail.com"
+ },
+ {
+ "name": "nylen",
+ "email": "jnylen@gmail.com"
}
],
"dist": {
- "shasum": "78d62454d68853cadfb07ad31f58b9ec98072ea8",
- "tarball": "http://registry.npmjs.org/request/-/request-2.44.0.tgz"
+ "shasum": "29d713a0a07f17fb2e7b61815d2010681718e93c",
+ "tarball": "http://registry.npmjs.org/request/-/request-2.45.0.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/request/-/request-2.44.0.tgz",
+ "_resolved": "https://registry.npmjs.org/request/-/request-2.45.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/node_modules/request/release.sh b/node_modules/request/release.sh
new file mode 100755
index 000000000..05e7767fc
--- /dev/null
+++ b/node_modules/request/release.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+npm version minor && npm publish && npm version patch && git push --tags && git push origin master
diff --git a/node_modules/request/request.js b/node_modules/request/request.js
index 8a29c3548..8e7660bdf 100644
--- a/node_modules/request/request.js
+++ b/node_modules/request/request.js
@@ -1,15 +1,19 @@
+'use strict';
+
var optional = require('./lib/optional')
, http = require('http')
, https = optional('https')
- , tls = optional('tls')
, url = require('url')
, util = require('util')
, stream = require('stream')
, qs = require('qs')
, querystring = require('querystring')
- , crypto = require('crypto')
, zlib = require('zlib')
-
+ , helpers = require('./lib/helpers')
+ , safeStringify = helpers.safeStringify
+ , md5 = helpers.md5
+ , isReadStream = helpers.isReadStream
+ , toBase64 = helpers.toBase64
, bl = require('bl')
, oauth = optional('oauth-sign')
, hawk = optional('hawk')
@@ -18,7 +22,6 @@ var optional = require('./lib/optional')
, uuid = require('node-uuid')
, mime = require('mime-types')
, tunnel = require('tunnel-agent')
- , _safeStringify = require('json-stringify-safe')
, stringstream = optional('stringstream')
, caseless = require('caseless')
@@ -33,13 +36,6 @@ var optional = require('./lib/optional')
, net = require('net')
;
-function safeStringify (obj) {
- var ret
- try { ret = JSON.stringify(obj) }
- catch (e) { ret = _safeStringify(obj) }
- return ret
-}
-
var globalPool = {}
var isUrl = /^https?:|^unix:/
@@ -70,79 +66,45 @@ var defaultProxyHeaderWhiteList = [
'via'
]
-function isReadStream (rs) {
- return rs.readable && rs.path && rs.mode;
-}
-
-function toBase64 (str) {
- return (new Buffer(str || "", "ascii")).toString("base64")
-}
-
-function md5 (str) {
- return crypto.createHash('md5').update(str).digest('hex')
-}
-
-// Return a simpler request object to allow serialization
-function requestToJSON() {
- return {
- uri: this.uri,
- method: this.method,
- headers: this.headers
- }
-}
-
-// Return a simpler response object to allow serialization
-function responseToJSON() {
- return {
- statusCode: this.statusCode,
- body: this.body,
- headers: this.headers,
- request: requestToJSON.call(this.request)
- }
-}
-
function Request (options) {
- stream.Stream.call(this)
- this.readable = true
- this.writable = true
+ // if tunnel property of options was not given default to false
+ // if given the method property in options, set property explicitMethod to true
- if (typeof options === 'string') {
- options = {uri:options}
- }
+ // extend the Request instance with any non-reserved properties
+ // remove any reserved functions from the options object
+ // set Request instance to be readable and writable
+ // call init
+ var self = this
+ stream.Stream.call(self)
var reserved = Object.keys(Request.prototype)
- for (var i in options) {
- if (reserved.indexOf(i) === -1) {
- this[i] = options[i]
- } else {
- if (typeof options[i] === 'function') {
- delete options[i]
- }
- }
+ var nonReserved = filterForNonReserved(reserved, options)
+ util._extend(this, nonReserved)
+ options = filterOutReservedFunctions(reserved, options)
+
+ self.readable = true
+ self.writable = true
+ if (typeof options.tunnel === 'undefined') {
+ options.tunnel = false
}
-
if (options.method) {
- this.explicitMethod = true
+ self.explicitMethod = true
}
-
- // Assume that we're not going to tunnel unless we need to
- if (typeof options.tunnel === 'undefined') options.tunnel = false
-
- this.init(options)
+ self.canTunnel = options.tunnel !== false && tunnel
+ self.init(options)
}
-util.inherits(Request, stream.Stream)
+util.inherits(Request, stream.Stream)
// Set up the tunneling agent if necessary
Request.prototype.setupTunnel = function () {
var self = this
- if (typeof self.proxy == 'string') self.proxy = url.parse(self.proxy)
+ if (typeof self.proxy === 'string') self.proxy = url.parse(self.proxy)
if (!self.proxy) return false
// Don't need to use a tunneling proxy
- if (!self.tunnel && self.uri.protocol !== 'https:')
- return
+ if (!self.tunnel && self.uri.protocol !== 'https:') return false
// do the HTTP CONNECT dance using koichik/node-tunnel
@@ -249,8 +211,8 @@ Request.prototype.init = function (options) {
if (!self.uri) {
// this will throw if unhandled but is handleable when in a redirect
return self.emit('error', new Error("options.uri is a required argument"))
- } else {
- if (typeof self.uri == "string") self.uri = url.parse(self.uri)
+ } else if (typeof self.uri === "string") {
+ self.uri = url.parse(self.uri)
}
if (self.strictSSL === false) {
@@ -259,12 +221,51 @@ Request.prototype.init = function (options) {
if(!self.hasOwnProperty('proxy')) {
// check for HTTP(S)_PROXY environment variables
- if(self.uri.protocol == "http:") {
+ if(self.uri.protocol === "http:") {
self.proxy = process.env.HTTP_PROXY || process.env.http_proxy || null;
- } else if(self.uri.protocol == "https:") {
+ } else if(self.uri.protocol === "https:") {
self.proxy = process.env.HTTPS_PROXY || process.env.https_proxy ||
process.env.HTTP_PROXY || process.env.http_proxy || null;
}
+
+ // respect NO_PROXY environment variables
+ // ref: http://lynx.isc.org/current/breakout/lynx_help/keystrokes/environments.html
+ var noProxy = process.env.NO_PROXY || process.env.no_proxy || null
+
+ // easy case first - if NO_PROXY is '*'
+ if (noProxy === '*') {
+ self.proxy = null
+ } else if (noProxy !== null) {
+ var noProxyItem, hostname, port, noProxyItemParts, noProxyHost, noProxyPort, noProxyList
+
+ // canonicalize the hostname, so that 'oogle.com' won't match 'google.com'
+ hostname = self.uri.hostname.replace(/^\.*/, '.').toLowerCase()
+ noProxyList = noProxy.split(',')
+
+ for (var i = 0, len = noProxyList.length; i < len; i++) {
+ noProxyItem = noProxyList[i].trim().toLowerCase()
+
+ // no_proxy can be granular at the port level, which complicates things a bit.
+ if (noProxyItem.indexOf(':') > -1) {
+ noProxyItemParts = noProxyItem.split(':', 2)
+ noProxyHost = noProxyItemParts[0].replace(/^\.*/, '.')
+ noProxyPort = noProxyItemParts[1]
+
+ port = self.uri.port || (self.uri.protocol === 'https:' ? '443' : '80')
+ if (port === noProxyPort && hostname.indexOf(noProxyHost) === hostname.length - noProxyHost.length) {
+ // we've found a match - ports are same and host ends with no_proxy entry.
+ self.proxy = null
+ break
+ }
+ } else {
+ noProxyItem = noProxyItem.replace(/^\.*/, '.')
+ if (hostname.indexOf(noProxyItem) === hostname.length - noProxyItem.length) {
+ self.proxy = null
+ break
+ }
+ }
+ }
+ }
}
// Pass in `tunnel:true` to *always* tunnel through proxies
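
The block added above makes request honor `NO_PROXY`/`no_proxy` alongside the existing `HTTP_PROXY`/`HTTPS_PROXY` handling: `*` disables proxying entirely, otherwise each comma-separated entry is suffix-matched against the canonicalized hostname, optionally per port. A rough sketch of how that plays out at call time (proxy address and hostnames are made up):

``` javascript
var request = require('request');

process.env.HTTP_PROXY = 'http://proxy.internal:8080'; // hypothetical proxy
process.env.NO_PROXY = 'localhost,.example.com';

// proxied: no NO_PROXY entry matches this host
request('http://api.other-host.test/', function (err, res) { /* ... */ });

// direct: the hostname suffix-matches the ".example.com" entry, so self.proxy is cleared
request('http://www.example.com/', function (err, res) { /* ... */ });
```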
@@ -275,7 +276,7 @@ Request.prototype.init = function (options) {
if (!self.uri.pathname) {self.uri.pathname = '/'}
- if (!self.uri.host && !self.protocol=='unix:') {
+ if (!self.uri.host && self.uri.protocol !== 'unix:') {
// Invalid URI: it may generate lot of bad errors, like "TypeError: Cannot call method 'indexOf' of undefined" in CookieJar
// Detect and reject it as soon as possible
var faultyUri = url.format(self.uri)
@@ -286,8 +287,8 @@ Request.prototype.init = function (options) {
// they should be warned that it can be caused by a redirection (can save some hair)
message += '. This can be caused by a crappy redirection.'
}
- self.emit('error', new Error(message))
- return // This error was fatal
+ // This error was fatal
+ return self.emit('error', new Error(message))
}
self._redirectsFollowed = self._redirectsFollowed || 0
@@ -306,7 +307,7 @@ Request.prototype.init = function (options) {
if (self.uri.port) {
if ( !(self.uri.port === 80 && self.uri.protocol === 'http:') &&
!(self.uri.port === 443 && self.uri.protocol === 'https:') )
- self.setHeader('host', self.getHeader('host') + (':'+self.uri.port) )
+ self.setHeader('host', self.getHeader('host') + (':' + self.uri.port) )
}
self.setHost = true
}
@@ -314,8 +315,8 @@ Request.prototype.init = function (options) {
self.jar(self._jar || options.jar)
if (!self.uri.port) {
- if (self.uri.protocol == 'http:') {self.uri.port = 80}
- else if (self.uri.protocol == 'https:') {self.uri.port = 443}
+ if (self.uri.protocol === 'http:') {self.uri.port = 80}
+ else if (self.uri.protocol === 'https:') {self.uri.port = 443}
}
if (self.proxy && !self.tunnel) {
@@ -361,6 +362,16 @@ Request.prototype.init = function (options) {
self.form(options.form)
}
+ if (options.formData) {
+ var formData = options.formData
+ var requestForm = self.form()
+ for (var formKey in formData) {
+ if (formData.hasOwnProperty(formKey)) {
+ requestForm.append(formKey, formData[formKey])
+ }
+ }
+ }
+
if (options.qs) self.qs(options.qs)
if (self.uri.path) {
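
The new `formData` block above simply appends every own key of `options.formData` to the underlying `form-data` stream, so a multipart upload can now be expressed as a single options object. A hedged sketch (endpoint and file path are hypothetical):

``` javascript
var fs = require('fs');
var request = require('request');

request.post({
  url: 'http://upload.example.test/files', // hypothetical endpoint
  formData: {
    title: 'monthly report',
    attachment: fs.createReadStream('/tmp/report.pdf') // streamed as a file part
  }
}, function (err, res, body) {
  if (err) return console.error('upload failed:', err);
  console.log('upload complete:', res.statusCode);
});
```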
@@ -406,16 +417,16 @@ Request.prototype.init = function (options) {
}
if (self.uri.auth && !self.hasHeader('authorization')) {
- var authPieces = self.uri.auth.split(':').map(function(item){ return querystring.unescape(item) })
- self.auth(authPieces[0], authPieces.slice(1).join(':'), true)
+ var uriAuthPieces = self.uri.auth.split(':').map(function(item){ return querystring.unescape(item) })
+ self.auth(uriAuthPieces[0], uriAuthPieces.slice(1).join(':'), true)
}
if (self.proxy && !self.tunnel) {
if (self.proxy.auth && !self.proxyAuthorization) {
- var authPieces = self.proxy.auth.split(':').map(function(item){
+ var proxyAuthPieces = self.proxy.auth.split(':').map(function(item){
return querystring.unescape(item)
})
- var authHeader = 'Basic ' + toBase64(authPieces.join(':'))
+ var authHeader = 'Basic ' + toBase64(proxyAuthPieces.join(':'))
self.proxyAuthorization = authHeader
}
if (self.proxyAuthorization)
@@ -565,42 +576,41 @@ Request.prototype.init = function (options) {
self.unixsocket = true;
- var full_path = self.uri.href.replace(self.uri.protocol+'/', '');
+ var full_path = self.uri.href.replace(self.uri.protocol + '/', '');
var lookup = full_path.split('/');
- var error_connecting = true;
var lookup_table = {};
- do { lookup_table[lookup.join('/')]={} } while(lookup.pop())
- for (r in lookup_table){
+ do { lookup_table[lookup.join('/')] = {} } while(lookup.pop())
+ for (var r in lookup_table){
try_next(r);
}
- function try_next(table_row){
+ function try_next(table_row) {
var client = net.connect( table_row );
client.path = table_row
- client.on('error', function(){ lookup_table[this.path].error_connecting=true; this.end(); });
- client.on('connect', function(){ lookup_table[this.path].error_connecting=false; this.end(); });
+ client.on('error', function(){ lookup_table[this.path].error_connecting = true; this.end(); });
+ client.on('connect', function(){ lookup_table[this.path].error_connecting = false; this.end(); });
table_row.client = client;
}
wait_for_socket_response();
- response_counter = 0;
+ var response_counter = 0;
function wait_for_socket_response(){
var detach;
- if('undefined' == typeof setImmediate ) detach = process.nextTick
+ if(typeof setImmediate === 'undefined') detach = process.nextTick
else detach = setImmediate;
detach(function(){
// counter to prevent infinite blocking waiting for an open socket to be found.
response_counter++;
var trying = false;
for (r in lookup_table){
- if('undefined' == typeof lookup_table[r].error_connecting)
+ if(typeof lookup_table[r].error_connecting === 'undefined')
trying = true;
}
- if(trying && response_counter<1000)
+ if(trying && response_counter < 1000)
wait_for_socket_response()
else
set_socket_properties();
@@ -615,7 +625,7 @@ Request.prototype.init = function (options) {
}
}
if(!host){
- self.emit('error', new Error("Failed to connect to any socket in "+full_path))
+ self.emit('error', new Error("Failed to connect to any socket in " + full_path))
}
var path = full_path.replace(host, '')
@@ -769,10 +779,10 @@ Request.prototype.getAgent = function () {
// we're using a stored agent. Make sure it's protocol-specific
poolKey = this.uri.protocol + poolKey
- // already generated an agent for this setting
- if (this.pool[poolKey]) return this.pool[poolKey]
+ // generate a new agent for this setting if none yet exists
+ if (!this.pool[poolKey]) this.pool[poolKey] = new Agent(options)
- return this.pool[poolKey] = new Agent(options)
+ return this.pool[poolKey]
}
Request.prototype.start = function () {
@@ -803,7 +813,7 @@ Request.prototype.start = function () {
if (self.timeout && !self.timeoutTimer) {
self.timeoutTimer = setTimeout(function () {
- self.req.abort()
+ self.abort()
var e = new Error("ETIMEDOUT")
e.code = "ETIMEDOUT"
self.emit("error", e)
@@ -827,6 +837,10 @@ Request.prototype.start = function () {
self.req.on('drain', function() {
self.emit('drain')
})
+ self.req.on('socket', function(socket) {
+ self.emit('socket', socket)
+ })
+
self.on('end', function() {
if ( self.req.connection ) self.req.connection.removeListener('error', self._parserErrorHandler)
})
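
The added listener re-emits the underlying connection's `socket` event on the Request object, so callers can get at the raw `net.Socket` once it is assigned. For example (illustrative only):

``` javascript
var request = require('request');

request('http://example.com/')
  .on('socket', function (socket) {
    socket.setKeepAlive(true); // tune the raw net.Socket
  })
  .on('response', function (res) {
    console.log('status:', res.statusCode);
  });
```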
@@ -850,8 +864,8 @@ Request.prototype.onResponse = function (response) {
return
}
if (self._paused) response.pause()
- // Check that response.resume is defined. Workaround for browserify.
- else response.resume && response.resume()
+ // response.resume should be defined, but check anyway before calling. Workaround for browserify.
+ else if (response.resume) response.resume()
self.response = response
response.request = self
@@ -863,7 +877,7 @@ Request.prototype.onResponse = function (response) {
!response.client.authorized)) {
debug('strict ssl error', self.uri.href)
var sslErr = response.hasOwnProperty('client') ? response.client.authorizationError : self.uri.href + " does not support SSL";
- self.emit('error', new Error('SSL Error: '+ sslErr))
+ self.emit('error', new Error('SSL Error: ' + sslErr))
return
}
@@ -873,11 +887,11 @@ Request.prototype.onResponse = function (response) {
self.timeoutTimer = null
}
- var targetCookieJar = (self._jar && self._jar.setCookieSync)?self._jar:globalCookieJar;
+ var targetCookieJar = (self._jar && self._jar.setCookie) ? self._jar : globalCookieJar;
var addCookie = function (cookie) {
//set the cookie if it's domain in the href's domain.
try {
- targetCookieJar.setCookieSync(cookie, self.uri.href, {ignoreError: true});
+ targetCookieJar.setCookie(cookie, self.uri.href, {ignoreError: true});
} catch (e) {
self.emit('error', e);
}
@@ -911,7 +925,7 @@ Request.prototype.onResponse = function (response) {
break
}
}
- } else if (response.statusCode == 401 && self._hasAuth && !self._sentAuth) {
+ } else if (response.statusCode === 401 && self._hasAuth && !self._sentAuth) {
var authHeader = response.caseless.get('www-authenticate')
var authVerb = authHeader && authHeader.split(' ')[0].toLowerCase()
debug('reauth', authVerb)
@@ -969,12 +983,12 @@ Request.prototype.onResponse = function (response) {
authHeader = []
for (var k in authValues) {
- if (!authValues[k]) {
- //ignore
- } else if (k === 'qop' || k === 'nc' || k === 'algorithm') {
- authHeader.push(k + '=' + authValues[k])
- } else {
- authHeader.push(k + '="' + authValues[k] + '"')
+ if (authValues[k]) {
+ if (k === 'qop' || k === 'nc' || k === 'algorithm') {
+ authHeader.push(k + '=' + authValues[k])
+ } else {
+ authHeader.push(k + '="' + authValues[k] + '"')
+ }
}
}
authHeader = 'Digest ' + authHeader.join(', ')
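
This hunk only restructures how the Digest `Authorization` header is assembled (skip falsy values; quote everything except `qop`, `nc` and `algorithm`). Digest authentication itself is still driven by the `auth` option with `sendImmediately: false`, roughly as in this sketch against a hypothetical protected URL:

``` javascript
var request = require('request');

request.get('http://protected.example.test/resource', { // hypothetical digest-protected endpoint
  auth: { user: 'bob', pass: 'secret', sendImmediately: false } // wait for the 401 challenge
}, function (err, res, body) {
  if (err) return console.error(err);
  console.log('status after digest handshake:', res.statusCode);
});
```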
@@ -994,7 +1008,7 @@ Request.prototype.onResponse = function (response) {
if (self._paused) response.resume()
if (self._redirectsFollowed >= self.maxRedirects) {
- self.emit('error', new Error("Exceeded maxRedirects. Probably stuck in a redirect loop "+self.uri.href))
+ self.emit('error', new Error("Exceeded maxRedirects. Probably stuck in a redirect loop " + self.uri.href))
return
}
self._redirectsFollowed += 1
@@ -1016,13 +1030,13 @@ Request.prototype.onResponse = function (response) {
, redirectUri: redirectTo
}
)
- if (self.followAllRedirects && response.statusCode != 401 && response.statusCode != 307) self.method = 'GET'
+ if (self.followAllRedirects && response.statusCode !== 401 && response.statusCode !== 307) self.method = 'GET'
// self.method = 'GET' // Force all redirects to use GET || commented out fixes #215
delete self.src
delete self.req
delete self.agent
delete self._started
- if (response.statusCode != 401 && response.statusCode != 307) {
+ if (response.statusCode !== 401 && response.statusCode !== 307) {
// Remove parameters from the previous response, unless this is the second request
// for a server that requires digest authentication.
delete self.body
@@ -1140,7 +1154,7 @@ Request.prototype.onResponse = function (response) {
} catch (e) {}
}
debug('emitting complete', self.uri.href)
- if(response.body == undefined && !self._json) {
+ if(typeof response.body === 'undefined' && !self._json) {
response.body = "";
}
self.emit('complete', response, response.body)
@@ -1250,7 +1264,7 @@ Request.prototype.multipart = function (multipart) {
multipart.forEach(function (part) {
var body = part.body
- if(body == null) throw Error('Body attribute missing in multipart.')
+ if(typeof body === 'undefined') throw new Error('Body attribute missing in multipart.')
delete part.body
var preamble = '--' + self.boundary + '\r\n'
Object.keys(part).forEach(function (key) {
@@ -1262,6 +1276,11 @@ Request.prototype.multipart = function (multipart) {
self.body.push(new Buffer('\r\n'))
})
self.body.push(new Buffer('--' + self.boundary + '--'))
+
+ if (self.postambleCRLF) {
+ self.body.push(new Buffer('\r\n'))
+ }
+
return self
}
Request.prototype.json = function (val) {
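
The `postambleCRLF` addition appends one final CRLF after the closing multipart boundary, which some servers insist on. Used together with the existing `multipart` option it might look like this (endpoint is hypothetical):

``` javascript
var request = require('request');

request.post({
  url: 'http://api.example.test/multipart', // hypothetical endpoint
  postambleCRLF: true, // emit the trailing \r\n added in the hunk above
  multipart: [
    { 'content-type': 'application/json', body: JSON.stringify({ hello: 'world' }) },
    { body: 'a plain text part' }
  ]
}, function (err, res, body) { /* ... */ });
```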
@@ -1301,7 +1320,7 @@ Request.prototype.auth = function (user, pass, sendImmediately, bearer) {
if (bearer !== undefined) {
this._bearer = bearer
this._hasAuth = true
- if (sendImmediately || typeof sendImmediately == 'undefined') {
+ if (sendImmediately || typeof sendImmediately === 'undefined') {
if (typeof bearer === 'function') {
bearer = bearer()
}
@@ -1317,7 +1336,7 @@ Request.prototype.auth = function (user, pass, sendImmediately, bearer) {
this._pass = pass
this._hasAuth = true
var header = typeof pass !== 'undefined' ? user + ':' + pass : user
- if (sendImmediately || typeof sendImmediately == 'undefined') {
+ if (sendImmediately || typeof sendImmediately === 'undefined') {
this.setHeader('authorization', 'Basic ' + toBase64(header))
this._sentAuth = true
}
@@ -1389,7 +1408,7 @@ Request.prototype.oauth = function (_oauth) {
}
var oa = {}
- for (var i in _oauth) oa['oauth_'+i] = _oauth[i]
+ for (var i in _oauth) oa['oauth_' + i] = _oauth[i]
if ('oauth_realm' in oa) delete oa.oauth_realm
if (!oa.oauth_version) oa.oauth_version = '1.0'
@@ -1409,7 +1428,7 @@ Request.prototype.oauth = function (_oauth) {
var realm = _oauth.realm ? 'realm="' + _oauth.realm + '",' : '';
var authHeader = 'OAuth ' + realm +
- Object.keys(oa).sort().map(function (i) {return i+'="'+oauth.rfc3986(oa[i])+'"'}).join(',')
+ Object.keys(oa).sort().map(function (i) {return i + '="' + oauth.rfc3986(oa[i]) + '"'}).join(',')
authHeader += ',oauth_signature="' + oauth.rfc3986(signature) + '"'
this.setHeader('Authorization', authHeader)
return this
@@ -1426,11 +1445,11 @@ Request.prototype.jar = function (jar) {
cookies = false
this._disableCookies = true
} else {
- var targetCookieJar = (jar && jar.getCookieStringSync)?jar:globalCookieJar;
+ var targetCookieJar = (jar && jar.getCookieString) ? jar : globalCookieJar;
var urihref = this.uri.href
//fetch cookie in the Specified host
if (targetCookieJar) {
- cookies = targetCookieJar.getCookieStringSync(urihref);
+ cookies = targetCookieJar.getCookieString(urihref);
}
}
@@ -1488,10 +1507,60 @@ Request.prototype.destroy = function () {
else if (this.response) this.response.destroy()
}
-Request.prototype.toJSON = requestToJSON
-
Request.defaultProxyHeaderWhiteList =
defaultProxyHeaderWhiteList.slice()
+// Helpers
+
+// Return a simpler request object to allow serialization
+function requestToJSON() {
+ return {
+ uri: this.uri,
+ method: this.method,
+ headers: this.headers
+ }
+}
+
+// Return a simpler response object to allow serialization
+function responseToJSON() {
+ return {
+ statusCode: this.statusCode,
+ body: this.body,
+ headers: this.headers,
+ request: requestToJSON.call(this.request)
+ }
+}
+
+function filterForNonReserved(reserved, options) {
+ // Filter out properties that are not reserved.
+ // Reserved values are passed in at call site.
+
+ var object = {}
+ for (var i in options) {
+ var notReserved = (reserved.indexOf(i) === -1)
+ if (notReserved) {
+ object[i] = options[i]
+ }
+ }
+ return object
+}
+
+function filterOutReservedFunctions(reserved, options) {
+ // Filter out properties that are functions and are reserved.
+ // Reserved values are passed in at call site.
+
+ var object = {}
+ for (var i in options) {
+ var isReserved = !(reserved.indexOf(i) === -1)
+ var isFunction = (typeof options[i] === 'function')
+ if (!(isReserved && isFunction)) {
+ object[i] = options[i]
+ }
+ }
+ return object
+}
+
+// Exports
+Request.prototype.toJSON = requestToJSON
module.exports = Request
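
Since `Request.prototype.toJSON` remains wired to `requestToJSON` at the bottom, serializing a request still yields only its `uri`, `method` and `headers`. A quick check (URL is illustrative):

``` javascript
var request = require('request');

var req = request('http://example.com/');
req.on('response', function () {
  // JSON.stringify picks up Request.prototype.toJSON, i.e. requestToJSON above
  console.log(JSON.stringify(req, null, 2)); // { "uri": ..., "method": "GET", "headers": ... }
});
```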