github.com/npm/cli.git
author    Rebecca Turner <me@re-becca.org>    2017-08-19 04:07:46 +0300
committer Rebecca Turner <me@re-becca.org>    2017-08-23 02:17:26 +0300
commit    d080379f620c716afa2c1d2e2ffc0a1ac3459194 (patch)
tree      80fc46155e116b054623358cefba27fe816a6862 /node_modules
parent    5ec72ab5b27c5c83cee9ff568cf75a9479d4b83a (diff)
pacote@6.0.1
Credit: @zkat
Diffstat (limited to 'node_modules')
-rw-r--r--  node_modules/pacote/CHANGELOG.md | 60
-rw-r--r--  node_modules/pacote/README.md | 3
-rw-r--r--  node_modules/pacote/extract.js | 27
-rw-r--r--  node_modules/pacote/lib/extract-stream.js | 63
-rw-r--r--  node_modules/pacote/lib/fetchers/registry/fetch.js | 1
-rw-r--r--  node_modules/pacote/lib/finalize-manifest.js | 49
-rw-r--r--  node_modules/pacote/lib/util/pack-dir.js | 35
-rw-r--r--  node_modules/pacote/node_modules/npm-packlist/LICENSE | 15
-rw-r--r--  node_modules/pacote/node_modules/npm-packlist/README.md | 69
-rw-r--r--  node_modules/pacote/node_modules/npm-packlist/index.js | 214
-rw-r--r--  node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/LICENSE | 15
-rw-r--r--  node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/README.md | 60
-rw-r--r--  node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/index.js | 266
-rw-r--r--  node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/package.json | 71
-rw-r--r--  node_modules/pacote/node_modules/npm-packlist/node_modules/npm-bundled/README.md | 46
-rw-r--r--  node_modules/pacote/node_modules/npm-packlist/node_modules/npm-bundled/index.js | 227
-rw-r--r--  node_modules/pacote/node_modules/npm-packlist/node_modules/npm-bundled/package.json | 60
-rw-r--r--  node_modules/pacote/node_modules/npm-packlist/package.json | 68
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/.npmignore | 2
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/.travis.yml | 3
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/LICENSE | 21
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/README.md | 143
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/index.js | 323
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/node_modules/pump/.npmignore | 1
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/node_modules/pump/.travis.yml | 5
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/node_modules/pump/LICENSE | 21
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/node_modules/pump/README.md | 56
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/node_modules/pump/index.js | 80
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/LICENSE | 21
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/README.md | 52
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/index.js | 83
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/package.json | 62
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/node_modules/pump/package.json | 61
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/node_modules/pump/test-browser.js | 58
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/node_modules/pump/test.js | 46
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/package.json | 70
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/test/fixtures/a/hello.txt | 1
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/test/fixtures/b/a/test.txt | 1
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/test/fixtures/c/.npmignore | 1
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/test/fixtures/d/file2 | 0
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-dir/file5 | 0
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-files/file3 | 0
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-files/file4 | 0
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/test/fixtures/e/directory/.ignore | 0
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/test/fixtures/e/file | 0
-rw-r--r--  node_modules/pacote/node_modules/tar-fs/test/index.js | 227
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/LICENSE | 21
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/README.md | 168
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/extract.js | 246
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/headers.js | 286
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/index.js | 2
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/bl/.npmignore | 1
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/bl/.travis.yml | 15
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/bl/LICENSE.md | 13
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/bl/README.md | 208
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/bl/bl.js | 280
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/bl/package.json | 62
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/bl/test/test.js | 701
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/LICENSE | 21
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/README.md | 52
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/index.js | 83
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/package.json | 62
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/xtend/.npmignore | 1
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/xtend/LICENCE | 19
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/xtend/Makefile | 4
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/xtend/README.md | 32
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/xtend/immutable.js | 19
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/xtend/mutable.js | 17
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/xtend/package.json | 86
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/node_modules/xtend/test.js | 83
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/pack.js | 254
-rw-r--r--  node_modules/pacote/node_modules/tar-stream/package.json | 88
-rw-r--r--  node_modules/pacote/node_modules/tar/LICENSE | 15
-rw-r--r--  node_modules/pacote/node_modules/tar/README.md | 883
-rw-r--r--  node_modules/pacote/node_modules/tar/index.js | 18
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/create.js | 110
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/extract.js | 127
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/header.js | 272
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/high-level-opt.js | 29
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/large-numbers.js | 92
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/list.js | 132
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/mkdir.js | 207
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/pack.js | 399
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/parse.js | 415
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/pax.js | 145
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/read-entry.js | 94
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/replace.js | 211
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/types.js | 44
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/unpack.js | 481
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/update.js | 36
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/warn-mixin.js | 14
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/winchars.js | 23
-rw-r--r--  node_modules/pacote/node_modules/tar/lib/write-entry.js | 395
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/.npmignore | 4
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/.travis.yml | 7
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/README.md | 46
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/b.js | 12
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/extend-minipass.js | 11
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/extend-through2.js | 12
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/extend-transform.js | 11
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/nullsink.js | 12
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/numbers.js | 41
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/timer.js | 15
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/bench/test.js | 160
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/d.js | 7
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/e.js | 17
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/eos.js | 12
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/foo (renamed from node_modules/pacote/node_modules/tar-fs/test/fixtures/d/file1) | 0
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/index.js | 295
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/minipass-benchmarks.xlsx | bin 0 -> 54935 bytes
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/package.json | 64
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/test/basic.js | 438
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minipass/test/empty-end.js | 38
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minizlib/LICENSE | 26
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minizlib/README.md | 44
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minizlib/constants.js | 46
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minizlib/index.js | 333
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/minizlib/package.json | 71
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/yallist/LICENSE | 15
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/yallist/README.md | 204
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/yallist/iterator.js | 8
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/yallist/package.json | 63
-rw-r--r--  node_modules/pacote/node_modules/tar/node_modules/yallist/yallist.js | 376
-rw-r--r--  node_modules/pacote/node_modules/tar/package.json | 76
-rw-r--r--  node_modules/pacote/package.json | 43
125 files changed, 7883 insertions, 4277 deletions
diff --git a/node_modules/pacote/CHANGELOG.md b/node_modules/pacote/CHANGELOG.md
index 49f29dfe3..d09a5cbe4 100644
--- a/node_modules/pacote/CHANGELOG.md
+++ b/node_modules/pacote/CHANGELOG.md
@@ -2,6 +2,66 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+<a name="6.0.1"></a>
+## [6.0.1](https://github.com/zkat/pacote/compare/v6.0.0...v6.0.1) (2017-08-22)
+
+
+### Bug Fixes
+
+* **finalize:** insist on getting a package.json ([f72ee91](https://github.com/zkat/pacote/commit/f72ee91))
+
+
+
+<a name="6.0.0"></a>
+# [6.0.0](https://github.com/zkat/pacote/compare/v5.0.1...v6.0.0) (2017-08-19)
+
+
+### Bug Fixes
+
+* **tar:** bring back the .gitignore -> .npmignore logic (#113) ([0dd518e](https://github.com/zkat/pacote/commit/0dd518e))
+
+
+### BREAKING CHANGES
+
+* **tar:** this reverts a previous change to disable this feature.
+
+
+
+<a name="5.0.1"></a>
+## [5.0.1](https://github.com/zkat/pacote/compare/v5.0.0...v5.0.1) (2017-08-17)
+
+
+### Bug Fixes
+
+* **tar:** chown directories on extract as well ([2fa4598](https://github.com/zkat/pacote/commit/2fa4598))
+
+
+
+<a name="5.0.0"></a>
+# [5.0.0](https://github.com/zkat/pacote/compare/v4.0.0...v5.0.0) (2017-08-16)
+
+
+### Bug Fixes
+
+* **registry:** Pass maxSockets options down (#110) ([3f05b79](https://github.com/zkat/pacote/commit/3f05b79))
+
+
+### Features
+
+* **deps:** replace tar-fs/tar-stream with tar@3 ([28c80a9](https://github.com/zkat/pacote/commit/28c80a9))
+* **tar:** switch to tarv3 ([53899c7](https://github.com/zkat/pacote/commit/53899c7))
+
+
+### BREAKING CHANGES
+
+* **tar:** this changes the underlying tar library, and thus may introduce some subtle low-level incompatibility. Also:
+
+* The tarball packer built into pacote works much closer to how the one npm injects does.
+* Special characters on Windows will now be escaped the way tar(1) usually does: by replacing them with the `0xf000` masked character on the way out.
+* Directories won't be chowned.
+
+
+
<a name="4.0.0"></a>
# [4.0.0](https://github.com/zkat/pacote/compare/v3.0.0...v4.0.0) (2017-06-29)
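The `0xf000` masking called out in the 5.0.0 breaking changes above comes from the new tar library (its `lib/winchars.js` appears in the diffstat). A minimal sketch of the idea, assuming the usual set of Windows-reserved name characters; this is an illustration, not pacote's own code:

```js
// Sketch only: shift Windows-reserved filename characters into the
// 0xf000 private-use range on pack, and shift them back on unpack.
const escape = name => name.replace(/[|<>?:]/g,
  c => String.fromCharCode(0xf000 + c.charCodeAt(0)))
const unescape = name => name.replace(/[\uf07c\uf03c\uf03e\uf03f\uf03a]/g,
  c => String.fromCharCode(c.charCodeAt(0) - 0xf000))

console.log(escape('a<b'))           // 'a\uf03cb'
console.log(unescape(escape('a<b'))) // 'a<b'
```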
diff --git a/node_modules/pacote/README.md b/node_modules/pacote/README.md
index e931dd809..339777624 100644
--- a/node_modules/pacote/README.md
+++ b/node_modules/pacote/README.md
@@ -22,7 +22,7 @@ needed to reduce excess operations, using [`cacache`](https://npm.im/cacache).
* [`extract`](#extract)
* [`prefetch`](#prefetch)
* [`options`](#options)
- * [`clearMemoized`](#clear-memoized)
+ * [`clearMemoized`](#clearMemoized)
### Example
@@ -151,6 +151,7 @@ directly for matching contents before performing any other operations.
##### `opts.@somescope:registry`
##### `opts.auth`
##### `opts.log`
+##### `opts.maxSockets`
Default: `silentNpmLog`
diff --git a/node_modules/pacote/extract.js b/node_modules/pacote/extract.js
index 01036519a..4312f1a9a 100644
--- a/node_modules/pacote/extract.js
+++ b/node_modules/pacote/extract.js
@@ -4,8 +4,8 @@ const BB = require('bluebird')
const cacache = require('cacache')
const extractStream = require('./lib/extract-stream')
+const mkdirp = BB.promisify(require('mkdirp'))
const npa = require('npm-package-arg')
-const pipe = BB.promisify(require('mississippi').pipe)
const optCheck = require('./lib/util/opt-check')
const retry = require('promise-retry')
const rimraf = BB.promisify(require('rimraf'))
@@ -59,21 +59,34 @@ function extract (spec, dest, opts) {
}
function extractByDigest (start, spec, dest, opts) {
- const xtractor = extractStream(dest, opts)
- const cached = cacache.get.stream.byDigest(opts.cache, opts.integrity, opts)
- return pipe(cached, xtractor).then(() => {
+ return mkdirp(dest).then(() => {
+ const xtractor = extractStream(dest, opts)
+ const cached = cacache.get.stream.byDigest(opts.cache, opts.integrity, opts)
+ cached.pipe(xtractor)
+ return new BB((resolve, reject) => {
+ cached.on('error', reject)
+ xtractor.on('error', reject)
+ xtractor.on('close', resolve)
+ })
+ }).then(() => {
opts.log.silly('pacote', `${spec} extracted to ${dest} by content address ${Date.now() - start}ms`)
})
}
let fetch
function extractByManifest (start, spec, dest, opts) {
- const xtractor = extractStream(dest, opts)
- return BB.resolve(null).then(() => {
+ return mkdirp(dest).then(() => {
+ const xtractor = extractStream(dest, opts)
if (!fetch) {
fetch = require('./lib/fetch')
}
- return pipe(fetch.tarball(spec, opts), xtractor)
+ const tardata = fetch.tarball(spec, opts)
+ tardata.pipe(xtractor)
+ return new BB((resolve, reject) => {
+ tardata.on('error', reject)
+ xtractor.on('error', reject)
+ xtractor.on('close', resolve)
+ })
}).then(() => {
opts.log.silly('pacote', `${spec} extracted in ${Date.now() - start}ms`)
})
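Both extraction paths above now share the same shape: create the destination directory first (hence the new `mkdirp`), then pipe manually and settle a promise on stream events instead of using `mississippi.pipe`. A condensed sketch of that pattern, with `source` and `xtractor` as stand-ins for the streams in the diff:

```js
const BB = require('bluebird')

// Pipe `source` into `xtractor` and resolve once extraction finishes;
// an error on either stream rejects the promise.
function pipeToExtractor (source, xtractor) {
  source.pipe(xtractor)
  return new BB((resolve, reject) => {
    source.on('error', reject)
    xtractor.on('error', reject)
    xtractor.on('close', resolve)
  })
}
```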
diff --git a/node_modules/pacote/lib/extract-stream.js b/node_modules/pacote/lib/extract-stream.js
index f6f68bc1e..117531948 100644
--- a/node_modules/pacote/lib/extract-stream.js
+++ b/node_modules/pacote/lib/extract-stream.js
@@ -1,61 +1,42 @@
'use strict'
-const gunzip = require('./util/gunzip-maybe')
const path = require('path')
-const pipeline = require('mississippi').pipeline
-const tar = require('tar-fs')
+const tar = require('tar')
module.exports = extractStream
function extractStream (dest, opts) {
opts = opts || {}
- const sawIgnores = {}
- return pipeline(gunzip(), tar.extract(dest, {
- map: (header) => {
- if (process.platform !== 'win32') {
- header.uid = opts.uid == null ? header.uid : opts.uid
- header.gid = opts.gid == null ? header.gid : opts.gid
+ const sawIgnores = new Set()
+ return tar.x({
+ cwd: dest,
+ filter: (name, entry) => !entry.header.type.match(/^.*link$/i),
+ strip: 1,
+ onwarn: msg => opts.log && opts.log.warn('tar', msg),
+ uid: opts.uid,
+ gid: opts.gid,
+ onentry (entry) {
+ if (entry.type.toLowerCase() === 'file') {
+ entry.mode = opts.fmode & ~(opts.umask || 0)
+ } else if (entry.type.toLowerCase() === 'directory') {
+ entry.mode = opts.dmode & ~(opts.umask || 0)
}
+
// Note: This mirrors logic in the fs read operations that are
// employed during tarball creation, in the fstream-npm module.
// It is duplicated here to handle tarballs that are created
// using other means, such as system tar or git archive.
- if (header.type === 'file') {
- const base = path.basename(header.name)
+ if (entry.type.toLowerCase() === 'file') {
+ const base = path.basename(entry.path)
if (base === '.npmignore') {
- sawIgnores[header.name] = true
+ sawIgnores.add(entry.path)
} else if (base === '.gitignore') {
- const npmignore = header.name.replace(/\.gitignore$/, '.npmignore')
- if (!sawIgnores[npmignore]) {
+ const npmignore = entry.path.replace(/\.gitignore$/, '.npmignore')
+ if (!sawIgnores.has(npmignore)) {
// Rename, may be clobbered later.
- header.name = npmignore
+ entry.path = npmignore
}
}
}
- return header
- },
- ignore: makeIgnore(opts.log),
- dmode: opts.dmode,
- fmode: opts.fmode,
- umask: opts.umask,
- strip: 1
- }))
-}
-
-function makeIgnore (log) {
- const sawIgnores = {}
- return (name, header) => _ignore(name, header, sawIgnores, log)
-}
-
-function _ignore (name, header, sawIgnores, logger) {
- if (header.type.match(/^.*link$/)) {
- if (logger) {
- logger.warn(
- 'extract-stream',
- 'excluding symbolic link',
- header.name, '->', header.linkname)
}
- return true
- }
-
- return false
+ })
}
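Outside of pacote's ignore-file bookkeeping, the core of the new implementation is just `tar.x()` with a filter. A standalone sketch of the same options (note the destination directory must already exist, which is why `extract.js` gained its `mkdirp` call):

```js
const fs = require('fs')
const tar = require('tar')

// tar.x() returns a writable stream when no `file` option is given and
// gunzips transparently; the filter drops hard and symbolic links.
fs.createReadStream('package.tgz').pipe(tar.x({
  cwd: 'dest',
  strip: 1,
  filter: (path, entry) => !/link$/i.test(entry.header.type)
}))
```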
diff --git a/node_modules/pacote/lib/fetchers/registry/fetch.js b/node_modules/pacote/lib/fetchers/registry/fetch.js
index 1c6c8e8d6..a947ccea5 100644
--- a/node_modules/pacote/lib/fetchers/registry/fetch.js
+++ b/node_modules/pacote/lib/fetchers/registry/fetch.js
@@ -22,6 +22,7 @@ function regFetch (uri, registry, opts) {
integrity: opts.integrity,
key: opts.key,
localAddress: opts.localAddress,
+ maxSockets: opts.maxSockets,
memoize: opts.memoize,
noProxy: opts.noProxy,
Promise: BB,
diff --git a/node_modules/pacote/lib/finalize-manifest.js b/node_modules/pacote/lib/finalize-manifest.js
index cd303a9fa..321b37cdc 100644
--- a/node_modules/pacote/lib/finalize-manifest.js
+++ b/node_modules/pacote/lib/finalize-manifest.js
@@ -6,18 +6,17 @@ const cacache = require('cacache')
const cacheKey = require('./util/cache-key')
const fetchFromManifest = require('./fetch').fromManifest
const finished = BB.promisify(require('mississippi').finished)
-const gunzip = require('./util/gunzip-maybe')
const minimatch = require('minimatch')
const normalize = require('normalize-package-data')
const optCheck = require('./util/opt-check')
const path = require('path')
const pipe = BB.promisify(require('mississippi').pipe)
const ssri = require('ssri')
-const tar = require('tar-stream')
+const tar = require('tar')
// `finalizeManifest` takes as input the various kinds of manifests that
-// manifest handlers ('lib/handlers/*/manifest.js') return, and makes sure they
-// are:
+// manifest handlers ('lib/fetchers/*.js#manifest()') return, and makes sure
+// they are:
//
// * filled out with any required data that the handler couldn't fill in
// * formatted consistently
@@ -149,23 +148,23 @@ function tarballedProps (pkg, spec, opts) {
} else {
opts = optCheck(opts)
const tarStream = fetchFromManifest(pkg, spec, opts)
- const extracted = needsExtract && tar.extract()
- extracted && extracted.on('entry', (h, str, next) => {
- // Drain it
- str.on('data', () => {}).on('end', next).on('error', next)
- })
+ const extracted = needsExtract && new tar.Parse()
return BB.join(
needsShrinkwrap && jsonFromStream('npm-shrinkwrap.json', extracted),
needsManifest && jsonFromStream('package.json', extracted),
needsBin && getPaths(extracted),
needsHash && ssri.fromStream(tarStream, { algorithms: ['sha1'] }),
- needsExtract && pipe(tarStream, gunzip(), extracted),
+ needsExtract && pipe(tarStream, extracted),
(sr, mani, paths, hash) => {
+ if (needsManifest && !mani) {
+ const err = new Error(`Non-registry package missing package.json: ${spec}.`)
+ err.code = 'ENOPACKAGEJSON'
+ throw err
+ }
const extraProps = mani || {}
delete extraProps._resolved
// drain out the rest of the tarball
- tarStream.unpipe()
- tarStream.on('data', () => {})
+ tarStream.resume()
// if we have directories.bin, we need to collect any matching files
// to add to bin
if (paths && paths.length) {
@@ -199,25 +198,22 @@ function tarballedProps (pkg, spec, opts) {
function jsonFromStream (filename, dataStream) {
return BB.fromNode(cb => {
dataStream.on('error', cb)
- dataStream.on('finish', cb)
- dataStream.on('entry', function handler (header, stream, next) {
- const filePath = header.name.replace(/[^/]+\//, '')
+ dataStream.on('close', cb)
+ dataStream.on('entry', entry => {
+ const filePath = entry.header.path.replace(/[^/]+\//, '')
if (filePath !== filename) {
- next()
+ entry.resume()
} else {
let data = ''
- stream.on('data', d => { data += d })
- stream.on('error', cb)
- finished(stream).then(() => {
- dataStream.removeListener('entry', handler)
+ entry.on('data', d => { data += d })
+ entry.on('error', cb)
+ finished(entry).then(() => {
try {
cb(null, JSON.parse(data))
- next()
} catch (err) {
cb(err)
}
}, err => {
- dataStream.removeListener('entry', handler)
cb(err)
})
}
@@ -229,12 +225,11 @@ function getPaths (dataStream) {
return BB.fromNode(cb => {
let paths = []
dataStream.on('error', cb)
- dataStream.on('finish', () => cb(null, paths))
- dataStream.on('entry', function handler (header, stream, next) {
- const filePath = header.name.replace(/[^/]+\//, '')
- stream.on('data', () => {})
+ dataStream.on('close', () => cb(null, paths))
+ dataStream.on('entry', function handler (entry) {
+ const filePath = entry.header.path.replace(/[^/]+\//, '')
+ entry.resume()
paths.push(filePath)
- next()
})
})
}
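`jsonFromStream` above rides on tar v3's `tar.Parse`, which emits an `entry` per file and, unlike `tar-stream`, handles gzipped input itself (which is why the `gunzip-maybe` stage was dropped). A minimal sketch of the same pattern, pulling one file out of a tarball; the names here are stand-ins:

```js
const fs = require('fs')
const tar = require('tar')

// Resolve with the contents of `filename` inside `tarball`, draining
// every other entry as it streams past.
function fileFromTarball (tarball, filename) {
  return new Promise((resolve, reject) => {
    const parser = new tar.Parse()
    parser.on('entry', entry => {
      // strip the leading 'package/' directory, as the diff does
      if (entry.path.replace(/[^/]+\//, '') !== filename) {
        return entry.resume()
      }
      let data = ''
      entry.on('data', d => { data += d })
      entry.on('end', () => resolve(data))
    })
    parser.on('error', reject)
    fs.createReadStream(tarball).pipe(parser)
  })
}
```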
diff --git a/node_modules/pacote/lib/util/pack-dir.js b/node_modules/pacote/lib/util/pack-dir.js
index 54a94e586..7625f4faf 100644
--- a/node_modules/pacote/lib/util/pack-dir.js
+++ b/node_modules/pacote/lib/util/pack-dir.js
@@ -5,39 +5,40 @@ const BB = require('bluebird')
const cacache = require('cacache')
const cacheKey = require('./cache-key')
const optCheck = require('./opt-check')
+const packlist = require('npm-packlist')
const pipe = BB.promisify(require('mississippi').pipe)
-const tar = require('tar-fs')
+const tar = require('tar')
module.exports = packDir
function packDir (manifest, label, dir, target, opts) {
opts = optCheck(opts)
const packer = opts.dirPacker
- ? opts.dirPacker(manifest, dir)
- : tar.pack(dir, {
- map: header => {
- header.name = 'package/' + header.name
- header.mtime = 0 // make tarballs idempotent
- return header
- },
- ignore: (name) => {
- return name.match(/\.git/)
- }
- })
+ ? BB.resolve(opts.dirPacker(manifest, dir))
+ : mkPacker(dir)
if (!opts.cache) {
- return pipe(packer, target).catch(err => {
- throw err
- })
+ return packer.then(packer => pipe(packer, target))
} else {
const cacher = cacache.put.stream(
opts.cache, cacheKey('packed-dir', label), opts
).on('integrity', i => {
target.emit('integrity', i)
})
- return BB.all([
+ return packer.then(packer => BB.all([
pipe(packer, cacher),
pipe(packer, target)
- ])
+ ]))
}
}
+
+function mkPacker (dir) {
+ return packlist({path: dir}).then(files => {
+ return tar.c({
+ cwd: dir,
+ gzip: true,
+ portable: true,
+ prefix: 'package/'
+ }, files)
+ })
+}
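A side effect of wrapping the packer in `BB.resolve` is that a custom `opts.dirPacker` may now return either a stream or a promise of one. A hedged sketch of such an override (not part of this diff; `tar.c` without a `file` option returns a readable tarball stream):

```js
const tar = require('tar')

const opts = {
  // pack the whole directory instead of consulting npm-packlist
  dirPacker (manifest, dir) {
    return tar.c({ cwd: dir, gzip: true, portable: true, prefix: 'package/' }, ['.'])
  }
}
```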
diff --git a/node_modules/pacote/node_modules/npm-packlist/LICENSE b/node_modules/pacote/node_modules/npm-packlist/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-packlist/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-packlist/README.md b/node_modules/pacote/node_modules/npm-packlist/README.md
new file mode 100644
index 000000000..9efcc2c69
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-packlist/README.md
@@ -0,0 +1,69 @@
+# npm-packlist
+
+[![Build Status](https://travis-ci.com/npm/npm-packlist.svg?token=hHeDp9pQmz9kvsgRNVHy&branch=master)](https://travis-ci.com/npm/npm-packlist)
+
+Get a list of the files to add from a folder into an npm package
+
+These can be handed to [tar](http://npm.im/tar) like so to make an npm
+package tarball:
+
+```js
+const packlist = require('npm-packlist')
+const tar = require('tar')
+const packageDir = '/path/to/package'
+const packageTarball = '/path/to/package.tgz'
+
+packlist({ path: packageDir })
+ .then(files => tar.create({
+ prefix: 'package/',
+ cwd: packageDir,
+ file: packageTarball,
+ gzip: true
+ }, files))
+ .then(_ => {
+ // tarball has been created, continue with your day
+ })
+```
+
+This uses the following rules:
+
+1. If a `package.json` file is found, and it has a `files` list,
+ then ignore everything that isn't in `files`. Always include the
+ readme, license, notice, changes, changelog, and history files, if
+ they exist, and the package.json file itself.
+2. If there's no `package.json` file (or it has no `files` list), and
+ there is a `.npmignore` file, then ignore all the files in the
+ `.npmignore` file.
+3. If there's no `package.json` with a `files` list, and there's no
+ `.npmignore` file, but there is a `.gitignore` file, then ignore
+ all the files in the `.gitignore` file.
+4. Everything in the root `node_modules` is ignored, unless it's a
+ bundled dependency. If it IS a bundled dependency, and it's a
+ symbolic link, then the target of the link is included, not the
+ symlink itself.
+5. Unless they're explicitly included (by being in a `files` list, or
+ a `!negated` rule in a relevant `.npmignore` or `.gitignore`),
+ always ignore certain common cruft files:
+
+ 1. .npmignore and .gitignore files (their effect is in the package
+ already, there's no need to include them in the package)
+ 2. editor junk like `.*.swp`, `._*` and `.*.orig` files
+ 3. A `/test/` or `/tests/` folder at the root
+ 4. `.npmrc` files (these may contain private configs)
+ 5. The `node_modules/.bin` folder
+ 6. Waf and gyp cruft like `/build/config.gypi` and `.lock-wscript`
+ 7. Darwin's `.DS_Store` files because wtf are those even
+ 8. `npm-debug.log` files at the root of a project
+
+ You can explicitly re-include any of these with a `files` list in
+ `package.json` or a negated ignore file rule.
+
+## API
+
+Same API as [ignore-walk](http://npm.im/ignore-walk), just hard-coded
+file list and rule sets.
+
+The `Walker` and `WalkerSync` classes take a `bundled` argument, which
+is a list of package names to include from node_modules. When calling
+the top-level `packlist()` and `packlist.sync()` functions, this
+module calls into `npm-bundled` directly.
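A hedged sketch of that `Walker` usage, under the same assumptions as the README (the class is exported as `packlist.Walker` and emits `done`; `some-dep` is a hypothetical bundled dependency):

```js
const packlist = require('npm-packlist')

new packlist.Walker({
  path: '/path/to/package',
  bundled: ['some-dep'] // include this node_modules package in the walk
})
  .on('error', er => console.error(er))
  .on('done', files => console.log(files))
  .start()
```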
diff --git a/node_modules/pacote/node_modules/npm-packlist/index.js b/node_modules/pacote/node_modules/npm-packlist/index.js
new file mode 100644
index 000000000..b53391d8a
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-packlist/index.js
@@ -0,0 +1,214 @@
+'use strict'
+
+// Do a two-pass walk, first to get the list of packages that need to be
+// bundled, then again to get the actual files and folders.
+// Keep a cache of node_modules content and package.json data, so that the
+// second walk doesn't have to re-do all the same work.
+
+const bundleWalk = require('npm-bundled')
+const BundleWalker = bundleWalk.BundleWalker
+const BundleWalkerSync = bundleWalk.BundleWalkerSync
+
+const ignoreWalk = require('ignore-walk')
+const IgnoreWalker = ignoreWalk.Walker
+const IgnoreWalkerSync = ignoreWalk.WalkerSync
+
+const rootBuiltinRules = Symbol('root-builtin-rules')
+const packageNecessaryRules = Symbol('package-necessary-rules')
+const path = require('path')
+
+const defaultRules = [
+ '.npmignore',
+ '.gitignore',
+ '**/.git/',
+ '**/.svn/',
+ '**/.hg/',
+ '**/CVS/',
+ '/.lock-wscript',
+ '/.wafpickle-*',
+ '/build/config.gypi',
+ 'npm-debug.log',
+ '**/.npmrc',
+ '.*.swp',
+ '.DS_Store',
+ '._*',
+ '*.orig',
+ '/test{,s}/',
+ 'package-lock.json'
+]
+
+// a decorator that applies our custom rules to an ignore walker
+const npmWalker = Class => class Walker extends Class {
+ constructor (opt) {
+ opt = opt || {}
+
+ // the order in which rules are applied.
+ opt.ignoreFiles = [
+ rootBuiltinRules,
+ 'package.json',
+ '.npmignore',
+ '.gitignore',
+ packageNecessaryRules
+ ]
+
+ opt.includeEmpty = false
+ opt.path = opt.path || process.cwd()
+ opt.follow = path.basename(opt.path) === 'node_modules'
+ super(opt)
+
+ // ignore a bunch of things by default at the root level.
+ // also ignore anything in node_modules, except bundled dependencies
+ if (!this.parent) {
+ this.bundled = opt.bundled || []
+ this.bundledScopes = Array.from(new Set(
+ this.bundled.filter(f => /^@/.test(f))
+ .map(f => f.split('/')[0])))
+ const rules = defaultRules.join('\n') + '\n'
+ this.packageJsonCache = opt.packageJsonCache || new Map()
+ super.onReadIgnoreFile(rootBuiltinRules, rules, _=>_)
+ } else {
+ this.bundled = []
+ this.bundledScopes = []
+ this.packageJsonCache = this.parent.packageJsonCache
+ }
+ }
+
+ filterEntry (entry, partial) {
+ // get the partial path from the root of the walk
+ const p = this.path.substr(this.root.length + 1)
+ const pkgre = /^node_modules\/(@[^\/]+\/?[^\/]+|[^\/]+)(\/.*)?$/
+ const pkg = pkgre.test(entry) ? entry.replace(pkgre, '$1') : null
+
+ return (
+ // if we're in a bundled package, check with the parent.
+ /^node_modules($|\/)/i.test(p) ? this.parent.filterEntry(
+ this.basename + '/' + entry, partial)
+
+ // if package is bundled, all files included
+ // also include @scope dirs for bundled scoped deps
+ // they'll be ignored if no files end up in them.
+ : pkg ? -1 !== this.bundled.indexOf(pkg) ||
+ -1 !== this.bundledScopes.indexOf(pkg)
+
+ // only walk top node_modules if we want to bundle something
+ : entry === 'node_modules' && !this.parent ? !!this.bundled.length
+
+ // always include package.json at the root.
+ : entry === 'package.json' && !this.parent ? true
+
+ // otherwise, follow ignore-walk's logic
+ : super.filterEntry(entry, partial)
+ )
+ }
+
+ filterEntries () {
+ if (this.ignoreRules['package.json'])
+ this.ignoreRules['.gitignore'] = this.ignoreRules['.npmignore'] = null
+ else if (this.ignoreRules['.npmignore'])
+ this.ignoreRules['.gitignore'] = null
+ this.filterEntries = super.filterEntries
+ super.filterEntries()
+ }
+
+ addIgnoreFile (file, then) {
+ const ig = path.resolve(this.path, file)
+ if (this.packageJsonCache.has(ig))
+ this.onPackageJson(ig, this.packageJsonCache.get(ig), then)
+ else
+ super.addIgnoreFile(file, then)
+ }
+
+ onPackageJson (ig, pkg, then) {
+ this.packageJsonCache.set(ig, pkg)
+
+ // if there's a browser or main, make sure we don't ignore it
+ const rules = [
+ pkg.browser ? '!' + pkg.browser : '',
+ pkg.main ? '!' + pkg.main : '',
+ '!@(readme|license|licence|notice|changes|changelog|history){,.*}'
+ ].filter(f => f).join('\n') + '\n'
+ super.onReadIgnoreFile(packageNecessaryRules, rules, _=>_)
+
+ if (Array.isArray(pkg.files))
+ super.onReadIgnoreFile('package.json', '*\n' + pkg.files.map(
+ f => '!' + f + '\n!' + f.replace(/\/+$/, '') + '/**'
+ ).join('\n') + '\n', then)
+ else
+ then()
+ }
+
+ // override parent onstat function to nix all symlinks
+ onstat (st, entry, file, dir, then) {
+ if (st.isSymbolicLink())
+ then()
+ else
+ super.onstat(st, entry, file, dir, then)
+ }
+
+ onReadIgnoreFile (file, data, then) {
+ if (file === 'package.json')
+ try {
+ this.onPackageJson(file, JSON.parse(data), then)
+ } catch (er) {
+ // ignore package.json files that are not json
+ then()
+ }
+ else
+ super.onReadIgnoreFile(file, data, then)
+ }
+
+ sort (a, b) {
+ return sort(a, b)
+ }
+}
+
+class Walker extends npmWalker(IgnoreWalker) {
+ walker (entry, then) {
+ new Walker(this.walkerOpt(entry)).on('done', then).start()
+ }
+}
+
+class WalkerSync extends npmWalker(IgnoreWalkerSync) {
+ walker (entry, then) {
+ new WalkerSync(this.walkerOpt(entry)).start()
+ then()
+ }
+}
+
+const walk = (options, callback) => {
+ options = options || {}
+ const p = new Promise((resolve, reject) => {
+ const bw = new BundleWalker(options).start()
+ bw.on('done', bundled => {
+ options.bundled = bundled
+ options.packageJsonCache = bw.packageJsonCache
+ new Walker(options).on('done', resolve).on('error', reject).start()
+ })
+ })
+ return callback ? p.then(res => callback(null, res), callback) : p
+}
+
+const walkSync = options => {
+ options = options || {}
+ const bw = new BundleWalkerSync(options).start()
+ options.bundled = bw.result
+ options.packageJsonCache = bw.packageJsonCache
+ const walker = new WalkerSync(options)
+ walker.start()
+ return walker.result
+}
+
+// package.json first, node_modules last, files before folders, alphasort
+const sort = (a, b) =>
+ a === 'package.json' ? -1
+ : b === 'package.json' ? 1
+ : /^node_modules/.test(a) && !/^node_modules/.test(b) ? 1
+ : /^node_modules/.test(b) && !/^node_modules/.test(a) ? -1
+ : path.dirname(a) === '.' && path.dirname(b) !== '.' ? -1
+ : path.dirname(b) === '.' && path.dirname(a) !== '.' ? 1
+ : a.localeCompare(b)
+
+module.exports = walk
+walk.sync = walkSync
+walk.Walker = Walker
+walk.WalkerSync = WalkerSync
diff --git a/node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/LICENSE b/node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/README.md b/node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/README.md
new file mode 100644
index 000000000..66b69e894
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/README.md
@@ -0,0 +1,60 @@
+# ignore-walk
+
+[![Build
+Status](https://travis-ci.org/isaacs/ignore-walk.svg?branch=master)](https://travis-ci.org/isaacs/ignore-walk)
+
+Nested/recursive `.gitignore`/`.npmignore` parsing and filtering.
+
+Walk a directory creating a list of entries, parsing any `.ignore`
+files met along the way to exclude files.
+
+## USAGE
+
+```javascript
+const walk = require('ignore-walk')
+
+// All options are optional, defaults provided.
+
+// this function returns a promise, but you can also pass a cb
+// if you like that approach better.
+walk({
+ path: '...', // root dir to start in. defaults to process.cwd()
+ ignoreFiles: [ '.gitignore' ], // list of filenames. defaults to ['.ignore']
+ includeEmpty: true|false, // true to include empty dirs, default false
+ follow: true|false // true to follow symlink dirs, default false
+}, callback)
+
+// to walk synchronously, do it this way:
+const result = walk.sync({ path: '/wow/such/filepath' })
+```
+
+If you want to get at the underlying classes, they're at `walk.Walker`
+and `walk.WalkerSync`.
+
+## OPTIONS
+
+* `path` The path to start in. Defaults to `process.cwd()`
+
+* `ignoreFiles` Filenames to treat as ignore files. The default is
+ `['.ignore']`. (This is where you'd put `.gitignore` or
+ `.npmignore` or whatever.) If multiple ignore files are in a
+ directory, then rules from each are applied in the order that the
+ files are listed.
+
+* `includeEmpty` Set to `true` to include empty directories, assuming
+ they are not excluded by any of the ignore rules. If not set, then
+ this follows the standard `git` behavior of not including
+ directories that are empty.
+
+ Note: this will cause an empty directory to be included if it
+ would contain an included entry, even if it would have otherwise
+ been excluded itself.
+
+ For example, given the rules `*` (ignore everything) and `!/a/b/c`
+ (re-include the entry at `/a/b/c`), the directory `/a/b` will be
+ included if it is empty.
+
+* `follow` Set to `true` to treat symbolically linked directories as
+ directories, recursing into them. There is no handling for nested
+ symlinks, so `ELOOP` errors can occur in some cases when using this
+ option. Defaults to `false`.
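Tying the options together, a short sketch of the rule ordering described above: listing `.gitignore` before `.npmignore` means the npm rules are applied after (and can override) the git ones within any given directory.

```js
const walk = require('ignore-walk')

walk({
  path: '/path/to/package',
  ignoreFiles: ['.gitignore', '.npmignore'], // rules applied in this order
  includeEmpty: false,
  follow: false
}).then(files => console.log(`${files.length} files kept`))
```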
diff --git a/node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/index.js b/node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/index.js
new file mode 100644
index 000000000..22517fb0e
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/index.js
@@ -0,0 +1,266 @@
+'use strict'
+
+const fs = require('fs')
+const path = require('path')
+const EE = require('events').EventEmitter
+const Minimatch = require('minimatch').Minimatch
+
+class Walker extends EE {
+ constructor (opts) {
+ opts = opts || {}
+ super(opts)
+ this.path = opts.path || process.cwd()
+ this.basename = path.basename(this.path)
+ this.ignoreFiles = opts.ignoreFiles || [ '.ignore' ]
+ this.ignoreRules = {}
+ this.parent = opts.parent || null
+ this.includeEmpty = !!opts.includeEmpty
+ this.root = this.parent ? this.parent.root : this.path
+ this.follow = !!opts.follow
+ this.result = this.parent ? this.parent.result : []
+ this.entries = null
+ this.sawError = false
+ }
+
+ sort (a, b) {
+ return a.localeCompare(b)
+ }
+
+ emit (ev, data) {
+ let ret = false
+ if (!(this.sawError && ev === 'error')) {
+ if (ev === 'error')
+ this.sawError = true
+ else if (ev === 'done' && !this.parent)
+ data = data.sort(this.sort)
+ if (ev === 'error' && this.parent)
+ ret = this.parent.emit('error', data)
+ else
+ ret = super.emit(ev, data)
+ }
+ return ret
+ }
+
+ start () {
+ fs.readdir(this.path, (er, entries) =>
+ er ? this.emit('error', er) : this.onReaddir(entries))
+ return this
+ }
+
+ isIgnoreFile (e) {
+ return e !== "." &&
+ e !== ".." &&
+ -1 !== this.ignoreFiles.indexOf(e)
+ }
+
+ onReaddir (entries) {
+ this.entries = entries
+ if (entries.length === 0) {
+ if (this.includeEmpty)
+ this.result.push(this.path.substr(this.root.length + 1))
+ this.emit('done', this.result)
+ } else {
+ const hasIg = this.entries.some(e =>
+ this.isIgnoreFile(e))
+
+ if (hasIg)
+ this.addIgnoreFiles()
+ else
+ this.filterEntries()
+ }
+ }
+
+ addIgnoreFiles () {
+ const newIg = this.entries
+ .filter(e => this.isIgnoreFile(e))
+
+ let igCount = newIg.length
+ const then = _ => {
+ if (--igCount === 0)
+ this.filterEntries()
+ }
+
+ newIg.forEach(e => this.addIgnoreFile(e, then))
+ }
+
+ addIgnoreFile (file, then) {
+ const ig = path.resolve(this.path, file)
+ fs.readFile(ig, 'utf8', (er, data) =>
+ er ? this.emit('error', er) : this.onReadIgnoreFile(file, data, then))
+ }
+
+ onReadIgnoreFile (file, data, then) {
+ const mmopt = {
+ matchBase: true,
+ dot: true,
+ flipNegate: true,
+ nocase: true
+ }
+ const rules = data.split(/\r?\n/)
+ .filter(line => !/^#|^$/.test(line.trim()))
+ .map(r => new Minimatch(r, mmopt))
+
+ if (rules.length)
+ this.ignoreRules[file] = rules
+
+ then()
+ }
+
+ filterEntries () {
+ // at this point we either have ignore rules, or just inheriting
+ // this exclusion is at the point where we know the list of
+ // entries in the dir, but don't know what they are. since
+ // some of them *might* be directories, we have to run the
+ // match in dir-mode as well, so that we'll pick up partials
+ // of files that will be included later. Anything included
+ // at this point will be checked again later once we know
+ // what it is.
+ const filtered = this.entries.map(entry => {
+ // at this point, we don't know if it's a dir or not.
+ const passFile = this.filterEntry(entry)
+ const passDir = this.filterEntry(entry, true)
+ return (passFile || passDir) ? [entry, passFile, passDir] : false
+ }).filter(e => e)
+
+ // now we stat them all
+ // if it's a dir, and passes as a dir, then recurse
+ // if it's not a dir, but passes as a file, add to set
+ let entryCount = filtered.length
+ if (entryCount === 0) {
+ this.emit('done', this.result)
+ } else {
+ const then = _ => {
+ if (-- entryCount === 0)
+ this.emit('done', this.result)
+ }
+ filtered.forEach(filt => {
+ const entry = filt[0]
+ const file = filt[1]
+ const dir = filt[2]
+ this.stat(entry, file, dir, then)
+ })
+ }
+ }
+
+ onstat (st, entry, file, dir, then) {
+ const abs = this.path + '/' + entry
+ if (!st.isDirectory()) {
+ if (file)
+ this.result.push(abs.substr(this.root.length + 1))
+ then()
+ } else {
+ // is a directory
+ if (dir)
+ this.walker(entry, then)
+ else
+ then()
+ }
+ }
+
+ stat (entry, file, dir, then) {
+ const abs = this.path + '/' + entry
+ fs[this.follow ? 'stat' : 'lstat'](abs, (er, st) => {
+ if (er)
+ this.emit('error', er)
+ else
+ this.onstat(st, entry, file, dir, then)
+ })
+ }
+
+ walkerOpt (entry) {
+ return {
+ path: this.path + '/' + entry,
+ parent: this,
+ ignoreFiles: this.ignoreFiles,
+ follow: this.follow,
+ includeEmpty: this.includeEmpty
+ }
+ }
+
+ walker (entry, then) {
+ new Walker(this.walkerOpt(entry)).on('done', then).start()
+ }
+
+ filterEntry (entry, partial) {
+ let included = true
+
+ // this = /a/b/c
+ // entry = d
+ // parent /a/b sees c/d
+ if (this.parent && this.parent.filterEntry) {
+ var pt = this.basename + "/" + entry
+ included = this.parent.filterEntry(pt, partial)
+ }
+
+ this.ignoreFiles.forEach(f => {
+ if (this.ignoreRules[f]) {
+ this.ignoreRules[f].forEach(rule => {
+ // negation means inclusion
+ // so if it's negated, and already included, no need to check
+ // likewise if it's neither negated nor included
+ if (rule.negate !== included) {
+ // first, match against /foo/bar
+ // then, against foo/bar
+ // then, in the case of partials, match with a /
+ const match = rule.match('/' + entry) ||
+ rule.match(entry) ||
+ (!!partial && (
+ rule.match('/' + entry + '/') ||
+ rule.match(entry + '/'))) ||
+ (!!partial && rule.negate && (
+ rule.match('/' + entry, true) ||
+ rule.match(entry, true)))
+
+ if (match)
+ included = rule.negate
+ }
+ })
+ }
+ })
+
+ return included
+ }
+}
+
+class WalkerSync extends Walker {
+ constructor (opt) {
+ super(opt)
+ }
+
+ start () {
+ this.onReaddir(fs.readdirSync(this.path))
+ return this
+ }
+
+ addIgnoreFile (file, then) {
+ const ig = path.resolve(this.path, file)
+ this.onReadIgnoreFile(file, fs.readFileSync(ig, 'utf8'), then)
+ }
+
+ stat (entry, file, dir, then) {
+ const abs = this.path + '/' + entry
+ const st = fs[this.follow ? 'statSync' : 'lstatSync'](abs)
+ this.onstat(st, entry, file, dir, then)
+ }
+
+ walker (entry, then) {
+ new WalkerSync(this.walkerOpt(entry)).start()
+ then()
+ }
+}
+
+const walk = (options, callback) => {
+ const p = new Promise((resolve, reject) => {
+ new Walker(options).on('done', resolve).on('error', reject).start()
+ })
+ return callback ? p.then(res => callback(null, res), callback) : p
+}
+
+const walkSync = options => {
+ return new WalkerSync(options).start().result
+}
+
+module.exports = walk
+walk.sync = walkSync
+walk.Walker = Walker
+walk.WalkerSync = WalkerSync
diff --git a/node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/package.json b/node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/package.json
new file mode 100644
index 000000000..8dbddcce2
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-packlist/node_modules/ignore-walk/package.json
@@ -0,0 +1,71 @@
+{
+ "_from": "ignore-walk@^3.0.0",
+ "_id": "ignore-walk@3.0.0",
+ "_inBundle": false,
+ "_integrity": "sha512-tKHrQ70YReq6IFyAs/XAQy91mgLVpLExNh3HrjExr6vqg8FLq/vd27D4eAN0K2PodhLjiQu5Xc2Q+AkW/T7hKQ==",
+ "_location": "/pacote/npm-packlist/ignore-walk",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "ignore-walk@^3.0.0",
+ "name": "ignore-walk",
+ "escapedName": "ignore-walk",
+ "rawSpec": "^3.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^3.0.0"
+ },
+ "_requiredBy": [
+ "/pacote/npm-packlist"
+ ],
+ "_resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.0.tgz",
+ "_shasum": "e407919edee5c47c63473b319bfe3ea4a771a57e",
+ "_spec": "ignore-walk@^3.0.0",
+ "_where": "/Users/rebecca/code/npm/node_modules/pacote/node_modules/npm-packlist",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/ignore-walk/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "minimatch": "^3.0.4"
+ },
+ "deprecated": false,
+ "description": "Nested/recursive `.gitignore`/`.npmignore` parsing and filtering.",
+ "devDependencies": {
+ "mkdirp": "^0.5.1",
+ "mutate-fs": "^1.1.0",
+ "rimraf": "^2.6.1",
+ "tap": "^10.3.2"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/isaacs/ignore-walk#readme",
+ "keywords": [
+ "ignorefile",
+ "ignore",
+ "file",
+ ".gitignore",
+ ".npmignore",
+ "glob"
+ ],
+ "license": "ISC",
+ "main": "index.js",
+ "name": "ignore-walk",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/ignore-walk.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100"
+ },
+ "version": "3.0.0"
+}
diff --git a/node_modules/pacote/node_modules/npm-packlist/node_modules/npm-bundled/README.md b/node_modules/pacote/node_modules/npm-packlist/node_modules/npm-bundled/README.md
new file mode 100644
index 000000000..2974afa0e
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-packlist/node_modules/npm-bundled/README.md
@@ -0,0 +1,46 @@
+# npm-bundled
+
+Run this in a node package, and it'll tell you which things in
+node_modules are bundledDependencies, or transitive dependencies of
+bundled dependencies.
+
+## USAGE
+
+To get the list of deps at the top level that are bundled (or
+transitive deps of a bundled dep) run this:
+
+```js
+const bundled = require('npm-bundled')
+
+// async version
+bundled({ path: '/path/to/pkg/defaults/to/cwd'}, (er, list) => {
+ // er means it had an error, which is _hella_ weird
+ // list is a list of package names, like `fooblz` or `@corp/blerg`
+ // they might not all be deps of the top level, because transitives
+})
+
+// async promise version
+bundled({ path: '/path/to/pkg/defaults/to/cwd'}).then(list => {
+ // so promisey!
+ // actually the callback version returns a promise, too, it just
+ // attaches the supplied callback to the promise
+})
+
+// sync version, throws if there's an error
+const list = bundled.sync({ path: '/path/to/pkg/defaults/to/cwd'})
+```
+
+That's basically all you need to know. If you care to dig into it,
+you can also use the `bundled.Walker` and `bundled.WalkerSync`
+classes to get fancy.
+
+This library does not write anything to the filesystem, but it _may_
+have undefined behavior if the structure of `node_modules` changes
+while it's reading deps.
+
+All symlinks are followed. This means that it can lead to surprising
+results if a symlinked bundled dependency has a missing dependency
+that is satisfied at the top level. Since package creation resolves
+symlinks as well, this is an edge case where package creation and
+development environment are not going to be aligned, and is best
+avoided.
diff --git a/node_modules/pacote/node_modules/npm-packlist/node_modules/npm-bundled/index.js b/node_modules/pacote/node_modules/npm-packlist/node_modules/npm-bundled/index.js
new file mode 100644
index 000000000..dadd84734
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-packlist/node_modules/npm-bundled/index.js
@@ -0,0 +1,227 @@
+'use strict'
+
+// walk the tree of deps starting from the top level list of bundled deps
+// Any deps at the top level that are depended on by a bundled dep that
+// does not have that dep in its own node_modules folder are considered
+// bundled deps as well. This list of names can be passed to npm-packlist
+// as the "bundled" argument. Additionally, packageJsonCache is shared so
+// packlist doesn't have to re-read files already consumed in this pass
+
+const fs = require('fs')
+const path = require('path')
+const EE = require('events').EventEmitter
+
+class BundleWalker extends EE {
+ constructor (opt) {
+ opt = opt || {}
+ super(opt)
+ this.path = path.resolve(opt.path || process.cwd())
+
+ this.parent = opt.parent || null
+ if (this.parent) {
+ this.result = this.parent.result
+ // only collect results in node_modules folders at the top level
+ // since the node_modules in a bundled dep is included always
+ if (!this.parent.parent) {
+ const base = path.basename(this.path)
+ const scope = path.basename(path.dirname(this.path))
+ this.result.add(/^@/.test(scope) ? scope + '/' + base : base)
+ }
+ this.root = this.parent.root
+ this.packageJsonCache = this.parent.packageJsonCache
+ } else {
+ this.result = new Set()
+ this.root = this.path
+ this.packageJsonCache = opt.packageJsonCache || new Map()
+ }
+
+ this.didDone = false
+ this.children = 0
+ this.node_modules = []
+ this.package = null
+ this.bundle = null
+ }
+
+ done () {
+ if (!this.didDone) {
+ this.didDone = true
+ if (!this.parent) {
+ const res = Array.from(this.result)
+ this.result = res
+ this.emit('done', res)
+ } else {
+ this.emit('done')
+ }
+ }
+ }
+
+ start () {
+ const pj = this.path + '/package.json'
+ if (this.packageJsonCache.has(pj))
+ this.onPackage(this.packageJsonCache.get(pj))
+ else
+ this.readPackageJson(pj)
+ return this
+ }
+
+ readPackageJson (pj) {
+ fs.readFile(pj, (er, data) =>
+ er ? this.done() : this.onPackageJson(pj, data))
+ }
+
+ onPackageJson (pj, data) {
+ try {
+ this.package = JSON.parse(data + '')
+ } catch (er) {
+ return this.done()
+ }
+ this.packageJsonCache.set(pj, this.package)
+ this.onPackage(this.package)
+ }
+
+ onPackage (pkg) {
+ // all deps are bundled if we got here as a child.
+ // otherwise, only bundle bundledDeps
+ // Get a unique-ified array with a short-lived Set
+ const bdRaw = this.parent
+ ? Object.keys(pkg.dependencies || {}).concat(
+ Object.keys(pkg.optionalDependencies || {}))
+ : pkg.bundleDependencies || pkg.bundledDependencies || []
+
+ const bd = Array.from(new Set(
+ Array.isArray(bdRaw) ? bdRaw : Object.keys(bdRaw)))
+
+ if (!bd.length)
+ return this.done()
+
+ this.bundle = bd
+ const nm = this.path + '/node_modules'
+ this.readModules()
+ }
+
+ readModules () {
+ readdirNodeModules(this.path + '/node_modules', (er, nm) =>
+ er ? this.onReaddir([]) : this.onReaddir(nm))
+ }
+
+ onReaddir (nm) {
+ // keep track of what we have, in case children need it
+ this.node_modules = nm
+
+ this.bundle.forEach(dep => this.childDep(dep))
+ if (this.children === 0)
+ this.done()
+ }
+
+ childDep (dep) {
+ if (this.node_modules.indexOf(dep) !== -1) {
+ this.child(dep)
+ } else if (this.parent) {
+ this.parent.childDep(dep)
+ }
+ }
+
+ child (dep) {
+ const p = this.path + '/node_modules/' + dep
+ this.children += 1
+ const child = new BundleWalker({
+ path: p,
+ parent: this
+ })
+ child.on('done', _ => {
+ if (--this.children === 0)
+ this.done()
+ })
+ child.start()
+ }
+}
+
+class BundleWalkerSync extends BundleWalker {
+ constructor (opt) {
+ super(opt)
+ }
+
+ start () {
+ super.start()
+ this.done()
+ return this
+ }
+
+ readPackageJson (pj) {
+ try {
+ this.onPackageJson(pj, fs.readFileSync(pj))
+ } catch (er) {}
+ return this
+ }
+
+ readModules () {
+ try {
+ this.onReaddir(readdirNodeModulesSync(this.path + '/node_modules'))
+ } catch (er) {
+ this.onReaddir([])
+ }
+ }
+
+ child (dep) {
+ new BundleWalkerSync({
+ path: this.path + '/node_modules/' + dep,
+ parent: this
+ }).start()
+ }
+}
+
+const readdirNodeModules = (nm, cb) => {
+ fs.readdir(nm, (er, set) => {
+ if (er)
+ cb(er)
+ else {
+ const scopes = set.filter(f => /^@/.test(f))
+ if (!scopes.length)
+ cb(null, set)
+ else {
+ const unscoped = set.filter(f => !/^@/.test(f))
+ let count = scopes.length
+ scopes.forEach(scope => {
+ fs.readdir(nm + '/' + scope, (er, pkgs) => {
+ if (er || !pkgs.length)
+ unscoped.push(scope)
+ else
+ unscoped.push.apply(unscoped, pkgs.map(p => scope + '/' + p))
+ if (--count === 0)
+ cb(null, unscoped)
+ })
+ })
+ }
+ }
+ })
+}
+
+const readdirNodeModulesSync = nm => {
+ const set = fs.readdirSync(nm)
+ const unscoped = set.filter(f => !/^@/.test(f))
+ const scopes = set.filter(f => /^@/.test(f)).map(scope => {
+ try {
+ const pkgs = fs.readdirSync(nm + '/' + scope)
+ return pkgs.length ? pkgs.map(p => scope + '/' + p) : [scope]
+ } catch (er) {
+ return [scope]
+ }
+ }).reduce((a, b) => a.concat(b), [])
+ return unscoped.concat(scopes)
+}
+
+const walk = (options, callback) => {
+ const p = new Promise((resolve, reject) => {
+ new BundleWalker(options).on('done', resolve).on('error', reject).start()
+ })
+ return callback ? p.then(res => callback(null, res), callback) : p
+}
+
+const walkSync = options => {
+ return new BundleWalkerSync(options).start().result
+}
+
+module.exports = walk
+walk.sync = walkSync
+walk.BundleWalker = BundleWalker
+walk.BundleWalkerSync = BundleWalkerSync
diff --git a/node_modules/pacote/node_modules/npm-packlist/node_modules/npm-bundled/package.json b/node_modules/pacote/node_modules/npm-packlist/node_modules/npm-bundled/package.json
new file mode 100644
index 000000000..66f142383
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-packlist/node_modules/npm-bundled/package.json
@@ -0,0 +1,60 @@
+{
+ "_from": "npm-bundled@^1.0.1",
+ "_id": "npm-bundled@1.0.3",
+ "_inBundle": false,
+ "_integrity": "sha512-ByQ3oJ/5ETLyglU2+8dBObvhfWXX8dtPZDMePCahptliFX2iIuhyEszyFk401PZUNQH20vvdW5MLjJxkwU80Ow==",
+ "_location": "/pacote/npm-packlist/npm-bundled",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "npm-bundled@^1.0.1",
+ "name": "npm-bundled",
+ "escapedName": "npm-bundled",
+ "rawSpec": "^1.0.1",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.1"
+ },
+ "_requiredBy": [
+ "/pacote/npm-packlist"
+ ],
+ "_resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.0.3.tgz",
+ "_shasum": "7e71703d973af3370a9591bafe3a63aca0be2308",
+ "_spec": "npm-bundled@^1.0.1",
+ "_where": "/Users/rebecca/code/npm/node_modules/pacote/node_modules/npm-packlist",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/npm-bundled/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "list things in node_modules that are bundledDependencies, or transitive dependencies thereof",
+ "devDependencies": {
+ "mkdirp": "^0.5.1",
+ "mutate-fs": "^1.1.0",
+ "rimraf": "^2.6.1",
+ "tap": "^10.3.2"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/npm/npm-bundled#readme",
+ "license": "ISC",
+ "main": "index.js",
+ "name": "npm-bundled",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/npm-bundled.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js -J --100"
+ },
+ "version": "1.0.3"
+}
diff --git a/node_modules/pacote/node_modules/npm-packlist/package.json b/node_modules/pacote/node_modules/npm-packlist/package.json
new file mode 100644
index 000000000..0fb908f7f
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-packlist/package.json
@@ -0,0 +1,68 @@
+{
+ "_from": "npm-packlist@^1.1.6",
+ "_id": "npm-packlist@1.1.8",
+ "_inBundle": false,
+ "_integrity": "sha512-ZF/1c80o8g+rbJoe898m3p/gpquP/UK92vuTIw4wIVmoBZhRPGCPu8p+DJFV5MOa+HUT7CKVp+g9Hz+ayGW/+A==",
+ "_location": "/pacote/npm-packlist",
+ "_phantomChildren": {
+ "minimatch": "3.0.4"
+ },
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "npm-packlist@^1.1.6",
+ "name": "npm-packlist",
+ "escapedName": "npm-packlist",
+ "rawSpec": "^1.1.6",
+ "saveSpec": null,
+ "fetchSpec": "^1.1.6"
+ },
+ "_requiredBy": [
+ "/pacote"
+ ],
+ "_resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.1.8.tgz",
+ "_shasum": "aa7e56734ed038aba50724d79d0bc9c2acad372a",
+ "_spec": "npm-packlist@^1.1.6",
+ "_where": "/Users/rebecca/code/npm/node_modules/pacote",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/npm-packlist/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "ignore-walk": "^3.0.0",
+ "npm-bundled": "^1.0.1"
+ },
+ "deprecated": false,
+ "description": "Get a list of the files to add from a folder into an npm package",
+ "devDependencies": {
+ "mkdirp": "^0.5.1",
+ "rimraf": "^2.6.1",
+ "tap": "^10.3.2"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://www.npmjs.com/package/npm-packlist",
+ "license": "ISC",
+ "main": "index.js",
+ "name": "npm-packlist",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/npm-packlist.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100 -J"
+ },
+ "version": "1.1.8"
+}
diff --git a/node_modules/pacote/node_modules/tar-fs/.npmignore b/node_modules/pacote/node_modules/tar-fs/.npmignore
deleted file mode 100644
index 118a1375d..000000000
--- a/node_modules/pacote/node_modules/tar-fs/.npmignore
+++ /dev/null
@@ -1,2 +0,0 @@
-node_modules
-test/fixtures/copy
diff --git a/node_modules/pacote/node_modules/tar-fs/.travis.yml b/node_modules/pacote/node_modules/tar-fs/.travis.yml
deleted file mode 100644
index 6e5919de3..000000000
--- a/node_modules/pacote/node_modules/tar-fs/.travis.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-language: node_js
-node_js:
- - "0.10"
diff --git a/node_modules/pacote/node_modules/tar-fs/LICENSE b/node_modules/pacote/node_modules/tar-fs/LICENSE
deleted file mode 100644
index 757562ec5..000000000
--- a/node_modules/pacote/node_modules/tar-fs/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014 Mathias Buus
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE. \ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar-fs/README.md b/node_modules/pacote/node_modules/tar-fs/README.md
deleted file mode 100644
index 6cc3b077a..000000000
--- a/node_modules/pacote/node_modules/tar-fs/README.md
+++ /dev/null
@@ -1,143 +0,0 @@
-# tar-fs
-
-filesystem bindings for [tar-stream](https://github.com/mafintosh/tar-stream).
-
-```
-npm install tar-fs
-```
-
-[![build status](https://secure.travis-ci.org/mafintosh/tar-fs.png)](http://travis-ci.org/mafintosh/tar-fs)
-
-## Usage
-
-tar-fs allows you to pack directories into tarballs and extract tarballs into directories.
-
-It doesn't gunzip for you, so to extract a `.tar.gz` you'll need to use something like [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) in addition; see the sketch below the usage example.
-
-``` js
-var tar = require('tar-fs')
-var fs = require('fs')
-
-// packing a directory
-tar.pack('./my-directory').pipe(fs.createWriteStream('my-tarball.tar'))
-
-// extracting a directory
-fs.createReadStream('my-other-tarball.tar').pipe(tar.extract('./my-other-directory'))
-```
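-
-For extracting a possibly-gzipped tarball, a minimal sketch using [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) (assuming its default export is a factory returning a transform stream) could look like:
-
-``` js
-var tar = require('tar-fs')
-var fs = require('fs')
-var gunzip = require('gunzip-maybe')
-
-fs.createReadStream('my-tarball.tar.gz')
-  .pipe(gunzip()) // passes plain tar data through, gunzips compressed data
-  .pipe(tar.extract('./my-directory'))
-```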
-
-To ignore various files when packing or extracting, add an ignore function to the options. `ignore`
-is also an alias for `filter`. Additionally, `ignore` receives the entry `header` as a second
-argument when extracting, so you can also filter by metadata.
-
-``` js
-var pack = tar.pack('./my-directory', {
- ignore: function(name) {
- return path.extname(name) === '.bin' // ignore .bin files when packing
- }
-})
-
-var extract = tar.extract('./my-other-directory', {
- ignore: function(name) {
-    return path.extname(name) === '.bin' // ignore .bin files inside the tarball when extracting
- }
-})
-
-var extractFilesDirs = tar.extract('./my-other-other-directory', {
- ignore: function(_, header) {
- // pass files & directories, ignore e.g. symlinks
- return header.type !== 'file' && header.type !== 'directory'
- }
-})
-```
-
-You can also specify which entries to pack using the `entries` option
-
-```js
-var pack = tar.pack('./my-directory', {
- entries: ['file1', 'subdir/file2'] // only the specific entries will be packed
-})
-```
-
-If you want to modify the headers when packing/extracting add a map function to the options
-
-``` js
-var pack = tar.pack('./my-directory', {
- map: function(header) {
- header.name = 'prefixed/'+header.name
- return header
- }
-})
-
-var extract = tar.extract('./my-directory', {
- map: function(header) {
- header.name = 'another-prefix/'+header.name
- return header
- }
-})
-```
-
-Similarly, you can use `mapStream` in case you want to modify the input/output file streams.
-
-``` js
-var pack = tar.pack('./my-directory', {
- mapStream: function(fileStream, header) {
- if (path.extname(header.name) === '.js') {
- return fileStream.pipe(someTransform)
- }
- return fileStream;
- }
-})
-
-var extract = tar.extract('./my-directory', {
- mapStream: function(fileStream, header) {
- if (path.extname(header.name) === '.js') {
- return fileStream.pipe(someTransform)
- }
- return fileStream;
- }
-})
-```
-
-Set `options.fmode` and `options.dmode` to ensure that extracted files/directories have the corresponding modes.
-
-``` js
-var extract = tar.extract('./my-directory', {
- dmode: parseInt(555, 8), // all dirs should be readable
- fmode: parseInt(444, 8) // all files should be readable
-})
-```
-
-It can be useful to use `dmode` and `fmode` if you are packing/unpacking tarballs between *nix and windows to ensure that all files/directories unpacked are readable.
-
-Alternatively you can set `options.readable` and/or `options.writable` to set the dmode and fmode to readable/writable.
-
-``` js
-var extract = tar.extract('./my-directory', {
- readable: true, // all dirs and files should be readable
- writable: true, // all dirs and files should be writable
-})
-```
-
-Set `options.strict` to `false` if you want to ignore errors due to unsupported entry types (like device files).
-
-To dereference symlinks (pack the contents of the symlink instead of the link itself) set `options.dereference` to `true`.
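-
-A minimal sketch combining both options (directory names are placeholders):
-
-``` js
-var pack = tar.pack('./my-directory', {
-  dereference: true // pack the contents symlinks point at, not the links themselves
-})
-
-var extract = tar.extract('./my-other-directory', {
-  strict: false // skip unsupported entry types instead of destroying the stream
-})
-```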
-
-## Copy a directory
-
-Copying a directory with permissions and mtime intact is as simple as
-
-``` js
-tar.pack('source-directory').pipe(tar.extract('dest-directory'))
-```
-
-## Performance
-
-Packing and extracting a 6.1 GB directory with 2496 directories and 2398 files yields the following results on my MacBook Air.
-[See the benchmark here](https://gist.github.com/mafintosh/8102201)
-
-* tar-fs: 34.261 ms
-* [node-tar](https://github.com/isaacs/node-tar): 366.123 ms (or 10x slower)
-
-## License
-
-MIT
diff --git a/node_modules/pacote/node_modules/tar-fs/index.js b/node_modules/pacote/node_modules/tar-fs/index.js
deleted file mode 100644
index 4b345b7d0..000000000
--- a/node_modules/pacote/node_modules/tar-fs/index.js
+++ /dev/null
@@ -1,323 +0,0 @@
-var chownr = require('chownr')
-var tar = require('tar-stream')
-var pump = require('pump')
-var mkdirp = require('mkdirp')
-var fs = require('fs')
-var path = require('path')
-var os = require('os')
-
-var win32 = os.platform() === 'win32'
-
-var noop = function () {}
-
-var echo = function (name) {
- return name
-}
-
-var normalize = !win32 ? echo : function (name) {
- return name.replace(/\\/g, '/').replace(/[:?<>|]/g, '_')
-}
-
-var statAll = function (fs, stat, cwd, ignore, entries, sort) {
- var queue = entries || ['.']
-
- return function loop (callback) {
- if (!queue.length) return callback()
- var next = queue.shift()
- var nextAbs = path.join(cwd, next)
-
- stat(nextAbs, function (err, stat) {
- if (err) return callback(err)
-
- if (!stat.isDirectory()) return callback(null, next, stat)
-
- fs.readdir(nextAbs, function (err, files) {
- if (err) return callback(err)
-
- if (sort) files.sort()
- for (var i = 0; i < files.length; i++) {
- if (!ignore(path.join(cwd, next, files[i]))) queue.push(path.join(next, files[i]))
- }
-
- callback(null, next, stat)
- })
- })
- }
-}
-
-var strip = function (map, level) {
- return function (header) {
- header.name = header.name.split('/').slice(level).join('/')
-
- var linkname = header.linkname
- if (linkname && (header.type === 'link' || path.isAbsolute(linkname))) {
- header.linkname = linkname.split('/').slice(level).join('/')
- }
-
- return map(header)
- }
-}
-
-exports.pack = function (cwd, opts) {
- if (!cwd) cwd = '.'
- if (!opts) opts = {}
-
- var xfs = opts.fs || fs
- var ignore = opts.ignore || opts.filter || noop
- var map = opts.map || noop
- var mapStream = opts.mapStream || echo
- var statNext = statAll(xfs, opts.dereference ? xfs.stat : xfs.lstat, cwd, ignore, opts.entries, opts.sort)
- var strict = opts.strict !== false
- var umask = typeof opts.umask === 'number' ? ~opts.umask : ~processUmask()
- var dmode = typeof opts.dmode === 'number' ? opts.dmode : 0
- var fmode = typeof opts.fmode === 'number' ? opts.fmode : 0
- var pack = opts.pack || tar.pack()
-
- if (opts.strip) map = strip(map, opts.strip)
-
- if (opts.readable) {
- dmode |= parseInt(555, 8)
- fmode |= parseInt(444, 8)
- }
- if (opts.writable) {
- dmode |= parseInt(333, 8)
- fmode |= parseInt(222, 8)
- }
-
- var onsymlink = function (filename, header) {
- xfs.readlink(path.join(cwd, filename), function (err, linkname) {
- if (err) return pack.destroy(err)
- header.linkname = normalize(linkname)
- pack.entry(header, onnextentry)
- })
- }
-
- var onstat = function (err, filename, stat) {
- if (err) return pack.destroy(err)
- if (!filename) return pack.finalize()
-
- if (stat.isSocket()) return onnextentry() // tar does not support sockets...
-
- var header = {
- name: normalize(filename),
- mode: (stat.mode | (stat.isDirectory() ? dmode : fmode)) & umask,
- mtime: stat.mtime,
- size: stat.size,
- type: 'file',
- uid: stat.uid,
- gid: stat.gid
- }
-
- if (stat.isDirectory()) {
- header.size = 0
- header.type = 'directory'
- header = map(header) || header
- return pack.entry(header, onnextentry)
- }
-
- if (stat.isSymbolicLink()) {
- header.size = 0
- header.type = 'symlink'
- header = map(header) || header
- return onsymlink(filename, header)
- }
-
- // TODO: add fifo etc...
-
- header = map(header) || header
-
- if (!stat.isFile()) {
- if (strict) return pack.destroy(new Error('unsupported type for ' + filename))
- return onnextentry()
- }
-
- var entry = pack.entry(header, onnextentry)
- if (!entry) return
-
- var rs = mapStream(xfs.createReadStream(path.join(cwd, filename)), header)
-
- rs.on('error', function (err) { // always forward errors on destroy
- entry.destroy(err)
- })
-
- pump(rs, entry)
- }
-
- var onnextentry = function (err) {
- if (err) return pack.destroy(err)
- statNext(onstat)
- }
-
- onnextentry()
-
- return pack
-}
-
-var head = function (list) {
- return list.length ? list[list.length - 1] : null
-}
-
-var processGetuid = function () {
- return process.getuid ? process.getuid() : -1
-}
-
-var processUmask = function () {
- return process.umask ? process.umask() : 0
-}
-
-exports.extract = function (cwd, opts) {
- if (!cwd) cwd = '.'
- if (!opts) opts = {}
-
- var xfs = opts.fs || fs
- var ignore = opts.ignore || opts.filter || noop
- var map = opts.map || noop
- var mapStream = opts.mapStream || echo
- var own = opts.chown !== false && !win32 && processGetuid() === 0
- var extract = opts.extract || tar.extract()
- var stack = []
- var now = new Date()
- var umask = typeof opts.umask === 'number' ? ~opts.umask : ~processUmask()
- var dmode = typeof opts.dmode === 'number' ? opts.dmode : 0
- var fmode = typeof opts.fmode === 'number' ? opts.fmode : 0
- var strict = opts.strict !== false
-
- if (opts.strip) map = strip(map, opts.strip)
-
- if (opts.readable) {
- dmode |= parseInt(555, 8)
- fmode |= parseInt(444, 8)
- }
- if (opts.writable) {
- dmode |= parseInt(333, 8)
- fmode |= parseInt(222, 8)
- }
-
-  var utimesParent = function (name, cb) { // we just set the mtime on the parent dir again every time we write an entry
- var top
- while ((top = head(stack)) && name.slice(0, top[0].length) !== top[0]) stack.pop()
- if (!top) return cb()
- xfs.utimes(top[0], now, top[1], cb)
- }
-
- var utimes = function (name, header, cb) {
- if (opts.utimes === false) return cb()
-
- if (header.type === 'directory') return xfs.utimes(name, now, header.mtime, cb)
- if (header.type === 'symlink') return utimesParent(name, cb) // TODO: how to set mtime on link?
-
- xfs.utimes(name, now, header.mtime, function (err) {
- if (err) return cb(err)
- utimesParent(name, cb)
- })
- }
-
- var chperm = function (name, header, cb) {
- var link = header.type === 'symlink'
- var chmod = link ? xfs.lchmod : xfs.chmod
- var chown = link ? xfs.lchown : xfs.chown
-
- if (!chmod) return cb()
-
- var mode = (header.mode | (header.type === 'directory' ? dmode : fmode)) & umask
- chmod(name, mode, function (err) {
- if (err) return cb(err)
- if (!own) return cb()
- if (!chown) return cb()
- chown(name, header.uid, header.gid, cb)
- })
- }
-
- extract.on('entry', function (header, stream, next) {
- header = map(header) || header
- header.name = normalize(header.name)
- var name = path.join(cwd, path.join('/', header.name))
-
- if (ignore(name, header)) {
- stream.resume()
- return next()
- }
-
- var stat = function (err) {
- if (err) return next(err)
- utimes(name, header, function (err) {
- if (err) return next(err)
- if (win32) return next()
- chperm(name, header, next)
- })
- }
-
- var onsymlink = function () {
- if (win32) return next() // skip symlinks on win for now before it can be tested
- xfs.unlink(name, function () {
- xfs.symlink(header.linkname, name, stat)
- })
- }
-
- var onlink = function () {
- if (win32) return next() // skip links on win for now before it can be tested
- xfs.unlink(name, function () {
- var srcpath = path.resolve(cwd, header.linkname)
-
- xfs.link(srcpath, name, function (err) {
- if (err && err.code === 'EPERM' && opts.hardlinkAsFilesFallback) {
- stream = xfs.createReadStream(srcpath)
- return onfile()
- }
-
- stat(err)
- })
- })
- }
-
- var onfile = function () {
- var ws = xfs.createWriteStream(name)
- var rs = mapStream(stream, header)
-
- ws.on('error', function (err) { // always forward errors on destroy
- rs.destroy(err)
- })
-
- pump(rs, ws, function (err) {
- if (err) return next(err)
- ws.on('close', stat)
- })
- }
-
- if (header.type === 'directory') {
- stack.push([name, header.mtime])
- return mkdirfix(name, {
- fs: xfs, own: own, uid: header.uid, gid: header.gid
- }, stat)
- }
-
- mkdirfix(path.dirname(name), {
- fs: xfs, own: own, uid: header.uid, gid: header.gid
- }, function (err) {
- if (err) return next(err)
-
- switch (header.type) {
- case 'file': return onfile()
- case 'link': return onlink()
- case 'symlink': return onsymlink()
- }
-
- if (strict) return next(new Error('unsupported type for ' + name + ' (' + header.type + ')'))
-
- stream.resume()
- next()
- })
- })
-
- return extract
-}
-
-function mkdirfix (name, opts, cb) {
- mkdirp(name, {fs: opts.xfs}, function (err, made) {
- if (!err && made && opts.own) {
- chownr(made, opts.uid, opts.gid, cb)
- } else {
- cb(err)
- }
- })
-}
diff --git a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/.npmignore b/node_modules/pacote/node_modules/tar-fs/node_modules/pump/.npmignore
deleted file mode 100644
index 3c3629e64..000000000
--- a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/.npmignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules
diff --git a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/.travis.yml b/node_modules/pacote/node_modules/tar-fs/node_modules/pump/.travis.yml
deleted file mode 100644
index 17f94330e..000000000
--- a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/.travis.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-language: node_js
-node_js:
- - "0.10"
-
-script: "npm test"
diff --git a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/LICENSE b/node_modules/pacote/node_modules/tar-fs/node_modules/pump/LICENSE
deleted file mode 100644
index 757562ec5..000000000
--- a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014 Mathias Buus
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE. \ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/README.md b/node_modules/pacote/node_modules/tar-fs/node_modules/pump/README.md
deleted file mode 100644
index 5029b27d6..000000000
--- a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/README.md
+++ /dev/null
@@ -1,56 +0,0 @@
-# pump
-
-pump is a small node module that pipes streams together and destroys all of them if one of them closes.
-
-```
-npm install pump
-```
-
-[![build status](http://img.shields.io/travis/mafintosh/pump.svg?style=flat)](http://travis-ci.org/mafintosh/pump)
-
-## What problem does it solve?
-
-When using standard `source.pipe(dest)`, source will _not_ be destroyed if dest emits close or an error.
-You are also not able to provide a callback to tell when the pipe has finished.
-
-pump does these two things for you.
-
-## Usage
-
-Simply pass the streams you want to pipe together to pump and add an optional callback
-
-``` js
-var pump = require('pump')
-var fs = require('fs')
-
-var source = fs.createReadStream('/dev/random')
-var dest = fs.createWriteStream('/dev/null')
-
-pump(source, dest, function(err) {
- console.log('pipe finished', err)
-})
-
-setTimeout(function() {
- dest.destroy() // when dest is closed pump will destroy source
-}, 1000)
-```
-
-You can use pump to pipe more than two streams together as well
-
-``` js
-var transform = someTransformStream()
-
-pump(source, transform, anotherTransform, dest, function(err) {
- console.log('pipe finished', err)
-})
-```
-
-If `source`, `transform`, `anotherTransform` or `dest` closes, all of them will be destroyed.
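-
-`pump` also returns the last stream you pass in (see the `streams.reduce(pipe)` line in the implementation), so a sketch like the following keeps a handle on the destination:
-
-``` js
-var dest = pump(source, transform, fs.createWriteStream('/dev/null'), function(err) {
-  console.log('pipe finished', err)
-})
-```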
-
-## License
-
-MIT
-
-## Related
-
-`pump` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one.
diff --git a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/index.js b/node_modules/pacote/node_modules/tar-fs/node_modules/pump/index.js
deleted file mode 100644
index 060ce5f4f..000000000
--- a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/index.js
+++ /dev/null
@@ -1,80 +0,0 @@
-var once = require('once')
-var eos = require('end-of-stream')
-var fs = require('fs') // we only need fs to get the ReadStream and WriteStream prototypes
-
-var noop = function () {}
-
-var isFn = function (fn) {
- return typeof fn === 'function'
-}
-
-var isFS = function (stream) {
- if (!fs) return false // browser
- return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close)
-}
-
-var isRequest = function (stream) {
- return stream.setHeader && isFn(stream.abort)
-}
-
-var destroyer = function (stream, reading, writing, callback) {
- callback = once(callback)
-
- var closed = false
- stream.on('close', function () {
- closed = true
- })
-
- eos(stream, {readable: reading, writable: writing}, function (err) {
- if (err) return callback(err)
- closed = true
- callback()
- })
-
- var destroyed = false
- return function (err) {
- if (closed) return
- if (destroyed) return
- destroyed = true
-
- if (isFS(stream)) return stream.close() // use close for fs streams to avoid fd leaks
-  if (isRequest(stream)) return stream.abort() // request.destroy just does .end - .abort is what we want
-
- if (isFn(stream.destroy)) return stream.destroy()
-
- callback(err || new Error('stream was destroyed'))
- }
-}
-
-var call = function (fn) {
- fn()
-}
-
-var pipe = function (from, to) {
- return from.pipe(to)
-}
-
-var pump = function () {
- var streams = Array.prototype.slice.call(arguments)
- var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop
-
- if (Array.isArray(streams[0])) streams = streams[0]
- if (streams.length < 2) throw new Error('pump requires two streams per minimum')
-
- var error
- var destroys = streams.map(function (stream, i) {
- var reading = i < streams.length - 1
- var writing = i > 0
- return destroyer(stream, reading, writing, function (err) {
- if (!error) error = err
- if (err) destroys.forEach(call)
- if (reading) return
- destroys.forEach(call)
- callback(error)
- })
- })
-
- return streams.reduce(pipe)
-}
-
-module.exports = pump
diff --git a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/LICENSE b/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/LICENSE
deleted file mode 100644
index 757562ec5..000000000
--- a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014 Mathias Buus
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE. \ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/README.md b/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/README.md
deleted file mode 100644
index f2560c939..000000000
--- a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/README.md
+++ /dev/null
@@ -1,52 +0,0 @@
-# end-of-stream
-
-A node module that calls a callback when a readable/writable/duplex stream has completed or failed.
-
- npm install end-of-stream
-
-## Usage
-
-Simply pass a stream and a callback to `eos`.
-Legacy streams, streams2, and streams3 are all supported.
-
-``` js
-var eos = require('end-of-stream');
-
-eos(readableStream, function(err) {
- // this will be set to the stream instance
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has ended', this === readableStream);
-});
-
-eos(writableStream, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has finished', this === writableStream);
-});
-
-eos(duplexStream, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has ended and finished', this === duplexStream);
-});
-
-eos(duplexStream, {readable:false}, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has finished but might still be readable');
-});
-
-eos(duplexStream, {writable:false}, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has ended but might still be writable');
-});
-
-eos(readableStream, {error:false}, function(err) {
-  // do not treat emit('error', err) as an end-of-stream
-});
-```
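-
-As the implementation shows, `eos` also returns a function that removes every listener it attached, so you can stop waiting on a stream early:
-
-``` js
-var detach = eos(readableStream, function(err) {
-  // not called after detach() has run
-});
-
-detach(); // no longer interested in this stream's outcome
-```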
-
-## License
-
-MIT
-
-## Related
-
-`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one.
diff --git a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/index.js b/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/index.js
deleted file mode 100644
index b3a906863..000000000
--- a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/index.js
+++ /dev/null
@@ -1,83 +0,0 @@
-var once = require('once');
-
-var noop = function() {};
-
-var isRequest = function(stream) {
- return stream.setHeader && typeof stream.abort === 'function';
-};
-
-var isChildProcess = function(stream) {
- return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
-};
-
-var eos = function(stream, opts, callback) {
- if (typeof opts === 'function') return eos(stream, null, opts);
- if (!opts) opts = {};
-
- callback = once(callback || noop);
-
- var ws = stream._writableState;
- var rs = stream._readableState;
- var readable = opts.readable || (opts.readable !== false && stream.readable);
- var writable = opts.writable || (opts.writable !== false && stream.writable);
-
- var onlegacyfinish = function() {
- if (!stream.writable) onfinish();
- };
-
- var onfinish = function() {
- writable = false;
- if (!readable) callback.call(stream);
- };
-
- var onend = function() {
- readable = false;
- if (!writable) callback.call(stream);
- };
-
- var onexit = function(exitCode) {
- callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
- };
-
- var onclose = function() {
- if (readable && !(rs && rs.ended)) return callback.call(stream, new Error('premature close'));
- if (writable && !(ws && ws.ended)) return callback.call(stream, new Error('premature close'));
- };
-
- var onrequest = function() {
- stream.req.on('finish', onfinish);
- };
-
- if (isRequest(stream)) {
- stream.on('complete', onfinish);
- stream.on('abort', onclose);
- if (stream.req) onrequest();
- else stream.on('request', onrequest);
- } else if (writable && !ws) { // legacy streams
- stream.on('end', onlegacyfinish);
- stream.on('close', onlegacyfinish);
- }
-
- if (isChildProcess(stream)) stream.on('exit', onexit);
-
- stream.on('end', onend);
- stream.on('finish', onfinish);
- if (opts.error !== false) stream.on('error', callback);
- stream.on('close', onclose);
-
- return function() {
- stream.removeListener('complete', onfinish);
- stream.removeListener('abort', onclose);
- stream.removeListener('request', onrequest);
- if (stream.req) stream.req.removeListener('finish', onfinish);
- stream.removeListener('end', onlegacyfinish);
- stream.removeListener('close', onlegacyfinish);
- stream.removeListener('finish', onfinish);
- stream.removeListener('exit', onexit);
- stream.removeListener('end', onend);
- stream.removeListener('error', callback);
- stream.removeListener('close', onclose);
- };
-};
-
-module.exports = eos;
diff --git a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/package.json b/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/package.json
deleted file mode 100644
index 87562d3ce..000000000
--- a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/node_modules/end-of-stream/package.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "_from": "end-of-stream@^1.1.0",
- "_id": "end-of-stream@1.4.0",
- "_inBundle": false,
- "_integrity": "sha1-epDYM+/abPpurA9JSduw+tOmMgY=",
- "_location": "/pacote/tar-fs/pump/end-of-stream",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "end-of-stream@^1.1.0",
- "name": "end-of-stream",
- "escapedName": "end-of-stream",
- "rawSpec": "^1.1.0",
- "saveSpec": null,
- "fetchSpec": "^1.1.0"
- },
- "_requiredBy": [
- "/pacote/tar-fs/pump"
- ],
- "_resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.0.tgz",
- "_shasum": "7a90d833efda6cfa6eac0f4949dbb0fad3a63206",
- "_spec": "end-of-stream@^1.1.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/tar-fs/node_modules/pump",
- "author": {
- "name": "Mathias Buus",
- "email": "mathiasbuus@gmail.com"
- },
- "bugs": {
- "url": "https://github.com/mafintosh/end-of-stream/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "once": "^1.4.0"
- },
- "deprecated": false,
- "description": "Call a callback when a readable/writable/duplex stream has completed or failed.",
- "files": [
- "index.js"
- ],
- "homepage": "https://github.com/mafintosh/end-of-stream",
- "keywords": [
- "stream",
- "streams",
- "callback",
- "finish",
- "close",
- "end",
- "wait"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "end-of-stream",
- "repository": {
- "type": "git",
- "url": "git://github.com/mafintosh/end-of-stream.git"
- },
- "scripts": {
- "test": "node test.js"
- },
- "version": "1.4.0"
-}
diff --git a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/package.json b/node_modules/pacote/node_modules/tar-fs/node_modules/pump/package.json
deleted file mode 100644
index 52bd98e40..000000000
--- a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/package.json
+++ /dev/null
@@ -1,61 +0,0 @@
-{
- "_from": "pump@^1.0.0",
- "_id": "pump@1.0.2",
- "_inBundle": false,
- "_integrity": "sha1-Oz7mUS+U8OV1U4wXmV+fFpkKXVE=",
- "_location": "/pacote/tar-fs/pump",
- "_phantomChildren": {
- "once": "1.4.0"
- },
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "pump@^1.0.0",
- "name": "pump",
- "escapedName": "pump",
- "rawSpec": "^1.0.0",
- "saveSpec": null,
- "fetchSpec": "^1.0.0"
- },
- "_requiredBy": [
- "/pacote/tar-fs"
- ],
- "_resolved": "https://registry.npmjs.org/pump/-/pump-1.0.2.tgz",
- "_shasum": "3b3ee6512f94f0e575538c17995f9f16990a5d51",
- "_spec": "pump@^1.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/tar-fs",
- "author": {
- "name": "Mathias Buus Madsen",
- "email": "mathiasbuus@gmail.com"
- },
- "browser": {
- "fs": false
- },
- "bugs": {
- "url": "https://github.com/mafintosh/pump/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "end-of-stream": "^1.1.0",
- "once": "^1.3.1"
- },
- "deprecated": false,
- "description": "pipe streams together and close all of them if one of them closes",
- "homepage": "https://github.com/mafintosh/pump#readme",
- "keywords": [
- "streams",
- "pipe",
- "destroy",
- "callback"
- ],
- "license": "MIT",
- "name": "pump",
- "repository": {
- "type": "git",
- "url": "git://github.com/mafintosh/pump.git"
- },
- "scripts": {
- "test": "node test.js"
- },
- "version": "1.0.2"
-}
diff --git a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/test-browser.js b/node_modules/pacote/node_modules/tar-fs/node_modules/pump/test-browser.js
deleted file mode 100644
index 80e852c7d..000000000
--- a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/test-browser.js
+++ /dev/null
@@ -1,58 +0,0 @@
-var stream = require('stream')
-var pump = require('./index')
-
-var rs = new stream.Readable()
-var ws = new stream.Writable()
-
-rs._read = function (size) {
- this.push(Buffer(size).fill('abc'))
-}
-
-ws._write = function (chunk, encoding, cb) {
- setTimeout(function () {
- cb()
- }, 100)
-}
-
-var toHex = function () {
- var reverse = new (require('stream').Transform)()
-
- reverse._transform = function (chunk, enc, callback) {
- reverse.push(chunk.toString('hex'))
- callback()
- }
-
- return reverse
-}
-
-var wsClosed = false
-var rsClosed = false
-var callbackCalled = false
-
-var check = function () {
- if (wsClosed && rsClosed && callbackCalled) console.log('done')
-}
-
-ws.on('finish', function () {
- wsClosed = true
- check()
-})
-
-rs.on('end', function () {
- rsClosed = true
- check()
-})
-
-pump(rs, toHex(), toHex(), toHex(), ws, function () {
- callbackCalled = true
- check()
-})
-
-setTimeout(function () {
- rs.push(null)
- rs.emit('close')
-}, 1000)
-
-setTimeout(function () {
- if (!check()) throw new Error('timeout')
-}, 5000)
diff --git a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/test.js b/node_modules/pacote/node_modules/tar-fs/node_modules/pump/test.js
deleted file mode 100644
index 64e772ca5..000000000
--- a/node_modules/pacote/node_modules/tar-fs/node_modules/pump/test.js
+++ /dev/null
@@ -1,46 +0,0 @@
-var pump = require('./index')
-
-var rs = require('fs').createReadStream('/dev/random')
-var ws = require('fs').createWriteStream('/dev/null')
-
-var toHex = function () {
- var reverse = new (require('stream').Transform)()
-
- reverse._transform = function (chunk, enc, callback) {
- reverse.push(chunk.toString('hex'))
- callback()
- }
-
- return reverse
-}
-
-var wsClosed = false
-var rsClosed = false
-var callbackCalled = false
-
-var check = function () {
- if (wsClosed && rsClosed && callbackCalled) process.exit(0)
-}
-
-ws.on('close', function () {
- wsClosed = true
- check()
-})
-
-rs.on('close', function () {
- rsClosed = true
- check()
-})
-
-pump(rs, toHex(), toHex(), toHex(), ws, function () {
- callbackCalled = true
- check()
-})
-
-setTimeout(function () {
- rs.destroy()
-}, 1000)
-
-setTimeout(function () {
- throw new Error('timeout')
-}, 5000)
diff --git a/node_modules/pacote/node_modules/tar-fs/package.json b/node_modules/pacote/node_modules/tar-fs/package.json
deleted file mode 100644
index a740eb614..000000000
--- a/node_modules/pacote/node_modules/tar-fs/package.json
+++ /dev/null
@@ -1,70 +0,0 @@
-{
- "_from": "tar-fs@^1.15.3",
- "_id": "tar-fs@1.15.3",
- "_inBundle": false,
- "_integrity": "sha1-7M+TXpQUk9gVECjmNuUc5MPKfyA=",
- "_location": "/pacote/tar-fs",
- "_phantomChildren": {
- "once": "1.4.0"
- },
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "tar-fs@^1.15.3",
- "name": "tar-fs",
- "escapedName": "tar-fs",
- "rawSpec": "^1.15.3",
- "saveSpec": null,
- "fetchSpec": "^1.15.3"
- },
- "_requiredBy": [
- "/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-1.15.3.tgz",
- "_shasum": "eccf935e941493d8151028e636e51ce4c3ca7f20",
- "_spec": "tar-fs@^1.15.3",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote",
- "author": {
- "name": "Mathias Buus"
- },
- "bugs": {
- "url": "https://github.com/mafintosh/tar-fs/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "chownr": "^1.0.1",
- "mkdirp": "^0.5.1",
- "pump": "^1.0.0",
- "tar-stream": "^1.1.2"
- },
- "deprecated": false,
- "description": "filesystem bindings for tar-stream",
- "devDependencies": {
- "rimraf": "^2.2.8",
- "standard": "^4.5.4",
- "tape": "^3.0.0"
- },
- "directories": {
- "test": "test"
- },
- "homepage": "https://github.com/mafintosh/tar-fs",
- "keywords": [
- "tar",
- "fs",
- "file",
- "tarball",
- "directory",
- "stream"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "tar-fs",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/mafintosh/tar-fs.git"
- },
- "scripts": {
- "test": "standard && tape test/index.js"
- },
- "version": "1.15.3"
-}
diff --git a/node_modules/pacote/node_modules/tar-fs/test/fixtures/a/hello.txt b/node_modules/pacote/node_modules/tar-fs/test/fixtures/a/hello.txt
deleted file mode 100644
index 3b18e512d..000000000
--- a/node_modules/pacote/node_modules/tar-fs/test/fixtures/a/hello.txt
+++ /dev/null
@@ -1 +0,0 @@
-hello world
diff --git a/node_modules/pacote/node_modules/tar-fs/test/fixtures/b/a/test.txt b/node_modules/pacote/node_modules/tar-fs/test/fixtures/b/a/test.txt
deleted file mode 100644
index 9daeafb98..000000000
--- a/node_modules/pacote/node_modules/tar-fs/test/fixtures/b/a/test.txt
+++ /dev/null
@@ -1 +0,0 @@
-test
diff --git a/node_modules/pacote/node_modules/tar-fs/test/fixtures/c/.npmignore b/node_modules/pacote/node_modules/tar-fs/test/fixtures/c/.npmignore
deleted file mode 100644
index 2b2328d77..000000000
--- a/node_modules/pacote/node_modules/tar-fs/test/fixtures/c/.npmignore
+++ /dev/null
@@ -1 +0,0 @@
-link
diff --git a/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/file2 b/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/file2
deleted file mode 100644
index e69de29bb..000000000
--- a/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/file2
+++ /dev/null
diff --git a/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-dir/file5 b/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-dir/file5
deleted file mode 100644
index e69de29bb..000000000
--- a/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-dir/file5
+++ /dev/null
diff --git a/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-files/file3 b/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-files/file3
deleted file mode 100644
index e69de29bb..000000000
--- a/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-files/file3
+++ /dev/null
diff --git a/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-files/file4 b/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-files/file4
deleted file mode 100644
index e69de29bb..000000000
--- a/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/sub-files/file4
+++ /dev/null
diff --git a/node_modules/pacote/node_modules/tar-fs/test/fixtures/e/directory/.ignore b/node_modules/pacote/node_modules/tar-fs/test/fixtures/e/directory/.ignore
deleted file mode 100644
index e69de29bb..000000000
--- a/node_modules/pacote/node_modules/tar-fs/test/fixtures/e/directory/.ignore
+++ /dev/null
diff --git a/node_modules/pacote/node_modules/tar-fs/test/fixtures/e/file b/node_modules/pacote/node_modules/tar-fs/test/fixtures/e/file
deleted file mode 100644
index e69de29bb..000000000
--- a/node_modules/pacote/node_modules/tar-fs/test/fixtures/e/file
+++ /dev/null
diff --git a/node_modules/pacote/node_modules/tar-fs/test/index.js b/node_modules/pacote/node_modules/tar-fs/test/index.js
deleted file mode 100644
index 01ca87f2a..000000000
--- a/node_modules/pacote/node_modules/tar-fs/test/index.js
+++ /dev/null
@@ -1,227 +0,0 @@
-var test = require('tape')
-var rimraf = require('rimraf')
-var tar = require('../index')
-var path = require('path')
-var fs = require('fs')
-var os = require('os')
-
-var win32 = os.platform() === 'win32'
-
-var mtime = function (st) {
- return Math.floor(st.mtime.getTime() / 1000)
-}
-
-test('copy a -> copy/a', function (t) {
- t.plan(5)
-
- var a = path.join(__dirname, 'fixtures', 'a')
- var b = path.join(__dirname, 'fixtures', 'copy', 'a')
-
- rimraf.sync(b)
- tar.pack(a)
- .pipe(tar.extract(b))
- .on('finish', function () {
- var files = fs.readdirSync(b)
- t.same(files.length, 1)
- t.same(files[0], 'hello.txt')
- var fileB = path.join(b, files[0])
- var fileA = path.join(a, files[0])
- t.same(fs.readFileSync(fileB, 'utf-8'), fs.readFileSync(fileA, 'utf-8'))
- t.same(fs.statSync(fileB).mode, fs.statSync(fileA).mode)
- t.same(mtime(fs.statSync(fileB)), mtime(fs.statSync(fileA)))
- })
-})
-
-test('copy b -> copy/b', function (t) {
- t.plan(8)
-
- var a = path.join(__dirname, 'fixtures', 'b')
- var b = path.join(__dirname, 'fixtures', 'copy', 'b')
-
- rimraf.sync(b)
- tar.pack(a)
- .pipe(tar.extract(b))
- .on('finish', function () {
- var files = fs.readdirSync(b)
- t.same(files.length, 1)
- t.same(files[0], 'a')
- var dirB = path.join(b, files[0])
- var dirA = path.join(a, files[0])
- t.same(fs.statSync(dirB).mode, fs.statSync(dirA).mode)
- t.same(mtime(fs.statSync(dirB)), mtime(fs.statSync(dirA)))
- t.ok(fs.statSync(dirB).isDirectory())
- var fileB = path.join(dirB, 'test.txt')
- var fileA = path.join(dirA, 'test.txt')
- t.same(fs.readFileSync(fileB, 'utf-8'), fs.readFileSync(fileA, 'utf-8'))
- t.same(fs.statSync(fileB).mode, fs.statSync(fileA).mode)
- t.same(mtime(fs.statSync(fileB)), mtime(fs.statSync(fileA)))
- })
-})
-
-test('symlink', function (t) {
- if (win32) { // no symlink support on win32 currently. TODO: test if this can be enabled somehow
- t.plan(1)
- t.ok(true)
- return
- }
-
- t.plan(5)
-
- var a = path.join(__dirname, 'fixtures', 'c')
-
- rimraf.sync(path.join(a, 'link'))
- fs.symlinkSync('.gitignore', path.join(a, 'link'))
-
- var b = path.join(__dirname, 'fixtures', 'copy', 'c')
-
- rimraf.sync(b)
- tar.pack(a)
- .pipe(tar.extract(b))
- .on('finish', function () {
- var files = fs.readdirSync(b).sort()
- t.same(files.length, 2)
- t.same(files[0], '.gitignore')
- t.same(files[1], 'link')
-
- var linkA = path.join(a, 'link')
- var linkB = path.join(b, 'link')
-
- t.same(mtime(fs.lstatSync(linkB)), mtime(fs.lstatSync(linkA)))
- t.same(fs.readlinkSync(linkB), fs.readlinkSync(linkA))
- })
-})
-
-test('follow symlinks', function (t) {
- if (win32) { // no symlink support on win32 currently. TODO: test if this can be enabled somehow
- t.plan(1)
- t.ok(true)
- return
- }
-
- t.plan(5)
-
- var a = path.join(__dirname, 'fixtures', 'c')
-
- rimraf.sync(path.join(a, 'link'))
- fs.symlinkSync('.gitignore', path.join(a, 'link'))
-
- var b = path.join(__dirname, 'fixtures', 'copy', 'c-dereference')
-
- rimraf.sync(b)
- tar.pack(a, {dereference: true})
- .pipe(tar.extract(b))
- .on('finish', function () {
- var files = fs.readdirSync(b).sort()
- t.same(files.length, 2)
- t.same(files[0], '.gitignore')
- t.same(files[1], 'link')
-
- var file1 = path.join(b, '.gitignore')
- var file2 = path.join(b, 'link')
-
- t.same(mtime(fs.lstatSync(file1)), mtime(fs.lstatSync(file2)))
- t.same(fs.readFileSync(file1), fs.readFileSync(file2))
- })
-})
-
-test('strip', function (t) {
- t.plan(2)
-
- var a = path.join(__dirname, 'fixtures', 'b')
- var b = path.join(__dirname, 'fixtures', 'copy', 'b-strip')
-
- rimraf.sync(b)
-
- tar.pack(a)
- .pipe(tar.extract(b, {strip: 1}))
- .on('finish', function () {
- var files = fs.readdirSync(b).sort()
- t.same(files.length, 1)
- t.same(files[0], 'test.txt')
- })
-})
-
-test('strip + map', function (t) {
- t.plan(2)
-
- var a = path.join(__dirname, 'fixtures', 'b')
- var b = path.join(__dirname, 'fixtures', 'copy', 'b-strip')
-
- rimraf.sync(b)
-
- var uppercase = function (header) {
- header.name = header.name.toUpperCase()
- return header
- }
-
- tar.pack(a)
- .pipe(tar.extract(b, {strip: 1, map: uppercase}))
- .on('finish', function () {
- var files = fs.readdirSync(b).sort()
- t.same(files.length, 1)
- t.same(files[0], 'TEST.TXT')
- })
-})
-
-test('map + dir + permissions', function (t) {
- t.plan(win32 ? 1 : 2) // skip chmod test, it's not working like unix
-
- var a = path.join(__dirname, 'fixtures', 'b')
- var b = path.join(__dirname, 'fixtures', 'copy', 'a-perms')
-
- rimraf.sync(b)
-
- var aWithMode = function (header) {
- if (header.name === 'a') {
- header.mode = parseInt(700, 8)
- }
- return header
- }
-
- tar.pack(a)
- .pipe(tar.extract(b, {map: aWithMode}))
- .on('finish', function () {
- var files = fs.readdirSync(b).sort()
- var stat = fs.statSync(path.join(b, 'a'))
- t.same(files.length, 1)
- if (!win32) {
- t.same(stat.mode & parseInt(777, 8), parseInt(700, 8))
- }
- })
-})
-
-test('specific entries', function (t) {
- t.plan(6)
-
- var a = path.join(__dirname, 'fixtures', 'd')
- var b = path.join(__dirname, 'fixtures', 'copy', 'd-entries')
-
- var entries = [ 'file1', 'sub-files/file3', 'sub-dir' ]
-
- rimraf.sync(b)
- tar.pack(a, {entries: entries})
- .pipe(tar.extract(b))
- .on('finish', function () {
- var files = fs.readdirSync(b)
- t.same(files.length, 3)
- t.notSame(files.indexOf('file1'), -1)
- t.notSame(files.indexOf('sub-files'), -1)
- t.notSame(files.indexOf('sub-dir'), -1)
- var subFiles = fs.readdirSync(path.join(b, 'sub-files'))
- t.same(subFiles, ['file3'])
- var subDir = fs.readdirSync(path.join(b, 'sub-dir'))
- t.same(subDir, ['file5'])
- })
-})
-
-test('check type while mapping header on packing', function (t) {
- t.plan(3)
-
- var e = path.join(__dirname, 'fixtures', 'e')
-
- var checkHeaderType = function (header) {
- if (header.name.indexOf('.') === -1) t.same(header.type, header.name)
- }
-
- tar.pack(e, { map: checkHeaderType })
-})
diff --git a/node_modules/pacote/node_modules/tar-stream/LICENSE b/node_modules/pacote/node_modules/tar-stream/LICENSE
deleted file mode 100644
index 757562ec5..000000000
--- a/node_modules/pacote/node_modules/tar-stream/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014 Mathias Buus
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE. \ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar-stream/README.md b/node_modules/pacote/node_modules/tar-stream/README.md
deleted file mode 100644
index 96abbca1b..000000000
--- a/node_modules/pacote/node_modules/tar-stream/README.md
+++ /dev/null
@@ -1,168 +0,0 @@
-# tar-stream
-
-tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.
-
-Note that you still need to gunzip your data if you have a `.tar.gz`. We recommend using [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) in conjunction with this.
-
-```
-npm install tar-stream
-```
-
-[![build status](https://secure.travis-ci.org/mafintosh/tar-stream.png)](http://travis-ci.org/mafintosh/tar-stream)
-[![License](https://img.shields.io/badge/license-MIT-blue.svg)](http://opensource.org/licenses/MIT)
-
-## Usage
-
-tar-stream exposes two streams, [pack](https://github.com/mafintosh/tar-stream#packing) which creates tarballs and [extract](https://github.com/mafintosh/tar-stream#extracting) which extracts tarballs. To [modify an existing tarball](https://github.com/mafintosh/tar-stream#modifying-existing-tarballs) use both.
-
-
-It implements USTAR with additional support for pax extended headers. It should be compatible with all popular tar distributions out there (gnutar, bsdtar, etc.).
-
-## Related
-
-If you want to pack/unpack directories on the file system check out [tar-fs](https://github.com/mafintosh/tar-fs) which provides file system bindings to this module.
-
-## Packing
-
-To create a pack stream use `tar.pack()` and call `pack.entry(header, [callback])` to add tar entries.
-
-``` js
-var tar = require('tar-stream')
-var pack = tar.pack() // pack is a streams2 stream
-
-// add a file called my-test.txt with the content "Hello World!"
-pack.entry({ name: 'my-test.txt' }, 'Hello World!')
-
-// add a file called my-stream-test.txt from a stream
-var entry = pack.entry({ name: 'my-stream-test.txt', size: 11 }, function(err) {
- // the stream was added
- // no more entries
- pack.finalize()
-})
-
-entry.write('hello')
-entry.write(' ')
-entry.write('world')
-entry.end()
-
-// pipe the pack stream somewhere
-pack.pipe(process.stdout)
-```
-
-## Extracting
-
-To extract a stream use `tar.extract()` and listen for `extract.on('entry', (header, stream, next))`
-
-``` js
-var extract = tar.extract()
-
-extract.on('entry', function(header, stream, next) {
- // header is the tar header
- // stream is the content body (might be an empty stream)
- // call next when you are done with this entry
-
- stream.on('end', function() {
- next() // ready for next entry
- })
-
- stream.resume() // just auto drain the stream
-})
-
-extract.on('finish', function() {
- // all entries read
-})
-
-pack.pipe(extract)
-```
-
-The tar archive is streamed sequentially, meaning you **must** drain each entry's stream as you get it, or else the main extract stream will receive backpressure and stop reading.
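-
-For example, a minimal sketch that collects only `.txt` entries and drains everything else so the parser keeps flowing:
-
-``` js
-var texts = {}
-
-extract.on('entry', function(header, stream, next) {
-  if (/\.txt$/.test(header.name)) {
-    var chunks = []
-    stream.on('data', function(chunk) { chunks.push(chunk) })
-    stream.on('end', function() {
-      texts[header.name] = Buffer.concat(chunks).toString()
-      next()
-    })
-  } else {
-    stream.on('end', next)
-    stream.resume() // drain unwanted entries
-  }
-})
-```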
-
-## Headers
-
-The header object used in `entry` should contain the following properties.
-Most of these values can be found by stat'ing a file.
-
-``` js
-{
- name: 'path/to/this/entry.txt',
- size: 1314, // entry size. defaults to 0
-  mode: 0644, // entry mode. defaults to 0755 for dirs and 0644 otherwise
- mtime: new Date(), // last modified date for entry. defaults to now.
- type: 'file', // type of entry. defaults to file. can be:
- // file | link | symlink | directory | block-device
- // character-device | fifo | contiguous-file
- linkname: 'path', // linked file name
- uid: 0, // uid of entry owner. defaults to 0
- gid: 0, // gid of entry owner. defaults to 0
- uname: 'maf', // uname of entry owner. defaults to null
- gname: 'staff', // gname of entry owner. defaults to null
- devmajor: 0, // device major version. defaults to 0
- devminor: 0 // device minor version. defaults to 0
-}
-```
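-
-Since most header values come from a stat call, a sketch wiring `fs.stat` into `pack.entry` (reusing `pack` from the packing example above; the field mapping is an assumption based on the list) could look like:
-
-``` js
-var fs = require('fs')
-var name = 'path/to/this/entry.txt'
-
-fs.stat(name, function(err, st) {
-  if (err) throw err
-  var entry = pack.entry({
-    name: name,
-    size: st.size,
-    mode: st.mode,
-    mtime: st.mtime,
-    uid: st.uid,
-    gid: st.gid
-  }, function(err) {
-    if (err) throw err
-  })
-  fs.createReadStream(name).pipe(entry) // stream the file body into the entry
-})
-```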
-
-## Modifying existing tarballs
-
-Using tar-stream it is easy to rewrite paths, change modes, etc. in an existing tarball.
-
-``` js
-var extract = tar.extract()
-var pack = tar.pack()
-var path = require('path')
-
-extract.on('entry', function(header, stream, callback) {
- // let's prefix all names with 'tmp'
- header.name = path.join('tmp', header.name)
- // write the new entry to the pack stream
- stream.pipe(pack.entry(header, callback))
-})
-
-extract.on('finish', function() {
-  // all entries done - let's finalize it
- pack.finalize()
-})
-
-// pipe the old tarball to the extractor
-oldTarballStream.pipe(extract)
-
-// pipe the new tarball to another stream
-pack.pipe(newTarballStream)
-```
-
-## Saving tarball to fs
-
-``` js
-var fs = require('fs')
-var tar = require('tar-stream')
-
-var pack = tar.pack() // pack is a streams2 stream
-var path = 'YourTarBall.tar'
-var yourTarball = fs.createWriteStream(path)
-
-// add a file called YourFile.txt with the content "Hello World!"
-pack.entry({name: 'YourFile.txt'}, 'Hello World!', function (err) {
- if (err) throw err
- pack.finalize()
-})
-
-// pipe the pack stream to your file
-pack.pipe(yourTarball)
-
-yourTarball.on('close', function () {
- console.log(path + ' has been written')
- fs.stat(path, function(err, stats) {
- if (err) throw err
- console.log(stats)
- console.log('Got file info successfully!')
- })
-})
-```
-
-## Performance
-
-[See tar-fs for a performance comparison with node-tar](https://github.com/mafintosh/tar-fs/blob/master/README.md#performance)
-
-## License
-
-MIT
diff --git a/node_modules/pacote/node_modules/tar-stream/extract.js b/node_modules/pacote/node_modules/tar-stream/extract.js
deleted file mode 100644
index 8be2a472c..000000000
--- a/node_modules/pacote/node_modules/tar-stream/extract.js
+++ /dev/null
@@ -1,246 +0,0 @@
-var util = require('util')
-var bl = require('bl')
-var xtend = require('xtend')
-var headers = require('./headers')
-
-var Writable = require('readable-stream').Writable
-var PassThrough = require('readable-stream').PassThrough
-
-var noop = function () {}
-
-var overflow = function (size) {
- size &= 511
- return size && 512 - size
-}
-
-var emptyStream = function (self, offset) {
- var s = new Source(self, offset)
- s.end()
- return s
-}
-
-var mixinPax = function (header, pax) {
- if (pax.path) header.name = pax.path
- if (pax.linkpath) header.linkname = pax.linkpath
- header.pax = pax
- return header
-}
-
-var Source = function (self, offset) {
- this._parent = self
- this.offset = offset
- PassThrough.call(this)
-}
-
-util.inherits(Source, PassThrough)
-
-Source.prototype.destroy = function (err) {
- this._parent.destroy(err)
-}
-
-var Extract = function (opts) {
- if (!(this instanceof Extract)) return new Extract(opts)
- Writable.call(this, opts)
-
- this._offset = 0
- this._buffer = bl()
- this._missing = 0
- this._onparse = noop
- this._header = null
- this._stream = null
- this._overflow = null
- this._cb = null
- this._locked = false
- this._destroyed = false
- this._pax = null
- this._paxGlobal = null
- this._gnuLongPath = null
- this._gnuLongLinkPath = null
-
- var self = this
- var b = self._buffer
-
- var oncontinue = function () {
- self._continue()
- }
-
- var onunlock = function (err) {
- self._locked = false
- if (err) return self.destroy(err)
- if (!self._stream) oncontinue()
- }
-
- var onstreamend = function () {
- self._stream = null
- var drain = overflow(self._header.size)
- if (drain) self._parse(drain, ondrain)
- else self._parse(512, onheader)
- if (!self._locked) oncontinue()
- }
-
- var ondrain = function () {
- self._buffer.consume(overflow(self._header.size))
- self._parse(512, onheader)
- oncontinue()
- }
-
- var onpaxglobalheader = function () {
- var size = self._header.size
- self._paxGlobal = headers.decodePax(b.slice(0, size))
- b.consume(size)
- onstreamend()
- }
-
- var onpaxheader = function () {
- var size = self._header.size
- self._pax = headers.decodePax(b.slice(0, size))
- if (self._paxGlobal) self._pax = xtend(self._paxGlobal, self._pax)
- b.consume(size)
- onstreamend()
- }
-
- var ongnulongpath = function () {
- var size = self._header.size
- this._gnuLongPath = headers.decodeLongPath(b.slice(0, size))
- b.consume(size)
- onstreamend()
- }
-
- var ongnulonglinkpath = function () {
- var size = self._header.size
- this._gnuLongLinkPath = headers.decodeLongPath(b.slice(0, size))
- b.consume(size)
- onstreamend()
- }
-
- var onheader = function () {
- var offset = self._offset
- var header
- try {
- header = self._header = headers.decode(b.slice(0, 512))
- } catch (err) {
- self.emit('error', err)
- }
- b.consume(512)
-
- if (!header) {
- self._parse(512, onheader)
- oncontinue()
- return
- }
- if (header.type === 'gnu-long-path') {
- self._parse(header.size, ongnulongpath)
- oncontinue()
- return
- }
- if (header.type === 'gnu-long-link-path') {
- self._parse(header.size, ongnulonglinkpath)
- oncontinue()
- return
- }
- if (header.type === 'pax-global-header') {
- self._parse(header.size, onpaxglobalheader)
- oncontinue()
- return
- }
- if (header.type === 'pax-header') {
- self._parse(header.size, onpaxheader)
- oncontinue()
- return
- }
-
- if (self._gnuLongPath) {
- header.name = self._gnuLongPath
- self._gnuLongPath = null
- }
-
- if (self._gnuLongLinkPath) {
- header.linkname = self._gnuLongLinkPath
- self._gnuLongLinkPath = null
- }
-
- if (self._pax) {
- self._header = header = mixinPax(header, self._pax)
- self._pax = null
- }
-
- self._locked = true
-
- if (!header.size || header.type === 'directory') {
- self._parse(512, onheader)
- self.emit('entry', header, emptyStream(self, offset), onunlock)
- return
- }
-
- self._stream = new Source(self, offset)
-
- self.emit('entry', header, self._stream, onunlock)
- self._parse(header.size, onstreamend)
- oncontinue()
- }
-
- this._parse(512, onheader)
-}
-
-util.inherits(Extract, Writable)
-
-Extract.prototype.destroy = function (err) {
- if (this._destroyed) return
- this._destroyed = true
-
- if (err) this.emit('error', err)
- this.emit('close')
- if (this._stream) this._stream.emit('close')
-}
-
-Extract.prototype._parse = function (size, onparse) {
- if (this._destroyed) return
- this._offset += size
- this._missing = size
- this._onparse = onparse
-}
-
-Extract.prototype._continue = function () {
- if (this._destroyed) return
- var cb = this._cb
- this._cb = noop
- if (this._overflow) this._write(this._overflow, undefined, cb)
- else cb()
-}
-
-Extract.prototype._write = function (data, enc, cb) {
- if (this._destroyed) return
-
- var s = this._stream
- var b = this._buffer
- var missing = this._missing
-
- // we have not reached end-of-chunk yet. just forward it
-
- if (data.length < missing) {
- this._missing -= data.length
- this._overflow = null
- if (s) return s.write(data, cb)
- b.append(data)
- return cb()
- }
-
- // end-of-chunk. the parser should call cb.
-
- this._cb = cb
- this._missing = 0
-
- var overflow = null
- if (data.length > missing) {
- overflow = data.slice(missing)
- data = data.slice(0, missing)
- }
-
- if (s) s.end(data)
- else b.append(data)
-
- this._overflow = overflow
- this._onparse()
-}
-
-module.exports = Extract
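-
-// Minimal usage sketch (illustrative, assuming an 'archive.tar' on disk).
-// Each 'entry' stream must be drained and its callback invoked before
-// parsing resumes, because the extractor locks itself while an entry is
-// outstanding:
-//
-//   var extract = new Extract()
-//   extract.on('entry', function (header, stream, next) {
-//     stream.on('end', next) // header.name / header.size describe the entry
-//     stream.resume()        // drain the entry body
-//   })
-//   require('fs').createReadStream('archive.tar').pipe(extract)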
diff --git a/node_modules/pacote/node_modules/tar-stream/headers.js b/node_modules/pacote/node_modules/tar-stream/headers.js
deleted file mode 100644
index 8aab8b561..000000000
--- a/node_modules/pacote/node_modules/tar-stream/headers.js
+++ /dev/null
@@ -1,286 +0,0 @@
-var ZEROS = '0000000000000000000'
-var SEVENS = '7777777777777777777'
-var ZERO_OFFSET = '0'.charCodeAt(0)
-var USTAR = 'ustar\x0000'
-var MASK = parseInt('7777', 8)
-
-var clamp = function (index, len, defaultValue) {
- if (typeof index !== 'number') return defaultValue
- index = ~~index // Coerce to integer.
- if (index >= len) return len
- if (index >= 0) return index
- index += len
- if (index >= 0) return index
- return 0
-}
-
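-// typeflag here is the raw header byte minus ASCII '0': 'x' decodes to 72
-// (pax-header), 'g' to 55 (pax-global-header), 'K' to 27 and 'L' to 28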
-var toType = function (flag) {
- switch (flag) {
- case 0:
- return 'file'
- case 1:
- return 'link'
- case 2:
- return 'symlink'
- case 3:
- return 'character-device'
- case 4:
- return 'block-device'
- case 5:
- return 'directory'
- case 6:
- return 'fifo'
- case 7:
- return 'contiguous-file'
- case 72:
- return 'pax-header'
- case 55:
- return 'pax-global-header'
- case 27:
- return 'gnu-long-link-path'
- case 28:
- case 30:
- return 'gnu-long-path'
- }
-
- return null
-}
-
-var toTypeflag = function (flag) {
- switch (flag) {
- case 'file':
- return 0
- case 'link':
- return 1
- case 'symlink':
- return 2
- case 'character-device':
- return 3
- case 'block-device':
- return 4
- case 'directory':
- return 5
- case 'fifo':
- return 6
- case 'contiguous-file':
- return 7
- case 'pax-header':
- return 72
- }
-
- return 0
-}
-
-var alloc = function (size) {
- var buf = new Buffer(size)
- buf.fill(0)
- return buf
-}
-
-var indexOf = function (block, num, offset, end) {
- for (; offset < end; offset++) {
- if (block[offset] === num) return offset
- }
- return end
-}
-
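-// the checksum treats the 8-byte checksum field itself as spaces,
-// hence the initial value of 8 * 32; bytes 148-155 are skipped below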
-var cksum = function (block) {
- var sum = 8 * 32
- for (var i = 0; i < 148; i++) sum += block[i]
- for (var j = 156; j < 512; j++) sum += block[j]
- return sum
-}
-
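-// fixed-width octal field, zero-padded and space-terminated;
-// e.g. encodeOct(420, 6) === '000644 ' (420 decimal === 0644 octal)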
-var encodeOct = function (val, n) {
- val = val.toString(8)
- if (val.length > n) return SEVENS.slice(0, n) + ' '
- else return ZEROS.slice(0, n - val.length) + val + ' '
-}
-
-/* Copied from the node-tar repo and modified to meet
- * tar-stream coding standard.
- *
- * Source: https://github.com/npm/node-tar/blob/51b6627a1f357d2eb433e7378e5f05e83b7aa6cd/lib/header.js#L349
- */
-function parse256 (buf) {
- // first byte MUST be either 80 or FF
- // 80 for positive, FF for 2's comp
- var positive
- if (buf[0] === 0x80) positive = true
- else if (buf[0] === 0xFF) positive = false
- else return null
-
- // build up a base-256 tuple from the least sig to the highest
- var zero = false
- var tuple = []
- for (var i = buf.length - 1; i > 0; i--) {
- var byte = buf[i]
- if (positive) tuple.push(byte)
- else if (zero && byte === 0) tuple.push(0)
- else if (zero) {
- zero = false
- tuple.push(0x100 - byte)
- } else tuple.push(0xFF - byte)
- }
-
- var sum = 0
- var l = tuple.length
- for (i = 0; i < l; i++) {
- sum += tuple[i] * Math.pow(256, i)
- }
-
- return positive ? sum : -1 * sum
-}
-
-var decodeOct = function (val, offset, length) {
- val = val.slice(offset, offset + length)
- offset = 0
-
- // If prefixed with 0x80 then parse as a base-256 integer
- if (val[offset] & 0x80) {
- return parse256(val)
- } else {
- // Older versions of tar can prefix with spaces
- while (offset < val.length && val[offset] === 32) offset++
- var end = clamp(indexOf(val, 32, offset, val.length), val.length, val.length)
- while (offset < end && val[offset] === 0) offset++
- if (end === offset) return 0
- return parseInt(val.slice(offset, end).toString(), 8)
- }
-}
-
-var decodeStr = function (val, offset, length) {
- return val.slice(offset, indexOf(val, 0, offset, offset + length)).toString()
-}
-
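-// prefix a pax record with its own total length, counting the length
-// digits themselves; e.g. addLength(' path=abc\n') === '12 path=abc\n'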
-var addLength = function (str) {
- var len = Buffer.byteLength(str)
- var digits = Math.floor(Math.log(len) / Math.log(10)) + 1
- if (len + digits > Math.pow(10, digits)) digits++
-
- return (len + digits) + str
-}
-
-exports.decodeLongPath = function (buf) {
- return decodeStr(buf, 0, buf.length)
-}
-
-exports.encodePax = function (opts) { // TODO: encode more stuff in pax
- var result = ''
- if (opts.name) result += addLength(' path=' + opts.name + '\n')
- if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n')
- var pax = opts.pax
- if (pax) {
- for (var key in pax) {
- result += addLength(' ' + key + '=' + pax[key] + '\n')
- }
- }
- return new Buffer(result)
-}
-
-exports.decodePax = function (buf) {
- var result = {}
-
- while (buf.length) {
- var i = 0
- while (i < buf.length && buf[i] !== 32) i++
- var len = parseInt(buf.slice(0, i).toString(), 10)
- if (!len) return result
-
- var b = buf.slice(i + 1, len - 1).toString()
- var keyIndex = b.indexOf('=')
- if (keyIndex === -1) return result
- result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1)
-
- buf = buf.slice(len)
- }
-
- return result
-}
-
-exports.encode = function (opts) {
- var buf = alloc(512)
- var name = opts.name
- var prefix = ''
-
- if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/'
- if (Buffer.byteLength(name) !== name.length) return null // utf-8
-
- while (Buffer.byteLength(name) > 100) {
- var i = name.indexOf('/')
- if (i === -1) return null
- prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i)
- name = name.slice(i + 1)
- }
-
- if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null
- if (opts.linkname && Buffer.byteLength(opts.linkname) > 100) return null
-
- buf.write(name)
- buf.write(encodeOct(opts.mode & MASK, 6), 100)
- buf.write(encodeOct(opts.uid, 6), 108)
- buf.write(encodeOct(opts.gid, 6), 116)
- buf.write(encodeOct(opts.size, 11), 124)
- buf.write(encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136)
-
- buf[156] = ZERO_OFFSET + toTypeflag(opts.type)
-
- if (opts.linkname) buf.write(opts.linkname, 157)
-
- buf.write(USTAR, 257)
- if (opts.uname) buf.write(opts.uname, 265)
- if (opts.gname) buf.write(opts.gname, 297)
- buf.write(encodeOct(opts.devmajor || 0, 6), 329)
- buf.write(encodeOct(opts.devminor || 0, 6), 337)
-
- if (prefix) buf.write(prefix, 345)
-
- buf.write(encodeOct(cksum(buf), 6), 148)
-
- return buf
-}
-
-exports.decode = function (buf) {
- var typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
-
- var name = decodeStr(buf, 0, 100)
- var mode = decodeOct(buf, 100, 8)
- var uid = decodeOct(buf, 108, 8)
- var gid = decodeOct(buf, 116, 8)
- var size = decodeOct(buf, 124, 12)
- var mtime = decodeOct(buf, 136, 12)
- var type = toType(typeflag)
- var linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100)
- var uname = decodeStr(buf, 265, 32)
- var gname = decodeStr(buf, 297, 32)
- var devmajor = decodeOct(buf, 329, 8)
- var devminor = decodeOct(buf, 337, 8)
-
- if (buf[345]) name = decodeStr(buf, 345, 155) + '/' + name
-
- // to support old tar versions that use trailing / to indicate dirs
- if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5
-
- var c = cksum(buf)
-
- // checksum is still initial value if header was null.
- if (c === 8 * 32) return null
-
- // valid checksum
- if (c !== decodeOct(buf, 148, 8)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?')
-
- return {
- name: name,
- mode: mode,
- uid: uid,
- gid: gid,
- size: size,
- mtime: new Date(1000 * mtime),
- type: type,
- linkname: linkname,
- uname: uname,
- gname: gname,
- devmajor: devmajor,
- devminor: devminor
- }
-}
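-
-// Round-trip sketch (illustrative; these helpers are internal to
-// tar-stream): encode() produces one 512-byte ustar block that decode()
-// reads back:
-//
-//   var block = exports.encode({
-//     name: 'hello.txt', mode: parseInt('644', 8), uid: 0, gid: 0,
-//     size: 5, mtime: new Date(), type: 'file'
-//   })
-//   exports.decode(block).name // => 'hello.txt'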
diff --git a/node_modules/pacote/node_modules/tar-stream/index.js b/node_modules/pacote/node_modules/tar-stream/index.js
deleted file mode 100644
index 648170482..000000000
--- a/node_modules/pacote/node_modules/tar-stream/index.js
+++ /dev/null
@@ -1,2 +0,0 @@
-exports.extract = require('./extract')
-exports.pack = require('./pack')
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/bl/.npmignore b/node_modules/pacote/node_modules/tar-stream/node_modules/bl/.npmignore
deleted file mode 100644
index 40b878db5..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/bl/.npmignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules/ \ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/bl/.travis.yml b/node_modules/pacote/node_modules/tar-stream/node_modules/bl/.travis.yml
deleted file mode 100644
index 8c6fc4810..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/bl/.travis.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-sudo: false
-language: node_js
-node_js:
- - '0.10'
- - '0.12'
- - '4'
- - '6'
- - '7'
-branches:
- only:
- - master
-notifications:
- email:
- - rod@vagg.org
- - matteo.collina@gmail.com
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/bl/LICENSE.md b/node_modules/pacote/node_modules/tar-stream/node_modules/bl/LICENSE.md
deleted file mode 100644
index ff35a3472..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/bl/LICENSE.md
+++ /dev/null
@@ -1,13 +0,0 @@
-The MIT License (MIT)
-=====================
-
-Copyright (c) 2013-2016 bl contributors
-----------------------------------
-
-*bl contributors listed at <https://github.com/rvagg/bl#contributors>*
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/bl/README.md b/node_modules/pacote/node_modules/tar-stream/node_modules/bl/README.md
deleted file mode 100644
index da0c18338..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/bl/README.md
+++ /dev/null
@@ -1,208 +0,0 @@
-# bl *(BufferList)*
-
-[![Build Status](https://travis-ci.org/rvagg/bl.svg?branch=master)](https://travis-ci.org/rvagg/bl)
-
-**A Node.js Buffer list collector, reader and streamer thingy.**
-
-[![NPM](https://nodei.co/npm/bl.png?downloads=true&downloadRank=true)](https://nodei.co/npm/bl/)
-[![NPM](https://nodei.co/npm-dl/bl.png?months=6&height=3)](https://nodei.co/npm/bl/)
-
-**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them!
-
-The original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently.
-
-```js
-const BufferList = require('bl')
-
-var bl = new BufferList()
-bl.append(new Buffer('abcd'))
-bl.append(new Buffer('efg'))
-bl.append('hi') // bl will also accept & convert Strings
-bl.append(new Buffer('j'))
-bl.append(new Buffer([ 0x3, 0x4 ]))
-
-console.log(bl.length) // 12
-
-console.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij'
-console.log(bl.slice(3, 10).toString('ascii')) // 'defghij'
-console.log(bl.slice(3, 6).toString('ascii')) // 'def'
-console.log(bl.slice(3, 8).toString('ascii')) // 'defgh'
-console.log(bl.slice(5, 10).toString('ascii')) // 'fghij'
-
-// or just use toString!
-console.log(bl.toString()) // 'abcdefghij\u0003\u0004'
-console.log(bl.toString('ascii', 3, 8)) // 'defgh'
-console.log(bl.toString('ascii', 5, 10)) // 'fghij'
-
-// other standard Buffer readables
-console.log(bl.readUInt16BE(10)) // 0x0304
-console.log(bl.readUInt16LE(10)) // 0x0403
-```
-
-Give it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**:
-
-```js
-const bl = require('bl')
- , fs = require('fs')
-
-fs.createReadStream('README.md')
- .pipe(bl(function (err, data) { // note 'new' isn't strictly required
- // `data` is a complete Buffer object containing the full data
- console.log(data.toString())
- }))
-```
-
-Note that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream.
-
-Or to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!):
-```js
-const hyperquest = require('hyperquest')
- , bl = require('bl')
- , url = 'https://raw.github.com/rvagg/bl/master/README.md'
-
-hyperquest(url).pipe(bl(function (err, data) {
- console.log(data.toString())
-}))
-```
-
-Or, use it as a readable stream to recompose a list of Buffers to an output source:
-
-```js
-const BufferList = require('bl')
- , fs = require('fs')
-
-var bl = new BufferList()
-bl.append(new Buffer('abcd'))
-bl.append(new Buffer('efg'))
-bl.append(new Buffer('hi'))
-bl.append(new Buffer('j'))
-
-bl.pipe(fs.createWriteStream('gibberish.txt'))
-```
-
-## API
-
- * <a href="#ctor"><code><b>new BufferList([ callback ])</b></code></a>
- * <a href="#length"><code>bl.<b>length</b></code></a>
- * <a href="#append"><code>bl.<b>append(buffer)</b></code></a>
- * <a href="#get"><code>bl.<b>get(index)</b></code></a>
- * <a href="#slice"><code>bl.<b>slice([ start[, end ] ])</b></code></a>
- * <a href="#shallowSlice"><code>bl.<b>shallowSlice([ start[, end ] ])</b></code></a>
- * <a href="#copy"><code>bl.<b>copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])</b></code></a>
- * <a href="#duplicate"><code>bl.<b>duplicate()</b></code></a>
- * <a href="#consume"><code>bl.<b>consume(bytes)</b></code></a>
- * <a href="#toString"><code>bl.<b>toString([encoding, [ start, [ end ]]])</b></code></a>
- * <a href="#readXX"><code>bl.<b>readDoubleBE()</b></code>, <code>bl.<b>readDoubleLE()</b></code>, <code>bl.<b>readFloatBE()</b></code>, <code>bl.<b>readFloatLE()</b></code>, <code>bl.<b>readInt32BE()</b></code>, <code>bl.<b>readInt32LE()</b></code>, <code>bl.<b>readUInt32BE()</b></code>, <code>bl.<b>readUInt32LE()</b></code>, <code>bl.<b>readInt16BE()</b></code>, <code>bl.<b>readInt16LE()</b></code>, <code>bl.<b>readUInt16BE()</b></code>, <code>bl.<b>readUInt16LE()</b></code>, <code>bl.<b>readInt8()</b></code>, <code>bl.<b>readUInt8()</b></code></a>
- * <a href="#streams">Streams</a>
-
---------------------------------------------------------
-<a name="ctor"></a>
-### new BufferList([ callback | Buffer | Buffer array | BufferList | BufferList array | String ])
-The constructor takes an optional callback; if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance when `bl.end()` is called (i.e. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream.
-
-Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` objects.
-
-`new` is not strictly required; if you don't instantiate a new object, it will be done automatically for you, so you can create a new instance simply with:
-
-```js
-var bl = require('bl')
-var myinstance = bl()
-
-// equivalent to:
-
-var BufferList = require('bl')
-var myinstance = new BufferList()
-```
-
---------------------------------------------------------
-<a name="length"></a>
-### bl.length
-Get the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. It should accurately represent the total number of bytes that can be read from the list.
-
---------------------------------------------------------
-<a name="append"></a>
-### bl.append(Buffer | Buffer array | BufferList | BufferList array | String)
-`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained.
-
---------------------------------------------------------
-<a name="get"></a>
-### bl.get(index)
-`get()` will return the byte at the specified index.
-
---------------------------------------------------------
-<a name="slice"></a>
-### bl.slice([ start, [ end ] ])
-`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
-
-If the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer.
-
---------------------------------------------------------
-<a name="shallowSlice"></a>
-### bl.shallowSlice([ start, [ end ] ])
-`shallowSlice()` returns a new `BufferList` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
-
-No copies will be performed. All buffers in the result share memory with the original list.
-
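-For example (no bytes are copied; the result shares memory with the list):
-
-```js
-var bl = new BufferList([ new Buffer('First'), new Buffer('Second') ])
-bl.shallowSlice(3, 8).toString() // 'stSec'
-```
-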
---------------------------------------------------------
-<a name="copy"></a>
-### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])
-`copy()` copies the content of the list into the `dest` buffer, starting at `destStart` and containing the bytes within the range specified by `srcStart` to `srcEnd`. `destStart`, `srcStart` and `srcEnd` are optional and default to the beginning of the `dest` buffer, and the beginning and end of the list respectively.
-
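-For example:
-
-```js
-var bl = new BufferList(new Buffer('abcdef'))
-var dest = new Buffer(3)
-bl.copy(dest, 0, 1, 4)
-dest.toString('ascii') // 'bcd'
-```
-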
---------------------------------------------------------
-<a name="duplicate"></a>
-### bl.duplicate()
-`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remain the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list. Example:
-
-```js
-var bl = new BufferList()
-
-bl.append('hello')
-bl.append(' world')
-bl.append('\n')
-
-bl.duplicate().pipe(process.stdout, { end: false })
-
-console.log(bl.toString())
-```
-
---------------------------------------------------------
-<a name="consume"></a>
-### bl.consume(bytes)
-`consume()` will shift bytes *off the start of the list*. The number of bytes consumed doesn't need to line up with the sizes of the internal Buffers&mdash;initial offsets will be calculated accordingly in order to give you a consistent view of the data.
-
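-For example:
-
-```js
-var bl = new BufferList([ new Buffer('abcd'), new Buffer('efg') ])
-bl.consume(3) // the first buffer is now offset internally
-bl.toString('ascii') // 'defg'
-```
-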
---------------------------------------------------------
-<a name="toString"></a>
-### bl.toString([encoding, [ start, [ end ]]])
-`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information.
-
---------------------------------------------------------
-<a name="readXX"></a>
-### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8()
-
-All of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently.
-
-See the <b><code>[Buffer](http://nodejs.org/docs/latest/api/buffer.html)</code></b> documentation for how these work.
-
---------------------------------------------------------
-<a name="streams"></a>
-### Streams
-**bl** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **bl** instance.
-
---------------------------------------------------------
-
-## Contributors
-
-**bl** is brought to you by the following hackers:
-
- * [Rod Vagg](https://github.com/rvagg)
- * [Matteo Collina](https://github.com/mcollina)
- * [Jarett Cruger](https://github.com/jcrugzz)
-
---------------------------------------------------------
-
-<a name="license"></a>
-## License &amp; copyright
-
-Copyright (c) 2013-2016 bl contributors (listed above).
-
-bl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details.
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/bl/bl.js b/node_modules/pacote/node_modules/tar-stream/node_modules/bl/bl.js
deleted file mode 100644
index 98983316c..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/bl/bl.js
+++ /dev/null
@@ -1,280 +0,0 @@
-var DuplexStream = require('readable-stream/duplex')
- , util = require('util')
-
-
-function BufferList (callback) {
- if (!(this instanceof BufferList))
- return new BufferList(callback)
-
- this._bufs = []
- this.length = 0
-
- if (typeof callback == 'function') {
- this._callback = callback
-
- var piper = function piper (err) {
- if (this._callback) {
- this._callback(err)
- this._callback = null
- }
- }.bind(this)
-
- this.on('pipe', function onPipe (src) {
- src.on('error', piper)
- })
- this.on('unpipe', function onUnpipe (src) {
- src.removeListener('error', piper)
- })
- } else {
- this.append(callback)
- }
-
- DuplexStream.call(this)
-}
-
-
-util.inherits(BufferList, DuplexStream)
-
-
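-// map a byte offset into [ buffer index, offset within that buffer ];
-// e.g. with buffers of lengths 4 and 3, _offset(5) returns [ 1, 1 ]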
-BufferList.prototype._offset = function _offset (offset) {
- var tot = 0, i = 0, _t
- if (offset === 0) return [ 0, 0 ]
- for (; i < this._bufs.length; i++) {
- _t = tot + this._bufs[i].length
- if (offset < _t || i == this._bufs.length - 1)
- return [ i, offset - tot ]
- tot = _t
- }
-}
-
-
-BufferList.prototype.append = function append (buf) {
- var i = 0
-
- if (Buffer.isBuffer(buf)) {
- this._appendBuffer(buf);
- } else if (Array.isArray(buf)) {
- for (; i < buf.length; i++)
- this.append(buf[i])
- } else if (buf instanceof BufferList) {
- // unwrap argument into individual BufferLists
- for (; i < buf._bufs.length; i++)
- this.append(buf._bufs[i])
- } else if (buf != null) {
- // coerce number arguments to strings, since Buffer(number) does
- // uninitialized memory allocation
- if (typeof buf == 'number')
- buf = buf.toString()
-
- this._appendBuffer(new Buffer(buf));
- }
-
- return this
-}
-
-
-BufferList.prototype._appendBuffer = function appendBuffer (buf) {
- this._bufs.push(buf)
- this.length += buf.length
-}
-
-
-BufferList.prototype._write = function _write (buf, encoding, callback) {
- this._appendBuffer(buf)
-
- if (typeof callback == 'function')
- callback()
-}
-
-
-BufferList.prototype._read = function _read (size) {
- if (!this.length)
- return this.push(null)
-
- size = Math.min(size, this.length)
- this.push(this.slice(0, size))
- this.consume(size)
-}
-
-
-BufferList.prototype.end = function end (chunk) {
- DuplexStream.prototype.end.call(this, chunk)
-
- if (this._callback) {
- this._callback(null, this.slice())
- this._callback = null
- }
-}
-
-
-BufferList.prototype.get = function get (index) {
- return this.slice(index, index + 1)[0]
-}
-
-
-BufferList.prototype.slice = function slice (start, end) {
- if (typeof start == 'number' && start < 0)
- start += this.length
- if (typeof end == 'number' && end < 0)
- end += this.length
- return this.copy(null, 0, start, end)
-}
-
-
-BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
- if (typeof srcStart != 'number' || srcStart < 0)
- srcStart = 0
- if (typeof srcEnd != 'number' || srcEnd > this.length)
- srcEnd = this.length
- if (srcStart >= this.length)
- return dst || new Buffer(0)
- if (srcEnd <= 0)
- return dst || new Buffer(0)
-
- var copy = !!dst
- , off = this._offset(srcStart)
- , len = srcEnd - srcStart
- , bytes = len
- , bufoff = (copy && dstStart) || 0
- , start = off[1]
- , l
- , i
-
- // copy/slice everything
- if (srcStart === 0 && srcEnd == this.length) {
- if (!copy) { // slice, but full concat if multiple buffers
- return this._bufs.length === 1
- ? this._bufs[0]
- : Buffer.concat(this._bufs, this.length)
- }
-
- // copy, need to copy individual buffers
- for (i = 0; i < this._bufs.length; i++) {
- this._bufs[i].copy(dst, bufoff)
- bufoff += this._bufs[i].length
- }
-
- return dst
- }
-
- // easy, cheap case where it's a subset of one of the buffers
- if (bytes <= this._bufs[off[0]].length - start) {
- return copy
- ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
- : this._bufs[off[0]].slice(start, start + bytes)
- }
-
- if (!copy) // a slice, we need something to copy into
- dst = new Buffer(len)
-
- for (i = off[0]; i < this._bufs.length; i++) {
- l = this._bufs[i].length - start
-
- if (bytes > l) {
- this._bufs[i].copy(dst, bufoff, start)
- } else {
- this._bufs[i].copy(dst, bufoff, start, start + bytes)
- break
- }
-
- bufoff += l
- bytes -= l
-
- if (start)
- start = 0
- }
-
- return dst
-}
-
-BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
- start = start || 0
- end = end || this.length
-
- if (start < 0)
- start += this.length
- if (end < 0)
- end += this.length
-
- var startOffset = this._offset(start)
- , endOffset = this._offset(end)
- , buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)
-
- if (endOffset[1] == 0)
- buffers.pop()
- else
- buffers[buffers.length-1] = buffers[buffers.length-1].slice(0, endOffset[1])
-
- if (startOffset[1] != 0)
- buffers[0] = buffers[0].slice(startOffset[1])
-
- return new BufferList(buffers)
-}
-
-BufferList.prototype.toString = function toString (encoding, start, end) {
- return this.slice(start, end).toString(encoding)
-}
-
-BufferList.prototype.consume = function consume (bytes) {
- while (this._bufs.length) {
- if (bytes >= this._bufs[0].length) {
- bytes -= this._bufs[0].length
- this.length -= this._bufs[0].length
- this._bufs.shift()
- } else {
- this._bufs[0] = this._bufs[0].slice(bytes)
- this.length -= bytes
- break
- }
- }
- return this
-}
-
-
-BufferList.prototype.duplicate = function duplicate () {
- var i = 0
- , copy = new BufferList()
-
- for (; i < this._bufs.length; i++)
- copy.append(this._bufs[i])
-
- return copy
-}
-
-
-BufferList.prototype.destroy = function destroy () {
- this._bufs.length = 0
- this.length = 0
- this.push(null)
-}
-
-
-;(function () {
- var methods = {
- 'readDoubleBE' : 8
- , 'readDoubleLE' : 8
- , 'readFloatBE' : 4
- , 'readFloatLE' : 4
- , 'readInt32BE' : 4
- , 'readInt32LE' : 4
- , 'readUInt32BE' : 4
- , 'readUInt32LE' : 4
- , 'readInt16BE' : 2
- , 'readInt16LE' : 2
- , 'readUInt16BE' : 2
- , 'readUInt16LE' : 2
- , 'readInt8' : 1
- , 'readUInt8' : 1
- }
-
- for (var m in methods) {
- (function (m) {
- BufferList.prototype[m] = function (offset) {
- return this.slice(offset, offset + methods[m])[m](0)
- }
- }(m))
- }
-}())
-
-
-module.exports = BufferList
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/bl/package.json b/node_modules/pacote/node_modules/tar-stream/node_modules/bl/package.json
deleted file mode 100644
index edfb199db..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/bl/package.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "_from": "bl@^1.0.0",
- "_id": "bl@1.2.1",
- "_inBundle": false,
- "_integrity": "sha1-ysMo977kVzDUBLaSID/LWQ4XLV4=",
- "_location": "/pacote/tar-stream/bl",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "bl@^1.0.0",
- "name": "bl",
- "escapedName": "bl",
- "rawSpec": "^1.0.0",
- "saveSpec": null,
- "fetchSpec": "^1.0.0"
- },
- "_requiredBy": [
- "/pacote/tar-stream"
- ],
- "_resolved": "https://registry.npmjs.org/bl/-/bl-1.2.1.tgz",
- "_shasum": "cac328f7bee45730d404b692203fcb590e172d5e",
- "_spec": "bl@^1.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/tar-stream",
- "authors": [
- "Rod Vagg <rod@vagg.org> (https://github.com/rvagg)",
- "Matteo Collina <matteo.collina@gmail.com> (https://github.com/mcollina)",
- "Jarett Cruger <jcrugzz@gmail.com> (https://github.com/jcrugzz)"
- ],
- "bugs": {
- "url": "https://github.com/rvagg/bl/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "readable-stream": "^2.0.5"
- },
- "deprecated": false,
- "description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!",
- "devDependencies": {
- "faucet": "0.0.1",
- "hash_file": "~0.1.1",
- "tape": "~4.6.0"
- },
- "homepage": "https://github.com/rvagg/bl",
- "keywords": [
- "buffer",
- "buffers",
- "stream",
- "awesomesauce"
- ],
- "license": "MIT",
- "main": "bl.js",
- "name": "bl",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/rvagg/bl.git"
- },
- "scripts": {
- "test": "node test/test.js | faucet"
- },
- "version": "1.2.1"
-}
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/bl/test/test.js b/node_modules/pacote/node_modules/tar-stream/node_modules/bl/test/test.js
deleted file mode 100644
index 396974ec1..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/bl/test/test.js
+++ /dev/null
@@ -1,701 +0,0 @@
-var tape = require('tape')
- , crypto = require('crypto')
- , fs = require('fs')
- , hash = require('hash_file')
- , BufferList = require('../')
-
- , encodings =
- ('hex utf8 utf-8 ascii binary base64'
- + (process.browser ? '' : ' ucs2 ucs-2 utf16le utf-16le')).split(' ')
-
-tape('single bytes from single buffer', function (t) {
- var bl = new BufferList()
- bl.append(new Buffer('abcd'))
-
- t.equal(bl.length, 4)
-
- t.equal(bl.get(0), 97)
- t.equal(bl.get(1), 98)
- t.equal(bl.get(2), 99)
- t.equal(bl.get(3), 100)
-
- t.end()
-})
-
-tape('single bytes from multiple buffers', function (t) {
- var bl = new BufferList()
- bl.append(new Buffer('abcd'))
- bl.append(new Buffer('efg'))
- bl.append(new Buffer('hi'))
- bl.append(new Buffer('j'))
-
- t.equal(bl.length, 10)
-
- t.equal(bl.get(0), 97)
- t.equal(bl.get(1), 98)
- t.equal(bl.get(2), 99)
- t.equal(bl.get(3), 100)
- t.equal(bl.get(4), 101)
- t.equal(bl.get(5), 102)
- t.equal(bl.get(6), 103)
- t.equal(bl.get(7), 104)
- t.equal(bl.get(8), 105)
- t.equal(bl.get(9), 106)
- t.end()
-})
-
-tape('multi bytes from single buffer', function (t) {
- var bl = new BufferList()
- bl.append(new Buffer('abcd'))
-
- t.equal(bl.length, 4)
-
- t.equal(bl.slice(0, 4).toString('ascii'), 'abcd')
- t.equal(bl.slice(0, 3).toString('ascii'), 'abc')
- t.equal(bl.slice(1, 4).toString('ascii'), 'bcd')
- t.equal(bl.slice(-4, -1).toString('ascii'), 'abc')
-
- t.end()
-})
-
-tape('multi bytes from single buffer (negative indexes)', function (t) {
- var bl = new BufferList()
- bl.append(new Buffer('buffer'))
-
- t.equal(bl.length, 6)
-
- t.equal(bl.slice(-6, -1).toString('ascii'), 'buffe')
- t.equal(bl.slice(-6, -2).toString('ascii'), 'buff')
- t.equal(bl.slice(-5, -2).toString('ascii'), 'uff')
-
- t.end()
-})
-
-tape('multiple bytes from multiple buffers', function (t) {
- var bl = new BufferList()
-
- bl.append(new Buffer('abcd'))
- bl.append(new Buffer('efg'))
- bl.append(new Buffer('hi'))
- bl.append(new Buffer('j'))
-
- t.equal(bl.length, 10)
-
- t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
- t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
- t.equal(bl.slice(3, 6).toString('ascii'), 'def')
- t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
- t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
- t.equal(bl.slice(-7, -4).toString('ascii'), 'def')
-
- t.end()
-})
-
-tape('multiple bytes from multiple buffer lists', function (t) {
- var bl = new BufferList()
-
- bl.append(new BufferList([ new Buffer('abcd'), new Buffer('efg') ]))
- bl.append(new BufferList([ new Buffer('hi'), new Buffer('j') ]))
-
- t.equal(bl.length, 10)
-
- t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
-
- t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
- t.equal(bl.slice(3, 6).toString('ascii'), 'def')
- t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
- t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
-
- t.end()
-})
-
-// same data as previous test, just using nested constructors
-tape('multiple bytes from crazy nested buffer lists', function (t) {
- var bl = new BufferList()
-
- bl.append(new BufferList([
- new BufferList([
- new BufferList(new Buffer('abc'))
- , new Buffer('d')
- , new BufferList(new Buffer('efg'))
- ])
- , new BufferList([ new Buffer('hi') ])
- , new BufferList(new Buffer('j'))
- ]))
-
- t.equal(bl.length, 10)
-
- t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
-
- t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
- t.equal(bl.slice(3, 6).toString('ascii'), 'def')
- t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
- t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
-
- t.end()
-})
-
-tape('append accepts arrays of Buffers', function (t) {
- var bl = new BufferList()
- bl.append(new Buffer('abc'))
- bl.append([ new Buffer('def') ])
- bl.append([ new Buffer('ghi'), new Buffer('jkl') ])
- bl.append([ new Buffer('mnop'), new Buffer('qrstu'), new Buffer('vwxyz') ])
- t.equal(bl.length, 26)
- t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
- t.end()
-})
-
-tape('append accepts arrays of BufferLists', function (t) {
- var bl = new BufferList()
- bl.append(new Buffer('abc'))
- bl.append([ new BufferList('def') ])
- bl.append(new BufferList([ new Buffer('ghi'), new BufferList('jkl') ]))
- bl.append([ new Buffer('mnop'), new BufferList([ new Buffer('qrstu'), new Buffer('vwxyz') ]) ])
- t.equal(bl.length, 26)
- t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
- t.end()
-})
-
-tape('append chainable', function (t) {
- var bl = new BufferList()
- t.ok(bl.append(new Buffer('abcd')) === bl)
- t.ok(bl.append([ new Buffer('abcd') ]) === bl)
- t.ok(bl.append(new BufferList(new Buffer('abcd'))) === bl)
- t.ok(bl.append([ new BufferList(new Buffer('abcd')) ]) === bl)
- t.end()
-})
-
-tape('append chainable (test results)', function (t) {
- var bl = new BufferList('abc')
- .append([ new BufferList('def') ])
- .append(new BufferList([ new Buffer('ghi'), new BufferList('jkl') ]))
- .append([ new Buffer('mnop'), new BufferList([ new Buffer('qrstu'), new Buffer('vwxyz') ]) ])
-
- t.equal(bl.length, 26)
- t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
- t.end()
-})
-
-tape('consuming from multiple buffers', function (t) {
- var bl = new BufferList()
-
- bl.append(new Buffer('abcd'))
- bl.append(new Buffer('efg'))
- bl.append(new Buffer('hi'))
- bl.append(new Buffer('j'))
-
- t.equal(bl.length, 10)
-
- t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
-
- bl.consume(3)
- t.equal(bl.length, 7)
- t.equal(bl.slice(0, 7).toString('ascii'), 'defghij')
-
- bl.consume(2)
- t.equal(bl.length, 5)
- t.equal(bl.slice(0, 5).toString('ascii'), 'fghij')
-
- bl.consume(1)
- t.equal(bl.length, 4)
- t.equal(bl.slice(0, 4).toString('ascii'), 'ghij')
-
- bl.consume(1)
- t.equal(bl.length, 3)
- t.equal(bl.slice(0, 3).toString('ascii'), 'hij')
-
- bl.consume(2)
- t.equal(bl.length, 1)
- t.equal(bl.slice(0, 1).toString('ascii'), 'j')
-
- t.end()
-})
-
-tape('complete consumption', function (t) {
- var bl = new BufferList()
-
- bl.append(new Buffer('a'))
- bl.append(new Buffer('b'))
-
- bl.consume(2)
-
- t.equal(bl.length, 0)
- t.equal(bl._bufs.length, 0)
-
- t.end()
-})
-
-tape('test readUInt8 / readInt8', function (t) {
- var buf1 = new Buffer(1)
- , buf2 = new Buffer(3)
- , buf3 = new Buffer(3)
- , bl = new BufferList()
-
- buf2[1] = 0x3
- buf2[2] = 0x4
- buf3[0] = 0x23
- buf3[1] = 0x42
-
- bl.append(buf1)
- bl.append(buf2)
- bl.append(buf3)
-
- t.equal(bl.readUInt8(2), 0x3)
- t.equal(bl.readInt8(2), 0x3)
- t.equal(bl.readUInt8(3), 0x4)
- t.equal(bl.readInt8(3), 0x4)
- t.equal(bl.readUInt8(4), 0x23)
- t.equal(bl.readInt8(4), 0x23)
- t.equal(bl.readUInt8(5), 0x42)
- t.equal(bl.readInt8(5), 0x42)
- t.end()
-})
-
-tape('test readUInt16LE / readUInt16BE / readInt16LE / readInt16BE', function (t) {
- var buf1 = new Buffer(1)
- , buf2 = new Buffer(3)
- , buf3 = new Buffer(3)
- , bl = new BufferList()
-
- buf2[1] = 0x3
- buf2[2] = 0x4
- buf3[0] = 0x23
- buf3[1] = 0x42
-
- bl.append(buf1)
- bl.append(buf2)
- bl.append(buf3)
-
- t.equal(bl.readUInt16BE(2), 0x0304)
- t.equal(bl.readUInt16LE(2), 0x0403)
- t.equal(bl.readInt16BE(2), 0x0304)
- t.equal(bl.readInt16LE(2), 0x0403)
- t.equal(bl.readUInt16BE(3), 0x0423)
- t.equal(bl.readUInt16LE(3), 0x2304)
- t.equal(bl.readInt16BE(3), 0x0423)
- t.equal(bl.readInt16LE(3), 0x2304)
- t.equal(bl.readUInt16BE(4), 0x2342)
- t.equal(bl.readUInt16LE(4), 0x4223)
- t.equal(bl.readInt16BE(4), 0x2342)
- t.equal(bl.readInt16LE(4), 0x4223)
- t.end()
-})
-
-tape('test readUInt32LE / readUInt32BE / readInt32LE / readInt32BE', function (t) {
- var buf1 = new Buffer(1)
- , buf2 = new Buffer(3)
- , buf3 = new Buffer(3)
- , bl = new BufferList()
-
- buf2[1] = 0x3
- buf2[2] = 0x4
- buf3[0] = 0x23
- buf3[1] = 0x42
-
- bl.append(buf1)
- bl.append(buf2)
- bl.append(buf3)
-
- t.equal(bl.readUInt32BE(2), 0x03042342)
- t.equal(bl.readUInt32LE(2), 0x42230403)
- t.equal(bl.readInt32BE(2), 0x03042342)
- t.equal(bl.readInt32LE(2), 0x42230403)
- t.end()
-})
-
-tape('test readFloatLE / readFloatBE', function (t) {
- var buf1 = new Buffer(1)
- , buf2 = new Buffer(3)
- , buf3 = new Buffer(3)
- , bl = new BufferList()
-
- buf2[1] = 0x00
- buf2[2] = 0x00
- buf3[0] = 0x80
- buf3[1] = 0x3f
-
- bl.append(buf1)
- bl.append(buf2)
- bl.append(buf3)
-
- t.equal(bl.readFloatLE(2), 0x01)
- t.end()
-})
-
-tape('test readDoubleLE / readDoubleBE', function (t) {
- var buf1 = new Buffer(1)
- , buf2 = new Buffer(3)
- , buf3 = new Buffer(10)
- , bl = new BufferList()
-
- buf2[1] = 0x55
- buf2[2] = 0x55
- buf3[0] = 0x55
- buf3[1] = 0x55
- buf3[2] = 0x55
- buf3[3] = 0x55
- buf3[4] = 0xd5
- buf3[5] = 0x3f
-
- bl.append(buf1)
- bl.append(buf2)
- bl.append(buf3)
-
- t.equal(bl.readDoubleLE(2), 0.3333333333333333)
- t.end()
-})
-
-tape('test toString', function (t) {
- var bl = new BufferList()
-
- bl.append(new Buffer('abcd'))
- bl.append(new Buffer('efg'))
- bl.append(new Buffer('hi'))
- bl.append(new Buffer('j'))
-
- t.equal(bl.toString('ascii', 0, 10), 'abcdefghij')
- t.equal(bl.toString('ascii', 3, 10), 'defghij')
- t.equal(bl.toString('ascii', 3, 6), 'def')
- t.equal(bl.toString('ascii', 3, 8), 'defgh')
- t.equal(bl.toString('ascii', 5, 10), 'fghij')
-
- t.end()
-})
-
-tape('test toString encoding', function (t) {
- var bl = new BufferList()
- , b = new Buffer('abcdefghij\xff\x00')
-
- bl.append(new Buffer('abcd'))
- bl.append(new Buffer('efg'))
- bl.append(new Buffer('hi'))
- bl.append(new Buffer('j'))
- bl.append(new Buffer('\xff\x00'))
-
- encodings.forEach(function (enc) {
- t.equal(bl.toString(enc), b.toString(enc), enc)
- })
-
- t.end()
-})
-
-!process.browser && tape('test stream', function (t) {
- var random = crypto.randomBytes(65534)
- , rndhash = hash(random, 'md5')
- , md5sum = crypto.createHash('md5')
- , bl = new BufferList(function (err, buf) {
- t.ok(Buffer.isBuffer(buf))
- t.ok(err === null)
- t.equal(rndhash, hash(bl.slice(), 'md5'))
- t.equal(rndhash, hash(buf, 'md5'))
-
- bl.pipe(fs.createWriteStream('/tmp/bl_test_rnd_out.dat'))
- .on('close', function () {
- var s = fs.createReadStream('/tmp/bl_test_rnd_out.dat')
- s.on('data', md5sum.update.bind(md5sum))
- s.on('end', function() {
- t.equal(rndhash, md5sum.digest('hex'), 'woohoo! correct hash!')
- t.end()
- })
- })
-
- })
-
- fs.writeFileSync('/tmp/bl_test_rnd.dat', random)
- fs.createReadStream('/tmp/bl_test_rnd.dat').pipe(bl)
-})
-
-tape('instantiation with Buffer', function (t) {
- var buf = crypto.randomBytes(1024)
- , buf2 = crypto.randomBytes(1024)
- , b = BufferList(buf)
-
- t.equal(buf.toString('hex'), b.slice().toString('hex'), 'same buffer')
- b = BufferList([ buf, buf2 ])
- t.equal(b.slice().toString('hex'), Buffer.concat([ buf, buf2 ]).toString('hex'), 'same buffer')
- t.end()
-})
-
-tape('test String appendage', function (t) {
- var bl = new BufferList()
- , b = new Buffer('abcdefghij\xff\x00')
-
- bl.append('abcd')
- bl.append('efg')
- bl.append('hi')
- bl.append('j')
- bl.append('\xff\x00')
-
- encodings.forEach(function (enc) {
- t.equal(bl.toString(enc), b.toString(enc))
- })
-
- t.end()
-})
-
-tape('test Number appendage', function (t) {
- var bl = new BufferList()
- , b = new Buffer('1234567890')
-
- bl.append(1234)
- bl.append(567)
- bl.append(89)
- bl.append(0)
-
- encodings.forEach(function (enc) {
- t.equal(bl.toString(enc), b.toString(enc))
- })
-
- t.end()
-})
-
-tape('write nothing, should get empty buffer', function (t) {
- t.plan(3)
- BufferList(function (err, data) {
- t.notOk(err, 'no error')
- t.ok(Buffer.isBuffer(data), 'got a buffer')
- t.equal(0, data.length, 'got a zero-length buffer')
- t.end()
- }).end()
-})
-
-tape('unicode string', function (t) {
- t.plan(2)
- var inp1 = '\u2600'
- , inp2 = '\u2603'
- , exp = inp1 + ' and ' + inp2
- , bl = BufferList()
- bl.write(inp1)
- bl.write(' and ')
- bl.write(inp2)
- t.equal(exp, bl.toString())
- t.equal(new Buffer(exp).toString('hex'), bl.toString('hex'))
-})
-
-tape('should emit finish', function (t) {
- var source = BufferList()
- , dest = BufferList()
-
- source.write('hello')
- source.pipe(dest)
-
- dest.on('finish', function () {
- t.equal(dest.toString('utf8'), 'hello')
- t.end()
- })
-})
-
-tape('basic copy', function (t) {
- var buf = crypto.randomBytes(1024)
- , buf2 = new Buffer(1024)
- , b = BufferList(buf)
-
- b.copy(buf2)
- t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer')
- t.end()
-})
-
-tape('copy after many appends', function (t) {
- var buf = crypto.randomBytes(512)
- , buf2 = new Buffer(1024)
- , b = BufferList(buf)
-
- b.append(buf)
- b.copy(buf2)
- t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer')
- t.end()
-})
-
-tape('copy at a precise position', function (t) {
- var buf = crypto.randomBytes(1004)
- , buf2 = new Buffer(1024)
- , b = BufferList(buf)
-
- b.copy(buf2, 20)
- t.equal(b.slice().toString('hex'), buf2.slice(20).toString('hex'), 'same buffer')
- t.end()
-})
-
-tape('copy starting from a precise location', function (t) {
- var buf = crypto.randomBytes(10)
- , buf2 = new Buffer(5)
- , b = BufferList(buf)
-
- b.copy(buf2, 0, 5)
- t.equal(b.slice(5).toString('hex'), buf2.toString('hex'), 'same buffer')
- t.end()
-})
-
-tape('copy in an interval', function (t) {
- var rnd = crypto.randomBytes(10)
- , b = BufferList(rnd) // put the random bytes there
- , actual = new Buffer(3)
- , expected = new Buffer(3)
-
- rnd.copy(expected, 0, 5, 8)
- b.copy(actual, 0, 5, 8)
-
- t.equal(actual.toString('hex'), expected.toString('hex'), 'same buffer')
- t.end()
-})
-
-tape('copy an interval between two buffers', function (t) {
- var buf = crypto.randomBytes(10)
- , buf2 = new Buffer(10)
- , b = BufferList(buf)
-
- b.append(buf)
- b.copy(buf2, 0, 5, 15)
-
- t.equal(b.slice(5, 15).toString('hex'), buf2.toString('hex'), 'same buffer')
- t.end()
-})
-
-tape('shallow slice across buffer boundaries', function (t) {
- var bl = new BufferList(['First', 'Second', 'Third'])
-
- t.equal(bl.shallowSlice(3, 13).toString(), 'stSecondTh')
- t.end()
-})
-
-tape('shallow slice within single buffer', function (t) {
- t.plan(2)
- var bl = new BufferList(['First', 'Second', 'Third'])
-
- t.equal(bl.shallowSlice(5, 10).toString(), 'Secon')
- t.equal(bl.shallowSlice(7, 10).toString(), 'con')
- t.end()
-})
-
-tape('shallow slice single buffer', function (t) {
- t.plan(3)
- var bl = new BufferList(['First', 'Second', 'Third'])
-
- t.equal(bl.shallowSlice(0, 5).toString(), 'First')
- t.equal(bl.shallowSlice(5, 11).toString(), 'Second')
- t.equal(bl.shallowSlice(11, 16).toString(), 'Third')
-})
-
-tape('shallow slice with negative or omitted indices', function (t) {
- t.plan(4)
- var bl = new BufferList(['First', 'Second', 'Third'])
-
- t.equal(bl.shallowSlice().toString(), 'FirstSecondThird')
- t.equal(bl.shallowSlice(5).toString(), 'SecondThird')
- t.equal(bl.shallowSlice(5, -3).toString(), 'SecondTh')
- t.equal(bl.shallowSlice(-8).toString(), 'ondThird')
-})
-
-tape('shallow slice does not make a copy', function (t) {
- t.plan(1)
- var buffers = [new Buffer('First'), new Buffer('Second'), new Buffer('Third')]
- var bl = (new BufferList(buffers)).shallowSlice(5, -3)
-
- buffers[1].fill('h')
- buffers[2].fill('h')
-
- t.equal(bl.toString(), 'hhhhhhhh')
-})
-
-tape('duplicate', function (t) {
- t.plan(2)
-
- var bl = new BufferList('abcdefghij\xff\x00')
- , dup = bl.duplicate()
-
- t.equal(bl.prototype, dup.prototype)
- t.equal(bl.toString('hex'), dup.toString('hex'))
-})
-
-tape('destroy no pipe', function (t) {
- t.plan(2)
-
- var bl = new BufferList('alsdkfja;lsdkfja;lsdk')
- bl.destroy()
-
- t.equal(bl._bufs.length, 0)
- t.equal(bl.length, 0)
-})
-
-!process.browser && tape('destroy with pipe before read end', function (t) {
- t.plan(2)
-
- var bl = new BufferList()
- fs.createReadStream(__dirname + '/test.js')
- .pipe(bl)
-
- bl.destroy()
-
- t.equal(bl._bufs.length, 0)
- t.equal(bl.length, 0)
-
-})
-
-!process.browser && tape('destroy with pipe before read end with race', function (t) {
- t.plan(2)
-
- var bl = new BufferList()
- fs.createReadStream(__dirname + '/test.js')
- .pipe(bl)
-
- setTimeout(function () {
- bl.destroy()
- setTimeout(function () {
- t.equal(bl._bufs.length, 0)
- t.equal(bl.length, 0)
- }, 500)
- }, 500)
-})
-
-!process.browser && tape('destroy with pipe after read end', function (t) {
- t.plan(2)
-
- var bl = new BufferList()
- fs.createReadStream(__dirname + '/test.js')
- .on('end', onEnd)
- .pipe(bl)
-
- function onEnd () {
- bl.destroy()
-
- t.equal(bl._bufs.length, 0)
- t.equal(bl.length, 0)
- }
-})
-
-!process.browser && tape('destroy with pipe while writing to a destination', function (t) {
- t.plan(4)
-
- var bl = new BufferList()
- , ds = new BufferList()
-
- fs.createReadStream(__dirname + '/test.js')
- .on('end', onEnd)
- .pipe(bl)
-
- function onEnd () {
- bl.pipe(ds)
-
- setTimeout(function () {
- bl.destroy()
-
- t.equals(bl._bufs.length, 0)
- t.equals(bl.length, 0)
-
- ds.destroy()
-
- t.equals(bl._bufs.length, 0)
- t.equals(bl.length, 0)
-
- }, 100)
- }
-})
-
-!process.browser && tape('handle error', function (t) {
- t.plan(2)
- fs.createReadStream('/does/not/exist').pipe(BufferList(function (err, data) {
- t.ok(err instanceof Error, 'has error')
- t.notOk(data, 'no data')
- }))
-})
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/LICENSE b/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/LICENSE
deleted file mode 100644
index 757562ec5..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014 Mathias Buus
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE. \ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/README.md b/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/README.md
deleted file mode 100644
index f2560c939..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/README.md
+++ /dev/null
@@ -1,52 +0,0 @@
-# end-of-stream
-
-A node module that calls a callback when a readable/writable/duplex stream has completed or failed.
-
- npm install end-of-stream
-
-## Usage
-
-Simply pass a stream and a callback to `eos`.
-Legacy streams, streams2 and streams3 are all supported.
-
-``` js
-var eos = require('end-of-stream');
-
-eos(readableStream, function(err) {
- // this will be set to the stream instance
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has ended', this === readableStream);
-});
-
-eos(writableStream, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has finished', this === writableStream);
-});
-
-eos(duplexStream, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has ended and finished', this === duplexStream);
-});
-
-eos(duplexStream, {readable:false}, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has finished but might still be readable');
-});
-
-eos(duplexStream, {writable:false}, function(err) {
- if (err) return console.log('stream had an error or closed early');
- console.log('stream has ended but might still be writable');
-});
-
-eos(readableStream, {error:false}, function(err) {
- // do not treat emit('error', err) as an end-of-stream
-});
-```
-
-## License
-
-MIT
-
-## Related
-
-`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one.
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/index.js b/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/index.js
deleted file mode 100644
index b3a906863..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/index.js
+++ /dev/null
@@ -1,83 +0,0 @@
-var once = require('once');
-
-var noop = function() {};
-
-var isRequest = function(stream) {
- return stream.setHeader && typeof stream.abort === 'function';
-};
-
-var isChildProcess = function(stream) {
- return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3;
-};
-
-var eos = function(stream, opts, callback) {
- if (typeof opts === 'function') return eos(stream, null, opts);
- if (!opts) opts = {};
-
- callback = once(callback || noop);
-
- var ws = stream._writableState;
- var rs = stream._readableState;
- var readable = opts.readable || (opts.readable !== false && stream.readable);
- var writable = opts.writable || (opts.writable !== false && stream.writable);
-
- var onlegacyfinish = function() {
- if (!stream.writable) onfinish();
- };
-
- var onfinish = function() {
- writable = false;
- if (!readable) callback.call(stream);
- };
-
- var onend = function() {
- readable = false;
- if (!writable) callback.call(stream);
- };
-
- var onexit = function(exitCode) {
- callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
- };
-
- var onclose = function() {
- if (readable && !(rs && rs.ended)) return callback.call(stream, new Error('premature close'));
- if (writable && !(ws && ws.ended)) return callback.call(stream, new Error('premature close'));
- };
-
- var onrequest = function() {
- stream.req.on('finish', onfinish);
- };
-
- if (isRequest(stream)) {
- stream.on('complete', onfinish);
- stream.on('abort', onclose);
- if (stream.req) onrequest();
- else stream.on('request', onrequest);
- } else if (writable && !ws) { // legacy streams
- stream.on('end', onlegacyfinish);
- stream.on('close', onlegacyfinish);
- }
-
- if (isChildProcess(stream)) stream.on('exit', onexit);
-
- stream.on('end', onend);
- stream.on('finish', onfinish);
- if (opts.error !== false) stream.on('error', callback);
- stream.on('close', onclose);
-
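- // calling the returned function detaches every listener attached above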
- return function() {
- stream.removeListener('complete', onfinish);
- stream.removeListener('abort', onclose);
- stream.removeListener('request', onrequest);
- if (stream.req) stream.req.removeListener('finish', onfinish);
- stream.removeListener('end', onlegacyfinish);
- stream.removeListener('close', onlegacyfinish);
- stream.removeListener('finish', onfinish);
- stream.removeListener('exit', onexit);
- stream.removeListener('end', onend);
- stream.removeListener('error', callback);
- stream.removeListener('close', onclose);
- };
-};
-
-module.exports = eos;
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/package.json b/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/package.json
deleted file mode 100644
index 5ed019ae4..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/end-of-stream/package.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "_from": "end-of-stream@^1.0.0",
- "_id": "end-of-stream@1.4.0",
- "_inBundle": false,
- "_integrity": "sha1-epDYM+/abPpurA9JSduw+tOmMgY=",
- "_location": "/pacote/tar-stream/end-of-stream",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "end-of-stream@^1.0.0",
- "name": "end-of-stream",
- "escapedName": "end-of-stream",
- "rawSpec": "^1.0.0",
- "saveSpec": null,
- "fetchSpec": "^1.0.0"
- },
- "_requiredBy": [
- "/pacote/tar-stream"
- ],
- "_resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.0.tgz",
- "_shasum": "7a90d833efda6cfa6eac0f4949dbb0fad3a63206",
- "_spec": "end-of-stream@^1.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/tar-stream",
- "author": {
- "name": "Mathias Buus",
- "email": "mathiasbuus@gmail.com"
- },
- "bugs": {
- "url": "https://github.com/mafintosh/end-of-stream/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "once": "^1.4.0"
- },
- "deprecated": false,
- "description": "Call a callback when a readable/writable/duplex stream has completed or failed.",
- "files": [
- "index.js"
- ],
- "homepage": "https://github.com/mafintosh/end-of-stream",
- "keywords": [
- "stream",
- "streams",
- "callback",
- "finish",
- "close",
- "end",
- "wait"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "end-of-stream",
- "repository": {
- "type": "git",
- "url": "git://github.com/mafintosh/end-of-stream.git"
- },
- "scripts": {
- "test": "node test.js"
- },
- "version": "1.4.0"
-}
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/.npmignore b/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/.npmignore
deleted file mode 100644
index 3c3629e64..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/.npmignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/LICENCE b/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/LICENCE
deleted file mode 100644
index 1a14b437e..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/LICENCE
+++ /dev/null
@@ -1,19 +0,0 @@
-Copyright (c) 2012-2014 Raynos.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/Makefile b/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/Makefile
deleted file mode 100644
index d583fcf49..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/Makefile
+++ /dev/null
@@ -1,4 +0,0 @@
-browser:
- node ./support/compile
-
-.PHONY: browser \ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/README.md b/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/README.md
deleted file mode 100644
index 093cb2978..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/README.md
+++ /dev/null
@@ -1,32 +0,0 @@
-# xtend
-
-[![browser support][3]][4]
-
-[![locked](http://badges.github.io/stability-badges/dist/locked.svg)](http://github.com/badges/stability-badges)
-
-Extend like a boss
-
-xtend is a basic utility library which allows you to extend an object by appending all of the properties from each object in a list. When there are identical properties, the right-most property takes precedence.
-
-## Examples
-
-```js
-var extend = require("xtend")
-
-// extend returns a new object. Does not mutate arguments
-var combination = extend({
- a: "a",
- b: 'c'
-}, {
- b: "b"
-})
-// { a: "a", b: "b" }
-```
-
-## Stability status: Locked
-
-## MIT Licenced
-
-
- [3]: http://ci.testling.com/Raynos/xtend.png
- [4]: http://ci.testling.com/Raynos/xtend
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/immutable.js b/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/immutable.js
deleted file mode 100644
index 94889c9de..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/immutable.js
+++ /dev/null
@@ -1,19 +0,0 @@
-module.exports = extend
-
-var hasOwnProperty = Object.prototype.hasOwnProperty;
-
-function extend() {
- var target = {}
-
- for (var i = 0; i < arguments.length; i++) {
- var source = arguments[i]
-
- for (var key in source) {
- if (hasOwnProperty.call(source, key)) {
- target[key] = source[key]
- }
- }
- }
-
- return target
-}
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/mutable.js b/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/mutable.js
deleted file mode 100644
index 72debede6..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/mutable.js
+++ /dev/null
@@ -1,17 +0,0 @@
-module.exports = extend
-
-var hasOwnProperty = Object.prototype.hasOwnProperty;
-
-function extend(target) {
- for (var i = 1; i < arguments.length; i++) {
- var source = arguments[i]
-
- for (var key in source) {
- if (hasOwnProperty.call(source, key)) {
- target[key] = source[key]
- }
- }
- }
-
- return target
-}
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/package.json b/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/package.json
deleted file mode 100644
index 7c38a0edd..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/package.json
+++ /dev/null
@@ -1,86 +0,0 @@
-{
- "_from": "xtend@^4.0.0",
- "_id": "xtend@4.0.1",
- "_inBundle": false,
- "_integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=",
- "_location": "/pacote/tar-stream/xtend",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "xtend@^4.0.0",
- "name": "xtend",
- "escapedName": "xtend",
- "rawSpec": "^4.0.0",
- "saveSpec": null,
- "fetchSpec": "^4.0.0"
- },
- "_requiredBy": [
- "/pacote/tar-stream"
- ],
- "_resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz",
- "_shasum": "a5c6d532be656e23db820efb943a1f04998d63af",
- "_spec": "xtend@^4.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote/node_modules/tar-stream",
- "author": {
- "name": "Raynos",
- "email": "raynos2@gmail.com"
- },
- "bugs": {
- "url": "https://github.com/Raynos/xtend/issues",
- "email": "raynos2@gmail.com"
- },
- "bundleDependencies": false,
- "contributors": [
- {
- "name": "Jake Verbaten"
- },
- {
- "name": "Matt Esch"
- }
- ],
- "dependencies": {},
- "deprecated": false,
- "description": "extend like a boss",
- "devDependencies": {
- "tape": "~1.1.0"
- },
- "engines": {
- "node": ">=0.4"
- },
- "homepage": "https://github.com/Raynos/xtend",
- "keywords": [
- "extend",
- "merge",
- "options",
- "opts",
- "object",
- "array"
- ],
- "license": "MIT",
- "main": "immutable",
- "name": "xtend",
- "repository": {
- "type": "git",
- "url": "git://github.com/Raynos/xtend.git"
- },
- "scripts": {
- "test": "node test"
- },
- "testling": {
- "files": "test.js",
- "browsers": [
- "ie/7..latest",
- "firefox/16..latest",
- "firefox/nightly",
- "chrome/22..latest",
- "chrome/canary",
- "opera/12..latest",
- "opera/next",
- "safari/5.1..latest",
- "ipad/6.0..latest",
- "iphone/6.0..latest"
- ]
- },
- "version": "4.0.1"
-}
diff --git a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/test.js b/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/test.js
deleted file mode 100644
index 093a2b061..000000000
--- a/node_modules/pacote/node_modules/tar-stream/node_modules/xtend/test.js
+++ /dev/null
@@ -1,83 +0,0 @@
-var test = require("tape")
-var extend = require("./")
-var mutableExtend = require("./mutable")
-
-test("merge", function(assert) {
- var a = { a: "foo" }
- var b = { b: "bar" }
-
- assert.deepEqual(extend(a, b), { a: "foo", b: "bar" })
- assert.end()
-})
-
-test("replace", function(assert) {
- var a = { a: "foo" }
- var b = { a: "bar" }
-
- assert.deepEqual(extend(a, b), { a: "bar" })
- assert.end()
-})
-
-test("undefined", function(assert) {
- var a = { a: undefined }
- var b = { b: "foo" }
-
- assert.deepEqual(extend(a, b), { a: undefined, b: "foo" })
- assert.deepEqual(extend(b, a), { a: undefined, b: "foo" })
- assert.end()
-})
-
-test("handle 0", function(assert) {
- var a = { a: "default" }
- var b = { a: 0 }
-
- assert.deepEqual(extend(a, b), { a: 0 })
- assert.deepEqual(extend(b, a), { a: "default" })
- assert.end()
-})
-
-test("is immutable", function (assert) {
- var record = {}
-
- extend(record, { foo: "bar" })
- assert.equal(record.foo, undefined)
- assert.end()
-})
-
-test("null as argument", function (assert) {
- var a = { foo: "bar" }
- var b = null
- var c = void 0
-
- assert.deepEqual(extend(b, a, c), { foo: "bar" })
- assert.end()
-})
-
-test("mutable", function (assert) {
- var a = { foo: "bar" }
-
- mutableExtend(a, { bar: "baz" })
-
- assert.equal(a.bar, "baz")
- assert.end()
-})
-
-test("null prototype", function(assert) {
- var a = { a: "foo" }
- var b = Object.create(null)
- b.b = "bar";
-
- assert.deepEqual(extend(a, b), { a: "foo", b: "bar" })
- assert.end()
-})
-
-test("null prototype mutable", function (assert) {
- var a = { foo: "bar" }
- var b = Object.create(null)
- b.bar = "baz";
-
- mutableExtend(a, b)
-
- assert.equal(a.bar, "baz")
- assert.end()
-})
diff --git a/node_modules/pacote/node_modules/tar-stream/pack.js b/node_modules/pacote/node_modules/tar-stream/pack.js
deleted file mode 100644
index 025f00713..000000000
--- a/node_modules/pacote/node_modules/tar-stream/pack.js
+++ /dev/null
@@ -1,254 +0,0 @@
-var constants = require('constants')
-var eos = require('end-of-stream')
-var util = require('util')
-
-var Readable = require('readable-stream').Readable
-var Writable = require('readable-stream').Writable
-var StringDecoder = require('string_decoder').StringDecoder
-
-var headers = require('./headers')
-
-var DMODE = parseInt('755', 8)
-var FMODE = parseInt('644', 8)
-
-var END_OF_TAR = new Buffer(1024)
-END_OF_TAR.fill(0)
-
-var noop = function () {}
-
-var overflow = function (self, size) {
- size &= 511
- if (size) self.push(END_OF_TAR.slice(0, 512 - size))
-}
-
-function modeToType (mode) {
- switch (mode & constants.S_IFMT) {
- case constants.S_IFBLK: return 'block-device'
- case constants.S_IFCHR: return 'character-device'
- case constants.S_IFDIR: return 'directory'
- case constants.S_IFIFO: return 'fifo'
- case constants.S_IFLNK: return 'symlink'
- }
-
- return 'file'
-}
-
-var Sink = function (to) {
- Writable.call(this)
- this.written = 0
- this._to = to
- this._destroyed = false
-}
-
-util.inherits(Sink, Writable)
-
-Sink.prototype._write = function (data, enc, cb) {
- this.written += data.length
- if (this._to.push(data)) return cb()
- this._to._drain = cb
-}
-
-Sink.prototype.destroy = function () {
- if (this._destroyed) return
- this._destroyed = true
- this.emit('close')
-}
-
-var LinkSink = function () {
- Writable.call(this)
- this.linkname = ''
- this._decoder = new StringDecoder('utf-8')
- this._destroyed = false
-}
-
-util.inherits(LinkSink, Writable)
-
-LinkSink.prototype._write = function (data, enc, cb) {
- this.linkname += this._decoder.write(data)
- cb()
-}
-
-LinkSink.prototype.destroy = function () {
- if (this._destroyed) return
- this._destroyed = true
- this.emit('close')
-}
-
-var Void = function () {
- Writable.call(this)
- this._destroyed = false
-}
-
-util.inherits(Void, Writable)
-
-Void.prototype._write = function (data, enc, cb) {
- cb(new Error('No body allowed for this entry'))
-}
-
-Void.prototype.destroy = function () {
- if (this._destroyed) return
- this._destroyed = true
- this.emit('close')
-}
-
-var Pack = function (opts) {
- if (!(this instanceof Pack)) return new Pack(opts)
- Readable.call(this, opts)
-
- this._drain = noop
- this._finalized = false
- this._finalizing = false
- this._destroyed = false
- this._stream = null
-}
-
-util.inherits(Pack, Readable)
-
-Pack.prototype.entry = function (header, buffer, callback) {
- if (this._stream) throw new Error('already piping an entry')
- if (this._finalized || this._destroyed) return
-
- if (typeof buffer === 'function') {
- callback = buffer
- buffer = null
- }
-
- if (!callback) callback = noop
-
- var self = this
-
- if (!header.size || header.type === 'symlink') header.size = 0
- if (!header.type) header.type = modeToType(header.mode)
- if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE
- if (!header.uid) header.uid = 0
- if (!header.gid) header.gid = 0
- if (!header.mtime) header.mtime = new Date()
-
- if (typeof buffer === 'string') buffer = new Buffer(buffer)
- if (Buffer.isBuffer(buffer)) {
- header.size = buffer.length
- this._encode(header)
- this.push(buffer)
- overflow(self, header.size)
- process.nextTick(callback)
- return new Void()
- }
-
- if (header.type === 'symlink' && !header.linkname) {
- var linkSink = new LinkSink()
- eos(linkSink, function (err) {
- if (err) { // stream was closed
- self.destroy()
- return callback(err)
- }
-
- header.linkname = linkSink.linkname
- self._encode(header)
- callback()
- })
-
- return linkSink
- }
-
- this._encode(header)
-
- if (header.type !== 'file' && header.type !== 'contiguous-file') {
- process.nextTick(callback)
- return new Void()
- }
-
- var sink = new Sink(this)
-
- this._stream = sink
-
- eos(sink, function (err) {
- self._stream = null
-
- if (err) { // stream was closed
- self.destroy()
- return callback(err)
- }
-
- if (sink.written !== header.size) { // corrupting tar
- self.destroy()
- return callback(new Error('size mismatch'))
- }
-
- overflow(self, header.size)
- if (self._finalizing) self.finalize()
- callback()
- })
-
- return sink
-}
-
-Pack.prototype.finalize = function () {
- if (this._stream) {
- this._finalizing = true
- return
- }
-
- if (this._finalized) return
- this._finalized = true
- this.push(END_OF_TAR)
- this.push(null)
-}
-
-Pack.prototype.destroy = function (err) {
- if (this._destroyed) return
- this._destroyed = true
-
- if (err) this.emit('error', err)
- this.emit('close')
- if (this._stream && this._stream.destroy) this._stream.destroy()
-}
-
-Pack.prototype._encode = function (header) {
- if (!header.pax) {
- var buf = headers.encode(header)
- if (buf) {
- this.push(buf)
- return
- }
- }
- this._encodePax(header)
-}
-
-Pack.prototype._encodePax = function (header) {
- var paxHeader = headers.encodePax({
- name: header.name,
- linkname: header.linkname,
- pax: header.pax
- })
-
- var newHeader = {
- name: 'PaxHeader',
- mode: header.mode,
- uid: header.uid,
- gid: header.gid,
- size: paxHeader.length,
- mtime: header.mtime,
- type: 'pax-header',
- linkname: header.linkname && 'PaxHeader',
- uname: header.uname,
- gname: header.gname,
- devmajor: header.devmajor,
- devminor: header.devminor
- }
-
- this.push(headers.encode(newHeader))
- this.push(paxHeader)
- overflow(this, paxHeader.length)
-
- newHeader.size = header.size
- newHeader.type = header.type
- this.push(headers.encode(newHeader))
-}
-
-Pack.prototype._read = function (n) {
- var drain = this._drain
- this._drain = noop
- drain()
-}
-
-module.exports = Pack
diff --git a/node_modules/pacote/node_modules/tar-stream/package.json b/node_modules/pacote/node_modules/tar-stream/package.json
deleted file mode 100644
index b1d243efe..000000000
--- a/node_modules/pacote/node_modules/tar-stream/package.json
+++ /dev/null
@@ -1,88 +0,0 @@
-{
- "_from": "tar-stream@^1.5.4",
- "_id": "tar-stream@1.5.4",
- "_inBundle": false,
- "_integrity": "sha1-NlSc8E7RrumyowwBQyUiONr5QBY=",
- "_location": "/pacote/tar-stream",
- "_phantomChildren": {
- "once": "1.4.0",
- "readable-stream": "2.3.3"
- },
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "tar-stream@^1.5.4",
- "name": "tar-stream",
- "escapedName": "tar-stream",
- "rawSpec": "^1.5.4",
- "saveSpec": null,
- "fetchSpec": "^1.5.4"
- },
- "_requiredBy": [
- "/pacote",
- "/pacote/tar-fs"
- ],
- "_resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.5.4.tgz",
- "_shasum": "36549cf04ed1aee9b2a30c0143252238daf94016",
- "_spec": "tar-stream@^1.5.4",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/pacote",
- "author": {
- "name": "Mathias Buus",
- "email": "mathiasbuus@gmail.com"
- },
- "bugs": {
- "url": "https://github.com/mafintosh/tar-stream/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "bl": "^1.0.0",
- "end-of-stream": "^1.0.0",
- "readable-stream": "^2.0.0",
- "xtend": "^4.0.0"
- },
- "deprecated": false,
- "description": "tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.",
- "devDependencies": {
- "concat-stream": "^1.4.6",
- "standard": "^5.3.1",
- "tape": "^3.0.3"
- },
- "directories": {
- "test": "test"
- },
- "engines": {
- "node": ">= 0.8.0"
- },
- "files": [
- "*.js",
- "LICENSE"
- ],
- "homepage": "https://github.com/mafintosh/tar-stream",
- "keywords": [
- "tar",
- "tarball",
- "parse",
- "parser",
- "generate",
- "generator",
- "stream",
- "stream2",
- "streams",
- "streams2",
- "streaming",
- "pack",
- "extract",
- "modify"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "tar-stream",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/mafintosh/tar-stream.git"
- },
- "scripts": {
- "test": "standard && tape test/*.js"
- },
- "version": "1.5.4"
-}
diff --git a/node_modules/pacote/node_modules/tar/LICENSE b/node_modules/pacote/node_modules/tar/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/tar/README.md b/node_modules/pacote/node_modules/tar/README.md
new file mode 100644
index 000000000..a356a78da
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/README.md
@@ -0,0 +1,883 @@
+# node-tar
+
+[![Build Status](https://travis-ci.org/npm/node-tar.svg?branch=master)](https://travis-ci.org/npm/node-tar)
+
+[Fast](./benchmarks) and full-featured Tar for Node.js
+
+The API is designed to mimic the behavior of `tar(1)` on unix systems.
+If you are familiar with how tar works, most of this will hopefully be
+straightforward for you. If not, then hopefully this module can teach
+you useful unix skills that may come in handy someday :)
+
+## Background
+
+A "tar file" or "tarball" is an archive of file system entries
+(directories, files, links, etc.) The name comes from "tape archive".
+If you run `man tar` on almost any Unix command line, you'll learn
+quite a bit about what it can do, and its history.
+
+Tar has 5 main top-level commands:
+
+* `c` Create an archive
+* `r` Replace entries within an archive
+* `u` Update entries within an archive (ie, replace if they're newer)
+* `t` List out the contents of an archive
+* `x` Extract an archive to disk
+
+The other flags and options modify how this top level function works.
+
+## High-Level API
+
+These 5 functions are the high-level API. All of them have a
+single-character name (for unix nerds familiar with `tar(1)`) as well
+as a long name (for everyone else).
+
+All the high-level functions take the following arguments, all three
+of which are optional and may be omitted.
+
+1. `options` - An optional object specifying various options
+2. `paths` - An array of paths to add or extract
+3. `callback` - Called when the command is completed, if async. (If
+ sync or no file specified, providing a callback throws a
+ `TypeError`.)
+
+If the command is sync (ie, if `options.sync=true`), then the
+callback is not allowed, since the action will be completed immediately.
+
+If a `file` argument is specified and the command is async, then a
+`Promise` is returned.  In this case, a callback may also be provided;
+it is called when the command is completed.
+
+If a `file` option is not specified, then a stream is returned. For
+`create`, this is a readable stream of the generated archive. For
+`list` and `extract` this is a writable stream that an archive should
+be written into. If a file is not specified, then a callback is not
+allowed, because you're already getting a stream to work with.
+
+`replace` and `update` only work on existing archives, and so require
+a `file` argument.
+
+Sync commands without a file argument return a stream that acts on its
+input immediately in the same tick. For readable streams, this means
+that all of the data is immediately available by calling
+`stream.read()`. For writable streams, it will be acted upon as soon
+as it is provided, but this can be at any time.
+
+### Warnings
+
+Some things cause tar to emit a warning, but should usually not cause
+the entire operation to fail. There are three ways to handle
+warnings:
+
+1. **Ignore them** (default) Invalid entries won't be put in the
+ archive, and invalid entries won't be unpacked. This is usually
+ fine, but can hide failures that you might care about.
+2. **Notice them** Add an `onwarn` function to the options, or listen
+ to the `'warn'` event on any tar stream. The function will get
+ called as `onwarn(message, data)`. Handle as appropriate.
+3. **Explode them.** Set `strict: true` in the options object, and
+ `warn` messages will be emitted as `'error'` events instead. If
+ there's no `error` handler, this causes the program to crash. If
+ used with a promise-returning/callback-taking method, then it'll
+   send the error to the promise/callback.  (Both approaches are
+   sketched below.)
+
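+For example, a minimal sketch of the second and third approaches,
+assuming a hypothetical `archive.tgz`:
+
+```js
+const tar = require('tar')
+
+// notice warnings without failing
+tar.x({
+  file: 'archive.tgz',
+  onwarn: (message, data) => console.error('tar warning:', message)
+})
+
+// strict mode: warnings become 'error' events / rejected promises
+tar.x({ file: 'archive.tgz', strict: true })
+  .catch(er => console.error('tar failed:', er))
+```
+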
+### Examples
+
+The API mimics the `tar(1)` command line functionality, with aliases
+for more human-readable option and function names. The goal is that
+if you know how to use `tar(1)` in Unix, then you know how to use
+`require('tar')` in JavaScript.
+
+To replicate `tar czf my-tarball.tgz files and folders`, you'd do:
+
+```js
+tar.c(
+ {
+ gzip: <true|gzip options>,
+ file: 'my-tarball.tgz'
+ },
+ ['some', 'files', 'and', 'folders']
+).then(_ => { .. tarball has been created .. })
+```
+
+To replicate `tar cz files and folders > my-tarball.tgz`, you'd do:
+
+```js
+tar.c( // or tar.create
+ {
+ gzip: <true|gzip options>
+ },
+ ['some', 'files', 'and', 'folders']
+).pipe(fs.createWriteStream('my-tarball.tgz'))
+```
+
+To replicate `tar xf my-tarball.tgz` you'd do:
+
+```js
+tar.x( // or tar.extract(
+ {
+ file: 'my-tarball.tgz'
+ }
+).then(_=> { .. tarball has been dumped in cwd .. })
+```
+
+To replicate `cat my-tarball.tgz | tar x -C some-dir --strip=1`:
+
+```js
+fs.createReadStream('my-tarball.tgz').pipe(
+ tar.x({
+ strip: 1,
+ C: 'some-dir' // alias for cwd:'some-dir', also ok
+ })
+)
+```
+
+To replicate `tar tf my-tarball.tgz`, do this:
+
+```js
+tar.t({
+ file: 'my-tarball.tgz',
+ onentry: entry => { .. do whatever with it .. }
+})
+```
+
+To replicate `cat my-tarball.tgz | tar t` do:
+
+```js
+fs.createReadStream('my-tarball.tgz')
+ .pipe(tar.t())
+ .on('entry', entry => { .. do whatever with it .. })
+```
+
+To do anything synchronous, add `sync: true` to the options. Note
+that sync functions don't take a callback and don't return a promise.
+When the function returns, it's already done. Sync methods without a
+file argument return a sync stream, which flushes immediately. But,
+of course, it still won't be done until you `.end()` it.
+
+To filter entries, add `filter: <function>` to the options.
+Tar-creating methods call the filter with `filter(path, stat)`.
+Tar-reading methods (including extraction) call the filter with
+`filter(path, entry)`. The filter is called in the `this`-context of
+the `Pack` or `Unpack` stream object.
+
+The arguments list to `tar t` and `tar x` specify a list of filenames
+to extract or list, so they're equivalent to a filter that tests if
+the file is in the list.
+
+For those who _aren't_ fans of tar's single-character command names:
+
+```
+tar.c === tar.create
+tar.r === tar.replace (appends to archive, file is required)
+tar.u === tar.update (appends if newer, file is required)
+tar.x === tar.extract
+tar.t === tar.list
+```
+
+Keep reading for all the command descriptions and options, as well as
+the low-level API that they are built on.
+
+### tar.c(options, fileList, callback) [alias: tar.create]
+
+Create a tarball archive.
+
+The `fileList` is an array of paths to add to the tarball. Adding a
+directory also adds its children recursively.
+
+An entry in `fileList` that starts with an `@` symbol is a tar archive
+whose entries will be added. To add a file that starts with `@`,
+prepend it with `./`.
+
+The following options are supported:
+
+- `file` Write the tarball archive to the specified filename. If this
+ is specified, then the callback will be fired when the file has been
+ written, and a promise will be returned that resolves when the file
+ is written. If a filename is not specified, then a Readable Stream
+ will be returned which will emit the file data. [Alias: `f`]
+- `sync` Act synchronously. If this is set, then any provided file
+ will be fully written after the call to `tar.c`. If this is set,
+ and a file is not provided, then the resulting stream will already
+ have the data ready to `read` or `emit('data')` as soon as you
+ request it.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`. [Alias: `C`]
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+ object with settings for `zlib.Gzip()` [Alias: `z`]
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+  that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths. [Alias: `P`]
+- `mode` The mode to set on the created file archive
+- `noDirRecurse` Do not recursively archive the contents of
+ directories. [Alias: `n`]
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such. [Alias: `L`, `h`]
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+
+The following options are mostly internal, but can be modified in some
+advanced use cases, such as re-using caches between runs.
+
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `readdirCache` A Map object that caches calls to `readdir`.
+- `jobs` A number specifying how many concurrent jobs to run.
+ Defaults to 4.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+
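+As a sketch (file names hypothetical), `portable` and `filter` from the
+options above can be combined to build a reproducible archive that
+skips dotfiles:
+
+```js
+const tar = require('tar')
+
+tar.c(
+  {
+    file: 'pkg.tgz',
+    gzip: true,
+    portable: true,  // drop system-specific metadata
+    filter: (path, stat) => !/^\./.test(path.split('/').pop())
+  },
+  ['lib', 'package.json']
+).then(_ => console.log('created'))
+```
+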
+### tar.x(options, fileList, callback) [alias: tar.extract]
+
+Extract a tarball archive.
+
+The `fileList` is an array of paths to extract from the tarball. If
+no paths are provided, then all the entries are extracted.
+
+If the archive is gzipped, then tar will detect this and unzip it.
+
+Note that all directories that are created will be forced to be
+writable, readable, and listable by their owner, to avoid cases where
+a directory prevents extraction of child entries by virtue of its
+mode.
+
+Most extraction errors will cause a `warn` event to be emitted. If
+the `cwd` is missing, or not a directory, then the extraction will
+fail completely.
+
+The following options are supported:
+
+- `cwd` Extract files relative to the specified directory. Defaults
+ to `process.cwd()`. If provided, this must exist and must be a
+ directory. [Alias: `C`]
+- `file` The archive file to extract. If not specified, then a
+ Writable stream is returned where the archive data should be
+ written. [Alias: `f`]
+- `sync` Create files and directories synchronously.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being unpacked. Return `true` to unpack the entry from the
+ archive, or `false` to skip it.
+- `newer` Set to true to keep the existing file on disk if it's newer
+ than the file in the archive. [Alias: `keep-newer`,
+ `keep-newer-files`]
+- `keep` Do not overwrite existing files. In particular, if a file
+ appears more than once in an archive, later copies will not
+ overwrite earlier copies. [Alias: `k`, `keep-existing`]
+- `preservePaths` Allow absolute paths, paths containing `..`, and
+ extracting through symbolic links. By default, `/` is stripped from
+ absolute paths, `..` paths are not extracted, and any file whose
+ location would be modified by a symbolic link is not extracted.
+ [Alias: `P`]
+- `unlink` Unlink files before creating them. Without this option,
+ tar overwrites existing files, which preserves existing hardlinks.
+ With this option, existing hardlinks will be broken, as will any
+ symlink that would affect the location of an extracted file. [Alias:
+ `U`]
+- `strip` Remove the specified number of leading path elements.
+ Pathnames with fewer elements will be silently skipped. Note that
+ the pathname is edited after applying the filter, but before
+ security checks. [Alias: `strip-components`, `stripComponents`]
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `preserveOwner` If true, tar will set the `uid` and `gid` of
+ extracted entries to the `uid` and `gid` fields in the archive.
+ This defaults to true when run as root, and false otherwise. If
+ false, then files and directories will be set with the owner and
+ group of the user running the process. This is similar to `-p` in
+ `tar(1)`, but ACLs and other system-specific data is never unpacked
+ in this implementation, and modes are set by default already.
+ [Alias: `p`]
+- `uid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified user id, regardless of the `uid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `gid` option.
+- `gid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified group id, regardless of the `gid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `uid` option.
+
+The following options are mostly internal, but can be modified in some
+advanced use cases, such as re-using caches between runs.
+
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `umask` Filter the modes of entries like `process.umask()`.
+- `dmode` Default mode for directories
+- `fmode` Default mode for files
+- `dirCache` A Map object of which directories exist.
+- `maxMetaEntrySize` The maximum size of meta entries that is
+ supported. Defaults to 1 MB.
+
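+A sketch of pulling only `.js` entries out of a hypothetical
+`archive.tgz`, using the `filter` option above (note that `cwd` must
+already exist):
+
+```js
+const tar = require('tar')
+
+tar.x({
+  file: 'archive.tgz',
+  cwd: 'out',
+  filter: (path, entry) =>
+    entry.type === 'Directory' || path.endsWith('.js')
+})
+```
+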
+### tar.t(options, fileList, callback) [alias: tar.list]
+
+List the contents of a tarball archive.
+
+The `fileList` is an array of paths to list from the tarball. If
+no paths are provided, then all the entries are listed.
+
+If the archive is gzipped, then tar will detect this and unzip it.
+
+Returns an event emitter that emits `entry` events with
+`tar.ReadEntry` objects. However, they don't emit `'data'` or `'end'`
+events. (If you want to get actual readable entries, use the
+`tar.Parse` class instead.)
+
+The following options are supported:
+
+- `cwd` Extract files relative to the specified directory. Defaults
+ to `process.cwd()`. [Alias: `C`]
+- `file` The archive file to list. If not specified, then a
+ Writable stream is returned where the archive data should be
+ written. [Alias: `f`]
+- `sync` Read the specified file synchronously. (This has no effect
+ when a file option isn't specified, because entries are emitted as
+ fast as they are parsed from the stream anyway.)
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being listed. Return `true` to emit the entry from the
+ archive, or `false` to skip it.
+- `onentry` A function that gets called with `(entry)` for each entry
+ that passes the filter. This is important for when both `file` and
+ `sync` are set, because it will be called synchronously.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noResume` By default, `entry` streams are resumed immediately after
+ the call to `onentry`. Set `noResume: true` to suppress this
+ behavior. Note that by opting into this, the stream will never
+ complete until the entry data is consumed.
+
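+For example, a sketch that collects entry paths synchronously; with
+both `file` and `sync` set, `onentry` is the way to see each entry:
+
+```js
+const tar = require('tar')
+
+const paths = []
+tar.t({
+  file: 'archive.tgz',  // hypothetical archive
+  sync: true,
+  onentry: entry => paths.push(entry.path)
+})
+console.log(paths)
+```
+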
+### tar.u(options, fileList, callback) [alias: tar.update]
+
+Add files to an archive if they are newer than the entry already in
+the tarball archive.
+
+The `fileList` is an array of paths to add to the tarball. Adding a
+directory also adds its children recursively.
+
+An entry in `fileList` that starts with an `@` symbol is a tar archive
+whose entries will be added. To add a file that starts with `@`,
+prepend it with `./`.
+
+The following options are supported:
+
+- `file` Required. Write the tarball archive to the specified
+ filename. [Alias: `f`]
+- `sync` Act synchronously. If this is set, then any provided file
+ will be fully written after the call to `tar.c`.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for adding entries to the
+ archive. Defaults to `process.cwd()`. [Alias: `C`]
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+ object with settings for `zlib.Gzip()` [Alias: `z`]
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+  that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths. [Alias: `P`]
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noDirRecurse` Do not recursively archive the contents of
+ directories. [Alias: `n`]
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such. [Alias: `L`, `h`]
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+
+### tar.r(options, fileList, callback) [alias: tar.replace]
+
+Add files to an existing archive. Because later entries override
+earlier entries, this effectively replaces any existing entries.
+
+The `fileList` is an array of paths to add to the tarball. Adding a
+directory also adds its children recursively.
+
+An entry in `fileList` that starts with an `@` symbol is a tar archive
+whose entries will be added. To add a file that starts with `@`,
+prepend it with `./`.
+
+The following options are supported:
+
+- `file` Required. Write the tarball archive to the specified
+ filename. [Alias: `f`]
+- `sync` Act synchronously. If this is set, then any provided file
+ will be fully written after the call to `tar.c`.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for adding entries to the
+ archive. Defaults to `process.cwd()`. [Alias: `C`]
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+ object with settings for `zlib.Gzip()` [Alias: `z`]
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+  that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths. [Alias: `P`]
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noDirRecurse` Do not recursively archive the contents of
+ directories. [Alias: `n`]
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such. [Alias: `L`, `h`]
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+
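+A sketch contrasting the two append commands (archive name
+hypothetical): `update` appends only when the file on disk is newer,
+while `replace` always appends:
+
+```js
+const tar = require('tar')
+
+tar.u({ file: 'archive.tar' }, ['notes.txt'])  // only if newer on disk
+  .then(_ => tar.r({ file: 'archive.tar' }, ['notes.txt']))  // always
+```
+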
+## Low-Level API
+
+### class tar.Pack
+
+A readable tar stream.
+
+Has all the standard readable stream interface stuff. `'data'` and
+`'end'` events, `read()` method, `pause()` and `resume()`, etc.
+
+#### constructor(options)
+
+The following options are supported:
+
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`.
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+ object with settings for `zlib.Gzip()`
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+  that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `readdirCache` A Map object that caches calls to `readdir`.
+- `jobs` A number specifying how many concurrent jobs to run.
+ Defaults to 4.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noDirRecurse` Do not recursively archive the contents of
+ directories.
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such.
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+
+#### add(path)
+
+Adds an entry to the archive. Returns the Pack stream.
+
+#### write(path)
+
+Adds an entry to the archive. Returns true if flushed.
+
+#### end()
+
+Finishes the archive.
+
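+A minimal sketch of driving `Pack` directly (paths hypothetical):
+
+```js
+const tar = require('tar')
+const fs = require('fs')
+
+const pack = new tar.Pack({ cwd: '.', gzip: true })
+pack.pipe(fs.createWriteStream('out.tgz'))
+pack.add('lib')            // directories are added recursively
+pack.add('package.json')
+pack.end()
+```
+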
+### class tar.Pack.Sync
+
+Synchronous version of `tar.Pack`.
+
+### class tar.Unpack
+
+A writable stream that unpacks a tar archive onto the file system.
+
+All the normal writable stream stuff is supported. `write()` and
+`end()` methods, `'drain'` events, etc.
+
+Note that all directories that are created will be forced to be
+writable, readable, and listable by their owner, to avoid cases where
+a directory prevents extraction of child entries by virtue of its
+mode.
+
+`'close'` is emitted when it's done writing stuff to the file system.
+
+Most unpack errors will cause a `warn` event to be emitted. If the
+`cwd` is missing, or not a directory, then an error will be emitted.
+
+#### constructor(options)
+
+- `cwd` Extract files relative to the specified directory. Defaults
+ to `process.cwd()`. If provided, this must exist and must be a
+ directory.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being unpacked. Return `true` to unpack the entry from the
+ archive, or `false` to skip it.
+- `newer` Set to true to keep the existing file on disk if it's newer
+ than the file in the archive.
+- `keep` Do not overwrite existing files. In particular, if a file
+ appears more than once in an archive, later copies will not
+ overwrite earlier copies.
+- `preservePaths` Allow absolute paths, paths containing `..`, and
+ extracting through symbolic links. By default, `/` is stripped from
+ absolute paths, `..` paths are not extracted, and any file whose
+ location would be modified by a symbolic link is not extracted.
+- `unlink` Unlink files before creating them. Without this option,
+ tar overwrites existing files, which preserves existing hardlinks.
+ With this option, existing hardlinks will be broken, as will any
+ symlink that would affect the location of an extracted file.
+- `strip` Remove the specified number of leading path elements.
+ Pathnames with fewer elements will be silently skipped. Note that
+ the pathname is edited after applying the filter, but before
+ security checks.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `umask` Filter the modes of entries like `process.umask()`.
+- `dmode` Default mode for directories
+- `fmode` Default mode for files
+- `dirCache` A Map object of which directories exist.
+- `maxMetaEntrySize` The maximum size of meta entries that is
+ supported. Defaults to 1 MB.
+- `preserveOwner` If true, tar will set the `uid` and `gid` of
+ extracted entries to the `uid` and `gid` fields in the archive.
+ This defaults to true when run as root, and false otherwise. If
+ false, then files and directories will be set with the owner and
+ group of the user running the process. This is similar to `-p` in
+ `tar(1)`, but ACLs and other system-specific data is never unpacked
+ in this implementation, and modes are set by default already.
+- `win32` True if on a windows platform. Causes behavior where
+ filenames containing `<|>?` chars are converted to
+ windows-compatible values while being unpacked.
+- `uid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified user id, regardless of the `uid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `gid` option.
+- `gid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified group id, regardless of the `gid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `uid` option.
+
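+A sketch of unpacking a hypothetical `archive.tar` by piping into an
+`Unpack` stream (the target directory must already exist):
+
+```js
+const tar = require('tar')
+const fs = require('fs')
+
+const unpack = new tar.Unpack({ cwd: 'out' })
+fs.createReadStream('archive.tar')
+  .pipe(unpack)
+  .on('close', _ => console.log('done writing to disk'))
+```
+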
+### class tar.Unpack.Sync
+
+Synchronous version of `tar.Unpack`.
+
+### class tar.Parse
+
+A writable stream that parses a tar archive stream. All the standard
+writable stream stuff is supported.
+
+If the archive is gzipped, then tar will detect this and unzip it.
+
+Emits `'entry'` events with `tar.ReadEntry` objects, which are
+themselves readable streams that you can pipe wherever.
+
+Each `entry` will not emit until the one before it is flushed through,
+so make sure to either consume the data (with `on('data', ...)` or
+`.pipe(...)`) or throw it away with `.resume()` to keep the stream
+flowing.
+
+#### constructor(options)
+
+Returns an event emitter that emits `entry` events with
+`tar.ReadEntry` objects.
+
+The following options are supported:
+
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being listed. Return `true` to emit the entry from the
+ archive, or `false` to skip it.
+- `onentry` A function that gets called with `(entry)` for each entry
+ that passes the filter.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+
+#### abort(message, error)
+
+Stop all parsing activities. This is called when there are zlib
+errors. It also emits a warning with the message and error provided.
+
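+A sketch of the consumption requirement described above: each entry
+must be read or resumed before the next one will emit.
+
+```js
+const tar = require('tar')
+const fs = require('fs')
+
+const parser = new tar.Parse()
+parser.on('entry', entry => {
+  console.log(entry.path, entry.size)
+  entry.resume()   // discard the body so the stream keeps flowing
+})
+fs.createReadStream('archive.tgz').pipe(parser)
+```
+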
+### class tar.ReadEntry extends [MiniPass](http://npm.im/minipass)
+
+A representation of an entry that is being read out of a tar archive.
+
+It has the following fields:
+
+- `extended` The extended metadata object provided to the constructor.
+- `globalExtended` The global extended metadata object provided to the
+ constructor.
+- `remain` The number of bytes remaining to be written into the
+ stream.
+- `blockRemain` The number of 512-byte blocks remaining to be written
+ into the stream.
+- `ignore` Whether this entry should be ignored.
+- `meta` True if this represents metadata about the next entry, false
+ if it represents a filesystem object.
+- All the fields from the header, extended header, and global extended
+ header are added to the ReadEntry object. So it has `path`, `type`,
+  `size`, `mode`, and so on.
+
+#### constructor(header, extended, globalExtended)
+
+Create a new ReadEntry object with the specified header, extended
+header, and global extended header values.
+
+### class tar.WriteEntry extends [MiniPass](http://npm.im/minipass)
+
+A representation of an entry that is being written from the file
+system into a tar archive.
+
+Emits data for the Header, and for the Pax Extended Header if one is
+required, as well as any body data.
+
+Creating a WriteEntry for a directory does not also create
+WriteEntry objects for all of the directory contents.
+
+It has the following fields:
+
+- `path` The path field that will be written to the archive. By
+ default, this is also the path from the cwd to the file system
+ object.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+  that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `myuid` If supported, the uid of the user running the current
+ process.
+- `myuser` The `env.USER` string if set, or `''`. Set as the entry
+ `uname` field if the file's `uid` matches `this.myuid`.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 1 MB.
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`.
+- `absolute` The absolute path to the entry on the filesystem. By
+ default, this is `path.resolve(this.cwd, this.path)`, but it can be
+ overridden explicitly.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `win32` True if on a windows platform. Causes behavior where paths
+ replace `\` with `/` and filenames containing the windows-compatible
+ forms of `<|>?:` characters are converted to actual `<|>?:` characters
+ in the archive.
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+
+#### constructor(path, options)
+
+`path` is the path of the entry as it is written in the archive.
+
+The following options are supported:
+
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+  that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 1 MB.
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`.
+- `absolute` The absolute path to the entry on the filesystem. By
+ default, this is `path.resolve(this.cwd, this.path)`, but it can be
+ overridden explicitly.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `win32` True if on a windows platform. Causes behavior where paths
+ replace `\` with `/`.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+
+#### warn(message, data)
+
+If strict, emit an error with the provided message.
+
+Otherwise, emit a `'warn'` event with the provided message and data.
+
+### class tar.WriteEntry.Sync
+
+Synchronous version of tar.WriteEntry
+
+### class tar.WriteEntry.Tar
+
+A version of tar.WriteEntry that gets its data from a tar.ReadEntry
+instead of from the filesystem.
+
+#### constructor(readEntry, options)
+
+`readEntry` is the entry being read out of another archive.
+
+The following options are supported:
+
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+  that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+
+### class tar.Header
+
+A class for reading and writing header blocks.
+
+It has the following fields:
+
+- `nullBlock` True if decoding a block which is entirely composed of
+ `0x00` null bytes. (Useful because tar files are terminated by
+ at least 2 null blocks.)
+- `cksumValid` True if the checksum in the header is valid, false
+ otherwise.
+- `needPax` True if the values, as encoded, will require a Pax
+ extended header.
+- `path` The path of the entry.
+- `mode` The 4 lowest-order octal digits of the file mode. That is,
+ read/write/execute permissions for world, group, and owner, and the
+ setuid, setgid, and sticky bits.
+- `uid` Numeric user id of the file owner
+- `gid` Numeric group id of the file owner
+- `size` Size of the file in bytes
+- `mtime` Modified time of the file
+- `cksum` The checksum of the header. This is generated by adding all
+ the bytes of the header block, treating the checksum field itself as
+ all ascii space characters (that is, `0x20`).
+- `type` The human-readable name of the type of entry this represents,
+ or the alphanumeric key if unknown.
+- `typeKey` The alphanumeric key for the type of entry this header
+ represents.
+- `linkpath` The target of Link and SymbolicLink entries.
+- `uname` Human-readable user name of the file owner
+- `gname` Human-readable group name of the file owner
+- `devmaj` The major portion of the device number. Always `0` for
+ files, directories, and links.
+- `devmin` The minor portion of the device number. Always `0` for
+ files, directories, and links.
+- `atime` File access time.
+- `ctime` File change time.
+
+#### constructor(data, [offset=0])
+
+`data` is optional. It is either a Buffer that should be interpreted
+as a tar Header starting at the specified offset and continuing for
+512 bytes, or a data object of keys and values to set on the header
+object, and eventually encode as a tar Header.
+
+#### decode(block, offset)
+
+Decode the provided buffer starting at the specified offset.
+
+Buffer length must be greater than 512 bytes.
+
+#### set(data)
+
+Set the fields in the data object.
+
+#### encode(buffer, offset)
+
+Encode the header fields into the buffer at the specified offset.
+
+Returns `this.needPax` to indicate whether a Pax Extended Header is
+required to properly encode the specified data.
+
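+A sketch of round-tripping a header through a 512-byte block (field
+values illustrative):
+
+```js
+const tar = require('tar')
+
+const header = new tar.Header({
+  path: 'hello.txt',
+  mode: 0o644,
+  size: 12,
+  type: 'File',
+  mtime: new Date()
+})
+const block = Buffer.alloc(512)
+header.encode(block, 0)               // returns this.needPax
+
+const decoded = new tar.Header(block, 0)
+console.log(decoded.path, decoded.cksumValid)   // hello.txt true
+```
+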
+### class tar.Pax
+
+An object representing a set of key-value pairs in a Pax extended
+header entry.
+
+It has the following fields. Where the same name is used, they have
+the same semantics as the tar.Header field of the same name.
+
+- `global` True if this represents a global extended header, or false
+ if it is for a single entry.
+- `atime`
+- `charset`
+- `comment`
+- `ctime`
+- `gid`
+- `gname`
+- `linkpath`
+- `mtime`
+- `path`
+- `size`
+- `uid`
+- `uname`
+- `dev`
+- `ino`
+- `nlink`
+
+#### constructor(object, global)
+
+Set the fields set in the object. `global` is a boolean that defaults
+to false.
+
+#### encode()
+
+Return a Buffer containing the header and body for the Pax extended
+header entry, or `null` if there is nothing to encode.
+
+#### encodeBody()
+
+Return a string representing the body of the pax extended header
+entry.
+
+#### encodeField(fieldName)
+
+Return a string representing the key/value encoding for the specified
+fieldName, or `''` if the field is unset.
+
+### tar.Pax.parse(string, extended, global)
+
+Return a new Pax object created by parsing the contents of the string
+provided.
+
+If the `extended` object is set, then also add the fields from that
+object. (This is necessary because multiple metadata entries can
+occur in sequence.)
+
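+A sketch of encoding a single long-path record (path hypothetical);
+pax records have the standard form `<length> <key>=<value>\n`:
+
+```js
+const tar = require('tar')
+
+const pax = new tar.Pax({ path: 'some/very/long/path/file.txt' })
+console.log(pax.encodeBody())  // '<length> path=some/very/long/path/file.txt\n'
+const buf = pax.encode()       // header block + body, or null if empty
+```
+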
+### tar.types
+
+A translation table for the `type` field in tar headers.
+
+#### tar.types.name.get(code)
+
+Get the human-readable name for a given alphanumeric code.
+
+#### tar.types.code.get(name)
+
+Get the alphanumeric code for a given human-readable name.
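+
+A sketch of the lookup in both directions (`'0'` is the classic ustar
+code for a regular file):
+
+```js
+const tar = require('tar')
+
+console.log(tar.types.name.get('0'))     // 'File'
+console.log(tar.types.code.get('File'))  // '0'
+```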
diff --git a/node_modules/pacote/node_modules/tar/index.js b/node_modules/pacote/node_modules/tar/index.js
new file mode 100644
index 000000000..c9ae06e79
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/index.js
@@ -0,0 +1,18 @@
+'use strict'
+
+// high-level commands
+exports.c = exports.create = require('./lib/create.js')
+exports.r = exports.replace = require('./lib/replace.js')
+exports.t = exports.list = require('./lib/list.js')
+exports.u = exports.update = require('./lib/update.js')
+exports.x = exports.extract = require('./lib/extract.js')
+
+// classes
+exports.Pack = require('./lib/pack.js')
+exports.Unpack = require('./lib/unpack.js')
+exports.Parse = require('./lib/parse.js')
+exports.ReadEntry = require('./lib/read-entry.js')
+exports.WriteEntry = require('./lib/write-entry.js')
+exports.Header = require('./lib/header.js')
+exports.Pax = require('./lib/pax.js')
+exports.types = require('./lib/types.js')
diff --git a/node_modules/pacote/node_modules/tar/lib/create.js b/node_modules/pacote/node_modules/tar/lib/create.js
new file mode 100644
index 000000000..5d46b3ba7
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/create.js
@@ -0,0 +1,110 @@
+'use strict'
+
+// tar -c
+const hlo = require('./high-level-opt.js')
+
+const Pack = require('./pack.js')
+const fs = require('fs')
+const t = require('./list.js')
+const path = require('path')
+
+const c = module.exports = (opt_, files, cb) => {
+ if (typeof files === 'function')
+ cb = files
+
+ if (Array.isArray(opt_))
+ files = opt_, opt_ = {}
+
+ if (!files || !Array.isArray(files) || !files.length)
+ throw new TypeError('no files or directories specified')
+
+ files = Array.from(files)
+
+ const opt = hlo(opt_)
+
+ if (opt.sync && typeof cb === 'function')
+ throw new TypeError('callback not supported for sync tar functions')
+
+ if (!opt.file && typeof cb === 'function')
+ throw new TypeError('callback only supported with file option')
+
+ return opt.file && opt.sync ? createFileSync(opt, files)
+ : opt.file ? createFile(opt, files, cb)
+ : opt.sync ? createSync(opt, files)
+ : create(opt, files)
+}
+
+const createFileSync = (opt, files) => {
+ const p = new Pack.Sync(opt)
+
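+  // if anything below throws, the finally block closes the fd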
+ let threw = true
+ let fd
+ try {
+ fd = fs.openSync(opt.file, 'w', opt.mode || 0o666)
+ p.on('data', chunk => fs.writeSync(fd, chunk, 0, chunk.length))
+ p.on('end', _ => fs.closeSync(fd))
+ addFilesSync(p, files)
+ threw = false
+ } finally {
+ if (threw)
+ try { fs.closeSync(fd) } catch (er) {}
+ }
+}
+
+const createFile = (opt, files, cb) => {
+ const p = new Pack(opt)
+ const stream = fs.createWriteStream(opt.file, { mode: opt.mode || 0o666 })
+ p.pipe(stream)
+
+ const promise = new Promise((res, rej) => {
+ stream.on('error', rej)
+ stream.on('close', res)
+ p.on('error', rej)
+ })
+
+ addFilesAsync(p, files)
+
+ return cb ? promise.then(cb, cb) : promise
+}
+
+const addFilesSync = (p, files) => {
+ files.forEach(file => {
+ if (file.charAt(0) === '@')
+ t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ sync: true,
+ noResume: true,
+ onentry: entry => p.add(entry)
+ })
+ else
+ p.add(file)
+ })
+ p.end()
+}
+
+const addFilesAsync = (p, files) => {
+ while (files.length) {
+ const file = files.shift()
+ if (file.charAt(0) === '@')
+ return t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ noResume: true,
+ onentry: entry => p.add(entry)
+ }).then(_ => addFilesAsync(p, files))
+ else
+ p.add(file)
+ }
+ p.end()
+}
+
+const createSync = (opt, files) => {
+ const p = new Pack.Sync(opt)
+ addFilesSync(p, files)
+ return p
+}
+
+const create = (opt, files) => {
+ const p = new Pack(opt)
+ addFilesAsync(p, files)
+ return p
+}
diff --git a/node_modules/pacote/node_modules/tar/lib/extract.js b/node_modules/pacote/node_modules/tar/lib/extract.js
new file mode 100644
index 000000000..53ecf6789
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/extract.js
@@ -0,0 +1,129 @@
+'use strict'
+
+// tar -x
+const hlo = require('./high-level-opt.js')
+const Unpack = require('./unpack.js')
+const fs = require('fs')
+const path = require('path')
+
+const x = module.exports = (opt_, files, cb) => {
+ if (typeof opt_ === 'function')
+ cb = opt_, files = null, opt_ = {}
+ else if (Array.isArray(opt_))
+ files = opt_, opt_ = {}
+
+ if (typeof files === 'function')
+ cb = files, files = null
+
+ if (!files)
+ files = []
+ else
+ files = Array.from(files)
+
+ const opt = hlo(opt_)
+
+ if (opt.sync && typeof cb === 'function')
+ throw new TypeError('callback not supported for sync tar functions')
+
+ if (!opt.file && typeof cb === 'function')
+ throw new TypeError('callback only supported with file option')
+
+ if (files.length)
+ filesFilter(opt, files)
+
+ return opt.file && opt.sync ? extractFileSync(opt)
+ : opt.file ? extractFile(opt, cb)
+ : opt.sync ? extractSync(opt)
+ : extract(opt)
+}
+
+// construct a filter that limits the file entries extracted
+// include child entries if a dir is included
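+// e.g. with files = ['a'], entries 'a', 'a/b', and 'a/b/c' all pass;
+// 'b/x' does not.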
+const filesFilter = (opt, files) => {
+ const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
+ const filter = opt.filter
+
+ const mapHas = (file, r) => {
+ const root = r || path.parse(file).root || '.'
+ const ret = file === root ? false
+ : map.has(file) ? map.get(file)
+ : mapHas(path.dirname(file), root)
+
+ map.set(file, ret)
+ return ret
+ }
+
+ opt.filter = filter
+ ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
+ : file => mapHas(file.replace(/\/+$/, ''))
+}
+
+const extractFileSync = opt => {
+ const u = new Unpack.Sync(opt)
+
+ const file = opt.file
+ let threw = true
+ let fd
+ try {
+ const stat = fs.statSync(file)
+ const readSize = opt.maxReadSize || 16*1024*1024
+ if (stat.size < readSize)
+ u.end(fs.readFileSync(file))
+ else {
+ let pos = 0
+ const buf = Buffer.allocUnsafe(readSize)
+ fd = fs.openSync(file, 'r')
+ while (pos < stat.size) {
+ let bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
+ pos += bytesRead
+ u.write(buf.slice(0, bytesRead))
+ }
+ u.end()
+ fs.closeSync(fd)
+ }
+ threw = false
+ } finally {
+ if (threw && fd)
+ try { fs.closeSync(fd) } catch (er) {}
+ }
+}
+
+const extractFile = (opt, cb) => {
+ const u = new Unpack(opt)
+ const readSize = opt.maxReadSize || 16*1024*1024
+
+ const file = opt.file
+ const p = new Promise((resolve, reject) => {
+ u.on('error', reject)
+ u.on('close', resolve)
+
+ fs.stat(file, (er, stat) => {
+ if (er)
+ reject(er)
+ else if (stat.size < readSize)
+ fs.readFile(file, (er, data) => {
+ if (er)
+ return reject(er)
+ u.end(data)
+ })
+ else {
+ const stream = fs.createReadStream(file, {
+ highWaterMark: readSize
+ })
+ stream.on('error', reject)
+ stream.pipe(u)
+ }
+ })
+ })
+ return cb ? p.then(cb, cb) : p
+}
+
+const extractSync = opt => {
+ return new Unpack.Sync(opt)
+}
+
+const extract = opt => {
+ return new Unpack(opt)
+}
diff --git a/node_modules/pacote/node_modules/tar/lib/header.js b/node_modules/pacote/node_modules/tar/lib/header.js
new file mode 100644
index 000000000..db002e8c1
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/header.js
@@ -0,0 +1,277 @@
+'use strict'
+// parse a 512-byte header block to a data object, or vice-versa
+// encode returns `true` if a pax extended header is needed, because
+// the data could not be faithfully encoded in a simple header.
+// (Also, check header.needPax to see if it needs a pax header.)
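+//
+// A hypothetical round-trip, for illustration:
+//   const h = new Header({ path: 'x.txt', type: 'File', size: 0 })
+//   h.encode()               // fills h.block with a 512-byte buffer
+//   new Header(h.block).path // 'x.txt'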
+
+const types = require('./types.js')
+const pathModule = require('path')
+const large = require('./large-numbers.js')
+
+const TYPE = Symbol('type')
+
+class Header {
+ constructor (data, off) {
+ this.cksumValid = false
+ this.needPax = false
+ this.nullBlock = false
+
+ this.block = null
+ this.path = null
+ this.mode = null
+ this.uid = null
+ this.gid = null
+ this.size = null
+ this.mtime = null
+ this.cksum = null
+ this[TYPE] = '0'
+ this.linkpath = null
+ this.uname = null
+ this.gname = null
+ this.devmaj = 0
+ this.devmin = 0
+ this.atime = null
+ this.ctime = null
+
+ if (Buffer.isBuffer(data)) {
+ this.decode(data, off || 0)
+ } else if (data)
+ this.set(data)
+ }
+
+ decode (buf, off) {
+ if (!off)
+ off = 0
+
+ if (!buf || !(buf.length >= off + 512))
+ throw new Error('need 512 bytes for header')
+
+ this.path = decString(buf, off, 100)
+ this.mode = decNumber(buf, off + 100, 8)
+ this.uid = decNumber(buf, off + 108, 8)
+ this.gid = decNumber(buf, off + 116, 8)
+ this.size = decNumber(buf, off + 124, 12)
+ this.mtime = decDate(buf, off + 136, 12)
+ this.cksum = decNumber(buf, off + 148, 12)
+
+ // old tar versions marked dirs as a file with a trailing /
+ this[TYPE] = decString(buf, off + 156, 1)
+ if (this[TYPE] === '')
+ this[TYPE] = '0'
+ if (this[TYPE] === '0' && this.path.substr(-1) === '/')
+ this[TYPE] = '5'
+
+    // tar implementations sometimes incorrectly put the stat(dir).size
+    // as the size in the tarball, even though Directory entries cannot
+    // have a body at all. In the rare case that one actually DOES have
+    // a body, we weren't going to do anything with it anyway, and it'll
+    // just produce a warning about an invalid header.
+ if (this[TYPE] === '5')
+ this.size = 0
+
+ this.linkpath = decString(buf, off + 157, 100)
+ if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') {
+ this.uname = decString(buf, off + 265, 32)
+ this.gname = decString(buf, off + 297, 32)
+ this.devmaj = decNumber(buf, off + 329, 8)
+ this.devmin = decNumber(buf, off + 337, 8)
+ if (buf[off + 475] !== 0) {
+ // definitely a prefix, definitely >130 chars.
+ const prefix = decString(buf, off + 345, 155)
+ this.path = prefix + '/' + this.path
+ } else {
+ const prefix = decString(buf, off + 345, 130)
+ if (prefix)
+ this.path = prefix + '/' + this.path
+ this.atime = decDate(buf, off + 476, 12)
+ this.ctime = decDate(buf, off + 488, 12)
+ }
+ }
+
+ let sum = 8 * 0x20
+ for (let i = off; i < off + 148; i++) {
+ sum += buf[i]
+ }
+ for (let i = off + 156; i < off + 512; i++) {
+ sum += buf[i]
+ }
+ this.cksumValid = sum === this.cksum
+ if (this.cksum === null && sum === 8 * 0x20)
+ this.nullBlock = true
+ }
+
+ encode (buf, off) {
+ if (!buf) {
+ buf = this.block = Buffer.alloc(512)
+ off = 0
+ }
+
+ if (!off)
+ off = 0
+
+ if (!(buf.length >= off + 512))
+ throw new Error('need 512 bytes for header')
+
+ const prefixSize = this.ctime || this.atime ? 130 : 155
+ const split = splitPrefix(this.path || '', prefixSize)
+ const path = split[0]
+ const prefix = split[1]
+ this.needPax = split[2]
+
+ this.needPax = encString(buf, off, 100, path) || this.needPax
+ this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax
+ this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax
+ this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax
+ this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax
+ this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax
+ buf[off + 156] = this[TYPE].charCodeAt(0)
+ this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax
+ buf.write('ustar\u000000', off + 257, 8)
+ this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax
+ this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax
+ this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax
+ this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax
+ this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax
+ if (buf[off + 475] !== 0)
+ this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax
+ else {
+ this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax
+ this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax
+ this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax
+ }
+
+ let sum = 8 * 0x20
+ for (let i = off; i < off + 148; i++) {
+ sum += buf[i]
+ }
+ for (let i = off + 156; i < off + 512; i++) {
+ sum += buf[i]
+ }
+ this.cksum = sum
+ encNumber(buf, off + 148, 8, this.cksum)
+ this.cksumValid = true
+
+ return this.needPax
+ }
+
+ set (data) {
+ for (let i in data) {
+ if (data[i] !== null && data[i] !== undefined)
+ this[i] = data[i]
+ }
+ }
+
+ get type () {
+ return types.name.get(this[TYPE]) || this[TYPE]
+ }
+
+ get typeKey () {
+ return this[TYPE]
+ }
+
+ set type (type) {
+ if (types.code.has(type))
+ this[TYPE] = types.code.get(type)
+ else
+ this[TYPE] = type
+ }
+}
+
+const splitPrefix = (p, prefixSize) => {
+ const pathSize = 100
+ let pp = p
+ let prefix = ''
+ let ret
+ const root = pathModule.parse(p).root || '.'
+
+ if (Buffer.byteLength(pp) < pathSize)
+ ret = [pp, prefix, false]
+ else {
+ // first set prefix to the dir, and path to the base
+ prefix = pathModule.dirname(pp)
+ pp = pathModule.basename(pp)
+
+ do {
+ // both fit!
+ if (Buffer.byteLength(pp) <= pathSize &&
+ Buffer.byteLength(prefix) <= prefixSize)
+ ret = [pp, prefix, false]
+
+ // prefix fits in prefix, but path doesn't fit in path
+ else if (Buffer.byteLength(pp) > pathSize &&
+ Buffer.byteLength(prefix) <= prefixSize)
+ ret = [pp.substr(0, pathSize - 1), prefix, true]
+
+ else {
+ // make path take a bit from prefix
+ pp = pathModule.join(pathModule.basename(prefix), pp)
+ prefix = pathModule.dirname(prefix)
+ }
+ } while (prefix !== root && !ret)
+
+ // at this point, found no resolution, just truncate
+ if (!ret)
+ ret = [p.substr(0, pathSize - 1), '', true]
+ }
+ return ret
+}
+
+const decString = (buf, off, size) =>
+ buf.slice(off, off + size).toString('utf8').replace(/\0.*/, '')
+
+const decDate = (buf, off, size) =>
+ numToDate(decNumber(buf, off, size))
+
+const numToDate = num => num === null ? null : new Date(num * 1000)
+
+const decNumber = (buf, off, size) =>
+ buf[off] & 0x80 ? large.parse(buf.slice(off, off + size))
+ : decSmallNumber(buf, off, size)
+
+const nanNull = value => isNaN(value) ? null : value
+
+const decSmallNumber = (buf, off, size) =>
+ nanNull(parseInt(
+ buf.slice(off, off + size)
+ .toString('utf8').replace(/\0.*$/, '').trim(), 8))
+
+// the maximum encodable as a null-terminated octal, by field size
+const MAXNUM = {
+ 12: 0o77777777777,
+ 8 : 0o7777777
+}
+
+const encNumber = (buf, off, size, number) =>
+ number === null ? false :
+ number > MAXNUM[size] || number < 0
+ ? (large.encode(number, buf.slice(off, off + size)), true)
+ : (encSmallNumber(buf, off, size, number), false)
+
+const encSmallNumber = (buf, off, size, number) =>
+ buf.write(octalString(number, size), off, size, 'ascii')
+
+const octalString = (number, size) =>
+ padOctal(Math.floor(number).toString(8), size)
+
+const padOctal = (string, size) =>
+ (string.length === size - 1 ? string
+ : new Array(size - string.length - 1).join('0') + string + ' ') + '\0'
+
+const encDate = (buf, off, size, date) =>
+ date === null ? false :
+ encNumber(buf, off, size, date.getTime() / 1000)
+
+// enough to fill the longest string we've got
+const NULLS = new Array(156).join('\0')
+// pad with nulls, return true if it's longer or non-ascii
+const encString = (buf, off, size, string) =>
+ string === null ? false :
+ (buf.write(string + NULLS, off, size, 'utf8'),
+ string.length !== Buffer.byteLength(string) || string.length > size)
+
+module.exports = Header
diff --git a/node_modules/pacote/node_modules/tar/lib/high-level-opt.js b/node_modules/pacote/node_modules/tar/lib/high-level-opt.js
new file mode 100644
index 000000000..7333db915
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/high-level-opt.js
@@ -0,0 +1,30 @@
+'use strict'
+
+// turn tar(1) style args like `C` into the more verbose things like `cwd`
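+// e.g. hlo({ C: '/tmp', z: true }) => { cwd: '/tmp', gzip: true }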
+
+const argmap = new Map([
+ ['C', 'cwd'],
+ ['f', 'file'],
+ ['z', 'gzip'],
+ ['P', 'preservePaths'],
+ ['U', 'unlink'],
+ ['strip-components', 'strip'],
+ ['stripComponents', 'strip'],
+ ['keep-newer', 'newer'],
+ ['keepNewer', 'newer'],
+ ['keep-newer-files', 'newer'],
+ ['keepNewerFiles', 'newer'],
+ ['k', 'keep'],
+ ['keep-existing', 'keep'],
+ ['keepExisting', 'keep'],
+ ['m', 'noMtime'],
+ ['no-mtime', 'noMtime'],
+ ['p', 'preserveOwner'],
+ ['L', 'follow'],
+ ['h', 'follow']
+])
+
+const parse = module.exports = opt => opt ? Object.keys(opt).map(k => [
+ argmap.has(k) ? argmap.get(k) : k, opt[k]
+]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {}
diff --git a/node_modules/pacote/node_modules/tar/lib/large-numbers.js b/node_modules/pacote/node_modules/tar/lib/large-numbers.js
new file mode 100644
index 000000000..ff4999263
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/large-numbers.js
@@ -0,0 +1,95 @@
+'use strict'
+// Tar can encode large and negative numbers using a leading byte of
+// 0xff for negative, and 0x80 for positive. The trailing byte in the
+// section will always be 0x20, or in some implementations 0x00.
+// This module encodes and decodes these values.
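+//
+// e.g. encode(0x200000000, Buffer.alloc(12)) yields
+// <Buffer 80 00 00 00 00 00 02 00 00 00 00 20>,
+// and parse() of that buffer returns 0x200000000 again.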
+
+const encode = exports.encode = (num, buf) => {
+ buf[buf.length - 1] = 0x20
+ if (num < 0)
+ encodeNegative(num, buf)
+ else
+ encodePositive(num, buf)
+ return buf
+}
+
+const encodePositive = (num, buf) => {
+ buf[0] = 0x80
+ for (var i = buf.length - 2; i > 0; i--) {
+ if (num === 0)
+ buf[i] = 0
+ else {
+ buf[i] = num % 0x100
+ num = Math.floor(num / 0x100)
+ }
+ }
+}
+
+const encodeNegative = (num, buf) => {
+ buf[0] = 0xff
+ var flipped = false
+ num = num * -1
+ for (var i = buf.length - 2; i > 0; i--) {
+ var byte
+ if (num === 0)
+ byte = 0
+ else {
+ byte = num % 0x100
+ num = Math.floor(num / 0x100)
+ }
+ if (flipped)
+ buf[i] = onesComp(byte)
+ else if (byte === 0)
+ buf[i] = 0
+ else {
+ flipped = true
+ buf[i] = twosComp(byte)
+ }
+ }
+}
+
+const parse = exports.parse = (buf) => {
+ var pre = buf[0]
+ return pre === 0x80 ? pos(buf.slice(1, buf.length - 1))
+ : twos(buf.slice(1, buf.length - 1))
+}
+
+const twos = (buf) => {
+ var len = buf.length
+ var sum = 0
+ var flipped = false
+ for (var i = len - 1; i > -1; i--) {
+ var byte = buf[i]
+ var f
+ if (flipped)
+ f = onesComp(byte)
+ else if (byte === 0)
+ f = byte
+ else {
+ flipped = true
+ f = twosComp(byte)
+ }
+ if (f !== 0)
+ sum += f * Math.pow(256, len - i - 1)
+ }
+ return sum * -1
+}
+
+const pos = (buf) => {
+ var len = buf.length
+ var sum = 0
+ for (var i = len - 1; i > -1; i--) {
+ var byte = buf[i]
+ if (byte !== 0)
+ sum += byte * Math.pow(256, len - i - 1)
+ }
+ return sum
+}
+
+const onesComp = byte => (0xff ^ byte) & 0xff
+
+const twosComp = byte => ((0xff ^ byte) + 1) & 0xff
diff --git a/node_modules/pacote/node_modules/tar/lib/list.js b/node_modules/pacote/node_modules/tar/lib/list.js
new file mode 100644
index 000000000..1f5e70bd3
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/list.js
@@ -0,0 +1,132 @@
+'use strict'
+
+// XXX: This shares a lot in common with extract.js
+// maybe some DRY opportunity here?
+
+// tar -t
+const hlo = require('./high-level-opt.js')
+const Parser = require('./parse.js')
+const fs = require('fs')
+const path = require('path')
+
+const t = module.exports = (opt_, files, cb) => {
+ if (typeof opt_ === 'function')
+ cb = opt_, files = null, opt_ = {}
+ else if (Array.isArray(opt_))
+ files = opt_, opt_ = {}
+
+ if (typeof files === 'function')
+ cb = files, files = null
+
+ if (!files)
+ files = []
+ else
+ files = Array.from(files)
+
+ const opt = hlo(opt_)
+
+ if (opt.sync && typeof cb === 'function')
+ throw new TypeError('callback not supported for sync tar functions')
+
+ if (!opt.file && typeof cb === 'function')
+ throw new TypeError('callback only supported with file option')
+
+ if (files.length)
+ filesFilter(opt, files)
+
+ if (!opt.noResume)
+ onentryFunction(opt)
+
+ return opt.file && opt.sync ? listFileSync(opt)
+ : opt.file ? listFile(opt, cb)
+ : list(opt)
+}
+
+const onentryFunction = opt => {
+ const onentry = opt.onentry
+ opt.onentry = onentry ? e => {
+ onentry(e)
+ e.resume()
+ } : e => e.resume()
+}
+
+// construct a filter that limits the file entries listed
+// include child entries if a dir is included
+const filesFilter = (opt, files) => {
+ const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
+ const filter = opt.filter
+
+ const mapHas = (file, r) => {
+ const root = r || path.parse(file).root || '.'
+ const ret = file === root ? false
+ : map.has(file) ? map.get(file)
+ : mapHas(path.dirname(file), root)
+
+ map.set(file, ret)
+ return ret
+ }
+
+ opt.filter = filter
+ ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
+ : file => mapHas(file.replace(/\/+$/, ''))
+}
+
+const listFileSync = opt => {
+ const p = list(opt)
+ const file = opt.file
+ let threw = true
+ let fd
+ try {
+ const stat = fs.statSync(file)
+ const readSize = opt.maxReadSize || 16*1024*1024
+ if (stat.size < readSize) {
+ p.end(fs.readFileSync(file))
+ } else {
+ let pos = 0
+ const buf = Buffer.allocUnsafe(readSize)
+ fd = fs.openSync(file, 'r')
+ while (pos < stat.size) {
+ let bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
+ pos += bytesRead
+ p.write(buf.slice(0, bytesRead))
+ }
+ p.end()
+ }
+ threw = false
+ } finally {
+ if (threw && fd)
+ try { fs.closeSync(fd) } catch (er) {}
+ }
+}
+
+const listFile = (opt, cb) => {
+ const parse = new Parser(opt)
+ const readSize = opt.maxReadSize || 16*1024*1024
+
+ const file = opt.file
+ const p = new Promise((resolve, reject) => {
+ parse.on('error', reject)
+ parse.on('end', resolve)
+
+ fs.stat(file, (er, stat) => {
+ if (er)
+ reject(er)
+ else if (stat.size < readSize)
+ fs.readFile(file, (er, data) => {
+ if (er)
+ return reject(er)
+ parse.end(data)
+ })
+ else {
+ const stream = fs.createReadStream(file, {
+ highWaterMark: readSize
+ })
+ stream.on('error', reject)
+ stream.pipe(parse)
+ }
+ })
+ })
+ return cb ? p.then(cb, cb) : p
+}
+
+const list = opt => new Parser(opt)
diff --git a/node_modules/pacote/node_modules/tar/lib/mkdir.js b/node_modules/pacote/node_modules/tar/lib/mkdir.js
new file mode 100644
index 000000000..2a8f461af
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/mkdir.js
@@ -0,0 +1,206 @@
+'use strict'
+// wrapper around mkdirp for tar's needs.
+
+// TODO: This should probably be a class, rather than functions
+// passing state around in a gazillion args.
+
+const mkdirp = require('mkdirp')
+const fs = require('fs')
+const path = require('path')
+const chownr = require('chownr')
+
+class SymlinkError extends Error {
+ constructor (symlink, path) {
+ super('Cannot extract through symbolic link')
+ this.path = path
+ this.symlink = symlink
+ }
+
+ get name () {
+    return 'SymlinkError'
+ }
+}
+
+class CwdError extends Error {
+ constructor (path, code) {
+ super(code + ': Cannot cd into \'' + path + '\'')
+ this.path = path
+ this.code = code
+ }
+
+ get name () {
+ return 'CwdError'
+ }
+}
+
+const mkdir = module.exports = (dir, opt, cb) => {
+ // if there's any overlap between mask and mode,
+ // then we'll need an explicit chmod
+ const umask = opt.umask
+ const mode = opt.mode | 0o0700
+ const needChmod = (mode & umask) !== 0
+
+ const uid = opt.uid
+ const gid = opt.gid
+ const doChown = typeof uid === 'number' &&
+ typeof gid === 'number' &&
+ ( uid !== opt.processUid || gid !== opt.processGid )
+
+ const preserve = opt.preserve
+ const unlink = opt.unlink
+ const cache = opt.cache
+ const cwd = opt.cwd
+
+ const done = (er, created) => {
+ if (er)
+ cb(er)
+ else {
+ cache.set(dir, true)
+ if (created && doChown)
+ chownr(created, uid, gid, er => done(er))
+ else if (needChmod)
+ fs.chmod(dir, mode, cb)
+ else
+ cb()
+ }
+ }
+
+ if (cache && cache.get(dir) === true)
+ return done()
+
+ if (dir === cwd)
+ return fs.lstat(dir, (er, st) => {
+ if (er || !st.isDirectory())
+ er = new CwdError(dir, er && er.code || 'ENOTDIR')
+ done(er)
+ })
+
+ if (preserve)
+ return mkdirp(dir, mode, done)
+
+ const sub = path.relative(cwd, dir)
+ const parts = sub.split(/\/|\\/)
+ mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
+}
+
+const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
+ if (!parts.length)
+ return cb(null, created)
+ const p = parts.shift()
+ const part = base + '/' + p
+ if (cache.get(part))
+ return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
+}
+
+const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
+ if (er) {
+ if (er.path && path.dirname(er.path) === cwd &&
+ (er.code === 'ENOTDIR' || er.code === 'ENOENT'))
+ return cb(new CwdError(cwd, er.code))
+
+ fs.lstat(part, (statEr, st) => {
+ if (statEr)
+ cb(statEr)
+ else if (st.isDirectory())
+ mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ else if (unlink)
+ fs.unlink(part, er => {
+ if (er)
+ return cb(er)
+ fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
+ })
+ else if (st.isSymbolicLink())
+ return cb(new SymlinkError(part, part + '/' + parts.join('/')))
+ else
+ cb(er)
+ })
+ } else {
+ created = created || part
+ mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ }
+}
+
+const mkdirSync = module.exports.sync = (dir, opt) => {
+ // if there's any overlap between mask and mode,
+ // then we'll need an explicit chmod
+ const umask = opt.umask
+ const mode = opt.mode | 0o0700
+ const needChmod = (mode & umask) !== 0
+
+ const uid = opt.uid
+ const gid = opt.gid
+ const doChown = typeof uid === 'number' &&
+ typeof gid === 'number' &&
+ ( uid !== opt.processUid || gid !== opt.processGid )
+
+ const preserve = opt.preserve
+ const unlink = opt.unlink
+ const cache = opt.cache
+ const cwd = opt.cwd
+
+  const done = (created) => {
+    cache.set(dir, true)
+    if (created && doChown)
+      chownr.sync(created, uid, gid)
+    if (needChmod)
+      fs.chmodSync(dir, mode)
+  }
+
+ if (cache && cache.get(dir) === true)
+ return done()
+
+ if (dir === cwd) {
+ let ok = false
+ let code = 'ENOTDIR'
+ try {
+ ok = fs.lstatSync(dir).isDirectory()
+ } catch (er) {
+ code = er.code
+ } finally {
+ if (!ok)
+ throw new CwdError(dir, code)
+ }
+ done()
+ return
+ }
+
+ if (preserve)
+ return done(mkdirp.sync(dir, mode))
+
+ const sub = path.relative(cwd, dir)
+ const parts = sub.split(/\/|\\/)
+ let created = null
+ for (let p = parts.shift(), part = cwd;
+ p && (part += '/' + p);
+ p = parts.shift()) {
+
+ if (cache.get(part))
+ continue
+
+ try {
+ fs.mkdirSync(part, mode)
+ created = created || part
+ cache.set(part, true)
+ } catch (er) {
+ if (er.path && path.dirname(er.path) === cwd &&
+ (er.code === 'ENOTDIR' || er.code === 'ENOENT'))
+ return new CwdError(cwd, er.code)
+
+ const st = fs.lstatSync(part)
+ if (st.isDirectory()) {
+ cache.set(part, true)
+ continue
+ } else if (unlink) {
+ fs.unlinkSync(part)
+ fs.mkdirSync(part, mode)
+ created = created || part
+ cache.set(part, true)
+ continue
+ } else if (st.isSymbolicLink())
+ return new SymlinkError(part, part + '/' + parts.join('/'))
+ }
+ }
+
+ return done(created)
+}
diff --git a/node_modules/pacote/node_modules/tar/lib/pack.js b/node_modules/pacote/node_modules/tar/lib/pack.js
new file mode 100644
index 000000000..09b6ac590
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/pack.js
@@ -0,0 +1,398 @@
+'use strict'
+
+// A readable tar stream creator
+// Technically, this is a transform stream that you write paths into,
+// and tar format comes out of.
+// The `add()` method is like `write()` but returns this,
+// and `end()` returns `this` as well, so you can
+// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
+// You could also do something like:
+// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
+
+class PackJob {
+ constructor (path, absolute) {
+ this.path = path || './'
+ this.absolute = absolute
+ this.entry = null
+ this.stat = null
+ this.readdir = null
+ this.pending = false
+ this.ignore = false
+ this.piped = false
+ }
+}
+
+const MiniPass = require('minipass')
+const zlib = require('minizlib')
+const ReadEntry = require('./read-entry.js')
+const WriteEntry = require('./write-entry.js')
+const WriteEntrySync = WriteEntry.Sync
+const WriteEntryTar = WriteEntry.Tar
+const Yallist = require('yallist')
+const EOF = Buffer.alloc(1024)
+const ONSTAT = Symbol('onStat')
+const ENDED = Symbol('ended')
+const QUEUE = Symbol('queue')
+const CURRENT = Symbol('current')
+const PROCESS = Symbol('process')
+const PROCESSING = Symbol('processing')
+const PROCESSJOB = Symbol('processJob')
+const JOBS = Symbol('jobs')
+const JOBDONE = Symbol('jobDone')
+const ADDFSENTRY = Symbol('addFSEntry')
+const ADDTARENTRY = Symbol('addTarEntry')
+const STAT = Symbol('stat')
+const READDIR = Symbol('readdir')
+const ONREADDIR = Symbol('onreaddir')
+const PIPE = Symbol('pipe')
+const ENTRY = Symbol('entry')
+const ENTRYOPT = Symbol('entryOpt')
+const WRITEENTRYCLASS = Symbol('writeEntryClass')
+const WRITE = Symbol('write')
+const ONDRAIN = Symbol('ondrain')
+
+const fs = require('fs')
+const path = require('path')
+const warner = require('./warn-mixin.js')
+
+const Pack = warner(class Pack extends MiniPass {
+ constructor (opt) {
+ super(opt)
+ opt = opt || Object.create(null)
+ this.opt = opt
+ this.cwd = opt.cwd || process.cwd()
+ this.maxReadSize = opt.maxReadSize
+ this.preservePaths = !!opt.preservePaths
+ this.strict = !!opt.strict
+ this.noPax = !!opt.noPax
+ this.prefix = (opt.prefix || '').replace(/(\\|\/)+$/, '')
+ this.linkCache = opt.linkCache || new Map()
+ this.statCache = opt.statCache || new Map()
+ this.readdirCache = opt.readdirCache || new Map()
+ this[WRITEENTRYCLASS] = WriteEntry
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+
+ this.zip = null
+ if (opt.gzip) {
+ if (typeof opt.gzip !== 'object')
+ opt.gzip = {}
+ this.zip = new zlib.Gzip(opt.gzip)
+ this.zip.on('data', chunk => super.write(chunk))
+ this.zip.on('end', _ => super.end())
+ this.zip.on('drain', _ => this[ONDRAIN]())
+ this.on('resume', _ => this.zip.resume())
+ } else
+ this.on('drain', this[ONDRAIN])
+
+ this.portable = !!opt.portable
+ this.noDirRecurse = !!opt.noDirRecurse
+ this.follow = !!opt.follow
+
+ this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
+
+    this[QUEUE] = new Yallist()
+ this[JOBS] = 0
+ this.jobs = +opt.jobs || 4
+ this[PROCESSING] = false
+ this[ENDED] = false
+ }
+
+ [WRITE] (chunk) {
+ return super.write(chunk)
+ }
+
+ add (path) {
+ this.write(path)
+ return this
+ }
+
+ end (path) {
+ if (path)
+ this.write(path)
+ this[ENDED] = true
+ this[PROCESS]()
+ return this
+ }
+
+ write (path) {
+ if (this[ENDED])
+ throw new Error('write after end')
+
+ if (path instanceof ReadEntry)
+ this[ADDTARENTRY](path)
+ else
+ this[ADDFSENTRY](path)
+ return this.flowing
+ }
+
+ [ADDTARENTRY] (p) {
+ const absolute = path.resolve(this.cwd, p.path)
+ if (this.prefix)
+ p.path = this.prefix + '/' + p.path.replace(/^\.(\/+|$)/, '')
+
+ // in this case, we don't have to wait for the stat
+ if (!this.filter(p.path, p))
+ p.resume()
+ else {
+      const job = new PackJob(p.path, absolute)
+ job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
+ job.entry.on('end', _ => this[JOBDONE](job))
+ this[JOBS] += 1
+ this[QUEUE].push(job)
+ }
+
+ this[PROCESS]()
+ }
+
+ [ADDFSENTRY] (p) {
+ const absolute = path.resolve(this.cwd, p)
+ if (this.prefix)
+ p = this.prefix + '/' + p.replace(/^\.(\/+|$)/, '')
+
+ this[QUEUE].push(new PackJob(p, absolute))
+ this[PROCESS]()
+ }
+
+ [STAT] (job) {
+ job.pending = true
+ this[JOBS] += 1
+ const stat = this.follow ? 'stat' : 'lstat'
+ fs[stat](job.absolute, (er, stat) => {
+ job.pending = false
+ this[JOBS] -= 1
+ if (er)
+ this.emit('error', er)
+ else
+ this[ONSTAT](job, stat)
+ })
+ }
+
+ [ONSTAT] (job, stat) {
+ this.statCache.set(job.absolute, stat)
+ job.stat = stat
+
+ // now we have the stat, we can filter it.
+ if (!this.filter(job.path, stat))
+ job.ignore = true
+
+ this[PROCESS]()
+ }
+
+ [READDIR] (job) {
+ job.pending = true
+ this[JOBS] += 1
+ fs.readdir(job.absolute, (er, entries) => {
+ job.pending = false
+ this[JOBS] -= 1
+ if (er)
+ return this.emit('error', er)
+ this[ONREADDIR](job, entries)
+ })
+ }
+
+ [ONREADDIR] (job, entries) {
+ this.readdirCache.set(job.absolute, entries)
+ job.readdir = entries
+ this[PROCESS]()
+ }
+
+ [PROCESS] () {
+ if (this[PROCESSING])
+ return
+
+ this[PROCESSING] = true
+ for (let w = this[QUEUE].head;
+ w !== null && this[JOBS] < this.jobs;
+ w = w.next) {
+ this[PROCESSJOB](w.value)
+ if (w.value.ignore) {
+ const p = w.next
+ this[QUEUE].removeNode(w)
+ w.next = p
+ }
+ }
+
+ this[PROCESSING] = false
+
+ if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
+ if (this.zip)
+ this.zip.end(EOF)
+ else {
+ super.write(EOF)
+ super.end()
+ }
+ }
+ }
+
+ get [CURRENT] () {
+ return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
+ }
+
+ [JOBDONE] (job) {
+ this[QUEUE].shift()
+ this[JOBS] -= 1
+ this[PROCESS]()
+ }
+
+ [PROCESSJOB] (job) {
+ if (job.pending)
+ return
+
+ if (job.entry) {
+ if (job === this[CURRENT] && !job.piped)
+ this[PIPE](job)
+ return
+ }
+
+ if (!job.stat) {
+ if (this.statCache.has(job.absolute))
+ this[ONSTAT](job, this.statCache.get(job.absolute))
+ else
+ this[STAT](job)
+ }
+ if (!job.stat)
+ return
+
+ // filtered out!
+ if (job.ignore)
+ return
+
+ if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
+ if (this.readdirCache.has(job.absolute))
+ this[ONREADDIR](job, this.readdirCache.get(job.absolute))
+ else
+ this[READDIR](job)
+ if (!job.readdir)
+ return
+ }
+
+ // we know it doesn't have an entry, because that got checked above
+ job.entry = this[ENTRY](job)
+ if (!job.entry) {
+ job.ignore = true
+ return
+ }
+
+ if (job === this[CURRENT] && !job.piped)
+ this[PIPE](job)
+ }
+
+ [ENTRYOPT] (job) {
+ return {
+ onwarn: (msg, data) => {
+ this.warn(msg, data)
+ },
+ noPax: this.noPax,
+ cwd: this.cwd,
+ absolute: job.absolute,
+ preservePaths: this.preservePaths,
+ maxReadSize: this.maxReadSize,
+ strict: this.strict,
+ portable: this.portable,
+ linkCache: this.linkCache,
+ statCache: this.statCache
+ }
+ }
+
+ [ENTRY] (job) {
+ this[JOBS] += 1
+ try {
+ return new this[WRITEENTRYCLASS](
+ job.path, this[ENTRYOPT](job)).on('end', _ => {
+ this[JOBDONE](job)
+ }).on('error', er => this.emit('error', er))
+ } catch (er) {
+ this.emit('error', er)
+ }
+ }
+
+ [ONDRAIN] () {
+ if (this[CURRENT] && this[CURRENT].entry)
+ this[CURRENT].entry.resume()
+ }
+
+ // like .pipe() but using super, because our write() is special
+ [PIPE] (job) {
+ job.piped = true
+
+ if (job.readdir)
+ job.readdir.forEach(entry => {
+ const p = this.prefix ?
+ job.path.slice(this.prefix.length + 1) || './'
+ : job.path
+
+ const base = p === './' ? '' : p.replace(/\/*$/, '/')
+ this[ADDFSENTRY](base + entry)
+ })
+
+ const source = job.entry
+ const zip = this.zip
+
+ if (zip)
+ source.on('data', chunk => {
+ if (!zip.write(chunk))
+ source.pause()
+ })
+ else
+ source.on('data', chunk => {
+ if (!super.write(chunk))
+ source.pause()
+ })
+ }
+
+ pause () {
+ if (this.zip)
+ this.zip.pause()
+ return super.pause()
+ }
+})
+
+class PackSync extends Pack {
+ constructor (opt) {
+ super(opt)
+ this[WRITEENTRYCLASS] = WriteEntrySync
+ }
+
+ // pause/resume are no-ops in sync streams.
+ pause () {}
+ resume () {}
+
+ [STAT] (job) {
+ const stat = this.follow ? 'statSync' : 'lstatSync'
+ this[ONSTAT](job, fs[stat](job.absolute))
+ }
+
+ [READDIR] (job, stat) {
+ this[ONREADDIR](job, fs.readdirSync(job.absolute))
+ }
+
+ // gotta get it all in this tick
+ [PIPE] (job) {
+ const source = job.entry
+ const zip = this.zip
+
+ if (job.readdir)
+ job.readdir.forEach(entry => {
+ const p = this.prefix ?
+ job.path.slice(this.prefix.length + 1) || './'
+ : job.path
+
+ const base = p === './' ? '' : p.replace(/\/*$/, '/')
+ this[ADDFSENTRY](base + entry)
+ })
+
+ if (zip)
+ source.on('data', chunk => {
+ zip.write(chunk)
+ })
+ else
+ source.on('data', chunk => {
+ super[WRITE](chunk)
+ })
+ }
+}
+
+Pack.Sync = PackSync
+
+module.exports = Pack
diff --git a/node_modules/pacote/node_modules/tar/lib/parse.js b/node_modules/pacote/node_modules/tar/lib/parse.js
new file mode 100644
index 000000000..63c7ee9ce
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/parse.js
@@ -0,0 +1,419 @@
+'use strict'
+
+// this[BUFFER] is the remainder of a chunk if we're waiting for
+// the full 512 bytes of a header to come in. We will Buffer.concat()
+// it to the next write(), which is a mem copy, but a small one.
+//
+// this[QUEUE] is a Yallist of entries that haven't been emitted
+// yet. This can only get filled up if the user keeps write()ing after
+// a write() returns false, or does a write() with more than one entry
+//
+// We don't buffer chunks, we always parse them and either create an
+// entry, or push it into the active entry. The ReadEntry class knows
+// to throw data away if .ignore=true
+//
+// Shift entry off the buffer when it emits 'end', and emit 'entry' for
+// the next one in the list.
+//
+// At any time, we're pushing body chunks into the entry at WRITEENTRY,
+// and waiting for 'end' on the entry at READENTRY
+//
+// ignored entries get .resume() called on them straight away
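+//
+// Minimal usage sketch (tarBuffer being a hypothetical Buffer of
+// raw tar data):
+//   new Parser({ onentry: entry => entry.resume() }).end(tarBuffer)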
+
+const warner = require('./warn-mixin.js')
+const path = require('path')
+const Header = require('./header.js')
+const EE = require('events')
+const Yallist = require('yallist')
+const maxMetaEntrySize = 1024 * 1024
+const Entry = require('./read-entry.js')
+const Pax = require('./pax.js')
+const zlib = require('minizlib')
+
+const gzipHeader = Buffer.from([0x1f, 0x8b])
+const STATE = Symbol('state')
+const WRITEENTRY = Symbol('writeEntry')
+const READENTRY = Symbol('readEntry')
+const NEXTENTRY = Symbol('nextEntry')
+const PROCESSENTRY = Symbol('processEntry')
+const EX = Symbol('extendedHeader')
+const GEX = Symbol('globalExtendedHeader')
+const META = Symbol('meta')
+const EMITMETA = Symbol('emitMeta')
+const BUFFER = Symbol('buffer')
+const QUEUE = Symbol('queue')
+const ENDED = Symbol('ended')
+const EMITTEDEND = Symbol('emittedEnd')
+const EMIT = Symbol('emit')
+const UNZIP = Symbol('unzip')
+const CONSUMECHUNK = Symbol('consumeChunk')
+const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
+const CONSUMEBODY = Symbol('consumeBody')
+const CONSUMEMETA = Symbol('consumeMeta')
+const CONSUMEHEADER = Symbol('consumeHeader')
+const CONSUMING = Symbol('consuming')
+const BUFFERCONCAT = Symbol('bufferConcat')
+const MAYBEEND = Symbol('maybeEnd')
+const WRITING = Symbol('writing')
+const ABORTED = Symbol('aborted')
+const DONE = Symbol('onDone')
+
+const noop = _ => true
+
+module.exports = warner(class Parser extends EE {
+ constructor (opt) {
+ opt = opt || {}
+ super(opt)
+
+ if (opt.ondone)
+ this.on(DONE, opt.ondone)
+ else
+ this.on(DONE, _ => {
+ this.emit('prefinish')
+ this.emit('finish')
+ this.emit('end')
+ this.emit('close')
+ })
+
+ this.strict = !!opt.strict
+ this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
+ this.filter = typeof opt.filter === 'function' ? opt.filter : noop
+
+ // have to set this so that streams are ok piping into it
+ this.writable = true
+ this.readable = false
+
+ this[QUEUE] = new Yallist()
+ this[BUFFER] = null
+ this[READENTRY] = null
+ this[WRITEENTRY] = null
+ this[STATE] = 'begin'
+ this[META] = ''
+ this[EX] = null
+ this[GEX] = null
+ this[ENDED] = false
+ this[UNZIP] = null
+ this[ABORTED] = false
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+ if (typeof opt.onentry === 'function')
+ this.on('entry', opt.onentry)
+ }
+
+ [CONSUMEHEADER] (chunk, position) {
+ const header = new Header(chunk, position)
+
+ if (header.nullBlock)
+ this[EMIT]('nullBlock')
+ else if (!header.cksumValid)
+ this.warn('invalid entry', header)
+ else if (!header.path)
+ this.warn('invalid: path is required', header)
+ else {
+ const type = header.type
+ if (/^(Symbolic)?Link$/.test(type) && !header.linkpath)
+ this.warn('invalid: linkpath required', header)
+ else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath)
+ this.warn('invalid: linkpath forbidden', header)
+ else {
+ const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
+
+ if (entry.meta) {
+ if (entry.size > this.maxMetaEntrySize) {
+ entry.ignore = true
+ this[EMIT]('ignoredEntry', entry)
+ this[STATE] = 'ignore'
+ } else if (entry.size > 0) {
+ this[META] = ''
+ entry.on('data', c => this[META] += c)
+ this[STATE] = 'meta'
+ }
+ } else {
+
+ this[EX] = null
+ entry.ignore = entry.ignore || !this.filter(entry.path, entry)
+ if (entry.ignore) {
+ this[EMIT]('ignoredEntry', entry)
+ this[STATE] = entry.remain ? 'ignore' : 'begin'
+ } else {
+ if (entry.remain)
+ this[STATE] = 'body'
+ else {
+ this[STATE] = 'begin'
+ entry.end()
+ }
+
+ if (!this[READENTRY]) {
+ this[QUEUE].push(entry)
+ this[NEXTENTRY]()
+ } else
+ this[QUEUE].push(entry)
+ }
+ }
+ }
+ }
+ }
+
+ [PROCESSENTRY] (entry) {
+ let go = true
+
+ if (!entry) {
+ this[READENTRY] = null
+ go = false
+ } else if (Array.isArray(entry))
+ this.emit.apply(this, entry)
+ else {
+ this[READENTRY] = entry
+ this.emit('entry', entry)
+ if (!entry.emittedEnd) {
+ entry.on('end', _ => this[NEXTENTRY]())
+ go = false
+ }
+ }
+
+ return go
+ }
+
+ [NEXTENTRY] () {
+ do {} while (this[PROCESSENTRY](this[QUEUE].shift()))
+
+ if (!this[QUEUE].length) {
+ // At this point, there's nothing in the queue, but we may have an
+ // entry which is being consumed (readEntry).
+ // If we don't, then we definitely can handle more data.
+ // If we do, and either it's flowing, or it has never had any data
+ // written to it, then it needs more.
+ // The only other possibility is that it has returned false from a
+ // write() call, so we wait for the next drain to continue.
+ const re = this[READENTRY]
+ const drainNow = !re || re.flowing || re.size === re.remain
+ if (drainNow) {
+ if (!this[WRITING])
+ this.emit('drain')
+ } else
+ re.once('drain', _ => this.emit('drain'))
+ }
+ }
+
+ [CONSUMEBODY] (chunk, position) {
+ // write up to but no more than writeEntry.blockRemain
+ const entry = this[WRITEENTRY]
+ const br = entry.blockRemain
+ const c = (br >= chunk.length && position === 0) ? chunk
+ : chunk.slice(position, position + br)
+
+ entry.write(c)
+
+ if (!entry.blockRemain) {
+ this[STATE] = 'begin'
+ this[WRITEENTRY] = null
+ entry.end()
+ }
+
+ return c.length
+ }
+
+ [CONSUMEMETA] (chunk, position) {
+ const entry = this[WRITEENTRY]
+ const ret = this[CONSUMEBODY](chunk, position)
+
+ // if we finished, then the entry is reset
+ if (!this[WRITEENTRY])
+ this[EMITMETA](entry)
+
+ return ret
+ }
+
+ [EMIT] (ev, data, extra) {
+ if (!this[QUEUE].length && !this[READENTRY])
+ this.emit(ev, data, extra)
+ else
+ this[QUEUE].push([ev, data, extra])
+ }
+
+ [EMITMETA] (entry) {
+ this[EMIT]('meta', this[META])
+ switch (entry.type) {
+ case 'ExtendedHeader':
+ case 'OldExtendedHeader':
+ this[EX] = Pax.parse(this[META], this[EX], false)
+ break
+
+ case 'GlobalExtendedHeader':
+ this[GEX] = Pax.parse(this[META], this[GEX], true)
+ break
+
+ case 'NextFileHasLongPath':
+ case 'OldGnuLongPath':
+ this[EX] = this[EX] || Object.create(null)
+ this[EX].path = this[META].replace(/\0.*/, '')
+ break
+
+ case 'NextFileHasLongLinkpath':
+ this[EX] = this[EX] || Object.create(null)
+ this[EX].linkpath = this[META].replace(/\0.*/, '')
+ break
+
+ /* istanbul ignore next */
+ default: throw new Error('unknown meta: ' + entry.type)
+ }
+ }
+
+ abort (msg, error) {
+ this[ABORTED] = true
+ this.warn(msg, error)
+ this.emit('abort')
+ }
+
+ write (chunk) {
+ if (this[ABORTED])
+ return
+
+ // first write, might be gzipped
+ if (this[UNZIP] === null && chunk) {
+ if (this[BUFFER]) {
+ chunk = Buffer.concat([this[BUFFER], chunk])
+ this[BUFFER] = null
+ }
+ if (chunk.length < gzipHeader.length) {
+ this[BUFFER] = chunk
+ return true
+ }
+ for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
+ if (chunk[i] !== gzipHeader[i])
+ this[UNZIP] = false
+ }
+ if (this[UNZIP] === null) {
+ const ended = this[ENDED]
+ this[ENDED] = false
+ this[UNZIP] = new zlib.Unzip()
+ this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
+ this[UNZIP].on('error', er =>
+ this.abort('zlib error: ' + er.message, er))
+ this[UNZIP].on('end', _ => {
+ this[ENDED] = true
+ this[CONSUMECHUNK]()
+ })
+ return ended ? this[UNZIP].end(chunk) : this[UNZIP].write(chunk)
+ }
+ }
+
+ this[WRITING] = true
+ if (this[UNZIP])
+ this[UNZIP].write(chunk)
+ else
+ this[CONSUMECHUNK](chunk)
+ this[WRITING] = false
+
+ // return false if there's a queue, or if the current entry isn't flowing
+ const ret =
+ this[QUEUE].length ? false :
+ this[READENTRY] ? this[READENTRY].flowing :
+ true
+
+ // if we have no queue, then that means a clogged READENTRY
+ if (!ret && !this[QUEUE].length)
+ this[READENTRY].once('drain', _ => this.emit('drain'))
+
+ return ret
+ }
+
+ [BUFFERCONCAT] (c) {
+ if (c && !this[ABORTED])
+ this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
+ }
+
+ [MAYBEEND] () {
+ if (this[ENDED] && !this[EMITTEDEND] && !this[ABORTED]) {
+ this[EMITTEDEND] = true
+ const entry = this[WRITEENTRY]
+ if (entry && entry.blockRemain) {
+ const have = this[BUFFER] ? this[BUFFER].length : 0
+ this.warn('Truncated input (needed ' + entry.blockRemain +
+ ' more bytes, only ' + have + ' available)', entry)
+ if (this[BUFFER])
+ entry.write(this[BUFFER])
+ entry.end()
+ }
+ this[EMIT](DONE)
+ }
+ }
+
+ [CONSUMECHUNK] (chunk) {
+ if (this[CONSUMING]) {
+ this[BUFFERCONCAT](chunk)
+ } else if (!chunk && !this[BUFFER]) {
+ this[MAYBEEND]()
+ } else {
+ this[CONSUMING] = true
+ if (this[BUFFER]) {
+ this[BUFFERCONCAT](chunk)
+ const c = this[BUFFER]
+ this[BUFFER] = null
+ this[CONSUMECHUNKSUB](c)
+ } else {
+ this[CONSUMECHUNKSUB](chunk)
+ }
+
+ while (this[BUFFER] && this[BUFFER].length >= 512 && !this[ABORTED]) {
+ const c = this[BUFFER]
+ this[BUFFER] = null
+ this[CONSUMECHUNKSUB](c)
+ }
+ this[CONSUMING] = false
+ }
+
+ if (!this[BUFFER] || this[ENDED])
+ this[MAYBEEND]()
+ }
+
+ [CONSUMECHUNKSUB] (chunk) {
+ // we know that we are in CONSUMING mode, so anything written goes into
+ // the buffer. Advance the position and put any remainder in the buffer.
+ let position = 0
+ let length = chunk.length
+ while (position + 512 <= length && !this[ABORTED]) {
+ switch (this[STATE]) {
+ case 'begin':
+ this[CONSUMEHEADER](chunk, position)
+ position += 512
+ break
+
+ case 'ignore':
+ case 'body':
+ position += this[CONSUMEBODY](chunk, position)
+ break
+
+ case 'meta':
+ position += this[CONSUMEMETA](chunk, position)
+ break
+
+ /* istanbul ignore next */
+ default:
+ throw new Error('invalid state: ' + this[STATE])
+ }
+ }
+
+ if (position < length) {
+ if (this[BUFFER])
+ this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
+ else
+ this[BUFFER] = chunk.slice(position)
+ }
+ }
+
+ end (chunk) {
+ if (!this[ABORTED]) {
+ if (this[UNZIP])
+ this[UNZIP].end(chunk)
+ else {
+ this[ENDED] = true
+ this.write(chunk)
+ }
+ }
+ }
+})
diff --git a/node_modules/pacote/node_modules/tar/lib/pax.js b/node_modules/pacote/node_modules/tar/lib/pax.js
new file mode 100644
index 000000000..214a459f3
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/pax.js
@@ -0,0 +1,145 @@
+'use strict'
+const Header = require('./header.js')
+const path = require('path')
+
+class Pax {
+ constructor (obj, global) {
+ this.atime = obj.atime || null
+ this.charset = obj.charset || null
+ this.comment = obj.comment || null
+ this.ctime = obj.ctime || null
+ this.gid = obj.gid || null
+ this.gname = obj.gname || null
+ this.linkpath = obj.linkpath || null
+ this.mtime = obj.mtime || null
+ this.path = obj.path || null
+ this.size = obj.size || null
+ this.uid = obj.uid || null
+ this.uname = obj.uname || null
+ this.dev = obj.dev || null
+ this.ino = obj.ino || null
+ this.nlink = obj.nlink || null
+ this.global = global || false
+ }
+
+ encode () {
+ const body = this.encodeBody()
+ if (body === '')
+ return null
+
+ const bodyLen = Buffer.byteLength(body)
+ // round up to 512 bytes
+ // add 512 for header
+ const bufLen = 512 * Math.ceil(1 + bodyLen / 512)
+ const buf = Buffer.allocUnsafe(bufLen)
+
+ // 0-fill the header section, it might not hit every field
+ for (let i = 0; i < 512; i++) {
+ buf[i] = 0
+ }
+
+ new Header({
+      // XXX this should split the path: ideally the entry path would
+      // be dirname + '/PaxHeader/' + basename, capped at 99 chars
+ path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99),
+ mode: this.mode || 0o644,
+ uid: this.uid || null,
+ gid: this.gid || null,
+ size: bodyLen,
+ mtime: this.mtime || null,
+ type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
+ linkpath: '',
+ uname: this.uname || '',
+ gname: this.gname || '',
+ devmaj: 0,
+ devmin: 0,
+ atime: this.atime || null,
+ ctime: this.ctime || null
+ }).encode(buf)
+
+ buf.write(body, 512, bodyLen, 'utf8')
+
+ // null pad after the body
+ for (let i = bodyLen + 512; i < buf.length; i++) {
+ buf[i] = 0
+ }
+
+ return buf
+ }
+
+ encodeBody () {
+ return (
+ this.encodeField('path') +
+ this.encodeField('ctime') +
+ this.encodeField('atime') +
+ this.encodeField('dev') +
+ this.encodeField('ino') +
+ this.encodeField('nlink') +
+ this.encodeField('charset') +
+ this.encodeField('comment') +
+ this.encodeField('gid') +
+ this.encodeField('gname') +
+ this.encodeField('linkpath') +
+ this.encodeField('mtime') +
+ this.encodeField('size') +
+ this.encodeField('uid') +
+ this.encodeField('uname')
+ )
+ }
+
+ encodeField (field) {
+ if (this[field] === null || this[field] === undefined)
+ return ''
+ const v = this[field] instanceof Date ? this[field].getTime() / 1000
+ : this[field]
+ const s = ' ' +
+ (field === 'dev' || field === 'ino' || field === 'nlink'
+ ? 'SCHILY.' : '') +
+ field + '=' + v + '\n'
+ const byteLen = Buffer.byteLength(s)
+    // The length prefix counts its own digits as part of the record
+    // length: if the rest of the record is 9 bytes, a 1-digit prefix
+    // would make 10 bytes, which needs a 2-digit prefix, making the
+    // record 11 bytes in total.
+ let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
+ if (byteLen + digits >= Math.pow(10, digits))
+ digits += 1
+ const len = digits + byteLen
+ return len + s
+ }
+}
+
+Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g)
+
+const merge = (a, b) =>
+ b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a
+
+const parseKV = string =>
+ string
+ .replace(/\n$/, '')
+ .split('\n')
+ .reduce(parseKVLine, Object.create(null))
+
+const parseKVLine = (set, line) => {
+ const n = parseInt(line, 10)
+
+ // XXX Values with \n in them will fail this.
+ // Refactor to not be a naive line-by-line parse.
+ if (n !== Buffer.byteLength(line) + 1)
+ return set
+
+ line = line.substr((n + ' ').length)
+ const kv = line.split('=')
+ const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1')
+ if (!k)
+ return set
+
+ const v = kv.join('=')
+ set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k)
+ ? new Date(v * 1000)
+ : /^[0-9]+$/.test(v) ? +v
+ : v
+ return set
+}
+
+module.exports = Pax
diff --git a/node_modules/pacote/node_modules/tar/lib/read-entry.js b/node_modules/pacote/node_modules/tar/lib/read-entry.js
new file mode 100644
index 000000000..aa369c74f
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/read-entry.js
@@ -0,0 +1,92 @@
+'use strict'
+const types = require('./types.js')
+const MiniPass = require('minipass')
+
+const SLURP = Symbol('slurp')
+module.exports = class ReadEntry extends MiniPass {
+ constructor (header, ex, gex) {
+ super()
+ this.extended = ex
+ this.globalExtended = gex
+ this.header = header
+ this.startBlockSize = 512 * Math.ceil(header.size / 512)
+ this.blockRemain = this.startBlockSize
+ this.remain = header.size
+ this.type = header.type
+ this.meta = false
+ this.ignore = false
+ switch (this.type) {
+ case 'File':
+ case 'OldFile':
+ case 'Link':
+ case 'SymbolicLink':
+ case 'CharacterDevice':
+ case 'BlockDevice':
+ case 'Directory':
+ case 'FIFO':
+ case 'ContiguousFile':
+ case 'GNUDumpDir':
+ break
+
+ case 'NextFileHasLongLinkpath':
+ case 'NextFileHasLongPath':
+ case 'OldGnuLongPath':
+ case 'GlobalExtendedHeader':
+ case 'ExtendedHeader':
+ case 'OldExtendedHeader':
+ this.meta = true
+ break
+
+ // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
+ // it may be worth doing the same, but with a warning.
+ default:
+ this.ignore = true
+ }
+
+ this.path = header.path
+ this.mode = header.mode
+ if (this.mode)
+ this.mode = this.mode & 0o7777
+ this.uid = header.uid
+ this.gid = header.gid
+ this.uname = header.uname
+ this.gname = header.gname
+ this.size = header.size
+ this.mtime = header.mtime
+ this.atime = header.atime
+ this.ctime = header.ctime
+ this.linkpath = header.linkpath
+
+ if (ex) this[SLURP](ex)
+ if (gex) this[SLURP](gex, true)
+ }
+
+ write (data) {
+ const writeLen = data.length
+ if (writeLen > this.blockRemain)
+ throw new Error('writing more to entry than is appropriate')
+
+ const r = this.remain
+ const br = this.blockRemain
+ this.remain = Math.max(0, r - writeLen)
+ this.blockRemain = Math.max(0, br - writeLen)
+ if (this.ignore)
+ return true
+
+ if (r >= writeLen)
+ return super.write(data)
+
+ // r < writeLen
+ return super.write(data.slice(0, r))
+ }
+
+ [SLURP] (ex, global) {
+ for (let k in ex) {
+ // we slurp in everything except for the path attribute in
+ // a global extended header, because that's weird.
+ if (ex[k] !== null && ex[k] !== undefined &&
+ !(global && k === 'path'))
+ this[k] = ex[k]
+ }
+ }
+}
diff --git a/node_modules/pacote/node_modules/tar/lib/replace.js b/node_modules/pacote/node_modules/tar/lib/replace.js
new file mode 100644
index 000000000..aac6b57fa
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/replace.js
@@ -0,0 +1,214 @@
+'use strict'
+
+// tar -r
+const hlo = require('./high-level-opt.js')
+const Pack = require('./pack.js')
+const Parse = require('./parse.js')
+const fs = require('fs')
+const t = require('./list.js')
+const path = require('path')
+
+// starting at the head of the file, read a Header
+// If the checksum is invalid, that's our position to start writing
+// If it is, jump forward by the specified size (round up to 512)
+// and try again.
+// Write the new Pack stream starting there.
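+// e.g. r({ file: 'archive.tar', sync: true }, ['added.txt']) scans for
+// the end of the last valid entry and appends added.txt there (both
+// file names here are made up).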
+
+const Header = require('./header.js')
+
+const r = module.exports = (opt_, files, cb) => {
+ const opt = hlo(opt_)
+
+ if (!opt.file)
+ throw new TypeError('file is required')
+
+ if (opt.gzip)
+ throw new TypeError('cannot append to compressed archives')
+
+ if (!files || !Array.isArray(files) || !files.length)
+ throw new TypeError('no files or directories specified')
+
+ files = Array.from(files)
+
+ return opt.sync ? replaceSync(opt, files)
+ : replace(opt, files, cb)
+}
+
+const replaceSync = (opt, files) => {
+ const p = new Pack.Sync(opt)
+
+ let threw = true
+ let fd
+ try {
+ try {
+ fd = fs.openSync(opt.file, 'r+')
+ } catch (er) {
+ if (er.code === 'ENOENT')
+ fd = fs.openSync(opt.file, 'w+')
+ else
+ throw er
+ }
+
+ const st = fs.fstatSync(fd)
+ const headBuf = Buffer.alloc(512)
+ let position
+
+ POSITION: for (position = 0; position < st.size; position += 512) {
+ for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
+ bytes = fs.readSync(
+ fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
+ )
+
+ if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
+ throw new Error('cannot append to compressed archives')
+
+ if (!bytes)
+ break POSITION
+ }
+
+ let h = new Header(headBuf)
+ if (!h.cksumValid)
+ break
+ let entryBlockSize = 512 * Math.ceil(h.size / 512)
+ if (position + entryBlockSize + 512 > st.size)
+ break
+ // the 512 for the header we just parsed will be added as well
+ // also jump ahead all the blocks for the body
+ position += entryBlockSize
+ if (opt.mtimeCache)
+ opt.mtimeCache.set(h.path, h.mtime)
+ }
+
+ p.on('data', c => {
+ fs.writeSync(fd, c, 0, c.length, position)
+ position += c.length
+ })
+ p.on('end', _ => fs.closeSync(fd))
+
+ addFilesSync(p, files)
+ threw = false
+ } finally {
+ if (threw)
+ try { fs.closeSync(fd) } catch (er) {}
+ }
+}
+
+const replace = (opt, files, cb) => {
+ files = Array.from(files)
+ const p = new Pack(opt)
+
+ const getPos = (fd, size, cb_) => {
+ const cb = (er, pos) => {
+ if (er)
+ fs.close(fd, _ => cb_(er))
+ else
+ cb_(null, pos)
+ }
+
+ let position = 0
+ if (size === 0)
+ return cb(null, 0)
+
+ let bufPos = 0
+ const headBuf = Buffer.alloc(512)
+ const onread = (er, bytes) => {
+ if (er)
+ return cb(er)
+ bufPos += bytes
+ if (bufPos < 512 && bytes)
+ return fs.read(
+ fd, headBuf, bufPos, headBuf.length - bufPos,
+ position + bufPos, onread
+ )
+
+ if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
+ return cb(new Error('cannot append to compressed archives'))
+
+ // truncated header
+ if (bufPos < 512)
+ return cb(null, position)
+
+ const h = new Header(headBuf)
+ if (!h.cksumValid)
+ return cb(null, position)
+
+ const entryBlockSize = 512 * Math.ceil(h.size / 512)
+ if (position + entryBlockSize + 512 > size)
+ return cb(null, position)
+
+ position += entryBlockSize + 512
+ if (position >= size)
+ return cb(null, position)
+
+ if (opt.mtimeCache)
+ opt.mtimeCache.set(h.path, h.mtime)
+ bufPos = 0
+ fs.read(fd, headBuf, 0, 512, position, onread)
+ }
+ fs.read(fd, headBuf, 0, 512, position, onread)
+ }
+
+ const promise = new Promise((resolve, reject) => {
+ p.on('error', reject)
+ const onopen = (er, fd) => {
+ if (er) {
+ if (er.code === 'ENOENT')
+ return fs.open(opt.file, 'w+', onopen)
+ return reject(er)
+ }
+ fs.fstat(fd, (er, st) => {
+ if (er)
+ return reject(er)
+ getPos(fd, st.size, (er, position) => {
+ if (er)
+ return reject(er)
+ const stream = fs.createWriteStream(opt.file, {
+ fd: fd,
+ flags: 'r+',
+ start: position
+ })
+ p.pipe(stream)
+ stream.on('error', reject)
+ stream.on('close', resolve)
+ addFilesAsync(p, files)
+ })
+ })
+ }
+ fs.open(opt.file, 'r+', onopen)
+ })
+
+ return cb ? promise.then(cb, cb) : promise
+}
+
+const addFilesSync = (p, files) => {
+ files.forEach(file => {
+ if (file.charAt(0) === '@')
+ t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ sync: true,
+ noResume: true,
+ onentry: entry => p.add(entry)
+ })
+ else
+ p.add(file)
+ })
+ p.end()
+}
+
+const addFilesAsync = (p, files) => {
+ while (files.length) {
+ const file = files.shift()
+ if (file.charAt(0) === '@')
+ return t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ noResume: true,
+ onentry: entry => p.add(entry)
+ }).then(_ => addFilesAsync(p, files))
+ else
+ p.add(file)
+ }
+ p.end()
+}
diff --git a/node_modules/pacote/node_modules/tar/lib/types.js b/node_modules/pacote/node_modules/tar/lib/types.js
new file mode 100644
index 000000000..df425652b
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/types.js
@@ -0,0 +1,44 @@
+'use strict'
+// map types from key to human-friendly name
+exports.name = new Map([
+ ['0', 'File'],
+ // same as File
+ ['', 'OldFile'],
+ ['1', 'Link'],
+ ['2', 'SymbolicLink'],
+ // Devices and FIFOs aren't fully supported
+ // they are parsed, but skipped when unpacking
+ ['3', 'CharacterDevice'],
+ ['4', 'BlockDevice'],
+ ['5', 'Directory'],
+ ['6', 'FIFO'],
+ // same as File
+ ['7', 'ContiguousFile'],
+ // pax headers
+ ['g', 'GlobalExtendedHeader'],
+ ['x', 'ExtendedHeader'],
+ // vendor-specific stuff
+ // skip
+ ['A', 'SolarisACL'],
+ // like 5, but with data, which should be skipped
+ ['D', 'GNUDumpDir'],
+ // metadata only, skip
+ ['I', 'Inode'],
+ // data = link path of next file
+ ['K', 'NextFileHasLongLinkpath'],
+ // data = path of next file
+ ['L', 'NextFileHasLongPath'],
+ // skip
+ ['M', 'ContinuationFile'],
+ // like L
+ ['N', 'OldGnuLongPath'],
+ // skip
+ ['S', 'SparseFile'],
+ // skip
+ ['V', 'TapeVolumeHeader'],
+ // like x
+ ['X', 'OldExtendedHeader']
+])
+
+// map the other direction
+exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]))
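A quick illustration of the two maps (values taken from the table above):

```js
const types = require('./types.js')

types.name.get('5')          // 'Directory'
types.name.get('x')          // 'ExtendedHeader'
types.code.get('Directory')  // '5'
```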
diff --git a/node_modules/pacote/node_modules/tar/lib/unpack.js b/node_modules/pacote/node_modules/tar/lib/unpack.js
new file mode 100644
index 000000000..e8c80c6fd
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/unpack.js
@@ -0,0 +1,481 @@
+'use strict'
+
+const assert = require('assert')
+const EE = require('events').EventEmitter
+const Parser = require('./parse.js')
+const fs = require('fs')
+const path = require('path')
+const mkdir = require('./mkdir.js')
+const mkdirSync = mkdir.sync
+const wc = require('./winchars.js')
+
+const ONENTRY = Symbol('onEntry')
+const CHECKFS = Symbol('checkFs')
+const MAKEFS = Symbol('makeFs')
+const FILE = Symbol('file')
+const DIRECTORY = Symbol('directory')
+const LINK = Symbol('link')
+const SYMLINK = Symbol('symlink')
+const HARDLINK = Symbol('hardlink')
+const UNSUPPORTED = Symbol('unsupported')
+const UNKNOWN = Symbol('unknown')
+const CHECKPATH = Symbol('checkPath')
+const MKDIR = Symbol('mkdir')
+const ONERROR = Symbol('onError')
+const PENDING = Symbol('pending')
+const PEND = Symbol('pend')
+const UNPEND = Symbol('unpend')
+const ENDED = Symbol('ended')
+const MAYBECLOSE = Symbol('maybeClose')
+const SKIP = Symbol('skip')
+const DOCHOWN = Symbol('doChown')
+const UID = Symbol('uid')
+const GID = Symbol('gid')
+
+class Unpack extends Parser {
+ constructor (opt) {
+ if (!opt)
+ opt = {}
+
+ opt.ondone = _ => {
+ this[ENDED] = true
+ this[MAYBECLOSE]()
+ }
+
+ super(opt)
+
+ this.writable = true
+ this.readable = false
+
+ this[PENDING] = 0
+ this[ENDED] = false
+
+ this.dirCache = opt.dirCache || new Map()
+
+ if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
+ // need both or neither
+ if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number')
+ throw new TypeError('cannot set owner without number uid and gid')
+ if (opt.preserveOwner)
+ throw new TypeError(
+ 'cannot preserve owner in archive and also set owner explicitly')
+ this.uid = opt.uid
+ this.gid = opt.gid
+ this.setOwner = true
+ } else {
+ this.uid = null
+ this.gid = null
+ this.setOwner = false
+ }
+
+ // default true for root
+ if (opt.preserveOwner === undefined && typeof opt.uid !== 'number')
+ this.preserveOwner = process.getuid && process.getuid() === 0
+ else
+ this.preserveOwner = !!opt.preserveOwner
+
+ this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
+ process.getuid() : null
+ this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
+ process.getgid() : null
+
+    // turn <>?|: in filenames into their 0xf000-higher encoded forms
+ this.win32 = !!opt.win32 || process.platform === 'win32'
+
+ // do not unpack over files that are newer than what's in the archive
+ this.newer = !!opt.newer
+
+ // do not unpack over ANY files
+ this.keep = !!opt.keep
+
+ // do not set mtime/atime of extracted entries
+ this.noMtime = !!opt.noMtime
+
+ // allow .., absolute path entries, and unpacking through symlinks
+ // without this, warn and skip .., relativize absolutes, and error
+ // on symlinks in extraction path
+ this.preservePaths = !!opt.preservePaths
+
+ // unlink files and links before writing. This breaks existing hard
+ // links, and removes symlink directories rather than erroring
+ this.unlink = !!opt.unlink
+
+ this.cwd = path.resolve(opt.cwd || process.cwd())
+ this.strip = +opt.strip || 0
+ this.processUmask = process.umask()
+ this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
+ // default mode for dirs created as parents
+ this.dmode = opt.dmode || (0o0777 & (~this.umask))
+ this.fmode = opt.fmode || (0o0666 & (~this.umask))
+ this.on('entry', entry => this[ONENTRY](entry))
+ }
+
+ [MAYBECLOSE] () {
+ if (this[ENDED] && this[PENDING] === 0) {
+ this.emit('prefinish')
+ this.emit('finish')
+ this.emit('end')
+ this.emit('close')
+ }
+ }
+
+ [CHECKPATH] (entry) {
+ if (this.strip) {
+ const parts = entry.path.split(/\/|\\/)
+ if (parts.length < this.strip)
+ return false
+ entry.path = parts.slice(this.strip).join('/')
+ }
+
+ if (!this.preservePaths) {
+ const p = entry.path
+ if (p.match(/(^|\/|\\)\.\.(\\|\/|$)/)) {
+ this.warn('path contains \'..\'', p)
+ return false
+ }
+
+ // absolutes on posix are also absolutes on win32
+ // so we only need to test this one to get both
+ if (path.win32.isAbsolute(p)) {
+ const parsed = path.win32.parse(p)
+ this.warn('stripping ' + parsed.root + ' from absolute path', p)
+ entry.path = p.substr(parsed.root.length)
+ }
+ }
+
+ // only encode : chars that aren't drive letter indicators
+ if (this.win32) {
+ const parsed = path.win32.parse(entry.path)
+ entry.path = parsed.root === '' ? wc.encode(entry.path)
+ : parsed.root + wc.encode(entry.path.substr(parsed.root.length))
+ }
+
+ if (path.isAbsolute(entry.path))
+ entry.absolute = entry.path
+ else
+ entry.absolute = path.resolve(this.cwd, entry.path)
+
+ return true
+ }
+
+ [ONENTRY] (entry) {
+ if (!this[CHECKPATH](entry))
+ return entry.resume()
+
+ assert.equal(typeof entry.absolute, 'string')
+
+ switch (entry.type) {
+ case 'Directory':
+ case 'GNUDumpDir':
+ if (entry.mode)
+ entry.mode = entry.mode | 0o700
+
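+        // fall through: directories continue into the file/link cases below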
+ case 'File':
+ case 'OldFile':
+ case 'ContiguousFile':
+ case 'Link':
+ case 'SymbolicLink':
+ return this[CHECKFS](entry)
+
+ case 'CharacterDevice':
+ case 'BlockDevice':
+ case 'FIFO':
+ return this[UNSUPPORTED](entry)
+ }
+ }
+
+ [ONERROR] (er, entry) {
+ // Cwd has to exist, or else nothing works. That's serious.
+ // Other errors are warnings, which raise the error in strict
+ // mode, but otherwise continue on.
+ if (er.name === 'CwdError')
+ this.emit('error', er)
+ else {
+ this.warn(er.message, er)
+ this[UNPEND]()
+ entry.resume()
+ }
+ }
+
+ [MKDIR] (dir, mode, cb) {
+ mkdir(dir, {
+ uid: this.uid,
+ gid: this.gid,
+ processUid: this.processUid,
+ processGid: this.processGid,
+ umask: this.processUmask,
+ preserve: this.preservePaths,
+ unlink: this.unlink,
+ cache: this.dirCache,
+ cwd: this.cwd,
+ mode: mode
+ }, cb)
+ }
+
+ [DOCHOWN] (entry) {
+ // in preserve owner mode, chown if the entry doesn't match process
+ // in set owner mode, chown if setting doesn't match process
+ return this.preserveOwner &&
+ ( typeof entry.uid === 'number' && entry.uid !== this.processUid ||
+ typeof entry.gid === 'number' && entry.gid !== this.processGid )
+ ||
+ ( typeof this.uid === 'number' && this.uid !== this.processUid ||
+ typeof this.gid === 'number' && this.gid !== this.processGid )
+ }
+
+ [UID] (entry) {
+ return typeof this.uid === 'number' ? this.uid
+ : typeof entry.uid === 'number' ? entry.uid
+ : this.processUid
+ }
+
+ [GID] (entry) {
+ return typeof this.gid === 'number' ? this.gid
+ : typeof entry.gid === 'number' ? entry.gid
+ : this.processGid
+ }
+
+ [FILE] (entry) {
+ const mode = entry.mode & 0o7777 || this.fmode
+ const stream = fs.createWriteStream(entry.absolute, { mode: mode })
+ stream.on('error', er => this[ONERROR](er, entry))
+
+ const queue = []
+ const processQueue = _ => {
+ const action = queue.shift()
+ if (action)
+ action(processQueue)
+ else
+ this[UNPEND]()
+ }
+
+ stream.on('close', _ => {
+ if (entry.mtime && !this.noMtime)
+ queue.push(cb =>
+ fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, cb))
+ if (this[DOCHOWN](entry))
+ queue.push(cb =>
+ fs.chown(entry.absolute, this[UID](entry), this[GID](entry), cb))
+ processQueue()
+ })
+ entry.pipe(stream)
+ }
+
+ [DIRECTORY] (entry) {
+ const mode = entry.mode & 0o7777 || this.dmode
+ this[MKDIR](entry.absolute, mode, er => {
+ if (er)
+ return this[ONERROR](er, entry)
+
+ const queue = []
+ const processQueue = _ => {
+ const action = queue.shift()
+ if (action)
+ action(processQueue)
+ else {
+ this[UNPEND]()
+ entry.resume()
+ }
+ }
+
+ if (entry.mtime && !this.noMtime)
+ queue.push(cb =>
+ fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, cb))
+ if (this[DOCHOWN](entry))
+ queue.push(cb =>
+ fs.chown(entry.absolute, this[UID](entry), this[GID](entry), cb))
+
+ processQueue()
+ })
+ }
+
+ [UNSUPPORTED] (entry) {
+ this.warn('unsupported entry type: ' + entry.type, entry)
+ entry.resume()
+ }
+
+ [SYMLINK] (entry) {
+ this[LINK](entry, entry.linkpath, 'symlink')
+ }
+
+ [HARDLINK] (entry) {
+ this[LINK](entry, path.resolve(this.cwd, entry.linkpath), 'link')
+ }
+
+ [PEND] () {
+ this[PENDING]++
+ }
+
+ [UNPEND] () {
+ this[PENDING]--
+ this[MAYBECLOSE]()
+ }
+
+ [SKIP] (entry) {
+ this[UNPEND]()
+ entry.resume()
+ }
+
+ // check if a thing is there, and if so, try to clobber it
+ [CHECKFS] (entry) {
+ this[PEND]()
+ this[MKDIR](path.dirname(entry.absolute), this.dmode, er => {
+ if (er)
+ return this[ONERROR](er, entry)
+ fs.lstat(entry.absolute, (er, st) => {
+ if (st && (this.keep || this.newer && st.mtime > entry.mtime))
+ this[SKIP](entry)
+ else if (er || (entry.type === 'File' && !this.unlink && st.isFile()))
+ this[MAKEFS](null, entry)
+ else if (st.isDirectory()) {
+ if (entry.type === 'Directory') {
+ if (!entry.mode || (st.mode & 0o7777) === entry.mode)
+ this[MAKEFS](null, entry)
+ else
+ fs.chmod(entry.absolute, entry.mode, er => this[MAKEFS](er, entry))
+ } else
+ fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry))
+ } else
+ fs.unlink(entry.absolute, er => this[MAKEFS](er, entry))
+ })
+ })
+ }
+
+ [MAKEFS] (er, entry) {
+ if (er)
+ return this[ONERROR](er, entry)
+
+ switch (entry.type) {
+ case 'File':
+ case 'OldFile':
+ case 'ContiguousFile':
+ return this[FILE](entry)
+
+ case 'Link':
+ return this[HARDLINK](entry)
+
+ case 'SymbolicLink':
+ return this[SYMLINK](entry)
+
+ case 'Directory':
+ case 'GNUDumpDir':
+ return this[DIRECTORY](entry)
+ }
+ }
+
+ [LINK] (entry, linkpath, link) {
+ // XXX: get the type ('file' or 'dir') for windows
+ fs[link](linkpath, entry.absolute, er => {
+ if (er)
+ return this[ONERROR](er, entry)
+ this[UNPEND]()
+ entry.resume()
+ })
+ }
+}
+
+class UnpackSync extends Unpack {
+ constructor (opt) {
+ super(opt)
+ }
+
+ [CHECKFS] (entry) {
+ const er = this[MKDIR](path.dirname(entry.absolute), this.dmode)
+ if (er)
+ return this[ONERROR](er, entry)
+ try {
+ const st = fs.lstatSync(entry.absolute)
+ if (this.keep || this.newer && st.mtime > entry.mtime)
+ return this[SKIP](entry)
+ else if (entry.type === 'File' && !this.unlink && st.isFile())
+ return this[MAKEFS](null, entry)
+ else {
+ try {
+ if (st.isDirectory()) {
+ if (entry.type === 'Directory') {
+ if (entry.mode && (st.mode & 0o7777) !== entry.mode)
+ fs.chmodSync(entry.absolute, entry.mode)
+ } else
+ fs.rmdirSync(entry.absolute)
+ } else
+ fs.unlinkSync(entry.absolute)
+ return this[MAKEFS](null, entry)
+ } catch (er) {
+ return this[ONERROR](er, entry)
+ }
+ }
+ } catch (er) {
+ return this[MAKEFS](null, entry)
+ }
+ }
+
+ [FILE] (entry) {
+ const mode = entry.mode & 0o7777 || this.fmode
+ try {
+ const fd = fs.openSync(entry.absolute, 'w', mode)
+ entry.on('data', buf => fs.writeSync(fd, buf, 0, buf.length, null))
+ entry.on('end', _ => {
+ if (entry.mtime && !this.noMtime) {
+ try {
+ fs.futimesSync(fd, entry.atime || new Date(), entry.mtime)
+ } catch (er) {}
+ }
+ if (this[DOCHOWN](entry)) {
+ try {
+ fs.fchownSync(fd, this[UID](entry), this[GID](entry))
+ } catch (er) {}
+ }
+ try { fs.closeSync(fd) } catch (er) { this[ONERROR](er, entry) }
+ })
+ } catch (er) { this[ONERROR](er, entry) }
+ }
+
+ [DIRECTORY] (entry) {
+ const mode = entry.mode & 0o7777 || this.dmode
+ const er = this[MKDIR](entry.absolute, mode)
+ if (er)
+ return this[ONERROR](er, entry)
+ if (entry.mtime && !this.noMtime) {
+ try {
+ fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime)
+ } catch (er) {}
+ }
+ if (this[DOCHOWN](entry)) {
+ try {
+ fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))
+ } catch (er) {}
+ }
+ entry.resume()
+ }
+
+ [MKDIR] (dir, mode) {
+ try {
+ return mkdir.sync(dir, {
+ uid: this.uid,
+ gid: this.gid,
+ processUid: this.processUid,
+ processGid: this.processGid,
+ umask: this.processUmask,
+ preserve: this.preservePaths,
+ unlink: this.unlink,
+ cache: this.dirCache,
+ cwd: this.cwd,
+ mode: mode
+ })
+ } catch (er) {
+ return er
+ }
+ }
+
+ [LINK] (entry, linkpath, link) {
+ try {
+ fs[link + 'Sync'](linkpath, entry.absolute)
+ entry.resume()
+ } catch (er) {
+ return this[ONERROR](er, entry)
+ }
+ }
+}
+
+Unpack.Sync = UnpackSync
+module.exports = Unpack
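Unpack is the writable side of extraction: tar-formatted bytes are written in, and files appear under `cwd`. A minimal sketch of driving it directly with an uncompressed archive (paths are illustrative):

```js
const Unpack = require('./unpack.js')
const fs = require('fs')

const unpack = new Unpack({ cwd: '/tmp/extract-target' })
unpack.on('close', () => console.log('extraction complete'))
fs.createReadStream('archive.tar').pipe(unpack)
```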
diff --git a/node_modules/pacote/node_modules/tar/lib/update.js b/node_modules/pacote/node_modules/tar/lib/update.js
new file mode 100644
index 000000000..16c3e93ed
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/update.js
@@ -0,0 +1,36 @@
+'use strict'
+
+// tar -u
+
+const hlo = require('./high-level-opt.js')
+const r = require('./replace.js')
+// just call tar.r with the filter and mtimeCache
+
+const u = module.exports = (opt_, files, cb) => {
+ const opt = hlo(opt_)
+
+ if (!opt.file)
+ throw new TypeError('file is required')
+
+ if (opt.gzip)
+ throw new TypeError('cannot append to compressed archives')
+
+ if (!files || !Array.isArray(files) || !files.length)
+ throw new TypeError('no files or directories specified')
+
+ files = Array.from(files)
+
+ mtimeFilter(opt)
+ return r(opt, files, cb)
+}
+
+const mtimeFilter = opt => {
+ const filter = opt.filter
+
+ if (!opt.mtimeCache)
+ opt.mtimeCache = new Map()
+
+ opt.filter = filter ? (path, stat) =>
+ filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime)
+ : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime)
+}
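In short, `tar.u` is `tar.r` plus a filter that skips any file whose copy already in the archive is newer than the one on disk (the mtime cache is populated while `replace` scans the existing entries). A usage sketch, assuming the high-level API (paths are illustrative):

```js
const tar = require('tar')

// re-append entries only where the filesystem copy is newer
tar.u({ file: 'archive.tar' }, ['src/index.js'])
  .then(() => console.log('archive updated'))
```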
diff --git a/node_modules/pacote/node_modules/tar/lib/warn-mixin.js b/node_modules/pacote/node_modules/tar/lib/warn-mixin.js
new file mode 100644
index 000000000..94a4b9b99
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/warn-mixin.js
@@ -0,0 +1,14 @@
+'use strict'
+module.exports = Base => class extends Base {
+ warn (msg, data) {
+ if (!this.strict)
+ this.emit('warn', msg, data)
+ else if (data instanceof Error)
+ this.emit('error', data)
+ else {
+ const er = new Error(msg)
+ er.data = data
+ this.emit('error', er)
+ }
+ }
+}
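A small sketch of how this mixin is applied (the `Thing` class is illustrative; real consumers set `strict` from their options):

```js
const warner = require('./warn-mixin.js')
const EE = require('events')

class Thing extends warner(EE) {}

const t = new Thing()
t.strict = false
t.on('warn', (msg, data) => console.log('warn:', msg))
t.warn('suspicious entry', { path: 'foo' }) // emits 'warn'

t.strict = true
t.on('error', er => console.log('error:', er.message))
t.warn('suspicious entry', { path: 'foo' }) // wraps msg in an Error, emits 'error'
```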
diff --git a/node_modules/pacote/node_modules/tar/lib/winchars.js b/node_modules/pacote/node_modules/tar/lib/winchars.js
new file mode 100644
index 000000000..cf6ea0606
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/winchars.js
@@ -0,0 +1,23 @@
+'use strict'
+
+// When writing files on Windows, translate the characters to their
+// 0xf000 higher-encoded versions.
+
+const raw = [
+ '|',
+ '<',
+ '>',
+ '?',
+ ':'
+]
+
+const win = raw.map(char =>
+ String.fromCharCode(0xf000 + char.charCodeAt(0)))
+
+const toWin = new Map(raw.map((char, i) => [char, win[i]]))
+const toRaw = new Map(win.map((char, i) => [char, raw[i]]))
+
+module.exports = {
+ encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s),
+ decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s)
+}
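Round-trip behavior, for reference ('<' is 0x3c, so its encoded form is \uf03c):

```js
const wc = require('./winchars.js')

wc.encode('a<b')            // 'a\uf03cb'
wc.decode(wc.encode('a<b')) // 'a<b' -- lossless round-trip
```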
diff --git a/node_modules/pacote/node_modules/tar/lib/write-entry.js b/node_modules/pacote/node_modules/tar/lib/write-entry.js
new file mode 100644
index 000000000..f562bf138
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/lib/write-entry.js
@@ -0,0 +1,395 @@
+'use strict'
+const MiniPass = require('minipass')
+const Pax = require('./pax.js')
+const Header = require('./header.js')
+const ReadEntry = require('./read-entry.js')
+const fs = require('fs')
+const path = require('path')
+
+const types = require('./types.js')
+const maxReadSize = 16 * 1024 * 1024
+const PROCESS = Symbol('process')
+const FILE = Symbol('file')
+const DIRECTORY = Symbol('directory')
+const SYMLINK = Symbol('symlink')
+const HARDLINK = Symbol('hardlink')
+const HEADER = Symbol('header')
+const READ = Symbol('read')
+const LSTAT = Symbol('lstat')
+const ONLSTAT = Symbol('onlstat')
+const ONREAD = Symbol('onread')
+const ONREADLINK = Symbol('onreadlink')
+const OPENFILE = Symbol('openfile')
+const ONOPENFILE = Symbol('onopenfile')
+const CLOSE = Symbol('close')
+const warner = require('./warn-mixin.js')
+const winchars = require('./winchars.js')
+
+const WriteEntry = warner(class WriteEntry extends MiniPass {
+ constructor (p, opt) {
+ opt = opt || {}
+ super(opt)
+ if (typeof p !== 'string')
+ throw new TypeError('path is required')
+ this.path = p
+ // suppress atime, ctime, uid, gid, uname, gname
+ this.portable = !!opt.portable
+    // until node has a builtin getpwnam, this'll have to do
+ this.myuid = process.getuid && process.getuid()
+ this.myuser = process.env.USER || ''
+ this.maxReadSize = opt.maxReadSize || maxReadSize
+ this.linkCache = opt.linkCache || new Map()
+ this.statCache = opt.statCache || new Map()
+ this.preservePaths = !!opt.preservePaths
+ this.cwd = opt.cwd || process.cwd()
+ this.strict = !!opt.strict
+ this.noPax = !!opt.noPax
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+
+ if (!this.preservePaths && path.win32.isAbsolute(p)) {
+ // absolutes on posix are also absolutes on win32
+ // so we only need to test this one to get both
+ const parsed = path.win32.parse(p)
+ this.warn('stripping ' + parsed.root + ' from absolute path', p)
+ this.path = p.substr(parsed.root.length)
+ }
+
+ this.win32 = !!opt.win32 || process.platform === 'win32'
+ if (this.win32) {
+ this.path = winchars.decode(this.path.replace(/\\/g, '/'))
+ p = p.replace(/\\/g, '/')
+ }
+
+ this.absolute = opt.absolute || path.resolve(this.cwd, p)
+
+ if (this.path === '')
+ this.path = './'
+
+ if (this.statCache.has(this.absolute))
+ this[ONLSTAT](this.statCache.get(this.absolute))
+ else
+ this[LSTAT]()
+ }
+
+ [LSTAT] () {
+ fs.lstat(this.absolute, (er, stat) => {
+ if (er)
+ return this.emit('error', er)
+ this[ONLSTAT](stat)
+ })
+ }
+
+ [ONLSTAT] (stat) {
+ this.statCache.set(this.absolute, stat)
+ this.stat = stat
+ if (!stat.isFile())
+ stat.size = 0
+ this.type = getType(stat)
+ this.emit('stat', stat)
+ this[PROCESS]()
+ }
+
+ [PROCESS] () {
+ switch (this.type) {
+ case 'File': return this[FILE]()
+ case 'Directory': return this[DIRECTORY]()
+ case 'SymbolicLink': return this[SYMLINK]()
+ // unsupported types are ignored.
+ default: return this.end()
+ }
+ }
+
+ [HEADER] () {
+ this.header = new Header({
+ path: this.path,
+ linkpath: this.linkpath,
+ // only the permissions and setuid/setgid/sticky bitflags
+ // not the higher-order bits that specify file type
+ mode: this.stat.mode & 0o7777,
+ uid: this.portable ? null : this.stat.uid,
+ gid: this.portable ? null : this.stat.gid,
+ size: this.stat.size,
+ mtime: this.type === 'Directory' && this.portable
+ ? null : this.stat.mtime,
+ type: this.type,
+ uname: this.portable ? null :
+ this.stat.uid === this.myuid ? this.myuser : '',
+ atime: this.portable ? null : this.stat.atime,
+ ctime: this.portable ? null : this.stat.ctime
+ })
+
+ if (this.header.encode() && !this.noPax)
+ this.write(new Pax({
+ atime: this.portable ? null : this.header.atime,
+ ctime: this.portable ? null : this.header.ctime,
+ gid: this.portable ? null : this.header.gid,
+ mtime: this.header.mtime,
+ path: this.path,
+ linkpath: this.linkpath,
+ size: this.header.size,
+ uid: this.portable ? null : this.header.uid,
+ uname: this.portable ? null : this.header.uname,
+ dev: this.portable ? null : this.stat.dev,
+ ino: this.portable ? null : this.stat.ino,
+ nlink: this.portable ? null : this.stat.nlink
+ }).encode())
+ this.write(this.header.block)
+ }
+
+ [DIRECTORY] () {
+ if (this.path.substr(-1) !== '/')
+ this.path += '/'
+ this.stat.size = 0
+ this[HEADER]()
+ this.end()
+ }
+
+ [SYMLINK] () {
+ fs.readlink(this.absolute, (er, linkpath) => {
+ if (er)
+ return this.emit('error', er)
+ this[ONREADLINK](linkpath)
+ })
+ }
+
+ [ONREADLINK] (linkpath) {
+ this.linkpath = linkpath
+ this[HEADER]()
+ this.end()
+ }
+
+ [HARDLINK] (linkpath) {
+ this.type = 'Link'
+ this.linkpath = path.relative(this.cwd, linkpath)
+ this.stat.size = 0
+ this[HEADER]()
+ this.end()
+ }
+
+ [FILE] () {
+ if (this.stat.nlink > 1) {
+ const linkKey = this.stat.dev + ':' + this.stat.ino
+ if (this.linkCache.has(linkKey)) {
+ const linkpath = this.linkCache.get(linkKey)
+ if (linkpath.indexOf(this.cwd) === 0)
+ return this[HARDLINK](linkpath)
+ }
+ this.linkCache.set(linkKey, this.absolute)
+ }
+
+ this[HEADER]()
+ if (this.stat.size === 0)
+ return this.end()
+
+ this[OPENFILE]()
+ }
+
+ [OPENFILE] () {
+ fs.open(this.absolute, 'r', (er, fd) => {
+ if (er)
+ return this.emit('error', er)
+ this[ONOPENFILE](fd)
+ })
+ }
+
+ [ONOPENFILE] (fd) {
+ const blockLen = 512 * Math.ceil(this.stat.size / 512)
+ const bufLen = Math.min(blockLen, this.maxReadSize)
+ const buf = Buffer.allocUnsafe(bufLen)
+ this[READ](fd, buf, 0, buf.length, 0, this.stat.size, blockLen)
+ }
+
+ [READ] (fd, buf, offset, length, pos, remain, blockRemain) {
+ fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
+ if (er)
+ return this[CLOSE](fd, _ => this.emit('error', er))
+ this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead)
+ })
+ }
+
+ [CLOSE] (fd, cb) {
+ fs.close(fd, cb)
+ }
+
+ [ONREAD] (fd, buf, offset, length, pos, remain, blockRemain, bytesRead) {
+ if (bytesRead <= 0 && remain > 0) {
+ const er = new Error('unexpected EOF')
+ er.path = this.absolute
+ er.syscall = 'read'
+ er.code = 'EOF'
+ this.emit('error', er)
+ }
+
+    // zero-fill the rest of the buffer if the block padding fits within it
+ if (bytesRead === remain) {
+ for (let i = bytesRead; i < length && bytesRead < blockRemain; i++) {
+ buf[i + offset] = 0
+        bytesRead++
+        remain++
+ }
+ }
+
+ const writeBuf = offset === 0 && bytesRead === buf.length ?
+ buf : buf.slice(offset, offset + bytesRead)
+ remain -= bytesRead
+ blockRemain -= bytesRead
+ pos += bytesRead
+ offset += bytesRead
+
+ this.write(writeBuf)
+
+ if (!remain) {
+ if (blockRemain)
+ this.write(Buffer.alloc(blockRemain))
+ this.end()
+ this[CLOSE](fd, _ => _)
+ return
+ }
+
+ if (offset >= length) {
+ buf = Buffer.allocUnsafe(length)
+ offset = 0
+ }
+ length = buf.length - offset
+ this[READ](fd, buf, offset, length, pos, remain, blockRemain)
+ }
+})
+
+class WriteEntrySync extends WriteEntry {
+ constructor (path, opt) {
+ super(path, opt)
+ }
+
+ [LSTAT] () {
+ this[ONLSTAT](fs.lstatSync(this.absolute))
+ }
+
+ [SYMLINK] () {
+ this[ONREADLINK](fs.readlinkSync(this.absolute))
+ }
+
+ [OPENFILE] () {
+ this[ONOPENFILE](fs.openSync(this.absolute, 'r'))
+ }
+
+ [READ] (fd, buf, offset, length, pos, remain, blockRemain) {
+ let threw = true
+ try {
+ const bytesRead = fs.readSync(fd, buf, offset, length, pos)
+ this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead)
+ threw = false
+ } finally {
+ if (threw)
+ try { this[CLOSE](fd) } catch (er) {}
+ }
+ }
+
+ [CLOSE] (fd) {
+ fs.closeSync(fd)
+ }
+}
+
+const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
+ constructor (readEntry, opt) {
+ opt = opt || {}
+ super(opt)
+ this.readEntry = readEntry
+ this.type = readEntry.type
+ this.path = readEntry.path
+ this.mode = readEntry.mode
+ if (this.mode)
+ this.mode = this.mode & 0o7777
+ this.uid = readEntry.uid
+ this.gid = readEntry.gid
+ this.uname = readEntry.uname
+ this.gname = readEntry.gname
+ this.size = readEntry.size
+ this.mtime = readEntry.mtime
+ this.atime = readEntry.atime
+ this.ctime = readEntry.ctime
+ this.linkpath = readEntry.linkpath
+ this.uname = readEntry.uname
+ this.gname = readEntry.gname
+
+ this.preservePaths = !!opt.preservePaths
+ this.portable = !!opt.portable
+ this.strict = !!opt.strict
+ this.noPax = !!opt.noPax
+
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+
+ if (path.isAbsolute(this.path) && !this.preservePaths) {
+ const parsed = path.parse(this.path)
+ this.warn(
+ 'stripping ' + parsed.root + ' from absolute path',
+ this.path
+ )
+ this.path = this.path.substr(parsed.root.length)
+ }
+
+ this.remain = readEntry.size
+ this.blockRemain = readEntry.startBlockSize
+
+ this.header = new Header({
+ path: this.path,
+ linkpath: this.linkpath,
+ // only the permissions and setuid/setgid/sticky bitflags
+ // not the higher-order bits that specify file type
+ mode: this.mode,
+ uid: this.portable ? null : this.uid,
+ gid: this.portable ? null : this.gid,
+ size: this.size,
+ mtime: this.mtime,
+ type: this.type,
+ uname: this.portable ? null : this.uname,
+ atime: this.portable ? null : this.atime,
+ ctime: this.portable ? null : this.ctime
+ })
+
+ if (this.header.encode() && !this.noPax)
+ super.write(new Pax({
+ atime: this.portable ? null : this.atime,
+ ctime: this.portable ? null : this.ctime,
+ gid: this.portable ? null : this.gid,
+ mtime: this.mtime,
+ path: this.path,
+ linkpath: this.linkpath,
+ size: this.size,
+ uid: this.portable ? null : this.uid,
+ uname: this.portable ? null : this.uname,
+ dev: this.portable ? null : this.readEntry.dev,
+ ino: this.portable ? null : this.readEntry.ino,
+ nlink: this.portable ? null : this.readEntry.nlink
+ }).encode())
+
+ super.write(this.header.block)
+ readEntry.pipe(this)
+ }
+
+ write (data) {
+ const writeLen = data.length
+ if (writeLen > this.blockRemain)
+ throw new Error('writing more to entry than is appropriate')
+ this.blockRemain -= writeLen
+ return super.write(data)
+ }
+
+ end () {
+ if (this.blockRemain)
+ this.write(Buffer.alloc(this.blockRemain))
+ return super.end()
+ }
+})
+
+WriteEntry.Sync = WriteEntrySync
+WriteEntry.Tar = WriteEntryTar
+
+const getType = stat =>
+ stat.isFile() ? 'File'
+ : stat.isDirectory() ? 'Directory'
+ : stat.isSymbolicLink() ? 'SymbolicLink'
+ : 'Unsupported'
+
+module.exports = WriteEntry
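WriteEntry is the producing counterpart to unpack: given a path, it lstats the file, emits a 512-byte header block (plus a pax extended header when the plain header can't encode everything), then streams the file body padded out to the block size. A minimal sketch (the path is illustrative):

```js
const WriteEntry = require('./write-entry.js')

const entry = new WriteEntry('some/file.txt', { cwd: process.cwd() })
entry.on('data', chunk => {
  // header block(s) first, then file contents padded to a 512-byte boundary
})
entry.on('end', () => console.log('entry serialized'))
```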
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/.npmignore b/node_modules/pacote/node_modules/tar/node_modules/minipass/.npmignore
new file mode 100644
index 000000000..183822a7f
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/.npmignore
@@ -0,0 +1,4 @@
+.*.swp
+node_modules
+.nyc_output/
+coverage/
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/.travis.yml b/node_modules/pacote/node_modules/tar/node_modules/minipass/.travis.yml
new file mode 100644
index 000000000..59410a36d
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/.travis.yml
@@ -0,0 +1,7 @@
+language: node_js
+sudo: false
+node_js:
+ - 7
+cache:
+ directories:
+ - /Users/isaacs/.npm
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/README.md b/node_modules/pacote/node_modules/tar/node_modules/minipass/README.md
new file mode 100644
index 000000000..97eadeaea
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/README.md
@@ -0,0 +1,46 @@
+# minipass
+
+A _very_ minimal implementation of a [PassThrough
+stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough)
+
+[It's very
+fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0)
+for objects, strings, and buffers.
+
+Supports pipe()ing (including multi-pipe() and backpressure
+transmission), buffering data until either a `data` event handler or
+`pipe()` is added (so you don't lose the first chunk), and most other
+cases where PassThrough is a good idea.
+
+There is a `read()` method, but it's much more efficient to consume
+data from this stream via `'data'` events or by calling `pipe()` into
+some other stream. Calling `read()` requires the buffer to be
+flattened in some cases, which requires copying memory. Also,
+`read()` always returns Buffers, even if an `encoding` option is
+specified.
+
+There is also no `unpipe()` method. Once you start piping, there is
+no stopping it!
+
+If you set `objectMode: true` in the options, then whatever is written
+will be emitted. Otherwise, it'll do a minimal amount of Buffer
+copying to ensure proper Streams semantics when `read(n)` is called.
+
+This is not a `through` or `through2` stream. It doesn't transform
+the data, it just passes it right through. If you want to transform
+the data, extend the class, and override the `write()` method. Once
+you're done transforming the data however you want, call
+`super.write()` with the transform output.
+
+For an example of a stream that extends MiniPass to provide transform
+capabilities, check out [minizlib](http://npm.im/minizlib).
+
+## USAGE
+
+```js
+const MiniPass = require('minipass')
+const mp = new MiniPass(options) // optional: { encoding }
+mp.write('foo')
+mp.pipe(someOtherStream)
+mp.end('bar')
+```
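+
+For instance, a minimal sketch of that extension pattern (the uppercasing
+transform is purely illustrative):
+
+```js
+const MiniPass = require('minipass')
+
+class Upper extends MiniPass {
+  write (chunk, encoding, cb) {
+    // transform the data, then hand the result to the underlying stream
+    return super.write(chunk.toString().toUpperCase(), 'utf8', cb)
+  }
+}
+
+const up = new Upper({ encoding: 'utf8' })
+up.on('data', c => console.log(c)) // 'FOO'
+up.end('foo')
+```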
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/b.js b/node_modules/pacote/node_modules/tar/node_modules/minipass/b.js
new file mode 100644
index 000000000..324c4190a
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/b.js
@@ -0,0 +1,12 @@
+const MiniPass = require('./')
+const butterfly = '🦋'
+var mp = new MiniPass({ encoding: 'utf8' })
+mp.on('data', chunk => {
+ console.error('data %s', chunk)
+})
+var butterbuf = new Buffer([0xf0, 0x9f, 0xa6, 0x8b])
+mp.write(butterbuf.slice(0, 1))
+mp.write(butterbuf.slice(1, 2))
+mp.write(butterbuf.slice(2, 3))
+mp.write(butterbuf.slice(3, 4))
+mp.end()
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/extend-minipass.js b/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/extend-minipass.js
new file mode 100644
index 000000000..8e7841a87
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/extend-minipass.js
@@ -0,0 +1,11 @@
+'use strict'
+const MiniPass = require('../..')
+
+module.exports = class ExtendMiniPass extends MiniPass {
+ constructor (opts) {
+ super(opts)
+ }
+ write (data, encoding) {
+ return super.write(data, encoding)
+ }
+}
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/extend-through2.js b/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/extend-through2.js
new file mode 100644
index 000000000..6a021084c
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/extend-through2.js
@@ -0,0 +1,12 @@
+'use strict'
+const through2 = require('through2')
+module.exports = function (opt) {
+ return opt.objectMode
+ ? through2.obj(func)
+ : through2(func)
+
+ function func (data, enc, done) {
+ this.push(data, enc)
+ done()
+ }
+}
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/extend-transform.js b/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/extend-transform.js
new file mode 100644
index 000000000..1d2d24026
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/extend-transform.js
@@ -0,0 +1,11 @@
+'use strict'
+const stream = require('stream')
+module.exports = class ExtendTransform extends stream.Transform {
+ constructor (opts) {
+ super(opts)
+ }
+ _transform (data, enc, done) {
+ this.push(data, enc)
+ done()
+ }
+}
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/nullsink.js b/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/nullsink.js
new file mode 100644
index 000000000..13f6e916b
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/nullsink.js
@@ -0,0 +1,12 @@
+'use strict'
+const EE = require('events').EventEmitter
+
+module.exports = class NullSink extends EE {
+ write (data, encoding, next) {
+ if (next) next()
+ return true
+ }
+ end () {
+ this.emit('finish')
+ }
+}
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/numbers.js b/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/numbers.js
new file mode 100644
index 000000000..bd1593299
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/numbers.js
@@ -0,0 +1,41 @@
+'use strict'
+const stream = require('stream')
+
+const numbers = new Array(1000).join(',').split(',').map((v, k) => k)
+let acc = ''
+const strings = numbers.map(n => acc += n)
+const bufs = strings.map(s => new Buffer(s))
+const objs = strings.map(s => ({ str: s }))
+
+module.exports = class Numbers {
+ constructor (opt) {
+ this.objectMode = opt.objectMode
+ this.encoding = opt.encoding
+ this.ii = 0
+ this.done = false
+ }
+ pipe (dest) {
+ this.dest = dest
+ this.go()
+ return dest
+ }
+
+ go () {
+ let flowing = true
+ while (flowing) {
+ if (this.ii >= 1000) {
+ this.dest.end()
+ this.done = true
+ flowing = false
+ } else {
+ flowing = this.dest.write(
+ (this.objectMode ? objs
+ : this.encoding ? strings
+ : bufs)[this.ii++])
+ }
+ }
+
+ if (!this.done)
+ this.dest.once('drain', _ => this.go())
+ }
+}
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/timer.js b/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/timer.js
new file mode 100644
index 000000000..8d8fe3d80
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/lib/timer.js
@@ -0,0 +1,15 @@
+'use strict'
+module.exports = _ => {
+ const start = process.hrtime()
+ return _ => {
+ const end = process.hrtime(start)
+ const ms = Math.round(end[0]*1e6 + end[1]/1e3)/1e3
+ if (!process.env.isTTY)
+ console.log(ms)
+ else {
+ const s = Math.round(end[0]*10 + end[1]/1e8)/10
+ const ss = s <= 1 ? '' : ' (' + s + 's)'
+ console.log('%d%s', ms, ss)
+ }
+ }
+}
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/test.js b/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/test.js
new file mode 100644
index 000000000..29c9fd07d
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/bench/test.js
@@ -0,0 +1,160 @@
+'use strict'
+
+const iterations = +process.env.BENCH_TEST_ITERATION || 100
+const testCount = +process.env.BENCH_TEST_COUNT || 20
+
+const tests = [
+ 'baseline',
+ 'minipass',
+ 'extend-minipass',
+ 'through2',
+ 'extend-through2',
+ 'passthrough',
+ 'extend-transform'
+]
+
+const manyOpts = [ 'many', 'single' ]
+const typeOpts = [ 'buffer', 'string', 'object' ]
+
+const main = () => {
+ const spawn = require('child_process').spawn
+ const node = process.execPath
+
+ const results = {}
+
+ const testSet = []
+ tests.forEach(t =>
+ manyOpts.forEach(many =>
+ typeOpts.forEach(type =>
+ new Array(testCount).join(',').split(',').forEach(() =>
+ t !== 'baseline' || (many === 'single' && type === 'object')
+ ? testSet.push([t, many, type]) : null))))
+
+ let didFirst = false
+ const mainRunTest = t => {
+ if (!t)
+ return afterMain(results)
+
+ const k = t.join('\t')
+ if (!results[k]) {
+ results[k] = []
+ if (!didFirst)
+ didFirst = true
+ else
+ process.stderr.write('\n')
+
+ process.stderr.write(k + ' #')
+ } else {
+ process.stderr.write('#')
+ }
+
+ const c = spawn(node, [__filename].concat(t), {
+ stdio: [ 'ignore', 'pipe', 2 ]
+ })
+ let out = ''
+ c.stdout.on('data', c => out += c)
+ c.on('close', (code, signal) => {
+ if (code || signal)
+ throw new Error('failed: ' + code + ' ' + signal)
+ results[k].push(+out)
+ mainRunTest(testSet.shift())
+ })
+ }
+
+ mainRunTest(testSet.shift())
+}
+
+const afterMain = results => {
+ console.log('test\tmany\ttype\tops/s\tmean\tmedian\tmax\tmin' +
+ '\tstdev\trange\traw')
+ // get the mean, median, stddev, and range of each test
+ Object.keys(results).forEach(test => {
+ const k = results[test].sort((a, b) => a - b)
+ const min = k[0]
+ const max = k[ k.length - 1 ]
+ const range = max - min
+ const sum = k.reduce((a,b) => a + b, 0)
+ const mean = sum / k.length
+ const ops = iterations / mean * 1000
+ const devs = k.map(n => n - mean).map(n => n * n)
+ const avgdev = devs.reduce((a,b) => a + b, 0) / k.length
+ const stdev = Math.pow(avgdev, 0.5)
+ const median = k.length % 2 ? k[Math.floor(k.length / 2)] :
+ (k[k.length/2] + k[k.length/2+1])/2
+ console.log(
+ '%s\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%s', test, round(ops),
+ round(mean), round(median),
+ max, min, round(stdev), round(range),
+ k.join('\t'))
+ })
+}
+
+const round = num => Math.round(num * 1000)/1000
+
+const test = (testname, many, type) => {
+ const timer = require('./lib/timer.js')
+ const Class = getClass(testname)
+
+ const done = timer()
+ runTest(Class, many, type, iterations, done)
+}
+
+// don't blow up the stack! loop unless deferred
+const runTest = (Class, many, type, iterations, done) => {
+ const Nullsink = require('./lib/nullsink.js')
+ const Numbers = require('./lib/numbers.js')
+ const opt = {}
+ if (type === 'string')
+ opt.encoding = 'utf8'
+ else if (type === 'object')
+ opt.objectMode = true
+
+ while (iterations--) {
+ let finished = false
+ let inloop = true
+ const after = iterations === 0 ? done
+ : () => {
+ if (iterations === 0)
+ done()
+ else if (inloop)
+ finished = true
+ else
+ runTest(Class, many, type, iterations, done)
+ }
+
+ const out = new Nullsink().on('finish', after)
+ let sink = Class ? new Class(opt) : out
+
+ if (many && Class)
+ sink = sink
+ .pipe(new Class(opt))
+ .pipe(new Class(opt))
+ .pipe(new Class(opt))
+ .pipe(new Class(opt))
+
+ if (sink !== out)
+ sink.pipe(out)
+
+ new Numbers(opt).pipe(sink)
+
+    // if the stream already finished synchronously, keep tight-looping;
+    // otherwise break and let the 'finish' handler continue the run
+ if (!finished) {
+ inloop = false
+ break
+ }
+ }
+}
+
+const getClass = testname =>
+ testname === 'through2' ? require('through2').obj
+ : testname === 'extend-through2' ? require('./lib/extend-through2.js')
+ : testname === 'minipass' ? require('../')
+ : testname === 'extend-minipass' ? require('./lib/extend-minipass.js')
+ : testname === 'passthrough' ? require('stream').PassThrough
+ : testname === 'extend-transform' ? require('./lib/extend-transform.js')
+ : null
+
+if (!process.argv[2])
+ main()
+else
+ test(process.argv[2], process.argv[3] === 'many', process.argv[4])
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/d.js b/node_modules/pacote/node_modules/tar/node_modules/minipass/d.js
new file mode 100644
index 000000000..ceea51396
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/d.js
@@ -0,0 +1,7 @@
+var MD = require('./')
+var d = new MD()
+console.log(d.write('hello'))
+console.log(d.write('goodbye'))
+d.pipe(process.stderr)
+console.log(d.write('the end'))
+console.log(d.end())
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/e.js b/node_modules/pacote/node_modules/tar/node_modules/minipass/e.js
new file mode 100644
index 000000000..f1da6c746
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/e.js
@@ -0,0 +1,17 @@
+const MP = require('stream').PassThrough // require('./')
+const mp = new MP()
+const wait = (n) => new Promise(resolve => setTimeout(resolve, n))
+const t = require('tap')
+
+t.test('end ordering', async t => {
+ mp.on('end', _ => console.log('end'))
+ mp.end()
+ console.log('called end')
+ // mp.resume()
+ // console.log('called resume()')
+ // mp.read()
+ // console.log('called read')
+ mp.on('data', _=>_)
+ console.log('added data handler')
+ await wait(1)
+})
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/eos.js b/node_modules/pacote/node_modules/tar/node_modules/minipass/eos.js
new file mode 100644
index 000000000..225072094
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/eos.js
@@ -0,0 +1,12 @@
+const EE = require('events').EventEmitter
+const eos = require('end-of-stream')
+const ee = new EE()
+ee.readable = ee.writable = true
+eos(ee, er => {
+ if (er)
+ throw er
+ console.log('stream ended')
+})
+ee.emit('finish')
+ee.emit('close')
+ee.emit('end')
diff --git a/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/file1 b/node_modules/pacote/node_modules/tar/node_modules/minipass/foo
index e69de29bb..e69de29bb 100644
--- a/node_modules/pacote/node_modules/tar-fs/test/fixtures/d/file1
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/foo
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/index.js b/node_modules/pacote/node_modules/tar/node_modules/minipass/index.js
new file mode 100644
index 000000000..3a3ad412b
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/index.js
@@ -0,0 +1,295 @@
+'use strict'
+const EE = require('events')
+const Yallist = require('yallist')
+const EOF = Symbol('EOF')
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
+const EMITTED_END = Symbol('emittedEnd')
+const CLOSED = Symbol('closed')
+const READ = Symbol('read')
+const FLUSH = Symbol('flush')
+const FLUSHCHUNK = Symbol('flushChunk')
+const SD = require('string_decoder').StringDecoder
+const ENCODING = Symbol('encoding')
+const DECODER = Symbol('decoder')
+const FLOWING = Symbol('flowing')
+const RESUME = Symbol('resume')
+const BUFFERLENGTH = Symbol('bufferLength')
+const BUFFERPUSH = Symbol('bufferPush')
+const BUFFERSHIFT = Symbol('bufferShift')
+const OBJECTMODE = Symbol('objectMode')
+
+class MiniPass extends EE {
+ constructor (options) {
+ super()
+ this[FLOWING] = false
+ this.pipes = new Yallist()
+ this.buffer = new Yallist()
+ this[OBJECTMODE] = options && options.objectMode || false
+ if (this[OBJECTMODE])
+ this[ENCODING] = null
+ else
+ this[ENCODING] = options && options.encoding || null
+ if (this[ENCODING] === 'buffer')
+ this[ENCODING] = null
+ this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
+ this[EOF] = false
+ this[EMITTED_END] = false
+ this[CLOSED] = false
+ this.writable = true
+ this.readable = true
+ this[BUFFERLENGTH] = 0
+ }
+
+ get bufferLength () { return this[BUFFERLENGTH] }
+
+ get encoding () { return this[ENCODING] }
+ set encoding (enc) {
+ if (this[OBJECTMODE])
+ throw new Error('cannot set encoding in objectMode')
+
+ if (this[ENCODING] && enc !== this[ENCODING] &&
+ (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
+ throw new Error('cannot change encoding')
+
+ if (this[ENCODING] !== enc) {
+ this[DECODER] = enc ? new SD(enc) : null
+ if (this.buffer.length)
+ this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
+ }
+
+ this[ENCODING] = enc
+ }
+
+ setEncoding (enc) {
+ this.encoding = enc
+ }
+
+ write (chunk, encoding, cb) {
+ if (this[EOF])
+ throw new Error('write after end')
+
+ if (typeof encoding === 'function')
+ cb = encoding, encoding = 'utf8'
+
+ if (!encoding)
+ encoding = 'utf8'
+
+ // fast-path writing strings of same encoding to a stream with
+ // an empty buffer, skipping the buffer/decoder dance
+ if (typeof chunk === 'string' && !this[OBJECTMODE] &&
+ // unless it is a string already ready for us to use
+ !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
+ chunk = new Buffer(chunk, encoding)
+ }
+
+ if (Buffer.isBuffer(chunk) && this[ENCODING])
+ chunk = this[DECODER].write(chunk)
+
+ try {
+ return this.flowing
+ ? (this.emit('data', chunk), this.flowing)
+ : (this[BUFFERPUSH](chunk), false)
+ } finally {
+ this.emit('readable')
+ if (cb)
+ cb()
+ }
+ }
+
+ read (n) {
+ try {
+ if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH])
+ return null
+
+ if (this[OBJECTMODE])
+ n = null
+
+ if (this.buffer.length > 1 && !this[OBJECTMODE]) {
+ if (this.encoding)
+ this.buffer = new Yallist([
+ Array.from(this.buffer).join('')
+ ])
+ else
+ this.buffer = new Yallist([
+ Buffer.concat(Array.from(this.buffer), this[BUFFERLENGTH])
+ ])
+ }
+
+ return this[READ](n || null, this.buffer.head.value)
+ } finally {
+ this[MAYBE_EMIT_END]()
+ }
+ }
+
+ [READ] (n, chunk) {
+ if (n === chunk.length || n === null)
+ this[BUFFERSHIFT]()
+ else {
+ this.buffer.head.value = chunk.slice(n)
+ chunk = chunk.slice(0, n)
+ this[BUFFERLENGTH] -= n
+ }
+
+ this.emit('data', chunk)
+
+ if (!this.buffer.length && !this[EOF])
+ this.emit('drain')
+
+ return chunk
+ }
+
+ end (chunk, encoding, cb) {
+ if (typeof chunk === 'function')
+ cb = chunk, chunk = null
+ if (typeof encoding === 'function')
+ cb = encoding, encoding = 'utf8'
+ if (chunk)
+ this.write(chunk, encoding)
+ if (cb)
+ this.once('end', cb)
+ this[EOF] = true
+ this.writable = false
+ if (this.flowing)
+ this[MAYBE_EMIT_END]()
+ }
+
+ // don't let the internal resume be overwritten
+ [RESUME] () {
+ this[FLOWING] = true
+ this.emit('resume')
+ if (this.buffer.length)
+ this[FLUSH]()
+ else if (this[EOF])
+ this[MAYBE_EMIT_END]()
+ else
+ this.emit('drain')
+ }
+
+ resume () {
+ return this[RESUME]()
+ }
+
+ pause () {
+ this[FLOWING] = false
+ }
+
+ get flowing () {
+ return this[FLOWING]
+ }
+
+ [BUFFERPUSH] (chunk) {
+ if (this[OBJECTMODE])
+ this[BUFFERLENGTH] += 1
+ else
+ this[BUFFERLENGTH] += chunk.length
+ return this.buffer.push(chunk)
+ }
+
+ [BUFFERSHIFT] () {
+ if (this.buffer.length) {
+ if (this[OBJECTMODE])
+ this[BUFFERLENGTH] -= 1
+ else
+ this[BUFFERLENGTH] -= this.buffer.head.value.length
+ }
+ return this.buffer.shift()
+ }
+
+ [FLUSH] () {
+ do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
+
+ if (!this.buffer.length && !this[EOF])
+ this.emit('drain')
+ }
+
+ [FLUSHCHUNK] (chunk) {
+ return chunk ? (this.emit('data', chunk), this.flowing) : false
+ }
+
+ pipe (dest, opts) {
+ if (dest === process.stdout || dest === process.stderr)
+ (opts = opts || {}).end = false
+ const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() }
+ this.pipes.push(p)
+
+ dest.on('drain', p.ondrain)
+ this[RESUME]()
+ return dest
+ }
+
+ addEventHandler (ev, fn) {
+ return this.on(ev, fn)
+ }
+
+ on (ev, fn) {
+ try {
+ return super.on(ev, fn)
+ } finally {
+ if (ev === 'data' && !this.pipes.length && !this.flowing) {
+ this[RESUME]()
+ }
+ }
+ }
+
+ get emittedEnd () {
+ return this[EMITTED_END]
+ }
+
+ [MAYBE_EMIT_END] () {
+ if (!this[EMITTED_END] && this.buffer.length === 0 && this[EOF]) {
+ this.emit('end')
+ this.emit('prefinish')
+ this.emit('finish')
+ if (this[CLOSED])
+ this.emit('close')
+ }
+ }
+
+ emit (ev, data) {
+ if (ev === 'data') {
+ if (!data)
+ return
+
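+      // deliver to every pipe destination; if any write() returns false
+      // (backpressure), pause this stream until that dest emits 'drain'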
+ if (this.pipes.length)
+ this.pipes.forEach(p => p.dest.write(data) || this.pause())
+ } else if (ev === 'end') {
+ if (this[DECODER]) {
+ data = this[DECODER].end()
+ if (data) {
+ this.pipes.forEach(p => p.dest.write(data))
+ super.emit('data', data)
+ }
+ }
+ this.pipes.forEach(p => {
+ p.dest.removeListener('drain', p.ondrain)
+ if (!p.opts || p.opts.end !== false)
+ p.dest.end()
+ })
+ this[EMITTED_END] = true
+ this.readable = false
+ } else if (ev === 'close') {
+ this[CLOSED] = true
+ // don't emit close before 'end' and 'finish'
+ if (!this[EMITTED_END])
+ return
+ }
+
+ const args = new Array(arguments.length)
+ args[0] = ev
+ args[1] = data
+ if (arguments.length > 2) {
+ for (let i = 2; i < arguments.length; i++) {
+ args[i] = arguments[i]
+ }
+ }
+
+ try {
+ return super.emit.apply(this, args)
+ } finally {
+ if (ev !== 'end')
+ this[MAYBE_EMIT_END]()
+ }
+ }
+}
+
+module.exports = MiniPass
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/minipass-benchmarks.xlsx b/node_modules/pacote/node_modules/tar/node_modules/minipass/minipass-benchmarks.xlsx
new file mode 100644
index 000000000..05e19a41b
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/minipass-benchmarks.xlsx
Binary files differ
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/package.json b/node_modules/pacote/node_modules/tar/node_modules/minipass/package.json
new file mode 100644
index 000000000..5cdc6970d
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/package.json
@@ -0,0 +1,64 @@
+{
+ "_from": "minipass@^2.0.2",
+ "_id": "minipass@2.2.1",
+ "_inBundle": false,
+ "_integrity": "sha512-u1aUllxPJUI07cOqzR7reGmQxmCqlH88uIIsf6XZFEWgw7gXKpJdR+5R9Y3KEDmWYkdIz9wXZs3C0jOPxejk/Q==",
+ "_location": "/pacote/tar/minipass",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "minipass@^2.0.2",
+ "name": "minipass",
+ "escapedName": "minipass",
+ "rawSpec": "^2.0.2",
+ "saveSpec": null,
+ "fetchSpec": "^2.0.2"
+ },
+ "_requiredBy": [
+ "/pacote/tar",
+ "/pacote/tar/minizlib"
+ ],
+ "_resolved": "https://registry.npmjs.org/minipass/-/minipass-2.2.1.tgz",
+ "_shasum": "5ada97538b1027b4cf7213432428578cb564011f",
+ "_spec": "minipass@^2.0.2",
+ "_where": "/Users/rebecca/code/npm/node_modules/pacote/node_modules/tar",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/minipass/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "yallist": "^3.0.0"
+ },
+ "deprecated": false,
+ "description": "minimal implementation of a PassThrough stream",
+ "devDependencies": {
+ "end-of-stream": "^1.4.0",
+ "tap": "^10.7.0",
+ "through2": "^2.0.3"
+ },
+ "homepage": "https://github.com/isaacs/minipass#readme",
+ "keywords": [
+ "passthrough",
+ "stream"
+ ],
+ "license": "ISC",
+ "main": "index.js",
+ "name": "minipass",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/minipass.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100"
+ },
+ "version": "2.2.1"
+}
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/test/basic.js b/node_modules/pacote/node_modules/tar/node_modules/minipass/test/basic.js
new file mode 100644
index 000000000..e3885c808
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/test/basic.js
@@ -0,0 +1,438 @@
+const MiniPass = require('../')
+const t = require('tap')
+const EE = require('events').EventEmitter
+
+t.test('some basic piping and writing', async t => {
+ let mp = new MiniPass({ encoding: 'base64' })
+ t.notOk(mp.flowing)
+ mp.flowing = true
+ t.notOk(mp.flowing)
+ t.equal(mp.encoding, 'base64')
+ mp.encoding = null
+ t.equal(mp.encoding, null)
+ t.equal(mp.readable, true)
+ t.equal(mp.writable, true)
+ t.equal(mp.write('hello'), false)
+ let dest = new MiniPass()
+ let sawDestData = false
+ dest.once('data', chunk => {
+ sawDestData = true
+ t.isa(chunk, Buffer)
+ })
+ t.equal(mp.pipe(dest), dest, 'pipe returns dest')
+  t.ok(sawDestData, 'got data because pipe() flushes')
+ t.equal(mp.write('bye'), true, 'write() returns true when flowing')
+ dest.pause()
+ t.equal(mp.write('after pause'), false, 'false when dest is paused')
+ t.equal(mp.write('after false'), false, 'false when not flowing')
+ t.equal(dest.buffer.length, 1, '1 item is buffered in dest')
+ t.equal(mp.buffer.length, 1, '1 item buffered in src')
+ dest.resume()
+ t.equal(dest.buffer.length, 0, 'nothing is buffered in dest')
+ t.equal(mp.buffer.length, 0, 'nothing buffered in src')
+})
+
+t.test('unicode splitting', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ t.plan(2)
+ t.equal(mp.encoding, 'utf8')
+ mp.on('data', chunk => {
+ t.equal(chunk, butterfly)
+ })
+ const butterbuf = new Buffer([0xf0, 0x9f, 0xa6, 0x8b])
+ mp.write(butterbuf.slice(0, 1))
+ mp.write(butterbuf.slice(1, 2))
+ mp.write(butterbuf.slice(2, 3))
+ mp.write(butterbuf.slice(3, 4))
+ mp.end()
+})
+
+t.test('unicode splitting with setEncoding', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'hex' })
+ t.plan(4)
+ t.equal(mp.encoding, 'hex')
+ mp.setEncoding('hex')
+ t.equal(mp.encoding, 'hex')
+ mp.setEncoding('utf8')
+ t.equal(mp.encoding, 'utf8')
+ mp.on('data', chunk => {
+ t.equal(chunk, butterfly)
+ })
+ const butterbuf = new Buffer([0xf0, 0x9f, 0xa6, 0x8b])
+ mp.write(butterbuf.slice(0, 1))
+ mp.write(butterbuf.slice(1, 2))
+ mp.write(butterbuf.slice(2, 3))
+ mp.write(butterbuf.slice(3, 4))
+ mp.end()
+})
+
+t.test('base64 -> utf8 piping', t => {
+ t.plan(1)
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'base64' })
+ const dest = new MiniPass({ encoding: 'utf8' })
+ mp.pipe(dest)
+ let out = ''
+ dest.on('data', c => out += c)
+ dest.on('end', _ =>
+ t.equal(new Buffer(out, 'base64').toString('utf8'), butterfly))
+ mp.write(butterfly)
+ mp.end()
+})
+
+t.test('utf8 -> base64 piping', t => {
+ t.plan(1)
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ const dest = new MiniPass({ encoding: 'base64' })
+ mp.pipe(dest)
+ let out = ''
+ dest.on('data', c => out += c)
+ dest.on('end', _ =>
+ t.equal(new Buffer(out, 'base64').toString('utf8'), butterfly))
+ mp.write(butterfly)
+ mp.end()
+})
+
+t.test('read method', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ mp.on('data', c => t.equal(c, butterfly))
+ mp.pause()
+ mp.write(new Buffer(butterfly))
+ t.equal(mp.read(5), null)
+ t.equal(mp.read(0), null)
+ t.same(mp.read(2), butterfly)
+})
+
+t.test('read with no args', async t => {
+ t.test('buffer -> string', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ mp.on('data', c => t.equal(c, butterfly))
+ mp.pause()
+ const butterbuf = new Buffer(butterfly)
+ mp.write(butterbuf.slice(0, 2))
+ mp.write(butterbuf.slice(2))
+ t.same(mp.read(), butterfly)
+ t.equal(mp.read(), null)
+ })
+
+ t.test('buffer -> buffer', async t => {
+ const butterfly = new Buffer('🦋')
+ const mp = new MiniPass()
+ mp.on('data', c => t.same(c, butterfly))
+ mp.pause()
+ mp.write(butterfly.slice(0, 2))
+ mp.write(butterfly.slice(2))
+ t.same(mp.read(), butterfly)
+ t.equal(mp.read(), null)
+ })
+
+ t.test('string -> buffer', async t => {
+ const butterfly = '🦋'
+ const butterbuf = new Buffer(butterfly)
+ const mp = new MiniPass()
+ mp.on('data', c => t.same(c, butterbuf))
+ mp.pause()
+ mp.write(butterfly)
+ t.same(mp.read(), butterbuf)
+ t.equal(mp.read(), null)
+ })
+
+ t.test('string -> string', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ mp.on('data', c => t.equal(c, butterfly))
+ mp.pause()
+ mp.write(butterfly[0])
+ mp.write(butterfly[1])
+ t.same(mp.read(), butterfly)
+ t.equal(mp.read(), null)
+ })
+})
+
+t.test('partial read', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass()
+ const butterbuf = new Buffer(butterfly)
+ mp.write(butterbuf.slice(0, 1))
+ mp.write(butterbuf.slice(1, 2))
+ mp.write(butterbuf.slice(2, 3))
+ mp.write(butterbuf.slice(3, 4))
+ t.equal(mp.read(5), null)
+ t.equal(mp.read(0), null)
+ t.same(mp.read(2), butterbuf.slice(0, 2))
+ t.same(mp.read(2), butterbuf.slice(2, 4))
+})
+
+t.test('write after end', async t => {
+ const mp = new MiniPass()
+ let sawEnd = false
+ mp.on('end', _ => sawEnd = true)
+ mp.end()
+ t.throws(_ => mp.write('nope'))
+ t.notOk(sawEnd, 'should not get end event yet (not flowing)')
+ mp.resume()
+ t.ok(sawEnd, 'should get end event after resume()')
+})
+
+t.test('write cb', async t => {
+ const mp = new MiniPass()
+ let calledCb = false
+ mp.write('ok', () => calledCb = true)
+ t.ok(calledCb)
+})
+
+t.test('end with chunk', async t => {
+ let out = ''
+ const mp = new MiniPass({ encoding: 'utf8' })
+ let sawEnd = false
+ mp.on('end', _ => sawEnd = true)
+ mp.addEventHandler('data', c => out += c)
+ let endCb = false
+ mp.end('ok', _ => endCb = true)
+ t.equal(out, 'ok')
+ t.ok(sawEnd, 'should see end event')
+ t.ok(endCb, 'end cb should get called')
+})
+
+t.test('no drain if could not entirely drain on resume', async t => {
+ const mp = new MiniPass()
+ const dest = new MiniPass({ encoding: 'buffer' })
+ t.equal(mp.write('foo'), false)
+ t.equal(mp.write('bar'), false)
+ t.equal(mp.write('baz'), false)
+ t.equal(mp.write('qux'), false)
+ mp.on('drain', _ => t.fail('should not drain'))
+ mp.pipe(dest)
+})
+
+t.test('end with chunk pending', async t => {
+ const mp = new MiniPass()
+ t.equal(mp.write('foo'), false)
+ t.equal(mp.write('626172', 'hex'), false)
+ t.equal(mp.write('baz'), false)
+ t.equal(mp.write('qux'), false)
+ let sawEnd = false
+ mp.on('end', _ => sawEnd = true)
+ let endCb = false
+ mp.end(_ => endCb = true)
+ t.notOk(endCb, 'endcb should not happen yet')
+ t.notOk(sawEnd, 'should not see end yet')
+ let out = ''
+ mp.on('data', c => out += c)
+ t.ok(sawEnd, 'see end after flush')
+ t.ok(endCb, 'end cb after flush')
+ t.equal(out, 'foobarbazqux')
+})
+
+t.test('pipe to stderr does not throw', t => {
+ const spawn = require('child_process').spawn
+ const module = JSON.stringify(require.resolve('../'))
+ const fs = require('fs')
+ const file = __dirname + '/prog.js'
+ fs.writeFileSync(file, `
+ const MP = require(${module})
+ const mp = new MP()
+ mp.pipe(process.stderr)
+ mp.end("hello")
+ `)
+ let err = ''
+ return new Promise(res => {
+ const child = spawn(process.execPath, [file])
+ child.stderr.on('data', c => err += c)
+ child.on('close', (code, signal) => {
+ t.equal(code, 0)
+ t.equal(signal, null)
+ t.equal(err, 'hello')
+ fs.unlinkSync(file)
+ res()
+ })
+ })
+})
+
+t.test('emit works with many args', t => {
+ const mp = new MiniPass()
+ t.plan(2)
+ mp.on('foo', function (a, b, c, d, e, f, g) {
+ t.same([a,b,c,d,e,f,g], [1,2,3,4,5,6,7])
+ t.equal(arguments.length, 7)
+ })
+ mp.emit('foo', 1, 2, 3, 4, 5, 6, 7)
+})
+
+t.test('emit drain on resume, even if no flush', t => {
+ const mp = new MiniPass()
+ mp.encoding = 'utf8'
+
+ const chunks = []
+ class SlowStream extends EE {
+ write (chunk) {
+ chunks.push(chunk)
+ setTimeout(_ => this.emit('drain'))
+ return false
+ }
+ end () { return this.write() }
+ }
+
+ const ss = new SlowStream()
+
+ mp.pipe(ss)
+ t.ok(mp.flowing, 'flowing, because piped')
+ t.equal(mp.write('foo'), false, 'write() returns false, backpressure')
+ t.equal(mp.buffer.length, 0, 'buffer len is 0')
+ t.equal(mp.flowing, false, 'flowing false, awaiting drain')
+ t.same(chunks, ['foo'], 'chunk made it through')
+ mp.once('drain', _ => {
+ t.pass('received mp drain event')
+ t.end()
+ })
+})
+
+t.test('save close for end', t => {
+ const mp = new MiniPass()
+ let ended = false
+ mp.on('close', _ => {
+ t.equal(ended, true, 'end before close')
+ t.end()
+ })
+ mp.on('end', _ => {
+ t.equal(ended, false, 'only end once')
+ ended = true
+ })
+
+ mp.emit('close')
+ mp.end('foo')
+ t.equal(ended, false, 'no end until flushed')
+ mp.resume()
+})
+
+t.test('eos works', t => {
+ const eos = require('end-of-stream')
+ const mp = new MiniPass()
+
+ eos(mp, er => {
+ if (er)
+ throw er
+ t.end()
+ })
+
+ mp.emit('close')
+ mp.end('foo')
+ mp.resume()
+})
+
+t.test('bufferLength property', t => {
+ const eos = require('end-of-stream')
+ const mp = new MiniPass()
+ mp.write('a')
+ mp.write('a')
+ mp.write('a')
+ mp.write('a')
+ mp.write('a')
+ mp.write('a')
+
+ t.equal(mp.bufferLength, 6)
+ t.equal(mp.read(7), null)
+ t.equal(mp.read(3).toString(), 'aaa')
+ t.equal(mp.bufferLength, 3)
+ t.equal(mp.read().toString(), 'aaa')
+ t.equal(mp.bufferLength, 0)
+ t.end()
+})
+
+t.test('emit resume event on resume', t => {
+ const mp = new MiniPass()
+ t.plan(3)
+ mp.on('resume', _ => t.pass('got resume event'))
+ mp.end('asdf')
+ t.equal(mp.flowing, false, 'not flowing yet')
+ mp.resume()
+ t.equal(mp.flowing, true, 'flowing after resume')
+})
+
+t.test('objectMode', t => {
+ const mp = new MiniPass({ objectMode: true })
+ const a = { a: 1 }
+ const b = { b: 1 }
+ const out = []
+ mp.on('data', c => out.push(c))
+ mp.on('end', _ => {
+ t.equal(out.length, 2)
+ t.equal(out[0], a)
+ t.equal(out[1], b)
+ t.same(out, [ { a: 1 }, { b: 1 } ], 'objs not munged')
+ t.end()
+ })
+ t.ok(mp.write(a))
+ t.ok(mp.write(b))
+ mp.end()
+})
+
+t.test('objectMode no encoding', t => {
+ const mp = new MiniPass({
+ objectMode: true,
+ encoding: 'utf8'
+ })
+ t.equal(mp.encoding, null)
+ const a = { a: 1 }
+ const b = { b: 1 }
+ const out = []
+ mp.on('data', c => out.push(c))
+ mp.on('end', _ => {
+ t.equal(out.length, 2)
+ t.equal(out[0], a)
+ t.equal(out[1], b)
+ t.same(out, [ { a: 1 }, { b: 1 } ], 'objs not munged')
+ t.end()
+ })
+ t.ok(mp.write(a))
+ t.ok(mp.write(b))
+ mp.end()
+})
+
+t.test('objectMode read() and buffering', t => {
+ const mp = new MiniPass({ objectMode: true })
+ const a = { a: 1 }
+ const b = { b: 1 }
+ t.notOk(mp.write(a))
+ t.notOk(mp.write(b))
+ t.equal(mp.read(2), a)
+ t.equal(mp.read(), b)
+ t.end()
+})
+
+t.test('set encoding in object mode throws', async t =>
+ t.throws(_ => new MiniPass({ objectMode: true }).encoding = 'utf8',
+ new Error('cannot set encoding in objectMode')))
+
+t.test('set encoding again throws', async t =>
+ t.throws(_ => {
+ const mp = new MiniPass({ encoding: 'hex' })
+ mp.write('ok')
+ mp.encoding = 'utf8'
+ }, new Error('cannot change encoding')))
+
+t.test('set encoding with existing buffer', async t => {
+ const mp = new MiniPass()
+ const butterfly = '🦋'
+ const butterbuf = new Buffer(butterfly)
+ mp.write(butterbuf.slice(0, 1))
+ mp.write(butterbuf.slice(1, 2))
+ mp.setEncoding('utf8')
+ mp.write(butterbuf.slice(2))
+ t.equal(mp.read(), butterfly)
+})
+
+t.test('end:false', async t => {
+ t.plan(1)
+ const mp = new MiniPass({ encoding: 'utf8' })
+ const d = new MiniPass({ encoding: 'utf8' })
+ d.end = () => t.threw(new Error('no end no exit no way out'))
+ d.on('data', c => t.equal(c, 'this is fine'))
+ mp.pipe(d, { end: false })
+ mp.end('this is fine')
+})
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minipass/test/empty-end.js b/node_modules/pacote/node_modules/tar/node_modules/minipass/test/empty-end.js
new file mode 100644
index 000000000..42387d51a
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minipass/test/empty-end.js
@@ -0,0 +1,38 @@
+const t = require('tap')
+const MP = require('../')
+
+t.test('emit end on resume', async t => {
+ const list = []
+ const mp = new MP()
+ mp.on('end', _ => list.push('end'))
+ mp.end()
+ t.notOk(mp.emittedEnd)
+ list.push('called end')
+ mp.resume()
+ t.ok(mp.emittedEnd)
+ list.push('called resume')
+ t.same(list, ['called end', 'end', 'called resume'])
+})
+
+t.test('emit end on read()', async t => {
+ const list = []
+ const mp = new MP()
+ mp.on('end', _ => list.push('end'))
+ mp.end()
+ list.push('called end')
+
+ mp.read()
+ list.push('called read()')
+ t.same(list, ['called end', 'end', 'called read()'])
+})
+
+t.test('emit end on data handler', async t => {
+ const list = []
+ const mp = new MP()
+ mp.on('end', _ => list.push('end'))
+ mp.end()
+ list.push('called end')
+ mp.on('data', _=>_)
+ list.push('added data handler')
+ t.same(list, ['called end', 'end', 'added data handler'])
+})
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minizlib/LICENSE b/node_modules/pacote/node_modules/tar/node_modules/minizlib/LICENSE
new file mode 100644
index 000000000..ffce7383f
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minizlib/LICENSE
@@ -0,0 +1,26 @@
+Minizlib was created by Isaac Z. Schlueter.
+It is a derivative work of the Node.js project.
+
+"""
+Copyright Isaac Z. Schlueter and Contributors
+Copyright Node.js contributors. All rights reserved.
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minizlib/README.md b/node_modules/pacote/node_modules/tar/node_modules/minizlib/README.md
new file mode 100644
index 000000000..2b585545e
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minizlib/README.md
@@ -0,0 +1,44 @@
+# minizlib
+
+A tiny fast zlib stream built on [minipass](http://npm.im/minipass)
+and Node.js's zlib binding.
+
+This module was created to serve the needs of
+[node-tar](http://npm.im/tar) v2. If your needs are different, then
+it may not be for you.
+
+## How does this differ from the streams in `require('zlib')`?
+
+First, there are no convenience methods to compress or decompress a
+buffer. If you want those, use the built-in `zlib` module. This is
+only streams.
+
+This module compresses and decompresses the data as fast as you feed
+it in. It is synchronous, and runs on the main process thread. Zlib
+operations can be high CPU, but they're very fast, and doing it this
+way means much less bookkeeping and artificial deferral.
+
+Node's built in zlib streams are built on top of `stream.Transform`.
+They do the maximally safe thing with respect to consistent
+asynchrony, buffering, and backpressure.
+
+This module _does_ support backpressure, and will buffer output chunks
+that are not consumed, but is less of a mediator between the input and
+output. There are no high or low watermarks, no state objects, and no
+artificial async deferrals. It will not protect you from Zalgo.
+
+If you write, data will be emitted right away. If you write
+everything synchronously in one tick, and you are listening to the
+`data` event to consume it, then it'll all be emitted right away in
+that same tick. If you want data to be emitted in the next tick, then
+write it in the next tick.
+
+It is thus the responsibility of the reader and writer to manage their
+own consumption and process execution flow.
+
+The goal is to compress and decompress as fast as possible, even for
+files that are too large to store all in one buffer.
+
+The API is very similar to the built-in zlib module. There are
+classes that you instantiate with `new` and they are streams that can
+be piped together.
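+
+As a minimal sketch of the classes defined below (the string and
+variable names here are only illustrative), a chunk can be
+round-tripped through a `Gzip` and a `Gunzip` stream, with every
+output chunk emitted synchronously as it is written:
+
+```javascript
+const zlib = require('minizlib')
+
+const gzip = new zlib.Gzip()
+const gunzip = new zlib.Gunzip()
+gzip.pipe(gunzip)
+
+const chunks = []
+gunzip.on('data', c => chunks.push(c))
+gunzip.on('end', () =>
+  console.log(Buffer.concat(chunks).toString())) // 'hello, world'
+
+// end() compresses the final chunk and flushes with Z_FINISH
+gzip.end('hello, world')
+```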
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minizlib/constants.js b/node_modules/pacote/node_modules/tar/node_modules/minizlib/constants.js
new file mode 100644
index 000000000..4edffde86
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minizlib/constants.js
@@ -0,0 +1,46 @@
+module.exports = Object.freeze({
+ Z_NO_FLUSH: 0,
+ Z_PARTIAL_FLUSH: 1,
+ Z_SYNC_FLUSH: 2,
+ Z_FULL_FLUSH: 3,
+ Z_FINISH: 4,
+ Z_BLOCK: 5,
+ Z_OK: 0,
+ Z_STREAM_END: 1,
+ Z_NEED_DICT: 2,
+ Z_ERRNO: -1,
+ Z_STREAM_ERROR: -2,
+ Z_DATA_ERROR: -3,
+ Z_MEM_ERROR: -4,
+ Z_BUF_ERROR: -5,
+ Z_VERSION_ERROR: -6,
+ Z_NO_COMPRESSION: 0,
+ Z_BEST_SPEED: 1,
+ Z_BEST_COMPRESSION: 9,
+ Z_DEFAULT_COMPRESSION: -1,
+ Z_FILTERED: 1,
+ Z_HUFFMAN_ONLY: 2,
+ Z_RLE: 3,
+ Z_FIXED: 4,
+ Z_DEFAULT_STRATEGY: 0,
+ ZLIB_VERNUM: 4736,
+ DEFLATE: 1,
+ INFLATE: 2,
+ GZIP: 3,
+ GUNZIP: 4,
+ DEFLATERAW: 5,
+ INFLATERAW: 6,
+ UNZIP: 7,
+ Z_MIN_WINDOWBITS: 8,
+ Z_MAX_WINDOWBITS: 15,
+ Z_DEFAULT_WINDOWBITS: 15,
+ Z_MIN_CHUNK: 64,
+ Z_MAX_CHUNK: Infinity,
+ Z_DEFAULT_CHUNK: 16384,
+ Z_MIN_MEMLEVEL: 1,
+ Z_MAX_MEMLEVEL: 9,
+ Z_DEFAULT_MEMLEVEL: 8,
+ Z_MIN_LEVEL: -1,
+ Z_MAX_LEVEL: 9,
+ Z_DEFAULT_LEVEL: -1
+})
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minizlib/index.js b/node_modules/pacote/node_modules/tar/node_modules/minizlib/index.js
new file mode 100644
index 000000000..7d595dec4
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minizlib/index.js
@@ -0,0 +1,333 @@
+'use strict'
+
+const assert = require('assert')
+const Buffer = require('buffer').Buffer
+const binding = process.binding('zlib')
+
+const constants = exports.constants = require('./constants.js')
+const MiniPass = require('minipass')
+
+// translation table for return codes.
+const codes = new Map([
+ [constants.Z_OK, 'Z_OK'],
+ [constants.Z_STREAM_END, 'Z_STREAM_END'],
+ [constants.Z_NEED_DICT, 'Z_NEED_DICT'],
+ [constants.Z_ERRNO, 'Z_ERRNO'],
+ [constants.Z_STREAM_ERROR, 'Z_STREAM_ERROR'],
+ [constants.Z_DATA_ERROR, 'Z_DATA_ERROR'],
+ [constants.Z_MEM_ERROR, 'Z_MEM_ERROR'],
+ [constants.Z_BUF_ERROR, 'Z_BUF_ERROR'],
+ [constants.Z_VERSION_ERROR, 'Z_VERSION_ERROR']
+])
+
+const validFlushFlags = new Set([
+ constants.Z_NO_FLUSH,
+ constants.Z_PARTIAL_FLUSH,
+ constants.Z_SYNC_FLUSH,
+ constants.Z_FULL_FLUSH,
+ constants.Z_FINISH,
+ constants.Z_BLOCK
+])
+
+const strategies = new Set([
+ constants.Z_FILTERED,
+ constants.Z_HUFFMAN_ONLY,
+ constants.Z_RLE,
+ constants.Z_FIXED,
+ constants.Z_DEFAULT_STRATEGY
+])
+
+// the Zlib class they all inherit from
+// This thing manages the queue of requests, and returns
+// true or false if there is anything in the queue when
+// you call the .write() method.
+const _opts = Symbol('opts')
+const _chunkSize = Symbol('chunkSize')
+const _flushFlag = Symbol('flushFlag')
+const _finishFlush = Symbol('finishFlush')
+const _handle = Symbol('handle')
+const _hadError = Symbol('hadError')
+const _buffer = Symbol('buffer')
+const _offset = Symbol('offset')
+const _level = Symbol('level')
+const _strategy = Symbol('strategy')
+const _ended = Symbol('ended')
+
+class Zlib extends MiniPass {
+ constructor (opts, mode) {
+ super(opts)
+ this[_ended] = false
+ this[_opts] = opts = opts || {}
+ this[_chunkSize] = opts.chunkSize || constants.Z_DEFAULT_CHUNK
+ if (opts.flush && !validFlushFlags.has(opts.flush)) {
+ throw new Error('Invalid flush flag: ' + opts.flush)
+ }
+ if (opts.finishFlush && !validFlushFlags.has(opts.finishFlush)) {
+ throw new Error('Invalid flush flag: ' + opts.finishFlush)
+ }
+
+ this[_flushFlag] = opts.flush || constants.Z_NO_FLUSH
+ this[_finishFlush] = typeof opts.finishFlush !== 'undefined' ?
+ opts.finishFlush : constants.Z_FINISH
+
+ if (opts.chunkSize) {
+ if (opts.chunkSize < constants.Z_MIN_CHUNK) {
+ throw new Error('Invalid chunk size: ' + opts.chunkSize)
+ }
+ }
+
+ if (opts.windowBits) {
+ if (opts.windowBits < constants.Z_MIN_WINDOWBITS ||
+ opts.windowBits > constants.Z_MAX_WINDOWBITS) {
+ throw new Error('Invalid windowBits: ' + opts.windowBits)
+ }
+ }
+
+ if (opts.level) {
+ if (opts.level < constants.Z_MIN_LEVEL ||
+ opts.level > constants.Z_MAX_LEVEL) {
+ throw new Error('Invalid compression level: ' + opts.level)
+ }
+ }
+
+ if (opts.memLevel) {
+ if (opts.memLevel < constants.Z_MIN_MEMLEVEL ||
+ opts.memLevel > constants.Z_MAX_MEMLEVEL) {
+ throw new Error('Invalid memLevel: ' + opts.memLevel)
+ }
+ }
+
+ if (opts.strategy && !(strategies.has(opts.strategy)))
+ throw new Error('Invalid strategy: ' + opts.strategy)
+
+ if (opts.dictionary) {
+ if (!(opts.dictionary instanceof Buffer)) {
+ throw new Error('Invalid dictionary: it should be a Buffer instance')
+ }
+ }
+
+ this[_handle] = new binding.Zlib(mode)
+
+ this[_hadError] = false
+ this[_handle].onerror = (message, errno) => {
+ // there is no way to cleanly recover.
+ // continuing only obscures problems.
+ this.close()
+ this[_hadError] = true
+
+ const error = new Error(message)
+ error.errno = errno
+ error.code = codes.get(errno)
+ this.emit('error', error)
+ }
+
+ const level = typeof opts.level === 'number' ? opts.level
+ : constants.Z_DEFAULT_COMPRESSION
+
+    const strategy = typeof opts.strategy === 'number' ? opts.strategy
+ : constants.Z_DEFAULT_STRATEGY
+
+ this[_handle].init(opts.windowBits || constants.Z_DEFAULT_WINDOWBITS,
+ level,
+ opts.memLevel || constants.Z_DEFAULT_MEMLEVEL,
+ strategy,
+ opts.dictionary)
+
+ this[_buffer] = Buffer.allocUnsafe(this[_chunkSize])
+ this[_offset] = 0
+ this[_level] = level
+ this[_strategy] = strategy
+
+ this.once('end', this.close)
+ }
+
+ close () {
+ if (this[_handle]) {
+ this[_handle].close()
+ this[_handle] = null
+ this.emit('close')
+ }
+ }
+
+ params (level, strategy) {
+ if (!this[_handle])
+ throw new Error('cannot switch params when binding is closed')
+
+ // no way to test this without also not supporting params at all
+ /* istanbul ignore if */
+ if (!this[_handle].params)
+ throw new Error('not supported in this implementation')
+
+ if (level < constants.Z_MIN_LEVEL ||
+ level > constants.Z_MAX_LEVEL) {
+ throw new RangeError('Invalid compression level: ' + level)
+ }
+
+ if (!(strategies.has(strategy)))
+ throw new TypeError('Invalid strategy: ' + strategy)
+
+ if (this[_level] !== level || this[_strategy] !== strategy) {
+ this.flush(constants.Z_SYNC_FLUSH)
+ assert(this[_handle], 'zlib binding closed')
+ this[_handle].params(level, strategy)
+ /* istanbul ignore else */
+ if (!this[_hadError]) {
+ this[_level] = level
+ this[_strategy] = strategy
+ }
+ }
+ }
+
+ reset () {
+ assert(this[_handle], 'zlib binding closed')
+ return this[_handle].reset()
+ }
+
+ flush (kind) {
+ if (kind === undefined)
+ kind = constants.Z_FULL_FLUSH
+
+ if (this.ended)
+ return
+
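+    // temporarily swap in the requested flush flag, run a zero-length
+    // write through the binding so any pending output is emitted, then
+    // restore the previous flag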
+ const flushFlag = this[_flushFlag]
+ this[_flushFlag] = kind
+ this.write(Buffer.alloc(0))
+ this[_flushFlag] = flushFlag
+ }
+
+ end (chunk, encoding, cb) {
+ if (chunk)
+ this.write(chunk, encoding)
+ this.flush(this[_finishFlush])
+ this[_ended] = true
+ return super.end(null, null, cb)
+ }
+
+ get ended () {
+ return this[_ended]
+ }
+
+ write (chunk, encoding, cb) {
+ // process the chunk using the sync process
+ // then super.write() all the outputted chunks
+ if (typeof encoding === 'function')
+ cb = encoding, encoding = 'utf8'
+
+ if (typeof chunk === 'string')
+ chunk = new Buffer(chunk, encoding)
+
+ let availInBefore = chunk && chunk.length
+ let availOutBefore = this[_chunkSize] - this[_offset]
+ let inOff = 0 // the offset of the input buffer
+ const flushFlag = this[_flushFlag]
+ let writeReturn = true
+
+ assert(this[_handle], 'zlib binding closed')
+ do {
+ let res = this[_handle].writeSync(
+ flushFlag,
+ chunk, // in
+ inOff, // in_off
+ availInBefore, // in_len
+ this[_buffer], // out
+      this[_offset], // out_off
+ availOutBefore // out_len
+ )
+ if (this[_hadError])
+ break
+
+ let availInAfter = res[0]
+ let availOutAfter = res[1]
+
+ const have = availOutBefore - availOutAfter
+ assert(have >= 0, 'have should not go down')
+
+ if (have > 0) {
+ const out = this[_buffer].slice(
+ this[_offset], this[_offset] + have
+ )
+
+ this[_offset] += have
+ // serve some output to the consumer.
+ writeReturn = super.write(out) && writeReturn
+ }
+
+      // exhausted the output buffer, or used it all up; create a new one.
+ if (availOutAfter === 0 || this[_offset] >= this[_chunkSize]) {
+ availOutBefore = this[_chunkSize]
+ this[_offset] = 0
+ this[_buffer] = Buffer.allocUnsafe(this[_chunkSize])
+ }
+
+ if (availOutAfter === 0) {
+ // Not actually done. Need to reprocess.
+ // Also, update the availInBefore to the availInAfter value,
+ // so that if we have to hit it a third (fourth, etc.) time,
+ // it'll have the correct byte counts.
+ inOff += (availInBefore - availInAfter)
+ availInBefore = availInAfter
+ continue
+ }
+ break
+ } while (!this[_hadError])
+
+ if (cb)
+ cb()
+ return writeReturn
+ }
+}
+
+// minimal 2-byte header
+class Deflate extends Zlib {
+ constructor (opts) {
+ super(opts, constants.DEFLATE)
+ }
+}
+
+class Inflate extends Zlib {
+ constructor (opts) {
+ super(opts, constants.INFLATE)
+ }
+}
+
+// gzip - bigger header, same deflate compression
+class Gzip extends Zlib {
+ constructor (opts) {
+ super(opts, constants.GZIP)
+ }
+}
+
+class Gunzip extends Zlib {
+ constructor (opts) {
+ super(opts, constants.GUNZIP)
+ }
+}
+
+// raw - no header
+class DeflateRaw extends Zlib {
+ constructor (opts) {
+ super(opts, constants.DEFLATERAW)
+ }
+}
+
+class InflateRaw extends Zlib {
+ constructor (opts) {
+ super(opts, constants.INFLATERAW)
+ }
+}
+
+// auto-detect header.
+class Unzip extends Zlib {
+ constructor (opts) {
+ super(opts, constants.UNZIP)
+ }
+}
+
+exports.Deflate = Deflate
+exports.Inflate = Inflate
+exports.Gzip = Gzip
+exports.Gunzip = Gunzip
+exports.DeflateRaw = DeflateRaw
+exports.InflateRaw = InflateRaw
+exports.Unzip = Unzip
diff --git a/node_modules/pacote/node_modules/tar/node_modules/minizlib/package.json b/node_modules/pacote/node_modules/tar/node_modules/minizlib/package.json
new file mode 100644
index 000000000..a58da8064
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/minizlib/package.json
@@ -0,0 +1,71 @@
+{
+ "_from": "minizlib@^1.0.3",
+ "_id": "minizlib@1.0.3",
+ "_inBundle": false,
+ "_integrity": "sha1-1cGr93vhVGGZUuJTM27Mq5sqMvU=",
+ "_location": "/pacote/tar/minizlib",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "minizlib@^1.0.3",
+ "name": "minizlib",
+ "escapedName": "minizlib",
+ "rawSpec": "^1.0.3",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.3"
+ },
+ "_requiredBy": [
+ "/pacote/tar"
+ ],
+ "_resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.0.3.tgz",
+ "_shasum": "d5c1abf77be154619952e253336eccab9b2a32f5",
+ "_spec": "minizlib@^1.0.3",
+ "_where": "/Users/rebecca/code/npm/node_modules/pacote/node_modules/tar",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/minizlib/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "minipass": "^2.0.0"
+ },
+ "deprecated": false,
+ "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
+ "devDependencies": {
+ "tap": "^10.3.0"
+ },
+ "files": [
+ "index.js",
+ "constants.js"
+ ],
+ "homepage": "https://github.com/isaacs/minizlib#readme",
+ "keywords": [
+ "zlib",
+ "gzip",
+ "gunzip",
+ "deflate",
+ "inflate",
+ "compression",
+ "zip",
+ "unzip"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "minizlib",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/minizlib.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100 -J"
+ },
+ "version": "1.0.3"
+}
diff --git a/node_modules/pacote/node_modules/tar/node_modules/yallist/LICENSE b/node_modules/pacote/node_modules/tar/node_modules/yallist/LICENSE
new file mode 100644
index 000000000..19129e315
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/yallist/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/tar/node_modules/yallist/README.md b/node_modules/pacote/node_modules/tar/node_modules/yallist/README.md
new file mode 100644
index 000000000..f58610186
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/yallist/README.md
@@ -0,0 +1,204 @@
+# yallist
+
+Yet Another Linked List
+
+There are many doubly-linked list implementations like it, but this
+one is mine.
+
+For when an array would be too big, and a Map can't be iterated in
+reverse order.
+
+
+[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist)
+
+## basic usage
+
+```javascript
+var yallist = require('yallist')
+var myList = yallist.create([1, 2, 3])
+myList.push('foo')
+myList.unshift('bar')
+// of course pop() and shift() are there, too
+console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo']
+myList.forEach(function (k) {
+ // walk the list head to tail
+})
+myList.forEachReverse(function (k, index, list) {
+ // walk the list tail to head
+})
+var myDoubledList = myList.map(function (k) {
+ return k + k
+})
+// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo']
+// mapReverse is also a thing
+var myDoubledListReverse = myList.mapReverse(function (k) {
+ return k + k
+}) // ['foofoo', 6, 4, 2, 'barbar']
+
+var reduced = myList.reduce(function (set, entry) {
+ set += entry
+ return set
+}, 'start')
+console.log(reduced) // 'startfoo123bar'
+```
+
+## api
+
+The whole API is considered "public".
+
+Functions with the same name as an Array method work more or less the
+same way.
+
+There are reverse versions of most things, because that's the point.
+
+### Yallist
+
+Default export, the class that holds and manages a list.
+
+Call it with either a forEach-able (like an array) or a set of
+arguments, to initialize the list.
+
+The Array-ish methods all act like you'd expect. No magic length,
+though, so if you change that it won't automatically prune or add
+empty spots.
+
+### Yallist.create(..)
+
+Alias for Yallist function. Some people like factories.
+
+#### yallist.head
+
+The first node in the list
+
+#### yallist.tail
+
+The last node in the list
+
+#### yallist.length
+
+The number of nodes in the list. (Change this at your peril. It is
+not magic like Array length.)
+
+#### yallist.toArray()
+
+Convert the list to an array.
+
+#### yallist.forEach(fn, [thisp])
+
+Call a function on each item in the list.
+
+#### yallist.forEachReverse(fn, [thisp])
+
+Call a function on each item in the list, in reverse order.
+
+#### yallist.get(n)
+
+Get the data at position `n` in the list. If you use this a lot, you're
+probably better off just using an Array.
+
+#### yallist.getReverse(n)
+
+Get the data at position `n`, counting from the tail.
+
+#### yallist.map(fn, thisp)
+
+Create a new Yallist with the result of calling the function on each
+item.
+
+#### yallist.mapReverse(fn, thisp)
+
+Same as `map`, but in reverse.
+
+#### yallist.pop()
+
+Get the data from the list tail, and remove the tail from the list.
+
+#### yallist.push(item, ...)
+
+Insert one or more items to the tail of the list.
+
+#### yallist.reduce(fn, initialValue)
+
+Like Array.reduce.
+
+#### yallist.reduceReverse(fn, initialValue)
+
+Like Array.reduce, but in reverse.
+
+#### yallist.reverse()
+
+Reverse the list in place.
+
+#### yallist.shift()
+
+Get the data from the list head, and remove the head from the list.
+
+#### yallist.slice([from], [to])
+
+Just like Array.slice, but returns a new Yallist.
+
+#### yallist.sliceReverse([from], [to])
+
+Just like yallist.slice, but the result is returned in reverse.
+
+#### yallist.toArray()
+
+Create an array representation of the list.
+
+#### yallist.toArrayReverse()
+
+Create a reversed array representation of the list.
+
+#### yallist.unshift(item, ...)
+
+Insert one or more items to the head of the list.
+
+#### yallist.unshiftNode(node)
+
+Move a Node object to the front of the list. (That is, pull it out of
+wherever it lives, and make it the new head.)
+
+If the node belongs to a different list, then that list will remove it
+first.
+
+#### yallist.pushNode(node)
+
+Move a Node object to the end of the list. (That is, pull it out of
+wherever it lives, and make it the new tail.)
+
+If the node belongs to a list already, then that list will remove it
+first.
+
+#### yallist.removeNode(node)
+
+Remove a node from the list, preserving referential integrity of head
+and tail and other nodes.
+
+Will throw an error if you try to have a list remove a node that
+doesn't belong to it.
+
+### Yallist.Node
+
+The class that holds the data and is actually the list.
+
+Call with `var n = new Node(value, previousNode, nextNode)`
+
+Note that if you do direct operations on Nodes themselves, it's very
+easy to get into weird states where the list is broken. Be careful :)
+
+#### node.next
+
+The next node in the list.
+
+#### node.prev
+
+The previous node in the list.
+
+#### node.value
+
+The data the node contains.
+
+#### node.list
+
+The list to which this node belongs. (Null if it does not belong to
+any list.)
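+
+As a short sketch of the node-moving methods described above (the
+values here are illustrative):
+
+```javascript
+var Yallist = require('yallist')
+
+var list = new Yallist(['a', 'b', 'c'])
+
+// pull the middle node out of place and make it the new head
+list.unshiftNode(list.head.next)
+console.log(list.toArray()) // ['b', 'a', 'c']
+
+// removeNode keeps head, tail, and the surviving links intact
+list.removeNode(list.tail)
+console.log(list.toArray()) // ['b', 'a']
+console.log(list.tail.value) // 'a'
+```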
diff --git a/node_modules/pacote/node_modules/tar/node_modules/yallist/iterator.js b/node_modules/pacote/node_modules/tar/node_modules/yallist/iterator.js
new file mode 100644
index 000000000..9149b3648
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/yallist/iterator.js
@@ -0,0 +1,8 @@
+'use strict'
+var Yallist = require('./yallist.js')
+
+Yallist.prototype[Symbol.iterator] = function* () {
+ for (let walker = this.head; walker; walker = walker.next) {
+ yield walker.value
+ }
+}
diff --git a/node_modules/pacote/node_modules/tar/node_modules/yallist/package.json b/node_modules/pacote/node_modules/tar/node_modules/yallist/package.json
new file mode 100644
index 000000000..65dfe3251
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/yallist/package.json
@@ -0,0 +1,63 @@
+{
+ "_from": "yallist@^3.0.2",
+ "_id": "yallist@3.0.2",
+ "_inBundle": false,
+ "_integrity": "sha1-hFK0u36Dx8GI2AQcGoN8dz1ti7k=",
+ "_location": "/pacote/tar/yallist",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "yallist@^3.0.2",
+ "name": "yallist",
+ "escapedName": "yallist",
+ "rawSpec": "^3.0.2",
+ "saveSpec": null,
+ "fetchSpec": "^3.0.2"
+ },
+ "_requiredBy": [
+ "/pacote/tar",
+ "/pacote/tar/minipass"
+ ],
+ "_resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.2.tgz",
+ "_shasum": "8452b4bb7e83c7c188d8041c1a837c773d6d8bb9",
+ "_spec": "yallist@^3.0.2",
+ "_where": "/Users/rebecca/code/npm/node_modules/pacote/node_modules/tar",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/yallist/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {},
+ "deprecated": false,
+ "description": "Yet Another Linked List",
+ "devDependencies": {
+ "tap": "^10.3.0"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "files": [
+ "yallist.js",
+ "iterator.js"
+ ],
+ "homepage": "https://github.com/isaacs/yallist#readme",
+ "license": "ISC",
+ "main": "yallist.js",
+ "name": "yallist",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/yallist.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100"
+ },
+ "version": "3.0.2"
+}
diff --git a/node_modules/pacote/node_modules/tar/node_modules/yallist/yallist.js b/node_modules/pacote/node_modules/tar/node_modules/yallist/yallist.js
new file mode 100644
index 000000000..4805bc69f
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/node_modules/yallist/yallist.js
@@ -0,0 +1,376 @@
+'use strict'
+module.exports = Yallist
+
+Yallist.Node = Node
+Yallist.create = Yallist
+
+function Yallist (list) {
+ var self = this
+ if (!(self instanceof Yallist)) {
+ self = new Yallist()
+ }
+
+ self.tail = null
+ self.head = null
+ self.length = 0
+
+ if (list && typeof list.forEach === 'function') {
+ list.forEach(function (item) {
+ self.push(item)
+ })
+ } else if (arguments.length > 0) {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ self.push(arguments[i])
+ }
+ }
+
+ return self
+}
+
+Yallist.prototype.removeNode = function (node) {
+ if (node.list !== this) {
+ throw new Error('removing node which does not belong to this list')
+ }
+
+ var next = node.next
+ var prev = node.prev
+
+ if (next) {
+ next.prev = prev
+ }
+
+ if (prev) {
+ prev.next = next
+ }
+
+ if (node === this.head) {
+ this.head = next
+ }
+ if (node === this.tail) {
+ this.tail = prev
+ }
+
+ node.list.length--
+ node.next = null
+ node.prev = null
+ node.list = null
+}
+
+Yallist.prototype.unshiftNode = function (node) {
+ if (node === this.head) {
+ return
+ }
+
+ if (node.list) {
+ node.list.removeNode(node)
+ }
+
+ var head = this.head
+ node.list = this
+ node.next = head
+ if (head) {
+ head.prev = node
+ }
+
+ this.head = node
+ if (!this.tail) {
+ this.tail = node
+ }
+ this.length++
+}
+
+Yallist.prototype.pushNode = function (node) {
+ if (node === this.tail) {
+ return
+ }
+
+ if (node.list) {
+ node.list.removeNode(node)
+ }
+
+ var tail = this.tail
+ node.list = this
+ node.prev = tail
+ if (tail) {
+ tail.next = node
+ }
+
+ this.tail = node
+ if (!this.head) {
+ this.head = node
+ }
+ this.length++
+}
+
+Yallist.prototype.push = function () {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ push(this, arguments[i])
+ }
+ return this.length
+}
+
+Yallist.prototype.unshift = function () {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ unshift(this, arguments[i])
+ }
+ return this.length
+}
+
+Yallist.prototype.pop = function () {
+ if (!this.tail) {
+ return undefined
+ }
+
+ var res = this.tail.value
+ this.tail = this.tail.prev
+ if (this.tail) {
+ this.tail.next = null
+ } else {
+ this.head = null
+ }
+ this.length--
+ return res
+}
+
+Yallist.prototype.shift = function () {
+ if (!this.head) {
+ return undefined
+ }
+
+ var res = this.head.value
+ this.head = this.head.next
+ if (this.head) {
+ this.head.prev = null
+ } else {
+ this.tail = null
+ }
+ this.length--
+ return res
+}
+
+Yallist.prototype.forEach = function (fn, thisp) {
+ thisp = thisp || this
+ for (var walker = this.head, i = 0; walker !== null; i++) {
+ fn.call(thisp, walker.value, i, this)
+ walker = walker.next
+ }
+}
+
+Yallist.prototype.forEachReverse = function (fn, thisp) {
+ thisp = thisp || this
+ for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
+ fn.call(thisp, walker.value, i, this)
+ walker = walker.prev
+ }
+}
+
+Yallist.prototype.get = function (n) {
+ for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
+ // abort out of the list early if we hit a cycle
+ walker = walker.next
+ }
+ if (i === n && walker !== null) {
+ return walker.value
+ }
+}
+
+Yallist.prototype.getReverse = function (n) {
+ for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
+ // abort out of the list early if we hit a cycle
+ walker = walker.prev
+ }
+ if (i === n && walker !== null) {
+ return walker.value
+ }
+}
+
+Yallist.prototype.map = function (fn, thisp) {
+ thisp = thisp || this
+ var res = new Yallist()
+ for (var walker = this.head; walker !== null;) {
+ res.push(fn.call(thisp, walker.value, this))
+ walker = walker.next
+ }
+ return res
+}
+
+Yallist.prototype.mapReverse = function (fn, thisp) {
+ thisp = thisp || this
+ var res = new Yallist()
+ for (var walker = this.tail; walker !== null;) {
+ res.push(fn.call(thisp, walker.value, this))
+ walker = walker.prev
+ }
+ return res
+}
+
+Yallist.prototype.reduce = function (fn, initial) {
+ var acc
+ var walker = this.head
+ if (arguments.length > 1) {
+ acc = initial
+ } else if (this.head) {
+ walker = this.head.next
+ acc = this.head.value
+ } else {
+ throw new TypeError('Reduce of empty list with no initial value')
+ }
+
+ for (var i = 0; walker !== null; i++) {
+ acc = fn(acc, walker.value, i)
+ walker = walker.next
+ }
+
+ return acc
+}
+
+Yallist.prototype.reduceReverse = function (fn, initial) {
+ var acc
+ var walker = this.tail
+ if (arguments.length > 1) {
+ acc = initial
+ } else if (this.tail) {
+ walker = this.tail.prev
+ acc = this.tail.value
+ } else {
+ throw new TypeError('Reduce of empty list with no initial value')
+ }
+
+ for (var i = this.length - 1; walker !== null; i--) {
+ acc = fn(acc, walker.value, i)
+ walker = walker.prev
+ }
+
+ return acc
+}
+
+Yallist.prototype.toArray = function () {
+ var arr = new Array(this.length)
+ for (var i = 0, walker = this.head; walker !== null; i++) {
+ arr[i] = walker.value
+ walker = walker.next
+ }
+ return arr
+}
+
+Yallist.prototype.toArrayReverse = function () {
+ var arr = new Array(this.length)
+ for (var i = 0, walker = this.tail; walker !== null; i++) {
+ arr[i] = walker.value
+ walker = walker.prev
+ }
+ return arr
+}
+
+Yallist.prototype.slice = function (from, to) {
+ to = to || this.length
+ if (to < 0) {
+ to += this.length
+ }
+ from = from || 0
+ if (from < 0) {
+ from += this.length
+ }
+ var ret = new Yallist()
+ if (to < from || to < 0) {
+ return ret
+ }
+ if (from < 0) {
+ from = 0
+ }
+ if (to > this.length) {
+ to = this.length
+ }
+ for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
+ walker = walker.next
+ }
+ for (; walker !== null && i < to; i++, walker = walker.next) {
+ ret.push(walker.value)
+ }
+ return ret
+}
+
+Yallist.prototype.sliceReverse = function (from, to) {
+ to = to || this.length
+ if (to < 0) {
+ to += this.length
+ }
+ from = from || 0
+ if (from < 0) {
+ from += this.length
+ }
+ var ret = new Yallist()
+ if (to < from || to < 0) {
+ return ret
+ }
+ if (from < 0) {
+ from = 0
+ }
+ if (to > this.length) {
+ to = this.length
+ }
+ for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
+ walker = walker.prev
+ }
+ for (; walker !== null && i > from; i--, walker = walker.prev) {
+ ret.push(walker.value)
+ }
+ return ret
+}
+
+Yallist.prototype.reverse = function () {
+ var head = this.head
+ var tail = this.tail
+ for (var walker = head; walker !== null; walker = walker.prev) {
+ var p = walker.prev
+ walker.prev = walker.next
+ walker.next = p
+ }
+ this.head = tail
+ this.tail = head
+ return this
+}
+
+function push (self, item) {
+ self.tail = new Node(item, self.tail, null, self)
+ if (!self.head) {
+ self.head = self.tail
+ }
+ self.length++
+}
+
+function unshift (self, item) {
+ self.head = new Node(item, null, self.head, self)
+ if (!self.tail) {
+ self.tail = self.head
+ }
+ self.length++
+}
+
+function Node (value, prev, next, list) {
+ if (!(this instanceof Node)) {
+ return new Node(value, prev, next, list)
+ }
+
+ this.list = list
+ this.value = value
+
+ if (prev) {
+ prev.next = this
+ this.prev = prev
+ } else {
+ this.prev = null
+ }
+
+ if (next) {
+ next.prev = this
+ this.next = next
+ } else {
+ this.next = null
+ }
+}
+
+try {
+  // add if support for Symbol.iterator is present
+ require('./iterator.js')
+} catch (er) {}
diff --git a/node_modules/pacote/node_modules/tar/package.json b/node_modules/pacote/node_modules/tar/package.json
new file mode 100644
index 000000000..bb5dc2952
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/package.json
@@ -0,0 +1,76 @@
+{
+ "_from": "tar@^4.0.0",
+ "_id": "tar@4.0.1",
+ "_inBundle": false,
+ "_integrity": "sha512-XBpU+/azPOMvE5m2Tn7Sl6U1ahpGfe77LkdrAlFilwrgHZsR+2iy0l8klQtfJNM+DACZO2Xrw10MTyQRB4du5A==",
+ "_location": "/pacote/tar",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "tar@^4.0.0",
+ "name": "tar",
+ "escapedName": "tar",
+ "rawSpec": "^4.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^4.0.0"
+ },
+ "_requiredBy": [
+ "/pacote"
+ ],
+ "_resolved": "https://registry.npmjs.org/tar/-/tar-4.0.1.tgz",
+ "_shasum": "3f5b2e5289db30c2abe4c960f43d0d9fff96aaf0",
+ "_spec": "tar@^4.0.0",
+ "_where": "/Users/rebecca/code/npm/node_modules/pacote",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/node-tar/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "chownr": "^1.0.1",
+ "minipass": "^2.0.2",
+ "minizlib": "^1.0.3",
+ "mkdirp": "^0.5.0",
+ "yallist": "^3.0.2"
+ },
+ "deprecated": false,
+ "description": "tar for node",
+ "devDependencies": {
+ "chmodr": "^1.0.2",
+ "end-of-stream": "^1.4.0",
+ "events-to-array": "^1.1.2",
+ "mutate-fs": "^1.1.0",
+ "rimraf": "1.x",
+ "tap": "^10.3.3",
+ "tar-fs": "^1.15.2",
+ "tar-stream": "^1.5.2"
+ },
+ "engines": {
+ "node": ">=4.5"
+ },
+ "files": [
+ "index.js",
+ "lib/"
+ ],
+ "homepage": "https://github.com/npm/node-tar#readme",
+ "license": "ISC",
+ "name": "tar",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/node-tar.git"
+ },
+ "scripts": {
+ "bench": "for i in benchmarks/*/*.js; do echo $i; for j in {1..5}; do node $i || break; done; done",
+ "genparse": "node scripts/generate-parse-fixtures.js",
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100 -J --coverage-report=text"
+ },
+ "version": "4.0.1"
+}
diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json
index 4043b392e..944aa7a33 100644
--- a/node_modules/pacote/package.json
+++ b/node_modules/pacote/package.json
@@ -1,8 +1,8 @@
{
- "_from": "pacote@~4.0.0",
- "_id": "pacote@4.0.0",
+ "_from": "pacote@latest",
+ "_id": "pacote@6.0.1",
"_inBundle": false,
- "_integrity": "sha512-0RMbJefjPxG5De2Lra0j+tu7vrJkhilhuePZA4EByjqLHF60feW/zgbfDDUerx35Bpl8t2+rl+FpofSyWxf0fg==",
+ "_integrity": "sha512-BvGf8BnnkMcTqJk7MLimR8GWOsG/BJ+st4LS+Q82LoetVUQ1bqwoDw/WEjbUgfKdLIG/wKk9OWk9Zce8sNbPIw==",
"_location": "/pacote",
"_phantomChildren": {
"cacache": "9.2.9",
@@ -11,31 +11,29 @@
"mississippi": "1.3.0",
"mkdirp": "0.5.1",
"npm-package-arg": "5.1.2",
- "once": "1.4.0",
- "readable-stream": "2.3.3",
"retry": "0.10.1",
"safe-buffer": "5.1.1",
"semver": "5.4.1",
"ssri": "4.1.6"
},
"_requested": {
- "type": "range",
+ "type": "tag",
"registry": true,
- "raw": "pacote@~4.0.0",
+ "raw": "pacote@latest",
"name": "pacote",
"escapedName": "pacote",
- "rawSpec": "~4.0.0",
+ "rawSpec": "latest",
"saveSpec": null,
- "fetchSpec": "~4.0.0"
+ "fetchSpec": "latest"
},
"_requiredBy": [
"#USER",
"/"
],
- "_resolved": "https://registry.npmjs.org/pacote/-/pacote-4.0.0.tgz",
- "_shasum": "fca68a291e424229db2d7a500589fed7ae01c0a7",
- "_spec": "pacote@~4.0.0",
- "_where": "/Users/zkat/Documents/code/npm",
+ "_resolved": "https://registry.npmjs.org/pacote/-/pacote-6.0.1.tgz",
+ "_shasum": "4428b8d763f9a141ad32123948eedd5f697449b5",
+ "_spec": "pacote@latest",
+ "_where": "/Users/rebecca/code/npm",
"author": {
"name": "Kat Marchán",
"email": "kzm@sykosomatic.org"
@@ -64,32 +62,33 @@
"mississippi": "^1.2.0",
"normalize-package-data": "^2.4.0",
"npm-package-arg": "^5.1.2",
+ "npm-packlist": "^1.1.6",
"npm-pick-manifest": "^1.0.4",
"osenv": "^0.1.4",
"promise-inflight": "^1.0.1",
"promise-retry": "^1.1.1",
"protoduck": "^4.0.0",
"safe-buffer": "^5.1.1",
- "semver": "^5.3.0",
+ "semver": "^5.4.1",
"ssri": "^4.1.6",
- "tar-fs": "^1.15.3",
- "tar-stream": "^1.5.4",
+ "tar": "^4.0.0",
"unique-filename": "^1.1.0",
- "which": "^1.2.12"
+ "which": "^1.3.0"
},
"deprecated": false,
"description": "JavaScript package downloader",
"devDependencies": {
"mkdirp": "^0.5.1",
- "nock": "^9.0.13",
+ "nock": "^9.0.14",
"npmlog": "^4.1.2",
- "nyc": "^11.0.3",
+ "nyc": "^11.1.0",
"require-inject": "^1.4.2",
"rimraf": "^2.5.4",
- "standard": "^10.0.1",
+ "standard": "^10.0.3",
"standard-version": "^4.2.0",
"tacks": "^1.2.6",
- "tap": "^10.7.0",
+ "tap": "^10.7.2",
+ "tar-stream": "^1.5.4",
"weallbehave": "^1.2.0",
"weallcontribute": "^1.0.7"
},
@@ -120,5 +119,5 @@
"update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
"update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
},
- "version": "4.0.0"
+ "version": "6.0.1"
}